+2017-10-06  Commit Queue  <commit-queue@webkit.org>
+
+        Unreviewed, rolling out r222791 and r222873.
+        https://bugs.webkit.org/show_bug.cgi?id=178031
+
+        Caused crashes with workers/wasm LayoutTests (Requested by
+        ryanhaddad on #webkit).
+
+        Reverted changesets:
+
+        "WebAssembly: no VM / JS version of everything but Instance"
+        https://bugs.webkit.org/show_bug.cgi?id=177473
+        http://trac.webkit.org/changeset/222791
+
+        "WebAssembly: address no VM / JS follow-ups"
+        https://bugs.webkit.org/show_bug.cgi?id=177887
+        http://trac.webkit.org/changeset/222873
+
2017-10-05  Saam Barati  <sbarati@apple.com>

        Make sure all prototypes under poly proto get added into the VM's prototype map
assertEq(buffer.byteLength, 2*64*1024);
// This shouldn't neuter the buffer since it fails.
- assertThrows(() => memory.grow(1000), RangeError, "WebAssembly.Memory.grow would exceed the memory's declared maximum size");
+ assertThrows(() => memory.grow(1000), Error, "Out of memory");
assertEq(buffer.byteLength, 2*64*1024);
assertEq(memory.buffer, buffer);
}
"WebAssembly.Module doesn't parse at byte 29 / 32: resizable limits has a initial page count of 4294967295 which is greater than its maximum 4294967294 (evaluating 'new WebAssembly.Module(builder.WebAssembly().get())')",
"WebAssembly.Module doesn't parse at byte 37 / 43: resizable limits has a initial page count of 4294967295 which is greater than its maximum 4294967294 (evaluating 'new WebAssembly.Module(builder.WebAssembly().get())')"],
[{initial: 2**31, element: "anyfunc"},
- "WebAssembly.Module doesn't parse at byte 24 / 27: Table's initial page count of 2147483648 is too big, maximum 10000000 (evaluating 'new WebAssembly.Module(builder.WebAssembly().get())')",
- "WebAssembly.Module doesn't parse at byte 32 / 38: Table's initial page count of 2147483648 is too big, maximum 10000000 (evaluating 'new WebAssembly.Module(builder.WebAssembly().get())')"],
+ "WebAssembly.Module doesn't parse at byte 24 / 27: Table's initial page count of 2147483648 is invalid (evaluating 'new WebAssembly.Module(builder.WebAssembly().get())')",
+ "WebAssembly.Module doesn't parse at byte 32 / 38: Table's initial page count of 2147483648 is invalid (evaluating 'new WebAssembly.Module(builder.WebAssembly().get())')"],
];
for (const d of badDescriptions) {
+2017-10-06  Commit Queue  <commit-queue@webkit.org>
+
+        Unreviewed, rolling out r222791 and r222873.
+        https://bugs.webkit.org/show_bug.cgi?id=178031
+
+        Caused crashes with workers/wasm LayoutTests (Requested by
+        ryanhaddad on #webkit).
+
+        Reverted changesets:
+
+        "WebAssembly: no VM / JS version of everything but Instance"
+        https://bugs.webkit.org/show_bug.cgi?id=177473
+        http://trac.webkit.org/changeset/222791
+
+        "WebAssembly: address no VM / JS follow-ups"
+        https://bugs.webkit.org/show_bug.cgi?id=177887
+        http://trac.webkit.org/changeset/222873
+
2017-10-06  Robin Morisset  <rmorisset@apple.com>

        Avoid integer overflow in DFGStrengthReduction.cpp
79C4B15E1BA2158F00FD592E /* DFGLiveCatchVariablePreservationPhase.h in Headers */ = {isa = PBXBuildFile; fileRef = 79C4B15C1BA2158F00FD592E /* DFGLiveCatchVariablePreservationPhase.h */; settings = {ATTRIBUTES = (Private, ); }; };
79CFC6F01C33B10000C768EA /* LLIntPCRanges.h in Headers */ = {isa = PBXBuildFile; fileRef = 79CFC6EF1C33B10000C768EA /* LLIntPCRanges.h */; settings = {ATTRIBUTES = (Private, ); }; };
79D5CD5B1C1106A900CECA07 /* SamplingProfiler.h in Headers */ = {isa = PBXBuildFile; fileRef = 79D5CD591C1106A900CECA07 /* SamplingProfiler.h */; settings = {ATTRIBUTES = (Private, ); }; };
- 79DAE27A1E03C82200B526AA /* WasmExceptionType.h in Headers */ = {isa = PBXBuildFile; fileRef = 79DAE2791E03C82200B526AA /* WasmExceptionType.h */; settings = {ATTRIBUTES = (Private, ); }; };
+ 79DAE27A1E03C82200B526AA /* WasmExceptionType.h in Headers */ = {isa = PBXBuildFile; fileRef = 79DAE2791E03C82200B526AA /* WasmExceptionType.h */; };
79DFCBDB1D88C59600527D03 /* HasOwnPropertyCache.h in Headers */ = {isa = PBXBuildFile; fileRef = 79DFCBDA1D88C59600527D03 /* HasOwnPropertyCache.h */; settings = {ATTRIBUTES = (Private, ); }; };
79EE0C001B4AFB85000385C9 /* VariableEnvironment.h in Headers */ = {isa = PBXBuildFile; fileRef = 79EE0BFE1B4AFB85000385C9 /* VariableEnvironment.h */; settings = {ATTRIBUTES = (Private, ); }; };
79EFD4841EBC045C00F3DFEA /* JSWebAssemblyCodeBlockSubspace.h in Headers */ = {isa = PBXBuildFile; fileRef = 79EFD4821EBC045C00F3DFEA /* JSWebAssemblyCodeBlockSubspace.h */; settings = {ATTRIBUTES = (Private, ); }; };
AD2FCC201DB59CB200B3E736 /* WebAssemblyTableConstructor.lut.h in Headers */ = {isa = PBXBuildFile; fileRef = AD2FCC141DB59C5900B3E736 /* WebAssemblyTableConstructor.lut.h */; };
AD2FCC211DB59CB200B3E736 /* WebAssemblyTablePrototype.lut.h in Headers */ = {isa = PBXBuildFile; fileRef = AD2FCC151DB59C5900B3E736 /* WebAssemblyTablePrototype.lut.h */; };
AD2FCC2D1DB838FD00B3E736 /* WebAssemblyPrototype.h in Headers */ = {isa = PBXBuildFile; fileRef = AD2FCC271DB838C400B3E736 /* WebAssemblyPrototype.h */; };
+ AD2FCC311DB83D4900B3E736 /* JSWebAssembly.h in Headers */ = {isa = PBXBuildFile; fileRef = AD2FCC2F1DB839F700B3E736 /* JSWebAssembly.h */; };
AD412B341E7B2E9E008AF157 /* WasmContext.h in Headers */ = {isa = PBXBuildFile; fileRef = AD412B321E7B2E8A008AF157 /* WasmContext.h */; settings = {ATTRIBUTES = (Private, ); }; };
AD4252511E5D0E14009D2A97 /* FullCodeOrigin.h in Headers */ = {isa = PBXBuildFile; fileRef = AD4252501E5D0DEB009D2A97 /* FullCodeOrigin.h */; };
AD4937C41DDBE6140077C807 /* AbstractModuleRecord.h in Headers */ = {isa = PBXBuildFile; fileRef = AD4937C21DDBE60A0077C807 /* AbstractModuleRecord.h */; settings = {ATTRIBUTES = (Private, ); }; };
AD4937D41DDD27DE0077C807 /* WebAssemblyFunction.h in Headers */ = {isa = PBXBuildFile; fileRef = AD4937CA1DDD27340077C807 /* WebAssemblyFunction.h */; };
AD4B1DFA1DF244E20071AE32 /* WasmBinding.h in Headers */ = {isa = PBXBuildFile; fileRef = AD4B1DF81DF244D70071AE32 /* WasmBinding.h */; };
AD5B416F1EBAFB77008EFA43 /* WasmName.h in Headers */ = {isa = PBXBuildFile; fileRef = AD5B416E1EBAFB65008EFA43 /* WasmName.h */; settings = {ATTRIBUTES = (Private, ); }; };
- AD5C36DD1F688B65000BCAAF /* WasmEmbedder.h in Headers */ = {isa = PBXBuildFile; fileRef = AD5C36DC1F688B5F000BCAAF /* WasmEmbedder.h */; settings = {ATTRIBUTES = (Private, ); }; };
- AD5C36E21F699EC0000BCAAF /* WasmInstance.h in Headers */ = {isa = PBXBuildFile; fileRef = AD5C36DF1F699EB6000BCAAF /* WasmInstance.h */; settings = {ATTRIBUTES = (Private, ); }; };
- AD5C36E61F69EC91000BCAAF /* WasmTable.h in Headers */ = {isa = PBXBuildFile; fileRef = AD5C36E41F69EC8B000BCAAF /* WasmTable.h */; settings = {ATTRIBUTES = (Private, ); }; };
- AD5C36EA1F75AD6A000BCAAF /* JSToWasm.h in Headers */ = {isa = PBXBuildFile; fileRef = AD8DD6CF1F67089F0004EB52 /* JSToWasm.h */; settings = {ATTRIBUTES = (Private, ); }; };
- AD5C36EB1F75AD73000BCAAF /* JSWebAssembly.h in Headers */ = {isa = PBXBuildFile; fileRef = ADD09AF31F62482E001313C2 /* JSWebAssembly.h */; settings = {ATTRIBUTES = (Private, ); }; };
- AD5C36EC1F75AD7C000BCAAF /* WasmToJS.h in Headers */ = {isa = PBXBuildFile; fileRef = ADD09AEE1F5F623F001313C2 /* WasmToJS.h */; settings = {ATTRIBUTES = (Private, ); }; };
- AD5C36EF1F7A263A000BCAAF /* WasmMemoryMode.h in Headers */ = {isa = PBXBuildFile; fileRef = AD5C36EE1F7A2629000BCAAF /* WasmMemoryMode.h */; settings = {ATTRIBUTES = (Private, ); }; };
AD7438C01E0457A400FD0C2A /* WasmSignature.h in Headers */ = {isa = PBXBuildFile; fileRef = AD7438BF1E04579200FD0C2A /* WasmSignature.h */; settings = {ATTRIBUTES = (Private, ); }; };
AD86A93E1AA4D88D002FE77F /* WeakGCMapInlines.h in Headers */ = {isa = PBXBuildFile; fileRef = AD86A93D1AA4D87C002FE77F /* WeakGCMapInlines.h */; settings = {ATTRIBUTES = (Private, ); }; };
AD8FF3981EB5BDB20087FF82 /* WasmIndexOrName.h in Headers */ = {isa = PBXBuildFile; fileRef = AD8FF3951EB5BD850087FF82 /* WasmIndexOrName.h */; settings = {ATTRIBUTES = (Private, ); }; };
AD2FCC151DB59C5900B3E736 /* WebAssemblyTablePrototype.lut.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = WebAssemblyTablePrototype.lut.h; sourceTree = "<group>"; };
AD2FCC261DB838C400B3E736 /* WebAssemblyPrototype.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; name = WebAssemblyPrototype.cpp; path = js/WebAssemblyPrototype.cpp; sourceTree = "<group>"; };
AD2FCC271DB838C400B3E736 /* WebAssemblyPrototype.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = WebAssemblyPrototype.h; path = js/WebAssemblyPrototype.h; sourceTree = "<group>"; };
+ AD2FCC2E1DB839F700B3E736 /* JSWebAssembly.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = JSWebAssembly.cpp; sourceTree = "<group>"; };
+ AD2FCC2F1DB839F700B3E736 /* JSWebAssembly.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = JSWebAssembly.h; sourceTree = "<group>"; };
AD2FCC321DC4045300B3E736 /* WasmFormat.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = WasmFormat.cpp; sourceTree = "<group>"; };
- AD3F1E471F4BA78600669912 /* WABase.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = WABase.h; sourceTree = "<group>"; };
- AD3F1E4A1F4DE68C00669912 /* WAInstance.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = WAInstance.h; sourceTree = "<group>"; };
- AD3F1E4B1F4DE68C00669912 /* WATable.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = WATable.h; sourceTree = "<group>"; };
- AD3F1E4C1F4DE68C00669912 /* WAMemory.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = WAMemory.h; sourceTree = "<group>"; };
- AD3F1E521F4F4AE500669912 /* WAMemory.cpp */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.cpp; path = WAMemory.cpp; sourceTree = "<group>"; };
- AD3F1E531F4F4AE500669912 /* WAModule.cpp */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.cpp; path = WAModule.cpp; sourceTree = "<group>"; };
- AD3F1E541F4F4AE500669912 /* WAInstance.cpp */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.cpp; path = WAInstance.cpp; sourceTree = "<group>"; };
- AD3F1E551F4F4AE500669912 /* WATable.cpp */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.cpp; path = WATable.cpp; sourceTree = "<group>"; };
- AD3F1E641F50E57F00669912 /* WAException.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = WAException.h; sourceTree = "<group>"; };
- AD3F1E651F50E57F00669912 /* WAException.cpp */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.cpp; path = WAException.cpp; sourceTree = "<group>"; };
- AD3F1E681F547CB600669912 /* WAMemoryDescriptor.cpp */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.cpp; path = WAMemoryDescriptor.cpp; sourceTree = "<group>"; };
- AD3F1E691F547CB600669912 /* WAMemoryDescriptor.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = WAMemoryDescriptor.h; sourceTree = "<group>"; };
- AD3F1E6A1F547CB600669912 /* WAImportObject.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = WAImportObject.h; sourceTree = "<group>"; };
- AD3F1E6B1F547CB800669912 /* WATableDescriptor.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = WATableDescriptor.h; sourceTree = "<group>"; };
- AD3F1E6C1F547CB800669912 /* WAImportObject.cpp */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.cpp; path = WAImportObject.cpp; sourceTree = "<group>"; };
- AD3F1E6D1F547CB800669912 /* WATableDescriptor.cpp */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.cpp; path = WATableDescriptor.cpp; sourceTree = "<group>"; };
- AD3F1E701F54C3AA00669912 /* WAFunction.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = WAFunction.h; sourceTree = "<group>"; };
- AD3F1E711F54C3AA00669912 /* WAFunction.cpp */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.cpp; path = WAFunction.cpp; sourceTree = "<group>"; };
AD412B311E7B2E8A008AF157 /* WasmContext.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = WasmContext.cpp; sourceTree = "<group>"; };
AD412B321E7B2E8A008AF157 /* WasmContext.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = WasmContext.h; sourceTree = "<group>"; };
AD412B351E7B57C0008AF157 /* AllowMacroScratchRegisterUsageIf.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = AllowMacroScratchRegisterUsageIf.h; sourceTree = "<group>"; };
AD4B1DF71DF244D70071AE32 /* WasmBinding.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = WasmBinding.cpp; sourceTree = "<group>"; };
AD4B1DF81DF244D70071AE32 /* WasmBinding.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = WasmBinding.h; sourceTree = "<group>"; };
AD5B416E1EBAFB65008EFA43 /* WasmName.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = WasmName.h; sourceTree = "<group>"; };
- AD5C36DC1F688B5F000BCAAF /* WasmEmbedder.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = WasmEmbedder.h; sourceTree = "<group>"; };
- AD5C36DE1F699EB6000BCAAF /* WasmInstance.cpp */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.cpp; path = WasmInstance.cpp; sourceTree = "<group>"; };
- AD5C36DF1F699EB6000BCAAF /* WasmInstance.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = WasmInstance.h; sourceTree = "<group>"; };
- AD5C36E31F69EC8B000BCAAF /* WasmTable.cpp */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.cpp; path = WasmTable.cpp; sourceTree = "<group>"; };
- AD5C36E41F69EC8B000BCAAF /* WasmTable.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = WasmTable.h; sourceTree = "<group>"; };
- AD5C36EE1F7A2629000BCAAF /* WasmMemoryMode.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = WasmMemoryMode.h; sourceTree = "<group>"; };
- AD5C36F01F7A26BF000BCAAF /* WasmMemoryMode.cpp */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.cpp; path = WasmMemoryMode.cpp; sourceTree = "<group>"; };
AD7438BE1E04579200FD0C2A /* WasmSignature.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = WasmSignature.cpp; sourceTree = "<group>"; };
AD7438BF1E04579200FD0C2A /* WasmSignature.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = WasmSignature.h; sourceTree = "<group>"; };
AD86A93D1AA4D87C002FE77F /* WeakGCMapInlines.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = WeakGCMapInlines.h; sourceTree = "<group>"; };
- AD8DD6CF1F67089F0004EB52 /* JSToWasm.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; name = JSToWasm.h; path = js/JSToWasm.h; sourceTree = "<group>"; };
- AD8DD6D01F6708A30004EB52 /* JSToWasm.cpp */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.cpp; name = JSToWasm.cpp; path = js/JSToWasm.cpp; sourceTree = "<group>"; };
AD8FF3951EB5BD850087FF82 /* WasmIndexOrName.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = WasmIndexOrName.h; sourceTree = "<group>"; };
AD8FF3961EB5BD850087FF82 /* WasmIndexOrName.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = WasmIndexOrName.cpp; sourceTree = "<group>"; };
AD9E852E1E8A0C6E008DE39E /* JSWebAssemblyCodeBlock.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = JSWebAssemblyCodeBlock.h; path = js/JSWebAssemblyCodeBlock.h; sourceTree = "<group>"; };
ADB6F67C1E15D7500082F384 /* WasmPageCount.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = WasmPageCount.cpp; sourceTree = "<group>"; };
ADBC54D21DF8EA00005BF738 /* WebAssemblyToJSCallee.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; name = WebAssemblyToJSCallee.cpp; path = js/WebAssemblyToJSCallee.cpp; sourceTree = "<group>"; };
ADBC54D31DF8EA00005BF738 /* WebAssemblyToJSCallee.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = WebAssemblyToJSCallee.h; path = js/WebAssemblyToJSCallee.h; sourceTree = "<group>"; };
- ADD09AEE1F5F623F001313C2 /* WasmToJS.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; name = WasmToJS.h; path = js/WasmToJS.h; sourceTree = "<group>"; };
- ADD09AEF1F5F623F001313C2 /* WasmToJS.cpp */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.cpp; name = WasmToJS.cpp; path = js/WasmToJS.cpp; sourceTree = "<group>"; };
- ADD09AF21F624829001313C2 /* JSWebAssembly.cpp */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.cpp; name = JSWebAssembly.cpp; path = js/JSWebAssembly.cpp; sourceTree = "<group>"; };
- ADD09AF31F62482E001313C2 /* JSWebAssembly.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; name = JSWebAssembly.h; path = js/JSWebAssembly.h; sourceTree = "<group>"; };
ADD8FA431EB3077100DF542F /* WasmNameSectionParser.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = WasmNameSectionParser.h; sourceTree = "<group>"; };
ADD8FA441EB3077100DF542F /* WasmNameSectionParser.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = WasmNameSectionParser.cpp; sourceTree = "<group>"; };
ADDB1F6218D77DB7009B58A8 /* OpaqueRootSet.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = OpaqueRootSet.h; sourceTree = "<group>"; };
ADE802961E08F1C90058DE78 /* WebAssemblyLinkErrorPrototype.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; name = WebAssemblyLinkErrorPrototype.cpp; path = js/WebAssemblyLinkErrorPrototype.cpp; sourceTree = "<group>"; };
ADE802971E08F1C90058DE78 /* WebAssemblyLinkErrorPrototype.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = WebAssemblyLinkErrorPrototype.h; path = js/WebAssemblyLinkErrorPrototype.h; sourceTree = "<group>"; };
ADE8029D1E08F2260058DE78 /* WebAssemblyLinkErrorConstructor.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; name = WebAssemblyLinkErrorConstructor.cpp; path = js/WebAssemblyLinkErrorConstructor.cpp; sourceTree = "<group>"; };
- ADFC30F71F47A7B8006451D3 /* WebAssembly.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = WebAssembly.h; sourceTree = "<group>"; };
- ADFC30F91F47A8C0006451D3 /* WAModule.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = WAModule.h; sourceTree = "<group>"; };
B59F89371891AD3300D5CCDC /* UnlinkedInstructionStream.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = UnlinkedInstructionStream.h; sourceTree = "<group>"; };
B59F89381891ADB500D5CCDC /* UnlinkedInstructionStream.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = UnlinkedInstructionStream.cpp; sourceTree = "<group>"; };
BC021BF2136900C300FC5467 /* ToolExecutable.xcconfig */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.xcconfig; path = ToolExecutable.xcconfig; sourceTree = "<group>"; };
86F3EEB616855A5B0077B92A /* ObjcRuntimeExtras.h */,
E124A8F60E555775003091F1 /* OpaqueJSString.cpp */,
E124A8F50E555775003091F1 /* OpaqueJSString.h */,
- AD3F1E471F4BA78600669912 /* WABase.h */,
- AD3F1E651F50E57F00669912 /* WAException.cpp */,
- AD3F1E641F50E57F00669912 /* WAException.h */,
- AD3F1E711F54C3AA00669912 /* WAFunction.cpp */,
- AD3F1E701F54C3AA00669912 /* WAFunction.h */,
- AD3F1E6C1F547CB800669912 /* WAImportObject.cpp */,
- AD3F1E6A1F547CB600669912 /* WAImportObject.h */,
- AD3F1E541F4F4AE500669912 /* WAInstance.cpp */,
- AD3F1E4A1F4DE68C00669912 /* WAInstance.h */,
- AD3F1E521F4F4AE500669912 /* WAMemory.cpp */,
- AD3F1E4C1F4DE68C00669912 /* WAMemory.h */,
- AD3F1E681F547CB600669912 /* WAMemoryDescriptor.cpp */,
- AD3F1E691F547CB600669912 /* WAMemoryDescriptor.h */,
- AD3F1E531F4F4AE500669912 /* WAModule.cpp */,
- ADFC30F91F47A8C0006451D3 /* WAModule.h */,
- AD3F1E551F4F4AE500669912 /* WATable.cpp */,
- AD3F1E4B1F4DE68C00669912 /* WATable.h */,
- AD3F1E6D1F547CB800669912 /* WATableDescriptor.cpp */,
- AD3F1E6B1F547CB800669912 /* WATableDescriptor.h */,
- ADFC30F71F47A7B8006451D3 /* WebAssembly.h */,
5DE3D0F40DD8DDFB00468714 /* WebKitAvailability.h */,
);
path = API;
isa = PBXGroup;
children = (
AD2FCB8A1DB5840000B3E736 /* js */,
+ AD2FCC2E1DB839F700B3E736 /* JSWebAssembly.cpp */,
+ AD2FCC2F1DB839F700B3E736 /* JSWebAssembly.h */,
53F40E8E1D5902820099A1B6 /* WasmB3IRGenerator.cpp */,
53F40E921D5A4AB30099A1B6 /* WasmB3IRGenerator.h */,
53CA73071EA533D80076049D /* WasmBBQPlan.cpp */,
526AC4B51E977C5D003500E1 /* WasmCodeBlock.h */,
AD412B311E7B2E8A008AF157 /* WasmContext.cpp */,
AD412B321E7B2E8A008AF157 /* WasmContext.h */,
- AD5C36DC1F688B5F000BCAAF /* WasmEmbedder.h */,
79DAE2791E03C82200B526AA /* WasmExceptionType.h */,
5381B9361E60E9660090F794 /* WasmFaultSignalHandler.cpp */,
5381B9381E60E97D0090F794 /* WasmFaultSignalHandler.h */,
53F40E8A1D5901BB0099A1B6 /* WasmFunctionParser.h */,
AD8FF3961EB5BD850087FF82 /* WasmIndexOrName.cpp */,
AD8FF3951EB5BD850087FF82 /* WasmIndexOrName.h */,
- AD5C36DE1F699EB6000BCAAF /* WasmInstance.cpp */,
- AD5C36DF1F699EB6000BCAAF /* WasmInstance.h */,
AD00659D1ECAC7FE000CA926 /* WasmLimits.h */,
53E9E0A91EAE83DE00FEE251 /* WasmMachineThreads.cpp */,
53E9E0AA1EAE83DE00FEE251 /* WasmMachineThreads.h */,
535557131D9D9EA5006D583B /* WasmMemory.h */,
79B759711DFA4C600052174C /* WasmMemoryInformation.cpp */,
79B759721DFA4C600052174C /* WasmMemoryInformation.h */,
- AD5C36F01F7A26BF000BCAAF /* WasmMemoryMode.cpp */,
- AD5C36EE1F7A2629000BCAAF /* WasmMemoryMode.h */,
790081361E95A8EC0052D7CD /* WasmModule.cpp */,
790081371E95A8EC0052D7CD /* WasmModule.h */,
53E777E11E92E265007CBEC4 /* WasmModuleInformation.cpp */,
53F40E841D58F9770099A1B6 /* WasmSections.h */,
AD7438BE1E04579200FD0C2A /* WasmSignature.cpp */,
AD7438BF1E04579200FD0C2A /* WasmSignature.h */,
- AD5C36E31F69EC8B000BCAAF /* WasmTable.cpp */,
- AD5C36E41F69EC8B000BCAAF /* WasmTable.h */,
5250D2CF1E8DA05A0029A932 /* WasmThunks.cpp */,
5250D2D01E8DA05A0029A932 /* WasmThunks.h */,
53E9E0AE1EAEC45700FEE251 /* WasmTierUpCount.h */,
AD2FCB8A1DB5840000B3E736 /* js */ = {
isa = PBXGroup;
children = (
- AD8DD6D01F6708A30004EB52 /* JSToWasm.cpp */,
- AD8DD6CF1F67089F0004EB52 /* JSToWasm.h */,
- ADD09AF21F624829001313C2 /* JSWebAssembly.cpp */,
- ADD09AF31F62482E001313C2 /* JSWebAssembly.h */,
5383AA2F1E65E8A100A532FC /* JSWebAssemblyCodeBlock.cpp */,
AD9E852E1E8A0C6E008DE39E /* JSWebAssemblyCodeBlock.h */,
79EFD4811EBC045C00F3DFEA /* JSWebAssemblyCodeBlockSubspace.cpp */,
AD2FCBAD1DB58DA400B3E736 /* JSWebAssemblyRuntimeError.h */,
AD2FCBAE1DB58DA400B3E736 /* JSWebAssemblyTable.cpp */,
AD2FCBAF1DB58DA400B3E736 /* JSWebAssemblyTable.h */,
- ADD09AEF1F5F623F001313C2 /* WasmToJS.cpp */,
- ADD09AEE1F5F623F001313C2 /* WasmToJS.h */,
AD2FCBB01DB58DA400B3E736 /* WebAssemblyCompileErrorConstructor.cpp */,
AD2FCBB11DB58DA400B3E736 /* WebAssemblyCompileErrorConstructor.h */,
AD2FCBB21DB58DA400B3E736 /* WebAssemblyCompileErrorPrototype.cpp */,
0FEC85BB1BE1462F0080FF74 /* B3InsertionSetInlines.h in Headers */,
0FDF67D21D9C6D27001B9825 /* B3Kind.h in Headers */,
436E54531C468E7400B5AF73 /* B3LegalizeMemoryOffsets.h in Headers */,
- AD5C36EC1F75AD7C000BCAAF /* WasmToJS.h in Headers */,
0F338E1E1BF286EA0013C88F /* B3LowerMacros.h in Headers */,
4319DA041C1BE40D001D260B /* B3LowerMacrosAfterOptimizations.h in Headers */,
0FEC851E1BDACDAC0080FF74 /* B3LowerToAir.h in Headers */,
E328DAEB1D38D005001A2529 /* BytecodeRewriter.h in Headers */,
6514F21918B3E1670098FF8B /* Bytecodes.h in Headers */,
0F885E111849A3BE00F1E3FA /* BytecodeUseDef.h in Headers */,
- AD5C36EB1F75AD73000BCAAF /* JSWebAssembly.h in Headers */,
0F8023EA1613832B00A0BA45 /* ByValInfo.h in Headers */,
65B8392E1BACAD360044E824 /* CachedRecovery.h in Headers */,
0FEC3C601F379F5300F59B6C /* CagedBarrierPtr.h in Headers */,
0FA581BC150E953000B9A2D9 /* DFGNodeType.h in Headers */,
DCEE220D1CEBAF75000C2396 /* DFGNullAbstractState.h in Headers */,
0F2B9CE719D0BA7D00B1D1B5 /* DFGObjectAllocationSinkingPhase.h in Headers */,
- AD5C36DD1F688B65000BCAAF /* WasmEmbedder.h in Headers */,
0F2B9CE919D0BA7D00B1D1B5 /* DFGObjectMaterializationData.h in Headers */,
86EC9DD01328DF82002B2AD7 /* DFGOperations.h in Headers */,
0F7C39FF1C90C55B00480151 /* DFGOpInfo.h in Headers */,
0F3AC754188E5EC80032029F /* ExitingJITType.h in Headers */,
0FB105861675481200F8AB6E /* ExitKind.h in Headers */,
0F0B83AB14BCF5BB00885B4F /* ExpressionRangeInfo.h in Headers */,
- AD5C36EA1F75AD6A000BCAAF /* JSToWasm.h in Headers */,
0FEC3C571F33A45300F59B6C /* FastMallocAlignedMemoryAllocator.h in Headers */,
A7A8AF3817ADB5F3005AB174 /* Float32Array.h in Headers */,
- AD5C36E21F699EC0000BCAAF /* WasmInstance.h in Headers */,
A7A8AF3917ADB5F3005AB174 /* Float64Array.h in Headers */,
0F24E54317EA9F5900ABB217 /* FPRInfo.h in Headers */,
E34EDBF71DB5FFC900DC87A5 /* FrameTracers.h in Headers */,
A7482B9311671147003B0712 /* JSWeakObjectMapRefPrivate.h in Headers */,
0F0B286B1EB8E6CF000EB5D2 /* JSWeakPrivate.h in Headers */,
709FB8681AE335C60039D069 /* JSWeakSet.h in Headers */,
+ AD2FCC311DB83D4900B3E736 /* JSWebAssembly.h in Headers */,
AD9E852F1E8A0C7C008DE39E /* JSWebAssemblyCodeBlock.h in Headers */,
79EFD4841EBC045C00F3DFEA /* JSWebAssemblyCodeBlockSubspace.h in Headers */,
AD2FCBE31DB58DAD00B3E736 /* JSWebAssemblyCompileError.h in Headers */,
BCD202D60E170708002C7E82 /* RegExpConstructor.lut.h in Headers */,
0F7C39FB1C8F629300480151 /* RegExpInlines.h in Headers */,
A1712B4111C7B235007A5315 /* RegExpKey.h in Headers */,
- AD5C36E61F69EC91000BCAAF /* WasmTable.h in Headers */,
BC18C45B0E16F5CD00B34460 /* RegExpObject.h in Headers */,
0F7C39FD1C8F659500480151 /* RegExpObjectInlines.h in Headers */,
BCD202C40E1706A7002C7E82 /* RegExpPrototype.h in Headers */,
0FE0502D1AA9095600D33B33 /* VarOffset.h in Headers */,
0F426A491460CBB700131F8F /* VirtualRegister.h in Headers */,
0F1FB3931E177A7200A9BE50 /* VisitingTimeout.h in Headers */,
- AD5C36EF1F7A263A000BCAAF /* WasmMemoryMode.h in Headers */,
0F952AA11DF7860900E06FBD /* VisitRaceKey.h in Headers */,
BC18C4200E16F5CD00B34460 /* VM.h in Headers */,
658D3A5619638268003C45D6 /* VMEntryRecord.h in Headers */,
tools/SigillCrashAnalyzer.cpp
tools/VMInspector.cpp
+wasm/JSWebAssembly.cpp
wasm/WasmB3IRGenerator.cpp
wasm/WasmBBQPlan.cpp
wasm/WasmBinding.cpp
wasm/WasmCallingConvention.cpp
wasm/WasmCodeBlock.cpp
wasm/WasmContext.cpp
-wasm/WasmEmbedder.h
wasm/WasmFaultSignalHandler.cpp
wasm/WasmFormat.cpp
wasm/WasmIndexOrName.cpp
-wasm/WasmInstance.cpp
-wasm/WasmInstance.h
wasm/WasmMachineThreads.cpp
wasm/WasmMemory.cpp
wasm/WasmMemoryInformation.cpp
-wasm/WasmMemoryMode.cpp
wasm/WasmModule.cpp
wasm/WasmModuleInformation.cpp
wasm/WasmModuleParser.cpp
wasm/WasmPageCount.cpp
wasm/WasmPlan.cpp
wasm/WasmSignature.cpp
-wasm/WasmTable.cpp
-wasm/WasmTable.h
wasm/WasmThunks.cpp
wasm/WasmValidate.cpp
wasm/WasmWorklist.cpp
-wasm/js/JSToWasm.cpp
-wasm/js/JSToWasm.h
-wasm/js/JSWebAssembly.cpp
wasm/js/JSWebAssemblyCodeBlock.cpp
wasm/js/JSWebAssemblyCodeBlockSubspace.cpp
wasm/js/JSWebAssemblyCompileError.cpp
wasm/js/JSWebAssemblyModule.cpp
wasm/js/JSWebAssemblyRuntimeError.cpp
wasm/js/JSWebAssemblyTable.cpp
-wasm/js/WasmToJS.cpp
-wasm/js/WasmToJS.h
wasm/js/WebAssemblyCompileErrorConstructor.cpp
wasm/js/WebAssemblyCompileErrorPrototype.cpp
wasm/js/WebAssemblyFunction.cpp
void AccessGenerationState::emitExplicitExceptionHandler()
{
restoreScratch();
- jit->copyCalleeSavesToEntryFrameCalleeSavesBuffer(m_vm.topEntryFrame);
+ jit->copyCalleeSavesToVMEntryFrameCalleeSavesBuffer(m_vm);
if (needsToRestoreRegistersIfException()) {
// To the JIT that produces the original exception handling
// call site, they will expect the OSR exit to be arrived
/*
- * Copyright (C) 2008-2017 Apple Inc. All rights reserved.
+ * Copyright (C) 2008, 2013, 2014, 2016 Apple Inc. All rights reserved.
* Copyright (C) 1999-2001 Harri Porten (porten@kde.org)
* Copyright (C) 2001 Peter Kelly (pmk@post.com)
*
if (!m_isPaused)
return;
- EntryFrame* topEntryFrame = m_vm.topEntryFrame;
- m_pauseOnCallFrame = m_currentCallFrame ? m_currentCallFrame->callerFrame(topEntryFrame) : nullptr;
+ VMEntryFrame* topVMEntryFrame = m_vm.topVMEntryFrame;
+ m_pauseOnCallFrame = m_currentCallFrame ? m_currentCallFrame->callerFrame(topVMEntryFrame) : nullptr;
m_pauseOnStepOut = true;
setSteppingMode(SteppingModeEnabled);
notifyDoneProcessingDebuggerEvents();
if (!m_currentCallFrame)
return;
- EntryFrame* topEntryFrame = m_vm.topEntryFrame;
- CallFrame* callerFrame = m_currentCallFrame->callerFrame(topEntryFrame);
+ VMEntryFrame* topVMEntryFrame = m_vm.topVMEntryFrame;
+ CallFrame* callerFrame = m_currentCallFrame->callerFrame(topVMEntryFrame);
// Returning from a call, there was at least one expression on the statement we are returning to.
m_pastFirstExpressionInStatement = true;
if (!m_currentCallFrame)
return;
- EntryFrame* topEntryFrame = m_vm.topEntryFrame;
- CallFrame* callerFrame = m_currentCallFrame->callerFrame(topEntryFrame);
+ VMEntryFrame* topVMEntryFrame = m_vm.topVMEntryFrame;
+ CallFrame* callerFrame = m_currentCallFrame->callerFrame(topVMEntryFrame);
// Treat stepping over an exception location like a step-out.
if (m_currentCallFrame == m_pauseOnCallFrame)
if (!m_currentCallFrame)
return;
- EntryFrame* topEntryFrame = m_vm.topEntryFrame;
- CallFrame* callerFrame = m_currentCallFrame->callerFrame(topEntryFrame);
+ VMEntryFrame* topVMEntryFrame = m_vm.topVMEntryFrame;
+ CallFrame* callerFrame = m_currentCallFrame->callerFrame(topVMEntryFrame);
// Returning from a program, could be eval(), there was at least one expression on the statement we are returning to.
m_pastFirstExpressionInStatement = true;
if (!m_exceptionChecksWithCallFrameRollback.empty()) {
m_exceptionChecksWithCallFrameRollback.link(this);
- copyCalleeSavesToEntryFrameCalleeSavesBuffer(vm()->topEntryFrame);
+ copyCalleeSavesToVMEntryFrameCalleeSavesBuffer(*vm());
// lookupExceptionHandlerFromCallerFrame is passed two arguments, the VM and the exec (the CallFrame*).
move(TrustedImmPtr(vm()), GPRInfo::argumentGPR0);
if (!m_exceptionChecks.empty()) {
m_exceptionChecks.link(this);
- copyCalleeSavesToEntryFrameCalleeSavesBuffer(vm()->topEntryFrame);
+ copyCalleeSavesToVMEntryFrameCalleeSavesBuffer(*vm());
// lookupExceptionHandler is passed two arguments, the VM and the exec (the CallFrame*).
move(TrustedImmPtr(vm()), GPRInfo::argumentGPR0);
/*
- * Copyright (C) 2011-2017 Apple Inc. All rights reserved.
+ * Copyright (C) 2011, 2013-2016 Apple Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
RegisterSet dontSaveRegisters = RegisterSet(RegisterSet::stackRegisters(), RegisterSet::allFPRs());
unsigned registerCount = registerSaveLocations->size();
- VMEntryRecord* record = vmEntryRecord(vm->topEntryFrame);
+ VMEntryRecord* record = vmEntryRecord(vm->topVMEntryFrame);
for (unsigned i = 0; i < registerCount; i++) {
RegisterAtOffset currentEntry = registerSaveLocations->at(i);
if (dontSaveRegisters.get(currentEntry.reg()))
/*
- * Copyright (C) 2011-2017 Apple Inc. All rights reserved.
+ * Copyright (C) 2011, 2013 Apple Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
if (exit.m_kind == GenericUnwind) {
// We are acting as a defacto op_catch because we arrive here from genericUnwind().
// So, we must restore our call frame and stack pointer.
- jit.restoreCalleeSavesFromEntryFrameCalleeSavesBuffer(vm->topEntryFrame);
+ jit.restoreCalleeSavesFromVMEntryFrameCalleeSavesBuffer(*vm);
jit.loadPtr(vm->addressOfCallFrameForCatch(), GPRInfo::callFrameRegister);
}
jit.addPtr(
jit.emitMaterializeTagCheckRegisters();
if (exit.isExceptionHandler())
- jit.copyCalleeSavesToEntryFrameCalleeSavesBuffer(vm.topEntryFrame);
+ jit.copyCalleeSavesToVMEntryFrameCalleeSavesBuffer(vm);
// Do all data format conversions and store the results into the stack.
jit.abortWithReason(DFGUnreasonableOSREntryJumpDestination);
ok.link(&jit);
- jit.restoreCalleeSavesFromEntryFrameCalleeSavesBuffer(vm->topEntryFrame);
+ jit.restoreCalleeSavesFromVMEntryFrameCalleeSavesBuffer(*vm);
jit.emitMaterializeTagCheckRegisters();
jit.jump(GPRInfo::regT1);
// Emit the exception handler.
*state.exceptionHandler = jit.label();
- jit.copyCalleeSavesToEntryFrameCalleeSavesBuffer(vm.topEntryFrame);
+ jit.copyCalleeSavesToVMEntryFrameCalleeSavesBuffer(vm);
jit.move(MacroAssembler::TrustedImmPtr(&vm), GPRInfo::argumentGPR0);
jit.move(GPRInfo::callFrameRegister, GPRInfo::argumentGPR1);
CCallHelpers::Call call = jit.call();
/*
- * Copyright (C) 2013-2017 Apple Inc. All rights reserved.
+ * Copyright (C) 2013, 2014 Apple Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
CCallHelpers::Call callArityCheck = jit.call();
auto noException = jit.branch32(CCallHelpers::GreaterThanOrEqual, GPRInfo::returnValueGPR, CCallHelpers::TrustedImm32(0));
- jit.copyCalleeSavesToEntryFrameCalleeSavesBuffer(vm.topEntryFrame);
+ jit.copyCalleeSavesToVMEntryFrameCalleeSavesBuffer(vm);
jit.move(CCallHelpers::TrustedImmPtr(&vm), GPRInfo::argumentGPR0);
jit.move(GPRInfo::callFrameRegister, GPRInfo::argumentGPR1);
CCallHelpers::Call callLookupExceptionHandlerFromCallerFrame = jit.call();
jit.store32(
MacroAssembler::TrustedImm32(callSiteIndex.bits()),
CCallHelpers::tagFor(VirtualRegister(CallFrameSlot::argumentCount)));
- jit.copyCalleeSavesToEntryFrameCalleeSavesBuffer(vm->topEntryFrame);
+ jit.copyCalleeSavesToVMEntryFrameCalleeSavesBuffer(*vm);
jit.move(GPRInfo::callFrameRegister, GPRInfo::argumentGPR0);
jit.move(CCallHelpers::TrustedImmPtr(jit.codeBlock()), GPRInfo::argumentGPR1);
/*
- * Copyright (C) 2013-2017 Apple Inc. All rights reserved.
+ * Copyright (C) 2013-2016 Apple Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
// The first thing we need to do is restablish our frame in the case of an exception.
if (exit.isGenericUnwindHandler()) {
RELEASE_ASSERT(vm->callFrameForCatch); // The first time we hit this exit, like at all other times, this field should be non-null.
- jit.restoreCalleeSavesFromEntryFrameCalleeSavesBuffer(vm->topEntryFrame);
+ jit.restoreCalleeSavesFromVMEntryFrameCalleeSavesBuffer(*vm);
jit.loadPtr(vm->addressOfCallFrameForCatch(), MacroAssembler::framePointerRegister);
jit.addPtr(CCallHelpers::TrustedImm32(codeBlock->stackPointerOffset() * sizeof(Register)),
MacroAssembler::framePointerRegister, CCallHelpers::stackPointerRegister);
RegisterAtOffsetList* vmCalleeSaves = VM::getAllCalleeSaveRegisterOffsets();
RegisterSet vmCalleeSavesToSkip = RegisterSet::stackRegisters();
if (exit.isExceptionHandler()) {
- jit.loadPtr(&vm->topEntryFrame, GPRInfo::regT1);
- jit.addPtr(CCallHelpers::TrustedImm32(EntryFrame::calleeSaveRegistersBufferOffset()), GPRInfo::regT1);
+ jit.loadPtr(&vm->topVMEntryFrame, GPRInfo::regT1);
+ jit.addPtr(CCallHelpers::TrustedImm32(VMEntryFrame::calleeSaveRegistersBufferOffset()), GPRInfo::regT1);
}
for (Reg reg = Reg::first(); reg <= Reg::last(); reg = reg.next()) {
/*
- * Copyright (C) 2008-2017 Apple Inc. All Rights Reserved.
+ * Copyright (C) 2008, 2013-2014, 2016 Apple Inc. All Rights Reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
#include "InlineCallFrame.h"
#include "Interpreter.h"
#include "JSCInlines.h"
-#include "JSWebAssemblyInstance.h"
#include "VMEntryScope.h"
#include "WasmContext.h"
#include <wtf/StringPrintStream.h>
#if ENABLE(WEBASSEMBLY)
if (!callee().isWasm())
return lexicalGlobalObject();
- return vm.wasmContext.load()->globalObject();
+ return Wasm::loadContext(vm)->globalObject();
#else
UNUSED_PARAM(vm);
return lexicalGlobalObject();
return false;
}
-CallFrame* CallFrame::callerFrame(EntryFrame*& currEntryFrame)
+CallFrame* CallFrame::callerFrame(VMEntryFrame*& currVMEntryFrame)
{
- if (callerFrameOrEntryFrame() == currEntryFrame) {
- VMEntryRecord* currVMEntryRecord = vmEntryRecord(currEntryFrame);
- currEntryFrame = currVMEntryRecord->prevTopEntryFrame();
+ if (callerFrameOrVMEntryFrame() == currVMEntryFrame) {
+ VMEntryRecord* currVMEntryRecord = vmEntryRecord(currVMEntryFrame);
+ currVMEntryFrame = currVMEntryRecord->prevTopVMEntryFrame();
return currVMEntryRecord->prevTopCallFrame();
}
- return static_cast<CallFrame*>(callerFrameOrEntryFrame());
+ return static_cast<CallFrame*>(callerFrameOrVMEntryFrame());
}
-SUPPRESS_ASAN CallFrame* CallFrame::unsafeCallerFrame(EntryFrame*& currEntryFrame)
+SUPPRESS_ASAN CallFrame* CallFrame::unsafeCallerFrame(VMEntryFrame*& currVMEntryFrame)
{
- if (unsafeCallerFrameOrEntryFrame() == currEntryFrame) {
- VMEntryRecord* currVMEntryRecord = vmEntryRecord(currEntryFrame);
- currEntryFrame = currVMEntryRecord->unsafePrevTopEntryFrame();
+ if (unsafeCallerFrameOrVMEntryFrame() == currVMEntryFrame) {
+ VMEntryRecord* currVMEntryRecord = vmEntryRecord(currVMEntryFrame);
+ currVMEntryFrame = currVMEntryRecord->unsafePrevTopVMEntryFrame();
return currVMEntryRecord->unsafePrevTopCallFrame();
}
- return static_cast<CallFrame*>(unsafeCallerFrameOrEntryFrame());
+ return static_cast<CallFrame*>(unsafeCallerFrameOrVMEntryFrame());
}
SourceOrigin CallFrame::callerSourceOrigin()
CallFrame& operator=(const Register& r) { *static_cast<Register*>(this) = r; return *this; }
- CallFrame* callerFrame() const { return static_cast<CallFrame*>(callerFrameOrEntryFrame()); }
- void* callerFrameOrEntryFrame() const { return callerFrameAndPC().callerFrame; }
- SUPPRESS_ASAN void* unsafeCallerFrameOrEntryFrame() const { return unsafeCallerFrameAndPC().callerFrame; }
+ CallFrame* callerFrame() const { return static_cast<CallFrame*>(callerFrameOrVMEntryFrame()); }
+ void* callerFrameOrVMEntryFrame() const { return callerFrameAndPC().callerFrame; }
+ SUPPRESS_ASAN void* unsafeCallerFrameOrVMEntryFrame() const { return unsafeCallerFrameAndPC().callerFrame; }
- CallFrame* unsafeCallerFrame(EntryFrame*&);
- JS_EXPORT_PRIVATE CallFrame* callerFrame(EntryFrame*&);
+ CallFrame* unsafeCallerFrame(VMEntryFrame*&);
+ JS_EXPORT_PRIVATE CallFrame* callerFrame(VMEntryFrame*&);
JS_EXPORT_PRIVATE SourceOrigin callerSourceOrigin();
/*
- * Copyright (C) 2016-2017 Apple Inc. All rights reserved.
+ * Copyright (C) 2016 Apple Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
namespace JSC {
-struct EntryFrame;
+struct VMEntryFrame;
class SuspendExceptionScope {
public:
{
ASSERT(vm);
ASSERT(callFrame);
- ASSERT(reinterpret_cast<void*>(callFrame) < reinterpret_cast<void*>(vm->topEntryFrame));
+ ASSERT(reinterpret_cast<void*>(callFrame) < reinterpret_cast<void*>(vm->topVMEntryFrame));
vm->topCallFrame = callFrame;
}
};
class NativeCallFrameTracerWithRestore {
public:
- ALWAYS_INLINE NativeCallFrameTracerWithRestore(VM* vm, EntryFrame* EntryFrame, CallFrame* callFrame)
+ ALWAYS_INLINE NativeCallFrameTracerWithRestore(VM* vm, VMEntryFrame* vmEntryFrame, CallFrame* callFrame)
: m_vm(vm)
{
ASSERT(vm);
ASSERT(callFrame);
- m_savedTopEntryFrame = vm->topEntryFrame;
+ m_savedTopVMEntryFrame = vm->topVMEntryFrame;
m_savedTopCallFrame = vm->topCallFrame;
- vm->topEntryFrame = EntryFrame;
+ vm->topVMEntryFrame = vmEntryFrame;
vm->topCallFrame = callFrame;
}
ALWAYS_INLINE ~NativeCallFrameTracerWithRestore()
{
- m_vm->topEntryFrame = m_savedTopEntryFrame;
+ m_vm->topVMEntryFrame = m_savedTopVMEntryFrame;
m_vm->topCallFrame = m_savedTopCallFrame;
}
private:
VM* m_vm;
- EntryFrame* m_savedTopEntryFrame;
+ VMEntryFrame* m_savedTopVMEntryFrame;
CallFrame* m_savedTopCallFrame;
};
notifyDebuggerOfUnwinding(m_vm, m_callFrame);
- copyCalleeSavesToEntryFrameCalleeSavesBuffer(visitor);
+ copyCalleeSavesToVMEntryFrameCalleeSavesBuffer(visitor);
- bool shouldStopUnwinding = visitor->callerIsEntryFrame();
+ bool shouldStopUnwinding = visitor->callerIsVMEntryFrame();
if (shouldStopUnwinding)
return StackVisitor::Done;
}
private:
- void copyCalleeSavesToEntryFrameCalleeSavesBuffer(StackVisitor& visitor) const
+ void copyCalleeSavesToVMEntryFrameCalleeSavesBuffer(StackVisitor& visitor) const
{
#if ENABLE(JIT) && NUMBER_OF_CALLEE_SAVES_REGISTERS > 0
RegisterAtOffsetList* currentCalleeSaves = visitor->calleeSaveRegisters();
intptr_t* frame = reinterpret_cast<intptr_t*>(m_callFrame->registers());
unsigned registerCount = currentCalleeSaves->size();
- VMEntryRecord* record = vmEntryRecord(m_vm.topEntryFrame);
+ VMEntryRecord* record = vmEntryRecord(m_vm.topVMEntryFrame);
for (unsigned i = 0; i < registerCount; i++) {
RegisterAtOffset currentEntry = currentCalleeSaves->at(i);
if (dontCopyRegisters.get(currentEntry.reg()))
auto scope = DECLARE_CATCH_SCOPE(vm);
if (unwindStart == UnwindFromCallerFrame) {
- if (callFrame->callerFrameOrEntryFrame() == vm.topEntryFrame)
+ if (callFrame->callerFrameOrVMEntryFrame() == vm.topVMEntryFrame)
return nullptr;
callFrame = callFrame->callerFrame();
/*
- * Copyright (C) 2013, 2015-2017 Apple Inc. All rights reserved.
+ * Copyright (C) 2013, 2015-2016 Apple Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
CallFrame* topFrame;
if (startFrame) {
ASSERT(vm);
- m_frame.m_entryFrame = vm->topEntryFrame;
+ m_frame.m_VMEntryFrame = vm->topVMEntryFrame;
topFrame = vm->topCallFrame;
- if (topFrame && static_cast<void*>(m_frame.m_entryFrame) == static_cast<void*>(topFrame)) {
- topFrame = vmEntryRecord(m_frame.m_entryFrame)->m_prevTopCallFrame;
- m_frame.m_entryFrame = vmEntryRecord(m_frame.m_entryFrame)->m_prevTopEntryFrame;
+ if (topFrame && static_cast<void*>(m_frame.m_VMEntryFrame) == static_cast<void*>(topFrame)) {
+ topFrame = vmEntryRecord(m_frame.m_VMEntryFrame)->m_prevTopCallFrame;
+ m_frame.m_VMEntryFrame = vmEntryRecord(m_frame.m_VMEntryFrame)->m_prevTopVMEntryFrame;
}
} else {
- m_frame.m_entryFrame = 0;
+ m_frame.m_VMEntryFrame = 0;
topFrame = 0;
}
- m_frame.m_callerIsEntryFrame = false;
+ m_frame.m_callerIsVMEntryFrame = false;
readFrame(topFrame);
// Find the frame the caller wants to start unwinding from.
readInlinedFrame(m_frame.callFrame(), &inlineCallFrame->directCaller);
inlineCallFrame = m_frame.inlineCallFrame();
}
- m_frame.m_entryFrame = m_frame.m_callerEntryFrame;
+ m_frame.m_VMEntryFrame = m_frame.m_CallerVMEntryFrame;
readFrame(m_frame.callerFrame());
} else
readInlinedFrame(m_frame.callFrame(), callerCodeOrigin);
return;
}
#endif // ENABLE(DFG_JIT)
- m_frame.m_entryFrame = m_frame.m_callerEntryFrame;
+ m_frame.m_VMEntryFrame = m_frame.m_CallerVMEntryFrame;
readFrame(m_frame.callerFrame());
}
{
m_frame.m_callFrame = callFrame;
m_frame.m_argumentCountIncludingThis = callFrame->argumentCountIncludingThis();
- m_frame.m_callerEntryFrame = m_frame.m_entryFrame;
- m_frame.m_callerFrame = callFrame->callerFrame(m_frame.m_callerEntryFrame);
- m_frame.m_callerIsEntryFrame = m_frame.m_callerEntryFrame != m_frame.m_entryFrame;
+ m_frame.m_CallerVMEntryFrame = m_frame.m_VMEntryFrame;
+ m_frame.m_callerFrame = callFrame->callerFrame(m_frame.m_CallerVMEntryFrame);
+ m_frame.m_callerIsVMEntryFrame = m_frame.m_CallerVMEntryFrame != m_frame.m_VMEntryFrame;
m_frame.m_isWasmFrame = false;
CalleeBits callee = callFrame->callee();
indent--;
}
- out.print(indent, "EntryFrame: ", RawPointer(m_entryFrame), "\n");
+ out.print(indent, "vmEntryFrame: ", RawPointer(vmEntryFrame()), "\n");
indent--;
}
out.print(indent, "}\n");
/*
- * Copyright (C) 2013-2017 Apple Inc. All rights reserved.
+ * Copyright (C) 2013, 2015-2016 Apple Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
size_t index() const { return m_index; }
size_t argumentCountIncludingThis() const { return m_argumentCountIncludingThis; }
- bool callerIsEntryFrame() const { return m_callerIsEntryFrame; }
+ bool callerIsVMEntryFrame() const { return m_callerIsVMEntryFrame; }
CallFrame* callerFrame() const { return m_callerFrame; }
CalleeBits callee() const { return m_callee; }
CodeBlock* codeBlock() const { return m_codeBlock; }
RegisterAtOffsetList* calleeSaveRegisters();
ClonedArguments* createArguments();
+ VMEntryFrame* vmEntryFrame() const { return m_VMEntryFrame; }
CallFrame* callFrame() const { return m_callFrame; }
void dump(PrintStream&, Indenter = Indenter()) const;
InlineCallFrame* m_inlineCallFrame;
#endif
CallFrame* m_callFrame;
- EntryFrame* m_entryFrame;
- EntryFrame* m_callerEntryFrame;
+ VMEntryFrame* m_VMEntryFrame;
+ VMEntryFrame* m_CallerVMEntryFrame;
CallFrame* m_callerFrame;
CalleeBits m_callee;
CodeBlock* m_codeBlock;
size_t m_argumentCountIncludingThis;
unsigned m_bytecodeOffset;
Wasm::IndexOrName m_wasmFunctionIndexOrName;
- bool m_callerIsEntryFrame : 1;
+ bool m_callerIsVMEntryFrame : 1;
bool m_isWasmFrame : 1;
friend class StackVisitor;
/*
- * Copyright (C) 2014-2017 Apple Inc. All rights reserved.
+ * Copyright (C) 2014, 2016 Apple Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
namespace JSC {
-struct EntryFrame;
+struct VMEntryFrame;
class ExecState;
class VM;
*/
VM* m_vm;
ExecState* m_prevTopCallFrame;
- EntryFrame* m_prevTopEntryFrame;
+ VMEntryFrame* m_prevTopVMEntryFrame;
#if ENABLE(JIT) && NUMBER_OF_CALLEE_SAVES_REGISTERS > 0
intptr_t calleeSaveRegistersBuffer[NUMBER_OF_CALLEE_SAVES_REGISTERS];
ExecState* prevTopCallFrame() { return m_prevTopCallFrame; }
SUPPRESS_ASAN ExecState* unsafePrevTopCallFrame() { return m_prevTopCallFrame; }
- EntryFrame* prevTopEntryFrame() { return m_prevTopEntryFrame; }
- SUPPRESS_ASAN EntryFrame* unsafePrevTopEntryFrame() { return m_prevTopEntryFrame; }
+ VMEntryFrame* prevTopVMEntryFrame() { return m_prevTopVMEntryFrame; }
+ SUPPRESS_ASAN VMEntryFrame* unsafePrevTopVMEntryFrame() { return m_prevTopVMEntryFrame; }
};
-extern "C" VMEntryRecord* vmEntryRecord(EntryFrame*);
+extern "C" VMEntryRecord* vmEntryRecord(VMEntryFrame*);
-struct EntryFrame {
+struct VMEntryFrame {
#if ENABLE(JIT) && NUMBER_OF_CALLEE_SAVES_REGISTERS > 0
static ptrdiff_t vmEntryRecordOffset()
{
- EntryFrame* fakeEntryFrame = reinterpret_cast<EntryFrame*>(0x1000);
- VMEntryRecord* record = vmEntryRecord(fakeEntryFrame);
+ VMEntryFrame* fakeVMEntryFrame = reinterpret_cast<VMEntryFrame*>(0x1000);
+ VMEntryRecord* record = vmEntryRecord(fakeVMEntryFrame);
return static_cast<ptrdiff_t>(
- reinterpret_cast<char*>(record) - reinterpret_cast<char*>(fakeEntryFrame));
+ reinterpret_cast<char*>(record) - reinterpret_cast<char*>(fakeVMEntryFrame));
}
static ptrdiff_t calleeSaveRegistersBufferOffset()
}
#endif
-void AssemblyHelpers::restoreCalleeSavesFromEntryFrameCalleeSavesBuffer(EntryFrame*& topEntryFrame)
+void AssemblyHelpers::restoreCalleeSavesFromVMEntryFrameCalleeSavesBuffer(VM& vm)
{
#if NUMBER_OF_CALLEE_SAVES_REGISTERS > 0
RegisterAtOffsetList* allCalleeSaves = VM::getAllCalleeSaveRegisterOffsets();
}
ASSERT(scratch != InvalidGPRReg);
- loadPtr(&topEntryFrame, scratch);
- addPtr(TrustedImm32(EntryFrame::calleeSaveRegistersBufferOffset()), scratch);
+ loadPtr(&vm.topVMEntryFrame, scratch);
+ addPtr(TrustedImm32(VMEntryFrame::calleeSaveRegistersBufferOffset()), scratch);
// Restore all callee saves except for the scratch.
for (unsigned i = 0; i < registerCount; i++) {
ASSERT(scratch == entry.reg().gpr());
loadPtr(Address(scratch, entry.offset()), scratch);
#else
- UNUSED_PARAM(topEntryFrame);
+ UNUSED_PARAM(vm);
#endif
}
}
#if ENABLE(WEBASSEMBLY)
-void AssemblyHelpers::loadWasmContextInstance(GPRReg dst)
+void AssemblyHelpers::loadWasmContext(GPRReg dst)
{
#if ENABLE(FAST_TLS_JIT)
- if (Wasm::Context::useFastTLS()) {
+ if (Wasm::useFastTLSForContext()) {
loadFromTLSPtr(fastTLSOffsetForKey(WTF_WASM_CONTEXT_KEY), dst);
return;
}
#endif
- move(Wasm::PinnedRegisterInfo::get().wasmContextInstancePointer, dst);
+ move(Wasm::PinnedRegisterInfo::get().wasmContextPointer, dst);
}
-void AssemblyHelpers::storeWasmContextInstance(GPRReg src)
+void AssemblyHelpers::storeWasmContext(GPRReg src)
{
#if ENABLE(FAST_TLS_JIT)
- if (Wasm::Context::useFastTLS()) {
+ if (Wasm::useFastTLSForContext()) {
storeToTLSPtr(src, fastTLSOffsetForKey(WTF_WASM_CONTEXT_KEY));
return;
}
#endif
- move(src, Wasm::PinnedRegisterInfo::get().wasmContextInstancePointer);
+ move(src, Wasm::PinnedRegisterInfo::get().wasmContextPointer);
}
-bool AssemblyHelpers::loadWasmContextInstanceNeedsMacroScratchRegister()
+bool AssemblyHelpers::loadWasmContextNeedsMacroScratchRegister()
{
#if ENABLE(FAST_TLS_JIT)
- if (Wasm::Context::useFastTLS())
+ if (Wasm::useFastTLSForContext())
return loadFromTLSPtrNeedsMacroScratchRegister();
#endif
return false;
}
-bool AssemblyHelpers::storeWasmContextInstanceNeedsMacroScratchRegister()
+bool AssemblyHelpers::storeWasmContextNeedsMacroScratchRegister()
{
#if ENABLE(FAST_TLS_JIT)
- if (Wasm::Context::useFastTLS())
+ if (Wasm::useFastTLSForContext())
return storeToTLSPtrNeedsMacroScratchRegister();
#endif
return false;
}
}
-void AssemblyHelpers::copyCalleeSavesToEntryFrameCalleeSavesBufferImpl(GPRReg calleeSavesBuffer)
+void AssemblyHelpers::copyCalleeSavesToVMEntryFrameCalleeSavesBufferImpl(GPRReg calleeSavesBuffer)
{
#if NUMBER_OF_CALLEE_SAVES_REGISTERS > 0
- addPtr(TrustedImm32(EntryFrame::calleeSaveRegistersBufferOffset()), calleeSavesBuffer);
+ addPtr(TrustedImm32(VMEntryFrame::calleeSaveRegistersBufferOffset()), calleeSavesBuffer);
RegisterAtOffsetList* allCalleeSaves = VM::getAllCalleeSaveRegisterOffsets();
RegisterSet dontCopyRegisters = RegisterSet::stackRegisters();
void copyCalleeSavesToVMEntryFrameCalleeSavesBuffer(GPRReg vmGPR)
{
#if NUMBER_OF_CALLEE_SAVES_REGISTERS > 0
- loadPtr(Address(vmGPR, VM::topEntryFrameOffset()), vmGPR);
- copyCalleeSavesToEntryFrameCalleeSavesBufferImpl(vmGPR);
+ loadPtr(Address(vmGPR, VM::topVMEntryFrameOffset()), vmGPR);
+ copyCalleeSavesToVMEntryFrameCalleeSavesBufferImpl(vmGPR);
#else
UNUSED_PARAM(vmGPR);
#endif
}
- void copyCalleeSavesToEntryFrameCalleeSavesBuffer(EntryFrame*& topEntryFrame, const TempRegisterSet& usedRegisters = { RegisterSet::stubUnavailableRegisters() })
+ void copyCalleeSavesToVMEntryFrameCalleeSavesBuffer(VM& vm, const TempRegisterSet& usedRegisters = { RegisterSet::stubUnavailableRegisters() })
{
#if NUMBER_OF_CALLEE_SAVES_REGISTERS > 0
GPRReg temp1 = usedRegisters.getFreeGPR(0);
- loadPtr(&topEntryFrame, temp1);
- copyCalleeSavesToEntryFrameCalleeSavesBufferImpl(temp1);
+ loadPtr(&vm.topVMEntryFrame, temp1);
+ copyCalleeSavesToVMEntryFrameCalleeSavesBufferImpl(temp1);
#else
- UNUSED_PARAM(topEntryFrame);
+ UNUSED_PARAM(vm);
UNUSED_PARAM(usedRegisters);
#endif
}
- void restoreCalleeSavesFromEntryFrameCalleeSavesBuffer(EntryFrame*&);
+ void restoreCalleeSavesFromVMEntryFrameCalleeSavesBuffer(VM&);
- void copyCalleeSavesFromFrameOrRegisterToEntryFrameCalleeSavesBuffer(EntryFrame*& topEntryFrame, const TempRegisterSet& usedRegisters = { RegisterSet::stubUnavailableRegisters() })
+ void copyCalleeSavesFromFrameOrRegisterToVMEntryFrameCalleeSavesBuffer(VM& vm, const TempRegisterSet& usedRegisters = { RegisterSet::stubUnavailableRegisters() })
{
#if NUMBER_OF_CALLEE_SAVES_REGISTERS > 0
GPRReg temp1 = usedRegisters.getFreeGPR(0);
ASSERT(codeBlock());
// Copy saved calleeSaves on stack or unsaved calleeSaves in register to vm calleeSave buffer
- loadPtr(&topEntryFrame, temp1);
- addPtr(TrustedImm32(EntryFrame::calleeSaveRegistersBufferOffset()), temp1);
+ loadPtr(&vm.topVMEntryFrame, temp1);
+ addPtr(TrustedImm32(VMEntryFrame::calleeSaveRegistersBufferOffset()), temp1);
RegisterAtOffsetList* allCalleeSaves = VM::getAllCalleeSaveRegisterOffsets();
RegisterAtOffsetList* currentCalleeSaves = codeBlock()->calleeSaveRegisters();
unsigned registerCount = allCalleeSaves->size();
for (unsigned i = 0; i < registerCount; i++) {
- RegisterAtOffset entry = allCalleeSaves->at(i);
- if (dontCopyRegisters.get(entry.reg()))
+ RegisterAtOffset vmEntry = allCalleeSaves->at(i);
+ if (dontCopyRegisters.get(vmEntry.reg()))
continue;
- RegisterAtOffset* currentFrameEntry = currentCalleeSaves->find(entry.reg());
+ RegisterAtOffset* currentFrameEntry = currentCalleeSaves->find(vmEntry.reg());
- if (entry.reg().isGPR()) {
+ if (vmEntry.reg().isGPR()) {
GPRReg regToStore;
if (currentFrameEntry) {
// Load calleeSave from stack into temp register
loadPtr(Address(framePointerRegister, currentFrameEntry->offset()), regToStore);
} else
// Just store callee save directly
- regToStore = entry.reg().gpr();
+ regToStore = vmEntry.reg().gpr();
- storePtr(regToStore, Address(temp1, entry.offset()));
+ storePtr(regToStore, Address(temp1, vmEntry.offset()));
} else {
FPRReg fpRegToStore;
if (currentFrameEntry) {
loadDouble(Address(framePointerRegister, currentFrameEntry->offset()), fpRegToStore);
} else
// Just store callee save directly
- fpRegToStore = entry.reg().fpr();
+ fpRegToStore = vmEntry.reg().fpr();
- storeDouble(fpRegToStore, Address(temp1, entry.offset()));
+ storeDouble(fpRegToStore, Address(temp1, vmEntry.offset()));
}
}
#else
- UNUSED_PARAM(topEntryFrame);
+ UNUSED_PARAM(vm);
UNUSED_PARAM(usedRegisters);
#endif
}
#endif
#if ENABLE(WEBASSEMBLY)
- void loadWasmContextInstance(GPRReg dst);
- void storeWasmContextInstance(GPRReg src);
- static bool loadWasmContextInstanceNeedsMacroScratchRegister();
- static bool storeWasmContextInstanceNeedsMacroScratchRegister();
+ void loadWasmContext(GPRReg dst);
+ void storeWasmContext(GPRReg src);
+ static bool loadWasmContextNeedsMacroScratchRegister();
+ static bool storeWasmContextNeedsMacroScratchRegister();
#endif
protected:
- void copyCalleeSavesToEntryFrameCalleeSavesBufferImpl(GPRReg calleeSavesBuffer);
+ void copyCalleeSavesToVMEntryFrameCalleeSavesBufferImpl(GPRReg calleeSavesBuffer);
CodeBlock* m_codeBlock;
CodeBlock* m_baselineCodeBlock;
skipOptimize.append(branchAdd32(Signed, TrustedImm32(Options::executionCounterIncrementForEntry()), AbsoluteAddress(m_codeBlock->addressOfJITExecuteCounter())));
ASSERT(!m_bytecodeOffset);
- copyCalleeSavesFromFrameOrRegisterToEntryFrameCalleeSavesBuffer(vm()->topEntryFrame);
+ copyCalleeSavesFromFrameOrRegisterToVMEntryFrameCalleeSavesBuffer(*vm());
callOperation(operationOptimize, m_bytecodeOffset);
skipOptimize.append(branchTestPtr(Zero, returnValueGPR));
if (!m_exceptionChecksWithCallFrameRollback.empty()) {
m_exceptionChecksWithCallFrameRollback.link(this);
- copyCalleeSavesToEntryFrameCalleeSavesBuffer(vm()->topEntryFrame);
+ copyCalleeSavesToVMEntryFrameCalleeSavesBuffer(*vm());
// lookupExceptionHandlerFromCallerFrame is passed two arguments, the VM and the exec (the CallFrame*).
m_exceptionHandler = label();
m_exceptionChecks.link(this);
- copyCalleeSavesToEntryFrameCalleeSavesBuffer(vm()->topEntryFrame);
+ copyCalleeSavesToVMEntryFrameCalleeSavesBuffer(*vm());
// lookupExceptionHandler is passed two arguments, the VM and the exec (the CallFrame*).
move(TrustedImmPtr(vm()), GPRInfo::argumentGPR0);
/*
- * Copyright (C) 2012-2017 Apple Inc. All rights reserved.
+ * Copyright (C) 2012-2013, 2016 Apple Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
ExecState* shadowChickenTopFrame = callFrame;
if (unwindStart == UnwindFromCallerFrame) {
- EntryFrame* topEntryFrame = vm->topEntryFrame;
- shadowChickenTopFrame = callFrame->callerFrame(topEntryFrame);
+ VMEntryFrame* topVMEntryFrame = vm->topVMEntryFrame;
+ shadowChickenTopFrame = callFrame->callerFrame(topVMEntryFrame);
}
vm->shadowChicken().log(*vm, shadowChickenTopFrame, ShadowChicken::Packet::throwPacket());
} else
catchRoutine = LLInt::getCodePtr(handleUncaughtException);
- ASSERT(bitwise_cast<uintptr_t>(callFrame) < bitwise_cast<uintptr_t>(vm->topEntryFrame));
+ ASSERT(bitwise_cast<uintptr_t>(callFrame) < bitwise_cast<uintptr_t>(vm->topVMEntryFrame));
vm->callFrameForCatch = callFrame;
vm->targetMachinePCForThrow = catchRoutine;
void JIT::emit_op_throw(Instruction* currentInstruction)
{
ASSERT(regT0 == returnValueGPR);
- copyCalleeSavesToEntryFrameCalleeSavesBuffer(vm()->topEntryFrame);
+ copyCalleeSavesToVMEntryFrameCalleeSavesBuffer(*vm());
emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
callOperationNoExceptionCheck(operationThrow, regT0);
jumpToExceptionHandler(*vm());
void JIT::emit_op_catch(Instruction* currentInstruction)
{
- restoreCalleeSavesFromEntryFrameCalleeSavesBuffer(vm()->topEntryFrame);
+ restoreCalleeSavesFromVMEntryFrameCalleeSavesBuffer(*vm());
move(TrustedImmPtr(m_vm), regT3);
load64(Address(regT3, VM::callFrameForCatchOffset()), callFrameRegister);
if (canBeOptimized()) {
linkSlowCase(iter);
- copyCalleeSavesFromFrameOrRegisterToEntryFrameCalleeSavesBuffer(vm()->topEntryFrame);
+ copyCalleeSavesFromFrameOrRegisterToVMEntryFrameCalleeSavesBuffer(*vm());
callOperation(operationOptimize, m_bytecodeOffset);
Jump noOptimizedEntry = branchTestPtr(Zero, returnValueGPR);
/*
- * Copyright (C) 2009-2017 Apple Inc. All rights reserved.
+ * Copyright (C) 2009, 2012-2016 Apple Inc. All rights reserved.
* Copyright (C) 2010 Patrick Gansterer <paroga@paroga.com>
*
* Redistribution and use in source and binary forms, with or without
void JIT::emit_op_throw(Instruction* currentInstruction)
{
ASSERT(regT0 == returnValueGPR);
- copyCalleeSavesToEntryFrameCalleeSavesBuffer(vm()->topEntryFrame);
+ copyCalleeSavesToVMEntryFrameCalleeSavesBuffer(*vm());
emitLoad(currentInstruction[1].u.operand, regT1, regT0);
callOperationNoExceptionCheck(operationThrow, regT1, regT0);
jumpToExceptionHandler(*vm());
void JIT::emit_op_catch(Instruction* currentInstruction)
{
- restoreCalleeSavesFromEntryFrameCalleeSavesBuffer(vm()->topEntryFrame);
+ restoreCalleeSavesFromVMEntryFrameCalleeSavesBuffer(*vm());
move(TrustedImmPtr(m_vm), regT3);
// operationThrow returns the callFrame for the handler.
VM* vm = codeBlock->vm();
auto scope = DECLARE_THROW_SCOPE(*vm);
- EntryFrame* entryFrame = vm->topEntryFrame;
- CallFrame* callerFrame = exec->callerFrame(entryFrame);
+ VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
+ CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);
if (!callerFrame) {
callerFrame = exec;
- entryFrame = vm->topEntryFrame;
+ vmEntryFrame = vm->topVMEntryFrame;
}
- NativeCallFrameTracerWithRestore tracer(vm, entryFrame, callerFrame);
+ NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
throwStackOverflowError(callerFrame, scope);
}
VM* vm = &exec->vm();
auto scope = DECLARE_THROW_SCOPE(*vm);
- EntryFrame* entryFrame = vm->topEntryFrame;
- CallFrame* callerFrame = exec->callerFrame(entryFrame);
+ VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
+ CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);
- NativeCallFrameTracerWithRestore tracer(vm, entryFrame, callerFrame);
+ NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
ErrorHandlingScope errorScope(*vm);
throwException(callerFrame, scope, createError(callerFrame, ASCIILiteral("Division by zero or division overflow.")));
}
VM* vm = &exec->vm();
auto scope = DECLARE_THROW_SCOPE(*vm);
- EntryFrame* entryFrame = vm->topEntryFrame;
- CallFrame* callerFrame = exec->callerFrame(entryFrame);
+ VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
+ CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);
- NativeCallFrameTracerWithRestore tracer(vm, entryFrame, callerFrame);
+ NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
ErrorHandlingScope errorScope(*vm);
throwException(callerFrame, scope, createError(callerFrame, ASCIILiteral("Out-of-bounds access.")));
}
int32_t missingArgCount = CommonSlowPaths::arityCheckFor(exec, *vm, CodeForCall);
if (missingArgCount < 0) {
- EntryFrame* entryFrame = vm->topEntryFrame;
- CallFrame* callerFrame = exec->callerFrame(entryFrame);
- NativeCallFrameTracerWithRestore tracer(vm, entryFrame, callerFrame);
+ VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
+ CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);
+ NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
throwStackOverflowError(callerFrame, scope);
}
int32_t missingArgCount = CommonSlowPaths::arityCheckFor(exec, *vm, CodeForConstruct);
if (missingArgCount < 0) {
- EntryFrame* entryFrame = vm->topEntryFrame;
- CallFrame* callerFrame = exec->callerFrame(entryFrame);
- NativeCallFrameTracerWithRestore tracer(vm, entryFrame, callerFrame);
+ VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
+ CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);
+ NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
throwStackOverflowError(callerFrame, scope);
}
// even though we won't use it.
jit.preserveReturnAddressAfterCall(GPRInfo::nonPreservedNonReturnGPR);
- jit.copyCalleeSavesToEntryFrameCalleeSavesBuffer(vm->topEntryFrame);
+ jit.copyCalleeSavesToVMEntryFrameCalleeSavesBuffer(*vm);
jit.setupArguments(CCallHelpers::TrustedImmPtr(vm), GPRInfo::callFrameRegister);
jit.move(CCallHelpers::TrustedImmPtr(bitwise_cast<void*>(lookupExceptionHandler)), GPRInfo::nonArgGPR0);
// Handle an exception
exceptionHandler.link(&jit);
- jit.copyCalleeSavesToEntryFrameCalleeSavesBuffer(vm->topEntryFrame);
+ jit.copyCalleeSavesToVMEntryFrameCalleeSavesBuffer(*vm);
jit.storePtr(JSInterfaceJIT::callFrameRegister, &vm->topCallFrame);
#if CPU(X86) && USE(JSVALUE32_64)
EncodedJSValue JSC_HOST_CALL functionDumpCallFrame(ExecState* exec)
{
VM& vm = exec->vm();
- EntryFrame* topEntryFrame = vm.topEntryFrame;
- ExecState* callerFrame = exec->callerFrame(topEntryFrame);
+ VMEntryFrame* topVMEntryFrame = vm.topVMEntryFrame;
+ ExecState* callerFrame = exec->callerFrame(topVMEntryFrame);
if (callerFrame)
vm.interpreter->dumpCallFrame(callerFrame);
return JSValue::encode(jsUndefined());
VM& vm = exec->vm();
auto throwScope = DECLARE_THROW_SCOPE(vm);
- EntryFrame* topEntryFrame = vm.topEntryFrame;
- CallFrame* callerFrame = exec->callerFrame(topEntryFrame);
+ VMEntryFrame* vmEntryFrame = vm.topVMEntryFrame;
+ CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);
if (!callerFrame) {
callerFrame = exec;
- topEntryFrame = vm.topEntryFrame;
+ vmEntryFrame = vm.topVMEntryFrame;
}
- NativeCallFrameTracerWithRestore tracer(&vm, topEntryFrame, callerFrame);
+ NativeCallFrameTracerWithRestore tracer(&vm, vmEntryFrame, callerFrame);
LLINT_SET_PC_FOR_STUBS();
/*
- * Copyright (C) 2012-2017 Apple Inc. All rights reserved.
+ * Copyright (C) 2012-2013, 2016 Apple Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
return JSValue::encode(result);
}
-extern "C" VMEntryRecord* vmEntryRecord(EntryFrame* entryFrame)
+extern "C" VMEntryRecord* vmEntryRecord(VMEntryFrame* entryFrame)
{
// The C Loop doesn't have any callee save registers, so the VMEntryRecord is allocated at the base of the frame.
intptr_t stackAlignment = stackAlignmentBytes();
macro copyCalleeSavesToVMEntryFrameCalleeSavesBuffer(vm, temp)
if ARM64 or X86_64 or X86_64_WIN
- loadp VM::topEntryFrame[vm], temp
+ loadp VM::topVMEntryFrame[vm], temp
vmEntryRecord(temp, temp)
leap VMEntryRecord::calleeSaveRegistersBuffer[temp], temp
if ARM64
macro restoreCalleeSavesFromVMEntryFrameCalleeSavesBuffer(vm, temp)
if ARM64 or X86_64 or X86_64_WIN
- loadp VM::topEntryFrame[vm], temp
+ loadp VM::topVMEntryFrame[vm], temp
vmEntryRecord(temp, temp)
leap VMEntryRecord::calleeSaveRegistersBuffer[temp], temp
if ARM64
storep address, VM::m_lastStackTop[vm]
ret
- # VMEntryRecord* vmEntryRecord(const EntryFrame* entryFrame)
+ # VMEntryRecord* vmEntryRecord(const VMEntryFrame* entryFrame)
global _vmEntryRecord
_vmEntryRecord:
if X86 or X86_WIN
storep vm, VMEntryRecord::m_vm[sp]
loadp VM::topCallFrame[vm], t4
storep t4, VMEntryRecord::m_prevTopCallFrame[sp]
- loadp VM::topEntryFrame[vm], t4
- storep t4, VMEntryRecord::m_prevTopEntryFrame[sp]
+ loadp VM::topVMEntryFrame[vm], t4
+ storep t4, VMEntryRecord::m_prevTopVMEntryFrame[sp]
# Align stack pointer
if X86_WIN or MIPS
loadp VMEntryRecord::m_vm[sp], t5
loadp VMEntryRecord::m_prevTopCallFrame[sp], t4
storep t4, VM::topCallFrame[t5]
- loadp VMEntryRecord::m_prevTopEntryFrame[sp], t4
- storep t4, VM::topEntryFrame[t5]
+ loadp VMEntryRecord::m_prevTopVMEntryFrame[sp], t4
+ storep t4, VM::topVMEntryFrame[t5]
if ARMv7
subp cfr, CalleeRegisterSaveSize, t5
.copyArgsDone:
storep sp, VM::topCallFrame[vm]
- storep cfr, VM::topEntryFrame[vm]
+ storep cfr, VM::topVMEntryFrame[vm]
makeCall(entry, t3, t4)
loadp VMEntryRecord::m_vm[sp], t5
loadp VMEntryRecord::m_prevTopCallFrame[sp], t4
storep t4, VM::topCallFrame[t5]
- loadp VMEntryRecord::m_prevTopEntryFrame[sp], t4
- storep t4, VM::topEntryFrame[t5]
+ loadp VMEntryRecord::m_prevTopVMEntryFrame[sp], t4
+ storep t4, VM::topVMEntryFrame[t5]
if ARMv7
subp cfr, CalleeRegisterSaveSize, t5
loadp VMEntryRecord::m_vm[sp], t3
loadp VMEntryRecord::m_prevTopCallFrame[sp], t5
storep t5, VM::topCallFrame[t3]
- loadp VMEntryRecord::m_prevTopEntryFrame[sp], t5
- storep t5, VM::topEntryFrame[t3]
+ loadp VMEntryRecord::m_prevTopVMEntryFrame[sp], t5
+ storep t5, VM::topVMEntryFrame[t3]
if ARMv7
subp cfr, CalleeRegisterSaveSize, t3
storep vm, VMEntryRecord::m_vm[sp]
loadp VM::topCallFrame[vm], t4
storep t4, VMEntryRecord::m_prevTopCallFrame[sp]
- loadp VM::topEntryFrame[vm], t4
- storep t4, VMEntryRecord::m_prevTopEntryFrame[sp]
+ loadp VM::topVMEntryFrame[vm], t4
+ storep t4, VMEntryRecord::m_prevTopVMEntryFrame[sp]
loadi ProtoCallFrame::paddedArgCount[protoCallFrame], t4
addp CallFrameHeaderSlots, t4, t4
loadp VMEntryRecord::m_vm[t4], vm
loadp VMEntryRecord::m_prevTopCallFrame[t4], extraTempReg
storep extraTempReg, VM::topCallFrame[vm]
- loadp VMEntryRecord::m_prevTopEntryFrame[t4], extraTempReg
- storep extraTempReg, VM::topEntryFrame[vm]
+ loadp VMEntryRecord::m_prevTopVMEntryFrame[t4], extraTempReg
+ storep extraTempReg, VM::topVMEntryFrame[vm]
subp cfr, CalleeRegisterSaveSize, sp
else
storep sp, VM::topCallFrame[vm]
end
- storep cfr, VM::topEntryFrame[vm]
+ storep cfr, VM::topVMEntryFrame[vm]
checkStackPointerAlignment(extraTempReg, 0xbad0dc02)
loadp VMEntryRecord::m_vm[t4], vm
loadp VMEntryRecord::m_prevTopCallFrame[t4], t2
storep t2, VM::topCallFrame[vm]
- loadp VMEntryRecord::m_prevTopEntryFrame[t4], t2
- storep t2, VM::topEntryFrame[vm]
+ loadp VMEntryRecord::m_prevTopVMEntryFrame[t4], t2
+ storep t2, VM::topVMEntryFrame[vm]
subp cfr, CalleeRegisterSaveSize, sp
end
end
+
_handleUncaughtException:
loadp Callee[cfr], t3
andp MarkedBlockMask, t3
loadp VMEntryRecord::m_vm[t2], t3
loadp VMEntryRecord::m_prevTopCallFrame[t2], extraTempReg
storep extraTempReg, VM::topCallFrame[t3]
- loadp VMEntryRecord::m_prevTopEntryFrame[t2], extraTempReg
- storep extraTempReg, VM::topEntryFrame[t3]
+ loadp VMEntryRecord::m_prevTopVMEntryFrame[t2], extraTempReg
+ storep extraTempReg, VM::topVMEntryFrame[t3]
subp cfr, CalleeRegisterSaveSize, sp
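A minimal standalone sketch (hypothetical C++ names, not part of this patch) of the discipline the entry/exit assembly above follows for VM::topVMEntryFrame: remember the previous value on entry, publish the new entry frame, and put the previous value back on every exit path, including the uncaught-exception path.

struct SketchVM {
    void* topVMEntryFrame { nullptr };
};

class TopVMEntryFrameScope {
public:
    TopVMEntryFrameScope(SketchVM& vm, void* newEntryFrame)
        : m_vm(vm)
        , m_previousTopVMEntryFrame(vm.topVMEntryFrame)
    {
        // Mirrors "storep cfr, VM::topVMEntryFrame[vm]" on entry.
        m_vm.topVMEntryFrame = newEntryFrame;
    }

    ~TopVMEntryFrameScope()
    {
        // Mirrors reloading VMEntryRecord::m_prevTopVMEntryFrame and storing it back on exit.
        m_vm.topVMEntryFrame = m_previousTopVMEntryFrame;
    }

private:
    SketchVM& m_vm;
    void* m_previousTopVMEntryFrame;
};

In the real code the previous value lives in VMEntryRecord::m_prevTopVMEntryFrame on the stack rather than in a C++ member.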
Options::useWebAssembly() = false;
if (!Options::useWebAssembly())
- Options::useFastTLSForWasmContext() = false;
+ Options::useWebAssemblyFastTLS() = false;
if (Options::dumpDisassembly()
|| Options::dumpDFGDisassembly()
v(unsigned, webAssemblyFastMemoryRedzonePages, 128, Normal, "WebAssembly fast memories use 4GiB virtual allocations, plus a redzone (counted as a multiple of 64KiB WebAssembly pages) at the end to catch reg+imm accesses which exceed 32-bit; anything beyond the redzone is explicitly bounds-checked") \
v(bool, crashIfWebAssemblyCantFastMemory, false, Normal, "If true, we will crash if we can't obtain fast memory for wasm.") \
v(unsigned, maxNumWebAssemblyFastMemories, 4, Normal, nullptr) \
- v(bool, useFastTLSForWasmContext, true, Normal, "If true, we will store context in fast TLS. If false, we will pin it to a register.") \
+ v(bool, useWebAssemblyFastTLS, true, Normal, "If true, we will try to use fast thread-local storage if available on the current platform.") \
+ v(bool, useFastTLSForWasmContext, true, Normal, "If true (and fast TLS is enabled), we will store context in fast TLS. If false, we will pin it to a register.") \
v(bool, useCallICsForWebAssemblyToJSCalls, true, Normal, "If true, we will use CallLinkInfo to inline cache Wasm to JS calls.") \
v(bool, useObjectRestSpread, true, Normal, "If true, we will enable Object Rest/Spread feature.") \
v(bool, useArrayAllocationProfiling, true, Normal, "If true, we will use our normal array allocation profiling. If false, the allocation profile will always claim to be undecided.")\
FrameWalker(VM& vm, ExecState* callFrame, const AbstractLocker& codeBlockSetLocker, const AbstractLocker& machineThreadsLocker)
: m_vm(vm)
, m_callFrame(callFrame)
- , m_entryFrame(vm.topEntryFrame)
+ , m_vmEntryFrame(vm.topVMEntryFrame)
, m_codeBlockSetLocker(codeBlockSetLocker)
, m_machineThreadsLocker(machineThreadsLocker)
{
SUPPRESS_ASAN
void advanceToParentFrame()
{
- m_callFrame = m_callFrame->unsafeCallerFrame(m_entryFrame);
+ m_callFrame = m_callFrame->unsafeCallerFrame(m_vmEntryFrame);
}
bool isAtTop() const
VM& m_vm;
ExecState* m_callFrame;
- EntryFrame* m_entryFrame;
+ VMEntryFrame* m_vmEntryFrame;
const AbstractLocker& m_codeBlockSetLocker;
const AbstractLocker& m_machineThreadsLocker;
bool m_bailingOut { false };
// We reuse LLInt CodeBlocks for the baseline JIT, so we need to check for both jit types.
// This might also be false for various reasons (known and unknown), even though
// it's super unlikely. One reason that this can be false is when we throw from a DFG frame,
- // and we end up having to unwind past an EntryFrame, we will end up executing
+ // and we end up having to unwind past a VMEntryFrame, we will end up executing
// inside the LLInt's handleUncaughtException. So we just protect against this
// by ignoring it.
unsigned bytecodeIndex = 0;
/*
- * Copyright (C) 2016-2017 Apple Inc. All rights reserved.
+ * Copyright (C) 2016 Apple Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
bool willBeHandleByLLIntOrJIT = false;
void* previousScope = m_previousScope;
- void* topEntryFrame = m_vm.topEntryFrame;
+ void* topVMEntryFrame = m_vm.topVMEntryFrame;
- // If the topEntryFrame was pushed on the stack after the previousScope was instantiated,
+ // If the topVMEntryFrame was pushed on the stack after the previousScope was instantiated,
// then this throwScope will be returning to LLINT or JIT code that always does an exception
// check. In that case, skip the simulated throw because the LLInt and JIT will be
// checking for the exception in their own way instead of calling ThrowScope::exception().
- if (topEntryFrame && previousScope > topEntryFrame)
+ if (topVMEntryFrame && previousScope > topVMEntryFrame)
willBeHandleByLLIntOrJIT = true;
if (!willBeHandleByLLIntOrJIT)
#endif
, vmType(vmType)
, clientData(0)
- , topEntryFrame(nullptr)
+ , topVMEntryFrame(nullptr)
, topCallFrame(CallFrame::noCaller())
, promiseDeferredTimer(std::make_unique<PromiseDeferredTimer>(*this))
, m_atomicStringTable(vmType == Default ? Thread::current().atomicStringTable() : new AtomicStringTable)
promiseDeferredTimer->stopRunningTasks();
#if ENABLE(WEBASSEMBLY)
if (Wasm::existingWorklistOrNull())
- Wasm::ensureWorklist().stopAllPlansForContext(wasmContext);
+ Wasm::ensureWorklist().stopAllPlansForVM(*this);
#endif
if (UNLIKELY(m_watchdog))
m_watchdog->willDestroyVM(this);
#include "TemplateRegistryKeyTable.h"
#include "VMEntryRecord.h"
#include "VMTraps.h"
-#include "WasmContext.h"
#include "Watchpoint.h"
#include <wtf/BumpPointerAllocator.h>
#include <wtf/CheckedArithmetic.h>
VMType vmType;
ClientData* clientData;
- EntryFrame* topEntryFrame;
+ VMEntryFrame* topVMEntryFrame;
// NOTE: When throwing an exception while rolling back the call frame, this may be equal to
- // topEntryFrame.
+ // topVMEntryFrame.
// FIXME: This should be a void*, because it might not point to a CallFrame.
// https://bugs.webkit.org/show_bug.cgi?id=160441
ExecState* topCallFrame { nullptr };
-#if ENABLE(WEBASSEMBLY)
- Wasm::Context wasmContext;
-#endif
+ // FIXME: Save this state elsewhere to allow PIC. https://bugs.webkit.org/show_bug.cgi?id=169773
+ JSWebAssemblyInstance* wasmContext { nullptr };
Strong<Structure> structureStructure;
Strong<Structure> structureRareDataStructure;
Strong<Structure> terminatedExecutionErrorStructure;
return OBJECT_OFFSETOF(VM, targetMachinePCForThrow);
}
- static ptrdiff_t topEntryFrameOffset()
+ static ptrdiff_t topVMEntryFrameOffset()
{
- return OBJECT_OFFSETOF(VM, topEntryFrame);
+ return OBJECT_OFFSETOF(VM, topVMEntryFrame);
}
void restorePreviousException(Exception* exception) { setException(exception); }
return topCallFrame;
}
-static bool isSaneFrame(CallFrame* frame, CallFrame* calleeFrame, EntryFrame* entryFrame, StackBounds stackBounds)
+static bool isSaneFrame(CallFrame* frame, CallFrame* calleeFrame, VMEntryFrame* entryFrame, StackBounds stackBounds)
{
if (reinterpret_cast<void*>(frame) >= reinterpret_cast<void*>(entryFrame))
return false;
}
CodeBlock* foundCodeBlock = nullptr;
- EntryFrame* entryFrame = vm.topEntryFrame;
+ VMEntryFrame* vmEntryFrame = vm.topVMEntryFrame;
// We don't have a callee to start with. So, use the end of the stack to keep the
// isSaneFrame() checker below happy for the first iteration. It will still check
// to ensure that the address is in the stackBounds.
CallFrame* calleeFrame = reinterpret_cast<CallFrame*>(stackBounds.end());
- if (!entryFrame || !callFrame)
+ if (!vmEntryFrame || !callFrame)
return; // Not running JS code. Let the SignalSender try again later.
do {
- if (!isSaneFrame(callFrame, calleeFrame, entryFrame, stackBounds))
+ if (!isSaneFrame(callFrame, calleeFrame, vmEntryFrame, stackBounds))
return; // Let the SignalSender try again later.
CodeBlock* candidateCodeBlock = callFrame->codeBlock();
}
calleeFrame = callFrame;
- callFrame = callFrame->callerFrame(entryFrame);
+ callFrame = callFrame->callerFrame(vmEntryFrame);
- } while (callFrame && entryFrame);
+ } while (callFrame && vmEntryFrame);
if (!foundCodeBlock) {
// We may have just entered the frame and the codeBlock pointer is not
m_needToInvalidatedCodeBlocks = false;
- EntryFrame* entryFrame = vm().topEntryFrame;
+ VMEntryFrame* vmEntryFrame = vm().topVMEntryFrame;
CallFrame* callFrame = topCallFrame;
- if (!entryFrame)
+ if (!vmEntryFrame)
return; // Not running JS code. Nothing to invalidate.
while (callFrame) {
CodeBlock* codeBlock = callFrame->codeBlock();
if (codeBlock && JITCode::isOptimizingJIT(codeBlock->jitType()))
codeBlock->jettison(Profiler::JettisonDueToVMTraps);
- callFrame = callFrame->callerFrame(entryFrame);
+ callFrame = callFrame->callerFrame(vmEntryFrame);
}
}
#if ENABLE(WEBASSEMBLY)
#include "JSObject.h"
-#include "JSWebAssemblyCompileError.h"
-#include "JSWebAssemblyInstance.h"
-#include "JSWebAssemblyLinkError.h"
-#include "JSWebAssemblyMemory.h"
-#include "JSWebAssemblyModule.h"
-#include "JSWebAssemblyRuntimeError.h"
-#include "JSWebAssemblyTable.h"
-#include "WebAssemblyCompileErrorConstructor.h"
-#include "WebAssemblyCompileErrorPrototype.h"
-#include "WebAssemblyFunction.h"
-#include "WebAssemblyInstanceConstructor.h"
-#include "WebAssemblyInstancePrototype.h"
-#include "WebAssemblyLinkErrorConstructor.h"
-#include "WebAssemblyLinkErrorPrototype.h"
-#include "WebAssemblyMemoryConstructor.h"
-#include "WebAssemblyMemoryPrototype.h"
-#include "WebAssemblyModuleConstructor.h"
-#include "WebAssemblyModulePrototype.h"
-#include "WebAssemblyModuleRecord.h"
-#include "WebAssemblyPrototype.h"
-#include "WebAssemblyRuntimeErrorConstructor.h"
-#include "WebAssemblyRuntimeErrorPrototype.h"
-#include "WebAssemblyTableConstructor.h"
-#include "WebAssemblyTablePrototype.h"
-#include "WebAssemblyToJSCallee.h"
+#include "js/JSWebAssemblyCompileError.h"
+#include "js/JSWebAssemblyInstance.h"
+#include "js/JSWebAssemblyLinkError.h"
+#include "js/JSWebAssemblyMemory.h"
+#include "js/JSWebAssemblyModule.h"
+#include "js/JSWebAssemblyRuntimeError.h"
+#include "js/JSWebAssemblyTable.h"
+#include "js/WebAssemblyCompileErrorConstructor.h"
+#include "js/WebAssemblyCompileErrorPrototype.h"
+#include "js/WebAssemblyFunction.h"
+#include "js/WebAssemblyInstanceConstructor.h"
+#include "js/WebAssemblyInstancePrototype.h"
+#include "js/WebAssemblyLinkErrorConstructor.h"
+#include "js/WebAssemblyLinkErrorPrototype.h"
+#include "js/WebAssemblyMemoryConstructor.h"
+#include "js/WebAssemblyMemoryPrototype.h"
+#include "js/WebAssemblyModuleConstructor.h"
+#include "js/WebAssemblyModulePrototype.h"
+#include "js/WebAssemblyModuleRecord.h"
+#include "js/WebAssemblyPrototype.h"
+#include "js/WebAssemblyRuntimeErrorConstructor.h"
+#include "js/WebAssemblyRuntimeErrorPrototype.h"
+#include "js/WebAssemblyTableConstructor.h"
+#include "js/WebAssemblyTablePrototype.h"
+#include "js/WebAssemblyToJSCallee.h"
namespace JSC {
#include "B3WasmBoundsCheckValue.h"
#include "JSCInlines.h"
#include "JSWebAssemblyInstance.h"
+#include "JSWebAssemblyModule.h"
+#include "JSWebAssemblyRuntimeError.h"
#include "ScratchRegisterAllocator.h"
#include "VirtualRegister.h"
#include "WasmCallingConvention.h"
return fail(__VA_ARGS__); \
} while (0)
- B3IRGenerator(const ModuleInformation&, Procedure&, InternalFunction*, Vector<UnlinkedWasmToWasmCall>&, MemoryMode, CompilationMode, unsigned functionIndex, TierUpCount*, ThrowWasmException);
+ B3IRGenerator(const ModuleInformation&, Procedure&, InternalFunction*, Vector<UnlinkedWasmToWasmCall>&, MemoryMode, CompilationMode, unsigned functionIndex, TierUpCount*);
PartialResult WARN_UNUSED_RETURN addArguments(const Signature&);
PartialResult WARN_UNUSED_RETURN addLocal(Type, uint32_t);
int32_t WARN_UNUSED_RETURN fixupPointerPlusOffset(ExpressionType&, uint32_t);
- void restoreWasmContextInstance(Procedure&, BasicBlock*, Value*);
+ void restoreWasmContext(Procedure&, BasicBlock*, Value*);
void restoreWebAssemblyGlobalState(const MemoryInformation&, Value* instance, Procedure&, BasicBlock*);
Origin origin();
InsertionSet m_constantInsertionValues;
GPRReg m_memoryBaseGPR { InvalidGPRReg };
GPRReg m_memorySizeGPR { InvalidGPRReg };
- GPRReg m_wasmContextInstanceGPR { InvalidGPRReg };
+ GPRReg m_wasmContextGPR { InvalidGPRReg };
bool m_makesCalls { false };
Value* m_instanceValue { nullptr }; // Always use the accessor below to ensure the instance value is materialized when used.
return offset;
}
-void B3IRGenerator::restoreWasmContextInstance(Procedure& proc, BasicBlock* block, Value* arg)
+void B3IRGenerator::restoreWasmContext(Procedure& proc, BasicBlock* block, Value* arg)
{
- if (Context::useFastTLS()) {
+ if (useFastTLSForContext()) {
PatchpointValue* patchpoint = block->appendNew<PatchpointValue>(proc, B3::Void, Origin());
- if (CCallHelpers::storeWasmContextInstanceNeedsMacroScratchRegister())
+ if (CCallHelpers::storeWasmContextNeedsMacroScratchRegister())
patchpoint->clobber(RegisterSet::macroScratchRegisters());
patchpoint->append(ConstrainedValue(arg, ValueRep::SomeRegister));
patchpoint->setGenerator(
[=] (CCallHelpers& jit, const StackmapGenerationParams& params) {
- AllowMacroScratchRegisterUsageIf allowScratch(jit, CCallHelpers::storeWasmContextInstanceNeedsMacroScratchRegister());
- jit.storeWasmContextInstance(params[0].gpr());
+ AllowMacroScratchRegisterUsageIf allowScratch(jit, CCallHelpers::storeWasmContextNeedsMacroScratchRegister());
+ jit.storeWasmContext(params[0].gpr());
});
return;
}
- // FIXME: Because WasmToWasm call clobbers wasmContextInstance register and does not restore it, we need to restore it in the caller side.
+ // FIXME: Because WasmToWasm call clobbers wasmContext register and does not restore it, we need to restore it in the caller side.
// This prevents us from using ArgumentReg to this (logically) immutable pinned register.
PatchpointValue* patchpoint = block->appendNew<PatchpointValue>(proc, B3::Void, Origin());
Effects effects = Effects::none();
effects.writesPinned = true;
effects.reads = B3::HeapRange::top();
patchpoint->effects = effects;
- patchpoint->clobberLate(RegisterSet(m_wasmContextInstanceGPR));
+ patchpoint->clobberLate(RegisterSet(m_wasmContextGPR));
patchpoint->append(instanceValue(), ValueRep::SomeRegister);
- GPRReg wasmContextInstanceGPR = m_wasmContextInstanceGPR;
+ GPRReg wasmContextGPR = m_wasmContextGPR;
patchpoint->setGenerator([=] (CCallHelpers& jit, const StackmapGenerationParams& param) {
- jit.move(param[0].gpr(), wasmContextInstanceGPR);
+ jit.move(param[0].gpr(), wasmContextGPR);
});
}
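As a rough standalone illustration (not JSC API) of the two storage strategies restoreWasmContext() switches between above: fast TLS is approximated here with thread_local, and the pinned-register path is stood in for by a plain global, since portable C++ cannot pin a register.

struct WasmContextSketch;                                   // opaque placeholder type

thread_local WasmContextSketch* tlsWasmContext = nullptr;   // the "fast TLS" flavour
WasmContextSketch* pinnedRegisterWasmContext = nullptr;     // stand-in for m_wasmContextGPR

constexpr bool useFastTLSForContextSketch = true;           // assumed configuration knob

void storeWasmContextSketch(WasmContextSketch* context)
{
    if (useFastTLSForContextSketch)
        tlsWasmContext = context;            // corresponds to the storeWasmContext() patchpoint
    else
        pinnedRegisterWasmContext = context; // corresponds to the move into the pinned register
}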
-B3IRGenerator::B3IRGenerator(const ModuleInformation& info, Procedure& procedure, InternalFunction* compilation, Vector<UnlinkedWasmToWasmCall>& unlinkedWasmToWasmCalls, MemoryMode mode, CompilationMode compilationMode, unsigned functionIndex, TierUpCount* tierUp, ThrowWasmException throwWasmException)
+B3IRGenerator::B3IRGenerator(const ModuleInformation& info, Procedure& procedure, InternalFunction* compilation, Vector<UnlinkedWasmToWasmCall>& unlinkedWasmToWasmCalls, MemoryMode mode, CompilationMode compilationMode, unsigned functionIndex, TierUpCount* tierUp)
: m_info(info)
, m_mode(mode)
, m_compilationMode(compilationMode)
m_memoryBaseGPR = pinnedRegs.baseMemoryPointer;
m_proc.pinRegister(m_memoryBaseGPR);
- m_wasmContextInstanceGPR = pinnedRegs.wasmContextInstancePointer;
- if (!Context::useFastTLS())
- m_proc.pinRegister(m_wasmContextInstanceGPR);
+ m_wasmContextGPR = pinnedRegs.wasmContextPointer;
+ if (!useFastTLSForContext())
+ m_proc.pinRegister(m_wasmContextGPR);
if (mode != MemoryMode::Signaling) {
ASSERT(!pinnedRegs.sizeRegisters[0].sizeOffset);
m_proc.pinRegister(regInfo.sizeRegister);
}
- if (throwWasmException)
- Thunks::singleton().setThrowWasmException(throwWasmException);
-
if (info.memory) {
m_proc.setWasmBoundsCheckGenerator([=] (CCallHelpers& jit, GPRReg pinnedGPR) {
AllowMacroScratchRegisterUsage allowScratch(jit);
}
this->emitExceptionCheck(jit, ExceptionType::OutOfBoundsMemoryAccess);
});
-
- switch (m_mode) {
- case MemoryMode::BoundsChecking:
- break;
- case MemoryMode::Signaling:
- // Most memory accesses in signaling mode don't do an explicit
- // exception check because they can rely on fault handling to detect
- // out-of-bounds accesses. FaultSignalHandler nonetheless needs the
- // thunk to exist so that it can jump to that thunk.
- if (UNLIKELY(!Thunks::singleton().stub(throwExceptionFromWasmThunkGenerator)))
- CRASH();
- break;
- }
}
wasmCallingConvention().setupFrameInPrologue(&compilation->calleeMoveLocation, m_proc, Origin(), m_currentBlock);
m_instanceValue = stackOverflowCheck;
stackOverflowCheck->appendSomeRegister(framePointer);
stackOverflowCheck->clobber(RegisterSet::macroScratchRegisters());
- if (!Context::useFastTLS()) {
- // FIXME: Because WasmToWasm call clobbers wasmContextInstance register and does not restore it, we need to restore it in the caller side.
+ if (!useFastTLSForContext()) {
+ // FIXME: Because WasmToWasm call clobbers wasmContext register and does not restore it, we need to restore it in the caller side.
// This prevents us from using ArgumentReg to this (logically) immutable pinned register.
stackOverflowCheck->effects.writesPinned = false;
stackOverflowCheck->effects.readsPinned = true;
- stackOverflowCheck->resultConstraint = ValueRep::reg(m_wasmContextInstanceGPR);
+ stackOverflowCheck->resultConstraint = ValueRep::reg(m_wasmContextGPR);
}
stackOverflowCheck->numGPScratchRegisters = 2;
stackOverflowCheck->setGenerator([=] (CCallHelpers& jit, const B3::StackmapGenerationParams& params) {
bool needUnderflowCheck = static_cast<unsigned>(checkSize) > Options::reservedZoneSize();
bool needsOverflowCheck = m_makesCalls || wasmFrameSize >= minimumParentCheckSize || needUnderflowCheck;
- GPRReg contextInstance = Context::useFastTLS() ? params[0].gpr() : m_wasmContextInstanceGPR;
+ GPRReg context = useFastTLSForContext() ? params[0].gpr() : m_wasmContextGPR;
// This allows leaf functions to not do stack checks if their frame size is within
// certain limits since their caller would have already done the check.
GPRReg scratch1 = params.gpScratch(0);
GPRReg scratch2 = params.gpScratch(1);
- if (Context::useFastTLS())
- jit.loadWasmContextInstance(contextInstance);
+ if (useFastTLSForContext())
+ jit.loadWasmContext(context);
- jit.loadPtr(CCallHelpers::Address(contextInstance, JSWebAssemblyInstance::offsetOfCachedStackLimit()), scratch2);
+ jit.loadPtr(CCallHelpers::Address(context, Context::offsetOfCachedStackLimit()), scratch2);
jit.addPtr(CCallHelpers::TrustedImm32(-checkSize), fp, scratch1);
MacroAssembler::JumpList overflow;
if (UNLIKELY(needUnderflowCheck))
jit.addLinkTask([overflow] (LinkBuffer& linkBuffer) {
linkBuffer.link(overflow, CodeLocationLabel(Thunks::singleton().stub(throwStackOverflowFromWasmThunkGenerator).code()));
});
- } else if (m_usesInstanceValue && Context::useFastTLS()) {
+ } else if (m_usesInstanceValue && useFastTLSForContext()) {
// No overflow check is needed, but the instance value still needs to be correct.
- AllowMacroScratchRegisterUsageIf allowScratch(jit, CCallHelpers::loadWasmContextInstanceNeedsMacroScratchRegister());
- jit.loadWasmContextInstance(contextInstance);
+ AllowMacroScratchRegisterUsageIf allowScratch(jit, CCallHelpers::loadWasmContextNeedsMacroScratchRegister());
+ jit.loadWasmContext(context);
} else {
// We said we'd return a pointer. We don't actually need to because it isn't used, but the patchpoint conservatively said it had effects (potential stack check) which prevent it from getting removed.
}
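A standalone sketch (assumed constants, not the real Options values) of the decision the comments above describe: a frame only needs an explicit stack check when it makes calls, when it is large enough that its caller's check does not cover it, or when it could underflow the reserved zone.

#include <cstdint>

constexpr uint32_t assumedMinimumParentCheckSize = 1024;   // hypothetical threshold
constexpr uint32_t assumedReservedZoneSize = 16 * 1024;    // hypothetical reserved zone

bool needsExplicitStackCheck(bool makesCalls, uint32_t wasmFrameSize, uint32_t checkSize)
{
    bool needUnderflowCheck = checkSize > assumedReservedZoneSize;
    return makesCalls || wasmFrameSize >= assumedMinimumParentCheckSize || needUnderflowCheck;
}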
void B3IRGenerator::restoreWebAssemblyGlobalState(const MemoryInformation& memory, Value* instance, Procedure& proc, BasicBlock* block)
{
- restoreWasmContextInstance(proc, block, instance);
+ restoreWasmContext(proc, block, instance);
if (!!memory) {
const PinnedRegisterInfo* pinnedRegs = &PinnedRegisterInfo::get();
patchpoint->setGenerator([pinnedRegs] (CCallHelpers& jit, const B3::StackmapGenerationParams& params) {
GPRReg baseMemory = pinnedRegs->baseMemoryPointer;
- jit.loadPtr(CCallHelpers::Address(params[0].gpr(), JSWebAssemblyInstance::offsetOfWasmMemory()), baseMemory);
+ jit.loadPtr(CCallHelpers::Address(params[0].gpr(), JSWebAssemblyInstance::offsetOfMemory()), baseMemory);
const auto& sizeRegs = pinnedRegs->sizeRegisters;
ASSERT(sizeRegs.size() >= 1);
ASSERT(!sizeRegs[0].sizeOffset); // The following code assumes we start at 0, and calculates subsequent size registers relative to 0.
- jit.loadPtr(CCallHelpers::Address(baseMemory, Memory::offsetOfSize()), sizeRegs[0].sizeRegister);
- jit.loadPtr(CCallHelpers::Address(baseMemory, Memory::offsetOfMemory()), baseMemory);
+ jit.loadPtr(CCallHelpers::Address(baseMemory, JSWebAssemblyMemory::offsetOfSize()), sizeRegs[0].sizeRegister);
+ jit.loadPtr(CCallHelpers::Address(baseMemory, JSWebAssemblyMemory::offsetOfMemory()), baseMemory);
for (unsigned i = 1; i < sizeRegs.size(); ++i)
jit.add64(CCallHelpers::TrustedImm32(-sizeRegs[i].sizeOffset), sizeRegs[0].sizeRegister, sizeRegs[i].sizeRegister);
});
auto B3IRGenerator::addGrowMemory(ExpressionType delta, ExpressionType& result) -> PartialResult
{
- int32_t (*growMemory)(JSWebAssemblyInstance*, int32_t) = [] (JSWebAssemblyInstance* instance, int32_t delta) -> int32_t {
+ int32_t (*growMemory) (Context*, int32_t) = [] (Context* wasmContext, int32_t delta) -> int32_t {
+ VM& vm = *wasmContext->vm();
+ auto scope = DECLARE_THROW_SCOPE(vm);
+
+ JSWebAssemblyMemory* wasmMemory = wasmContext->memory();
+
if (delta < 0)
return -1;
- auto grown = instance->internalMemory().grow(PageCount(delta));
- if (!grown) {
- switch (grown.error()) {
- case Memory::GrowFailReason::InvalidDelta:
- case Memory::GrowFailReason::InvalidGrowSize:
- case Memory::GrowFailReason::WouldExceedMaximum:
- case Memory::GrowFailReason::OutOfMemory:
- return -1;
- }
- }
+ bool shouldThrowExceptionsOnFailure = false;
+ // grow() does not require ExecState* if it doesn't throw exceptions.
+ ExecState* exec = nullptr;
+ PageCount result = wasmMemory->grow(vm, exec, static_cast<uint32_t>(delta), shouldThrowExceptionsOnFailure);
+ scope.releaseAssertNoException();
+ if (!result)
+ return -1;
- return grown.value().pageCount();
+ return result.pageCount();
};
result = m_currentBlock->appendNew<CCallValue>(m_proc, Int32, origin(),
auto B3IRGenerator::addCurrentMemory(ExpressionType& result) -> PartialResult
{
- Value* memoryObject = m_currentBlock->appendNew<MemoryValue>(m_proc, Load, pointerType(), origin(), instanceValue(), safeCast<int32_t>(JSWebAssemblyInstance::offsetOfWasmMemory()));
+ Value* memoryObject = m_currentBlock->appendNew<MemoryValue>(m_proc, Load, pointerType(), origin(), instanceValue(), safeCast<int32_t>(JSWebAssemblyInstance::offsetOfMemory()));
- static_assert(sizeof(decltype(static_cast<Memory*>(nullptr)->size())) == sizeof(uint64_t), "codegen relies on this size");
- Value* size = m_currentBlock->appendNew<MemoryValue>(m_proc, Load, Int64, origin(), memoryObject, safeCast<int32_t>(Memory::offsetOfSize()));
+ static_assert(sizeof(decltype(static_cast<JSWebAssemblyInstance*>(nullptr)->memory()->memory().size())) == sizeof(uint64_t), "codegen relies on this size");
+ Value* size = m_currentBlock->appendNew<MemoryValue>(m_proc, Load, Int64, origin(), memoryObject, safeCast<int32_t>(JSWebAssemblyMemory::offsetOfSize()));
constexpr uint32_t shiftValue = 16;
- static_assert(PageCount::pageSize == 1ull << shiftValue, "This must hold for the code below to be correct.");
+ static_assert(PageCount::pageSize == 1 << shiftValue, "This must hold for the code below to be correct.");
Value* numPages = m_currentBlock->appendNew<Value>(m_proc, ZShr, origin(),
size, m_currentBlock->appendNew<Const32Value>(m_proc, origin(), shiftValue));
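A standalone sketch of the arithmetic the current_memory lowering above relies on: with the 64KiB page size asserted by the static_assert, the page count is simply the byte size shifted right by 16, which is what the emitted ZShr computes.

#include <cassert>
#include <cstdint>

constexpr uint32_t pageShift = 16;
constexpr uint64_t pageSizeInBytes = 1ull << pageShift;        // 64KiB WebAssembly pages

uint32_t pageCountForByteSize(uint64_t byteSize)
{
    assert(!(byteSize % pageSizeInBytes));                     // memory sizes are page-aligned
    return static_cast<uint32_t>(byteSize >> pageShift);       // same shift as the emitted ZShr
}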
m_maxNumJSCallArguments = std::max(m_maxNumJSCallArguments, static_cast<uint32_t>(args.size()));
// FIXME imports can be linked here, instead of generating a patchpoint, because all import stubs are generated before B3 compilation starts. https://bugs.webkit.org/show_bug.cgi?id=166462
- Value* targetInstance = m_currentBlock->appendNew<MemoryValue>(m_proc, Load, pointerType(), origin(), instanceValue(), safeCast<int32_t>(JSWebAssemblyInstance::offsetOfTargetInstance(functionIndex)));
- Value* isWasmCall = m_currentBlock->appendNew<Value>(m_proc, NotEqual, origin(), targetInstance, m_currentBlock->appendNew<Const64Value>(m_proc, origin(), 0));
+ Value* functionImport = m_currentBlock->appendNew<MemoryValue>(m_proc, Load, pointerType(), origin(), instanceValue(), safeCast<int32_t>(JSWebAssemblyInstance::offsetOfImportFunction(functionIndex)));
+ Value* jsTypeOfImport = m_currentBlock->appendNew<MemoryValue>(m_proc, Load8Z, origin(), functionImport, safeCast<int32_t>(JSCell::typeInfoTypeOffset()));
+ Value* isWasmCall = m_currentBlock->appendNew<Value>(m_proc, Equal, origin(), jsTypeOfImport, m_currentBlock->appendNew<Const32Value>(m_proc, origin(), WebAssemblyFunctionType));
BasicBlock* isWasmBlock = m_proc.addBlock();
- BasicBlock* isEmbedderBlock = m_proc.addBlock();
+ BasicBlock* isJSBlock = m_proc.addBlock();
BasicBlock* continuation = m_proc.addBlock();
- m_currentBlock->appendNewControlValue(m_proc, B3::Branch, origin(), isWasmCall, FrequentedBlock(isWasmBlock), FrequentedBlock(isEmbedderBlock));
+ m_currentBlock->appendNewControlValue(m_proc, B3::Branch, origin(), isWasmCall, FrequentedBlock(isWasmBlock), FrequentedBlock(isJSBlock));
Value* wasmCallResult = wasmCallingConvention().setupCall(m_proc, isWasmBlock, origin(), args, toB3Type(returnType),
[=] (PatchpointValue* patchpoint) {
UpsilonValue* wasmCallResultUpsilon = returnType == Void ? nullptr : isWasmBlock->appendNew<UpsilonValue>(m_proc, origin(), wasmCallResult);
isWasmBlock->appendNewControlValue(m_proc, Jump, origin(), continuation);
- // FIXME: Let's remove this indirection by creating a PIC friendly IC
+ // FIXME: Lets remove this indirection by creating a PIC friendly IC
// for calls out to JS. This shouldn't be that hard to do. We could probably
- // implement the IC to be over Context*.
+ // implement the IC to be over Wasm::Context*.
// https://bugs.webkit.org/show_bug.cgi?id=170375
- Value* codeBlock = isEmbedderBlock->appendNew<MemoryValue>(m_proc,
- Load, pointerType(), origin(), instanceValue(), safeCast<int32_t>(JSWebAssemblyInstance::offsetOfWasmCodeBlock()));
- Value* jumpDestination = isEmbedderBlock->appendNew<MemoryValue>(m_proc,
- Load, pointerType(), origin(), codeBlock, safeCast<int32_t>(CodeBlock::offsetOfImportWasmToEmbedderStub(functionIndex)));
- Value* embedderCallResult = wasmCallingConvention().setupCall(m_proc, isEmbedderBlock, origin(), args, toB3Type(returnType),
+ Value* codeBlock = isJSBlock->appendNew<MemoryValue>(m_proc,
+ Load, pointerType(), origin(), instanceValue(), safeCast<int32_t>(JSWebAssemblyInstance::offsetOfCodeBlock()));
+ Value* jumpDestination = isJSBlock->appendNew<MemoryValue>(m_proc,
+ Load, pointerType(), origin(), codeBlock, safeCast<int32_t>(JSWebAssemblyCodeBlock::offsetOfImportWasmToJSStub(functionIndex)));
+ Value* jsCallResult = wasmCallingConvention().setupCall(m_proc, isJSBlock, origin(), args, toB3Type(returnType),
[&] (PatchpointValue* patchpoint) {
patchpoint->effects.writesPinned = true;
patchpoint->effects.readsPinned = true;
jit.call(params[returnType == Void ? 0 : 1].gpr());
});
});
- UpsilonValue* embedderCallResultUpsilon = returnType == Void ? nullptr : isEmbedderBlock->appendNew<UpsilonValue>(m_proc, origin(), embedderCallResult);
- isEmbedderBlock->appendNewControlValue(m_proc, Jump, origin(), continuation);
+ UpsilonValue* jsCallResultUpsilon = returnType == Void ? nullptr : isJSBlock->appendNew<UpsilonValue>(m_proc, origin(), jsCallResult);
+ isJSBlock->appendNewControlValue(m_proc, Jump, origin(), continuation);
m_currentBlock = continuation;
else {
result = continuation->appendNew<Value>(m_proc, Phi, toB3Type(returnType), origin());
wasmCallResultUpsilon->setPhi(result);
- embedderCallResultUpsilon->setPhi(result);
+ jsCallResultUpsilon->setPhi(result);
}
// The call could have been to another WebAssembly instance, and / or could have modified our Memory.
m_maxNumJSCallArguments = std::max(m_maxNumJSCallArguments, static_cast<uint32_t>(args.size()));
ExpressionType callableFunctionBuffer;
- ExpressionType instancesBuffer;
+ ExpressionType jsFunctionBuffer;
ExpressionType callableFunctionBufferSize;
{
ExpressionType table = m_currentBlock->appendNew<MemoryValue>(m_proc, Load, pointerType(), origin(),
- instanceValue(), safeCast<int32_t>(JSWebAssemblyInstance::offsetOfWasmTable()));
+ instanceValue(), safeCast<int32_t>(JSWebAssemblyInstance::offsetOfTable()));
callableFunctionBuffer = m_currentBlock->appendNew<MemoryValue>(m_proc, Load, pointerType(), origin(),
- table, safeCast<int32_t>(Table::offsetOfFunctions()));
- instancesBuffer = m_currentBlock->appendNew<MemoryValue>(m_proc, Load, pointerType(), origin(),
- table, safeCast<int32_t>(Table::offsetOfInstances()));
+ table, safeCast<int32_t>(JSWebAssemblyTable::offsetOfFunctions()));
+ jsFunctionBuffer = m_currentBlock->appendNew<MemoryValue>(m_proc, Load, pointerType(), origin(),
+ table, safeCast<int32_t>(JSWebAssemblyTable::offsetOfJSFunctions()));
callableFunctionBufferSize = m_currentBlock->appendNew<MemoryValue>(m_proc, Load, Int32, origin(),
- table, safeCast<int32_t>(Table::offsetOfSize()));
+ table, safeCast<int32_t>(JSWebAssemblyTable::offsetOfSize()));
}
// Check the index we are looking for is valid.
ExpressionType callableFunction = m_currentBlock->appendNew<Value>(m_proc, Add, origin(), callableFunctionBuffer, offset);
// Check that the CallableFunction is initialized. We trap if it isn't. An "invalid" SignatureIndex indicates it's not initialized.
- // FIXME: when we have trap handlers, we can just let the call fail because Signature::invalidIndex is 0. https://bugs.webkit.org/show_bug.cgi?id=177210
static_assert(sizeof(CallableFunction::signatureIndex) == sizeof(uint32_t), "Load codegen assumes i32");
ExpressionType calleeSignatureIndex = m_currentBlock->appendNew<MemoryValue>(m_proc, Load, Int32, origin(), callableFunction, safeCast<int32_t>(OBJECT_OFFSETOF(CallableFunction, signatureIndex)));
{
{
Value* offset = m_currentBlock->appendNew<Value>(m_proc, Mul, origin(),
m_currentBlock->appendNew<Value>(m_proc, ZExt32, origin(), calleeIndex),
- constant(pointerType(), sizeof(Instance*)));
- Value* newContextInstance = m_currentBlock->appendNew<MemoryValue>(m_proc, Load, pointerType(), origin(),
- m_currentBlock->appendNew<Value>(m_proc, Add, origin(), instancesBuffer, offset));
+ constant(pointerType(), sizeof(WriteBarrier<JSObject>)));
+ Value* jsObject = m_currentBlock->appendNew<MemoryValue>(m_proc, Load, pointerType(), origin(),
+ m_currentBlock->appendNew<Value>(m_proc, Add, origin(), jsFunctionBuffer, offset));
BasicBlock* continuation = m_proc.addBlock();
BasicBlock* doContextSwitch = m_proc.addBlock();
- Value* isSameContextInstance = m_currentBlock->appendNew<Value>(m_proc, Equal, origin(),
- newContextInstance, instanceValue());
+ Value* newContext = m_currentBlock->appendNew<MemoryValue>(m_proc, Load, pointerType(), origin(),
+ jsObject, safeCast<int32_t>(WebAssemblyFunctionBase::offsetOfInstance()));
+ Value* isSameContext = m_currentBlock->appendNew<Value>(m_proc, Equal, origin(),
+ newContext, instanceValue());
m_currentBlock->appendNewControlValue(m_proc, B3::Branch, origin(),
- isSameContextInstance, FrequentedBlock(continuation), FrequentedBlock(doContextSwitch));
+ isSameContext, FrequentedBlock(continuation), FrequentedBlock(doContextSwitch));
PatchpointValue* patchpoint = doContextSwitch->appendNew<PatchpointValue>(m_proc, B3::Void, origin());
patchpoint->effects.writesPinned = true;
// FIXME: We shouldn't have to do this: https://bugs.webkit.org/show_bug.cgi?id=172181
patchpoint->clobber(PinnedRegisterInfo::get().toSave(MemoryMode::BoundsChecking));
patchpoint->clobber(RegisterSet::macroScratchRegisters());
- patchpoint->append(newContextInstance, ValueRep::SomeRegister);
+ patchpoint->append(newContext, ValueRep::SomeRegister);
patchpoint->append(instanceValue(), ValueRep::SomeRegister);
patchpoint->setGenerator([=] (CCallHelpers& jit, const B3::StackmapGenerationParams& params) {
AllowMacroScratchRegisterUsage allowScratch(jit);
- GPRReg newContextInstance = params[0].gpr();
- GPRReg oldContextInstance = params[1].gpr();
+ GPRReg newContext = params[0].gpr();
+ GPRReg oldContext = params[1].gpr();
const PinnedRegisterInfo& pinnedRegs = PinnedRegisterInfo::get();
const auto& sizeRegs = pinnedRegs.sizeRegisters;
GPRReg baseMemory = pinnedRegs.baseMemoryPointer;
- ASSERT(newContextInstance != baseMemory);
- jit.loadPtr(CCallHelpers::Address(oldContextInstance, JSWebAssemblyInstance::offsetOfCachedStackLimit()), baseMemory);
- jit.storePtr(baseMemory, CCallHelpers::Address(newContextInstance, JSWebAssemblyInstance::offsetOfCachedStackLimit()));
- jit.storeWasmContextInstance(newContextInstance);
- jit.loadPtr(CCallHelpers::Address(newContextInstance, JSWebAssemblyInstance::offsetOfWasmMemory()), baseMemory); // Memory*.
+ ASSERT(newContext != baseMemory);
+ jit.loadPtr(CCallHelpers::Address(oldContext, Context::offsetOfCachedStackLimit()), baseMemory);
+ jit.storePtr(baseMemory, CCallHelpers::Address(newContext, Context::offsetOfCachedStackLimit()));
+ jit.storeWasmContext(newContext);
+ jit.loadPtr(CCallHelpers::Address(newContext, Context::offsetOfMemory()), baseMemory); // JSWebAssemblyMemory*.
ASSERT(sizeRegs.size() == 1);
ASSERT(sizeRegs[0].sizeRegister != baseMemory);
- ASSERT(sizeRegs[0].sizeRegister != newContextInstance);
+ ASSERT(sizeRegs[0].sizeRegister != newContext);
ASSERT(!sizeRegs[0].sizeOffset);
- jit.loadPtr(CCallHelpers::Address(baseMemory, Memory::offsetOfSize()), sizeRegs[0].sizeRegister); // Memory size.
- jit.loadPtr(CCallHelpers::Address(baseMemory, Memory::offsetOfMemory()), baseMemory); // Memory::void*.
+ jit.loadPtr(CCallHelpers::Address(baseMemory, JSWebAssemblyMemory::offsetOfSize()), sizeRegs[0].sizeRegister); // Memory size.
+ jit.loadPtr(CCallHelpers::Address(baseMemory, JSWebAssemblyMemory::offsetOfMemory()), baseMemory); // WasmMemory::void*.
});
doContextSwitch->appendNewControlValue(m_proc, Jump, origin(), continuation);
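A standalone sketch (hypothetical table layout, not the real Table or JSWebAssemblyTable classes) of the guards the call_indirect lowering above emits before calling through the table: a bounds check on the index, an "is this slot initialized?" check via the signature index, and a signature comparison, each of which traps on failure.

#include <cstdint>
#include <stdexcept>
#include <vector>

struct TableSlotSketch {
    uint32_t signatureIndex { 0 };   // 0 stands in for the "invalid" / uninitialized marker
    void* code { nullptr };
};

void* lookUpIndirectCallee(const std::vector<TableSlotSketch>& table, uint32_t calleeIndex, uint32_t expectedSignatureIndex)
{
    if (calleeIndex >= table.size())
        throw std::out_of_range("call_indirect index is out of bounds");      // trap
    const TableSlotSketch& slot = table[calleeIndex];
    if (!slot.signatureIndex)
        throw std::runtime_error("call_indirect hit an uninitialized slot");  // trap
    if (slot.signatureIndex != expectedSignatureIndex)
        throw std::runtime_error("call_indirect signature mismatch");         // trap
    return slot.code;
}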
dataLogLn();
}
+std::unique_ptr<InternalFunction> createJSToWasmWrapper(CompilationContext& compilationContext, const Signature& signature, Vector<UnlinkedWasmToWasmCall>* unlinkedWasmToWasmCalls, const ModuleInformation& info, MemoryMode mode, unsigned functionIndex)
+{
+ CCallHelpers& jit = *compilationContext.jsEntrypointJIT;
+
+ auto result = std::make_unique<InternalFunction>();
+ jit.emitFunctionPrologue();
+
+ // FIXME Stop using 0 as codeBlocks. https://bugs.webkit.org/show_bug.cgi?id=165321
+ jit.store64(CCallHelpers::TrustedImm64(0), CCallHelpers::Address(GPRInfo::callFrameRegister, CallFrameSlot::codeBlock * static_cast<int>(sizeof(Register))));
+ MacroAssembler::DataLabelPtr calleeMoveLocation = jit.moveWithPatch(MacroAssembler::TrustedImmPtr(nullptr), GPRInfo::nonPreservedNonReturnGPR);
+ jit.storePtr(GPRInfo::nonPreservedNonReturnGPR, CCallHelpers::Address(GPRInfo::callFrameRegister, CallFrameSlot::callee * static_cast<int>(sizeof(Register))));
+ CodeLocationDataLabelPtr* linkedCalleeMove = &result->calleeMoveLocation;
+ jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
+ *linkedCalleeMove = linkBuffer.locationOf(calleeMoveLocation);
+ });
+
+ const PinnedRegisterInfo& pinnedRegs = PinnedRegisterInfo::get();
+ RegisterSet toSave = pinnedRegs.toSave(mode);
+
+#if !ASSERT_DISABLED
+ unsigned toSaveSize = toSave.numberOfSetGPRs();
+ // They should all be callee saves.
+ toSave.filter(RegisterSet::calleeSaveRegisters());
+ ASSERT(toSave.numberOfSetGPRs() == toSaveSize);
+#endif
+
+ RegisterAtOffsetList registersToSpill(toSave, RegisterAtOffsetList::OffsetBaseType::FramePointerBased);
+ result->entrypoint.calleeSaveRegisters = registersToSpill;
+
+ unsigned totalFrameSize = registersToSpill.size() * sizeof(void*);
+ totalFrameSize += WasmCallingConvention::headerSizeInBytes();
+ totalFrameSize -= sizeof(CallerFrameAndPC);
+ unsigned numGPRs = 0;
+ unsigned numFPRs = 0;
+ for (unsigned i = 0; i < signature.argumentCount(); i++) {
+ switch (signature.argument(i)) {
+ case Wasm::I64:
+ case Wasm::I32:
+ if (numGPRs >= wasmCallingConvention().m_gprArgs.size())
+ totalFrameSize += sizeof(void*);
+ ++numGPRs;
+ break;
+ case Wasm::F32:
+ case Wasm::F64:
+ if (numFPRs >= wasmCallingConvention().m_fprArgs.size())
+ totalFrameSize += sizeof(void*);
+ ++numFPRs;
+ break;
+ default:
+ RELEASE_ASSERT_NOT_REACHED();
+ }
+ }
+
+ totalFrameSize = WTF::roundUpToMultipleOf(stackAlignmentBytes(), totalFrameSize);
+ jit.subPtr(MacroAssembler::TrustedImm32(totalFrameSize), MacroAssembler::stackPointerRegister);
+
+ // We save all these registers regardless of whether we have a memory,
+ // because we use one of these as a scratch. That said, almost all real
+ // wasm programs use memory, so it's not really worth optimizing for the
+ // case where they don't.
+ for (const RegisterAtOffset& regAtOffset : registersToSpill) {
+ GPRReg reg = regAtOffset.reg().gpr();
+ ptrdiff_t offset = regAtOffset.offset();
+ jit.storePtr(reg, CCallHelpers::Address(GPRInfo::callFrameRegister, offset));
+ }
+
+ GPRReg wasmContextGPR = pinnedRegs.wasmContextPointer;
+
+ {
+ CCallHelpers::Address calleeFrame = CCallHelpers::Address(MacroAssembler::stackPointerRegister, -static_cast<ptrdiff_t>(sizeof(CallerFrameAndPC)));
+ numGPRs = 0;
+ numFPRs = 0;
+ // We're going to set the pinned registers after this. So
+ // we can use this as a scratch for now since we saved it above.
+ GPRReg scratchReg = pinnedRegs.baseMemoryPointer;
+
+ ptrdiff_t jsOffset = CallFrameSlot::thisArgument * sizeof(EncodedJSValue);
+
+ // vmEntryToWasm passes Wasm::Context* as the first JS argument when we're
+ // not using fast TLS to hold the Wasm::Context*.
+ if (!useFastTLSForContext()) {
+ jit.loadPtr(CCallHelpers::Address(GPRInfo::callFrameRegister, jsOffset), wasmContextGPR);
+ jsOffset += sizeof(EncodedJSValue);
+ }
+
+ ptrdiff_t wasmOffset = CallFrame::headerSizeInRegisters * sizeof(void*);
+ for (unsigned i = 0; i < signature.argumentCount(); i++) {
+ switch (signature.argument(i)) {
+ case Wasm::I32:
+ case Wasm::I64:
+ if (numGPRs >= wasmCallingConvention().m_gprArgs.size()) {
+ if (signature.argument(i) == Wasm::I32) {
+ jit.load32(CCallHelpers::Address(GPRInfo::callFrameRegister, jsOffset), scratchReg);
+ jit.store32(scratchReg, calleeFrame.withOffset(wasmOffset));
+ } else {
+ jit.load64(CCallHelpers::Address(GPRInfo::callFrameRegister, jsOffset), scratchReg);
+ jit.store64(scratchReg, calleeFrame.withOffset(wasmOffset));
+ }
+ wasmOffset += sizeof(void*);
+ } else {
+ if (signature.argument(i) == Wasm::I32)
+ jit.load32(CCallHelpers::Address(GPRInfo::callFrameRegister, jsOffset), wasmCallingConvention().m_gprArgs[numGPRs].gpr());
+ else
+ jit.load64(CCallHelpers::Address(GPRInfo::callFrameRegister, jsOffset), wasmCallingConvention().m_gprArgs[numGPRs].gpr());
+ }
+ ++numGPRs;
+ break;
+ case Wasm::F32:
+ case Wasm::F64:
+ if (numFPRs >= wasmCallingConvention().m_fprArgs.size()) {
+ if (signature.argument(i) == Wasm::F32) {
+ jit.load32(CCallHelpers::Address(GPRInfo::callFrameRegister, jsOffset), scratchReg);
+ jit.store32(scratchReg, calleeFrame.withOffset(wasmOffset));
+ } else {
+ jit.load64(CCallHelpers::Address(GPRInfo::callFrameRegister, jsOffset), scratchReg);
+ jit.store64(scratchReg, calleeFrame.withOffset(wasmOffset));
+ }
+ wasmOffset += sizeof(void*);
+ } else {
+ if (signature.argument(i) == Wasm::F32)
+ jit.loadFloat(CCallHelpers::Address(GPRInfo::callFrameRegister, jsOffset), wasmCallingConvention().m_fprArgs[numFPRs].fpr());
+ else
+ jit.loadDouble(CCallHelpers::Address(GPRInfo::callFrameRegister, jsOffset), wasmCallingConvention().m_fprArgs[numFPRs].fpr());
+ }
+ ++numFPRs;
+ break;
+ default:
+ RELEASE_ASSERT_NOT_REACHED();
+ }
+
+ jsOffset += sizeof(EncodedJSValue);
+ }
+ }
+
+ if (!!info.memory) {
+ GPRReg baseMemory = pinnedRegs.baseMemoryPointer;
+
+ if (!useFastTLSForContext())
+ jit.loadPtr(CCallHelpers::Address(wasmContextGPR, JSWebAssemblyInstance::offsetOfMemory()), baseMemory);
+ else {
+ jit.loadWasmContext(baseMemory);
+ jit.loadPtr(CCallHelpers::Address(baseMemory, JSWebAssemblyInstance::offsetOfMemory()), baseMemory);
+ }
+
+ if (mode != MemoryMode::Signaling) {
+ const auto& sizeRegs = pinnedRegs.sizeRegisters;
+ ASSERT(sizeRegs.size() >= 1);
+ ASSERT(!sizeRegs[0].sizeOffset); // The following code assumes we start at 0, and calculates subsequent size registers relative to 0.
+ jit.loadPtr(CCallHelpers::Address(baseMemory, JSWebAssemblyMemory::offsetOfSize()), sizeRegs[0].sizeRegister);
+ for (unsigned i = 1; i < sizeRegs.size(); ++i)
+ jit.add64(CCallHelpers::TrustedImm32(-sizeRegs[i].sizeOffset), sizeRegs[0].sizeRegister, sizeRegs[i].sizeRegister);
+ }
+
+ jit.loadPtr(CCallHelpers::Address(baseMemory, JSWebAssemblyMemory::offsetOfMemory()), baseMemory);
+ }
+
+ CCallHelpers::Call call = jit.threadSafePatchableNearCall();
+ unsigned functionIndexSpace = functionIndex + info.importFunctionCount();
+ ASSERT(functionIndexSpace < info.functionIndexSpaceSize());
+ jit.addLinkTask([unlinkedWasmToWasmCalls, call, functionIndexSpace] (LinkBuffer& linkBuffer) {
+ unlinkedWasmToWasmCalls->append({ linkBuffer.locationOfNearCall(call), functionIndexSpace });
+ });
+
+
+ for (const RegisterAtOffset& regAtOffset : registersToSpill) {
+ GPRReg reg = regAtOffset.reg().gpr();
+ ASSERT(reg != GPRInfo::returnValueGPR);
+ ptrdiff_t offset = regAtOffset.offset();
+ jit.loadPtr(CCallHelpers::Address(GPRInfo::callFrameRegister, offset), reg);
+ }
+
+ switch (signature.returnType()) {
+ case Wasm::F32:
+ jit.moveFloatTo32(FPRInfo::returnValueFPR, GPRInfo::returnValueGPR);
+ break;
+ case Wasm::F64:
+ jit.moveDoubleTo64(FPRInfo::returnValueFPR, GPRInfo::returnValueGPR);
+ break;
+ default:
+ break;
+ }
+
+ jit.emitFunctionEpilogue();
+ jit.ret();
+
+ return result;
+}
+
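A standalone sketch (assumed register budgets) of the argument-classification loop createJSToWasmWrapper() uses above: integer arguments consume GPR argument slots, floating-point arguments consume FPR slots, and anything past the register budget adds a pointer-sized stack slot to the frame.

#include <cstddef>
#include <vector>

enum class WasmArgTypeSketch { I32, I64, F32, F64 };

size_t extraStackBytesForArguments(const std::vector<WasmArgTypeSketch>& arguments, size_t gprBudget, size_t fprBudget)
{
    size_t numGPRs = 0;
    size_t numFPRs = 0;
    size_t stackBytes = 0;
    for (WasmArgTypeSketch type : arguments) {
        bool isFloat = type == WasmArgTypeSketch::F32 || type == WasmArgTypeSketch::F64;
        size_t& used = isFloat ? numFPRs : numGPRs;
        if (used >= (isFloat ? fprBudget : gprBudget))
            stackBytes += sizeof(void*);   // a spilled argument gets a stack slot
        ++used;
    }
    return stackBytes;
}

The same walk runs twice in the wrapper above: once to size the frame and once to load each argument into its register or callee-frame slot.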
auto B3IRGenerator::origin() -> Origin
{
OpcodeOrigin origin(m_parser->currentOpcode(), m_parser->currentOpcodeStartingOffset());
return bitwise_cast<Origin>(origin);
}
-Expected<std::unique_ptr<InternalFunction>, String> parseAndCompile(CompilationContext& compilationContext, const uint8_t* functionStart, size_t functionLength, const Signature& signature, Vector<UnlinkedWasmToWasmCall>& unlinkedWasmToWasmCalls, const ModuleInformation& info, MemoryMode mode, CompilationMode compilationMode, uint32_t functionIndex, TierUpCount* tierUp, ThrowWasmException throwWasmException)
+Expected<std::unique_ptr<InternalFunction>, String> parseAndCompile(CompilationContext& compilationContext, const uint8_t* functionStart, size_t functionLength, const Signature& signature, Vector<UnlinkedWasmToWasmCall>& unlinkedWasmToWasmCalls, const ModuleInformation& info, MemoryMode mode, CompilationMode compilationMode, uint32_t functionIndex, TierUpCount* tierUp)
{
auto result = std::make_unique<InternalFunction>();
? Options::webAssemblyBBQOptimizationLevel()
: Options::webAssemblyOMGOptimizationLevel());
- B3IRGenerator irGenerator(info, procedure, result.get(), unlinkedWasmToWasmCalls, mode, compilationMode, functionIndex, tierUp, throwWasmException);
- FunctionParser<B3IRGenerator> parser(irGenerator, functionStart, functionLength, signature, info);
+ B3IRGenerator context(info, procedure, result.get(), unlinkedWasmToWasmCalls, mode, compilationMode, functionIndex, tierUp);
+ FunctionParser<B3IRGenerator> parser(context, functionStart, functionLength, signature, info);
WASM_FAIL_IF_HELPER_FAILS(parser.parse());
- irGenerator.insertConstants();
+ context.insertConstants();
procedure.resetReachability();
if (!ASSERT_DISABLED)
#include "B3Compilation.h"
#include "B3OpaqueByproducts.h"
#include "CCallHelpers.h"
-#include "WasmEmbedder.h"
#include "WasmMemory.h"
#include "WasmModuleInformation.h"
#include "WasmTierUpCount.h"
std::unique_ptr<B3::OpaqueByproducts> wasmEntrypointByproducts;
};
-Expected<std::unique_ptr<InternalFunction>, String> parseAndCompile(CompilationContext&, const uint8_t*, size_t, const Signature&, Vector<UnlinkedWasmToWasmCall>&, const ModuleInformation&, MemoryMode, CompilationMode, uint32_t functionIndex, TierUpCount* = nullptr, ThrowWasmException = nullptr);
+Expected<std::unique_ptr<InternalFunction>, String> parseAndCompile(CompilationContext&, const uint8_t*, size_t, const Signature&, Vector<UnlinkedWasmToWasmCall>&, const ModuleInformation&, MemoryMode, CompilationMode, uint32_t functionIndex, TierUpCount* = nullptr);
+
+std::unique_ptr<InternalFunction> createJSToWasmWrapper(CompilationContext&, const Signature&, Vector<UnlinkedWasmToWasmCall>*, const ModuleInformation&, MemoryMode, uint32_t functionIndex);
} } // namespace JSC::Wasm
#if ENABLE(WEBASSEMBLY)
#include "B3Compilation.h"
+#include "JSCInlines.h"
+#include "JSGlobalObject.h"
#include "WasmB3IRGenerator.h"
#include "WasmBinding.h"
#include "WasmCallee.h"
static const bool verbose = false;
}
-BBQPlan::BBQPlan(Context* context, Ref<ModuleInformation> info, AsyncWork work, CompletionTask&& task, CreateEmbedderWrapper&& createEmbedderWrapper, ThrowWasmException throwWasmException)
- : Base(context, WTFMove(info), WTFMove(task), WTFMove(createEmbedderWrapper), throwWasmException)
+BBQPlan::BBQPlan(VM* vm, Ref<ModuleInformation> info, AsyncWork work, CompletionTask&& task)
+ : Base(vm, WTFMove(info), WTFMove(task))
, m_state(State::Validated)
, m_asyncWork(work)
{
}
-BBQPlan::BBQPlan(Context* context, Vector<uint8_t>&& source, AsyncWork work, CompletionTask&& task, CreateEmbedderWrapper&& createEmbedderWrapper, ThrowWasmException throwWasmException)
- : BBQPlan(context, adoptRef(*new ModuleInformation(WTFMove(source))), work, WTFMove(task), WTFMove(createEmbedderWrapper), throwWasmException)
+BBQPlan::BBQPlan(VM* vm, Vector<uint8_t>&& source, AsyncWork work, CompletionTask&& task)
+ : BBQPlan(vm, adoptRef(*new ModuleInformation(WTFMove(source))), work, WTFMove(task))
{
m_state = State::Initial;
}
-BBQPlan::BBQPlan(Context* context, const uint8_t* source, size_t sourceLength, AsyncWork work, CompletionTask&& task)
- : Base(context, source, sourceLength, WTFMove(task))
+BBQPlan::BBQPlan(VM* vm, const uint8_t* source, size_t sourceLength, AsyncWork work, CompletionTask&& task)
+ : Base(vm, source, sourceLength, WTFMove(task))
, m_state(State::Initial)
, m_asyncWork(work)
{
m_unlinkedWasmToWasmCalls[functionIndex] = Vector<UnlinkedWasmToWasmCall>();
TierUpCount* tierUp = Options::useBBQTierUpChecks() ? &m_tierUpCounts[functionIndex] : nullptr;
- auto parseAndCompileResult = parseAndCompile(m_compilationContexts[functionIndex], functionStart, functionLength, signature, m_unlinkedWasmToWasmCalls[functionIndex], m_moduleInformation.get(), m_mode, CompilationMode::BBQMode, functionIndex, tierUp, m_throwWasmException);
+ auto parseAndCompileResult = parseAndCompile(m_compilationContexts[functionIndex], functionStart, functionLength, signature, m_unlinkedWasmToWasmCalls[functionIndex], m_moduleInformation.get(), m_mode, CompilationMode::BBQMode, functionIndex, tierUp);
if (UNLIKELY(!parseAndCompileResult)) {
auto locker = holdLock(m_lock);
if (m_exportedFunctionIndices.contains(functionIndex)) {
auto locker = holdLock(m_lock);
- auto result = m_embedderToWasmInternalFunctions.add(functionIndex, m_createEmbedderWrapper(m_compilationContexts[functionIndex], signature, &m_unlinkedWasmToWasmCalls[functionIndex], m_moduleInformation.get(), m_mode, functionIndex));
+ auto result = m_jsToWasmInternalFunctions.add(functionIndex, createJSToWasmWrapper(m_compilationContexts[functionIndex], signature, &m_unlinkedWasmToWasmCalls[functionIndex], m_moduleInformation.get(), m_mode, functionIndex));
ASSERT_UNUSED(result, result.isNewEntry);
}
WTFMove(context.wasmEntrypointByproducts));
}
- if (auto embedderToWasmInternalFunction = m_embedderToWasmInternalFunctions.get(functionIndex)) {
+ if (auto jsToWasmInternalFunction = m_jsToWasmInternalFunctions.get(functionIndex)) {
LinkBuffer linkBuffer(*context.jsEntrypointJIT, nullptr, JITCompilationCanFail);
if (UNLIKELY(linkBuffer.didFailToAllocate())) {
Base::fail(locker, makeString("Out of executable memory in function entrypoint at index ", String::number(functionIndex)));
return;
}
- embedderToWasmInternalFunction->entrypoint.compilation = std::make_unique<B3::Compilation>(
+ jsToWasmInternalFunction->entrypoint.compilation = std::make_unique<B3::Compilation>(
FINALIZE_CODE(linkBuffer, ("JavaScript->WebAssembly entrypoint[%i] %s", functionIndex, SignatureInformation::get(signatureIndex).toString().ascii().data())),
WTFMove(context.jsEntrypointByproducts));
}
#if ENABLE(WEBASSEMBLY)
#include "CompilationResult.h"
+#include "VM.h"
#include "WasmB3IRGenerator.h"
#include "WasmModuleInformation.h"
#include "WasmPlan.h"
#include "WasmTierUpCount.h"
#include <wtf/Bag.h>
-#include <wtf/Function.h>
#include <wtf/SharedTask.h>
#include <wtf/ThreadSafeRefCounted.h>
#include <wtf/Vector.h>
namespace JSC {
class CallLinkInfo;
+class JSGlobalObject;
+class JSPromiseDeferred;
namespace Wasm {
public:
using Base = Plan;
enum AsyncWork : uint8_t { FullCompile, Validation };
-
// Note: CompletionTask should not hold a reference to the Plan otherwise there will be a reference cycle.
- BBQPlan(Context*, Ref<ModuleInformation>, AsyncWork, CompletionTask&&, CreateEmbedderWrapper&&, ThrowWasmException);
- JS_EXPORT_PRIVATE BBQPlan(Context*, Vector<uint8_t>&&, AsyncWork, CompletionTask&&, CreateEmbedderWrapper&&, ThrowWasmException);
+ BBQPlan(VM*, Ref<ModuleInformation>, AsyncWork, CompletionTask&&);
+ JS_EXPORT_PRIVATE BBQPlan(VM*, Vector<uint8_t>&&, AsyncWork, CompletionTask&&);
// Note: This constructor should only be used if you are not actually building a module e.g. validation/function tests
// FIXME: When we get rid of function tests we should remove AsyncWork from this constructor.
- JS_EXPORT_PRIVATE BBQPlan(Context*, const uint8_t*, size_t, AsyncWork, CompletionTask&&);
+ JS_EXPORT_PRIVATE BBQPlan(VM*, const uint8_t*, size_t, AsyncWork, CompletionTask&&);
bool parseAndValidateModule();
Vector<MacroAssemblerCodeRef> m_wasmToWasmExitStubs;
Vector<std::unique_ptr<InternalFunction>> m_wasmInternalFunctions;
HashSet<uint32_t, typename DefaultHash<uint32_t>::Hash, WTF::UnsignedWithZeroKeyHashTraits<uint32_t>> m_exportedFunctionIndices;
- HashMap<uint32_t, std::unique_ptr<InternalFunction>, typename DefaultHash<uint32_t>::Hash, WTF::UnsignedWithZeroKeyHashTraits<uint32_t>> m_embedderToWasmInternalFunctions;
+ HashMap<uint32_t, std::unique_ptr<InternalFunction>, typename DefaultHash<uint32_t>::Hash, WTF::UnsignedWithZeroKeyHashTraits<uint32_t>> m_jsToWasmInternalFunctions;
Vector<CompilationContext> m_compilationContexts;
Vector<TierUpCount> m_tierUpCounts;
ASSERT(!failed());
for (unsigned internalFunctionIndex = 0; internalFunctionIndex < m_wasmInternalFunctions.size(); ++internalFunctionIndex) {
- RefPtr<Wasm::Callee> embedderEntrypointCallee;
- if (auto embedderToWasmFunction = m_embedderToWasmInternalFunctions.get(internalFunctionIndex)) {
- embedderEntrypointCallee = Wasm::Callee::create(WTFMove(embedderToWasmFunction->entrypoint));
- MacroAssembler::repatchPointer(embedderToWasmFunction->calleeMoveLocation, CalleeBits::boxWasm(embedderEntrypointCallee.get()));
+ RefPtr<Wasm::Callee> jsEntrypointCallee;
+ if (auto jsToWasmFunction = m_jsToWasmInternalFunctions.get(internalFunctionIndex)) {
+ jsEntrypointCallee = Wasm::Callee::create(WTFMove(jsToWasmFunction->entrypoint));
+ MacroAssembler::repatchPointer(jsToWasmFunction->calleeMoveLocation, CalleeBits::boxWasm(jsEntrypointCallee.get()));
}
InternalFunction* function = m_wasmInternalFunctions[internalFunctionIndex].get();
Ref<Wasm::Callee> wasmEntrypointCallee = Wasm::Callee::create(WTFMove(function->entrypoint), functionIndexSpace, m_moduleInformation->nameSection.get(functionIndexSpace));
MacroAssembler::repatchPointer(function->calleeMoveLocation, CalleeBits::boxWasm(wasmEntrypointCallee.ptr()));
- callback(internalFunctionIndex, WTFMove(embedderEntrypointCallee), WTFMove(wasmEntrypointCallee));
+ callback(internalFunctionIndex, WTFMove(jsEntrypointCallee), WTFMove(wasmEntrypointCallee));
}
}
#if ENABLE(WEBASSEMBLY)
#include "CCallHelpers.h"
+#include "FrameTracers.h"
+#include "JITExceptions.h"
#include "JSCInlines.h"
#include "JSWebAssemblyInstance.h"
#include "LinkBuffer.h"
+#include "NativeErrorConstructor.h"
+#include "ThunkGenerators.h"
+#include "WasmCallingConvention.h"
+#include "WasmContext.h"
+#include "WasmExceptionType.h"
namespace JSC { namespace Wasm {
using JIT = CCallHelpers;
+static void materializeImportJSCell(JIT& jit, unsigned importIndex, GPRReg result)
+{
+ // We're calling out of the current WebAssembly.Instance. That Instance has a list of all its import functions.
+ jit.loadWasmContext(result);
+ jit.loadPtr(JIT::Address(result, JSWebAssemblyInstance::offsetOfImportFunction(importIndex)), result);
+}
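For orientation, the two loads above are just an indexed load off the current instance. A minimal conceptual sketch, using hypothetical plain-C++ stand-ins rather than the real JSC classes:

    // Conceptual model only: "Instance" and "importFunctions" stand in for
    // JSWebAssemblyInstance and its per-import slots; they are not JSC types.
    struct Instance { void** importFunctions; };

    static void* materializeImportCellModel(Instance* currentContext, unsigned importIndex)
    {
        // Mirrors the stub: load the wasm context, then load the slot at importIndex.
        return currentContext->importFunctions[importIndex];
    }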
+
+Expected<MacroAssemblerCodeRef, BindingFailure> wasmToJs(VM* vm, Bag<CallLinkInfo>& callLinkInfos, SignatureIndex signatureIndex, unsigned importIndex)
+{
+ // FIXME: This function doesn't properly abstract away the calling convention.
+ // It'd be super easy to do so: https://bugs.webkit.org/show_bug.cgi?id=169401
+ const WasmCallingConvention& wasmCC = wasmCallingConvention();
+ const JSCCallingConvention& jsCC = jscCallingConvention();
+ const Signature& signature = SignatureInformation::get(signatureIndex);
+ unsigned argCount = signature.argumentCount();
+ JIT jit;
+
+ // Note: WasmB3IRGenerator assumes that this stub treats SP as a callee save.
+ // If we ever change this, we will also need to change WasmB3IRGenerator.
+
+    // Below, we assume that the JS calling convention passes all of its arguments on the stack.
+ ASSERT(!jsCC.m_gprArgs.size());
+ ASSERT(!jsCC.m_fprArgs.size());
+
+ jit.emitFunctionPrologue();
+ jit.store64(JIT::TrustedImm32(0), JIT::Address(GPRInfo::callFrameRegister, CallFrameSlot::codeBlock * static_cast<int>(sizeof(Register)))); // FIXME Stop using 0 as codeBlocks. https://bugs.webkit.org/show_bug.cgi?id=165321
+
+ {
+ bool hasBadI64Use = false;
+ hasBadI64Use |= signature.returnType() == I64;
+ for (unsigned argNum = 0; argNum < argCount && !hasBadI64Use; ++argNum) {
+ Type argType = signature.argument(argNum);
+ switch (argType) {
+ case Void:
+ case Func:
+ case Anyfunc:
+ RELEASE_ASSERT_NOT_REACHED();
+
+ case I64: {
+ hasBadI64Use = true;
+ break;
+ }
+
+ default:
+ break;
+ }
+ }
+
+ if (hasBadI64Use) {
+ jit.copyCalleeSavesToVMEntryFrameCalleeSavesBuffer(*vm);
+ jit.move(GPRInfo::callFrameRegister, GPRInfo::argumentGPR0);
+ jit.loadWasmContext(GPRInfo::argumentGPR1);
+
+ // Store Callee.
+ jit.loadPtr(CCallHelpers::Address(GPRInfo::argumentGPR1, JSWebAssemblyInstance::offsetOfCallee()), GPRInfo::argumentGPR2);
+ jit.storePtr(GPRInfo::argumentGPR2, JIT::Address(GPRInfo::callFrameRegister, CallFrameSlot::callee * static_cast<int>(sizeof(Register))));
+
+ auto call = jit.call();
+ jit.jumpToExceptionHandler(*vm);
+
+ void (*throwBadI64)(ExecState*, JSWebAssemblyInstance*) = [] (ExecState* exec, JSWebAssemblyInstance* wasmContext) -> void {
+ VM* vm = &exec->vm();
+ NativeCallFrameTracer tracer(vm, exec);
+
+ {
+ auto throwScope = DECLARE_THROW_SCOPE(*vm);
+ JSGlobalObject* globalObject = wasmContext->globalObject();
+ auto* error = ErrorInstance::create(exec, *vm, globalObject->typeErrorConstructor()->errorStructure(), ASCIILiteral("i64 not allowed as return type or argument to an imported function"));
+ throwException(exec, throwScope, error);
+ }
+
+ genericUnwind(vm, exec);
+ ASSERT(!!vm->callFrameForCatch);
+ };
+
+ LinkBuffer linkBuffer(jit, GLOBAL_THUNK_ID, JITCompilationCanFail);
+ if (UNLIKELY(linkBuffer.didFailToAllocate()))
+ return makeUnexpected(BindingFailure::OutOfMemory);
+
+ linkBuffer.link(call, throwBadI64);
+ return FINALIZE_CODE(linkBuffer, ("WebAssembly->JavaScript invalid i64 use in import[%i]", importIndex));
+ }
+ }
+
+    // Here we assume that the JS calling convention saves at least all of the wasm callee-saved registers, so we don't need to save and restore any more of them: the wasm callee already took care of this.
+ RegisterSet missingCalleeSaves = wasmCC.m_calleeSaveRegisters;
+ missingCalleeSaves.exclude(jsCC.m_calleeSaveRegisters);
+ ASSERT(missingCalleeSaves.isEmpty());
+
+ if (!Options::useCallICsForWebAssemblyToJSCalls()) {
+ ScratchBuffer* scratchBuffer = vm->scratchBufferForSize(argCount * sizeof(uint64_t));
+ char* buffer = argCount ? static_cast<char*>(scratchBuffer->dataBuffer()) : nullptr;
+ unsigned marshalledGPRs = 0;
+ unsigned marshalledFPRs = 0;
+ unsigned bufferOffset = 0;
+ unsigned frOffset = CallFrame::headerSizeInRegisters * static_cast<int>(sizeof(Register));
+ const GPRReg scratchGPR = GPRInfo::regCS0;
+ jit.subPtr(MacroAssembler::TrustedImm32(WTF::roundUpToMultipleOf(stackAlignmentBytes(), sizeof(Register))), MacroAssembler::stackPointerRegister);
+ jit.storePtr(scratchGPR, MacroAssembler::Address(MacroAssembler::stackPointerRegister));
+
+ for (unsigned argNum = 0; argNum < argCount; ++argNum) {
+ Type argType = signature.argument(argNum);
+ switch (argType) {
+ case Void:
+ case Func:
+ case Anyfunc:
+ case I64:
+ RELEASE_ASSERT_NOT_REACHED();
+ case I32: {
+ GPRReg gprReg;
+ if (marshalledGPRs < wasmCC.m_gprArgs.size())
+ gprReg = wasmCC.m_gprArgs[marshalledGPRs].gpr();
+ else {
+ // We've already spilled all arguments, these registers are available as scratch.
+ gprReg = GPRInfo::argumentGPR0;
+ jit.load64(JIT::Address(GPRInfo::callFrameRegister, frOffset), gprReg);
+ frOffset += sizeof(Register);
+ }
+ jit.zeroExtend32ToPtr(gprReg, gprReg);
+ jit.store64(gprReg, buffer + bufferOffset);
+ ++marshalledGPRs;
+ break;
+ }
+ case F32: {
+ FPRReg fprReg;
+ if (marshalledFPRs < wasmCC.m_fprArgs.size())
+ fprReg = wasmCC.m_fprArgs[marshalledFPRs].fpr();
+ else {
+ // We've already spilled all arguments, these registers are available as scratch.
+ fprReg = FPRInfo::argumentFPR0;
+ jit.loadFloat(JIT::Address(GPRInfo::callFrameRegister, frOffset), fprReg);
+ frOffset += sizeof(Register);
+ }
+ jit.convertFloatToDouble(fprReg, fprReg);
+ jit.moveDoubleTo64(fprReg, scratchGPR);
+ jit.store64(scratchGPR, buffer + bufferOffset);
+ ++marshalledFPRs;
+ break;
+ }
+ case F64: {
+ FPRReg fprReg;
+ if (marshalledFPRs < wasmCC.m_fprArgs.size())
+ fprReg = wasmCC.m_fprArgs[marshalledFPRs].fpr();
+ else {
+ // We've already spilled all arguments, these registers are available as scratch.
+ fprReg = FPRInfo::argumentFPR0;
+ jit.loadDouble(JIT::Address(GPRInfo::callFrameRegister, frOffset), fprReg);
+ frOffset += sizeof(Register);
+ }
+ jit.moveDoubleTo64(fprReg, scratchGPR);
+ jit.store64(scratchGPR, buffer + bufferOffset);
+ ++marshalledFPRs;
+ break;
+ }
+ }
+
+ bufferOffset += sizeof(Register);
+ }
+ jit.loadPtr(MacroAssembler::Address(MacroAssembler::stackPointerRegister), scratchGPR);
+ if (argCount) {
+ // The GC should not look at this buffer at all, these aren't JSValues.
+ jit.move(CCallHelpers::TrustedImmPtr(scratchBuffer->addressOfActiveLength()), GPRInfo::argumentGPR0);
+ jit.storePtr(CCallHelpers::TrustedImmPtr(0), GPRInfo::argumentGPR0);
+ }
+
+ uint64_t (*callFunc)(ExecState*, JSObject*, SignatureIndex, uint64_t*) =
+ [] (ExecState* exec, JSObject* callee, SignatureIndex signatureIndex, uint64_t* buffer) -> uint64_t {
+ VM* vm = &exec->vm();
+ NativeCallFrameTracer tracer(vm, exec);
+ auto throwScope = DECLARE_THROW_SCOPE(*vm);
+ const Signature& signature = SignatureInformation::get(signatureIndex);
+ MarkedArgumentBuffer args;
+ for (unsigned argNum = 0; argNum < signature.argumentCount(); ++argNum) {
+ Type argType = signature.argument(argNum);
+ JSValue arg;
+ switch (argType) {
+ case Void:
+ case Func:
+ case Anyfunc:
+ case I64:
+ RELEASE_ASSERT_NOT_REACHED();
+ case I32:
+ arg = jsNumber(static_cast<int32_t>(buffer[argNum]));
+ break;
+ case F32:
+ case F64:
+ arg = jsNumber(bitwise_cast<double>(buffer[argNum]));
+ break;
+ }
+ args.append(arg);
+ }
+
+ CallData callData;
+ CallType callType = callee->methodTable(*vm)->getCallData(callee, callData);
+ RELEASE_ASSERT(callType != CallType::None);
+ JSValue result = call(exec, callee, callType, callData, jsUndefined(), args);
+ RETURN_IF_EXCEPTION(throwScope, 0);
+
+ uint64_t realResult;
+ switch (signature.returnType()) {
+ case Func:
+ case Anyfunc:
+ case I64:
+ RELEASE_ASSERT_NOT_REACHED();
+ break;
+ case Void:
+ break;
+ case I32: {
+ realResult = static_cast<uint64_t>(static_cast<uint32_t>(result.toInt32(exec)));
+ break;
+ }
+ case F64:
+ case F32: {
+ realResult = bitwise_cast<uint64_t>(result.toNumber(exec));
+ break;
+ }
+ }
+
+ RETURN_IF_EXCEPTION(throwScope, 0);
+ return realResult;
+ };
+
+ jit.loadWasmContext(GPRInfo::argumentGPR0);
+ jit.loadPtr(CCallHelpers::Address(GPRInfo::argumentGPR0, JSWebAssemblyInstance::offsetOfCallee()), GPRInfo::argumentGPR0);
+ jit.storePtr(GPRInfo::argumentGPR0, JIT::Address(GPRInfo::callFrameRegister, CallFrameSlot::callee * static_cast<int>(sizeof(Register))));
+
+ materializeImportJSCell(jit, importIndex, GPRInfo::argumentGPR1);
+ static_assert(GPRInfo::numberOfArgumentRegisters >= 4, "We rely on this with the call below.");
+ jit.setupArgumentsWithExecState(GPRInfo::argumentGPR1, CCallHelpers::TrustedImm32(signatureIndex), CCallHelpers::TrustedImmPtr(buffer));
+ auto call = jit.call();
+ auto noException = jit.emitExceptionCheck(*vm, AssemblyHelpers::InvertedExceptionCheck);
+
+    // The call above raised an exception: unwind to the handler.
+ jit.copyCalleeSavesToVMEntryFrameCalleeSavesBuffer(*vm);
+ jit.move(GPRInfo::callFrameRegister, GPRInfo::argumentGPR0);
+ void (*doUnwinding)(ExecState*) = [] (ExecState* exec) -> void {
+ VM* vm = &exec->vm();
+ NativeCallFrameTracer tracer(vm, exec);
+ genericUnwind(vm, exec);
+ ASSERT(!!vm->callFrameForCatch);
+ };
+ auto exceptionCall = jit.call();
+ jit.jumpToExceptionHandler(*vm);
+
+ noException.link(&jit);
+ switch (signature.returnType()) {
+ case F64: {
+ jit.move64ToDouble(GPRInfo::returnValueGPR, FPRInfo::returnValueFPR);
+ break;
+ }
+ case F32: {
+ jit.move64ToDouble(GPRInfo::returnValueGPR, FPRInfo::returnValueFPR);
+ jit.convertDoubleToFloat(FPRInfo::returnValueFPR, FPRInfo::returnValueFPR);
+ break;
+ }
+ default:
+ break;
+ }
+
+ jit.emitFunctionEpilogue();
+ jit.ret();
+
+ LinkBuffer linkBuffer(jit, GLOBAL_THUNK_ID, JITCompilationCanFail);
+ if (UNLIKELY(linkBuffer.didFailToAllocate()))
+ return makeUnexpected(BindingFailure::OutOfMemory);
+
+ linkBuffer.link(call, callFunc);
+ linkBuffer.link(exceptionCall, doUnwinding);
+
+ return FINALIZE_CODE(linkBuffer, ("WebAssembly->JavaScript import[%i] %s", importIndex, signature.toString().ascii().data()));
+ }
+
+ // Note: We don't need to perform a stack check here since WasmB3IRGenerator
+ // will do the stack check for us. Whenever it detects that it might make
+ // a call to this thunk, it'll make sure its stack check includes space
+ // for us here.
+
+ const unsigned numberOfParameters = argCount + 1; // There is a "this" argument.
+ const unsigned numberOfRegsForCall = CallFrame::headerSizeInRegisters + numberOfParameters;
+ const unsigned numberOfBytesForCall = numberOfRegsForCall * sizeof(Register) - sizeof(CallerFrameAndPC);
+ const unsigned stackOffset = WTF::roundUpToMultipleOf(stackAlignmentBytes(), numberOfBytesForCall);
+ jit.subPtr(MacroAssembler::TrustedImm32(stackOffset), MacroAssembler::stackPointerRegister);
+ JIT::Address calleeFrame = CCallHelpers::Address(MacroAssembler::stackPointerRegister, -static_cast<ptrdiff_t>(sizeof(CallerFrameAndPC)));
+
+    // FIXME: Turn these into loops that switch on Signature when there are many arguments on the stack; the fully unrolled form is otherwise huge for huge signatures. https://bugs.webkit.org/show_bug.cgi?id=165547
+
+ // First go through the integer parameters, freeing up their register for use afterwards.
+ {
+ unsigned marshalledGPRs = 0;
+ unsigned marshalledFPRs = 0;
+ unsigned calleeFrameOffset = CallFrameSlot::firstArgument * static_cast<int>(sizeof(Register));
+ unsigned frOffset = CallFrame::headerSizeInRegisters * static_cast<int>(sizeof(Register));
+ for (unsigned argNum = 0; argNum < argCount; ++argNum) {
+ Type argType = signature.argument(argNum);
+ switch (argType) {
+ case Void:
+ case Func:
+ case Anyfunc:
+ case I64:
+ RELEASE_ASSERT_NOT_REACHED(); // Handled above.
+ case I32: {
+ GPRReg gprReg;
+ if (marshalledGPRs < wasmCC.m_gprArgs.size())
+ gprReg = wasmCC.m_gprArgs[marshalledGPRs].gpr();
+ else {
+ // We've already spilled all arguments, these registers are available as scratch.
+ gprReg = GPRInfo::argumentGPR0;
+ jit.load64(JIT::Address(GPRInfo::callFrameRegister, frOffset), gprReg);
+ frOffset += sizeof(Register);
+ }
+ ++marshalledGPRs;
+ jit.zeroExtend32ToPtr(gprReg, gprReg); // Clear non-int32 and non-tag bits.
+ jit.boxInt32(gprReg, JSValueRegs(gprReg), DoNotHaveTagRegisters);
+ jit.store64(gprReg, calleeFrame.withOffset(calleeFrameOffset));
+ calleeFrameOffset += sizeof(Register);
+ break;
+ }
+ case F32:
+ case F64:
+ // Skipped: handled below.
+ if (marshalledFPRs >= wasmCC.m_fprArgs.size())
+ frOffset += sizeof(Register);
+ ++marshalledFPRs;
+ calleeFrameOffset += sizeof(Register);
+ break;
+ }
+ }
+ }
+
+ {
+ // Integer registers have already been spilled, these are now available.
+ GPRReg doubleEncodeOffsetGPRReg = GPRInfo::argumentGPR0;
+ GPRReg scratch = GPRInfo::argumentGPR1;
+ bool hasMaterializedDoubleEncodeOffset = false;
+ auto materializeDoubleEncodeOffset = [&hasMaterializedDoubleEncodeOffset, &jit] (GPRReg dest) {
+ if (!hasMaterializedDoubleEncodeOffset) {
+ static_assert(DoubleEncodeOffset == 1ll << 48, "codegen assumes this below");
+ jit.move(JIT::TrustedImm32(1), dest);
+ jit.lshift64(JIT::TrustedImm32(48), dest);
+ hasMaterializedDoubleEncodeOffset = true;
+ }
+ };
+
+ unsigned marshalledGPRs = 0;
+ unsigned marshalledFPRs = 0;
+ unsigned calleeFrameOffset = CallFrameSlot::firstArgument * static_cast<int>(sizeof(Register));
+ unsigned frOffset = CallFrame::headerSizeInRegisters * static_cast<int>(sizeof(Register));
+ for (unsigned argNum = 0; argNum < argCount; ++argNum) {
+ Type argType = signature.argument(argNum);
+ switch (argType) {
+ case Void:
+ case Func:
+ case Anyfunc:
+ case I64:
+ RELEASE_ASSERT_NOT_REACHED(); // Handled above.
+ case I32:
+ // Skipped: handled above.
+ if (marshalledGPRs >= wasmCC.m_gprArgs.size())
+ frOffset += sizeof(Register);
+ ++marshalledGPRs;
+ calleeFrameOffset += sizeof(Register);
+ break;
+ case F32: {
+ FPRReg fprReg;
+ if (marshalledFPRs < wasmCC.m_fprArgs.size())
+ fprReg = wasmCC.m_fprArgs[marshalledFPRs].fpr();
+ else {
+ // We've already spilled all arguments, these registers are available as scratch.
+ fprReg = FPRInfo::argumentFPR0;
+ jit.loadFloat(JIT::Address(GPRInfo::callFrameRegister, frOffset), fprReg);
+ frOffset += sizeof(Register);
+ }
+ jit.convertFloatToDouble(fprReg, fprReg);
+ jit.purifyNaN(fprReg);
+ jit.moveDoubleTo64(fprReg, scratch);
+ materializeDoubleEncodeOffset(doubleEncodeOffsetGPRReg);
+ jit.add64(doubleEncodeOffsetGPRReg, scratch);
+ jit.store64(scratch, calleeFrame.withOffset(calleeFrameOffset));
+ calleeFrameOffset += sizeof(Register);
+ ++marshalledFPRs;
+ break;
+ }
+ case F64: {
+ FPRReg fprReg;
+ if (marshalledFPRs < wasmCC.m_fprArgs.size())
+ fprReg = wasmCC.m_fprArgs[marshalledFPRs].fpr();
+ else {
+ // We've already spilled all arguments, these registers are available as scratch.
+ fprReg = FPRInfo::argumentFPR0;
+ jit.loadDouble(JIT::Address(GPRInfo::callFrameRegister, frOffset), fprReg);
+ frOffset += sizeof(Register);
+ }
+ jit.purifyNaN(fprReg);
+ jit.moveDoubleTo64(fprReg, scratch);
+ materializeDoubleEncodeOffset(doubleEncodeOffsetGPRReg);
+ jit.add64(doubleEncodeOffsetGPRReg, scratch);
+ jit.store64(scratch, calleeFrame.withOffset(calleeFrameOffset));
+ calleeFrameOffset += sizeof(Register);
+ ++marshalledFPRs;
+ break;
+ }
+ }
+ }
+ }
+
+ jit.loadWasmContext(GPRInfo::argumentGPR0);
+ jit.loadPtr(CCallHelpers::Address(GPRInfo::argumentGPR0, JSWebAssemblyInstance::offsetOfCallee()), GPRInfo::argumentGPR0);
+ jit.storePtr(GPRInfo::argumentGPR0, JIT::Address(GPRInfo::callFrameRegister, CallFrameSlot::callee * static_cast<int>(sizeof(Register))));
+
+ GPRReg importJSCellGPRReg = GPRInfo::regT0; // Callee needs to be in regT0 for slow path below.
+ ASSERT(!wasmCC.m_calleeSaveRegisters.get(importJSCellGPRReg));
+
+ materializeImportJSCell(jit, importIndex, importJSCellGPRReg);
+
+ jit.store64(importJSCellGPRReg, calleeFrame.withOffset(CallFrameSlot::callee * static_cast<int>(sizeof(Register))));
+ jit.store32(JIT::TrustedImm32(numberOfParameters), calleeFrame.withOffset(CallFrameSlot::argumentCount * static_cast<int>(sizeof(Register)) + PayloadOffset));
+ jit.store64(JIT::TrustedImm64(ValueUndefined), calleeFrame.withOffset(CallFrameSlot::thisArgument * static_cast<int>(sizeof(Register))));
+
+ // FIXME Tail call if the wasm return type is void and no registers were spilled. https://bugs.webkit.org/show_bug.cgi?id=165488
+
+ CallLinkInfo* callLinkInfo = callLinkInfos.add();
+ callLinkInfo->setUpCall(CallLinkInfo::Call, CodeOrigin(), importJSCellGPRReg);
+ JIT::DataLabelPtr targetToCheck;
+ JIT::TrustedImmPtr initialRightValue(0);
+ JIT::Jump slowPath = jit.branchPtrWithPatch(MacroAssembler::NotEqual, importJSCellGPRReg, targetToCheck, initialRightValue);
+ JIT::Call fastCall = jit.nearCall();
+ JIT::Jump done = jit.jump();
+ slowPath.link(&jit);
+ // Callee needs to be in regT0 here.
+ jit.move(MacroAssembler::TrustedImmPtr(callLinkInfo), GPRInfo::regT2); // Link info needs to be in regT2.
+ JIT::Call slowCall = jit.nearCall();
+ done.link(&jit);
+
+ CCallHelpers::JumpList exceptionChecks;
+
+ switch (signature.returnType()) {
+ case Void:
+ // Discard.
+ break;
+ case Func:
+ case Anyfunc:
+        // For the JavaScript embedding, imports whose signature returns these types are a WebAssembly.Module validation error.
+ RELEASE_ASSERT_NOT_REACHED();
+ break;
+ case I64: {
+ RELEASE_ASSERT_NOT_REACHED(); // Handled above.
+ }
+ case I32: {
+ CCallHelpers::JumpList done;
+ CCallHelpers::JumpList slowPath;
+
+ slowPath.append(jit.branchIfNotNumber(GPRInfo::returnValueGPR, DoNotHaveTagRegisters));
+ slowPath.append(jit.branchIfNotInt32(JSValueRegs(GPRInfo::returnValueGPR), DoNotHaveTagRegisters));
+ jit.zeroExtend32ToPtr(GPRInfo::returnValueGPR, GPRInfo::returnValueGPR);
+ done.append(jit.jump());
+
+ slowPath.link(&jit);
+ jit.setupArgumentsWithExecState(GPRInfo::returnValueGPR);
+ auto call = jit.call();
+ exceptionChecks.append(jit.emitJumpIfException(*vm));
+
+ int32_t (*convertToI32)(ExecState*, JSValue) = [] (ExecState* exec, JSValue v) -> int32_t {
+ VM* vm = &exec->vm();
+ NativeCallFrameTracer tracer(vm, exec);
+ return v.toInt32(exec);
+ };
+ jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
+ linkBuffer.link(call, convertToI32);
+ });
+
+ done.link(&jit);
+ break;
+ }
+ case F32: {
+ CCallHelpers::JumpList done;
+ auto notANumber = jit.branchIfNotNumber(GPRInfo::returnValueGPR, DoNotHaveTagRegisters);
+ auto isDouble = jit.branchIfNotInt32(JSValueRegs(GPRInfo::returnValueGPR), DoNotHaveTagRegisters);
+ // We're an int32
+ jit.signExtend32ToPtr(GPRInfo::returnValueGPR, GPRInfo::returnValueGPR);
+ jit.convertInt64ToFloat(GPRInfo::returnValueGPR, FPRInfo::returnValueFPR);
+ done.append(jit.jump());
+
+ isDouble.link(&jit);
+ jit.move(JIT::TrustedImm64(TagTypeNumber), GPRInfo::returnValueGPR2);
+ jit.add64(GPRInfo::returnValueGPR2, GPRInfo::returnValueGPR);
+ jit.move64ToDouble(GPRInfo::returnValueGPR, FPRInfo::returnValueFPR);
+ jit.convertDoubleToFloat(FPRInfo::returnValueFPR, FPRInfo::returnValueFPR);
+ done.append(jit.jump());
+
+ notANumber.link(&jit);
+ jit.setupArgumentsWithExecState(GPRInfo::returnValueGPR);
+ auto call = jit.call();
+ exceptionChecks.append(jit.emitJumpIfException(*vm));
+
+ float (*convertToF32)(ExecState*, JSValue) = [] (ExecState* exec, JSValue v) -> float {
+ VM* vm = &exec->vm();
+ NativeCallFrameTracer tracer(vm, exec);
+ return static_cast<float>(v.toNumber(exec));
+ };
+ jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
+ linkBuffer.link(call, convertToF32);
+ });
+
+ done.link(&jit);
+ break;
+ }
+ case F64: {
+ CCallHelpers::JumpList done;
+ auto notANumber = jit.branchIfNotNumber(GPRInfo::returnValueGPR, DoNotHaveTagRegisters);
+ auto isDouble = jit.branchIfNotInt32(JSValueRegs(GPRInfo::returnValueGPR), DoNotHaveTagRegisters);
+ // We're an int32
+ jit.signExtend32ToPtr(GPRInfo::returnValueGPR, GPRInfo::returnValueGPR);
+ jit.convertInt64ToDouble(GPRInfo::returnValueGPR, FPRInfo::returnValueFPR);
+ done.append(jit.jump());
+
+ isDouble.link(&jit);
+ jit.move(JIT::TrustedImm64(TagTypeNumber), GPRInfo::returnValueGPR2);
+ jit.add64(GPRInfo::returnValueGPR2, GPRInfo::returnValueGPR);
+ jit.move64ToDouble(GPRInfo::returnValueGPR, FPRInfo::returnValueFPR);
+ done.append(jit.jump());
+
+ notANumber.link(&jit);
+ jit.setupArgumentsWithExecState(GPRInfo::returnValueGPR);
+ auto call = jit.call();
+ exceptionChecks.append(jit.emitJumpIfException(*vm));
+
+ double (*convertToF64)(ExecState*, JSValue) = [] (ExecState* exec, JSValue v) -> double {
+ VM* vm = &exec->vm();
+ NativeCallFrameTracer tracer(vm, exec);
+ return v.toNumber(exec);
+ };
+ jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
+ linkBuffer.link(call, convertToF64);
+ });
+
+ done.link(&jit);
+ break;
+ }
+ }
+
+ jit.emitFunctionEpilogue();
+ jit.ret();
+
+ if (!exceptionChecks.empty()) {
+ exceptionChecks.link(&jit);
+ jit.copyCalleeSavesToVMEntryFrameCalleeSavesBuffer(*vm);
+ jit.move(GPRInfo::callFrameRegister, GPRInfo::argumentGPR0);
+ auto call = jit.call();
+ jit.jumpToExceptionHandler(*vm);
+
+ void (*doUnwinding)(ExecState*) = [] (ExecState* exec) -> void {
+ VM* vm = &exec->vm();
+ NativeCallFrameTracer tracer(vm, exec);
+ genericUnwind(vm, exec);
+ ASSERT(!!vm->callFrameForCatch);
+ };
+
+ jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
+ linkBuffer.link(call, doUnwinding);
+ });
+ }
+
+ LinkBuffer patchBuffer(jit, GLOBAL_THUNK_ID, JITCompilationCanFail);
+ if (UNLIKELY(patchBuffer.didFailToAllocate()))
+ return makeUnexpected(BindingFailure::OutOfMemory);
+
+ patchBuffer.link(slowCall, FunctionPtr(vm->getCTIStub(linkCallThunkGenerator).code().executableAddress()));
+ CodeLocationLabel callReturnLocation(patchBuffer.locationOfNearCall(slowCall));
+ CodeLocationLabel hotPathBegin(patchBuffer.locationOf(targetToCheck));
+ CodeLocationNearCall hotPathOther = patchBuffer.locationOfNearCall(fastCall);
+ callLinkInfo->setCallLocations(callReturnLocation, hotPathBegin, hotPathOther);
+
+ return FINALIZE_CODE(patchBuffer, ("WebAssembly->JavaScript import[%i] %s", importIndex, signature.toString().ascii().data()));
+}
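For orientation, a caller consumes the returned Expected roughly as follows. This is a hedged sketch: the surrounding variables and the failure handling are illustrative, not part of this patch.

    // Sketch of a call site. "vm", "signatureIndex" and "importIndex" come from the
    // module being instantiated; "callLinkInfos" is owned by whatever keeps the stub alive.
    Bag<CallLinkInfo> callLinkInfos;
    auto stubOrError = Wasm::wasmToJs(&vm, callLinkInfos, signatureIndex, importIndex);
    if (!stubOrError)
        return false;                                  // BindingFailure::OutOfMemory is the only failure
    MacroAssemblerCodeRef stub = stubOrError.value();  // install this as the import's wasm->JS stub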
+
Expected<MacroAssemblerCodeRef, BindingFailure> wasmToWasm(unsigned importIndex)
{
const PinnedRegisterInfo& pinnedRegs = PinnedRegisterInfo::get();
ASSERT(sizeRegs[0].sizeRegister != scratch);
GPRReg sizeRegAsScratch = sizeRegs[0].sizeRegister;
+ static_assert(std::is_same<Context, JSWebAssemblyInstance>::value, "This is assumed in the code below.");
// B3's call codegen ensures that the JSCell is a WebAssemblyFunction.
- jit.loadWasmContextInstance(sizeRegAsScratch); // Old Instance*
- // Get the callee's WebAssembly.Instance and set it as WasmContext's instance. The caller will take care of restoring its own Instance.
- jit.loadPtr(JIT::Address(sizeRegAsScratch, JSWebAssemblyInstance::offsetOfTargetInstance(importIndex)), baseMemory); // JSWebAssemblyInstance*.
- // While we're accessing that cacheline, also get the wasm entrypoint so we can tail call to it below.
- jit.loadPtr(JIT::Address(sizeRegAsScratch, JSWebAssemblyInstance::offsetOfWasmEntrypoint(importIndex)), scratch); // Wasm::WasmEntrypointLoadLocation.
- jit.storeWasmContextInstance(baseMemory);
+ jit.loadWasmContext(sizeRegAsScratch); // Old Instance*
+ jit.loadPtr(JIT::Address(sizeRegAsScratch, JSWebAssemblyInstance::offsetOfImportFunction(importIndex)), scratch);
+
+ // Get the callee's WebAssembly.Instance and set it as WasmContext. The caller will take care of restoring its own Instance.
+ jit.loadPtr(JIT::Address(scratch, WebAssemblyFunction::offsetOfInstance()), baseMemory); // Instance*.
+ jit.storeWasmContext(baseMemory);
jit.loadPtr(JIT::Address(sizeRegAsScratch, JSWebAssemblyInstance::offsetOfCachedStackLimit()), sizeRegAsScratch);
jit.storePtr(sizeRegAsScratch, JIT::Address(baseMemory, JSWebAssemblyInstance::offsetOfCachedStackLimit()));
// FIXME: The following code assumes that all WebAssembly.Instance objects have the same pinned registers. https://bugs.webkit.org/show_bug.cgi?id=162952
// Set up the callee's baseMemory register as well as the memory size registers.
- jit.loadPtr(JIT::Address(baseMemory, JSWebAssemblyInstance::offsetOfWasmMemory()), baseMemory); // Wasm::Memory*.
+ jit.loadPtr(JIT::Address(baseMemory, JSWebAssemblyInstance::offsetOfMemory()), baseMemory); // JSWebAssemblyMemory*.
ASSERT(!sizeRegs[0].sizeOffset); // The following code assumes we start at 0, and calculates subsequent size registers relative to 0.
- jit.loadPtr(JIT::Address(baseMemory, Wasm::Memory::offsetOfSize()), sizeRegs[0].sizeRegister); // Memory size.
- jit.loadPtr(JIT::Address(baseMemory, Wasm::Memory::offsetOfMemory()), baseMemory); // Wasm::Memory::void*.
+ jit.loadPtr(JIT::Address(baseMemory, JSWebAssemblyMemory::offsetOfSize()), sizeRegs[0].sizeRegister); // Memory size.
+ jit.loadPtr(JIT::Address(baseMemory, JSWebAssemblyMemory::offsetOfMemory()), baseMemory); // WasmMemory::void*.
for (unsigned i = 1; i < sizeRegs.size(); ++i) {
ASSERT(sizeRegs[i].sizeRegister != baseMemory);
ASSERT(sizeRegs[i].sizeRegister != scratch);
}
// Tail call into the callee WebAssembly function.
+ jit.loadPtr(JIT::Address(scratch, WebAssemblyFunction::offsetOfWasmEntrypointLoadLocation()), scratch);
jit.loadPtr(scratch, scratch);
jit.jump(scratch);
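Restating the changed part of the wasm->wasm stub linearly, as a comment-only sketch (the names below are readable stand-ins for the offsetOf* loads above, not new APIs):

    // 1. oldInstance = current wasm context (a JSWebAssemblyInstance*)
    // 2. callee      = oldInstance's import function #importIndex (a WebAssemblyFunction*)
    // 3. newInstance = callee's owning instance; store it as the wasm context
    // 4. copy the caller's cached stack limit into newInstance
    // 5. repin baseMemory and the size registers from newInstance's JSWebAssemblyMemory
    // 6. tail-call through callee's wasm entrypoint load location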
/*
- * Copyright (C) 2016-2017 Apple Inc. All rights reserved.
+ * Copyright (C) 2016 Apple Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
#if ENABLE(WEBASSEMBLY)
#include "B3Compilation.h"
+#include "VM.h"
#include "WasmFormat.h"
+#include <wtf/Bag.h>
#include <wtf/Expected.h>
namespace JSC {
};
Expected<MacroAssemblerCodeRef, BindingFailure> wasmToWasm(unsigned importIndex);
+Expected<MacroAssemblerCodeRef, BindingFailure> wasmToJs(VM*, Bag<CallLinkInfo>& callLinkInfos, SignatureIndex, unsigned importIndex);
} } // namespace JSC::Wasm
#include "WasmBBQPlanInlines.h"
#include "WasmCallee.h"
-#include "WasmFormat.h"
#include "WasmWorklist.h"
namespace JSC { namespace Wasm {
-Ref<CodeBlock> CodeBlock::create(Context* context, MemoryMode mode, ModuleInformation& moduleInformation, CreateEmbedderWrapper&& createEmbedderWrapper, ThrowWasmException throwWasmException)
-{
- size_t importFunctionCount = moduleInformation.importFunctionCount();
- auto* result = new (NotNull, fastMalloc(allocationSize(importFunctionCount))) CodeBlock(context, mode, moduleInformation, WTFMove(createEmbedderWrapper), throwWasmException);
- for (size_t i = 0; i < importFunctionCount; ++i)
- result->importWasmToEmbedderStub(i) = nullptr;
- return adoptRef(*result);
-}
-
-CodeBlock::CodeBlock(Context* context, MemoryMode mode, ModuleInformation& moduleInformation, CreateEmbedderWrapper&& createEmbedderWrapper, ThrowWasmException throwWasmException)
+CodeBlock::CodeBlock(MemoryMode mode, ModuleInformation& moduleInformation)
: m_calleeCount(moduleInformation.internalFunctionCount())
, m_mode(mode)
{
RefPtr<CodeBlock> protectedThis = this;
-
- m_plan = adoptRef(*new BBQPlan(context, makeRef(moduleInformation), BBQPlan::FullCompile, createSharedTask<Plan::CallbackType>([this, protectedThis = WTFMove(protectedThis)] (Plan&) {
+ m_plan = adoptRef(*new BBQPlan(nullptr, makeRef(moduleInformation), BBQPlan::FullCompile, createSharedTask<Plan::CallbackType>([this, protectedThis = WTFMove(protectedThis)] (VM*, Plan&) {
auto locker = holdLock(m_lock);
if (m_plan->failed()) {
m_errorMessage = m_plan->errorMessage();
- setCompilationFinished();
+ m_plan = nullptr;
return;
}
m_wasmToWasmCallsites = m_plan->takeWasmToWasmCallsites();
m_tierUpCounts = m_plan->takeTierUpCounts();
- setCompilationFinished();
- }), WTFMove(createEmbedderWrapper), throwWasmException));
- m_plan->setMode(mode);
+ m_plan = nullptr;
+ })));
+ m_plan->setMode(mode);
auto& worklist = Wasm::ensureWorklist();
// Note, immediately after we enqueue the plan, there is a chance the above callback will be called.
worklist.enqueue(makeRef(*m_plan.get()));
// else, if we don't have a plan, we're already compiled.
}
-void CodeBlock::compileAsync(Context* context, AsyncCompilationCallback&& task)
+void CodeBlock::compileAsync(VM& vm, AsyncCompilationCallback&& task)
{
RefPtr<Plan> plan;
{
// We don't need to keep a RefPtr on the Plan because the worklist will keep
// a RefPtr on the Plan until the plan finishes notifying all of its callbacks.
RefPtr<CodeBlock> protectedThis = this;
- plan->addCompletionTask(context, createSharedTask<Plan::CallbackType>([this, task = WTFMove(task), protectedThis = WTFMove(protectedThis)] (Plan&) {
- task->run(makeRef(*this));
+ plan->addCompletionTask(vm, createSharedTask<Plan::CallbackType>([this, task = WTFMove(task), protectedThis = WTFMove(protectedThis)] (VM* vm, Plan&) {
+ ASSERT(vm);
+ task->run(*vm, makeRef(*this));
}));
} else
- task->run(makeRef(*this));
+ task->run(vm, makeRef(*this));
}
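Since the callback type now takes the VM, a caller of compileAsync looks roughly like this. A sketch: the body of the task is illustrative.

    codeBlock->compileAsync(vm, createSharedTask<Wasm::CodeBlock::CallbackType>(
        [] (VM& vm, Ref<Wasm::CodeBlock>&& codeBlock) {
            // Runs once compilation finishes (possibly from a worklist thread);
            // e.g. finish instantiation / resolve the pending promise here.
            UNUSED_PARAM(vm);
            UNUSED_PARAM(codeBlock);
        }));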
bool CodeBlock::isSafeToRun(MemoryMode memoryMode)
return false;
}
-
-void CodeBlock::setCompilationFinished()
-{
- m_plan = nullptr;
- m_compilationFinished.store(true);
-}
-
} } // namespace JSC::Wasm
#endif // ENABLE(WEBASSEMBLY)
#if ENABLE(WEBASSEMBLY)
#include "MacroAssemblerCodeRef.h"
-#include "WasmEmbedder.h"
#include "WasmTierUpCount.h"
#include <wtf/Lock.h>
#include <wtf/RefPtr.h>
namespace JSC {
+class VM;
+
namespace Wasm {
class Callee;
-struct Context;
class BBQPlan;
class OMGPlan;
struct ModuleInformation;
class CodeBlock : public ThreadSafeRefCounted<CodeBlock> {
public:
- typedef void CallbackType(Ref<CodeBlock>&&);
+ typedef void CallbackType(VM&, Ref<CodeBlock>&&);
using AsyncCompilationCallback = RefPtr<WTF::SharedTask<CallbackType>>;
- static Ref<CodeBlock> create(Context*, MemoryMode, ModuleInformation&, CreateEmbedderWrapper&&, ThrowWasmException);
-
- static size_t offsetOfImportStubs()
- {
- return WTF::roundUpToMultipleOf<sizeof(void*)>(sizeof(CodeBlock));
- }
-
- static size_t allocationSize(Checked<size_t> functionImportCount)
- {
- return (offsetOfImportStubs() + sizeof(void*) * functionImportCount).unsafeGet();
- }
-
- void*& importWasmToEmbedderStub(unsigned importIndex)
+ static Ref<CodeBlock> create(MemoryMode mode, ModuleInformation& moduleInformation)
{
- return *bitwise_cast<void**>(bitwise_cast<char*>(this) + offsetOfImportWasmToEmbedderStub(importIndex));
- }
-
- static ptrdiff_t offsetOfImportWasmToEmbedderStub(unsigned importIndex)
- {
- return offsetOfImportStubs() + sizeof(void*) * importIndex;
- }
-
- Wasm::WasmEntrypointLoadLocation wasmToJSCallStubForImport(unsigned importIndex)
- {
- ASSERT(runnable());
- return &importWasmToEmbedderStub(importIndex);
+ return adoptRef(*new CodeBlock(mode, moduleInformation));
}
void waitUntilFinished();
- void compileAsync(Context*, AsyncCompilationCallback&&);
+ void compileAsync(VM&, AsyncCompilationCallback&&);
bool compilationFinished()
{
- return m_compilationFinished.load();
+ auto locker = holdLock(m_lock);
+ return !m_plan;
}
bool runnable() { return compilationFinished() && !m_errorMessage; }
unsigned functionImportCount() const { return m_wasmToWasmExitStubs.size(); }
- // These two callee getters are only valid once the callees have been populated.
-
Callee& jsEntrypointCalleeFromFunctionIndexSpace(unsigned functionIndexSpace)
{
- ASSERT(runnable());
RELEASE_ASSERT(functionIndexSpace >= functionImportCount());
unsigned calleeIndex = functionIndexSpace - functionImportCount();
}
Callee& wasmEntrypointCalleeFromFunctionIndexSpace(unsigned functionIndexSpace)
{
- ASSERT(runnable());
RELEASE_ASSERT(functionIndexSpace >= functionImportCount());
unsigned calleeIndex = functionIndexSpace - functionImportCount();
if (m_optimizedCallees[calleeIndex])
private:
friend class OMGPlan;
- CodeBlock(Context*, MemoryMode, ModuleInformation&, CreateEmbedderWrapper&&, ThrowWasmException);
- void setCompilationFinished();
+ CodeBlock(MemoryMode, ModuleInformation&);
unsigned m_calleeCount;
MemoryMode m_mode;
Vector<RefPtr<Callee>> m_callees;
Vector<Vector<UnlinkedWasmToWasmCall>> m_wasmToWasmCallsites;
Vector<MacroAssemblerCodeRef> m_wasmToWasmExitStubs;
RefPtr<BBQPlan> m_plan;
- std::atomic<bool> m_compilationFinished { false };
String m_errorMessage;
Lock m_lock;
};
#if ENABLE(WEBASSEMBLY)
-#include "JSWebAssemblyInstance.h"
-#include "Options.h"
-
+#include "VM.h"
#include <mutex>
#include <wtf/FastTLS.h>
namespace JSC { namespace Wasm {
-bool Context::useFastTLS()
-{
-#if ENABLE(FAST_TLS_JIT)
- return Options::useFastTLSForWasmContext();
-#else
- return false;
-#endif
-}
-
-JSWebAssemblyInstance* Context::load() const
+Context* loadContext(VM& vm)
{
#if ENABLE(FAST_TLS_JIT)
- if (useFastTLS())
- return bitwise_cast<JSWebAssemblyInstance*>(_pthread_getspecific_direct(WTF_WASM_CONTEXT_KEY));
+ if (useFastTLSForContext())
+ return bitwise_cast<Context*>(_pthread_getspecific_direct(WTF_WASM_CONTEXT_KEY));
#endif
- return instance;
+ return vm.wasmContext;
}
-void Context::store(JSWebAssemblyInstance* inst, void* softStackLimit)
+void storeContext(VM& vm, Context* context)
{
#if ENABLE(FAST_TLS_JIT)
- if (useFastTLS())
- _pthread_setspecific_direct(WTF_WASM_CONTEXT_KEY, bitwise_cast<void*>(inst));
+ if (useFastTLSForContext())
+ _pthread_setspecific_direct(WTF_WASM_CONTEXT_KEY, bitwise_cast<void*>(context));
#endif
- instance = inst;
- if (instance)
- instance->setCachedStackLimit(softStackLimit);
+ vm.wasmContext = context;
+ if (context)
+ context->setCachedStackLimit(vm.softStackLimit());
}
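Both helpers are symmetric for C++ callers; a minimal usage sketch (the save/restore pattern is illustrative, not something this patch adds):

    // Temporarily run on behalf of calleeInstance, then restore the caller's context.
    Wasm::Context* saved = Wasm::loadContext(vm);
    Wasm::storeContext(vm, calleeInstance);   // also refreshes calleeInstance's cached stack limit
    // ... enter wasm code belonging to calleeInstance ...
    Wasm::storeContext(vm, saved);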
} } // namespace JSC::Wasm
#if ENABLE(WEBASSEMBLY)
+#include "JSWebAssemblyInstance.h"
+#include "Options.h"
+
namespace JSC {
-class JSWebAssemblyInstance;
+class VM;
namespace Wasm {
-struct Context {
- JSWebAssemblyInstance* instance { nullptr };
+// FIXME: We might want this to be something else at some point:
+// https://bugs.webkit.org/show_bug.cgi?id=170260
+using Context = JSWebAssemblyInstance;
+
+inline bool useFastTLS()
+{
+#if ENABLE(FAST_TLS_JIT)
+ return Options::useWebAssemblyFastTLS();
+#else
+ return false;
+#endif
+}
- JSWebAssemblyInstance* load() const;
- void store(JSWebAssemblyInstance*, void* softStackLimit);
+inline bool useFastTLSForContext()
+{
+ if (useFastTLS())
+ return Options::useFastTLSForWasmContext();
+ return false;
+}
- static bool useFastTLS();
-};
+Context* loadContext(VM&);
+void storeContext(VM&, Context*);
} } // namespace JSC::Wasm
+++ /dev/null
-/*
- * Copyright (C) 2017 Apple Inc. All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions
- * are met:
- * 1. Redistributions of source code must retain the above copyright
- * notice, this list of conditions and the following disclaimer.
- * 2. Redistributions in binary form must reproduce the above copyright
- * notice, this list of conditions and the following disclaimer in the
- * documentation and/or other materials provided with the distribution.
- *
- * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
- * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
- * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
- * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
- * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
- * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
- * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
- * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
- * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
- * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-#pragma once
-
-#if ENABLE(WEBASSEMBLY)
-
-#include "WasmExceptionType.h"
-#include "WasmMemory.h"
-#include <wtf/Forward.h>
-#include <wtf/Function.h>
-
-#include <memory>
-
-namespace JSC {
-
-class ExecState;
-class JSWebAssemblyInstance; // FIXME this should be Wasm::Instance, so should use below. https://webkit.org/b/177472
-
-namespace Wasm {
-
-struct CompilationContext;
-struct InternalFunction;
-struct ModuleInformation;
-class Signature;
-struct UnlinkedWasmToWasmCall;
-
-// Create wrapper code to call from embedder -> WebAssembly.
-using CreateEmbedderWrapper = WTF::Function<std::unique_ptr<InternalFunction>(CompilationContext&, const Signature&, Vector<UnlinkedWasmToWasmCall>*, const ModuleInformation&, MemoryMode, uint32_t)>;
-
-// Called as soon as an exception is detected. The return value is the PC to continue at.
-using ThrowWasmException = void* (*)(ExecState*, Wasm::ExceptionType, JSWebAssemblyInstance*);
-
-} } // namespace JSC::Wasm
-
-#endif // ENABLE(WEBASSEMBLY)
#include "ExecutableAllocator.h"
#include "MachineContext.h"
+#include "VM.h"
#include "WasmExceptionType.h"
#include "WasmMemory.h"
#include "WasmThunks.h"
-#include <wtf/HashSet.h>
#include <wtf/Lock.h>
#include <wtf/NeverDestroyed.h>
#include <wtf/threads/Signals.h>
namespace JSC {
+class VM;
+
namespace Wasm {
void registerCode(void* start, void* end);
namespace JSC {
+class JSFunction;
+
namespace B3 {
class Compilation;
}
namespace Wasm {
-struct CompilationContext;
-struct ModuleInformation;
-
inline bool isValueType(Type type)
{
switch (type) {
+++ /dev/null
-/*
- * Copyright (C) 2017 Apple Inc. All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions
- * are met:
- * 1. Redistributions of source code must retain the above copyright
- * notice, this list of conditions and the following disclaimer.
- * 2. Redistributions in binary form must reproduce the above copyright
- * notice, this list of conditions and the following disclaimer in the
- * documentation and/or other materials provided with the distribution.
- *
- * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
- * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
- * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
- * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
- * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
- * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
- * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
- * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
- * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
- * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-#include "config.h"
-#include "WasmInstance.h"
-
-#include "Register.h"
-#include "WasmModuleInformation.h"
-
-#if ENABLE(WEBASSEMBLY)
-
-namespace JSC { namespace Wasm {
-
-namespace {
-size_t globalMemoryByteSize(Module& module)
-{
- return module.moduleInformation().globals.size() * sizeof(Register);
-}
-}
-
-Instance::Instance(Ref<Module>&& module)
- : m_module(WTFMove(module))
- , m_globals(MallocPtr<uint64_t>::malloc(globalMemoryByteSize(m_module.get())))
-{
-}
-
-Instance::~Instance() { }
-
-size_t Instance::extraMemoryAllocated() const
-{
- return globalMemoryByteSize(m_module.get());
-}
-
-} } // namespace JSC::Wasm
-
-#endif // ENABLE(WEBASSEMBLY)
+++ /dev/null
-/*
- * Copyright (C) 2017 Apple Inc. All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions
- * are met:
- * 1. Redistributions of source code must retain the above copyright
- * notice, this list of conditions and the following disclaimer.
- * 2. Redistributions in binary form must reproduce the above copyright
- * notice, this list of conditions and the following disclaimer in the
- * documentation and/or other materials provided with the distribution.
- *
- * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
- * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
- * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
- * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
- * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
- * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
- * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
- * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
- * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
- * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-#pragma once
-
-#if ENABLE(WEBASSEMBLY)
-
-#include "WasmFormat.h"
-#include "WasmMemory.h"
-#include "WasmModule.h"
-#include "WasmTable.h"
-#include <wtf/Optional.h>
-#include <wtf/Ref.h>
-#include <wtf/RefPtr.h>
-#include <wtf/ThreadSafeRefCounted.h>
-
-namespace JSC { namespace Wasm {
-
-class Instance : public ThreadSafeRefCounted<Instance> {
-public:
- static Ref<Instance> create(Ref<Module>&& module)
- {
- return adoptRef(*new Instance(WTFMove(module)));
- }
-
- void finalizeCreation(Ref<CodeBlock>&& codeBlock)
- {
- m_codeBlock = WTFMove(codeBlock);
- }
-
- JS_EXPORT_PRIVATE ~Instance();
-
- size_t extraMemoryAllocated() const;
-
- Module& module() { return m_module.get(); }
- CodeBlock* codeBlock() { return m_codeBlock.get(); }
- Memory* memory() { return m_memory.get(); }
- Table* table() { return m_table.get(); }
-
- int32_t loadI32Global(unsigned i) const { return m_globals.get()[i]; }
- int64_t loadI64Global(unsigned i) const { return m_globals.get()[i]; }
- float loadF32Global(unsigned i) const { return bitwise_cast<float>(loadI32Global(i)); }
- double loadF64Global(unsigned i) const { return bitwise_cast<double>(loadI64Global(i)); }
- void setGlobal(unsigned i, int64_t bits) { m_globals.get()[i] = bits; }
-
- static ptrdiff_t offsetOfCachedStackLimit() { return OBJECT_OFFSETOF(Instance, m_cachedStackLimit); }
- void* cachedStackLimit() const { return m_cachedStackLimit; }
- void setCachedStackLimit(void* limit) { m_cachedStackLimit = limit; }
-
- friend class JSC::JSWebAssemblyInstance; // FIXME remove this once refactored https://webkit.org/b/177472.
-
-private:
- Instance(Ref<Module>&&);
-
- Ref<Module> m_module;
- RefPtr<CodeBlock> m_codeBlock;
- RefPtr<Memory> m_memory;
- RefPtr<Table> m_table;
- MallocPtr<uint64_t> m_globals;
- void* m_cachedStackLimit { bitwise_cast<void*>(std::numeric_limits<uintptr_t>::max()) };
-};
-
-} } // namespace JSC::Wasm
-
-#endif // ENABLE(WEBASSEMBLY)
#if ENABLE(WEBASSEMBLY)
-#include "Options.h"
-#include <wtf/DataLog.h>
+#include "VM.h"
+#include "WasmThunks.h"
#include <wtf/Gigacage.h>
#include <wtf/Lock.h>
-#include <wtf/OSAllocator.h>
-#include <wtf/PageBlock.h>
#include <wtf/Platform.h>
#include <wtf/PrintStream.h>
#include <wtf/RAMSize.h>
-#include <wtf/Vector.h>
-
-#include <cstring>
-#include <mutex>
namespace JSC { namespace Wasm {
struct MemoryResult {
enum Kind {
Success,
- SuccessAndNotifyMemoryPressure,
- SyncTryToReclaimMemory
+ SuccessAndAsyncGC,
+ SyncGCAndRetry
};
-
+
static const char* toString(Kind kind)
{
switch (kind) {
case Success:
return "Success";
- case SuccessAndNotifyMemoryPressure:
- return "SuccessAndNotifyMemoryPressure";
- case SyncTryToReclaimMemory:
- return "SyncTryToReclaimMemory";
+ case SuccessAndAsyncGC:
+ return "SuccessAndAsyncGC";
+ case SyncGCAndRetry:
+ return "SyncGCAndRetry";
}
RELEASE_ASSERT_NOT_REACHED();
return nullptr;
MemoryResult result = [&] {
auto holder = holdLock(m_lock);
if (m_memories.size() >= m_maxCount)
- return MemoryResult(nullptr, MemoryResult::SyncTryToReclaimMemory);
+ return MemoryResult(nullptr, MemoryResult::SyncGCAndRetry);
void* result = Gigacage::tryAllocateVirtualPages(Gigacage::Primitive, Memory::fastMappedBytes());
if (!result)
- return MemoryResult(nullptr, MemoryResult::SyncTryToReclaimMemory);
+ return MemoryResult(nullptr, MemoryResult::SyncGCAndRetry);
m_memories.append(result);
return MemoryResult(
result,
- m_memories.size() >= m_maxCount / 2 ? MemoryResult::SuccessAndNotifyMemoryPressure : MemoryResult::Success);
+ m_memories.size() >= m_maxCount / 2 ? MemoryResult::SuccessAndAsyncGC : MemoryResult::Success);
}();
if (Options::logWebAssemblyMemory())
MemoryResult::Kind result = [&] {
auto holder = holdLock(m_lock);
if (m_physicalBytes + bytes > memoryLimit())
- return MemoryResult::SyncTryToReclaimMemory;
+ return MemoryResult::SyncGCAndRetry;
m_physicalBytes += bytes;
if (m_physicalBytes >= memoryLimit() / 2)
- return MemoryResult::SuccessAndNotifyMemoryPressure;
+ return MemoryResult::SuccessAndAsyncGC;
return MemoryResult::Success;
}();
}
template<typename Func>
-bool tryAllocate(const Func& allocate, const WTF::Function<void(Memory::NotifyPressure)>& notifyMemoryPressure, const WTF::Function<void(Memory::SyncTryToReclaim)>& syncTryToReclaimMemory)
+bool tryAndGC(VM& vm, const Func& allocate)
{
unsigned numTries = 2;
bool done = false;
case MemoryResult::Success:
done = true;
break;
- case MemoryResult::SuccessAndNotifyMemoryPressure:
- if (notifyMemoryPressure)
- notifyMemoryPressure(Memory::NotifyPressureTag);
+ case MemoryResult::SuccessAndAsyncGC:
+ vm.heap.collectAsync(CollectionScope::Full);
done = true;
break;
- case MemoryResult::SyncTryToReclaimMemory:
+ case MemoryResult::SyncGCAndRetry:
if (i + 1 == numTries)
break;
- if (syncTryToReclaimMemory)
- syncTryToReclaimMemory(Memory::SyncTryToReclaimTag);
+ vm.heap.collectSync(CollectionScope::Full);
break;
}
}
} // anonymous namespace
-Memory::Memory()
+const char* makeString(MemoryMode mode)
{
+ switch (mode) {
+ case MemoryMode::BoundsChecking: return "BoundsChecking";
+ case MemoryMode::Signaling: return "Signaling";
+ }
+ RELEASE_ASSERT_NOT_REACHED();
+ return "";
}
-Memory::Memory(PageCount initial, PageCount maximum, Function<void(NotifyPressure)>&& notifyMemoryPressure, Function<void(SyncTryToReclaim)>&& syncTryToReclaimMemory, WTF::Function<void(GrowSuccess, PageCount, PageCount)>&& growSuccessCallback)
+Memory::Memory(PageCount initial, PageCount maximum)
: m_initial(initial)
, m_maximum(maximum)
- , m_notifyMemoryPressure(WTFMove(notifyMemoryPressure))
- , m_syncTryToReclaimMemory(WTFMove(syncTryToReclaimMemory))
- , m_growSuccessCallback(WTFMove(growSuccessCallback))
{
ASSERT(!initial.bytes());
ASSERT(m_mode == MemoryMode::BoundsChecking);
dataLogLnIf(verbose, "Memory::Memory allocating ", *this);
}
-Memory::Memory(void* memory, PageCount initial, PageCount maximum, size_t mappedCapacity, MemoryMode mode, Function<void(NotifyPressure)>&& notifyMemoryPressure, Function<void(SyncTryToReclaim)>&& syncTryToReclaimMemory, WTF::Function<void(GrowSuccess, PageCount, PageCount)>&& growSuccessCallback)
+Memory::Memory(void* memory, PageCount initial, PageCount maximum, size_t mappedCapacity, MemoryMode mode)
: m_memory(memory)
, m_size(initial.bytes())
, m_initial(initial)
, m_maximum(maximum)
, m_mappedCapacity(mappedCapacity)
, m_mode(mode)
- , m_notifyMemoryPressure(WTFMove(notifyMemoryPressure))
- , m_syncTryToReclaimMemory(WTFMove(syncTryToReclaimMemory))
- , m_growSuccessCallback(WTFMove(growSuccessCallback))
{
dataLogLnIf(verbose, "Memory::Memory allocating ", *this);
}
#endif
}
-RefPtr<Memory> Memory::create()
-{
- return adoptRef(new Memory());
-}
-
-RefPtr<Memory> Memory::create(PageCount initial, PageCount maximum, WTF::Function<void(NotifyPressure)>&& notifyMemoryPressure, WTF::Function<void(SyncTryToReclaim)>&& syncTryToReclaimMemory, WTF::Function<void(GrowSuccess, PageCount, PageCount)>&& growSuccessCallback)
+RefPtr<Memory> Memory::create(VM& vm, PageCount initial, PageCount maximum)
{
ASSERT(initial);
RELEASE_ASSERT(!maximum || maximum >= initial); // This should be guaranteed by our caller.
const size_t initialBytes = initial.bytes();
const size_t maximumBytes = maximum ? maximum.bytes() : 0;
+ // We need to be sure we have a stub prior to running code.
+ if (UNLIKELY(!Thunks::singleton().stub(throwExceptionFromWasmThunkGenerator)))
+ return nullptr;
+
if (maximum && !maximumBytes) {
// User specified a zero maximum, initial size must also be zero.
RELEASE_ASSERT(!initialBytes);
- return adoptRef(new Memory(initial, maximum, WTFMove(notifyMemoryPressure), WTFMove(syncTryToReclaimMemory), WTFMove(growSuccessCallback)));
+ return adoptRef(new Memory(initial, maximum));
}
- bool done = tryAllocate(
+ bool done = tryAndGC(
+ vm,
[&] () -> MemoryResult::Kind {
return memoryManager().tryAllocatePhysicalBytes(initialBytes);
- }, notifyMemoryPressure, syncTryToReclaimMemory);
+ });
if (!done)
return nullptr;
char* fastMemory = nullptr;
if (Options::useWebAssemblyFastMemory()) {
- tryAllocate(
+ tryAndGC(
+ vm,
[&] () -> MemoryResult::Kind {
auto result = memoryManager().tryAllocateVirtualPages();
fastMemory = bitwise_cast<char*>(result.basePtr);
return result.kind;
- }, notifyMemoryPressure, syncTryToReclaimMemory);
+ });
}
if (fastMemory) {
}
commitZeroPages(fastMemory, initialBytes);
-
- return adoptRef(new Memory(fastMemory, initial, maximum, Memory::fastMappedBytes(), MemoryMode::Signaling, WTFMove(notifyMemoryPressure), WTFMove(syncTryToReclaimMemory), WTFMove(growSuccessCallback)));
+
+ return adoptRef(new Memory(fastMemory, initial, maximum, Memory::fastMappedBytes(), MemoryMode::Signaling));
}
if (UNLIKELY(Options::crashIfWebAssemblyCantFastMemory()))
webAssemblyCouldntGetFastMemory();
if (!initialBytes)
- return adoptRef(new Memory(initial, maximum, WTFMove(notifyMemoryPressure), WTFMove(syncTryToReclaimMemory), WTFMove(growSuccessCallback)));
+ return adoptRef(new Memory(initial, maximum));
void* slowMemory = Gigacage::tryAlignedMalloc(Gigacage::Primitive, WTF::pageSize(), initialBytes);
if (!slowMemory) {
return nullptr;
}
memset(slowMemory, 0, initialBytes);
- return adoptRef(new Memory(slowMemory, initial, maximum, initialBytes, MemoryMode::BoundsChecking, WTFMove(notifyMemoryPressure), WTFMove(syncTryToReclaimMemory), WTFMove(growSuccessCallback)));
+ return adoptRef(new Memory(slowMemory, initial, maximum, initialBytes, MemoryMode::BoundsChecking));
}
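Creation now needs the VM so the allocator can drive GC when under memory pressure; a hedged sketch of the calling side (the names are illustrative):

    // Sketch: how a JSWebAssemblyMemory-style wrapper would obtain its Wasm::Memory.
    RefPtr<Wasm::Memory> memory = Wasm::Memory::create(vm, initialPageCount, maximumPageCount);
    if (!memory)
        return nullptr; // both allocation paths can fail even after the GC retries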
Memory::~Memory()
memoryManager().freePhysicalBytes(m_size);
switch (m_mode) {
case MemoryMode::Signaling:
- if (mprotect(m_memory, Memory::fastMappedBytes(), PROT_READ | PROT_WRITE)) {
- dataLog("mprotect failed: ", strerror(errno), "\n");
- RELEASE_ASSERT_NOT_REACHED();
- }
+ mprotect(m_memory, Memory::fastMappedBytes(), PROT_READ | PROT_WRITE);
memoryManager().freeVirtualPages(m_memory);
break;
case MemoryMode::BoundsChecking:
return memoryManager().containsAddress(address);
}
-Expected<PageCount, Memory::GrowFailReason> Memory::grow(PageCount delta)
+bool Memory::grow(VM& vm, PageCount newSize)
{
- const Wasm::PageCount oldPageCount = sizeInPages();
-
- if (!delta.isValid())
- return makeUnexpected(GrowFailReason::InvalidDelta);
-
- const Wasm::PageCount newPageCount = oldPageCount + delta;
- if (!newPageCount)
- return makeUnexpected(GrowFailReason::InvalidGrowSize);
-
- auto success = [&] () {
- m_growSuccessCallback(GrowSuccessTag, oldPageCount, newPageCount);
- return oldPageCount;
- };
-
- if (delta.pageCount() == 0)
- return success();
+ RELEASE_ASSERT(newSize > PageCount::fromBytes(m_size));
- dataLogLnIf(verbose, "Memory::grow(", delta, ") to ", newPageCount, " from ", *this);
- RELEASE_ASSERT(newPageCount > PageCount::fromBytes(m_size));
+ dataLogLnIf(verbose, "Memory::grow to ", newSize, " from ", *this);
- if (maximum() && newPageCount > maximum())
- return makeUnexpected(GrowFailReason::WouldExceedMaximum);
+ if (maximum() && newSize > maximum())
+ return false;
- size_t desiredSize = newPageCount.bytes();
+ size_t desiredSize = newSize.bytes();
RELEASE_ASSERT(desiredSize > m_size);
size_t extraBytes = desiredSize - m_size;
RELEASE_ASSERT(extraBytes);
- bool allocationSuccess = tryAllocate(
+ bool success = tryAndGC(
+ vm,
[&] () -> MemoryResult::Kind {
return memoryManager().tryAllocatePhysicalBytes(extraBytes);
- }, m_notifyMemoryPressure, m_syncTryToReclaimMemory);
- if (!allocationSuccess)
- return makeUnexpected(GrowFailReason::OutOfMemory);
-
+ });
+ if (!success)
+ return false;
+
switch (mode()) {
case MemoryMode::BoundsChecking: {
RELEASE_ASSERT(maximum().bytes() != 0);
-
+
void* newMemory = Gigacage::tryAlignedMalloc(Gigacage::Primitive, WTF::pageSize(), desiredSize);
if (!newMemory)
- return makeUnexpected(GrowFailReason::OutOfMemory);
-
+ return false;
memcpy(newMemory, m_memory, m_size);
memset(static_cast<char*>(newMemory) + m_size, 0, desiredSize - m_size);
if (m_memory)
m_memory = newMemory;
m_mappedCapacity = desiredSize;
m_size = desiredSize;
- return success();
+ return true;
}
case MemoryMode::Signaling: {
RELEASE_ASSERT(m_memory);
dataLogLnIf(verbose, "Marking WebAssembly memory's ", RawPointer(m_memory), " as read+write in range [", RawPointer(startAddress), ", ", RawPointer(startAddress + extraBytes), ")");
if (mprotect(startAddress, extraBytes, PROT_READ | PROT_WRITE)) {
- dataLog("mprotect failed: ", strerror(errno), "\n");
- RELEASE_ASSERT_NOT_REACHED();
+ dataLogLnIf(verbose, "Memory::grow in-place failed ", *this);
+ return false;
}
commitZeroPages(startAddress, extraBytes);
m_size = desiredSize;
- return success();
- }
- }
-
+ return true;
+ } }
+
RELEASE_ASSERT_NOT_REACHED();
- return oldPageCount;
+ return false;
}
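In BoundsChecking mode the restored grow() above allocates a fresh buffer of the desired size, copies the old contents, zero-fills the tail, and reports failure with a plain bool. A minimal stand-alone illustration of that copy-and-zero growth policy, assuming the spec's 64 KiB page size and using std::vector in place of the Gigacage allocator:

    #include <cstdint>
    #include <cstring>
    #include <optional>
    #include <vector>

    constexpr size_t pageSize = 64 * 1024; // wasm page size per the spec

    // A bounds-checked linear memory that grows by reallocating, copying, and
    // zero-filling, mirroring the shape of the BoundsChecking branch above.
    struct LinearMemory {
        std::vector<uint8_t> bytes;           // always a multiple of pageSize
        std::optional<uint32_t> maximumPages; // from the memory's declared limits

        bool grow(uint32_t newPageCount) // false signals failure, like the bool-returning grow()
        {
            size_t desiredSize = size_t(newPageCount) * pageSize;
            if (desiredSize <= bytes.size())
                return false; // grow() is only reached with a strictly larger size
            if (maximumPages && newPageCount > *maximumPages)
                return false;
            std::vector<uint8_t> newBytes(desiredSize, 0); // tail starts zeroed
            if (!bytes.empty())
                std::memcpy(newBytes.data(), bytes.data(), bytes.size());
            bytes = std::move(newBytes);
            return true;
        }
    };

    int main()
    {
        LinearMemory memory { std::vector<uint8_t>(2 * pageSize, 0), 4 };
        bool grew = memory.grow(3);     // within the maximum of 4 pages
        bool refused = !memory.grow(5); // exceeds the maximum, memory unchanged
        return grew && refused ? 0 : 1;
    }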
void Memory::dump(PrintStream& out) const
#if ENABLE(WEBASSEMBLY)
-#include "WasmMemoryMode.h"
#include "WasmPageCount.h"
-#include <wtf/Expected.h>
-#include <wtf/Function.h>
#include <wtf/RefCounted.h>
#include <wtf/RefPtr.h>
namespace JSC {
+class VM;
+
namespace Wasm {
+// FIXME: We should support other modes. see: https://bugs.webkit.org/show_bug.cgi?id=162693
+enum class MemoryMode : uint8_t {
+ BoundsChecking,
+ Signaling
+};
+static constexpr size_t NumberOfMemoryModes = 2;
+JS_EXPORT_PRIVATE const char* makeString(MemoryMode);
+
class Memory : public RefCounted<Memory> {
WTF_MAKE_NONCOPYABLE(Memory);
WTF_MAKE_FAST_ALLOCATED;
explicit operator bool() const { return !!m_memory; }
- enum NotifyPressure { NotifyPressureTag };
- enum SyncTryToReclaim { SyncTryToReclaimTag };
- enum GrowSuccess { GrowSuccessTag };
-
- static RefPtr<Memory> create();
- static RefPtr<Memory> create(PageCount initial, PageCount maximum, WTF::Function<void(NotifyPressure)>&& notifyMemoryPressure, WTF::Function<void(SyncTryToReclaim)>&& syncTryToReclaimMemory, WTF::Function<void(GrowSuccess, PageCount, PageCount)>&& growSuccessCallback);
+ static RefPtr<Memory> create(VM&, PageCount initial, PageCount maximum);
~Memory();
MemoryMode mode() const { return m_mode; }
- enum class GrowFailReason {
- InvalidDelta,
- InvalidGrowSize,
- WouldExceedMaximum,
- OutOfMemory,
- };
- Expected<PageCount, GrowFailReason> grow(PageCount);
+ // grow() should only be called from the JSWebAssemblyMemory object since that object needs to update internal
+ // pointers with the current base and size.
+ bool grow(VM&, PageCount);
void check() { ASSERT(!deletionHasBegun()); }
-
- static ptrdiff_t offsetOfMemory() { return OBJECT_OFFSETOF(Memory, m_memory); }
- static ptrdiff_t offsetOfSize() { return OBJECT_OFFSETOF(Memory, m_size); }
-
private:
- Memory();
- Memory(void* memory, PageCount initial, PageCount maximum, size_t mappedCapacity, MemoryMode, WTF::Function<void(NotifyPressure)>&& notifyMemoryPressure, WTF::Function<void(SyncTryToReclaim)>&& syncTryToReclaimMemory, WTF::Function<void(GrowSuccess, PageCount, PageCount)>&& growSuccessCallback);
- Memory(PageCount initial, PageCount maximum, WTF::Function<void(NotifyPressure)>&& notifyMemoryPressure, WTF::Function<void(SyncTryToReclaim)>&& syncTryToReclaimMemory, WTF::Function<void(GrowSuccess, PageCount, PageCount)>&& growSuccessCallback);
+ Memory(void* memory, PageCount initial, PageCount maximum, size_t mappedCapacity, MemoryMode);
+ Memory(PageCount initial, PageCount maximum);
- // FIXME: we cache these on the instances to avoid a load on instance->instance calls. This will require updating all the instances when grow is called. https://bugs.webkit.org/show_bug.cgi?id=177305
+ // FIXME: we should move these to the instance to avoid a load on instance->instance calls.
void* m_memory { nullptr };
size_t m_size { 0 };
PageCount m_initial;
PageCount m_maximum;
size_t m_mappedCapacity { 0 };
MemoryMode m_mode { MemoryMode::BoundsChecking };
- WTF::Function<void(NotifyPressure)> m_notifyMemoryPressure;
- WTF::Function<void(SyncTryToReclaim)> m_syncTryToReclaimMemory;
- WTF::Function<void(GrowSuccess, PageCount, PageCount)> m_growSuccessCallback;
};
} } // namespace JSC::Wasm
std::call_once(staticPinnedRegisterInfoFlag, [] () {
Vector<PinnedSizeRegisterInfo> sizeRegisters;
GPRReg baseMemoryPointer = InvalidGPRReg;
- GPRReg wasmContextInstancePointer = InvalidGPRReg;
+ GPRReg wasmContextPointer = InvalidGPRReg;
// FIXME: We should support more than one memory size register, and we should allow different
// WebAssembly.Instance to have different pins. Right now we take a vector with only one entry.
// see: https://bugs.webkit.org/show_bug.cgi?id=162952
Vector<unsigned> pinnedSizes = { 0 };
unsigned numberOfPinnedRegisters = pinnedSizes.size() + 1;
- if (!Context::useFastTLS())
+ if (!useFastTLSForContext())
++numberOfPinnedRegisters;
Vector<GPRReg> pinnedRegs = getPinnedRegisters(numberOfPinnedRegisters);
baseMemoryPointer = pinnedRegs.takeLast();
- if (!Context::useFastTLS())
- wasmContextInstancePointer = pinnedRegs.takeLast();
+ if (!useFastTLSForContext())
+ wasmContextPointer = pinnedRegs.takeLast();
ASSERT(pinnedSizes.size() == pinnedRegs.size());
for (unsigned i = 0; i < pinnedSizes.size(); ++i)
sizeRegisters.append({ pinnedRegs[i], pinnedSizes[i] });
- staticPinnedRegisterInfo.construct(WTFMove(sizeRegisters), baseMemoryPointer, wasmContextInstancePointer);
+ staticPinnedRegisterInfo.construct(WTFMove(sizeRegisters), baseMemoryPointer, wasmContextPointer);
});
return staticPinnedRegisterInfo.get();
}
-PinnedRegisterInfo::PinnedRegisterInfo(Vector<PinnedSizeRegisterInfo>&& sizeRegisters, GPRReg baseMemoryPointer, GPRReg wasmContextInstancePointer)
+PinnedRegisterInfo::PinnedRegisterInfo(Vector<PinnedSizeRegisterInfo>&& sizeRegisters, GPRReg baseMemoryPointer, GPRReg wasmContextPointer)
: sizeRegisters(WTFMove(sizeRegisters))
, baseMemoryPointer(baseMemoryPointer)
- , wasmContextInstancePointer(wasmContextInstancePointer)
+ , wasmContextPointer(wasmContextPointer)
{
}
/*
- * Copyright (C) 2016-2017 Apple Inc. All rights reserved.
+ * Copyright (C) 2016 Apple Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
struct PinnedRegisterInfo {
Vector<PinnedSizeRegisterInfo> sizeRegisters;
GPRReg baseMemoryPointer;
- GPRReg wasmContextInstancePointer;
+ GPRReg wasmContextPointer;
static const PinnedRegisterInfo& get();
PinnedRegisterInfo(Vector<PinnedSizeRegisterInfo>&&, GPRReg, GPRReg);
{
RegisterSet result;
result.set(baseMemoryPointer);
- if (wasmContextInstancePointer != InvalidGPRReg)
- result.set(wasmContextInstancePointer);
+ if (wasmContextPointer != InvalidGPRReg)
+ result.set(wasmContextPointer);
if (mode != MemoryMode::Signaling) {
for (const auto& info : sizeRegisters)
result.set(info.sizeRegister);
+++ /dev/null
-/*
- * Copyright (C) 2016-2017 Apple Inc. All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions
- * are met:
- * 1. Redistributions of source code must retain the above copyright
- * notice, this list of conditions and the following disclaimer.
- * 2. Redistributions in binary form must reproduce the above copyright
- * notice, this list of conditions and the following disclaimer in the
- * documentation and/or other materials provided with the distribution.
- *
- * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
- * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
- * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
- * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
- * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
- * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
- * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
- * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
- * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
- * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-#include "config.h"
-#include "WasmMemoryMode.h"
-
-#if ENABLE(WEBASSEMBLY)
-
-#include <wtf/Assertions.h>
-
-namespace JSC { namespace Wasm {
-
-const char* makeString(MemoryMode mode)
-{
- switch (mode) {
- case MemoryMode::BoundsChecking: return "BoundsChecking";
- case MemoryMode::Signaling: return "Signaling";
- }
- RELEASE_ASSERT_NOT_REACHED();
- return "";
-}
-
-} } // namespace JSC::Wasm
-
-#endif // ENABLE(WEBASSEMBLY)
+++ /dev/null
-/*
- * Copyright (C) 2017 Apple Inc. All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions
- * are met:
- * 1. Redistributions of source code must retain the above copyright
- * notice, this list of conditions and the following disclaimer.
- * 2. Redistributions in binary form must reproduce the above copyright
- * notice, this list of conditions and the following disclaimer in the
- * documentation and/or other materials provided with the distribution.
- *
- * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
- * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
- * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
- * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
- * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
- * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
- * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
- * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
- * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
- * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-#pragma once
-
-#if ENABLE(WEBASSEMBLY)
-
-#include "JSExportMacros.h"
-
-namespace JSC { namespace Wasm {
-
-// FIXME: We should support other modes. see: https://bugs.webkit.org/show_bug.cgi?id=162693
-enum class MemoryMode : uint8_t {
- BoundsChecking,
- Signaling
-};
-
-static constexpr size_t NumberOfMemoryModes = 2;
-JS_EXPORT_PRIVATE const char* makeString(MemoryMode);
-
-} } // namespace JSC::Wasm
-
-#endif // ENABLE(WEBASSEMLY)
static Plan::CompletionTask makeValidationCallback(Module::AsyncValidationCallback&& callback)
{
- return createSharedTask<Plan::CallbackType>([callback = WTFMove(callback)] (Plan& plan) {
+ return createSharedTask<Plan::CallbackType>([callback = WTFMove(callback)] (VM* vm, Plan& plan) {
ASSERT(!plan.hasWork());
- callback->run(makeValidationResult(static_cast<BBQPlan&>(plan)));
+ ASSERT(vm);
+ callback->run(*vm, makeValidationResult(static_cast<BBQPlan&>(plan)));
});
}
-Module::ValidationResult Module::validateSync(Context* context, Vector<uint8_t>&& source)
+Module::ValidationResult Module::validateSync(VM& vm, Vector<uint8_t>&& source)
{
- Ref<BBQPlan> plan = adoptRef(*new BBQPlan(context, WTFMove(source), BBQPlan::Validation, Plan::dontFinalize(), nullptr, nullptr));
+ Ref<BBQPlan> plan = adoptRef(*new BBQPlan(&vm, WTFMove(source), BBQPlan::Validation, Plan::dontFinalize()));
plan->parseAndValidateModule();
return makeValidationResult(plan.get());
}
-void Module::validateAsync(Context* context, Vector<uint8_t>&& source, Module::AsyncValidationCallback&& callback)
+void Module::validateAsync(VM& vm, Vector<uint8_t>&& source, Module::AsyncValidationCallback&& callback)
{
- Ref<Plan> plan = adoptRef(*new BBQPlan(context, WTFMove(source), BBQPlan::Validation, makeValidationCallback(WTFMove(callback)), nullptr, nullptr));
+ Ref<Plan> plan = adoptRef(*new BBQPlan(&vm, WTFMove(source), BBQPlan::Validation, makeValidationCallback(WTFMove(callback))));
Wasm::ensureWorklist().enqueue(WTFMove(plan));
}
-Ref<CodeBlock> Module::getOrCreateCodeBlock(Context* context, MemoryMode mode, CreateEmbedderWrapper&& createEmbedderWrapper, ThrowWasmException throwWasmException)
+Ref<CodeBlock> Module::getOrCreateCodeBlock(MemoryMode mode)
{
RefPtr<CodeBlock> codeBlock;
auto locker = holdLock(m_lock);
// FIXME: We might want to back off retrying at some point:
// https://bugs.webkit.org/show_bug.cgi?id=170607
if (!codeBlock || (codeBlock->compilationFinished() && !codeBlock->runnable())) {
- codeBlock = CodeBlock::create(context, mode, const_cast<ModuleInformation&>(moduleInformation()), WTFMove(createEmbedderWrapper), throwWasmException);
+ codeBlock = CodeBlock::create(mode, const_cast<ModuleInformation&>(moduleInformation()));
m_codeBlocks[static_cast<uint8_t>(mode)] = codeBlock;
}
return codeBlock.releaseNonNull();
}
-Ref<CodeBlock> Module::compileSync(Context* context, MemoryMode mode, CreateEmbedderWrapper&& createEmbedderWrapper, ThrowWasmException throwWasmException)
+Ref<CodeBlock> Module::compileSync(MemoryMode mode)
{
- Ref<CodeBlock> codeBlock = getOrCreateCodeBlock(context, mode, WTFMove(createEmbedderWrapper), throwWasmException);
+ Ref<CodeBlock> codeBlock = getOrCreateCodeBlock(mode);
codeBlock->waitUntilFinished();
return codeBlock;
}
-void Module::compileAsync(Context* context, MemoryMode mode, CodeBlock::AsyncCompilationCallback&& task, CreateEmbedderWrapper&& createEmbedderWrapper, ThrowWasmException throwWasmException)
+void Module::compileAsync(VM& vm, MemoryMode mode, CodeBlock::AsyncCompilationCallback&& task)
{
- Ref<CodeBlock> codeBlock = getOrCreateCodeBlock(context, mode, WTFMove(createEmbedderWrapper), throwWasmException);
- codeBlock->compileAsync(context, WTFMove(task));
+ Ref<CodeBlock> codeBlock = getOrCreateCodeBlock(mode);
+ codeBlock->compileAsync(vm, WTFMove(task));
}
} } // namespace JSC::Wasm
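getOrCreateCodeBlock above caches one CodeBlock per MemoryMode behind the module's lock and replaces a cached entry whose compilation finished but is not runnable. A sketch of that get-or-recreate pattern, with std::mutex and std::shared_ptr standing in for WTF's Lock and RefPtr (the types here are illustrative, not WebKit API):

    #include <array>
    #include <cstddef>
    #include <cstdint>
    #include <memory>
    #include <mutex>

    enum class MemoryMode : uint8_t { BoundsChecking, Signaling };
    constexpr size_t NumberOfMemoryModes = 2;

    struct CodeBlock {
        bool compilationFinished { false };
        bool runnable { false };
    };

    class ModuleSketch {
    public:
        // Return the cached CodeBlock for this mode, creating a new one when there is
        // none or when a finished compilation turned out not to be runnable.
        std::shared_ptr<CodeBlock> getOrCreateCodeBlock(MemoryMode mode)
        {
            std::lock_guard<std::mutex> locker(m_lock);
            auto& slot = m_codeBlocks[static_cast<size_t>(mode)];
            if (!slot || (slot->compilationFinished && !slot->runnable))
                slot = std::make_shared<CodeBlock>(); // a real module would kick off compilation here
            return slot;
        }

    private:
        std::mutex m_lock;
        std::array<std::shared_ptr<CodeBlock>, NumberOfMemoryModes> m_codeBlocks;
    };

Callers then either wait on the returned block (compileSync) or attach an asynchronous completion callback to it (compileAsync), as in the hunk above.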
#if ENABLE(WEBASSEMBLY)
#include "WasmCodeBlock.h"
-#include "WasmEmbedder.h"
#include "WasmMemory.h"
#include <wtf/Expected.h>
#include <wtf/Lock.h>
namespace JSC { namespace Wasm {
-struct Context;
struct ModuleInformation;
class Plan;
using SignatureIndex = uint32_t;
-
+
class Module : public ThreadSafeRefCounted<Module> {
public:
using ValidationResult = WTF::Expected<RefPtr<Module>, String>;
- typedef void CallbackType(ValidationResult&&);
+ typedef void CallbackType(VM&, ValidationResult&&);
using AsyncValidationCallback = RefPtr<SharedTask<CallbackType>>;
- static ValidationResult validateSync(Context*, Vector<uint8_t>&& source);
- static void validateAsync(Context*, Vector<uint8_t>&& source, Module::AsyncValidationCallback&&);
+ static ValidationResult validateSync(VM&, Vector<uint8_t>&& source);
+ static void validateAsync(VM&, Vector<uint8_t>&& source, Module::AsyncValidationCallback&&);
static Ref<Module> create(Ref<ModuleInformation>&& moduleInformation)
{
Wasm::SignatureIndex signatureIndexFromFunctionIndexSpace(unsigned functionIndexSpace) const;
const Wasm::ModuleInformation& moduleInformation() const { return m_moduleInformation.get(); }
- Ref<CodeBlock> compileSync(Context*, MemoryMode, CreateEmbedderWrapper&&, ThrowWasmException);
- void compileAsync(Context*, MemoryMode, CodeBlock::AsyncCompilationCallback&&, CreateEmbedderWrapper&&, ThrowWasmException);
+ Ref<CodeBlock> compileSync(MemoryMode);
+ void compileAsync(VM&, MemoryMode, CodeBlock::AsyncCompilationCallback&&);
JS_EXPORT_PRIVATE ~Module();
CodeBlock* codeBlockFor(MemoryMode mode) { return m_codeBlocks[static_cast<uint8_t>(mode)].get(); }
private:
- Ref<CodeBlock> getOrCreateCodeBlock(Context*, MemoryMode, CreateEmbedderWrapper&&, ThrowWasmException);
+ Ref<CodeBlock> getOrCreateCodeBlock(MemoryMode);
Module(Ref<ModuleInformation>&&);
Ref<ModuleInformation> m_moduleInformation;
#if ENABLE(WEBASSEMBLY)
#include "IdentifierInlines.h"
+#include "JSWebAssemblyTable.h"
#include "WasmMemoryInformation.h"
#include "WasmNameSectionParser.h"
#include "WasmOps.h"
PartialResult limits = parseResizableLimits(initial, maximum);
if (UNLIKELY(!limits))
return limits.getUnexpected();
- WASM_PARSER_FAIL_IF(initial > maxTableEntries, "Table's initial page count of ", initial, " is too big, maximum ", maxTableEntries);
+ WASM_PARSER_FAIL_IF(!JSWebAssemblyTable::isValidSize(initial), "Table's initial page count of ", initial, " is invalid");
ASSERT(!maximum || *maximum >= initial);
+++ /dev/null
-/*
- * Copyright (C) 2017 Apple Inc. All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions
- * are met:
- * 1. Redistributions of source code must retain the above copyright
- * notice, this list of conditions and the following disclaimer.
- * 2. Redistributions in binary form must reproduce the above copyright
- * notice, this list of conditions and the following disclaimer in the
- * documentation and/or other materials provided with the distribution.
- *
- * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
- * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
- * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
- * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
- * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
- * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
- * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
- * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
- * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
- * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-#pragma once
-
-#include "WasmName.h"
-#include <wtf/ThreadSafeRefCounted.h>
-#include <wtf/Vector.h>
-
-namespace JSC { namespace Wasm {
-
-struct NameSection : ThreadSafeRefCounted<NameSection> {
- static Ref<NameSection> create()
- {
- return adoptRef(*new NameSection());
- }
-
- Name moduleName;
- Vector<Name> functionNames;
- const Name* get(size_t functionIndexSpace)
- {
- return functionIndexSpace < functionNames.size() ? &functionNames[functionIndexSpace] : nullptr;
- }
-};
-
-} } // namespace JSC::Wasm
#include "B3Compilation.h"
#include "B3OpaqueByproducts.h"
#include "JSCInlines.h"
-#include "JSWebAssemblyInstance.h"
+#include "JSWebAssemblyModule.h"
#include "LinkBuffer.h"
#include "WasmB3IRGenerator.h"
-#include "WasmCallee.h"
#include "WasmContext.h"
#include "WasmMachineThreads.h"
#include "WasmMemory.h"
static const bool verbose = false;
}
-OMGPlan::OMGPlan(Context* context, Ref<Module>&& module, uint32_t functionIndex, MemoryMode mode, CompletionTask&& task)
- : Base(context, makeRef(const_cast<ModuleInformation&>(module->moduleInformation())), WTFMove(task))
- , m_module(WTFMove(module))
- , m_codeBlock(*m_module->codeBlockFor(mode))
+OMGPlan::OMGPlan(Ref<Module> module, uint32_t functionIndex, MemoryMode mode, CompletionTask&& task)
+ : Base(nullptr, makeRef(const_cast<ModuleInformation&>(module->moduleInformation())), WTFMove(task))
+ , m_module(module.copyRef())
+ , m_codeBlock(*module->codeBlockFor(mode))
, m_functionIndex(functionIndex)
{
setMode(mode);
complete(holdLock(m_lock));
}
-void OMGPlan::runForIndex(JSWebAssemblyInstance* instance, uint32_t functionIndex)
+void runOMGPlanForIndex(Context* context, uint32_t functionIndex)
{
- Wasm::CodeBlock& codeBlock = instance->wasmCodeBlock();
- ASSERT(instance->wasmMemory()->mode() == codeBlock.mode());
+ JSWebAssemblyCodeBlock* codeBlock = context->codeBlock();
+ ASSERT(context->memoryMode() == codeBlock->m_codeBlock->mode());
- if (codeBlock.tierUpCount(functionIndex).shouldStartTierUp()) {
- Ref<Plan> plan = adoptRef(*new OMGPlan(instance->context(), Ref<Wasm::Module>(instance->wasmModule()), functionIndex, codeBlock.mode(), Plan::dontFinalize()));
+ if (codeBlock->m_codeBlock->tierUpCount(functionIndex).shouldStartTierUp()) {
+ Ref<Plan> plan = adoptRef(*new OMGPlan(context->module()->module(), functionIndex, codeBlock->m_codeBlock->mode(), Plan::dontFinalize()));
ensureWorklist().enqueue(plan.copyRef());
if (UNLIKELY(!Options::useConcurrentJIT()))
plan->waitForCompletion();
#if ENABLE(WEBASSEMBLY)
+#include "VM.h"
#include "WasmContext.h"
#include "WasmModule.h"
#include "WasmPlan.h"
class OMGPlan final : public Plan {
public:
using Base = Plan;
+ // Note: CompletionTask should not hold a reference to the Plan; otherwise there will be a reference cycle.

+ OMGPlan(Ref<Module>, uint32_t functionIndex, MemoryMode, CompletionTask&&);
bool hasWork() const override { return !m_completed; }
void work(CompilationEffort) override;
bool multiThreaded() const override { return false; }
- static void runForIndex(JSWebAssemblyInstance*, uint32_t functionIndex);
-
private:
// For some reason friendship doesn't extend to parent classes...
using Base::m_lock;
- // Note: CompletionTask should not hold a reference to the Plan otherwise there will be a reference cycle.
- OMGPlan(Context*, Ref<Module>&&, uint32_t functionIndex, MemoryMode, CompletionTask&&);
-
bool isComplete() const override { return m_completed; }
void complete(const AbstractLocker& locker) override
{
uint32_t m_functionIndex;
};
+void runOMGPlanForIndex(Context*, uint32_t functionIndex);
+
} } // namespace JSC::Wasm
#endif // ENABLE(WEBASSEMBLY)
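The Note restored in WasmOMGPlan.h above warns that a CompletionTask must not keep a strong reference to its Plan. A small stand-alone program showing why, with std::shared_ptr standing in for Ref/RefPtr (illustrative only; the leak mechanics are the same):

    #include <functional>
    #include <memory>

    struct PlanSketch {
        std::function<void()> completionTask; // the plan owns its completion task
    };

    int main()
    {
        {
            auto plan = std::make_shared<PlanSketch>();
            // Cycle: the plan owns the task and the task owns the plan, so the
            // reference count can never reach zero once `plan` goes out of scope.
            plan->completionTask = [plan] { /* ... */ };
        } // leaked

        {
            auto plan = std::make_shared<PlanSketch>();
            // No cycle: capture weakly and promote only while the task runs.
            std::weak_ptr<PlanSketch> weakPlan = plan;
            plan->completionTask = [weakPlan] {
                if (auto strongPlan = weakPlan.lock()) { /* ... */ }
            };
        } // destroyed normally
        return 0;
    }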
/*
- * Copyright (C) 2016-2017 Apple Inc. All rights reserved.
+ * Copyright (C) 2016 Apple Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
{
return pageCount <= maxPageCount;
}
-
- bool isValid() const
- {
- return isValid(m_pageCount);
- }
static PageCount fromBytes(uint64_t bytes)
{
/*
- * Copyright (C) 2016-2017 Apple Inc. All rights reserved.
+ * Copyright (C) 2016 Apple Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
#if ENABLE(WEBASSEMBLY)
#include "B3Compilation.h"
+#include "JSCInlines.h"
+#include "JSGlobalObject.h"
#include "WasmB3IRGenerator.h"
#include "WasmBinding.h"
#include "WasmCallee.h"
static const bool verbose = false;
}
-Plan::Plan(Context* context, Ref<ModuleInformation> info, CompletionTask&& task, CreateEmbedderWrapper&& createEmbedderWrapper, ThrowWasmException throwWasmException)
+Plan::Plan(VM* vm, Ref<ModuleInformation> info, CompletionTask&& task)
: m_moduleInformation(WTFMove(info))
- , m_createEmbedderWrapper(WTFMove(createEmbedderWrapper))
- , m_throwWasmException(throwWasmException)
, m_source(m_moduleInformation->source.data())
, m_sourceLength(m_moduleInformation->source.size())
{
- m_completionTasks.append(std::make_pair(context, WTFMove(task)));
+ m_completionTasks.append(std::make_pair(vm, WTFMove(task)));
}
-Plan::Plan(Context* context, Ref<ModuleInformation> info, CompletionTask&& task)
- : Plan(context, WTFMove(info), WTFMove(task), nullptr, nullptr)
-{
-}
-
-Plan::Plan(Context* context, const uint8_t* source, size_t sourceLength, CompletionTask&& task)
+Plan::Plan(VM* vm, const uint8_t* source, size_t sourceLength, CompletionTask&& task)
: m_moduleInformation(adoptRef(*new ModuleInformation(Vector<uint8_t>())))
, m_source(source)
, m_sourceLength(sourceLength)
{
- m_completionTasks.append(std::make_pair(context, WTFMove(task)));
+ m_completionTasks.append(std::make_pair(vm, WTFMove(task)));
}
void Plan::runCompletionTasks(const AbstractLocker&)
ASSERT(isComplete() && !hasWork());
for (auto& task : m_completionTasks)
- task.second->run(*this);
+ task.second->run(task.first, *this);
m_completionTasks.clear();
m_completed.notifyAll();
}
-void Plan::addCompletionTask(Context* context, CompletionTask&& task)
+void Plan::addCompletionTask(VM& vm, CompletionTask&& task)
{
LockHolder locker(m_lock);
if (!isComplete())
- m_completionTasks.append(std::make_pair(context, WTFMove(task)));
+ m_completionTasks.append(std::make_pair(&vm, WTFMove(task)));
else
- task->run(*this);
+ task->run(&vm, *this);
}
void Plan::waitForCompletion()
}
}
-bool Plan::tryRemoveContextAndCancelIfLast(Context& context)
+bool Plan::tryRemoveVMAndCancelIfLast(VM& vm)
{
LockHolder locker(m_lock);
if (!ASSERT_DISABLED) {
- // We allow the first completion task to not have a Context.
+ // We allow the first completion task to not have a VM.
for (unsigned i = 1; i < m_completionTasks.size(); ++i)
ASSERT(m_completionTasks[i].first);
}
bool removedAnyTasks = false;
- m_completionTasks.removeAllMatching([&] (const std::pair<Context*, CompletionTask>& pair) {
- bool shouldRemove = pair.first == &context;
+ m_completionTasks.removeAllMatching([&] (const std::pair<VM*, CompletionTask>& pair) {
+ bool shouldRemove = pair.first == &vm;
removedAnyTasks |= shouldRemove;
return shouldRemove;
});
#if ENABLE(WEBASSEMBLY)
#include "CompilationResult.h"
+#include "VM.h"
#include "WasmB3IRGenerator.h"
-#include "WasmEmbedder.h"
#include "WasmModuleInformation.h"
#include <wtf/Bag.h>
#include <wtf/SharedTask.h>
namespace JSC {
class CallLinkInfo;
+class JSGlobalObject;
+class JSPromiseDeferred;
namespace Wasm {
-struct Context;
-
class Plan : public ThreadSafeRefCounted<Plan> {
public:
- typedef void CallbackType(Plan&);
+ typedef void CallbackType(VM*, Plan&);
using CompletionTask = RefPtr<SharedTask<CallbackType>>;
-
- static CompletionTask dontFinalize() { return createSharedTask<CallbackType>([](Plan&) { }); }
- Plan(Context*, Ref<ModuleInformation>, CompletionTask&&, CreateEmbedderWrapper&&, ThrowWasmException);
- Plan(Context*, Ref<ModuleInformation>, CompletionTask&&);
+ static CompletionTask dontFinalize() { return createSharedTask<CallbackType>([](VM*, Plan&) { }); }
+ Plan(VM*, Ref<ModuleInformation>, CompletionTask&&);
// Note: This constructor should only be used if you are not actually building a module e.g. validation/function tests
- JS_EXPORT_PRIVATE Plan(Context*, const uint8_t*, size_t, CompletionTask&&);
+ JS_EXPORT_PRIVATE Plan(VM*, const uint8_t*, size_t, CompletionTask&&);
virtual JS_EXPORT_PRIVATE ~Plan();
// If you guarantee the ordering here, you can rely on FIFO of the
// completion tasks being called.
- void addCompletionTask(Context*, CompletionTask&&);
+ void addCompletionTask(VM&, CompletionTask&&);
void setMode(MemoryMode mode) { m_mode = mode; }
MemoryMode mode() const { return m_mode; }
void waitForCompletion();
// Returns true if it cancelled the plan.
- bool tryRemoveContextAndCancelIfLast(Context&);
+ bool tryRemoveVMAndCancelIfLast(VM&);
protected:
void runCompletionTasks(const AbstractLocker&);
Ref<ModuleInformation> m_moduleInformation;
- Vector<std::pair<Context*, CompletionTask>, 1> m_completionTasks;
-
- CreateEmbedderWrapper m_createEmbedderWrapper;
- ThrowWasmException m_throwWasmException { nullptr };
+ Vector<std::pair<VM*, CompletionTask>, 1> m_completionTasks;
const uint8_t* m_source;
const size_t m_sourceLength;
/*
- * Copyright (C) 2016-2017 Apple Inc. All rights reserved.
+ * Copyright (C) 2016 Apple Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
#if ENABLE(WEBASSEMBLY)
+#include "VM.h"
#include <wtf/FastMalloc.h>
#include <wtf/HashFunctions.h>
#include <wtf/PrintStream.h>
-#include <wtf/text/WTFString.h>
namespace JSC { namespace Wasm {
/*
- * Copyright (C) 2016-2017 Apple Inc. All rights reserved.
+ * Copyright (C) 2016 Apple Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
namespace JSC {
+class VM;
+
namespace Wasm {
using SignatureArgCount = uint32_t;
namespace JSC { namespace Wasm {
-// Signature information is held globally and shared by the entire process to allow all signatures to be unique. This is required when wasm calls another wasm instance, and must work when modules are shared between multiple VMs.
+// Signature information is held globally and shared by VMs to allow all signatures to be unique. This is required when wasm calls another wasm instance, and must work when modules are shared between multiple VMs.
// Note: signatures are never removed because that would require accounting for all WebAssembly.Module and which signatures they use. The maximum number of signatures is bounded, and isn't worth the counting overhead. We could clear everything when we reach zero outstanding WebAssembly.Module. https://bugs.webkit.org/show_bug.cgi?id=166037
class SignatureInformation {
WTF_MAKE_NONCOPYABLE(SignatureInformation);
+++ /dev/null
-/*
- * Copyright (C) 2017 Apple Inc. All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions
- * are met:
- * 1. Redistributions of source code must retain the above copyright
- * notice, this list of conditions and the following disclaimer.
- * 2. Redistributions in binary form must reproduce the above copyright
- * notice, this list of conditions and the following disclaimer in the
- * documentation and/or other materials provided with the distribution.
- *
- * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
- * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
- * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
- * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
- * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
- * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
- * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
- * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
- * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
- * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-#include "config.h"
-#include "WasmTable.h"
-
-#if ENABLE(WEBASSEMBLY)
-
-#include <wtf/CheckedArithmetic.h>
-#include <wtf/StdLibExtras.h>
-#include <type_traits>
-
-namespace JSC { namespace Wasm {
-
-RefPtr<Table> Table::create(uint32_t initial, std::optional<uint32_t> maximum)
-{
- if (!isValidSize(initial))
- return nullptr;
- return adoptRef(new (NotNull, fastMalloc(sizeof(Table))) Table(initial, maximum));
-}
-
-Table::~Table()
-{
-}
-
-Table::Table(uint32_t initial, std::optional<uint32_t> maximum)
-{
- m_size = initial;
- m_maximum = maximum;
- ASSERT(isValidSize(m_size));
- ASSERT(!m_maximum || *m_maximum >= m_size);
-
- // FIXME: It might be worth trying to pre-allocate maximum here. The spec recommends doing so.
- // But for now, we're not doing that.
- m_functions = MallocPtr<Wasm::CallableFunction>::malloc(sizeof(Wasm::CallableFunction) * static_cast<size_t>(size()));
- m_instances = MallocPtr<JSWebAssemblyInstance*>::malloc(sizeof(JSWebAssemblyInstance*) * static_cast<size_t>(size()));
- for (uint32_t i = 0; i < size(); ++i) {
- default_construct_at(&m_functions.get()[i]);
- ASSERT(m_functions.get()[i].signatureIndex == Wasm::Signature::invalidIndex); // We rely on this in compiled code.
- m_instances.get()[i] = nullptr;
- }
-}
-
-std::optional<uint32_t> Table::grow(uint32_t delta)
-{
- if (delta == 0)
- return size();
-
- using Checked = Checked<uint32_t, RecordOverflow>;
- Checked newSizeChecked = size();
- newSizeChecked += delta;
- uint32_t newSize;
- if (newSizeChecked.safeGet(newSize) == CheckedState::DidOverflow)
- return std::nullopt;
-
- if (maximum() && newSize > *maximum())
- return std::nullopt;
- if (!isValidSize(newSize))
- return std::nullopt;
-
- auto checkedGrow = [&] (auto& container) {
- Checked reallocSizeChecked = newSizeChecked;
- reallocSizeChecked *= sizeof(*container.get());
- uint32_t reallocSize;
- if (reallocSizeChecked.safeGet(reallocSize) == CheckedState::DidOverflow)
- return false;
- container.realloc(reallocSize);
- for (uint32_t i = m_size; i < newSize; ++i)
- default_construct_at(&container.get()[i]);
- return true;
- };
-
- if (!checkedGrow(m_functions))
- return std::nullopt;
- if (!checkedGrow(m_instances))
- return std::nullopt;
-
- m_size = newSize;
-
- return newSize;
-}
-
-void Table::clearFunction(uint32_t index)
-{
- RELEASE_ASSERT(index < size());
- m_functions.get()[index] = Wasm::CallableFunction();
- ASSERT(m_functions.get()[index].signatureIndex == Wasm::Signature::invalidIndex); // We rely on this in compiled code.
- m_instances.get()[index] = nullptr;
-}
-
-void Table::setFunction(uint32_t index, CallableFunction function, JSWebAssemblyInstance* instance)
-{
- RELEASE_ASSERT(index < size());
- m_functions.get()[index] = function;
- m_instances.get()[index] = instance;
-}
-
-} } // namespace JSC::Table
-
-#endif // ENABLE(WEBASSEMBLY)
+++ /dev/null
-/*
- * Copyright (C) 2017 Apple Inc. All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions
- * are met:
- * 1. Redistributions of source code must retain the above copyright
- * notice, this list of conditions and the following disclaimer.
- * 2. Redistributions in binary form must reproduce the above copyright
- * notice, this list of conditions and the following disclaimer in the
- * documentation and/or other materials provided with the distribution.
- *
- * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
- * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
- * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
- * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
- * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
- * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
- * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
- * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
- * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
- * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-#pragma once
-
-#if ENABLE(WEBASSEMBLY)
-
-#include "WasmFormat.h"
-#include "WasmLimits.h"
-#include <wtf/MallocPtr.h>
-#include <wtf/Optional.h>
-#include <wtf/Ref.h>
-#include <wtf/ThreadSafeRefCounted.h>
-
-namespace JSC {
-
-class JSWebAssemblyInstance; // FIXME this should be Wasm::Instance https://webkit.org/b/177472
-
-namespace Wasm {
-
-class Table : public ThreadSafeRefCounted<Table> {
-public:
- static RefPtr<Table> create(uint32_t initial, std::optional<uint32_t> maximum);
-
- JS_EXPORT_PRIVATE ~Table();
-
- std::optional<uint32_t> maximum() const { return m_maximum; }
- uint32_t size() const { return m_size; }
- std::optional<uint32_t> grow(uint32_t delta) WARN_UNUSED_RETURN;
- void clearFunction(uint32_t);
- void setFunction(uint32_t, CallableFunction, JSWebAssemblyInstance*); // FIXME make this Wasm::Instance. https://webkit.org/b/177472
-
- static ptrdiff_t offsetOfSize() { return OBJECT_OFFSETOF(Table, m_size); }
- static ptrdiff_t offsetOfFunctions() { return OBJECT_OFFSETOF(Table, m_functions); }
- static ptrdiff_t offsetOfInstances() { return OBJECT_OFFSETOF(Table, m_instances); }
-
- static bool isValidSize(uint32_t size) { return size < maxTableEntries; }
-
-private:
- Table(uint32_t initial, std::optional<uint32_t> maximum);
-
- std::optional<uint32_t> m_maximum;
- uint32_t m_size;
- MallocPtr<CallableFunction> m_functions;
- // call_indirect needs to do an Instance check to potentially context switch when calling a function to another instance. We can hold raw pointers to Instance here because the embedder ensures that Table keeps all the instances alive. We couldn't hold a Ref here because it would cause cycles.
- MallocPtr<JSWebAssemblyInstance*> m_instances; // FIXME make this a Wasm::Instance. https://webkit.org/b/177472
-};
-
-} } // namespace JSC::Wasm
-
-#endif // ENABLE(WEBASSEMBLY)
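The removed Table::grow guarded the new element count with Checked<uint32_t, RecordOverflow> before reallocating its parallel function/instance arrays. The same overflow-, maximum-, and limit-checked growth in plain standard C++, with a single std::vector standing in for those arrays and an assumed stand-in for maxTableEntries:

    #include <cstdint>
    #include <limits>
    #include <optional>
    #include <vector>

    // Assumed stand-in; the real limit is maxTableEntries in WasmLimits.h.
    constexpr uint32_t maxTableEntriesSketch = 10000000;

    static bool isValidSizeSketch(uint32_t size) { return size < maxTableEntriesSketch; }

    // Mirrors the shape of the removed Table::grow: returns the new size on success,
    // std::nullopt when the delta overflows uint32_t, exceeds the declared maximum,
    // or exceeds the implementation limit.
    std::optional<uint32_t> growTable(std::vector<void*>& slots, std::optional<uint32_t> maximum, uint32_t delta)
    {
        uint32_t oldSize = static_cast<uint32_t>(slots.size());
        if (!delta)
            return oldSize;
        if (delta > std::numeric_limits<uint32_t>::max() - oldSize)
            return std::nullopt; // what Checked<uint32_t, RecordOverflow> catches
        uint32_t newSize = oldSize + delta;
        if (maximum && newSize > *maximum)
            return std::nullopt;
        if (!isValidSizeSketch(newSize))
            return std::nullopt;
        slots.resize(newSize, nullptr); // new entries start cleared, like clearFunction()
        return newSize;
    }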
#if ENABLE(WEBASSEMBLY)
#include "CCallHelpers.h"
+#include "FrameTracers.h"
#include "HeapCellInlines.h"
#include "JITExceptions.h"
#include "JSWebAssemblyInstance.h"
+#include "JSWebAssemblyRuntimeError.h"
#include "LinkBuffer.h"
#include "ScratchRegisterAllocator.h"
#include "WasmContext.h"
// The thing that jumps here must move ExceptionType into the argumentGPR1 before jumping here.
// We're allowed to use temp registers here. We are not allowed to use callee saves.
- jit.loadWasmContextInstance(GPRInfo::argumentGPR2);
- jit.loadPtr(CCallHelpers::Address(GPRInfo::argumentGPR2, JSWebAssemblyInstance::offsetOfVM()), GPRInfo::argumentGPR0);
+ jit.loadWasmContext(GPRInfo::argumentGPR2);
+ jit.loadPtr(CCallHelpers::Address(GPRInfo::argumentGPR2, Context::offsetOfVM()), GPRInfo::argumentGPR0);
jit.copyCalleeSavesToVMEntryFrameCalleeSavesBuffer(GPRInfo::argumentGPR0);
jit.move(GPRInfo::callFrameRegister, GPRInfo::argumentGPR0);
CCallHelpers::Call call = jit.call();
jit.jump(GPRInfo::returnValueGPR);
jit.breakpoint(); // We should not reach this.
- ThrowWasmException throwWasmException = Thunks::singleton().throwWasmException();
- RELEASE_ASSERT(throwWasmException);
+ void* (*throwWasmException)(ExecState*, Wasm::ExceptionType, Wasm::Context*) = [] (ExecState* exec, Wasm::ExceptionType type, Wasm::Context* wasmContext) -> void* {
+ VM* vm = wasmContext->vm();
+ NativeCallFrameTracer tracer(vm, exec);
+
+ {
+ auto throwScope = DECLARE_THROW_SCOPE(*vm);
+ JSGlobalObject* globalObject = wasmContext->globalObject();
+
+ JSObject* error;
+ if (type == ExceptionType::StackOverflow)
+ error = createStackOverflowError(exec, globalObject);
+ else
+ error = JSWebAssemblyRuntimeError::create(exec, *vm, globalObject->WebAssemblyRuntimeErrorStructure(), Wasm::errorMessageForExceptionType(type));
+ throwException(exec, throwScope, error);
+ }
+
+ genericUnwind(vm, exec);
+ ASSERT(!!vm->callFrameForCatch);
+ ASSERT(!!vm->targetMachinePCForThrow);
+ // FIXME: We could make this better:
+ // This is a total hack, but the llint (both op_catch and handleUncaughtException)
+ // requires a cell in the callee field to load the VM. (The baseline JIT does not require
+ // this since it is compiled with a constant VM pointer.) We could make the calling convention
+ // for exceptions first load callFrameForCatch into the call frame register before jumping
+ // to the exception handler. If we did this, we could remove this terrible hack.
+ // https://bugs.webkit.org/show_bug.cgi?id=170440
+ bitwise_cast<uint64_t*>(exec)[CallFrameSlot::callee] = bitwise_cast<uint64_t>(wasmContext->webAssemblyToJSCallee());
+ return vm->targetMachinePCForThrow;
+ };
+
LinkBuffer linkBuffer(jit, GLOBAL_THUNK_ID);
- linkBuffer.link(call, FunctionPtr(throwWasmException));
+ linkBuffer.link(call, throwWasmException);
return FINALIZE_CODE(linkBuffer, ("Throw exception from Wasm"));
}
#endif
unsigned numberOfStackBytesUsedForRegisterPreservation = ScratchRegisterAllocator::preserveRegistersToStackForCall(jit, registersToSpill, extraPaddingBytes);
- jit.loadWasmContextInstance(GPRInfo::argumentGPR0);
- typedef void (*Run)(JSWebAssemblyInstance*, uint32_t);
- Run run = OMGPlan::runForIndex;
- jit.move(MacroAssembler::TrustedImmPtr(reinterpret_cast<void*>(run)), GPRInfo::argumentGPR2);
+ jit.loadWasmContext(GPRInfo::argumentGPR0);
+ jit.move(MacroAssembler::TrustedImmPtr(reinterpret_cast<void*>(runOMGPlanForIndex)), GPRInfo::argumentGPR2);
jit.call(GPRInfo::argumentGPR2);
ScratchRegisterAllocator::restoreRegistersFromStackForCall(jit, registersToSpill, RegisterSet(), numberOfStackBytesUsedForRegisterPreservation, extraPaddingBytes);
return *thunks;
}
-void Thunks::setThrowWasmException(ThrowWasmException throwWasmException)
-{
- auto locker = holdLock(m_lock);
- // The thunks are unique for the entire process, therefore changing the throwing function changes it for all uses of WebAssembly.
- RELEASE_ASSERT(!m_throwWasmException || m_throwWasmException == throwWasmException);
- m_throwWasmException = throwWasmException;
-}
-
-ThrowWasmException Thunks::throwWasmException()
-{
- return m_throwWasmException;
-}
-
MacroAssemblerCodeRef Thunks::stub(ThunkGenerator generator)
{
auto locker = holdLock(m_lock);
#if ENABLE(WEBASSEMBLY)
#include "MacroAssemblerCodeRef.h"
-#include "WasmEmbedder.h"
namespace JSC { namespace Wasm {
static void initialize();
static Thunks& singleton();
- void setThrowWasmException(ThrowWasmException);
- ThrowWasmException throwWasmException();
-
MacroAssemblerCodeRef stub(ThunkGenerator);
MacroAssemblerCodeRef stub(const AbstractLocker&, ThunkGenerator);
MacroAssemblerCodeRef existingStub(ThunkGenerator);
Thunks() = default;
HashMap<ThunkGenerator, MacroAssemblerCodeRef> m_stubs;
- ThrowWasmException m_throwWasmException { nullptr };
Lock m_lock;
};
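Thunks::stub, shown earlier in this patch, generates each thunk on first request under m_lock and hands back the cached MacroAssemblerCodeRef afterwards. A minimal generate-once cache keyed by the generator function, using std::mutex and std::unordered_map in place of WTF's Lock and HashMap (illustrative stand-ins, not the WebKit types):

    #include <mutex>
    #include <string>
    #include <unordered_map>

    // Stand-ins for MacroAssemblerCodeRef and ThunkGenerator.
    using CodeRef = std::string;
    using ThunkGenerator = CodeRef (*)();

    class ThunkCacheSketch {
    public:
        // Generate the stub on first request, then hand back the cached copy.
        CodeRef stub(ThunkGenerator generator)
        {
            std::lock_guard<std::mutex> locker(m_lock);
            auto it = m_stubs.find(generator);
            if (it != m_stubs.end())
                return it->second;
            CodeRef code = generator(); // the real version runs the JIT thunk generator
            m_stubs.emplace(generator, code);
            return code;
        }

    private:
        std::mutex m_lock;
        std::unordered_map<ThunkGenerator, CodeRef> m_stubs;
    };

The cache is process-wide and entries are never invalidated, which is why the removed setThrowWasmException asserted that the throwing function could only ever be set to one value.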
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
-
#include "config.h"
#include "WasmWorklist.h"
plan.waitForCompletion();
}
-void Worklist::stopAllPlansForContext(Context& context)
+void Worklist::stopAllPlansForVM(VM& vm)
{
LockHolder locker(*m_lock);
Vector<QueueElement> elements;
while (!m_queue.isEmpty()) {
QueueElement element = m_queue.dequeue();
- bool didCancel = element.plan->tryRemoveContextAndCancelIfLast(context);
+ bool didCancel = element.plan->tryRemoveVMAndCancelIfLast(vm);
if (!didCancel)
elements.append(WTFMove(element));
}
for (auto& thread : m_threads) {
if (thread->element.plan) {
- bool didCancel = thread->element.plan->tryRemoveContextAndCancelIfLast(context);
+ bool didCancel = thread->element.plan->tryRemoveVMAndCancelIfLast(vm);
if (didCancel) {
// We don't have to worry about the deadlocking since the thread can't block without checking for a new plan and must hold the lock to do so.
thread->synchronize.wait(*m_lock);
#if ENABLE(WEBASSEMBLY)
+#include "VM.h"
+
#include <queue>
#include <wtf/AutomaticThread.h>
namespace JSC {
+class JSPromiseDeferred;
+
namespace Wasm {
-struct Context;
class Plan;
class Worklist {
~Worklist();
JS_EXPORT_PRIVATE void enqueue(Ref<Plan>);
- void stopAllPlansForContext(Context&);
+ void stopAllPlansForVM(VM&);
JS_EXPORT_PRIVATE void completePlanSynchronously(Plan&);
+ void activatePlan(JSPromiseDeferred*, Plan*);
+ void deactivePlan(JSPromiseDeferred*, Plan*);
+
enum class Priority {
Shutdown,
Synchronous,
+++ /dev/null
-/*
- * Copyright (C) 2016-2017 Apple Inc. All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions
- * are met:
- * 1. Redistributions of source code must retain the above copyright
- * notice, this list of conditions and the following disclaimer.
- * 2. Redistributions in binary form must reproduce the above copyright
- * notice, this list of conditions and the following disclaimer in the
- * documentation and/or other materials provided with the distribution.
- *
- * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
- * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
- * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
- * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
- * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
- * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
- * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
- * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
- * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
- * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-#include "config.h"
-#include "JSToWasm.h"
-
-#if ENABLE(WEBASSEMBLY)
-
-#include "CCallHelpers.h"
-#include "JSWebAssemblyInstance.h"
-#include "WasmCallingConvention.h"
-
-namespace JSC { namespace Wasm {
-
-std::unique_ptr<InternalFunction> createJSToWasmWrapper(CompilationContext& compilationContext, const Signature& signature, Vector<UnlinkedWasmToWasmCall>* unlinkedWasmToWasmCalls, const ModuleInformation& info, MemoryMode mode, unsigned functionIndex)
-{
- CCallHelpers& jit = *compilationContext.jsEntrypointJIT;
-
- auto result = std::make_unique<InternalFunction>();
- jit.emitFunctionPrologue();
-
- // FIXME Stop using 0 as codeBlocks. https://bugs.webkit.org/show_bug.cgi?id=165321
- jit.store64(CCallHelpers::TrustedImm64(0), CCallHelpers::Address(GPRInfo::callFrameRegister, CallFrameSlot::codeBlock * static_cast<int>(sizeof(Register))));
- MacroAssembler::DataLabelPtr calleeMoveLocation = jit.moveWithPatch(MacroAssembler::TrustedImmPtr(nullptr), GPRInfo::nonPreservedNonReturnGPR);
- jit.storePtr(GPRInfo::nonPreservedNonReturnGPR, CCallHelpers::Address(GPRInfo::callFrameRegister, CallFrameSlot::callee * static_cast<int>(sizeof(Register))));
- CodeLocationDataLabelPtr* linkedCalleeMove = &result->calleeMoveLocation;
- jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
- *linkedCalleeMove = linkBuffer.locationOf(calleeMoveLocation);
- });
-
- const PinnedRegisterInfo& pinnedRegs = PinnedRegisterInfo::get();
- RegisterSet toSave = pinnedRegs.toSave(mode);
-
-#if !ASSERT_DISABLED
- unsigned toSaveSize = toSave.numberOfSetGPRs();
- // They should all be callee saves.
- toSave.filter(RegisterSet::calleeSaveRegisters());
- ASSERT(toSave.numberOfSetGPRs() == toSaveSize);
-#endif
-
- RegisterAtOffsetList registersToSpill(toSave, RegisterAtOffsetList::OffsetBaseType::FramePointerBased);
- result->entrypoint.calleeSaveRegisters = registersToSpill;
-
- unsigned totalFrameSize = registersToSpill.size() * sizeof(void*);
- totalFrameSize += WasmCallingConvention::headerSizeInBytes();
- totalFrameSize -= sizeof(CallerFrameAndPC);
- unsigned numGPRs = 0;
- unsigned numFPRs = 0;
- for (unsigned i = 0; i < signature.argumentCount(); i++) {
- switch (signature.argument(i)) {
- case Wasm::I64:
- case Wasm::I32:
- if (numGPRs >= wasmCallingConvention().m_gprArgs.size())
- totalFrameSize += sizeof(void*);
- ++numGPRs;
- break;
- case Wasm::F32:
- case Wasm::F64:
- if (numFPRs >= wasmCallingConvention().m_fprArgs.size())
- totalFrameSize += sizeof(void*);
- ++numFPRs;
- break;
- default:
- RELEASE_ASSERT_NOT_REACHED();
- }
- }
-
- totalFrameSize = WTF::roundUpToMultipleOf(stackAlignmentBytes(), totalFrameSize);
- jit.subPtr(MacroAssembler::TrustedImm32(totalFrameSize), MacroAssembler::stackPointerRegister);
-
- // We save all these registers regardless of having a memory or not.
- // The reason is that we use one of these as a scratch. That said,
- // almost all real wasm programs use memory, so it's not really
- // worth optimizing for the case that they don't.
- for (const RegisterAtOffset& regAtOffset : registersToSpill) {
- GPRReg reg = regAtOffset.reg().gpr();
- ptrdiff_t offset = regAtOffset.offset();
- jit.storePtr(reg, CCallHelpers::Address(GPRInfo::callFrameRegister, offset));
- }
-
- GPRReg wasmContextInstanceGPR = pinnedRegs.wasmContextInstancePointer;
-
- {
- CCallHelpers::Address calleeFrame = CCallHelpers::Address(MacroAssembler::stackPointerRegister, -static_cast<ptrdiff_t>(sizeof(CallerFrameAndPC)));
- numGPRs = 0;
- numFPRs = 0;
- // We're going to set the pinned registers after this. So
- // we can use this as a scratch for now since we saved it above.
- GPRReg scratchReg = pinnedRegs.baseMemoryPointer;
-
- ptrdiff_t jsOffset = CallFrameSlot::thisArgument * sizeof(EncodedJSValue);
-
- // vmEntryToWasm passes Wasm::Context*'s instance as the first JS argument when we're
- // not using fast TLS to hold the Wasm::Context*'s instance.
- if (!Context::useFastTLS()) {
- jit.loadPtr(CCallHelpers::Address(GPRInfo::callFrameRegister, jsOffset), wasmContextInstanceGPR);
- jsOffset += sizeof(EncodedJSValue);
- }
-
- ptrdiff_t wasmOffset = CallFrame::headerSizeInRegisters * sizeof(void*);
- for (unsigned i = 0; i < signature.argumentCount(); i++) {
- switch (signature.argument(i)) {
- case Wasm::I32:
- case Wasm::I64:
- if (numGPRs >= wasmCallingConvention().m_gprArgs.size()) {
- if (signature.argument(i) == Wasm::I32) {
- jit.load32(CCallHelpers::Address(GPRInfo::callFrameRegister, jsOffset), scratchReg);
- jit.store32(scratchReg, calleeFrame.withOffset(wasmOffset));
- } else {
- jit.load64(CCallHelpers::Address(GPRInfo::callFrameRegister, jsOffset), scratchReg);
- jit.store64(scratchReg, calleeFrame.withOffset(wasmOffset));
- }
- wasmOffset += sizeof(void*);
- } else {
- if (signature.argument(i) == Wasm::I32)
- jit.load32(CCallHelpers::Address(GPRInfo::callFrameRegister, jsOffset), wasmCallingConvention().m_gprArgs[numGPRs].gpr());
- else
- jit.load64(CCallHelpers::Address(GPRInfo::callFrameRegister, jsOffset), wasmCallingConvention().m_gprArgs[numGPRs].gpr());
- }
- ++numGPRs;
- break;
- case Wasm::F32:
- case Wasm::F64:
- if (numFPRs >= wasmCallingConvention().m_fprArgs.size()) {
- if (signature.argument(i) == Wasm::F32) {
- jit.load32(CCallHelpers::Address(GPRInfo::callFrameRegister, jsOffset), scratchReg);
- jit.store32(scratchReg, calleeFrame.withOffset(wasmOffset));
- } else {
- jit.load64(CCallHelpers::Address(GPRInfo::callFrameRegister, jsOffset), scratchReg);
- jit.store64(scratchReg, calleeFrame.withOffset(wasmOffset));
- }
- wasmOffset += sizeof(void*);
- } else {
- if (signature.argument(i) == Wasm::F32)
- jit.loadFloat(CCallHelpers::Address(GPRInfo::callFrameRegister, jsOffset), wasmCallingConvention().m_fprArgs[numFPRs].fpr());
- else
- jit.loadDouble(CCallHelpers::Address(GPRInfo::callFrameRegister, jsOffset), wasmCallingConvention().m_fprArgs[numFPRs].fpr());
- }
- ++numFPRs;
- break;
- default:
- RELEASE_ASSERT_NOT_REACHED();
- }
-
- jsOffset += sizeof(EncodedJSValue);
- }
- }
-
- if (!!info.memory) {
- GPRReg baseMemory = pinnedRegs.baseMemoryPointer;
-
- if (!Context::useFastTLS())
- jit.loadPtr(CCallHelpers::Address(wasmContextInstanceGPR, JSWebAssemblyInstance::offsetOfWasmMemory()), baseMemory);
- else {
- jit.loadWasmContextInstance(baseMemory);
- jit.loadPtr(CCallHelpers::Address(baseMemory, JSWebAssemblyInstance::offsetOfWasmMemory()), baseMemory);
- }
-
- if (mode != MemoryMode::Signaling) {
- const auto& sizeRegs = pinnedRegs.sizeRegisters;
- ASSERT(sizeRegs.size() >= 1);
- ASSERT(!sizeRegs[0].sizeOffset); // The following code assumes we start at 0, and calculates subsequent size registers relative to 0.
- jit.loadPtr(CCallHelpers::Address(baseMemory, Wasm::Memory::offsetOfSize()), sizeRegs[0].sizeRegister);
- for (unsigned i = 1; i < sizeRegs.size(); ++i)
- jit.add64(CCallHelpers::TrustedImm32(-sizeRegs[i].sizeOffset), sizeRegs[0].sizeRegister, sizeRegs[i].sizeRegister);
- }
-
- jit.loadPtr(CCallHelpers::Address(baseMemory, Wasm::Memory::offsetOfMemory()), baseMemory);
- }
-
- CCallHelpers::Call call = jit.threadSafePatchableNearCall();
- unsigned functionIndexSpace = functionIndex + info.importFunctionCount();
- ASSERT(functionIndexSpace < info.functionIndexSpaceSize());
- jit.addLinkTask([unlinkedWasmToWasmCalls, call, functionIndexSpace] (LinkBuffer& linkBuffer) {
- unlinkedWasmToWasmCalls->append({ linkBuffer.locationOfNearCall(call), functionIndexSpace });
- });
-
-
- for (const RegisterAtOffset& regAtOffset : registersToSpill) {
- GPRReg reg = regAtOffset.reg().gpr();
- ASSERT(reg != GPRInfo::returnValueGPR);
- ptrdiff_t offset = regAtOffset.offset();
- jit.loadPtr(CCallHelpers::Address(GPRInfo::callFrameRegister, offset), reg);
- }
-
- switch (signature.returnType()) {
- case Wasm::F32:
- jit.moveFloatTo32(FPRInfo::returnValueFPR, GPRInfo::returnValueGPR);
- break;
- case Wasm::F64:
- jit.moveDoubleTo64(FPRInfo::returnValueFPR, GPRInfo::returnValueGPR);
- break;
- default:
- break;
- }
-
- jit.emitFunctionEpilogue();
- jit.ret();
-
- return result;
-}
-
-} } // namespace JSC::Wasm
-
-#endif // ENABLE(WEBASSEMBLY)
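The deleted createJSToWasmWrapper sizes its frame by walking the signature: integer arguments consume argument GPRs, floating-point arguments consume FPRs, and anything beyond the register budget gets a pointer-sized stack slot before the total is rounded up to stack alignment. The same accounting as a stand-alone function; the register counts and 16-byte alignment below are assumed placeholders, not the real calling-convention constants from wasmCallingConvention():

    #include <cstddef>
    #include <vector>

    enum class WasmType { I32, I64, F32, F64 };

    // Assumed budgets; the real values come from the wasm calling convention.
    constexpr unsigned numArgumentGPRs = 6;
    constexpr unsigned numArgumentFPRs = 8;
    constexpr size_t stackAlignment = 16;
    constexpr size_t slotSize = sizeof(void*);

    // Stack bytes needed for the arguments that do not fit in registers,
    // rounded up to the stack alignment, as in the frame-size loop above.
    size_t argumentStackBytes(const std::vector<WasmType>& signature)
    {
        unsigned numGPRs = 0;
        unsigned numFPRs = 0;
        size_t stackBytes = 0;
        for (WasmType type : signature) {
            switch (type) {
            case WasmType::I32:
            case WasmType::I64:
                if (numGPRs >= numArgumentGPRs)
                    stackBytes += slotSize; // spilled to the callee frame
                ++numGPRs;
                break;
            case WasmType::F32:
            case WasmType::F64:
                if (numFPRs >= numArgumentFPRs)
                    stackBytes += slotSize;
                ++numFPRs;
                break;
            }
        }
        return (stackBytes + stackAlignment - 1) & ~(stackAlignment - 1);
    }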
+++ /dev/null
-/*
- * Copyright (C) 2016-2017 Apple Inc. All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions
- * are met:
- * 1. Redistributions of source code must retain the above copyright
- * notice, this list of conditions and the following disclaimer.
- * 2. Redistributions in binary form must reproduce the above copyright
- * notice, this list of conditions and the following disclaimer in the
- * documentation and/or other materials provided with the distribution.
- *
- * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
- * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
- * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
- * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
- * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
- * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
- * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
- * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
- * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
- * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-#pragma once
-
-#if ENABLE(WEBASSEMBLY)
-
-#include "InternalFunction.h"
-#include "WasmB3IRGenerator.h"
-#include "WasmFormat.h"
-#include "WasmMemory.h"
-#include "WasmModuleInformation.h"
-#include "WasmSignature.h"
-#include <wtf/Vector.h>
-
-#include <memory>
-
-namespace JSC {
-
-namespace Wasm {
-
-std::unique_ptr<InternalFunction> createJSToWasmWrapper(CompilationContext&, const Signature&, Vector<UnlinkedWasmToWasmCall>*, const ModuleInformation&, MemoryMode, uint32_t functionIndex);
-
-} } // namespace JSC::Wasm
-
-#endif // ENABLE(WEBASSEMBLY)
#include "JSWebAssemblyLinkError.h"
#include "JSWebAssemblyMemory.h"
#include "JSWebAssemblyModule.h"
+#include "WasmBinding.h"
#include "WasmModuleInformation.h"
-#include "WasmToJS.h"
#include <wtf/CurrentTime.h>
JSWebAssemblyCodeBlock* JSWebAssemblyCodeBlock::create(VM& vm, Ref<Wasm::CodeBlock> codeBlock, JSWebAssemblyModule* module)
{
const Wasm::ModuleInformation& moduleInformation = module->module().moduleInformation();
- auto* result = new (NotNull, allocateCell<JSWebAssemblyCodeBlock>(vm.heap)) JSWebAssemblyCodeBlock(vm, WTFMove(codeBlock), moduleInformation);
+ auto* result = new (NotNull, allocateCell<JSWebAssemblyCodeBlock>(vm.heap, allocationSize(moduleInformation.importFunctionCount()))) JSWebAssemblyCodeBlock(vm, WTFMove(codeBlock), moduleInformation);
result->finishCreation(vm, module);
return result;
}
m_wasmToJSExitStubs.reserveCapacity(m_codeBlock->functionImportCount());
for (unsigned importIndex = 0; importIndex < m_codeBlock->functionImportCount(); ++importIndex) {
Wasm::SignatureIndex signatureIndex = moduleInformation.importFunctionSignatureIndices.at(importIndex);
- auto binding = Wasm::wasmToJS(&vm, m_callLinkInfos, signatureIndex, importIndex);
+ auto binding = Wasm::wasmToJs(&vm, m_callLinkInfos, signatureIndex, importIndex);
if (UNLIKELY(!binding)) {
switch (binding.error()) {
case Wasm::BindingFailure::OutOfMemory:
RELEASE_ASSERT_NOT_REACHED();
}
m_wasmToJSExitStubs.uncheckedAppend(binding.value());
- m_codeBlock->importWasmToEmbedderStub(importIndex) = m_wasmToJSExitStubs[importIndex].code().executableAddress();
+ importWasmToJSStub(importIndex) = m_wasmToJSExitStubs[importIndex].code().executableAddress();
}
}
static_cast<JSWebAssemblyCodeBlock*>(cell)->JSWebAssemblyCodeBlock::~JSWebAssemblyCodeBlock();
}
+bool JSWebAssemblyCodeBlock::isSafeToRun(JSWebAssemblyMemory* memory) const
+{
+ return m_codeBlock->isSafeToRun(memory->memory().mode());
+}
+
void JSWebAssemblyCodeBlock::clearJSCallICs(VM& vm)
{
for (auto iter = m_callLinkInfos.begin(); !!iter; ++iter)
return &vm.webAssemblyCodeBlockSpace;
}
- Wasm::CodeBlock& codeBlock() { return m_codeBlock.get(); }
-
+ unsigned functionImportCount() const { return m_codeBlock->functionImportCount(); }
JSWebAssemblyModule* module() const { return m_module.get(); }
+ bool isSafeToRun(JSWebAssemblyMemory*) const;
+
void finishCreation(VM&, JSWebAssemblyModule*);
+
+    // These two callee getters are only valid once the callees have been populated.
+ Wasm::Callee& jsEntrypointCalleeFromFunctionIndexSpace(unsigned functionIndexSpace)
+ {
+ ASSERT(runnable());
+ return m_codeBlock->jsEntrypointCalleeFromFunctionIndexSpace(functionIndexSpace);
+ }
+ Wasm::WasmEntrypointLoadLocation wasmEntrypointLoadLocationFromFunctionIndexSpace(unsigned functionIndexSpace)
+ {
+ ASSERT(runnable());
+ return m_codeBlock->wasmEntrypointLoadLocationFromFunctionIndexSpace(functionIndexSpace);
+ }
+
+ Wasm::WasmEntrypointLoadLocation wasmToJsCallStubForImport(unsigned importIndex)
+ {
+ ASSERT(runnable());
+ return &importWasmToJSStub(importIndex);
+ }
+
+ static ptrdiff_t offsetOfImportWasmToJSStub(unsigned importIndex)
+ {
+ return offsetOfImportStubs() + sizeof(void*) * importIndex;
+ }
+
+ Wasm::CodeBlock& codeBlock() { return m_codeBlock.get(); }
+
void clearJSCallICs(VM&);
bool runnable() const { return !m_errorMessage; }
static void destroy(JSCell*);
static void visitChildren(JSCell*, SlotVisitor&);
+ static size_t offsetOfImportStubs()
+ {
+ return WTF::roundUpToMultipleOf<sizeof(void*)>(sizeof(JSWebAssemblyCodeBlock));
+ }
+
+ static size_t allocationSize(Checked<size_t> functionImportCount)
+ {
+ return (offsetOfImportStubs() + sizeof(void*) * functionImportCount).unsafeGet();
+ }
+
+ void*& importWasmToJSStub(unsigned importIndex)
+ {
+ return *bitwise_cast<void**>(bitwise_cast<char*>(this) + offsetOfImportWasmToJSStub(importIndex));
+ }
+
class UnconditionalFinalizer : public JSC::UnconditionalFinalizer {
void finalizeUnconditionally() override;
};
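
    The offsetOfImportStubs()/allocationSize()/importWasmToJSStub() trio above is a common
    trailing-allocation idiom: the per-import stub pointers live in extra memory allocated
    directly after the object, starting at the first pointer-aligned offset past the class
    itself, and the cell is allocated with allocationSize(importFunctionCount) so those
    slots travel with it. Below is a small standalone sketch of the same idea using a
    hypothetical type, not the WebKit class, just to show how the offset, the allocation
    size, and the indexed accessor fit together.

    // Standalone sketch of the trailing-storage idiom (hypothetical type).
    #include <cstddef>
    #include <cstdlib>
    #include <new>

    struct BlockWithStubs {
        unsigned importCount;

        // First pointer-aligned offset past the object proper.
        static size_t offsetOfStubs()
        {
            return (sizeof(BlockWithStubs) + sizeof(void*) - 1) & ~(sizeof(void*) - 1);
        }

        // Object plus one pointer slot per import.
        static size_t allocationSize(size_t importCount)
        {
            return offsetOfStubs() + sizeof(void*) * importCount;
        }

        // Reference into the trailing slot array.
        void*& stub(unsigned index)
        {
            return *reinterpret_cast<void**>(reinterpret_cast<char*>(this) + offsetOfStubs() + sizeof(void*) * index);
        }

        static BlockWithStubs* create(unsigned importCount)
        {
            void* memory = std::malloc(allocationSize(importCount));
            auto* block = new (memory) BlockWithStubs { importCount };
            for (unsigned i = 0; i < importCount; ++i)
                block->stub(i) = nullptr;           // zero the trailing slots before use
            return block;
        }
    };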
return Structure::create(vm, globalObject, prototype, TypeInfo(ObjectType, StructureFlags), info());
}
-JSWebAssemblyInstance::JSWebAssemblyInstance(VM& vm, Structure* structure, unsigned numImportFunctions, Ref<Wasm::Instance>&& instance)
+JSWebAssemblyInstance::JSWebAssemblyInstance(VM& vm, Structure* structure, unsigned numImportFunctions)
: Base(vm, structure)
- , m_instance(WTFMove(instance))
, m_vm(&vm)
- , m_wasmModule(m_instance->module())
- , m_wasmTable(m_instance->m_table.get())
- , m_globals(m_instance->m_globals.get())
, m_numImportFunctions(numImportFunctions)
{
- for (unsigned i = 0; i < m_numImportFunctions; ++i)
- default_construct_at(importFunctionInfo(i));
+ memset(importFunctions(), 0, m_numImportFunctions * sizeof(WriteBarrier<JSObject>));
}
void JSWebAssemblyInstance::finishCreation(VM& vm, JSWebAssemblyModule* module, JSModuleNamespaceObject* moduleNamespaceObject)
ASSERT(inherits(vm, info()));
m_module.set(vm, this, module);
+ const size_t extraMemorySize = globalMemoryByteSize();
+ m_globals = MallocPtr<uint64_t>::malloc(extraMemorySize);
+ heap()->reportExtraMemoryAllocated(extraMemorySize);
+
m_moduleNamespaceObject.set(vm, this, moduleNamespaceObject);
m_callee.set(vm, this, module->callee());
-
- heap()->reportExtraMemoryAllocated(m_instance->extraMemoryAllocated());
}
void JSWebAssemblyInstance::destroy(JSCell* cell)
visitor.append(thisObject->m_memory);
visitor.append(thisObject->m_table);
visitor.append(thisObject->m_callee);
- visitor.reportExtraMemoryVisited(thisObject->m_instance->extraMemoryAllocated());
+ visitor.reportExtraMemoryVisited(thisObject->globalMemoryByteSize());
for (unsigned i = 0; i < thisObject->m_numImportFunctions; ++i)
- visitor.append(thisObject->importFunctionInfo(i)->importFunction); // This also keeps the functions' JSWebAssemblyInstance alive.
+ visitor.append(thisObject->importFunctions()[i]);
}
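
    finishCreation() and visitChildren() above also show the usual extra-memory accounting
    pairing: out-of-line storage (here the malloc'd globals) is reported with
    reportExtraMemoryAllocated() when it is created and re-reported with
    reportExtraMemoryVisited() while marking, so the collector's pressure stays proportional
    to memory the cell owns but does not store inline. The sketch below is a toy version of
    that pairing; ToyHeap and ToyVisitor are hypothetical stand-ins for JSC's Heap and
    SlotVisitor, not their real interfaces.

    // Toy sketch of allocation-time vs. visit-time extra-memory reporting.
    #include <cstddef>
    #include <cstdint>
    #include <memory>

    struct ToyHeap {
        std::size_t reportedBytes { 0 };
        void reportExtraMemoryAllocated(std::size_t bytes) { reportedBytes += bytes; }
    };

    struct ToyVisitor {
        std::size_t visitedBytes { 0 };
        void reportExtraMemoryVisited(std::size_t bytes) { visitedBytes += bytes; }
    };

    struct ToyInstance {
        std::unique_ptr<uint64_t[]> globals;
        std::size_t globalCount { 0 };

        std::size_t globalMemoryByteSize() const { return globalCount * sizeof(uint64_t); }

        void finishCreation(ToyHeap& heap, std::size_t count)
        {
            globalCount = count;
            globals = std::make_unique<uint64_t[]>(count);            // out-of-line storage
            heap.reportExtraMemoryAllocated(globalMemoryByteSize());  // report at creation
        }

        void visitChildren(ToyVisitor& visitor) const
        {
            visitor.reportExtraMemoryVisited(globalMemoryByteSize()); // re-report while marking
        }
    };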
void JSWebAssemblyInstance::finalizeCreation(VM& vm, ExecState* exec, Ref<Wasm::CodeBlock>&& wasmCodeBlock)
{
- m_instance->finalizeCreation(wasmCodeBlock.copyRef());
- m_wasmCodeBlock = wasmCodeBlock.ptr();
-
auto scope = DECLARE_THROW_SCOPE(vm);
-
if (!wasmCodeBlock->runnable()) {
throwException(exec, scope, JSWebAssemblyLinkError::create(exec, vm, globalObject()->WebAssemblyLinkErrorStructure(), wasmCodeBlock->errorMessage()));
return;
}
RELEASE_ASSERT(wasmCodeBlock->isSafeToRun(memoryMode()));
- JSWebAssemblyCodeBlock* jsCodeBlock = m_module->codeBlock(memoryMode());
- if (jsCodeBlock) {
+ JSWebAssemblyCodeBlock* codeBlock = module()->codeBlock(memoryMode());
+ if (codeBlock) {
        // A CodeBlock might have already been compiled. If so, the CodeBlock
        // we are trying to compile must be the same one, because we never
        // compile a CodeBlock again once it's runnable.
- ASSERT(&jsCodeBlock->codeBlock() == wasmCodeBlock.ptr());
- m_codeBlock.set(vm, this, jsCodeBlock);
+ ASSERT(&codeBlock->codeBlock() == wasmCodeBlock.ptr());
+ m_codeBlock.set(vm, this, codeBlock);
} else {
- jsCodeBlock = JSWebAssemblyCodeBlock::