builtins/BuiltinExecutables.cpp
builtins/BuiltinExecutableCreator.cpp
+ bytecode/AccessCase.cpp
bytecode/AdaptiveInferredPropertyValueWatchpointBase.cpp
bytecode/ArithProfile.cpp
bytecode/ArrayAllocationProfile.cpp
bytecode/FunctionCodeBlock.cpp
bytecode/GetByIdStatus.cpp
bytecode/GetByIdVariant.cpp
+ bytecode/GetterSetterAccessCase.cpp
bytecode/InlineAccess.cpp
bytecode/InlineCallFrame.cpp
bytecode/InlineCallFrameSet.cpp
+ bytecode/IntrinsicGetterAccessCase.cpp
bytecode/JumpTable.cpp
bytecode/LLIntPrototypeLoadAdaptiveStructureWatchpoint.cpp
bytecode/LazyOperandValueProfile.cpp
bytecode/PreciseJumpTargets.cpp
bytecode/ProgramCodeBlock.cpp
bytecode/PropertyCondition.cpp
+ bytecode/ProxyableAccessCase.cpp
bytecode/PutByIdFlags.cpp
bytecode/PutByIdStatus.cpp
bytecode/PutByIdVariant.cpp
+2017-02-16 Keith Miller <keith_miller@apple.com>
+
+ Refactor AccessCase to be more like B3Value
+ https://bugs.webkit.org/show_bug.cgi?id=168408
+
+ Reviewed by Filip Pizlo.
+
+ This patch makes AccessCase (and new subclasses) more like B3Value. In the new system each
+ type has an associated AccessCase subclass. For instance any getter should use the
+ GetterSetterAccessCase subclass. The new system is easier to follow since you no longer need
+ to know exactly which members are used by which types. The subclass to AccessType mapping is:
+
+ GetterSetterAccessCase:
+ Getter
+ CustomAccessorGetter
+ CustomValueGetter
+ Setter
+
+ ProxyableAccessCase:
+ Load
+ Miss
+ GetGetter
+
+ IntrinsicGetterAccessCase:
+ IntrinsicGetter
+
+ AccessCase:
+ Everything else
+
+ It also has the advantage of using less memory: in the past, some cases needed
+ rare data while only using a small part of it; now each subclass carries only the fields it uses.
+
+ This patch also removes megamorphic loads and renames some TryGetById related enum values from Pure to Try.
+
+ * CMakeLists.txt:
+ * JavaScriptCore.xcodeproj/project.pbxproj:
+ * bytecode/AccessCase.cpp: Added.
+ (JSC::AccessCase::AccessCase):
+ (JSC::AccessCase::create):
+ (JSC::AccessCase::~AccessCase):
+ (JSC::AccessCase::fromStructureStubInfo):
+ (JSC::AccessCase::clone):
+ (JSC::AccessCase::commit):
+ (JSC::AccessCase::guardedByStructureCheck):
+ (JSC::AccessCase::doesCalls):
+ (JSC::AccessCase::couldStillSucceed):
+ (JSC::AccessCase::canReplace):
+ (JSC::AccessCase::dump):
+ (JSC::AccessCase::visitWeak):
+ (JSC::AccessCase::propagateTransitions):
+ (JSC::AccessCase::generateWithGuard):
+ (JSC::AccessCase::generate):
+ (JSC::AccessCase::generateImpl):
+ * bytecode/AccessCase.h: Added.
+ (JSC::AccessCase::as):
+ (JSC::AccessCase::create):
+ (JSC::AccessCase::type):
+ (JSC::AccessCase::state):
+ (JSC::AccessCase::offset):
+ (JSC::AccessCase::structure):
+ (JSC::AccessCase::newStructure):
+ (JSC::AccessCase::conditionSet):
+ (JSC::AccessCase::alternateBase):
+ (JSC::AccessCase::additionalSet):
+ (JSC::AccessCase::viaProxy):
+ (JSC::AccessCase::isGetter):
+ (JSC::AccessCase::isAccessor):
+ (JSC::AccessCase::dumpImpl):
+ (JSC::AccessCase::resetState):
+ * bytecode/GetByIdStatus.cpp:
+ (JSC::GetByIdStatus::computeForStubInfoWithoutExitSiteFeedback):
+ * bytecode/GetterSetterAccessCase.cpp: Added.
+ (JSC::GetterSetterAccessCase::GetterSetterAccessCase):
+ (JSC::GetterSetterAccessCase::create):
+ (JSC::GetterSetterAccessCase::~GetterSetterAccessCase):
+ (JSC::GetterSetterAccessCase::clone):
+ (JSC::GetterSetterAccessCase::alternateBase):
+ (JSC::GetterSetterAccessCase::dumpImpl):
+ (JSC::GetterSetterAccessCase::emitDOMJITGetter):
+ * bytecode/GetterSetterAccessCase.h: Added.
+ (JSC::GetterSetterAccessCase::callLinkInfo):
+ (JSC::GetterSetterAccessCase::customSlotBase):
+ (JSC::GetterSetterAccessCase::domJIT):
+ * bytecode/IntrinsicGetterAccessCase.cpp: Added.
+ (JSC::IntrinsicGetterAccessCase::IntrinsicGetterAccessCase):
+ (JSC::IntrinsicGetterAccessCase::create):
+ (JSC::IntrinsicGetterAccessCase::~IntrinsicGetterAccessCase):
+ (JSC::IntrinsicGetterAccessCase::clone):
+ * bytecode/IntrinsicGetterAccessCase.h: Added.
+ (JSC::IntrinsicGetterAccessCase::intrinsicFunction):
+ (JSC::IntrinsicGetterAccessCase::intrinsic):
+ * bytecode/PolymorphicAccess.cpp:
+ (JSC::PolymorphicAccess::regenerate):
+ (WTF::printInternal):
+ (JSC::AccessCase::AccessCase): Deleted.
+ (JSC::AccessCase::tryGet): Deleted.
+ (JSC::AccessCase::get): Deleted.
+ (JSC::AccessCase::megamorphicLoad): Deleted.
+ (JSC::AccessCase::replace): Deleted.
+ (JSC::AccessCase::transition): Deleted.
+ (JSC::AccessCase::setter): Deleted.
+ (JSC::AccessCase::in): Deleted.
+ (JSC::AccessCase::getLength): Deleted.
+ (JSC::AccessCase::getIntrinsic): Deleted.
+ (JSC::AccessCase::~AccessCase): Deleted.
+ (JSC::AccessCase::fromStructureStubInfo): Deleted.
+ (JSC::AccessCase::clone): Deleted.
+ (JSC::AccessCase::commit): Deleted.
+ (JSC::AccessCase::guardedByStructureCheck): Deleted.
+ (JSC::AccessCase::alternateBase): Deleted.
+ (JSC::AccessCase::doesCalls): Deleted.
+ (JSC::AccessCase::couldStillSucceed): Deleted.
+ (JSC::AccessCase::canBeReplacedByMegamorphicLoad): Deleted.
+ (JSC::AccessCase::canReplace): Deleted.
+ (JSC::AccessCase::dump): Deleted.
+ (JSC::AccessCase::visitWeak): Deleted.
+ (JSC::AccessCase::propagateTransitions): Deleted.
+ (JSC::AccessCase::generateWithGuard): Deleted.
+ (JSC::AccessCase::generate): Deleted.
+ (JSC::AccessCase::generateImpl): Deleted.
+ (JSC::AccessCase::emitDOMJITGetter): Deleted.
+ * bytecode/PolymorphicAccess.h:
+ (JSC::AccessCase::type): Deleted.
+ (JSC::AccessCase::state): Deleted.
+ (JSC::AccessCase::offset): Deleted.
+ (JSC::AccessCase::viaProxy): Deleted.
+ (JSC::AccessCase::structure): Deleted.
+ (JSC::AccessCase::newStructure): Deleted.
+ (JSC::AccessCase::conditionSet): Deleted.
+ (JSC::AccessCase::intrinsicFunction): Deleted.
+ (JSC::AccessCase::intrinsic): Deleted.
+ (JSC::AccessCase::domJIT): Deleted.
+ (JSC::AccessCase::additionalSet): Deleted.
+ (JSC::AccessCase::customSlotBase): Deleted.
+ (JSC::AccessCase::isGetter): Deleted.
+ (JSC::AccessCase::callLinkInfo): Deleted.
+ (JSC::AccessCase::RareData::RareData): Deleted.
+ * bytecode/ProxyableAccessCase.cpp: Added.
+ (JSC::ProxyableAccessCase::ProxyableAccessCase):
+ (JSC::ProxyableAccessCase::create):
+ (JSC::ProxyableAccessCase::~ProxyableAccessCase):
+ (JSC::ProxyableAccessCase::clone):
+ (JSC::ProxyableAccessCase::dumpImpl):
+ * bytecode/ProxyableAccessCase.h: Added.
+ * bytecode/PutByIdStatus.cpp:
+ (JSC::PutByIdStatus::computeForStubInfo):
+ * bytecode/StructureStubInfo.cpp:
+ (JSC::StructureStubInfo::reset):
+ * bytecode/StructureStubInfo.h:
+ * dfg/DFGByteCodeParser.cpp:
+ (JSC::DFG::ByteCodeParser::parseBlock):
+ * dfg/DFGSpeculativeJIT.cpp:
+ (JSC::DFG::SpeculativeJIT::compileTryGetById):
+ * ftl/FTLLowerDFGToB3.cpp:
+ (JSC::FTL::DFG::LowerDFGToB3::compileNode):
+ (JSC::FTL::DFG::LowerDFGToB3::compileGetById):
+ * jit/IntrinsicEmitter.cpp:
+ (JSC::IntrinsicGetterAccessCase::canEmitIntrinsicGetter):
+ (JSC::IntrinsicGetterAccessCase::emitIntrinsicGetter):
+ (JSC::AccessCase::canEmitIntrinsicGetter): Deleted.
+ (JSC::AccessCase::emitIntrinsicGetter): Deleted.
+ * jit/JITOperations.cpp:
+ * jit/JITPropertyAccess.cpp:
+ (JSC::JIT::emit_op_try_get_by_id):
+ * jit/JITPropertyAccess32_64.cpp:
+ (JSC::JIT::emit_op_try_get_by_id):
+ * jit/Repatch.cpp:
+ (JSC::tryCacheGetByID):
+ (JSC::tryCachePutByID):
+ (JSC::tryRepatchIn):
+ * jit/Repatch.h:
+ * runtime/Options.h:
+
2017-02-16 Filip Pizlo <fpizlo@apple.com>
JSONParseTest needs to hold the lock when the VM is destroyed
534902851C7276B70012BCB8 /* TypedArrayCTest.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 534902821C7242C80012BCB8 /* TypedArrayCTest.cpp */; };
534C457C1BC72411007476A7 /* JSTypedArrayViewConstructor.h in Headers */ = {isa = PBXBuildFile; fileRef = 534C457B1BC72411007476A7 /* JSTypedArrayViewConstructor.h */; };
534C457E1BC72549007476A7 /* JSTypedArrayViewConstructor.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 534C457D1BC72549007476A7 /* JSTypedArrayViewConstructor.cpp */; };
+ 534E034E1E4D4B1600213F64 /* AccessCase.h in Headers */ = {isa = PBXBuildFile; fileRef = 534E034D1E4D4B1600213F64 /* AccessCase.h */; };
+ 534E03501E4D95ED00213F64 /* AccessCase.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 534E034F1E4D95ED00213F64 /* AccessCase.cpp */; };
+ 534E03541E53BD2900213F64 /* IntrinsicGetterAccessCase.h in Headers */ = {isa = PBXBuildFile; fileRef = 534E03531E53BD2900213F64 /* IntrinsicGetterAccessCase.h */; };
+ 534E03561E53BEDE00213F64 /* ProxyableAccessCase.h in Headers */ = {isa = PBXBuildFile; fileRef = 534E03551E53BEDE00213F64 /* ProxyableAccessCase.h */; };
+ 534E03581E53BF2F00213F64 /* GetterSetterAccessCase.h in Headers */ = {isa = PBXBuildFile; fileRef = 534E03571E53BF2F00213F64 /* GetterSetterAccessCase.h */; };
53529A4C1C457B75000B49C6 /* APIUtils.h in Headers */ = {isa = PBXBuildFile; fileRef = 53529A4B1C457B75000B49C6 /* APIUtils.h */; };
535557141D9D9EA5006D583B /* WasmMemory.h in Headers */ = {isa = PBXBuildFile; fileRef = 535557131D9D9EA5006D583B /* WasmMemory.h */; };
535557161D9DFA32006D583B /* WasmMemory.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 535557151D9DFA32006D583B /* WasmMemory.cpp */; };
5370B4F61BF26205005C40FC /* AdaptiveInferredPropertyValueWatchpointBase.h in Headers */ = {isa = PBXBuildFile; fileRef = 5370B4F41BF25EA2005C40FC /* AdaptiveInferredPropertyValueWatchpointBase.h */; };
53917E7B1B7906FA000EBD33 /* JSGenericTypedArrayViewPrototypeFunctions.h in Headers */ = {isa = PBXBuildFile; fileRef = 53917E7A1B7906E4000EBD33 /* JSGenericTypedArrayViewPrototypeFunctions.h */; };
539FB8BA1C99DA7C00940FA1 /* JSArrayInlines.h in Headers */ = {isa = PBXBuildFile; fileRef = 539FB8B91C99DA7C00940FA1 /* JSArrayInlines.h */; };
+ 53B0BE341E561AC900A8FC29 /* GetterSetterAccessCase.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 53B0BE331E561AC900A8FC29 /* GetterSetterAccessCase.cpp */; };
+ 53B0BE361E561B0900A8FC29 /* ProxyableAccessCase.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 53B0BE351E561B0900A8FC29 /* ProxyableAccessCase.cpp */; };
+ 53B0BE381E561B2400A8FC29 /* IntrinsicGetterAccessCase.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 53B0BE371E561B2400A8FC29 /* IntrinsicGetterAccessCase.cpp */; };
53D444DC1DAF08AB00B92784 /* B3WasmAddressValue.h in Headers */ = {isa = PBXBuildFile; fileRef = 53D444DB1DAF08AB00B92784 /* B3WasmAddressValue.h */; };
53D444DE1DAF09A000B92784 /* B3WasmAddressValue.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 53D444DD1DAF09A000B92784 /* B3WasmAddressValue.cpp */; };
53F40E851D58F9770099A1B6 /* WasmSections.h in Headers */ = {isa = PBXBuildFile; fileRef = 53F40E841D58F9770099A1B6 /* WasmSections.h */; };
534C457A1BC703DC007476A7 /* TypedArrayConstructor.js */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.javascript; path = TypedArrayConstructor.js; sourceTree = "<group>"; };
534C457B1BC72411007476A7 /* JSTypedArrayViewConstructor.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = JSTypedArrayViewConstructor.h; sourceTree = "<group>"; };
534C457D1BC72549007476A7 /* JSTypedArrayViewConstructor.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = JSTypedArrayViewConstructor.cpp; sourceTree = "<group>"; };
+ 534E034D1E4D4B1600213F64 /* AccessCase.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = AccessCase.h; sourceTree = "<group>"; };
+ 534E034F1E4D95ED00213F64 /* AccessCase.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = AccessCase.cpp; sourceTree = "<group>"; };
+ 534E03531E53BD2900213F64 /* IntrinsicGetterAccessCase.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = IntrinsicGetterAccessCase.h; sourceTree = "<group>"; };
+ 534E03551E53BEDE00213F64 /* ProxyableAccessCase.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = ProxyableAccessCase.h; sourceTree = "<group>"; };
+ 534E03571E53BF2F00213F64 /* GetterSetterAccessCase.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GetterSetterAccessCase.h; sourceTree = "<group>"; };
53529A4B1C457B75000B49C6 /* APIUtils.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = APIUtils.h; sourceTree = "<group>"; };
535557131D9D9EA5006D583B /* WasmMemory.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = WasmMemory.h; sourceTree = "<group>"; };
535557151D9DFA32006D583B /* WasmMemory.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = WasmMemory.cpp; sourceTree = "<group>"; };
53917E831B791CB8000EBD33 /* TypedArrayPrototype.js */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.javascript; name = TypedArrayPrototype.js; path = builtins/TypedArrayPrototype.js; sourceTree = SOURCE_ROOT; };
539EB0711D553DF800C82EF7 /* testWasm.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = testWasm.cpp; sourceTree = "<group>"; };
539FB8B91C99DA7C00940FA1 /* JSArrayInlines.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = JSArrayInlines.h; sourceTree = "<group>"; };
+ 53B0BE331E561AC900A8FC29 /* GetterSetterAccessCase.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = GetterSetterAccessCase.cpp; sourceTree = "<group>"; };
+ 53B0BE351E561B0900A8FC29 /* ProxyableAccessCase.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = ProxyableAccessCase.cpp; sourceTree = "<group>"; };
+ 53B0BE371E561B2400A8FC29 /* IntrinsicGetterAccessCase.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = IntrinsicGetterAccessCase.cpp; sourceTree = "<group>"; };
53D444DB1DAF08AB00B92784 /* B3WasmAddressValue.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = B3WasmAddressValue.h; path = b3/B3WasmAddressValue.h; sourceTree = "<group>"; };
53D444DD1DAF09A000B92784 /* B3WasmAddressValue.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; name = B3WasmAddressValue.cpp; path = b3/B3WasmAddressValue.cpp; sourceTree = "<group>"; };
53F256E11B87E28000B4B768 /* JSTypedArrayViewPrototype.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = JSTypedArrayViewPrototype.cpp; sourceTree = "<group>"; };
tabWidth = 4;
usesTabs = 0;
};
+ 534E03521E53BBA900213F64 /* accessCase */ = {
+ isa = PBXGroup;
+ children = (
+ 534E034F1E4D95ED00213F64 /* AccessCase.cpp */,
+ 534E034D1E4D4B1600213F64 /* AccessCase.h */,
+ 53B0BE371E561B2400A8FC29 /* IntrinsicGetterAccessCase.cpp */,
+ 534E03531E53BD2900213F64 /* IntrinsicGetterAccessCase.h */,
+ 53B0BE351E561B0900A8FC29 /* ProxyableAccessCase.cpp */,
+ 534E03551E53BEDE00213F64 /* ProxyableAccessCase.h */,
+ 53B0BE331E561AC900A8FC29 /* GetterSetterAccessCase.cpp */,
+ 534E03571E53BF2F00213F64 /* GetterSetterAccessCase.h */,
+ );
+ name = accessCase;
+ sourceTree = "<group>";
+ };
650FDF8D09D0FCA700769E54 /* Derived Sources */ = {
isa = PBXGroup;
children = (
969A078F0ED1D3AE00F1F681 /* bytecode */ = {
isa = PBXGroup;
children = (
+ 534E03521E53BBA900213F64 /* accessCase */,
5370B4F31BF25EA2005C40FC /* AdaptiveInferredPropertyValueWatchpointBase.cpp */,
5370B4F41BF25EA2005C40FC /* AdaptiveInferredPropertyValueWatchpointBase.h */,
79A228331D35D71E00D8E067 /* ArithProfile.cpp */,
0F6B8AE51C4EFE1700969052 /* B3FixSSA.h in Headers */,
0F725CB01C506D3B00AD943A /* B3FoldPathConstants.h in Headers */,
0FEC85161BDACDAC0080FF74 /* B3FrequencyClass.h in Headers */,
+ 534E03561E53BEDE00213F64 /* ProxyableAccessCase.h in Headers */,
0FEC85171BDACDAC0080FF74 /* B3FrequentedBlock.h in Headers */,
0FEC85191BDACDAC0080FF74 /* B3Generate.h in Headers */,
0FEC851A1BDACDAC0080FF74 /* B3GenericFrequentedBlock.h in Headers */,
0FEA0A12170513DB00BB722C /* FTLState.h in Headers */,
A7FCC26D17A0B6AA00786D1A /* FTLSwitchCase.h in Headers */,
0F235BE217178E1C00690C7F /* FTLThunks.h in Headers */,
+ 534E03541E53BD2900213F64 /* IntrinsicGetterAccessCase.h in Headers */,
0FEA0A201708B00700BB722C /* FTLTypedPointer.h in Headers */,
0FDB2CCA173DA523007B3C1B /* FTLValueFromBlock.h in Headers */,
0F5A6284188C98D40072C9DF /* FTLValueRange.h in Headers */,
52B310FB1974AE610080857C /* FunctionHasExecutedCache.h in Headers */,
FE4BFF2C1AD476E700088F87 /* FunctionOverrides.h in Headers */,
BC18C4050E16F5CD00B34460 /* FunctionPrototype.h in Headers */,
+ 534E03581E53BF2F00213F64 /* GetterSetterAccessCase.h in Headers */,
62D2D3901ADF103F000206C1 /* FunctionRareData.h in Headers */,
FEA0C4031CDD7D1D00481991 /* FunctionWhitelist.h in Headers */,
2AACE63D18CA5A0300ED0191 /* GCActivityCallback.h in Headers */,
A503FA1E188E0FB000110F14 /* JSJavaScriptCallFramePrototype.h in Headers */,
7013CA8C1B491A9400CAE613 /* JSJob.h in Headers */,
BC18C4160E16F5CD00B34460 /* JSLexicalEnvironment.h in Headers */,
+ 534E034E1E4D4B1600213F64 /* AccessCase.h in Headers */,
BC18C4230E16F5CD00B34460 /* JSLock.h in Headers */,
C25D709C16DE99F400FCA6BC /* JSManagedValue.h in Headers */,
2A4BB7F318A41179008A0FCD /* JSManagedValueInternal.h in Headers */,
0F235BE117178E1C00690C7F /* FTLThunks.cpp in Sources */,
0F5A6283188C98D40072C9DF /* FTLValueRange.cpp in Sources */,
2A83638918D7D0FE0000EBCC /* FullGCActivityCallback.cpp in Sources */,
+ 53B0BE381E561B2400A8FC29 /* IntrinsicGetterAccessCase.cpp in Sources */,
14AD911B1DCA97FD0014F9FE /* FunctionCodeBlock.cpp in Sources */,
147F39CB107EC37600427A48 /* FunctionConstructor.cpp in Sources */,
147341E31DC2CE9600AA29BA /* FunctionExecutable.cpp in Sources */,
53486BBB1C18E84500F6F3AF /* JSTypedArray.cpp in Sources */,
0F2B66FA17B6B5AB00A7AE3F /* JSTypedArrayConstructors.cpp in Sources */,
0F2B66FC17B6B5AB00A7AE3F /* JSTypedArrayPrototypes.cpp in Sources */,
+ 53B0BE361E561B0900A8FC29 /* ProxyableAccessCase.cpp in Sources */,
0F2B66FE17B6B5AB00A7AE3F /* JSTypedArrays.cpp in Sources */,
534C457E1BC72549007476A7 /* JSTypedArrayViewConstructor.cpp in Sources */,
DEA7E2441BBC677200D78440 /* JSTypedArrayViewPrototype.cpp in Sources */,
AD2FCBE81DB58DAD00B3E736 /* JSWebAssemblyRuntimeError.cpp in Sources */,
AD2FCBEA1DB58DAD00B3E736 /* JSWebAssemblyTable.cpp in Sources */,
1442566115EDE98D0066A49B /* JSWithScope.cpp in Sources */,
+ 534E03501E4D95ED00213F64 /* AccessCase.cpp in Sources */,
86E3C618167BABEE006D760A /* JSWrapperMap.mm in Sources */,
14280870107EC1340013E7B2 /* JSWrapperObject.cpp in Sources */,
BCFD8C920EEB2EE700283848 /* JumpTable.cpp in Sources */,
1ACF7377171CA6FB00C9BB1E /* Weak.cpp in Sources */,
14E84F9E14EE1ACC00D6D5D4 /* WeakBlock.cpp in Sources */,
14F7256514EE265E00B1652B /* WeakHandleOwner.cpp in Sources */,
+ 53B0BE341E561AC900A8FC29 /* GetterSetterAccessCase.cpp in Sources */,
A7CA3AE317DA41AE006538AF /* WeakMapConstructor.cpp in Sources */,
A7CA3AEB17DA5168006538AF /* WeakMapData.cpp in Sources */,
A7CA3AE517DA41AE006538AF /* WeakMapPrototype.cpp in Sources */,
--- /dev/null
+/*
+ * Copyright (C) 2017 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+ * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "config.h"
+#include "AccessCase.h"
+
+#if ENABLE(JIT)
+
+#include "CCallHelpers.h"
+#include "CallLinkInfo.h"
+#include "DOMJITGetterSetter.h"
+#include "DirectArguments.h"
+#include "GetterSetter.h"
+#include "GetterSetterAccessCase.h"
+#include "HeapInlines.h"
+#include "IntrinsicGetterAccessCase.h"
+#include "JSCJSValueInlines.h"
+#include "LinkBuffer.h"
+#include "PolymorphicAccess.h"
+#include "ScopedArguments.h"
+#include "ScratchRegisterAllocator.h"
+#include "SlotVisitorInlines.h"
+#include "StructureStubInfo.h"
+
+namespace JSC {
+
+static const bool verbose = false;
+
+// Base-class constructor: records the access type and property offset, stores
+// the (possibly null) structure as a write-barriered pointer owned by 'owner',
+// and copies the object-property condition set guarding this case.
+AccessCase::AccessCase(VM& vm, JSCell* owner, AccessType type, PropertyOffset offset, Structure* structure, const ObjectPropertyConditionSet& conditionSet)
+    : m_type(type)
+    , m_offset(offset)
+{
+    // setMayBeNull tolerates a null structure (e.g. length-style accesses that
+    // are not guarded by a structure check).
+    m_structure.setMayBeNull(vm, owner, structure);
+    m_conditionSet = conditionSet;
+}
+
+// Factory for the "simple" access types handled by the AccessCase base class
+// itself. Types that need a subclass (getters/setters, proxyable loads,
+// intrinsic getters) must not come through here; the switch asserts on them.
+std::unique_ptr<AccessCase> AccessCase::create(VM& vm, JSCell* owner, AccessType type, PropertyOffset offset, Structure* structure, const ObjectPropertyConditionSet& conditionSet)
+{
+    switch (type) {
+    case InHit:
+    case InMiss:
+    case ArrayLength:
+    case StringLength:
+    case DirectArgumentsLength:
+    case ScopedArgumentsLength:
+    case Replace:
+        break;
+    default:
+        ASSERT_NOT_REACHED();
+    };
+
+    return std::unique_ptr<AccessCase>(new AccessCase(vm, owner, type, offset, structure, conditionSet));
+}
+
+// Factory for Transition cases (a put that adds a property and moves the
+// object from oldStructure to newStructure). Returns nullptr -- i.e. declines
+// to cache -- when the transition would require reallocating out-of-line
+// storage but the target machine has too few registers to generate that path.
+std::unique_ptr<AccessCase> AccessCase::create(
+    VM& vm, JSCell* owner, PropertyOffset offset, Structure* oldStructure, Structure* newStructure,
+    const ObjectPropertyConditionSet& conditionSet)
+{
+    RELEASE_ASSERT(oldStructure == newStructure->previousID());
+
+    // Skip optimizing the case where we need a realloc, if we don't have
+    // enough registers to make it happen.
+    if (GPRInfo::numberOfRegisters < 6
+        && oldStructure->outOfLineCapacity() != newStructure->outOfLineCapacity()
+        && oldStructure->outOfLineCapacity()) {
+        return nullptr;
+    }
+
+    // Note: the case stores the *new* structure; the old one is recoverable
+    // via newStructure->previousID() (asserted above).
+    return std::unique_ptr<AccessCase>(new AccessCase(vm, owner, Transition, offset, newStructure, conditionSet));
+}
+
+// Out-of-line empty destructor (subclasses such as GetterSetterAccessCase
+// override/extend destruction behavior elsewhere).
+AccessCase::~AccessCase()
+{
+}
+
+// Reconstructs an AccessCase from the monomorphic data cached in a
+// StructureStubInfo, used when a monomorphic stub is upgraded to a
+// polymorphic one. Only the self-load and self-replace cache types can be
+// converted; any other cache type yields nullptr.
+std::unique_ptr<AccessCase> AccessCase::fromStructureStubInfo(
+    VM& vm, JSCell* owner, StructureStubInfo& stubInfo)
+{
+    switch (stubInfo.cacheType) {
+    case CacheType::GetByIdSelf:
+        return ProxyableAccessCase::create(vm, owner, Load, stubInfo.u.byIdSelf.offset, stubInfo.u.byIdSelf.baseObjectStructure.get());
+
+    case CacheType::PutByIdReplace:
+        return AccessCase::create(vm, owner, Replace, stubInfo.u.byIdSelf.offset, stubInfo.u.byIdSelf.baseObjectStructure.get());
+
+    default:
+        return nullptr;
+    }
+}
+
+// Copies this case and clears the copy's generation state via resetState(),
+// so the clone can be committed/generated independently of the original.
+std::unique_ptr<AccessCase> AccessCase::clone() const
+{
+    std::unique_ptr<AccessCase> result(new AccessCase(*this));
+    result->resetState();
+    return result;
+}
+
+// Moves this case to the Committed state and returns the watchpoint sets the
+// caller must register: the impure-property set for 'ident' (if the structure
+// or condition set needs one) plus any type-specific additional set.
+Vector<WatchpointSet*, 2> AccessCase::commit(VM& vm, const Identifier& ident)
+{
+    // It's fine to commit something that is already committed. That arises when we switch to using
+    // newly allocated watchpoints. When it happens, it's not efficient - but we think that's OK
+    // because most AccessCases have no extra watchpoints anyway.
+    RELEASE_ASSERT(m_state == Primordial || m_state == Committed);
+
+    Vector<WatchpointSet*, 2> result;
+
+    if ((structure() && structure()->needImpurePropertyWatchpoint())
+        || m_conditionSet.needImpurePropertyWatchpoint())
+        result.append(vm.ensureWatchpointSetForImpureProperty(ident));
+
+    if (additionalSet())
+        result.append(additionalSet());
+
+    m_state = Committed;
+
+    return result;
+}
+
+// True if this case is dispatched via an exact structure check. Proxied
+// accesses and the length-style cases use cell-type checks instead (see
+// generateWithGuard), so they return false.
+bool AccessCase::guardedByStructureCheck() const
+{
+    if (viaProxy())
+        return false;
+
+    switch (m_type) {
+    case ArrayLength:
+    case StringLength:
+    case DirectArgumentsLength:
+    case ScopedArgumentsLength:
+        return false;
+    default:
+        return true;
+    }
+}
+
+// True if generated code for this case can make a call. Getter/setter and
+// custom accessor types always call. A Transition calls only when it must
+// reallocate out-of-line storage while the structure could have an indexing
+// header; in that situation the new structure is also reported via
+// cellsToMark (when provided) so the GC keeps it alive.
+bool AccessCase::doesCalls(Vector<JSCell*>* cellsToMark) const
+{
+    switch (type()) {
+    case Getter:
+    case Setter:
+    case CustomValueGetter:
+    case CustomAccessorGetter:
+    case CustomValueSetter:
+    case CustomAccessorSetter:
+        return true;
+    case Transition:
+        if (newStructure()->outOfLineCapacity() != structure()->outOfLineCapacity()
+            && structure()->couldHaveIndexingHeader()) {
+            if (cellsToMark)
+                cellsToMark->append(newStructure());
+            return true;
+        }
+        return false;
+    default:
+        return false;
+    }
+}
+
+// True if the guarding conditions can still hold, assuming the impure
+// property watchpoints registered in commit() remain intact.
+bool AccessCase::couldStillSucceed() const
+{
+    return m_conditionSet.structuresEnsureValidityAssumingImpurePropertyWatchpoint();
+}
+
+// Returns true if 'other' is made superfluous by '*this': length-style cases
+// subsume same-typed cases; structure-checked cases subsume other
+// structure-checked cases on the same structure.
+bool AccessCase::canReplace(const AccessCase& other) const
+{
+    // This puts in a good effort to try to figure out if 'other' is made superfluous by '*this'.
+    // It's fine for this to return false if it's in doubt.
+
+    switch (type()) {
+    case ArrayLength:
+    case StringLength:
+    case DirectArgumentsLength:
+    case ScopedArgumentsLength:
+        return other.type() == type();
+    default:
+        if (!guardedByStructureCheck() || !other.guardedByStructureCheck())
+            return false;
+
+        return structure() == other.structure();
+    }
+}
+
+// Debug printing: type, state, structure(s), offset, and conditions, followed
+// by subclass-specific fields via the dumpImpl() hook.
+void AccessCase::dump(PrintStream& out) const
+{
+    out.print(m_type, ":(");
+
+    CommaPrinter comma;
+
+    out.print(comma, m_state);
+
+    // Transitions show both old and new structures; everything else shows the
+    // single stored structure if present.
+    if (m_type == Transition)
+        out.print(comma, "structure = ", pointerDump(structure()), " -> ", pointerDump(newStructure()));
+    else if (m_structure)
+        out.print(comma, "structure = ", pointerDump(m_structure.get()));
+
+    if (isValidOffset(m_offset))
+        out.print(comma, "offset = ", m_offset);
+    if (!m_conditionSet.isEmpty())
+        out.print(comma, "conditions = ", m_conditionSet);
+
+    dumpImpl(out, comma);
+    out.print(")");
+}
+
+// GC hook: returns false if any weakly-referenced cell this case depends on
+// (structure, condition-set cells, custom slot base, intrinsic function) has
+// died, signalling that the case must be discarded. Also forwards visitWeak
+// to an accessor's CallLinkInfo.
+bool AccessCase::visitWeak(VM& vm) const
+{
+    if (m_structure && !Heap::isMarked(m_structure.get()))
+        return false;
+    if (!m_conditionSet.areStillLive())
+        return false;
+    if (isAccessor()) {
+        auto& accessor = this->as<GetterSetterAccessCase>();
+        if (accessor.callLinkInfo())
+            accessor.callLinkInfo()->visitWeak(vm);
+        if (accessor.customSlotBase() && !Heap::isMarked(accessor.customSlotBase()))
+            return false;
+    } else if (type() == IntrinsicGetter) {
+        auto& intrinsic = this->as<IntrinsicGetterAccessCase>();
+        if (intrinsic.intrinsicFunction() && !Heap::isMarked(intrinsic.intrinsicFunction()))
+            return false;
+    }
+
+    return true;
+}
+
+// Concurrent-GC hook: opportunistically marks the stored structure, and for
+// Transition cases keeps the new structure alive if its predecessor is
+// already marked. Returns false if marking could not be completed cheaply
+// (so the caller should revisit).
+bool AccessCase::propagateTransitions(SlotVisitor& visitor) const
+{
+    bool result = true;
+
+    if (m_structure)
+        result &= m_structure->markIfCheap(visitor);
+
+    switch (m_type) {
+    case Transition:
+        if (Heap::isMarkedConcurrently(m_structure->previousID()))
+            visitor.appendUnbarriered(m_structure.get());
+        else
+            result = false;
+        break;
+    default:
+        break;
+    }
+
+    return result;
+}
+
+// Emits this case's guard code, appending failure branches to 'fallThrough',
+// then emits the access itself via generateImpl(). The length-style cases
+// (Direct/ScopedArgumentsLength) emit their entire fast path inline and
+// return early without calling generateImpl(). Callers must have committed
+// the case first (asserted below); this transitions it to Generated.
+void AccessCase::generateWithGuard(
+    AccessGenerationState& state, CCallHelpers::JumpList& fallThrough)
+{
+    SuperSamplerScope superSamplerScope(false);
+
+    RELEASE_ASSERT(m_state == Committed);
+    m_state = Generated;
+
+    CCallHelpers& jit = *state.jit;
+    VM& vm = *jit.vm();
+    JSValueRegs valueRegs = state.valueRegs;
+    GPRReg baseGPR = state.baseGPR;
+    GPRReg scratchGPR = state.scratchGPR;
+
+    UNUSED_PARAM(vm);
+
+    switch (m_type) {
+    case ArrayLength: {
+        // Guard on the indexing type byte: must be an array and have a
+        // non-empty indexing shape.
+        ASSERT(!viaProxy());
+        jit.load8(CCallHelpers::Address(baseGPR, JSCell::indexingTypeAndMiscOffset()), scratchGPR);
+        fallThrough.append(
+            jit.branchTest32(
+                CCallHelpers::Zero, scratchGPR, CCallHelpers::TrustedImm32(IsArray)));
+        fallThrough.append(
+            jit.branchTest32(
+                CCallHelpers::Zero, scratchGPR, CCallHelpers::TrustedImm32(IndexingShapeMask)));
+        break;
+    }
+
+    case StringLength: {
+        // Guard on the cell type only; no structure check needed.
+        ASSERT(!viaProxy());
+        fallThrough.append(
+            jit.branch8(
+                CCallHelpers::NotEqual,
+                CCallHelpers::Address(baseGPR, JSCell::typeInfoTypeOffset()),
+                CCallHelpers::TrustedImm32(StringType)));
+        break;
+    }
+
+    case DirectArgumentsLength: {
+        ASSERT(!viaProxy());
+        fallThrough.append(
+            jit.branch8(
+                CCallHelpers::NotEqual,
+                CCallHelpers::Address(baseGPR, JSCell::typeInfoTypeOffset()),
+                CCallHelpers::TrustedImm32(DirectArgumentsType)));
+
+        // Bail if any arguments are mapped; otherwise load and box the
+        // length and succeed directly -- no generateImpl() needed.
+        fallThrough.append(
+            jit.branchTestPtr(
+                CCallHelpers::NonZero,
+                CCallHelpers::Address(baseGPR, DirectArguments::offsetOfMappedArguments())));
+        jit.load32(
+            CCallHelpers::Address(baseGPR, DirectArguments::offsetOfLength()),
+            valueRegs.payloadGPR());
+        jit.boxInt32(valueRegs.payloadGPR(), valueRegs);
+        state.succeed();
+        return;
+    }
+
+    case ScopedArgumentsLength: {
+        ASSERT(!viaProxy());
+        fallThrough.append(
+            jit.branch8(
+                CCallHelpers::NotEqual,
+                CCallHelpers::Address(baseGPR, JSCell::typeInfoTypeOffset()),
+                CCallHelpers::TrustedImm32(ScopedArgumentsType)));
+
+        // Bail if the arguments object overrode things; otherwise load and
+        // box the total length and succeed directly.
+        fallThrough.append(
+            jit.branchTest8(
+                CCallHelpers::NonZero,
+                CCallHelpers::Address(baseGPR, ScopedArguments::offsetOfOverrodeThings())));
+        jit.load32(
+            CCallHelpers::Address(baseGPR, ScopedArguments::offsetOfTotalLength()),
+            valueRegs.payloadGPR());
+        jit.boxInt32(valueRegs.payloadGPR(), valueRegs);
+        state.succeed();
+        return;
+    }
+
+    default: {
+        // All remaining types are guarded by an exact structure check --
+        // either on the base itself, or on the proxy's target for
+        // viaProxy() cases (target pointer left in scratchGPR).
+        if (viaProxy()) {
+            fallThrough.append(
+                jit.branch8(
+                    CCallHelpers::NotEqual,
+                    CCallHelpers::Address(baseGPR, JSCell::typeInfoTypeOffset()),
+                    CCallHelpers::TrustedImm32(PureForwardingProxyType)));
+
+            jit.loadPtr(CCallHelpers::Address(baseGPR, JSProxy::targetOffset()), scratchGPR);
+
+            fallThrough.append(
+                jit.branchStructure(
+                    CCallHelpers::NotEqual,
+                    CCallHelpers::Address(scratchGPR, JSCell::structureIDOffset()),
+                    structure()));
+        } else {
+            fallThrough.append(
+                jit.branchStructure(
+                    CCallHelpers::NotEqual,
+                    CCallHelpers::Address(baseGPR, JSCell::structureIDOffset()),
+                    structure()));
+        }
+        break;
+    } };
+
+    generateImpl(state);
+}
+
+// Emits the access without any guard (the caller has already established the
+// preconditions). Requires the Committed state and moves to Generated.
+void AccessCase::generate(AccessGenerationState& state)
+{
+    RELEASE_ASSERT(m_state == Committed);
+    m_state = Generated;
+
+    generateImpl(state);
+}
+
+void AccessCase::generateImpl(AccessGenerationState& state)
+{
+ SuperSamplerScope superSamplerScope(false);
+ if (verbose)
+ dataLog("\n\nGenerating code for: ", *this, "\n");
+
+ ASSERT(m_state == Generated); // We rely on the callers setting this for us.
+
+ CCallHelpers& jit = *state.jit;
+ VM& vm = *jit.vm();
+ CodeBlock* codeBlock = jit.codeBlock();
+ StructureStubInfo& stubInfo = *state.stubInfo;
+ const Identifier& ident = *state.ident;
+ JSValueRegs valueRegs = state.valueRegs;
+ GPRReg baseGPR = state.baseGPR;
+ GPRReg scratchGPR = state.scratchGPR;
+
+ ASSERT(m_conditionSet.structuresEnsureValidityAssumingImpurePropertyWatchpoint());
+
+ for (const ObjectPropertyCondition& condition : m_conditionSet) {
+ Structure* structure = condition.object()->structure();
+
+ if (condition.isWatchableAssumingImpurePropertyWatchpoint()) {
+ structure->addTransitionWatchpoint(state.addWatchpoint(condition));
+ continue;
+ }
+
+ if (!condition.structureEnsuresValidityAssumingImpurePropertyWatchpoint(structure)) {
+ // The reason why this cannot happen is that we require that PolymorphicAccess calls
+ // AccessCase::generate() only after it has verified that
+ // AccessCase::couldStillSucceed() returned true.
+
+ dataLog("This condition is no longer met: ", condition, "\n");
+ RELEASE_ASSERT_NOT_REACHED();
+ }
+
+ // We will emit code that has a weak reference that isn't otherwise listed anywhere.
+ state.weakReferences.append(WriteBarrier<JSCell>(vm, codeBlock, structure));
+
+ jit.move(CCallHelpers::TrustedImmPtr(condition.object()), scratchGPR);
+ state.failAndRepatch.append(
+ jit.branchStructure(
+ CCallHelpers::NotEqual,
+ CCallHelpers::Address(scratchGPR, JSCell::structureIDOffset()),
+ structure));
+ }
+
+ switch (m_type) {
+ case InHit:
+ case InMiss:
+ jit.boxBooleanPayload(m_type == InHit, valueRegs.payloadGPR());
+ state.succeed();
+ return;
+
+ case Miss:
+ jit.moveTrustedValue(jsUndefined(), valueRegs);
+ state.succeed();
+ return;
+
+ case Load:
+ case GetGetter:
+ case Getter:
+ case Setter:
+ case CustomValueGetter:
+ case CustomAccessorGetter:
+ case CustomValueSetter:
+ case CustomAccessorSetter: {
+ GPRReg valueRegsPayloadGPR = valueRegs.payloadGPR();
+
+ if (isValidOffset(m_offset)) {
+ Structure* currStructure;
+ if (m_conditionSet.isEmpty())
+ currStructure = structure();
+ else
+ currStructure = m_conditionSet.slotBaseCondition().object()->structure();
+ currStructure->startWatchingPropertyForReplacements(vm, offset());
+ }
+
+ GPRReg baseForGetGPR;
+ if (viaProxy()) {
+ ASSERT(m_type != CustomValueSetter || m_type != CustomAccessorSetter); // Because setters need to not trash valueRegsPayloadGPR.
+ if (m_type == Getter || m_type == Setter)
+ baseForGetGPR = scratchGPR;
+ else
+ baseForGetGPR = valueRegsPayloadGPR;
+
+ ASSERT((m_type != Getter && m_type != Setter) || baseForGetGPR != baseGPR);
+ ASSERT(m_type != Setter || baseForGetGPR != valueRegsPayloadGPR);
+
+ jit.loadPtr(
+ CCallHelpers::Address(baseGPR, JSProxy::targetOffset()),
+ baseForGetGPR);
+ } else
+ baseForGetGPR = baseGPR;
+
+ GPRReg baseForAccessGPR;
+ if (!m_conditionSet.isEmpty()) {
+ jit.move(
+ CCallHelpers::TrustedImmPtr(alternateBase()),
+ scratchGPR);
+ baseForAccessGPR = scratchGPR;
+ } else
+ baseForAccessGPR = baseForGetGPR;
+
+ GPRReg loadedValueGPR = InvalidGPRReg;
+ if (m_type != CustomValueGetter && m_type != CustomAccessorGetter && m_type != CustomValueSetter && m_type != CustomAccessorSetter) {
+ if (m_type == Load || m_type == GetGetter)
+ loadedValueGPR = valueRegsPayloadGPR;
+ else
+ loadedValueGPR = scratchGPR;
+
+ ASSERT((m_type != Getter && m_type != Setter) || loadedValueGPR != baseGPR);
+ ASSERT(m_type != Setter || loadedValueGPR != valueRegsPayloadGPR);
+
+ GPRReg storageGPR;
+ if (isInlineOffset(m_offset))
+ storageGPR = baseForAccessGPR;
+ else {
+ jit.loadPtr(
+ CCallHelpers::Address(baseForAccessGPR, JSObject::butterflyOffset()),
+ loadedValueGPR);
+ storageGPR = loadedValueGPR;
+ }
+
+#if USE(JSVALUE64)
+ jit.load64(
+ CCallHelpers::Address(storageGPR, offsetRelativeToBase(m_offset)), loadedValueGPR);
+#else
+ if (m_type == Load || m_type == GetGetter) {
+ jit.load32(
+ CCallHelpers::Address(storageGPR, offsetRelativeToBase(m_offset) + TagOffset),
+ valueRegs.tagGPR());
+ }
+ jit.load32(
+ CCallHelpers::Address(storageGPR, offsetRelativeToBase(m_offset) + PayloadOffset),
+ loadedValueGPR);
+#endif
+ }
+
+ if (m_type == Load || m_type == GetGetter) {
+ state.succeed();
+ return;
+ }
+
+ if (Options::useDOMJIT() && m_type == CustomAccessorGetter && this->as<GetterSetterAccessCase>().domJIT()) {
+ auto& access = this->as<GetterSetterAccessCase>();
+ // We do not need to emit a CheckDOM operation since the structure check ensures
+ // that the structure of the given base value is structure()! So all we need to
+ // do is perform the CheckDOM check here, at IC compile time.
+ if (structure()->classInfo()->isSubClassOf(access.domJIT()->thisClassInfo())) {
+ access.emitDOMJITGetter(state, baseForGetGPR);
+ return;
+ }
+ }
+
+ // Stuff for custom getters/setters.
+ CCallHelpers::Call operationCall;
+
+ // Stuff for JS getters/setters.
+ CCallHelpers::DataLabelPtr addressOfLinkFunctionCheck;
+ CCallHelpers::Call fastPathCall;
+ CCallHelpers::Call slowPathCall;
+
+ // This also does the necessary calculations of whether or not we're an
+ // exception handling call site.
+ AccessGenerationState::SpillState spillState = state.preserveLiveRegistersToStackForCall();
+
+ auto restoreLiveRegistersFromStackForCall = [&](AccessGenerationState::SpillState& spillState, bool callHasReturnValue) {
+ RegisterSet dontRestore;
+ if (callHasReturnValue) {
+ // This is the result value. We don't want to overwrite the result with what we stored to the stack.
+ // We sometimes have to store it to the stack just in case we throw an exception and need the original value.
+ dontRestore.set(valueRegs);
+ }
+ state.restoreLiveRegistersFromStackForCall(spillState, dontRestore);
+ };
+
+ jit.store32(
+ CCallHelpers::TrustedImm32(state.callSiteIndexForExceptionHandlingOrOriginal().bits()),
+ CCallHelpers::tagFor(static_cast<VirtualRegister>(CallFrameSlot::argumentCount)));
+
+ if (m_type == Getter || m_type == Setter) {
+ auto& access = this->as<GetterSetterAccessCase>();
+ ASSERT(baseGPR != loadedValueGPR);
+ ASSERT(m_type != Setter || (baseGPR != valueRegsPayloadGPR && loadedValueGPR != valueRegsPayloadGPR));
+
+ // Create a JS call using a JS call inline cache. Assume that:
+ //
+ // - SP is aligned and represents the extent of the calling compiler's stack usage.
+ //
+ // - FP is set correctly (i.e. it points to the caller's call frame header).
+ //
+ // - SP - FP is an aligned difference.
+ //
+ // - Any byte between FP (exclusive) and SP (inclusive) could be live in the calling
+ // code.
+ //
+ // Therefore, we temporarily grow the stack for the purpose of the call and then
+ // shrink it after.
+
+ state.setSpillStateForJSGetterSetter(spillState);
+
+ RELEASE_ASSERT(!access.callLinkInfo());
+ access.m_callLinkInfo = std::make_unique<CallLinkInfo>();
+
+ // FIXME: If we generated a polymorphic call stub that jumped back to the getter
+ // stub, which then jumped back to the main code, then we'd have a reachability
+ // situation that the GC doesn't know about. The GC would ensure that the polymorphic
+ // call stub stayed alive, and it would ensure that the main code stayed alive, but
+ // it wouldn't know that the getter stub was alive. Ideally JIT stub routines would
+ // be GC objects, and then we'd be able to say that the polymorphic call stub has a
+ // reference to the getter stub.
+ // https://bugs.webkit.org/show_bug.cgi?id=148914
+ access.callLinkInfo()->disallowStubs();
+
+ access.callLinkInfo()->setUpCall(
+ CallLinkInfo::Call, stubInfo.codeOrigin, loadedValueGPR);
+
+ CCallHelpers::JumpList done;
+
+ // There is a "this" argument.
+ unsigned numberOfParameters = 1;
+ // ... and a value argument if we're calling a setter.
+ if (m_type == Setter)
+ numberOfParameters++;
+
+ // Get the accessor; if there ain't one then the result is jsUndefined().
+ if (m_type == Setter) {
+ jit.loadPtr(
+ CCallHelpers::Address(loadedValueGPR, GetterSetter::offsetOfSetter()),
+ loadedValueGPR);
+ } else {
+ jit.loadPtr(
+ CCallHelpers::Address(loadedValueGPR, GetterSetter::offsetOfGetter()),
+ loadedValueGPR);
+ }
+
+ CCallHelpers::Jump returnUndefined = jit.branchTestPtr(
+ CCallHelpers::Zero, loadedValueGPR);
+
+ unsigned numberOfRegsForCall = CallFrame::headerSizeInRegisters + numberOfParameters;
+ unsigned numberOfBytesForCall = numberOfRegsForCall * sizeof(Register) - sizeof(CallerFrameAndPC);
+
+ unsigned alignedNumberOfBytesForCall =
+ WTF::roundUpToMultipleOf(stackAlignmentBytes(), numberOfBytesForCall);
+
+ jit.subPtr(
+ CCallHelpers::TrustedImm32(alignedNumberOfBytesForCall),
+ CCallHelpers::stackPointerRegister);
+
+ CCallHelpers::Address calleeFrame = CCallHelpers::Address(
+ CCallHelpers::stackPointerRegister,
+ -static_cast<ptrdiff_t>(sizeof(CallerFrameAndPC)));
+
+ jit.store32(
+ CCallHelpers::TrustedImm32(numberOfParameters),
+ calleeFrame.withOffset(CallFrameSlot::argumentCount * sizeof(Register) + PayloadOffset));
+
+ jit.storeCell(
+ loadedValueGPR, calleeFrame.withOffset(CallFrameSlot::callee * sizeof(Register)));
+
+ jit.storeCell(
+ baseGPR,
+ calleeFrame.withOffset(virtualRegisterForArgument(0).offset() * sizeof(Register)));
+
+ if (m_type == Setter) {
+ jit.storeValue(
+ valueRegs,
+ calleeFrame.withOffset(
+ virtualRegisterForArgument(1).offset() * sizeof(Register)));
+ }
+
+ CCallHelpers::Jump slowCase = jit.branchPtrWithPatch(
+ CCallHelpers::NotEqual, loadedValueGPR, addressOfLinkFunctionCheck,
+ CCallHelpers::TrustedImmPtr(0));
+
+ fastPathCall = jit.nearCall();
+ if (m_type == Getter)
+ jit.setupResults(valueRegs);
+ done.append(jit.jump());
+
+ slowCase.link(&jit);
+ jit.move(loadedValueGPR, GPRInfo::regT0);
+#if USE(JSVALUE32_64)
+ // We *always* know that the getter/setter, if non-null, is a cell.
+ jit.move(CCallHelpers::TrustedImm32(JSValue::CellTag), GPRInfo::regT1);
+#endif
+ jit.move(CCallHelpers::TrustedImmPtr(access.callLinkInfo()), GPRInfo::regT2);
+ slowPathCall = jit.nearCall();
+ if (m_type == Getter)
+ jit.setupResults(valueRegs);
+ done.append(jit.jump());
+
+ returnUndefined.link(&jit);
+ if (m_type == Getter)
+ jit.moveTrustedValue(jsUndefined(), valueRegs);
+
+ done.link(&jit);
+
+ jit.addPtr(CCallHelpers::TrustedImm32((codeBlock->stackPointerOffset() * sizeof(Register)) - state.preservedReusedRegisterState.numberOfBytesPreserved - spillState.numberOfStackBytesUsedForRegisterPreservation),
+ GPRInfo::callFrameRegister, CCallHelpers::stackPointerRegister);
+ bool callHasReturnValue = isGetter();
+ restoreLiveRegistersFromStackForCall(spillState, callHasReturnValue);
+
+ jit.addLinkTask([=, &vm] (LinkBuffer& linkBuffer) {
+ this->as<GetterSetterAccessCase>().callLinkInfo()->setCallLocations(
+ CodeLocationLabel(linkBuffer.locationOfNearCall(slowPathCall)),
+ CodeLocationLabel(linkBuffer.locationOf(addressOfLinkFunctionCheck)),
+ linkBuffer.locationOfNearCall(fastPathCall));
+
+ linkBuffer.link(
+ slowPathCall,
+ CodeLocationLabel(vm.getCTIStub(linkCallThunkGenerator).code()));
+ });
+ } else {
+ ASSERT(m_type == CustomValueGetter || m_type == CustomAccessorGetter || m_type == CustomValueSetter || m_type == CustomAccessorSetter);
+
+ // Need to make room for the C call so any of our stack spillage isn't overwritten. It's
+ // hard to track if someone did spillage or not, so we just assume that we always need
+ // to make some space here.
+ jit.makeSpaceOnStackForCCall();
+
+ // getter: EncodedJSValue (*GetValueFunc)(ExecState*, EncodedJSValue thisValue, PropertyName);
+ // setter: void (*PutValueFunc)(ExecState*, EncodedJSValue thisObject, EncodedJSValue value);
+ // Custom values are passed the slotBase (the property holder), custom accessors are passed the thisValue (receiver).
+ // FIXME: Remove this difference between custom values and custom accessors.
+ // https://bugs.webkit.org/show_bug.cgi?id=158014
+ GPRReg baseForCustomValue = m_type == CustomValueGetter || m_type == CustomValueSetter ? baseForAccessGPR : baseForGetGPR;
+#if USE(JSVALUE64)
+ if (m_type == CustomValueGetter || m_type == CustomAccessorGetter) {
+ jit.setupArgumentsWithExecState(
+ baseForCustomValue,
+ CCallHelpers::TrustedImmPtr(ident.impl()));
+ } else
+ jit.setupArgumentsWithExecState(baseForCustomValue, valueRegs.gpr());
+#else
+ if (m_type == CustomValueGetter || m_type == CustomAccessorGetter) {
+ jit.setupArgumentsWithExecState(
+ EABI_32BIT_DUMMY_ARG baseForCustomValue,
+ CCallHelpers::TrustedImm32(JSValue::CellTag),
+ CCallHelpers::TrustedImmPtr(ident.impl()));
+ } else {
+ jit.setupArgumentsWithExecState(
+ EABI_32BIT_DUMMY_ARG baseForCustomValue,
+ CCallHelpers::TrustedImm32(JSValue::CellTag),
+ valueRegs.payloadGPR(), valueRegs.tagGPR());
+ }
+#endif
+ jit.storePtr(GPRInfo::callFrameRegister, &vm.topCallFrame);
+
+ operationCall = jit.call();
+ jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
+ linkBuffer.link(operationCall, FunctionPtr(this->as<GetterSetterAccessCase>().m_customAccessor.opaque));
+ });
+
+ if (m_type == CustomValueGetter || m_type == CustomAccessorGetter)
+ jit.setupResults(valueRegs);
+ jit.reclaimSpaceOnStackForCCall();
+
+ CCallHelpers::Jump noException =
+ jit.emitExceptionCheck(CCallHelpers::InvertedExceptionCheck);
+
+ state.restoreLiveRegistersFromStackForCallWithThrownException(spillState);
+ state.emitExplicitExceptionHandler();
+
+ noException.link(&jit);
+ bool callHasReturnValue = isGetter();
+ restoreLiveRegistersFromStackForCall(spillState, callHasReturnValue);
+ }
+ state.succeed();
+ return;
+ }
+
+ case Replace: {
+ if (InferredType* type = structure()->inferredTypeFor(ident.impl())) {
+ if (verbose)
+ dataLog("Have type: ", type->descriptor(), "\n");
+ state.failAndRepatch.append(
+ jit.branchIfNotType(valueRegs, scratchGPR, type->descriptor()));
+ } else if (verbose)
+ dataLog("Don't have type.\n");
+
+ if (isInlineOffset(m_offset)) {
+ jit.storeValue(
+ valueRegs,
+ CCallHelpers::Address(
+ baseGPR,
+ JSObject::offsetOfInlineStorage() +
+ offsetInInlineStorage(m_offset) * sizeof(JSValue)));
+ } else {
+ jit.loadPtr(CCallHelpers::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
+ jit.storeValue(
+ valueRegs,
+ CCallHelpers::Address(
+ scratchGPR, offsetInButterfly(m_offset) * sizeof(JSValue)));
+ }
+ state.succeed();
+ return;
+ }
+
+ case Transition: {
+ // AccessCase::transition() should have returned null if this wasn't true.
+ RELEASE_ASSERT(GPRInfo::numberOfRegisters >= 6 || !structure()->outOfLineCapacity() || structure()->outOfLineCapacity() == newStructure()->outOfLineCapacity());
+
+ if (InferredType* type = newStructure()->inferredTypeFor(ident.impl())) {
+ if (verbose)
+ dataLog("Have type: ", type->descriptor(), "\n");
+ state.failAndRepatch.append(
+ jit.branchIfNotType(valueRegs, scratchGPR, type->descriptor()));
+ } else if (verbose)
+ dataLog("Don't have type.\n");
+
+ // NOTE: This logic is duplicated in AccessCase::doesCalls(). It's important that doesCalls() knows
+ // exactly when this would make calls.
+ bool allocating = newStructure()->outOfLineCapacity() != structure()->outOfLineCapacity();
+ bool reallocating = allocating && structure()->outOfLineCapacity();
+ bool allocatingInline = allocating && !structure()->couldHaveIndexingHeader();
+
+ ScratchRegisterAllocator allocator(stubInfo.patch.usedRegisters);
+ allocator.lock(baseGPR);
+#if USE(JSVALUE32_64)
+ allocator.lock(static_cast<GPRReg>(stubInfo.patch.baseTagGPR));
+#endif
+ allocator.lock(valueRegs);
+ allocator.lock(scratchGPR);
+
+ GPRReg scratchGPR2 = InvalidGPRReg;
+ GPRReg scratchGPR3 = InvalidGPRReg;
+ if (allocatingInline) {
+ scratchGPR2 = allocator.allocateScratchGPR();
+ scratchGPR3 = allocator.allocateScratchGPR();
+ }
+
+ ScratchRegisterAllocator::PreservedState preservedState =
+ allocator.preserveReusedRegistersByPushing(jit, ScratchRegisterAllocator::ExtraStackSpace::SpaceForCCall);
+
+ CCallHelpers::JumpList slowPath;
+
+ ASSERT(structure()->transitionWatchpointSetHasBeenInvalidated());
+
+ if (allocating) {
+ size_t newSize = newStructure()->outOfLineCapacity() * sizeof(JSValue);
+
+ if (allocatingInline) {
+ MarkedAllocator* allocator = vm.auxiliarySpace.allocatorFor(newSize);
+
+ if (!allocator) {
+ // Yuck, this case would suck!
+ slowPath.append(jit.jump());
+ }
+
+ jit.move(CCallHelpers::TrustedImmPtr(allocator), scratchGPR2);
+ jit.emitAllocate(scratchGPR, allocator, scratchGPR2, scratchGPR3, slowPath);
+ jit.addPtr(CCallHelpers::TrustedImm32(newSize + sizeof(IndexingHeader)), scratchGPR);
+
+ size_t oldSize = structure()->outOfLineCapacity() * sizeof(JSValue);
+ ASSERT(newSize > oldSize);
+
+ if (reallocating) {
+ // Handle the case where we are reallocating (i.e. the old structure/butterfly
+ // already had out-of-line property storage).
+
+ jit.loadPtr(CCallHelpers::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR3);
+
+ // We have scratchGPR = new storage, scratchGPR3 = old storage,
+ // scratchGPR2 = available
+ for (size_t offset = 0; offset < oldSize; offset += sizeof(void*)) {
+ jit.loadPtr(
+ CCallHelpers::Address(
+ scratchGPR3,
+ -static_cast<ptrdiff_t>(
+ offset + sizeof(JSValue) + sizeof(void*))),
+ scratchGPR2);
+ jit.storePtr(
+ scratchGPR2,
+ CCallHelpers::Address(
+ scratchGPR,
+ -static_cast<ptrdiff_t>(offset + sizeof(JSValue) + sizeof(void*))));
+ }
+ }
+
+ for (size_t offset = oldSize; offset < newSize; offset += sizeof(void*))
+ jit.storePtr(CCallHelpers::TrustedImmPtr(0), CCallHelpers::Address(scratchGPR, -static_cast<ptrdiff_t>(offset + sizeof(JSValue) + sizeof(void*))));
+ } else {
+ // Handle the case where we are allocating out-of-line using an operation.
+ RegisterSet extraRegistersToPreserve;
+ extraRegistersToPreserve.set(baseGPR);
+ extraRegistersToPreserve.set(valueRegs);
+ AccessGenerationState::SpillState spillState = state.preserveLiveRegistersToStackForCall(extraRegistersToPreserve);
+
+ jit.store32(
+ CCallHelpers::TrustedImm32(
+ state.callSiteIndexForExceptionHandlingOrOriginal().bits()),
+ CCallHelpers::tagFor(static_cast<VirtualRegister>(CallFrameSlot::argumentCount)));
+
+ jit.makeSpaceOnStackForCCall();
+
+ if (!reallocating) {
+ jit.setupArgumentsWithExecState(baseGPR);
+
+ CCallHelpers::Call operationCall = jit.call();
+ jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
+ linkBuffer.link(
+ operationCall,
+ FunctionPtr(operationReallocateButterflyToHavePropertyStorageWithInitialCapacity));
+ });
+ } else {
+ // Handle the case where we are reallocating (i.e. the old structure/butterfly
+ // already had out-of-line property storage).
+ jit.setupArgumentsWithExecState(
+ baseGPR, CCallHelpers::TrustedImm32(newSize / sizeof(JSValue)));
+
+ CCallHelpers::Call operationCall = jit.call();
+ jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
+ linkBuffer.link(
+ operationCall,
+ FunctionPtr(operationReallocateButterflyToGrowPropertyStorage));
+ });
+ }
+
+ jit.reclaimSpaceOnStackForCCall();
+ jit.move(GPRInfo::returnValueGPR, scratchGPR);
+
+ CCallHelpers::Jump noException = jit.emitExceptionCheck(CCallHelpers::InvertedExceptionCheck);
+
+ state.restoreLiveRegistersFromStackForCallWithThrownException(spillState);
+ state.emitExplicitExceptionHandler();
+
+ noException.link(&jit);
+ state.restoreLiveRegistersFromStackForCall(spillState);
+ }
+ }
+
+ if (isInlineOffset(m_offset)) {
+ jit.storeValue(
+ valueRegs,
+ CCallHelpers::Address(
+ baseGPR,
+ JSObject::offsetOfInlineStorage() +
+ offsetInInlineStorage(m_offset) * sizeof(JSValue)));
+ } else {
+ if (!allocating)
+ jit.loadPtr(CCallHelpers::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
+ jit.storeValue(
+ valueRegs,
+ CCallHelpers::Address(scratchGPR, offsetInButterfly(m_offset) * sizeof(JSValue)));
+ }
+
+ if (allocatingInline) {
+ // We set the new butterfly and the structure last. Doing it this way ensures that
+ // whatever we had done up to this point is forgotten if we choose to branch to slow
+ // path.
+ jit.nukeStructureAndStoreButterfly(scratchGPR, baseGPR);
+ }
+
+ uint32_t structureBits = bitwise_cast<uint32_t>(newStructure()->id());
+ jit.store32(
+ CCallHelpers::TrustedImm32(structureBits),
+ CCallHelpers::Address(baseGPR, JSCell::structureIDOffset()));
+
+ allocator.restoreReusedRegistersByPopping(jit, preservedState);
+ state.succeed();
+
+ // We will have a slow path if we were allocating without the help of an operation.
+ if (allocatingInline) {
+ if (allocator.didReuseRegisters()) {
+ slowPath.link(&jit);
+ allocator.restoreReusedRegistersByPopping(jit, preservedState);
+ state.failAndIgnore.append(jit.jump());
+ } else
+ state.failAndIgnore.append(slowPath);
+ } else
+ RELEASE_ASSERT(slowPath.empty());
+ return;
+ }
+
+ case ArrayLength: {
+ jit.loadPtr(CCallHelpers::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
+ jit.load32(CCallHelpers::Address(scratchGPR, ArrayStorage::lengthOffset()), scratchGPR);
+ state.failAndIgnore.append(
+ jit.branch32(CCallHelpers::LessThan, scratchGPR, CCallHelpers::TrustedImm32(0)));
+ jit.boxInt32(scratchGPR, valueRegs);
+ state.succeed();
+ return;
+ }
+
+ case StringLength: {
+ jit.load32(CCallHelpers::Address(baseGPR, JSString::offsetOfLength()), valueRegs.payloadGPR());
+ jit.boxInt32(valueRegs.payloadGPR(), valueRegs);
+ state.succeed();
+ return;
+ }
+
+ case IntrinsicGetter: {
+ RELEASE_ASSERT(isValidOffset(offset()));
+
+ // We need to ensure the getter value does not move out from under us. Note that GetterSetters
+ // are immutable, so we just need to watch the property, not any value inside it.
+ Structure* currStructure;
+ if (m_conditionSet.isEmpty())
+ currStructure = structure();
+ else
+ currStructure = m_conditionSet.slotBaseCondition().object()->structure();
+ currStructure->startWatchingPropertyForReplacements(vm, offset());
+
+ this->as<IntrinsicGetterAccessCase>().emitIntrinsicGetter(state);
+ return;
+ }
+
+ case DirectArgumentsLength:
+ case ScopedArgumentsLength:
+ // These need to be handled by generateWithGuard(), since the guard is part of the
+ // algorithm. We can be sure that nobody will call generate() directly for these since they
+ // are not guarded by structure checks.
+ RELEASE_ASSERT_NOT_REACHED();
+ }
+
+ RELEASE_ASSERT_NOT_REACHED();
+}
+
+} // namespace JSC
+
+#endif
--- /dev/null
+/*
+ * Copyright (C) 2017 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+ * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#pragma once
+
+#if ENABLE(JIT)
+
+#include "JSFunctionInlines.h"
+#include "ObjectPropertyConditionSet.h"
+
+namespace JSC {
+
+struct AccessGenerationState;
+
+// An AccessCase describes one of the cases of a PolymorphicAccess. A PolymorphicAccess represents a
+// planned (to generate in future) or generated stub for some inline cache. That stub contains fast
+// path code for some finite number of fast cases, each described by an AccessCase object.
+//
+// An AccessCase object has a lifecycle that proceeds through several states. Note that the states
+// of AccessCase have a lot to do with the global effect epoch (we'll say epoch for short). This is
+// a simple way of reasoning about the state of the system outside this AccessCase. Any observable
+// effect - like storing to a property, changing an object's structure, etc. - increments the epoch.
+// The states are:
+//
+// Primordial: This is an AccessCase that was just allocated. It does not correspond to any actual
+// code and it is not owned by any PolymorphicAccess. In this state, the AccessCase
+// assumes that it is in the same epoch as when it was created. This is important
+// because it may make claims about itself ("I represent a valid case so long as you
+// register a watchpoint on this set") that could be contradicted by some outside
+// effects (like firing and deleting the watchpoint set in question). This is also the
+// state that an AccessCase is in when it is cloned (AccessCase::clone()).
+//
+// Committed: This happens as soon as some PolymorphicAccess takes ownership of this AccessCase.
+// In this state, the AccessCase no longer assumes anything about the epoch. To
+// accomplish this, PolymorphicAccess calls AccessCase::commit(). This must be done
+// during the same epoch when the AccessCase was created, either by the client or by
+// clone(). When created by the client, committing during the same epoch works because
+// we can be sure that whatever watchpoint sets they spoke of are still valid. When
+// created by clone(), we can be sure that the set is still valid because the original
+// of the clone still has watchpoints on it.
+//
+// Generated: This is the state when the PolymorphicAccess generates code for this case by
+// calling AccessCase::generate() or AccessCase::generateWithGuard(). At this point
+// the case object will have some extra stuff in it, like possibly the CallLinkInfo
+// object associated with the inline cache.
+// FIXME: Moving into the Generated state should not mutate the AccessCase object or
+// put more stuff into it. If we fix this, then we can get rid of AccessCase::clone().
+// https://bugs.webkit.org/show_bug.cgi?id=156456
+//
+// An AccessCase may be destroyed while in any of these states.
+//
+// We will sometimes buffer committed AccessCases in the PolymorphicAccess object before generating
+// code. This allows us to only regenerate once we've accumulated (hopefully) more than one new
+// AccessCase.
+class AccessCase {
+ WTF_MAKE_FAST_ALLOCATED;
+public:
+ enum AccessType : uint8_t {
+ Load,
+ Transition,
+ Replace,
+ Miss,
+ GetGetter,
+ Getter,
+ Setter,
+ CustomValueGetter,
+ CustomAccessorGetter,
+ CustomValueSetter,
+ CustomAccessorSetter,
+ IntrinsicGetter,
+ InHit,
+ InMiss,
+ ArrayLength,
+ StringLength,
+ DirectArgumentsLength,
+ ScopedArgumentsLength
+ };
+
+ enum State : uint8_t {
+ Primordial,
+ Committed,
+ Generated
+ };
+
+ template<typename T>
+ T& as() { return *static_cast<T*>(this); }
+
+ template<typename T>
+ const T& as() const { return *static_cast<const T*>(this); }
+
+
+ template<typename AccessCaseType, typename... Arguments>
+ static std::unique_ptr<AccessCaseType> create(Arguments... arguments)
+ {
+ return std::unique_ptr<AccessCaseType>(new AccessCaseType(arguments...));
+ }
+
+ static std::unique_ptr<AccessCase> create(VM&, JSCell* owner, AccessType, PropertyOffset = invalidOffset,
+ Structure* = nullptr, const ObjectPropertyConditionSet& = ObjectPropertyConditionSet());
+
+ // This create method should be used for transitions.
+ static std::unique_ptr<AccessCase> create(VM&, JSCell* owner, PropertyOffset, Structure* oldStructure,
+ Structure* newStructure, const ObjectPropertyConditionSet& = ObjectPropertyConditionSet());
+
+ static std::unique_ptr<AccessCase> fromStructureStubInfo(VM&, JSCell* owner, StructureStubInfo&);
+
+ AccessType type() const { return m_type; }
+ State state() const { return m_state; }
+ PropertyOffset offset() const { return m_offset; }
+
+ Structure* structure() const
+ {
+ if (m_type == Transition)
+ return m_structure->previousID();
+ return m_structure.get();
+ }
+ bool guardedByStructureCheck() const;
+
+ Structure* newStructure() const
+ {
+ ASSERT(m_type == Transition);
+ return m_structure.get();
+ }
+
+ ObjectPropertyConditionSet conditionSet() const { return m_conditionSet; }
+
+ virtual JSObject* alternateBase() const { return conditionSet().slotBaseCondition().object(); }
+ virtual WatchpointSet* additionalSet() const { return nullptr; }
+ virtual bool viaProxy() const { return false; }
+
+ // If you supply the optional vector, this will append the set of cells that this will need to keep alive
+ // past the call.
+ bool doesCalls(Vector<JSCell*>* cellsToMark = nullptr) const;
+
+ bool isGetter() const
+ {
+ switch (type()) {
+ case Getter:
+ case CustomValueGetter:
+ case CustomAccessorGetter:
+ return true;
+ default:
+ return false;
+ }
+ }
+
+ bool isAccessor() const { return isGetter() || type() == Setter; }
+
+ // Is it still possible for this case to ever be taken? Must call this as a prerequisite for
+ // calling generate() and friends. If this returns true, then you can call generate(). If
+ // this returns false, then generate() will crash. You must call generate() in the same epoch
+ // as when you called couldStillSucceed().
+ bool couldStillSucceed() const;
+
+ // If this method returns true, then it's a good idea to remove 'other' from the access once 'this'
+ // is added. This method assumes that in case of contradictions, 'this' represents a newer, and so
+ // more useful, truth. This method can be conservative; it will return false when in doubt.
+ bool canReplace(const AccessCase& other) const;
+
+ void dump(PrintStream& out) const;
+ virtual void dumpImpl(PrintStream&, CommaPrinter&) const { }
+
+ virtual ~AccessCase();
+
+protected:
+ AccessCase(VM&, JSCell* owner, AccessType, PropertyOffset, Structure*, const ObjectPropertyConditionSet&);
+ AccessCase(const AccessCase&) = default;
+ AccessCase& operator=(const AccessCase&) = delete;
+ void resetState() { m_state = Primordial; }
+
+private:
+ friend class CodeBlock;
+ friend class PolymorphicAccess;
+
+ bool visitWeak(VM&) const;
+ bool propagateTransitions(SlotVisitor&) const;
+
+ // FIXME: This only exists because of how AccessCase puts post-generation things into itself.
+ // https://bugs.webkit.org/show_bug.cgi?id=156456
+ virtual std::unique_ptr<AccessCase> clone() const;
+
+ // Perform any action that must be performed before the end of the epoch in which the case
+ // was created. Returns a set of watchpoint sets that will need to be watched.
+ Vector<WatchpointSet*, 2> commit(VM&, const Identifier&);
+
+ // Fall through on success. Two kinds of failures are supported: fall-through, which means that we
+ // should try a different case; and failure, which means that this was the right case but it needs
+ // help from the slow path.
+ void generateWithGuard(AccessGenerationState&, MacroAssembler::JumpList& fallThrough);
+
+ // Fall through on success, add a jump to the failure list on failure.
+ void generate(AccessGenerationState&);
+
+ void generateImpl(AccessGenerationState&);
+
+ AccessType m_type;
+ State m_state { Primordial };
+ PropertyOffset m_offset;
+
+ // Usually this is the structure that we expect the base object to have. But, this is the *new*
+ // structure for a transition and we rely on the fact that it has a strong reference to the old
+ // structure. For proxies, this is the structure of the object behind the proxy.
+ WriteBarrier<Structure> m_structure;
+
+ ObjectPropertyConditionSet m_conditionSet;
+};
+
+} // namespace JSC
+
+#endif
#include "CodeBlock.h"
#include "ComplexGetStatus.h"
+#include "GetterSetterAccessCase.h"
+#include "IntrinsicGetterAccessCase.h"
#include "JSCInlines.h"
#include "JSScope.h"
#include "LLIntData.h"
break;
}
case AccessCase::IntrinsicGetter: {
- intrinsicFunction = access.intrinsicFunction();
+ intrinsicFunction = access.as<IntrinsicGetterAccessCase>().intrinsicFunction();
break;
}
case AccessCase::Getter: {
callLinkStatus = std::make_unique<CallLinkStatus>();
- if (CallLinkInfo* callLinkInfo = access.callLinkInfo()) {
+ if (CallLinkInfo* callLinkInfo = access.as<GetterSetterAccessCase>().callLinkInfo()) {
*callLinkStatus = CallLinkStatus::computeFor(
locker, profiledBlock, *callLinkInfo, callExitSiteData);
}
break;
}
case AccessCase::CustomAccessorGetter: {
- domJIT = access.domJIT();
+ domJIT = access.as<GetterSetterAccessCase>().domJIT();
if (!domJIT)
return GetByIdStatus(slowPathState, true);
result.m_state = Custom;
--- /dev/null
+/*
+ * Copyright (C) 2017 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+ * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "config.h"
+#include "GetterSetterAccessCase.h"
+
+#if ENABLE(JIT)
+
+#include "DOMJITAccessCasePatchpointParams.h"
+#include "DOMJITCallDOMGetterPatchpoint.h"
+#include "DOMJITGetterSetter.h"
+#include "HeapInlines.h"
+#include "JSCJSValueInlines.h"
+#include "PolymorphicAccess.h"
+#include "StructureStubInfo.h"
+
+namespace JSC {
+
+// When true, emitDOMJITGetter() below dataLogs the register assignments it computes.
+static const bool verbose = false;
+
+// customSlotBase may be null (the create() overloads default it to nullptr),
+// hence setMayBeNull() rather than set().
+GetterSetterAccessCase::GetterSetterAccessCase(VM& vm, JSCell* owner, AccessType accessType, PropertyOffset offset, Structure* structure, const ObjectPropertyConditionSet& conditionSet, bool viaProxy, WatchpointSet* additionalSet, JSObject* customSlotBase)
+ : Base(vm, owner, accessType, offset, structure, conditionSet, viaProxy, additionalSet)
+{
+ m_customSlotBase.setMayBeNull(vm, owner, customSlotBase);
+}
+
+
+// Factory for the getter-flavored access types (Getter, CustomAccessorGetter,
+// CustomValueGetter); the overload below handles the setter-flavored types.
+// customGetter, customSlotBase, and domJIT may all be null for a plain Getter.
+std::unique_ptr<AccessCase> GetterSetterAccessCase::create(
+ VM& vm, JSCell* owner, AccessType type, PropertyOffset offset, Structure* structure,
+ const ObjectPropertyConditionSet& conditionSet, bool viaProxy, WatchpointSet* additionalSet,
+ PropertySlot::GetValueFunc customGetter, JSObject* customSlotBase, DOMJIT::GetterSetter* domJIT)
+{
+ switch (type) {
+ case Getter:
+ case CustomAccessorGetter:
+ case CustomValueGetter:
+ break;
+ default:
+ ASSERT_NOT_REACHED();
+ }
+
+ std::unique_ptr<GetterSetterAccessCase> result(new GetterSetterAccessCase(vm, owner, type, offset, structure, conditionSet, viaProxy, additionalSet, customSlotBase));
+ result->m_domJIT = domJIT;
+ result->m_customAccessor.getter = customGetter;
+ return WTFMove(result);
+}
+
+// Factory for the setter-flavored access types. Setters are never via-proxy and
+// take no additional watchpoint set, so false/nullptr are passed to the constructor.
+std::unique_ptr<AccessCase> GetterSetterAccessCase::create(VM& vm, JSCell* owner, AccessType type, Structure* structure, PropertyOffset offset,
+ const ObjectPropertyConditionSet& conditionSet, PutPropertySlot::PutValueFunc customSetter,
+ JSObject* customSlotBase)
+{
+ ASSERT(type == Setter || type == CustomValueSetter || type == CustomAccessorSetter);
+ std::unique_ptr<GetterSetterAccessCase> result(new GetterSetterAccessCase(vm, owner, type, offset, structure, conditionSet, false, nullptr, customSlotBase));
+ result->m_customAccessor.setter = customSetter;
+ return WTFMove(result);
+}
+
+
+// Non-trivial members (m_customSlotBase, m_callLinkInfo) are destroyed implicitly.
+GetterSetterAccessCase::~GetterSetterAccessCase()
+{
+}
+
+
+// Copy constructor used by clone(). m_callLinkInfo is deliberately left null:
+// as the old AccessCase::clone() noted, it is created during code generation.
+GetterSetterAccessCase::GetterSetterAccessCase(const GetterSetterAccessCase& other)
+ : Base(other)
+ , m_customSlotBase(other.m_customSlotBase)
+{
+ m_customAccessor.opaque = other.m_customAccessor.opaque;
+ m_domJIT = other.m_domJIT;
+}
+
+// Copies this case and resets its generation state back to Primordial
+// (resetState()), so the copy can be committed and generated afresh.
+std::unique_ptr<AccessCase> GetterSetterAccessCase::clone() const
+{
+ std::unique_ptr<GetterSetterAccessCase> result(new GetterSetterAccessCase(*this));
+ result->resetState();
+ return WTFMove(result);
+}
+
+// Returns the base object for the custom accessor: the explicit custom slot base
+// if one was supplied, otherwise the object named by the condition set's
+// slot-base condition.
+JSObject* GetterSetterAccessCase::alternateBase() const
+{
+ if (customSlotBase())
+ return customSlotBase();
+ return conditionSet().slotBaseCondition().object();
+}
+
+// Appends this subclass's fields to the base class's dump output. callLinkInfo
+// is only printed once it exists, i.e. after code generation has created it.
+void GetterSetterAccessCase::dumpImpl(PrintStream& out, CommaPrinter& comma) const
+{
+ Base::dumpImpl(out, comma);
+ out.print(comma, "customSlotBase = ", RawPointer(customSlotBase()));
+ if (callLinkInfo())
+ out.print(comma, "callLinkInfo = ", RawPointer(callLinkInfo()));
+ out.print(comma, "customAccessor = ", RawPointer(m_customAccessor.opaque));
+}
+
+// Emits the stub code for a DOMJIT-backed custom getter: builds the register
+// environment the DOMJIT::CallDOMGetterPatchpoint expects (base register,
+// optional global object, GP/FP scratch registers), runs the patchpoint's
+// generator, then emits the slow-path calls and the explicit exception handler
+// edge if any slow-path call can throw.
+void GetterSetterAccessCase::emitDOMJITGetter(AccessGenerationState& state, GPRReg baseForGetGPR)
+{
+ CCallHelpers& jit = *state.jit;
+ StructureStubInfo& stubInfo = *state.stubInfo;
+ JSValueRegs valueRegs = state.valueRegs;
+ GPRReg baseGPR = state.baseGPR;
+ GPRReg scratchGPR = state.scratchGPR;
+
+ // We construct the environment that can execute the DOMJIT::Patchpoint here.
+ Ref<DOMJIT::CallDOMGetterPatchpoint> patchpoint = domJIT()->callDOMGetter();
+
+ Vector<GPRReg> gpScratch;
+ Vector<FPRReg> fpScratch;
+ Vector<DOMJIT::Value> regs;
+
+ // Lock every register already in use so the allocator hands out fresh ones.
+ ScratchRegisterAllocator allocator(stubInfo.patch.usedRegisters);
+ allocator.lock(baseGPR);
+#if USE(JSVALUE32_64)
+ allocator.lock(static_cast<GPRReg>(stubInfo.patch.baseTagGPR));
+#endif
+ allocator.lock(valueRegs);
+ allocator.lock(scratchGPR);
+
+ GPRReg paramBaseGPR = InvalidGPRReg;
+ GPRReg paramGlobalObjectGPR = InvalidGPRReg;
+ JSValueRegs paramValueRegs = valueRegs;
+ GPRReg remainingScratchGPR = InvalidGPRReg;
+
+ // valueRegs and baseForGetGPR may be the same. For example, in Baseline JIT, we pass the same regT0 for baseGPR and valueRegs.
+ // In FTL, there is no constraint that the baseForGetGPR interferes with the result. To make implementation simple in
+ // DOMJIT::Patchpoint, DOMJIT::Patchpoint assumes that result registers always early interfere with input registers, in this case,
+ // baseForGetGPR. So we move baseForGetGPR to the other register if baseForGetGPR == valueRegs.
+ if (baseForGetGPR != valueRegs.payloadGPR()) {
+ paramBaseGPR = baseForGetGPR;
+ if (!patchpoint->requireGlobalObject)
+ remainingScratchGPR = scratchGPR;
+ else
+ paramGlobalObjectGPR = scratchGPR;
+ } else {
+ jit.move(valueRegs.payloadGPR(), scratchGPR);
+ paramBaseGPR = scratchGPR;
+ if (patchpoint->requireGlobalObject)
+ paramGlobalObjectGPR = allocator.allocateScratchGPR();
+ }
+
+ JSGlobalObject* globalObjectForDOMJIT = structure()->globalObject();
+
+ regs.append(paramValueRegs);
+ regs.append(paramBaseGPR);
+ if (patchpoint->requireGlobalObject) {
+ ASSERT(paramGlobalObjectGPR != InvalidGPRReg);
+ regs.append(DOMJIT::Value(paramGlobalObjectGPR, globalObjectForDOMJIT));
+ }
+
+ // Reuse the leftover scratch register (if any) before allocating new ones.
+ if (patchpoint->numGPScratchRegisters) {
+ unsigned i = 0;
+ if (remainingScratchGPR != InvalidGPRReg) {
+ gpScratch.append(remainingScratchGPR);
+ ++i;
+ }
+ for (; i < patchpoint->numGPScratchRegisters; ++i)
+ gpScratch.append(allocator.allocateScratchGPR());
+ }
+
+ for (unsigned i = 0; i < patchpoint->numFPScratchRegisters; ++i)
+ fpScratch.append(allocator.allocateScratchFPR());
+
+ // Let's store the reused registers to the stack. After that, we can use allocated scratch registers.
+ ScratchRegisterAllocator::PreservedState preservedState =
+ allocator.preserveReusedRegistersByPushing(jit, ScratchRegisterAllocator::ExtraStackSpace::SpaceForCCall);
+
+ if (verbose) {
+ dataLog("baseGPR = ", baseGPR, "\n");
+ dataLog("valueRegs = ", valueRegs, "\n");
+ dataLog("scratchGPR = ", scratchGPR, "\n");
+ dataLog("paramBaseGPR = ", paramBaseGPR, "\n");
+ if (paramGlobalObjectGPR != InvalidGPRReg)
+ dataLog("paramGlobalObjectGPR = ", paramGlobalObjectGPR, "\n");
+ dataLog("paramValueRegs = ", paramValueRegs, "\n");
+ for (unsigned i = 0; i < patchpoint->numGPScratchRegisters; ++i)
+ dataLog("gpScratch[", i, "] = ", gpScratch[i], "\n");
+ }
+
+ if (patchpoint->requireGlobalObject)
+ jit.move(CCallHelpers::TrustedImmPtr(globalObjectForDOMJIT), paramGlobalObjectGPR);
+
+ // We just spill the registers used in DOMJIT::Patchpoint here. For not spilled registers here explicitly,
+ // they must be in the used register set passed by the callers (Baseline, DFG, and FTL) if they need to be kept.
+ // Some registers can be locked, but not in the used register set. For example, the caller could make baseGPR
+ // same to valueRegs, and not include it in the used registers since it will be changed.
+ RegisterSet registersToSpillForCCall;
+ for (auto& value : regs) {
+ DOMJIT::Reg reg = value.reg();
+ if (reg.isJSValueRegs())
+ registersToSpillForCCall.set(reg.jsValueRegs());
+ else if (reg.isGPR())
+ registersToSpillForCCall.set(reg.gpr());
+ else
+ registersToSpillForCCall.set(reg.fpr());
+ }
+ for (GPRReg reg : gpScratch)
+ registersToSpillForCCall.set(reg);
+ for (FPRReg reg : fpScratch)
+ registersToSpillForCCall.set(reg);
+ registersToSpillForCCall.exclude(RegisterSet::registersToNotSaveForCCall());
+
+ DOMJITAccessCasePatchpointParams params(WTFMove(regs), WTFMove(gpScratch), WTFMove(fpScratch));
+ patchpoint->generator()->run(jit, params);
+ allocator.restoreReusedRegistersByPopping(jit, preservedState);
+ state.succeed();
+
+ // Slow-path calls are emitted after the fast path; if any can throw, link them
+ // to an explicit exception handler that first restores the pushed registers.
+ CCallHelpers::JumpList exceptions = params.emitSlowPathCalls(state, registersToSpillForCCall, jit);
+ if (!exceptions.empty()) {
+ exceptions.link(&jit);
+ allocator.restoreReusedRegistersByPopping(jit, preservedState);
+ state.emitExplicitExceptionHandler();
+ }
+}
+
+} // namespace JSC
+
+#endif // ENABLE(JIT)
--- /dev/null
+/*
+ * Copyright (C) 2017 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+ * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#pragma once
+
+#if ENABLE(JIT)
+
+#include "ProxyableAccessCase.h"
+
+namespace JSC {
+
+// AccessCase subclass for the call-based access types: Getter, Setter,
+// CustomAccessorGetter, CustomValueGetter, CustomAccessorSetter, and
+// CustomValueSetter (see the two create() overloads below).
+class GetterSetterAccessCase : public ProxyableAccessCase {
+public:
+ typedef ProxyableAccessCase Base;
+ friend class AccessCase;
+
+ // This can return null if it hasn't been generated yet. That's
+ // actually somewhat likely because of how we do buffering of new cases.
+ CallLinkInfo* callLinkInfo() const { return m_callLinkInfo.get(); }
+ JSObject* customSlotBase() const { return m_customSlotBase.get(); }
+ DOMJIT::GetterSetter* domJIT() const { return m_domJIT; }
+
+ // The custom slot base if set, otherwise the condition set's slot base.
+ JSObject* alternateBase() const override;
+
+ void emitDOMJITGetter(AccessGenerationState&, GPRReg baseForGetGPR);
+
+ // Factory for the getter-flavored access types.
+ static std::unique_ptr<AccessCase> create(
+ VM&, JSCell* owner, AccessType, PropertyOffset, Structure*,
+ const ObjectPropertyConditionSet& = ObjectPropertyConditionSet(),
+ bool viaProxy = false,
+ WatchpointSet* additionalSet = nullptr,
+ PropertySlot::GetValueFunc = nullptr,
+ JSObject* customSlotBase = nullptr,
+ DOMJIT::GetterSetter* = nullptr);
+
+ // Factory for the setter-flavored access types.
+ static std::unique_ptr<AccessCase> create(VM&, JSCell* owner, AccessType, Structure*, PropertyOffset,
+ const ObjectPropertyConditionSet&, PutPropertySlot::PutValueFunc = nullptr,
+ JSObject* customSlotBase = nullptr);
+
+ void dumpImpl(PrintStream&, CommaPrinter&) const override;
+ std::unique_ptr<AccessCase> clone() const override;
+
+ ~GetterSetterAccessCase();
+
+private:
+ GetterSetterAccessCase(VM&, JSCell*, AccessType, PropertyOffset, Structure*, const ObjectPropertyConditionSet&, bool viaProxy, WatchpointSet* additionalSet, JSObject* customSlotBase);
+
+ // Copy constructor used by clone(); does not copy m_callLinkInfo.
+ GetterSetterAccessCase(const GetterSetterAccessCase&);
+
+ WriteBarrier<JSObject> m_customSlotBase;
+ std::unique_ptr<CallLinkInfo> m_callLinkInfo;
+ // Holds either a GetValueFunc or a PutValueFunc depending on the AccessType;
+ // opaque is used when copying and dumping without knowing which.
+ union {
+ PutPropertySlot::PutValueFunc setter;
+ PropertySlot::GetValueFunc getter;
+ void* opaque;
+ } m_customAccessor;
+ DOMJIT::GetterSetter* m_domJIT;
+};
+
+} // namespace JSC
+
+#endif // ENABLE(JIT)
--- /dev/null
+/*
+ * Copyright (C) 2017 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+ * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "config.h"
+#include "IntrinsicGetterAccessCase.h"
+
+#if ENABLE(JIT)
+
+#include "HeapInlines.h"
+
+namespace JSC {
+
+// The AccessType is always IntrinsicGetter. intrinsicFunction must be non-null
+// (set() rather than setMayBeNull() is used below).
+IntrinsicGetterAccessCase::IntrinsicGetterAccessCase(VM& vm, JSCell* owner, PropertyOffset offset, Structure* structure, const ObjectPropertyConditionSet& conditionSet, JSFunction* intrinsicFunction)
+ : Base(vm, owner, IntrinsicGetter, offset, structure, conditionSet)
+{
+ m_intrinsicFunction.set(vm, owner, intrinsicFunction);
+}
+
+// Sole factory; simply wraps the private constructor.
+std::unique_ptr<AccessCase> IntrinsicGetterAccessCase::create(VM& vm, JSCell* owner, PropertyOffset offset, Structure* structure, const ObjectPropertyConditionSet& conditionSet, JSFunction* intrinsicFunction)
+{
+ return std::unique_ptr<AccessCase>(new IntrinsicGetterAccessCase(vm, owner, offset, structure, conditionSet, intrinsicFunction));
+}
+
+// m_intrinsicFunction (a WriteBarrier) is destroyed implicitly.
+IntrinsicGetterAccessCase::~IntrinsicGetterAccessCase()
+{
+}
+
+// Copies this case and resets its generation state back to Primordial
+// (resetState()), so the copy can be committed and generated afresh.
+std::unique_ptr<AccessCase> IntrinsicGetterAccessCase::clone() const
+{
+ std::unique_ptr<IntrinsicGetterAccessCase> result(new IntrinsicGetterAccessCase(*this));
+ result->resetState();
+ return WTFMove(result);
+}
+
+} // namespace JSC
+
+#endif // ENABLE(JIT)
--- /dev/null
+/*
+ * Copyright (C) 2017 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+ * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#pragma once
+
+#if ENABLE(JIT)
+
+#include "AccessCase.h"
+
+namespace JSC {
+
+// AccessCase subclass used for the IntrinsicGetter access type: a getter backed
+// by a known JSFunction whose intrinsic the stub can emit directly
+// (see emitIntrinsicGetter()).
+class IntrinsicGetterAccessCase : public AccessCase {
+public:
+ typedef AccessCase Base;
+ friend class AccessCase;
+
+ JSFunction* intrinsicFunction() const { return m_intrinsicFunction.get(); }
+ Intrinsic intrinsic() const { return m_intrinsicFunction->intrinsic(); }
+
+ // Whether the given function/structure pair is eligible for intrinsic emission.
+ static bool canEmitIntrinsicGetter(JSFunction*, Structure*);
+ void emitIntrinsicGetter(AccessGenerationState&);
+
+ static std::unique_ptr<AccessCase> create(VM&, JSCell*, PropertyOffset, Structure*, const ObjectPropertyConditionSet&, JSFunction* intrinsicFunction);
+
+ std::unique_ptr<AccessCase> clone() const override;
+
+ ~IntrinsicGetterAccessCase();
+
+private:
+ IntrinsicGetterAccessCase(VM&, JSCell*, PropertyOffset, Structure*, const ObjectPropertyConditionSet&, JSFunction* intrinsicFunction);
+
+ WriteBarrier<JSFunction> m_intrinsicFunction;
+};
+
+} // namespace JSC
+
+#endif // ENABLE(JIT)
#include "BinarySwitch.h"
#include "CCallHelpers.h"
#include "CodeBlock.h"
-#include "DOMJITAccessCasePatchpointParams.h"
-#include "DOMJITCallDOMGetterPatchpoint.h"
-#include "DirectArguments.h"
-#include "GetterSetter.h"
#include "Heap.h"
#include "JITOperations.h"
#include "JSCInlines.h"
#include "LinkBuffer.h"
-#include "ScopedArguments.h"
-#include "ScratchRegisterAllocator.h"
#include "StructureStubClearingWatchpoint.h"
#include "StructureStubInfo.h"
#include <wtf/CommaPrinter.h>
}
}
-AccessCase::AccessCase()
-{
-}
-
-std::unique_ptr<AccessCase> AccessCase::tryGet(
- VM& vm, JSCell* owner, AccessType type, PropertyOffset offset, Structure* structure,
- const ObjectPropertyConditionSet& conditionSet, bool viaProxy, WatchpointSet* additionalSet)
-{
- std::unique_ptr<AccessCase> result(new AccessCase());
-
- result->m_type = type;
- result->m_offset = offset;
- result->m_structure.set(vm, owner, structure);
- result->m_conditionSet = conditionSet;
-
- if (viaProxy || additionalSet) {
- result->m_rareData = std::make_unique<RareData>();
- result->m_rareData->viaProxy = viaProxy;
- result->m_rareData->additionalSet = additionalSet;
- }
-
- return result;
-}
-
-std::unique_ptr<AccessCase> AccessCase::get(
- VM& vm, JSCell* owner, AccessType type, PropertyOffset offset, Structure* structure,
- const ObjectPropertyConditionSet& conditionSet, bool viaProxy, WatchpointSet* additionalSet,
- PropertySlot::GetValueFunc customGetter, JSObject* customSlotBase, DOMJIT::GetterSetter* domJIT)
-{
- std::unique_ptr<AccessCase> result(new AccessCase());
-
- result->m_type = type;
- result->m_offset = offset;
- result->m_structure.set(vm, owner, structure);
- result->m_conditionSet = conditionSet;
-
- if (viaProxy || additionalSet || result->doesCalls() || customGetter || customSlotBase || domJIT) {
- result->m_rareData = std::make_unique<RareData>();
- result->m_rareData->viaProxy = viaProxy;
- result->m_rareData->additionalSet = additionalSet;
- result->m_rareData->customAccessor.getter = customGetter;
- result->m_rareData->customSlotBase.setMayBeNull(vm, owner, customSlotBase);
- result->m_rareData->domJIT = domJIT;
- }
-
- return result;
-}
-
-std::unique_ptr<AccessCase> AccessCase::megamorphicLoad(VM& vm, JSCell* owner)
-{
- UNUSED_PARAM(vm);
- UNUSED_PARAM(owner);
-
- if (GPRInfo::numberOfRegisters < 9)
- return nullptr;
-
- std::unique_ptr<AccessCase> result(new AccessCase());
-
- result->m_type = MegamorphicLoad;
-
- return result;
-}
-
-std::unique_ptr<AccessCase> AccessCase::replace(
- VM& vm, JSCell* owner, Structure* structure, PropertyOffset offset)
-{
- std::unique_ptr<AccessCase> result(new AccessCase());
-
- result->m_type = Replace;
- result->m_offset = offset;
- result->m_structure.set(vm, owner, structure);
-
- return result;
-}
-
-std::unique_ptr<AccessCase> AccessCase::transition(
- VM& vm, JSCell* owner, Structure* oldStructure, Structure* newStructure, PropertyOffset offset,
- const ObjectPropertyConditionSet& conditionSet)
-{
- RELEASE_ASSERT(oldStructure == newStructure->previousID());
-
- // Skip optimizing the case where we need a realloc, if we don't have
- // enough registers to make it happen.
- if (GPRInfo::numberOfRegisters < 6
- && oldStructure->outOfLineCapacity() != newStructure->outOfLineCapacity()
- && oldStructure->outOfLineCapacity()) {
- return nullptr;
- }
-
- std::unique_ptr<AccessCase> result(new AccessCase());
-
- result->m_type = Transition;
- result->m_offset = offset;
- result->m_structure.set(vm, owner, newStructure);
- result->m_conditionSet = conditionSet;
-
- return result;
-}
-
-std::unique_ptr<AccessCase> AccessCase::setter(
- VM& vm, JSCell* owner, AccessType type, Structure* structure, PropertyOffset offset,
- const ObjectPropertyConditionSet& conditionSet, PutPropertySlot::PutValueFunc customSetter,
- JSObject* customSlotBase)
-{
- std::unique_ptr<AccessCase> result(new AccessCase());
-
- result->m_type = type;
- result->m_offset = offset;
- result->m_structure.set(vm, owner, structure);
- result->m_conditionSet = conditionSet;
- result->m_rareData = std::make_unique<RareData>();
- result->m_rareData->customAccessor.setter = customSetter;
- result->m_rareData->customSlotBase.setMayBeNull(vm, owner, customSlotBase);
-
- return result;
-}
-
-std::unique_ptr<AccessCase> AccessCase::in(
- VM& vm, JSCell* owner, AccessType type, Structure* structure,
- const ObjectPropertyConditionSet& conditionSet)
-{
- std::unique_ptr<AccessCase> result(new AccessCase());
-
- result->m_type = type;
- result->m_structure.set(vm, owner, structure);
- result->m_conditionSet = conditionSet;
-
- return result;
-}
-
-std::unique_ptr<AccessCase> AccessCase::getLength(VM&, JSCell*, AccessType type)
-{
- std::unique_ptr<AccessCase> result(new AccessCase());
-
- result->m_type = type;
-
- return result;
-}
-
-std::unique_ptr<AccessCase> AccessCase::getIntrinsic(
- VM& vm, JSCell* owner, JSFunction* getter, PropertyOffset offset,
- Structure* structure, const ObjectPropertyConditionSet& conditionSet)
-{
- std::unique_ptr<AccessCase> result(new AccessCase());
-
- result->m_type = IntrinsicGetter;
- result->m_structure.set(vm, owner, structure);
- result->m_conditionSet = conditionSet;
- result->m_offset = offset;
-
- result->m_rareData = std::make_unique<RareData>();
- result->m_rareData->intrinsicFunction.set(vm, owner, getter);
-
- return result;
-}
-
-AccessCase::~AccessCase()
-{
-}
-
-std::unique_ptr<AccessCase> AccessCase::fromStructureStubInfo(
- VM& vm, JSCell* owner, StructureStubInfo& stubInfo)
-{
- switch (stubInfo.cacheType) {
- case CacheType::GetByIdSelf:
- return get(
- vm, owner, Load, stubInfo.u.byIdSelf.offset,
- stubInfo.u.byIdSelf.baseObjectStructure.get());
-
- case CacheType::PutByIdReplace:
- return replace(
- vm, owner, stubInfo.u.byIdSelf.baseObjectStructure.get(), stubInfo.u.byIdSelf.offset);
-
- default:
- return nullptr;
- }
-}
-
-std::unique_ptr<AccessCase> AccessCase::clone() const
-{
- std::unique_ptr<AccessCase> result(new AccessCase());
- result->m_type = m_type;
- result->m_offset = m_offset;
- result->m_structure = m_structure;
- result->m_conditionSet = m_conditionSet;
- if (RareData* rareData = m_rareData.get()) {
- result->m_rareData = std::make_unique<RareData>();
- result->m_rareData->viaProxy = rareData->viaProxy;
- result->m_rareData->additionalSet = rareData->additionalSet;
- // NOTE: We don't copy the callLinkInfo, since that's created during code generation.
- result->m_rareData->customAccessor.opaque = rareData->customAccessor.opaque;
- result->m_rareData->customSlotBase = rareData->customSlotBase;
- result->m_rareData->intrinsicFunction = rareData->intrinsicFunction;
- result->m_rareData->domJIT = rareData->domJIT;
- }
- return result;
-}
-
-Vector<WatchpointSet*, 2> AccessCase::commit(VM& vm, const Identifier& ident)
-{
- // It's fine to commit something that is already committed. That arises when we switch to using
- // newly allocated watchpoints. When it happens, it's not efficient - but we think that's OK
- // because most AccessCases have no extra watchpoints anyway.
- RELEASE_ASSERT(m_state == Primordial || m_state == Committed);
-
- Vector<WatchpointSet*, 2> result;
-
- if ((structure() && structure()->needImpurePropertyWatchpoint())
- || m_conditionSet.needImpurePropertyWatchpoint())
- result.append(vm.ensureWatchpointSetForImpureProperty(ident));
-
- if (additionalSet())
- result.append(additionalSet());
-
- m_state = Committed;
-
- return result;
-}
-
-bool AccessCase::guardedByStructureCheck() const
-{
- if (viaProxy())
- return false;
-
- switch (m_type) {
- case MegamorphicLoad:
- case ArrayLength:
- case StringLength:
- case DirectArgumentsLength:
- case ScopedArgumentsLength:
- return false;
- default:
- return true;
- }
-}
-
-JSObject* AccessCase::alternateBase() const
-{
- if (customSlotBase())
- return customSlotBase();
- return conditionSet().slotBaseCondition().object();
-}
-
-bool AccessCase::doesCalls(Vector<JSCell*>* cellsToMark) const
-{
- switch (type()) {
- case Getter:
- case Setter:
- case CustomValueGetter:
- case CustomAccessorGetter:
- case CustomValueSetter:
- case CustomAccessorSetter:
- return true;
- case Transition:
- if (newStructure()->outOfLineCapacity() != structure()->outOfLineCapacity()
- && structure()->couldHaveIndexingHeader()) {
- if (cellsToMark)
- cellsToMark->append(newStructure());
- return true;
- }
- return false;
- default:
- return false;
- }
-}
-
-bool AccessCase::couldStillSucceed() const
-{
- return m_conditionSet.structuresEnsureValidityAssumingImpurePropertyWatchpoint();
-}
-
-bool AccessCase::canBeReplacedByMegamorphicLoad() const
-{
- if (type() == MegamorphicLoad)
- return true;
-
- return type() == Load
- && !viaProxy()
- && conditionSet().isEmpty()
- && !additionalSet()
- && !customSlotBase();
-}
-
-bool AccessCase::canReplace(const AccessCase& other) const
-{
- // This puts in a good effort to try to figure out if 'other' is made superfluous by '*this'.
- // It's fine for this to return false if it's in doubt.
-
- switch (type()) {
- case MegamorphicLoad:
- return other.canBeReplacedByMegamorphicLoad();
- case ArrayLength:
- case StringLength:
- case DirectArgumentsLength:
- case ScopedArgumentsLength:
- return other.type() == type();
- default:
- if (!guardedByStructureCheck() || !other.guardedByStructureCheck())
- return false;
-
- return structure() == other.structure();
- }
-}
-
-void AccessCase::dump(PrintStream& out) const
-{
- out.print(m_type, ":(");
-
- CommaPrinter comma;
-
- out.print(comma, m_state);
-
- if (m_type == Transition)
- out.print(comma, "structure = ", pointerDump(structure()), " -> ", pointerDump(newStructure()));
- else if (m_structure)
- out.print(comma, "structure = ", pointerDump(m_structure.get()));
-
- if (isValidOffset(m_offset))
- out.print(comma, "offset = ", m_offset);
- if (!m_conditionSet.isEmpty())
- out.print(comma, "conditions = ", m_conditionSet);
-
- if (RareData* rareData = m_rareData.get()) {
- if (rareData->viaProxy)
- out.print(comma, "viaProxy = ", rareData->viaProxy);
- if (rareData->additionalSet)
- out.print(comma, "additionalSet = ", RawPointer(rareData->additionalSet.get()));
- if (rareData->callLinkInfo)
- out.print(comma, "callLinkInfo = ", RawPointer(rareData->callLinkInfo.get()));
- if (rareData->customAccessor.opaque)
- out.print(comma, "customAccessor = ", RawPointer(rareData->customAccessor.opaque));
- if (rareData->customSlotBase)
- out.print(comma, "customSlotBase = ", RawPointer(rareData->customSlotBase.get()));
- }
-
- out.print(")");
-}
-
-bool AccessCase::visitWeak(VM& vm) const
-{
- if (m_structure && !Heap::isMarked(m_structure.get()))
- return false;
- if (!m_conditionSet.areStillLive())
- return false;
- if (m_rareData) {
- if (m_rareData->callLinkInfo)
- m_rareData->callLinkInfo->visitWeak(vm);
- if (m_rareData->customSlotBase && !Heap::isMarked(m_rareData->customSlotBase.get()))
- return false;
- if (m_rareData->intrinsicFunction && !Heap::isMarked(m_rareData->intrinsicFunction.get()))
- return false;
- }
- return true;
-}
-
-bool AccessCase::propagateTransitions(SlotVisitor& visitor) const
-{
- bool result = true;
-
- if (m_structure)
- result &= m_structure->markIfCheap(visitor);
-
- switch (m_type) {
- case Transition:
- if (Heap::isMarkedConcurrently(m_structure->previousID()))
- visitor.appendUnbarriered(m_structure.get());
- else
- result = false;
- break;
- default:
- break;
- }
-
- return result;
-}
-
-void AccessCase::generateWithGuard(
- AccessGenerationState& state, CCallHelpers::JumpList& fallThrough)
-{
- SuperSamplerScope superSamplerScope(false);
-
- RELEASE_ASSERT(m_state == Committed);
- m_state = Generated;
-
- CCallHelpers& jit = *state.jit;
- VM& vm = *jit.vm();
- const Identifier& ident = *state.ident;
- StructureStubInfo& stubInfo = *state.stubInfo;
- JSValueRegs valueRegs = state.valueRegs;
- GPRReg baseGPR = state.baseGPR;
- GPRReg scratchGPR = state.scratchGPR;
-
- UNUSED_PARAM(vm);
-
- switch (m_type) {
- case ArrayLength: {
- ASSERT(!viaProxy());
- jit.load8(CCallHelpers::Address(baseGPR, JSCell::indexingTypeAndMiscOffset()), scratchGPR);
- fallThrough.append(
- jit.branchTest32(
- CCallHelpers::Zero, scratchGPR, CCallHelpers::TrustedImm32(IsArray)));
- fallThrough.append(
- jit.branchTest32(
- CCallHelpers::Zero, scratchGPR, CCallHelpers::TrustedImm32(IndexingShapeMask)));
- break;
- }
-
- case StringLength: {
- ASSERT(!viaProxy());
- fallThrough.append(
- jit.branch8(
- CCallHelpers::NotEqual,
- CCallHelpers::Address(baseGPR, JSCell::typeInfoTypeOffset()),
- CCallHelpers::TrustedImm32(StringType)));
- break;
- }
-
- case DirectArgumentsLength: {
- ASSERT(!viaProxy());
- fallThrough.append(
- jit.branch8(
- CCallHelpers::NotEqual,
- CCallHelpers::Address(baseGPR, JSCell::typeInfoTypeOffset()),
- CCallHelpers::TrustedImm32(DirectArgumentsType)));
-
- fallThrough.append(
- jit.branchTestPtr(
- CCallHelpers::NonZero,
- CCallHelpers::Address(baseGPR, DirectArguments::offsetOfMappedArguments())));
- jit.load32(
- CCallHelpers::Address(baseGPR, DirectArguments::offsetOfLength()),
- valueRegs.payloadGPR());
- jit.boxInt32(valueRegs.payloadGPR(), valueRegs);
- state.succeed();
- return;
- }
-
- case ScopedArgumentsLength: {
- ASSERT(!viaProxy());
- fallThrough.append(
- jit.branch8(
- CCallHelpers::NotEqual,
- CCallHelpers::Address(baseGPR, JSCell::typeInfoTypeOffset()),
- CCallHelpers::TrustedImm32(ScopedArgumentsType)));
-
- fallThrough.append(
- jit.branchTest8(
- CCallHelpers::NonZero,
- CCallHelpers::Address(baseGPR, ScopedArguments::offsetOfOverrodeThings())));
- jit.load32(
- CCallHelpers::Address(baseGPR, ScopedArguments::offsetOfTotalLength()),
- valueRegs.payloadGPR());
- jit.boxInt32(valueRegs.payloadGPR(), valueRegs);
- state.succeed();
- return;
- }
-
- case MegamorphicLoad: {
- UniquedStringImpl* key = ident.impl();
- unsigned hash = IdentifierRepHash::hash(key);
-
- ScratchRegisterAllocator allocator(stubInfo.patch.usedRegisters);
- allocator.lock(baseGPR);
-#if USE(JSVALUE32_64)
- allocator.lock(static_cast<GPRReg>(stubInfo.patch.baseTagGPR));
-#endif
- allocator.lock(valueRegs);
- allocator.lock(scratchGPR);
-
- GPRReg intermediateGPR = scratchGPR;
- GPRReg maskGPR = allocator.allocateScratchGPR();
- GPRReg maskedHashGPR = allocator.allocateScratchGPR();
- GPRReg indexGPR = allocator.allocateScratchGPR();
- GPRReg offsetGPR = allocator.allocateScratchGPR();
-
- if (verbose) {
- dataLog("baseGPR = ", baseGPR, "\n");
- dataLog("valueRegs = ", valueRegs, "\n");
- dataLog("scratchGPR = ", scratchGPR, "\n");
- dataLog("intermediateGPR = ", intermediateGPR, "\n");
- dataLog("maskGPR = ", maskGPR, "\n");
- dataLog("maskedHashGPR = ", maskedHashGPR, "\n");
- dataLog("indexGPR = ", indexGPR, "\n");
- dataLog("offsetGPR = ", offsetGPR, "\n");
- }
-
- ScratchRegisterAllocator::PreservedState preservedState =
- allocator.preserveReusedRegistersByPushing(jit, ScratchRegisterAllocator::ExtraStackSpace::SpaceForCCall);
-
- CCallHelpers::JumpList myFailAndIgnore;
- CCallHelpers::JumpList myFallThrough;
-
- jit.emitLoadStructure(baseGPR, intermediateGPR, maskGPR);
- jit.loadPtr(
- CCallHelpers::Address(intermediateGPR, Structure::propertyTableUnsafeOffset()),
- intermediateGPR);
-
- myFailAndIgnore.append(jit.branchTestPtr(CCallHelpers::Zero, intermediateGPR));
-
- jit.load32(CCallHelpers::Address(intermediateGPR, PropertyTable::offsetOfIndexMask()), maskGPR);
- jit.loadPtr(CCallHelpers::Address(intermediateGPR, PropertyTable::offsetOfIndex()), indexGPR);
- jit.load32(
- CCallHelpers::Address(intermediateGPR, PropertyTable::offsetOfIndexSize()),
- intermediateGPR);
-
- jit.move(maskGPR, maskedHashGPR);
- jit.and32(CCallHelpers::TrustedImm32(hash), maskedHashGPR);
- jit.lshift32(CCallHelpers::TrustedImm32(2), intermediateGPR);
- jit.addPtr(indexGPR, intermediateGPR);
-
- CCallHelpers::Label loop = jit.label();
-
- jit.load32(CCallHelpers::BaseIndex(indexGPR, maskedHashGPR, CCallHelpers::TimesFour), offsetGPR);
-
- myFallThrough.append(
- jit.branch32(
- CCallHelpers::Equal,
- offsetGPR,
- CCallHelpers::TrustedImm32(PropertyTable::EmptyEntryIndex)));
-
- jit.sub32(CCallHelpers::TrustedImm32(1), offsetGPR);
- jit.mul32(CCallHelpers::TrustedImm32(sizeof(PropertyMapEntry)), offsetGPR, offsetGPR);
- jit.addPtr(intermediateGPR, offsetGPR);
-
- CCallHelpers::Jump collision = jit.branchPtr(
- CCallHelpers::NotEqual,
- CCallHelpers::Address(offsetGPR, OBJECT_OFFSETOF(PropertyMapEntry, key)),
- CCallHelpers::TrustedImmPtr(key));
-
- // offsetGPR currently holds a pointer to the PropertyMapEntry, which has the offset and attributes.
- // Check them and then attempt the load.
-
- myFallThrough.append(
- jit.branchTest32(
- CCallHelpers::NonZero,
- CCallHelpers::Address(offsetGPR, OBJECT_OFFSETOF(PropertyMapEntry, attributes)),
- CCallHelpers::TrustedImm32(Accessor | CustomAccessor)));
-
- jit.load32(CCallHelpers::Address(offsetGPR, OBJECT_OFFSETOF(PropertyMapEntry, offset)), offsetGPR);
-
- jit.loadProperty(baseGPR, offsetGPR, valueRegs);
-
- allocator.restoreReusedRegistersByPopping(jit, preservedState);
- state.succeed();
-
- collision.link(&jit);
-
- jit.add32(CCallHelpers::TrustedImm32(1), maskedHashGPR);
-
- // FIXME: We could be smarter about this. Currently we're burning a GPR for the mask. But looping
- // around isn't super common so we could, for example, recompute the mask from the difference between
- // the table and index. But before we do that we should probably make it easier to multiply and
- // divide by the size of PropertyMapEntry. That probably involves making PropertyMapEntry be arranged
- // to have a power-of-2 size.
- jit.and32(maskGPR, maskedHashGPR);
- jit.jump().linkTo(loop, &jit);
-
- if (allocator.didReuseRegisters()) {
- myFailAndIgnore.link(&jit);
- allocator.restoreReusedRegistersByPopping(jit, preservedState);
- state.failAndIgnore.append(jit.jump());
-
- myFallThrough.link(&jit);
- allocator.restoreReusedRegistersByPopping(jit, preservedState);
- fallThrough.append(jit.jump());
- } else {
- state.failAndIgnore.append(myFailAndIgnore);
- fallThrough.append(myFallThrough);
- }
- return;
- }
-
- default: {
- if (viaProxy()) {
- fallThrough.append(
- jit.branch8(
- CCallHelpers::NotEqual,
- CCallHelpers::Address(baseGPR, JSCell::typeInfoTypeOffset()),
- CCallHelpers::TrustedImm32(PureForwardingProxyType)));
-
- jit.loadPtr(CCallHelpers::Address(baseGPR, JSProxy::targetOffset()), scratchGPR);
-
- fallThrough.append(
- jit.branchStructure(
- CCallHelpers::NotEqual,
- CCallHelpers::Address(scratchGPR, JSCell::structureIDOffset()),
- structure()));
- } else {
- fallThrough.append(
- jit.branchStructure(
- CCallHelpers::NotEqual,
- CCallHelpers::Address(baseGPR, JSCell::structureIDOffset()),
- structure()));
- }
- break;
- } };
-
- generateImpl(state);
-}
-
-void AccessCase::generate(AccessGenerationState& state)
-{
- RELEASE_ASSERT(m_state == Committed);
- m_state = Generated;
-
- generateImpl(state);
-}
-
-void AccessCase::generateImpl(AccessGenerationState& state)
-{
- SuperSamplerScope superSamplerScope(false);
- if (verbose)
- dataLog("Generating code for: ", *this, "\n");
-
- ASSERT(m_state == Generated); // We rely on the callers setting this for us.
-
- CCallHelpers& jit = *state.jit;
- VM& vm = *jit.vm();
- CodeBlock* codeBlock = jit.codeBlock();
- StructureStubInfo& stubInfo = *state.stubInfo;
- const Identifier& ident = *state.ident;
- JSValueRegs valueRegs = state.valueRegs;
- GPRReg baseGPR = state.baseGPR;
- GPRReg scratchGPR = state.scratchGPR;
-
- ASSERT(m_conditionSet.structuresEnsureValidityAssumingImpurePropertyWatchpoint());
-
- for (const ObjectPropertyCondition& condition : m_conditionSet) {
- Structure* structure = condition.object()->structure();
-
- if (condition.isWatchableAssumingImpurePropertyWatchpoint()) {
- structure->addTransitionWatchpoint(state.addWatchpoint(condition));
- continue;
- }
-
- if (!condition.structureEnsuresValidityAssumingImpurePropertyWatchpoint(structure)) {
- // The reason why this cannot happen is that we require that PolymorphicAccess calls
- // AccessCase::generate() only after it has verified that
- // AccessCase::couldStillSucceed() returned true.
-
- dataLog("This condition is no longer met: ", condition, "\n");
- RELEASE_ASSERT_NOT_REACHED();
- }
-
- // We will emit code that has a weak reference that isn't otherwise listed anywhere.
- state.weakReferences.append(WriteBarrier<JSCell>(vm, codeBlock, structure));
-
- jit.move(CCallHelpers::TrustedImmPtr(condition.object()), scratchGPR);
- state.failAndRepatch.append(
- jit.branchStructure(
- CCallHelpers::NotEqual,
- CCallHelpers::Address(scratchGPR, JSCell::structureIDOffset()),
- structure));
- }
-
- switch (m_type) {
- case InHit:
- case InMiss:
- jit.boxBooleanPayload(m_type == InHit, valueRegs.payloadGPR());
- state.succeed();
- return;
-
- case Miss:
- jit.moveTrustedValue(jsUndefined(), valueRegs);
- state.succeed();
- return;
-
- case Load:
- case GetGetter:
- case Getter:
- case Setter:
- case CustomValueGetter:
- case CustomAccessorGetter:
- case CustomValueSetter:
- case CustomAccessorSetter: {
- GPRReg valueRegsPayloadGPR = valueRegs.payloadGPR();
-
- if (isValidOffset(m_offset)) {
- Structure* currStructure;
- if (m_conditionSet.isEmpty())
- currStructure = structure();
- else
- currStructure = m_conditionSet.slotBaseCondition().object()->structure();
- currStructure->startWatchingPropertyForReplacements(vm, offset());
- }
-
- GPRReg baseForGetGPR;
- if (viaProxy()) {
- ASSERT(m_type != CustomValueSetter || m_type != CustomAccessorSetter); // Because setters need to not trash valueRegsPayloadGPR.
- if (m_type == Getter || m_type == Setter)
- baseForGetGPR = scratchGPR;
- else
- baseForGetGPR = valueRegsPayloadGPR;
-
- ASSERT((m_type != Getter && m_type != Setter) || baseForGetGPR != baseGPR);
- ASSERT(m_type != Setter || baseForGetGPR != valueRegsPayloadGPR);
-
- jit.loadPtr(
- CCallHelpers::Address(baseGPR, JSProxy::targetOffset()),
- baseForGetGPR);
- } else
- baseForGetGPR = baseGPR;
-
- GPRReg baseForAccessGPR;
- if (!m_conditionSet.isEmpty()) {
- jit.move(
- CCallHelpers::TrustedImmPtr(alternateBase()),
- scratchGPR);
- baseForAccessGPR = scratchGPR;
- } else
- baseForAccessGPR = baseForGetGPR;
-
- GPRReg loadedValueGPR = InvalidGPRReg;
- if (m_type != CustomValueGetter && m_type != CustomAccessorGetter && m_type != CustomValueSetter && m_type != CustomAccessorSetter) {
- if (m_type == Load || m_type == GetGetter)
- loadedValueGPR = valueRegsPayloadGPR;
- else
- loadedValueGPR = scratchGPR;
-
- ASSERT((m_type != Getter && m_type != Setter) || loadedValueGPR != baseGPR);
- ASSERT(m_type != Setter || loadedValueGPR != valueRegsPayloadGPR);
-
- GPRReg storageGPR;
- if (isInlineOffset(m_offset))
- storageGPR = baseForAccessGPR;
- else {
- jit.loadPtr(
- CCallHelpers::Address(baseForAccessGPR, JSObject::butterflyOffset()),
- loadedValueGPR);
- storageGPR = loadedValueGPR;
- }
-
-#if USE(JSVALUE64)
- jit.load64(
- CCallHelpers::Address(storageGPR, offsetRelativeToBase(m_offset)), loadedValueGPR);
-#else
- if (m_type == Load || m_type == GetGetter) {
- jit.load32(
- CCallHelpers::Address(storageGPR, offsetRelativeToBase(m_offset) + TagOffset),
- valueRegs.tagGPR());
- }
- jit.load32(
- CCallHelpers::Address(storageGPR, offsetRelativeToBase(m_offset) + PayloadOffset),
- loadedValueGPR);
-#endif
- }
-
- if (m_type == Load || m_type == GetGetter) {
- state.succeed();
- return;
- }
-
- if (Options::useDOMJIT() && m_type == CustomAccessorGetter && m_rareData->domJIT) {
- // We do not need to emit CheckDOM operation since structure check ensures
- // that the structure of the given base value is structure()! So all we should
- // do is performing the CheckDOM thingy in IC compiling time here.
- if (structure()->classInfo()->isSubClassOf(m_rareData->domJIT->thisClassInfo())) {
- emitDOMJITGetter(state, baseForGetGPR);
- return;
- }
- }
-
- // Stuff for custom getters/setters.
- CCallHelpers::Call operationCall;
-
- // Stuff for JS getters/setters.
- CCallHelpers::DataLabelPtr addressOfLinkFunctionCheck;
- CCallHelpers::Call fastPathCall;
- CCallHelpers::Call slowPathCall;
-
- // This also does the necessary calculations of whether or not we're an
- // exception handling call site.
- AccessGenerationState::SpillState spillState = state.preserveLiveRegistersToStackForCall();
-
- auto restoreLiveRegistersFromStackForCall = [&](AccessGenerationState::SpillState& spillState, bool callHasReturnValue) {
- RegisterSet dontRestore;
- if (callHasReturnValue) {
- // This is the result value. We don't want to overwrite the result with what we stored to the stack.
- // We sometimes have to store it to the stack just in case we throw an exception and need the original value.
- dontRestore.set(valueRegs);
- }
- state.restoreLiveRegistersFromStackForCall(spillState, dontRestore);
- };
-
- jit.store32(
- CCallHelpers::TrustedImm32(state.callSiteIndexForExceptionHandlingOrOriginal().bits()),
- CCallHelpers::tagFor(static_cast<VirtualRegister>(CallFrameSlot::argumentCount)));
-
- if (m_type == Getter || m_type == Setter) {
- ASSERT(baseGPR != loadedValueGPR);
- ASSERT(m_type != Setter || (baseGPR != valueRegsPayloadGPR && loadedValueGPR != valueRegsPayloadGPR));
-
- // Create a JS call using a JS call inline cache. Assume that:
- //
- // - SP is aligned and represents the extent of the calling compiler's stack usage.
- //
- // - FP is set correctly (i.e. it points to the caller's call frame header).
- //
- // - SP - FP is an aligned difference.
- //
- // - Any byte between FP (exclusive) and SP (inclusive) could be live in the calling
- // code.
- //
- // Therefore, we temporarily grow the stack for the purpose of the call and then
- // shrink it after.
-
- state.setSpillStateForJSGetterSetter(spillState);
-
- RELEASE_ASSERT(!m_rareData->callLinkInfo);
- m_rareData->callLinkInfo = std::make_unique<CallLinkInfo>();
-
- // FIXME: If we generated a polymorphic call stub that jumped back to the getter
- // stub, which then jumped back to the main code, then we'd have a reachability
- // situation that the GC doesn't know about. The GC would ensure that the polymorphic
- // call stub stayed alive, and it would ensure that the main code stayed alive, but
- // it wouldn't know that the getter stub was alive. Ideally JIT stub routines would
- // be GC objects, and then we'd be able to say that the polymorphic call stub has a
- // reference to the getter stub.
- // https://bugs.webkit.org/show_bug.cgi?id=148914
- m_rareData->callLinkInfo->disallowStubs();
-
- m_rareData->callLinkInfo->setUpCall(
- CallLinkInfo::Call, stubInfo.codeOrigin, loadedValueGPR);
-
- CCallHelpers::JumpList done;
-
- // There is a "this" argument.
- unsigned numberOfParameters = 1;
- // ... and a value argument if we're calling a setter.
- if (m_type == Setter)
- numberOfParameters++;
-
- // Get the accessor; if there ain't one then the result is jsUndefined().
- if (m_type == Setter) {
- jit.loadPtr(
- CCallHelpers::Address(loadedValueGPR, GetterSetter::offsetOfSetter()),
- loadedValueGPR);
- } else {
- jit.loadPtr(
- CCallHelpers::Address(loadedValueGPR, GetterSetter::offsetOfGetter()),
- loadedValueGPR);
- }
-
- CCallHelpers::Jump returnUndefined = jit.branchTestPtr(
- CCallHelpers::Zero, loadedValueGPR);
-
- unsigned numberOfRegsForCall = CallFrame::headerSizeInRegisters + numberOfParameters;
-
- unsigned numberOfBytesForCall =
- numberOfRegsForCall * sizeof(Register) - sizeof(CallerFrameAndPC);
-
- unsigned alignedNumberOfBytesForCall =
- WTF::roundUpToMultipleOf(stackAlignmentBytes(), numberOfBytesForCall);
-
- jit.subPtr(
- CCallHelpers::TrustedImm32(alignedNumberOfBytesForCall),
- CCallHelpers::stackPointerRegister);
-
- CCallHelpers::Address calleeFrame = CCallHelpers::Address(
- CCallHelpers::stackPointerRegister,
- -static_cast<ptrdiff_t>(sizeof(CallerFrameAndPC)));
-
- jit.store32(
- CCallHelpers::TrustedImm32(numberOfParameters),
- calleeFrame.withOffset(CallFrameSlot::argumentCount * sizeof(Register) + PayloadOffset));
-
- jit.storeCell(
- loadedValueGPR, calleeFrame.withOffset(CallFrameSlot::callee * sizeof(Register)));
-
- jit.storeCell(
- baseGPR,
- calleeFrame.withOffset(virtualRegisterForArgument(0).offset() * sizeof(Register)));
-
- if (m_type == Setter) {
- jit.storeValue(
- valueRegs,
- calleeFrame.withOffset(
- virtualRegisterForArgument(1).offset() * sizeof(Register)));
- }
-
- CCallHelpers::Jump slowCase = jit.branchPtrWithPatch(
- CCallHelpers::NotEqual, loadedValueGPR, addressOfLinkFunctionCheck,
- CCallHelpers::TrustedImmPtr(0));
-
- fastPathCall = jit.nearCall();
- if (m_type == Getter)
- jit.setupResults(valueRegs);
- done.append(jit.jump());
-
- slowCase.link(&jit);
- jit.move(loadedValueGPR, GPRInfo::regT0);
-#if USE(JSVALUE32_64)
- // We *always* know that the getter/setter, if non-null, is a cell.
- jit.move(CCallHelpers::TrustedImm32(JSValue::CellTag), GPRInfo::regT1);
-#endif
- jit.move(CCallHelpers::TrustedImmPtr(m_rareData->callLinkInfo.get()), GPRInfo::regT2);
- slowPathCall = jit.nearCall();
- if (m_type == Getter)
- jit.setupResults(valueRegs);
- done.append(jit.jump());
-
- returnUndefined.link(&jit);
- if (m_type == Getter)
- jit.moveTrustedValue(jsUndefined(), valueRegs);
-
- done.link(&jit);
-
- jit.addPtr(CCallHelpers::TrustedImm32((codeBlock->stackPointerOffset() * sizeof(Register)) - state.preservedReusedRegisterState.numberOfBytesPreserved - spillState.numberOfStackBytesUsedForRegisterPreservation),
- GPRInfo::callFrameRegister, CCallHelpers::stackPointerRegister);
- bool callHasReturnValue = isGetter();
- restoreLiveRegistersFromStackForCall(spillState, callHasReturnValue);
-
- jit.addLinkTask(
- [=, &vm] (LinkBuffer& linkBuffer) {
- m_rareData->callLinkInfo->setCallLocations(
- CodeLocationLabel(linkBuffer.locationOfNearCall(slowPathCall)),
- CodeLocationLabel(linkBuffer.locationOf(addressOfLinkFunctionCheck)),
- linkBuffer.locationOfNearCall(fastPathCall));
-
- linkBuffer.link(
- slowPathCall,
- CodeLocationLabel(vm.getCTIStub(linkCallThunkGenerator).code()));
- });
- } else {
- ASSERT(m_type == CustomValueGetter || m_type == CustomAccessorGetter || m_type == CustomValueSetter || m_type == CustomAccessorSetter);
-
- // Need to make room for the C call so any of our stack spillage isn't overwritten. It's
- // hard to track if someone did spillage or not, so we just assume that we always need
- // to make some space here.
- jit.makeSpaceOnStackForCCall();
-
- // getter: EncodedJSValue (*GetValueFunc)(ExecState*, EncodedJSValue thisValue, PropertyName);
- // setter: void (*PutValueFunc)(ExecState*, EncodedJSValue thisObject, EncodedJSValue value);
- // Custom values are passed the slotBase (the property holder), custom accessors are passed the thisVaule (reciever).
- // FIXME: Remove this differences in custom values and custom accessors.
- // https://bugs.webkit.org/show_bug.cgi?id=158014
- GPRReg baseForCustomValue = m_type == CustomValueGetter || m_type == CustomValueSetter ? baseForAccessGPR : baseForGetGPR;
-#if USE(JSVALUE64)
- if (m_type == CustomValueGetter || m_type == CustomAccessorGetter) {
- jit.setupArgumentsWithExecState(
- baseForCustomValue,
- CCallHelpers::TrustedImmPtr(ident.impl()));
- } else
- jit.setupArgumentsWithExecState(baseForCustomValue, valueRegs.gpr());
-#else
- if (m_type == CustomValueGetter || m_type == CustomAccessorGetter) {
- jit.setupArgumentsWithExecState(
- EABI_32BIT_DUMMY_ARG baseForCustomValue,
- CCallHelpers::TrustedImm32(JSValue::CellTag),
- CCallHelpers::TrustedImmPtr(ident.impl()));
- } else {
- jit.setupArgumentsWithExecState(
- EABI_32BIT_DUMMY_ARG baseForCustomValue,
- CCallHelpers::TrustedImm32(JSValue::CellTag),
- valueRegs.payloadGPR(), valueRegs.tagGPR());
- }
-#endif
- jit.storePtr(GPRInfo::callFrameRegister, &vm.topCallFrame);
-
- operationCall = jit.call();
- jit.addLinkTask(
- [=] (LinkBuffer& linkBuffer) {
- linkBuffer.link(operationCall, FunctionPtr(m_rareData->customAccessor.opaque));
- });
-
- if (m_type == CustomValueGetter || m_type == CustomAccessorGetter)
- jit.setupResults(valueRegs);
- jit.reclaimSpaceOnStackForCCall();
-
- CCallHelpers::Jump noException =
- jit.emitExceptionCheck(CCallHelpers::InvertedExceptionCheck);
-
- state.restoreLiveRegistersFromStackForCallWithThrownException(spillState);
- state.emitExplicitExceptionHandler();
-
- noException.link(&jit);
- bool callHasReturnValue = isGetter();
- restoreLiveRegistersFromStackForCall(spillState, callHasReturnValue);
- }
- state.succeed();
- return;
- }
-
- case Replace: {
- if (InferredType* type = structure()->inferredTypeFor(ident.impl())) {
- if (verbose)
- dataLog("Have type: ", type->descriptor(), "\n");
- state.failAndRepatch.append(
- jit.branchIfNotType(valueRegs, scratchGPR, type->descriptor()));
- } else if (verbose)
- dataLog("Don't have type.\n");
-
- if (isInlineOffset(m_offset)) {
- jit.storeValue(
- valueRegs,
- CCallHelpers::Address(
- baseGPR,
- JSObject::offsetOfInlineStorage() +
- offsetInInlineStorage(m_offset) * sizeof(JSValue)));
- } else {
- jit.loadPtr(CCallHelpers::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
- jit.storeValue(
- valueRegs,
- CCallHelpers::Address(
- scratchGPR, offsetInButterfly(m_offset) * sizeof(JSValue)));
- }
- state.succeed();
- return;
- }
-
- case Transition: {
- // AccessCase::transition() should have returned null if this wasn't true.
- RELEASE_ASSERT(GPRInfo::numberOfRegisters >= 6 || !structure()->outOfLineCapacity() || structure()->outOfLineCapacity() == newStructure()->outOfLineCapacity());
-
- if (InferredType* type = newStructure()->inferredTypeFor(ident.impl())) {
- if (verbose)
- dataLog("Have type: ", type->descriptor(), "\n");
- state.failAndRepatch.append(
- jit.branchIfNotType(valueRegs, scratchGPR, type->descriptor()));
- } else if (verbose)
- dataLog("Don't have type.\n");
-
- // NOTE: This logic is duplicated in AccessCase::doesCalls(). It's important that doesCalls() knows
- // exactly when this would make calls.
- bool allocating = newStructure()->outOfLineCapacity() != structure()->outOfLineCapacity();
- bool reallocating = allocating && structure()->outOfLineCapacity();
- bool allocatingInline = allocating && !structure()->couldHaveIndexingHeader();
-
- ScratchRegisterAllocator allocator(stubInfo.patch.usedRegisters);
- allocator.lock(baseGPR);
-#if USE(JSVALUE32_64)
- allocator.lock(static_cast<GPRReg>(stubInfo.patch.baseTagGPR));
-#endif
- allocator.lock(valueRegs);
- allocator.lock(scratchGPR);
-
- GPRReg scratchGPR2 = InvalidGPRReg;
- GPRReg scratchGPR3 = InvalidGPRReg;
- if (allocatingInline) {
- scratchGPR2 = allocator.allocateScratchGPR();
- scratchGPR3 = allocator.allocateScratchGPR();
- }
-
- ScratchRegisterAllocator::PreservedState preservedState =
- allocator.preserveReusedRegistersByPushing(jit, ScratchRegisterAllocator::ExtraStackSpace::SpaceForCCall);
-
- CCallHelpers::JumpList slowPath;
-
- ASSERT(structure()->transitionWatchpointSetHasBeenInvalidated());
-
- if (allocating) {
- size_t newSize = newStructure()->outOfLineCapacity() * sizeof(JSValue);
-
- if (allocatingInline) {
- MarkedAllocator* allocator = vm.auxiliarySpace.allocatorFor(newSize);
-
- if (!allocator) {
- // Yuck, this case would suck!
- slowPath.append(jit.jump());
- }
-
- jit.move(CCallHelpers::TrustedImmPtr(allocator), scratchGPR2);
- jit.emitAllocate(scratchGPR, allocator, scratchGPR2, scratchGPR3, slowPath);
- jit.addPtr(CCallHelpers::TrustedImm32(newSize + sizeof(IndexingHeader)), scratchGPR);
-
- size_t oldSize = structure()->outOfLineCapacity() * sizeof(JSValue);
- ASSERT(newSize > oldSize);
-
- if (reallocating) {
- // Handle the case where we are reallocating (i.e. the old structure/butterfly
- // already had out-of-line property storage).
-
- jit.loadPtr(CCallHelpers::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR3);
-
- // We have scratchGPR = new storage, scratchGPR3 = old storage,
- // scratchGPR2 = available
- for (size_t offset = 0; offset < oldSize; offset += sizeof(void*)) {
- jit.loadPtr(
- CCallHelpers::Address(
- scratchGPR3,
- -static_cast<ptrdiff_t>(
- offset + sizeof(JSValue) + sizeof(void*))),
- scratchGPR2);
- jit.storePtr(
- scratchGPR2,
- CCallHelpers::Address(
- scratchGPR,
- -static_cast<ptrdiff_t>(offset + sizeof(JSValue) + sizeof(void*))));
- }
- }
-
- for (size_t offset = oldSize; offset < newSize; offset += sizeof(void*))
- jit.storePtr(CCallHelpers::TrustedImmPtr(0), CCallHelpers::Address(scratchGPR, -static_cast<ptrdiff_t>(offset + sizeof(JSValue) + sizeof(void*))));
- } else {
- // Handle the case where we are allocating out-of-line using an operation.
- RegisterSet extraRegistersToPreserve;
- extraRegistersToPreserve.set(baseGPR);
- extraRegistersToPreserve.set(valueRegs);
- AccessGenerationState::SpillState spillState = state.preserveLiveRegistersToStackForCall(extraRegistersToPreserve);
-
- jit.store32(
- CCallHelpers::TrustedImm32(
- state.callSiteIndexForExceptionHandlingOrOriginal().bits()),
- CCallHelpers::tagFor(static_cast<VirtualRegister>(CallFrameSlot::argumentCount)));
-
- jit.makeSpaceOnStackForCCall();
-
- if (!reallocating) {
- jit.setupArgumentsWithExecState(baseGPR);
-
- CCallHelpers::Call operationCall = jit.call();
- jit.addLinkTask(
- [=] (LinkBuffer& linkBuffer) {
- linkBuffer.link(
- operationCall,
- FunctionPtr(operationReallocateButterflyToHavePropertyStorageWithInitialCapacity));
- });
- } else {
- // Handle the case where we are reallocating (i.e. the old structure/butterfly
- // already had out-of-line property storage).
- jit.setupArgumentsWithExecState(
- baseGPR, CCallHelpers::TrustedImm32(newSize / sizeof(JSValue)));
-
- CCallHelpers::Call operationCall = jit.call();
- jit.addLinkTask(
- [=] (LinkBuffer& linkBuffer) {
- linkBuffer.link(
- operationCall,
- FunctionPtr(operationReallocateButterflyToGrowPropertyStorage));
- });
- }
-
- jit.reclaimSpaceOnStackForCCall();
- jit.move(GPRInfo::returnValueGPR, scratchGPR);
-
- CCallHelpers::Jump noException =
- jit.emitExceptionCheck(CCallHelpers::InvertedExceptionCheck);
-
- state.restoreLiveRegistersFromStackForCallWithThrownException(spillState);
- state.emitExplicitExceptionHandler();
-
- noException.link(&jit);
- state.restoreLiveRegistersFromStackForCall(spillState);
- }
- }
-
- if (isInlineOffset(m_offset)) {
- jit.storeValue(
- valueRegs,
- CCallHelpers::Address(
- baseGPR,
- JSObject::offsetOfInlineStorage() +
- offsetInInlineStorage(m_offset) * sizeof(JSValue)));
- } else {
- if (!allocating)
- jit.loadPtr(CCallHelpers::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
- jit.storeValue(
- valueRegs,
- CCallHelpers::Address(scratchGPR, offsetInButterfly(m_offset) * sizeof(JSValue)));
- }
-
- if (allocatingInline) {
- // We set the new butterfly and the structure last. Doing it this way ensures that
- // whatever we had done up to this point is forgotten if we choose to branch to slow
- // path.
- jit.nukeStructureAndStoreButterfly(scratchGPR, baseGPR);
- }
-
- uint32_t structureBits = bitwise_cast<uint32_t>(newStructure()->id());
- jit.store32(
- CCallHelpers::TrustedImm32(structureBits),
- CCallHelpers::Address(baseGPR, JSCell::structureIDOffset()));
-
- allocator.restoreReusedRegistersByPopping(jit, preservedState);
- state.succeed();
-
- // We will have a slow path if we were allocating without the help of an operation.
- if (allocatingInline) {
- if (allocator.didReuseRegisters()) {
- slowPath.link(&jit);
- allocator.restoreReusedRegistersByPopping(jit, preservedState);
- state.failAndIgnore.append(jit.jump());
- } else
- state.failAndIgnore.append(slowPath);
- } else
- RELEASE_ASSERT(slowPath.empty());
- return;
- }
-
- case ArrayLength: {
- jit.loadPtr(CCallHelpers::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
- jit.load32(CCallHelpers::Address(scratchGPR, ArrayStorage::lengthOffset()), scratchGPR);
- state.failAndIgnore.append(
- jit.branch32(CCallHelpers::LessThan, scratchGPR, CCallHelpers::TrustedImm32(0)));
- jit.boxInt32(scratchGPR, valueRegs);
- state.succeed();
- return;
- }
-
- case StringLength: {
- jit.load32(CCallHelpers::Address(baseGPR, JSString::offsetOfLength()), valueRegs.payloadGPR());
- jit.boxInt32(valueRegs.payloadGPR(), valueRegs);
- state.succeed();
- return;
- }
-
- case IntrinsicGetter: {
- RELEASE_ASSERT(isValidOffset(offset()));
-
- // We need to ensure the getter value does not move from under us. Note that GetterSetters
- // are immutable so we just need to watch the property not any value inside it.
- Structure* currStructure;
- if (m_conditionSet.isEmpty())
- currStructure = structure();
- else
- currStructure = m_conditionSet.slotBaseCondition().object()->structure();
- currStructure->startWatchingPropertyForReplacements(vm, offset());
-
- emitIntrinsicGetter(state);
- return;
- }
-
- case DirectArgumentsLength:
- case ScopedArgumentsLength:
- case MegamorphicLoad:
- // These need to be handled by generateWithGuard(), since the guard is part of the
- // algorithm. We can be sure that nobody will call generate() directly for these since they
- // are not guarded by structure checks.
- RELEASE_ASSERT_NOT_REACHED();
- }
-
- RELEASE_ASSERT_NOT_REACHED();
-}
-
-void AccessCase::emitDOMJITGetter(AccessGenerationState& state, GPRReg baseForGetGPR)
-{
- CCallHelpers& jit = *state.jit;
- StructureStubInfo& stubInfo = *state.stubInfo;
- JSValueRegs valueRegs = state.valueRegs;
- GPRReg baseGPR = state.baseGPR;
- GPRReg scratchGPR = state.scratchGPR;
-
- // We construct the environment that can execute the DOMJIT::Patchpoint here.
- Ref<DOMJIT::CallDOMGetterPatchpoint> patchpoint = m_rareData->domJIT->callDOMGetter();
-
- Vector<GPRReg> gpScratch;
- Vector<FPRReg> fpScratch;
- Vector<DOMJIT::Value> regs;
-
- ScratchRegisterAllocator allocator(stubInfo.patch.usedRegisters);
- allocator.lock(baseGPR);
-#if USE(JSVALUE32_64)
- allocator.lock(static_cast<GPRReg>(stubInfo.patch.baseTagGPR));
-#endif
- allocator.lock(valueRegs);
- allocator.lock(scratchGPR);
-
- GPRReg paramBaseGPR = InvalidGPRReg;
- GPRReg paramGlobalObjectGPR = InvalidGPRReg;
- JSValueRegs paramValueRegs = valueRegs;
- GPRReg remainingScratchGPR = InvalidGPRReg;
-
- // valueRegs and baseForGetGPR may be the same. For example, in Baseline JIT, we pass the same regT0 for baseGPR and valueRegs.
- // In FTL, there is no constraint that the baseForGetGPR interferes with the result. To make implementation simple in
- // DOMJIT::Patchpoint, DOMJIT::Patchpoint assumes that result registers always early interfere with input registers, in this case,
- // baseForGetGPR. So we move baseForGetGPR to the other register if baseForGetGPR == valueRegs.
- if (baseForGetGPR != valueRegs.payloadGPR()) {
- paramBaseGPR = baseForGetGPR;
- if (!patchpoint->requireGlobalObject)
- remainingScratchGPR = scratchGPR;
- else
- paramGlobalObjectGPR = scratchGPR;
- } else {
- jit.move(valueRegs.payloadGPR(), scratchGPR);
- paramBaseGPR = scratchGPR;
- if (patchpoint->requireGlobalObject)
- paramGlobalObjectGPR = allocator.allocateScratchGPR();
- }
-
- JSGlobalObject* globalObjectForDOMJIT = structure()->globalObject();
-
- regs.append(paramValueRegs);
- regs.append(paramBaseGPR);
- if (patchpoint->requireGlobalObject) {
- ASSERT(paramGlobalObjectGPR != InvalidGPRReg);
- regs.append(DOMJIT::Value(paramGlobalObjectGPR, globalObjectForDOMJIT));
- }
-
- if (patchpoint->numGPScratchRegisters) {
- unsigned i = 0;
- if (remainingScratchGPR != InvalidGPRReg) {
- gpScratch.append(remainingScratchGPR);
- ++i;
- }
- for (; i < patchpoint->numGPScratchRegisters; ++i)
- gpScratch.append(allocator.allocateScratchGPR());
- }
-
- for (unsigned i = 0; i < patchpoint->numFPScratchRegisters; ++i)
- fpScratch.append(allocator.allocateScratchFPR());
-
- // Let's store the reused registers to the stack. After that, we can use allocated scratch registers.
- ScratchRegisterAllocator::PreservedState preservedState =
- allocator.preserveReusedRegistersByPushing(jit, ScratchRegisterAllocator::ExtraStackSpace::SpaceForCCall);
-
- if (verbose) {
- dataLog("baseGPR = ", baseGPR, "\n");
- dataLog("valueRegs = ", valueRegs, "\n");
- dataLog("scratchGPR = ", scratchGPR, "\n");
- dataLog("paramBaseGPR = ", paramBaseGPR, "\n");
- if (paramGlobalObjectGPR != InvalidGPRReg)
- dataLog("paramGlobalObjectGPR = ", paramGlobalObjectGPR, "\n");
- dataLog("paramValueRegs = ", paramValueRegs, "\n");
- for (unsigned i = 0; i < patchpoint->numGPScratchRegisters; ++i)
- dataLog("gpScratch[", i, "] = ", gpScratch[i], "\n");
- }
-
- if (patchpoint->requireGlobalObject)
- jit.move(CCallHelpers::TrustedImmPtr(globalObjectForDOMJIT), paramGlobalObjectGPR);
-
- // We just spill the registers used in DOMJIT::Patchpoint here. For not spilled registers here explicitly,
- // they must be in the used register set passed by the callers (Baseline, DFG, and FTL) if they need to be kept.
- // Some registers can be locked, but not in the used register set. For example, the caller could make baseGPR
- // same to valueRegs, and not include it in the used registers since it will be changed.
- RegisterSet registersToSpillForCCall;
- for (auto& value : regs) {
- DOMJIT::Reg reg = value.reg();
- if (reg.isJSValueRegs())
- registersToSpillForCCall.set(reg.jsValueRegs());
- else if (reg.isGPR())
- registersToSpillForCCall.set(reg.gpr());
- else
- registersToSpillForCCall.set(reg.fpr());
- }
- for (GPRReg reg : gpScratch)
- registersToSpillForCCall.set(reg);
- for (FPRReg reg : fpScratch)
- registersToSpillForCCall.set(reg);
- registersToSpillForCCall.exclude(RegisterSet::registersToNotSaveForCCall());
-
- DOMJITAccessCasePatchpointParams params(WTFMove(regs), WTFMove(gpScratch), WTFMove(fpScratch));
- patchpoint->generator()->run(jit, params);
- allocator.restoreReusedRegistersByPopping(jit, preservedState);
- state.succeed();
-
- CCallHelpers::JumpList exceptions = params.emitSlowPathCalls(state, registersToSpillForCCall, jit);
- if (!exceptions.empty()) {
- exceptions.link(&jit);
- allocator.restoreReusedRegistersByPopping(jit, preservedState);
- state.emitExplicitExceptionHandler();
- }
-}
PolymorphicAccess::PolymorphicAccess() { }
PolymorphicAccess::~PolymorphicAccess() { }
m_list.resize(dstIndex);
if (verbose)
- dataLog("In regenerate: cases: ", listDump(cases), "\n");
-
- // Now that we've removed obviously unnecessary cases, we can check if the megamorphic load
- // optimization is applicable. Note that we basically tune megamorphicLoadCost according to code
- // size. It would be faster to just allow more repatching with many load cases, and avoid the
- // megamorphicLoad optimization, if we had infinite executable memory.
- if (cases.size() >= Options::maxAccessVariantListSize()) {
- unsigned numSelfLoads = 0;
- for (auto& newCase : cases) {
- if (newCase->canBeReplacedByMegamorphicLoad())
- numSelfLoads++;
- }
-
- if (numSelfLoads >= Options::megamorphicLoadCost()) {
- if (auto mega = AccessCase::megamorphicLoad(vm, codeBlock)) {
- cases.removeAllMatching(
- [&] (std::unique_ptr<AccessCase>& newCase) -> bool {
- return newCase->canBeReplacedByMegamorphicLoad();
- });
-
- cases.append(WTFMove(mega));
- }
- }
- }
-
- if (verbose)
dataLog("Optimized cases: ", listDump(cases), "\n");
// At this point we're convinced that 'cases' contains the cases that we want to JIT now and we
case AccessCase::Load:
out.print("Load");
return;
- case AccessCase::MegamorphicLoad:
- out.print("MegamorphicLoad");
- return;
case AccessCase::Transition:
out.print("Transition");
return;
#if ENABLE(JIT)
+#include "AccessCase.h"
#include "CodeOrigin.h"
#include "JITStubRoutine.h"
#include "JSFunctionInlines.h"
class WatchpointsOnStructureStubInfo;
class ScratchRegisterAllocator;
-struct AccessGenerationState;
-
-// An AccessCase describes one of the cases of a PolymorphicAccess. A PolymorphicAccess represents a
-// planned (to generate in future) or generated stub for some inline cache. That stub contains fast
-// path code for some finite number of fast cases, each described by an AccessCase object.
-//
-// An AccessCase object has a lifecycle that proceeds through several states. Note that the states
-// of AccessCase have a lot to do with the global effect epoch (we'll say epoch for short). This is
-// a simple way of reasoning about the state of the system outside this AccessCase. Any observable
-// effect - like storing to a property, changing an object's structure, etc. - increments the epoch.
-// The states are:
-//
-// Primordial: This is an AccessCase that was just allocated. It does not correspond to any actual
-// code and it is not owned by any PolymorphicAccess. In this state, the AccessCase
-// assumes that it is in the same epoch as when it was created. This is important
-// because it may make claims about itself ("I represent a valid case so long as you
-// register a watchpoint on this set") that could be contradicted by some outside
-// effects (like firing and deleting the watchpoint set in question). This is also the
-// state that an AccessCase is in when it is cloned (AccessCase::clone()).
-//
-// Committed: This happens as soon as some PolymorphicAccess takes ownership of this AccessCase.
-// In this state, the AccessCase no longer assumes anything about the epoch. To
-// accomplish this, PolymorphicAccess calls AccessCase::commit(). This must be done
-// during the same epoch when the AccessCase was created, either by the client or by
-// clone(). When created by the client, committing during the same epoch works because
-// we can be sure that whatever watchpoint sets they spoke of are still valid. When
-// created by clone(), we can be sure that the set is still valid because the original
-// of the clone still has watchpoints on it.
-//
-// Generated: This is the state when the PolymorphicAccess generates code for this case by
-// calling AccessCase::generate() or AccessCase::generateWithGuard(). At this point
-// the case object will have some extra stuff in it, like possibly the CallLinkInfo
-// object associated with the inline cache.
-// FIXME: Moving into the Generated state should not mutate the AccessCase object or
-// put more stuff into it. If we fix this, then we can get rid of AccessCase::clone().
-// https://bugs.webkit.org/show_bug.cgi?id=156456
-//
-// An AccessCase may be destroyed while in any of these states.
-//
-// We will sometimes buffer committed AccessCases in the PolymorphicAccess object before generating
-// code. This allows us to only regenerate once we've accumulated (hopefully) more than one new
-// AccessCase.
-class AccessCase {
- WTF_MAKE_NONCOPYABLE(AccessCase);
- WTF_MAKE_FAST_ALLOCATED;
-public:
- enum AccessType : uint8_t {
- Load,
- MegamorphicLoad,
- Transition,
- Replace,
- Miss,
- GetGetter,
- Getter,
- Setter,
- CustomValueGetter,
- CustomAccessorGetter,
- CustomValueSetter,
- CustomAccessorSetter,
- IntrinsicGetter,
- InHit,
- InMiss,
- ArrayLength,
- StringLength,
- DirectArgumentsLength,
- ScopedArgumentsLength
- };
-
- enum State : uint8_t {
- Primordial,
- Committed,
- Generated
- };
-
- static std::unique_ptr<AccessCase> tryGet(
- VM&, JSCell* owner, AccessType, PropertyOffset, Structure*,
- const ObjectPropertyConditionSet& = ObjectPropertyConditionSet(),
- bool viaProxy = false,
- WatchpointSet* additionalSet = nullptr);
-
- static std::unique_ptr<AccessCase> get(
- VM&, JSCell* owner, AccessType, PropertyOffset, Structure*,
- const ObjectPropertyConditionSet& = ObjectPropertyConditionSet(),
- bool viaProxy = false,
- WatchpointSet* additionalSet = nullptr,
- PropertySlot::GetValueFunc = nullptr,
- JSObject* customSlotBase = nullptr,
- DOMJIT::GetterSetter* = nullptr);
-
- static std::unique_ptr<AccessCase> megamorphicLoad(VM&, JSCell* owner);
-
- static std::unique_ptr<AccessCase> replace(VM&, JSCell* owner, Structure*, PropertyOffset);
-
- static std::unique_ptr<AccessCase> transition(
- VM&, JSCell* owner, Structure* oldStructure, Structure* newStructure, PropertyOffset,
- const ObjectPropertyConditionSet& = ObjectPropertyConditionSet());
-
- static std::unique_ptr<AccessCase> setter(
- VM&, JSCell* owner, AccessType, Structure*, PropertyOffset,
- const ObjectPropertyConditionSet&, PutPropertySlot::PutValueFunc = nullptr,
- JSObject* customSlotBase = nullptr);
-
- static std::unique_ptr<AccessCase> in(
- VM&, JSCell* owner, AccessType, Structure*,
- const ObjectPropertyConditionSet& = ObjectPropertyConditionSet());
-
- static std::unique_ptr<AccessCase> getLength(VM&, JSCell* owner, AccessType);
- static std::unique_ptr<AccessCase> getIntrinsic(VM&, JSCell* owner, JSFunction* intrinsic, PropertyOffset, Structure*, const ObjectPropertyConditionSet&);
-
- static std::unique_ptr<AccessCase> fromStructureStubInfo(VM&, JSCell* owner, StructureStubInfo&);
-
- ~AccessCase();
-
- AccessType type() const { return m_type; }
- State state() const { return m_state; }
- PropertyOffset offset() const { return m_offset; }
- bool viaProxy() const { return m_rareData ? m_rareData->viaProxy : false; }
-
- Structure* structure() const
- {
- if (m_type == Transition)
- return m_structure->previousID();
- return m_structure.get();
- }
- bool guardedByStructureCheck() const;
-
- Structure* newStructure() const
- {
- ASSERT(m_type == Transition);
- return m_structure.get();
- }
-
- ObjectPropertyConditionSet conditionSet() const { return m_conditionSet; }
- JSFunction* intrinsicFunction() const
- {
- ASSERT(type() == IntrinsicGetter && m_rareData);
- return m_rareData->intrinsicFunction.get();
- }
- Intrinsic intrinsic() const
- {
- return intrinsicFunction()->intrinsic();
- }
-
- DOMJIT::GetterSetter* domJIT() const
- {
- ASSERT(m_rareData);
- return m_rareData->domJIT;
- }
-
- WatchpointSet* additionalSet() const
- {
- return m_rareData ? m_rareData->additionalSet.get() : nullptr;
- }
-
- JSObject* customSlotBase() const
- {
- return m_rareData ? m_rareData->customSlotBase.get() : nullptr;
- }
-
- JSObject* alternateBase() const;
-
- // If you supply the optional vector, this will append the set of cells that this will need to keep alive
- // past the call.
- bool doesCalls(Vector<JSCell*>* cellsToMark = nullptr) const;
-
- bool isGetter() const
- {
- switch (type()) {
- case Getter:
- case CustomValueGetter:
- case CustomAccessorGetter:
- return true;
- default:
- return false;
- }
- }
-
- // This can return null even for a getter/setter, if it hasn't been generated yet. That's
- // actually somewhat likely because of how we do buffering of new cases.
- CallLinkInfo* callLinkInfo() const
- {
- if (!m_rareData)
- return nullptr;
- return m_rareData->callLinkInfo.get();
- }
-
- // Is it still possible for this case to ever be taken? Must call this as a prerequisite for
- // calling generate() and friends. If this returns true, then you can call generate(). If
- // this returns false, then generate() will crash. You must call generate() in the same epoch
- // as when you called couldStillSucceed().
- bool couldStillSucceed() const;
-
- static bool canEmitIntrinsicGetter(JSFunction*, Structure*);
-
- bool canBeReplacedByMegamorphicLoad() const;
-
- // If this method returns true, then it's a good idea to remove 'other' from the access once 'this'
- // is added. This method assumes that in case of contradictions, 'this' represents a newer, and so
- // more useful, truth. This method can be conservative; it will return false when it doubt.
- bool canReplace(const AccessCase& other) const;
-
- void dump(PrintStream& out) const;
-
-private:
- friend class CodeBlock;
- friend class PolymorphicAccess;
-
- AccessCase();
-
- bool visitWeak(VM&) const;
- bool propagateTransitions(SlotVisitor&) const;
-
- // FIXME: This only exists because of how AccessCase puts post-generation things into itself.
- // https://bugs.webkit.org/show_bug.cgi?id=156456
- std::unique_ptr<AccessCase> clone() const;
-
- // Perform any action that must be performed before the end of the epoch in which the case
- // was created. Returns a set of watchpoint sets that will need to be watched.
- Vector<WatchpointSet*, 2> commit(VM&, const Identifier&);
-
- // Fall through on success. Two kinds of failures are supported: fall-through, which means that we
- // should try a different case; and failure, which means that this was the right case but it needs
- // help from the slow path.
- void generateWithGuard(AccessGenerationState&, MacroAssembler::JumpList& fallThrough);
-
- // Fall through on success, add a jump to the failure list on failure.
- void generate(AccessGenerationState&);
-
- void generateImpl(AccessGenerationState&);
- void emitIntrinsicGetter(AccessGenerationState&);
- void emitDOMJITGetter(AccessGenerationState&, GPRReg baseForGetGPR);
-
- AccessType m_type { Load };
- State m_state { Primordial };
- PropertyOffset m_offset { invalidOffset };
-
- // Usually this is the structure that we expect the base object to have. But, this is the *new*
- // structure for a transition and we rely on the fact that it has a strong reference to the old
- // structure. For proxies, this is the structure of the object behind the proxy.
- WriteBarrier<Structure> m_structure;
-
- ObjectPropertyConditionSet m_conditionSet;
-
- class RareData {
- WTF_MAKE_FAST_ALLOCATED;
- public:
- RareData()
- : viaProxy(false)
- , domJIT(nullptr)
- {
- customAccessor.opaque = nullptr;
- }
-
- bool viaProxy;
- RefPtr<WatchpointSet> additionalSet;
- // FIXME: This should probably live in the stub routine object.
- // https://bugs.webkit.org/show_bug.cgi?id=156456
- std::unique_ptr<CallLinkInfo> callLinkInfo;
- union {
- PropertySlot::GetValueFunc getter;
- PutPropertySlot::PutValueFunc setter;
- void* opaque;
- } customAccessor;
- WriteBarrier<JSObject> customSlotBase;
- WriteBarrier<JSFunction> intrinsicFunction;
- DOMJIT::GetterSetter* domJIT;
- };
-
- std::unique_ptr<RareData> m_rareData;
-};
-
class AccessGenerationResult {
public:
enum Kind {
--- /dev/null
+/*
+ * Copyright (C) 2017 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+ * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "config.h"
+#include "ProxyableAccessCase.h"
+
+#if ENABLE(JIT)
+
+namespace JSC {
+
+ProxyableAccessCase::ProxyableAccessCase(VM& vm, JSCell* owner, AccessType accessType, PropertyOffset offset, Structure* structure, const ObjectPropertyConditionSet& conditionSet, bool viaProxy, WatchpointSet* additionalSet)
+ : Base(vm, owner, accessType, offset, structure, conditionSet)
+ , m_viaProxy(viaProxy)
+ , m_additionalSet(additionalSet)
+{
+}
+
+std::unique_ptr<AccessCase> ProxyableAccessCase::create(VM& vm, JSCell* owner, AccessType type, PropertyOffset offset, Structure* structure, const ObjectPropertyConditionSet& conditionSet, bool viaProxy, WatchpointSet* additionalSet)
+{
+ ASSERT(type == Load || type == Miss || type == GetGetter);
+ return std::unique_ptr<AccessCase>(new ProxyableAccessCase(vm, owner, type, offset, structure, conditionSet, viaProxy, additionalSet));
+}
+
+ProxyableAccessCase::~ProxyableAccessCase()
+{
+}
+
+std::unique_ptr<AccessCase> ProxyableAccessCase::clone() const
+{
+ std::unique_ptr<ProxyableAccessCase> result(new ProxyableAccessCase(*this));
+ result->resetState();
+ return WTFMove(result);
+}
+
+void ProxyableAccessCase::dumpImpl(PrintStream& out, CommaPrinter& comma) const
+{
+ Base::dumpImpl(out, comma);
+ out.print(comma, "viaProxy = ", viaProxy());
+ out.print(comma, "additionalSet = ", RawPointer(additionalSet()));
+}
+
+} // namespace JSC
+
+#endif // ENABLE(JIT)
--- /dev/null
+/*
+ * Copyright (C) 2017 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+ * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#pragma once
+
+#if ENABLE(JIT)
+
+#include "AccessCase.h"
+
+namespace JSC {
+
+class ProxyableAccessCase : public AccessCase {
+public:
+ typedef AccessCase Base;
+
+ bool viaProxy() const override { return m_viaProxy; }
+ WatchpointSet* additionalSet() const override { return m_additionalSet.get(); }
+
+ static std::unique_ptr<AccessCase> create(VM&, JSCell*, AccessType, PropertyOffset, Structure*, const ObjectPropertyConditionSet& = ObjectPropertyConditionSet(),
+ bool viaProxy = false, WatchpointSet* additionalSet = nullptr);
+
+ void dumpImpl(PrintStream&, CommaPrinter&) const override;
+ std::unique_ptr<AccessCase> clone() const override;
+
+ ~ProxyableAccessCase();
+
+protected:
+ ProxyableAccessCase(VM&, JSCell*, AccessType, PropertyOffset, Structure*, const ObjectPropertyConditionSet&, bool viaProxy, WatchpointSet* additionalSet);
+
+private:
+ bool m_viaProxy;
+ RefPtr<WatchpointSet> m_additionalSet;
+};
+
+} // namespace JSC
+
+#endif // ENABLE(JIT)
#include "CodeBlock.h"
#include "ComplexGetStatus.h"
+#include "GetterSetterAccessCase.h"
#include "LLIntData.h"
#include "LowLevelInterpreter.h"
#include "JSCInlines.h"
case ComplexGetStatus::Inlineable: {
std::unique_ptr<CallLinkStatus> callLinkStatus =
std::make_unique<CallLinkStatus>();
- if (CallLinkInfo* callLinkInfo = access.callLinkInfo()) {
+ if (CallLinkInfo* callLinkInfo = access.as<GetterSetterAccessCase>().callLinkInfo()) {
*callLinkStatus = CallLinkStatus::computeFor(
locker, profiledBlock, *callLinkInfo, callExitSiteData);
}
}
switch (accessType) {
- case AccessType::GetPure:
- resetGetByID(codeBlock, *this, GetByIDKind::Pure);
+ case AccessType::TryGet:
+ resetGetByID(codeBlock, *this, GetByIDKind::Try);
break;
case AccessType::Get:
resetGetByID(codeBlock, *this, GetByIDKind::Normal);
enum class AccessType : int8_t {
Get,
- GetPure,
+ TryGet,
Put,
In
};
m_inlineStackTop->m_profiledBlock, m_dfgCodeBlock,
m_inlineStackTop->m_stubInfos, m_dfgStubInfos,
currentCodeOrigin(), uid);
- AccessType type = op_try_get_by_id == opcodeID ? AccessType::GetPure : AccessType::Get;
+ AccessType type = op_try_get_by_id == opcodeID ? AccessType::TryGet : AccessType::Get;
unsigned opcodeLength = opcodeID == op_try_get_by_id ? OPCODE_LENGTH(op_try_get_by_id) : OPCODE_LENGTH(op_get_by_id);
base.use();
- cachedGetById(node->origin.semantic, baseRegs, resultRegs, node->identifierNumber(), JITCompiler::Jump(), NeedToSpill, AccessType::GetPure);
+ cachedGetById(node->origin.semantic, baseRegs, resultRegs, node->identifierNumber(), JITCompiler::Jump(), NeedToSpill, AccessType::TryGet);
jsValueResult(resultRegs, node, DataFormatJS, UseChildrenCalledExplicitly);
break;
JITCompiler::Jump notCell = m_jit.branchIfNotCell(baseRegs);
- cachedGetById(node->origin.semantic, baseRegs, resultRegs, node->identifierNumber(), notCell, NeedToSpill, AccessType::GetPure);
+ cachedGetById(node->origin.semantic, baseRegs, resultRegs, node->identifierNumber(), notCell, NeedToSpill, AccessType::TryGet);
jsValueResult(resultRegs, node, DataFormatJS, UseChildrenCalledExplicitly);
break;
compilePutStructure();
break;
case TryGetById:
- compileGetById(AccessType::GetPure);
+ compileGetById(AccessType::TryGet);
break;
case GetById:
case GetByIdFlush:
void compileGetById(AccessType type)
{
- ASSERT(type == AccessType::Get || type == AccessType::GetPure);
+ ASSERT(type == AccessType::Get || type == AccessType::TryGet);
switch (m_node->child1().useKind()) {
case CellUse: {
setJSValue(getById(lowCell(m_node->child1()), type));
#include "CCallHelpers.h"
#include "CallFrame.h"
#include "CodeBlock.h"
+#include "IntrinsicGetterAccessCase.h"
#include "JSArrayBufferView.h"
#include "JSCJSValueInlines.h"
#include "JSCellInlines.h"
typedef CCallHelpers::TrustedImm64 TrustedImm64;
typedef CCallHelpers::Imm64 Imm64;
-bool AccessCase::canEmitIntrinsicGetter(JSFunction* getter, Structure* structure)
+bool IntrinsicGetterAccessCase::canEmitIntrinsicGetter(JSFunction* getter, Structure* structure)
{
switch (getter->intrinsic()) {
RELEASE_ASSERT_NOT_REACHED();
}
-void AccessCase::emitIntrinsicGetter(AccessGenerationState& state)
+void IntrinsicGetterAccessCase::emitIntrinsicGetter(AccessGenerationState& state)
{
CCallHelpers& jit = *state.jit;
JSValueRegs valueRegs = state.valueRegs;
RETURN_IF_EXCEPTION(scope, encodedJSValue());
if (stubInfo->considerCaching(exec->codeBlock(), baseValue.structureOrNull()) && !slot.isTaintedByOpaqueObject() && (slot.isCacheableValue() || slot.isCacheableGetter() || slot.isUnset()))
- repatchGetByID(exec, baseValue, ident, slot, *stubInfo, GetByIDKind::Pure);
+ repatchGetByID(exec, baseValue, ident, slot, *stubInfo, GetByIDKind::Try);
return JSValue::encode(slot.getPureResult());
}
JITGetByIdGenerator gen(
m_codeBlock, CodeOrigin(m_bytecodeOffset), CallSiteIndex(m_bytecodeOffset), RegisterSet::stubUnavailableRegisters(),
- ident->impl(), JSValueRegs(regT0), JSValueRegs(regT0), AccessType::GetPure);
+ ident->impl(), JSValueRegs(regT0), JSValueRegs(regT0), AccessType::TryGet);
gen.generateFastPath(*this);
addSlowCase(gen.slowPathJump());
m_getByIds.append(gen);
JITGetByIdGenerator gen(
m_codeBlock, CodeOrigin(m_bytecodeOffset), CallSiteIndex(currentInstruction), RegisterSet::stubUnavailableRegisters(),
- ident->impl(), JSValueRegs::payloadOnly(regT0), JSValueRegs(regT1, regT0), AccessType::GetPure);
+ ident->impl(), JSValueRegs::payloadOnly(regT0), JSValueRegs(regT1, regT0), AccessType::TryGet);
gen.generateFastPath(*this);
addSlowCase(gen.slowPathJump());
m_getByIds.append(gen);
#include "FunctionCodeBlock.h"
#include "GCAwareJITStubRoutine.h"
#include "GetterSetter.h"
+#include "GetterSetterAccessCase.h"
#include "ICStats.h"
#include "InlineAccess.h"
+#include "IntrinsicGetterAccessCase.h"
#include "JIT.h"
#include "JITInlines.h"
#include "JSCInlines.h"
}
}
- newCase = AccessCase::getLength(vm, codeBlock, AccessCase::ArrayLength);
+ newCase = AccessCase::create(vm, codeBlock, AccessCase::ArrayLength);
} else if (isJSString(baseValue))
- newCase = AccessCase::getLength(vm, codeBlock, AccessCase::StringLength);
+ newCase = AccessCase::create(vm, codeBlock, AccessCase::StringLength);
else if (DirectArguments* arguments = jsDynamicCast<DirectArguments*>(vm, baseValue)) {
// If there were overrides, then we can handle this as a normal property load! Guarding
// this with such a check enables us to add an IC case for that load if needed.
if (!arguments->overrodeThings())
- newCase = AccessCase::getLength(vm, codeBlock, AccessCase::DirectArgumentsLength);
+ newCase = AccessCase::create(vm, codeBlock, AccessCase::DirectArgumentsLength);
} else if (ScopedArguments* arguments = jsDynamicCast<ScopedArguments*>(vm, baseValue)) {
// Ditto.
if (!arguments->overrodeThings())
- newCase = AccessCase::getLength(vm, codeBlock, AccessCase::ScopedArgumentsLength);
+ newCase = AccessCase::create(vm, codeBlock, AccessCase::ScopedArgumentsLength);
}
}
if (slot.isCacheableCustom() && slot.domJIT())
domJIT = slot.domJIT();
- if (kind == GetByIDKind::Pure) {
+ if (kind == GetByIDKind::Try) {
AccessCase::AccessType type;
if (slot.isCacheableValue())
type = AccessCase::Load;
else
RELEASE_ASSERT_NOT_REACHED();
- newCase = AccessCase::tryGet(vm, codeBlock, type, offset, structure, conditionSet, loadTargetFromProxy, slot.watchpointSet());
- } else if (!loadTargetFromProxy && getter && AccessCase::canEmitIntrinsicGetter(getter, structure))
- newCase = AccessCase::getIntrinsic(vm, codeBlock, getter, slot.cachedOffset(), structure, conditionSet);
+ newCase = ProxyableAccessCase::create(vm, codeBlock, type, offset, structure, conditionSet, loadTargetFromProxy, slot.watchpointSet());
+ } else if (!loadTargetFromProxy && getter && IntrinsicGetterAccessCase::canEmitIntrinsicGetter(getter, structure))
+ newCase = IntrinsicGetterAccessCase::create(vm, codeBlock, slot.cachedOffset(), structure, conditionSet, getter);
else {
- AccessCase::AccessType type;
- if (slot.isCacheableValue())
- type = AccessCase::Load;
- else if (slot.isUnset())
- type = AccessCase::Miss;
- else if (slot.isCacheableGetter())
- type = AccessCase::Getter;
- else if (slot.attributes() & CustomAccessor)
- type = AccessCase::CustomAccessorGetter;
- else
- type = AccessCase::CustomValueGetter;
-
- newCase = AccessCase::get(
- vm, codeBlock, type, offset, structure, conditionSet, loadTargetFromProxy,
- slot.watchpointSet(), slot.isCacheableCustom() ? slot.customGetter() : nullptr,
- slot.isCacheableCustom() ? slot.slotBase() : nullptr,
- domJIT);
+ if (slot.isCacheableValue() || slot.isUnset()) {
+ newCase = ProxyableAccessCase::create(vm, codeBlock, slot.isUnset() ? AccessCase::Miss : AccessCase::Load,
+ offset, structure, conditionSet, loadTargetFromProxy, slot.watchpointSet());
+ } else {
+ AccessCase::AccessType type;
+ if (slot.isCacheableGetter())
+ type = AccessCase::Getter;
+ else if (slot.attributes() & CustomAccessor)
+ type = AccessCase::CustomAccessorGetter;
+ else
+ type = AccessCase::CustomValueGetter;
+
+ newCase = GetterSetterAccessCase::create(
+ vm, codeBlock, type, offset, structure, conditionSet, loadTargetFromProxy,
+ slot.watchpointSet(), slot.isCacheableCustom() ? slot.customGetter() : nullptr,
+ slot.isCacheableCustom() ? slot.slotBase() : nullptr,
+ domJIT);
+ }
}
}
}
}
- newCase = AccessCase::replace(vm, codeBlock, structure, slot.cachedOffset());
+ newCase = AccessCase::create(vm, codeBlock, AccessCase::Replace, slot.cachedOffset(), structure);
} else {
ASSERT(slot.type() == PutPropertySlot::NewProperty);
return GiveUpOnCache;
}
- newCase = AccessCase::transition(vm, codeBlock, structure, newStructure, offset, conditionSet);
+ newCase = AccessCase::create(vm, codeBlock, offset, structure, newStructure, conditionSet);
}
} else if (slot.isCacheableCustom() || slot.isCacheableSetter()) {
if (slot.isCacheableCustom()) {
return GiveUpOnCache;
}
- newCase = AccessCase::setter(
+ newCase = GetterSetterAccessCase::create(
vm, codeBlock, slot.isCustomAccessor() ? AccessCase::CustomAccessorSetter : AccessCase::CustomValueSetter, structure, invalidOffset, conditionSet,
slot.customSetter(), slot.base());
} else {
} else
offset = slot.cachedOffset();
- newCase = AccessCase::setter(
+ newCase = GetterSetterAccessCase::create(
vm, codeBlock, AccessCase::Setter, structure, offset, conditionSet);
}
}
LOG_IC((ICEvent::InAddAccessCase, structure->classInfo(), ident));
- std::unique_ptr<AccessCase> newCase = AccessCase::in(
- vm, codeBlock, wasFound ? AccessCase::InHit : AccessCase::InMiss, structure, conditionSet);
+ std::unique_ptr<AccessCase> newCase = AccessCase::create(
+ vm, codeBlock, wasFound ? AccessCase::InHit : AccessCase::InMiss, invalidOffset, structure, conditionSet);
AccessGenerationResult result = stubInfo.addAccessCase(codeBlock, ident, WTFMove(newCase));
enum class GetByIDKind {
Normal,
- Pure
+ Try
};
void repatchGetByID(ExecState*, JSValue, const Identifier&, const PropertySlot&, StructureStubInfo&, GetByIDKind);
v(bool, clobberAllRegsInFTLICSlowPath, !ASSERT_DISABLED, Normal, nullptr) \
v(bool, useAccessInlining, true, Normal, nullptr) \
v(unsigned, maxAccessVariantListSize, 8, Normal, nullptr) \
- v(unsigned, megamorphicLoadCost, 999, Normal, nullptr) /* This used to be 10, but we're temporarily testing what happens when the feature is disabled. */\
v(bool, usePolyvariantDevirtualization, true, Normal, nullptr) \
v(bool, usePolymorphicAccessInlining, true, Normal, nullptr) \
v(bool, usePolymorphicCallInlining, true, Normal, nullptr) \