Source/JavaScriptCore/runtime/CommonSlowPaths.h
/*
 * Copyright (C) 2011-2019 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#pragma once

#include "BytecodeStructs.h"
#include "CodeBlock.h"
#include "CodeSpecializationKind.h"
#include "DirectArguments.h"
#include "ExceptionHelpers.h"
#include "FunctionCodeBlock.h"
#include "JSImmutableButterfly.h"
#include "ScopedArguments.h"
#include "SlowPathReturnType.h"
#include "StackAlignment.h"
#include "VMInlines.h"
#include <wtf/StdLibExtras.h>

namespace JSC {

// The purpose of this namespace is to include slow paths that are shared
// between the interpreter and baseline JIT. They are written to be agnostic
// with respect to the slow-path calling convention, but they do rely on the
// JS code being executed more-or-less directly from bytecode (so the call
// frame layout is unmodified, making it potentially awkward to use these
// from any optimizing JIT, like the DFG).

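// Roughly speaking, each slow path declared at the bottom of this file is a
// C-callable entry point of the shape
//
//     SlowPathReturnType slow_path_foo(ExecState* exec, const Instruction* pc);
//
// (see SLOW_PATH_DECL below; slow_path_foo here stands for any of the declared
// names). The interpreter and baseline JIT call into these when their inline
// fast paths cannot handle a case.
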
namespace CommonSlowPaths {

ALWAYS_INLINE int numberOfExtraSlots(int argumentCountIncludingThis)
{
    int frameSize = argumentCountIncludingThis + CallFrame::headerSizeInRegisters;
    int alignedFrameSize = WTF::roundUpToMultipleOf(stackAlignmentRegisters(), frameSize);
    return alignedFrameSize - frameSize;
}
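// For instance, with the usual 64-bit values (CallFrame::headerSizeInRegisters == 5,
// stackAlignmentRegisters() == 2), an argument count of 2 gives frameSize == 7,
// which rounds up to 8, so one extra slot is needed; an argument count of 3 gives
// frameSize == 8, which is already aligned, so no extra slots are needed.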

ALWAYS_INLINE int numberOfStackPaddingSlots(CodeBlock* codeBlock, int argumentCountIncludingThis)
{
    if (argumentCountIncludingThis >= codeBlock->numParameters())
        return 0;
    int alignedFrameSize = WTF::roundUpToMultipleOf(stackAlignmentRegisters(), argumentCountIncludingThis + CallFrame::headerSizeInRegisters);
    int alignedFrameSizeForParameters = WTF::roundUpToMultipleOf(stackAlignmentRegisters(), codeBlock->numParameters() + CallFrame::headerSizeInRegisters);
    return alignedFrameSizeForParameters - alignedFrameSize;
}

ALWAYS_INLINE int numberOfStackPaddingSlotsWithExtraSlots(CodeBlock* codeBlock, int argumentCountIncludingThis)
{
    if (argumentCountIncludingThis >= codeBlock->numParameters())
        return 0;
    return numberOfStackPaddingSlots(codeBlock, argumentCountIncludingThis) + numberOfExtraSlots(argumentCountIncludingThis);
}
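// As a sketch, using the same 64-bit constants as above: a callee whose
// numParameters() is 5, called with argumentCountIncludingThis == 3, needs
// roundUpToMultipleOf(2, 10) - roundUpToMultipleOf(2, 8) == 2 padding slots and
// numberOfExtraSlots(3) == 0 extra slots, so
// numberOfStackPaddingSlotsWithExtraSlots() returns 2.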

ALWAYS_INLINE CodeBlock* codeBlockFromCallFrameCallee(ExecState* exec, CodeSpecializationKind kind)
{
    JSFunction* callee = jsCast<JSFunction*>(exec->jsCallee());
    ASSERT(!callee->isHostFunction());
    return callee->jsExecutable()->codeBlockFor(kind);
}

ALWAYS_INLINE int arityCheckFor(ExecState* exec, VM& vm, CodeSpecializationKind kind)
{
    CodeBlock* newCodeBlock = codeBlockFromCallFrameCallee(exec, kind);
    ASSERT(exec->argumentCountIncludingThis() < static_cast<unsigned>(newCodeBlock->numParameters()));
    int padding = numberOfStackPaddingSlotsWithExtraSlots(newCodeBlock, exec->argumentCountIncludingThis());

    Register* newStack = exec->registers() - WTF::roundUpToMultipleOf(stackAlignmentRegisters(), padding);

    if (UNLIKELY(!vm.ensureStackCapacityFor(newStack)))
        return -1;
    return padding;
}
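// In other words, arityCheckFor() computes how many padding slots the arity fixup
// machinery needs so that the missing arguments can be filled in up to
// numParameters() while the stack stays aligned; a return value of -1 means the
// stack could not be grown by that (rounded-up) amount, which callers typically
// report as a stack overflow.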

inline bool opInByVal(ExecState* exec, JSValue baseVal, JSValue propName, ArrayProfile* arrayProfile = nullptr)
{
    VM& vm = exec->vm();
    auto scope = DECLARE_THROW_SCOPE(vm);
    if (!baseVal.isObject()) {
        throwException(exec, scope, createInvalidInParameterError(exec, baseVal));
        return false;
    }

    JSObject* baseObj = asObject(baseVal);
    if (arrayProfile)
        arrayProfile->observeStructure(baseObj->structure(vm));

    uint32_t i;
    if (propName.getUInt32(i)) {
        if (arrayProfile)
            arrayProfile->observeIndexedRead(vm, baseObj, i);
        RELEASE_AND_RETURN(scope, baseObj->hasProperty(exec, i));
    }

    auto property = propName.toPropertyKey(exec);
    RETURN_IF_EXCEPTION(scope, false);
    RELEASE_AND_RETURN(scope, baseObj->hasProperty(exec, property));
}
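// At the JS level this implements the semantics of a computed `in` expression,
// roughly:
//
//     1 in [10, 20, 30]      // true, handled by the indexed hasProperty() branch
//     "length" in [10, 20]   // true, handled by the property-key branch
//     "x" in 42              // throws a TypeError: the right-hand side is not an object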

inline void tryCachePutToScopeGlobal(
    ExecState* exec, CodeBlock* codeBlock, OpPutToScope& bytecode, JSObject* scope,
    PutPropertySlot& slot, const Identifier& ident)
{
    // Covers implicit globals. Since they don't exist until they first execute, we didn't know how to cache them at compile time.
    auto& metadata = bytecode.metadata(exec);
    ResolveType resolveType = metadata.m_getPutInfo.resolveType();

    switch (resolveType) {
    case UnresolvedProperty:
    case UnresolvedPropertyWithVarInjectionChecks: {
        if (scope->isGlobalObject()) {
            ResolveType newResolveType = needsVarInjectionChecks(resolveType) ? GlobalPropertyWithVarInjectionChecks : GlobalProperty;
            resolveType = newResolveType; // Allow the caching mechanism below to kick in.
            ConcurrentJSLocker locker(codeBlock->m_lock);
            metadata.m_getPutInfo = GetPutInfo(metadata.m_getPutInfo.resolveMode(), newResolveType, metadata.m_getPutInfo.initializationMode());
            break;
        }
        FALLTHROUGH;
    }
    case GlobalProperty:
    case GlobalPropertyWithVarInjectionChecks: {
        // The global lexical binding epoch has changed. Update op_put_to_scope from GlobalProperty to GlobalLexicalVar.
        if (scope->isGlobalLexicalEnvironment()) {
            JSGlobalLexicalEnvironment* globalLexicalEnvironment = jsCast<JSGlobalLexicalEnvironment*>(scope);
            ResolveType newResolveType = needsVarInjectionChecks(resolveType) ? GlobalLexicalVarWithVarInjectionChecks : GlobalLexicalVar;
            metadata.m_getPutInfo = GetPutInfo(metadata.m_getPutInfo.resolveMode(), newResolveType, metadata.m_getPutInfo.initializationMode());
            SymbolTableEntry entry = globalLexicalEnvironment->symbolTable()->get(ident.impl());
            ASSERT(!entry.isNull());
            ConcurrentJSLocker locker(codeBlock->m_lock);
            metadata.m_watchpointSet = entry.watchpointSet();
            metadata.m_operand = reinterpret_cast<uintptr_t>(globalLexicalEnvironment->variableAt(entry.scopeOffset()).slot());
            return;
        }
        break;
    }
    default:
        return;
    }

    if (resolveType == GlobalProperty || resolveType == GlobalPropertyWithVarInjectionChecks) {
        VM& vm = exec->vm();
        JSGlobalObject* globalObject = codeBlock->globalObject();
        ASSERT(globalObject == scope || globalObject->varInjectionWatchpoint()->hasBeenInvalidated());
        if (!slot.isCacheablePut()
            || slot.base() != scope
            || scope != globalObject
            || !scope->structure(vm)->propertyAccessesAreCacheable())
            return;

        if (slot.type() == PutPropertySlot::NewProperty) {
            // Don't cache if we've done a transition. We want to detect the first replace so that we
            // can invalidate the watchpoint.
            return;
        }

        scope->structure(vm)->didCachePropertyReplacement(vm, slot.cachedOffset());

        ConcurrentJSLocker locker(codeBlock->m_lock);
        metadata.m_structure.set(vm, codeBlock, scope->structure(vm));
        metadata.m_operand = slot.cachedOffset();
    }
}
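// Once the metadata above is filled in, later executions of this op_put_to_scope can
// (depending on the tier) store through the cached m_operand / m_structure /
// m_watchpointSet directly instead of re-resolving the variable, which is the whole
// point of this helper.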

inline void tryCacheGetFromScopeGlobal(
    ExecState* exec, VM& vm, OpGetFromScope& bytecode, JSObject* scope, PropertySlot& slot, const Identifier& ident)
{
    auto& metadata = bytecode.metadata(exec);
    ResolveType resolveType = metadata.m_getPutInfo.resolveType();

    switch (resolveType) {
    case UnresolvedProperty:
    case UnresolvedPropertyWithVarInjectionChecks: {
        if (scope->isGlobalObject()) {
            ResolveType newResolveType = needsVarInjectionChecks(resolveType) ? GlobalPropertyWithVarInjectionChecks : GlobalProperty;
            resolveType = newResolveType; // Allow the caching mechanism below to kick in.
            ConcurrentJSLocker locker(exec->codeBlock()->m_lock);
            metadata.m_getPutInfo = GetPutInfo(metadata.m_getPutInfo.resolveMode(), newResolveType, metadata.m_getPutInfo.initializationMode());
            break;
        }
        FALLTHROUGH;
    }
    case GlobalProperty:
    case GlobalPropertyWithVarInjectionChecks: {
        // The global lexical binding epoch has changed. Update op_get_from_scope from GlobalProperty to GlobalLexicalVar.
        if (scope->isGlobalLexicalEnvironment()) {
            JSGlobalLexicalEnvironment* globalLexicalEnvironment = jsCast<JSGlobalLexicalEnvironment*>(scope);
            ResolveType newResolveType = needsVarInjectionChecks(resolveType) ? GlobalLexicalVarWithVarInjectionChecks : GlobalLexicalVar;
            SymbolTableEntry entry = globalLexicalEnvironment->symbolTable()->get(ident.impl());
            ASSERT(!entry.isNull());
            ConcurrentJSLocker locker(exec->codeBlock()->m_lock);
            metadata.m_getPutInfo = GetPutInfo(metadata.m_getPutInfo.resolveMode(), newResolveType, metadata.m_getPutInfo.initializationMode());
            metadata.m_watchpointSet = entry.watchpointSet();
            metadata.m_operand = reinterpret_cast<uintptr_t>(globalLexicalEnvironment->variableAt(entry.scopeOffset()).slot());
            return;
        }
        break;
    }
    default:
        return;
    }

    // Covers implicit globals. Since they don't exist until they first execute, we didn't know how to cache them at compile time.
    if (resolveType == GlobalProperty || resolveType == GlobalPropertyWithVarInjectionChecks) {
        CodeBlock* codeBlock = exec->codeBlock();
        JSGlobalObject* globalObject = codeBlock->globalObject();
        ASSERT(scope == globalObject || globalObject->varInjectionWatchpoint()->hasBeenInvalidated());
        if (slot.isCacheableValue() && slot.slotBase() == scope && scope == globalObject && scope->structure(vm)->propertyAccessesAreCacheable()) {
            Structure* structure = scope->structure(vm);
            {
                ConcurrentJSLocker locker(codeBlock->m_lock);
                metadata.m_structure.set(vm, codeBlock, structure);
                metadata.m_operand = slot.cachedOffset();
            }
            structure->startWatchingPropertyForReplacements(vm, slot.cachedOffset());
        }
    }
}
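// This mirrors tryCachePutToScopeGlobal() above. The main difference is on the
// GlobalProperty path: the read side calls startWatchingPropertyForReplacements(),
// so the cached slot can be invalidated if the property is later replaced, whereas
// the write side calls didCachePropertyReplacement().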

inline bool canAccessArgumentIndexQuickly(JSObject& object, uint32_t index)
{
    switch (object.type()) {
    case DirectArgumentsType: {
        DirectArguments* directArguments = jsCast<DirectArguments*>(&object);
        if (directArguments->isMappedArgumentInDFG(index))
            return true;
        break;
    }
    case ScopedArgumentsType: {
        ScopedArguments* scopedArguments = jsCast<ScopedArguments*>(&object);
        if (scopedArguments->isMappedArgumentInDFG(index))
            return true;
        break;
    }
    default:
        break;
    }
    return false;
}
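// Loosely, this asks "is arguments[index] still an ordinary mapped slot?". It only
// returns true for a DirectArguments or ScopedArguments object whose index-th
// argument is still mapped (in range, not deleted or otherwise overridden), so the
// caller can read it without consulting getters or the prototype chain.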

static ALWAYS_INLINE void putDirectWithReify(VM& vm, ExecState* exec, JSObject* baseObject, PropertyName propertyName, JSValue value, PutPropertySlot& slot, Structure** result = nullptr)
{
    auto scope = DECLARE_THROW_SCOPE(vm);
    if (baseObject->inherits<JSFunction>(vm)) {
        jsCast<JSFunction*>(baseObject)->reifyLazyPropertyIfNeeded(vm, exec, propertyName);
        RETURN_IF_EXCEPTION(scope, void());
    }
    if (result)
        *result = baseObject->structure(vm);
    scope.release();
    baseObject->putDirect(vm, propertyName, value, slot);
}

static ALWAYS_INLINE void putDirectAccessorWithReify(VM& vm, ExecState* exec, JSObject* baseObject, PropertyName propertyName, GetterSetter* accessor, unsigned attribute)
{
    auto scope = DECLARE_THROW_SCOPE(vm);
    if (baseObject->inherits<JSFunction>(vm)) {
        jsCast<JSFunction*>(baseObject)->reifyLazyPropertyIfNeeded(vm, exec, propertyName);
        RETURN_IF_EXCEPTION(scope, void());
    }
    scope.release();
    baseObject->putDirectAccessor(exec, propertyName, accessor, attribute);
}
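// The reification step matters because JSFunction materializes some of its own
// properties (for example "name" and "length") lazily; defining a property with the
// same name directly, without reifying first, could otherwise miss or clobber the
// lazily-created slot. The "WithReify" helpers make sure the lazy property exists
// before the direct put happens.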

inline JSArray* allocateNewArrayBuffer(VM& vm, Structure* structure, JSImmutableButterfly* immutableButterfly)
{
    JSGlobalObject* globalObject = structure->globalObject();
    Structure* originalStructure = globalObject->originalArrayStructureForIndexingType(immutableButterfly->indexingMode());
    ASSERT(originalStructure->indexingMode() == immutableButterfly->indexingMode());
    ASSERT(isCopyOnWrite(immutableButterfly->indexingMode()));
    ASSERT(!structure->outOfLineCapacity());

    JSArray* result = JSArray::createWithButterfly(vm, nullptr, originalStructure, immutableButterfly->toButterfly());
    // FIXME: This works but it's slow. If we cared enough about the perf when having a bad time then we could fix it.
    if (UNLIKELY(originalStructure != structure)) {
        ASSERT(hasSlowPutArrayStorage(structure->indexingMode()));
        ASSERT(globalObject->isHavingABadTime());

        result->switchToSlowPutArrayStorage(vm);
        ASSERT(result->butterfly() != immutableButterfly->toButterfly());
        ASSERT(!result->butterfly()->arrayStorage()->m_sparseMap.get());
        ASSERT(result->structureID() == structure->id());
    }

    return result;
}
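// This backs op_new_array_buffer: an array literal with constant contents, e.g.
//
//     var xs = [1, 2, 3];
//
// can be allocated by pointing the new JSArray at a shared, copy-on-write
// JSImmutableButterfly rather than copying the elements. If the global object is
// "having a bad time", the array is instead downgraded to SlowPutArrayStorage as
// handled above.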

} // namespace CommonSlowPaths

class ExecState;
struct Instruction;

#define SLOW_PATH

#define SLOW_PATH_DECL(name) \
extern "C" SlowPathReturnType SLOW_PATH name(ExecState* exec, const Instruction* pc)

#define SLOW_PATH_HIDDEN_DECL(name) \
SLOW_PATH_DECL(name) WTF_INTERNAL
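// As a sketch of what these macros produce: since SLOW_PATH currently expands to
// nothing, SLOW_PATH_HIDDEN_DECL(slow_path_add); becomes roughly
//
//     extern "C" SlowPathReturnType slow_path_add(ExecState* exec, const Instruction* pc) WTF_INTERNAL;
//
// i.e. a C-linkage declaration, with WTF_INTERNAL typically supplying hidden
// visibility.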

SLOW_PATH_HIDDEN_DECL(slow_path_call_arityCheck);
SLOW_PATH_HIDDEN_DECL(slow_path_construct_arityCheck);
SLOW_PATH_HIDDEN_DECL(slow_path_create_direct_arguments);
SLOW_PATH_HIDDEN_DECL(slow_path_create_scoped_arguments);
SLOW_PATH_HIDDEN_DECL(slow_path_create_cloned_arguments);
SLOW_PATH_HIDDEN_DECL(slow_path_create_this);
SLOW_PATH_HIDDEN_DECL(slow_path_enter);
SLOW_PATH_HIDDEN_DECL(slow_path_get_callee);
SLOW_PATH_HIDDEN_DECL(slow_path_to_this);
SLOW_PATH_HIDDEN_DECL(slow_path_throw_tdz_error);
SLOW_PATH_HIDDEN_DECL(slow_path_check_tdz);
SLOW_PATH_HIDDEN_DECL(slow_path_throw_strict_mode_readonly_property_write_error);
SLOW_PATH_HIDDEN_DECL(slow_path_not);
SLOW_PATH_HIDDEN_DECL(slow_path_eq);
SLOW_PATH_HIDDEN_DECL(slow_path_neq);
SLOW_PATH_HIDDEN_DECL(slow_path_stricteq);
SLOW_PATH_HIDDEN_DECL(slow_path_nstricteq);
SLOW_PATH_HIDDEN_DECL(slow_path_less);
SLOW_PATH_HIDDEN_DECL(slow_path_lesseq);
SLOW_PATH_HIDDEN_DECL(slow_path_greater);
SLOW_PATH_HIDDEN_DECL(slow_path_greatereq);
SLOW_PATH_HIDDEN_DECL(slow_path_inc);
SLOW_PATH_HIDDEN_DECL(slow_path_dec);
SLOW_PATH_HIDDEN_DECL(slow_path_to_number);
SLOW_PATH_HIDDEN_DECL(slow_path_to_string);
SLOW_PATH_HIDDEN_DECL(slow_path_to_object);
SLOW_PATH_HIDDEN_DECL(slow_path_negate);
SLOW_PATH_HIDDEN_DECL(slow_path_add);
SLOW_PATH_HIDDEN_DECL(slow_path_mul);
SLOW_PATH_HIDDEN_DECL(slow_path_sub);
SLOW_PATH_HIDDEN_DECL(slow_path_div);
SLOW_PATH_HIDDEN_DECL(slow_path_mod);
SLOW_PATH_HIDDEN_DECL(slow_path_pow);
SLOW_PATH_HIDDEN_DECL(slow_path_lshift);
SLOW_PATH_HIDDEN_DECL(slow_path_rshift);
SLOW_PATH_HIDDEN_DECL(slow_path_urshift);
SLOW_PATH_HIDDEN_DECL(slow_path_unsigned);
SLOW_PATH_HIDDEN_DECL(slow_path_bitnot);
SLOW_PATH_HIDDEN_DECL(slow_path_bitand);
SLOW_PATH_HIDDEN_DECL(slow_path_bitor);
SLOW_PATH_HIDDEN_DECL(slow_path_bitxor);
SLOW_PATH_HIDDEN_DECL(slow_path_typeof);
SLOW_PATH_HIDDEN_DECL(slow_path_is_object);
SLOW_PATH_HIDDEN_DECL(slow_path_is_object_or_null);
SLOW_PATH_HIDDEN_DECL(slow_path_is_function);
SLOW_PATH_HIDDEN_DECL(slow_path_in_by_id);
SLOW_PATH_HIDDEN_DECL(slow_path_in_by_val);
SLOW_PATH_HIDDEN_DECL(slow_path_del_by_val);
SLOW_PATH_HIDDEN_DECL(slow_path_strcat);
SLOW_PATH_HIDDEN_DECL(slow_path_to_primitive);
SLOW_PATH_HIDDEN_DECL(slow_path_get_enumerable_length);
SLOW_PATH_HIDDEN_DECL(slow_path_has_generic_property);
SLOW_PATH_HIDDEN_DECL(slow_path_has_structure_property);
SLOW_PATH_HIDDEN_DECL(slow_path_has_indexed_property);
SLOW_PATH_HIDDEN_DECL(slow_path_get_direct_pname);
SLOW_PATH_HIDDEN_DECL(slow_path_get_property_enumerator);
SLOW_PATH_HIDDEN_DECL(slow_path_enumerator_structure_pname);
SLOW_PATH_HIDDEN_DECL(slow_path_enumerator_generic_pname);
SLOW_PATH_HIDDEN_DECL(slow_path_to_index_string);
SLOW_PATH_HIDDEN_DECL(slow_path_profile_type_clear_log);
SLOW_PATH_HIDDEN_DECL(slow_path_unreachable);
SLOW_PATH_HIDDEN_DECL(slow_path_create_lexical_environment);
SLOW_PATH_HIDDEN_DECL(slow_path_push_with_scope);
SLOW_PATH_HIDDEN_DECL(slow_path_resolve_scope);
SLOW_PATH_HIDDEN_DECL(slow_path_is_var_scope);
SLOW_PATH_HIDDEN_DECL(slow_path_resolve_scope_for_hoisting_func_decl_in_eval);
SLOW_PATH_HIDDEN_DECL(slow_path_create_rest);
SLOW_PATH_HIDDEN_DECL(slow_path_get_by_id_with_this);
SLOW_PATH_HIDDEN_DECL(slow_path_get_by_val_with_this);
SLOW_PATH_HIDDEN_DECL(slow_path_put_by_id_with_this);
SLOW_PATH_HIDDEN_DECL(slow_path_put_by_val_with_this);
SLOW_PATH_HIDDEN_DECL(slow_path_define_data_property);
SLOW_PATH_HIDDEN_DECL(slow_path_define_accessor_property);
SLOW_PATH_HIDDEN_DECL(slow_path_throw_static_error);
SLOW_PATH_HIDDEN_DECL(slow_path_new_array_with_spread);
SLOW_PATH_HIDDEN_DECL(slow_path_new_array_buffer);
SLOW_PATH_HIDDEN_DECL(slow_path_spread);

using SlowPathFunction = SlowPathReturnType(SLOW_PATH *)(ExecState*, const Instruction*);
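// For illustration: any of the declarations above can be stored through this
// typedef, e.g.
//
//     SlowPathFunction f = slow_path_add;
//
// letting dispatch code refer to these entry points generically.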

} // namespace JSC