/*
 * Copyright (C) 2013-2016 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
#include "config.h"
#include "JITOperations.h"

#if ENABLE(JIT)

#include "ArrayConstructor.h"
#include "CommonSlowPaths.h"
#include "DFGCompilationMode.h"
#include "DFGDriver.h"
#include "DFGOSREntry.h"
#include "DFGThunks.h"
#include "DFGWorklist.h"
#include "Debugger.h"
#include "DirectArguments.h"
#include "Error.h"
#include "ErrorHandlingScope.h"
#include "ExceptionFuzz.h"
#include "GetterSetter.h"
#include "HostCallReturnValue.h"
#include "ICStats.h"
#include "Interpreter.h"
#include "JITExceptions.h"
#include "JITToDFGDeferredCompilationCallback.h"
#include "JSCInlines.h"
#include "JSGeneratorFunction.h"
#include "JSGlobalObjectFunctions.h"
#include "JSLexicalEnvironment.h"
#include "JSPropertyNameEnumerator.h"
#include "JSStackInlines.h"
#include "JSWithScope.h"
#include "LegacyProfiler.h"
#include "ObjectConstructor.h"
#include "PolymorphicAccess.h"
#include "PropertyName.h"
#include "Repatch.h"
#include "ScopedArguments.h"
#include "ShadowChicken.h"
#include "StructureStubInfo.h"
#include "SuperSampler.h"
#include "TestRunnerUtils.h"
#include "TypeProfilerLog.h"
#include "VMInlines.h"
#include <wtf/InlineASM.h>
#if COMPILER(MSVC)
extern "C" {
    void * _ReturnAddress(void);
    #pragma intrinsic(_ReturnAddress)
}

#define OUR_RETURN_ADDRESS _ReturnAddress()
#else
#define OUR_RETURN_ADDRESS __builtin_return_address(0)
#endif
#if ENABLE(OPCODE_SAMPLING)
#define CTI_SAMPLER vm->interpreter->sampler()
#else
#define CTI_SAMPLER 0
#endif

namespace JSC {
void JIT_OPERATION operationThrowStackOverflowError(ExecState* exec, CodeBlock* codeBlock)
{
    // We pass in our own code block, because the callframe hasn't been populated.
    VM* vm = codeBlock->vm();

    VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
    CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);

    NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
    throwStackOverflowError(callerFrame);
}
#if ENABLE(WEBASSEMBLY)
void JIT_OPERATION operationThrowDivideError(ExecState* exec)
{
    VM* vm = &exec->vm();
    VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
    CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);

    NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
    ErrorHandlingScope errorScope(*vm);
    vm->throwException(callerFrame, createError(callerFrame, ASCIILiteral("Division by zero or division overflow.")));
}

void JIT_OPERATION operationThrowOutOfBoundsAccessError(ExecState* exec)
{
    VM* vm = &exec->vm();
    VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
    CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);

    NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
    ErrorHandlingScope errorScope(*vm);
    vm->throwException(callerFrame, createError(callerFrame, ASCIILiteral("Out-of-bounds access.")));
}
#endif
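
// The two arity-check helpers below are reached from the MustCheckArity entrypoint thunk when a
// callee receives fewer arguments than it declares, e.g. calling `function f(a, b, c) { }` as
// `f(1)`. They return the missing-argument count so the thunk can grow and fix up the frame;
// roughly speaking, a negative count signals that growing the frame would overflow the stack,
// in which case the error has already been thrown here.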
int32_t JIT_OPERATION operationCallArityCheck(ExecState* exec)
{
    VM* vm = &exec->vm();
    JSStack& stack = vm->interpreter->stack();

    int32_t missingArgCount = CommonSlowPaths::arityCheckFor(exec, &stack, CodeForCall);
    if (missingArgCount < 0) {
        VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
        CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);
        NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
        throwStackOverflowError(callerFrame);
    }

    return missingArgCount;
}
int32_t JIT_OPERATION operationConstructArityCheck(ExecState* exec)
{
    VM* vm = &exec->vm();
    JSStack& stack = vm->interpreter->stack();

    int32_t missingArgCount = CommonSlowPaths::arityCheckFor(exec, &stack, CodeForConstruct);
    if (missingArgCount < 0) {
        VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
        CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);
        NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
        throwStackOverflowError(callerFrame);
    }

    return missingArgCount;
}
EncodedJSValue JIT_OPERATION operationTryGetById(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    Identifier ident = Identifier::fromUid(vm, uid);
    stubInfo->tookSlowPath = true;

    JSValue baseValue = JSValue::decode(base);
    PropertySlot slot(baseValue, PropertySlot::InternalMethodType::VMInquiry);
    baseValue.getPropertySlot(exec, ident, slot);

    return JSValue::encode(slot.getPureResult());
}
EncodedJSValue JIT_OPERATION operationTryGetByIdGeneric(ExecState* exec, EncodedJSValue base, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    Identifier ident = Identifier::fromUid(vm, uid);

    JSValue baseValue = JSValue::decode(base);
    PropertySlot slot(baseValue, PropertySlot::InternalMethodType::VMInquiry);
    baseValue.getPropertySlot(exec, ident, slot);

    return JSValue::encode(slot.getPureResult());
}
EncodedJSValue JIT_OPERATION operationTryGetByIdOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    Identifier ident = Identifier::fromUid(vm, uid);

    JSValue baseValue = JSValue::decode(base);
    PropertySlot slot(baseValue, PropertySlot::InternalMethodType::VMInquiry);

    baseValue.getPropertySlot(exec, ident, slot);
    if (stubInfo->considerCaching() && !slot.isTaintedByProxy() && (slot.isCacheableValue() || slot.isCacheableGetter() || slot.isUnset()))
        repatchGetByID(exec, baseValue, ident, slot, *stubInfo, GetByIDKind::Pure);

    return JSValue::encode(slot.getPureResult());
}
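
// A note on the naming convention used throughout this file: plain operationFoo is the
// unoptimized slow path (it marks tookSlowPath so later tiers know the fast path failed),
// operationFooGeneric never caches, and operationFooOptimize additionally asks the repatching
// machinery to build or extend an inline-cache stub. The TryGetById family above uses
// InternalMethodType::VMInquiry so that getters and proxy traps are not observably invoked;
// roughly speaking, it backs the @tryGetById intrinsic used by builtin JS.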
EncodedJSValue JIT_OPERATION operationGetById(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    stubInfo->tookSlowPath = true;

    JSValue baseValue = JSValue::decode(base);
    PropertySlot slot(baseValue, PropertySlot::InternalMethodType::Get);
    Identifier ident = Identifier::fromUid(vm, uid);

    LOG_IC((ICEvent::OperationGetById, baseValue.classInfoOrNull(), ident));
    return JSValue::encode(baseValue.get(exec, ident, slot));
}
EncodedJSValue JIT_OPERATION operationGetByIdGeneric(ExecState* exec, EncodedJSValue base, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    JSValue baseValue = JSValue::decode(base);
    PropertySlot slot(baseValue, PropertySlot::InternalMethodType::Get);
    Identifier ident = Identifier::fromUid(vm, uid);
    LOG_IC((ICEvent::OperationGetByIdGeneric, baseValue.classInfoOrNull(), ident));
    return JSValue::encode(baseValue.get(exec, ident, slot));
}
EncodedJSValue JIT_OPERATION operationGetByIdOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    Identifier ident = Identifier::fromUid(vm, uid);

    JSValue baseValue = JSValue::decode(base);
    LOG_IC((ICEvent::OperationGetByIdOptimize, baseValue.classInfoOrNull(), ident));
    PropertySlot slot(baseValue, PropertySlot::InternalMethodType::Get);

    bool hasResult = baseValue.getPropertySlot(exec, ident, slot);
    if (stubInfo->considerCaching())
        repatchGetByID(exec, baseValue, ident, slot, *stubInfo, GetByIDKind::Normal);

    return JSValue::encode(hasResult ? slot.getValue(exec, ident) : jsUndefined());
}
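
// Note: stubInfo->considerCaching() throttles how eagerly the IC is allowed to (re)generate
// stubs; once it agrees, repatchGetByID rewires the inline cache so future hits of the same
// shape never re-enter this C++ slow path. A rough sketch of the fast/slow protocol for a
// monomorphic hit (the actual stub is emitted by the PolymorphicAccess machinery):
//
//   compare the base cell's structureID against the cached structureID
//   on match:    load the property at the cached offset, entirely in JIT code
//   on mismatch: call operationGetByIdOptimize, which may repatch the stub.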
EncodedJSValue JIT_OPERATION operationInOptimize(ExecState* exec, StructureStubInfo* stubInfo, JSCell* base, UniquedStringImpl* key)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    if (!base->isObject()) {
        vm->throwException(exec, createInvalidInParameterError(exec, base));
        return JSValue::encode(jsUndefined());
    }

    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    Identifier ident = Identifier::fromUid(vm, key);
    LOG_IC((ICEvent::OperationInOptimize, base->classInfo(), ident));
    PropertySlot slot(base, PropertySlot::InternalMethodType::HasProperty);
    bool result = asObject(base)->getPropertySlot(exec, ident, slot);

    RELEASE_ASSERT(accessType == stubInfo->accessType);

    if (stubInfo->considerCaching())
        repatchIn(exec, base, ident, result, slot, *stubInfo);

    return JSValue::encode(jsBoolean(result));
}
EncodedJSValue JIT_OPERATION operationIn(ExecState* exec, StructureStubInfo* stubInfo, JSCell* base, UniquedStringImpl* key)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    stubInfo->tookSlowPath = true;

    if (!base->isObject()) {
        vm->throwException(exec, createInvalidInParameterError(exec, base));
        return JSValue::encode(jsUndefined());
    }

    Identifier ident = Identifier::fromUid(vm, key);
    LOG_IC((ICEvent::OperationIn, base->classInfo(), ident));
    return JSValue::encode(jsBoolean(asObject(base)->hasProperty(exec, ident)));
}
EncodedJSValue JIT_OPERATION operationGenericIn(ExecState* exec, JSCell* base, EncodedJSValue key)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    return JSValue::encode(jsBoolean(CommonSlowPaths::opIn(exec, JSValue::decode(key), base)));
}
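
// The three `in` helpers above mirror the get_by_id family: operationInOptimize feeds repatchIn
// to build a stub for sites like `if ("x" in o) ...`, operationIn is the tookSlowPath variant,
// and operationGenericIn handles fully generic keys via CommonSlowPaths::opIn.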
void JIT_OPERATION operationPutByIdStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    stubInfo->tookSlowPath = true;

    JSValue baseValue = JSValue::decode(encodedBase);
    Identifier ident = Identifier::fromUid(vm, uid);
    LOG_IC((ICEvent::OperationPutByIdStrict, baseValue.classInfoOrNull(), ident));

    PutPropertySlot slot(baseValue, true, exec->codeBlock()->putByIdContext());
    baseValue.putInline(exec, ident, JSValue::decode(encodedValue), slot);
}
void JIT_OPERATION operationPutByIdNonStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    stubInfo->tookSlowPath = true;

    JSValue baseValue = JSValue::decode(encodedBase);
    Identifier ident = Identifier::fromUid(vm, uid);
    LOG_IC((ICEvent::OperationPutByIdNonStrict, baseValue.classInfoOrNull(), ident));
    PutPropertySlot slot(baseValue, false, exec->codeBlock()->putByIdContext());
    baseValue.putInline(exec, ident, JSValue::decode(encodedValue), slot);
}
void JIT_OPERATION operationPutByIdDirectStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    stubInfo->tookSlowPath = true;

    JSValue baseValue = JSValue::decode(encodedBase);
    Identifier ident = Identifier::fromUid(vm, uid);
    LOG_IC((ICEvent::OperationPutByIdDirectStrict, baseValue.classInfoOrNull(), ident));
    PutPropertySlot slot(baseValue, true, exec->codeBlock()->putByIdContext());
    asObject(baseValue)->putDirect(exec->vm(), ident, JSValue::decode(encodedValue), slot);
}
void JIT_OPERATION operationPutByIdDirectNonStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    stubInfo->tookSlowPath = true;

    JSValue baseValue = JSValue::decode(encodedBase);
    Identifier ident = Identifier::fromUid(vm, uid);
    LOG_IC((ICEvent::OperationPutByIdDirectNonStrict, baseValue.classInfoOrNull(), ident));
    PutPropertySlot slot(baseValue, false, exec->codeBlock()->putByIdContext());
    asObject(baseValue)->putDirect(exec->vm(), ident, JSValue::decode(encodedValue), slot);
}
void JIT_OPERATION operationPutByIdStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    Identifier ident = Identifier::fromUid(vm, uid);
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSValue baseValue = JSValue::decode(encodedBase);
    LOG_IC((ICEvent::OperationPutByIdStrictOptimize, baseValue.classInfoOrNull(), ident));
    PutPropertySlot slot(baseValue, true, exec->codeBlock()->putByIdContext());

    Structure* structure = baseValue.isCell() ? baseValue.asCell()->structure(*vm) : nullptr;
    baseValue.putInline(exec, ident, value, slot);

    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;

    if (stubInfo->considerCaching())
        repatchPutByID(exec, baseValue, structure, ident, slot, *stubInfo, NotDirect);
}
void JIT_OPERATION operationPutByIdNonStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    Identifier ident = Identifier::fromUid(vm, uid);
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSValue baseValue = JSValue::decode(encodedBase);
    LOG_IC((ICEvent::OperationPutByIdNonStrictOptimize, baseValue.classInfoOrNull(), ident));
    PutPropertySlot slot(baseValue, false, exec->codeBlock()->putByIdContext());

    Structure* structure = baseValue.isCell() ? baseValue.asCell()->structure(*vm) : nullptr;
    baseValue.putInline(exec, ident, value, slot);

    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;

    if (stubInfo->considerCaching())
        repatchPutByID(exec, baseValue, structure, ident, slot, *stubInfo, NotDirect);
}
void JIT_OPERATION operationPutByIdDirectStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    Identifier ident = Identifier::fromUid(vm, uid);
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSObject* baseObject = asObject(JSValue::decode(encodedBase));
    LOG_IC((ICEvent::OperationPutByIdDirectStrictOptimize, baseObject->classInfo(), ident));
    PutPropertySlot slot(baseObject, true, exec->codeBlock()->putByIdContext());

    Structure* structure = baseObject->structure(*vm);
    baseObject->putDirect(exec->vm(), ident, value, slot);

    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;

    if (stubInfo->considerCaching())
        repatchPutByID(exec, baseObject, structure, ident, slot, *stubInfo, Direct);
}
void JIT_OPERATION operationPutByIdDirectNonStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    Identifier ident = Identifier::fromUid(vm, uid);
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSObject* baseObject = asObject(JSValue::decode(encodedBase));
    LOG_IC((ICEvent::OperationPutByIdDirectNonStrictOptimize, baseObject->classInfo(), ident));
    PutPropertySlot slot(baseObject, false, exec->codeBlock()->putByIdContext());

    Structure* structure = baseObject->structure(*vm);
    baseObject->putDirect(exec->vm(), ident, value, slot);

    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;

    if (stubInfo->considerCaching())
        repatchPutByID(exec, baseObject, structure, ident, slot, *stubInfo, Direct);
}
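
// The eight put_by_id entry points above cover the cross product of {strict, sloppy} x
// {ordinary put, direct put} x {slow path, optimizing}. Direct puts (used, roughly, for stores
// that define a property on the object itself, such as `var o = { x: 1 }`) bypass the prototype
// chain and any setters, which is why they assert an object base and call putDirect rather than
// putInline.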
void JIT_OPERATION operationReallocateStorageAndFinishPut(ExecState* exec, JSObject* base, Structure* structure, PropertyOffset offset, EncodedJSValue value)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    ASSERT(structure->outOfLineCapacity() > base->structure(vm)->outOfLineCapacity());
    ASSERT(!vm.heap.storageAllocator().fastPathShouldSucceed(structure->outOfLineCapacity() * sizeof(JSValue)));
    base->setStructureAndReallocateStorageIfNecessary(vm, structure);
    base->putDirect(vm, offset, JSValue::decode(value));
}
ALWAYS_INLINE static bool isStringOrSymbol(JSValue value)
{
    return value.isString() || value.isSymbol();
}
static void putByVal(CallFrame* callFrame, JSValue baseValue, JSValue subscript, JSValue value, ByValInfo* byValInfo)
{
    VM& vm = callFrame->vm();
    if (LIKELY(subscript.isUInt32())) {
        byValInfo->tookSlowPath = true;
        uint32_t i = subscript.asUInt32();
        if (baseValue.isObject()) {
            JSObject* object = asObject(baseValue);
            if (object->canSetIndexQuickly(i))
                object->setIndexQuickly(callFrame->vm(), i, value);
            else {
                // FIXME: This will make us think that in-bounds typed array accesses are actually
                // out-of-bounds.
                // https://bugs.webkit.org/show_bug.cgi?id=149886
                byValInfo->arrayProfile->setOutOfBounds();
                object->methodTable(vm)->putByIndex(object, callFrame, i, value, callFrame->codeBlock()->isStrictMode());
            }
        } else
            baseValue.putByIndex(callFrame, i, value, callFrame->codeBlock()->isStrictMode());
        return;
    }

    auto property = subscript.toPropertyKey(callFrame);
    // Don't put to an object if toString threw an exception.
    if (callFrame->vm().exception())
        return;

    if (byValInfo->stubInfo && (!isStringOrSymbol(subscript) || byValInfo->cachedId != property))
        byValInfo->tookSlowPath = true;

    PutPropertySlot slot(baseValue, callFrame->codeBlock()->isStrictMode());
    baseValue.putInline(callFrame, property, value, slot);
}
static void directPutByVal(CallFrame* callFrame, JSObject* baseObject, JSValue subscript, JSValue value, ByValInfo* byValInfo)
{
    bool isStrictMode = callFrame->codeBlock()->isStrictMode();
    if (LIKELY(subscript.isUInt32())) {
        // Despite its name, JSValue::isUInt32 will return true only for positive boxed int32_t; all those values are valid array indices.
        byValInfo->tookSlowPath = true;
        uint32_t index = subscript.asUInt32();
        ASSERT(isIndex(index));
        if (baseObject->canSetIndexQuicklyForPutDirect(index)) {
            baseObject->setIndexQuickly(callFrame->vm(), index, value);
            return;
        }

        // FIXME: This will make us think that in-bounds typed array accesses are actually
        // out-of-bounds.
        // https://bugs.webkit.org/show_bug.cgi?id=149886
        byValInfo->arrayProfile->setOutOfBounds();
        baseObject->putDirectIndex(callFrame, index, value, 0, isStrictMode ? PutDirectIndexShouldThrow : PutDirectIndexShouldNotThrow);
        return;
    }

    if (subscript.isDouble()) {
        double subscriptAsDouble = subscript.asDouble();
        uint32_t subscriptAsUInt32 = static_cast<uint32_t>(subscriptAsDouble);
        if (subscriptAsDouble == subscriptAsUInt32 && isIndex(subscriptAsUInt32)) {
            byValInfo->tookSlowPath = true;
            baseObject->putDirectIndex(callFrame, subscriptAsUInt32, value, 0, isStrictMode ? PutDirectIndexShouldThrow : PutDirectIndexShouldNotThrow);
            return;
        }
    }

    // Don't put to an object if toString threw an exception.
    auto property = subscript.toPropertyKey(callFrame);
    if (callFrame->vm().exception())
        return;

    if (Optional<uint32_t> index = parseIndex(property)) {
        byValInfo->tookSlowPath = true;
        baseObject->putDirectIndex(callFrame, index.value(), value, 0, isStrictMode ? PutDirectIndexShouldThrow : PutDirectIndexShouldNotThrow);
        return;
    }

    if (byValInfo->stubInfo && (!isStringOrSymbol(subscript) || byValInfo->cachedId != property))
        byValInfo->tookSlowPath = true;

    PutPropertySlot slot(baseObject, isStrictMode);
    baseObject->putDirect(callFrame->vm(), property, value, slot);
}
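
// putByVal and directPutByVal are the shared slow-path bodies for the operation* wrappers
// below: the former implements ordinary `o[i] = v` semantics (prototype chain, setters), the
// latter backs put_by_val_direct, e.g. array-literal initialization. Both set
// byValInfo->tookSlowPath so the profiling tiers know the fast path was bypassed.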
enum class OptimizationResult {
    NotOptimized,
    SeenOnce,
    Optimized,
    GiveUp,
};
static OptimizationResult tryPutByValOptimize(ExecState* exec, JSValue baseValue, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    // See if it's worth optimizing at all.
    OptimizationResult optimizationResult = OptimizationResult::NotOptimized;

    VM& vm = exec->vm();

    if (baseValue.isObject() && subscript.isInt32()) {
        JSObject* object = asObject(baseValue);

        ASSERT(exec->bytecodeOffset());
        ASSERT(!byValInfo->stubRoutine);

        Structure* structure = object->structure(vm);
        if (hasOptimizableIndexing(structure)) {
            // Attempt to optimize.
            JITArrayMode arrayMode = jitArrayModeForStructure(structure);
            if (jitArrayModePermitsPut(arrayMode) && arrayMode != byValInfo->arrayMode) {
                CodeBlock* codeBlock = exec->codeBlock();
                ConcurrentJITLocker locker(codeBlock->m_lock);
                byValInfo->arrayProfile->computeUpdatedPrediction(locker, codeBlock, structure);

                JIT::compilePutByVal(&vm, exec->codeBlock(), byValInfo, returnAddress, arrayMode);
                optimizationResult = OptimizationResult::Optimized;
            }
        }

        // If we failed to patch and we have some object that intercepts indexed get, then don't even wait until 10 times.
        if (optimizationResult != OptimizationResult::Optimized && object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero())
            optimizationResult = OptimizationResult::GiveUp;
    }

    if (baseValue.isObject() && isStringOrSymbol(subscript)) {
        const Identifier propertyName = subscript.toPropertyKey(exec);
        if (!subscript.isString() || !parseIndex(propertyName)) {
            ASSERT(exec->bytecodeOffset());
            ASSERT(!byValInfo->stubRoutine);
            if (byValInfo->seen) {
                if (byValInfo->cachedId == propertyName) {
                    JIT::compilePutByValWithCachedId(&vm, exec->codeBlock(), byValInfo, returnAddress, NotDirect, propertyName);
                    optimizationResult = OptimizationResult::Optimized;
                } else {
                    // Seems like a generic property access site.
                    optimizationResult = OptimizationResult::GiveUp;
                }
            } else {
                byValInfo->seen = true;
                byValInfo->cachedId = propertyName;
                optimizationResult = OptimizationResult::SeenOnce;
            }
        }
    }

    if (optimizationResult != OptimizationResult::Optimized && optimizationResult != OptimizationResult::SeenOnce) {
        // If we take slow path more than 10 times without patching then make sure we
        // never make that mistake again. For cases where we see non-index-intercepting
        // objects, this gives 10 iterations worth of opportunity for us to observe
        // that the put_by_val may be polymorphic. We count up slowPathCount even if
        // the result is GiveUp.
        if (++byValInfo->slowPathCount >= 10)
            optimizationResult = OptimizationResult::GiveUp;
    }

    return optimizationResult;
}
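
// Usage note: the baseline JIT initially points a put_by_val slow-path call at
// operationPutByValOptimize below; once tryPutByValOptimize answers GiveUp, the call site is
// repatched (ctiPatchCallByReturnAddress) to the Generic variant so the site stops paying for
// optimization attempts it can never win.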
void JIT_OPERATION operationPutByValOptimize(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    JSValue baseValue = JSValue::decode(encodedBaseValue);
    JSValue subscript = JSValue::decode(encodedSubscript);
    JSValue value = JSValue::decode(encodedValue);
    if (tryPutByValOptimize(exec, baseValue, subscript, byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS)) == OptimizationResult::GiveUp) {
        // Don't ever try to optimize.
        byValInfo->tookSlowPath = true;
        ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(operationPutByValGeneric));
    }
    putByVal(exec, baseValue, subscript, value, byValInfo);
}
static OptimizationResult tryDirectPutByValOptimize(ExecState* exec, JSObject* object, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    // See if it's worth optimizing at all.
    OptimizationResult optimizationResult = OptimizationResult::NotOptimized;

    VM& vm = exec->vm();

    if (subscript.isInt32()) {
        ASSERT(exec->bytecodeOffset());
        ASSERT(!byValInfo->stubRoutine);

        Structure* structure = object->structure(vm);
        if (hasOptimizableIndexing(structure)) {
            // Attempt to optimize.
            JITArrayMode arrayMode = jitArrayModeForStructure(structure);
            if (jitArrayModePermitsPut(arrayMode) && arrayMode != byValInfo->arrayMode) {
                CodeBlock* codeBlock = exec->codeBlock();
                ConcurrentJITLocker locker(codeBlock->m_lock);
                byValInfo->arrayProfile->computeUpdatedPrediction(locker, codeBlock, structure);

                JIT::compileDirectPutByVal(&vm, exec->codeBlock(), byValInfo, returnAddress, arrayMode);
                optimizationResult = OptimizationResult::Optimized;
            }
        }

        // If we failed to patch and we have some object that intercepts indexed get, then don't even wait until 10 times.
        if (optimizationResult != OptimizationResult::Optimized && object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero())
            optimizationResult = OptimizationResult::GiveUp;
    } else if (isStringOrSymbol(subscript)) {
        const Identifier propertyName = subscript.toPropertyKey(exec);
        Optional<uint32_t> index = parseIndex(propertyName);

        if (!subscript.isString() || !index) {
            ASSERT(exec->bytecodeOffset());
            ASSERT(!byValInfo->stubRoutine);
            if (byValInfo->seen) {
                if (byValInfo->cachedId == propertyName) {
                    JIT::compilePutByValWithCachedId(&vm, exec->codeBlock(), byValInfo, returnAddress, Direct, propertyName);
                    optimizationResult = OptimizationResult::Optimized;
                } else {
                    // Seems like a generic property access site.
                    optimizationResult = OptimizationResult::GiveUp;
                }
            } else {
                byValInfo->seen = true;
                byValInfo->cachedId = propertyName;
                optimizationResult = OptimizationResult::SeenOnce;
            }
        }
    }

    if (optimizationResult != OptimizationResult::Optimized && optimizationResult != OptimizationResult::SeenOnce) {
        // If we take slow path more than 10 times without patching then make sure we
        // never make that mistake again. For cases where we see non-index-intercepting
        // objects, this gives 10 iterations worth of opportunity for us to observe
        // that the put_by_val may be polymorphic. We count up slowPathCount even if
        // the result is GiveUp.
        if (++byValInfo->slowPathCount >= 10)
            optimizationResult = OptimizationResult::GiveUp;
    }

    return optimizationResult;
}
void JIT_OPERATION operationDirectPutByValOptimize(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    JSValue baseValue = JSValue::decode(encodedBaseValue);
    JSValue subscript = JSValue::decode(encodedSubscript);
    JSValue value = JSValue::decode(encodedValue);
    RELEASE_ASSERT(baseValue.isObject());
    JSObject* object = asObject(baseValue);
    if (tryDirectPutByValOptimize(exec, object, subscript, byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS)) == OptimizationResult::GiveUp) {
        // Don't ever try to optimize.
        byValInfo->tookSlowPath = true;
        ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(operationDirectPutByValGeneric));
    }

    directPutByVal(exec, object, subscript, value, byValInfo);
}
void JIT_OPERATION operationPutByValGeneric(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    JSValue baseValue = JSValue::decode(encodedBaseValue);
    JSValue subscript = JSValue::decode(encodedSubscript);
    JSValue value = JSValue::decode(encodedValue);

    putByVal(exec, baseValue, subscript, value, byValInfo);
}
void JIT_OPERATION operationDirectPutByValGeneric(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    JSValue baseValue = JSValue::decode(encodedBaseValue);
    JSValue subscript = JSValue::decode(encodedSubscript);
    JSValue value = JSValue::decode(encodedValue);
    RELEASE_ASSERT(baseValue.isObject());
    directPutByVal(exec, asObject(baseValue), subscript, value, byValInfo);
}
EncodedJSValue JIT_OPERATION operationCallEval(ExecState* exec, ExecState* execCallee)
{
    execCallee->setCodeBlock(0);

    if (!isHostFunction(execCallee->calleeAsValue(), globalFuncEval))
        return JSValue::encode(JSValue());

    VM* vm = &execCallee->vm();
    JSValue result = eval(execCallee);
    if (vm->exception())
        return EncodedJSValue();

    return JSValue::encode(result);
}
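
// operationCallEval only helps when the callee really is the built-in global eval; if `eval`
// resolves to something else at runtime (e.g. a user function assigned over it), it returns the
// empty JSValue, which tells the JIT-emitted call site "not handled, perform an ordinary call"
// rather than "eval returned undefined".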
static SlowPathReturnType handleHostCall(ExecState* execCallee, JSValue callee, CallLinkInfo* callLinkInfo)
{
    ExecState* exec = execCallee->callerFrame();
    VM* vm = &exec->vm();

    execCallee->setCodeBlock(0);

    if (callLinkInfo->specializationKind() == CodeForCall) {
        CallData callData;
        CallType callType = getCallData(callee, callData);

        ASSERT(callType != CallType::JS);

        if (callType == CallType::Host) {
            NativeCallFrameTracer tracer(vm, execCallee);
            execCallee->setCallee(asObject(callee));
            vm->hostCallReturnValue = JSValue::decode(callData.native.function(execCallee));
            if (vm->exception()) {
                return encodeResult(
                    vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                    reinterpret_cast<void*>(KeepTheFrame));
            }

            return encodeResult(
                bitwise_cast<void*>(getHostCallReturnValue),
                reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
        }

        ASSERT(callType == CallType::None);
        exec->vm().throwException(exec, createNotAFunctionError(exec, callee));
        return encodeResult(
            vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
            reinterpret_cast<void*>(KeepTheFrame));
    }

    ASSERT(callLinkInfo->specializationKind() == CodeForConstruct);

    ConstructData constructData;
    ConstructType constructType = getConstructData(callee, constructData);

    ASSERT(constructType != ConstructType::JS);

    if (constructType == ConstructType::Host) {
        NativeCallFrameTracer tracer(vm, execCallee);
        execCallee->setCallee(asObject(callee));
        vm->hostCallReturnValue = JSValue::decode(constructData.native.function(execCallee));
        if (vm->exception()) {
            return encodeResult(
                vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                reinterpret_cast<void*>(KeepTheFrame));
        }

        return encodeResult(bitwise_cast<void*>(getHostCallReturnValue), reinterpret_cast<void*>(KeepTheFrame));
    }

    ASSERT(constructType == ConstructType::None);
    exec->vm().throwException(exec, createNotAConstructorError(exec, callee));
    return encodeResult(
        vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
        reinterpret_cast<void*>(KeepTheFrame));
}
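
// encodeResult packs two machine words into one SlowPathReturnType: the code pointer the JIT
// should jump to, and a flag saying whether the already-materialized callee frame may be reused
// (ReuseTheFrame, for tail calls) or must be kept (KeepTheFrame). A hypothetical caller-side
// sketch, assuming the decodeResult helper declared alongside it in JITOperations.h:
//
//   SlowPathReturnType r = operationLinkCall(execCallee, callLinkInfo);
//   void* codePtr;
//   void* frameFlag;
//   decodeResult(r, codePtr, frameFlag);
//   // ... the generated thunk then jumps to codePtr.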
SlowPathReturnType JIT_OPERATION operationLinkCall(ExecState* execCallee, CallLinkInfo* callLinkInfo)
{
    ExecState* exec = execCallee->callerFrame();
    VM* vm = &exec->vm();
    CodeSpecializationKind kind = callLinkInfo->specializationKind();
    NativeCallFrameTracer tracer(vm, exec);

    JSValue calleeAsValue = execCallee->calleeAsValue();
    JSCell* calleeAsFunctionCell = getJSFunction(calleeAsValue);
    if (!calleeAsFunctionCell) {
        // FIXME: We should cache these kinds of calls. They can be common and currently they are
        // expensive.
        // https://bugs.webkit.org/show_bug.cgi?id=144458
        return handleHostCall(execCallee, calleeAsValue, callLinkInfo);
    }

    JSFunction* callee = jsCast<JSFunction*>(calleeAsFunctionCell);
    JSScope* scope = callee->scopeUnchecked();
    ExecutableBase* executable = callee->executable();

    MacroAssemblerCodePtr codePtr;
    CodeBlock* codeBlock = 0;
    if (executable->isHostFunction()) {
        codePtr = executable->entrypointFor(kind, MustCheckArity);
#if ENABLE(WEBASSEMBLY)
    } else if (executable->isWebAssemblyExecutable()) {
        WebAssemblyExecutable* webAssemblyExecutable = static_cast<WebAssemblyExecutable*>(executable);
        webAssemblyExecutable->prepareForExecution(execCallee);
        codeBlock = webAssemblyExecutable->codeBlockForCall();

        ArityCheckMode arity;
        if (execCallee->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters()))
            arity = MustCheckArity;
        else
            arity = ArityCheckNotRequired;
        codePtr = webAssemblyExecutable->entrypointFor(kind, arity);
#endif
    } else {
        FunctionExecutable* functionExecutable = static_cast<FunctionExecutable*>(executable);

        if (!isCall(kind) && functionExecutable->constructAbility() == ConstructAbility::CannotConstruct) {
            exec->vm().throwException(exec, createNotAConstructorError(exec, callee));
            return encodeResult(
                vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                reinterpret_cast<void*>(KeepTheFrame));
        }

        JSObject* error = functionExecutable->prepareForExecution(execCallee, callee, scope, kind);
        if (error) {
            exec->vm().throwException(exec, error);
            return encodeResult(
                vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                reinterpret_cast<void*>(KeepTheFrame));
        }
        codeBlock = functionExecutable->codeBlockFor(kind);
        ArityCheckMode arity;
        if (execCallee->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters()) || callLinkInfo->isVarargs())
            arity = MustCheckArity;
        else
            arity = ArityCheckNotRequired;
        codePtr = functionExecutable->entrypointFor(kind, arity);
    }
    if (!callLinkInfo->seenOnce())
        callLinkInfo->setSeen();
    else
        linkFor(execCallee, *callLinkInfo, codeBlock, callee, codePtr);

    return encodeResult(codePtr.executableAddress(), reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
}
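
// Design note: operationLinkCall is the once-per-call-site linking path. On the first visit it
// only marks the CallLinkInfo as seen; on the second it links the site straight to the callee's
// entrypoint via linkFor, so (roughly) a monomorphic call site never re-enters this function
// unless its target is jettisoned or replaced.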
inline SlowPathReturnType virtualForWithFunction(
    ExecState* execCallee, CallLinkInfo* callLinkInfo, JSCell*& calleeAsFunctionCell)
{
    ExecState* exec = execCallee->callerFrame();
    VM* vm = &exec->vm();
    CodeSpecializationKind kind = callLinkInfo->specializationKind();
    NativeCallFrameTracer tracer(vm, exec);

    JSValue calleeAsValue = execCallee->calleeAsValue();
    calleeAsFunctionCell = getJSFunction(calleeAsValue);
    if (UNLIKELY(!calleeAsFunctionCell))
        return handleHostCall(execCallee, calleeAsValue, callLinkInfo);

    JSFunction* function = jsCast<JSFunction*>(calleeAsFunctionCell);
    JSScope* scope = function->scopeUnchecked();
    ExecutableBase* executable = function->executable();
    if (UNLIKELY(!executable->hasJITCodeFor(kind))) {
        bool isWebAssemblyExecutable = false;
#if ENABLE(WEBASSEMBLY)
        isWebAssemblyExecutable = executable->isWebAssemblyExecutable();
#endif
        if (!isWebAssemblyExecutable) {
            FunctionExecutable* functionExecutable = static_cast<FunctionExecutable*>(executable);

            if (!isCall(kind) && functionExecutable->constructAbility() == ConstructAbility::CannotConstruct) {
                exec->vm().throwException(exec, createNotAConstructorError(exec, function));
                return encodeResult(
                    vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                    reinterpret_cast<void*>(KeepTheFrame));
            }

            JSObject* error = functionExecutable->prepareForExecution(execCallee, function, scope, kind);
            if (error) {
                exec->vm().throwException(exec, error);
                return encodeResult(
                    vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                    reinterpret_cast<void*>(KeepTheFrame));
            }
        } else {
#if ENABLE(WEBASSEMBLY)
            if (!isCall(kind)) {
                exec->vm().throwException(exec, createNotAConstructorError(exec, function));
                return encodeResult(
                    vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                    reinterpret_cast<void*>(KeepTheFrame));
            }

            WebAssemblyExecutable* webAssemblyExecutable = static_cast<WebAssemblyExecutable*>(executable);
            webAssemblyExecutable->prepareForExecution(execCallee);
#endif
        }
    }
    return encodeResult(executable->entrypointFor(
        kind, MustCheckArity).executableAddress(),
        reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
}
SlowPathReturnType JIT_OPERATION operationLinkPolymorphicCall(ExecState* execCallee, CallLinkInfo* callLinkInfo)
{
    ASSERT(callLinkInfo->specializationKind() == CodeForCall);
    JSCell* calleeAsFunctionCell;
    SlowPathReturnType result = virtualForWithFunction(execCallee, callLinkInfo, calleeAsFunctionCell);

    linkPolymorphicCall(execCallee, *callLinkInfo, CallVariant(calleeAsFunctionCell));

    return result;
}
SlowPathReturnType JIT_OPERATION operationVirtualCall(ExecState* execCallee, CallLinkInfo* callLinkInfo)
{
    JSCell* calleeAsFunctionCellIgnored;
    return virtualForWithFunction(execCallee, callLinkInfo, calleeAsFunctionCellIgnored);
}
size_t JIT_OPERATION operationCompareLess(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    return jsLess<true>(exec, JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
}

size_t JIT_OPERATION operationCompareLessEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    return jsLessEq<true>(exec, JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
}

size_t JIT_OPERATION operationCompareGreater(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    return jsLess<false>(exec, JSValue::decode(encodedOp2), JSValue::decode(encodedOp1));
}

size_t JIT_OPERATION operationCompareGreaterEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    return jsLessEq<false>(exec, JSValue::decode(encodedOp2), JSValue::decode(encodedOp1));
}

size_t JIT_OPERATION operationConvertJSValueToBoolean(ExecState* exec, EncodedJSValue encodedOp)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    return JSValue::decode(encodedOp).toBoolean(exec);
}

size_t JIT_OPERATION operationCompareEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    return JSValue::equalSlowCaseInline(exec, JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
}
#if USE(JSVALUE64)
EncodedJSValue JIT_OPERATION operationCompareStringEq(ExecState* exec, JSCell* left, JSCell* right)
#else
size_t JIT_OPERATION operationCompareStringEq(ExecState* exec, JSCell* left, JSCell* right)
#endif
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    bool result = WTF::equal(*asString(left)->value(exec).impl(), *asString(right)->value(exec).impl());
#if USE(JSVALUE64)
    return JSValue::encode(jsBoolean(result));
#else
    return result;
#endif
}
EncodedJSValue JIT_OPERATION operationNewArrayWithProfile(ExecState* exec, ArrayAllocationProfile* profile, const JSValue* values, int size)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    return JSValue::encode(constructArrayNegativeIndexed(exec, profile, values, size));
}

EncodedJSValue JIT_OPERATION operationNewArrayBufferWithProfile(ExecState* exec, ArrayAllocationProfile* profile, const JSValue* values, int size)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    return JSValue::encode(constructArray(exec, profile, values, size));
}

EncodedJSValue JIT_OPERATION operationNewArrayWithSizeAndProfile(ExecState* exec, ArrayAllocationProfile* profile, EncodedJSValue size)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    JSValue sizeValue = JSValue::decode(size);
    return JSValue::encode(constructArrayWithSizeQuirk(exec, profile, exec->lexicalGlobalObject(), sizeValue));
}
template<typename FunctionType>
static EncodedJSValue operationNewFunctionCommon(ExecState* exec, JSScope* scope, JSCell* functionExecutable, bool isInvalidated)
{
    ASSERT(functionExecutable->inherits(FunctionExecutable::info()));
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    if (isInvalidated)
        return JSValue::encode(FunctionType::createWithInvalidatedReallocationWatchpoint(vm, static_cast<FunctionExecutable*>(functionExecutable), scope));
    return JSValue::encode(FunctionType::create(vm, static_cast<FunctionExecutable*>(functionExecutable), scope));
}
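
// The four thin wrappers below instantiate operationNewFunctionCommon for plain and generator
// functions. Roughly speaking, the "WithInvalidatedReallocationWatchpoint" variants are used
// when the function's allocation-profile watchpoint has already fired, so the new JSFunction is
// created with that watchpoint pre-invalidated rather than registered for later invalidation.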
EncodedJSValue JIT_OPERATION operationNewFunction(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
{
    return operationNewFunctionCommon<JSFunction>(exec, scope, functionExecutable, false);
}

EncodedJSValue JIT_OPERATION operationNewFunctionWithInvalidatedReallocationWatchpoint(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
{
    return operationNewFunctionCommon<JSFunction>(exec, scope, functionExecutable, true);
}

EncodedJSValue JIT_OPERATION operationNewGeneratorFunction(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
{
    return operationNewFunctionCommon<JSGeneratorFunction>(exec, scope, functionExecutable, false);
}

EncodedJSValue JIT_OPERATION operationNewGeneratorFunctionWithInvalidatedReallocationWatchpoint(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
{
    return operationNewFunctionCommon<JSGeneratorFunction>(exec, scope, functionExecutable, true);
}
void JIT_OPERATION operationSetFunctionName(ExecState* exec, JSCell* funcCell, EncodedJSValue encodedName)
{
    JSFunction* func = jsCast<JSFunction*>(funcCell);
    JSValue name = JSValue::decode(encodedName);
    func->setFunctionName(exec, name);
}
JSCell* JIT_OPERATION operationNewObject(ExecState* exec, Structure* structure)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    return constructEmptyObject(exec, structure);
}
EncodedJSValue JIT_OPERATION operationNewRegexp(ExecState* exec, void* regexpPtr)
{
    SuperSamplerScope superSamplerScope(false);
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    RegExp* regexp = static_cast<RegExp*>(regexpPtr);
    if (!regexp->isValid()) {
        vm.throwException(exec, createSyntaxError(exec, ASCIILiteral("Invalid flags supplied to RegExp constructor.")));
        return JSValue::encode(jsUndefined());
    }

    return JSValue::encode(RegExpObject::create(vm, exec->lexicalGlobalObject()->regExpStructure(), regexp));
}
// The only reason for returning an UnusedPtr (instead of void) is so that we can reuse the
// existing DFG slow path generator machinery when creating the slow path for CheckWatchdogTimer
// in the DFG. If a DFG slow path generator that supports a void return type is added in the
// future, we can switch to using that then.
UnusedPtr JIT_OPERATION operationHandleWatchdogTimer(ExecState* exec)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    if (UNLIKELY(vm.shouldTriggerTermination(exec)))
        vm.throwException(exec, createTerminatedExecutionException(&vm));

    return nullptr;
}
void JIT_OPERATION operationThrowStaticError(ExecState* exec, EncodedJSValue encodedValue, int32_t referenceErrorFlag)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSValue errorMessageValue = JSValue::decode(encodedValue);
    RELEASE_ASSERT(errorMessageValue.isString());
    String errorMessage = asString(errorMessageValue)->value(exec);
    if (referenceErrorFlag)
        vm.throwException(exec, createReferenceError(exec, errorMessage));
    else
        vm.throwException(exec, createTypeError(exec, errorMessage));
}
void JIT_OPERATION operationDebug(ExecState* exec, int32_t debugHookID)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    vm.interpreter->debug(exec, static_cast<DebugHookID>(debugHookID));
}
#if ENABLE(DFG_JIT)
static void updateAllPredictionsAndOptimizeAfterWarmUp(CodeBlock* codeBlock)
{
    codeBlock->updateAllPredictions();
    codeBlock->optimizeAfterWarmUp();
}
SlowPathReturnType JIT_OPERATION operationOptimize(ExecState* exec, int32_t bytecodeIndex)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    // Defer GC for a while so that it doesn't run between when we enter into this
    // slow path and when we figure out the state of our code block. This prevents
    // a number of awkward reentrancy scenarios, including:
    //
    // - The optimized version of our code block being jettisoned by GC right after
    //   we concluded that we wanted to use it, but have not planted it into the JS
    //   stack yet.
    //
    // - An optimized version of our code block being installed just as we decided
    //   that it wasn't ready yet.
    //
    // Note that jettisoning won't happen if we already initiated OSR, because in
    // that case we would have already planted the optimized code block into the JS
    // stack.
    DeferGCForAWhile deferGC(vm.heap);

    CodeBlock* codeBlock = exec->codeBlock();
    if (codeBlock->jitType() != JITCode::BaselineJIT) {
        dataLog("Unexpected code block in Baseline->DFG tier-up: ", *codeBlock, "\n");
        RELEASE_ASSERT_NOT_REACHED();
    }

    if (bytecodeIndex) {
        // If we're attempting to OSR from a loop, assume that this should be
        // separately optimized.
        codeBlock->m_shouldAlwaysBeInlined = false;
    }

    if (Options::verboseOSR()) {
        dataLog(
            *codeBlock, ": Entered optimize with bytecodeIndex = ", bytecodeIndex,
            ", executeCounter = ", codeBlock->jitExecuteCounter(),
            ", optimizationDelayCounter = ", codeBlock->reoptimizationRetryCounter(),
            ", exitCounter = ");
        if (codeBlock->hasOptimizedReplacement())
            dataLog(codeBlock->replacement()->osrExitCounter());
        else
            dataLog("N/A");
        dataLog("\n");
    }
    if (!codeBlock->checkIfOptimizationThresholdReached()) {
        codeBlock->updateAllPredictions();
        if (Options::verboseOSR())
            dataLog("Choosing not to optimize ", *codeBlock, " yet, because the threshold hasn't been reached.\n");
        return encodeResult(0, 0);
    }

    if (vm.enabledProfiler()) {
        updateAllPredictionsAndOptimizeAfterWarmUp(codeBlock);
        return encodeResult(0, 0);
    }

    Debugger* debugger = codeBlock->globalObject()->debugger();
    if (debugger && (debugger->isStepping() || codeBlock->baselineAlternative()->hasDebuggerRequests())) {
        updateAllPredictionsAndOptimizeAfterWarmUp(codeBlock);
        return encodeResult(0, 0);
    }

    if (codeBlock->m_shouldAlwaysBeInlined) {
        updateAllPredictionsAndOptimizeAfterWarmUp(codeBlock);
        if (Options::verboseOSR())
            dataLog("Choosing not to optimize ", *codeBlock, " yet, because m_shouldAlwaysBeInlined == true.\n");
        return encodeResult(0, 0);
    }

    // We cannot be in the process of asynchronous compilation and also have an optimized
    // replacement.
    DFG::Worklist* worklist = DFG::existingGlobalDFGWorklistOrNull();
    ASSERT(
        !worklist
        || !(worklist->compilationState(DFG::CompilationKey(codeBlock, DFG::DFGMode)) != DFG::Worklist::NotKnown
        && codeBlock->hasOptimizedReplacement()));

    DFG::Worklist::State worklistState;
    if (worklist) {
        // The call to DFG::Worklist::completeAllReadyPlansForVM() will complete all ready
        // (i.e. compiled) code blocks. But if it completes ours, we also need to know
        // what the result was so that we don't plow ahead and attempt OSR or immediate
        // reoptimization. This will have already also set the appropriate JIT execution
        // count threshold depending on what happened, so if the compilation was anything
        // but successful we just want to return early. See the case for worklistState ==
        // DFG::Worklist::Compiled, below.
        //
        // Note that we could have alternatively just called Worklist::compilationState()
        // here, and if it returned Compiled, we could have then called
        // completeAndScheduleOSR() below. But that would have meant that it could take
        // longer for code blocks to be completed: they would only complete when *their*
        // execution count trigger fired; but that could take a while since the firing is
        // racy. It could also mean that code blocks that never run again after being
        // compiled would sit on the worklist until next GC. That's fine, but it's
        // probably a waste of memory. Our goal here is to complete code blocks as soon as
        // possible in order to minimize the chances of us executing baseline code after
        // optimized code is already available.
        worklistState = worklist->completeAllReadyPlansForVM(
            vm, DFG::CompilationKey(codeBlock, DFG::DFGMode));
    } else
        worklistState = DFG::Worklist::NotKnown;
    if (worklistState == DFG::Worklist::Compiling) {
        // We cannot be in the process of asynchronous compilation and also have an optimized
        // replacement.
        RELEASE_ASSERT(!codeBlock->hasOptimizedReplacement());
        codeBlock->setOptimizationThresholdBasedOnCompilationResult(CompilationDeferred);
        return encodeResult(0, 0);
    }

    if (worklistState == DFG::Worklist::Compiled) {
        // If we don't have an optimized replacement but we did just get compiled, then
        // the compilation failed or was invalidated, in which case the execution count
        // thresholds have already been set appropriately by
        // CodeBlock::setOptimizationThresholdBasedOnCompilationResult() and we have
        // nothing left to do.
        if (!codeBlock->hasOptimizedReplacement()) {
            codeBlock->updateAllPredictions();
            if (Options::verboseOSR())
                dataLog("Code block ", *codeBlock, " was compiled but it doesn't have an optimized replacement.\n");
            return encodeResult(0, 0);
        }
    } else if (codeBlock->hasOptimizedReplacement()) {
        if (Options::verboseOSR())
            dataLog("Considering OSR ", *codeBlock, " -> ", *codeBlock->replacement(), ".\n");
        // If we have an optimized replacement, then it must be the case that we entered
        // cti_optimize from a loop. That's because if there's an optimized replacement,
        // then all calls to this function will be relinked to the replacement and so
        // the prologue OSR will never fire.
        //
        // This is an interesting threshold check. Consider that a function OSR exits
        // in the middle of a loop, while having a relatively low exit count. The exit
        // will reset the execution counter to some target threshold, meaning that this
        // code won't be reached until that loop heats up for >=1000 executions. But then
        // we do a second check here, to see if we should either reoptimize, or just
        // attempt OSR entry. Hence it might even be correct for
        // shouldReoptimizeFromLoopNow() to always return true. But we make it do some
        // additional checking anyway, to reduce the amount of recompilation thrashing.
        if (codeBlock->replacement()->shouldReoptimizeFromLoopNow()) {
            if (Options::verboseOSR()) {
                dataLog(
                    "Triggering reoptimization of ", *codeBlock,
                    "(", *codeBlock->replacement(), ") (in loop).\n");
            }
            codeBlock->replacement()->jettison(Profiler::JettisonDueToBaselineLoopReoptimizationTrigger, CountReoptimization);
            return encodeResult(0, 0);
        }
    } else {
        if (!codeBlock->shouldOptimizeNow()) {
            if (Options::verboseOSR()) {
                dataLog(
                    "Delaying optimization for ", *codeBlock,
                    " because of insufficient profiling.\n");
            }
            return encodeResult(0, 0);
        }

        if (Options::verboseOSR())
            dataLog("Triggering optimized compilation of ", *codeBlock, "\n");

        unsigned numVarsWithValues;
        if (bytecodeIndex)
            numVarsWithValues = codeBlock->m_numVars;
        else
            numVarsWithValues = 0;
        Operands<JSValue> mustHandleValues(codeBlock->numParameters(), numVarsWithValues);
        int localsUsedForCalleeSaves = static_cast<int>(CodeBlock::llintBaselineCalleeSaveSpaceAsVirtualRegisters());
        for (size_t i = 0; i < mustHandleValues.size(); ++i) {
            int operand = mustHandleValues.operandForIndex(i);
            if (operandIsLocal(operand) && VirtualRegister(operand).toLocal() < localsUsedForCalleeSaves)
                continue;
            mustHandleValues[i] = exec->uncheckedR(operand).jsValue();
        }

        CodeBlock* replacementCodeBlock = codeBlock->newReplacement();
        CompilationResult result = DFG::compile(
            vm, replacementCodeBlock, nullptr, DFG::DFGMode, bytecodeIndex,
            mustHandleValues, JITToDFGDeferredCompilationCallback::create());

        if (result != CompilationSuccessful)
            return encodeResult(0, 0);
    }

    CodeBlock* optimizedCodeBlock = codeBlock->replacement();
    ASSERT(JITCode::isOptimizingJIT(optimizedCodeBlock->jitType()));

    if (void* dataBuffer = DFG::prepareOSREntry(exec, optimizedCodeBlock, bytecodeIndex)) {
        if (Options::verboseOSR()) {
            dataLog(
                "Performing OSR ", *codeBlock, " -> ", *optimizedCodeBlock, ".\n");
        }

        codeBlock->optimizeSoon();
        return encodeResult(vm.getCTIStub(DFG::osrEntryThunkGenerator).code().executableAddress(), dataBuffer);
    }

    if (Options::verboseOSR()) {
        dataLog(
            "Optimizing ", *codeBlock, " -> ", *codeBlock->replacement(),
            " succeeded, OSR failed, after a delay of ",
            codeBlock->optimizationDelayCounter(), ".\n");
    }

    // Count the OSR failure as a speculation failure. If this happens a lot, then
    // reoptimize.
    optimizedCodeBlock->countOSRExit();

    // We are a lot more conservative about triggering reoptimization after OSR failure than
    // before it. If we enter the optimize_from_loop trigger with a bucket full of fail
    // already, then we really would like to reoptimize immediately. But this case covers
    // something else: there weren't many (or any) speculation failures before, but we just
    // failed to enter the speculative code because some variable had the wrong value or
    // because the OSR code decided for any spurious reason that it did not want to OSR
    // right now. So, we trigger reoptimization only upon the more conservative (non-loop)
    // reoptimization trigger.
    if (optimizedCodeBlock->shouldReoptimizeNow()) {
        if (Options::verboseOSR()) {
            dataLog(
                "Triggering reoptimization of ", *codeBlock, " -> ",
                *codeBlock->replacement(), " (after OSR fail).\n");
        }
        optimizedCodeBlock->jettison(Profiler::JettisonDueToBaselineLoopReoptimizationTriggerOnOSREntryFail, CountReoptimization);
        return encodeResult(0, 0);
    }

    // OSR failed this time, but it might succeed next time! Let the code run a bit
    // longer and then try again.
    codeBlock->optimizeAfterWarmUp();

    return encodeResult(0, 0);
}
#endif // ENABLE(DFG_JIT)
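
// Result protocol for operationOptimize above, as consumed by the baseline JIT's tier-up check:
// encodeResult(0, 0) means "keep running baseline code", while a non-null first word is the
// address of the OSR entry thunk and the second word is the scratch buffer prepared by
// DFG::prepareOSREntry that the thunk consumes.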
void JIT_OPERATION operationPutByIndex(ExecState* exec, EncodedJSValue encodedArrayValue, int32_t index, EncodedJSValue encodedValue)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    JSValue arrayValue = JSValue::decode(encodedArrayValue);
    ASSERT(isJSArray(arrayValue));
    asArray(arrayValue)->putDirectIndex(exec, index, JSValue::decode(encodedValue));
}
enum class AccessorType {
    Getter,
    Setter
};
static void putAccessorByVal(ExecState* exec, JSObject* base, JSValue subscript, int32_t attribute, JSObject* accessor, AccessorType accessorType)
{
    auto propertyKey = subscript.toPropertyKey(exec);
    if (exec->hadException())
        return;

    if (accessorType == AccessorType::Getter)
        base->putGetter(exec, propertyKey, accessor, attribute);
    else
        base->putSetter(exec, propertyKey, accessor, attribute);
}
void JIT_OPERATION operationPutGetterById(ExecState* exec, JSCell* object, UniquedStringImpl* uid, int32_t options, JSCell* getter)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    ASSERT(object && object->isObject());
    JSObject* baseObj = object->getObject();

    ASSERT(getter->isObject());
    baseObj->putGetter(exec, uid, getter, options);
}
void JIT_OPERATION operationPutSetterById(ExecState* exec, JSCell* object, UniquedStringImpl* uid, int32_t options, JSCell* setter)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    ASSERT(object && object->isObject());
    JSObject* baseObj = object->getObject();

    ASSERT(setter->isObject());
    baseObj->putSetter(exec, uid, setter, options);
}
void JIT_OPERATION operationPutGetterByVal(ExecState* exec, JSCell* base, EncodedJSValue encodedSubscript, int32_t attribute, JSCell* getter)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    putAccessorByVal(exec, asObject(base), JSValue::decode(encodedSubscript), attribute, asObject(getter), AccessorType::Getter);
}
1482 void JIT_OPERATION operationPutSetterByVal(ExecState* exec, JSCell* base, EncodedJSValue encodedSubscript, int32_t attribute, JSCell* setter)
1484 VM& vm = exec->vm();
1485 NativeCallFrameTracer tracer(&vm, exec);
1487 putAccessorByVal(exec, asObject(base), JSValue::decode(encodedSubscript), attribute, asObject(setter), AccessorType::Setter);
1491 void JIT_OPERATION operationPutGetterSetter(ExecState* exec, JSCell* object, UniquedStringImpl* uid, int32_t attribute, EncodedJSValue encodedGetterValue, EncodedJSValue encodedSetterValue)
1493 VM& vm = exec->vm();
1494 NativeCallFrameTracer tracer(&vm, exec);
1496 ASSERT(object && object->isObject());
1497 JSObject* baseObj = asObject(object);
1499 GetterSetter* accessor = GetterSetter::create(vm, exec->lexicalGlobalObject());
1501 JSValue getter = JSValue::decode(encodedGetterValue);
1502 JSValue setter = JSValue::decode(encodedSetterValue);
1503 ASSERT(getter.isObject() || getter.isUndefined());
1504 ASSERT(setter.isObject() || setter.isUndefined());
1505 ASSERT(getter.isObject() || setter.isObject());
1507 if (!getter.isUndefined())
1508 accessor->setGetter(vm, exec->lexicalGlobalObject(), asObject(getter));
1509 if (!setter.isUndefined())
1510 accessor->setSetter(vm, exec->lexicalGlobalObject(), asObject(setter));
1511 baseObj->putDirectAccessor(exec, uid, accessor, attribute);
1515 void JIT_OPERATION operationPutGetterSetter(ExecState* exec, JSCell* object, UniquedStringImpl* uid, int32_t attribute, JSCell* getter, JSCell* setter)
1517 VM& vm = exec->vm();
1518 NativeCallFrameTracer tracer(&vm, exec);
1520 ASSERT(object && object->isObject());
1521 JSObject* baseObj = asObject(object);
1523 GetterSetter* accessor = GetterSetter::create(vm, exec->lexicalGlobalObject());
1525 ASSERT(!getter || getter->isObject());
1526 ASSERT(!setter || setter->isObject());
1527 ASSERT(getter || setter);
if (getter)
    accessor->setGetter(vm, exec->lexicalGlobalObject(), getter->getObject());
if (setter)
    accessor->setSetter(vm, exec->lexicalGlobalObject(), setter->getObject());
1533 baseObj->putDirectAccessor(exec, uid, accessor, attribute);
1537 void JIT_OPERATION operationPopScope(ExecState* exec, int32_t scopeReg)
1539 VM& vm = exec->vm();
1540 NativeCallFrameTracer tracer(&vm, exec);
1542 JSScope* scope = exec->uncheckedR(scopeReg).Register::scope();
1543 exec->uncheckedR(scopeReg) = scope->next();
1546 void JIT_OPERATION operationProfileDidCall(ExecState* exec, EncodedJSValue encodedValue)
1548 VM& vm = exec->vm();
1549 NativeCallFrameTracer tracer(&vm, exec);
1551 if (LegacyProfiler* profiler = vm.enabledProfiler())
1552 profiler->didExecute(exec, JSValue::decode(encodedValue));
1555 void JIT_OPERATION operationProfileWillCall(ExecState* exec, EncodedJSValue encodedValue)
1557 VM& vm = exec->vm();
1558 NativeCallFrameTracer tracer(&vm, exec);
1560 if (LegacyProfiler* profiler = vm.enabledProfiler())
1561 profiler->willExecute(exec, JSValue::decode(encodedValue));
1564 int32_t JIT_OPERATION operationInstanceOfCustom(ExecState* exec, EncodedJSValue encodedValue, JSObject* constructor, EncodedJSValue encodedHasInstance)
1566 VM& vm = exec->vm();
1567 NativeCallFrameTracer tracer(&vm, exec);
1569 JSValue value = JSValue::decode(encodedValue);
1570 JSValue hasInstanceValue = JSValue::decode(encodedHasInstance);
1572 ASSERT(hasInstanceValue != exec->lexicalGlobalObject()->functionProtoHasInstanceSymbolFunction() || !constructor->structure()->typeInfo().implementsDefaultHasInstance());
if (constructor->hasInstance(exec, value, hasInstanceValue))
    return 1;
return 0;
1581 static bool canAccessArgumentIndexQuickly(JSObject& object, uint32_t index)
switch (object.structure()->typeInfo().type()) {
case DirectArgumentsType: {
    DirectArguments* directArguments = jsCast<DirectArguments*>(&object);
    if (directArguments->canAccessArgumentIndexQuicklyInDFG(index))
        return true;
    break;
}
case ScopedArgumentsType: {
    ScopedArguments* scopedArguments = jsCast<ScopedArguments*>(&object);
    if (scopedArguments->canAccessArgumentIndexQuicklyInDFG(index))
        return true;
    break;
}
default:
    break;
}
return false;
1602 static JSValue getByVal(ExecState* exec, JSValue baseValue, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
1604 if (LIKELY(baseValue.isCell() && subscript.isString())) {
1605 VM& vm = exec->vm();
1606 Structure& structure = *baseValue.asCell()->structure(vm);
1607 if (JSCell::canUseFastGetOwnProperty(structure)) {
1608 if (RefPtr<AtomicStringImpl> existingAtomicString = asString(subscript)->toExistingAtomicString(exec)) {
1609 if (JSValue result = baseValue.asCell()->fastGetOwnProperty(vm, structure, existingAtomicString.get())) {
1610 ASSERT(exec->bytecodeOffset());
if (byValInfo->stubInfo && byValInfo->cachedId.impl() != existingAtomicString)
    byValInfo->tookSlowPath = true;
return result;
}
}
}
}
1619 if (subscript.isUInt32()) {
1620 ASSERT(exec->bytecodeOffset());
1621 byValInfo->tookSlowPath = true;
1623 uint32_t i = subscript.asUInt32();
1624 if (isJSString(baseValue)) {
1625 if (asString(baseValue)->canGetIndex(i)) {
1626 ctiPatchCallByReturnAddress(returnAddress, FunctionPtr(operationGetByValString));
return asString(baseValue)->getIndex(exec, i);
}
1629 byValInfo->arrayProfile->setOutOfBounds();
1630 } else if (baseValue.isObject()) {
1631 JSObject* object = asObject(baseValue);
1632 if (object->canGetIndexQuickly(i))
1633 return object->getIndexQuickly(i);
1635 if (!canAccessArgumentIndexQuickly(*object, i)) {
// FIXME: This will make us think that in-bounds typed array accesses are actually
// out-of-bounds.
// https://bugs.webkit.org/show_bug.cgi?id=149886
byValInfo->arrayProfile->setOutOfBounds();
}
}

return baseValue.get(exec, i);
}
1646 baseValue.requireObjectCoercible(exec);
1647 if (exec->hadException())
1648 return jsUndefined();
1649 auto property = subscript.toPropertyKey(exec);
1650 if (exec->hadException())
1651 return jsUndefined();
1653 ASSERT(exec->bytecodeOffset());
1654 if (byValInfo->stubInfo && (!isStringOrSymbol(subscript) || byValInfo->cachedId != property))
1655 byValInfo->tookSlowPath = true;
1657 return baseValue.get(exec, property);
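// Compiled-out sketch (toy types, not JSC API) of the three-tier lookup that
// getByVal() above performs: interned-string fast path, in-bounds integer
// path, then the fully generic lookup.
#if 0
#include <cstddef>
#include <map>
#include <string>
#include <vector>

struct ToyObject {
    std::vector<int> indexedStorage;         // stands in for butterfly storage
    std::map<std::string, int> namedStorage; // stands in for named properties

    int getByVal(const std::string* name, long index) const
    {
        if (name) { // string subscript: direct keyed lookup
            auto it = namedStorage.find(*name);
            return it == namedStorage.end() ? 0 : it->second;
        }
        if (index >= 0 && static_cast<size_t>(index) < indexedStorage.size())
            return indexedStorage[index]; // in-bounds integer fast path
        return 0; // generic path: prototype chain, exotic behavior, etc.
    }
};
#endif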
1660 static OptimizationResult tryGetByValOptimize(ExecState* exec, JSValue baseValue, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
1662 // See if it's worth optimizing this at all.
1663 OptimizationResult optimizationResult = OptimizationResult::NotOptimized;
1665 VM& vm = exec->vm();
1667 if (baseValue.isObject() && subscript.isInt32()) {
1668 JSObject* object = asObject(baseValue);
1670 ASSERT(exec->bytecodeOffset());
1671 ASSERT(!byValInfo->stubRoutine);
1673 if (hasOptimizableIndexing(object->structure(vm))) {
1674 // Attempt to optimize.
1675 Structure* structure = object->structure(vm);
1676 JITArrayMode arrayMode = jitArrayModeForStructure(structure);
1677 if (arrayMode != byValInfo->arrayMode) {
1678 // If we reached this case, we got an interesting array mode we did not expect when we compiled.
1679 // Let's update the profile to do better next time.
1680 CodeBlock* codeBlock = exec->codeBlock();
1681 ConcurrentJITLocker locker(codeBlock->m_lock);
byValInfo->arrayProfile->computeUpdatedPrediction(locker, codeBlock, structure);
}

JIT::compileGetByVal(&vm, exec->codeBlock(), byValInfo, returnAddress, arrayMode);
optimizationResult = OptimizationResult::Optimized;
}
1689 // If we failed to patch and we have some object that intercepts indexed get, then don't even wait until 10 times.
1690 if (optimizationResult != OptimizationResult::Optimized && object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero())
optimizationResult = OptimizationResult::GiveUp;
}
1694 if (baseValue.isObject() && isStringOrSymbol(subscript)) {
1695 const Identifier propertyName = subscript.toPropertyKey(exec);
1696 if (!subscript.isString() || !parseIndex(propertyName)) {
1697 ASSERT(exec->bytecodeOffset());
1698 ASSERT(!byValInfo->stubRoutine);
1699 if (byValInfo->seen) {
1700 if (byValInfo->cachedId == propertyName) {
1701 JIT::compileGetByValWithCachedId(&vm, exec->codeBlock(), byValInfo, returnAddress, propertyName);
optimizationResult = OptimizationResult::Optimized;
} else {
    // Seems like a generic property access site.
    optimizationResult = OptimizationResult::GiveUp;
}
} else {
    byValInfo->seen = true;
    byValInfo->cachedId = propertyName;
    optimizationResult = OptimizationResult::SeenOnce;
}
}
}
1716 if (optimizationResult != OptimizationResult::Optimized && optimizationResult != OptimizationResult::SeenOnce) {
1717 // If we take slow path more than 10 times without patching then make sure we
1718 // never make that mistake again. For cases where we see non-index-intercepting
1719 // objects, this gives 10 iterations worth of opportunity for us to observe
1720 // that the get_by_val may be polymorphic. We count up slowPathCount even if
1721 // the result is GiveUp.
1722 if (++byValInfo->slowPathCount >= 10)
optimizationResult = OptimizationResult::GiveUp;
}
1726 return optimizationResult;
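// Compiled-out sketch of the give-up heuristic implemented above: a site may
// miss the patched fast path a bounded number of times before it is pinned to
// the generic operation forever. Names are hypothetical.
#if 0
#include <cstdint>

enum class ToySiteState { Patchable, Generic };

struct ToyByValSite {
    uint32_t slowPathCount { 0 };
    ToySiteState state { ToySiteState::Patchable };

    void recordSlowPathTrip()
    {
        // Mirrors "++byValInfo->slowPathCount >= 10": after ten unpatched
        // slow-path trips, stop trying to repatch this access site.
        if (++slowPathCount >= 10)
            state = ToySiteState::Generic;
    }
};
#endif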
1731 EncodedJSValue JIT_OPERATION operationGetByValGeneric(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
1733 VM& vm = exec->vm();
1734 NativeCallFrameTracer tracer(&vm, exec);
1735 JSValue baseValue = JSValue::decode(encodedBase);
1736 JSValue subscript = JSValue::decode(encodedSubscript);
1738 JSValue result = getByVal(exec, baseValue, subscript, byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS));
1739 return JSValue::encode(result);
1742 EncodedJSValue JIT_OPERATION operationGetByValOptimize(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
1744 VM& vm = exec->vm();
1745 NativeCallFrameTracer tracer(&vm, exec);
1747 JSValue baseValue = JSValue::decode(encodedBase);
1748 JSValue subscript = JSValue::decode(encodedSubscript);
1749 ReturnAddressPtr returnAddress = ReturnAddressPtr(OUR_RETURN_ADDRESS);
1750 if (tryGetByValOptimize(exec, baseValue, subscript, byValInfo, returnAddress) == OptimizationResult::GiveUp) {
1751 // Don't ever try to optimize.
1752 byValInfo->tookSlowPath = true;
ctiPatchCallByReturnAddress(returnAddress, FunctionPtr(operationGetByValGeneric));
}
1756 return JSValue::encode(getByVal(exec, baseValue, subscript, byValInfo, returnAddress));
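// The ctiPatchCallByReturnAddress() call above is self-modifying dispatch: the
// slow path locates the JIT'd call that invoked it (via its own return
// address) and rewrites that call's target, so the next execution goes
// straight to the cheaper handler. Compiled-out sketch with a function pointer
// standing in for the patchable machine-code call site (hypothetical names):
#if 0
using ToyHandler = long (*)(long);

static long toyGenericHandler(long value) { return value; }

static ToyHandler s_toyCallSite; // stands in for the patchable JIT call

static long toyOptimizingHandler(long value)
{
    s_toyCallSite = toyGenericHandler; // "patch": future calls skip this path
    return toyGenericHandler(value);
}
#endif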
1759 EncodedJSValue JIT_OPERATION operationHasIndexedPropertyDefault(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
1761 VM& vm = exec->vm();
1762 NativeCallFrameTracer tracer(&vm, exec);
1763 JSValue baseValue = JSValue::decode(encodedBase);
1764 JSValue subscript = JSValue::decode(encodedSubscript);
1766 ASSERT(baseValue.isObject());
1767 ASSERT(subscript.isUInt32());
1769 JSObject* object = asObject(baseValue);
1770 bool didOptimize = false;
1772 ASSERT(exec->bytecodeOffset());
1773 ASSERT(!byValInfo->stubRoutine);
1775 if (hasOptimizableIndexing(object->structure(vm))) {
1776 // Attempt to optimize.
1777 JITArrayMode arrayMode = jitArrayModeForStructure(object->structure(vm));
1778 if (arrayMode != byValInfo->arrayMode) {
JIT::compileHasIndexedProperty(&vm, exec->codeBlock(), byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS), arrayMode);
didOptimize = true;
}
}

if (!didOptimize) {
// If we take slow path more than 10 times without patching then make sure we
1786 // never make that mistake again. Or, if we failed to patch and we have some object
1787 // that intercepts indexed get, then don't even wait until 10 times. For cases
1788 // where we see non-index-intercepting objects, this gives 10 iterations worth of
1789 // opportunity for us to observe that the get_by_val may be polymorphic.
1790 if (++byValInfo->slowPathCount >= 10
1791 || object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero()) {
1792 // Don't ever try to optimize.
ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(operationHasIndexedPropertyGeneric));
}
}
1797 uint32_t index = subscript.asUInt32();
1798 if (object->canGetIndexQuickly(index))
1799 return JSValue::encode(JSValue(JSValue::JSTrue));
1801 if (!canAccessArgumentIndexQuickly(*object, index)) {
// FIXME: This will make us think that in-bounds typed array accesses are actually
// out-of-bounds.
// https://bugs.webkit.org/show_bug.cgi?id=149886
byValInfo->arrayProfile->setOutOfBounds();
}
1807 return JSValue::encode(jsBoolean(object->hasPropertyGeneric(exec, index, PropertySlot::InternalMethodType::GetOwnProperty)));
1810 EncodedJSValue JIT_OPERATION operationHasIndexedPropertyGeneric(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
1812 VM& vm = exec->vm();
1813 NativeCallFrameTracer tracer(&vm, exec);
1814 JSValue baseValue = JSValue::decode(encodedBase);
1815 JSValue subscript = JSValue::decode(encodedSubscript);
1817 ASSERT(baseValue.isObject());
1818 ASSERT(subscript.isUInt32());
1820 JSObject* object = asObject(baseValue);
1821 uint32_t index = subscript.asUInt32();
1822 if (object->canGetIndexQuickly(index))
1823 return JSValue::encode(JSValue(JSValue::JSTrue));
1825 if (!canAccessArgumentIndexQuickly(*object, index)) {
// FIXME: This will make us think that in-bounds typed array accesses are actually
// out-of-bounds.
// https://bugs.webkit.org/show_bug.cgi?id=149886
byValInfo->arrayProfile->setOutOfBounds();
}
return JSValue::encode(jsBoolean(object->hasPropertyGeneric(exec, index, PropertySlot::InternalMethodType::GetOwnProperty)));
1834 EncodedJSValue JIT_OPERATION operationGetByValString(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
1836 VM& vm = exec->vm();
1837 NativeCallFrameTracer tracer(&vm, exec);
1838 JSValue baseValue = JSValue::decode(encodedBase);
1839 JSValue subscript = JSValue::decode(encodedSubscript);
JSValue result;
if (LIKELY(subscript.isUInt32())) {
1843 uint32_t i = subscript.asUInt32();
1844 if (isJSString(baseValue) && asString(baseValue)->canGetIndex(i))
result = asString(baseValue)->getIndex(exec, i);
else {
    result = baseValue.get(exec, i);
1848 if (!isJSString(baseValue)) {
1849 ASSERT(exec->bytecodeOffset());
ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(byValInfo->stubRoutine ? operationGetByValGeneric : operationGetByValOptimize));
}
}
} else {
1854 baseValue.requireObjectCoercible(exec);
1855 if (exec->hadException())
1856 return JSValue::encode(jsUndefined());
1857 auto property = subscript.toPropertyKey(exec);
1858 if (exec->hadException())
1859 return JSValue::encode(jsUndefined());
result = baseValue.get(exec, property);
}
1863 return JSValue::encode(result);
1866 EncodedJSValue JIT_OPERATION operationDeleteById(ExecState* exec, EncodedJSValue encodedBase, const Identifier* identifier)
1868 VM& vm = exec->vm();
1869 NativeCallFrameTracer tracer(&vm, exec);
1871 JSObject* baseObj = JSValue::decode(encodedBase).toObject(exec);
if (!baseObj)
    return JSValue::encode(JSValue());
1874 bool couldDelete = baseObj->methodTable(vm)->deleteProperty(baseObj, exec, *identifier);
1875 JSValue result = jsBoolean(couldDelete);
1876 if (!couldDelete && exec->codeBlock()->isStrictMode())
1877 vm.throwException(exec, createTypeError(exec, ASCIILiteral("Unable to delete property.")));
1878 return JSValue::encode(result);
1881 EncodedJSValue JIT_OPERATION operationInstanceOf(ExecState* exec, EncodedJSValue encodedValue, EncodedJSValue encodedProto)
1883 VM& vm = exec->vm();
1884 NativeCallFrameTracer tracer(&vm, exec);
1885 JSValue value = JSValue::decode(encodedValue);
1886 JSValue proto = JSValue::decode(encodedProto);
1888 bool result = JSObject::defaultHasInstance(exec, value, proto);
1889 return JSValue::encode(jsBoolean(result));
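// JSObject::defaultHasInstance() above is the ordinary `instanceof` check: a
// walk up the value's prototype chain looking for the constructor's
// "prototype" object. Compiled-out sketch over a toy object graph
// (hypothetical names):
#if 0
struct ToyObjectNode {
    ToyObjectNode* prototype { nullptr };
};

static bool toyDefaultHasInstance(ToyObjectNode* value, ToyObjectNode* constructorPrototype)
{
    for (ToyObjectNode* p = value ? value->prototype : nullptr; p; p = p->prototype) {
        if (p == constructorPrototype)
            return true;
    }
    return false;
}
#endif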
1892 int32_t JIT_OPERATION operationSizeFrameForVarargs(ExecState* exec, EncodedJSValue encodedArguments, int32_t numUsedStackSlots, int32_t firstVarArgOffset)
1894 VM& vm = exec->vm();
1895 NativeCallFrameTracer tracer(&vm, exec);
1896 JSStack* stack = &exec->interpreter()->stack();
1897 JSValue arguments = JSValue::decode(encodedArguments);
1898 return sizeFrameForVarargs(exec, stack, arguments, numUsedStackSlots, firstVarArgOffset);
1901 CallFrame* JIT_OPERATION operationSetupVarargsFrame(ExecState* exec, CallFrame* newCallFrame, EncodedJSValue encodedArguments, int32_t firstVarArgOffset, int32_t length)
1903 VM& vm = exec->vm();
1904 NativeCallFrameTracer tracer(&vm, exec);
1905 JSValue arguments = JSValue::decode(encodedArguments);
1906 setupVarargsFrame(exec, newCallFrame, arguments, firstVarArgOffset, length);
1907 return newCallFrame;
1910 EncodedJSValue JIT_OPERATION operationToObject(ExecState* exec, EncodedJSValue value)
1912 VM& vm = exec->vm();
1913 NativeCallFrameTracer tracer(&vm, exec);
1914 JSObject* obj = JSValue::decode(value).toObject(exec);
if (!obj)
    return JSValue::encode(JSValue());
1917 return JSValue::encode(obj);
1920 char* JIT_OPERATION operationSwitchCharWithUnknownKeyType(ExecState* exec, EncodedJSValue encodedKey, size_t tableIndex)
1922 VM& vm = exec->vm();
1923 NativeCallFrameTracer tracer(&vm, exec);
1924 JSValue key = JSValue::decode(encodedKey);
1925 CodeBlock* codeBlock = exec->codeBlock();
1927 SimpleJumpTable& jumpTable = codeBlock->switchJumpTable(tableIndex);
1928 void* result = jumpTable.ctiDefault.executableAddress();
1930 if (key.isString()) {
1931 StringImpl* value = asString(key)->value(exec).impl();
1932 if (value->length() == 1)
result = jumpTable.ctiForValue((*value)[0]).executableAddress();
}
1936 return reinterpret_cast<char*>(result);
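// Compiled-out sketch of what a SimpleJumpTable buys the switch above:
// constant-time dispatch keyed on one code unit, with every unmapped key
// falling through to the default target (toy table, hypothetical names):
#if 0
#include <array>

struct ToyCharJumpTable {
    std::array<void*, 128> targets {}; // null = no case for this character
    void* defaultTarget { nullptr };

    void* lookup(unsigned ch) const
    {
        if (ch < targets.size() && targets[ch])
            return targets[ch];
        return defaultTarget;
    }
};
#endif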
1939 char* JIT_OPERATION operationSwitchImmWithUnknownKeyType(ExecState* exec, EncodedJSValue encodedKey, size_t tableIndex)
1941 VM& vm = exec->vm();
1942 NativeCallFrameTracer tracer(&vm, exec);
1943 JSValue key = JSValue::decode(encodedKey);
1944 CodeBlock* codeBlock = exec->codeBlock();
1946 SimpleJumpTable& jumpTable = codeBlock->switchJumpTable(tableIndex);
void* result;
if (key.isInt32())
    result = jumpTable.ctiForValue(key.asInt32()).executableAddress();
1950 else if (key.isDouble() && key.asDouble() == static_cast<int32_t>(key.asDouble()))
1951 result = jumpTable.ctiForValue(static_cast<int32_t>(key.asDouble())).executableAddress();
else
    result = jumpTable.ctiDefault.executableAddress();
1954 return reinterpret_cast<char*>(result);
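// The isDouble() arm above catches keys like 5.0 that are numerically an
// int32 but arrived boxed as doubles. Compiled-out sketch of that
// canonicalization test; note that in portable C++ the cast is only
// well-defined when the double is in int32 range, the same caveat the code
// above lives with:
#if 0
#include <cstdint>

static bool toyIsInt32Key(double d)
{
    // NaN compares unequal to everything, so it falls to the default case;
    // -0.0 == 0 holds, so negative zero canonicalizes to the int32 0 case.
    return d == static_cast<double>(static_cast<int32_t>(d));
}
#endif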
1957 char* JIT_OPERATION operationSwitchStringWithUnknownKeyType(ExecState* exec, EncodedJSValue encodedKey, size_t tableIndex)
1959 VM& vm = exec->vm();
1960 NativeCallFrameTracer tracer(&vm, exec);
1961 JSValue key = JSValue::decode(encodedKey);
1962 CodeBlock* codeBlock = exec->codeBlock();
void* result;
StringJumpTable& jumpTable = codeBlock->stringSwitchJumpTable(tableIndex);
1967 if (key.isString()) {
1968 StringImpl* value = asString(key)->value(exec).impl();
1969 result = jumpTable.ctiForValue(value).executableAddress();
} else
    result = jumpTable.ctiDefault.executableAddress();
1973 return reinterpret_cast<char*>(result);
1976 EncodedJSValue JIT_OPERATION operationGetFromScope(ExecState* exec, Instruction* bytecodePC)
1978 VM& vm = exec->vm();
1979 NativeCallFrameTracer tracer(&vm, exec);
1980 CodeBlock* codeBlock = exec->codeBlock();
1981 Instruction* pc = bytecodePC;
1983 const Identifier& ident = codeBlock->identifier(pc[3].u.operand);
1984 JSObject* scope = jsCast<JSObject*>(exec->uncheckedR(pc[2].u.operand).jsValue());
1985 GetPutInfo getPutInfo(pc[4].u.operand);
1987 // ModuleVar is always converted to ClosureVar for get_from_scope.
1988 ASSERT(getPutInfo.resolveType() != ModuleVar);
1990 PropertySlot slot(scope, PropertySlot::InternalMethodType::Get);
1991 if (!scope->getPropertySlot(exec, ident, slot)) {
1992 if (getPutInfo.resolveMode() == ThrowIfNotFound)
1993 vm.throwException(exec, createUndefinedVariableError(exec, ident));
return JSValue::encode(jsUndefined());
}
1997 JSValue result = JSValue();
1998 if (scope->isGlobalLexicalEnvironment()) {
1999 // When we can't statically prove we need a TDZ check, we must perform the check on the slow path.
2000 result = slot.getValue(exec, ident);
2001 if (result == jsTDZValue()) {
2002 exec->vm().throwException(exec, createTDZError(exec));
return JSValue::encode(jsUndefined());
}
}

CommonSlowPaths::tryCacheGetFromScopeGlobal(exec, vm, pc, scope, slot, ident);

if (!result)
    result = slot.getValue(exec, ident);
2011 return JSValue::encode(result);
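// The jsTDZValue() comparison above is how the temporal dead zone is enforced
// on this slow path: a `let`/`const` global lexical binding holds a sentinel
// "empty" value until it is initialized, and reading the sentinel must throw.
// Compiled-out sketch of the sentinel pattern (toy types):
#if 0
#include <optional>
#include <stdexcept>

struct ToyLexicalBinding {
    std::optional<int> value; // nullopt plays the role of jsTDZValue()

    int read() const
    {
        if (!value) // still in the temporal dead zone
            throw std::runtime_error("cannot access binding before initialization");
        return *value;
    }
};
#endif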
2014 void JIT_OPERATION operationPutToScope(ExecState* exec, Instruction* bytecodePC)
2016 VM& vm = exec->vm();
2017 NativeCallFrameTracer tracer(&vm, exec);
2018 Instruction* pc = bytecodePC;
2020 CodeBlock* codeBlock = exec->codeBlock();
2021 const Identifier& ident = codeBlock->identifier(pc[2].u.operand);
2022 JSObject* scope = jsCast<JSObject*>(exec->uncheckedR(pc[1].u.operand).jsValue());
2023 JSValue value = exec->r(pc[3].u.operand).jsValue();
2024 GetPutInfo getPutInfo = GetPutInfo(pc[4].u.operand);
2026 // ModuleVar does not keep the scope register value alive in DFG.
2027 ASSERT(getPutInfo.resolveType() != ModuleVar);
2029 if (getPutInfo.resolveType() == LocalClosureVar) {
2030 JSLexicalEnvironment* environment = jsCast<JSLexicalEnvironment*>(scope);
2031 environment->variableAt(ScopeOffset(pc[6].u.operand)).set(vm, environment, value);
2032 if (WatchpointSet* set = pc[5].u.watchpointSet)
set->touch("Executed op_put_scope<LocalClosureVar>");
return;
}
bool hasProperty = scope->hasProperty(exec, ident);
if (hasProperty
    && scope->isGlobalLexicalEnvironment()
    && getPutInfo.initializationMode() != Initialization) {
2041 // When we can't statically prove we need a TDZ check, we must perform the check on the slow path.
2042 PropertySlot slot(scope, PropertySlot::InternalMethodType::Get);
2043 JSGlobalLexicalEnvironment::getOwnPropertySlot(scope, exec, ident, slot);
2044 if (slot.getValue(exec, ident) == jsTDZValue()) {
exec->vm().throwException(exec, createTDZError(exec));
return;
}
}
2050 if (getPutInfo.resolveMode() == ThrowIfNotFound && !hasProperty) {
exec->vm().throwException(exec, createUndefinedVariableError(exec, ident));
return;
}
2055 PutPropertySlot slot(scope, codeBlock->isStrictMode(), PutPropertySlot::UnknownContext, getPutInfo.initializationMode() == Initialization);
2056 scope->methodTable()->put(scope, exec, ident, value, slot);
if (exec->vm().exception())
    return;
2061 CommonSlowPaths::tryCachePutToScopeGlobal(exec, codeBlock, pc, scope, getPutInfo, slot, ident);
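// The WatchpointSet::touch() call in the LocalClosureVar path above is the
// invalidation half of a speculation contract: optimized code that assumed
// the variable was constant registered itself on the set, and the first write
// fires the set once and jettisons those assumptions. Compiled-out sketch of
// that fire-once shape (toy types, hypothetical names):
#if 0
#include <functional>
#include <vector>

struct ToyWatchpointSet {
    bool stillValid { true };
    std::vector<std::function<void()>> onInvalidate; // e.g. jettison callbacks

    void touch()
    {
        if (!stillValid)
            return; // already fired; touching again is a no-op
        stillValid = false;
        for (auto& callback : onInvalidate)
            callback();
    }
};
#endif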
2064 void JIT_OPERATION operationThrow(ExecState* exec, EncodedJSValue encodedExceptionValue)
2066 VM* vm = &exec->vm();
2067 NativeCallFrameTracer tracer(vm, exec);
2069 JSValue exceptionValue = JSValue::decode(encodedExceptionValue);
2070 vm->throwException(exec, exceptionValue);
2072 // Results stored out-of-band in vm.targetMachinePCForThrow & vm.callFrameForCatch
2073 genericUnwind(vm, exec);
2076 char* JIT_OPERATION operationReallocateButterflyToHavePropertyStorageWithInitialCapacity(ExecState* exec, JSObject* object)
2078 VM& vm = exec->vm();
2079 NativeCallFrameTracer tracer(&vm, exec);
2081 ASSERT(!object->structure()->outOfLineCapacity());
2082 DeferGC deferGC(vm.heap);
2083 Butterfly* result = object->growOutOfLineStorage(vm, 0, initialOutOfLineCapacity);
2084 object->setButterflyWithoutChangingStructure(vm, result);
2085 return reinterpret_cast<char*>(result);
2088 char* JIT_OPERATION operationReallocateButterflyToGrowPropertyStorage(ExecState* exec, JSObject* object, size_t newSize)
2090 VM& vm = exec->vm();
2091 NativeCallFrameTracer tracer(&vm, exec);
2093 DeferGC deferGC(vm.heap);
2094 Butterfly* result = object->growOutOfLineStorage(vm, object->structure()->outOfLineCapacity(), newSize);
2095 object->setButterflyWithoutChangingStructure(vm, result);
2096 return reinterpret_cast<char*>(result);
2099 void JIT_OPERATION operationFlushWriteBarrierBuffer(ExecState* exec, JSCell* cell)
2101 VM* vm = &exec->vm();
2102 NativeCallFrameTracer tracer(vm, exec);
2103 vm->heap.flushWriteBarrierBuffer(cell);
2106 void JIT_OPERATION operationOSRWriteBarrier(ExecState* exec, JSCell* cell)
2108 VM* vm = &exec->vm();
2109 NativeCallFrameTracer tracer(vm, exec);
2110 vm->heap.writeBarrier(cell);
2113 // NB: We don't include the value as part of the barrier because the write barrier elision
2114 // phase in the DFG only tracks whether the object being stored to has been barriered. It
2115 // would be much more complicated to try to model the value being stored as well.
2116 void JIT_OPERATION operationUnconditionalWriteBarrier(ExecState* exec, JSCell* cell)
2118 VM* vm = &exec->vm();
2119 NativeCallFrameTracer tracer(vm, exec);
2120 vm->heap.writeBarrier(cell);
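// Compiled-out sketch of the generational invariant the barriers above
// maintain: an object that has already been scanned ("old") must be
// remembered when it may gain a pointer to unscanned ("new") data, so the
// collector revisits its outgoing edges. Toy model with hypothetical names;
// JSC's real barrier also has concurrency and eden/full-collection wrinkles.
#if 0
#include <unordered_set>

struct ToyHeap {
    std::unordered_set<void*> rememberedSet;

    bool isOld(void*) const { return true; } // stand-in for the real mark check

    void writeBarrier(void* cell)
    {
        if (isOld(cell))
            rememberedSet.insert(cell); // rescan this cell's outgoing pointers
    }
};
#endif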
2123 void JIT_OPERATION lookupExceptionHandler(VM* vm, ExecState* exec)
2125 NativeCallFrameTracer tracer(vm, exec);
2126 genericUnwind(vm, exec);
2127 ASSERT(vm->targetMachinePCForThrow);
2130 void JIT_OPERATION lookupExceptionHandlerFromCallerFrame(VM* vm, ExecState* exec)
2132 NativeCallFrameTracer tracer(vm, exec);
2133 genericUnwind(vm, exec, UnwindFromCallerFrame);
2134 ASSERT(vm->targetMachinePCForThrow);
2137 void JIT_OPERATION operationVMHandleException(ExecState* exec)
2139 VM* vm = &exec->vm();
2140 NativeCallFrameTracer tracer(vm, exec);
2141 genericUnwind(vm, exec);
2144 // This function "should" just take the ExecState*, but doing so would make it more difficult
2145 // to call from exception check sites. So, unlike all of our other functions, we allow
2146 // ourselves to play some gnarly ABI tricks just to simplify the calling convention. This is
// particularly safe here since this is never called on the critical path - it's only for
// testing.
2149 void JIT_OPERATION operationExceptionFuzz(ExecState* exec)
2151 VM* vm = &exec->vm();
2152 NativeCallFrameTracer tracer(vm, exec);
2153 #if COMPILER(GCC_OR_CLANG)
2154 void* returnPC = __builtin_return_address(0);
2155 doExceptionFuzzing(exec, "JITOperations", returnPC);
2156 #endif // COMPILER(GCC_OR_CLANG)
2159 EncodedJSValue JIT_OPERATION operationHasGenericProperty(ExecState* exec, EncodedJSValue encodedBaseValue, JSCell* propertyName)
2161 VM& vm = exec->vm();
2162 NativeCallFrameTracer tracer(&vm, exec);
2163 JSValue baseValue = JSValue::decode(encodedBaseValue);
2164 if (baseValue.isUndefinedOrNull())
2165 return JSValue::encode(jsBoolean(false));
2167 JSObject* base = baseValue.toObject(exec);
if (!base)
    return JSValue::encode(JSValue());
2170 return JSValue::encode(jsBoolean(base->hasPropertyGeneric(exec, asString(propertyName)->toIdentifier(exec), PropertySlot::InternalMethodType::GetOwnProperty)));
2173 EncodedJSValue JIT_OPERATION operationHasIndexedProperty(ExecState* exec, JSCell* baseCell, int32_t subscript)
2175 VM& vm = exec->vm();
2176 NativeCallFrameTracer tracer(&vm, exec);
2177 JSObject* object = baseCell->toObject(exec, exec->lexicalGlobalObject());
2178 return JSValue::encode(jsBoolean(object->hasPropertyGeneric(exec, subscript, PropertySlot::InternalMethodType::GetOwnProperty)));
2181 JSCell* JIT_OPERATION operationGetPropertyEnumerator(ExecState* exec, JSCell* cell)
2183 VM& vm = exec->vm();
2184 NativeCallFrameTracer tracer(&vm, exec);
2186 JSObject* base = cell->toObject(exec, exec->lexicalGlobalObject());
2188 return propertyNameEnumerator(exec, base);
2191 EncodedJSValue JIT_OPERATION operationNextEnumeratorPname(ExecState* exec, JSCell* enumeratorCell, int32_t index)
2193 VM& vm = exec->vm();
2194 NativeCallFrameTracer tracer(&vm, exec);
2195 JSPropertyNameEnumerator* enumerator = jsCast<JSPropertyNameEnumerator*>(enumeratorCell);
2196 JSString* propertyName = enumerator->propertyNameAtIndex(index);
2197 return JSValue::encode(propertyName ? propertyName : jsNull());
2200 JSCell* JIT_OPERATION operationToIndexString(ExecState* exec, int32_t index)
2202 VM& vm = exec->vm();
2203 NativeCallFrameTracer tracer(&vm, exec);
2204 return jsString(exec, Identifier::from(exec, index).string());
2207 void JIT_OPERATION operationProcessTypeProfilerLog(ExecState* exec)
2209 VM& vm = exec->vm();
2210 NativeCallFrameTracer tracer(&vm, exec);
2211 vm.typeProfilerLog()->processLogEntries(ASCIILiteral("Log Full, called from inside baseline JIT"));
2214 void JIT_OPERATION operationProcessShadowChickenLog(ExecState* exec)
2216 VM& vm = exec->vm();
2217 NativeCallFrameTracer tracer(&vm, exec);
2218 vm.shadowChicken().update(vm, exec);
2221 int32_t JIT_OPERATION operationCheckIfExceptionIsUncatchableAndNotifyProfiler(ExecState* exec)
2223 VM& vm = exec->vm();
2224 NativeCallFrameTracer tracer(&vm, exec);
2225 RELEASE_ASSERT(!!vm.exception());
2227 if (LegacyProfiler* profiler = vm.enabledProfiler())
2228 profiler->exceptionUnwind(exec);
2230 if (isTerminatedExecutionException(vm.exception())) {
genericUnwind(&vm, exec);
return 1;
}
return 0;
2239 // Note: getHostCallReturnValueWithExecState() needs to be placed before the
// definition of getHostCallReturnValue() below because the Windows build
// requires it.
2242 extern "C" EncodedJSValue HOST_CALL_RETURN_VALUE_OPTION getHostCallReturnValueWithExecState(ExecState* exec)
if (!exec)
    return JSValue::encode(JSValue());
2246 return JSValue::encode(exec->vm().hostCallReturnValue);
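// Each per-architecture thunk below materializes the ExecState* argument from
// the current stack pointer before tail-calling the C function above: when
// getHostCallReturnValue() is reached, the interesting call frame sits at a
// fixed offset from sp. Compiled-out pseudo-C of what the x86_64 pair of
// instructions ("lea -8(%rsp), %rdi" then "jmp") accomplishes:
#if 0
static EncodedJSValue toyGetHostCallReturnValue(char* stackPointer)
{
    ExecState* exec = reinterpret_cast<ExecState*>(stackPointer - 8);
    return getHostCallReturnValueWithExecState(exec);
}
#endif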
2249 #if COMPILER(GCC_OR_CLANG) && CPU(X86_64)
asm (
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
2252 HIDE_SYMBOL(getHostCallReturnValue) "\n"
2253 SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
2254 "lea -8(%rsp), %rdi\n"
2255 "jmp " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
2258 #elif COMPILER(GCC_OR_CLANG) && CPU(X86)
asm (
".text" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
2262 HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    "push %ebp" "\n"
    "mov %esp, %eax" "\n"
    "leal -4(%esp), %esp\n"
    "push %eax" "\n"
    "call " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
    "leal 8(%esp), %esp\n"
    "pop %ebp" "\n"
    "ret" "\n"
);
2274 #elif COMPILER(GCC_OR_CLANG) && CPU(ARM_THUMB2)
asm (
".text" "\n"
".align 2" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
2279 HIDE_SYMBOL(getHostCallReturnValue) "\n"
".thumb" "\n"
".thumb_func " THUMB_FUNC_PARAM(getHostCallReturnValue) "\n"
2282 SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
2283 "sub r0, sp, #8" "\n"
2284 "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
2287 #elif COMPILER(GCC_OR_CLANG) && CPU(ARM_TRADITIONAL)
asm (
".text" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
2291 HIDE_SYMBOL(getHostCallReturnValue) "\n"
2292 INLINE_ARM_FUNCTION(getHostCallReturnValue)
2293 SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
2294 "sub r0, sp, #8" "\n"
2295 "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
2302 ".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
2303 HIDE_SYMBOL(getHostCallReturnValue) "\n"
2304 SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
2305 "sub x0, sp, #16" "\n"
2306 "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
2309 #elif COMPILER(GCC_OR_CLANG) && CPU(MIPS)
#if WTF_MIPS_PIC
#define LOAD_FUNCTION_TO_T9(function) \
2313 ".set noreorder" "\n" \
2314 ".cpload $25" "\n" \
2315 ".set reorder" "\n" \
2316 "la $t9, " LOCAL_REFERENCE(function) "\n"
#else
#define LOAD_FUNCTION_TO_T9(function) "" "\n"
#endif
asm (
".text" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
2324 HIDE_SYMBOL(getHostCallReturnValue) "\n"
2325 SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
2326 LOAD_FUNCTION_TO_T9(getHostCallReturnValueWithExecState)
2327 "addi $a0, $sp, -8" "\n"
2328 "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
2331 #elif COMPILER(GCC_OR_CLANG) && CPU(SH4)
2333 #define SH4_SCRATCH_REGISTER "r11"
asm (
".text" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
2338 HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    "mov r15, r4" "\n"
    "add #-8, r4" "\n"
    "mov.l 2f, " SH4_SCRATCH_REGISTER "\n"
    "braf " SH4_SCRATCH_REGISTER "\n"
    "nop" "\n"
    "1: .balign 4" "\n"
    "2: .long " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "-1b\n"
);
2349 #elif COMPILER(MSVC) && CPU(X86)
extern "C" {
__declspec(naked) EncodedJSValue HOST_CALL_RETURN_VALUE_OPTION getHostCallReturnValue()
{
2353 __asm lea eax, [esp - 4]
2354 __asm mov [esp + 4], eax;
__asm jmp getHostCallReturnValueWithExecState
}
}
#endif
2362 #endif // ENABLE(JIT)