2 * Copyright (C) 2013-2017 Apple Inc. All rights reserved.
4 * Redistribution and use in source and binary forms, with or without
5 * modification, are permitted provided that the following conditions
7 * 1. Redistributions of source code must retain the above copyright
8 * notice, this list of conditions and the following disclaimer.
9 * 2. Redistributions in binary form must reproduce the above copyright
10 * notice, this list of conditions and the following disclaimer in the
11 * documentation and/or other materials provided with the distribution.
13 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
17 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 #include "JITOperations.h"
31 #include "ArithProfile.h"
32 #include "ArrayConstructor.h"
33 #include "CommonSlowPaths.h"
34 #include "DFGCompilationMode.h"
35 #include "DFGDriver.h"
36 #include "DFGOSREntry.h"
37 #include "DFGThunks.h"
38 #include "DFGWorklist.h"
40 #include "DirectArguments.h"
42 #include "ErrorHandlingScope.h"
43 #include "EvalCodeBlock.h"
44 #include "ExceptionFuzz.h"
45 #include "FTLOSREntry.h"
46 #include "FrameTracers.h"
47 #include "FunctionCodeBlock.h"
48 #include "GetterSetter.h"
49 #include "HostCallReturnValue.h"
51 #include "Interpreter.h"
53 #include "JITExceptions.h"
54 #include "JITToDFGDeferredCompilationCallback.h"
55 #include "JSAsyncFunction.h"
56 #include "JSAsyncGeneratorFunction.h"
57 #include "JSCInlines.h"
58 #include "JSGeneratorFunction.h"
59 #include "JSGlobalObjectFunctions.h"
60 #include "JSLexicalEnvironment.h"
61 #include "JSPropertyNameEnumerator.h"
62 #include "JSWithScope.h"
63 #include "ModuleProgramCodeBlock.h"
64 #include "ObjectConstructor.h"
65 #include "PolymorphicAccess.h"
66 #include "ProgramCodeBlock.h"
67 #include "PropertyName.h"
68 #include "RegExpObject.h"
70 #include "ScopedArguments.h"
71 #include "ShadowChicken.h"
72 #include "StructureStubInfo.h"
73 #include "SuperSampler.h"
74 #include "TestRunnerUtils.h"
75 #include "ThunkGenerators.h"
76 #include "TypeProfilerLog.h"
77 #include "VMInlines.h"
78 #include <wtf/InlineASM.h>
85 void * _ReturnAddress(void);
86 #pragma intrinsic(_ReturnAddress)
88 #define OUR_RETURN_ADDRESS _ReturnAddress()
90 #define OUR_RETURN_ADDRESS __builtin_return_address(0)
93 #if ENABLE(OPCODE_SAMPLING)
94 #define CTI_SAMPLER vm->interpreter->sampler()
100 void JIT_OPERATION operationThrowStackOverflowError(ExecState* exec, CodeBlock* codeBlock)
102 // We pass in our own code block, because the callframe hasn't been populated.
103 VM* vm = codeBlock->vm();
104 auto scope = DECLARE_THROW_SCOPE(*vm);
106 EntryFrame* entryFrame = vm->topEntryFrame;
107 CallFrame* callerFrame = exec->callerFrame(entryFrame);
110 entryFrame = vm->topEntryFrame;
113 NativeCallFrameTracerWithRestore tracer(vm, entryFrame, callerFrame);
114 throwStackOverflowError(callerFrame, scope);
117 #if ENABLE(WEBASSEMBLY)
118 void JIT_OPERATION operationThrowDivideError(ExecState* exec)
120 VM* vm = &exec->vm();
121 auto scope = DECLARE_THROW_SCOPE(*vm);
123 EntryFrame* entryFrame = vm->topEntryFrame;
124 CallFrame* callerFrame = exec->callerFrame(entryFrame);
126 NativeCallFrameTracerWithRestore tracer(vm, entryFrame, callerFrame);
127 ErrorHandlingScope errorScope(*vm);
128 throwException(callerFrame, scope, createError(callerFrame, ASCIILiteral("Division by zero or division overflow.")));
131 void JIT_OPERATION operationThrowOutOfBoundsAccessError(ExecState* exec)
133 VM* vm = &exec->vm();
134 auto scope = DECLARE_THROW_SCOPE(*vm);
136 EntryFrame* entryFrame = vm->topEntryFrame;
137 CallFrame* callerFrame = exec->callerFrame(entryFrame);
139 NativeCallFrameTracerWithRestore tracer(vm, entryFrame, callerFrame);
140 ErrorHandlingScope errorScope(*vm);
141 throwException(callerFrame, scope, createError(callerFrame, ASCIILiteral("Out-of-bounds access.")));
145 int32_t JIT_OPERATION operationCallArityCheck(ExecState* exec)
147 VM* vm = &exec->vm();
148 auto scope = DECLARE_THROW_SCOPE(*vm);
150 int32_t missingArgCount = CommonSlowPaths::arityCheckFor(exec, *vm, CodeForCall);
151 if (missingArgCount < 0) {
152 EntryFrame* entryFrame = vm->topEntryFrame;
153 CallFrame* callerFrame = exec->callerFrame(entryFrame);
154 NativeCallFrameTracerWithRestore tracer(vm, entryFrame, callerFrame);
155 throwStackOverflowError(callerFrame, scope);
158 return missingArgCount;
161 int32_t JIT_OPERATION operationConstructArityCheck(ExecState* exec)
163 VM* vm = &exec->vm();
164 auto scope = DECLARE_THROW_SCOPE(*vm);
166 int32_t missingArgCount = CommonSlowPaths::arityCheckFor(exec, *vm, CodeForConstruct);
167 if (missingArgCount < 0) {
168 EntryFrame* entryFrame = vm->topEntryFrame;
169 CallFrame* callerFrame = exec->callerFrame(entryFrame);
170 NativeCallFrameTracerWithRestore tracer(vm, entryFrame, callerFrame);
171 throwStackOverflowError(callerFrame, scope);
174 return missingArgCount;
177 EncodedJSValue JIT_OPERATION operationTryGetById(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
179 VM* vm = &exec->vm();
180 NativeCallFrameTracer tracer(vm, exec);
181 Identifier ident = Identifier::fromUid(vm, uid);
182 stubInfo->tookSlowPath = true;
184 JSValue baseValue = JSValue::decode(base);
185 PropertySlot slot(baseValue, PropertySlot::InternalMethodType::VMInquiry);
186 baseValue.getPropertySlot(exec, ident, slot);
188 return JSValue::encode(slot.getPureResult());
192 EncodedJSValue JIT_OPERATION operationTryGetByIdGeneric(ExecState* exec, EncodedJSValue base, UniquedStringImpl* uid)
194 VM* vm = &exec->vm();
195 NativeCallFrameTracer tracer(vm, exec);
196 Identifier ident = Identifier::fromUid(vm, uid);
198 JSValue baseValue = JSValue::decode(base);
199 PropertySlot slot(baseValue, PropertySlot::InternalMethodType::VMInquiry);
200 baseValue.getPropertySlot(exec, ident, slot);
202 return JSValue::encode(slot.getPureResult());
205 EncodedJSValue JIT_OPERATION operationTryGetByIdOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
207 VM* vm = &exec->vm();
208 NativeCallFrameTracer tracer(vm, exec);
209 auto scope = DECLARE_THROW_SCOPE(*vm);
210 Identifier ident = Identifier::fromUid(vm, uid);
212 JSValue baseValue = JSValue::decode(base);
213 PropertySlot slot(baseValue, PropertySlot::InternalMethodType::VMInquiry);
215 baseValue.getPropertySlot(exec, ident, slot);
216 RETURN_IF_EXCEPTION(scope, encodedJSValue());
218 if (stubInfo->considerCaching(exec->codeBlock(), baseValue.structureOrNull()) && !slot.isTaintedByOpaqueObject() && (slot.isCacheableValue() || slot.isCacheableGetter() || slot.isUnset()))
219 repatchGetByID(exec, baseValue, ident, slot, *stubInfo, GetByIDKind::Try);
221 return JSValue::encode(slot.getPureResult());
224 EncodedJSValue JIT_OPERATION operationGetById(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
226 SuperSamplerScope superSamplerScope(false);
228 VM* vm = &exec->vm();
229 NativeCallFrameTracer tracer(vm, exec);
231 stubInfo->tookSlowPath = true;
233 JSValue baseValue = JSValue::decode(base);
234 PropertySlot slot(baseValue, PropertySlot::InternalMethodType::Get);
235 Identifier ident = Identifier::fromUid(vm, uid);
237 LOG_IC((ICEvent::OperationGetById, baseValue.classInfoOrNull(*vm), ident));
238 return JSValue::encode(baseValue.get(exec, ident, slot));
241 EncodedJSValue JIT_OPERATION operationGetByIdGeneric(ExecState* exec, EncodedJSValue base, UniquedStringImpl* uid)
243 SuperSamplerScope superSamplerScope(false);
245 VM* vm = &exec->vm();
246 NativeCallFrameTracer tracer(vm, exec);
248 JSValue baseValue = JSValue::decode(base);
249 PropertySlot slot(baseValue, PropertySlot::InternalMethodType::Get);
250 Identifier ident = Identifier::fromUid(vm, uid);
251 LOG_IC((ICEvent::OperationGetByIdGeneric, baseValue.classInfoOrNull(*vm), ident));
252 return JSValue::encode(baseValue.get(exec, ident, slot));
255 EncodedJSValue JIT_OPERATION operationGetByIdOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
257 SuperSamplerScope superSamplerScope(false);
259 VM* vm = &exec->vm();
260 NativeCallFrameTracer tracer(vm, exec);
261 Identifier ident = Identifier::fromUid(vm, uid);
263 JSValue baseValue = JSValue::decode(base);
264 LOG_IC((ICEvent::OperationGetByIdOptimize, baseValue.classInfoOrNull(*vm), ident));
266 return JSValue::encode(baseValue.getPropertySlot(exec, ident, [&] (bool found, PropertySlot& slot) -> JSValue {
267 if (stubInfo->considerCaching(exec->codeBlock(), baseValue.structureOrNull()))
268 repatchGetByID(exec, baseValue, ident, slot, *stubInfo, GetByIDKind::Normal);
269 return found ? slot.getValue(exec, ident) : jsUndefined();
273 EncodedJSValue JIT_OPERATION operationGetByIdWithThisGeneric(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, EncodedJSValue thisEncoded, UniquedStringImpl* uid)
275 SuperSamplerScope superSamplerScope(false);
277 VM* vm = &exec->vm();
278 NativeCallFrameTracer tracer(vm, exec);
279 Identifier ident = Identifier::fromUid(vm, uid);
281 stubInfo->tookSlowPath = true;
283 JSValue baseValue = JSValue::decode(base);
284 JSValue thisValue = JSValue::decode(thisEncoded);
285 PropertySlot slot(thisValue, PropertySlot::InternalMethodType::Get);
287 return JSValue::encode(baseValue.get(exec, ident, slot));
290 EncodedJSValue JIT_OPERATION operationGetByIdWithThisOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, EncodedJSValue thisEncoded, UniquedStringImpl* uid)
292 SuperSamplerScope superSamplerScope(false);
294 VM* vm = &exec->vm();
295 NativeCallFrameTracer tracer(vm, exec);
296 Identifier ident = Identifier::fromUid(vm, uid);
298 JSValue baseValue = JSValue::decode(base);
299 JSValue thisValue = JSValue::decode(thisEncoded);
300 LOG_IC((ICEvent::OperationGetByIdWithThisOptimize, baseValue.classInfoOrNull(*vm), ident));
302 PropertySlot slot(thisValue, PropertySlot::InternalMethodType::Get);
303 return JSValue::encode(baseValue.getPropertySlot(exec, ident, slot, [&] (bool found, PropertySlot& slot) -> JSValue {
304 if (stubInfo->considerCaching(exec->codeBlock(), baseValue.structureOrNull()))
305 repatchGetByID(exec, baseValue, ident, slot, *stubInfo, GetByIDKind::WithThis);
306 return found ? slot.getValue(exec, ident) : jsUndefined();
310 EncodedJSValue JIT_OPERATION operationInOptimize(ExecState* exec, StructureStubInfo* stubInfo, JSCell* base, UniquedStringImpl* key)
312 SuperSamplerScope superSamplerScope(false);
314 VM* vm = &exec->vm();
315 NativeCallFrameTracer tracer(vm, exec);
316 auto scope = DECLARE_THROW_SCOPE(*vm);
318 if (!base->isObject()) {
319 throwException(exec, scope, createInvalidInParameterError(exec, base));
320 return JSValue::encode(jsUndefined());
323 AccessType accessType = static_cast<AccessType>(stubInfo->accessType);
325 Identifier ident = Identifier::fromUid(vm, key);
326 LOG_IC((ICEvent::OperationInOptimize, base->classInfo(*vm), ident));
327 PropertySlot slot(base, PropertySlot::InternalMethodType::HasProperty);
328 bool result = asObject(base)->getPropertySlot(exec, ident, slot);
329 RETURN_IF_EXCEPTION(scope, encodedJSValue());
331 RELEASE_ASSERT(accessType == stubInfo->accessType);
333 if (stubInfo->considerCaching(exec->codeBlock(), asObject(base)->structure()))
334 repatchIn(exec, base, ident, result, slot, *stubInfo);
336 return JSValue::encode(jsBoolean(result));
339 EncodedJSValue JIT_OPERATION operationIn(ExecState* exec, StructureStubInfo* stubInfo, JSCell* base, UniquedStringImpl* key)
341 SuperSamplerScope superSamplerScope(false);
343 VM* vm = &exec->vm();
344 NativeCallFrameTracer tracer(vm, exec);
345 auto scope = DECLARE_THROW_SCOPE(*vm);
347 stubInfo->tookSlowPath = true;
349 if (!base->isObject()) {
350 throwException(exec, scope, createInvalidInParameterError(exec, base));
351 return JSValue::encode(jsUndefined());
354 Identifier ident = Identifier::fromUid(vm, key);
355 LOG_IC((ICEvent::OperationIn, base->classInfo(*vm), ident));
357 return JSValue::encode(jsBoolean(asObject(base)->hasProperty(exec, ident)));
360 EncodedJSValue JIT_OPERATION operationGenericIn(ExecState* exec, JSCell* base, EncodedJSValue key)
362 SuperSamplerScope superSamplerScope(false);
364 VM* vm = &exec->vm();
365 NativeCallFrameTracer tracer(vm, exec);
367 return JSValue::encode(jsBoolean(CommonSlowPaths::opIn(exec, base, JSValue::decode(key))));
370 void JIT_OPERATION operationPutByIdStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
372 SuperSamplerScope superSamplerScope(false);
374 VM* vm = &exec->vm();
375 NativeCallFrameTracer tracer(vm, exec);
377 stubInfo->tookSlowPath = true;
379 JSValue baseValue = JSValue::decode(encodedBase);
380 Identifier ident = Identifier::fromUid(vm, uid);
381 LOG_IC((ICEvent::OperationPutByIdStrict, baseValue.classInfoOrNull(*vm), ident));
383 PutPropertySlot slot(baseValue, true, exec->codeBlock()->putByIdContext());
384 baseValue.putInline(exec, ident, JSValue::decode(encodedValue), slot);
387 void JIT_OPERATION operationPutByIdNonStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
389 SuperSamplerScope superSamplerScope(false);
391 VM* vm = &exec->vm();
392 NativeCallFrameTracer tracer(vm, exec);
394 stubInfo->tookSlowPath = true;
396 JSValue baseValue = JSValue::decode(encodedBase);
397 Identifier ident = Identifier::fromUid(vm, uid);
398 LOG_IC((ICEvent::OperationPutByIdNonStrict, baseValue.classInfoOrNull(*vm), ident));
399 PutPropertySlot slot(baseValue, false, exec->codeBlock()->putByIdContext());
400 baseValue.putInline(exec, ident, JSValue::decode(encodedValue), slot);
403 void JIT_OPERATION operationPutByIdDirectStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
405 SuperSamplerScope superSamplerScope(false);
407 VM* vm = &exec->vm();
408 NativeCallFrameTracer tracer(vm, exec);
410 stubInfo->tookSlowPath = true;
412 JSValue baseValue = JSValue::decode(encodedBase);
413 Identifier ident = Identifier::fromUid(vm, uid);
414 LOG_IC((ICEvent::OperationPutByIdDirectStrict, baseValue.classInfoOrNull(*vm), ident));
415 PutPropertySlot slot(baseValue, true, exec->codeBlock()->putByIdContext());
416 asObject(baseValue)->putDirect(*vm, ident, JSValue::decode(encodedValue), slot);
419 void JIT_OPERATION operationPutByIdDirectNonStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
421 SuperSamplerScope superSamplerScope(false);
423 VM* vm = &exec->vm();
424 NativeCallFrameTracer tracer(vm, exec);
426 stubInfo->tookSlowPath = true;
428 JSValue baseValue = JSValue::decode(encodedBase);
429 Identifier ident = Identifier::fromUid(vm, uid);
430 LOG_IC((ICEvent::OperationPutByIdDirectNonStrict, baseValue.classInfoOrNull(*vm), ident));
431 PutPropertySlot slot(baseValue, false, exec->codeBlock()->putByIdContext());
432 asObject(baseValue)->putDirect(*vm, ident, JSValue::decode(encodedValue), slot);
435 void JIT_OPERATION operationPutByIdStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
437 SuperSamplerScope superSamplerScope(false);
439 VM* vm = &exec->vm();
440 NativeCallFrameTracer tracer(vm, exec);
441 auto scope = DECLARE_THROW_SCOPE(*vm);
443 Identifier ident = Identifier::fromUid(vm, uid);
444 AccessType accessType = static_cast<AccessType>(stubInfo->accessType);
446 JSValue value = JSValue::decode(encodedValue);
447 JSValue baseValue = JSValue::decode(encodedBase);
448 LOG_IC((ICEvent::OperationPutByIdStrictOptimize, baseValue.classInfoOrNull(*vm), ident));
449 CodeBlock* codeBlock = exec->codeBlock();
450 PutPropertySlot slot(baseValue, true, codeBlock->putByIdContext());
452 Structure* structure = baseValue.isCell() ? baseValue.asCell()->structure(*vm) : nullptr;
453 baseValue.putInline(exec, ident, value, slot);
454 RETURN_IF_EXCEPTION(scope, void());
456 if (accessType != static_cast<AccessType>(stubInfo->accessType))
459 if (stubInfo->considerCaching(codeBlock, structure))
460 repatchPutByID(exec, baseValue, structure, ident, slot, *stubInfo, NotDirect);
463 void JIT_OPERATION operationPutByIdNonStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
465 SuperSamplerScope superSamplerScope(false);
467 VM* vm = &exec->vm();
468 NativeCallFrameTracer tracer(vm, exec);
469 auto scope = DECLARE_THROW_SCOPE(*vm);
471 Identifier ident = Identifier::fromUid(vm, uid);
472 AccessType accessType = static_cast<AccessType>(stubInfo->accessType);
474 JSValue value = JSValue::decode(encodedValue);
475 JSValue baseValue = JSValue::decode(encodedBase);
476 LOG_IC((ICEvent::OperationPutByIdNonStrictOptimize, baseValue.classInfoOrNull(*vm), ident));
477 CodeBlock* codeBlock = exec->codeBlock();
478 PutPropertySlot slot(baseValue, false, codeBlock->putByIdContext());
480 Structure* structure = baseValue.isCell() ? baseValue.asCell()->structure(*vm) : nullptr;
481 baseValue.putInline(exec, ident, value, slot);
482 RETURN_IF_EXCEPTION(scope, void());
484 if (accessType != static_cast<AccessType>(stubInfo->accessType))
487 if (stubInfo->considerCaching(codeBlock, structure))
488 repatchPutByID(exec, baseValue, structure, ident, slot, *stubInfo, NotDirect);
491 void JIT_OPERATION operationPutByIdDirectStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
493 SuperSamplerScope superSamplerScope(false);
495 VM* vm = &exec->vm();
496 NativeCallFrameTracer tracer(vm, exec);
498 Identifier ident = Identifier::fromUid(vm, uid);
499 AccessType accessType = static_cast<AccessType>(stubInfo->accessType);
501 JSValue value = JSValue::decode(encodedValue);
502 JSObject* baseObject = asObject(JSValue::decode(encodedBase));
503 LOG_IC((ICEvent::OperationPutByIdDirectStrictOptimize, baseObject->classInfo(*vm), ident));
504 CodeBlock* codeBlock = exec->codeBlock();
505 PutPropertySlot slot(baseObject, true, codeBlock->putByIdContext());
507 Structure* structure = baseObject->structure(*vm);
508 baseObject->putDirect(*vm, ident, value, slot);
510 if (accessType != static_cast<AccessType>(stubInfo->accessType))
513 if (stubInfo->considerCaching(codeBlock, structure))
514 repatchPutByID(exec, baseObject, structure, ident, slot, *stubInfo, Direct);
517 void JIT_OPERATION operationPutByIdDirectNonStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
519 SuperSamplerScope superSamplerScope(false);
521 VM* vm = &exec->vm();
522 NativeCallFrameTracer tracer(vm, exec);
524 Identifier ident = Identifier::fromUid(vm, uid);
525 AccessType accessType = static_cast<AccessType>(stubInfo->accessType);
527 JSValue value = JSValue::decode(encodedValue);
528 JSObject* baseObject = asObject(JSValue::decode(encodedBase));
529 LOG_IC((ICEvent::OperationPutByIdDirectNonStrictOptimize, baseObject->classInfo(*vm), ident));
530 CodeBlock* codeBlock = exec->codeBlock();
531 PutPropertySlot slot(baseObject, false, codeBlock->putByIdContext());
533 Structure* structure = baseObject->structure(*vm);
534 baseObject->putDirect(*vm, ident, value, slot);
536 if (accessType != static_cast<AccessType>(stubInfo->accessType))
539 if (stubInfo->considerCaching(codeBlock, structure))
540 repatchPutByID(exec, baseObject, structure, ident, slot, *stubInfo, Direct);
543 ALWAYS_INLINE static bool isStringOrSymbol(JSValue value)
545 return value.isString() || value.isSymbol();
548 static void putByVal(CallFrame* callFrame, JSValue baseValue, JSValue subscript, JSValue value, ByValInfo* byValInfo)
550 VM& vm = callFrame->vm();
551 auto scope = DECLARE_THROW_SCOPE(vm);
552 if (LIKELY(subscript.isUInt32())) {
553 byValInfo->tookSlowPath = true;
554 uint32_t i = subscript.asUInt32();
555 if (baseValue.isObject()) {
556 JSObject* object = asObject(baseValue);
557 if (object->canSetIndexQuickly(i)) {
558 object->setIndexQuickly(vm, i, value);
562 // FIXME: This will make us think that in-bounds typed array accesses are actually
564 // https://bugs.webkit.org/show_bug.cgi?id=149886
565 byValInfo->arrayProfile->setOutOfBounds();
567 object->methodTable(vm)->putByIndex(object, callFrame, i, value, callFrame->codeBlock()->isStrictMode());
572 baseValue.putByIndex(callFrame, i, value, callFrame->codeBlock()->isStrictMode());
576 auto property = subscript.toPropertyKey(callFrame);
577 // Don't put to an object if toString threw an exception.
578 RETURN_IF_EXCEPTION(scope, void());
580 if (byValInfo->stubInfo && (!isStringOrSymbol(subscript) || byValInfo->cachedId != property))
581 byValInfo->tookSlowPath = true;
584 PutPropertySlot slot(baseValue, callFrame->codeBlock()->isStrictMode());
585 baseValue.putInline(callFrame, property, value, slot);
588 static void directPutByVal(CallFrame* callFrame, JSObject* baseObject, JSValue subscript, JSValue value, ByValInfo* byValInfo)
590 VM& vm = callFrame->vm();
591 auto scope = DECLARE_THROW_SCOPE(vm);
592 bool isStrictMode = callFrame->codeBlock()->isStrictMode();
593 if (LIKELY(subscript.isUInt32())) {
594 // Despite its name, JSValue::isUInt32 will return true only for positive boxed int32_t; all those values are valid array indices.
595 byValInfo->tookSlowPath = true;
596 uint32_t index = subscript.asUInt32();
597 ASSERT(isIndex(index));
599 switch (baseObject->indexingType()) {
600 case ALL_INT32_INDEXING_TYPES:
601 case ALL_DOUBLE_INDEXING_TYPES:
602 case ALL_CONTIGUOUS_INDEXING_TYPES:
603 case ALL_ARRAY_STORAGE_INDEXING_TYPES:
604 if (index < baseObject->butterfly()->vectorLength())
608 byValInfo->arrayProfile->setOutOfBounds();
613 baseObject->putDirectIndex(callFrame, index, value, 0, isStrictMode ? PutDirectIndexShouldThrow : PutDirectIndexShouldNotThrow);
617 if (subscript.isDouble()) {
618 double subscriptAsDouble = subscript.asDouble();
619 uint32_t subscriptAsUInt32 = static_cast<uint32_t>(subscriptAsDouble);
620 if (subscriptAsDouble == subscriptAsUInt32 && isIndex(subscriptAsUInt32)) {
621 byValInfo->tookSlowPath = true;
623 baseObject->putDirectIndex(callFrame, subscriptAsUInt32, value, 0, isStrictMode ? PutDirectIndexShouldThrow : PutDirectIndexShouldNotThrow);
628 // Don't put to an object if toString threw an exception.
629 auto property = subscript.toPropertyKey(callFrame);
630 RETURN_IF_EXCEPTION(scope, void());
632 if (std::optional<uint32_t> index = parseIndex(property)) {
633 byValInfo->tookSlowPath = true;
635 baseObject->putDirectIndex(callFrame, index.value(), value, 0, isStrictMode ? PutDirectIndexShouldThrow : PutDirectIndexShouldNotThrow);
639 if (byValInfo->stubInfo && (!isStringOrSymbol(subscript) || byValInfo->cachedId != property))
640 byValInfo->tookSlowPath = true;
642 PutPropertySlot slot(baseObject, isStrictMode);
643 baseObject->putDirect(vm, property, value, slot);
646 enum class OptimizationResult {
653 static OptimizationResult tryPutByValOptimize(ExecState* exec, JSValue baseValue, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
655 // See if it's worth optimizing at all.
656 OptimizationResult optimizationResult = OptimizationResult::NotOptimized;
660 if (baseValue.isObject() && subscript.isInt32()) {
661 JSObject* object = asObject(baseValue);
663 ASSERT(exec->bytecodeOffset());
664 ASSERT(!byValInfo->stubRoutine);
666 Structure* structure = object->structure(vm);
667 if (hasOptimizableIndexing(structure)) {
668 // Attempt to optimize.
669 JITArrayMode arrayMode = jitArrayModeForStructure(structure);
670 if (jitArrayModePermitsPut(arrayMode) && arrayMode != byValInfo->arrayMode) {
671 CodeBlock* codeBlock = exec->codeBlock();
672 ConcurrentJSLocker locker(codeBlock->m_lock);
673 byValInfo->arrayProfile->computeUpdatedPrediction(locker, codeBlock, structure);
675 JIT::compilePutByVal(&vm, codeBlock, byValInfo, returnAddress, arrayMode);
676 optimizationResult = OptimizationResult::Optimized;
680 // If we failed to patch and we have some object that intercepts indexed get, then don't even wait until 10 times.
681 if (optimizationResult != OptimizationResult::Optimized && object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero())
682 optimizationResult = OptimizationResult::GiveUp;
685 if (baseValue.isObject() && isStringOrSymbol(subscript)) {
686 const Identifier propertyName = subscript.toPropertyKey(exec);
687 if (subscript.isSymbol() || !parseIndex(propertyName)) {
688 ASSERT(exec->bytecodeOffset());
689 ASSERT(!byValInfo->stubRoutine);
690 if (byValInfo->seen) {
691 if (byValInfo->cachedId == propertyName) {
692 JIT::compilePutByValWithCachedId(&vm, exec->codeBlock(), byValInfo, returnAddress, NotDirect, propertyName);
693 optimizationResult = OptimizationResult::Optimized;
695 // Seem like a generic property access site.
696 optimizationResult = OptimizationResult::GiveUp;
699 CodeBlock* codeBlock = exec->codeBlock();
700 ConcurrentJSLocker locker(codeBlock->m_lock);
701 byValInfo->seen = true;
702 byValInfo->cachedId = propertyName;
703 if (subscript.isSymbol())
704 byValInfo->cachedSymbol.set(vm, codeBlock, asSymbol(subscript));
705 optimizationResult = OptimizationResult::SeenOnce;
710 if (optimizationResult != OptimizationResult::Optimized && optimizationResult != OptimizationResult::SeenOnce) {
711 // If we take slow path more than 10 times without patching then make sure we
712 // never make that mistake again. For cases where we see non-index-intercepting
713 // objects, this gives 10 iterations worth of opportunity for us to observe
714 // that the put_by_val may be polymorphic. We count up slowPathCount even if
715 // the result is GiveUp.
716 if (++byValInfo->slowPathCount >= 10)
717 optimizationResult = OptimizationResult::GiveUp;
720 return optimizationResult;
723 void JIT_OPERATION operationPutByValOptimize(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
726 NativeCallFrameTracer tracer(&vm, exec);
728 JSValue baseValue = JSValue::decode(encodedBaseValue);
729 JSValue subscript = JSValue::decode(encodedSubscript);
730 JSValue value = JSValue::decode(encodedValue);
731 if (tryPutByValOptimize(exec, baseValue, subscript, byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS)) == OptimizationResult::GiveUp) {
732 // Don't ever try to optimize.
733 byValInfo->tookSlowPath = true;
734 ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(operationPutByValGeneric));
736 putByVal(exec, baseValue, subscript, value, byValInfo);
739 static OptimizationResult tryDirectPutByValOptimize(ExecState* exec, JSObject* object, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
741 // See if it's worth optimizing at all.
742 OptimizationResult optimizationResult = OptimizationResult::NotOptimized;
746 if (subscript.isInt32()) {
747 ASSERT(exec->bytecodeOffset());
748 ASSERT(!byValInfo->stubRoutine);
750 Structure* structure = object->structure(vm);
751 if (hasOptimizableIndexing(structure)) {
752 // Attempt to optimize.
753 JITArrayMode arrayMode = jitArrayModeForStructure(structure);
754 if (jitArrayModePermitsPutDirect(arrayMode) && arrayMode != byValInfo->arrayMode) {
755 CodeBlock* codeBlock = exec->codeBlock();
756 ConcurrentJSLocker locker(codeBlock->m_lock);
757 byValInfo->arrayProfile->computeUpdatedPrediction(locker, codeBlock, structure);
759 JIT::compileDirectPutByVal(&vm, codeBlock, byValInfo, returnAddress, arrayMode);
760 optimizationResult = OptimizationResult::Optimized;
764 // If we failed to patch and we have some object that intercepts indexed get, then don't even wait until 10 times.
765 if (optimizationResult != OptimizationResult::Optimized && object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero())
766 optimizationResult = OptimizationResult::GiveUp;
767 } else if (isStringOrSymbol(subscript)) {
768 const Identifier propertyName = subscript.toPropertyKey(exec);
769 if (subscript.isSymbol() || !parseIndex(propertyName)) {
770 ASSERT(exec->bytecodeOffset());
771 ASSERT(!byValInfo->stubRoutine);
772 if (byValInfo->seen) {
773 if (byValInfo->cachedId == propertyName) {
774 JIT::compilePutByValWithCachedId(&vm, exec->codeBlock(), byValInfo, returnAddress, Direct, propertyName);
775 optimizationResult = OptimizationResult::Optimized;
777 // Seem like a generic property access site.
778 optimizationResult = OptimizationResult::GiveUp;
781 CodeBlock* codeBlock = exec->codeBlock();
782 ConcurrentJSLocker locker(codeBlock->m_lock);
783 byValInfo->seen = true;
784 byValInfo->cachedId = propertyName;
785 if (subscript.isSymbol())
786 byValInfo->cachedSymbol.set(vm, codeBlock, asSymbol(subscript));
787 optimizationResult = OptimizationResult::SeenOnce;
792 if (optimizationResult != OptimizationResult::Optimized && optimizationResult != OptimizationResult::SeenOnce) {
793 // If we take slow path more than 10 times without patching then make sure we
794 // never make that mistake again. For cases where we see non-index-intercepting
795 // objects, this gives 10 iterations worth of opportunity for us to observe
796 // that the get_by_val may be polymorphic. We count up slowPathCount even if
797 // the result is GiveUp.
798 if (++byValInfo->slowPathCount >= 10)
799 optimizationResult = OptimizationResult::GiveUp;
802 return optimizationResult;
805 void JIT_OPERATION operationDirectPutByValOptimize(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
808 NativeCallFrameTracer tracer(&vm, exec);
810 JSValue baseValue = JSValue::decode(encodedBaseValue);
811 JSValue subscript = JSValue::decode(encodedSubscript);
812 JSValue value = JSValue::decode(encodedValue);
813 RELEASE_ASSERT(baseValue.isObject());
814 JSObject* object = asObject(baseValue);
815 if (tryDirectPutByValOptimize(exec, object, subscript, byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS)) == OptimizationResult::GiveUp) {
816 // Don't ever try to optimize.
817 byValInfo->tookSlowPath = true;
818 ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(operationDirectPutByValGeneric));
821 directPutByVal(exec, object, subscript, value, byValInfo);
824 void JIT_OPERATION operationPutByValGeneric(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
827 NativeCallFrameTracer tracer(&vm, exec);
829 JSValue baseValue = JSValue::decode(encodedBaseValue);
830 JSValue subscript = JSValue::decode(encodedSubscript);
831 JSValue value = JSValue::decode(encodedValue);
833 putByVal(exec, baseValue, subscript, value, byValInfo);
// Generic slow path for op_put_by_val_direct. Unlike the non-direct variant,
// the base is guaranteed (and asserted) to be an object.
837 void JIT_OPERATION operationDirectPutByValGeneric(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
840 NativeCallFrameTracer tracer(&vm, exec);
842 JSValue baseValue = JSValue::decode(encodedBaseValue);
843 JSValue subscript = JSValue::decode(encodedSubscript);
844 JSValue value = JSValue::decode(encodedValue);
845 RELEASE_ASSERT(baseValue.isObject());
846 directPutByVal(exec, asObject(baseValue), subscript, value, byValInfo);
// Slow path for op_call_eval. Returns the encoded empty JSValue when the
// callee is not the built-in eval, which signals the JIT caller to fall back
// to performing an ordinary call instead.
849 EncodedJSValue JIT_OPERATION operationCallEval(ExecState* exec, ExecState* execCallee)
851 VM* vm = &exec->vm();
852 auto scope = DECLARE_THROW_SCOPE(*vm);
// The callee frame has no CodeBlock of its own for an eval dispatch.
854 execCallee->setCodeBlock(0);
856 if (!isHostFunction(execCallee->guaranteedJSValueCallee(), globalFuncEval))
857 return JSValue::encode(JSValue());
859 JSValue result = eval(execCallee);
860 RETURN_IF_EXCEPTION(scope, encodedJSValue());
862 return JSValue::encode(result);
// Shared helper for the link/virtual-call slow paths when the callee turned
// out not to be a JS function. Invokes the host (native) call/construct
// implementation directly, or throws a not-a-function / not-a-constructor
// error. All paths return an encodeResult(...) pair of (code address, frame
// reuse policy); on exception the code address is the throw-from-call stub.
// NOTE(review): several `return encodeResult(` heads and the `CallData
// callData;` declaration are elided in this extraction — verify in full file.
865 static SlowPathReturnType handleHostCall(ExecState* execCallee, JSValue callee, CallLinkInfo* callLinkInfo)
867 ExecState* exec = execCallee->callerFrame();
868 VM* vm = &exec->vm();
869 auto scope = DECLARE_THROW_SCOPE(*vm);
871 execCallee->setCodeBlock(0);
873 if (callLinkInfo->specializationKind() == CodeForCall) {
875 CallType callType = getCallData(callee, callData);
// A JS callee would have been handled by the caller; only Host/None reach here.
877 ASSERT(callType != CallType::JS);
879 if (callType == CallType::Host) {
880 NativeCallFrameTracer tracer(vm, execCallee);
881 execCallee->setCallee(asObject(callee));
// Stash the native call's result where getHostCallReturnValue can find it.
882 vm->hostCallReturnValue = JSValue::decode(callData.native.function(execCallee));
883 if (UNLIKELY(scope.exception())) {
885 vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
886 reinterpret_cast<void*>(KeepTheFrame));
890 bitwise_cast<void*>(getHostCallReturnValue),
// Tail calls may reuse the caller's frame; ordinary calls must keep it.
891 reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
894 ASSERT(callType == CallType::None);
895 throwException(exec, scope, createNotAFunctionError(exec, callee));
897 vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
898 reinterpret_cast<void*>(KeepTheFrame));
// Construct case mirrors the call case above.
901 ASSERT(callLinkInfo->specializationKind() == CodeForConstruct);
903 ConstructData constructData;
904 ConstructType constructType = getConstructData(callee, constructData);
906 ASSERT(constructType != ConstructType::JS);
908 if (constructType == ConstructType::Host) {
909 NativeCallFrameTracer tracer(vm, execCallee);
910 execCallee->setCallee(asObject(callee));
911 vm->hostCallReturnValue = JSValue::decode(constructData.native.function(execCallee));
912 if (UNLIKELY(scope.exception())) {
914 vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
915 reinterpret_cast<void*>(KeepTheFrame));
918 return encodeResult(bitwise_cast<void*>(getHostCallReturnValue), reinterpret_cast<void*>(KeepTheFrame));
921 ASSERT(constructType == ConstructType::None);
922 throwException(exec, scope, createNotAConstructorError(exec, callee));
924 vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
925 reinterpret_cast<void*>(KeepTheFrame));
// Slow path taken the first time an unlinked call site executes. Resolves the
// callee to machine code, links the CallLinkInfo to it (so subsequent calls go
// straight to the target), and returns (entrypoint, frame-reuse policy) for
// this invocation. Non-JSFunction callees go to an internal-function
// trampoline or to handleHostCall.
928 SlowPathReturnType JIT_OPERATION operationLinkCall(ExecState* execCallee, CallLinkInfo* callLinkInfo)
930 ExecState* exec = execCallee->callerFrame();
931 VM* vm = &exec->vm();
932 auto throwScope = DECLARE_THROW_SCOPE(*vm);
934 CodeSpecializationKind kind = callLinkInfo->specializationKind();
935 NativeCallFrameTracer tracer(vm, exec);
// Direct call sites are linked via operationLinkDirectCall, never here.
937 RELEASE_ASSERT(!callLinkInfo->isDirect());
939 JSValue calleeAsValue = execCallee->guaranteedJSValueCallee();
940 JSCell* calleeAsFunctionCell = getJSFunction(calleeAsValue);
941 if (!calleeAsFunctionCell) {
942 if (calleeAsValue.isCell() && calleeAsValue.asCell()->type() == InternalFunctionType) {
943 MacroAssemblerCodePtr codePtr = vm->getCTIInternalFunctionTrampolineFor(kind);
944 RELEASE_ASSERT(!!codePtr);
// Only link after the site has been seen once; the first hit just records it.
946 if (!callLinkInfo->seenOnce())
947 callLinkInfo->setSeen();
949 linkFor(execCallee, *callLinkInfo, nullptr, asObject(calleeAsValue), codePtr);
951 return encodeResult(codePtr.executableAddress(), reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
953 throwScope.release();
954 return handleHostCall(execCallee, calleeAsValue, callLinkInfo);
957 JSFunction* callee = jsCast<JSFunction*>(calleeAsFunctionCell);
958 JSScope* scope = callee->scopeUnchecked();
959 ExecutableBase* executable = callee->executable();
961 MacroAssemblerCodePtr codePtr;
962 CodeBlock* codeBlock = nullptr;
963 if (executable->isHostFunction()) {
964 codePtr = executable->entrypointFor(kind, MustCheckArity);
966 FunctionExecutable* functionExecutable = static_cast<FunctionExecutable*>(executable);
// Constructing a non-constructible function (e.g. arrow/method) is a TypeError.
968 if (!isCall(kind) && functionExecutable->constructAbility() == ConstructAbility::CannotConstruct) {
969 throwException(exec, throwScope, createNotAConstructorError(exec, callee));
971 vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
972 reinterpret_cast<void*>(KeepTheFrame));
// Compile (or fetch) the callee's CodeBlock; writes it into the callee frame.
975 CodeBlock** codeBlockSlot = execCallee->addressOfCodeBlock();
976 JSObject* error = functionExecutable->prepareForExecution<FunctionExecutable>(*vm, callee, scope, kind, *codeBlockSlot);
977 EXCEPTION_ASSERT(throwScope.exception() == reinterpret_cast<Exception*>(error));
980 vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
981 reinterpret_cast<void*>(KeepTheFrame));
983 codeBlock = *codeBlockSlot;
984 ArityCheckMode arity;
// Too few arguments (or varargs, whose count is dynamic) must go through the
// arity-check entrypoint.
985 if (execCallee->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters()) || callLinkInfo->isVarargs())
986 arity = MustCheckArity;
988 arity = ArityCheckNotRequired;
989 codePtr = functionExecutable->entrypointFor(kind, arity);
991 if (!callLinkInfo->seenOnce())
992 callLinkInfo->setSeen();
994 linkFor(execCallee, *callLinkInfo, codeBlock, callee, codePtr);
996 return encodeResult(codePtr.executableAddress(), reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
// Links a *direct* call site: the callee's executable is known statically
// (stored on the CallLinkInfo), so no callee-value dispatch is needed. Returns
// nothing; the site is patched via linkDirectFor.
999 void JIT_OPERATION operationLinkDirectCall(ExecState* exec, CallLinkInfo* callLinkInfo, JSFunction* callee)
1001 VM* vm = &exec->vm();
1002 auto throwScope = DECLARE_THROW_SCOPE(*vm);
1004 CodeSpecializationKind kind = callLinkInfo->specializationKind();
1005 NativeCallFrameTracer tracer(vm, exec);
1007 RELEASE_ASSERT(callLinkInfo->isDirect());
1009 // This would happen if the executable died during GC but the CodeBlock did not die. That should
1010 // not happen because the CodeBlock should have a weak reference to any executable it uses for
1012 RELEASE_ASSERT(callLinkInfo->executable());
1014 // Having a CodeBlock indicates that this is linked. We shouldn't be taking this path if it's
1016 RELEASE_ASSERT(!callLinkInfo->codeBlock());
1018 // We just don't support this yet.
1019 RELEASE_ASSERT(!callLinkInfo->isVarargs());
1021 ExecutableBase* executable = callLinkInfo->executable();
// The runtime callee must match what the compiler baked into the site.
1022 RELEASE_ASSERT(callee->executable() == callLinkInfo->executable());
1024 JSScope* scope = callee->scopeUnchecked();
1026 MacroAssemblerCodePtr codePtr;
1027 CodeBlock* codeBlock = nullptr;
1028 if (executable->isHostFunction())
1029 codePtr = executable->entrypointFor(kind, MustCheckArity);
1031 FunctionExecutable* functionExecutable = static_cast<FunctionExecutable*>(executable);
// The compiler never emits a direct-construct of a non-constructible function.
1033 RELEASE_ASSERT(isCall(kind) || functionExecutable->constructAbility() != ConstructAbility::CannotConstruct);
1035 JSObject* error = functionExecutable->prepareForExecution<FunctionExecutable>(*vm, callee, scope, kind, codeBlock);
1036 EXCEPTION_ASSERT_UNUSED(throwScope, throwScope.exception() == reinterpret_cast<Exception*>(error));
1039 ArityCheckMode arity;
// Direct sites know their maximum argument count statically; compare against
// the callee's declared parameter count to pick the entrypoint.
1040 unsigned argumentStackSlots = callLinkInfo->maxNumArguments();
1041 if (argumentStackSlots < static_cast<size_t>(codeBlock->numParameters()))
1042 arity = MustCheckArity;
1044 arity = ArityCheckNotRequired;
1045 codePtr = functionExecutable->entrypointFor(kind, arity);
1048 linkDirectFor(exec, *callLinkInfo, codeBlock, codePtr);
// Core of the virtual (polymorphic/unlinked) call slow paths. Resolves the
// callee to an arity-checking entrypoint WITHOUT linking the call site, and
// reports the callee cell back through `calleeAsFunctionCell` so callers like
// operationLinkPolymorphicCall can record it.
1051 inline SlowPathReturnType virtualForWithFunction(
1052 ExecState* execCallee, CallLinkInfo* callLinkInfo, JSCell*& calleeAsFunctionCell)
1054 ExecState* exec = execCallee->callerFrame();
1055 VM* vm = &exec->vm();
1056 auto throwScope = DECLARE_THROW_SCOPE(*vm);
1058 CodeSpecializationKind kind = callLinkInfo->specializationKind();
1059 NativeCallFrameTracer tracer(vm, exec);
1061 JSValue calleeAsValue = execCallee->guaranteedJSValueCallee();
1062 calleeAsFunctionCell = getJSFunction(calleeAsValue);
1063 if (UNLIKELY(!calleeAsFunctionCell)) {
1064 if (calleeAsValue.isCell() && calleeAsValue.asCell()->type() == InternalFunctionType) {
1065 MacroAssemblerCodePtr codePtr = vm->getCTIInternalFunctionTrampolineFor(kind);
1067 return encodeResult(codePtr.executableAddress(), reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
1069 throwScope.release();
1070 return handleHostCall(execCallee, calleeAsValue, callLinkInfo);
1073 JSFunction* function = jsCast<JSFunction*>(calleeAsFunctionCell);
1074 JSScope* scope = function->scopeUnchecked();
1075 ExecutableBase* executable = function->executable();
// Compile lazily: only if there is no JIT code for this specialization yet.
1076 if (UNLIKELY(!executable->hasJITCodeFor(kind))) {
1077 FunctionExecutable* functionExecutable = static_cast<FunctionExecutable*>(executable);
1079 if (!isCall(kind) && functionExecutable->constructAbility() == ConstructAbility::CannotConstruct) {
1080 throwException(exec, throwScope, createNotAConstructorError(exec, function));
1081 return encodeResult(
1082 vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
1083 reinterpret_cast<void*>(KeepTheFrame));
1086 CodeBlock** codeBlockSlot = execCallee->addressOfCodeBlock();
1087 JSObject* error = functionExecutable->prepareForExecution<FunctionExecutable>(*vm, function, scope, kind, *codeBlockSlot);
1088 EXCEPTION_ASSERT(throwScope.exception() == reinterpret_cast<Exception*>(error));
1090 return encodeResult(
1091 vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
1092 reinterpret_cast<void*>(KeepTheFrame));
// Virtual dispatch always uses the arity-checking entrypoint, since the
// argument count is not known per call site.
1095 return encodeResult(executable->entrypointFor(
1096 kind, MustCheckArity).executableAddress(),
1097 reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
// Slow path for call sites that have seen multiple callees: resolves this
// call via virtualForWithFunction, then records the callee variant in the
// polymorphic call stub for future fast dispatch.
1100 SlowPathReturnType JIT_OPERATION operationLinkPolymorphicCall(ExecState* execCallee, CallLinkInfo* callLinkInfo)
1102 ASSERT(callLinkInfo->specializationKind() == CodeForCall);
1103 JSCell* calleeAsFunctionCell;
1104 SlowPathReturnType result = virtualForWithFunction(execCallee, callLinkInfo, calleeAsFunctionCell);
1106 linkPolymorphicCall(execCallee, *callLinkInfo, CallVariant(calleeAsFunctionCell));
// Fully virtual call slow path: resolve the callee every time, never link.
1111 SlowPathReturnType JIT_OPERATION operationVirtualCall(ExecState* execCallee, CallLinkInfo* callLinkInfo)
1113 JSCell* calleeAsFunctionCellIgnored;
1114 return virtualForWithFunction(execCallee, callLinkInfo, calleeAsFunctionCellIgnored);
// Slow path for op_less (a < b); jsLess<true> performs the full abstract
// relational comparison including string/number coercion.
1117 size_t JIT_OPERATION operationCompareLess(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
1119 VM* vm = &exec->vm();
1120 NativeCallFrameTracer tracer(vm, exec);
1122 return jsLess<true>(exec, JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
// Slow path for op_lesseq (a <= b).
1125 size_t JIT_OPERATION operationCompareLessEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
1127 VM* vm = &exec->vm();
1128 NativeCallFrameTracer tracer(vm, exec);
1130 return jsLessEq<true>(exec, JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
// Slow path for op_greater (a > b), implemented as (b < a) with swapped
// operands; the <false> template argument preserves evaluation-order
// semantics for the swapped form.
1133 size_t JIT_OPERATION operationCompareGreater(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
1135 VM* vm = &exec->vm();
1136 NativeCallFrameTracer tracer(vm, exec);
1138 return jsLess<false>(exec, JSValue::decode(encodedOp2), JSValue::decode(encodedOp1));
// Slow path for op_greatereq (a >= b), implemented as (b <= a) with swapped
// operands.
1141 size_t JIT_OPERATION operationCompareGreaterEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
1143 VM* vm = &exec->vm();
1144 NativeCallFrameTracer tracer(vm, exec);
1146 return jsLessEq<false>(exec, JSValue::decode(encodedOp2), JSValue::decode(encodedOp1));
// Slow path for loose equality (==) when the fast inline paths fail.
1149 size_t JIT_OPERATION operationCompareEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
1151 VM* vm = &exec->vm();
1152 NativeCallFrameTracer tracer(vm, exec);
1154 return JSValue::equalSlowCaseInline(exec, JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
// String equality slow path. Two signatures appear because the return type
// differs per value representation (presumably guarded by #if USE(JSVALUE64)
// in the full file — the preprocessor lines are elided in this extraction).
1158 EncodedJSValue JIT_OPERATION operationCompareStringEq(ExecState* exec, JSCell* left, JSCell* right)
1160 size_t JIT_OPERATION operationCompareStringEq(ExecState* exec, JSCell* left, JSCell* right)
1163 VM* vm = &exec->vm();
1164 NativeCallFrameTracer tracer(vm, exec);
// equal() may resolve ropes and can therefore allocate and throw.
1166 bool result = asString(left)->equal(exec, asString(right));
1168 return JSValue::encode(jsBoolean(result));
// Allocates an array from a buffer of `size` values, feeding the allocation
// profile so future allocations can pick a better indexing type.
1174 EncodedJSValue JIT_OPERATION operationNewArrayWithProfile(ExecState* exec, ArrayAllocationProfile* profile, const JSValue* values, int size)
1176 VM* vm = &exec->vm();
1177 NativeCallFrameTracer tracer(vm, exec);
1178 return JSValue::encode(constructArrayNegativeIndexed(exec, profile, values, size));
// Implements `new Array(size)` semantics ("size quirk": a single numeric
// argument is a length, anything else a single element), with profiling.
1181 EncodedJSValue JIT_OPERATION operationNewArrayWithSizeAndProfile(ExecState* exec, ArrayAllocationProfile* profile, EncodedJSValue size)
1183 VM* vm = &exec->vm();
1184 NativeCallFrameTracer tracer(vm, exec);
1185 JSValue sizeValue = JSValue::decode(size);
1186 return JSValue::encode(constructArrayWithSizeQuirk(exec, profile, exec->lexicalGlobalObject(), sizeValue));
// Shared implementation for all operationNew*Function variants. FunctionType
// selects JSFunction / JSGeneratorFunction / JSAsyncFunction / etc.
// `isInvalidated` chooses the constructor that pre-fires the reallocation
// watchpoint (used when the scope's structure watchpoint is already invalid).
// NOTE(review): the branch selecting between the two returns is elided in
// this extraction — presumably `if (isInvalidated)`; confirm in full file.
1191 template<typename FunctionType>
1192 static EncodedJSValue operationNewFunctionCommon(ExecState* exec, JSScope* scope, JSCell* functionExecutable, bool isInvalidated)
1194 VM& vm = exec->vm();
1195 ASSERT(functionExecutable->inherits(vm, FunctionExecutable::info()));
1196 NativeCallFrameTracer tracer(&vm, exec);
1198 return JSValue::encode(FunctionType::createWithInvalidatedReallocationWatchpoint(vm, static_cast<FunctionExecutable*>(functionExecutable), scope));
1199 return JSValue::encode(FunctionType::create(vm, static_cast<FunctionExecutable*>(functionExecutable), scope));
// Creates a plain JSFunction (watchpoint still valid).
1204 EncodedJSValue JIT_OPERATION operationNewFunction(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
1206 return operationNewFunctionCommon<JSFunction>(exec, scope, functionExecutable, false);
// Creates a plain JSFunction with its reallocation watchpoint pre-invalidated.
1209 EncodedJSValue JIT_OPERATION operationNewFunctionWithInvalidatedReallocationWatchpoint(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
1211 return operationNewFunctionCommon<JSFunction>(exec, scope, functionExecutable, true);
// Creates a generator function object (watchpoint still valid).
1214 EncodedJSValue JIT_OPERATION operationNewGeneratorFunction(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
1216 return operationNewFunctionCommon<JSGeneratorFunction>(exec, scope, functionExecutable, false);
// Creates a generator function object with the watchpoint pre-invalidated.
1219 EncodedJSValue JIT_OPERATION operationNewGeneratorFunctionWithInvalidatedReallocationWatchpoint(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
1221 return operationNewFunctionCommon<JSGeneratorFunction>(exec, scope, functionExecutable, true);
// Creates an async function object (watchpoint still valid).
1224 EncodedJSValue JIT_OPERATION operationNewAsyncFunction(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
1226 return operationNewFunctionCommon<JSAsyncFunction>(exec, scope, functionExecutable, false);
// Creates an async function object with the watchpoint pre-invalidated.
1229 EncodedJSValue JIT_OPERATION operationNewAsyncFunctionWithInvalidatedReallocationWatchpoint(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
1231 return operationNewFunctionCommon<JSAsyncFunction>(exec, scope, functionExecutable, true);
// Creates an async-generator function object (watchpoint still valid).
1234 EncodedJSValue JIT_OPERATION operationNewAsyncGeneratorFunction(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
1236 return operationNewFunctionCommon<JSAsyncGeneratorFunction>(exec, scope, functionExecutable, false);
// Creates an async-generator function object with the watchpoint
// pre-invalidated.
1239 EncodedJSValue JIT_OPERATION operationNewAsyncGeneratorFunctionWithInvalidatedReallocationWatchpoint(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
1241 return operationNewFunctionCommon<JSAsyncGeneratorFunction>(exec, scope, functionExecutable, true);
// Slow path for op_set_function_name: assigns the computed `name` (e.g. from
// a computed property key) to the freshly created function.
1244 void JIT_OPERATION operationSetFunctionName(ExecState* exec, JSCell* funcCell, EncodedJSValue encodedName)
1246 VM* vm = &exec->vm();
1247 NativeCallFrameTracer tracer(vm, exec);
1249 JSFunction* func = jsCast<JSFunction*>(funcCell);
1250 JSValue name = JSValue::decode(encodedName);
1251 func->setFunctionName(exec, name);
// Slow path for op_new_object: allocate an empty object with the profiled
// Structure when the inline allocation path failed.
1254 JSCell* JIT_OPERATION operationNewObject(ExecState* exec, Structure* structure)
1256 VM* vm = &exec->vm();
1257 NativeCallFrameTracer tracer(vm, exec);
1259 return constructEmptyObject(exec, structure);
// Slow path for op_new_regexp: wraps a (pre-validated) RegExp in a fresh
// RegExpObject for the current global object.
1262 JSCell* JIT_OPERATION operationNewRegexp(ExecState* exec, JSCell* regexpPtr)
1264 SuperSamplerScope superSamplerScope(false);
1265 VM& vm = exec->vm();
1266 NativeCallFrameTracer tracer(&vm, exec);
1268 RegExp* regexp = static_cast<RegExp*>(regexpPtr);
// The bytecode generator only emits valid regexps; invalid ones throw earlier.
1269 ASSERT(regexp->isValid());
1270 return RegExpObject::create(vm, exec->lexicalGlobalObject()->regExpStructure(), regexp);
1273 // The only reason for returning an UnusedPtr (instead of void) is so that we can reuse the
1274 // existing DFG slow path generator machinery when creating the slow path for CheckTraps
1275 // in the DFG. If a DFG slow path generator that supports a void return type is added in the
1276 // future, we can switch to using that then.
1277 UnusedPtr JIT_OPERATION operationHandleTraps(ExecState* exec)
1279 VM& vm = exec->vm();
1280 NativeCallFrameTracer tracer(&vm, exec);
// Only called when the JIT's trap check observed a pending trap.
1281 ASSERT(vm.needTrapHandling());
// Services pending VM traps (e.g. termination requests, debugger interrupts).
1282 vm.handleTraps(exec);
// Slow path for op_debug: forwards the debug hook (breakpoint, step, etc.) to
// the interpreter's debugger machinery.
1286 void JIT_OPERATION operationDebug(ExecState* exec, int32_t debugHookType)
1288 VM& vm = exec->vm();
1289 NativeCallFrameTracer tracer(&vm, exec);
1291 vm.interpreter->debug(exec, static_cast<DebugHookType>(debugHookType));
// Helper used when operationOptimize decides to defer: refresh value-profile
// predictions now, and reset counters so optimization is retried after warm-up.
1295 static void updateAllPredictionsAndOptimizeAfterWarmUp(CodeBlock* codeBlock)
1297 codeBlock->updateAllPredictions();
1298 codeBlock->optimizeAfterWarmUp();
// Baseline->DFG tier-up decision point, called when a CodeBlock's execution
// counter fires (bytecodeIndex != 0 means we are at a loop back-edge and may
// OSR-enter mid-function). Returns encodeResult(0, 0) to keep running
// baseline code, or (OSR entry thunk, data buffer) to jump into optimized
// code. Along the way it may schedule/complete a DFG compile, or jettison a
// replacement that keeps OSR-exiting.
1301 SlowPathReturnType JIT_OPERATION operationOptimize(ExecState* exec, int32_t bytecodeIndex)
1303 VM& vm = exec->vm();
1304 NativeCallFrameTracer tracer(&vm, exec);
1306 // Defer GC for a while so that it doesn't run between when we enter into this
1307 // slow path and when we figure out the state of our code block. This prevents
1308 // a number of awkward reentrancy scenarios, including:
1310 // - The optimized version of our code block being jettisoned by GC right after
1311 // we concluded that we wanted to use it, but have not planted it into the JS
1314 // - An optimized version of our code block being installed just as we decided
1315 // that it wasn't ready yet.
1317 // Note that jettisoning won't happen if we already initiated OSR, because in
1318 // that case we would have already planted the optimized code block into the JS
1320 DeferGCForAWhile deferGC(vm.heap);
1322 CodeBlock* codeBlock = exec->codeBlock();
1323 if (UNLIKELY(codeBlock->jitType() != JITCode::BaselineJIT)) {
1324 dataLog("Unexpected code block in Baseline->DFG tier-up: ", *codeBlock, "\n");
1325 RELEASE_ASSERT_NOT_REACHED();
1328 if (bytecodeIndex) {
1329 // If we're attempting to OSR from a loop, assume that this should be
1330 // separately optimized.
1331 codeBlock->m_shouldAlwaysBeInlined = false;
1334 if (UNLIKELY(Options::verboseOSR())) {
1336 *codeBlock, ": Entered optimize with bytecodeIndex = ", bytecodeIndex,
1337 ", executeCounter = ", codeBlock->jitExecuteCounter(),
1338 ", optimizationDelayCounter = ", codeBlock->reoptimizationRetryCounter(),
1339 ", exitCounter = ");
1340 if (codeBlock->hasOptimizedReplacement())
1341 dataLog(codeBlock->replacement()->osrExitCounter());
// Bail out (and re-arm the counter) if the threshold has not truly been hit.
1347 if (!codeBlock->checkIfOptimizationThresholdReached()) {
1348 CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("counter = ", codeBlock->jitExecuteCounter()));
1349 codeBlock->updateAllPredictions();
1350 if (UNLIKELY(Options::verboseOSR()))
1351 dataLog("Choosing not to optimize ", *codeBlock, " yet, because the threshold hasn't been reached.\n");
1352 return encodeResult(0, 0);
// Never tier up while a debugger is stepping or has installed requests.
1355 Debugger* debugger = codeBlock->globalObject()->debugger();
1356 if (UNLIKELY(debugger && (debugger->isStepping() || codeBlock->baselineAlternative()->hasDebuggerRequests()))) {
1357 CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("debugger is stepping or has requests"));
1358 updateAllPredictionsAndOptimizeAfterWarmUp(codeBlock);
1359 return encodeResult(0, 0);
1362 if (codeBlock->m_shouldAlwaysBeInlined) {
1363 CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("should always be inlined"));
1364 updateAllPredictionsAndOptimizeAfterWarmUp(codeBlock);
1365 if (UNLIKELY(Options::verboseOSR()))
1366 dataLog("Choosing not to optimize ", *codeBlock, " yet, because m_shouldAlwaysBeInlined == true.\n");
1367 return encodeResult(0, 0);
1370 // We cannot be in the process of asynchronous compilation and also have an optimized
1372 DFG::Worklist* worklist = DFG::existingGlobalDFGWorklistOrNull();
1375 || !(worklist->compilationState(DFG::CompilationKey(codeBlock, DFG::DFGMode)) != DFG::Worklist::NotKnown
1376 && codeBlock->hasOptimizedReplacement()));
1378 DFG::Worklist::State worklistState;
1380 // The call to DFG::Worklist::completeAllReadyPlansForVM() will complete all ready
1381 // (i.e. compiled) code blocks. But if it completes ours, we also need to know
1382 // what the result was so that we don't plow ahead and attempt OSR or immediate
1383 // reoptimization. This will have already also set the appropriate JIT execution
1384 // count threshold depending on what happened, so if the compilation was anything
1385 // but successful we just want to return early. See the case for worklistState ==
1386 // DFG::Worklist::Compiled, below.
1388 // Note that we could have alternatively just called Worklist::compilationState()
1389 // here, and if it returned Compiled, we could have then called
1390 // completeAndScheduleOSR() below. But that would have meant that it could take
1391 // longer for code blocks to be completed: they would only complete when *their*
1392 // execution count trigger fired; but that could take a while since the firing is
1393 // racy. It could also mean that code blocks that never run again after being
1394 // compiled would sit on the worklist until next GC. That's fine, but it's
1395 // probably a waste of memory. Our goal here is to complete code blocks as soon as
1396 // possible in order to minimize the chances of us executing baseline code after
1397 // optimized code is already available.
1398 worklistState = worklist->completeAllReadyPlansForVM(
1399 vm, DFG::CompilationKey(codeBlock, DFG::DFGMode));
1401 worklistState = DFG::Worklist::NotKnown;
1403 if (worklistState == DFG::Worklist::Compiling) {
1404 CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("compiling"));
1405 // We cannot be in the process of asynchronous compilation and also have an optimized
1407 RELEASE_ASSERT(!codeBlock->hasOptimizedReplacement());
1408 codeBlock->setOptimizationThresholdBasedOnCompilationResult(CompilationDeferred);
1409 return encodeResult(0, 0);
1412 if (worklistState == DFG::Worklist::Compiled) {
1413 // If we don't have an optimized replacement but we did just get compiled, then
1414 // the compilation failed or was invalidated, in which case the execution count
1415 // thresholds have already been set appropriately by
1416 // CodeBlock::setOptimizationThresholdBasedOnCompilationResult() and we have
1417 // nothing left to do.
1418 if (!codeBlock->hasOptimizedReplacement()) {
1419 CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("compiled and failed"));
1420 codeBlock->updateAllPredictions();
1421 if (UNLIKELY(Options::verboseOSR()))
1422 dataLog("Code block ", *codeBlock, " was compiled but it doesn't have an optimized replacement.\n");
1423 return encodeResult(0, 0);
1425 } else if (codeBlock->hasOptimizedReplacement()) {
1426 if (UNLIKELY(Options::verboseOSR()))
1427 dataLog("Considering OSR ", *codeBlock, " -> ", *codeBlock->replacement(), ".\n");
1428 // If we have an optimized replacement, then it must be the case that we entered
1429 // cti_optimize from a loop. That's because if there's an optimized replacement,
1430 // then all calls to this function will be relinked to the replacement and so
1431 // the prologue OSR will never fire.
1433 // This is an interesting threshold check. Consider that a function OSR exits
1434 // in the middle of a loop, while having a relatively low exit count. The exit
1435 // will reset the execution counter to some target threshold, meaning that this
1436 // code won't be reached until that loop heats up for >=1000 executions. But then
1437 // we do a second check here, to see if we should either reoptimize, or just
1438 // attempt OSR entry. Hence it might even be correct for
1439 // shouldReoptimizeFromLoopNow() to always return true. But we make it do some
1440 // additional checking anyway, to reduce the amount of recompilation thrashing.
1441 if (codeBlock->replacement()->shouldReoptimizeFromLoopNow()) {
1442 CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("should reoptimize from loop now"));
1443 if (UNLIKELY(Options::verboseOSR())) {
1445 "Triggering reoptimization of ", *codeBlock,
1446 "(", *codeBlock->replacement(), ") (in loop).\n");
1448 codeBlock->replacement()->jettison(Profiler::JettisonDueToBaselineLoopReoptimizationTrigger, CountReoptimization);
1449 return encodeResult(0, 0);
1452 if (!codeBlock->shouldOptimizeNow()) {
1453 CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("insufficient profiling"));
1454 if (UNLIKELY(Options::verboseOSR())) {
1456 "Delaying optimization for ", *codeBlock,
1457 " because of insufficient profiling.\n");
1459 return encodeResult(0, 0);
1462 if (UNLIKELY(Options::verboseOSR()))
1463 dataLog("Triggering optimized compilation of ", *codeBlock, "\n");
// Snapshot the live locals (minus callee-save slots) so the DFG can validate
// and honor them at the OSR entry point.
1465 unsigned numVarsWithValues;
1467 numVarsWithValues = codeBlock->m_numCalleeLocals;
1469 numVarsWithValues = 0;
1470 Operands<JSValue> mustHandleValues(codeBlock->numParameters(), numVarsWithValues);
1471 int localsUsedForCalleeSaves = static_cast<int>(CodeBlock::llintBaselineCalleeSaveSpaceAsVirtualRegisters());
1472 for (size_t i = 0; i < mustHandleValues.size(); ++i) {
1473 int operand = mustHandleValues.operandForIndex(i);
1474 if (operandIsLocal(operand) && VirtualRegister(operand).toLocal() < localsUsedForCalleeSaves)
1476 mustHandleValues[i] = exec->uncheckedR(operand).jsValue();
1479 CodeBlock* replacementCodeBlock = codeBlock->newReplacement();
1480 CompilationResult result = DFG::compile(
1481 vm, replacementCodeBlock, nullptr, DFG::DFGMode, bytecodeIndex,
1482 mustHandleValues, JITToDFGDeferredCompilationCallback::create());
1484 if (result != CompilationSuccessful) {
1485 CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("compilation failed"));
1486 return encodeResult(0, 0);
1490 CodeBlock* optimizedCodeBlock = codeBlock->replacement();
1491 ASSERT(JITCode::isOptimizingJIT(optimizedCodeBlock->jitType()));
1493 if (void* dataBuffer = DFG::prepareOSREntry(exec, optimizedCodeBlock, bytecodeIndex)) {
1494 CODEBLOCK_LOG_EVENT(optimizedCodeBlock, "osrEntry", ("at bc#", bytecodeIndex));
1495 if (UNLIKELY(Options::verboseOSR())) {
1497 "Performing OSR ", *codeBlock, " -> ", *optimizedCodeBlock, ".\n");
1500 codeBlock->optimizeSoon();
1501 codeBlock->unlinkedCodeBlock()->setDidOptimize(TrueTriState);
// Jump into optimized code through the OSR entry thunk with the prepared
// entry buffer.
1502 return encodeResult(vm.getCTIStub(DFG::osrEntryThunkGenerator).code().executableAddress(), dataBuffer);
1505 if (UNLIKELY(Options::verboseOSR())) {
1507 "Optimizing ", *codeBlock, " -> ", *codeBlock->replacement(),
1508 " succeeded, OSR failed, after a delay of ",
1509 codeBlock->optimizationDelayCounter(), ".\n");
1512 // Count the OSR failure as a speculation failure. If this happens a lot, then
1514 optimizedCodeBlock->countOSRExit();
1516 // We are a lot more conservative about triggering reoptimization after OSR failure than
1517 // before it. If we enter the optimize_from_loop trigger with a bucket full of fail
1518 // already, then we really would like to reoptimize immediately. But this case covers
1519 // something else: there weren't many (or any) speculation failures before, but we just
1520 // failed to enter the speculative code because some variable had the wrong value or
1521 // because the OSR code decided for any spurious reason that it did not want to OSR
1522 // right now. So, we only trigger reoptimization only upon the more conservative (non-loop)
1523 // reoptimization trigger.
1524 if (optimizedCodeBlock->shouldReoptimizeNow()) {
1525 CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("should reoptimize now"));
1526 if (UNLIKELY(Options::verboseOSR())) {
1528 "Triggering reoptimization of ", *codeBlock, " -> ",
1529 *codeBlock->replacement(), " (after OSR fail).\n");
1531 optimizedCodeBlock->jettison(Profiler::JettisonDueToBaselineLoopReoptimizationTriggerOnOSREntryFail, CountReoptimization);
1532 return encodeResult(0, 0);
1535 // OSR failed this time, but it might succeed next time! Let the code run a bit
1536 // longer and then try again.
1537 codeBlock->optimizeAfterWarmUp();
1539 CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("OSR failed"));
1540 return encodeResult(0, 0);
// Attempts OSR entry into an optimized replacement at a catch handler.
// Returns the machine-code entry buffer, or (in the elided default case of
// the switch — confirm in full file) a value signaling "keep running here".
1543 char* JIT_OPERATION operationTryOSREnterAtCatch(ExecState* exec, uint32_t bytecodeIndex)
1545 VM& vm = exec->vm();
1546 NativeCallFrameTracer tracer(&vm, exec);
1548 CodeBlock* optimizedReplacement = exec->codeBlock()->replacement();
1549 switch (optimizedReplacement->jitType()) {
1550 case JITCode::DFGJIT:
1551 case JITCode::FTLJIT:
1552 return static_cast<char*>(DFG::prepareCatchOSREntry(exec, optimizedReplacement, bytecodeIndex));
// Like operationTryOSREnterAtCatch, but when OSR entry is not possible it
// additionally records the current live values into the catch handler's value
// profiles, so a future DFG compile can speculate on them.
1559 char* JIT_OPERATION operationTryOSREnterAtCatchAndValueProfile(ExecState* exec, uint32_t bytecodeIndex)
1561 VM& vm = exec->vm();
1562 NativeCallFrameTracer tracer(&vm, exec);
1564 CodeBlock* codeBlock = exec->codeBlock();
1565 CodeBlock* optimizedReplacement = codeBlock->replacement();
1567 switch (optimizedReplacement->jitType()) {
1568 case JITCode::DFGJIT:
1569 case JITCode::FTLJIT:
1570 return static_cast<char*>(DFG::prepareCatchOSREntry(exec, optimizedReplacement, bytecodeIndex));
// Fallthrough path: profile the live operands at this catch site.
1575 codeBlock->ensureCatchLivenessIsComputedForBytecodeOffset(bytecodeIndex);
// The buffer pointer lives in the op_catch instruction's 4th operand slot.
1576 ValueProfileAndOperandBuffer* buffer = static_cast<ValueProfileAndOperandBuffer*>(codeBlock->instructions()[bytecodeIndex + 3].u.pointer);
1577 buffer->forEach([&] (ValueProfileAndOperand& profile) {
1578 profile.m_profile.m_buckets[0] = JSValue::encode(exec->uncheckedR(profile.m_operand).jsValue());
// Slow path for op_put_by_index: define an own indexed property directly on
// the (asserted) array, bypassing setters on the prototype chain.
1586 void JIT_OPERATION operationPutByIndex(ExecState* exec, EncodedJSValue encodedArrayValue, int32_t index, EncodedJSValue encodedValue)
1588 VM& vm = exec->vm();
1589 NativeCallFrameTracer tracer(&vm, exec);
1591 JSValue arrayValue = JSValue::decode(encodedArrayValue);
1592 ASSERT(isJSArray(arrayValue));
1593 asArray(arrayValue)->putDirectIndex(exec, index, JSValue::decode(encodedValue));
// Distinguishes getter vs setter installation in putAccessorByVal below.
1596 enum class AccessorType {
// Shared helper for operationPutGetterByVal/operationPutSetterByVal: converts
// the subscript to a property key (which can throw, e.g. via toString on an
// object key) and installs the accessor with the given attributes.
1601 static void putAccessorByVal(ExecState* exec, JSObject* base, JSValue subscript, int32_t attribute, JSObject* accessor, AccessorType accessorType)
1603 VM& vm = exec->vm();
1604 auto scope = DECLARE_THROW_SCOPE(vm);
1605 auto propertyKey = subscript.toPropertyKey(exec);
1606 RETURN_IF_EXCEPTION(scope, void());
1609 if (accessorType == AccessorType::Getter)
1610 base->putGetter(exec, propertyKey, accessor, attribute);
1612 base->putSetter(exec, propertyKey, accessor, attribute);
// Slow path for op_put_getter_by_id: installs `getter` for the identifier
// `uid` on `object` with the given attribute bits.
1615 void JIT_OPERATION operationPutGetterById(ExecState* exec, JSCell* object, UniquedStringImpl* uid, int32_t options, JSCell* getter)
1617 VM& vm = exec->vm();
1618 NativeCallFrameTracer tracer(&vm, exec);
1620 ASSERT(object && object->isObject());
1621 JSObject* baseObj = object->getObject();
1623 ASSERT(getter->isObject());
1624 baseObj->putGetter(exec, uid, getter, options);
// Slow path for op_put_setter_by_id: mirror of operationPutGetterById.
1627 void JIT_OPERATION operationPutSetterById(ExecState* exec, JSCell* object, UniquedStringImpl* uid, int32_t options, JSCell* setter)
1629 VM& vm = exec->vm();
1630 NativeCallFrameTracer tracer(&vm, exec);
1632 ASSERT(object && object->isObject());
1633 JSObject* baseObj = object->getObject();
1635 ASSERT(setter->isObject());
1636 baseObj->putSetter(exec, uid, setter, options);
// Slow path for op_put_getter_by_val: computed-key getter installation.
1639 void JIT_OPERATION operationPutGetterByVal(ExecState* exec, JSCell* base, EncodedJSValue encodedSubscript, int32_t attribute, JSCell* getter)
1641 VM& vm = exec->vm();
1642 NativeCallFrameTracer tracer(&vm, exec);
1644 putAccessorByVal(exec, asObject(base), JSValue::decode(encodedSubscript), attribute, asObject(getter), AccessorType::Getter);
1647 void JIT_OPERATION operationPutSetterByVal(ExecState* exec, JSCell* base, EncodedJSValue encodedSubscript, int32_t attribute, JSCell* setter)
1649 VM& vm = exec->vm();
1650 NativeCallFrameTracer tracer(&vm, exec);
1652 putAccessorByVal(exec, asObject(base), JSValue::decode(encodedSubscript), attribute, asObject(setter), AccessorType::Setter);
// JIT slow-path (EncodedJSValue variant): build a GetterSetter cell from a
// getter and/or setter value (either may be undefined, but not both — see the
// third ASSERT) and install it as a direct accessor property.
1656 void JIT_OPERATION operationPutGetterSetter(ExecState* exec, JSCell* object, UniquedStringImpl* uid, int32_t attribute, EncodedJSValue encodedGetterValue, EncodedJSValue encodedSetterValue)
1658 VM& vm = exec->vm();
1659 NativeCallFrameTracer tracer(&vm, exec);
1661 ASSERT(object && object->isObject());
1662 JSObject* baseObj = asObject(object);
1664 GetterSetter* accessor = GetterSetter::create(vm, exec->lexicalGlobalObject());
1666 JSValue getter = JSValue::decode(encodedGetterValue);
1667 JSValue setter = JSValue::decode(encodedSetterValue);
1668 ASSERT(getter.isObject() || getter.isUndefined());
1669 ASSERT(setter.isObject() || setter.isUndefined());
// At least one side of the accessor pair must be present.
1670 ASSERT(getter.isObject() || setter.isObject());
1672 if (!getter.isUndefined())
1673 accessor->setGetter(vm, exec->lexicalGlobalObject(), asObject(getter));
1674 if (!setter.isUndefined())
1675 accessor->setSetter(vm, exec->lexicalGlobalObject(), asObject(setter));
1676 baseObj->putDirectAccessor(exec, uid, accessor, attribute);
// JIT slow-path (JSCell* variant): same as above but the getter/setter arrive
// as raw cells where null means "absent".
1680 void JIT_OPERATION operationPutGetterSetter(ExecState* exec, JSCell* object, UniquedStringImpl* uid, int32_t attribute, JSCell* getter, JSCell* setter)
1682 VM& vm = exec->vm();
1683 NativeCallFrameTracer tracer(&vm, exec);
1685 ASSERT(object && object->isObject());
1686 JSObject* baseObj = asObject(object);
1688 GetterSetter* accessor = GetterSetter::create(vm, exec->lexicalGlobalObject());
1690 ASSERT(!getter || getter->isObject());
1691 ASSERT(!setter || setter->isObject());
1692 ASSERT(getter || setter);
// Null checks guarding these setGetter/setSetter calls are elided in this
// extraction (original has `if (getter)` / `if (setter)` per the asserts).
1695 accessor->setGetter(vm, exec->lexicalGlobalObject(), getter->getObject());
1697 accessor->setSetter(vm, exec->lexicalGlobalObject(), setter->getObject());
1698 baseObj->putDirectAccessor(exec, uid, accessor, attribute);
// JIT slow-path: pop the innermost scope — replaces the scope register's
// current JSScope with its enclosing scope (scope->next()).
1702 void JIT_OPERATION operationPopScope(ExecState* exec, int32_t scopeReg)
1704 VM& vm = exec->vm();
1705 NativeCallFrameTracer tracer(&vm, exec);
1707 JSScope* scope = exec->uncheckedR(scopeReg).Register::scope();
1708 exec->uncheckedR(scopeReg) = scope->next();
// JIT slow-path for `instanceof` with a custom Symbol.hasInstance: evaluates
// constructor->hasInstance(value, hasInstanceValue). The return statements
// for both outcomes are elided in this extraction.
1711 int32_t JIT_OPERATION operationInstanceOfCustom(ExecState* exec, EncodedJSValue encodedValue, JSObject* constructor, EncodedJSValue encodedHasInstance)
1713 VM& vm = exec->vm();
1714 NativeCallFrameTracer tracer(&vm, exec);
1716 JSValue value = JSValue::decode(encodedValue);
1717 JSValue hasInstanceValue = JSValue::decode(encodedHasInstance);
// We only reach this slow path when hasInstance is custom (not the default
// Function.prototype[Symbol.hasInstance] on a default-hasInstance structure).
1719 ASSERT(hasInstanceValue != exec->lexicalGlobalObject()->functionProtoHasInstanceSymbolFunction() || !constructor->structure()->typeInfo().implementsDefaultHasInstance());
1721 if (constructor->hasInstance(exec, value, hasInstanceValue))
// Returns whether |index| on an arguments object can be read without a
// generic property lookup — i.e. it is a DFG-mapped argument slot of a
// DirectArguments or ScopedArguments object. The `return` statements and
// default case are elided in this extraction.
1728 static bool canAccessArgumentIndexQuickly(JSObject& object, uint32_t index)
1730 switch (object.structure()->typeInfo().type()) {
1731 case DirectArgumentsType: {
1732 DirectArguments* directArguments = jsCast<DirectArguments*>(&object);
1733 if (directArguments->isMappedArgumentInDFG(index))
1737 case ScopedArgumentsType: {
1738 ScopedArguments* scopedArguments = jsCast<ScopedArguments*>(&object);
1739 if (scopedArguments->isMappedArgumentInDFG(index))
// Generic get-by-val slow path shared by the operationGetByVal* entry points.
// Fast cases, in order: (1) cell base + string subscript via fastGetOwnProperty,
// (2) uint32 subscript on a string or an object with quick indexed access,
// (3) fall through to a full toPropertyKey + baseValue.get lookup.
// Along the way it updates ByValInfo profiling state (tookSlowPath,
// arrayProfile out-of-bounds) used by the by-val inline caches.
1749 static JSValue getByVal(ExecState* exec, JSValue baseValue, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
1751 VM& vm = exec->vm();
1752 auto scope = DECLARE_THROW_SCOPE(vm);
1754 if (LIKELY(baseValue.isCell() && subscript.isString())) {
1755 Structure& structure = *baseValue.asCell()->structure(vm);
1756 if (JSCell::canUseFastGetOwnProperty(structure)) {
// Only strings that already have an atomic version can hit the fast path;
// toExistingAtomicString avoids creating a new atomic string.
1757 if (RefPtr<AtomicStringImpl> existingAtomicString = asString(subscript)->toExistingAtomicString(exec)) {
1758 if (JSValue result = baseValue.asCell()->fastGetOwnProperty(vm, structure, existingAtomicString.get())) {
1759 ASSERT(exec->bytecodeOffset());
// A stub specialized for a different cached id cannot serve this access.
1760 if (byValInfo->stubInfo && byValInfo->cachedId.impl() != existingAtomicString)
1761 byValInfo->tookSlowPath = true;
1768 if (subscript.isUInt32()) {
1769 ASSERT(exec->bytecodeOffset());
1770 byValInfo->tookSlowPath = true;
1772 uint32_t i = subscript.asUInt32();
1773 if (isJSString(baseValue)) {
1774 if (asString(baseValue)->canGetIndex(i)) {
// Re-point the call site at the string-specialized thunk for next time.
1775 ctiPatchCallByReturnAddress(returnAddress, FunctionPtr(operationGetByValString));
1777 return asString(baseValue)->getIndex(exec, i);
// String index out of range: record out-of-bounds in the array profile.
1779 byValInfo->arrayProfile->setOutOfBounds();
1780 } else if (baseValue.isObject()) {
1781 JSObject* object = asObject(baseValue);
1782 if (object->canGetIndexQuickly(i))
1783 return object->getIndexQuickly(i);
1785 if (!canAccessArgumentIndexQuickly(*object, i)) {
1786 // FIXME: This will make us think that in-bounds typed array accesses are actually
1788 // https://bugs.webkit.org/show_bug.cgi?id=149886
1789 byValInfo->arrayProfile->setOutOfBounds();
1794 return baseValue.get(exec, i);
// Fully generic path: requireObjectCoercible + toPropertyKey may both throw.
1797 baseValue.requireObjectCoercible(exec);
1798 RETURN_IF_EXCEPTION(scope, JSValue());
1799 auto property = subscript.toPropertyKey(exec);
1800 RETURN_IF_EXCEPTION(scope, JSValue());
1802 ASSERT(exec->bytecodeOffset());
1803 if (byValInfo->stubInfo && (!isStringOrSymbol(subscript) || byValInfo->cachedId != property))
1804 byValInfo->tookSlowPath = true;
1807 return baseValue.get(exec, property);
// Decides whether/how to specialize a get_by_val inline cache for the access
// just observed. Two specializations exist: (1) object + int32 subscript —
// compile an array-mode-specific stub; (2) object + string/symbol subscript —
// compile a cached-id stub once the same identifier is seen twice. Returns
// Optimized, SeenOnce, NotOptimized, or GiveUp; after 10 un-optimized slow
// hits (or an index-intercepting object) the caller gives up permanently.
1810 static OptimizationResult tryGetByValOptimize(ExecState* exec, JSValue baseValue, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
1812 // See if it's worth optimizing this at all.
1813 OptimizationResult optimizationResult = OptimizationResult::NotOptimized;
1815 VM& vm = exec->vm();
1817 if (baseValue.isObject() && subscript.isInt32()) {
1818 JSObject* object = asObject(baseValue);
1820 ASSERT(exec->bytecodeOffset());
1821 ASSERT(!byValInfo->stubRoutine);
1823 if (hasOptimizableIndexing(object->structure(vm))) {
1824 // Attempt to optimize.
1825 Structure* structure = object->structure(vm);
1826 JITArrayMode arrayMode = jitArrayModeForStructure(structure);
1827 if (arrayMode != byValInfo->arrayMode) {
1828 // If we reached this case, we got an interesting array mode we did not expect when we compiled.
1829 // Let's update the profile to do better next time.
1830 CodeBlock* codeBlock = exec->codeBlock();
// The array profile is read by concurrent compiler threads; take the lock.
1831 ConcurrentJSLocker locker(codeBlock->m_lock);
1832 byValInfo->arrayProfile->computeUpdatedPrediction(locker, codeBlock, structure);
1834 JIT::compileGetByVal(&vm, exec->codeBlock(), byValInfo, returnAddress, arrayMode);
1835 optimizationResult = OptimizationResult::Optimized;
1839 // If we failed to patch and we have some object that intercepts indexed get, then don't even wait until 10 times.
1840 if (optimizationResult != OptimizationResult::Optimized && object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero())
1841 optimizationResult = OptimizationResult::GiveUp;
1844 if (baseValue.isObject() && isStringOrSymbol(subscript)) {
1845 const Identifier propertyName = subscript.toPropertyKey(exec);
// Skip identifiers that parse as array indices — those belong to the
// int32-subscript path above.
1846 if (subscript.isSymbol() || !parseIndex(propertyName)) {
1847 ASSERT(exec->bytecodeOffset());
1848 ASSERT(!byValInfo->stubRoutine);
1849 if (byValInfo->seen) {
1850 if (byValInfo->cachedId == propertyName) {
// Second sighting of the same id: worth compiling a cached-id stub.
1851 JIT::compileGetByValWithCachedId(&vm, exec->codeBlock(), byValInfo, returnAddress, propertyName);
1852 optimizationResult = OptimizationResult::Optimized;
1854 // Seem like a generic property access site.
1855 optimizationResult = OptimizationResult::GiveUp;
// First sighting: remember the id and wait for a repeat.
1858 CodeBlock* codeBlock = exec->codeBlock();
1859 ConcurrentJSLocker locker(codeBlock->m_lock);
1860 byValInfo->seen = true;
1861 byValInfo->cachedId = propertyName;
// Symbols are GC cells; keep the cached one alive via a write-barriered ref.
1862 if (subscript.isSymbol())
1863 byValInfo->cachedSymbol.set(vm, codeBlock, asSymbol(subscript));
1864 optimizationResult = OptimizationResult::SeenOnce;
1869 if (optimizationResult != OptimizationResult::Optimized && optimizationResult != OptimizationResult::SeenOnce) {
1870 // If we take slow path more than 10 times without patching then make sure we
1871 // never make that mistake again. For cases where we see non-index-intercepting
1872 // objects, this gives 10 iterations worth of opportunity for us to observe
1873 // that the get_by_val may be polymorphic. We count up slowPathCount even if
1874 // the result is GiveUp.
1875 if (++byValInfo->slowPathCount >= 10)
1876 optimizationResult = OptimizationResult::GiveUp;
1879 return optimizationResult;
// Fully generic get_by_val entry point: no optimization attempt, just the
// shared getByVal slow path. Call sites are repatched to this once a site
// gives up on specialization.
1884 EncodedJSValue JIT_OPERATION operationGetByValGeneric(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
1886 VM& vm = exec->vm();
1887 NativeCallFrameTracer tracer(&vm, exec);
1888 JSValue baseValue = JSValue::decode(encodedBase);
1889 JSValue subscript = JSValue::decode(encodedSubscript);
1891 JSValue result = getByVal(exec, baseValue, subscript, byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS));
1892 return JSValue::encode(result);
// Optimizing get_by_val entry point: first tries to specialize the inline
// cache via tryGetByValOptimize; on GiveUp, repatches the call site to the
// generic variant above, then performs the access via getByVal either way.
1895 EncodedJSValue JIT_OPERATION operationGetByValOptimize(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
1897 VM& vm = exec->vm();
1898 NativeCallFrameTracer tracer(&vm, exec);
1900 JSValue baseValue = JSValue::decode(encodedBase);
1901 JSValue subscript = JSValue::decode(encodedSubscript);
1902 ReturnAddressPtr returnAddress = ReturnAddressPtr(OUR_RETURN_ADDRESS);
1903 if (tryGetByValOptimize(exec, baseValue, subscript, byValInfo, returnAddress) == OptimizationResult::GiveUp) {
1904 // Don't ever try to optimize.
1905 byValInfo->tookSlowPath = true;
1906 ctiPatchCallByReturnAddress(returnAddress, FunctionPtr(operationGetByValGeneric));
1909 return JSValue::encode(getByVal(exec, baseValue, subscript, byValInfo, returnAddress));
// has_indexed_property slow path that still tries to specialize: if the
// object's structure has optimizable indexing and the observed array mode
// differs from the profiled one, compile a mode-specific stub; after 10 slow
// hits (or an index-intercepting object) repatch to the generic variant.
// Finally answers the membership query itself.
1912 EncodedJSValue JIT_OPERATION operationHasIndexedPropertyDefault(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
1914 VM& vm = exec->vm();
1915 NativeCallFrameTracer tracer(&vm, exec);
1916 JSValue baseValue = JSValue::decode(encodedBase);
1917 JSValue subscript = JSValue::decode(encodedSubscript);
// The JIT guarantees these invariants before calling this operation.
1919 ASSERT(baseValue.isObject());
1920 ASSERT(subscript.isUInt32());
1922 JSObject* object = asObject(baseValue);
1923 bool didOptimize = false;
1925 ASSERT(exec->bytecodeOffset());
1926 ASSERT(!byValInfo->stubRoutine);
1928 if (hasOptimizableIndexing(object->structure(vm))) {
1929 // Attempt to optimize.
1930 JITArrayMode arrayMode = jitArrayModeForStructure(object->structure(vm));
1931 if (arrayMode != byValInfo->arrayMode) {
1932 JIT::compileHasIndexedProperty(&vm, exec->codeBlock(), byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS), arrayMode);
// (didOptimize = true and surrounding lines elided in this extraction.)
1938 // If we take slow path more than 10 times without patching then make sure we
1939 // never make that mistake again. Or, if we failed to patch and we have some object
1940 // that intercepts indexed get, then don't even wait until 10 times. For cases
1941 // where we see non-index-intercepting objects, this gives 10 iterations worth of
1942 // opportunity for us to observe that the get_by_val may be polymorphic.
1943 if (++byValInfo->slowPathCount >= 10
1944 || object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero()) {
1945 // Don't ever try to optimize.
1946 ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(operationHasIndexedPropertyGeneric));
1950 uint32_t index = subscript.asUInt32();
1951 if (object->canGetIndexQuickly(index))
1952 return JSValue::encode(JSValue(JSValue::JSTrue));
1954 if (!canAccessArgumentIndexQuickly(*object, index)) {
1955 // FIXME: This will make us think that in-bounds typed array accesses are actually
1957 // https://bugs.webkit.org/show_bug.cgi?id=149886
1958 byValInfo->arrayProfile->setOutOfBounds();
1960 return JSValue::encode(jsBoolean(object->hasPropertyGeneric(exec, index, PropertySlot::InternalMethodType::GetOwnProperty)));
// Generic has_indexed_property: same membership logic as above, with no
// optimization attempt. Call sites are repatched here on give-up.
1963 EncodedJSValue JIT_OPERATION operationHasIndexedPropertyGeneric(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
1965 VM& vm = exec->vm();
1966 NativeCallFrameTracer tracer(&vm, exec);
1967 JSValue baseValue = JSValue::decode(encodedBase);
1968 JSValue subscript = JSValue::decode(encodedSubscript);
1970 ASSERT(baseValue.isObject());
1971 ASSERT(subscript.isUInt32());
1973 JSObject* object = asObject(baseValue);
1974 uint32_t index = subscript.asUInt32();
1975 if (object->canGetIndexQuickly(index))
1976 return JSValue::encode(JSValue(JSValue::JSTrue));
1978 if (!canAccessArgumentIndexQuickly(*object, index)) {
1979 // FIXME: This will make us think that in-bounds typed array accesses are actually
1981 // https://bugs.webkit.org/show_bug.cgi?id=149886
1982 byValInfo->arrayProfile->setOutOfBounds();
1984 return JSValue::encode(jsBoolean(object->hasPropertyGeneric(exec, subscript.asUInt32(), PropertySlot::InternalMethodType::GetOwnProperty)));
// get_by_val entry specialized for string bases with uint32 subscripts.
// If the fast string-index path stops applying (non-string base), the call
// site is repatched back to the optimize/generic variant; otherwise falls
// through to a fully generic property lookup.
1987 EncodedJSValue JIT_OPERATION operationGetByValString(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
1989 VM& vm = exec->vm();
1990 NativeCallFrameTracer tracer(&vm, exec);
1991 auto scope = DECLARE_THROW_SCOPE(vm);
1992 JSValue baseValue = JSValue::decode(encodedBase);
1993 JSValue subscript = JSValue::decode(encodedSubscript);
1996 if (LIKELY(subscript.isUInt32())) {
1997 uint32_t i = subscript.asUInt32();
1998 if (isJSString(baseValue) && asString(baseValue)->canGetIndex(i)) {
2000 return JSValue::encode(asString(baseValue)->getIndex(exec, i));
// (Declaration of `result` elided in this extraction.)
2002 result = baseValue.get(exec, i);
2003 RETURN_IF_EXCEPTION(scope, encodedJSValue());
2004 if (!isJSString(baseValue)) {
2005 ASSERT(exec->bytecodeOffset());
// Base is no longer a string: un-specialize this call site.
2006 ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(byValInfo->stubRoutine ? operationGetByValGeneric : operationGetByValOptimize));
// Generic fallback: both calls below may throw.
2009 baseValue.requireObjectCoercible(exec);
2010 RETURN_IF_EXCEPTION(scope, encodedJSValue());
2011 auto property = subscript.toPropertyKey(exec);
2012 RETURN_IF_EXCEPTION(scope, encodedJSValue());
2014 result = baseValue.get(exec, property);
2017 return JSValue::encode(result);
// Thin wrapper: box the boolean result of operationDeleteById as a JSValue.
2020 EncodedJSValue JIT_OPERATION operationDeleteByIdJSResult(ExecState* exec, EncodedJSValue base, UniquedStringImpl* uid)
2022 return JSValue::encode(jsBoolean(operationDeleteById(exec, base, uid)));
// JIT slow-path for `delete obj.prop` with a known property name: ToObject
// the base, call the [[Delete]] method-table hook, and in strict mode throw
// a TypeError when deletion fails (per ES spec). The final return of
// `couldDelete` is elided in this extraction.
2025 size_t JIT_OPERATION operationDeleteById(ExecState* exec, EncodedJSValue encodedBase, UniquedStringImpl* uid)
2027 VM& vm = exec->vm();
2028 NativeCallFrameTracer tracer(&vm, exec);
2029 auto scope = DECLARE_THROW_SCOPE(vm);
2031 JSObject* baseObj = JSValue::decode(encodedBase).toObject(exec);
2032 RETURN_IF_EXCEPTION(scope, false);
2035 bool couldDelete = baseObj->methodTable(vm)->deleteProperty(baseObj, exec, Identifier::fromUid(&vm, uid));
2036 RETURN_IF_EXCEPTION(scope, false);
2037 if (!couldDelete && exec->codeBlock()->isStrictMode())
2038 throwTypeError(exec, scope, ASCIILiteral(UnableToDeletePropertyError));
// Thin wrapper: box the boolean result of operationDeleteByVal as a JSValue.
2042 EncodedJSValue JIT_OPERATION operationDeleteByValJSResult(ExecState* exec, EncodedJSValue base, EncodedJSValue key)
2044 return JSValue::encode(jsBoolean(operationDeleteByVal(exec, base, key)));
// JIT slow-path for `delete obj[key]`: integer keys go through the indexed
// delete hook, everything else through toPropertyKey + the named delete hook.
// Strict-mode failure throws, matching operationDeleteById above.
2047 size_t JIT_OPERATION operationDeleteByVal(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedKey)
2049 VM& vm = exec->vm();
2050 NativeCallFrameTracer tracer(&vm, exec);
2051 auto scope = DECLARE_THROW_SCOPE(vm);
2053 JSObject* baseObj = JSValue::decode(encodedBase).toObject(exec);
2054 RETURN_IF_EXCEPTION(scope, false);
2055 JSValue key = JSValue::decode(encodedKey);
// (Declarations of `couldDelete`/`index` elided in this extraction.)
2061 if (key.getUInt32(index))
2062 couldDelete = baseObj->methodTable(vm)->deletePropertyByIndex(baseObj, exec, index);
2064 Identifier property = key.toPropertyKey(exec);
2065 RETURN_IF_EXCEPTION(scope, false);
2066 couldDelete = baseObj->methodTable(vm)->deleteProperty(baseObj, exec, property);
2068 RETURN_IF_EXCEPTION(scope, false);
2069 if (!couldDelete && exec->codeBlock()->isStrictMode())
2070 throwTypeError(exec, scope, ASCIILiteral(UnableToDeletePropertyError));
// JIT slow-path for `with (expr)`: ToObject the operand (which may throw),
// then push a JSWithScope wrapping it on top of the current scope chain.
2074 JSCell* JIT_OPERATION operationPushWithScope(ExecState* exec, JSCell* currentScopeCell, EncodedJSValue objectValue)
2076 VM& vm = exec->vm();
2077 NativeCallFrameTracer tracer(&vm, exec);
2078 auto scope = DECLARE_THROW_SCOPE(vm);
2080 JSObject* object = JSValue::decode(objectValue).toObject(exec);
2081 RETURN_IF_EXCEPTION(scope, nullptr);
2083 JSScope* currentScope = jsCast<JSScope*>(currentScopeCell);
2085 return JSWithScope::create(vm, exec->lexicalGlobalObject(), currentScope, object);
// Variant taking an already-materialized JSObject*: no ToObject, no throw.
2088 JSCell* JIT_OPERATION operationPushWithScopeObject(ExecState* exec, JSCell* currentScopeCell, JSObject* object)
2090 VM& vm = exec->vm();
2091 NativeCallFrameTracer tracer(&vm, exec);
2092 JSScope* currentScope = jsCast<JSScope*>(currentScopeCell);
2093 return JSWithScope::create(vm, exec->lexicalGlobalObject(), currentScope, object);
// JIT slow-path for default `instanceof` semantics: prototype-chain walk via
// JSObject::defaultHasInstance, boxed as a boolean.
2096 EncodedJSValue JIT_OPERATION operationInstanceOf(ExecState* exec, EncodedJSValue encodedValue, EncodedJSValue encodedProto)
2098 VM& vm = exec->vm();
2099 NativeCallFrameTracer tracer(&vm, exec);
2100 JSValue value = JSValue::decode(encodedValue);
2101 JSValue proto = JSValue::decode(encodedProto);
2103 bool result = JSObject::defaultHasInstance(exec, value, proto);
2104 return JSValue::encode(jsBoolean(result));
// Computes the stack-frame size needed to forward the current frame's
// arguments to a callee (used by forwarding varargs calls). The unnamed
// parameters keep the signature parallel with operationSizeFrameForVarargs.
2107 int32_t JIT_OPERATION operationSizeFrameForForwardArguments(ExecState* exec, EncodedJSValue, int32_t numUsedStackSlots, int32_t)
2109 VM& vm = exec->vm();
2110 NativeCallFrameTracer tracer(&vm, exec);
2111 return sizeFrameForForwardArguments(exec, vm, numUsedStackSlots);
// Computes the stack-frame size needed to spread an arbitrary arguments
// value (array-like) into a varargs call, skipping firstVarArgOffset entries.
2114 int32_t JIT_OPERATION operationSizeFrameForVarargs(ExecState* exec, EncodedJSValue encodedArguments, int32_t numUsedStackSlots, int32_t firstVarArgOffset)
2116 VM& vm = exec->vm();
2117 NativeCallFrameTracer tracer(&vm, exec);
2118 JSValue arguments = JSValue::decode(encodedArguments);
2119 return sizeFrameForVarargs(exec, vm, arguments, numUsedStackSlots, firstVarArgOffset);
// Populates a pre-sized callee frame by copying the caller's arguments
// (forwarding case). Returns the frame so the JIT can tail into the call.
2122 CallFrame* JIT_OPERATION operationSetupForwardArgumentsFrame(ExecState* exec, CallFrame* newCallFrame, EncodedJSValue, int32_t, int32_t length)
2124 VM& vm = exec->vm();
2125 NativeCallFrameTracer tracer(&vm, exec);
2126 setupForwardArgumentsFrame(exec, newCallFrame, length);
2127 return newCallFrame;
// Populates a pre-sized callee frame from an arbitrary arguments value.
2130 CallFrame* JIT_OPERATION operationSetupVarargsFrame(ExecState* exec, CallFrame* newCallFrame, EncodedJSValue encodedArguments, int32_t firstVarArgOffset, int32_t length)
2132 VM& vm = exec->vm();
2133 NativeCallFrameTracer tracer(&vm, exec);
2134 JSValue arguments = JSValue::decode(encodedArguments);
2135 setupVarargsFrame(exec, newCallFrame, arguments, firstVarArgOffset, length);
2136 return newCallFrame;
// switch_char slow path: a key of statically-unknown type reaches here; only
// a single-character string selects a case target, anything else falls to the
// jump table's default. Returns the machine-code address to jump to.
2139 char* JIT_OPERATION operationSwitchCharWithUnknownKeyType(ExecState* exec, EncodedJSValue encodedKey, size_t tableIndex)
2141 VM& vm = exec->vm();
2142 NativeCallFrameTracer tracer(&vm, exec);
2143 JSValue key = JSValue::decode(encodedKey);
2144 CodeBlock* codeBlock = exec->codeBlock();
2146 SimpleJumpTable& jumpTable = codeBlock->switchJumpTable(tableIndex);
// Default target unless the key turns out to be a 1-char string below.
2147 void* result = jumpTable.ctiDefault.executableAddress();
2149 if (key.isString()) {
2150 StringImpl* value = asString(key)->value(exec).impl();
2151 if (value->length() == 1)
2152 result = jumpTable.ctiForValue((*value)[0]).executableAddress();
2155 return reinterpret_cast<char*>(result);
// switch_imm slow path: int32 keys (or doubles that are exactly int32)
// select a case target; everything else takes the default.
2158 char* JIT_OPERATION operationSwitchImmWithUnknownKeyType(ExecState* exec, EncodedJSValue encodedKey, size_t tableIndex)
2160 VM& vm = exec->vm();
2161 NativeCallFrameTracer tracer(&vm, exec);
2162 JSValue key = JSValue::decode(encodedKey);
2163 CodeBlock* codeBlock = exec->codeBlock();
2165 SimpleJumpTable& jumpTable = codeBlock->switchJumpTable(tableIndex);
// (The `if (key.isInt32())` guard and `result` declaration are elided in
// this extraction.)
2168 result = jumpTable.ctiForValue(key.asInt32()).executableAddress();
2169 else if (key.isDouble() && key.asDouble() == static_cast<int32_t>(key.asDouble()))
2170 result = jumpTable.ctiForValue(static_cast<int32_t>(key.asDouble())).executableAddress();
2172 result = jumpTable.ctiDefault.executableAddress();
2173 return reinterpret_cast<char*>(result);
// switch_string slow path: string keys are looked up in the string jump
// table; non-strings take the default target.
2176 char* JIT_OPERATION operationSwitchStringWithUnknownKeyType(ExecState* exec, EncodedJSValue encodedKey, size_t tableIndex)
2178 VM& vm = exec->vm();
2179 NativeCallFrameTracer tracer(&vm, exec);
2180 JSValue key = JSValue::decode(encodedKey);
2181 CodeBlock* codeBlock = exec->codeBlock();
2184 StringJumpTable& jumpTable = codeBlock->stringSwitchJumpTable(tableIndex);
2186 if (key.isString()) {
2187 StringImpl* value = asString(key)->value(exec).impl();
2188 result = jumpTable.ctiForValue(value).executableAddress();
2190 result = jumpTable.ctiDefault.executableAddress();
2192 return reinterpret_cast<char*>(result);
// get_from_scope slow path: decodes the identifier, scope object, and
// GetPutInfo straight out of the bytecode instruction stream, then performs
// the lookup with a callback that handles not-found (ThrowIfNotFound →
// ReferenceError) and global-lexical TDZ checks, and opportunistically
// caches global accesses via tryCacheGetFromScopeGlobal.
2195 EncodedJSValue JIT_OPERATION operationGetFromScope(ExecState* exec, Instruction* bytecodePC)
2197 VM& vm = exec->vm();
2198 NativeCallFrameTracer tracer(&vm, exec);
2199 auto throwScope = DECLARE_THROW_SCOPE(vm);
2201 CodeBlock* codeBlock = exec->codeBlock();
2202 Instruction* pc = bytecodePC;
// Operand layout: pc[2] = scope register, pc[3] = identifier index,
// pc[4] = GetPutInfo bits.
2204 const Identifier& ident = codeBlock->identifier(pc[3].u.operand);
2205 JSObject* scope = jsCast<JSObject*>(exec->uncheckedR(pc[2].u.operand).jsValue());
2206 GetPutInfo getPutInfo(pc[4].u.operand);
2208 // ModuleVar is always converted to ClosureVar for get_from_scope.
2209 ASSERT(getPutInfo.resolveType() != ModuleVar);
2211 throwScope.release();
2212 return JSValue::encode(scope->getPropertySlot(exec, ident, [&] (bool found, PropertySlot& slot) -> JSValue {
// Not-found case (the `if (!found)` guard is elided in this extraction).
2214 if (getPutInfo.resolveMode() == ThrowIfNotFound)
2215 throwException(exec, throwScope, createUndefinedVariableError(exec, ident));
2216 return jsUndefined();
2219 JSValue result = JSValue();
2220 if (scope->isGlobalLexicalEnvironment()) {
2221 // When we can't statically prove we need a TDZ check, we must perform the check on the slow path.
2222 result = slot.getValue(exec, ident);
2223 if (result == jsTDZValue()) {
2224 throwException(exec, throwScope, createTDZError(exec));
2225 return jsUndefined();
2229 CommonSlowPaths::tryCacheGetFromScopeGlobal(exec, vm, pc, scope, slot, ident);
2232 return slot.getValue(exec, ident);
// put_to_scope slow path: mirrors operationGetFromScope. Handles, in order:
// LocalClosureVar stores (direct variable write + watchpoint fire), global
// lexical TDZ checks, ThrowIfNotFound unresolvable writes, and finally a
// generic put, with global put-caching at the end.
2237 void JIT_OPERATION operationPutToScope(ExecState* exec, Instruction* bytecodePC)
2239 VM& vm = exec->vm();
2240 NativeCallFrameTracer tracer(&vm, exec);
2241 auto throwScope = DECLARE_THROW_SCOPE(vm);
2243 Instruction* pc = bytecodePC;
// Operand layout: pc[1] = scope register, pc[2] = identifier index,
// pc[3] = value register, pc[4] = GetPutInfo, pc[5] = watchpoint set,
// pc[6] = scope offset (for LocalClosureVar).
2245 CodeBlock* codeBlock = exec->codeBlock();
2246 const Identifier& ident = codeBlock->identifier(pc[2].u.operand);
2247 JSObject* scope = jsCast<JSObject*>(exec->uncheckedR(pc[1].u.operand).jsValue());
2248 JSValue value = exec->r(pc[3].u.operand).jsValue();
2249 GetPutInfo getPutInfo = GetPutInfo(pc[4].u.operand);
2251 // ModuleVar does not keep the scope register value alive in DFG.
2252 ASSERT(getPutInfo.resolveType() != ModuleVar);
2254 if (getPutInfo.resolveType() == LocalClosureVar) {
2255 JSLexicalEnvironment* environment = jsCast<JSLexicalEnvironment*>(scope);
2256 environment->variableAt(ScopeOffset(pc[6].u.operand)).set(vm, environment, value);
// Notify any compiled code that watched this variable for constant-ness.
2257 if (WatchpointSet* set = pc[5].u.watchpointSet)
2258 set->touch(vm, "Executed op_put_scope<LocalClosureVar>");
2262 bool hasProperty = scope->hasProperty(exec, ident);
2263 EXCEPTION_ASSERT(!throwScope.exception() || !hasProperty);
// (Leading `if (hasProperty` condition line elided in this extraction.)
2265 && scope->isGlobalLexicalEnvironment()
2266 && !isInitialization(getPutInfo.initializationMode())) {
2267 // When we can't statically prove we need a TDZ check, we must perform the check on the slow path.
2268 PropertySlot slot(scope, PropertySlot::InternalMethodType::Get);
2269 JSGlobalLexicalEnvironment::getOwnPropertySlot(scope, exec, ident, slot);
2270 if (slot.getValue(exec, ident) == jsTDZValue()) {
2271 throwException(exec, throwScope, createTDZError(exec));
2276 if (getPutInfo.resolveMode() == ThrowIfNotFound && !hasProperty) {
2277 throwException(exec, throwScope, createUndefinedVariableError(exec, ident));
2281 PutPropertySlot slot(scope, codeBlock->isStrictMode(), PutPropertySlot::UnknownContext, isInitialization(getPutInfo.initializationMode()));
2282 scope->methodTable(vm)->put(scope, exec, ident, value, slot);
2284 RETURN_IF_EXCEPTION(throwScope, void());
2286 CommonSlowPaths::tryCachePutToScopeGlobal(exec, codeBlock, pc, scope, getPutInfo, slot, ident);
// JIT slow-path for `throw`: records the exception in the VM, then unwinds.
// Does not return a value — the handler PC and catch frame are communicated
// out-of-band (see comment below), and the JIT jumps to them.
2289 void JIT_OPERATION operationThrow(ExecState* exec, EncodedJSValue encodedExceptionValue)
2291 VM* vm = &exec->vm();
2292 NativeCallFrameTracer tracer(vm, exec);
2293 auto scope = DECLARE_THROW_SCOPE(*vm);
2295 JSValue exceptionValue = JSValue::decode(encodedExceptionValue);
2296 throwException(exec, scope, exceptionValue);
2298 // Results stored out-of-band in vm.targetMachinePCForThrow & vm.callFrameForCatch
2299 genericUnwind(vm, exec);
// Allocates the first out-of-line property storage for an object that has
// none yet (capacity 0 → initialOutOfLineCapacity). nukeStructureAndSetButterfly
// installs the new butterfly; returns it for the JIT to stash.
2302 char* JIT_OPERATION operationReallocateButterflyToHavePropertyStorageWithInitialCapacity(ExecState* exec, JSObject* object)
2304 VM& vm = exec->vm();
2305 NativeCallFrameTracer tracer(&vm, exec);
2307 ASSERT(!object->structure()->outOfLineCapacity());
2308 Butterfly* result = object->allocateMoreOutOfLineStorage(vm, 0, initialOutOfLineCapacity);
2309 object->nukeStructureAndSetButterfly(vm, object->structureID(), result, object->indexingType());
2310 return reinterpret_cast<char*>(result);
// Grows existing out-of-line property storage to |newSize| slots; otherwise
// identical to the initial-capacity variant above.
2313 char* JIT_OPERATION operationReallocateButterflyToGrowPropertyStorage(ExecState* exec, JSObject* object, size_t newSize)
2315 VM& vm = exec->vm();
2316 NativeCallFrameTracer tracer(&vm, exec);
2318 Butterfly* result = object->allocateMoreOutOfLineStorage(vm, object->structure()->outOfLineCapacity(), newSize);
2319 object->nukeStructureAndSetButterfly(vm, object->structureID(), result, object->indexingType());
2320 return reinterpret_cast<char*>(result);
// GC write barrier invoked around OSR transitions.
2323 void JIT_OPERATION operationOSRWriteBarrier(ExecState* exec, JSCell* cell)
2325 VM* vm = &exec->vm();
2326 NativeCallFrameTracer tracer(vm, exec);
2327 vm->heap.writeBarrier(cell);
// Out-of-line continuation of the inlined fast-path write barrier.
2330 void JIT_OPERATION operationWriteBarrierSlowPath(ExecState* exec, JSCell* cell)
2332 VM* vm = &exec->vm();
2333 NativeCallFrameTracer tracer(vm, exec);
2334 vm->heap.writeBarrierSlowPath(cell);
// Unwinds from the current frame to find the handler for a pending
// exception; asserts a handler PC was produced (stored in the VM).
2337 void JIT_OPERATION lookupExceptionHandler(VM* vm, ExecState* exec)
2339 NativeCallFrameTracer tracer(vm, exec);
2340 genericUnwind(vm, exec);
2341 ASSERT(vm->targetMachinePCForThrow);
// Same, but starts the search in the caller's frame (used when the throwing
// frame itself must be skipped); updates vm->topCallFrame accordingly.
2344 void JIT_OPERATION lookupExceptionHandlerFromCallerFrame(VM* vm, ExecState* exec)
2346 vm->topCallFrame = exec->callerFrame();
2347 genericUnwind(vm, exec, UnwindFromCallerFrame);
2348 ASSERT(vm->targetMachinePCForThrow);
// Entry used by JIT exception checks: unwind to the handler for the
// already-pending VM exception.
2351 void JIT_OPERATION operationVMHandleException(ExecState* exec)
2353 VM* vm = &exec->vm();
2354 NativeCallFrameTracer tracer(vm, exec);
2355 genericUnwind(vm, exec);
2358 // This function "should" just take the ExecState*, but doing so would make it more difficult
2359 // to call from exception check sites. So, unlike all of our other functions, we allow
2360 // ourselves to play some gnarly ABI tricks just to simplify the calling convention. This is
2361 // particularly safe here since this is never called on the critical path - it's only for
// Testing hook: injects artificial exceptions at JIT exception-check sites
// (exception fuzzing). Compiled to a no-op body on non-GCC/Clang toolchains
// since it relies on __builtin_return_address.
2363 void JIT_OPERATION operationExceptionFuzz(ExecState* exec)
2365 VM* vm = &exec->vm();
2366 NativeCallFrameTracer tracer(vm, exec);
2367 auto scope = DECLARE_THROW_SCOPE(*vm);
2368 UNUSED_PARAM(scope);
2369 #if COMPILER(GCC_OR_CLANG)
// Identify the call site so fuzzing can target specific checks.
2370 void* returnPC = __builtin_return_address(0);
2371 doExceptionFuzzing(exec, scope, "JITOperations", returnPC);
2372 #endif // COMPILER(GCC_OR_CLANG)
// for-in support: tests whether |propertyName| (a JSString) exists anywhere
// on the base. undefined/null bases answer false rather than throwing here.
2375 EncodedJSValue JIT_OPERATION operationHasGenericProperty(ExecState* exec, EncodedJSValue encodedBaseValue, JSCell* propertyName)
2377 VM& vm = exec->vm();
2378 NativeCallFrameTracer tracer(&vm, exec);
2379 JSValue baseValue = JSValue::decode(encodedBaseValue);
2380 if (baseValue.isUndefinedOrNull())
2381 return JSValue::encode(jsBoolean(false));
2383 JSObject* base = baseValue.toObject(exec);
// Exception from toObject → encoded empty JSValue (guard line elided in
// this extraction).
2385 return JSValue::encode(JSValue());
2386 return JSValue::encode(jsBoolean(base->hasPropertyGeneric(exec, asString(propertyName)->toIdentifier(exec), PropertySlot::InternalMethodType::GetOwnProperty)));
// for-in support: builds (or fetches) the property-name enumerator for a
// cell base, after converting it to an object.
2389 JSCell* JIT_OPERATION operationGetPropertyEnumeratorCell(ExecState* exec, JSCell* cell)
2391 VM& vm = exec->vm();
2392 NativeCallFrameTracer tracer(&vm, exec);
2393 auto scope = DECLARE_THROW_SCOPE(vm);
2395 JSObject* base = cell->toObject(exec, exec->lexicalGlobalObject());
2396 RETURN_IF_EXCEPTION(scope, { });
2399 return propertyNameEnumerator(exec, base);
// for-in support: same as above for an arbitrary JSValue base; undefined/
// null yields an empty enumerator instead of throwing.
2402 JSCell* JIT_OPERATION operationGetPropertyEnumerator(ExecState* exec, EncodedJSValue encodedBase)
2404 VM& vm = exec->vm();
2405 NativeCallFrameTracer tracer(&vm, exec);
2406 auto scope = DECLARE_THROW_SCOPE(vm);
2408 JSValue base = JSValue::decode(encodedBase);
2409 if (base.isUndefinedOrNull())
2410 return JSPropertyNameEnumerator::create(vm);
2412 JSObject* baseObject = base.toObject(exec);
2413 RETURN_IF_EXCEPTION(scope, { });
2416 return propertyNameEnumerator(exec, baseObject);
// for-in support: fetches the property name at |index| from an enumerator;
// null signals exhaustion to the caller.
2419 EncodedJSValue JIT_OPERATION operationNextEnumeratorPname(ExecState* exec, JSCell* enumeratorCell, int32_t index)
2421 VM& vm = exec->vm();
2422 NativeCallFrameTracer tracer(&vm, exec);
2423 JSPropertyNameEnumerator* enumerator = jsCast<JSPropertyNameEnumerator*>(enumeratorCell);
2424 JSString* propertyName = enumerator->propertyNameAtIndex(index);
2425 return JSValue::encode(propertyName ? propertyName : jsNull());
// for-in support: converts an int32 index to its JSString representation
// (e.g. 3 -> "3") via the Identifier number-to-string machinery.
2428 JSCell* JIT_OPERATION operationToIndexString(ExecState* exec, int32_t index)
2430 VM& vm = exec->vm();
2431 NativeCallFrameTracer tracer(&vm, exec);
2432 return jsString(exec, Identifier::from(exec, index).string());
// Shared body for the non-profiling add operations: decode both operands and
// run the generic jsAdd (handles string concat, numeric add, toPrimitive).
2435 ALWAYS_INLINE static EncodedJSValue unprofiledAdd(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
2437 VM* vm = &exec->vm();
2438 NativeCallFrameTracer tracer(vm, exec);
2440 JSValue op1 = JSValue::decode(encodedOp1);
2441 JSValue op2 = JSValue::decode(encodedOp2);
2443 return JSValue::encode(jsAdd(exec, op1, op2));
// Shared body for the profiling add operations: additionally records operand
// and result types in the ArithProfile so tiers above can specialize.
2446 ALWAYS_INLINE static EncodedJSValue profiledAdd(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, ArithProfile& arithProfile)
2448 VM* vm = &exec->vm();
2449 NativeCallFrameTracer tracer(vm, exec);
2451 JSValue op1 = JSValue::decode(encodedOp1);
2452 JSValue op2 = JSValue::decode(encodedOp2);
2454 arithProfile.observeLHSAndRHS(op1, op2);
2455 JSValue result = jsAdd(exec, op1, op2);
2456 arithProfile.observeResult(result);
2458 return JSValue::encode(result);
// op_add slow path without profiling.
2461 EncodedJSValue JIT_OPERATION operationValueAdd(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
2463 return unprofiledAdd(exec, encodedOp1, encodedOp2);
// op_add slow path with an explicit ArithProfile to feed.
2466 EncodedJSValue JIT_OPERATION operationValueAddProfiled(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, ArithProfile* arithProfile)
2468 ASSERT(arithProfile);
2469 return profiledAdd(exec, encodedOp1, encodedOp2, *arithProfile);
// op_add slow path behind a JITAddIC, profiled flavor: records operand types,
// asks the IC to generate its out-of-line fast path (future calls will use
// the repatched code; the no-optimize variant is the fallback target), then
// computes this add generically and profiles the result.
2472 EncodedJSValue JIT_OPERATION operationValueAddProfiledOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITAddIC* addIC)
2474 VM* vm = &exec->vm();
2475 NativeCallFrameTracer tracer(vm, exec);
2477 JSValue op1 = JSValue::decode(encodedOp1);
2478 JSValue op2 = JSValue::decode(encodedOp2);
2480 ArithProfile* arithProfile = addIC->arithProfile();
2481 ASSERT(arithProfile);
2482 arithProfile->observeLHSAndRHS(op1, op2);
2483 auto nonOptimizeVariant = operationValueAddProfiledNoOptimize;
2484 addIC->generateOutOfLine(exec->codeBlock(), nonOptimizeVariant);
2486 #if ENABLE(MATH_IC_STATS)
2487 exec->codeBlock()->dumpMathICStats();
// (#endif elided in this extraction.)
2490 JSValue result = jsAdd(exec, op1, op2);
2491 arithProfile->observeResult(result);
2493 return JSValue::encode(result);
// Fallback for the profiled add IC once regeneration is off the table:
// plain profiled add, no repatching.
2496 EncodedJSValue JIT_OPERATION operationValueAddProfiledNoOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITAddIC* addIC)
2498 VM* vm = &exec->vm();
2499 NativeCallFrameTracer tracer(vm, exec);
2501 ArithProfile* arithProfile = addIC->arithProfile();
2502 ASSERT(arithProfile);
2503 return profiledAdd(exec, encodedOp1, encodedOp2, *arithProfile);
// Unprofiled counterpart of operationValueAddProfiledOptimize: the IC's
// ArithProfile is optional here (observed only if present).
2506 EncodedJSValue JIT_OPERATION operationValueAddOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITAddIC* addIC)
2508 VM* vm = &exec->vm();
2509 NativeCallFrameTracer tracer(vm, exec);
2511 JSValue op1 = JSValue::decode(encodedOp1);
2512 JSValue op2 = JSValue::decode(encodedOp2);
2514 auto nonOptimizeVariant = operationValueAddNoOptimize;
2515 if (ArithProfile* arithProfile = addIC->arithProfile())
2516 arithProfile->observeLHSAndRHS(op1, op2);
2517 addIC->generateOutOfLine(exec->codeBlock(), nonOptimizeVariant);
2519 #if ENABLE(MATH_IC_STATS)
2520 exec->codeBlock()->dumpMathICStats();
// (#endif elided in this extraction.)
2523 return JSValue::encode(jsAdd(exec, op1, op2));
// Fallback for the unprofiled add IC: plain generic add. The IC parameter is
// unused but kept so all variants share one signature shape.
2526 EncodedJSValue JIT_OPERATION operationValueAddNoOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITAddIC*)
2528 VM* vm = &exec->vm();
2529 NativeCallFrameTracer tracer(vm, exec);
2531 JSValue op1 = JSValue::decode(encodedOp1);
2532 JSValue op2 = JSValue::decode(encodedOp2);
2534 JSValue result = jsAdd(exec, op1, op2);
2536 return JSValue::encode(result);
2539 ALWAYS_INLINE static EncodedJSValue unprofiledMul(VM& vm, ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
2541 auto scope = DECLARE_THROW_SCOPE(vm);
2542 JSValue op1 = JSValue::decode(encodedOp1);
2543 JSValue op2 = JSValue::decode(encodedOp2);
2545 double a = op1.toNumber(exec);
2546 RETURN_IF_EXCEPTION(scope, encodedJSValue());
2548 double b = op2.toNumber(exec);
2549 return JSValue::encode(jsNumber(a * b));
2552 ALWAYS_INLINE static EncodedJSValue profiledMul(VM& vm, ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, ArithProfile& arithProfile, bool shouldObserveLHSAndRHSTypes = true)
2554 auto scope = DECLARE_THROW_SCOPE(vm);
2555 JSValue op1 = JSValue::decode(encodedOp1);
2556 JSValue op2 = JSValue::decode(encodedOp2);
2558 if (shouldObserveLHSAndRHSTypes)
2559 arithProfile.observeLHSAndRHS(op1, op2);
2561 double a = op1.toNumber(exec);
2562 RETURN_IF_EXCEPTION(scope, encodedJSValue());
2563 double b = op2.toNumber(exec);
2564 RETURN_IF_EXCEPTION(scope, encodedJSValue());
2566 JSValue result = jsNumber(a * b);
2567 arithProfile.observeResult(result);
2568 return JSValue::encode(result);
2571 EncodedJSValue JIT_OPERATION operationValueMul(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
2573 VM* vm = &exec->vm();
2574 NativeCallFrameTracer tracer(vm, exec);
2576 return unprofiledMul(*vm, exec, encodedOp1, encodedOp2);
2579 EncodedJSValue JIT_OPERATION operationValueMulNoOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITMulIC*)
2581 VM* vm = &exec->vm();
2582 NativeCallFrameTracer tracer(vm, exec);
2584 return unprofiledMul(*vm, exec, encodedOp1, encodedOp2);
2587 EncodedJSValue JIT_OPERATION operationValueMulOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITMulIC* mulIC)
2589 VM* vm = &exec->vm();
2590 NativeCallFrameTracer tracer(vm, exec);
2592 auto nonOptimizeVariant = operationValueMulNoOptimize;
2593 if (ArithProfile* arithProfile = mulIC->arithProfile())
2594 arithProfile->observeLHSAndRHS(JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
2595 mulIC->generateOutOfLine(exec->codeBlock(), nonOptimizeVariant);
2597 #if ENABLE(MATH_IC_STATS)
2598 exec->codeBlock()->dumpMathICStats();
2601 return unprofiledMul(*vm, exec, encodedOp1, encodedOp2);
2604 EncodedJSValue JIT_OPERATION operationValueMulProfiled(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, ArithProfile* arithProfile)
2606 VM* vm = &exec->vm();
2607 NativeCallFrameTracer tracer(vm, exec);
2609 ASSERT(arithProfile);
2610 return profiledMul(*vm, exec, encodedOp1, encodedOp2, *arithProfile);
2613 EncodedJSValue JIT_OPERATION operationValueMulProfiledOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITMulIC* mulIC)
2615 VM* vm = &exec->vm();
2616 NativeCallFrameTracer tracer(vm, exec);
2618 ArithProfile* arithProfile = mulIC->arithProfile();
2619 ASSERT(arithProfile);
2620 arithProfile->observeLHSAndRHS(JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
2621 auto nonOptimizeVariant = operationValueMulProfiledNoOptimize;
2622 mulIC->generateOutOfLine(exec->codeBlock(), nonOptimizeVariant);
2624 #if ENABLE(MATH_IC_STATS)
2625 exec->codeBlock()->dumpMathICStats();
2628 return profiledMul(*vm, exec, encodedOp1, encodedOp2, *arithProfile, false);
2631 EncodedJSValue JIT_OPERATION operationValueMulProfiledNoOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITMulIC* mulIC)
2633 VM* vm = &exec->vm();
2634 NativeCallFrameTracer tracer(vm, exec);
2636 ArithProfile* arithProfile = mulIC->arithProfile();
2637 ASSERT(arithProfile);
2638 return profiledMul(*vm, exec, encodedOp1, encodedOp2, *arithProfile);
2641 ALWAYS_INLINE static EncodedJSValue unprofiledNegate(ExecState* exec, EncodedJSValue encodedOperand)
2643 VM& vm = exec->vm();
2644 auto scope = DECLARE_THROW_SCOPE(vm);
2645 NativeCallFrameTracer tracer(&vm, exec);
2647 JSValue operand = JSValue::decode(encodedOperand);
2648 double number = operand.toNumber(exec);
2649 RETURN_IF_EXCEPTION(scope, encodedJSValue());
2650 return JSValue::encode(jsNumber(-number));
2653 ALWAYS_INLINE static EncodedJSValue profiledNegate(ExecState* exec, EncodedJSValue encodedOperand, ArithProfile& arithProfile)
2655 VM& vm = exec->vm();
2656 auto scope = DECLARE_THROW_SCOPE(vm);
2657 NativeCallFrameTracer tracer(&vm, exec);
2659 JSValue operand = JSValue::decode(encodedOperand);
2660 arithProfile.observeLHS(operand);
2661 double number = operand.toNumber(exec);
2662 RETURN_IF_EXCEPTION(scope, encodedJSValue());
2664 JSValue result = jsNumber(-number);
2665 arithProfile.observeResult(result);
2666 return JSValue::encode(result);
// JIT slow-path entry for arithmetic negation without profiling.
EncodedJSValue JIT_OPERATION operationArithNegate(ExecState* exec, EncodedJSValue operand)
{
    return unprofiledNegate(exec, operand);
}
// JIT slow-path entry for arithmetic negation that records operand and
// result types in the caller-supplied ArithProfile (must be non-null).
EncodedJSValue JIT_OPERATION operationArithNegateProfiled(ExecState* exec, EncodedJSValue operand, ArithProfile* arithProfile)
{
    ASSERT(arithProfile);
    return profiledNegate(exec, operand, *arithProfile);
}
2680 EncodedJSValue JIT_OPERATION operationArithNegateProfiledOptimize(ExecState* exec, EncodedJSValue encodedOperand, JITNegIC* negIC)
2682 VM& vm = exec->vm();
2683 auto scope = DECLARE_THROW_SCOPE(vm);
2684 NativeCallFrameTracer tracer(&vm, exec);
2686 JSValue operand = JSValue::decode(encodedOperand);
2688 ArithProfile* arithProfile = negIC->arithProfile();
2689 ASSERT(arithProfile);
2690 arithProfile->observeLHS(operand);
2691 negIC->generateOutOfLine(exec->codeBlock(), operationArithNegateProfiled);
2693 #if ENABLE(MATH_IC_STATS)
2694 exec->codeBlock()->dumpMathICStats();
2697 double number = operand.toNumber(exec);
2698 RETURN_IF_EXCEPTION(scope, encodedJSValue());
2699 JSValue result = jsNumber(-number);
2700 arithProfile->observeResult(result);
2701 return JSValue::encode(result);
2704 EncodedJSValue JIT_OPERATION operationArithNegateOptimize(ExecState* exec, EncodedJSValue encodedOperand, JITNegIC* negIC)
2706 VM& vm = exec->vm();
2707 auto scope = DECLARE_THROW_SCOPE(vm);
2708 NativeCallFrameTracer tracer(&vm, exec);
2710 JSValue operand = JSValue::decode(encodedOperand);
2712 if (ArithProfile* arithProfile = negIC->arithProfile())
2713 arithProfile->observeLHS(operand);
2714 negIC->generateOutOfLine(exec->codeBlock(), operationArithNegate);
2716 #if ENABLE(MATH_IC_STATS)
2717 exec->codeBlock()->dumpMathICStats();
2720 double number = operand.toNumber(exec);
2721 RETURN_IF_EXCEPTION(scope, encodedJSValue());
2722 return JSValue::encode(jsNumber(-number));
2725 ALWAYS_INLINE static EncodedJSValue unprofiledSub(VM& vm, ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
2727 auto scope = DECLARE_THROW_SCOPE(vm);
2728 JSValue op1 = JSValue::decode(encodedOp1);
2729 JSValue op2 = JSValue::decode(encodedOp2);
2731 double a = op1.toNumber(exec);
2732 RETURN_IF_EXCEPTION(scope, encodedJSValue());
2734 double b = op2.toNumber(exec);
2735 return JSValue::encode(jsNumber(a - b));
2738 ALWAYS_INLINE static EncodedJSValue profiledSub(VM& vm, ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, ArithProfile& arithProfile, bool shouldObserveLHSAndRHSTypes = true)
2740 auto scope = DECLARE_THROW_SCOPE(vm);
2741 JSValue op1 = JSValue::decode(encodedOp1);
2742 JSValue op2 = JSValue::decode(encodedOp2);
2744 if (shouldObserveLHSAndRHSTypes)
2745 arithProfile.observeLHSAndRHS(op1, op2);
2747 double a = op1.toNumber(exec);
2748 RETURN_IF_EXCEPTION(scope, encodedJSValue());
2749 double b = op2.toNumber(exec);
2750 RETURN_IF_EXCEPTION(scope, encodedJSValue());
2752 JSValue result = jsNumber(a - b);
2753 arithProfile.observeResult(result);
2754 return JSValue::encode(result);
2757 EncodedJSValue JIT_OPERATION operationValueSub(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
2759 VM* vm = &exec->vm();
2760 NativeCallFrameTracer tracer(vm, exec);
2761 return unprofiledSub(*vm, exec, encodedOp1, encodedOp2);
2764 EncodedJSValue JIT_OPERATION operationValueSubProfiled(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, ArithProfile* arithProfile)
2766 ASSERT(arithProfile);
2768 VM* vm = &exec->vm();
2769 NativeCallFrameTracer tracer(vm, exec);
2771 return profiledSub(*vm, exec, encodedOp1, encodedOp2, *arithProfile);
2774 EncodedJSValue JIT_OPERATION operationValueSubOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITSubIC* subIC)
2776 VM* vm = &exec->vm();
2777 NativeCallFrameTracer tracer(vm, exec);
2779 auto nonOptimizeVariant = operationValueSubNoOptimize;
2780 if (ArithProfile* arithProfile = subIC->arithProfile())
2781 arithProfile->observeLHSAndRHS(JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
2782 subIC->generateOutOfLine(exec->codeBlock(), nonOptimizeVariant);
2784 #if ENABLE(MATH_IC_STATS)
2785 exec->codeBlock()->dumpMathICStats();
2788 return unprofiledSub(*vm, exec, encodedOp1, encodedOp2);
2791 EncodedJSValue JIT_OPERATION operationValueSubNoOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITSubIC*)
2793 VM* vm = &exec->vm();
2794 NativeCallFrameTracer tracer(vm, exec);
2796 return unprofiledSub(*vm, exec, encodedOp1, encodedOp2);
2799 EncodedJSValue JIT_OPERATION operationValueSubProfiledOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITSubIC* subIC)
2801 VM* vm = &exec->vm();
2802 NativeCallFrameTracer tracer(vm, exec);
2804 ArithProfile* arithProfile = subIC->arithProfile();
2805 ASSERT(arithProfile);
2806 arithProfile->observeLHSAndRHS(JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
2807 auto nonOptimizeVariant = operationValueSubProfiledNoOptimize;
2808 subIC->generateOutOfLine(exec->codeBlock(), nonOptimizeVariant);
2810 #if ENABLE(MATH_IC_STATS)
2811 exec->codeBlock()->dumpMathICStats();
2814 return profiledSub(*vm, exec, encodedOp1, encodedOp2, *arithProfile, false);
2817 EncodedJSValue JIT_OPERATION operationValueSubProfiledNoOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITSubIC* subIC)
2819 VM* vm = &exec->vm();
2820 NativeCallFrameTracer tracer(vm, exec);
2822 ArithProfile* arithProfile = subIC->arithProfile();
2823 ASSERT(arithProfile);
2824 return profiledSub(*vm, exec, encodedOp1, encodedOp2, *arithProfile);
2827 void JIT_OPERATION operationProcessTypeProfilerLog(ExecState* exec)
2829 VM& vm = exec->vm();
2830 NativeCallFrameTracer tracer(&vm, exec);
2831 vm.typeProfilerLog()->processLogEntries(ASCIILiteral("Log Full, called from inside baseline JIT"));
2834 void JIT_OPERATION operationProcessShadowChickenLog(ExecState* exec)
2836 VM& vm = exec->vm();
2837 NativeCallFrameTracer tracer(&vm, exec);
2838 vm.shadowChicken().update(vm, exec);
2841 int32_t JIT_OPERATION operationCheckIfExceptionIsUncatchableAndNotifyProfiler(ExecState* exec)
2843 VM& vm = exec->vm();
2844 NativeCallFrameTracer tracer(&vm, exec);
2845 auto scope = DECLARE_THROW_SCOPE(vm);
2846 RELEASE_ASSERT(!!scope.exception());
2848 if (isTerminatedExecutionException(vm, scope.exception())) {
2849 genericUnwind(&vm, exec);
2857 // Note: getHostCallReturnValueWithExecState() needs to be placed before the
2858 // definition of getHostCallReturnValue() below because the Windows build
2860 extern "C" EncodedJSValue HOST_CALL_RETURN_VALUE_OPTION getHostCallReturnValueWithExecState(ExecState* exec)
2863 return JSValue::encode(JSValue());
2864 return JSValue::encode(exec->vm().hostCallReturnValue);
2867 #if COMPILER(GCC_OR_CLANG) && CPU(X86_64)
2869 ".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
2870 HIDE_SYMBOL(getHostCallReturnValue) "\n"
2871 SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
2872 "lea -8(%rsp), %rdi\n"
2873 "jmp " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
2876 #elif COMPILER(GCC_OR_CLANG) && CPU(X86)
2879 ".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
2880 HIDE_SYMBOL(getHostCallReturnValue) "\n"
2881 SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
2884 "leal -4(%esp), %esp\n"
2886 "call " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
2887 "leal 8(%esp), %esp\n"
2892 #elif COMPILER(GCC_OR_CLANG) && CPU(ARM_THUMB2)
2896 ".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
2897 HIDE_SYMBOL(getHostCallReturnValue) "\n"
2899 ".thumb_func " THUMB_FUNC_PARAM(getHostCallReturnValue) "\n"
2900 SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
2901 "sub r0, sp, #8" "\n"
2902 "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
2905 #elif COMPILER(GCC_OR_CLANG) && CPU(ARM_TRADITIONAL)
2908 ".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
2909 HIDE_SYMBOL(getHostCallReturnValue) "\n"
2910 INLINE_ARM_FUNCTION(getHostCallReturnValue)
2911 SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
2912 "sub r0, sp, #8" "\n"
2913 "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
2920 ".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
2921 HIDE_SYMBOL(getHostCallReturnValue) "\n"
2922 SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
2923 "sub x0, sp, #16" "\n"
2924 "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
2927 #elif COMPILER(GCC_OR_CLANG) && CPU(MIPS)
2930 #define LOAD_FUNCTION_TO_T9(function) \
2931 ".set noreorder" "\n" \
2932 ".cpload $25" "\n" \
2933 ".set reorder" "\n" \
2934 "la $t9, " LOCAL_REFERENCE(function) "\n"
2936 #define LOAD_FUNCTION_TO_T9(function) "" "\n"
2941 ".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
2942 HIDE_SYMBOL(getHostCallReturnValue) "\n"
2943 SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
2944 LOAD_FUNCTION_TO_T9(getHostCallReturnValueWithExecState)
2945 "addi $a0, $sp, -8" "\n"
2946 "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
2949 #elif COMPILER(MSVC) && CPU(X86)
2951 __declspec(naked) EncodedJSValue HOST_CALL_RETURN_VALUE_OPTION getHostCallReturnValue()
2953 __asm lea eax, [esp - 4]
2954 __asm mov [esp + 4], eax;
2955 __asm jmp getHostCallReturnValueWithExecState
2962 #endif // ENABLE(JIT)