/*
 * Copyright (C) 2013-2015 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
#include "config.h"
#include "JITOperations.h"

#if ENABLE(JIT)

#include "ArrayConstructor.h"
#include "DFGCompilationMode.h"
#include "DFGDriver.h"
#include "DFGOSREntry.h"
#include "DFGThunks.h"
#include "DFGWorklist.h"
#include "Error.h"
#include "ErrorHandlingScope.h"
#include "ExceptionFuzz.h"
#include "GetterSetter.h"
#include "HostCallReturnValue.h"
#include "JIT.h"
#include "JITToDFGDeferredCompilationCallback.h"
#include "JSCInlines.h"
#include "JSCatchScope.h"
#include "JSFunctionNameScope.h"
#include "JSGlobalObjectFunctions.h"
#include "JSLexicalEnvironment.h"
#include "JSNameScope.h"
#include "JSPropertyNameEnumerator.h"
#include "JSStackInlines.h"
#include "JSWithScope.h"
#include "LegacyProfiler.h"
#include "ObjectConstructor.h"
#include "PropertyName.h"
#include "Repatch.h"
#include "RepatchBuffer.h"
#include "TestRunnerUtils.h"
#include "TypeProfilerLog.h"
#include <wtf/InlineASM.h>

namespace JSC {
template<typename ScopeType>
void pushNameScope(ExecState* exec, int32_t dst, SymbolTable* symbolTable, EncodedJSValue encodedValue)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    ASSERT(!JITCode::isOptimizingJIT(exec->codeBlock()->jitType()));

    // FIXME: This won't work if this operation is called from the DFG or FTL.
    // This should be changed to pass in the new scope.
    JSScope* currentScope = exec->uncheckedR(dst).Register::scope();
    JSNameScope* scope = ScopeType::create(vm, exec->lexicalGlobalObject(), currentScope, symbolTable, JSValue::decode(encodedValue));

    // FIXME: This won't work if this operation is called from the DFG or FTL.
    // This should be changed to return the new scope.
    exec->uncheckedR(dst) = scope;
}

#if COMPILER(MSVC)
void * _ReturnAddress(void);
#pragma intrinsic(_ReturnAddress)

#define OUR_RETURN_ADDRESS _ReturnAddress()
#else
#define OUR_RETURN_ADDRESS __builtin_return_address(0)
#endif
#if ENABLE(OPCODE_SAMPLING)
#define CTI_SAMPLER vm->interpreter->sampler()
#else
#define CTI_SAMPLER 0
#endif
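// The operations below are the C++ slow paths that JIT-compiled code calls back into via the
// JIT_OPERATION convention. Each one establishes a NativeCallFrameTracer (or the WithRestore
// variant) so the VM can see the current call frame before doing anything that may GC or throw.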
void JIT_OPERATION operationThrowStackOverflowError(ExecState* exec, CodeBlock* codeBlock)
{
    // We pass in our own code block, because the callframe hasn't been populated.
    VM* vm = codeBlock->vm();

    VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
    CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);

    NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
    ErrorHandlingScope errorScope(*vm);
    vm->throwException(callerFrame, createStackOverflowError(callerFrame));
}
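// The arity-check operations return the number of missing arguments that the caller must fill
// in before entering the callee. A negative result means the fixup could not be accommodated,
// and a stack overflow error has already been thrown against the caller's frame.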
int32_t JIT_OPERATION operationCallArityCheck(ExecState* exec)
{
    VM* vm = &exec->vm();
    VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
    CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);

    JSStack& stack = vm->interpreter->stack();

    int32_t missingArgCount = CommonSlowPaths::arityCheckFor(exec, &stack, CodeForCall);
    if (missingArgCount < 0) {
        NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
        throwStackOverflowError(callerFrame);
    }

    return missingArgCount;
}
int32_t JIT_OPERATION operationConstructArityCheck(ExecState* exec)
{
    VM* vm = &exec->vm();
    VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
    CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);

    JSStack& stack = vm->interpreter->stack();

    int32_t missingArgCount = CommonSlowPaths::arityCheckFor(exec, &stack, CodeForConstruct);
    if (missingArgCount < 0) {
        NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
        throwStackOverflowError(callerFrame);
    }

    return missingArgCount;
}
EncodedJSValue JIT_OPERATION operationGetById(ExecState* exec, StructureStubInfo*, EncodedJSValue base, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    JSValue baseValue = JSValue::decode(base);
    PropertySlot slot(baseValue);
    Identifier ident = Identifier::fromUid(vm, uid);
    return JSValue::encode(baseValue.get(exec, ident, slot));
}
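// The get_by_id slow paths come in three tiers: operationGetById is the fully generic case,
// operationGetByIdOptimize tries to repatch the inline cache for the structure it just saw,
// and operationGetByIdBuildList grows a polymorphic access list when a single repatch is not enough.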
EncodedJSValue JIT_OPERATION operationGetByIdBuildList(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    Identifier ident = Identifier::fromUid(vm, uid);
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue baseValue = JSValue::decode(base);
    PropertySlot slot(baseValue);
    bool hasResult = baseValue.getPropertySlot(exec, ident, slot);

    if (accessType == static_cast<AccessType>(stubInfo->accessType))
        buildGetByIDList(exec, baseValue, ident, slot, *stubInfo);

    return JSValue::encode(hasResult ? slot.getValue(exec, ident) : jsUndefined());
}
EncodedJSValue JIT_OPERATION operationGetByIdOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    Identifier ident = Identifier::fromUid(vm, uid);

    JSValue baseValue = JSValue::decode(base);
    PropertySlot slot(baseValue);

    bool hasResult = baseValue.getPropertySlot(exec, ident, slot);
    if (stubInfo->seen)
        repatchGetByID(exec, baseValue, ident, slot, *stubInfo);
    else
        stubInfo->seen = true;

    return JSValue::encode(hasResult ? slot.getValue(exec, ident) : jsUndefined());
}
EncodedJSValue JIT_OPERATION operationInOptimize(ExecState* exec, StructureStubInfo* stubInfo, JSCell* base, UniquedStringImpl* key)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    if (!base->isObject()) {
        vm->throwException(exec, createInvalidInParameterError(exec, base));
        return JSValue::encode(jsUndefined());
    }

    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    Identifier ident = Identifier::fromUid(vm, key);
    PropertySlot slot(base);
    bool result = asObject(base)->getPropertySlot(exec, ident, slot);

    RELEASE_ASSERT(accessType == stubInfo->accessType);

    if (stubInfo->seen)
        repatchIn(exec, base, ident, result, slot, *stubInfo);
    else
        stubInfo->seen = true;

    return JSValue::encode(jsBoolean(result));
}
EncodedJSValue JIT_OPERATION operationIn(ExecState* exec, StructureStubInfo*, JSCell* base, UniquedStringImpl* key)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    if (!base->isObject()) {
        vm->throwException(exec, createInvalidInParameterError(exec, base));
        return JSValue::encode(jsUndefined());
    }

    Identifier ident = Identifier::fromUid(vm, key);
    return JSValue::encode(jsBoolean(asObject(base)->hasProperty(exec, ident)));
}
EncodedJSValue JIT_OPERATION operationGenericIn(ExecState* exec, JSCell* base, EncodedJSValue key)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    return JSValue::encode(jsBoolean(CommonSlowPaths::opIn(exec, JSValue::decode(key), base)));
}
void JIT_OPERATION operationPutByIdStrict(ExecState* exec, StructureStubInfo*, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    Identifier ident = Identifier::fromUid(vm, uid);
    PutPropertySlot slot(JSValue::decode(encodedBase), true, exec->codeBlock()->putByIdContext());
    JSValue::decode(encodedBase).put(exec, ident, JSValue::decode(encodedValue), slot);
}

void JIT_OPERATION operationPutByIdNonStrict(ExecState* exec, StructureStubInfo*, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    Identifier ident = Identifier::fromUid(vm, uid);
    PutPropertySlot slot(JSValue::decode(encodedBase), false, exec->codeBlock()->putByIdContext());
    JSValue::decode(encodedBase).put(exec, ident, JSValue::decode(encodedValue), slot);
}

void JIT_OPERATION operationPutByIdDirectStrict(ExecState* exec, StructureStubInfo*, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    Identifier ident = Identifier::fromUid(vm, uid);
    PutPropertySlot slot(JSValue::decode(encodedBase), true, exec->codeBlock()->putByIdContext());
    asObject(JSValue::decode(encodedBase))->putDirect(exec->vm(), ident, JSValue::decode(encodedValue), slot);
}

void JIT_OPERATION operationPutByIdDirectNonStrict(ExecState* exec, StructureStubInfo*, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    Identifier ident = Identifier::fromUid(vm, uid);
    PutPropertySlot slot(JSValue::decode(encodedBase), false, exec->codeBlock()->putByIdContext());
    asObject(JSValue::decode(encodedBase))->putDirect(exec->vm(), ident, JSValue::decode(encodedValue), slot);
}
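// The *Optimize and *BuildList variants below mirror the plain put_by_id operations above, but
// additionally feed the inline cache: they record the structure before the put, and only repatch
// or grow the access list if the stub's access type did not change underneath us during the put.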
void JIT_OPERATION operationPutByIdStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    Identifier ident = Identifier::fromUid(vm, uid);
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSValue baseValue = JSValue::decode(encodedBase);
    PutPropertySlot slot(baseValue, true, exec->codeBlock()->putByIdContext());

    Structure* structure = baseValue.isCell() ? baseValue.asCell()->structure(*vm) : nullptr;
    baseValue.put(exec, ident, value, slot);

    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;

    if (stubInfo->seen)
        repatchPutByID(exec, baseValue, structure, ident, slot, *stubInfo, NotDirect);
    else
        stubInfo->seen = true;
}
void JIT_OPERATION operationPutByIdNonStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    Identifier ident = Identifier::fromUid(vm, uid);
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSValue baseValue = JSValue::decode(encodedBase);
    PutPropertySlot slot(baseValue, false, exec->codeBlock()->putByIdContext());

    Structure* structure = baseValue.isCell() ? baseValue.asCell()->structure(*vm) : nullptr;
    baseValue.put(exec, ident, value, slot);

    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;

    if (stubInfo->seen)
        repatchPutByID(exec, baseValue, structure, ident, slot, *stubInfo, NotDirect);
    else
        stubInfo->seen = true;
}
void JIT_OPERATION operationPutByIdDirectStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    Identifier ident = Identifier::fromUid(vm, uid);
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSObject* baseObject = asObject(JSValue::decode(encodedBase));
    PutPropertySlot slot(baseObject, true, exec->codeBlock()->putByIdContext());

    Structure* structure = baseObject->structure(*vm);
    baseObject->putDirect(exec->vm(), ident, value, slot);

    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;

    if (stubInfo->seen)
        repatchPutByID(exec, baseObject, structure, ident, slot, *stubInfo, Direct);
    else
        stubInfo->seen = true;
}
void JIT_OPERATION operationPutByIdDirectNonStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    Identifier ident = Identifier::fromUid(vm, uid);
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSObject* baseObject = asObject(JSValue::decode(encodedBase));
    PutPropertySlot slot(baseObject, false, exec->codeBlock()->putByIdContext());

    Structure* structure = baseObject->structure(*vm);
    baseObject->putDirect(exec->vm(), ident, value, slot);

    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;

    if (stubInfo->seen)
        repatchPutByID(exec, baseObject, structure, ident, slot, *stubInfo, Direct);
    else
        stubInfo->seen = true;
}
void JIT_OPERATION operationPutByIdStrictBuildList(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    Identifier ident = Identifier::fromUid(vm, uid);
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSValue baseValue = JSValue::decode(encodedBase);
    PutPropertySlot slot(baseValue, true, exec->codeBlock()->putByIdContext());

    Structure* structure = baseValue.isCell() ? baseValue.asCell()->structure(*vm) : nullptr;
    baseValue.put(exec, ident, value, slot);

    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;

    buildPutByIdList(exec, baseValue, structure, ident, slot, *stubInfo, NotDirect);
}
void JIT_OPERATION operationPutByIdNonStrictBuildList(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    Identifier ident = Identifier::fromUid(vm, uid);
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSValue baseValue = JSValue::decode(encodedBase);
    PutPropertySlot slot(baseValue, false, exec->codeBlock()->putByIdContext());

    Structure* structure = baseValue.isCell() ? baseValue.asCell()->structure(*vm) : nullptr;
    baseValue.put(exec, ident, value, slot);

    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;

    buildPutByIdList(exec, baseValue, structure, ident, slot, *stubInfo, NotDirect);
}
void JIT_OPERATION operationPutByIdDirectStrictBuildList(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    Identifier ident = Identifier::fromUid(vm, uid);
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSObject* baseObject = asObject(JSValue::decode(encodedBase));
    PutPropertySlot slot(baseObject, true, exec->codeBlock()->putByIdContext());

    Structure* structure = baseObject->structure(*vm);
    baseObject->putDirect(*vm, ident, value, slot);

    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;

    buildPutByIdList(exec, baseObject, structure, ident, slot, *stubInfo, Direct);
}
void JIT_OPERATION operationPutByIdDirectNonStrictBuildList(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    Identifier ident = Identifier::fromUid(vm, uid);
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSObject* baseObject = asObject(JSValue::decode(encodedBase));
    PutPropertySlot slot(baseObject, false, exec->codeBlock()->putByIdContext());

    Structure* structure = baseObject->structure(*vm);
    baseObject->putDirect(*vm, ident, value, slot);

    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;

    buildPutByIdList(exec, baseObject, structure, ident, slot, *stubInfo, Direct);
}
void JIT_OPERATION operationReallocateStorageAndFinishPut(ExecState* exec, JSObject* base, Structure* structure, PropertyOffset offset, EncodedJSValue value)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    ASSERT(structure->outOfLineCapacity() > base->structure(vm)->outOfLineCapacity());
    ASSERT(!vm.heap.storageAllocator().fastPathShouldSucceed(structure->outOfLineCapacity() * sizeof(JSValue)));
    base->setStructureAndReallocateStorageIfNecessary(vm, structure);
    base->putDirect(vm, offset, JSValue::decode(value));
}
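// Shared helpers for the put_by_val slow paths: take the quick indexed-storage path when the
// subscript is a valid array index, otherwise fall back to a generic property put keyed by the
// subscript's property name.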
static void putByVal(CallFrame* callFrame, JSValue baseValue, JSValue subscript, JSValue value)
{
    VM& vm = callFrame->vm();
    if (LIKELY(subscript.isUInt32())) {
        uint32_t i = subscript.asUInt32();
        if (baseValue.isObject()) {
            JSObject* object = asObject(baseValue);
            if (object->canSetIndexQuickly(i))
                object->setIndexQuickly(callFrame->vm(), i, value);
            else
                object->methodTable(vm)->putByIndex(object, callFrame, i, value, callFrame->codeBlock()->isStrictMode());
        } else
            baseValue.putByIndex(callFrame, i, value, callFrame->codeBlock()->isStrictMode());
        return;
    }

    auto property = subscript.toPropertyKey(callFrame);
    if (!callFrame->vm().exception()) { // Don't put to an object if toString threw an exception.
        PutPropertySlot slot(baseValue, callFrame->codeBlock()->isStrictMode());
        baseValue.put(callFrame, property, value, slot);
    }
}
static void directPutByVal(CallFrame* callFrame, JSObject* baseObject, JSValue subscript, JSValue value)
{
    bool isStrictMode = callFrame->codeBlock()->isStrictMode();
    if (LIKELY(subscript.isUInt32())) {
        // Despite its name, JSValue::isUInt32 will return true only for positive boxed int32_t; all those values are valid array indices.
        ASSERT(isIndex(subscript.asUInt32()));
        baseObject->putDirectIndex(callFrame, subscript.asUInt32(), value, 0, isStrictMode ? PutDirectIndexShouldThrow : PutDirectIndexShouldNotThrow);
        return;
    }

    if (subscript.isDouble()) {
        double subscriptAsDouble = subscript.asDouble();
        uint32_t subscriptAsUInt32 = static_cast<uint32_t>(subscriptAsDouble);
        if (subscriptAsDouble == subscriptAsUInt32 && isIndex(subscriptAsUInt32)) {
            baseObject->putDirectIndex(callFrame, subscriptAsUInt32, value, 0, isStrictMode ? PutDirectIndexShouldThrow : PutDirectIndexShouldNotThrow);
            return;
        }
    }

    // Don't put to an object if toString threw an exception.
    auto property = subscript.toPropertyKey(callFrame);
    if (callFrame->vm().exception())
        return;

    if (Optional<uint32_t> index = parseIndex(property))
        baseObject->putDirectIndex(callFrame, index.value(), value, 0, isStrictMode ? PutDirectIndexShouldThrow : PutDirectIndexShouldNotThrow);
    else {
        PutPropertySlot slot(baseObject, isStrictMode);
        baseObject->putDirect(callFrame->vm(), property, value, slot);
    }
}
void JIT_OPERATION operationPutByVal(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    JSValue baseValue = JSValue::decode(encodedBaseValue);
    JSValue subscript = JSValue::decode(encodedSubscript);
    JSValue value = JSValue::decode(encodedValue);

    if (baseValue.isObject() && subscript.isInt32()) {
        // See if it's worth optimizing at all.
        JSObject* object = asObject(baseValue);
        bool didOptimize = false;

        unsigned bytecodeOffset = exec->locationAsBytecodeOffset();
        ASSERT(bytecodeOffset);
        ByValInfo& byValInfo = exec->codeBlock()->getByValInfo(bytecodeOffset - 1);
        ASSERT(!byValInfo.stubRoutine);

        if (hasOptimizableIndexing(object->structure(vm))) {
            // Attempt to optimize.
            JITArrayMode arrayMode = jitArrayModeForStructure(object->structure(vm));
            if (jitArrayModePermitsPut(arrayMode) && arrayMode != byValInfo.arrayMode) {
                JIT::compilePutByVal(&vm, exec->codeBlock(), &byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS), arrayMode);
                didOptimize = true;
            }
        }

        if (!didOptimize) {
            // If we take slow path more than 10 times without patching then make sure we
            // never make that mistake again. Or, if we failed to patch and we have some object
            // that intercepts indexed get, then don't even wait until 10 times. For cases
            // where we see non-index-intercepting objects, this gives 10 iterations worth of
            // opportunity for us to observe that the get_by_val may be polymorphic.
            if (++byValInfo.slowPathCount >= 10
                || object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero()) {
                // Don't ever try to optimize.
                ctiPatchCallByReturnAddress(exec->codeBlock(), ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(operationPutByValGeneric));
            }
        }
    }

    putByVal(exec, baseValue, subscript, value);
}
void JIT_OPERATION operationDirectPutByVal(ExecState* callFrame, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue)
{
    VM& vm = callFrame->vm();
    NativeCallFrameTracer tracer(&vm, callFrame);

    JSValue baseValue = JSValue::decode(encodedBaseValue);
    JSValue subscript = JSValue::decode(encodedSubscript);
    JSValue value = JSValue::decode(encodedValue);
    RELEASE_ASSERT(baseValue.isObject());
    JSObject* object = asObject(baseValue);
    if (subscript.isInt32()) {
        // See if it's worth optimizing at all.
        bool didOptimize = false;

        unsigned bytecodeOffset = callFrame->locationAsBytecodeOffset();
        ASSERT(bytecodeOffset);
        ByValInfo& byValInfo = callFrame->codeBlock()->getByValInfo(bytecodeOffset - 1);
        ASSERT(!byValInfo.stubRoutine);

        if (hasOptimizableIndexing(object->structure(vm))) {
            // Attempt to optimize.
            JITArrayMode arrayMode = jitArrayModeForStructure(object->structure(vm));
            if (jitArrayModePermitsPut(arrayMode) && arrayMode != byValInfo.arrayMode) {
                JIT::compileDirectPutByVal(&vm, callFrame->codeBlock(), &byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS), arrayMode);
                didOptimize = true;
            }
        }

        if (!didOptimize) {
            // If we take slow path more than 10 times without patching then make sure we
            // never make that mistake again. Or, if we failed to patch and we have some object
            // that intercepts indexed get, then don't even wait until 10 times. For cases
            // where we see non-index-intercepting objects, this gives 10 iterations worth of
            // opportunity for us to observe that the get_by_val may be polymorphic.
            if (++byValInfo.slowPathCount >= 10
                || object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero()) {
                // Don't ever try to optimize.
                ctiPatchCallByReturnAddress(callFrame->codeBlock(), ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(operationDirectPutByValGeneric));
            }
        }
    }

    directPutByVal(callFrame, object, subscript, value);
}
void JIT_OPERATION operationPutByValGeneric(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    JSValue baseValue = JSValue::decode(encodedBaseValue);
    JSValue subscript = JSValue::decode(encodedSubscript);
    JSValue value = JSValue::decode(encodedValue);

    putByVal(exec, baseValue, subscript, value);
}
void JIT_OPERATION operationDirectPutByValGeneric(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    JSValue baseValue = JSValue::decode(encodedBaseValue);
    JSValue subscript = JSValue::decode(encodedSubscript);
    JSValue value = JSValue::decode(encodedValue);
    RELEASE_ASSERT(baseValue.isObject());
    directPutByVal(exec, asObject(baseValue), subscript, value);
}
EncodedJSValue JIT_OPERATION operationCallEval(ExecState* exec, ExecState* execCallee)
{
    ASSERT_UNUSED(exec, exec->codeBlock()->codeType() != FunctionCode
        || !exec->codeBlock()->needsActivation()
        || exec->hasActivation());

    execCallee->setCodeBlock(0);

    if (!isHostFunction(execCallee->calleeAsValue(), globalFuncEval))
        return JSValue::encode(JSValue());

    VM* vm = &execCallee->vm();
    JSValue result = eval(execCallee);
    if (vm->exception())
        return EncodedJSValue();

    return JSValue::encode(result);
}
static void* handleHostCall(ExecState* execCallee, JSValue callee, CodeSpecializationKind kind)
{
    ExecState* exec = execCallee->callerFrame();
    VM* vm = &exec->vm();

    execCallee->setCodeBlock(0);

    if (kind == CodeForCall) {
        CallData callData;
        CallType callType = getCallData(callee, callData);

        ASSERT(callType != CallTypeJS);

        if (callType == CallTypeHost) {
            NativeCallFrameTracer tracer(vm, execCallee);
            execCallee->setCallee(asObject(callee));
            vm->hostCallReturnValue = JSValue::decode(callData.native.function(execCallee));
            if (vm->exception())
                return vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress();

            return reinterpret_cast<void*>(getHostCallReturnValue);
        }

        ASSERT(callType == CallTypeNone);
        exec->vm().throwException(exec, createNotAFunctionError(exec, callee));
        return vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress();
    }

    ASSERT(kind == CodeForConstruct);

    ConstructData constructData;
    ConstructType constructType = getConstructData(callee, constructData);

    ASSERT(constructType != ConstructTypeJS);

    if (constructType == ConstructTypeHost) {
        NativeCallFrameTracer tracer(vm, execCallee);
        execCallee->setCallee(asObject(callee));
        vm->hostCallReturnValue = JSValue::decode(constructData.native.function(execCallee));
        if (vm->exception())
            return vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress();

        return reinterpret_cast<void*>(getHostCallReturnValue);
    }

    ASSERT(constructType == ConstructTypeNone);
    exec->vm().throwException(exec, createNotAConstructorError(exec, callee));
    return vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress();
}
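// linkFor resolves the callee of an unlinked call site, triggers compilation of the callee's
// code block if necessary, links the call site to the resulting entrypoint, and returns that
// entrypoint so the caller can jump straight to it.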
inline char* linkFor(
    ExecState* execCallee, CallLinkInfo* callLinkInfo, CodeSpecializationKind kind,
    RegisterPreservationMode registers)
{
    ExecState* exec = execCallee->callerFrame();
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    JSValue calleeAsValue = execCallee->calleeAsValue();
    JSCell* calleeAsFunctionCell = getJSFunction(calleeAsValue);
    if (!calleeAsFunctionCell) {
        // FIXME: We should cache these kinds of calls. They can be common and currently they are
        // https://bugs.webkit.org/show_bug.cgi?id=144458
        return reinterpret_cast<char*>(handleHostCall(execCallee, calleeAsValue, kind));
    }

    JSFunction* callee = jsCast<JSFunction*>(calleeAsFunctionCell);
    JSScope* scope = callee->scopeUnchecked();
    ExecutableBase* executable = callee->executable();

    MacroAssemblerCodePtr codePtr;
    CodeBlock* codeBlock = 0;
    if (executable->isHostFunction())
        codePtr = executable->entrypointFor(*vm, kind, MustCheckArity, registers);
    else {
        FunctionExecutable* functionExecutable = static_cast<FunctionExecutable*>(executable);

        if (!isCall(kind) && functionExecutable->isBuiltinFunction()) {
            exec->vm().throwException(exec, createNotAConstructorError(exec, callee));
            return reinterpret_cast<char*>(vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress());
        }

        JSObject* error = functionExecutable->prepareForExecution(execCallee, callee, scope, kind);
        if (error) {
            exec->vm().throwException(exec, error);
            return reinterpret_cast<char*>(vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress());
        }
        codeBlock = functionExecutable->codeBlockFor(kind);
        ArityCheckMode arity;
        if (execCallee->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters()) || callLinkInfo->callType == CallLinkInfo::CallVarargs || callLinkInfo->callType == CallLinkInfo::ConstructVarargs)
            arity = MustCheckArity;
        else
            arity = ArityCheckNotRequired;
        codePtr = functionExecutable->entrypointFor(*vm, kind, arity, registers);
    }
    if (!callLinkInfo->seenOnce())
        callLinkInfo->setSeen();
    else
        linkFor(execCallee, *callLinkInfo, codeBlock, callee, codePtr, kind, registers);

    return reinterpret_cast<char*>(codePtr.executableAddress());
}
char* JIT_OPERATION operationLinkCall(ExecState* execCallee, CallLinkInfo* callLinkInfo)
{
    return linkFor(execCallee, callLinkInfo, CodeForCall, RegisterPreservationNotRequired);
}

char* JIT_OPERATION operationLinkConstruct(ExecState* execCallee, CallLinkInfo* callLinkInfo)
{
    return linkFor(execCallee, callLinkInfo, CodeForConstruct, RegisterPreservationNotRequired);
}

char* JIT_OPERATION operationLinkCallThatPreservesRegs(ExecState* execCallee, CallLinkInfo* callLinkInfo)
{
    return linkFor(execCallee, callLinkInfo, CodeForCall, MustPreserveRegisters);
}

char* JIT_OPERATION operationLinkConstructThatPreservesRegs(ExecState* execCallee, CallLinkInfo* callLinkInfo)
{
    return linkFor(execCallee, callLinkInfo, CodeForConstruct, MustPreserveRegisters);
}
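// The virtual call path: unlike linkFor, these helpers do not link the call site. They re-resolve
// the callee on every call, always requiring the arity check, and return the entrypoint to jump to.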
inline char* virtualForWithFunction(
    ExecState* execCallee, CodeSpecializationKind kind, RegisterPreservationMode registers,
    JSCell*& calleeAsFunctionCell)
{
    ExecState* exec = execCallee->callerFrame();
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    JSValue calleeAsValue = execCallee->calleeAsValue();
    calleeAsFunctionCell = getJSFunction(calleeAsValue);
    if (UNLIKELY(!calleeAsFunctionCell))
        return reinterpret_cast<char*>(handleHostCall(execCallee, calleeAsValue, kind));

    JSFunction* function = jsCast<JSFunction*>(calleeAsFunctionCell);
    JSScope* scope = function->scopeUnchecked();
    ExecutableBase* executable = function->executable();
    if (UNLIKELY(!executable->hasJITCodeFor(kind))) {
        FunctionExecutable* functionExecutable = static_cast<FunctionExecutable*>(executable);

        if (!isCall(kind) && functionExecutable->isBuiltinFunction()) {
            exec->vm().throwException(exec, createNotAConstructorError(exec, function));
            return reinterpret_cast<char*>(vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress());
        }

        JSObject* error = functionExecutable->prepareForExecution(execCallee, function, scope, kind);
        if (error) {
            exec->vm().throwException(exec, error);
            return reinterpret_cast<char*>(vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress());
        }
    }
    return reinterpret_cast<char*>(executable->entrypointFor(
        *vm, kind, MustCheckArity, registers).executableAddress());
}
inline char* virtualFor(
    ExecState* execCallee, CodeSpecializationKind kind, RegisterPreservationMode registers)
{
    JSCell* calleeAsFunctionCellIgnored;
    return virtualForWithFunction(execCallee, kind, registers, calleeAsFunctionCellIgnored);
}
char* JIT_OPERATION operationLinkPolymorphicCall(ExecState* execCallee, CallLinkInfo* callLinkInfo)
{
    JSCell* calleeAsFunctionCell;
    char* result = virtualForWithFunction(execCallee, CodeForCall, RegisterPreservationNotRequired, calleeAsFunctionCell);

    linkPolymorphicCall(execCallee, *callLinkInfo, CallVariant(calleeAsFunctionCell), RegisterPreservationNotRequired);

    return result;
}
char* JIT_OPERATION operationVirtualCall(ExecState* execCallee, CallLinkInfo*)
{
    return virtualFor(execCallee, CodeForCall, RegisterPreservationNotRequired);
}

char* JIT_OPERATION operationVirtualConstruct(ExecState* execCallee, CallLinkInfo*)
{
    return virtualFor(execCallee, CodeForConstruct, RegisterPreservationNotRequired);
}
char* JIT_OPERATION operationLinkPolymorphicCallThatPreservesRegs(ExecState* execCallee, CallLinkInfo* callLinkInfo)
{
    JSCell* calleeAsFunctionCell;
    char* result = virtualForWithFunction(execCallee, CodeForCall, MustPreserveRegisters, calleeAsFunctionCell);

    linkPolymorphicCall(execCallee, *callLinkInfo, CallVariant(calleeAsFunctionCell), MustPreserveRegisters);

    return result;
}
char* JIT_OPERATION operationVirtualCallThatPreservesRegs(ExecState* execCallee, CallLinkInfo*)
{
    return virtualFor(execCallee, CodeForCall, MustPreserveRegisters);
}

char* JIT_OPERATION operationVirtualConstructThatPreservesRegs(ExecState* execCallee, CallLinkInfo*)
{
    return virtualFor(execCallee, CodeForConstruct, MustPreserveRegisters);
}
size_t JIT_OPERATION operationCompareLess(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    return jsLess<true>(exec, JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
}

size_t JIT_OPERATION operationCompareLessEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    return jsLessEq<true>(exec, JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
}

size_t JIT_OPERATION operationCompareGreater(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    return jsLess<false>(exec, JSValue::decode(encodedOp2), JSValue::decode(encodedOp1));
}

size_t JIT_OPERATION operationCompareGreaterEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    return jsLessEq<false>(exec, JSValue::decode(encodedOp2), JSValue::decode(encodedOp1));
}

size_t JIT_OPERATION operationConvertJSValueToBoolean(ExecState* exec, EncodedJSValue encodedOp)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    return JSValue::decode(encodedOp).toBoolean(exec);
}

size_t JIT_OPERATION operationCompareEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    return JSValue::equalSlowCaseInline(exec, JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
}
#if USE(JSVALUE64)
EncodedJSValue JIT_OPERATION operationCompareStringEq(ExecState* exec, JSCell* left, JSCell* right)
#else
size_t JIT_OPERATION operationCompareStringEq(ExecState* exec, JSCell* left, JSCell* right)
#endif
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    bool result = WTF::equal(*asString(left)->value(exec).impl(), *asString(right)->value(exec).impl());
#if USE(JSVALUE64)
    return JSValue::encode(jsBoolean(result));
#else
    return result;
#endif
}
size_t JIT_OPERATION operationHasProperty(ExecState* exec, JSObject* base, JSString* property)
{
    int result = base->hasProperty(exec, property->toIdentifier(exec));
    return result;
}
EncodedJSValue JIT_OPERATION operationNewArrayWithProfile(ExecState* exec, ArrayAllocationProfile* profile, const JSValue* values, int size)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    return JSValue::encode(constructArrayNegativeIndexed(exec, profile, values, size));
}

EncodedJSValue JIT_OPERATION operationNewArrayBufferWithProfile(ExecState* exec, ArrayAllocationProfile* profile, const JSValue* values, int size)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    return JSValue::encode(constructArray(exec, profile, values, size));
}

EncodedJSValue JIT_OPERATION operationNewArrayWithSizeAndProfile(ExecState* exec, ArrayAllocationProfile* profile, EncodedJSValue size)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    JSValue sizeValue = JSValue::decode(size);
    return JSValue::encode(constructArrayWithSizeQuirk(exec, profile, exec->lexicalGlobalObject(), sizeValue));
}
EncodedJSValue JIT_OPERATION operationNewFunction(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
{
    ASSERT(functionExecutable->inherits(FunctionExecutable::info()));
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    return JSValue::encode(JSFunction::create(vm, static_cast<FunctionExecutable*>(functionExecutable), scope));
}

EncodedJSValue JIT_OPERATION operationNewFunctionWithInvalidatedReallocationWatchpoint(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
{
    ASSERT(functionExecutable->inherits(FunctionExecutable::info()));
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    return JSValue::encode(JSFunction::createWithInvalidatedReallocationWatchpoint(vm, static_cast<FunctionExecutable*>(functionExecutable), scope));
}
JSCell* JIT_OPERATION operationNewObject(ExecState* exec, Structure* structure)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    return constructEmptyObject(exec, structure);
}
EncodedJSValue JIT_OPERATION operationNewRegexp(ExecState* exec, void* regexpPtr)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    RegExp* regexp = static_cast<RegExp*>(regexpPtr);
    if (!regexp->isValid()) {
        vm.throwException(exec, createSyntaxError(exec, ASCIILiteral("Invalid flags supplied to RegExp constructor.")));
        return JSValue::encode(jsUndefined());
    }

    return JSValue::encode(RegExpObject::create(vm, exec->lexicalGlobalObject()->regExpStructure(), regexp));
}
void JIT_OPERATION operationHandleWatchdogTimer(ExecState* exec)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    if (UNLIKELY(vm.watchdog && vm.watchdog->didFire(exec)))
        vm.throwException(exec, createTerminatedExecutionException(&vm));
}
void JIT_OPERATION operationThrowStaticError(ExecState* exec, EncodedJSValue encodedValue, int32_t referenceErrorFlag)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSValue errorMessageValue = JSValue::decode(encodedValue);
    RELEASE_ASSERT(errorMessageValue.isString());
    String errorMessage = asString(errorMessageValue)->value(exec);
    if (referenceErrorFlag)
        vm.throwException(exec, createReferenceError(exec, errorMessage));
    else
        vm.throwException(exec, createTypeError(exec, errorMessage));
}
void JIT_OPERATION operationDebug(ExecState* exec, int32_t debugHookID)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    vm.interpreter->debug(exec, static_cast<DebugHookID>(debugHookID));
}
#if ENABLE(DFG_JIT)
static void updateAllPredictionsAndOptimizeAfterWarmUp(CodeBlock* codeBlock)
{
    codeBlock->updateAllPredictions();
    codeBlock->optimizeAfterWarmUp();
}
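// operationOptimize is the Baseline->DFG tier-up trigger. It returns a pair encoded by
// encodeResult(): a non-null first word is the address to jump to for OSR entry (with the second
// word as its argument), while (0, 0) tells the baseline code to keep running unoptimized.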
SlowPathReturnType JIT_OPERATION operationOptimize(ExecState* exec, int32_t bytecodeIndex)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    // Defer GC for a while so that it doesn't run between when we enter into this
    // slow path and when we figure out the state of our code block. This prevents
    // a number of awkward reentrancy scenarios, including:
    //
    // - The optimized version of our code block being jettisoned by GC right after
    //   we concluded that we wanted to use it, but have not planted it into the JS
    //   stack yet.
    //
    // - An optimized version of our code block being installed just as we decided
    //   that it wasn't ready yet.
    //
    // Note that jettisoning won't happen if we already initiated OSR, because in
    // that case we would have already planted the optimized code block into the JS
    // stack.
    DeferGCForAWhile deferGC(vm.heap);

    CodeBlock* codeBlock = exec->codeBlock();
    if (codeBlock->jitType() != JITCode::BaselineJIT) {
        dataLog("Unexpected code block in Baseline->DFG tier-up: ", *codeBlock, "\n");
        RELEASE_ASSERT_NOT_REACHED();
    }

    if (bytecodeIndex) {
        // If we're attempting to OSR from a loop, assume that this should be
        // separately optimized.
        codeBlock->m_shouldAlwaysBeInlined = false;
    }

    if (Options::verboseOSR()) {
        dataLog(
            *codeBlock, ": Entered optimize with bytecodeIndex = ", bytecodeIndex,
            ", executeCounter = ", codeBlock->jitExecuteCounter(),
            ", optimizationDelayCounter = ", codeBlock->reoptimizationRetryCounter(),
            ", exitCounter = ");
        if (codeBlock->hasOptimizedReplacement())
            dataLog(codeBlock->replacement()->osrExitCounter());
        else
            dataLog("N/A");
        dataLog("\n");
    }

    if (!codeBlock->checkIfOptimizationThresholdReached()) {
        codeBlock->updateAllPredictions();
        if (Options::verboseOSR())
            dataLog("Choosing not to optimize ", *codeBlock, " yet, because the threshold hasn't been reached.\n");
        return encodeResult(0, 0);
    }

    if (vm.enabledProfiler()) {
        updateAllPredictionsAndOptimizeAfterWarmUp(codeBlock);
        return encodeResult(0, 0);
    }

    Debugger* debugger = codeBlock->globalObject()->debugger();
    if (debugger && (debugger->isStepping() || codeBlock->baselineAlternative()->hasDebuggerRequests())) {
        updateAllPredictionsAndOptimizeAfterWarmUp(codeBlock);
        return encodeResult(0, 0);
    }

    if (codeBlock->m_shouldAlwaysBeInlined) {
        updateAllPredictionsAndOptimizeAfterWarmUp(codeBlock);
        if (Options::verboseOSR())
            dataLog("Choosing not to optimize ", *codeBlock, " yet, because m_shouldAlwaysBeInlined == true.\n");
        return encodeResult(0, 0);
    }

    // We cannot be in the process of asynchronous compilation and also have an optimized
    // replacement.
    DFG::Worklist* worklist = DFG::existingGlobalDFGWorklistOrNull();
    ASSERT(
        !worklist
        || !(worklist->compilationState(DFG::CompilationKey(codeBlock, DFG::DFGMode)) != DFG::Worklist::NotKnown
        && codeBlock->hasOptimizedReplacement()));

    DFG::Worklist::State worklistState;
    if (worklist) {
        // The call to DFG::Worklist::completeAllReadyPlansForVM() will complete all ready
        // (i.e. compiled) code blocks. But if it completes ours, we also need to know
        // what the result was so that we don't plow ahead and attempt OSR or immediate
        // reoptimization. This will have already also set the appropriate JIT execution
        // count threshold depending on what happened, so if the compilation was anything
        // but successful we just want to return early. See the case for worklistState ==
        // DFG::Worklist::Compiled, below.
        //
        // Note that we could have alternatively just called Worklist::compilationState()
        // here, and if it returned Compiled, we could have then called
        // completeAndScheduleOSR() below. But that would have meant that it could take
        // longer for code blocks to be completed: they would only complete when *their*
        // execution count trigger fired; but that could take a while since the firing is
        // racy. It could also mean that code blocks that never run again after being
        // compiled would sit on the worklist until next GC. That's fine, but it's
        // probably a waste of memory. Our goal here is to complete code blocks as soon as
        // possible in order to minimize the chances of us executing baseline code after
        // optimized code is already available.
        worklistState = worklist->completeAllReadyPlansForVM(
            vm, DFG::CompilationKey(codeBlock, DFG::DFGMode));
    } else
        worklistState = DFG::Worklist::NotKnown;

    if (worklistState == DFG::Worklist::Compiling) {
        // We cannot be in the process of asynchronous compilation and also have an optimized
        // replacement.
        RELEASE_ASSERT(!codeBlock->hasOptimizedReplacement());
        codeBlock->setOptimizationThresholdBasedOnCompilationResult(CompilationDeferred);
        return encodeResult(0, 0);
    }

    if (worklistState == DFG::Worklist::Compiled) {
        // If we don't have an optimized replacement but we did just get compiled, then
        // the compilation failed or was invalidated, in which case the execution count
        // thresholds have already been set appropriately by
        // CodeBlock::setOptimizationThresholdBasedOnCompilationResult() and we have
        // nothing left to do.
        if (!codeBlock->hasOptimizedReplacement()) {
            codeBlock->updateAllPredictions();
            if (Options::verboseOSR())
                dataLog("Code block ", *codeBlock, " was compiled but it doesn't have an optimized replacement.\n");
            return encodeResult(0, 0);
        }
    } else if (codeBlock->hasOptimizedReplacement()) {
        if (Options::verboseOSR())
            dataLog("Considering OSR ", *codeBlock, " -> ", *codeBlock->replacement(), ".\n");
        // If we have an optimized replacement, then it must be the case that we entered
        // cti_optimize from a loop. That's because if there's an optimized replacement,
        // then all calls to this function will be relinked to the replacement and so
        // the prologue OSR will never fire.
        //
        // This is an interesting threshold check. Consider that a function OSR exits
        // in the middle of a loop, while having a relatively low exit count. The exit
        // will reset the execution counter to some target threshold, meaning that this
        // code won't be reached until that loop heats up for >=1000 executions. But then
        // we do a second check here, to see if we should either reoptimize, or just
        // attempt OSR entry. Hence it might even be correct for
        // shouldReoptimizeFromLoopNow() to always return true. But we make it do some
        // additional checking anyway, to reduce the amount of recompilation thrashing.
        if (codeBlock->replacement()->shouldReoptimizeFromLoopNow()) {
            if (Options::verboseOSR()) {
                dataLog(
                    "Triggering reoptimization of ", *codeBlock,
                    "(", *codeBlock->replacement(), ") (in loop).\n");
            }
            codeBlock->replacement()->jettison(Profiler::JettisonDueToBaselineLoopReoptimizationTrigger, CountReoptimization);
            return encodeResult(0, 0);
        }
    } else {
        if (!codeBlock->shouldOptimizeNow()) {
            if (Options::verboseOSR()) {
                dataLog(
                    "Delaying optimization for ", *codeBlock,
                    " because of insufficient profiling.\n");
            }
            return encodeResult(0, 0);
        }

        if (Options::verboseOSR())
            dataLog("Triggering optimized compilation of ", *codeBlock, "\n");

        unsigned numVarsWithValues;
        if (bytecodeIndex)
            numVarsWithValues = codeBlock->m_numVars;
        else
            numVarsWithValues = 0;
        Operands<JSValue> mustHandleValues(codeBlock->numParameters(), numVarsWithValues);
        for (size_t i = 0; i < mustHandleValues.size(); ++i) {
            int operand = mustHandleValues.operandForIndex(i);
            mustHandleValues[i] = exec->uncheckedR(operand).jsValue();
        }

        RefPtr<CodeBlock> replacementCodeBlock = codeBlock->newReplacement();
        CompilationResult result = DFG::compile(
            vm, replacementCodeBlock.get(), 0, DFG::DFGMode, bytecodeIndex,
            mustHandleValues, JITToDFGDeferredCompilationCallback::create());

        if (result != CompilationSuccessful) {
            ASSERT(result == CompilationDeferred || replacementCodeBlock->hasOneRef());
            return encodeResult(0, 0);
        }
    }

    CodeBlock* optimizedCodeBlock = codeBlock->replacement();
    ASSERT(JITCode::isOptimizingJIT(optimizedCodeBlock->jitType()));

    if (void* dataBuffer = DFG::prepareOSREntry(exec, optimizedCodeBlock, bytecodeIndex)) {
        if (Options::verboseOSR()) {
            dataLog(
                "Performing OSR ", *codeBlock, " -> ", *optimizedCodeBlock, ".\n");
        }

        codeBlock->optimizeSoon();
        return encodeResult(vm.getCTIStub(DFG::osrEntryThunkGenerator).code().executableAddress(), dataBuffer);
    }

    if (Options::verboseOSR()) {
        dataLog(
            "Optimizing ", *codeBlock, " -> ", *codeBlock->replacement(),
            " succeeded, OSR failed, after a delay of ",
            codeBlock->optimizationDelayCounter(), ".\n");
    }

    // Count the OSR failure as a speculation failure. If this happens a lot, then
    // reoptimize.
    optimizedCodeBlock->countOSRExit();

    // We are a lot more conservative about triggering reoptimization after OSR failure than
    // before it. If we enter the optimize_from_loop trigger with a bucket full of fail
    // already, then we really would like to reoptimize immediately. But this case covers
    // something else: there weren't many (or any) speculation failures before, but we just
    // failed to enter the speculative code because some variable had the wrong value or
    // because the OSR code decided for any spurious reason that it did not want to OSR
    // right now. So, we trigger reoptimization only upon the more conservative (non-loop)
    // reoptimization trigger.
    if (optimizedCodeBlock->shouldReoptimizeNow()) {
        if (Options::verboseOSR()) {
            dataLog(
                "Triggering reoptimization of ", *codeBlock, " -> ",
                *codeBlock->replacement(), " (after OSR fail).\n");
        }
        optimizedCodeBlock->jettison(Profiler::JettisonDueToBaselineLoopReoptimizationTriggerOnOSREntryFail, CountReoptimization);
        return encodeResult(0, 0);
    }

    // OSR failed this time, but it might succeed next time! Let the code run a bit
    // longer and then try again.
    codeBlock->optimizeAfterWarmUp();

    return encodeResult(0, 0);
}
#endif
void JIT_OPERATION operationPutByIndex(ExecState* exec, EncodedJSValue encodedArrayValue, int32_t index, EncodedJSValue encodedValue)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    JSValue arrayValue = JSValue::decode(encodedArrayValue);
    ASSERT(isJSArray(arrayValue));
    asArray(arrayValue)->putDirectIndex(exec, index, JSValue::decode(encodedValue));
}
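// Accessor-definition operations. They exist in two parameterizations, selected by the value
// representation: an EncodedJSValue flavor and a JSCell* flavor.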
#if USE(JSVALUE64)
void JIT_OPERATION operationPutGetterById(ExecState* exec, EncodedJSValue encodedObjectValue, Identifier* identifier, EncodedJSValue encodedGetterValue)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    ASSERT(JSValue::decode(encodedObjectValue).isObject());
    JSObject* baseObj = asObject(JSValue::decode(encodedObjectValue));

    JSValue getter = JSValue::decode(encodedGetterValue);
    ASSERT(getter.isObject());
    baseObj->putGetter(exec, *identifier, asObject(getter));
}

void JIT_OPERATION operationPutSetterById(ExecState* exec, EncodedJSValue encodedObjectValue, Identifier* identifier, EncodedJSValue encodedSetterValue)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    ASSERT(JSValue::decode(encodedObjectValue).isObject());
    JSObject* baseObj = asObject(JSValue::decode(encodedObjectValue));

    JSValue setter = JSValue::decode(encodedSetterValue);
    ASSERT(setter.isObject());
    baseObj->putSetter(exec, *identifier, asObject(setter));
}

void JIT_OPERATION operationPutGetterSetter(ExecState* exec, EncodedJSValue encodedObjectValue, Identifier* identifier, EncodedJSValue encodedGetterValue, EncodedJSValue encodedSetterValue)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    ASSERT(JSValue::decode(encodedObjectValue).isObject());
    JSObject* baseObj = asObject(JSValue::decode(encodedObjectValue));

    GetterSetter* accessor = GetterSetter::create(vm, exec->lexicalGlobalObject());

    JSValue getter = JSValue::decode(encodedGetterValue);
    JSValue setter = JSValue::decode(encodedSetterValue);
    ASSERT(getter.isObject() || getter.isUndefined());
    ASSERT(setter.isObject() || setter.isUndefined());
    ASSERT(getter.isObject() || setter.isObject());

    if (!getter.isUndefined())
        accessor->setGetter(vm, exec->lexicalGlobalObject(), asObject(getter));
    if (!setter.isUndefined())
        accessor->setSetter(vm, exec->lexicalGlobalObject(), asObject(setter));
    baseObj->putDirectAccessor(exec, *identifier, accessor, Accessor);
}
#else
void JIT_OPERATION operationPutGetterById(ExecState* exec, JSCell* object, Identifier* identifier, JSCell* getter)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    ASSERT(object && object->isObject());
    JSObject* baseObj = object->getObject();

    ASSERT(getter->isObject());
    baseObj->putGetter(exec, *identifier, getter);
}

void JIT_OPERATION operationPutSetterById(ExecState* exec, JSCell* object, Identifier* identifier, JSCell* setter)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    ASSERT(object && object->isObject());
    JSObject* baseObj = object->getObject();

    ASSERT(setter->isObject());
    baseObj->putSetter(exec, *identifier, setter);
}

void JIT_OPERATION operationPutGetterSetter(ExecState* exec, JSCell* object, Identifier* identifier, JSCell* getter, JSCell* setter)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    ASSERT(object && object->isObject());
    JSObject* baseObj = object->getObject();

    GetterSetter* accessor = GetterSetter::create(vm, exec->lexicalGlobalObject());

    ASSERT(!getter || getter->isObject());
    ASSERT(!setter || setter->isObject());
    ASSERT(getter || setter);

    if (getter)
        accessor->setGetter(vm, exec->lexicalGlobalObject(), getter->getObject());
    if (setter)
        accessor->setSetter(vm, exec->lexicalGlobalObject(), setter->getObject());
    baseObj->putDirectAccessor(exec, *identifier, accessor, Accessor);
}
#endif
void JIT_OPERATION operationPushCatchScope(ExecState* exec, int32_t dst, SymbolTable* symbolTable, EncodedJSValue encodedValue)
{
    pushNameScope<JSCatchScope>(exec, dst, symbolTable, encodedValue);
}

void JIT_OPERATION operationPushFunctionNameScope(ExecState* exec, int32_t dst, SymbolTable* symbolTable, EncodedJSValue encodedValue)
{
    pushNameScope<JSFunctionNameScope>(exec, dst, symbolTable, encodedValue);
}
void JIT_OPERATION operationPushWithScope(ExecState* exec, int32_t dst, EncodedJSValue encodedValue)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    JSObject* o = JSValue::decode(encodedValue).toObject(exec);
    if (vm.exception())
        return;

    // FIXME: This won't work if this operation is called from the DFG or FTL.
    // This should be changed to pass in the old scope and return the new scope.
    JSScope* currentScope = exec->uncheckedR(dst).Register::scope();
    exec->uncheckedR(dst) = JSWithScope::create(exec, o, currentScope);
}
void JIT_OPERATION operationPopScope(ExecState* exec, int32_t scopeReg)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    JSScope* scope = exec->uncheckedR(scopeReg).Register::scope();
    exec->uncheckedR(scopeReg) = scope->next();
}
void JIT_OPERATION operationProfileDidCall(ExecState* exec, EncodedJSValue encodedValue)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    if (LegacyProfiler* profiler = vm.enabledProfiler())
        profiler->didExecute(exec, JSValue::decode(encodedValue));
}

void JIT_OPERATION operationProfileWillCall(ExecState* exec, EncodedJSValue encodedValue)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    if (LegacyProfiler* profiler = vm.enabledProfiler())
        profiler->willExecute(exec, JSValue::decode(encodedValue));
}
EncodedJSValue JIT_OPERATION operationCheckHasInstance(ExecState* exec, EncodedJSValue encodedValue, EncodedJSValue encodedBaseVal)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    JSValue value = JSValue::decode(encodedValue);
    JSValue baseVal = JSValue::decode(encodedBaseVal);

    if (baseVal.isObject()) {
        JSObject* baseObject = asObject(baseVal);
        ASSERT(!baseObject->structure(vm)->typeInfo().implementsDefaultHasInstance());
        if (baseObject->structure(vm)->typeInfo().implementsHasInstance()) {
            bool result = baseObject->methodTable(vm)->customHasInstance(baseObject, exec, value);
            return JSValue::encode(jsBoolean(result));
        }
    }

    vm.throwException(exec, createInvalidInstanceofParameterError(exec, baseVal));
    return JSValue::encode(JSValue());
}
JSCell* JIT_OPERATION operationCreateActivation(ExecState* exec, JSScope* currentScope)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSLexicalEnvironment* lexicalEnvironment = JSLexicalEnvironment::create(vm, exec, currentScope, exec->codeBlock());
    return lexicalEnvironment;
}
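// getByVal implements the generic get_by_val slow path: a fast own-property lookup when the
// subscript is a string, a string-index fast path that repatches the call site to
// operationGetByValString, and finally a fully generic property get.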
static JSValue getByVal(ExecState* exec, JSValue baseValue, JSValue subscript, ReturnAddressPtr returnAddress)
{
    if (LIKELY(baseValue.isCell() && subscript.isString())) {
        VM& vm = exec->vm();
        Structure& structure = *baseValue.asCell()->structure(vm);
        if (JSCell::canUseFastGetOwnProperty(structure)) {
            if (AtomicStringImpl* existingAtomicString = asString(subscript)->toExistingAtomicString(exec)) {
                if (JSValue result = baseValue.asCell()->fastGetOwnProperty(vm, structure, existingAtomicString))
                    return result;
            }
        }
    }

    if (subscript.isUInt32()) {
        uint32_t i = subscript.asUInt32();
        if (isJSString(baseValue) && asString(baseValue)->canGetIndex(i)) {
            ctiPatchCallByReturnAddress(exec->codeBlock(), returnAddress, FunctionPtr(operationGetByValString));
            return asString(baseValue)->getIndex(exec, i);
        }
        return baseValue.get(exec, i);
    }

    baseValue.requireObjectCoercible(exec);
    if (exec->hadException())
        return jsUndefined();
    auto property = subscript.toPropertyKey(exec);
    if (exec->hadException())
        return jsUndefined();
    return baseValue.get(exec, property);
}
EncodedJSValue JIT_OPERATION operationGetByValGeneric(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSValue baseValue = JSValue::decode(encodedBase);
    JSValue subscript = JSValue::decode(encodedSubscript);

    JSValue result = getByVal(exec, baseValue, subscript, ReturnAddressPtr(OUR_RETURN_ADDRESS));
    return JSValue::encode(result);
}

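// Default slow path for get_by_val. For an object base with an int32 subscript it consults the
// ByValInfo for this bytecode and, if the base's indexing shape looks optimizable and differs
// from what was compiled before, asks the JIT to emit a specialized access via
// JIT::compileGetByVal(). If patching keeps failing, the call site is eventually repatched to
// operationGetByValGeneric so we stop trying.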
EncodedJSValue JIT_OPERATION operationGetByValDefault(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSValue baseValue = JSValue::decode(encodedBase);
    JSValue subscript = JSValue::decode(encodedSubscript);

    if (baseValue.isObject() && subscript.isInt32()) {
        // See if it's worth optimizing this at all.
        JSObject* object = asObject(baseValue);
        bool didOptimize = false;

        unsigned bytecodeOffset = exec->locationAsBytecodeOffset();
        ASSERT(bytecodeOffset);
        ByValInfo& byValInfo = exec->codeBlock()->getByValInfo(bytecodeOffset - 1);
        ASSERT(!byValInfo.stubRoutine);

        if (hasOptimizableIndexing(object->structure(vm))) {
            // Attempt to optimize.
            JITArrayMode arrayMode = jitArrayModeForStructure(object->structure(vm));
            if (arrayMode != byValInfo.arrayMode) {
                JIT::compileGetByVal(&vm, exec->codeBlock(), &byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS), arrayMode);
                didOptimize = true;
            }
        }

        if (!didOptimize) {
            // If we take slow path more than 10 times without patching then make sure we
            // never make that mistake again. Or, if we failed to patch and we have some object
            // that intercepts indexed get, then don't even wait until 10 times. For cases
            // where we see non-index-intercepting objects, this gives 10 iterations worth of
            // opportunity for us to observe that the get_by_val may be polymorphic.
            if (++byValInfo.slowPathCount >= 10
                || object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero()) {
                // Don't ever try to optimize.
                ctiPatchCallByReturnAddress(exec->codeBlock(), ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(operationGetByValGeneric));
            }
        }
    }

    JSValue result = getByVal(exec, baseValue, subscript, ReturnAddressPtr(OUR_RETURN_ADDRESS));
    return JSValue::encode(result);
}

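// The same patching strategy as operationGetByValDefault, applied to has_indexed_property:
// either compile a specialized check via JIT::compileHasIndexedProperty(), or give up and
// repatch the call site to operationHasIndexedPropertyGeneric.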
EncodedJSValue JIT_OPERATION operationHasIndexedPropertyDefault(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSValue baseValue = JSValue::decode(encodedBase);
    JSValue subscript = JSValue::decode(encodedSubscript);

    ASSERT(baseValue.isObject());
    ASSERT(subscript.isUInt32());

    JSObject* object = asObject(baseValue);
    bool didOptimize = false;

    unsigned bytecodeOffset = exec->locationAsBytecodeOffset();
    ASSERT(bytecodeOffset);
    ByValInfo& byValInfo = exec->codeBlock()->getByValInfo(bytecodeOffset - 1);
    ASSERT(!byValInfo.stubRoutine);

    if (hasOptimizableIndexing(object->structure(vm))) {
        // Attempt to optimize.
        JITArrayMode arrayMode = jitArrayModeForStructure(object->structure(vm));
        if (arrayMode != byValInfo.arrayMode) {
            JIT::compileHasIndexedProperty(&vm, exec->codeBlock(), &byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS), arrayMode);
            didOptimize = true;
        }
    }

    if (!didOptimize) {
        // If we take slow path more than 10 times without patching then make sure we
        // never make that mistake again. Or, if we failed to patch and we have some object
        // that intercepts indexed get, then don't even wait until 10 times. For cases
        // where we see non-index-intercepting objects, this gives 10 iterations worth of
        // opportunity for us to observe that the get_by_val may be polymorphic.
        if (++byValInfo.slowPathCount >= 10
            || object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero()) {
            // Don't ever try to optimize.
            ctiPatchCallByReturnAddress(exec->codeBlock(), ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(operationHasIndexedPropertyGeneric));
        }
    }

    return JSValue::encode(jsBoolean(object->hasProperty(exec, subscript.asUInt32())));
}

EncodedJSValue JIT_OPERATION operationHasIndexedPropertyGeneric(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSValue baseValue = JSValue::decode(encodedBase);
    JSValue subscript = JSValue::decode(encodedSubscript);

    ASSERT(baseValue.isObject());
    ASSERT(subscript.isUInt32());

    JSObject* object = asObject(baseValue);
    return JSValue::encode(jsBoolean(object->hasProperty(exec, subscript.asUInt32())));
}

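// Specialized slow path installed by getByVal() above for string bases with index subscripts.
// If the base is no longer a string, the call site is repatched back to operationGetByValDefault.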
EncodedJSValue JIT_OPERATION operationGetByValString(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSValue baseValue = JSValue::decode(encodedBase);
    JSValue subscript = JSValue::decode(encodedSubscript);

    JSValue result;
    if (LIKELY(subscript.isUInt32())) {
        uint32_t i = subscript.asUInt32();
        if (isJSString(baseValue) && asString(baseValue)->canGetIndex(i))
            result = asString(baseValue)->getIndex(exec, i);
        else {
            result = baseValue.get(exec, i);
            if (!isJSString(baseValue))
                ctiPatchCallByReturnAddress(exec->codeBlock(), ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(operationGetByValDefault));
        }
    } else {
        baseValue.requireObjectCoercible(exec);
        if (exec->hadException())
            return JSValue::encode(jsUndefined());
        auto property = subscript.toPropertyKey(exec);
        if (exec->hadException())
            return JSValue::encode(jsUndefined());
        result = baseValue.get(exec, property);
    }

    return JSValue::encode(result);
}

EncodedJSValue JIT_OPERATION operationDeleteById(ExecState* exec, EncodedJSValue encodedBase, const Identifier* identifier)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    JSObject* baseObj = JSValue::decode(encodedBase).toObject(exec);
    bool couldDelete = baseObj->methodTable(vm)->deleteProperty(baseObj, exec, *identifier);
    JSValue result = jsBoolean(couldDelete);
    if (!couldDelete && exec->codeBlock()->isStrictMode())
        vm.throwException(exec, createTypeError(exec, ASCIILiteral("Unable to delete property.")));
    return JSValue::encode(result);
}

EncodedJSValue JIT_OPERATION operationInstanceOf(ExecState* exec, EncodedJSValue encodedValue, EncodedJSValue encodedProto)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSValue value = JSValue::decode(encodedValue);
    JSValue proto = JSValue::decode(encodedProto);

    ASSERT(!value.isObject() || !proto.isObject());

    bool result = JSObject::defaultHasInstance(exec, value, proto);
    return JSValue::encode(jsBoolean(result));
}

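// Varargs calls are set up in two steps: operationSizeFrameForVarargs computes how many stack
// slots the spread arguments need, and operationSetupVarargsFrame then copies the arguments
// into the newCallFrame provided by the caller.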
int32_t JIT_OPERATION operationSizeFrameForVarargs(ExecState* exec, EncodedJSValue encodedArguments, int32_t numUsedStackSlots, int32_t firstVarArgOffset)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSStack* stack = &exec->interpreter()->stack();
    JSValue arguments = JSValue::decode(encodedArguments);
    return sizeFrameForVarargs(exec, stack, arguments, numUsedStackSlots, firstVarArgOffset);
}

CallFrame* JIT_OPERATION operationSetupVarargsFrame(ExecState* exec, CallFrame* newCallFrame, EncodedJSValue encodedArguments, int32_t firstVarArgOffset, int32_t length)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSValue arguments = JSValue::decode(encodedArguments);
    setupVarargsFrame(exec, newCallFrame, arguments, firstVarArgOffset, length);
    return newCallFrame;
}

EncodedJSValue JIT_OPERATION operationToObject(ExecState* exec, EncodedJSValue value)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    return JSValue::encode(JSValue::decode(value).toObject(exec));
}

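// The three operations below are the slow paths for switch_char, switch_imm and switch_string
// when the key is not of the statically expected type. Each one consults the appropriate jump
// table in the CodeBlock and returns the machine-code address to jump to, falling back to the
// table's default target.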
char* JIT_OPERATION operationSwitchCharWithUnknownKeyType(ExecState* exec, EncodedJSValue encodedKey, size_t tableIndex)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSValue key = JSValue::decode(encodedKey);
    CodeBlock* codeBlock = exec->codeBlock();

    SimpleJumpTable& jumpTable = codeBlock->switchJumpTable(tableIndex);
    void* result = jumpTable.ctiDefault.executableAddress();

    if (key.isString()) {
        StringImpl* value = asString(key)->value(exec).impl();
        if (value->length() == 1)
            result = jumpTable.ctiForValue((*value)[0]).executableAddress();
    }

    return reinterpret_cast<char*>(result);
}

char* JIT_OPERATION operationSwitchImmWithUnknownKeyType(ExecState* exec, EncodedJSValue encodedKey, size_t tableIndex)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSValue key = JSValue::decode(encodedKey);
    CodeBlock* codeBlock = exec->codeBlock();

    SimpleJumpTable& jumpTable = codeBlock->switchJumpTable(tableIndex);
    void* result;
    if (key.isInt32())
        result = jumpTable.ctiForValue(key.asInt32()).executableAddress();
    else if (key.isDouble() && key.asDouble() == static_cast<int32_t>(key.asDouble()))
        result = jumpTable.ctiForValue(static_cast<int32_t>(key.asDouble())).executableAddress();
    else
        result = jumpTable.ctiDefault.executableAddress();
    return reinterpret_cast<char*>(result);
}

char* JIT_OPERATION operationSwitchStringWithUnknownKeyType(ExecState* exec, EncodedJSValue encodedKey, size_t tableIndex)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSValue key = JSValue::decode(encodedKey);
    CodeBlock* codeBlock = exec->codeBlock();

    void* result;
    StringJumpTable& jumpTable = codeBlock->stringSwitchJumpTable(tableIndex);

    if (key.isString()) {
        StringImpl* value = asString(key)->value(exec).impl();
        result = jumpTable.ctiForValue(value).executableAddress();
    } else
        result = jumpTable.ctiDefault.executableAddress();

    return reinterpret_cast<char*>(result);
}

EncodedJSValue JIT_OPERATION operationResolveScope(ExecState* exec, int32_t scopeReg, int32_t identifierIndex)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    const Identifier& ident = exec->codeBlock()->identifier(identifierIndex);
    JSScope* scope = exec->uncheckedR(scopeReg).Register::scope();
    return JSValue::encode(JSScope::resolve(exec, scope, ident));
}

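// Slow path for get_from_scope. On a miss it either throws (ThrowIfNotFound) or returns
// undefined. On a cacheable hit against a global property it writes the structure and offset
// back into the instruction stream (pc[5]/pc[6]) under the ConcurrentJITLocker so later
// executions can take the cached path, and starts watching the property for replacements.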
EncodedJSValue JIT_OPERATION operationGetFromScope(ExecState* exec, Instruction* bytecodePC)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    CodeBlock* codeBlock = exec->codeBlock();
    Instruction* pc = bytecodePC;

    const Identifier& ident = codeBlock->identifier(pc[3].u.operand);
    JSObject* scope = jsCast<JSObject*>(exec->uncheckedR(pc[2].u.operand).jsValue());
    ResolveModeAndType modeAndType(pc[4].u.operand);

    PropertySlot slot(scope);
    if (!scope->getPropertySlot(exec, ident, slot)) {
        if (modeAndType.mode() == ThrowIfNotFound)
            vm.throwException(exec, createUndefinedVariableError(exec, ident));
        return JSValue::encode(jsUndefined());
    }

    // Covers implicit globals. Since they don't exist until they first execute, we didn't know how to cache them at compile time.
    if (slot.isCacheableValue() && slot.slotBase() == scope && scope->structure(vm)->propertyAccessesAreCacheable()) {
        if (modeAndType.type() == GlobalProperty || modeAndType.type() == GlobalPropertyWithVarInjectionChecks) {
            Structure* structure = scope->structure(vm);
            {
                ConcurrentJITLocker locker(codeBlock->m_lock);
                pc[5].u.structure.set(exec->vm(), codeBlock->ownerExecutable(), structure);
                pc[6].u.operand = slot.cachedOffset();
            }
            structure->startWatchingPropertyForReplacements(vm, slot.cachedOffset());
        }
    }

    return JSValue::encode(slot.getValue(exec, ident));
}

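// Slow path for put_to_scope. LocalClosureVar stores write directly into the
// JSLexicalEnvironment and touch the variable's watchpoint set; all other stores go through the
// scope object's put(), throwing for unresolvable names under ThrowIfNotFound, and then attempt
// to cache the access via CommonSlowPaths::tryCachePutToScopeGlobal().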
void JIT_OPERATION operationPutToScope(ExecState* exec, Instruction* bytecodePC)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    Instruction* pc = bytecodePC;

    CodeBlock* codeBlock = exec->codeBlock();
    const Identifier& ident = codeBlock->identifier(pc[2].u.operand);
    JSObject* scope = jsCast<JSObject*>(exec->uncheckedR(pc[1].u.operand).jsValue());
    JSValue value = exec->r(pc[3].u.operand).jsValue();
    ResolveModeAndType modeAndType = ResolveModeAndType(pc[4].u.operand);
    if (modeAndType.type() == LocalClosureVar) {
        JSLexicalEnvironment* environment = jsCast<JSLexicalEnvironment*>(scope);
        environment->variableAt(ScopeOffset(pc[6].u.operand)).set(vm, environment, value);
        if (WatchpointSet* set = pc[5].u.watchpointSet)
            set->touch("Executed op_put_scope<LocalClosureVar>");
        return;
    }
    if (modeAndType.mode() == ThrowIfNotFound && !scope->hasProperty(exec, ident)) {
        exec->vm().throwException(exec, createUndefinedVariableError(exec, ident));
        return;
    }

    PutPropertySlot slot(scope, codeBlock->isStrictMode());
    scope->methodTable()->put(scope, exec, ident, value, slot);

    if (exec->vm().exception())
        return;

    CommonSlowPaths::tryCachePutToScopeGlobal(exec, codeBlock, pc, scope, modeAndType, slot);
}

void JIT_OPERATION operationThrow(ExecState* exec, EncodedJSValue encodedExceptionValue)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    JSValue exceptionValue = JSValue::decode(encodedExceptionValue);
    vm->throwException(exec, exceptionValue);

    // Results stored out-of-band in vm.targetMachinePCForThrow, vm.callFrameForThrow & vm.vmEntryFrameForThrow
    genericUnwind(vm, exec, exceptionValue);
}

void JIT_OPERATION operationFlushWriteBarrierBuffer(ExecState* exec, JSCell* cell)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    vm->heap.flushWriteBarrierBuffer(cell);
}

void JIT_OPERATION operationOSRWriteBarrier(ExecState* exec, JSCell* cell)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    vm->heap.writeBarrier(cell);
}

// NB: We don't include the value as part of the barrier because the write barrier elision
// phase in the DFG only tracks whether the object being stored to has been barriered. It
// would be much more complicated to try to model the value being stored as well.
void JIT_OPERATION operationUnconditionalWriteBarrier(ExecState* exec, JSCell* cell)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    vm->heap.writeBarrier(cell);
}

void JIT_OPERATION operationInitGlobalConst(ExecState* exec, Instruction* pc)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    JSValue value = exec->r(pc[2].u.operand).jsValue();
    pc[1].u.variablePointer->set(*vm, exec->codeBlock()->globalObject(), value);
}

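// The exception-handling entry points below all funnel into genericUnwind(), which locates the
// handler and publishes its results out-of-band in the VM (targetMachinePCForThrow,
// callFrameForThrow, vmEntryFrameForThrow) for the JIT-emitted code to pick up.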
void JIT_OPERATION lookupExceptionHandler(VM* vm, ExecState* exec)
{
    NativeCallFrameTracer tracer(vm, exec);

    JSValue exceptionValue = vm->exception();
    ASSERT(exceptionValue);

    genericUnwind(vm, exec, exceptionValue);
    ASSERT(vm->targetMachinePCForThrow);
}

void JIT_OPERATION lookupExceptionHandlerFromCallerFrame(VM* vm, ExecState* exec)
{
    VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
    CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);
    ASSERT(callerFrame);

    NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);

    JSValue exceptionValue = vm->exception();
    ASSERT(exceptionValue);

    genericUnwind(vm, callerFrame, exceptionValue);
    ASSERT(vm->targetMachinePCForThrow);
}

void JIT_OPERATION operationVMHandleException(ExecState* exec)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    genericUnwind(vm, exec, vm->exception());
}

// This function "should" just take the ExecState*, but doing so would make it more difficult
// to call from exception check sites. So, unlike all of our other functions, we allow
// ourselves to play some gnarly ABI tricks just to simplify the calling convention. This is
// particularly safe here since this is never called on the critical path - it's only for
// testing.
void JIT_OPERATION operationExceptionFuzz()
{
#if COMPILER(CLANG)
    // This probably "just works" for GCC also, but I haven't tried.
    ExecState* exec = static_cast<ExecState*>(__builtin_frame_address(1));
    void* returnPC = __builtin_return_address(0);
    doExceptionFuzzing(exec, "JITOperations", returnPC);
#endif // COMPILER(CLANG)
}

EncodedJSValue JIT_OPERATION operationHasGenericProperty(ExecState* exec, EncodedJSValue encodedBaseValue, JSCell* propertyName)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSValue baseValue = JSValue::decode(encodedBaseValue);
    if (baseValue.isUndefinedOrNull())
        return JSValue::encode(jsBoolean(false));

    JSObject* base = baseValue.toObject(exec);
    return JSValue::encode(jsBoolean(base->hasProperty(exec, asString(propertyName)->toIdentifier(exec))));
}

EncodedJSValue JIT_OPERATION operationHasIndexedProperty(ExecState* exec, JSCell* baseCell, int32_t subscript)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSObject* object = baseCell->toObject(exec, exec->lexicalGlobalObject());
    return JSValue::encode(jsBoolean(object->hasProperty(exec, subscript)));
}

JSCell* JIT_OPERATION operationGetPropertyEnumerator(ExecState* exec, JSCell* cell)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    JSObject* base = cell->toObject(exec, exec->lexicalGlobalObject());

    return propertyNameEnumerator(exec, base);
}

EncodedJSValue JIT_OPERATION operationNextEnumeratorPname(ExecState* exec, JSCell* enumeratorCell, int32_t index)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSPropertyNameEnumerator* enumerator = jsCast<JSPropertyNameEnumerator*>(enumeratorCell);
    JSString* propertyName = enumerator->propertyNameAtIndex(index);
    return JSValue::encode(propertyName ? propertyName : jsNull());
}

JSCell* JIT_OPERATION operationToIndexString(ExecState* exec, int32_t index)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    return jsString(exec, Identifier::from(exec, index).string());
}

void JIT_OPERATION operationProcessTypeProfilerLog(ExecState* exec)
{
    exec->vm().typeProfilerLog()->processLogEntries(ASCIILiteral("Log Full, called from inside baseline JIT"));
}

// Note: getHostCallReturnValueWithExecState() needs to be placed before the
// definition of getHostCallReturnValue() below because the Windows build
// requires it.
extern "C" EncodedJSValue HOST_CALL_RETURN_VALUE_OPTION getHostCallReturnValueWithExecState(ExecState* exec)
{
    if (!exec)
        return JSValue::encode(JSValue());
    return JSValue::encode(exec->vm().hostCallReturnValue);
}

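// Per-architecture stubs for getHostCallReturnValue(). Each one moves the caller's frame
// pointer (the ExecState) into the first argument register or argument slot and then jumps or
// calls through to getHostCallReturnValueWithExecState() above.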
#if COMPILER(GCC) && CPU(X86_64)
asm (
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    "mov %rbp, %rdi\n"
    "jmp " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif COMPILER(GCC) && CPU(X86)
asm (
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    "leal -4(%esp), %esp\n"
    "push %ebp\n"
    "call " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
    "leal 8(%esp), %esp\n"
    "ret\n"
);

#elif COMPILER(GCC) && CPU(ARM_THUMB2)
asm (
".text" "\n"
".align 2" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
".thumb" "\n"
".thumb_func " THUMB_FUNC_PARAM(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    "mov r0, r7" "\n"
    "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif COMPILER(GCC) && CPU(ARM_TRADITIONAL)
asm (
".text" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
INLINE_ARM_FUNCTION(getHostCallReturnValue)
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    "mov r0, r11" "\n"
    "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

2026 ".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
2027 HIDE_SYMBOL(getHostCallReturnValue) "\n"
2028 SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
2030 "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
#elif COMPILER(GCC) && CPU(MIPS)

#if WTF_MIPS_PIC
#define LOAD_FUNCTION_TO_T9(function) \
        ".set noreorder" "\n" \
        ".cpload $25" "\n" \
        ".set reorder" "\n" \
        "la $t9, " LOCAL_REFERENCE(function) "\n"
#else
#define LOAD_FUNCTION_TO_T9(function) "" "\n"
#endif

asm (
".text" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    LOAD_FUNCTION_TO_T9(getHostCallReturnValueWithExecState)
    "move $a0, $fp" "\n"
    "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif COMPILER(GCC) && CPU(SH4)

#define SH4_SCRATCH_REGISTER "r11"

asm (
".text" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    "mov r14, r4" "\n"
    "mov.l 2f, " SH4_SCRATCH_REGISTER "\n"
    "braf " SH4_SCRATCH_REGISTER "\n"
    "nop" "\n"
    "1: .balign 4" "\n"
    "2: .long " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "-1b\n"
);

#elif COMPILER(MSVC) && CPU(X86)
extern "C" {
    __declspec(naked) EncodedJSValue HOST_CALL_RETURN_VALUE_OPTION getHostCallReturnValue()
    {
        __asm mov [esp + 4], ebp;
        __asm jmp getHostCallReturnValueWithExecState
    }
}
#endif

} // namespace JSC

#endif // ENABLE(JIT)