2 * Copyright (C) 2013-2015 Apple Inc. All rights reserved.
4 * Redistribution and use in source and binary forms, with or without
5 * modification, are permitted provided that the following conditions
7 * 1. Redistributions of source code must retain the above copyright
8 * notice, this list of conditions and the following disclaimer.
9 * 2. Redistributions in binary form must reproduce the above copyright
10 * notice, this list of conditions and the following disclaimer in the
11 * documentation and/or other materials provided with the distribution.
13 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
17 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 #include "JITOperations.h"
31 #include "ArrayConstructor.h"
32 #include "DFGCompilationMode.h"
33 #include "DFGDriver.h"
34 #include "DFGOSREntry.h"
35 #include "DFGThunks.h"
36 #include "DFGWorklist.h"
38 #include "DirectArguments.h"
40 #include "ErrorHandlingScope.h"
41 #include "ExceptionFuzz.h"
42 #include "GetterSetter.h"
43 #include "HostCallReturnValue.h"
45 #include "JITToDFGDeferredCompilationCallback.h"
46 #include "JSCInlines.h"
47 #include "JSCatchScope.h"
48 #include "JSFunctionNameScope.h"
49 #include "JSGlobalObjectFunctions.h"
50 #include "JSLexicalEnvironment.h"
51 #include "JSNameScope.h"
52 #include "JSPropertyNameEnumerator.h"
53 #include "JSStackInlines.h"
54 #include "JSWithScope.h"
55 #include "LegacyProfiler.h"
56 #include "ObjectConstructor.h"
57 #include "PropertyName.h"
59 #include "RepatchBuffer.h"
60 #include "ScopedArguments.h"
61 #include "TestRunnerUtils.h"
62 #include "TypeProfilerLog.h"
63 #include <wtf/InlineASM.h>
// Shared slow-path helper for op_push_*_scope: creates a ScopeType name scope
// chained onto the scope currently held in register `dst`, then writes the new
// scope back into `dst`.
// NOTE(review): this chunk is a mangled extraction — the embedded original line
// numbers jump (68 -> 71 -> 73), so braces and the declaration of `vm` used
// below were dropped from view. Code is kept byte-identical.
67 template<typename ScopeType>
68 void pushNameScope(ExecState* exec, int32_t dst, SymbolTable* symbolTable, EncodedJSValue encodedValue)
71 NativeCallFrameTracer tracer(&vm, exec);
// Only the baseline JIT may call this; the FIXMEs below explain why the
// register-based scope plumbing cannot work from the optimizing tiers.
73 ASSERT(!JITCode::isOptimizingJIT(exec->codeBlock()->jitType()));
75 // FIXME: This won't work if this operation is called from the DFG or FTL.
76 // This should be changed to pass in the new scope.
77 JSScope* currentScope = exec->uncheckedR(dst).Register::scope();
78 JSNameScope* scope = ScopeType::create(vm, exec->lexicalGlobalObject(), currentScope, symbolTable, JSValue::decode(encodedValue));
80 // FIXME: This won't work if this operation is called from the DFG or FTL.
81 // This should be changed to return the new scope.
82 exec->uncheckedR(dst) = scope;
// Portable "address this JIT operation will return to" macro, used below to
// identify the patchable call site when repatching by-val stubs.
// NOTE(review): the two mutually exclusive #defines are presumably guarded by
// compiler-detection #if/#else lines lost in extraction (line numbers jump
// 89 -> 91 -> 93) — _ReturnAddress is the MSVC intrinsic, __builtin_return_address
// the GCC/Clang one. Confirm against the full file.
88 void * _ReturnAddress(void);
89 #pragma intrinsic(_ReturnAddress)
91 #define OUR_RETURN_ADDRESS _ReturnAddress()
93 #define OUR_RETURN_ADDRESS __builtin_return_address(0)
// Opcode-sampling hook: resolves to the interpreter's sampler only when the
// feature is compiled in.
96 #if ENABLE(OPCODE_SAMPLING)
97 #define CTI_SAMPLER vm->interpreter->sampler()
// Throws a stack-overflow error on behalf of JIT prologue code. The incoming
// frame is not fully set up, so the code block is passed explicitly and the
// exception is raised against the caller's frame.
// NOTE(review): original lines 110-112 are missing from this extraction
// (numbers jump 109 -> 113) — presumably a null-callerFrame fallback; verify
// against the full file.
103 void JIT_OPERATION operationThrowStackOverflowError(ExecState* exec, CodeBlock* codeBlock)
105 // We pass in our own code block, because the callframe hasn't been populated.
106 VM* vm = codeBlock->vm();
108 VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
109 CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);
// The tracer restores top-of-stack bookkeeping; ErrorHandlingScope reserves
// stack headroom so error construction itself cannot overflow again.
113 NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
114 ErrorHandlingScope errorScope(*vm);
115 vm->throwException(callerFrame, createStackOverflowError(callerFrame));
// Arity-check slow path for calls: returns the number of missing arguments the
// caller's JIT code must pad, or throws a stack overflow (negative result from
// arityCheckFor) when the frame cannot be grown.
118 int32_t JIT_OPERATION operationCallArityCheck(ExecState* exec)
120 VM* vm = &exec->vm();
121 VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
122 CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);
124 JSStack& stack = vm->interpreter->stack();
126 int32_t missingArgCount = CommonSlowPaths::arityCheckFor(exec, &stack, CodeForCall);
// Negative count means the stack could not accommodate the padded frame.
127 if (missingArgCount < 0) {
128 NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
129 throwStackOverflowError(callerFrame);
132 return missingArgCount;
// Construct-side twin of operationCallArityCheck; identical except it checks
// arity against the CodeForConstruct entrypoint.
135 int32_t JIT_OPERATION operationConstructArityCheck(ExecState* exec)
137 VM* vm = &exec->vm();
138 VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
139 CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);
141 JSStack& stack = vm->interpreter->stack();
143 int32_t missingArgCount = CommonSlowPaths::arityCheckFor(exec, &stack, CodeForConstruct);
// Negative count means the stack could not accommodate the padded frame.
144 if (missingArgCount < 0) {
145 NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
146 throwStackOverflowError(callerFrame);
149 return missingArgCount;
// get_by_id slow path once inline caching has given up: records that the slow
// path was taken (so the IC machinery stops trying) and performs a plain get.
152 EncodedJSValue JIT_OPERATION operationGetById(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
154 VM* vm = &exec->vm();
155 NativeCallFrameTracer tracer(vm, exec);
157 stubInfo->tookSlowPath = true;
159 JSValue baseValue = JSValue::decode(base);
160 PropertySlot slot(baseValue);
161 Identifier ident = Identifier::fromUid(vm, uid);
162 return JSValue::encode(baseValue.get(exec, ident, slot));
// Fully generic get_by_id: no StructureStubInfo involvement at all, just a
// straight property lookup. Used where no inline cache exists.
165 EncodedJSValue JIT_OPERATION operationGetByIdGeneric(ExecState* exec, EncodedJSValue base, UniquedStringImpl* uid)
167 VM* vm = &exec->vm();
168 NativeCallFrameTracer tracer(vm, exec);
170 JSValue baseValue = JSValue::decode(base);
171 PropertySlot slot(baseValue);
172 Identifier ident = Identifier::fromUid(vm, uid);
173 return JSValue::encode(baseValue.get(exec, ident, slot));
// get_by_id slow path that grows the polymorphic access list: performs the
// lookup, then appends a case to the stub's list — but only if the stub's
// access type is unchanged (the lookup can reenter JS and repatch the stub).
176 EncodedJSValue JIT_OPERATION operationGetByIdBuildList(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
178 VM* vm = &exec->vm();
179 NativeCallFrameTracer tracer(vm, exec);
181 Identifier ident = Identifier::fromUid(vm, uid);
// Snapshot the access type before the get, to detect reentrant repatching.
182 AccessType accessType = static_cast<AccessType>(stubInfo->accessType);
184 JSValue baseValue = JSValue::decode(base);
185 PropertySlot slot(baseValue);
186 bool hasResult = baseValue.getPropertySlot(exec, ident, slot);
188 if (accessType == static_cast<AccessType>(stubInfo->accessType))
189 buildGetByIDList(exec, baseValue, ident, slot, *stubInfo);
191 return JSValue::encode(hasResult? slot.getValue(exec, ident) : jsUndefined());
// get_by_id slow path that attempts to install an inline cache after the
// lookup.
// NOTE(review): original lines 202, 204, 206, 208 are missing from this
// extraction — presumably the `if (stubInfo->seen) ... else ...` conditional
// that selects between repatching and merely marking the stub as seen; verify
// against the full file before editing.
194 EncodedJSValue JIT_OPERATION operationGetByIdOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
196 VM* vm = &exec->vm();
197 NativeCallFrameTracer tracer(vm, exec);
198 Identifier ident = Identifier::fromUid(vm, uid);
200 JSValue baseValue = JSValue::decode(base);
201 PropertySlot slot(baseValue);
203 bool hasResult = baseValue.getPropertySlot(exec, ident, slot);
205 repatchGetByID(exec, baseValue, ident, slot, *stubInfo);
207 stubInfo->seen = true;
209 return JSValue::encode(hasResult? slot.getValue(exec, ident) : jsUndefined());
// `in` operator slow path with inline-cache installation. Throws a TypeError
// for non-object RHS, otherwise answers the property query and tries to patch.
// NOTE(review): lines between originals 229 and 232 / 232 and 234 are missing
// — presumably the seen-check conditional around repatchIn; verify against the
// full file.
213 EncodedJSValue JIT_OPERATION operationInOptimize(ExecState* exec, StructureStubInfo* stubInfo, JSCell* base, UniquedStringImpl* key)
215 VM* vm = &exec->vm();
216 NativeCallFrameTracer tracer(vm, exec);
// `in` requires an object RHS per spec; anything else is a TypeError.
218 if (!base->isObject()) {
219 vm->throwException(exec, createInvalidInParameterError(exec, base));
220 return JSValue::encode(jsUndefined());
// Snapshot the access type before the lookup, to detect reentrant repatching.
223 AccessType accessType = static_cast<AccessType>(stubInfo->accessType);
225 Identifier ident = Identifier::fromUid(vm, key);
226 PropertySlot slot(base);
227 bool result = asObject(base)->getPropertySlot(exec, ident, slot);
229 RELEASE_ASSERT(accessType == stubInfo->accessType);
232 repatchIn(exec, base, ident, result, slot, *stubInfo);
234 stubInfo->seen = true;
236 return JSValue::encode(jsBoolean(result));
// `in` operator slow path once inline caching has given up: marks the stub as
// slow-path-taken and performs a plain hasProperty query.
239 EncodedJSValue JIT_OPERATION operationIn(ExecState* exec, StructureStubInfo* stubInfo, JSCell* base, UniquedStringImpl* key)
241 VM* vm = &exec->vm();
242 NativeCallFrameTracer tracer(vm, exec);
244 stubInfo->tookSlowPath = true;
// `in` requires an object RHS per spec; anything else is a TypeError.
246 if (!base->isObject()) {
247 vm->throwException(exec, createInvalidInParameterError(exec, base));
248 return JSValue::encode(jsUndefined());
251 Identifier ident = Identifier::fromUid(vm, key);
252 return JSValue::encode(jsBoolean(asObject(base)->hasProperty(exec, ident)));
// Fully generic `in` (key is an arbitrary JSValue, e.g. a computed subscript):
// delegates to the shared interpreter slow path.
255 EncodedJSValue JIT_OPERATION operationGenericIn(ExecState* exec, JSCell* base, EncodedJSValue key)
257 VM* vm = &exec->vm();
258 NativeCallFrameTracer tracer(vm, exec);
260 return JSValue::encode(jsBoolean(CommonSlowPaths::opIn(exec, JSValue::decode(key), base)));
// put_by_id slow path (strict mode, ordinary put) after inline caching has
// given up: marks the stub and performs a generic put.
263 void JIT_OPERATION operationPutByIdStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
265 VM* vm = &exec->vm();
266 NativeCallFrameTracer tracer(vm, exec);
268 stubInfo->tookSlowPath = true;
270 Identifier ident = Identifier::fromUid(vm, uid);
// `true` = strict mode: failed puts throw instead of silently doing nothing.
271 PutPropertySlot slot(JSValue::decode(encodedBase), true, exec->codeBlock()->putByIdContext());
272 JSValue::decode(encodedBase).put(exec, ident, JSValue::decode(encodedValue), slot);
// put_by_id slow path (sloppy mode, ordinary put) after inline caching has
// given up; identical to the strict variant except the slot is non-strict.
275 void JIT_OPERATION operationPutByIdNonStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
277 VM* vm = &exec->vm();
278 NativeCallFrameTracer tracer(vm, exec);
280 stubInfo->tookSlowPath = true;
282 Identifier ident = Identifier::fromUid(vm, uid);
283 PutPropertySlot slot(JSValue::decode(encodedBase), false, exec->codeBlock()->putByIdContext());
284 JSValue::decode(encodedBase).put(exec, ident, JSValue::decode(encodedValue), slot);
// put_by_id slow path (strict mode, *direct* put — defines the own property,
// bypassing setters and the prototype chain). Base must already be an object
// (asObject asserts this).
287 void JIT_OPERATION operationPutByIdDirectStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
289 VM* vm = &exec->vm();
290 NativeCallFrameTracer tracer(vm, exec);
292 stubInfo->tookSlowPath = true;
294 Identifier ident = Identifier::fromUid(vm, uid);
295 PutPropertySlot slot(JSValue::decode(encodedBase), true, exec->codeBlock()->putByIdContext());
296 asObject(JSValue::decode(encodedBase))->putDirect(exec->vm(), ident, JSValue::decode(encodedValue), slot);
// put_by_id slow path (sloppy mode, direct put); identical to the strict
// direct variant except the slot is non-strict.
299 void JIT_OPERATION operationPutByIdDirectNonStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
301 VM* vm = &exec->vm();
302 NativeCallFrameTracer tracer(vm, exec);
304 stubInfo->tookSlowPath = true;
306 Identifier ident = Identifier::fromUid(vm, uid);
307 PutPropertySlot slot(JSValue::decode(encodedBase), false, exec->codeBlock()->putByIdContext());
308 asObject(JSValue::decode(encodedBase))->putDirect(exec->vm(), ident, JSValue::decode(encodedValue), slot);
// put_by_id slow path (strict, ordinary) that attempts to install an inline
// cache after performing the put. The Structure is snapshotted *before* the
// put because the put itself may transition it.
// NOTE(review): original lines 327-329 and 331 are missing from this
// extraction — presumably the early `return` and the `if (stubInfo->seen)`
// conditional around repatchPutByID; verify against the full file.
311 void JIT_OPERATION operationPutByIdStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
313 VM* vm = &exec->vm();
314 NativeCallFrameTracer tracer(vm, exec);
316 Identifier ident = Identifier::fromUid(vm, uid);
// Snapshot the access type to detect reentrant repatching during the put.
317 AccessType accessType = static_cast<AccessType>(stubInfo->accessType);
319 JSValue value = JSValue::decode(encodedValue);
320 JSValue baseValue = JSValue::decode(encodedBase);
321 PutPropertySlot slot(baseValue, true, exec->codeBlock()->putByIdContext());
323 Structure* structure = baseValue.isCell() ? baseValue.asCell()->structure(*vm) : nullptr;
324 baseValue.put(exec, ident, value, slot);
326 if (accessType != static_cast<AccessType>(stubInfo->accessType))
330 repatchPutByID(exec, baseValue, structure, ident, slot, *stubInfo, NotDirect);
332 stubInfo->seen = true;
// put_by_id slow path (sloppy, ordinary) with inline-cache installation;
// mirrors the strict variant with a non-strict slot.
// NOTE(review): as in the strict variant, the early-return / seen-check lines
// (originals 351-353, 355) are missing from this extraction.
335 void JIT_OPERATION operationPutByIdNonStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
337 VM* vm = &exec->vm();
338 NativeCallFrameTracer tracer(vm, exec);
340 Identifier ident = Identifier::fromUid(vm, uid);
341 AccessType accessType = static_cast<AccessType>(stubInfo->accessType);
343 JSValue value = JSValue::decode(encodedValue);
344 JSValue baseValue = JSValue::decode(encodedBase);
345 PutPropertySlot slot(baseValue, false, exec->codeBlock()->putByIdContext());
// Snapshot the pre-put Structure; the put may transition it.
347 Structure* structure = baseValue.isCell() ? baseValue.asCell()->structure(*vm) : nullptr;
348 baseValue.put(exec, ident, value, slot);
350 if (accessType != static_cast<AccessType>(stubInfo->accessType))
354 repatchPutByID(exec, baseValue, structure, ident, slot, *stubInfo, NotDirect);
356 stubInfo->seen = true;
// put_by_id slow path (strict, direct) with inline-cache installation; the
// base must already be an object, and the put bypasses setters/prototypes.
// NOTE(review): the early-return / seen-check lines (originals 375-377, 379)
// are missing from this extraction.
359 void JIT_OPERATION operationPutByIdDirectStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
361 VM* vm = &exec->vm();
362 NativeCallFrameTracer tracer(vm, exec);
364 Identifier ident = Identifier::fromUid(vm, uid);
365 AccessType accessType = static_cast<AccessType>(stubInfo->accessType);
367 JSValue value = JSValue::decode(encodedValue);
368 JSObject* baseObject = asObject(JSValue::decode(encodedBase));
369 PutPropertySlot slot(baseObject, true, exec->codeBlock()->putByIdContext());
// Snapshot the pre-put Structure; the putDirect may transition it.
371 Structure* structure = baseObject->structure(*vm);
372 baseObject->putDirect(exec->vm(), ident, value, slot);
374 if (accessType != static_cast<AccessType>(stubInfo->accessType))
378 repatchPutByID(exec, baseObject, structure, ident, slot, *stubInfo, Direct);
380 stubInfo->seen = true;
// put_by_id slow path (sloppy, direct) with inline-cache installation;
// mirrors the strict direct variant with a non-strict slot.
// NOTE(review): the early-return / seen-check lines (originals 399-401, 403)
// are missing from this extraction.
383 void JIT_OPERATION operationPutByIdDirectNonStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
385 VM* vm = &exec->vm();
386 NativeCallFrameTracer tracer(vm, exec);
388 Identifier ident = Identifier::fromUid(vm, uid);
389 AccessType accessType = static_cast<AccessType>(stubInfo->accessType);
391 JSValue value = JSValue::decode(encodedValue);
392 JSObject* baseObject = asObject(JSValue::decode(encodedBase));
393 PutPropertySlot slot(baseObject, false, exec->codeBlock()->putByIdContext());
// Snapshot the pre-put Structure; the putDirect may transition it.
395 Structure* structure = baseObject->structure(*vm);
396 baseObject->putDirect(exec->vm(), ident, value, slot);
398 if (accessType != static_cast<AccessType>(stubInfo->accessType))
402 repatchPutByID(exec, baseObject, structure, ident, slot, *stubInfo, Direct);
404 stubInfo->seen = true;
// put_by_id slow path (strict, ordinary) that grows the polymorphic access
// list after the put, provided the stub's access type was not changed
// reentrantly during the put.
// NOTE(review): originals 421, 423-424 are missing from this extraction —
// presumably an early `return;` between the guard and buildPutByIdList.
407 void JIT_OPERATION operationPutByIdStrictBuildList(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
409 VM* vm = &exec->vm();
410 NativeCallFrameTracer tracer(vm, exec);
412 Identifier ident = Identifier::fromUid(vm, uid);
413 AccessType accessType = static_cast<AccessType>(stubInfo->accessType);
415 JSValue value = JSValue::decode(encodedValue);
416 JSValue baseValue = JSValue::decode(encodedBase);
417 PutPropertySlot slot(baseValue, true, exec->codeBlock()->putByIdContext());
// Snapshot the pre-put Structure; the put may transition it.
419 Structure* structure = baseValue.isCell() ? baseValue.asCell()->structure(*vm) : nullptr;
420 baseValue.put(exec, ident, value, slot);
422 if (accessType != static_cast<AccessType>(stubInfo->accessType))
425 buildPutByIdList(exec, baseValue, structure, ident, slot, *stubInfo, NotDirect);
// put_by_id slow path (sloppy, ordinary) growing the polymorphic access list;
// mirrors the strict variant with a non-strict slot.
// NOTE(review): the early-return lines (originals 442, 444-445) are missing
// from this extraction.
428 void JIT_OPERATION operationPutByIdNonStrictBuildList(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
430 VM* vm = &exec->vm();
431 NativeCallFrameTracer tracer(vm, exec);
433 Identifier ident = Identifier::fromUid(vm, uid);
434 AccessType accessType = static_cast<AccessType>(stubInfo->accessType);
436 JSValue value = JSValue::decode(encodedValue);
437 JSValue baseValue = JSValue::decode(encodedBase);
438 PutPropertySlot slot(baseValue, false, exec->codeBlock()->putByIdContext());
// Snapshot the pre-put Structure; the put may transition it.
440 Structure* structure = baseValue.isCell() ? baseValue.asCell()->structure(*vm) : nullptr;
441 baseValue.put(exec, ident, value, slot);
443 if (accessType != static_cast<AccessType>(stubInfo->accessType))
446 buildPutByIdList(exec, baseValue, structure, ident, slot, *stubInfo, NotDirect);
// put_by_id slow path (strict, direct) growing the polymorphic access list.
// Note this variant passes *vm to putDirect while the non-Optimize direct
// variants pass exec->vm() — same VM, different spelling.
// NOTE(review): the early-return lines (originals 463, 465-466) are missing
// from this extraction.
449 void JIT_OPERATION operationPutByIdDirectStrictBuildList(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
451 VM* vm = &exec->vm();
452 NativeCallFrameTracer tracer(vm, exec);
454 Identifier ident = Identifier::fromUid(vm, uid);
455 AccessType accessType = static_cast<AccessType>(stubInfo->accessType);
457 JSValue value = JSValue::decode(encodedValue);
458 JSObject* baseObject = asObject(JSValue::decode(encodedBase));
459 PutPropertySlot slot(baseObject, true, exec->codeBlock()->putByIdContext());
// Snapshot the pre-put Structure; the putDirect may transition it.
461 Structure* structure = baseObject->structure(*vm);
462 baseObject->putDirect(*vm, ident, value, slot);
464 if (accessType != static_cast<AccessType>(stubInfo->accessType))
467 buildPutByIdList(exec, baseObject, structure, ident, slot, *stubInfo, Direct);
// put_by_id slow path (sloppy, direct) growing the polymorphic access list;
// mirrors the strict direct variant with a non-strict slot.
// NOTE(review): the early-return lines (originals 484, 486-487) are missing
// from this extraction.
470 void JIT_OPERATION operationPutByIdDirectNonStrictBuildList(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
472 VM* vm = &exec->vm();
473 NativeCallFrameTracer tracer(vm, exec);
475 Identifier ident = Identifier::fromUid(vm, uid);
476 AccessType accessType = static_cast<AccessType>(stubInfo->accessType);
478 JSValue value = JSValue::decode(encodedValue);
479 JSObject* baseObject = asObject(JSValue::decode(encodedBase));
480 PutPropertySlot slot(baseObject, false, exec->codeBlock()->putByIdContext());
// Snapshot the pre-put Structure; the putDirect may transition it.
482 Structure* structure = baseObject->structure(*vm);
483 baseObject->putDirect(*vm, ident, value, slot);
485 if (accessType != static_cast<AccessType>(stubInfo->accessType))
488 buildPutByIdList(exec, baseObject, structure, ident, slot, *stubInfo, Direct);
// Finishes a transitioning put whose new Structure needs more out-of-line
// storage than the JIT fast path could allocate: reallocates the butterfly,
// installs the new Structure, then stores the value at `offset`.
// NOTE(review): the declaration of `vm` (original line ~493) is missing from
// this extraction; code kept byte-identical.
491 void JIT_OPERATION operationReallocateStorageAndFinishPut(ExecState* exec, JSObject* base, Structure* structure, PropertyOffset offset, EncodedJSValue value)
494 NativeCallFrameTracer tracer(&vm, exec);
// Sanity: this path is only for genuine growth that the inline allocator
// could not satisfy.
496 ASSERT(structure->outOfLineCapacity() > base->structure(vm)->outOfLineCapacity());
497 ASSERT(!vm.heap.storageAllocator().fastPathShouldSucceed(structure->outOfLineCapacity() * sizeof(JSValue)));
498 base->setStructureAndReallocateStorageIfNecessary(vm, structure);
499 base->putDirect(vm, offset, JSValue::decode(value));
// Generic put_by_val helper: fast-cases positive-int32 subscripts on objects
// (quick indexed store, or out-of-line putByIndex with out-of-bounds noted in
// the array profile), otherwise converts the subscript to a property key and
// does an ordinary put.
// NOTE(review): brace/`else`/`return` lines (originals 511, 514-515, 517) are
// missing from this extraction, so the branch structure shown here is
// incomplete; code kept byte-identical.
502 static void putByVal(CallFrame* callFrame, JSValue baseValue, JSValue subscript, JSValue value, ArrayProfile* arrayProfile)
504 VM& vm = callFrame->vm();
// JSValue::isUInt32 is true only for non-negative boxed int32s — all valid
// array indices.
505 if (LIKELY(subscript.isUInt32())) {
506 uint32_t i = subscript.asUInt32();
507 if (baseValue.isObject()) {
508 JSObject* object = asObject(baseValue);
509 if (object->canSetIndexQuickly(i))
510 object->setIndexQuickly(callFrame->vm(), i, value);
// Record the out-of-bounds/slow store so the JIT stops assuming in-bounds.
512 arrayProfile->setOutOfBounds();
513 object->methodTable(vm)->putByIndex(object, callFrame, i, value, callFrame->codeBlock()->isStrictMode());
516 baseValue.putByIndex(callFrame, i, value, callFrame->codeBlock()->isStrictMode());
518 auto property = subscript.toPropertyKey(callFrame);
519 if (!callFrame->vm().exception()) { // Don't put to an object if toString threw an exception.
520 PutPropertySlot slot(baseValue, callFrame->codeBlock()->isStrictMode());
521 baseValue.put(callFrame, property, value, slot);
// Direct (own-property, setter-bypassing) put_by_val helper, used by
// put-by-val-direct (e.g. array/object literals). Handles uint32 subscripts,
// index-valued doubles, then generic property keys — parsing string keys back
// to indices where possible.
// NOTE(review): several `return;`/brace lines (originals 535-537, 540-542,
// 548-551, 555-556, 559) are missing from this extraction; branch structure
// shown here is incomplete. Code kept byte-identical.
526 static void directPutByVal(CallFrame* callFrame, JSObject* baseObject, JSValue subscript, JSValue value, ArrayProfile* arrayProfile)
528 bool isStrictMode = callFrame->codeBlock()->isStrictMode();
529 if (LIKELY(subscript.isUInt32())) {
530 // Despite its name, JSValue::isUInt32 will return true only for positive boxed int32_t; all those values are valid array indices.
531 uint32_t index = subscript.asUInt32();
532 ASSERT(isIndex(index));
533 if (baseObject->canSetIndexQuicklyForPutDirect(index)) {
534 baseObject->setIndexQuickly(callFrame->vm(), index, value);
// Record the slow store so the JIT stops assuming the fast path.
538 arrayProfile->setOutOfBounds();
539 baseObject->putDirectIndex(callFrame, index, value, 0, isStrictMode ? PutDirectIndexShouldThrow : PutDirectIndexShouldNotThrow);
// A double that round-trips through uint32 and is a valid index is treated as
// an indexed store, matching toPropertyKey semantics without allocating.
543 if (subscript.isDouble()) {
544 double subscriptAsDouble = subscript.asDouble();
545 uint32_t subscriptAsUInt32 = static_cast<uint32_t>(subscriptAsDouble);
546 if (subscriptAsDouble == subscriptAsUInt32 && isIndex(subscriptAsUInt32)) {
547 baseObject->putDirectIndex(callFrame, subscriptAsUInt32, value, 0, isStrictMode ? PutDirectIndexShouldThrow : PutDirectIndexShouldNotThrow);
552 // Don't put to an object if toString threw an exception.
553 auto property = subscript.toPropertyKey(callFrame);
554 if (callFrame->vm().exception())
// String keys that spell an array index still go through putDirectIndex.
557 if (Optional<uint32_t> index = parseIndex(property))
558 baseObject->putDirectIndex(callFrame, index.value(), value, 0, isStrictMode ? PutDirectIndexShouldThrow : PutDirectIndexShouldNotThrow);
560 PutPropertySlot slot(baseObject, isStrictMode);
561 baseObject->putDirect(callFrame->vm(), property, value, slot);
// put_by_val slow path with self-patching: for object/int32 operands it tries
// to compile a specialized by-val stub for the observed array shape; after 10
// unpatched slow-path hits (or an index-intercepting object) it patches the
// call site over to the generic operation for good. Finally performs the put.
// NOTE(review): the `vm` declaration, didOptimize bookkeeping, and several
// brace lines (originals 566, 577, 582, 591, 593-597, 607-610) are missing
// from this extraction; code kept byte-identical.
564 void JIT_OPERATION operationPutByVal(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ArrayProfile* arrayProfile)
567 NativeCallFrameTracer tracer(&vm, exec);
569 JSValue baseValue = JSValue::decode(encodedBaseValue);
570 JSValue subscript = JSValue::decode(encodedSubscript);
571 JSValue value = JSValue::decode(encodedValue);
573 if (baseValue.isObject() && subscript.isInt32()) {
574 // See if it's worth optimizing at all.
575 JSObject* object = asObject(baseValue);
576 bool didOptimize = false;
// Locate this call site's ByValInfo via the bytecode offset (the return PC
// maps to the instruction *after* the op, hence the -1).
578 unsigned bytecodeOffset = exec->locationAsBytecodeOffset();
579 ASSERT(bytecodeOffset);
580 ByValInfo& byValInfo = exec->codeBlock()->getByValInfo(bytecodeOffset - 1);
581 ASSERT(!byValInfo.stubRoutine);
583 Structure* structure = object->structure(vm);
584 if (hasOptimizableIndexing(structure)) {
585 // Attempt to optimize.
586 JITArrayMode arrayMode = jitArrayModeForStructure(structure);
587 if (jitArrayModePermitsPut(arrayMode) && arrayMode != byValInfo.arrayMode) {
588 CodeBlock* codeBlock = exec->codeBlock();
589 ConcurrentJITLocker locker(codeBlock->m_lock);
590 arrayProfile->computeUpdatedPrediction(locker, codeBlock, structure);
592 JIT::compilePutByVal(&vm, exec->codeBlock(), &byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS), arrayMode);
598 // If we take slow path more than 10 times without patching then make sure we
599 // never make that mistake again. Or, if we failed to patch and we have some object
600 // that intercepts indexed get, then don't even wait until 10 times. For cases
601 // where we see non-index-intercepting objects, this gives 10 iterations worth of
602 // opportunity for us to observe that the get_by_val may be polymorphic.
603 if (++byValInfo.slowPathCount >= 10
604 || object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero()) {
605 // Don't ever try to optimize.
606 ctiPatchCallByReturnAddress(exec->codeBlock(), ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(operationPutByValGeneric));
611 putByVal(exec, baseValue, subscript, value, arrayProfile);
// Direct-put twin of operationPutByVal: same self-patching strategy (compile a
// specialized direct-put-by-val stub, or after 10 unpatched slow hits patch to
// operationDirectPutByValGeneric), then performs the direct put. Base must be
// an object (RELEASE_ASSERT).
// NOTE(review): the didOptimize bookkeeping and several brace lines
// (originals 627, 632, 641, 643-647, 657-659) are missing from this
// extraction; code kept byte-identical.
614 void JIT_OPERATION operationDirectPutByVal(ExecState* callFrame, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ArrayProfile* arrayProfile)
616 VM& vm = callFrame->vm();
617 NativeCallFrameTracer tracer(&vm, callFrame);
619 JSValue baseValue = JSValue::decode(encodedBaseValue);
620 JSValue subscript = JSValue::decode(encodedSubscript);
621 JSValue value = JSValue::decode(encodedValue);
622 RELEASE_ASSERT(baseValue.isObject());
623 JSObject* object = asObject(baseValue);
624 if (subscript.isInt32()) {
625 // See if it's worth optimizing at all.
626 bool didOptimize = false;
// Locate this call site's ByValInfo via the bytecode offset (-1 because the
// return PC maps to the following instruction).
628 unsigned bytecodeOffset = callFrame->locationAsBytecodeOffset();
629 ASSERT(bytecodeOffset);
630 ByValInfo& byValInfo = callFrame->codeBlock()->getByValInfo(bytecodeOffset - 1);
631 ASSERT(!byValInfo.stubRoutine);
633 Structure* structure = object->structure(vm);
634 if (hasOptimizableIndexing(structure)) {
635 // Attempt to optimize.
636 JITArrayMode arrayMode = jitArrayModeForStructure(structure);
637 if (jitArrayModePermitsPut(arrayMode) && arrayMode != byValInfo.arrayMode) {
638 CodeBlock* codeBlock = callFrame->codeBlock();
639 ConcurrentJITLocker locker(codeBlock->m_lock);
640 arrayProfile->computeUpdatedPrediction(locker, codeBlock, structure);
642 JIT::compileDirectPutByVal(&vm, callFrame->codeBlock(), &byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS), arrayMode);
648 // If we take slow path more than 10 times without patching then make sure we
649 // never make that mistake again. Or, if we failed to patch and we have some object
650 // that intercepts indexed get, then don't even wait until 10 times. For cases
651 // where we see non-index-intercepting objects, this gives 10 iterations worth of
652 // opportunity for us to observe that the get_by_val may be polymorphic.
653 if (++byValInfo.slowPathCount >= 10
654 || object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero()) {
655 // Don't ever try to optimize.
656 ctiPatchCallByReturnAddress(callFrame->codeBlock(), ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(operationDirectPutByValGeneric));
660 directPutByVal(callFrame, object, subscript, value, arrayProfile);
// Permanently-generic put_by_val: the target the patchable call site is
// repointed to once optimization is abandoned. Just forwards to putByVal.
// NOTE(review): the `vm` declaration (original line ~665) is missing from
// this extraction; code kept byte-identical.
663 void JIT_OPERATION operationPutByValGeneric(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ArrayProfile* arrayProfile)
666 NativeCallFrameTracer tracer(&vm, exec);
668 JSValue baseValue = JSValue::decode(encodedBaseValue);
669 JSValue subscript = JSValue::decode(encodedSubscript);
670 JSValue value = JSValue::decode(encodedValue);
672 putByVal(exec, baseValue, subscript, value, arrayProfile);
// Permanently-generic direct put_by_val; forwards to directPutByVal. Base must
// be an object (RELEASE_ASSERT).
// NOTE(review): the `vm` declaration (original line ~678) is missing from
// this extraction; code kept byte-identical.
676 void JIT_OPERATION operationDirectPutByValGeneric(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ArrayProfile* arrayProfile)
679 NativeCallFrameTracer tracer(&vm, exec);
681 JSValue baseValue = JSValue::decode(encodedBaseValue);
682 JSValue subscript = JSValue::decode(encodedSubscript);
683 JSValue value = JSValue::decode(encodedValue);
684 RELEASE_ASSERT(baseValue.isObject());
685 directPutByVal(exec, asObject(baseValue), subscript, value, arrayProfile);
// op_call_eval slow path: if the callee is the genuine global eval function,
// runs eval on the callee frame; otherwise returns the empty JSValue so the
// JIT falls back to a normal call.
// NOTE(review): the guard between originals 701 and 703 is missing from this
// extraction — presumably `if (vm->exception())` preceding the empty-value
// return; verify against the full file.
688 EncodedJSValue JIT_OPERATION operationCallEval(ExecState* exec, ExecState* execCallee)
// Function-code frames that need an activation must have one before eval runs.
691 ASSERT_UNUSED(exec, exec->codeBlock()->codeType() != FunctionCode
692 || !exec->codeBlock()->needsActivation()
693 || exec->hasActivation());
// The callee frame is a native (code-block-less) frame for the eval call.
695 execCallee->setCodeBlock(0);
697 if (!isHostFunction(execCallee->calleeAsValue(), globalFuncEval))
698 return JSValue::encode(JSValue());
700 VM* vm = &execCallee->vm();
701 JSValue result = eval(execCallee);
703 return EncodedJSValue();
705 return JSValue::encode(result);
// Invokes a non-JS callee (host function/constructor) for the call/construct
// link slow paths. Returns the machine-code address to continue at: the
// getHostCallReturnValue trampoline on success, or the throw-exception stub
// when the callee isn't callable/constructible or the host call raised.
// NOTE(review): `CallData callData;` (original ~716) and the exception guards
// after the host invocations (originals 725, 747) are missing from this
// extraction; code kept byte-identical.
708 static void* handleHostCall(ExecState* execCallee, JSValue callee, CodeSpecializationKind kind)
710 ExecState* exec = execCallee->callerFrame();
711 VM* vm = &exec->vm();
// The callee frame is native — no CodeBlock.
713 execCallee->setCodeBlock(0);
715 if (kind == CodeForCall) {
717 CallType callType = getCallData(callee, callData);
// A JS callee would have been link()ed, never routed here.
719 ASSERT(callType != CallTypeJS);
721 if (callType == CallTypeHost) {
722 NativeCallFrameTracer tracer(vm, execCallee);
723 execCallee->setCallee(asObject(callee));
// The result is stashed in vm->hostCallReturnValue; the returned trampoline
// loads it into the return register.
724 vm->hostCallReturnValue = JSValue::decode(callData.native.function(execCallee));
726 return vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress();
728 return reinterpret_cast<void*>(getHostCallReturnValue);
731 ASSERT(callType == CallTypeNone);
732 exec->vm().throwException(exec, createNotAFunctionError(exec, callee));
733 return vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress();
// Construct side mirrors the call side exactly.
736 ASSERT(kind == CodeForConstruct);
738 ConstructData constructData;
739 ConstructType constructType = getConstructData(callee, constructData);
741 ASSERT(constructType != ConstructTypeJS);
743 if (constructType == ConstructTypeHost) {
744 NativeCallFrameTracer tracer(vm, execCallee);
745 execCallee->setCallee(asObject(callee));
746 vm->hostCallReturnValue = JSValue::decode(constructData.native.function(execCallee));
748 return vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress();
750 return reinterpret_cast<void*>(getHostCallReturnValue);
753 ASSERT(constructType == ConstructTypeNone);
754 exec->vm().throwException(exec, createNotAConstructorError(exec, callee));
755 return vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress();
// Slow path behind an unlinked call site: resolves the callee, ensures it has
// JIT code for `kind` (compiling via prepareForExecution if needed), links the
// call site to the resulting entrypoint, and returns that entrypoint's address
// so the caller can jump straight to it this time.
// NOTE(review): several lines are missing from this extraction — the `else {`
// pairing the host-function branch (~783), the `if (error) {` guard before the
// throw (~792), and the `else` before `arity = ArityCheckNotRequired` (~800);
// the control flow shown here is incomplete. Code kept byte-identical.
758 inline char* linkFor(
759 ExecState* execCallee, CallLinkInfo* callLinkInfo, CodeSpecializationKind kind,
760 RegisterPreservationMode registers)
762 ExecState* exec = execCallee->callerFrame();
763 VM* vm = &exec->vm();
764 NativeCallFrameTracer tracer(vm, exec);
766 JSValue calleeAsValue = execCallee->calleeAsValue();
767 JSCell* calleeAsFunctionCell = getJSFunction(calleeAsValue);
768 if (!calleeAsFunctionCell) {
769 // FIXME: We should cache these kinds of calls. They can be common and currently they are
771 // https://bugs.webkit.org/show_bug.cgi?id=144458
772 return reinterpret_cast<char*>(handleHostCall(execCallee, calleeAsValue, kind));
775 JSFunction* callee = jsCast<JSFunction*>(calleeAsFunctionCell);
776 JSScope* scope = callee->scopeUnchecked();
777 ExecutableBase* executable = callee->executable();
779 MacroAssemblerCodePtr codePtr;
780 CodeBlock* codeBlock = 0;
781 if (executable->isHostFunction())
// Host functions always go through the arity-checking entrypoint.
782 codePtr = executable->entrypointFor(*vm, kind, MustCheckArity, registers);
784 FunctionExecutable* functionExecutable = static_cast<FunctionExecutable*>(executable);
// e.g. constructing an arrow function / method is a TypeError.
786 if (!isCall(kind) && functionExecutable->constructAbility() == ConstructAbility::CannotConstruct) {
787 exec->vm().throwException(exec, createNotAConstructorError(exec, callee));
788 return reinterpret_cast<char*>(vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress());
// Compile (or fetch) the callee's code for this specialization kind.
791 JSObject* error = functionExecutable->prepareForExecution(execCallee, callee, scope, kind);
793 exec->vm().throwException(exec, error);
794 return reinterpret_cast<char*>(vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress());
796 codeBlock = functionExecutable->codeBlockFor(kind);
// Varargs call sites can't prove arity statically, so always check it.
797 ArityCheckMode arity;
798 if (execCallee->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters()) || callLinkInfo->callType() == CallLinkInfo::CallVarargs || callLinkInfo->callType() == CallLinkInfo::ConstructVarargs)
799 arity = MustCheckArity;
801 arity = ArityCheckNotRequired;
802 codePtr = functionExecutable->entrypointFor(*vm, kind, arity, registers);
// First sighting just marks the site seen; a repeat sighting actually links.
804 if (!callLinkInfo->seenOnce())
805 callLinkInfo->setSeen();
807 linkFor(execCallee, *callLinkInfo, codeBlock, callee, codePtr, kind, registers);
809 return reinterpret_cast<char*>(codePtr.executableAddress());
// Thin JIT-callable wrappers over linkFor for the four (call/construct) x
// (plain/register-preserving) link stubs.
812 char* JIT_OPERATION operationLinkCall(ExecState* execCallee, CallLinkInfo* callLinkInfo)
814 return linkFor(execCallee, callLinkInfo, CodeForCall, RegisterPreservationNotRequired);
817 char* JIT_OPERATION operationLinkConstruct(ExecState* execCallee, CallLinkInfo* callLinkInfo)
819 return linkFor(execCallee, callLinkInfo, CodeForConstruct, RegisterPreservationNotRequired);
822 char* JIT_OPERATION operationLinkCallThatPreservesRegs(ExecState* execCallee, CallLinkInfo* callLinkInfo)
824 return linkFor(execCallee, callLinkInfo, CodeForCall, MustPreserveRegisters);
827 char* JIT_OPERATION operationLinkConstructThatPreservesRegs(ExecState* execCallee, CallLinkInfo* callLinkInfo)
829 return linkFor(execCallee, callLinkInfo, CodeForConstruct, MustPreserveRegisters);
// Virtual-call slow path: resolves the callee each time (no linking), compiles
// it if needed, and returns the arity-checking entrypoint address. Also
// reports the callee cell to the caller via `calleeAsFunctionCell` so
// operationLinkPolymorphicCall can build a polymorphic stub from it.
// NOTE(review): the `if (error) {` guard between originals 856 and 858 is
// missing from this extraction; code kept byte-identical.
832 inline char* virtualForWithFunction(
833 ExecState* execCallee, CodeSpecializationKind kind, RegisterPreservationMode registers,
834 JSCell*& calleeAsFunctionCell)
836 ExecState* exec = execCallee->callerFrame();
837 VM* vm = &exec->vm();
838 NativeCallFrameTracer tracer(vm, exec);
840 JSValue calleeAsValue = execCallee->calleeAsValue();
841 calleeAsFunctionCell = getJSFunction(calleeAsValue);
842 if (UNLIKELY(!calleeAsFunctionCell))
843 return reinterpret_cast<char*>(handleHostCall(execCallee, calleeAsValue, kind));
845 JSFunction* function = jsCast<JSFunction*>(calleeAsFunctionCell);
846 JSScope* scope = function->scopeUnchecked();
847 ExecutableBase* executable = function->executable();
848 if (UNLIKELY(!executable->hasJITCodeFor(kind))) {
849 FunctionExecutable* functionExecutable = static_cast<FunctionExecutable*>(executable);
// e.g. constructing an arrow function / method is a TypeError.
851 if (!isCall(kind) && functionExecutable->constructAbility() == ConstructAbility::CannotConstruct) {
852 exec->vm().throwException(exec, createNotAConstructorError(exec, function));
853 return reinterpret_cast<char*>(vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress());
856 JSObject* error = functionExecutable->prepareForExecution(execCallee, function, scope, kind);
858 exec->vm().throwException(exec, error);
859 return reinterpret_cast<char*>(vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress());
// Virtual calls can't prove arity statically, so always use the checking entry.
862 return reinterpret_cast<char*>(executable->entrypointFor(
863 *vm, kind, MustCheckArity, registers).executableAddress());
866 inline char* virtualFor(
867 ExecState* execCallee, CodeSpecializationKind kind, RegisterPreservationMode registers)
869 JSCell* calleeAsFunctionCellIgnored;
870 return virtualForWithFunction(execCallee, kind, registers, calleeAsFunctionCellIgnored);
873 char* JIT_OPERATION operationLinkPolymorphicCall(ExecState* execCallee, CallLinkInfo* callLinkInfo)
875 JSCell* calleeAsFunctionCell;
876 char* result = virtualForWithFunction(execCallee, CodeForCall, RegisterPreservationNotRequired, calleeAsFunctionCell);
878 linkPolymorphicCall(execCallee, *callLinkInfo, CallVariant(calleeAsFunctionCell), RegisterPreservationNotRequired);
883 char* JIT_OPERATION operationVirtualCall(ExecState* execCallee, CallLinkInfo*)
885 return virtualFor(execCallee, CodeForCall, RegisterPreservationNotRequired);
888 char* JIT_OPERATION operationVirtualConstruct(ExecState* execCallee, CallLinkInfo*)
890 return virtualFor(execCallee, CodeForConstruct, RegisterPreservationNotRequired);
893 char* JIT_OPERATION operationLinkPolymorphicCallThatPreservesRegs(ExecState* execCallee, CallLinkInfo* callLinkInfo)
895 JSCell* calleeAsFunctionCell;
896 char* result = virtualForWithFunction(execCallee, CodeForCall, MustPreserveRegisters, calleeAsFunctionCell);
898 linkPolymorphicCall(execCallee, *callLinkInfo, CallVariant(calleeAsFunctionCell), MustPreserveRegisters);
903 char* JIT_OPERATION operationVirtualCallThatPreservesRegs(ExecState* execCallee, CallLinkInfo*)
905 return virtualFor(execCallee, CodeForCall, MustPreserveRegisters);
908 char* JIT_OPERATION operationVirtualConstructThatPreservesRegs(ExecState* execCallee, CallLinkInfo*)
910 return virtualFor(execCallee, CodeForConstruct, MustPreserveRegisters);
913 size_t JIT_OPERATION operationCompareLess(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
915 VM* vm = &exec->vm();
916 NativeCallFrameTracer tracer(vm, exec);
918 return jsLess<true>(exec, JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
921 size_t JIT_OPERATION operationCompareLessEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
923 VM* vm = &exec->vm();
924 NativeCallFrameTracer tracer(vm, exec);
926 return jsLessEq<true>(exec, JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
929 size_t JIT_OPERATION operationCompareGreater(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
931 VM* vm = &exec->vm();
932 NativeCallFrameTracer tracer(vm, exec);
934 return jsLess<false>(exec, JSValue::decode(encodedOp2), JSValue::decode(encodedOp1));
937 size_t JIT_OPERATION operationCompareGreaterEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
939 VM* vm = &exec->vm();
940 NativeCallFrameTracer tracer(vm, exec);
942 return jsLessEq<false>(exec, JSValue::decode(encodedOp2), JSValue::decode(encodedOp1));
945 size_t JIT_OPERATION operationConvertJSValueToBoolean(ExecState* exec, EncodedJSValue encodedOp)
947 VM* vm = &exec->vm();
948 NativeCallFrameTracer tracer(vm, exec);
950 return JSValue::decode(encodedOp).toBoolean(exec);
953 size_t JIT_OPERATION operationCompareEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
955 VM* vm = &exec->vm();
956 NativeCallFrameTracer tracer(vm, exec);
958 return JSValue::equalSlowCaseInline(exec, JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
962 EncodedJSValue JIT_OPERATION operationCompareStringEq(ExecState* exec, JSCell* left, JSCell* right)
964 size_t JIT_OPERATION operationCompareStringEq(ExecState* exec, JSCell* left, JSCell* right)
967 VM* vm = &exec->vm();
968 NativeCallFrameTracer tracer(vm, exec);
970 bool result = WTF::equal(*asString(left)->value(exec).impl(), *asString(right)->value(exec).impl());
972 return JSValue::encode(jsBoolean(result));
978 size_t JIT_OPERATION operationHasProperty(ExecState* exec, JSObject* base, JSString* property)
980 int result = base->hasProperty(exec, property->toIdentifier(exec));
985 EncodedJSValue JIT_OPERATION operationNewArrayWithProfile(ExecState* exec, ArrayAllocationProfile* profile, const JSValue* values, int size)
987 VM* vm = &exec->vm();
988 NativeCallFrameTracer tracer(vm, exec);
989 return JSValue::encode(constructArrayNegativeIndexed(exec, profile, values, size));
992 EncodedJSValue JIT_OPERATION operationNewArrayBufferWithProfile(ExecState* exec, ArrayAllocationProfile* profile, const JSValue* values, int size)
994 VM* vm = &exec->vm();
995 NativeCallFrameTracer tracer(vm, exec);
996 return JSValue::encode(constructArray(exec, profile, values, size));
999 EncodedJSValue JIT_OPERATION operationNewArrayWithSizeAndProfile(ExecState* exec, ArrayAllocationProfile* profile, EncodedJSValue size)
1001 VM* vm = &exec->vm();
1002 NativeCallFrameTracer tracer(vm, exec);
1003 JSValue sizeValue = JSValue::decode(size);
1004 return JSValue::encode(constructArrayWithSizeQuirk(exec, profile, exec->lexicalGlobalObject(), sizeValue));
1007 EncodedJSValue JIT_OPERATION operationNewFunction(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
1009 ASSERT(functionExecutable->inherits(FunctionExecutable::info()));
1010 VM& vm = exec->vm();
1011 NativeCallFrameTracer tracer(&vm, exec);
1012 return JSValue::encode(JSFunction::create(vm, static_cast<FunctionExecutable*>(functionExecutable), scope));
1015 EncodedJSValue JIT_OPERATION operationNewFunctionWithInvalidatedReallocationWatchpoint(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
1017 ASSERT(functionExecutable->inherits(FunctionExecutable::info()));
1018 VM& vm = exec->vm();
1019 NativeCallFrameTracer tracer(&vm, exec);
1020 return JSValue::encode(JSFunction::createWithInvalidatedReallocationWatchpoint(vm, static_cast<FunctionExecutable*>(functionExecutable), scope));
1023 JSCell* JIT_OPERATION operationNewObject(ExecState* exec, Structure* structure)
1025 VM* vm = &exec->vm();
1026 NativeCallFrameTracer tracer(vm, exec);
1028 return constructEmptyObject(exec, structure);
1031 EncodedJSValue JIT_OPERATION operationNewRegexp(ExecState* exec, void* regexpPtr)
1033 VM& vm = exec->vm();
1034 NativeCallFrameTracer tracer(&vm, exec);
1035 RegExp* regexp = static_cast<RegExp*>(regexpPtr);
1036 if (!regexp->isValid()) {
1037 vm.throwException(exec, createSyntaxError(exec, ASCIILiteral("Invalid flags supplied to RegExp constructor.")));
1038 return JSValue::encode(jsUndefined());
1041 return JSValue::encode(RegExpObject::create(vm, exec->lexicalGlobalObject()->regExpStructure(), regexp));
1044 void JIT_OPERATION operationHandleWatchdogTimer(ExecState* exec)
1046 VM& vm = exec->vm();
1047 NativeCallFrameTracer tracer(&vm, exec);
1049 if (UNLIKELY(vm.watchdog && vm.watchdog->didFire(exec)))
1050 vm.throwException(exec, createTerminatedExecutionException(&vm));
1053 void JIT_OPERATION operationThrowStaticError(ExecState* exec, EncodedJSValue encodedValue, int32_t referenceErrorFlag)
1055 VM& vm = exec->vm();
1056 NativeCallFrameTracer tracer(&vm, exec);
1057 JSValue errorMessageValue = JSValue::decode(encodedValue);
1058 RELEASE_ASSERT(errorMessageValue.isString());
1059 String errorMessage = asString(errorMessageValue)->value(exec);
1060 if (referenceErrorFlag)
1061 vm.throwException(exec, createReferenceError(exec, errorMessage));
1063 vm.throwException(exec, createTypeError(exec, errorMessage));
1066 void JIT_OPERATION operationDebug(ExecState* exec, int32_t debugHookID)
1068 VM& vm = exec->vm();
1069 NativeCallFrameTracer tracer(&vm, exec);
1071 vm.interpreter->debug(exec, static_cast<DebugHookID>(debugHookID));
1075 static void updateAllPredictionsAndOptimizeAfterWarmUp(CodeBlock* codeBlock)
1077 codeBlock->updateAllPredictions();
1078 codeBlock->optimizeAfterWarmUp();
1081 SlowPathReturnType JIT_OPERATION operationOptimize(ExecState* exec, int32_t bytecodeIndex)
1083 VM& vm = exec->vm();
1084 NativeCallFrameTracer tracer(&vm, exec);
1086 // Defer GC for a while so that it doesn't run between when we enter into this
1087 // slow path and when we figure out the state of our code block. This prevents
1088 // a number of awkward reentrancy scenarios, including:
1090 // - The optimized version of our code block being jettisoned by GC right after
1091 // we concluded that we wanted to use it, but have not planted it into the JS
1094 // - An optimized version of our code block being installed just as we decided
1095 // that it wasn't ready yet.
1097 // Note that jettisoning won't happen if we already initiated OSR, because in
1098 // that case we would have already planted the optimized code block into the JS
1100 DeferGCForAWhile deferGC(vm.heap);
1102 CodeBlock* codeBlock = exec->codeBlock();
1103 if (codeBlock->jitType() != JITCode::BaselineJIT) {
1104 dataLog("Unexpected code block in Baseline->DFG tier-up: ", *codeBlock, "\n");
1105 RELEASE_ASSERT_NOT_REACHED();
1108 if (bytecodeIndex) {
1109 // If we're attempting to OSR from a loop, assume that this should be
1110 // separately optimized.
1111 codeBlock->m_shouldAlwaysBeInlined = false;
1114 if (Options::verboseOSR()) {
1116 *codeBlock, ": Entered optimize with bytecodeIndex = ", bytecodeIndex,
1117 ", executeCounter = ", codeBlock->jitExecuteCounter(),
1118 ", optimizationDelayCounter = ", codeBlock->reoptimizationRetryCounter(),
1119 ", exitCounter = ");
1120 if (codeBlock->hasOptimizedReplacement())
1121 dataLog(codeBlock->replacement()->osrExitCounter());
1127 if (!codeBlock->checkIfOptimizationThresholdReached()) {
1128 codeBlock->updateAllPredictions();
1129 if (Options::verboseOSR())
1130 dataLog("Choosing not to optimize ", *codeBlock, " yet, because the threshold hasn't been reached.\n");
1131 return encodeResult(0, 0);
1134 if (vm.enabledProfiler()) {
1135 updateAllPredictionsAndOptimizeAfterWarmUp(codeBlock);
1136 return encodeResult(0, 0);
1139 Debugger* debugger = codeBlock->globalObject()->debugger();
1140 if (debugger && (debugger->isStepping() || codeBlock->baselineAlternative()->hasDebuggerRequests())) {
1141 updateAllPredictionsAndOptimizeAfterWarmUp(codeBlock);
1142 return encodeResult(0, 0);
1145 if (codeBlock->m_shouldAlwaysBeInlined) {
1146 updateAllPredictionsAndOptimizeAfterWarmUp(codeBlock);
1147 if (Options::verboseOSR())
1148 dataLog("Choosing not to optimize ", *codeBlock, " yet, because m_shouldAlwaysBeInlined == true.\n");
1149 return encodeResult(0, 0);
1152 // We cannot be in the process of asynchronous compilation and also have an optimized
1154 DFG::Worklist* worklist = DFG::existingGlobalDFGWorklistOrNull();
1157 || !(worklist->compilationState(DFG::CompilationKey(codeBlock, DFG::DFGMode)) != DFG::Worklist::NotKnown
1158 && codeBlock->hasOptimizedReplacement()));
1160 DFG::Worklist::State worklistState;
1162 // The call to DFG::Worklist::completeAllReadyPlansForVM() will complete all ready
1163 // (i.e. compiled) code blocks. But if it completes ours, we also need to know
1164 // what the result was so that we don't plow ahead and attempt OSR or immediate
1165 // reoptimization. This will have already also set the appropriate JIT execution
1166 // count threshold depending on what happened, so if the compilation was anything
1167 // but successful we just want to return early. See the case for worklistState ==
1168 // DFG::Worklist::Compiled, below.
1170 // Note that we could have alternatively just called Worklist::compilationState()
1171 // here, and if it returned Compiled, we could have then called
1172 // completeAndScheduleOSR() below. But that would have meant that it could take
1173 // longer for code blocks to be completed: they would only complete when *their*
1174 // execution count trigger fired; but that could take a while since the firing is
1175 // racy. It could also mean that code blocks that never run again after being
1176 // compiled would sit on the worklist until next GC. That's fine, but it's
1177 // probably a waste of memory. Our goal here is to complete code blocks as soon as
1178 // possible in order to minimize the chances of us executing baseline code after
1179 // optimized code is already available.
1180 worklistState = worklist->completeAllReadyPlansForVM(
1181 vm, DFG::CompilationKey(codeBlock, DFG::DFGMode));
1183 worklistState = DFG::Worklist::NotKnown;
1185 if (worklistState == DFG::Worklist::Compiling) {
1186 // We cannot be in the process of asynchronous compilation and also have an optimized
1188 RELEASE_ASSERT(!codeBlock->hasOptimizedReplacement());
1189 codeBlock->setOptimizationThresholdBasedOnCompilationResult(CompilationDeferred);
1190 return encodeResult(0, 0);
1193 if (worklistState == DFG::Worklist::Compiled) {
1194 // If we don't have an optimized replacement but we did just get compiled, then
1195 // the compilation failed or was invalidated, in which case the execution count
1196 // thresholds have already been set appropriately by
1197 // CodeBlock::setOptimizationThresholdBasedOnCompilationResult() and we have
1198 // nothing left to do.
1199 if (!codeBlock->hasOptimizedReplacement()) {
1200 codeBlock->updateAllPredictions();
1201 if (Options::verboseOSR())
1202 dataLog("Code block ", *codeBlock, " was compiled but it doesn't have an optimized replacement.\n");
1203 return encodeResult(0, 0);
1205 } else if (codeBlock->hasOptimizedReplacement()) {
1206 if (Options::verboseOSR())
1207 dataLog("Considering OSR ", *codeBlock, " -> ", *codeBlock->replacement(), ".\n");
1208 // If we have an optimized replacement, then it must be the case that we entered
1209 // cti_optimize from a loop. That's because if there's an optimized replacement,
1210 // then all calls to this function will be relinked to the replacement and so
1211 // the prologue OSR will never fire.
1213 // This is an interesting threshold check. Consider that a function OSR exits
1214 // in the middle of a loop, while having a relatively low exit count. The exit
1215 // will reset the execution counter to some target threshold, meaning that this
1216 // code won't be reached until that loop heats up for >=1000 executions. But then
1217 // we do a second check here, to see if we should either reoptimize, or just
1218 // attempt OSR entry. Hence it might even be correct for
1219 // shouldReoptimizeFromLoopNow() to always return true. But we make it do some
1220 // additional checking anyway, to reduce the amount of recompilation thrashing.
1221 if (codeBlock->replacement()->shouldReoptimizeFromLoopNow()) {
1222 if (Options::verboseOSR()) {
1224 "Triggering reoptimization of ", *codeBlock,
1225 "(", *codeBlock->replacement(), ") (in loop).\n");
1227 codeBlock->replacement()->jettison(Profiler::JettisonDueToBaselineLoopReoptimizationTrigger, CountReoptimization);
1228 return encodeResult(0, 0);
1231 if (!codeBlock->shouldOptimizeNow()) {
1232 if (Options::verboseOSR()) {
1234 "Delaying optimization for ", *codeBlock,
1235 " because of insufficient profiling.\n");
1237 return encodeResult(0, 0);
1240 if (Options::verboseOSR())
1241 dataLog("Triggering optimized compilation of ", *codeBlock, "\n");
1243 unsigned numVarsWithValues;
1245 numVarsWithValues = codeBlock->m_numVars;
1247 numVarsWithValues = 0;
1248 Operands<JSValue> mustHandleValues(codeBlock->numParameters(), numVarsWithValues);
1249 for (size_t i = 0; i < mustHandleValues.size(); ++i) {
1250 int operand = mustHandleValues.operandForIndex(i);
1251 mustHandleValues[i] = exec->uncheckedR(operand).jsValue();
1254 RefPtr<CodeBlock> replacementCodeBlock = codeBlock->newReplacement();
1255 CompilationResult result = DFG::compile(
1256 vm, replacementCodeBlock.get(), 0, DFG::DFGMode, bytecodeIndex,
1257 mustHandleValues, JITToDFGDeferredCompilationCallback::create());
1259 if (result != CompilationSuccessful) {
1260 ASSERT(result == CompilationDeferred || replacementCodeBlock->hasOneRef());
1261 return encodeResult(0, 0);
1265 CodeBlock* optimizedCodeBlock = codeBlock->replacement();
1266 ASSERT(JITCode::isOptimizingJIT(optimizedCodeBlock->jitType()));
1268 if (void* dataBuffer = DFG::prepareOSREntry(exec, optimizedCodeBlock, bytecodeIndex)) {
1269 if (Options::verboseOSR()) {
1271 "Performing OSR ", *codeBlock, " -> ", *optimizedCodeBlock, ".\n");
1274 codeBlock->optimizeSoon();
1275 return encodeResult(vm.getCTIStub(DFG::osrEntryThunkGenerator).code().executableAddress(), dataBuffer);
1278 if (Options::verboseOSR()) {
1280 "Optimizing ", *codeBlock, " -> ", *codeBlock->replacement(),
1281 " succeeded, OSR failed, after a delay of ",
1282 codeBlock->optimizationDelayCounter(), ".\n");
1285 // Count the OSR failure as a speculation failure. If this happens a lot, then
1287 optimizedCodeBlock->countOSRExit();
1289 // We are a lot more conservative about triggering reoptimization after OSR failure than
1290 // before it. If we enter the optimize_from_loop trigger with a bucket full of fail
1291 // already, then we really would like to reoptimize immediately. But this case covers
1292 // something else: there weren't many (or any) speculation failures before, but we just
1293 // failed to enter the speculative code because some variable had the wrong value or
1294 // because the OSR code decided for any spurious reason that it did not want to OSR
1295 // right now. So, we only trigger reoptimization only upon the more conservative (non-loop)
1296 // reoptimization trigger.
1297 if (optimizedCodeBlock->shouldReoptimizeNow()) {
1298 if (Options::verboseOSR()) {
1300 "Triggering reoptimization of ", *codeBlock, " -> ",
1301 *codeBlock->replacement(), " (after OSR fail).\n");
1303 optimizedCodeBlock->jettison(Profiler::JettisonDueToBaselineLoopReoptimizationTriggerOnOSREntryFail, CountReoptimization);
1304 return encodeResult(0, 0);
1307 // OSR failed this time, but it might succeed next time! Let the code run a bit
1308 // longer and then try again.
1309 codeBlock->optimizeAfterWarmUp();
1311 return encodeResult(0, 0);
1315 void JIT_OPERATION operationPutByIndex(ExecState* exec, EncodedJSValue encodedArrayValue, int32_t index, EncodedJSValue encodedValue)
1317 VM& vm = exec->vm();
1318 NativeCallFrameTracer tracer(&vm, exec);
1320 JSValue arrayValue = JSValue::decode(encodedArrayValue);
1321 ASSERT(isJSArray(arrayValue));
1322 asArray(arrayValue)->putDirectIndex(exec, index, JSValue::decode(encodedValue));
1326 void JIT_OPERATION operationPutGetterById(ExecState* exec, EncodedJSValue encodedObjectValue, Identifier* identifier, EncodedJSValue encodedGetterValue)
1328 VM& vm = exec->vm();
1329 NativeCallFrameTracer tracer(&vm, exec);
1331 ASSERT(JSValue::decode(encodedObjectValue).isObject());
1332 JSObject* baseObj = asObject(JSValue::decode(encodedObjectValue));
1334 JSValue getter = JSValue::decode(encodedGetterValue);
1335 ASSERT(getter.isObject());
1336 baseObj->putGetter(exec, *identifier, asObject(getter));
1339 void JIT_OPERATION operationPutSetterById(ExecState* exec, EncodedJSValue encodedObjectValue, Identifier* identifier, EncodedJSValue encodedSetterValue)
1341 VM& vm = exec->vm();
1342 NativeCallFrameTracer tracer(&vm, exec);
1344 ASSERT(JSValue::decode(encodedObjectValue).isObject());
1345 JSObject* baseObj = asObject(JSValue::decode(encodedObjectValue));
1347 JSValue setter = JSValue::decode(encodedSetterValue);
1348 ASSERT(setter.isObject());
1349 baseObj->putSetter(exec, *identifier, asObject(setter));
1352 void JIT_OPERATION operationPutGetterSetter(ExecState* exec, EncodedJSValue encodedObjectValue, Identifier* identifier, EncodedJSValue encodedGetterValue, EncodedJSValue encodedSetterValue)
1354 VM& vm = exec->vm();
1355 NativeCallFrameTracer tracer(&vm, exec);
1357 ASSERT(JSValue::decode(encodedObjectValue).isObject());
1358 JSObject* baseObj = asObject(JSValue::decode(encodedObjectValue));
1360 GetterSetter* accessor = GetterSetter::create(vm, exec->lexicalGlobalObject());
1362 JSValue getter = JSValue::decode(encodedGetterValue);
1363 JSValue setter = JSValue::decode(encodedSetterValue);
1364 ASSERT(getter.isObject() || getter.isUndefined());
1365 ASSERT(setter.isObject() || setter.isUndefined());
1366 ASSERT(getter.isObject() || setter.isObject());
1368 if (!getter.isUndefined())
1369 accessor->setGetter(vm, exec->lexicalGlobalObject(), asObject(getter));
1370 if (!setter.isUndefined())
1371 accessor->setSetter(vm, exec->lexicalGlobalObject(), asObject(setter));
1372 baseObj->putDirectAccessor(exec, *identifier, accessor, Accessor);
1375 void JIT_OPERATION operationPutGetterById(ExecState* exec, JSCell* object, Identifier* identifier, JSCell* getter)
1377 VM& vm = exec->vm();
1378 NativeCallFrameTracer tracer(&vm, exec);
1380 ASSERT(object && object->isObject());
1381 JSObject* baseObj = object->getObject();
1383 ASSERT(getter->isObject());
1384 baseObj->putGetter(exec, *identifier, getter);
1387 void JIT_OPERATION operationPutSetterById(ExecState* exec, JSCell* object, Identifier* identifier, JSCell* setter)
1389 VM& vm = exec->vm();
1390 NativeCallFrameTracer tracer(&vm, exec);
1392 ASSERT(object && object->isObject());
1393 JSObject* baseObj = object->getObject();
1395 ASSERT(setter->isObject());
1396 baseObj->putSetter(exec, *identifier, setter);
1399 void JIT_OPERATION operationPutGetterSetter(ExecState* exec, JSCell* object, Identifier* identifier, JSCell* getter, JSCell* setter)
1401 VM& vm = exec->vm();
1402 NativeCallFrameTracer tracer(&vm, exec);
1404 ASSERT(object && object->isObject());
1405 JSObject* baseObj = object->getObject();
1407 GetterSetter* accessor = GetterSetter::create(vm, exec->lexicalGlobalObject());
1409 ASSERT(!getter || getter->isObject());
1410 ASSERT(!setter || setter->isObject());
1411 ASSERT(getter || setter);
1414 accessor->setGetter(vm, exec->lexicalGlobalObject(), getter->getObject());
1416 accessor->setSetter(vm, exec->lexicalGlobalObject(), setter->getObject());
1417 baseObj->putDirectAccessor(exec, *identifier, accessor, Accessor);
1421 void JIT_OPERATION operationPushCatchScope(ExecState* exec, int32_t dst, SymbolTable* symbolTable, EncodedJSValue encodedValue)
1423 pushNameScope<JSCatchScope>(exec, dst, symbolTable, encodedValue);
1426 void JIT_OPERATION operationPushFunctionNameScope(ExecState* exec, int32_t dst, SymbolTable* symbolTable, EncodedJSValue encodedValue)
1428 pushNameScope<JSFunctionNameScope>(exec, dst, symbolTable, encodedValue);
1431 void JIT_OPERATION operationPushWithScope(ExecState* exec, int32_t dst, EncodedJSValue encodedValue)
1433 VM& vm = exec->vm();
1434 NativeCallFrameTracer tracer(&vm, exec);
1436 JSObject* o = JSValue::decode(encodedValue).toObject(exec);
1440 // FIXME: This won't work if this operation is called from the DFG or FTL.
1441 // This should be changed to pass in the old scope and return the new scope.
1442 JSScope* currentScope = exec->uncheckedR(dst).Register::scope();
1443 exec->uncheckedR(dst) = JSWithScope::create(exec, o, currentScope);
1446 void JIT_OPERATION operationPopScope(ExecState* exec, int32_t scopeReg)
1448 VM& vm = exec->vm();
1449 NativeCallFrameTracer tracer(&vm, exec);
1451 JSScope* scope = exec->uncheckedR(scopeReg).Register::scope();
1452 exec->uncheckedR(scopeReg) = scope->next();
1455 void JIT_OPERATION operationProfileDidCall(ExecState* exec, EncodedJSValue encodedValue)
1457 VM& vm = exec->vm();
1458 NativeCallFrameTracer tracer(&vm, exec);
1460 if (LegacyProfiler* profiler = vm.enabledProfiler())
1461 profiler->didExecute(exec, JSValue::decode(encodedValue));
1464 void JIT_OPERATION operationProfileWillCall(ExecState* exec, EncodedJSValue encodedValue)
1466 VM& vm = exec->vm();
1467 NativeCallFrameTracer tracer(&vm, exec);
1469 if (LegacyProfiler* profiler = vm.enabledProfiler())
1470 profiler->willExecute(exec, JSValue::decode(encodedValue));
1473 EncodedJSValue JIT_OPERATION operationCheckHasInstance(ExecState* exec, EncodedJSValue encodedValue, EncodedJSValue encodedBaseVal)
1475 VM& vm = exec->vm();
1476 NativeCallFrameTracer tracer(&vm, exec);
1478 JSValue value = JSValue::decode(encodedValue);
1479 JSValue baseVal = JSValue::decode(encodedBaseVal);
1481 if (baseVal.isObject()) {
1482 JSObject* baseObject = asObject(baseVal);
1483 ASSERT(!baseObject->structure(vm)->typeInfo().implementsDefaultHasInstance());
1484 if (baseObject->structure(vm)->typeInfo().implementsHasInstance()) {
1485 bool result = baseObject->methodTable(vm)->customHasInstance(baseObject, exec, value);
1486 return JSValue::encode(jsBoolean(result));
1490 vm.throwException(exec, createInvalidInstanceofParameterError(exec, baseVal));
1491 return JSValue::encode(JSValue());
1496 static bool canAccessArgumentIndexQuickly(JSObject& object, uint32_t index)
1498 switch (object.structure()->typeInfo().type()) {
1499 case DirectArgumentsType: {
1500 DirectArguments* directArguments = jsCast<DirectArguments*>(&object);
1501 if (directArguments->canAccessArgumentIndexQuicklyInDFG(index))
1505 case ScopedArgumentsType: {
1506 ScopedArguments* scopedArguments = jsCast<ScopedArguments*>(&object);
1507 if (scopedArguments->canAccessArgumentIndexQuicklyInDFG(index))
1517 static JSValue getByVal(ExecState* exec, JSValue baseValue, JSValue subscript, ArrayProfile* arrayProfile, ReturnAddressPtr returnAddress)
1519 if (LIKELY(baseValue.isCell() && subscript.isString())) {
1520 VM& vm = exec->vm();
1521 Structure& structure = *baseValue.asCell()->structure(vm);
1522 if (JSCell::canUseFastGetOwnProperty(structure)) {
1523 if (RefPtr<AtomicStringImpl> existingAtomicString = asString(subscript)->toExistingAtomicString(exec)) {
1524 if (JSValue result = baseValue.asCell()->fastGetOwnProperty(vm, structure, existingAtomicString.get()))
1530 if (subscript.isUInt32()) {
1531 uint32_t i = subscript.asUInt32();
1532 if (isJSString(baseValue)) {
1533 if (asString(baseValue)->canGetIndex(i)) {
1534 ctiPatchCallByReturnAddress(exec->codeBlock(), returnAddress, FunctionPtr(operationGetByValString));
1535 return asString(baseValue)->getIndex(exec, i);
1537 arrayProfile->setOutOfBounds();
1538 } else if (baseValue.isObject()) {
1539 JSObject* object = asObject(baseValue);
1540 if (object->canGetIndexQuickly(i))
1541 return object->getIndexQuickly(i);
1543 if (!canAccessArgumentIndexQuickly(*object, i))
1544 arrayProfile->setOutOfBounds();
1547 return baseValue.get(exec, i);
1550 baseValue.requireObjectCoercible(exec);
1551 if (exec->hadException())
1552 return jsUndefined();
1553 auto property = subscript.toPropertyKey(exec);
1554 if (exec->hadException())
1555 return jsUndefined();
1556 return baseValue.get(exec, property);
1561 EncodedJSValue JIT_OPERATION operationGetByValGeneric(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ArrayProfile* arrayProfile)
1563 VM& vm = exec->vm();
1564 NativeCallFrameTracer tracer(&vm, exec);
1565 JSValue baseValue = JSValue::decode(encodedBase);
1566 JSValue subscript = JSValue::decode(encodedSubscript);
1568 JSValue result = getByVal(exec, baseValue, subscript, arrayProfile, ReturnAddressPtr(OUR_RETURN_ADDRESS));
1569 return JSValue::encode(result);
1572 EncodedJSValue JIT_OPERATION operationGetByValDefault(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ArrayProfile* arrayProfile)
1574 VM& vm = exec->vm();
1575 NativeCallFrameTracer tracer(&vm, exec);
1576 JSValue baseValue = JSValue::decode(encodedBase);
1577 JSValue subscript = JSValue::decode(encodedSubscript);
1579 if (baseValue.isObject() && subscript.isInt32()) {
1580 // See if it's worth optimizing this at all.
1581 JSObject* object = asObject(baseValue);
1582 bool didOptimize = false;
1584 unsigned bytecodeOffset = exec->locationAsBytecodeOffset();
1585 ASSERT(bytecodeOffset);
1586 ByValInfo& byValInfo = exec->codeBlock()->getByValInfo(bytecodeOffset - 1);
1587 ASSERT(!byValInfo.stubRoutine);
1589 if (hasOptimizableIndexing(object->structure(vm))) {
1590 // Attempt to optimize.
1591 Structure* structure = object->structure(vm);
1592 JITArrayMode arrayMode = jitArrayModeForStructure(structure);
1593 if (arrayMode != byValInfo.arrayMode) {
1594 // If we reached this case, we got an interesting array mode we did not expect when we compiled.
1595 // Let's update the profile to do better next time.
1596 CodeBlock* codeBlock = exec->codeBlock();
1597 ConcurrentJITLocker locker(codeBlock->m_lock);
1598 arrayProfile->computeUpdatedPrediction(locker, codeBlock, structure);
1600 JIT::compileGetByVal(&vm, exec->codeBlock(), &byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS), arrayMode);
1606 // If we take slow path more than 10 times without patching then make sure we
1607 // never make that mistake again. Or, if we failed to patch and we have some object
1608 // that intercepts indexed get, then don't even wait until 10 times. For cases
1609 // where we see non-index-intercepting objects, this gives 10 iterations worth of
1610 // opportunity for us to observe that the get_by_val may be polymorphic.
1611 if (++byValInfo.slowPathCount >= 10
1612 || object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero()) {
1613 // Don't ever try to optimize.
1614 ctiPatchCallByReturnAddress(exec->codeBlock(), ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(operationGetByValGeneric));
1619 JSValue result = getByVal(exec, baseValue, subscript, arrayProfile, ReturnAddressPtr(OUR_RETURN_ADDRESS));
1620 return JSValue::encode(result);
1623 EncodedJSValue JIT_OPERATION operationHasIndexedPropertyDefault(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ArrayProfile* arrayProfile)
1625 VM& vm = exec->vm();
1626 NativeCallFrameTracer tracer(&vm, exec);
1627 JSValue baseValue = JSValue::decode(encodedBase);
1628 JSValue subscript = JSValue::decode(encodedSubscript);
1630 ASSERT(baseValue.isObject());
1631 ASSERT(subscript.isUInt32());
1633 JSObject* object = asObject(baseValue);
1634 bool didOptimize = false;
1636 unsigned bytecodeOffset = exec->locationAsBytecodeOffset();
1637 ASSERT(bytecodeOffset);
1638 ByValInfo& byValInfo = exec->codeBlock()->getByValInfo(bytecodeOffset - 1);
1639 ASSERT(!byValInfo.stubRoutine);
1641 if (hasOptimizableIndexing(object->structure(vm))) {
1642 // Attempt to optimize.
1643 JITArrayMode arrayMode = jitArrayModeForStructure(object->structure(vm));
1644 if (arrayMode != byValInfo.arrayMode) {
1645 JIT::compileHasIndexedProperty(&vm, exec->codeBlock(), &byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS), arrayMode);
1651 // If we take slow path more than 10 times without patching then make sure we
1652 // never make that mistake again. Or, if we failed to patch and we have some object
1653 // that intercepts indexed get, then don't even wait until 10 times. For cases
1654 // where we see non-index-intercepting objects, this gives 10 iterations worth of
1655 // opportunity for us to observe that the get_by_val may be polymorphic.
1656 if (++byValInfo.slowPathCount >= 10
1657 || object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero()) {
1658 // Don't ever try to optimize.
1659 ctiPatchCallByReturnAddress(exec->codeBlock(), ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(operationHasIndexedPropertyGeneric));
1663 uint32_t index = subscript.asUInt32();
1664 if (object->canGetIndexQuickly(index))
1665 return JSValue::encode(JSValue(JSValue::JSTrue));
1667 if (!canAccessArgumentIndexQuickly(*object, index))
1668 arrayProfile->setOutOfBounds();
1669 return JSValue::encode(jsBoolean(object->hasProperty(exec, index)));
1672 EncodedJSValue JIT_OPERATION operationHasIndexedPropertyGeneric(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ArrayProfile* arrayProfile)
1674 VM& vm = exec->vm();
1675 NativeCallFrameTracer tracer(&vm, exec);
1676 JSValue baseValue = JSValue::decode(encodedBase);
1677 JSValue subscript = JSValue::decode(encodedSubscript);
1679 ASSERT(baseValue.isObject());
1680 ASSERT(subscript.isUInt32());
1682 JSObject* object = asObject(baseValue);
1683 uint32_t index = subscript.asUInt32();
1684 if (object->canGetIndexQuickly(index))
1685 return JSValue::encode(JSValue(JSValue::JSTrue));
1687 if (!canAccessArgumentIndexQuickly(*object, index))
1688 arrayProfile->setOutOfBounds();
1689 return JSValue::encode(jsBoolean(object->hasProperty(exec, subscript.asUInt32())));
1692 EncodedJSValue JIT_OPERATION operationGetByValString(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript)
1694 VM& vm = exec->vm();
1695 NativeCallFrameTracer tracer(&vm, exec);
1696 JSValue baseValue = JSValue::decode(encodedBase);
1697 JSValue subscript = JSValue::decode(encodedSubscript);
1700 if (LIKELY(subscript.isUInt32())) {
1701 uint32_t i = subscript.asUInt32();
1702 if (isJSString(baseValue) && asString(baseValue)->canGetIndex(i))
1703 result = asString(baseValue)->getIndex(exec, i);
1705 result = baseValue.get(exec, i);
1706 if (!isJSString(baseValue))
1707 ctiPatchCallByReturnAddress(exec->codeBlock(), ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(operationGetByValDefault));
1710 baseValue.requireObjectCoercible(exec);
1711 if (exec->hadException())
1712 return JSValue::encode(jsUndefined());
1713 auto property = subscript.toPropertyKey(exec);
1714 if (exec->hadException())
1715 return JSValue::encode(jsUndefined());
1716 result = baseValue.get(exec, property);
1719 return JSValue::encode(result);
1722 EncodedJSValue JIT_OPERATION operationDeleteById(ExecState* exec, EncodedJSValue encodedBase, const Identifier* identifier)
1724 VM& vm = exec->vm();
1725 NativeCallFrameTracer tracer(&vm, exec);
1727 JSObject* baseObj = JSValue::decode(encodedBase).toObject(exec);
1728 bool couldDelete = baseObj->methodTable(vm)->deleteProperty(baseObj, exec, *identifier);
1729 JSValue result = jsBoolean(couldDelete);
1730 if (!couldDelete && exec->codeBlock()->isStrictMode())
1731 vm.throwException(exec, createTypeError(exec, ASCIILiteral("Unable to delete property.")));
1732 return JSValue::encode(result);
1735 EncodedJSValue JIT_OPERATION operationInstanceOf(ExecState* exec, EncodedJSValue encodedValue, EncodedJSValue encodedProto)
1737 VM& vm = exec->vm();
1738 NativeCallFrameTracer tracer(&vm, exec);
1739 JSValue value = JSValue::decode(encodedValue);
1740 JSValue proto = JSValue::decode(encodedProto);
1742 ASSERT(!value.isObject() || !proto.isObject());
1744 bool result = JSObject::defaultHasInstance(exec, value, proto);
1745 return JSValue::encode(jsBoolean(result));
1748 int32_t JIT_OPERATION operationSizeFrameForVarargs(ExecState* exec, EncodedJSValue encodedArguments, int32_t numUsedStackSlots, int32_t firstVarArgOffset)
1750 VM& vm = exec->vm();
1751 NativeCallFrameTracer tracer(&vm, exec);
1752 JSStack* stack = &exec->interpreter()->stack();
1753 JSValue arguments = JSValue::decode(encodedArguments);
1754 return sizeFrameForVarargs(exec, stack, arguments, numUsedStackSlots, firstVarArgOffset);
1757 CallFrame* JIT_OPERATION operationSetupVarargsFrame(ExecState* exec, CallFrame* newCallFrame, EncodedJSValue encodedArguments, int32_t firstVarArgOffset, int32_t length)
1759 VM& vm = exec->vm();
1760 NativeCallFrameTracer tracer(&vm, exec);
1761 JSValue arguments = JSValue::decode(encodedArguments);
1762 setupVarargsFrame(exec, newCallFrame, arguments, firstVarArgOffset, length);
1763 return newCallFrame;
1766 EncodedJSValue JIT_OPERATION operationToObject(ExecState* exec, EncodedJSValue value)
1768 VM& vm = exec->vm();
1769 NativeCallFrameTracer tracer(&vm, exec);
1770 return JSValue::encode(JSValue::decode(value).toObject(exec));
1773 char* JIT_OPERATION operationSwitchCharWithUnknownKeyType(ExecState* exec, EncodedJSValue encodedKey, size_t tableIndex)
1775 VM& vm = exec->vm();
1776 NativeCallFrameTracer tracer(&vm, exec);
1777 JSValue key = JSValue::decode(encodedKey);
1778 CodeBlock* codeBlock = exec->codeBlock();
1780 SimpleJumpTable& jumpTable = codeBlock->switchJumpTable(tableIndex);
1781 void* result = jumpTable.ctiDefault.executableAddress();
1783 if (key.isString()) {
1784 StringImpl* value = asString(key)->value(exec).impl();
1785 if (value->length() == 1)
1786 result = jumpTable.ctiForValue((*value)[0]).executableAddress();
1789 return reinterpret_cast<char*>(result);
1792 char* JIT_OPERATION operationSwitchImmWithUnknownKeyType(ExecState* exec, EncodedJSValue encodedKey, size_t tableIndex)
1794 VM& vm = exec->vm();
1795 NativeCallFrameTracer tracer(&vm, exec);
1796 JSValue key = JSValue::decode(encodedKey);
1797 CodeBlock* codeBlock = exec->codeBlock();
1799 SimpleJumpTable& jumpTable = codeBlock->switchJumpTable(tableIndex);
1802 result = jumpTable.ctiForValue(key.asInt32()).executableAddress();
1803 else if (key.isDouble() && key.asDouble() == static_cast<int32_t>(key.asDouble()))
1804 result = jumpTable.ctiForValue(static_cast<int32_t>(key.asDouble())).executableAddress();
1806 result = jumpTable.ctiDefault.executableAddress();
1807 return reinterpret_cast<char*>(result);
1810 char* JIT_OPERATION operationSwitchStringWithUnknownKeyType(ExecState* exec, EncodedJSValue encodedKey, size_t tableIndex)
1812 VM& vm = exec->vm();
1813 NativeCallFrameTracer tracer(&vm, exec);
1814 JSValue key = JSValue::decode(encodedKey);
1815 CodeBlock* codeBlock = exec->codeBlock();
1818 StringJumpTable& jumpTable = codeBlock->stringSwitchJumpTable(tableIndex);
1820 if (key.isString()) {
1821 StringImpl* value = asString(key)->value(exec).impl();
1822 result = jumpTable.ctiForValue(value).executableAddress();
1824 result = jumpTable.ctiDefault.executableAddress();
1826 return reinterpret_cast<char*>(result);
1829 EncodedJSValue JIT_OPERATION operationResolveScope(ExecState* exec, int32_t scopeReg, int32_t identifierIndex)
1831 VM& vm = exec->vm();
1832 NativeCallFrameTracer tracer(&vm, exec);
1833 const Identifier& ident = exec->codeBlock()->identifier(identifierIndex);
1834 JSScope* scope = exec->uncheckedR(scopeReg).Register::scope();
1835 return JSValue::encode(JSScope::resolve(exec, scope, ident));
1838 EncodedJSValue JIT_OPERATION operationGetFromScope(ExecState* exec, Instruction* bytecodePC)
1840 VM& vm = exec->vm();
1841 NativeCallFrameTracer tracer(&vm, exec);
1842 CodeBlock* codeBlock = exec->codeBlock();
1843 Instruction* pc = bytecodePC;
1845 const Identifier& ident = codeBlock->identifier(pc[3].u.operand);
1846 JSObject* scope = jsCast<JSObject*>(exec->uncheckedR(pc[2].u.operand).jsValue());
1847 ResolveModeAndType modeAndType(pc[4].u.operand);
1849 PropertySlot slot(scope);
1850 if (!scope->getPropertySlot(exec, ident, slot)) {
1851 if (modeAndType.mode() == ThrowIfNotFound)
1852 vm.throwException(exec, createUndefinedVariableError(exec, ident));
1853 return JSValue::encode(jsUndefined());
1856 // Covers implicit globals. Since they don't exist until they first execute, we didn't know how to cache them at compile time.
1857 if (slot.isCacheableValue() && slot.slotBase() == scope && scope->structure(vm)->propertyAccessesAreCacheable()) {
1858 if (modeAndType.type() == GlobalProperty || modeAndType.type() == GlobalPropertyWithVarInjectionChecks) {
1859 Structure* structure = scope->structure(vm);
1861 ConcurrentJITLocker locker(codeBlock->m_lock);
1862 pc[5].u.structure.set(exec->vm(), codeBlock->ownerExecutable(), structure);
1863 pc[6].u.operand = slot.cachedOffset();
1865 structure->startWatchingPropertyForReplacements(vm, slot.cachedOffset());
1869 return JSValue::encode(slot.getValue(exec, ident));
1872 void JIT_OPERATION operationPutToScope(ExecState* exec, Instruction* bytecodePC)
1874 VM& vm = exec->vm();
1875 NativeCallFrameTracer tracer(&vm, exec);
1876 Instruction* pc = bytecodePC;
1878 CodeBlock* codeBlock = exec->codeBlock();
1879 const Identifier& ident = codeBlock->identifier(pc[2].u.operand);
1880 JSObject* scope = jsCast<JSObject*>(exec->uncheckedR(pc[1].u.operand).jsValue());
1881 JSValue value = exec->r(pc[3].u.operand).jsValue();
1882 ResolveModeAndType modeAndType = ResolveModeAndType(pc[4].u.operand);
1883 if (modeAndType.type() == LocalClosureVar) {
1884 JSLexicalEnvironment* environment = jsCast<JSLexicalEnvironment*>(scope);
1885 environment->variableAt(ScopeOffset(pc[6].u.operand)).set(vm, environment, value);
1886 if (WatchpointSet* set = pc[5].u.watchpointSet)
1887 set->touch("Executed op_put_scope<LocalClosureVar>");
1890 if (modeAndType.mode() == ThrowIfNotFound && !scope->hasProperty(exec, ident)) {
1891 exec->vm().throwException(exec, createUndefinedVariableError(exec, ident));
1895 PutPropertySlot slot(scope, codeBlock->isStrictMode());
1896 scope->methodTable()->put(scope, exec, ident, value, slot);
1898 if (exec->vm().exception())
1901 CommonSlowPaths::tryCachePutToScopeGlobal(exec, codeBlock, pc, scope, modeAndType, slot);
1904 void JIT_OPERATION operationThrow(ExecState* exec, EncodedJSValue encodedExceptionValue)
1906 VM* vm = &exec->vm();
1907 NativeCallFrameTracer tracer(vm, exec);
1909 JSValue exceptionValue = JSValue::decode(encodedExceptionValue);
1910 vm->throwException(exec, exceptionValue);
1912 // Results stored out-of-band in vm.targetMachinePCForThrow, vm.callFrameForThrow & vm.vmEntryFrameForThrow
1913 genericUnwind(vm, exec);
1916 void JIT_OPERATION operationFlushWriteBarrierBuffer(ExecState* exec, JSCell* cell)
1918 VM* vm = &exec->vm();
1919 NativeCallFrameTracer tracer(vm, exec);
1920 vm->heap.flushWriteBarrierBuffer(cell);
1923 void JIT_OPERATION operationOSRWriteBarrier(ExecState* exec, JSCell* cell)
1925 VM* vm = &exec->vm();
1926 NativeCallFrameTracer tracer(vm, exec);
1927 vm->heap.writeBarrier(cell);
1930 // NB: We don't include the value as part of the barrier because the write barrier elision
1931 // phase in the DFG only tracks whether the object being stored to has been barriered. It
1932 // would be much more complicated to try to model the value being stored as well.
1933 void JIT_OPERATION operationUnconditionalWriteBarrier(ExecState* exec, JSCell* cell)
1935 VM* vm = &exec->vm();
1936 NativeCallFrameTracer tracer(vm, exec);
1937 vm->heap.writeBarrier(cell);
1940 void JIT_OPERATION operationInitGlobalConst(ExecState* exec, Instruction* pc)
1942 VM* vm = &exec->vm();
1943 NativeCallFrameTracer tracer(vm, exec);
1945 JSValue value = exec->r(pc[2].u.operand).jsValue();
1946 pc[1].u.variablePointer->set(*vm, exec->codeBlock()->globalObject(), value);
1949 void JIT_OPERATION lookupExceptionHandler(VM* vm, ExecState* exec)
1951 NativeCallFrameTracer tracer(vm, exec);
1952 genericUnwind(vm, exec);
1953 ASSERT(vm->targetMachinePCForThrow);
1956 void JIT_OPERATION lookupExceptionHandlerFromCallerFrame(VM* vm, ExecState* exec)
1958 VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
1959 CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);
1960 ASSERT(callerFrame);
1962 NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
1963 genericUnwind(vm, callerFrame);
1964 ASSERT(vm->targetMachinePCForThrow);
1967 void JIT_OPERATION operationVMHandleException(ExecState* exec)
1969 VM* vm = &exec->vm();
1970 NativeCallFrameTracer tracer(vm, exec);
1971 genericUnwind(vm, exec);
1974 // This function "should" just take the ExecState*, but doing so would make it more difficult
1975 // to call from exception check sites. So, unlike all of our other functions, we allow
1976 // ourselves to play some gnarly ABI tricks just to simplify the calling convention. This is
1977 // particularly safe here since this is never called on the critical path - it's only for
1979 void JIT_OPERATION operationExceptionFuzz()
1982 ExecState* exec = static_cast<ExecState*>(__builtin_frame_address(1));
1983 void* returnPC = __builtin_return_address(0);
1984 doExceptionFuzzing(exec, "JITOperations", returnPC);
1985 #endif // COMPILER(GCC)
1988 EncodedJSValue JIT_OPERATION operationHasGenericProperty(ExecState* exec, EncodedJSValue encodedBaseValue, JSCell* propertyName)
1990 VM& vm = exec->vm();
1991 NativeCallFrameTracer tracer(&vm, exec);
1992 JSValue baseValue = JSValue::decode(encodedBaseValue);
1993 if (baseValue.isUndefinedOrNull())
1994 return JSValue::encode(jsBoolean(false));
1996 JSObject* base = baseValue.toObject(exec);
1997 return JSValue::encode(jsBoolean(base->hasProperty(exec, asString(propertyName)->toIdentifier(exec))));
2000 EncodedJSValue JIT_OPERATION operationHasIndexedProperty(ExecState* exec, JSCell* baseCell, int32_t subscript)
2002 VM& vm = exec->vm();
2003 NativeCallFrameTracer tracer(&vm, exec);
2004 JSObject* object = baseCell->toObject(exec, exec->lexicalGlobalObject());
2005 return JSValue::encode(jsBoolean(object->hasProperty(exec, subscript)));
2008 JSCell* JIT_OPERATION operationGetPropertyEnumerator(ExecState* exec, JSCell* cell)
2010 VM& vm = exec->vm();
2011 NativeCallFrameTracer tracer(&vm, exec);
2013 JSObject* base = cell->toObject(exec, exec->lexicalGlobalObject());
2015 return propertyNameEnumerator(exec, base);
2018 EncodedJSValue JIT_OPERATION operationNextEnumeratorPname(ExecState* exec, JSCell* enumeratorCell, int32_t index)
2020 VM& vm = exec->vm();
2021 NativeCallFrameTracer tracer(&vm, exec);
2022 JSPropertyNameEnumerator* enumerator = jsCast<JSPropertyNameEnumerator*>(enumeratorCell);
2023 JSString* propertyName = enumerator->propertyNameAtIndex(index);
2024 return JSValue::encode(propertyName ? propertyName : jsNull());
2027 JSCell* JIT_OPERATION operationToIndexString(ExecState* exec, int32_t index)
2029 VM& vm = exec->vm();
2030 NativeCallFrameTracer tracer(&vm, exec);
2031 return jsString(exec, Identifier::from(exec, index).string());
2034 void JIT_OPERATION operationProcessTypeProfilerLog(ExecState* exec)
2036 exec->vm().typeProfilerLog()->processLogEntries(ASCIILiteral("Log Full, called from inside baseline JIT"));
2041 // Note: getHostCallReturnValueWithExecState() needs to be placed before the
2042 // definition of getHostCallReturnValue() below because the Windows build
2044 extern "C" EncodedJSValue HOST_CALL_RETURN_VALUE_OPTION getHostCallReturnValueWithExecState(ExecState* exec)
2047 return JSValue::encode(JSValue());
2048 return JSValue::encode(exec->vm().hostCallReturnValue);
// Per-architecture assembly trampolines for getHostCallReturnValue: each one
// forwards the caller's frame pointer as the ExecState* argument and transfers
// control to getHostCallReturnValueWithExecState() above.
// NOTE(review): this listing appears to be missing the `asm (` openers, the
// closing `);` lines, and the register-move instruction of several variants
// (e.g. the x86_64 `mov %rbp, %rdi` before the jmp) - restore from upstream
// before compiling.
#if COMPILER(GCC) && CPU(X86_64)
// x86_64 System V: frame pointer -> first argument register, then tail-jump.
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
"jmp " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
#elif COMPILER(GCC) && CPU(X86)
// x86 (32-bit): argument is passed on the stack, so this variant adjusts %esp
// and uses a real call/return sequence rather than a tail-jump.
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
"leal -4(%esp), %esp\n"
"call " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
"leal 8(%esp), %esp\n"
#elif COMPILER(GCC) && CPU(ARM_THUMB2)
// ARM Thumb-2: symbol must be marked as a thumb function for interworking.
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
".thumb_func " THUMB_FUNC_PARAM(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
"b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
#elif COMPILER(GCC) && CPU(ARM_TRADITIONAL)
// ARM (traditional): branch to the C entry point.
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
INLINE_ARM_FUNCTION(getHostCallReturnValue)
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
"b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
// NOTE(review): the `#elif` guard for the next section (presumably ARM64) is
// missing from this listing.
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
"b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
#elif COMPILER(GCC) && CPU(MIPS)
// MIPS PIC calling convention requires the callee address in $t9; this macro
// loads it (the non-PIC branch below expands to nothing).
#define LOAD_FUNCTION_TO_T9(function) \
".set noreorder" "\n" \
".cpload $25" "\n" \
".set reorder" "\n" \
"la $t9, " LOCAL_REFERENCE(function) "\n"
#define LOAD_FUNCTION_TO_T9(function) "" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
LOAD_FUNCTION_TO_T9(getHostCallReturnValueWithExecState)
"move $a0, $fp" "\n"
"b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
#elif COMPILER(GCC) && CPU(SH4)
// SH4: pc-relative branch via a scratch register and an inline literal pool.
#define SH4_SCRATCH_REGISTER "r11"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
"mov.l 2f, " SH4_SCRATCH_REGISTER "\n"
"braf " SH4_SCRATCH_REGISTER "\n"
"2: .long " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "-1b\n"
#elif COMPILER(MSVC) && CPU(X86)
// MSVC x86: a naked function writes the frame pointer into the stack slot
// where the callee expects its ExecState* argument, then jumps.
__declspec(naked) EncodedJSValue HOST_CALL_RETURN_VALUE_OPTION getHostCallReturnValue()
__asm mov [esp + 4], ebp;
__asm jmp getHostCallReturnValueWithExecState
#endif // ENABLE(JIT)