2 * Copyright (C) 2008, 2009, 2013 Apple Inc. All rights reserved.
3 * Copyright (C) 2008 Cameron Zwarich <cwzwarich@uwaterloo.ca>
4 * Copyright (C) Research In Motion Limited 2010, 2011. All rights reserved.
6 * Redistribution and use in source and binary forms, with or without
7 * modification, are permitted provided that the following conditions
10 * 1. Redistributions of source code must retain the above copyright
11 * notice, this list of conditions and the following disclaimer.
12 * 2. Redistributions in binary form must reproduce the above copyright
13 * notice, this list of conditions and the following disclaimer in the
14 * documentation and/or other materials provided with the distribution.
15 * 3. Neither the name of Apple Computer, Inc. ("Apple") nor the names of
16 * its contributors may be used to endorse or promote products derived
17 * from this software without specific prior written permission.
19 * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
20 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
21 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
22 * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
23 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
24 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
25 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
26 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
27 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
28 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
36 #include "Arguments.h"
37 #include "ArrayConstructor.h"
38 #include "CallFrame.h"
39 #include "CallFrameInlines.h"
40 #include "CodeBlock.h"
41 #include "CodeProfiling.h"
42 #include "CommonSlowPaths.h"
43 #include "DFGOSREntry.h"
44 #include "DFGWorklist.h"
47 #include "ErrorInstance.h"
48 #include "ExceptionHelpers.h"
49 #include "GetterSetter.h"
51 #include <wtf/InlineASM.h>
53 #include "JITExceptions.h"
54 #include "JITToDFGDeferredCompilationCallback.h"
55 #include "JSActivation.h"
57 #include "JSFunction.h"
58 #include "JSGlobalObjectFunctions.h"
59 #include "JSNameScope.h"
60 #include "JSNotAnObject.h"
61 #include "JSPropertyNameIterator.h"
63 #include "JSWithScope.h"
64 #include "LegacyProfiler.h"
65 #include "NameInstance.h"
66 #include "ObjectConstructor.h"
67 #include "ObjectPrototype.h"
68 #include "Operations.h"
70 #include "RegExpObject.h"
71 #include "RegExpPrototype.h"
73 #include "RepatchBuffer.h"
74 #include "SamplingTool.h"
75 #include "SlowPathCall.h"
77 #include "StructureRareDataInlines.h"
78 #include <wtf/StdLibExtras.h>
84 #if CPU(ARM_TRADITIONAL)
85 #include "JITStubsARM.h"
87 #include "JITStubsARMv7.h"
89 #include "JITStubsMIPS.h"
91 #include "JITStubsSH4.h"
93 #include "JITStubsX86.h"
95 #include "JITStubsX86_64.h"
97 #error "JIT not supported on this platform."
// CTI_SAMPLER resolves to the interpreter's opcode sampler when opcode
// sampling is compiled in, and to 0 otherwise.
// NOTE(review): the #else/#endif directives appear elided in this excerpt.
102 #if ENABLE(OPCODE_SAMPLING)
103 #define CTI_SAMPLER stackFrame.vm->interpreter->sampler()
105 #define CTI_SAMPLER 0
// Runs CPU-specific compile-time/layout assertions for the hand-written JIT
// stubs, but only when this VM can actually use the JIT.
// NOTE(review): the #if CPU(...) directives selecting each architecture are
// partially elided in this excerpt; only some arms are visible.
108 void performPlatformSpecificJITAssertions(VM* vm)
110 if (!vm->canUseJIT())
114 performARMv7JITAssertions();
115 #elif CPU(ARM_TRADITIONAL)
116 performARMJITAssertions();
118 performMIPSJITAssertions();
120 performSH4JITAssertions();
// Attempts to install an inline cache for a put_by_id at the given JIT call
// site. On every uncacheable case the call site is re-patched to the generic
// (non-caching) stub so we stop taking this slow path. Holds the CodeBlock's
// ConcurrentJITLocker for the duration.
// NOTE(review): early-return statements and closing braces are elided from
// this excerpt; each ctiPatchCallByReturnAddress(...) below is followed by a
// return in the full source.
124 NEVER_INLINE static void tryCachePutByID(CallFrame* callFrame, CodeBlock* codeBlock, ReturnAddressPtr returnAddress, JSValue baseValue, const PutPropertySlot& slot, StructureStubInfo* stubInfo, bool direct)
126 ConcurrentJITLocker locker(codeBlock->m_lock);
128 // The interpreter checks for recursion here; I do not believe this can occur in CTI.
// Only cell bases can be structure-cached.
130 if (!baseValue.isCell())
133 // Uncacheable: give up.
134 if (!slot.isCacheable()) {
135 ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(direct ? cti_op_put_by_id_direct_generic : cti_op_put_by_id_generic))&#59;
139 JSCell* baseCell = baseValue.asCell();
140 Structure* structure = baseCell->structure();
// Dictionary or caching-prohibited structures cannot be cached against.
142 if (structure->isUncacheableDictionary() || structure->typeInfo().prohibitsPropertyCaching()) {
143 ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(direct ? cti_op_put_by_id_direct_generic : cti_op_put_by_id_generic));
147 // If baseCell != base, then baseCell must be a proxy for another object.
148 if (baseCell != slot.base()) {
149 ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(direct ? cti_op_put_by_id_direct_generic : cti_op_put_by_id_generic));
153 // Cache hit: Specialize instruction and ref Structures.
155 // Structure transition, cache transition info
156 if (slot.type() == PutPropertySlot::NewProperty) {
157 if (structure->isDictionary()) {
158 ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(direct ? cti_op_put_by_id_direct_generic : cti_op_put_by_id_generic));
162 // put_by_id_transition checks the prototype chain for setters.
163 if (normalizePrototypeChain(callFrame, baseCell) == InvalidPrototypeChain) {
164 ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(direct ? cti_op_put_by_id_direct_generic : cti_op_put_by_id_generic));
// Transition case: record old-structure -> new-structure (+ chain) in the
// stub info, then compile a specialized transition stub.
168 StructureChain* prototypeChain = structure->prototypeChain(callFrame);
169 ASSERT(structure->previousID()->transitionWatchpointSetHasBeenInvalidated());
170 stubInfo->initPutByIdTransition(callFrame->vm(), codeBlock->ownerExecutable(), structure->previousID(), structure, prototypeChain, direct);
171 JIT::compilePutByIdTransition(callFrame->scope()->vm(), codeBlock, stubInfo, structure->previousID(), structure, slot.cachedOffset(), prototypeChain, returnAddress, direct);
// Replace case (existing property): patch the fast path in place.
175 stubInfo->initPutByIdReplace(callFrame->vm(), codeBlock->ownerExecutable(), structure);
177 JIT::patchPutByIdReplace(codeBlock, stubInfo, structure, slot.cachedOffset(), returnAddress, direct);
// Attempts to install an inline cache for a get_by_id at the given JIT call
// site: special-cases array/string length, then self-, proto-, or chain-style
// caching depending on where the property was found. Uncacheable cases patch
// the call site to the generic stub. Holds the CodeBlock's ConcurrentJITLocker.
// NOTE(review): early-return statements and closing braces are elided from
// this excerpt.
180 NEVER_INLINE static void tryCacheGetByID(CallFrame* callFrame, CodeBlock* codeBlock, ReturnAddressPtr returnAddress, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo* stubInfo)
182 ConcurrentJITLocker locker(codeBlock->m_lock);
184 // FIXME: Write a test that proves we need to check for recursion here just
185 // like the interpreter does, then add a check for recursion.
187 // FIXME: Cache property access for immediates.
188 if (!baseValue.isCell()) {
189 ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(cti_op_get_by_id_generic));
193 VM* vm = &callFrame->vm();
// Fast path for array.length: patch in a dedicated length-access routine.
195 if (isJSArray(baseValue) && propertyName == callFrame->propertyNames().length) {
196 JIT::compilePatchGetArrayLength(callFrame->scope()->vm(), codeBlock, returnAddress);
200 if (isJSString(baseValue) && propertyName == callFrame->propertyNames().length) {
201 // The tradeoff of compiling an patched inline string length access routine does not seem
202 // to pay off, so we currently only do this for arrays.
203 ctiPatchCallByReturnAddress(codeBlock, returnAddress, vm->getCTIStub(stringLengthTrampolineGenerator).code());
207 // Uncacheable: give up.
208 if (!slot.isCacheable()) {
209 stubInfo->accessType = access_get_by_id_generic;
210 ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(cti_op_get_by_id_generic));
214 JSCell* baseCell = baseValue.asCell();
215 Structure* structure = baseCell->structure();
217 if (structure->isUncacheableDictionary() || structure->typeInfo().prohibitsPropertyCaching()) {
218 stubInfo->accessType = access_get_by_id_generic;
219 ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(cti_op_get_by_id_generic));
223 // Cache hit: Specialize instruction and ref Structures.
// Self access: the property lives on the base object itself.
225 if (slot.slotBase() == baseValue) {
226 RELEASE_ASSERT(stubInfo->accessType == access_unset);
227 if (!slot.isCacheableValue() || !MacroAssembler::isCompactPtrAlignedAddressOffset(maxOffsetRelativeToPatchedStorage(slot.cachedOffset())))
228 ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(cti_op_get_by_id_self_fail));
230 JIT::patchGetByIdSelf(codeBlock, stubInfo, structure, slot.cachedOffset(), returnAddress);
231 stubInfo->initGetByIdSelf(callFrame->vm(), codeBlock->ownerExecutable(), structure);
236 if (structure->isDictionary()) {
237 stubInfo->accessType = access_get_by_id_generic;
238 ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(cti_op_get_by_id_generic));
// Proto access: the property lives on the base's direct prototype.
242 if (slot.slotBase() == structure->prototypeForLookup(callFrame)) {
243 JSObject* slotBaseObject = asObject(slot.slotBase());
244 size_t offset = slot.cachedOffset();
246 if (structure->typeInfo().hasImpureGetOwnPropertySlot()) {
247 stubInfo->accessType = access_get_by_id_generic;
248 ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(cti_op_get_by_id_generic));
252 // Since we're accessing a prototype in a loop, it's a good bet that it
253 // should not be treated as a dictionary.
254 if (slotBaseObject->structure()->isDictionary()) {
255 slotBaseObject->flattenDictionaryObject(callFrame->vm());
// Flattening may move the property; re-fetch its offset.
256 offset = slotBaseObject->structure()->get(callFrame->vm(), propertyName);
259 stubInfo->initGetByIdProto(callFrame->vm(), codeBlock->ownerExecutable(), structure, slotBaseObject->structure(), slot.isCacheableValue());
261 ASSERT(!structure->isDictionary());
262 ASSERT(!slotBaseObject->structure()->isDictionary());
263 JIT::compileGetByIdProto(callFrame->scope()->vm(), callFrame, codeBlock, stubInfo, structure, slotBaseObject->structure(), propertyName, slot, offset, returnAddress);
// Chain access: the property lives further up the prototype chain.
267 PropertyOffset offset = slot.cachedOffset();
268 size_t count = normalizePrototypeChainForChainAccess(callFrame, baseValue, slot.slotBase(), propertyName, offset);
269 if (count == InvalidPrototypeChain) {
270 stubInfo->accessType = access_get_by_id_generic;
271 ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(cti_op_get_by_id_generic));
275 StructureChain* prototypeChain = structure->prototypeChain(callFrame);
276 stubInfo->initGetByIdChain(callFrame->vm(), codeBlock->ownerExecutable(), structure, prototypeChain, count, slot.isCacheableValue());
277 JIT::compileGetByIdChain(callFrame->scope()->vm(), callFrame, codeBlock, stubInfo, structure, prototypeChain, count, propertyName, slot, offset, returnAddress);
// Placeholder symbol: JIT stubs temporarily overwrite their return address to
// point here (see StackHack) so that stack-logging tools attribute stub-time
// allocations to one well-known function.
// NOTE(review): the function body braces are elided in this excerpt.
284 static void jscGeneratedNativeCode()
286 // When executing a JIT stub function (which might do an allocation), we hack the return address
287 // to pretend to be executing this function, to keep stack logging tools from blowing out
// Interior of the StackHack RAII helper (struct header elided in this
// excerpt). On construction it saves the stub's real return address and —
// unless code profiling is enabled — replaces it with
// jscGeneratedNativeCode; the destructor restores the saved address.
294 ALWAYS_INLINE StackHack(JITStackFrame& stackFrame)
295 : stackFrame(stackFrame)
296 , savedReturnAddress(*stackFrame.returnAddressSlot())
// Profiling needs genuine return addresses, so skip the swap when enabled.
298 if (!CodeProfiling::enabled())
299 *stackFrame.returnAddressSlot() = ReturnAddressPtr(FunctionPtr(jscGeneratedNativeCode));
// Restore the caller's true return address on scope exit.
302 ALWAYS_INLINE ~StackHack()
304 *stackFrame.returnAddressSlot() = savedReturnAddress;
307 JITStackFrame& stackFrame;
308 ReturnAddressPtr savedReturnAddress;
// Two variants of the stub prologue/return-address macros: the first set uses
// StackHack (return address saved/spoofed for stack-logging tools), the
// second manipulates the return-address slot directly.
// NOTE(review): the surrounding #if/#else/#endif directives are elided here.
311 #define STUB_INIT_STACK_FRAME(stackFrame) JITStackFrame& stackFrame = *reinterpret_cast_ptr<JITStackFrame*>(STUB_ARGS); StackHack stackHack(stackFrame)
312 #define STUB_SET_RETURN_ADDRESS(returnAddress) stackHack.savedReturnAddress = ReturnAddressPtr(returnAddress)
313 #define STUB_RETURN_ADDRESS stackHack.savedReturnAddress
317 #define STUB_INIT_STACK_FRAME(stackFrame) JITStackFrame& stackFrame = *reinterpret_cast_ptr<JITStackFrame*>(STUB_ARGS)
318 #define STUB_SET_RETURN_ADDRESS(returnAddress) *stackFrame.returnAddressSlot() = ReturnAddressPtr(returnAddress)
319 #define STUB_RETURN_ADDRESS *stackFrame.returnAddressSlot()
323 // The reason this is not inlined is to avoid having to do a PIC branch
324 // to get the address of the ctiVMThrowTrampoline function. It's also
325 // good to keep the code size down by leaving as much of the exception
326 // handling code out of line as possible.
// Records where the exception was raised and redirects the stub's return
// address so control flows into the VM throw trampoline instead of back to
// JIT code. Requires that an exception is already pending on the VM.
327 static NEVER_INLINE void returnToThrowTrampoline(VM* vm, ReturnAddressPtr exceptionLocation, ReturnAddressPtr& returnAddressSlot)
329 RELEASE_ASSERT(vm->exception());
330 vm->exceptionLocation = exceptionLocation;
331 returnAddressSlot = ReturnAddressPtr(FunctionPtr(ctiVMThrowTrampoline));
// Exception plumbing macros used by the cti_* stubs below. They all funnel
// into returnToThrowTrampoline via the stub's saved return address.
// NOTE(review): the do/while wrappers and return statements inside these
// macro bodies are elided in this excerpt.
334 #define VM_THROW_EXCEPTION() \
336 VM_THROW_EXCEPTION_AT_END(); \
339 #define VM_THROW_EXCEPTION_AT_END() \
341 returnToThrowTrampoline(stackFrame.vm, STUB_RETURN_ADDRESS, STUB_RETURN_ADDRESS);\
// Check-and-throw helpers: each tests vm->exception() and dispatches to the
// appropriate throw macro above.
344 #define CHECK_FOR_EXCEPTION() \
346 if (UNLIKELY(stackFrame.vm->exception())) \
347 VM_THROW_EXCEPTION(); \
349 #define CHECK_FOR_EXCEPTION_AT_END() \
351 if (UNLIKELY(stackFrame.vm->exception())) \
352 VM_THROW_EXCEPTION_AT_END(); \
354 #define CHECK_FOR_EXCEPTION_VOID() \
356 if (UNLIKELY(stackFrame.vm->exception())) { \
357 VM_THROW_EXCEPTION_AT_END(); \
// Interior of the abstract ErrorFunctor base (class header elided in this
// excerpt): a callable that manufactures the error value to throw.
364 virtual ~ErrorFunctor() { }
365 virtual JSValue operator()(ExecState*) = 0;
// ErrorFunctor wrapping a factory that needs only the ExecState
// (e.g. createStackOverflowError).
// NOTE(review): member initializer list and m_factory declaration are elided
// in this excerpt.
368 class ErrorWithExecFunctor : public ErrorFunctor {
370 typedef JSObject* (*Factory)(ExecState* exec);
372 ErrorWithExecFunctor(Factory factory)
376 JSValue operator()(ExecState* exec)
378 return m_factory(exec);
// ErrorFunctor wrapping a factory that needs the ExecState plus the callee
// value that triggered the error.
385 class ErrorWithExecAndCalleeFunctor : public ErrorFunctor {
387 typedef JSObject* (*Factory)(ExecState* exec, JSValue callee);
389 ErrorWithExecAndCalleeFunctor(Factory factory, JSValue callee)
390 : m_factory(factory), m_callee(callee)
393 JSValue operator()(ExecState* exec)
395 return m_factory(exec, m_callee);
402 // Helper function for JIT stubs that may throw an exception in the middle of
403 // processing a function call. This function rolls back the stack to
404 // our caller, so exception processing can proceed from a valid state.
// If createError is supplied, its product is thrown on the caller frame
// before redirecting control to the throw trampoline. Returns T() so callers
// can `return throwExceptionFromOpCall<...>(...)`.
// NOTE(review): the createError null-check and the return statement are
// elided in this excerpt.
405 template<typename T> static T throwExceptionFromOpCall(JITStackFrame& jitStackFrame, CallFrame* newCallFrame, ReturnAddressPtr& returnAddressSlot, ErrorFunctor* createError = 0)
// Roll back to the caller frame and make it the VM's current top frame.
407 CallFrame* callFrame = newCallFrame->callerFrame()->removeHostCallFrameFlag();
408 jitStackFrame.callFrame = callFrame;
409 callFrame->vm().topCallFrame = callFrame;
411 callFrame->vm().throwException(callFrame, (*createError)(callFrame));
412 ASSERT(callFrame->vm().exception());
413 returnToThrowTrampoline(&callFrame->vm(), ReturnAddressPtr(newCallFrame->returnPC()), returnAddressSlot);
417 // If the CPU specific header does not provide an implementation, use the default one here.
418 #ifndef DEFINE_STUB_FUNCTION
// Declares a JIT slow-path entry point named cti_<op> with the platform's
// stub calling convention.
419 #define DEFINE_STUB_FUNCTION(rtype, op) rtype JIT_STUB cti_##op(STUB_ARGS_DECLARATION)
// Slow path pinged periodically by JIT code: if the VM watchdog has fired,
// throw a TerminatedExecutionException to unwind script execution.
422 DEFINE_STUB_FUNCTION(void, handle_watchdog_timer)
424 STUB_INIT_STACK_FRAME(stackFrame);
425 CallFrame* callFrame = stackFrame.callFrame;
426 VM* vm = stackFrame.vm;
427 if (UNLIKELY(vm->watchdog.didFire(callFrame))) {
428 vm->throwException(callFrame, createTerminatedExecutionException(vm));
429 VM_THROW_EXCEPTION_AT_END();
// Function-entry stack check: grow the JS stack to cover this frame's callee
// registers, throwing a stack-overflow error on the caller frame if growth
// fails. NOTE(review): the success-path return is elided in this excerpt.
434 DEFINE_STUB_FUNCTION(void*, stack_check)
436 STUB_INIT_STACK_FRAME(stackFrame);
437 CallFrame* callFrame = stackFrame.callFrame;
439 if (UNLIKELY(!stackFrame.stack->grow(&callFrame->registers()[callFrame->codeBlock()->m_numCalleeRegisters]))) {
440 ErrorWithExecFunctor functor = ErrorWithExecFunctor(createStackOverflowError);
441 return throwExceptionFromOpCall<void*>(stackFrame, callFrame, STUB_RETURN_ADDRESS, &functor);
// Allocates a fresh empty JSObject with the structure baked into the
// bytecode (args[0]).
447 DEFINE_STUB_FUNCTION(JSObject*, op_new_object)
449 STUB_INIT_STACK_FRAME(stackFrame);
451 return constructEmptyObject(stackFrame.callFrame, stackFrame.args[0].structure());
// Non-caching put_by_id: performs a full dynamic put (args[0][args[1]] =
// args[2]) with no inline-cache maintenance.
454 DEFINE_STUB_FUNCTION(void, op_put_by_id_generic)
456 STUB_INIT_STACK_FRAME(stackFrame);
458 PutPropertySlot slot(
459 stackFrame.callFrame->codeBlock()->isStrictMode(),
460 stackFrame.callFrame->codeBlock()->putByIdContext());
461 stackFrame.args[0].jsValue().put(stackFrame.callFrame, stackFrame.args[1].identifier(), stackFrame.args[2].jsValue(), slot);
462 CHECK_FOR_EXCEPTION_AT_END();
// Non-caching direct put_by_id: stores the property directly on the base
// object (bypassing setters / the prototype chain). Base must be an object.
465 DEFINE_STUB_FUNCTION(void, op_put_by_id_direct_generic)
467 STUB_INIT_STACK_FRAME(stackFrame);
469 PutPropertySlot slot(
470 stackFrame.callFrame->codeBlock()->isStrictMode(),
471 stackFrame.callFrame->codeBlock()->putByIdContext());
472 JSValue baseValue = stackFrame.args[0].jsValue();
473 ASSERT(baseValue.isObject());
474 asObject(baseValue)->putDirect(stackFrame.callFrame->vm(), stackFrame.args[1].identifier(), stackFrame.args[2].jsValue(), slot);
475 CHECK_FOR_EXCEPTION_AT_END();
// Non-caching get_by_id: full dynamic property lookup of args[1] on args[0],
// with no inline-cache maintenance.
478 DEFINE_STUB_FUNCTION(EncodedJSValue, op_get_by_id_generic)
480 STUB_INIT_STACK_FRAME(stackFrame);
482 CallFrame* callFrame = stackFrame.callFrame;
483 Identifier& ident = stackFrame.args[1].identifier();
485 JSValue baseValue = stackFrame.args[0].jsValue();
486 PropertySlot slot(baseValue);
487 JSValue result = baseValue.get(callFrame, ident, slot);
489 CHECK_FOR_EXCEPTION_AT_END();
490 return JSValue::encode(result);
// Slow path for a not-yet-cached put_by_id: performs the put, then — if no
// other stub has raced to repatch this site (accessType unchanged) — tries to
// install an inline cache via tryCachePutByID (direct = false).
493 DEFINE_STUB_FUNCTION(void, op_put_by_id)
495 STUB_INIT_STACK_FRAME(stackFrame);
496 CallFrame* callFrame = stackFrame.callFrame;
497 Identifier& ident = stackFrame.args[1].identifier();
499 CodeBlock* codeBlock = stackFrame.callFrame->codeBlock();
500 StructureStubInfo* stubInfo = &codeBlock->getStubInfo(STUB_RETURN_ADDRESS);
// Snapshot the access type so we can detect concurrent repatching below.
501 AccessType accessType = static_cast<AccessType>(stubInfo->accessType);
503 PutPropertySlot slot(
504 callFrame->codeBlock()->isStrictMode(),
505 callFrame->codeBlock()->putByIdContext());
506 stackFrame.args[0].jsValue().put(callFrame, ident, stackFrame.args[2].jsValue(), slot);
508 if (accessType == static_cast<AccessType>(stubInfo->accessType)) {
510 tryCachePutByID(callFrame, codeBlock, STUB_RETURN_ADDRESS, stackFrame.args[0].jsValue(), slot, stubInfo, false);
513 CHECK_FOR_EXCEPTION_AT_END();
// Slow path for a not-yet-cached direct put_by_id: stores directly on the
// base object, then attempts inline-cache installation via tryCachePutByID
// (direct = true) if the stub site was not concurrently repatched.
516 DEFINE_STUB_FUNCTION(void, op_put_by_id_direct)
518 STUB_INIT_STACK_FRAME(stackFrame);
519 CallFrame* callFrame = stackFrame.callFrame;
520 Identifier& ident = stackFrame.args[1].identifier();
522 CodeBlock* codeBlock = stackFrame.callFrame->codeBlock();
523 StructureStubInfo* stubInfo = &codeBlock->getStubInfo(STUB_RETURN_ADDRESS);
// Snapshot the access type so we can detect concurrent repatching below.
524 AccessType accessType = static_cast<AccessType>(stubInfo->accessType);
526 PutPropertySlot slot(
527 callFrame->codeBlock()->isStrictMode(),
528 callFrame->codeBlock()->putByIdContext());
529 JSValue baseValue = stackFrame.args[0].jsValue();
530 ASSERT(baseValue.isObject());
532 asObject(baseValue)->putDirect(callFrame->vm(), ident, stackFrame.args[2].jsValue(), slot);
534 if (accessType == static_cast<AccessType>(stubInfo->accessType)) {
536 tryCachePutByID(callFrame, codeBlock, STUB_RETURN_ADDRESS, stackFrame.args[0].jsValue(), slot, stubInfo, true);
539 CHECK_FOR_EXCEPTION_AT_END();
// Terminal put_by_id slow path installed after caching has been given up on:
// just does the dynamic put, never attempts to cache again.
542 DEFINE_STUB_FUNCTION(void, op_put_by_id_fail)
544 STUB_INIT_STACK_FRAME(stackFrame);
546 CallFrame* callFrame = stackFrame.callFrame;
547 Identifier& ident = stackFrame.args[1].identifier();
549 PutPropertySlot slot(
550 callFrame->codeBlock()->isStrictMode(),
551 callFrame->codeBlock()->putByIdContext());
552 stackFrame.args[0].jsValue().put(callFrame, ident, stackFrame.args[2].jsValue(), slot);
554 CHECK_FOR_EXCEPTION_AT_END();
// Terminal direct put_by_id slow path: direct store on the base object, no
// further caching attempts.
557 DEFINE_STUB_FUNCTION(void, op_put_by_id_direct_fail)
559 STUB_INIT_STACK_FRAME(stackFrame);
561 CallFrame* callFrame = stackFrame.callFrame;
562 Identifier& ident = stackFrame.args[1].identifier();
564 PutPropertySlot slot(
565 callFrame->codeBlock()->isStrictMode(),
566 callFrame->codeBlock()->putByIdContext());
567 JSValue baseValue = stackFrame.args[0].jsValue();
568 ASSERT(baseValue.isObject());
569 asObject(baseValue)->putDirect(callFrame->vm(), ident, stackFrame.args[2].jsValue(), slot);
571 CHECK_FOR_EXCEPTION_AT_END();
// Helper for cached put_by_id structure transitions that need more
// out-of-line property storage: grows the butterfly from oldSize (args[3]) to
// the new structure's (args[4]) out-of-line capacity, then installs the new
// structure and butterfly. NOTE(review): the return statement is elided in
// this excerpt.
574 DEFINE_STUB_FUNCTION(JSObject*, op_put_by_id_transition_realloc)
576 STUB_INIT_STACK_FRAME(stackFrame);
578 JSValue baseValue = stackFrame.args[0].jsValue();
579 int32_t oldSize = stackFrame.args[3].int32();
580 Structure* newStructure = stackFrame.args[4].structure();
581 int32_t newSize = newStructure->outOfLineCapacity();
// The JIT only calls this stub when storage must actually grow.
583 ASSERT(oldSize >= 0);
584 ASSERT(newSize > oldSize);
586 ASSERT(baseValue.isObject());
587 JSObject* base = asObject(baseValue);
588 VM& vm = *stackFrame.vm;
589 Butterfly* butterfly = base->growOutOfLineStorage(vm, oldSize, newSize);
590 base->setStructureAndButterfly(vm, newStructure, butterfly);
// Slow path for a not-yet-cached get_by_id: performs the lookup, and once the
// stub site has been seen at least once (and was not concurrently repatched),
// attempts inline-cache installation via tryCacheGetByID.
595 DEFINE_STUB_FUNCTION(EncodedJSValue, op_get_by_id)
597 STUB_INIT_STACK_FRAME(stackFrame);
598 CallFrame* callFrame = stackFrame.callFrame;
599 Identifier& ident = stackFrame.args[1].identifier();
601 CodeBlock* codeBlock = stackFrame.callFrame->codeBlock();
602 StructureStubInfo* stubInfo = &codeBlock->getStubInfo(STUB_RETURN_ADDRESS);
// Snapshot the access type so we can detect concurrent repatching below.
603 AccessType accessType = static_cast<AccessType>(stubInfo->accessType);
605 JSValue baseValue = stackFrame.args[0].jsValue();
606 PropertySlot slot(baseValue);
607 JSValue result = baseValue.get(callFrame, ident, slot);
609 if (accessType != static_cast<AccessType>(stubInfo->accessType))
610 return JSValue::encode(result);
// First execution only marks the site as seen; caching starts on the second.
612 if (!stubInfo->seenOnce())
615 tryCacheGetByID(callFrame, codeBlock, STUB_RETURN_ADDRESS, baseValue, ident, slot, stubInfo);
617 CHECK_FOR_EXCEPTION_AT_END();
618 return JSValue::encode(result);
// Called when a monomorphic self-access get_by_id cache misses: performs the
// lookup, then tries to grow the site into a polymorphic self-access list.
// When the list fills up (POLYMORPHIC_LIST_CACHE_SIZE) or the access is not a
// cacheable self access, the site is repatched to the generic stub.
// NOTE(review): some braces/else lines are elided in this excerpt.
621 DEFINE_STUB_FUNCTION(EncodedJSValue, op_get_by_id_self_fail)
623 STUB_INIT_STACK_FRAME(stackFrame);
625 CallFrame* callFrame = stackFrame.callFrame;
626 Identifier& ident = stackFrame.args[1].identifier();
628 CodeBlock* codeBlock = callFrame->codeBlock();
629 StructureStubInfo* stubInfo = &codeBlock->getStubInfo(STUB_RETURN_ADDRESS);
630 AccessType accessType = static_cast<AccessType>(stubInfo->accessType);
632 JSValue baseValue = stackFrame.args[0].jsValue();
633 PropertySlot slot(baseValue);
634 JSValue result = baseValue.get(callFrame, ident, slot);
// Bail if another thread/stub already repatched this site.
636 if (accessType != static_cast<AccessType>(stubInfo->accessType))
637 return JSValue::encode(result);
639 CHECK_FOR_EXCEPTION();
641 ConcurrentJITLocker locker(codeBlock->m_lock);
// Only cacheable self accesses on non-dictionary structures may extend the
// polymorphic list.
643 if (baseValue.isCell()
644 && slot.isCacheable()
645 && !baseValue.asCell()->structure()->isUncacheableDictionary()
646 && slot.slotBase() == baseValue) {
648 PolymorphicAccessStructureList* polymorphicStructureList;
651 if (stubInfo->accessType == access_unset)
652 stubInfo->initGetByIdSelf(callFrame->vm(), codeBlock->ownerExecutable(), baseValue.asCell()->structure());
// Promote a monomorphic self stub into a one-entry polymorphic list.
654 if (stubInfo->accessType == access_get_by_id_self) {
655 ASSERT(!stubInfo->stubRoutine);
656 polymorphicStructureList = new PolymorphicAccessStructureList(callFrame->vm(), codeBlock->ownerExecutable(), 0, stubInfo->u.getByIdSelf.baseObjectStructure.get(), true);
657 stubInfo->initGetByIdSelfList(polymorphicStructureList, 1);
659 polymorphicStructureList = stubInfo->u.getByIdSelfList.structureList;
660 listIndex = stubInfo->u.getByIdSelfList.listSize;
662 if (listIndex < POLYMORPHIC_LIST_CACHE_SIZE) {
663 stubInfo->u.getByIdSelfList.listSize++;
664 JIT::compileGetByIdSelfList(callFrame->scope()->vm(), codeBlock, stubInfo, polymorphicStructureList, listIndex, baseValue.asCell()->structure(), ident, slot, slot.cachedOffset());
// On filling the last slot, give up and go fully generic.
666 if (listIndex == (POLYMORPHIC_LIST_CACHE_SIZE - 1))
667 ctiPatchCallByReturnAddress(codeBlock, STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id_generic));
670 ctiPatchCallByReturnAddress(callFrame->codeBlock(), STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id_generic));
671 return JSValue::encode(result);
// Returns the polymorphic proto-list for a get_by_id stub, promoting a
// monomorphic proto or chain stub into a fresh two-entry list if needed, and
// yields (via listIndex) the next free slot — reserving it when the list is
// not yet full. NOTE(review): case braces/break statements appear elided in
// this excerpt.
674 static PolymorphicAccessStructureList* getPolymorphicAccessStructureListSlot(VM& vm, ScriptExecutable* owner, StructureStubInfo* stubInfo, int& listIndex)
676 PolymorphicAccessStructureList* prototypeStructureList = 0;
679 switch (stubInfo->accessType) {
680 case access_get_by_id_proto:
// Seed the list with the existing monomorphic proto stub as entry 0.
681 prototypeStructureList = new PolymorphicAccessStructureList(vm, owner, stubInfo->stubRoutine, stubInfo->u.getByIdProto.baseObjectStructure.get(), stubInfo->u.getByIdProto.prototypeStructure.get(), true);
682 stubInfo->stubRoutine.clear();
683 stubInfo->initGetByIdProtoList(prototypeStructureList, 2);
685 case access_get_by_id_chain:
686 prototypeStructureList = new PolymorphicAccessStructureList(vm, owner, stubInfo->stubRoutine, stubInfo->u.getByIdChain.baseObjectStructure.get(), stubInfo->u.getByIdChain.chain.get(), true);
687 stubInfo->stubRoutine.clear();
688 stubInfo->initGetByIdProtoList(prototypeStructureList, 2);
690 case access_get_by_id_proto_list:
691 prototypeStructureList = stubInfo->u.getByIdProtoList.structureList;
692 listIndex = stubInfo->u.getByIdProtoList.listSize;
693 if (listIndex < POLYMORPHIC_LIST_CACHE_SIZE)
694 stubInfo->u.getByIdProtoList.listSize++;
697 RELEASE_ASSERT_NOT_REACHED();
700 ASSERT(listIndex <= POLYMORPHIC_LIST_CACHE_SIZE);
701 return prototypeStructureList;
// Invokes a cached JS getter (args[1]) with args[0] as the base object; on an
// exception, redirects control through the throw trampoline using the return
// address recorded in args[2].
704 DEFINE_STUB_FUNCTION(EncodedJSValue, op_get_by_id_getter_stub)
706 STUB_INIT_STACK_FRAME(stackFrame);
707 CallFrame* callFrame = stackFrame.callFrame;
708 JSValue result = callGetter(callFrame, stackFrame.args[1].jsObject(), stackFrame.args[0].jsObject());
709 if (callFrame->hadException())
710 returnToThrowTrampoline(&callFrame->vm(), stackFrame.args[2].returnAddress(), STUB_RETURN_ADDRESS);
712 return JSValue::encode(result);
// Invokes a cached native custom getter (args[1], a GetValueFunc) for
// property args[2] on slot base args[0]; exceptions are routed through the
// throw trampoline using the return address in args[3].
715 DEFINE_STUB_FUNCTION(EncodedJSValue, op_get_by_id_custom_stub)
717 STUB_INIT_STACK_FRAME(stackFrame);
718 CallFrame* callFrame = stackFrame.callFrame;
719 JSObject* slotBase = stackFrame.args[0].jsObject();
720 PropertySlot::GetValueFunc getter = reinterpret_cast<PropertySlot::GetValueFunc>(stackFrame.args[1].asPointer);
721 const Identifier& ident = stackFrame.args[2].identifier();
722 JSValue result = getter(callFrame, slotBase, ident);
723 if (callFrame->hadException())
724 returnToThrowTrampoline(&callFrame->vm(), stackFrame.args[3].returnAddress(), STUB_RETURN_ADDRESS);
726 return JSValue::encode(result);
// Called when a polymorphic proto/chain get_by_id list misses: performs the
// lookup, then tries to append a new proto-access or chain-access entry to
// the site's polymorphic list. Uncacheable accesses repatch the site to
// cti_op_get_by_id_proto_fail; a full list repatches to the _full variant.
// NOTE(review): early returns and some braces are elided in this excerpt.
729 DEFINE_STUB_FUNCTION(EncodedJSValue, op_get_by_id_proto_list)
731 STUB_INIT_STACK_FRAME(stackFrame);
733 CallFrame* callFrame = stackFrame.callFrame;
734 const Identifier& propertyName = stackFrame.args[1].identifier();
736 CodeBlock* codeBlock = callFrame->codeBlock();
737 StructureStubInfo* stubInfo = &codeBlock->getStubInfo(STUB_RETURN_ADDRESS);
// Snapshot so a concurrent repatch of this site can be detected below.
738 AccessType accessType = static_cast<AccessType>(stubInfo->accessType);
740 JSValue baseValue = stackFrame.args[0].jsValue();
741 PropertySlot slot(baseValue);
742 JSValue result = baseValue.get(callFrame, propertyName, slot);
744 CHECK_FOR_EXCEPTION();
746 if (accessType != static_cast<AccessType>(stubInfo->accessType)
747 || !baseValue.isCell()
748 || !slot.isCacheable()
749 || baseValue.asCell()->structure()->isDictionary()
750 || baseValue.asCell()->structure()->typeInfo().prohibitsPropertyCaching()) {
751 ctiPatchCallByReturnAddress(callFrame->codeBlock(), STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id_proto_fail));
752 return JSValue::encode(result);
755 ConcurrentJITLocker locker(codeBlock->m_lock);
757 Structure* structure = baseValue.asCell()->structure();
759 JSObject* slotBaseObject = asObject(slot.slotBase());
761 PropertyOffset offset = slot.cachedOffset();
// Self accesses don't belong in a proto list; fall back to proto_fail.
763 if (slot.slotBase() == baseValue)
764 ctiPatchCallByReturnAddress(codeBlock, STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id_proto_fail));
// Direct-prototype access: append a proto-list entry.
765 else if (slot.slotBase() == baseValue.asCell()->structure()->prototypeForLookup(callFrame)) {
766 ASSERT(!baseValue.asCell()->structure()->isDictionary());
768 if (baseValue.asCell()->structure()->typeInfo().hasImpureGetOwnPropertySlot()) {
769 ctiPatchCallByReturnAddress(codeBlock, STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id_proto_fail));
770 return JSValue::encode(result);
773 // Since we're accessing a prototype in a loop, it's a good bet that it
774 // should not be treated as a dictionary.
775 if (slotBaseObject->structure()->isDictionary()) {
776 slotBaseObject->flattenDictionaryObject(callFrame->vm());
// Flattening may move the property; re-fetch its offset.
777 offset = slotBaseObject->structure()->get(callFrame->vm(), propertyName);
781 PolymorphicAccessStructureList* prototypeStructureList = getPolymorphicAccessStructureListSlot(callFrame->vm(), codeBlock->ownerExecutable(), stubInfo, listIndex);
782 if (listIndex < POLYMORPHIC_LIST_CACHE_SIZE) {
783 JIT::compileGetByIdProtoList(callFrame->scope()->vm(), callFrame, codeBlock, stubInfo, prototypeStructureList, listIndex, structure, slotBaseObject->structure(), propertyName, slot, offset);
// List now full: stop adding entries at this site.
785 if (listIndex == (POLYMORPHIC_LIST_CACHE_SIZE - 1))
786 ctiPatchCallByReturnAddress(codeBlock, STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id_proto_list_full));
// Deeper prototype-chain access: append a chain-list entry.
789 size_t count = normalizePrototypeChainForChainAccess(callFrame, baseValue, slot.slotBase(), propertyName, offset);
790 if (count == InvalidPrototypeChain) {
791 ctiPatchCallByReturnAddress(codeBlock, STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id_proto_fail));
792 return JSValue::encode(result);
795 ASSERT(!baseValue.asCell()->structure()->isDictionary());
797 PolymorphicAccessStructureList* prototypeStructureList = getPolymorphicAccessStructureListSlot(callFrame->vm(), codeBlock->ownerExecutable(), stubInfo, listIndex);
799 if (listIndex < POLYMORPHIC_LIST_CACHE_SIZE) {
800 StructureChain* protoChain = structure->prototypeChain(callFrame);
801 JIT::compileGetByIdChainList(callFrame->scope()->vm(), callFrame, codeBlock, stubInfo, prototypeStructureList, listIndex, structure, protoChain, count, propertyName, slot, offset);
803 if (listIndex == (POLYMORPHIC_LIST_CACHE_SIZE - 1))
804 ctiPatchCallByReturnAddress(codeBlock, STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id_proto_list_full));
808 return JSValue::encode(result);
// Terminal slow path for a full polymorphic proto list: plain dynamic lookup,
// no further caching.
811 DEFINE_STUB_FUNCTION(EncodedJSValue, op_get_by_id_proto_list_full)
813 STUB_INIT_STACK_FRAME(stackFrame);
815 JSValue baseValue = stackFrame.args[0].jsValue();
816 PropertySlot slot(baseValue);
817 JSValue result = baseValue.get(stackFrame.callFrame, stackFrame.args[1].identifier(), slot);
819 CHECK_FOR_EXCEPTION_AT_END();
820 return JSValue::encode(result);
// Terminal slow path after proto-caching has been abandoned: plain dynamic
// lookup, no further caching.
823 DEFINE_STUB_FUNCTION(EncodedJSValue, op_get_by_id_proto_fail)
825 STUB_INIT_STACK_FRAME(stackFrame);
827 JSValue baseValue = stackFrame.args[0].jsValue();
828 PropertySlot slot(baseValue);
829 JSValue result = baseValue.get(stackFrame.callFrame, stackFrame.args[1].identifier(), slot);
831 CHECK_FOR_EXCEPTION_AT_END();
832 return JSValue::encode(result);
// Slow path taken when the patched array.length fast path fails its checks:
// plain dynamic lookup.
835 DEFINE_STUB_FUNCTION(EncodedJSValue, op_get_by_id_array_fail)
837 STUB_INIT_STACK_FRAME(stackFrame);
839 JSValue baseValue = stackFrame.args[0].jsValue();
840 PropertySlot slot(baseValue);
841 JSValue result = baseValue.get(stackFrame.callFrame, stackFrame.args[1].identifier(), slot);
843 CHECK_FOR_EXCEPTION_AT_END();
844 return JSValue::encode(result);
// Slow path taken when the string-length trampoline fails its checks: plain
// dynamic lookup.
847 DEFINE_STUB_FUNCTION(EncodedJSValue, op_get_by_id_string_fail)
849 STUB_INIT_STACK_FRAME(stackFrame);
851 JSValue baseValue = stackFrame.args[0].jsValue();
852 PropertySlot slot(baseValue);
853 JSValue result = baseValue.get(stackFrame.callFrame, stackFrame.args[1].identifier(), slot);
855 CHECK_FOR_EXCEPTION_AT_END();
856 return JSValue::encode(result);
// Implements the check_has_instance half of `instanceof`: dispatches to a
// custom hasInstance implementation when the base object provides one, and
// throws a TypeError when the right-hand side is not an object at all.
// NOTE(review): the default-hasInstance fallthrough path between the two
// visible sections appears elided in this excerpt.
859 DEFINE_STUB_FUNCTION(EncodedJSValue, op_check_has_instance)
861 STUB_INIT_STACK_FRAME(stackFrame);
863 CallFrame* callFrame = stackFrame.callFrame;
864 JSValue value = stackFrame.args[0].jsValue();
865 JSValue baseVal = stackFrame.args[1].jsValue();
867 if (baseVal.isObject()) {
868 JSObject* baseObject = asObject(baseVal);
// The JIT handles default hasInstance inline; this stub only sees overrides.
869 ASSERT(!baseObject->structure()->typeInfo().implementsDefaultHasInstance());
870 if (baseObject->structure()->typeInfo().implementsHasInstance()) {
871 bool result = baseObject->methodTable()->customHasInstance(baseObject, callFrame, value);
872 CHECK_FOR_EXCEPTION_AT_END();
873 return JSValue::encode(jsBoolean(result));
// Non-object RHS of instanceof is a TypeError per the spec.
877 stackFrame.vm->throwException(callFrame, createInvalidParameterError(callFrame, "instanceof", baseVal));
878 VM_THROW_EXCEPTION_AT_END();
879 return JSValue::encode(JSValue());
// cti_optimize: the baseline JIT's tier-up trigger. Decides whether to kick
// off (or wait for) a DFG compile of this CodeBlock, whether to reoptimize an
// existing replacement, and whether to OSR-enter optimized code at
// args[0] (the bytecode index of the trigger site). Returns via
// STUB_SET_RETURN_ADDRESS when OSR entry succeeds.
883 DEFINE_STUB_FUNCTION(void, optimize)
885 STUB_INIT_STACK_FRAME(stackFrame);
887 // Defer GC so that it doesn't run between when we enter into this slow path and
888 // when we figure out the state of our code block. This prevents a number of
889 // awkward reentrancy scenarios, including:
891 // - The optimized version of our code block being jettisoned by GC right after
892 // we concluded that we wanted to use it.
894 // - An optimized version of our code block being installed just as we decided
895 // that it wasn't ready yet.
897 // This still leaves the following: anytime we return from cti_optimize, we may
898 // GC, and the GC may either jettison the optimized version of our code block,
899 // or it may install the optimized version of our code block even though we
900 // concluded that it wasn't ready yet.
902 // Note that jettisoning won't happen if we already initiated OSR, because in
903 // that case we would have already planted the optimized code block into the JS
905 DeferGC deferGC(stackFrame.vm->heap);
907 CallFrame* callFrame = stackFrame.callFrame;
908 CodeBlock* codeBlock = callFrame->codeBlock();
909 unsigned bytecodeIndex = stackFrame.args[0].int32();
912 // If we're attempting to OSR from a loop, assume that this should be
913 // separately optimized.
914 codeBlock->m_shouldAlwaysBeInlined = false;
917 if (Options::verboseOSR()) {
919 *codeBlock, ": Entered optimize with bytecodeIndex = ", bytecodeIndex,
920 ", executeCounter = ", codeBlock->jitExecuteCounter(),
921 ", optimizationDelayCounter = ", codeBlock->reoptimizationRetryCounter(),
923 if (codeBlock->hasOptimizedReplacement())
924 dataLog(codeBlock->replacement()->osrExitCounter());
// Bail out early if the execution counters say we haven't warmed up yet.
930 if (!codeBlock->checkIfOptimizationThresholdReached()) {
931 codeBlock->updateAllPredictions();
932 if (Options::verboseOSR())
933 dataLog("Choosing not to optimize ", *codeBlock, " yet, because the threshold hasn't been reached.\n");
// A block expected to always be inlined should not get its own DFG compile;
// reset the warm-up counter and return.
937 if (codeBlock->m_shouldAlwaysBeInlined) {
938 codeBlock->updateAllPredictions();
939 codeBlock->optimizeAfterWarmUp();
940 if (Options::verboseOSR())
941 dataLog("Choosing not to optimize ", *codeBlock, " yet, because m_shouldAlwaysBeInlined == true.\n");
945 // We cannot be in the process of asynchronous compilation and also have an optimized
948 !stackFrame.vm->worklist
949 || !(stackFrame.vm->worklist->compilationState(codeBlock) != DFG::Worklist::NotKnown
950 && codeBlock->hasOptimizedReplacement()));
952 DFG::Worklist::State worklistState;
953 if (stackFrame.vm->worklist) {
954 // The call to DFG::Worklist::completeAllReadyPlansForVM() will complete all ready
955 // (i.e. compiled) code blocks. But if it completes ours, we also need to know
956 // what the result was so that we don't plow ahead and attempt OSR or immediate
957 // reoptimization. This will have already also set the appropriate JIT execution
958 // count threshold depending on what happened, so if the compilation was anything
959 // but successful we just want to return early. See the case for worklistState ==
960 // DFG::Worklist::Compiled, below.
962 // Note that we could have alternatively just called Worklist::compilationState()
963 // here, and if it returned Compiled, we could have then called
964 // completeAndScheduleOSR() below. But that would have meant that it could take
965 // longer for code blocks to be completed: they would only complete when *their*
966 // execution count trigger fired; but that could take a while since the firing is
967 // racy. It could also mean that code blocks that never run again after being
968 // compiled would sit on the worklist until next GC. That's fine, but it's
969 // probably a waste of memory. Our goal here is to complete code blocks as soon as
970 // possible in order to minimize the chances of us executing baseline code after
971 // optimized code is already available.
974 stackFrame.vm->worklist->completeAllReadyPlansForVM(*stackFrame.vm, codeBlock);
976 worklistState = DFG::Worklist::NotKnown;
// Still compiling asynchronously: defer and keep running baseline code.
978 if (worklistState == DFG::Worklist::Compiling) {
979 // We cannot be in the process of asynchronous compilation and also have an optimized
981 RELEASE_ASSERT(!codeBlock->hasOptimizedReplacement());
982 codeBlock->setOptimizationThresholdBasedOnCompilationResult(CompilationDeferred);
986 if (worklistState == DFG::Worklist::Compiled) {
987 // If we don't have an optimized replacement but we did just get compiled, then
988 // the compilation failed or was invalidated, in which case the execution count
989 // thresholds have already been set appropriately by
990 // CodeBlock::setOptimizationThresholdBasedOnCompilationResult() and we have
991 // nothing left to do.
992 if (!codeBlock->hasOptimizedReplacement()) {
993 codeBlock->updateAllPredictions();
994 if (Options::verboseOSR())
995 dataLog("Code block ", *codeBlock, " was compiled but it doesn't have an optimized replacement.\n");
998 } else if (codeBlock->hasOptimizedReplacement()) {
999 if (Options::verboseOSR())
1000 dataLog("Considering OSR ", *codeBlock, " -> ", *codeBlock->replacement(), ".\n");
1001 // If we have an optimized replacement, then it must be the case that we entered
1002 // cti_optimize from a loop. That's because if there's an optimized replacement,
1003 // then all calls to this function will be relinked to the replacement and so
1004 // the prologue OSR will never fire.
1006 // This is an interesting threshold check. Consider that a function OSR exits
1007 // in the middle of a loop, while having a relatively low exit count. The exit
1008 // will reset the execution counter to some target threshold, meaning that this
1009 // code won't be reached until that loop heats up for >=1000 executions. But then
1010 // we do a second check here, to see if we should either reoptimize, or just
1011 // attempt OSR entry. Hence it might even be correct for
1012 // shouldReoptimizeFromLoopNow() to always return true. But we make it do some
1013 // additional checking anyway, to reduce the amount of recompilation thrashing.
1014 if (codeBlock->replacement()->shouldReoptimizeFromLoopNow()) {
1015 if (Options::verboseOSR()) {
1017 "Triggering reoptimization of ", *codeBlock,
1018 "(", *codeBlock->replacement(), ") (in loop).\n");
1020 codeBlock->reoptimize();
// No replacement yet: only start a compile if profiling looks complete enough.
1024 if (!codeBlock->shouldOptimizeNow()) {
1025 if (Options::verboseOSR()) {
1027 "Delaying optimization for ", *codeBlock,
1028 " because of insufficient profiling.\n");
1033 if (Options::verboseOSR())
1034 dataLog("Triggering optimized compilation of ", *codeBlock, "\n");
// Kick off an asynchronous DFG compile of a fresh replacement CodeBlock.
1036 RefPtr<DeferredCompilationCallback> callback =
1037 JITToDFGDeferredCompilationCallback::create();
1038 RefPtr<CodeBlock> newCodeBlock = codeBlock->newReplacement();
1039 CompilationResult result = newCodeBlock->prepareForExecutionAsynchronously(
1040 callFrame, JITCode::DFGJIT, callback, JITCompilationCanFail, bytecodeIndex);
1042 if (result != CompilationSuccessful)
// At this point an optimized replacement exists; try to OSR into it.
1046 CodeBlock* optimizedCodeBlock = codeBlock->replacement();
1047 ASSERT(JITCode::isOptimizingJIT(optimizedCodeBlock->jitType()));
1049 if (optimizedCodeBlock->jitType() == JITCode::FTLJIT) {
1050 // FTL JIT doesn't support OSR entry yet.
1051 // https://bugs.webkit.org/show_bug.cgi?id=113625
1053 // Don't attempt OSR entry again.
1054 codeBlock->dontOptimizeAnytimeSoon();
// Successful OSR entry: redirect the stub's return address into optimized code.
1058 if (void* address = DFG::prepareOSREntry(callFrame, optimizedCodeBlock, bytecodeIndex)) {
1059 if (Options::verboseOSR()) {
1061 "Performing OSR ", *codeBlock, " -> ", *optimizedCodeBlock, ", address ",
1062 RawPointer((STUB_RETURN_ADDRESS).value()), " -> ", RawPointer(address), ".\n");
1065 codeBlock->optimizeSoon();
1066 STUB_SET_RETURN_ADDRESS(address);
1070 if (Options::verboseOSR()) {
1072 "Optimizing ", *codeBlock, " -> ", *codeBlock->replacement(),
1073 " succeeded, OSR failed, after a delay of ",
1074 codeBlock->optimizationDelayCounter(), ".\n");
1077 // Count the OSR failure as a speculation failure. If this happens a lot, then
1079 optimizedCodeBlock->countOSRExit();
1081 // We are a lot more conservative about triggering reoptimization after OSR failure than
1082 // before it. If we enter the optimize_from_loop trigger with a bucket full of fail
1083 // already, then we really would like to reoptimize immediately. But this case covers
1084 // something else: there weren't many (or any) speculation failures before, but we just
1085 // failed to enter the speculative code because some variable had the wrong value or
1086 // because the OSR code decided for any spurious reason that it did not want to OSR
1087 // right now. So, we trigger reoptimization only upon the more conservative (non-loop)
1088 // reoptimization trigger.
1089 if (optimizedCodeBlock->shouldReoptimizeNow()) {
1090 if (Options::verboseOSR()) {
1092 "Triggering reoptimization of ", *codeBlock, " -> ",
1093 *codeBlock->replacement(), " (after OSR fail).\n");
1095 codeBlock->reoptimize();
1099 // OSR failed this time, but it might succeed next time! Let the code run a bit
1100 // longer and then try again.
1101 codeBlock->optimizeAfterWarmUp();
1103 #endif // ENABLE(DFG_JIT)
// Slow path for `instanceof` using the default hasInstance algorithm
// (prototype-chain walk). The JIT's inline fast path already handled the
// both-objects case, hence the ASSERT below.
1105 DEFINE_STUB_FUNCTION(EncodedJSValue, op_instanceof)
1107 STUB_INIT_STACK_FRAME(stackFrame);
1109 CallFrame* callFrame = stackFrame.callFrame;
1110 JSValue value = stackFrame.args[0].jsValue();
1111 JSValue proto = stackFrame.args[1].jsValue();
// At least one of value/proto is a non-object here.
1113 ASSERT(!value.isObject() || !proto.isObject());
1115 bool result = JSObject::defaultHasInstance(callFrame, value, proto);
1116 CHECK_FOR_EXCEPTION_AT_END();
1117 return JSValue::encode(jsBoolean(result));
// Slow path for `delete obj.prop`: returns the boolean success of the delete.
// In strict mode a failed delete throws a TypeError (ES5.1 11.4.1).
1120 DEFINE_STUB_FUNCTION(EncodedJSValue, op_del_by_id)
1122 STUB_INIT_STACK_FRAME(stackFrame);
1124 CallFrame* callFrame = stackFrame.callFrame;
// toObject may throw (e.g. base is null/undefined); checked at end.
1126 JSObject* baseObj = stackFrame.args[0].jsValue().toObject(callFrame);
1128 bool couldDelete = baseObj->methodTable()->deleteProperty(baseObj, callFrame, stackFrame.args[1].identifier());
1129 JSValue result = jsBoolean(couldDelete);
1130 if (!couldDelete && callFrame->codeBlock()->isStrictMode())
1131 stackFrame.vm->throwException(stackFrame.callFrame, createTypeError(stackFrame.callFrame, "Unable to delete property."));
1133 CHECK_FOR_EXCEPTION_AT_END();
1134 return JSValue::encode(result);
// Allocates a new JSFunction for a function declaration/expression, closing
// over the caller's current scope chain.
1137 DEFINE_STUB_FUNCTION(JSObject*, op_new_func)
1139 STUB_INIT_STACK_FRAME(stackFrame);
// If the code needs a full scope chain, the activation must already be set up.
1141 ASSERT(stackFrame.callFrame->codeBlock()->codeType() != FunctionCode || !stackFrame.callFrame->codeBlock()->needsFullScopeChain() || stackFrame.callFrame->uncheckedR(stackFrame.callFrame->codeBlock()->activationRegister()).jsValue());
1142 return JSFunction::create(stackFrame.callFrame, stackFrame.args[0].function(), stackFrame.callFrame->scope());
// Shared helper for cti_op_call_jitCompile / cti_op_construct_jitCompile:
// compiles the callee function (for call or construct, per `kind`) before the
// first invocation. Throws on the call frame if preparation fails.
// NOTE(review): the success-return tail of this function is elided in this view.
1145 inline void* jitCompileFor(CallFrame* callFrame, CodeSpecializationKind kind)
1147 // This function is called by cti_op_call_jitCompile() and
1148 // cti_op_construct_jitCompile() JIT glue trampolines to compile the
1149 // callee function that we want to call. Both cti glue trampolines are
1150 // called by JIT'ed code which has pushed a frame and initialized most of
1151 // the frame content except for the codeBlock.
1153 // Normally, the prologue of the callee is supposed to set the frame's cb
1154 // pointer to the cb of the callee. But in this case, the callee code does
1155 // not exist yet until it is compiled below. The compilation process will
1156 // allocate memory which may trigger a GC. The GC, in turn, will scan the
1157 // JSStack, and will expect the frame's cb to either be valid or 0. If
1158 // we don't initialize it, the GC will be accessing invalid memory and may
1161 // Hence, we should nullify it here before proceeding with the compilation.
1162 callFrame->setCodeBlock(0);
1164 JSFunction* function = jsCast<JSFunction*>(callFrame->callee());
1165 ASSERT(!function->isHostFunction());
1166 FunctionExecutable* executable = function->jsExecutable();
1167 JSScope* callDataScopeChain = function->scope();
1168 JSObject* error = executable->prepareForExecution(callFrame, callDataScopeChain, kind);
1171 callFrame->vm().throwException(callFrame, error);
// Glue stub: lazily compiles the callee for a JS call, forwarding any
// exception back through the op_call machinery.
1175 DEFINE_STUB_FUNCTION(void*, op_call_jitCompile)
1177 STUB_INIT_STACK_FRAME(stackFrame);
1179 #if !ASSERT_DISABLED
1181 ASSERT(stackFrame.callFrame->callee()->methodTable()->getCallData(stackFrame.callFrame->callee(), callData) == CallTypeJS);
1184 CallFrame* callFrame = stackFrame.callFrame;
1185 void* result = jitCompileFor(callFrame, CodeForCall);
// On failure, propagate the pending exception to the op_call slow path.
1187 return throwExceptionFromOpCall<void*>(stackFrame, callFrame, STUB_RETURN_ADDRESS);
// Glue stub: lazily compiles the callee for a JS `new` expression; mirrors
// op_call_jitCompile but for the construct specialization.
1192 DEFINE_STUB_FUNCTION(void*, op_construct_jitCompile)
1194 STUB_INIT_STACK_FRAME(stackFrame);
1196 #if !ASSERT_DISABLED
1197 ConstructData constructData;
1198 ASSERT(jsCast<JSFunction*>(stackFrame.callFrame->callee())->methodTable()->getConstructData(stackFrame.callFrame->callee(), constructData) == ConstructTypeJS);
1201 CallFrame* callFrame = stackFrame.callFrame;
1202 void* result = jitCompileFor(callFrame, CodeForConstruct);
// On failure, propagate the pending exception to the op_call slow path.
1204 return throwExceptionFromOpCall<void*>(stackFrame, callFrame, STUB_RETURN_ADDRESS);
// Checks that the stack can hold the missing (unsupplied) call arguments.
// Returns the missing-argument count; a negative result means stack overflow,
// which is converted into a thrown RangeError.
1209 DEFINE_STUB_FUNCTION(int, op_call_arityCheck)
1211 STUB_INIT_STACK_FRAME(stackFrame);
1213 CallFrame* callFrame = stackFrame.callFrame;
1215 int missingArgCount = CommonSlowPaths::arityCheckFor(callFrame, stackFrame.stack, CodeForCall);
1216 if (missingArgCount < 0) {
1217 ErrorWithExecFunctor functor = ErrorWithExecFunctor(createStackOverflowError);
1218 return throwExceptionFromOpCall<int>(stackFrame, callFrame, STUB_RETURN_ADDRESS, &functor);
1220 return missingArgCount;
// Construct-specialized twin of op_call_arityCheck: same missing-argument /
// stack-overflow handling, but for `new` calls.
1223 DEFINE_STUB_FUNCTION(int, op_construct_arityCheck)
1225 STUB_INIT_STACK_FRAME(stackFrame);
1227 CallFrame* callFrame = stackFrame.callFrame;
1229 int missingArgCount = CommonSlowPaths::arityCheckFor(callFrame, stackFrame.stack, CodeForConstruct);
1230 if (missingArgCount < 0) {
1231 ErrorWithExecFunctor functor = ErrorWithExecFunctor(createStackOverflowError);
1232 return throwExceptionFromOpCall<int>(stackFrame, callFrame, STUB_RETURN_ADDRESS, &functor);
1234 return missingArgCount;
// Shared helper for cti_vm_lazyLinkCall / cti_vm_lazyLinkConstruct: resolves
// (compiling if needed) the callee's machine-code entry point, then links the
// caller's CallLinkInfo so future calls go direct. Returns the entry address.
1237 inline void* lazyLinkFor(CallFrame* callFrame, CodeSpecializationKind kind)
1239 JSFunction* callee = jsCast<JSFunction*>(callFrame->callee());
1240 ExecutableBase* executable = callee->executable();
1242 MacroAssemblerCodePtr codePtr;
1243 CodeBlock* codeBlock = 0;
// Locate the call-site metadata in the caller via the return PC.
1244 CallLinkInfo* callLinkInfo = &callFrame->callerFrame()->codeBlock()->getCallLinkInfo(callFrame->returnPC());
1246 // This function is called by cti_vm_lazyLinkCall() and
1247 // cti_lazyLinkConstruct JIT glue trampolines to link the callee function
1248 // that we want to call. Both cti glue trampolines are called by JIT'ed
1249 // code which has pushed a frame and initialized most of the frame content
1250 // except for the codeBlock.
1252 // Normally, the prologue of the callee is supposed to set the frame's cb
1253 // field to the cb of the callee. But in this case, the callee may not
1254 // exist yet, and if not, it will be generated in the compilation below.
1255 // The compilation will allocate memory which may trigger a GC. The GC, in
1256 // turn, will scan the JSStack, and will expect the frame's cb to be valid
1257 // or 0. If we don't initialize it, the GC will be accessing invalid
1258 // memory and may crash.
1260 // Hence, we should nullify it here before proceeding with the compilation.
1261 callFrame->setCodeBlock(0);
1263 if (executable->isHostFunction())
1264 codePtr = executable->generatedJITCodeFor(kind)->addressForCall();
1266 FunctionExecutable* functionExecutable = static_cast<FunctionExecutable*>(executable);
1267 if (JSObject* error = functionExecutable->prepareForExecution(callFrame, callee->scope(), kind)) {
1268 callFrame->vm().throwException(callFrame, error);
1271 codeBlock = &functionExecutable->generatedBytecodeFor(kind);
// Use the arity-checking entry point when arguments are short or varargs.
1272 if (callFrame->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters())
1273 || callLinkInfo->callType == CallLinkInfo::CallVarargs)
1274 codePtr = functionExecutable->generatedJITCodeWithArityCheckFor(kind);
1276 codePtr = functionExecutable->generatedJITCodeFor(kind)->addressForCall();
// Link under the caller CodeBlock's lock; first sighting only marks "seen"
// (linking happens on the second call to avoid linking one-shot calls).
1279 ConcurrentJITLocker locker(callFrame->callerFrame()->codeBlock()->m_lock);
1280 if (!callLinkInfo->seenOnce())
1281 callLinkInfo->setSeen();
1283 JIT::linkFor(callFrame->callerFrame(), callee, callFrame->callerFrame()->codeBlock(), codeBlock, codePtr, callLinkInfo, &callFrame->vm(), kind);
1285 return codePtr.executableAddress();
// Glue stub: lazily link a JS call site to its callee's entry point.
1288 DEFINE_STUB_FUNCTION(void*, vm_lazyLinkCall)
1290 STUB_INIT_STACK_FRAME(stackFrame);
1292 CallFrame* callFrame = stackFrame.callFrame;
1293 void* result = lazyLinkFor(callFrame, CodeForCall);
// On failure, propagate the pending exception to the op_call slow path.
1295 return throwExceptionFromOpCall<void*>(stackFrame, callFrame, STUB_RETURN_ADDRESS);
// Glue stub for a call site that was monomorphically linked to one JSFunction
// but has now seen a *different* closure. If the new callee shares the same
// executable and structure (i.e. a different closure over the same code), the
// site is upgraded to a closure call keyed on (structure, executable);
// otherwise it is relinked to the generic virtual-call slow path.
1300 DEFINE_STUB_FUNCTION(void*, vm_lazyLinkClosureCall)
1302 STUB_INIT_STACK_FRAME(stackFrame);
1304 CallFrame* callFrame = stackFrame.callFrame;
1306 CodeBlock* callerCodeBlock = callFrame->callerFrame()->codeBlock();
1307 VM* vm = callerCodeBlock->vm();
1308 CallLinkInfo* callLinkInfo = &callerCodeBlock->getCallLinkInfo(callFrame->returnPC());
1309 JSFunction* callee = jsCast<JSFunction*>(callFrame->callee());
1310 ExecutableBase* executable = callee->executable();
1311 Structure* structure = callee->structure();
// We only get here from an already-linked call site seeing a new callee.
1313 ASSERT(callLinkInfo->callType == CallLinkInfo::Call);
1314 ASSERT(callLinkInfo->isLinked());
1315 ASSERT(callLinkInfo->callee);
1316 ASSERT(callee != callLinkInfo->callee.get());
1318 bool shouldLink = false;
1319 CodeBlock* calleeCodeBlock = 0;
1320 MacroAssemblerCodePtr codePtr;
// Same executable + structure as the previously linked callee: eligible for
// a closure-call stub that dispatches on those instead of function identity.
1322 if (executable == callLinkInfo->callee.get()->executable()
1323 && structure == callLinkInfo->callee.get()->structure()) {
1327 ASSERT(executable->hasJITCodeForCall());
1328 codePtr = executable->generatedJITCodeForCall()->addressForCall();
1329 if (!callee->executable()->isHostFunction()) {
1330 calleeCodeBlock = &jsCast<FunctionExecutable*>(executable)->generatedBytecodeForCall();
// Short on arguments: must enter through the arity-check thunk.
1331 if (callFrame->argumentCountIncludingThis() < static_cast<size_t>(calleeCodeBlock->numParameters())) {
1333 codePtr = executable->generatedJITCodeWithArityCheckFor(CodeForCall);
1336 } else if (callee->isHostFunction())
1337 codePtr = executable->generatedJITCodeForCall()->addressForCall();
1339 // Need to clear the code block before compilation, because compilation can GC.
1340 callFrame->setCodeBlock(0);
1342 FunctionExecutable* functionExecutable = jsCast<FunctionExecutable*>(executable);
1343 JSScope* scopeChain = callee->scope();
1344 JSObject* error = functionExecutable->prepareForExecution(callFrame, scopeChain, CodeForCall);
1346 callFrame->vm().throwException(callFrame, error);
1350 codePtr = functionExecutable->generatedJITCodeWithArityCheckFor(CodeForCall);
// Either emit the closure-call stub or fall back to the virtual call path.
1355 ConcurrentJITLocker locker(callerCodeBlock->m_lock);
1356 JIT::compileClosureCall(vm, callLinkInfo, callerCodeBlock, calleeCodeBlock, structure, executable, codePtr);
1357 callLinkInfo->hasSeenClosure = true;
1359 JIT::linkSlowCall(callerCodeBlock, callLinkInfo);
1361 return codePtr.executableAddress();
// Glue stub: lazily link a `new` call site to its callee's construct entry.
1364 DEFINE_STUB_FUNCTION(void*, vm_lazyLinkConstruct)
1366 STUB_INIT_STACK_FRAME(stackFrame);
1368 CallFrame* callFrame = stackFrame.callFrame;
1369 void* result = lazyLinkFor(callFrame, CodeForConstruct);
// On failure, propagate the pending exception to the op_call slow path.
1371 return throwExceptionFromOpCall<void*>(stackFrame, callFrame, STUB_RETURN_ADDRESS);
// Allocates a JSActivation for the current frame and pushes it onto the
// frame's scope chain. NOTE(review): the `return activation;` tail appears
// elided in this view; code left byte-identical.
1376 DEFINE_STUB_FUNCTION(JSObject*, op_push_activation)
1378 STUB_INIT_STACK_FRAME(stackFrame);
1380 JSActivation* activation = JSActivation::create(stackFrame.callFrame->vm(), stackFrame.callFrame, stackFrame.callFrame->codeBlock());
1381 stackFrame.callFrame->setScope(activation);
// Slow path for calling something that is not a JS function: dispatches to a
// host (native) function, or throws "not a function" for non-callables.
1385 DEFINE_STUB_FUNCTION(EncodedJSValue, op_call_NotJSFunction)
1387 STUB_INIT_STACK_FRAME(stackFrame);
1389 CallFrame* callFrame = stackFrame.callFrame;
1391 JSValue callee = callFrame->calleeAsValue();
1394 CallType callType = getCallData(callee, callData);
// JS functions never reach this stub; they go through the linked call paths.
1396 ASSERT(callType != CallTypeJS);
1397 if (callType != CallTypeHost) {
1398 ASSERT(callType == CallTypeNone);
1399 ErrorWithExecAndCalleeFunctor functor = ErrorWithExecAndCalleeFunctor(createNotAFunctionError, callee);
1400 return throwExceptionFromOpCall<EncodedJSValue>(stackFrame, callFrame, STUB_RETURN_ADDRESS, &functor);
1403 EncodedJSValue returnValue;
1405 SamplingTool::CallRecord callRecord(CTI_SAMPLER, true);
1406 returnValue = callData.native.function(callFrame);
1409 if (stackFrame.vm->exception())
1410 return throwExceptionFromOpCall<EncodedJSValue>(stackFrame, callFrame, STUB_RETURN_ADDRESS);
// Materializes the `arguments` object for the current frame.
1415 DEFINE_STUB_FUNCTION(EncodedJSValue, op_create_arguments)
1417 STUB_INIT_STACK_FRAME(stackFrame);
1419 Arguments* arguments = Arguments::create(*stackFrame.vm, stackFrame.callFrame);
1420 return JSValue::encode(JSValue(arguments));
// Detaches the activation (args[0]) from the stack frame before the frame is
// popped, copying captured locals into the heap-allocated activation.
1423 DEFINE_STUB_FUNCTION(void, op_tear_off_activation)
1425 STUB_INIT_STACK_FRAME(stackFrame);
1427 ASSERT(stackFrame.callFrame->codeBlock()->needsFullScopeChain());
1428 jsCast<JSActivation*>(stackFrame.args[0].jsValue())->tearOff(*stackFrame.vm);
// Detaches the `arguments` object (args[0]) from the dying stack frame. If an
// activation (args[1]) exists, argument storage follows the activation;
// otherwise the arguments object copies the values itself.
1431 DEFINE_STUB_FUNCTION(void, op_tear_off_arguments)
1433 STUB_INIT_STACK_FRAME(stackFrame);
1435 CallFrame* callFrame = stackFrame.callFrame;
1436 ASSERT(callFrame->codeBlock()->usesArguments());
1437 Arguments* arguments = jsCast<Arguments*>(stackFrame.args[0].jsValue());
1438 if (JSValue activationValue = stackFrame.args[1].jsValue()) {
1439 arguments->didTearOffActivation(callFrame, jsCast<JSActivation*>(activationValue));
1442 arguments->tearOff(callFrame);
// Notifies the legacy profiler (if enabled) that the callee in args[0] is
// about to execute.
1445 DEFINE_STUB_FUNCTION(void, op_profile_will_call)
1447 STUB_INIT_STACK_FRAME(stackFrame);
1449 if (LegacyProfiler* profiler = stackFrame.vm->enabledProfiler())
1450 profiler->willExecute(stackFrame.callFrame, stackFrame.args[0].jsValue());
// Notifies the legacy profiler (if enabled) that the callee in args[0] has
// finished executing.
1453 DEFINE_STUB_FUNCTION(void, op_profile_did_call)
1455 STUB_INIT_STACK_FRAME(stackFrame);
1457 if (LegacyProfiler* profiler = stackFrame.vm->enabledProfiler())
1458 profiler->didExecute(stackFrame.callFrame, stackFrame.args[0].jsValue());
// Constructs an array literal from args[1].int32() contiguous registers
// starting at register index args[0].int32(), using the site's allocation
// profile (args[2]) to pick the indexing type.
1461 DEFINE_STUB_FUNCTION(JSObject*, op_new_array)
1463 STUB_INIT_STACK_FRAME(stackFrame);
1465 return constructArray(stackFrame.callFrame, stackFrame.args[2].arrayAllocationProfile(), reinterpret_cast<JSValue*>(&stackFrame.callFrame->registers()[stackFrame.args[0].int32()]), stackFrame.args[1].int32());
// Implements `new Array(length)`: the size-quirk constructor (a single
// numeric argument sets length rather than becoming element 0).
1468 DEFINE_STUB_FUNCTION(JSObject*, op_new_array_with_size)
1470 STUB_INIT_STACK_FRAME(stackFrame);
1472 return constructArrayWithSizeQuirk(stackFrame.callFrame, stackFrame.args[1].arrayAllocationProfile(), stackFrame.callFrame->lexicalGlobalObject(), stackFrame.args[0].jsValue());
// Constructs an array from a CodeBlock constant buffer (a literal whose
// elements are all compile-time constants); args[0] is the buffer index,
// args[1] the element count, args[2] the allocation profile.
1475 DEFINE_STUB_FUNCTION(JSObject*, op_new_array_buffer)
1477 STUB_INIT_STACK_FRAME(stackFrame);
1479 return constructArray(stackFrame.callFrame, stackFrame.args[2].arrayAllocationProfile(), stackFrame.callFrame->codeBlock()->constantBuffer(stackFrame.args[0].int32()), stackFrame.args[1].int32());
// Slow path for `new` on something that is not a JS function: dispatches to a
// host constructor, or throws "not a constructor" for non-constructibles.
// Mirrors op_call_NotJSFunction for the construct case.
1482 DEFINE_STUB_FUNCTION(EncodedJSValue, op_construct_NotJSConstruct)
1484 STUB_INIT_STACK_FRAME(stackFrame);
1486 CallFrame* callFrame = stackFrame.callFrame;
1487 JSValue callee = callFrame->calleeAsValue();
1489 ConstructData constructData;
1490 ConstructType constructType = getConstructData(callee, constructData);
// JS constructors never reach this stub; they go through the linked paths.
1492 ASSERT(constructType != ConstructTypeJS);
1493 if (constructType != ConstructTypeHost) {
1494 ASSERT(constructType == ConstructTypeNone);
1495 ErrorWithExecAndCalleeFunctor functor = ErrorWithExecAndCalleeFunctor(createNotAConstructorError, callee);
1496 return throwExceptionFromOpCall<EncodedJSValue>(stackFrame, callFrame, STUB_RETURN_ADDRESS, &functor);
1499 EncodedJSValue returnValue;
1501 SamplingTool::CallRecord callRecord(CTI_SAMPLER, true);
1502 returnValue = constructData.native.function(callFrame);
1505 if (stackFrame.vm->exception())
1506 return throwExceptionFromOpCall<EncodedJSValue>(stackFrame, callFrame, STUB_RETURN_ADDRESS);
// Generic subscripted-load helper shared by the get_by_val stubs. Tries, in
// order: the cell fast path for string subscripts, indexed access for uint32
// subscripts (patching the call site to the string-specialized stub when the
// base is a string with an in-bounds index), private-name access, and finally
// a full Identifier-based property lookup.
1511 static JSValue getByVal(
1512 CallFrame* callFrame, JSValue baseValue, JSValue subscript, ReturnAddressPtr returnAddress)
1514 if (LIKELY(baseValue.isCell() && subscript.isString())) {
1515 if (JSValue result = baseValue.asCell()->fastGetOwnProperty(callFrame, asString(subscript)->value(callFrame)))
1519 if (subscript.isUInt32()) {
1520 uint32_t i = subscript.asUInt32();
1521 if (isJSString(baseValue) && asString(baseValue)->canGetIndex(i)) {
// Future hits at this site go straight to the string-indexing stub.
1522 ctiPatchCallByReturnAddress(callFrame->codeBlock(), returnAddress, FunctionPtr(cti_op_get_by_val_string));
1523 return asString(baseValue)->getIndex(callFrame, i);
1525 return baseValue.get(callFrame, i);
1528 if (isName(subscript))
1529 return baseValue.get(callFrame, jsCast<NameInstance*>(subscript.asCell())->privateName());
// Fallback: coerce the subscript to a property name (may run user toString).
1531 Identifier property(callFrame, subscript.toString(callFrame)->value(callFrame));
1532 return baseValue.get(callFrame, property);
// Patchable slow path for get_by_val. For object-base/int-subscript cases it
// tries to compile a specialized indexed-access stub for the observed array
// shape; after repeated failures (or an index-intercepting base) it gives up
// and relinks the site to the generic stub. Always performs the load via
// getByVal() regardless of patching.
1535 DEFINE_STUB_FUNCTION(EncodedJSValue, op_get_by_val)
1537 STUB_INIT_STACK_FRAME(stackFrame);
1539 CallFrame* callFrame = stackFrame.callFrame;
1541 JSValue baseValue = stackFrame.args[0].jsValue();
1542 JSValue subscript = stackFrame.args[1].jsValue();
1544 if (baseValue.isObject() && subscript.isInt32()) {
1545 // See if it's worth optimizing this at all.
1546 JSObject* object = asObject(baseValue);
1547 bool didOptimize = false;
// ByValInfo for this site is keyed off the bytecode offset of the op itself.
1549 unsigned bytecodeOffset = callFrame->locationAsBytecodeOffset();
1550 ASSERT(bytecodeOffset);
1551 ByValInfo& byValInfo = callFrame->codeBlock()->getByValInfo(bytecodeOffset - 1);
1552 ASSERT(!byValInfo.stubRoutine);
1554 if (hasOptimizableIndexing(object->structure())) {
1555 // Attempt to optimize.
1556 JITArrayMode arrayMode = jitArrayModeForStructure(object->structure());
1557 if (arrayMode != byValInfo.arrayMode) {
1558 JIT::compileGetByVal(&callFrame->vm(), callFrame->codeBlock(), &byValInfo, STUB_RETURN_ADDRESS, arrayMode);
1564 // If we take slow path more than 10 times without patching then make sure we
1565 // never make that mistake again. Or, if we failed to patch and we have some object
1566 // that intercepts indexed get, then don't even wait until 10 times. For cases
1567 // where we see non-index-intercepting objects, this gives 10 iterations worth of
1568 // opportunity for us to observe that the get_by_val may be polymorphic.
1569 if (++byValInfo.slowPathCount >= 10
1570 || object->structure()->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero()) {
1571 // Don't ever try to optimize.
1572 RepatchBuffer repatchBuffer(callFrame->codeBlock());
1573 repatchBuffer.relinkCallerToFunction(STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_val_generic));
// The actual load always happens here, whether or not we patched.
1578 JSValue result = getByVal(callFrame, baseValue, subscript, STUB_RETURN_ADDRESS);
1579 CHECK_FOR_EXCEPTION();
1580 return JSValue::encode(result);
// Non-patching variant of op_get_by_val, used once a call site has been
// deemed too polymorphic to specialize.
1583 DEFINE_STUB_FUNCTION(EncodedJSValue, op_get_by_val_generic)
1585 STUB_INIT_STACK_FRAME(stackFrame);
1587 CallFrame* callFrame = stackFrame.callFrame;
1589 JSValue baseValue = stackFrame.args[0].jsValue();
1590 JSValue subscript = stackFrame.args[1].jsValue();
1592 JSValue result = getByVal(callFrame, baseValue, subscript, STUB_RETURN_ADDRESS);
1593 CHECK_FOR_EXCEPTION();
1594 return JSValue::encode(result);
// get_by_val specialized for string bases with uint32 indices (installed by
// getByVal()'s patching). If the base stops being a string, the site is
// un-specialized back to the regular op_get_by_val stub.
1597 DEFINE_STUB_FUNCTION(EncodedJSValue, op_get_by_val_string)
1599 STUB_INIT_STACK_FRAME(stackFrame);
1601 CallFrame* callFrame = stackFrame.callFrame;
1603 JSValue baseValue = stackFrame.args[0].jsValue();
1604 JSValue subscript = stackFrame.args[1].jsValue();
1608 if (LIKELY(subscript.isUInt32())) {
1609 uint32_t i = subscript.asUInt32();
1610 if (isJSString(baseValue) && asString(baseValue)->canGetIndex(i))
1611 result = asString(baseValue)->getIndex(callFrame, i);
1613 result = baseValue.get(callFrame, i);
// Base is no longer a string: undo the specialization for this site.
1614 if (!isJSString(baseValue))
1615 ctiPatchCallByReturnAddress(callFrame->codeBlock(), STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_val));
1617 } else if (isName(subscript))
1618 result = baseValue.get(callFrame, jsCast<NameInstance*>(subscript.asCell())->privateName());
1620 Identifier property(callFrame, subscript.toString(callFrame)->value(callFrame));
1621 result = baseValue.get(callFrame, property);
1624 CHECK_FOR_EXCEPTION_AT_END();
1625 return JSValue::encode(result);
// Generic subscripted-store helper shared by the put_by_val stubs: fast
// indexed store when possible, then putByIndex, private-name put, or a full
// Identifier-based put. Strict-mode failures are surfaced via the PutPropertySlot.
1628 static void putByVal(CallFrame* callFrame, JSValue baseValue, JSValue subscript, JSValue value)
1630 if (LIKELY(subscript.isUInt32())) {
1631 uint32_t i = subscript.asUInt32();
1632 if (baseValue.isObject()) {
1633 JSObject* object = asObject(baseValue);
// Fast path: in-bounds store into the existing indexed storage.
1634 if (object->canSetIndexQuickly(i))
1635 object->setIndexQuickly(callFrame->vm(), i, value);
1637 object->methodTable()->putByIndex(object, callFrame, i, value, callFrame->codeBlock()->isStrictMode());
1639 baseValue.putByIndex(callFrame, i, value, callFrame->codeBlock()->isStrictMode());
1640 } else if (isName(subscript)) {
1641 PutPropertySlot slot(callFrame->codeBlock()->isStrictMode());
1642 baseValue.put(callFrame, jsCast<NameInstance*>(subscript.asCell())->privateName(), value, slot);
1644 Identifier property(callFrame, subscript.toString(callFrame)->value(callFrame));
1645 if (!callFrame->vm().exception()) { // Don't put to an object if toString threw an exception.
1646 PutPropertySlot slot(callFrame->codeBlock()->isStrictMode());
1647 baseValue.put(callFrame, property, value, slot);
// Patchable slow path for put_by_val; structured identically to op_get_by_val:
// try to compile a shape-specialized store stub for object/int cases, give up
// to the generic stub after 10 misses or an index-intercepting base, and
// always perform the store via putByVal().
1652 DEFINE_STUB_FUNCTION(void, op_put_by_val)
1654 STUB_INIT_STACK_FRAME(stackFrame);
1656 CallFrame* callFrame = stackFrame.callFrame;
1658 JSValue baseValue = stackFrame.args[0].jsValue();
1659 JSValue subscript = stackFrame.args[1].jsValue();
1660 JSValue value = stackFrame.args[2].jsValue();
1662 if (baseValue.isObject() && subscript.isInt32()) {
1663 // See if it's worth optimizing at all.
1664 JSObject* object = asObject(baseValue);
1665 bool didOptimize = false;
// ByValInfo for this site is keyed off the bytecode offset of the op itself.
1667 unsigned bytecodeOffset = callFrame->locationAsBytecodeOffset();
1668 ASSERT(bytecodeOffset);
1669 ByValInfo& byValInfo = callFrame->codeBlock()->getByValInfo(bytecodeOffset - 1);
1670 ASSERT(!byValInfo.stubRoutine);
1672 if (hasOptimizableIndexing(object->structure())) {
1673 // Attempt to optimize.
1674 JITArrayMode arrayMode = jitArrayModeForStructure(object->structure());
1675 if (arrayMode != byValInfo.arrayMode) {
1676 JIT::compilePutByVal(&callFrame->vm(), callFrame->codeBlock(), &byValInfo, STUB_RETURN_ADDRESS, arrayMode);
1682 // If we take slow path more than 10 times without patching then make sure we
1683 // never make that mistake again. Or, if we failed to patch and we have some object
1684 // that intercepts indexed get, then don't even wait until 10 times. For cases
1685 // where we see non-index-intercepting objects, this gives 10 iterations worth of
1686 // opportunity for us to observe that the get_by_val may be polymorphic.
1687 if (++byValInfo.slowPathCount >= 10
1688 || object->structure()->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero()) {
1689 // Don't ever try to optimize.
1690 RepatchBuffer repatchBuffer(callFrame->codeBlock());
1691 repatchBuffer.relinkCallerToFunction(STUB_RETURN_ADDRESS, FunctionPtr(cti_op_put_by_val_generic));
// The actual store always happens here, whether or not we patched.
1696 putByVal(callFrame, baseValue, subscript, value);
1698 CHECK_FOR_EXCEPTION_AT_END();
// Non-patching variant of op_put_by_val, used once a call site has been
// deemed too polymorphic to specialize.
1701 DEFINE_STUB_FUNCTION(void, op_put_by_val_generic)
1703 STUB_INIT_STACK_FRAME(stackFrame);
1705 CallFrame* callFrame = stackFrame.callFrame;
1707 JSValue baseValue = stackFrame.args[0].jsValue();
1708 JSValue subscript = stackFrame.args[1].jsValue();
1709 JSValue value = stackFrame.args[2].jsValue();
1711 putByVal(callFrame, baseValue, subscript, value);
1713 CHECK_FOR_EXCEPTION_AT_END();
// Builds the callee frame for a varargs call (`f.apply(thisValue, arguments)`),
// spreading `arguments` onto the stack starting at firstFreeRegister.
// Returns the new CallFrame, or throws on failure.
1716 DEFINE_STUB_FUNCTION(void*, op_load_varargs)
1718 STUB_INIT_STACK_FRAME(stackFrame);
1720 CallFrame* callFrame = stackFrame.callFrame;
1721 JSStack* stack = stackFrame.stack;
1722 JSValue thisValue = stackFrame.args[0].jsValue();
1723 JSValue arguments = stackFrame.args[1].jsValue();
1724 int firstFreeRegister = stackFrame.args[2].int32();
1726 CallFrame* newCallFrame = loadVarargs(callFrame, stack, thisValue, arguments, firstFreeRegister);
1728 VM_THROW_EXCEPTION();
1729 return newCallFrame;
// Slow path for the jless branch: evaluates `src1 < src2` with full JS
// coercion semantics. NOTE(review): the `return result;` tail appears elided
// in this view; code left byte-identical.
1732 DEFINE_STUB_FUNCTION(int, op_jless)
1734 STUB_INIT_STACK_FRAME(stackFrame);
1736 JSValue src1 = stackFrame.args[0].jsValue();
1737 JSValue src2 = stackFrame.args[1].jsValue();
1738 CallFrame* callFrame = stackFrame.callFrame;
1740 bool result = jsLess<true>(callFrame, src1, src2);
1741 CHECK_FOR_EXCEPTION_AT_END();
// Slow path for the jlesseq branch: evaluates `src1 <= src2` with full JS
// coercion semantics.
1745 DEFINE_STUB_FUNCTION(int, op_jlesseq)
1747 STUB_INIT_STACK_FRAME(stackFrame);
1749 JSValue src1 = stackFrame.args[0].jsValue();
1750 JSValue src2 = stackFrame.args[1].jsValue();
1751 CallFrame* callFrame = stackFrame.callFrame;
1753 bool result = jsLessEq<true>(callFrame, src1, src2);
1754 CHECK_FOR_EXCEPTION_AT_END();
// Slow path for the jgreater branch: `src1 > src2` is computed as
// `src2 < src1` with operand evaluation order preserved (template flag false).
1758 DEFINE_STUB_FUNCTION(int, op_jgreater)
1760 STUB_INIT_STACK_FRAME(stackFrame);
1762 JSValue src1 = stackFrame.args[0].jsValue();
1763 JSValue src2 = stackFrame.args[1].jsValue();
1764 CallFrame* callFrame = stackFrame.callFrame;
1766 bool result = jsLess<false>(callFrame, src2, src1);
1767 CHECK_FOR_EXCEPTION_AT_END();
// Slow path for jump-if-greater-or-equal: `src2 <= src1` with swapped
// operands, mirroring op_jgreater above.
1771 DEFINE_STUB_FUNCTION(int, op_jgreatereq)
1773 STUB_INIT_STACK_FRAME(stackFrame);
1775 JSValue src1 = stackFrame.args[0].jsValue();
1776 JSValue src2 = stackFrame.args[1].jsValue();
1777 CallFrame* callFrame = stackFrame.callFrame;
1779 bool result = jsLessEq<false>(callFrame, src2, src1);
1780 CHECK_FOR_EXCEPTION_AT_END();
// NOTE(review): `return result;` not visible — presumed lost in extraction.
// Slow path for jump-if-true: coerces the operand to boolean
// (toBoolean can observe masquerades-as-undefined objects, etc.).
1784 DEFINE_STUB_FUNCTION(int, op_jtrue)
1786 STUB_INIT_STACK_FRAME(stackFrame);
1788 JSValue src1 = stackFrame.args[0].jsValue();
1790 bool result = src1.toBoolean(stackFrame.callFrame);
1791 CHECK_FOR_EXCEPTION_AT_END();
// NOTE(review): `return result;` not visible — presumed lost in extraction.
// Slow path for abstract equality (==, ECMA-262 11.9.3).
// JSVALUE32_64 builds use a hand-written case analysis below; JSVALUE64
// builds (the #else arm) defer to JSValue::equalSlowCaseInline.
// NOTE(review): this excerpt is missing many structural lines — braces,
// several `if (...)` conditions, `return`s, and (presumably) an enclosing
// re-dispatch loop after the toPrimitive conversions — judging by the gaps
// in the embedded original line numbers. Read against upstream before editing.
1795 DEFINE_STUB_FUNCTION(int, op_eq)
1797 STUB_INIT_STACK_FRAME(stackFrame);
1799 JSValue src1 = stackFrame.args[0].jsValue();
1800 JSValue src2 = stackFrame.args[1].jsValue();
1802 #if USE(JSVALUE32_64)
// undefined == x: true for null/undefined, and for cells whose structure
// masquerades as undefined in this lexical global object.
1804 if (src2.isUndefined()) {
1805 return src1.isNull() ||
1806 (src1.isCell() && src1.asCell()->structure()->masqueradesAsUndefined(stackFrame.callFrame->lexicalGlobalObject()))
1807 || src1.isUndefined();
// null == x: symmetric to the undefined case above.
1810 if (src2.isNull()) {
1811 return src1.isUndefined() ||
1812 (src1.isCell() && src1.asCell()->structure()->masqueradesAsUndefined(stackFrame.callFrame->lexicalGlobalObject()))
// int32 lhs: compare numerically, converting the rhs via toNumber if needed.
1816 if (src1.isInt32()) {
1817 if (src2.isDouble())
1818 return src1.asInt32() == src2.asDouble();
1819 double d = src2.toNumber(stackFrame.callFrame);
1820 CHECK_FOR_EXCEPTION();
1821 return src1.asInt32() == d;
// double lhs: same shape as the int32 case.
// NOTE(review): the condition guarding the next return (presumably
// `if (src2.isInt32())`) is missing from this excerpt.
1824 if (src1.isDouble()) {
1826 return src1.asDouble() == src2.asInt32();
1827 double d = src2.toNumber(stackFrame.callFrame);
1828 CHECK_FOR_EXCEPTION();
1829 return src1.asDouble() == d;
// boolean lhs: true/false compare as 1.0/0.0 against ToNumber(rhs).
// NOTE(review): the fast paths and the `return d == 1.0;` / `== 0.0;`
// lines inside these two branches are missing from this excerpt.
1832 if (src1.isTrue()) {
1835 double d = src2.toNumber(stackFrame.callFrame);
1836 CHECK_FOR_EXCEPTION();
1840 if (src1.isFalse()) {
1843 double d = src2.toNumber(stackFrame.callFrame);
1844 CHECK_FOR_EXCEPTION();
// undefined/null lhs against a cell rhs: only masquerading cells are equal.
1848 if (src1.isUndefined())
1849 return src2.isCell() && src2.asCell()->structure()->masqueradesAsUndefined(stackFrame.callFrame->lexicalGlobalObject());
1852 return src2.isCell() && src2.asCell()->structure()->masqueradesAsUndefined(stackFrame.callFrame->lexicalGlobalObject());
// Both operands are cells from here on.
1854 JSCell* cell1 = src1.asCell();
// string lhs: compare the string's numeric value against numeric/boolean rhs,
// or do a direct string comparison when the rhs is also a string.
// NOTE(review): the `if (src2.isInt32())` / boolean guards for the next
// returns are missing from this excerpt.
1856 if (cell1->isString()) {
1858 return jsToNumber(jsCast<JSString*>(cell1)->value(stackFrame.callFrame)) == src2.asInt32();
1860 if (src2.isDouble())
1861 return jsToNumber(jsCast<JSString*>(cell1)->value(stackFrame.callFrame)) == src2.asDouble();
1864 return jsToNumber(jsCast<JSString*>(cell1)->value(stackFrame.callFrame)) == 1.0;
1867 return jsToNumber(jsCast<JSString*>(cell1)->value(stackFrame.callFrame)) == 0.0;
1869 JSCell* cell2 = src2.asCell();
1870 if (cell2->isString())
1871 return jsCast<JSString*>(cell1)->value(stackFrame.callFrame) == jsCast<JSString*>(cell2)->value(stackFrame.callFrame);
// Object rhs vs string lhs: convert the object to a primitive and
// (in the full source, presumably) re-run the comparison.
1873 src2 = asObject(cell2)->toPrimitive(stackFrame.callFrame);
1874 CHECK_FOR_EXCEPTION();
// Object lhs: identical objects are equal; otherwise convert lhs to a
// primitive and retry.
1878 if (src2.isObject())
1879 return asObject(cell1) == asObject(src2);
1880 src1 = asObject(cell1)->toPrimitive(stackFrame.callFrame);
1881 CHECK_FOR_EXCEPTION();
1884 #else // USE(JSVALUE32_64)
// 64-bit builds: single shared slow-case helper does the whole algorithm.
1885 CallFrame* callFrame = stackFrame.callFrame;
1887 bool result = JSValue::equalSlowCaseInline(callFrame, src1, src2);
1888 CHECK_FOR_EXCEPTION_AT_END();
// NOTE(review): `return result;` not visible — presumed lost in extraction.
1890 #endif // USE(JSVALUE32_64)
// Fast equality for two values already known to be JSStrings.
// Only compiled to do real work on JSVALUE32_64; on other configurations
// this stub must never be reached (RELEASE_ASSERT below).
1893 DEFINE_STUB_FUNCTION(int, op_eq_strings)
1895 #if USE(JSVALUE32_64)
1896 STUB_INIT_STACK_FRAME(stackFrame);
1898 JSString* string1 = stackFrame.args[0].jsString();
1899 JSString* string2 = stackFrame.args[1].jsString();
1901 ASSERT(string1->isString());
1902 ASSERT(string2->isString());
// Compares the resolved string contents (value() may resolve a rope).
1903 return string1->value(stackFrame.callFrame) == string2->value(stackFrame.callFrame);
// NOTE(review): the `#else` line between the return above and the assert
// below appears to have been lost in extraction.
1906 RELEASE_ASSERT_NOT_REACHED();
// Materializes a JSFunction for a function expression, closing over the
// current scope chain.
1911 DEFINE_STUB_FUNCTION(JSObject*, op_new_func_exp)
1913 STUB_INIT_STACK_FRAME(stackFrame);
1914 CallFrame* callFrame = stackFrame.callFrame;
1916 FunctionExecutable* function = stackFrame.args[0].function();
1917 JSFunction* func = JSFunction::create(callFrame, function, callFrame->scope());
// Sanity check: if this frame needs a full scope chain, its activation
// must already have been created before any closure captures the scope.
1918 ASSERT(callFrame->codeBlock()->codeType() != FunctionCode || !callFrame->codeBlock()->needsFullScopeChain() || callFrame->uncheckedR(callFrame->codeBlock()->activationRegister()).jsValue());
// NOTE(review): `return func;` not visible — presumed lost in extraction.
// Creates a RegExpObject from a precompiled RegExp. An invalid RegExp
// (bad flags detected at compile time) throws a SyntaxError instead.
1923 DEFINE_STUB_FUNCTION(JSObject*, op_new_regexp)
1925 STUB_INIT_STACK_FRAME(stackFrame);
1927 CallFrame* callFrame = stackFrame.callFrame;
1929 RegExp* regExp = stackFrame.args[0].regExp();
1930 if (!regExp->isValid()) {
1931 stackFrame.vm->throwException(callFrame, createSyntaxError(callFrame, "Invalid flags supplied to RegExp constructor."))
1932 VM_THROW_EXCEPTION();
// Uses the lexical global object's RegExp structure so the new object
// gets the correct prototype for the executing code's realm.
1935 return RegExpObject::create(*stackFrame.vm, stackFrame.callFrame->lexicalGlobalObject(), stackFrame.callFrame->lexicalGlobalObject()->regExpStructure(), regExp);
// Slow path for `eval(...)` call sites. If the callee really is the global
// eval function, runs the eval here; otherwise returns an empty EncodedJSValue
// to tell the JIT to fall back to a normal call.
1938 DEFINE_STUB_FUNCTION(EncodedJSValue, op_call_eval)
1940 STUB_INIT_STACK_FRAME(stackFrame);
1942 CallFrame* callFrame = stackFrame.callFrame;
1943 CallFrame* callerFrame = callFrame->callerFrame();
// Same activation-created invariant as op_new_func_exp, but for the caller.
1944 ASSERT(callFrame->callerFrame()->codeBlock()->codeType() != FunctionCode
1945 || !callFrame->callerFrame()->codeBlock()->needsFullScopeChain()
1946 || callFrame->callerFrame()->uncheckedR(callFrame->callerFrame()->codeBlock()->activationRegister()).jsValue());
// Flesh out the partially-constructed frame: eval runs in the caller's
// scope, and a null CodeBlock marks this as a host-ish frame.
1948 callFrame->setScope(callerFrame->scope());
1949 callFrame->setReturnPC(static_cast<Instruction*>((STUB_RETURN_ADDRESS).value()));
1950 callFrame->setCodeBlock(0);
// Not the real global eval: signal "do an ordinary call" with an empty value.
1952 if (!isHostFunction(callFrame->calleeAsValue(), globalFuncEval))
1953 return JSValue::encode(JSValue());
1955 JSValue result = eval(callFrame);
1956 if (stackFrame.vm->exception())
1957 return throwExceptionFromOpCall<EncodedJSValue>(stackFrame, callFrame, STUB_RETURN_ADDRESS);
1959 return JSValue::encode(result);
// Implements the `throw` opcode: records the exception, unwinds to the
// nearest handler via jitThrow, then redirects the stub return address to
// the handler's catch routine and returns the frame to resume in.
1962 DEFINE_STUB_FUNCTION(void*, op_throw)
1964 STUB_INIT_STACK_FRAME(stackFrame);
1965 stackFrame.vm->throwException(stackFrame.callFrame, stackFrame.args[0].jsValue());
1966 ExceptionHandler handler = jitThrow(stackFrame.vm, stackFrame.callFrame, stackFrame.args[0].jsValue(), STUB_RETURN_ADDRESS);
1967 STUB_SET_RETURN_ADDRESS(handler.catchRoutine);
1968 return handler.callFrame;
// Begins a for-in loop: returns a property-name iterator for the object,
// reusing the structure's cached enumeration data when the prototype chain
// still matches, otherwise building a fresh iterator.
1971 DEFINE_STUB_FUNCTION(JSPropertyNameIterator*, op_get_pnames)
1973 STUB_INIT_STACK_FRAME(stackFrame);
1975 CallFrame* callFrame = stackFrame.callFrame;
1976 JSObject* o = stackFrame.args[0].jsObject();
1977 Structure* structure = o->structure();
1978 JSPropertyNameIterator* jsPropertyNameIterator = structure->enumerationCache();
// Cache is only valid if the prototype chain hasn't changed since caching.
1979 if (!jsPropertyNameIterator || jsPropertyNameIterator->cachedPrototypeChain() != structure->prototypeChain(callFrame))
1980 jsPropertyNameIterator = JSPropertyNameIterator::create(callFrame, o);
1981 return jsPropertyNameIterator;
// For-in helper: checks whether `base` (still) has the named property,
// converting the JSString name to an Identifier for the lookup. hasProperty
// walks the prototype chain and can call proxies/getters, hence may throw.
1984 DEFINE_STUB_FUNCTION(int, has_property)
1986 STUB_INIT_STACK_FRAME(stackFrame);
1988 JSObject* base = stackFrame.args[0].jsObject();
1989 JSString* property = stackFrame.args[1].jsString();
1990 int result = base->hasProperty(stackFrame.callFrame, Identifier(stackFrame.callFrame, property->value(stackFrame.callFrame)));
1991 CHECK_FOR_EXCEPTION_AT_END();
// NOTE(review): `return result;` not visible — presumed lost in extraction.
// Implements `with (expr)`: coerces the expression to an object (which can
// throw) and pushes a JSWithScope wrapping it onto the scope chain.
1995 DEFINE_STUB_FUNCTION(void, op_push_with_scope)
1997 STUB_INIT_STACK_FRAME(stackFrame);
1999 JSObject* o = stackFrame.args[0].jsValue().toObject(stackFrame.callFrame);
2000 CHECK_FOR_EXCEPTION_VOID();
2001 stackFrame.callFrame->setScope(JSWithScope::create(stackFrame.callFrame, o));
// Pops the innermost scope off the scope chain (end of `with`/catch scope).
2004 DEFINE_STUB_FUNCTION(void, op_pop_scope)
2006 STUB_INIT_STACK_FRAME(stackFrame);
2008 stackFrame.callFrame->setScope(stackFrame.callFrame->scope()->next());
// Pushes a single-binding JSNameScope (used e.g. for catch variables and
// named function expressions): args are identifier, initial value, and
// attribute flags for the binding.
2011 DEFINE_STUB_FUNCTION(void, op_push_name_scope)
2013 STUB_INIT_STACK_FRAME(stackFrame);
2015 JSNameScope* scope = JSNameScope::create(stackFrame.callFrame, stackFrame.args[0].identifier(), stackFrame.args[1].jsValue(), stackFrame.args[2].int32());
2017 CallFrame* callFrame = stackFrame.callFrame;
2018 callFrame->setScope(scope);
// Stores `value` at an integer index of what must be a JSArray (used for
// array-literal initialization). putDirectIndex bypasses setters — the
// target is a freshly created array the JIT controls.
2021 DEFINE_STUB_FUNCTION(void, op_put_by_index)
2023 STUB_INIT_STACK_FRAME(stackFrame);
2025 CallFrame* callFrame = stackFrame.callFrame;
2026 unsigned property = stackFrame.args[1].int32();
2028 JSValue arrayValue = stackFrame.args[0].jsValue();
2029 ASSERT(isJSArray(arrayValue));
2030 asArray(arrayValue)->putDirectIndex(callFrame, property, stackFrame.args[2].jsValue());
// Immediate (integer) switch: looks the scrutinee up in the code block's
// jump table and returns the machine-code address to jump to. Doubles that
// hold an exact int32 are treated as that int32; anything else takes the
// default target.
2033 DEFINE_STUB_FUNCTION(void*, op_switch_imm)
2035 STUB_INIT_STACK_FRAME(stackFrame);
2037 JSValue scrutinee = stackFrame.args[0].jsValue();
2038 unsigned tableIndex = stackFrame.args[1].int32();
2039 CallFrame* callFrame = stackFrame.callFrame;
2040 CodeBlock* codeBlock = callFrame->codeBlock();
2042 if (scrutinee.isInt32())
2043 return codeBlock->switchJumpTable(tableIndex).ctiForValue(scrutinee.asInt32()).executableAddress();
// Exact-integer doubles (e.g. 3.0) must hit the same case as their int32.
2044 if (scrutinee.isDouble() && scrutinee.asDouble() == static_cast<int32_t>(scrutinee.asDouble()))
2045 return codeBlock->switchJumpTable(tableIndex).ctiForValue(static_cast<int32_t>(scrutinee.asDouble())).executableAddress();
2046 return codeBlock->switchJumpTable(tableIndex).ctiDefault.executableAddress();
// Single-character switch: only a one-character string selects a case;
// everything else falls through to the default target. Resolving a rope via
// value() can allocate, hence the exception check.
2049 DEFINE_STUB_FUNCTION(void*, op_switch_char)
2051 STUB_INIT_STACK_FRAME(stackFrame);
2053 JSValue scrutinee = stackFrame.args[0].jsValue();
2054 unsigned tableIndex = stackFrame.args[1].int32();
2055 CallFrame* callFrame = stackFrame.callFrame;
2056 CodeBlock* codeBlock = callFrame->codeBlock();
// Start from the default; overwrite only on a 1-char string match.
2058 void* result = codeBlock->switchJumpTable(tableIndex).ctiDefault.executableAddress();
2060 if (scrutinee.isString()) {
2061 StringImpl* value = asString(scrutinee)->value(callFrame).impl();
2062 if (value->length() == 1)
2063 result = codeBlock->switchJumpTable(tableIndex).ctiForValue((*value)[0]).executableAddress();
2066 CHECK_FOR_EXCEPTION_AT_END();
// NOTE(review): `return result;` not visible — presumed lost in extraction.
// String switch: looks the (resolved) string up in the string jump table;
// non-string scrutinees take the default target.
2070 DEFINE_STUB_FUNCTION(void*, op_switch_string)
2072 STUB_INIT_STACK_FRAME(stackFrame);
2074 JSValue scrutinee = stackFrame.args[0].jsValue();
2075 unsigned tableIndex = stackFrame.args[1].int32();
2076 CallFrame* callFrame = stackFrame.callFrame;
2077 CodeBlock* codeBlock = callFrame->codeBlock();
2079 void* result = codeBlock->stringSwitchJumpTable(tableIndex).ctiDefault.executableAddress();
2081 if (scrutinee.isString()) {
// value() may resolve a rope (can allocate/throw) — checked below.
2082 StringImpl* value = asString(scrutinee)->value(callFrame).impl();
2083 result = codeBlock->stringSwitchJumpTable(tableIndex).ctiForValue(value).executableAddress();
2086 CHECK_FOR_EXCEPTION_AT_END();
// NOTE(review): `return result;` not visible — presumed lost in extraction.
// Installs an accessor property: builds a GetterSetter from the getter
// (args[2]) and setter (args[3]) — each either an object or undefined, with
// at least one present — and defines it on the base object under the
// identifier in args[1].
2090 DEFINE_STUB_FUNCTION(void, op_put_getter_setter)
2092 STUB_INIT_STACK_FRAME(stackFrame);
2094 CallFrame* callFrame = stackFrame.callFrame;
2096 ASSERT(stackFrame.args[0].jsValue().isObject());
2097 JSObject* baseObj = asObject(stackFrame.args[0].jsValue());
2099 GetterSetter* accessor = GetterSetter::create(callFrame);
2101 JSValue getter = stackFrame.args[2].jsValue();
2102 JSValue setter = stackFrame.args[3].jsValue();
2103 ASSERT(getter.isObject() || getter.isUndefined());
2104 ASSERT(setter.isObject() || setter.isUndefined());
2105 ASSERT(getter.isObject() || setter.isObject());
// Only populate the halves that were actually supplied.
2107 if (!getter.isUndefined())
2108 accessor->setGetter(callFrame->vm(), asObject(getter));
2109 if (!setter.isUndefined())
2110 accessor->setSetter(callFrame->vm(), asObject(setter));
2111 baseObj->putDirectAccessor(callFrame, stackFrame.args[1].identifier(), accessor, Accessor);
// Throws a statically-detected error: args[0] is a value describing the
// message, args[1] selects ReferenceError (non-zero) vs TypeError.
2114 DEFINE_STUB_FUNCTION(void, op_throw_static_error)
2116 STUB_INIT_STACK_FRAME(stackFrame);
2118 CallFrame* callFrame = stackFrame.callFrame;
2119 String message = errorDescriptionForValue(callFrame, stackFrame.args[0].jsValue())->value(callFrame);
// `.asInt32` here reads a union field of the stub argument, not a method call.
2120 if (stackFrame.args[1].asInt32)
2121 stackFrame.vm->throwException(callFrame, createReferenceError(callFrame, message));
// NOTE(review): the `else` line between the two throwException calls appears
// to have been lost in extraction — as written both would execute.
2123 stackFrame.vm->throwException(callFrame, createTypeError(callFrame, message));
2124 VM_THROW_EXCEPTION_AT_END();
// Notifies the interpreter's debugger hook with the current source position
// (debug-hook kind, first/last line, column).
2127 DEFINE_STUB_FUNCTION(void, op_debug)
2129 STUB_INIT_STACK_FRAME(stackFrame);
2131 CallFrame* callFrame = stackFrame.callFrame;
2133 int debugHookID = stackFrame.args[0].int32();
2134 int firstLine = stackFrame.args[1].int32();
2135 int lastLine = stackFrame.args[2].int32();
2136 int column = stackFrame.args[3].int32();
2138 stackFrame.vm->interpreter->debug(callFrame, static_cast<DebugHookID>(debugHookID), firstLine, lastLine, column);
// Rethrow path for an exception already recorded on the VM (vm->exception()
// and vm->exceptionLocation set by earlier code): finds the handler,
// repoints the stub return address at its catch routine, and returns the
// frame to resume in — same contract as op_throw above.
2141 DEFINE_STUB_FUNCTION(void*, vm_throw)
2143 STUB_INIT_STACK_FRAME(stackFrame);
2144 VM* vm = stackFrame.vm;
2145 ExceptionHandler handler = jitThrow(vm, stackFrame.callFrame, vm->exception(), vm->exceptionLocation);
2146 STUB_SET_RETURN_ADDRESS(handler.catchRoutine);
2147 return handler.callFrame;
// Exception landing pad called from JIT code. Two build variants: on
// JSVALUE32_64 the handler is returned encoded in registers
// (EncodedExceptionHandler); otherwise the struct is returned directly.
// NOTE(review): each variant's visible body jumps from "nothing more to
// handle" straight into the handled case — the guard (presumably
// `if (!callFrame)`), its closing brace, and the `#else` separating the two
// variants appear to have been lost in extraction.
2150 #if USE(JSVALUE32_64)
2151 EncodedExceptionHandler JIT_STUB cti_vm_handle_exception(CallFrame* callFrame)
2153 ASSERT(!callFrame->hasHostCallFrameFlag());
2155 // The entire stack has already been unwound. Nothing more to handle.
2156 return encode(uncaughtExceptionHandler());
// Handled case: re-anchor topCallFrame and compute the handler to resume at.
2159 VM* vm = callFrame->codeBlock()->vm();
2160 vm->topCallFrame = callFrame;
2161 return encode(jitThrowNew(vm, callFrame, vm->exception()));
2164 ExceptionHandler JIT_STUB cti_vm_handle_exception(CallFrame* callFrame)
2166 ASSERT(!callFrame->hasHostCallFrameFlag());
2168 // The entire stack has already been unwound. Nothing more to handle.
2169 return uncaughtExceptionHandler();
2172 VM* vm = callFrame->codeBlock()->vm();
2173 vm->topCallFrame = callFrame;
2174 return jitThrowNew(vm, callFrame, vm->exception());
// ToObject coercion stub: wraps primitives in their object form; throws a
// TypeError (via toObject) for null/undefined.
2178 DEFINE_STUB_FUNCTION(EncodedJSValue, to_object)
2180 STUB_INIT_STACK_FRAME(stackFrame);
2182 CallFrame* callFrame = stackFrame.callFrame;
2183 return JSValue::encode(stackFrame.args[0].jsValue().toObject(callFrame));
// Resolves which scope object holds the identifier named by bytecode operand
// pc[2], walking outward from the current scope chain head.
2186 DEFINE_STUB_FUNCTION(EncodedJSValue, op_resolve_scope)
2188 STUB_INIT_STACK_FRAME(stackFrame);
2189 ExecState* exec = stackFrame.callFrame;
2190 Instruction* pc = stackFrame.args[0].pc();
2192 const Identifier& ident = exec->codeBlock()->identifier(pc[2].u.operand);
2193 return JSValue::encode(JSScope::resolve(exec, exec->scope(), ident));
// Reads a variable from an already-resolved scope object (register pc[2]).
// Bytecode operands: pc[3] = identifier index, pc[4] = ResolveModeAndType,
// pc[5]/pc[6] = inline cache slots (structure + offset) filled in below.
2196 DEFINE_STUB_FUNCTION(EncodedJSValue, op_get_from_scope)
2198 STUB_INIT_STACK_FRAME(stackFrame);
2199 ExecState* exec = stackFrame.callFrame;
2200 Instruction* pc = stackFrame.args[0].pc();
2202 const Identifier& ident = exec->codeBlock()->identifier(pc[3].u.operand);
2203 JSObject* scope = jsCast<JSObject*>(exec->uncheckedR(pc[2].u.operand).jsValue());
2204 ResolveModeAndType modeAndType(pc[4].u.operand);
2206 PropertySlot slot(scope);
2207 if (!scope->getPropertySlot(exec, ident, slot)) {
// Missing binding: ThrowIfNotFound mode raises a ReferenceError; otherwise
// the read yields undefined.
2208 if (modeAndType.mode() == ThrowIfNotFound) {
2209 exec->vm().throwException(exec, createUndefinedVariableError(exec, ident));
2210 VM_THROW_EXCEPTION();
2212 return JSValue::encode(jsUndefined());
2215 // Covers implicit globals. Since they don't exist until they first execute, we didn't know how to cache them at compile time.
// Cache the hit inline (structure + offset) so subsequent reads of this
// global property skip the lookup; guarded by the code block's JIT lock.
2216 if (slot.isCacheableValue() && slot.slotBase() == scope && scope->structure()->propertyAccessesAreCacheable()) {
2217 if (modeAndType.type() == GlobalProperty || modeAndType.type() == GlobalPropertyWithVarInjectionChecks) {
2218 CodeBlock* codeBlock = exec->codeBlock();
2219 ConcurrentJITLocker locker(codeBlock->m_lock);
2220 pc[5].u.structure.set(exec->vm(), codeBlock->ownerExecutable(), scope->structure());
2221 pc[6].u.operand = slot.cachedOffset();
2225 return JSValue::encode(slot.getValue(exec, ident));
// Writes a variable into an already-resolved scope object (register pc[1]).
// Bytecode operands: pc[2] = identifier index, pc[3] = value register,
// pc[4] = ResolveModeAndType, pc[5]/pc[6] = inline cache slots.
2228 DEFINE_STUB_FUNCTION(void, op_put_to_scope)
2230 STUB_INIT_STACK_FRAME(stackFrame);
2231 ExecState* exec = stackFrame.callFrame;
2232 Instruction* pc = stackFrame.args[0].pc();
2234 CodeBlock* codeBlock = exec->codeBlock();
2235 const Identifier& ident = codeBlock->identifier(pc[2].u.operand);
2236 JSObject* scope = jsCast<JSObject*>(exec->uncheckedR(pc[1].u.operand).jsValue());
2237 JSValue value = exec->r(pc[3].u.operand).jsValue();
2238 ResolveModeAndType modeAndType = ResolveModeAndType(pc[4].u.operand);
// Strict-mode style assignment to an unresolvable name: ReferenceError.
2240 if (modeAndType.mode() == ThrowIfNotFound && !scope->hasProperty(exec, ident)) {
2241 exec->vm().throwException(exec, createUndefinedVariableError(exec, ident));
2242 VM_THROW_EXCEPTION_AT_END();
// Perform the write through the scope's method table (respects setters,
// strict-mode failure semantics via the slot).
2246 PutPropertySlot slot(codeBlock->isStrictMode());
2247 scope->methodTable()->put(scope, exec, ident, value, slot);
2249 if (exec->vm().exception()) {
2250 VM_THROW_EXCEPTION_AT_END();
2254 // Covers implicit globals. Since they don't exist until they first execute, we didn't know how to cache them at compile time.
// Mirror of the op_get_from_scope cache fill: record structure + offset for
// cacheable global-property writes, under the concurrent-JIT lock.
2255 if (modeAndType.type() == GlobalProperty || modeAndType.type() == GlobalPropertyWithVarInjectionChecks) {
2256 if (slot.isCacheable() && slot.base() == scope && scope->structure()->propertyAccessesAreCacheable()) {
2257 ConcurrentJITLocker locker(codeBlock->m_lock);
2258 pc[5].u.structure.set(exec->vm(), codeBlock->ownerExecutable(), scope->structure());
2259 pc[6].u.operand = slot.cachedOffset();
2266 #endif // ENABLE(JIT)