/*
 * Copyright (C) 2008, 2009, 2013 Apple Inc. All rights reserved.
 * Copyright (C) 2008 Cameron Zwarich <cwzwarich@uwaterloo.ca>
 * Copyright (C) Research In Motion Limited 2010, 2011. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. Neither the name of Apple Computer, Inc. ("Apple") nor the names of
 *    its contributors may be used to endorse or promote products derived
 *    from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
#include "Arguments.h"
#include "ArrayConstructor.h"
#include "CallFrame.h"
#include "CallFrameInlines.h"
#include "CodeBlock.h"
#include "CodeProfiling.h"
#include "CommonSlowPaths.h"
#include "DFGOSREntry.h"
#include "DFGWorklist.h"
#include "ExceptionHelpers.h"
#include "GetterSetter.h"
#include <wtf/InlineASM.h>
#include "JITExceptions.h"
#include "JSActivation.h"
#include "JSFunction.h"
#include "JSGlobalObjectFunctions.h"
#include "JSNameScope.h"
#include "JSNotAnObject.h"
#include "JSPropertyNameIterator.h"
#include "JSWithScope.h"
#include "LegacyProfiler.h"
#include "NameInstance.h"
#include "ObjectConstructor.h"
#include "ObjectPrototype.h"
#include "Operations.h"
#include "RegExpObject.h"
#include "RegExpPrototype.h"
#include "RepatchBuffer.h"
#include "SamplingTool.h"
#include "SlowPathCall.h"
#include "StructureRareDataInlines.h"
#include <wtf/StdLibExtras.h>
#if CPU(ARM_TRADITIONAL)
#include "JITStubsARM.h"
#elif CPU(ARM_THUMB2)
#include "JITStubsARMv7.h"
#elif CPU(MIPS)
#include "JITStubsMIPS.h"
#elif CPU(SH4)
#include "JITStubsSH4.h"
#elif CPU(X86)
#include "JITStubsX86.h"
#elif CPU(X86_64)
#include "JITStubsX86_64.h"
#else
#error "JIT not supported on this platform."
#endif
#if ENABLE(OPCODE_SAMPLING)
#define CTI_SAMPLER stackFrame.vm->interpreter->sampler()
#else
#define CTI_SAMPLER 0
#endif
void performPlatformSpecificJITAssertions(VM* vm)
{
    if (!vm->canUseJIT())
        return;

#if CPU(ARM_THUMB2)
    performARMv7JITAssertions();
#elif CPU(ARM_TRADITIONAL)
    performARMJITAssertions();
#elif CPU(MIPS)
    performMIPSJITAssertions();
#elif CPU(SH4)
    performSH4JITAssertions();
#endif
}
NEVER_INLINE static void tryCachePutByID(CallFrame* callFrame, CodeBlock* codeBlock, ReturnAddressPtr returnAddress, JSValue baseValue, const PutPropertySlot& slot, StructureStubInfo* stubInfo, bool direct)
{
    ConcurrentJITLocker locker(codeBlock->m_lock);

    // The interpreter checks for recursion here; I do not believe this can occur in CTI.

    if (!baseValue.isCell())
        return;

    // Uncacheable: give up.
    if (!slot.isCacheable()) {
        ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(direct ? cti_op_put_by_id_direct_generic : cti_op_put_by_id_generic));
        return;
    }

    JSCell* baseCell = baseValue.asCell();
    Structure* structure = baseCell->structure();

    if (structure->isUncacheableDictionary() || structure->typeInfo().prohibitsPropertyCaching()) {
        ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(direct ? cti_op_put_by_id_direct_generic : cti_op_put_by_id_generic));
        return;
    }

    // If baseCell != base, then baseCell must be a proxy for another object.
    if (baseCell != slot.base()) {
        ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(direct ? cti_op_put_by_id_direct_generic : cti_op_put_by_id_generic));
        return;
    }

    // Cache hit: Specialize instruction and ref Structures.

    // Structure transition, cache transition info
    if (slot.type() == PutPropertySlot::NewProperty) {
        if (structure->isDictionary()) {
            ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(direct ? cti_op_put_by_id_direct_generic : cti_op_put_by_id_generic));
            return;
        }

        // put_by_id_transition checks the prototype chain for setters.
        if (normalizePrototypeChain(callFrame, baseCell) == InvalidPrototypeChain) {
            ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(direct ? cti_op_put_by_id_direct_generic : cti_op_put_by_id_generic));
            return;
        }

        StructureChain* prototypeChain = structure->prototypeChain(callFrame);
        ASSERT(structure->previousID()->transitionWatchpointSetHasBeenInvalidated());
        stubInfo->initPutByIdTransition(callFrame->vm(), codeBlock->ownerExecutable(), structure->previousID(), structure, prototypeChain, direct);
        JIT::compilePutByIdTransition(callFrame->scope()->vm(), codeBlock, stubInfo, structure->previousID(), structure, slot.cachedOffset(), prototypeChain, returnAddress, direct);
        return;
    }

    stubInfo->initPutByIdReplace(callFrame->vm(), codeBlock->ownerExecutable(), structure);

    JIT::patchPutByIdReplace(codeBlock, stubInfo, structure, slot.cachedOffset(), returnAddress, direct);
}
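// To summarize the above: a put_by_id call site ends up in one of three states.
// Uncacheable puts are patched through to the generic stub; a NewProperty put
// with a cacheable structure chain gets a compiled transition stub; and a put
// to an existing property gets an in-place "replace" patch. tryCacheGetByID
// below follows the same overall shape.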
NEVER_INLINE static void tryCacheGetByID(CallFrame* callFrame, CodeBlock* codeBlock, ReturnAddressPtr returnAddress, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo* stubInfo)
{
    ConcurrentJITLocker locker(codeBlock->m_lock);

    // FIXME: Write a test that proves we need to check for recursion here just
    // like the interpreter does, then add a check for recursion.

    // FIXME: Cache property access for immediates.
    if (!baseValue.isCell()) {
        ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(cti_op_get_by_id_generic));
        return;
    }

    VM* vm = &callFrame->vm();

    if (isJSArray(baseValue) && propertyName == callFrame->propertyNames().length) {
        JIT::compilePatchGetArrayLength(callFrame->scope()->vm(), codeBlock, returnAddress);
        return;
    }

    if (isJSString(baseValue) && propertyName == callFrame->propertyNames().length) {
        // The tradeoff of compiling a patched inline string length access routine does not seem
        // to pay off, so we currently only do this for arrays.
        ctiPatchCallByReturnAddress(codeBlock, returnAddress, vm->getCTIStub(stringLengthTrampolineGenerator).code());
        return;
    }

    // Uncacheable: give up.
    if (!slot.isCacheable()) {
        stubInfo->accessType = access_get_by_id_generic;
        ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(cti_op_get_by_id_generic));
        return;
    }

    JSCell* baseCell = baseValue.asCell();
    Structure* structure = baseCell->structure();

    if (structure->isUncacheableDictionary() || structure->typeInfo().prohibitsPropertyCaching()) {
        stubInfo->accessType = access_get_by_id_generic;
        ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(cti_op_get_by_id_generic));
        return;
    }

    // Cache hit: Specialize instruction and ref Structures.

    if (slot.slotBase() == baseValue) {
        RELEASE_ASSERT(stubInfo->accessType == access_unset);
        if (!slot.isCacheableValue() || !MacroAssembler::isCompactPtrAlignedAddressOffset(maxOffsetRelativeToPatchedStorage(slot.cachedOffset())))
            ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(cti_op_get_by_id_self_fail));
        else {
            JIT::patchGetByIdSelf(codeBlock, stubInfo, structure, slot.cachedOffset(), returnAddress);
            stubInfo->initGetByIdSelf(callFrame->vm(), codeBlock->ownerExecutable(), structure);
        }
        return;
    }

    if (structure->isDictionary()) {
        stubInfo->accessType = access_get_by_id_generic;
        ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(cti_op_get_by_id_generic));
        return;
    }

    if (slot.slotBase() == structure->prototypeForLookup(callFrame)) {
        JSObject* slotBaseObject = asObject(slot.slotBase());
        size_t offset = slot.cachedOffset();

        if (structure->typeInfo().hasImpureGetOwnPropertySlot()) {
            stubInfo->accessType = access_get_by_id_generic;
            ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(cti_op_get_by_id_generic));
            return;
        }

        // Since we're accessing a prototype in a loop, it's a good bet that it
        // should not be treated as a dictionary.
        if (slotBaseObject->structure()->isDictionary()) {
            slotBaseObject->flattenDictionaryObject(callFrame->vm());
            offset = slotBaseObject->structure()->get(callFrame->vm(), propertyName);
        }

        stubInfo->initGetByIdProto(callFrame->vm(), codeBlock->ownerExecutable(), structure, slotBaseObject->structure(), slot.isCacheableValue());

        ASSERT(!structure->isDictionary());
        ASSERT(!slotBaseObject->structure()->isDictionary());
        JIT::compileGetByIdProto(callFrame->scope()->vm(), callFrame, codeBlock, stubInfo, structure, slotBaseObject->structure(), propertyName, slot, offset, returnAddress);
        return;
    }

    PropertyOffset offset = slot.cachedOffset();
    size_t count = normalizePrototypeChainForChainAccess(callFrame, baseValue, slot.slotBase(), propertyName, offset);
    if (count == InvalidPrototypeChain) {
        stubInfo->accessType = access_get_by_id_generic;
        ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(cti_op_get_by_id_generic));
        return;
    }

    StructureChain* prototypeChain = structure->prototypeChain(callFrame);
    stubInfo->initGetByIdChain(callFrame->vm(), codeBlock->ownerExecutable(), structure, prototypeChain, count, slot.isCacheableValue());
    JIT::compileGetByIdChain(callFrame->scope()->vm(), callFrame, codeBlock, stubInfo, structure, prototypeChain, count, propertyName, slot, offset, returnAddress);
}
static void jscGeneratedNativeCode()
{
    // When executing a JIT stub function (which might do an allocation), we hack the return address
    // to pretend to be executing this function, to keep stack logging tools from blowing out
    // memory.
}

struct StackHack {
    ALWAYS_INLINE StackHack(JITStackFrame& stackFrame)
        : stackFrame(stackFrame)
        , savedReturnAddress(*stackFrame.returnAddressSlot())
    {
        if (!CodeProfiling::enabled())
            *stackFrame.returnAddressSlot() = ReturnAddressPtr(FunctionPtr(jscGeneratedNativeCode));
    }

    ALWAYS_INLINE ~StackHack()
    {
        *stackFrame.returnAddressSlot() = savedReturnAddress;
    }

    JITStackFrame& stackFrame;
    ReturnAddressPtr savedReturnAddress;
};
#define STUB_INIT_STACK_FRAME(stackFrame) JITStackFrame& stackFrame = *reinterpret_cast_ptr<JITStackFrame*>(STUB_ARGS); StackHack stackHack(stackFrame)
#define STUB_SET_RETURN_ADDRESS(returnAddress) stackHack.savedReturnAddress = ReturnAddressPtr(returnAddress)
#define STUB_RETURN_ADDRESS stackHack.savedReturnAddress

#define STUB_INIT_STACK_FRAME(stackFrame) JITStackFrame& stackFrame = *reinterpret_cast_ptr<JITStackFrame*>(STUB_ARGS)
#define STUB_SET_RETURN_ADDRESS(returnAddress) *stackFrame.returnAddressSlot() = ReturnAddressPtr(returnAddress)
#define STUB_RETURN_ADDRESS *stackFrame.returnAddressSlot()
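// A sketch of how a stub body uses these macros (illustrative only; cti_example
// is a hypothetical stub, but the pattern matches the real stubs below):
//
//     DEFINE_STUB_FUNCTION(void, example)
//     {
//         STUB_INIT_STACK_FRAME(stackFrame);   // recover the JITStackFrame from STUB_ARGS
//         CallFrame* callFrame = stackFrame.callFrame;
//         ...
//         ctiPatchCallByReturnAddress(callFrame->codeBlock(), STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id_generic));
//     }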
// The reason this is not inlined is to avoid having to do a PIC branch
// to get the address of the ctiVMThrowTrampoline function. It's also
// good to keep the code size down by leaving as much of the exception
// handling code out of line as possible.
static NEVER_INLINE void returnToThrowTrampoline(VM* vm, ReturnAddressPtr exceptionLocation, ReturnAddressPtr& returnAddressSlot)
{
    RELEASE_ASSERT(vm->exception);
    vm->exceptionLocation = exceptionLocation;
    returnAddressSlot = ReturnAddressPtr(FunctionPtr(ctiVMThrowTrampoline));
}
#define VM_THROW_EXCEPTION() \
    do { \
        VM_THROW_EXCEPTION_AT_END(); \
        return 0; \
    } while (0)
#define VM_THROW_EXCEPTION_AT_END() \
    do { \
        returnToThrowTrampoline(stackFrame.vm, STUB_RETURN_ADDRESS, STUB_RETURN_ADDRESS);\
    } while (0)

#define CHECK_FOR_EXCEPTION() \
    do { \
        if (UNLIKELY(stackFrame.vm->exception)) \
            VM_THROW_EXCEPTION(); \
    } while (0)
#define CHECK_FOR_EXCEPTION_AT_END() \
    do { \
        if (UNLIKELY(stackFrame.vm->exception)) \
            VM_THROW_EXCEPTION_AT_END(); \
    } while (0)
#define CHECK_FOR_EXCEPTION_VOID() \
    do { \
        if (UNLIKELY(stackFrame.vm->exception)) { \
            VM_THROW_EXCEPTION_AT_END(); \
            return; \
        } \
    } while (0)
class ErrorFunctor {
public:
    virtual ~ErrorFunctor() { }
    virtual JSValue operator()(ExecState*) = 0;
};

class ErrorWithExecFunctor : public ErrorFunctor {
public:
    typedef JSObject* (*Factory)(ExecState* exec);

    ErrorWithExecFunctor(Factory factory)
        : m_factory(factory)
    { }
    JSValue operator()(ExecState* exec)
    {
        return m_factory(exec);
    }

private:
    Factory m_factory;
};

class ErrorWithExecAndCalleeFunctor : public ErrorFunctor {
public:
    typedef JSObject* (*Factory)(ExecState* exec, JSValue callee);

    ErrorWithExecAndCalleeFunctor(Factory factory, JSValue callee)
        : m_factory(factory), m_callee(callee)
    { }
    JSValue operator()(ExecState* exec)
    {
        return m_factory(exec, m_callee);
    }

private:
    Factory m_factory;
    JSValue m_callee;
};

class ErrorWithExceptionFunctor : public ErrorFunctor {
public:
    ErrorWithExceptionFunctor(JSValue exception)
        : m_exception(exception)
    { }
    JSValue operator()(ExecState*)
    {
        return m_exception;
    }

private:
    JSValue m_exception;
};
// Helper function for JIT stubs that may throw an exception in the middle of
// processing a function call. This function rolls back the stack to
// our caller, so exception processing can proceed from a valid state.
template<typename T> static T throwExceptionFromOpCall(JITStackFrame& jitStackFrame, CallFrame* newCallFrame, ReturnAddressPtr& returnAddressSlot, ErrorFunctor& createError)
{
    CallFrame* callFrame = newCallFrame->callerFrame()->removeHostCallFrameFlag();
    jitStackFrame.callFrame = callFrame;
    callFrame->vm().topCallFrame = callFrame;
    callFrame->vm().exception = createError(callFrame);
    ASSERT(callFrame->vm().exception);
    returnToThrowTrampoline(&callFrame->vm(), ReturnAddressPtr(newCallFrame->returnPC()), returnAddressSlot);
    return T();
}

template<typename T> static T throwExceptionFromOpCall(JITStackFrame& jitStackFrame, CallFrame* newCallFrame, ReturnAddressPtr& returnAddressSlot)
{
    CallFrame* callFrame = newCallFrame->callerFrame();
    ASSERT(callFrame->vm().exception);
    ErrorWithExceptionFunctor functor = ErrorWithExceptionFunctor(callFrame->vm().exception);
    return throwExceptionFromOpCall<T>(jitStackFrame, newCallFrame, returnAddressSlot, functor);
}
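// Typical use from a stub, matching the jitCompile and lazy-link stubs below:
//
//     void* result = jitCompileFor(callFrame, CodeForCall);
//     if (!result)
//         return throwExceptionFromOpCall<void*>(stackFrame, callFrame, STUB_RETURN_ADDRESS);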
// If the CPU specific header does not provide an implementation, use the default one here.
#ifndef DEFINE_STUB_FUNCTION
#define DEFINE_STUB_FUNCTION(rtype, op) rtype JIT_STUB cti_##op(STUB_ARGS_DECLARATION)
#endif
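// For example, DEFINE_STUB_FUNCTION(void, handle_watchdog_timer) below expands
// (with this default definition) to:
//
//     void JIT_STUB cti_handle_watchdog_timer(STUB_ARGS_DECLARATION)
//
// which is the entry point that JIT-generated code calls back into.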
DEFINE_STUB_FUNCTION(void, handle_watchdog_timer)
{
    STUB_INIT_STACK_FRAME(stackFrame);
    CallFrame* callFrame = stackFrame.callFrame;
    VM* vm = stackFrame.vm;
    if (UNLIKELY(vm->watchdog.didFire(callFrame))) {
        vm->exception = createTerminatedExecutionException(vm);
        VM_THROW_EXCEPTION_AT_END();
        return;
    }
}

DEFINE_STUB_FUNCTION(void*, stack_check)
{
    STUB_INIT_STACK_FRAME(stackFrame);
    CallFrame* callFrame = stackFrame.callFrame;

    if (UNLIKELY(!stackFrame.stack->grow(&callFrame->registers()[callFrame->codeBlock()->m_numCalleeRegisters]))) {
        ErrorWithExecFunctor functor = ErrorWithExecFunctor(createStackOverflowError);
        return throwExceptionFromOpCall<void*>(stackFrame, callFrame, STUB_RETURN_ADDRESS, functor);
    }

    return callFrame;
}

DEFINE_STUB_FUNCTION(JSObject*, op_new_object)
{
    STUB_INIT_STACK_FRAME(stackFrame);

    return constructEmptyObject(stackFrame.callFrame, stackFrame.args[0].structure());
}
DEFINE_STUB_FUNCTION(void, op_put_by_id_generic)
{
    STUB_INIT_STACK_FRAME(stackFrame);

    PutPropertySlot slot(
        stackFrame.callFrame->codeBlock()->isStrictMode(),
        stackFrame.callFrame->codeBlock()->putByIdContext());
    stackFrame.args[0].jsValue().put(stackFrame.callFrame, stackFrame.args[1].identifier(), stackFrame.args[2].jsValue(), slot);
    CHECK_FOR_EXCEPTION_AT_END();
}

DEFINE_STUB_FUNCTION(void, op_put_by_id_direct_generic)
{
    STUB_INIT_STACK_FRAME(stackFrame);

    PutPropertySlot slot(
        stackFrame.callFrame->codeBlock()->isStrictMode(),
        stackFrame.callFrame->codeBlock()->putByIdContext());
    JSValue baseValue = stackFrame.args[0].jsValue();
    ASSERT(baseValue.isObject());
    asObject(baseValue)->putDirect(stackFrame.callFrame->vm(), stackFrame.args[1].identifier(), stackFrame.args[2].jsValue(), slot);
    CHECK_FOR_EXCEPTION_AT_END();
}

DEFINE_STUB_FUNCTION(EncodedJSValue, op_get_by_id_generic)
{
    STUB_INIT_STACK_FRAME(stackFrame);

    CallFrame* callFrame = stackFrame.callFrame;
    Identifier& ident = stackFrame.args[1].identifier();

    JSValue baseValue = stackFrame.args[0].jsValue();
    PropertySlot slot(baseValue);
    JSValue result = baseValue.get(callFrame, ident, slot);

    CHECK_FOR_EXCEPTION_AT_END();
    return JSValue::encode(result);
}
DEFINE_STUB_FUNCTION(void, op_put_by_id)
{
    STUB_INIT_STACK_FRAME(stackFrame);
    CallFrame* callFrame = stackFrame.callFrame;
    Identifier& ident = stackFrame.args[1].identifier();

    CodeBlock* codeBlock = stackFrame.callFrame->codeBlock();
    StructureStubInfo* stubInfo = &codeBlock->getStubInfo(STUB_RETURN_ADDRESS);
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    PutPropertySlot slot(
        callFrame->codeBlock()->isStrictMode(),
        callFrame->codeBlock()->putByIdContext());
    stackFrame.args[0].jsValue().put(callFrame, ident, stackFrame.args[2].jsValue(), slot);

    if (accessType == static_cast<AccessType>(stubInfo->accessType)) {
        stubInfo->setSeen();
        tryCachePutByID(callFrame, codeBlock, STUB_RETURN_ADDRESS, stackFrame.args[0].jsValue(), slot, stubInfo, false);
    }

    CHECK_FOR_EXCEPTION_AT_END();
}

DEFINE_STUB_FUNCTION(void, op_put_by_id_direct)
{
    STUB_INIT_STACK_FRAME(stackFrame);
    CallFrame* callFrame = stackFrame.callFrame;
    Identifier& ident = stackFrame.args[1].identifier();

    CodeBlock* codeBlock = stackFrame.callFrame->codeBlock();
    StructureStubInfo* stubInfo = &codeBlock->getStubInfo(STUB_RETURN_ADDRESS);
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    PutPropertySlot slot(
        callFrame->codeBlock()->isStrictMode(),
        callFrame->codeBlock()->putByIdContext());
    JSValue baseValue = stackFrame.args[0].jsValue();
    ASSERT(baseValue.isObject());

    asObject(baseValue)->putDirect(callFrame->vm(), ident, stackFrame.args[2].jsValue(), slot);

    if (accessType == static_cast<AccessType>(stubInfo->accessType)) {
        stubInfo->setSeen();
        tryCachePutByID(callFrame, codeBlock, STUB_RETURN_ADDRESS, stackFrame.args[0].jsValue(), slot, stubInfo, true);
    }

    CHECK_FOR_EXCEPTION_AT_END();
}
DEFINE_STUB_FUNCTION(void, op_put_by_id_fail)
{
    STUB_INIT_STACK_FRAME(stackFrame);

    CallFrame* callFrame = stackFrame.callFrame;
    Identifier& ident = stackFrame.args[1].identifier();

    PutPropertySlot slot(
        callFrame->codeBlock()->isStrictMode(),
        callFrame->codeBlock()->putByIdContext());
    stackFrame.args[0].jsValue().put(callFrame, ident, stackFrame.args[2].jsValue(), slot);

    CHECK_FOR_EXCEPTION_AT_END();
}

DEFINE_STUB_FUNCTION(void, op_put_by_id_direct_fail)
{
    STUB_INIT_STACK_FRAME(stackFrame);

    CallFrame* callFrame = stackFrame.callFrame;
    Identifier& ident = stackFrame.args[1].identifier();

    PutPropertySlot slot(
        callFrame->codeBlock()->isStrictMode(),
        callFrame->codeBlock()->putByIdContext());
    JSValue baseValue = stackFrame.args[0].jsValue();
    ASSERT(baseValue.isObject());
    asObject(baseValue)->putDirect(callFrame->vm(), ident, stackFrame.args[2].jsValue(), slot);

    CHECK_FOR_EXCEPTION_AT_END();
}
DEFINE_STUB_FUNCTION(JSObject*, op_put_by_id_transition_realloc)
{
    STUB_INIT_STACK_FRAME(stackFrame);

    JSValue baseValue = stackFrame.args[0].jsValue();
    int32_t oldSize = stackFrame.args[3].int32();
    Structure* newStructure = stackFrame.args[4].structure();
    int32_t newSize = newStructure->outOfLineCapacity();

    ASSERT(oldSize >= 0);
    ASSERT(newSize > oldSize);

    ASSERT(baseValue.isObject());
    JSObject* base = asObject(baseValue);
    VM& vm = *stackFrame.vm;
    Butterfly* butterfly = base->growOutOfLineStorage(vm, oldSize, newSize);
    base->setStructureAndButterfly(vm, newStructure, butterfly);

    return base;
}
DEFINE_STUB_FUNCTION(EncodedJSValue, op_get_by_id)
{
    STUB_INIT_STACK_FRAME(stackFrame);
    CallFrame* callFrame = stackFrame.callFrame;
    Identifier& ident = stackFrame.args[1].identifier();

    CodeBlock* codeBlock = stackFrame.callFrame->codeBlock();
    StructureStubInfo* stubInfo = &codeBlock->getStubInfo(STUB_RETURN_ADDRESS);
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue baseValue = stackFrame.args[0].jsValue();
    PropertySlot slot(baseValue);
    JSValue result = baseValue.get(callFrame, ident, slot);

    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return JSValue::encode(result);

    if (!stubInfo->seenOnce())
        stubInfo->setSeen();
    else
        tryCacheGetByID(callFrame, codeBlock, STUB_RETURN_ADDRESS, baseValue, ident, slot, stubInfo);

    CHECK_FOR_EXCEPTION_AT_END();
    return JSValue::encode(result);
}
DEFINE_STUB_FUNCTION(EncodedJSValue, op_get_by_id_self_fail)
{
    STUB_INIT_STACK_FRAME(stackFrame);

    CallFrame* callFrame = stackFrame.callFrame;
    Identifier& ident = stackFrame.args[1].identifier();

    CodeBlock* codeBlock = callFrame->codeBlock();
    StructureStubInfo* stubInfo = &codeBlock->getStubInfo(STUB_RETURN_ADDRESS);
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue baseValue = stackFrame.args[0].jsValue();
    PropertySlot slot(baseValue);
    JSValue result = baseValue.get(callFrame, ident, slot);

    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return JSValue::encode(result);

    CHECK_FOR_EXCEPTION();

    ConcurrentJITLocker locker(codeBlock->m_lock);

    if (baseValue.isCell()
        && slot.isCacheable()
        && !baseValue.asCell()->structure()->isUncacheableDictionary()
        && slot.slotBase() == baseValue) {

        PolymorphicAccessStructureList* polymorphicStructureList;
        int listIndex = 1;

        if (stubInfo->accessType == access_unset)
            stubInfo->initGetByIdSelf(callFrame->vm(), codeBlock->ownerExecutable(), baseValue.asCell()->structure());

        if (stubInfo->accessType == access_get_by_id_self) {
            ASSERT(!stubInfo->stubRoutine);
            polymorphicStructureList = new PolymorphicAccessStructureList(callFrame->vm(), codeBlock->ownerExecutable(), 0, stubInfo->u.getByIdSelf.baseObjectStructure.get(), true);
            stubInfo->initGetByIdSelfList(polymorphicStructureList, 1);
        } else {
            polymorphicStructureList = stubInfo->u.getByIdSelfList.structureList;
            listIndex = stubInfo->u.getByIdSelfList.listSize;
        }
        if (listIndex < POLYMORPHIC_LIST_CACHE_SIZE) {
            stubInfo->u.getByIdSelfList.listSize++;
            JIT::compileGetByIdSelfList(callFrame->scope()->vm(), codeBlock, stubInfo, polymorphicStructureList, listIndex, baseValue.asCell()->structure(), ident, slot, slot.cachedOffset());

            if (listIndex == (POLYMORPHIC_LIST_CACHE_SIZE - 1))
                ctiPatchCallByReturnAddress(codeBlock, STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id_generic));
        }
    } else
        ctiPatchCallByReturnAddress(callFrame->codeBlock(), STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id_generic));
    return JSValue::encode(result);
}
static PolymorphicAccessStructureList* getPolymorphicAccessStructureListSlot(VM& vm, ScriptExecutable* owner, StructureStubInfo* stubInfo, int& listIndex)
{
    PolymorphicAccessStructureList* prototypeStructureList = 0;
    listIndex = 1;

    switch (stubInfo->accessType) {
    case access_get_by_id_proto:
        prototypeStructureList = new PolymorphicAccessStructureList(vm, owner, stubInfo->stubRoutine, stubInfo->u.getByIdProto.baseObjectStructure.get(), stubInfo->u.getByIdProto.prototypeStructure.get(), true);
        stubInfo->stubRoutine.clear();
        stubInfo->initGetByIdProtoList(prototypeStructureList, 2);
        break;
    case access_get_by_id_chain:
        prototypeStructureList = new PolymorphicAccessStructureList(vm, owner, stubInfo->stubRoutine, stubInfo->u.getByIdChain.baseObjectStructure.get(), stubInfo->u.getByIdChain.chain.get(), true);
        stubInfo->stubRoutine.clear();
        stubInfo->initGetByIdProtoList(prototypeStructureList, 2);
        break;
    case access_get_by_id_proto_list:
        prototypeStructureList = stubInfo->u.getByIdProtoList.structureList;
        listIndex = stubInfo->u.getByIdProtoList.listSize;
        if (listIndex < POLYMORPHIC_LIST_CACHE_SIZE)
            stubInfo->u.getByIdProtoList.listSize++;
        break;
    default:
        RELEASE_ASSERT_NOT_REACHED();
    }

    ASSERT(listIndex <= POLYMORPHIC_LIST_CACHE_SIZE);
    return prototypeStructureList;
}
DEFINE_STUB_FUNCTION(EncodedJSValue, op_get_by_id_getter_stub)
{
    STUB_INIT_STACK_FRAME(stackFrame);
    CallFrame* callFrame = stackFrame.callFrame;
    JSValue result = callGetter(callFrame, stackFrame.args[1].jsObject(), stackFrame.args[0].jsObject());
    if (callFrame->hadException())
        returnToThrowTrampoline(&callFrame->vm(), stackFrame.args[2].returnAddress(), STUB_RETURN_ADDRESS);

    return JSValue::encode(result);
}

DEFINE_STUB_FUNCTION(EncodedJSValue, op_get_by_id_custom_stub)
{
    STUB_INIT_STACK_FRAME(stackFrame);
    CallFrame* callFrame = stackFrame.callFrame;
    JSObject* slotBase = stackFrame.args[0].jsObject();
    PropertySlot::GetValueFunc getter = reinterpret_cast<PropertySlot::GetValueFunc>(stackFrame.args[1].asPointer);
    const Identifier& ident = stackFrame.args[2].identifier();
    JSValue result = getter(callFrame, slotBase, ident);
    if (callFrame->hadException())
        returnToThrowTrampoline(&callFrame->vm(), stackFrame.args[3].returnAddress(), STUB_RETURN_ADDRESS);

    return JSValue::encode(result);
}
DEFINE_STUB_FUNCTION(EncodedJSValue, op_get_by_id_proto_list)
{
    STUB_INIT_STACK_FRAME(stackFrame);

    CallFrame* callFrame = stackFrame.callFrame;
    const Identifier& propertyName = stackFrame.args[1].identifier();

    CodeBlock* codeBlock = callFrame->codeBlock();
    StructureStubInfo* stubInfo = &codeBlock->getStubInfo(STUB_RETURN_ADDRESS);
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue baseValue = stackFrame.args[0].jsValue();
    PropertySlot slot(baseValue);
    JSValue result = baseValue.get(callFrame, propertyName, slot);

    CHECK_FOR_EXCEPTION();

    if (accessType != static_cast<AccessType>(stubInfo->accessType)
        || !baseValue.isCell()
        || !slot.isCacheable()
        || baseValue.asCell()->structure()->isDictionary()
        || baseValue.asCell()->structure()->typeInfo().prohibitsPropertyCaching()) {
        ctiPatchCallByReturnAddress(callFrame->codeBlock(), STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id_proto_fail));
        return JSValue::encode(result);
    }

    ConcurrentJITLocker locker(codeBlock->m_lock);

    Structure* structure = baseValue.asCell()->structure();

    JSObject* slotBaseObject = asObject(slot.slotBase());

    PropertyOffset offset = slot.cachedOffset();

    if (slot.slotBase() == baseValue)
        ctiPatchCallByReturnAddress(codeBlock, STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id_proto_fail));
    else if (slot.slotBase() == baseValue.asCell()->structure()->prototypeForLookup(callFrame)) {
        ASSERT(!baseValue.asCell()->structure()->isDictionary());

        if (baseValue.asCell()->structure()->typeInfo().hasImpureGetOwnPropertySlot()) {
            ctiPatchCallByReturnAddress(codeBlock, STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id_proto_fail));
            return JSValue::encode(result);
        }

        // Since we're accessing a prototype in a loop, it's a good bet that it
        // should not be treated as a dictionary.
        if (slotBaseObject->structure()->isDictionary()) {
            slotBaseObject->flattenDictionaryObject(callFrame->vm());
            offset = slotBaseObject->structure()->get(callFrame->vm(), propertyName);
        }

        int listIndex;
        PolymorphicAccessStructureList* prototypeStructureList = getPolymorphicAccessStructureListSlot(callFrame->vm(), codeBlock->ownerExecutable(), stubInfo, listIndex);
        if (listIndex < POLYMORPHIC_LIST_CACHE_SIZE) {
            JIT::compileGetByIdProtoList(callFrame->scope()->vm(), callFrame, codeBlock, stubInfo, prototypeStructureList, listIndex, structure, slotBaseObject->structure(), propertyName, slot, offset);

            if (listIndex == (POLYMORPHIC_LIST_CACHE_SIZE - 1))
                ctiPatchCallByReturnAddress(codeBlock, STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id_proto_list_full));
        }
    } else {
        size_t count = normalizePrototypeChainForChainAccess(callFrame, baseValue, slot.slotBase(), propertyName, offset);
        if (count == InvalidPrototypeChain) {
            ctiPatchCallByReturnAddress(codeBlock, STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id_proto_fail));
            return JSValue::encode(result);
        }

        ASSERT(!baseValue.asCell()->structure()->isDictionary());

        int listIndex;
        PolymorphicAccessStructureList* prototypeStructureList = getPolymorphicAccessStructureListSlot(callFrame->vm(), codeBlock->ownerExecutable(), stubInfo, listIndex);

        if (listIndex < POLYMORPHIC_LIST_CACHE_SIZE) {
            StructureChain* protoChain = structure->prototypeChain(callFrame);
            JIT::compileGetByIdChainList(callFrame->scope()->vm(), callFrame, codeBlock, stubInfo, prototypeStructureList, listIndex, structure, protoChain, count, propertyName, slot, offset);

            if (listIndex == (POLYMORPHIC_LIST_CACHE_SIZE - 1))
                ctiPatchCallByReturnAddress(codeBlock, STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id_proto_list_full));
        }
    }

    return JSValue::encode(result);
}
DEFINE_STUB_FUNCTION(EncodedJSValue, op_get_by_id_proto_list_full)
{
    STUB_INIT_STACK_FRAME(stackFrame);

    JSValue baseValue = stackFrame.args[0].jsValue();
    PropertySlot slot(baseValue);
    JSValue result = baseValue.get(stackFrame.callFrame, stackFrame.args[1].identifier(), slot);

    CHECK_FOR_EXCEPTION_AT_END();
    return JSValue::encode(result);
}

DEFINE_STUB_FUNCTION(EncodedJSValue, op_get_by_id_proto_fail)
{
    STUB_INIT_STACK_FRAME(stackFrame);

    JSValue baseValue = stackFrame.args[0].jsValue();
    PropertySlot slot(baseValue);
    JSValue result = baseValue.get(stackFrame.callFrame, stackFrame.args[1].identifier(), slot);

    CHECK_FOR_EXCEPTION_AT_END();
    return JSValue::encode(result);
}

DEFINE_STUB_FUNCTION(EncodedJSValue, op_get_by_id_array_fail)
{
    STUB_INIT_STACK_FRAME(stackFrame);

    JSValue baseValue = stackFrame.args[0].jsValue();
    PropertySlot slot(baseValue);
    JSValue result = baseValue.get(stackFrame.callFrame, stackFrame.args[1].identifier(), slot);

    CHECK_FOR_EXCEPTION_AT_END();
    return JSValue::encode(result);
}

DEFINE_STUB_FUNCTION(EncodedJSValue, op_get_by_id_string_fail)
{
    STUB_INIT_STACK_FRAME(stackFrame);

    JSValue baseValue = stackFrame.args[0].jsValue();
    PropertySlot slot(baseValue);
    JSValue result = baseValue.get(stackFrame.callFrame, stackFrame.args[1].identifier(), slot);

    CHECK_FOR_EXCEPTION_AT_END();
    return JSValue::encode(result);
}
DEFINE_STUB_FUNCTION(EncodedJSValue, op_check_has_instance)
{
    STUB_INIT_STACK_FRAME(stackFrame);

    CallFrame* callFrame = stackFrame.callFrame;
    JSValue value = stackFrame.args[0].jsValue();
    JSValue baseVal = stackFrame.args[1].jsValue();

    if (baseVal.isObject()) {
        JSObject* baseObject = asObject(baseVal);
        ASSERT(!baseObject->structure()->typeInfo().implementsDefaultHasInstance());
        if (baseObject->structure()->typeInfo().implementsHasInstance()) {
            bool result = baseObject->methodTable()->customHasInstance(baseObject, callFrame, value);
            CHECK_FOR_EXCEPTION_AT_END();
            return JSValue::encode(jsBoolean(result));
        }
    }

    stackFrame.vm->exception = createInvalidParameterError(callFrame, "instanceof", baseVal);
    VM_THROW_EXCEPTION_AT_END();
    return JSValue::encode(JSValue());
}
#if ENABLE(DFG_JIT)
DEFINE_STUB_FUNCTION(void, optimize)
{
    STUB_INIT_STACK_FRAME(stackFrame);

    // Defer GC so that it doesn't run between when we enter into this slow path and
    // when we figure out the state of our code block. This prevents a number of
    // awkward reentrancy scenarios, including:
    //
    // - The optimized version of our code block being jettisoned by GC right after
    //   we concluded that we wanted to use it.
    //
    // - An optimized version of our code block being installed just as we decided
    //   that it wasn't ready yet.
    //
    // This still leaves the following: anytime we return from cti_optimize, we may
    // GC, and the GC may either jettison the optimized version of our code block,
    // or it may install the optimized version of our code block even though we
    // concluded that it wasn't ready yet.
    //
    // Note that jettisoning won't happen if we already initiated OSR, because in
    // that case we would have already planted the optimized code block into the JS
    // stack.
    DeferGC deferGC(stackFrame.vm->heap);
    CallFrame* callFrame = stackFrame.callFrame;
    CodeBlock* codeBlock = callFrame->codeBlock();
    unsigned bytecodeIndex = stackFrame.args[0].int32();

    if (bytecodeIndex) {
        // If we're attempting to OSR from a loop, assume that this should be
        // separately optimized.
        codeBlock->m_shouldAlwaysBeInlined = false;
    }

    if (Options::verboseOSR()) {
        dataLog(
            *codeBlock, ": Entered optimize with bytecodeIndex = ", bytecodeIndex,
            ", executeCounter = ", codeBlock->jitExecuteCounter(),
            ", optimizationDelayCounter = ", codeBlock->reoptimizationRetryCounter(),
            ", exitCounter = ");
        if (codeBlock->hasOptimizedReplacement())
            dataLog(codeBlock->replacement()->osrExitCounter());
        else
            dataLog("N/A");
        dataLog("\n");
    }
    if (!codeBlock->checkIfOptimizationThresholdReached()) {
        codeBlock->updateAllPredictions();
        if (Options::verboseOSR())
            dataLog("Choosing not to optimize ", *codeBlock, " yet, because the threshold hasn't been reached.\n");
        return;
    }

    if (codeBlock->m_shouldAlwaysBeInlined) {
        codeBlock->updateAllPredictions();
        codeBlock->optimizeAfterWarmUp();
        if (Options::verboseOSR())
            dataLog("Choosing not to optimize ", *codeBlock, " yet, because m_shouldAlwaysBeInlined == true.\n");
        return;
    }

    // We cannot be in the process of asynchronous compilation and also have an optimized
    // replacement.
    ASSERT(
        !stackFrame.vm->worklist
        || !(stackFrame.vm->worklist->compilationState(codeBlock) != DFG::Worklist::NotKnown
             && codeBlock->hasOptimizedReplacement()));
    DFG::Worklist::State worklistState;
    if (stackFrame.vm->worklist) {
        // The call to DFG::Worklist::completeAllReadyPlansForVM() will complete all ready
        // (i.e. compiled) code blocks. But if it completes ours, we also need to know
        // what the result was so that we don't plow ahead and attempt OSR or immediate
        // reoptimization. This will have already also set the appropriate JIT execution
        // count threshold depending on what happened, so if the compilation was anything
        // but successful we just want to return early. See the case for worklistState ==
        // DFG::Worklist::Compiled, below.
        //
        // Note that we could have alternatively just called Worklist::compilationState()
        // here, and if it returned Compiled, we could have then called
        // completeAndScheduleOSR() below. But that would have meant that it could take
        // longer for code blocks to be completed: they would only complete when *their*
        // execution count trigger fired; but that could take a while since the firing is
        // racy. It could also mean that code blocks that never run again after being
        // compiled would sit on the worklist until next GC. That's fine, but it's
        // probably a waste of memory. Our goal here is to complete code blocks as soon as
        // possible in order to minimize the chances of us executing baseline code after
        // optimized code is already available.
        worklistState = stackFrame.vm->worklist->completeAllReadyPlansForVM(*stackFrame.vm, codeBlock);
    } else
        worklistState = DFG::Worklist::NotKnown;

    if (worklistState == DFG::Worklist::Compiling) {
        // We cannot be in the process of asynchronous compilation and also have an optimized
        // replacement.
        RELEASE_ASSERT(!codeBlock->hasOptimizedReplacement());
        codeBlock->setOptimizationThresholdBasedOnCompilationResult(CompilationDeferred);
        return;
    }
    if (worklistState == DFG::Worklist::Compiled) {
        // If we don't have an optimized replacement but we did just get compiled, then
        // the compilation failed or was invalidated, in which case the execution count
        // thresholds have already been set appropriately by
        // CodeBlock::setOptimizationThresholdBasedOnCompilationResult() and we have
        // nothing left to do.
        if (!codeBlock->hasOptimizedReplacement()) {
            codeBlock->updateAllPredictions();
            if (Options::verboseOSR())
                dataLog("Code block ", *codeBlock, " was compiled but it doesn't have an optimized replacement.\n");
            return;
        }
    } else if (codeBlock->hasOptimizedReplacement()) {
        if (Options::verboseOSR())
            dataLog("Considering OSR ", *codeBlock, " -> ", *codeBlock->replacement(), ".\n");
        // If we have an optimized replacement, then it must be the case that we entered
        // cti_optimize from a loop. That's because if there's an optimized replacement,
        // then all calls to this function will be relinked to the replacement and so
        // the prologue OSR will never fire.
        //
        // This is an interesting threshold check. Consider that a function OSR exits
        // in the middle of a loop, while having a relatively low exit count. The exit
        // will reset the execution counter to some target threshold, meaning that this
        // code won't be reached until that loop heats up for >=1000 executions. But then
        // we do a second check here, to see if we should either reoptimize, or just
        // attempt OSR entry. Hence it might even be correct for
        // shouldReoptimizeFromLoopNow() to always return true. But we make it do some
        // additional checking anyway, to reduce the amount of recompilation thrashing.
        if (codeBlock->replacement()->shouldReoptimizeFromLoopNow()) {
            if (Options::verboseOSR()) {
                dataLog(
                    "Triggering reoptimization of ", *codeBlock,
                    "(", *codeBlock->replacement(), ") (in loop).\n");
            }
            codeBlock->reoptimize();
            return;
        }
    } else {
        if (!codeBlock->shouldOptimizeNow()) {
            if (Options::verboseOSR()) {
                dataLog(
                    "Delaying optimization for ", *codeBlock,
                    " because of insufficient profiling.\n");
            }
            return;
        }

        if (Options::verboseOSR())
            dataLog("Triggering optimized compilation of ", *codeBlock, "\n");
        JSScope* scope = callFrame->scope();
        CompilationResult result;
        JSObject* error = codeBlock->compileOptimized(callFrame, scope, result, bytecodeIndex);
        if (Options::verboseOSR()) {
            dataLog("Optimizing compilation of ", *codeBlock, " result: ", result, "\n");
            if (error)
                dataLog("WARNING: optimized compilation failed with a JS error.\n");
        }

        codeBlock->setOptimizationThresholdBasedOnCompilationResult(result);
        if (result != CompilationSuccessful)
            return;
    }

    CodeBlock* optimizedCodeBlock = codeBlock->replacement();
    ASSERT(JITCode::isOptimizingJIT(optimizedCodeBlock->jitType()));

    if (optimizedCodeBlock->jitType() == JITCode::FTLJIT) {
        // FTL JIT doesn't support OSR entry yet.
        // https://bugs.webkit.org/show_bug.cgi?id=113625
        //
        // Don't attempt OSR entry again.
        codeBlock->dontOptimizeAnytimeSoon();
        return;
    }

    if (void* address = DFG::prepareOSREntry(callFrame, optimizedCodeBlock, bytecodeIndex)) {
        if (Options::verboseOSR()) {
            dataLog(
                "Performing OSR ", *codeBlock, " -> ", *optimizedCodeBlock, ", address ",
                RawPointer((STUB_RETURN_ADDRESS).value()), " -> ", RawPointer(address), ".\n");
        }

        codeBlock->optimizeSoon();
        STUB_SET_RETURN_ADDRESS(address);
        return;
    }
    if (Options::verboseOSR()) {
        dataLog(
            "Optimizing ", *codeBlock, " -> ", *codeBlock->replacement(),
            " succeeded, OSR failed, after a delay of ",
            codeBlock->optimizationDelayCounter(), ".\n");
    }

    // Count the OSR failure as a speculation failure. If this happens a lot, then
    // reoptimize.
    optimizedCodeBlock->countOSRExit();

    // We are a lot more conservative about triggering reoptimization after OSR failure than
    // before it. If we enter the optimize_from_loop trigger with a bucket full of fail
    // already, then we really would like to reoptimize immediately. But this case covers
    // something else: there weren't many (or any) speculation failures before, but we just
    // failed to enter the speculative code because some variable had the wrong value or
    // because the OSR code decided for any spurious reason that it did not want to OSR
    // right now. So, we trigger reoptimization only upon the more conservative (non-loop)
    // reoptimization trigger.
    if (optimizedCodeBlock->shouldReoptimizeNow()) {
        if (Options::verboseOSR()) {
            dataLog(
                "Triggering reoptimization of ", *codeBlock, " -> ",
                *codeBlock->replacement(), " (after OSR fail).\n");
        }
        codeBlock->reoptimize();
        return;
    }

    // OSR failed this time, but it might succeed next time! Let the code run a bit
    // longer and then try again.
    codeBlock->optimizeAfterWarmUp();
}
#endif // ENABLE(DFG_JIT)
DEFINE_STUB_FUNCTION(EncodedJSValue, op_instanceof)
{
    STUB_INIT_STACK_FRAME(stackFrame);

    CallFrame* callFrame = stackFrame.callFrame;
    JSValue value = stackFrame.args[0].jsValue();
    JSValue proto = stackFrame.args[1].jsValue();

    ASSERT(!value.isObject() || !proto.isObject());

    bool result = JSObject::defaultHasInstance(callFrame, value, proto);
    CHECK_FOR_EXCEPTION_AT_END();
    return JSValue::encode(jsBoolean(result));
}

DEFINE_STUB_FUNCTION(EncodedJSValue, op_del_by_id)
{
    STUB_INIT_STACK_FRAME(stackFrame);

    CallFrame* callFrame = stackFrame.callFrame;

    JSObject* baseObj = stackFrame.args[0].jsValue().toObject(callFrame);

    bool couldDelete = baseObj->methodTable()->deleteProperty(baseObj, callFrame, stackFrame.args[1].identifier());
    JSValue result = jsBoolean(couldDelete);
    if (!couldDelete && callFrame->codeBlock()->isStrictMode())
        stackFrame.vm->exception = createTypeError(stackFrame.callFrame, "Unable to delete property.");

    CHECK_FOR_EXCEPTION_AT_END();
    return JSValue::encode(result);
}

DEFINE_STUB_FUNCTION(JSObject*, op_new_func)
{
    STUB_INIT_STACK_FRAME(stackFrame);

    ASSERT(stackFrame.callFrame->codeBlock()->codeType() != FunctionCode || !stackFrame.callFrame->codeBlock()->needsFullScopeChain() || stackFrame.callFrame->uncheckedR(stackFrame.callFrame->codeBlock()->activationRegister()).jsValue());
    return JSFunction::create(stackFrame.callFrame, stackFrame.args[0].function(), stackFrame.callFrame->scope());
}
inline void* jitCompileFor(CallFrame* callFrame, CodeSpecializationKind kind)
{
    // This function is called by cti_op_call_jitCompile() and
    // cti_op_construct_jitCompile() JIT glue trampolines to compile the
    // callee function that we want to call. Both cti glue trampolines are
    // called by JIT'ed code which has pushed a frame and initialized most of
    // the frame content except for the codeBlock.
    //
    // Normally, the prologue of the callee is supposed to set the frame's cb
    // pointer to the cb of the callee. But in this case, the callee code does
    // not exist yet until it is compiled below. The compilation process will
    // allocate memory which may trigger a GC. The GC, in turn, will scan the
    // JSStack, and will expect the frame's cb to either be valid or 0. If
    // we don't initialize it, the GC will be accessing invalid memory and may
    // crash.
    //
    // Hence, we should nullify it here before proceeding with the compilation.
    callFrame->setCodeBlock(0);

    JSFunction* function = jsCast<JSFunction*>(callFrame->callee());
    ASSERT(!function->isHostFunction());
    FunctionExecutable* executable = function->jsExecutable();
    JSScope* callDataScopeChain = function->scope();
    JSObject* error = executable->compileFor(callFrame, callDataScopeChain, kind);
    if (!error)
        return function;
    callFrame->vm().exception = error;
    return 0;
}
DEFINE_STUB_FUNCTION(void*, op_call_jitCompile)
{
    STUB_INIT_STACK_FRAME(stackFrame);

#if !ASSERT_DISABLED
    CallData callData;
    ASSERT(stackFrame.callFrame->callee()->methodTable()->getCallData(stackFrame.callFrame->callee(), callData) == CallTypeJS);
#endif

    CallFrame* callFrame = stackFrame.callFrame;
    void* result = jitCompileFor(callFrame, CodeForCall);
    if (!result)
        return throwExceptionFromOpCall<void*>(stackFrame, callFrame, STUB_RETURN_ADDRESS);

    return result;
}

DEFINE_STUB_FUNCTION(void*, op_construct_jitCompile)
{
    STUB_INIT_STACK_FRAME(stackFrame);

#if !ASSERT_DISABLED
    ConstructData constructData;
    ASSERT(jsCast<JSFunction*>(stackFrame.callFrame->callee())->methodTable()->getConstructData(stackFrame.callFrame->callee(), constructData) == ConstructTypeJS);
#endif

    CallFrame* callFrame = stackFrame.callFrame;
    void* result = jitCompileFor(callFrame, CodeForConstruct);
    if (!result)
        return throwExceptionFromOpCall<void*>(stackFrame, callFrame, STUB_RETURN_ADDRESS);

    return result;
}
DEFINE_STUB_FUNCTION(int, op_call_arityCheck)
{
    STUB_INIT_STACK_FRAME(stackFrame);

    CallFrame* callFrame = stackFrame.callFrame;

    int missingArgCount = CommonSlowPaths::arityCheckFor(callFrame, stackFrame.stack, CodeForCall);
    if (missingArgCount < 0) {
        ErrorWithExecFunctor functor = ErrorWithExecFunctor(createStackOverflowError);
        return throwExceptionFromOpCall<int>(stackFrame, callFrame, STUB_RETURN_ADDRESS, functor);
    }
    return missingArgCount;
}

DEFINE_STUB_FUNCTION(int, op_construct_arityCheck)
{
    STUB_INIT_STACK_FRAME(stackFrame);

    CallFrame* callFrame = stackFrame.callFrame;

    int missingArgCount = CommonSlowPaths::arityCheckFor(callFrame, stackFrame.stack, CodeForConstruct);
    if (missingArgCount < 0) {
        ErrorWithExecFunctor functor = ErrorWithExecFunctor(createStackOverflowError);
        return throwExceptionFromOpCall<int>(stackFrame, callFrame, STUB_RETURN_ADDRESS, functor);
    }
    return missingArgCount;
}
inline void* lazyLinkFor(CallFrame* callFrame, CodeSpecializationKind kind)
{
    JSFunction* callee = jsCast<JSFunction*>(callFrame->callee());
    ExecutableBase* executable = callee->executable();

    MacroAssemblerCodePtr codePtr;
    CodeBlock* codeBlock = 0;
    CallLinkInfo* callLinkInfo = &callFrame->callerFrame()->codeBlock()->getCallLinkInfo(callFrame->returnPC());

    // This function is called by cti_vm_lazyLinkCall() and
    // cti_lazyLinkConstruct JIT glue trampolines to link the callee function
    // that we want to call. Both cti glue trampolines are called by JIT'ed
    // code which has pushed a frame and initialized most of the frame content
    // except for the codeBlock.
    //
    // Normally, the prologue of the callee is supposed to set the frame's cb
    // field to the cb of the callee. But in this case, the callee may not
    // exist yet, and if not, it will be generated in the compilation below.
    // The compilation will allocate memory which may trigger a GC. The GC, in
    // turn, will scan the JSStack, and will expect the frame's cb to be valid
    // or 0. If we don't initialize it, the GC will be accessing invalid
    // memory and may crash.
    //
    // Hence, we should nullify it here before proceeding with the compilation.
    callFrame->setCodeBlock(0);

    if (executable->isHostFunction())
        codePtr = executable->generatedJITCodeFor(kind)->addressForCall();
    else {
        FunctionExecutable* functionExecutable = static_cast<FunctionExecutable*>(executable);
        if (JSObject* error = functionExecutable->compileFor(callFrame, callee->scope(), kind)) {
            callFrame->vm().exception = error;
            return 0;
        }
        codeBlock = &functionExecutable->generatedBytecodeFor(kind);
        if (callFrame->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters())
            || callLinkInfo->callType == CallLinkInfo::CallVarargs)
            codePtr = functionExecutable->generatedJITCodeWithArityCheckFor(kind);
        else
            codePtr = functionExecutable->generatedJITCodeFor(kind)->addressForCall();
    }

    ConcurrentJITLocker locker(callFrame->callerFrame()->codeBlock()->m_lock);
    if (!callLinkInfo->seenOnce())
        callLinkInfo->setSeen();
    else
        JIT::linkFor(callFrame->callerFrame(), callee, callFrame->callerFrame()->codeBlock(), codeBlock, codePtr, callLinkInfo, &callFrame->vm(), kind);

    return codePtr.executableAddress();
}
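// Note the warm-up policy above: the first time a call site reaches this stub we
// only mark its CallLinkInfo as seen; a subsequent call actually links it. Call
// sites that execute only once therefore never pay the cost of linking.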
DEFINE_STUB_FUNCTION(void*, vm_lazyLinkCall)
{
    STUB_INIT_STACK_FRAME(stackFrame);

    CallFrame* callFrame = stackFrame.callFrame;
    void* result = lazyLinkFor(callFrame, CodeForCall);
    if (!result)
        return throwExceptionFromOpCall<void*>(stackFrame, callFrame, STUB_RETURN_ADDRESS);

    return result;
}

DEFINE_STUB_FUNCTION(void*, vm_lazyLinkClosureCall)
{
    STUB_INIT_STACK_FRAME(stackFrame);

    CallFrame* callFrame = stackFrame.callFrame;

    CodeBlock* callerCodeBlock = callFrame->callerFrame()->codeBlock();
    VM* vm = callerCodeBlock->vm();
    CallLinkInfo* callLinkInfo = &callerCodeBlock->getCallLinkInfo(callFrame->returnPC());
    JSFunction* callee = jsCast<JSFunction*>(callFrame->callee());
    ExecutableBase* executable = callee->executable();
    Structure* structure = callee->structure();

    ASSERT(callLinkInfo->callType == CallLinkInfo::Call);
    ASSERT(callLinkInfo->isLinked());
    ASSERT(callLinkInfo->callee);
    ASSERT(callee != callLinkInfo->callee.get());

    bool shouldLink = false;
    CodeBlock* calleeCodeBlock = 0;
    MacroAssemblerCodePtr codePtr;

    if (executable == callLinkInfo->callee.get()->executable()
        && structure == callLinkInfo->callee.get()->structure()) {
        shouldLink = true;

        ASSERT(executable->hasJITCodeForCall());
        codePtr = executable->generatedJITCodeForCall()->addressForCall();
        if (!callee->executable()->isHostFunction()) {
            calleeCodeBlock = &jsCast<FunctionExecutable*>(executable)->generatedBytecodeForCall();
            if (callFrame->argumentCountIncludingThis() < static_cast<size_t>(calleeCodeBlock->numParameters())) {
                shouldLink = false;
                codePtr = executable->generatedJITCodeWithArityCheckFor(CodeForCall);
            }
        }
    } else if (callee->isHostFunction())
        codePtr = executable->generatedJITCodeForCall()->addressForCall();
    else {
        // Need to clear the code block before compilation, because compilation can GC.
        callFrame->setCodeBlock(0);

        FunctionExecutable* functionExecutable = jsCast<FunctionExecutable*>(executable);
        JSScope* scopeChain = callee->scope();
        JSObject* error = functionExecutable->compileFor(callFrame, scopeChain, CodeForCall);
        if (error) {
            callFrame->vm().exception = error;
            return 0;
        }

        codePtr = functionExecutable->generatedJITCodeWithArityCheckFor(CodeForCall);
    }

    if (shouldLink) {
        ConcurrentJITLocker locker(callerCodeBlock->m_lock);
        JIT::compileClosureCall(vm, callLinkInfo, callerCodeBlock, calleeCodeBlock, structure, executable, codePtr);
        callLinkInfo->hasSeenClosure = true;
    } else
        JIT::linkSlowCall(callerCodeBlock, callLinkInfo);

    return codePtr.executableAddress();
}

DEFINE_STUB_FUNCTION(void*, vm_lazyLinkConstruct)
{
    STUB_INIT_STACK_FRAME(stackFrame);

    CallFrame* callFrame = stackFrame.callFrame;
    void* result = lazyLinkFor(callFrame, CodeForConstruct);
    if (!result)
        return throwExceptionFromOpCall<void*>(stackFrame, callFrame, STUB_RETURN_ADDRESS);

    return result;
}
DEFINE_STUB_FUNCTION(JSObject*, op_push_activation)
{
    STUB_INIT_STACK_FRAME(stackFrame);

    JSActivation* activation = JSActivation::create(stackFrame.callFrame->vm(), stackFrame.callFrame, stackFrame.callFrame->codeBlock());
    stackFrame.callFrame->setScope(activation);
    return activation;
}

DEFINE_STUB_FUNCTION(EncodedJSValue, op_call_NotJSFunction)
{
    STUB_INIT_STACK_FRAME(stackFrame);

    CallFrame* callFrame = stackFrame.callFrame;

    JSValue callee = callFrame->calleeAsValue();

    CallData callData;
    CallType callType = getCallData(callee, callData);

    ASSERT(callType != CallTypeJS);
    if (callType != CallTypeHost) {
        ASSERT(callType == CallTypeNone);
        ErrorWithExecAndCalleeFunctor functor = ErrorWithExecAndCalleeFunctor(createNotAFunctionError, callee);
        return throwExceptionFromOpCall<EncodedJSValue>(stackFrame, callFrame, STUB_RETURN_ADDRESS, functor);
    }

    EncodedJSValue returnValue;
    {
        SamplingTool::CallRecord callRecord(CTI_SAMPLER, true);
        returnValue = callData.native.function(callFrame);
    }

    if (stackFrame.vm->exception)
        return throwExceptionFromOpCall<EncodedJSValue>(stackFrame, callFrame, STUB_RETURN_ADDRESS);

    return returnValue;
}

DEFINE_STUB_FUNCTION(EncodedJSValue, op_create_arguments)
{
    STUB_INIT_STACK_FRAME(stackFrame);

    Arguments* arguments = Arguments::create(*stackFrame.vm, stackFrame.callFrame);
    return JSValue::encode(JSValue(arguments));
}

DEFINE_STUB_FUNCTION(void, op_tear_off_activation)
{
    STUB_INIT_STACK_FRAME(stackFrame);

    ASSERT(stackFrame.callFrame->codeBlock()->needsFullScopeChain());
    jsCast<JSActivation*>(stackFrame.args[0].jsValue())->tearOff(*stackFrame.vm);
}

DEFINE_STUB_FUNCTION(void, op_tear_off_arguments)
{
    STUB_INIT_STACK_FRAME(stackFrame);

    CallFrame* callFrame = stackFrame.callFrame;
    ASSERT(callFrame->codeBlock()->usesArguments());
    Arguments* arguments = jsCast<Arguments*>(stackFrame.args[0].jsValue());
    if (JSValue activationValue = stackFrame.args[1].jsValue()) {
        arguments->didTearOffActivation(callFrame, jsCast<JSActivation*>(activationValue));
        return;
    }
    arguments->tearOff(callFrame);
}

DEFINE_STUB_FUNCTION(void, op_profile_will_call)
{
    STUB_INIT_STACK_FRAME(stackFrame);

    if (LegacyProfiler* profiler = stackFrame.vm->enabledProfiler())
        profiler->willExecute(stackFrame.callFrame, stackFrame.args[0].jsValue());
}

DEFINE_STUB_FUNCTION(void, op_profile_did_call)
{
    STUB_INIT_STACK_FRAME(stackFrame);

    if (LegacyProfiler* profiler = stackFrame.vm->enabledProfiler())
        profiler->didExecute(stackFrame.callFrame, stackFrame.args[0].jsValue());
}
DEFINE_STUB_FUNCTION(JSObject*, op_new_array)
{
    STUB_INIT_STACK_FRAME(stackFrame);

    return constructArray(stackFrame.callFrame, stackFrame.args[2].arrayAllocationProfile(), reinterpret_cast<JSValue*>(&stackFrame.callFrame->registers()[stackFrame.args[0].int32()]), stackFrame.args[1].int32());
}

DEFINE_STUB_FUNCTION(JSObject*, op_new_array_with_size)
{
    STUB_INIT_STACK_FRAME(stackFrame);

    return constructArrayWithSizeQuirk(stackFrame.callFrame, stackFrame.args[1].arrayAllocationProfile(), stackFrame.callFrame->lexicalGlobalObject(), stackFrame.args[0].jsValue());
}

DEFINE_STUB_FUNCTION(JSObject*, op_new_array_buffer)
{
    STUB_INIT_STACK_FRAME(stackFrame);

    return constructArray(stackFrame.callFrame, stackFrame.args[2].arrayAllocationProfile(), stackFrame.callFrame->codeBlock()->constantBuffer(stackFrame.args[0].int32()), stackFrame.args[1].int32());
}

DEFINE_STUB_FUNCTION(EncodedJSValue, op_construct_NotJSConstruct)
{
    STUB_INIT_STACK_FRAME(stackFrame);

    CallFrame* callFrame = stackFrame.callFrame;
    JSValue callee = callFrame->calleeAsValue();

    ConstructData constructData;
    ConstructType constructType = getConstructData(callee, constructData);

    ASSERT(constructType != ConstructTypeJS);
    if (constructType != ConstructTypeHost) {
        ASSERT(constructType == ConstructTypeNone);
        ErrorWithExecAndCalleeFunctor functor = ErrorWithExecAndCalleeFunctor(createNotAConstructorError, callee);
        return throwExceptionFromOpCall<EncodedJSValue>(stackFrame, callFrame, STUB_RETURN_ADDRESS, functor);
    }

    EncodedJSValue returnValue;
    {
        SamplingTool::CallRecord callRecord(CTI_SAMPLER, true);
        returnValue = constructData.native.function(callFrame);
    }

    if (stackFrame.vm->exception)
        return throwExceptionFromOpCall<EncodedJSValue>(stackFrame, callFrame, STUB_RETURN_ADDRESS);

    return returnValue;
}
static JSValue getByVal(
    CallFrame* callFrame, JSValue baseValue, JSValue subscript, ReturnAddressPtr returnAddress)
{
    if (LIKELY(baseValue.isCell() && subscript.isString())) {
        if (JSValue result = baseValue.asCell()->fastGetOwnProperty(callFrame, asString(subscript)->value(callFrame)))
            return result;
    }

    if (subscript.isUInt32()) {
        uint32_t i = subscript.asUInt32();
        if (isJSString(baseValue) && asString(baseValue)->canGetIndex(i)) {
            ctiPatchCallByReturnAddress(callFrame->codeBlock(), returnAddress, FunctionPtr(cti_op_get_by_val_string));
            return asString(baseValue)->getIndex(callFrame, i);
        }
        return baseValue.get(callFrame, i);
    }

    if (isName(subscript))
        return baseValue.get(callFrame, jsCast<NameInstance*>(subscript.asCell())->privateName());

    Identifier property(callFrame, subscript.toString(callFrame)->value(callFrame));
    return baseValue.get(callFrame, property);
}
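// Illustrative JS shapes for the paths above (not from the original source):
// o["name"] hits the cell-plus-string fastGetOwnProperty path, o[42] takes the
// uint32 path (with a patchable fast case for string bases), and private names
// or any other subscript fall through to a full property get.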
DEFINE_STUB_FUNCTION(EncodedJSValue, op_get_by_val)
{
    STUB_INIT_STACK_FRAME(stackFrame);

    CallFrame* callFrame = stackFrame.callFrame;

    JSValue baseValue = stackFrame.args[0].jsValue();
    JSValue subscript = stackFrame.args[1].jsValue();

    if (baseValue.isObject() && subscript.isInt32()) {
        // See if it's worth optimizing this at all.
        JSObject* object = asObject(baseValue);
        bool didOptimize = false;

        unsigned bytecodeOffset = callFrame->locationAsBytecodeOffset();
        ASSERT(bytecodeOffset);
        ByValInfo& byValInfo = callFrame->codeBlock()->getByValInfo(bytecodeOffset - 1);
        ASSERT(!byValInfo.stubRoutine);

        if (hasOptimizableIndexing(object->structure())) {
            // Attempt to optimize.
            JITArrayMode arrayMode = jitArrayModeForStructure(object->structure());
            if (arrayMode != byValInfo.arrayMode) {
                JIT::compileGetByVal(&callFrame->vm(), callFrame->codeBlock(), &byValInfo, STUB_RETURN_ADDRESS, arrayMode);
                didOptimize = true;
            }
        }

        if (!didOptimize) {
            // If we take the slow path more than 10 times without patching, then make
            // sure we never make that mistake again. Or, if we failed to patch and we
            // have some object that intercepts indexed get, then don't even wait until
            // 10 times. For cases where we see non-index-intercepting objects, this
            // gives 10 iterations' worth of opportunity for us to observe that the
            // get_by_val may be polymorphic.
            if (++byValInfo.slowPathCount >= 10
                || object->structure()->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero()) {
                // Don't ever try to optimize.
                RepatchBuffer repatchBuffer(callFrame->codeBlock());
                repatchBuffer.relinkCallerToFunction(STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_val_generic));
            }
        }
    }

    JSValue result = getByVal(callFrame, baseValue, subscript, STUB_RETURN_ADDRESS);
    CHECK_FOR_EXCEPTION();
    return JSValue::encode(result);
}

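// Fully generic tier: once a call site has been relinked here it stays here,
// so this stub does no further profiling or patching.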
DEFINE_STUB_FUNCTION(EncodedJSValue, op_get_by_val_generic)
{
    STUB_INIT_STACK_FRAME(stackFrame);

    CallFrame* callFrame = stackFrame.callFrame;

    JSValue baseValue = stackFrame.args[0].jsValue();
    JSValue subscript = stackFrame.args[1].jsValue();

    JSValue result = getByVal(callFrame, baseValue, subscript, STUB_RETURN_ADDRESS);
    CHECK_FOR_EXCEPTION();
    return JSValue::encode(result);
}

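// String-specialized tier, installed by getByVal() when it saw a string base
// with an in-range index; if the base value stops being a string, the call
// site is repatched back to the ordinary cti_op_get_by_val slow path.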
DEFINE_STUB_FUNCTION(EncodedJSValue, op_get_by_val_string)
{
    STUB_INIT_STACK_FRAME(stackFrame);

    CallFrame* callFrame = stackFrame.callFrame;

    JSValue baseValue = stackFrame.args[0].jsValue();
    JSValue subscript = stackFrame.args[1].jsValue();

    JSValue result;

    if (LIKELY(subscript.isUInt32())) {
        uint32_t i = subscript.asUInt32();
        if (isJSString(baseValue) && asString(baseValue)->canGetIndex(i))
            result = asString(baseValue)->getIndex(callFrame, i);
        else {
            result = baseValue.get(callFrame, i);
            if (!isJSString(baseValue))
                ctiPatchCallByReturnAddress(callFrame->codeBlock(), STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_val));
        }
    } else if (isName(subscript))
        result = baseValue.get(callFrame, jsCast<NameInstance*>(subscript.asCell())->privateName());
    else {
        Identifier property(callFrame, subscript.toString(callFrame)->value(callFrame));
        result = baseValue.get(callFrame, property);
    }

    CHECK_FOR_EXCEPTION_AT_END();
    return JSValue::encode(result);
}

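// Shared generic by-value put, the mirror of getByVal() above. Indexed puts
// use setIndexQuickly() when the object allows it; note that toString() on
// the subscript can throw, in which case the put is deliberately skipped.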
static void putByVal(CallFrame* callFrame, JSValue baseValue, JSValue subscript, JSValue value)
{
    if (LIKELY(subscript.isUInt32())) {
        uint32_t i = subscript.asUInt32();
        if (baseValue.isObject()) {
            JSObject* object = asObject(baseValue);
            if (object->canSetIndexQuickly(i))
                object->setIndexQuickly(callFrame->vm(), i, value);
            else
                object->methodTable()->putByIndex(object, callFrame, i, value, callFrame->codeBlock()->isStrictMode());
        } else
            baseValue.putByIndex(callFrame, i, value, callFrame->codeBlock()->isStrictMode());
    } else if (isName(subscript)) {
        PutPropertySlot slot(callFrame->codeBlock()->isStrictMode());
        baseValue.put(callFrame, jsCast<NameInstance*>(subscript.asCell())->privateName(), value, slot);
    } else {
        Identifier property(callFrame, subscript.toString(callFrame)->value(callFrame));
        if (!callFrame->vm().exception) { // Don't put to an object if toString threw an exception.
            PutPropertySlot slot(callFrame->codeBlock()->isStrictMode());
            baseValue.put(callFrame, property, value, slot);
        }
    }
}

DEFINE_STUB_FUNCTION(void, op_put_by_val)
{
    STUB_INIT_STACK_FRAME(stackFrame);

    CallFrame* callFrame = stackFrame.callFrame;

    JSValue baseValue = stackFrame.args[0].jsValue();
    JSValue subscript = stackFrame.args[1].jsValue();
    JSValue value = stackFrame.args[2].jsValue();

    if (baseValue.isObject() && subscript.isInt32()) {
        // See if it's worth optimizing at all.
        JSObject* object = asObject(baseValue);
        bool didOptimize = false;

        unsigned bytecodeOffset = callFrame->locationAsBytecodeOffset();
        ASSERT(bytecodeOffset);
        ByValInfo& byValInfo = callFrame->codeBlock()->getByValInfo(bytecodeOffset - 1);
        ASSERT(!byValInfo.stubRoutine);

        if (hasOptimizableIndexing(object->structure())) {
            // Attempt to optimize.
            JITArrayMode arrayMode = jitArrayModeForStructure(object->structure());
            if (arrayMode != byValInfo.arrayMode) {
                JIT::compilePutByVal(&callFrame->vm(), callFrame->codeBlock(), &byValInfo, STUB_RETURN_ADDRESS, arrayMode);
                didOptimize = true;
            }
        }

        if (!didOptimize) {
            // If we take the slow path more than 10 times without patching, then make
            // sure we never make that mistake again. Or, if we failed to patch and we
            // have some object that intercepts indexed access, then don't even wait
            // until 10 times. For cases where we see non-index-intercepting objects,
            // this gives 10 iterations' worth of opportunity for us to observe that
            // the put_by_val may be polymorphic.
            if (++byValInfo.slowPathCount >= 10
                || object->structure()->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero()) {
                // Don't ever try to optimize.
                RepatchBuffer repatchBuffer(callFrame->codeBlock());
                repatchBuffer.relinkCallerToFunction(STUB_RETURN_ADDRESS, FunctionPtr(cti_op_put_by_val_generic));
            }
        }
    }

    putByVal(callFrame, baseValue, subscript, value);

    CHECK_FOR_EXCEPTION_AT_END();
}

DEFINE_STUB_FUNCTION(void, op_put_by_val_generic)
{
    STUB_INIT_STACK_FRAME(stackFrame);

    CallFrame* callFrame = stackFrame.callFrame;

    JSValue baseValue = stackFrame.args[0].jsValue();
    JSValue subscript = stackFrame.args[1].jsValue();
    JSValue value = stackFrame.args[2].jsValue();

    putByVal(callFrame, baseValue, subscript, value);

    CHECK_FOR_EXCEPTION_AT_END();
}

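// Sets up the outgoing frame for a varargs call: loadVarargs() copies the
// `arguments` value onto the stack starting at the first free register and
// returns the new CallFrame, or 0 on failure, in which case we throw.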
DEFINE_STUB_FUNCTION(void*, op_load_varargs)
{
    STUB_INIT_STACK_FRAME(stackFrame);

    CallFrame* callFrame = stackFrame.callFrame;
    JSStack* stack = stackFrame.stack;
    JSValue thisValue = stackFrame.args[0].jsValue();
    JSValue arguments = stackFrame.args[1].jsValue();
    int firstFreeRegister = stackFrame.args[2].int32();

    CallFrame* newCallFrame = loadVarargs(callFrame, stack, thisValue, arguments, firstFreeRegister);
    if (!newCallFrame)
        VM_THROW_EXCEPTION();
    return newCallFrame;
}

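// Fused compare-and-branch slow paths. Only "less" and "less or equal"
// primitives exist; op_jgreater and op_jgreatereq swap the operands, and the
// bool template argument preserves left-to-right evaluation of any
// valueOf/toString side effects.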
DEFINE_STUB_FUNCTION(int, op_jless)
{
    STUB_INIT_STACK_FRAME(stackFrame);

    JSValue src1 = stackFrame.args[0].jsValue();
    JSValue src2 = stackFrame.args[1].jsValue();
    CallFrame* callFrame = stackFrame.callFrame;

    bool result = jsLess<true>(callFrame, src1, src2);
    CHECK_FOR_EXCEPTION_AT_END();
    return result;
}

DEFINE_STUB_FUNCTION(int, op_jlesseq)
{
    STUB_INIT_STACK_FRAME(stackFrame);

    JSValue src1 = stackFrame.args[0].jsValue();
    JSValue src2 = stackFrame.args[1].jsValue();
    CallFrame* callFrame = stackFrame.callFrame;

    bool result = jsLessEq<true>(callFrame, src1, src2);
    CHECK_FOR_EXCEPTION_AT_END();
    return result;
}

DEFINE_STUB_FUNCTION(int, op_jgreater)
{
    STUB_INIT_STACK_FRAME(stackFrame);

    JSValue src1 = stackFrame.args[0].jsValue();
    JSValue src2 = stackFrame.args[1].jsValue();
    CallFrame* callFrame = stackFrame.callFrame;

    bool result = jsLess<false>(callFrame, src2, src1);
    CHECK_FOR_EXCEPTION_AT_END();
    return result;
}

DEFINE_STUB_FUNCTION(int, op_jgreatereq)
{
    STUB_INIT_STACK_FRAME(stackFrame);

    JSValue src1 = stackFrame.args[0].jsValue();
    JSValue src2 = stackFrame.args[1].jsValue();
    CallFrame* callFrame = stackFrame.callFrame;

    bool result = jsLessEq<false>(callFrame, src2, src1);
    CHECK_FOR_EXCEPTION_AT_END();
    return result;
}

DEFINE_STUB_FUNCTION(int, op_jtrue)
{
    STUB_INIT_STACK_FRAME(stackFrame);

    JSValue src1 = stackFrame.args[0].jsValue();

    bool result = src1.toBoolean(stackFrame.callFrame);
    CHECK_FOR_EXCEPTION_AT_END();
    return result;
}

DEFINE_STUB_FUNCTION(int, op_eq)
{
    STUB_INIT_STACK_FRAME(stackFrame);

    JSValue src1 = stackFrame.args[0].jsValue();
    JSValue src2 = stackFrame.args[1].jsValue();

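    // On JSVALUE32_64 the abstract equality comparison is open-coded below,
    // looping back to `start` after each toPrimitive() conversion; on 64-bit
    // builds we defer to JSValue::equalSlowCaseInline() instead.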
#if USE(JSVALUE32_64)
    start:
    if (src2.isUndefined()) {
        return src1.isNull()
            || (src1.isCell() && src1.asCell()->structure()->masqueradesAsUndefined(stackFrame.callFrame->lexicalGlobalObject()))
            || src1.isUndefined();
    }

    if (src2.isNull()) {
        return src1.isUndefined()
            || (src1.isCell() && src1.asCell()->structure()->masqueradesAsUndefined(stackFrame.callFrame->lexicalGlobalObject()))
            || src1.isNull();
    }

    if (src1.isInt32()) {
        if (src2.isDouble())
            return src1.asInt32() == src2.asDouble();
        double d = src2.toNumber(stackFrame.callFrame);
        CHECK_FOR_EXCEPTION();
        return src1.asInt32() == d;
    }

    if (src1.isDouble()) {
        if (src2.isInt32())
            return src1.asDouble() == src2.asInt32();
        double d = src2.toNumber(stackFrame.callFrame);
        CHECK_FOR_EXCEPTION();
        return src1.asDouble() == d;
    }

    if (src1.isTrue()) {
        if (src2.isFalse())
            return false;
        double d = src2.toNumber(stackFrame.callFrame);
        CHECK_FOR_EXCEPTION();
        return d == 1;
    }

    if (src1.isFalse()) {
        if (src2.isTrue())
            return false;
        double d = src2.toNumber(stackFrame.callFrame);
        CHECK_FOR_EXCEPTION();
        return d == 0;
    }

    if (src1.isUndefined())
        return src2.isCell() && src2.asCell()->structure()->masqueradesAsUndefined(stackFrame.callFrame->lexicalGlobalObject());

    if (src1.isNull())
        return src2.isCell() && src2.asCell()->structure()->masqueradesAsUndefined(stackFrame.callFrame->lexicalGlobalObject());

    JSCell* cell1 = src1.asCell();

    if (cell1->isString()) {
        if (src2.isInt32())
            return jsToNumber(jsCast<JSString*>(cell1)->value(stackFrame.callFrame)) == src2.asInt32();

        if (src2.isDouble())
            return jsToNumber(jsCast<JSString*>(cell1)->value(stackFrame.callFrame)) == src2.asDouble();

        if (src2.isTrue())
            return jsToNumber(jsCast<JSString*>(cell1)->value(stackFrame.callFrame)) == 1.0;

        if (src2.isFalse())
            return jsToNumber(jsCast<JSString*>(cell1)->value(stackFrame.callFrame)) == 0.0;

        JSCell* cell2 = src2.asCell();
        if (cell2->isString())
            return jsCast<JSString*>(cell1)->value(stackFrame.callFrame) == jsCast<JSString*>(cell2)->value(stackFrame.callFrame);

        src2 = asObject(cell2)->toPrimitive(stackFrame.callFrame);
        CHECK_FOR_EXCEPTION();
        goto start;
    }

    if (src2.isObject())
        return asObject(cell1) == asObject(src2);
    src1 = asObject(cell1)->toPrimitive(stackFrame.callFrame);
    CHECK_FOR_EXCEPTION();
    goto start;
#else // USE(JSVALUE32_64)
    CallFrame* callFrame = stackFrame.callFrame;

    bool result = JSValue::equalSlowCaseInline(callFrame, src1, src2);
    CHECK_FOR_EXCEPTION_AT_END();
    return result;
#endif // USE(JSVALUE32_64)
}

DEFINE_STUB_FUNCTION(int, op_eq_strings)
{
#if USE(JSVALUE32_64)
    STUB_INIT_STACK_FRAME(stackFrame);

    JSString* string1 = stackFrame.args[0].jsString();
    JSString* string2 = stackFrame.args[1].jsString();

    ASSERT(string1->isString());
    ASSERT(string2->isString());
    return string1->value(stackFrame.callFrame) == string2->value(stackFrame.callFrame);
#else
    UNUSED_PARAM(args);
    RELEASE_ASSERT_NOT_REACHED();
    return 0;
#endif
}

DEFINE_STUB_FUNCTION(JSObject*, op_new_func_exp)
{
    STUB_INIT_STACK_FRAME(stackFrame);
    CallFrame* callFrame = stackFrame.callFrame;

    FunctionExecutable* function = stackFrame.args[0].function();
    JSFunction* func = JSFunction::create(callFrame, function, callFrame->scope());
    ASSERT(callFrame->codeBlock()->codeType() != FunctionCode || !callFrame->codeBlock()->needsFullScopeChain() || callFrame->uncheckedR(callFrame->codeBlock()->activationRegister()).jsValue());

    return func;
}

DEFINE_STUB_FUNCTION(JSObject*, op_new_regexp)
{
    STUB_INIT_STACK_FRAME(stackFrame);

    CallFrame* callFrame = stackFrame.callFrame;

    RegExp* regExp = stackFrame.args[0].regExp();
    if (!regExp->isValid()) {
        stackFrame.vm->exception = createSyntaxError(callFrame, "Invalid flags supplied to RegExp constructor.");
        VM_THROW_EXCEPTION();
    }

    return RegExpObject::create(*stackFrame.vm, stackFrame.callFrame->lexicalGlobalObject(), stackFrame.callFrame->lexicalGlobalObject()->regExpStructure(), regExp);
}

DEFINE_STUB_FUNCTION(EncodedJSValue, op_call_eval)
{
    STUB_INIT_STACK_FRAME(stackFrame);

    CallFrame* callFrame = stackFrame.callFrame;
    CallFrame* callerFrame = callFrame->callerFrame();
    ASSERT(callFrame->callerFrame()->codeBlock()->codeType() != FunctionCode
        || !callFrame->callerFrame()->codeBlock()->needsFullScopeChain()
        || callFrame->callerFrame()->uncheckedR(callFrame->callerFrame()->codeBlock()->activationRegister()).jsValue());

    callFrame->setScope(callerFrame->scope());
    callFrame->setReturnPC(static_cast<Instruction*>((STUB_RETURN_ADDRESS).value()));
    callFrame->setCodeBlock(0);

    if (!isHostFunction(callFrame->calleeAsValue(), globalFuncEval))
        return JSValue::encode(JSValue());

    JSValue result = eval(callFrame);
    if (stackFrame.vm->exception)
        return throwExceptionFromOpCall<EncodedJSValue>(stackFrame, callFrame, STUB_RETURN_ADDRESS);

    return JSValue::encode(result);
}

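// Throws the given value: unwinds to the nearest handler, points the stub's
// return address at the handler's catch routine, and returns the frame that
// routine should run in.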
DEFINE_STUB_FUNCTION(void*, op_throw)
{
    STUB_INIT_STACK_FRAME(stackFrame);
    ExceptionHandler handler = jitThrow(stackFrame.vm, stackFrame.callFrame, stackFrame.args[0].jsValue(), STUB_RETURN_ADDRESS);
    STUB_SET_RETURN_ADDRESS(handler.catchRoutine);
    return handler.callFrame;
}

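// for-in support: returns the object's cached JSPropertyNameIterator when the
// Structure's enumeration cache is still valid for the current prototype
// chain, and rebuilds it otherwise.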
DEFINE_STUB_FUNCTION(JSPropertyNameIterator*, op_get_pnames)
{
    STUB_INIT_STACK_FRAME(stackFrame);

    CallFrame* callFrame = stackFrame.callFrame;
    JSObject* o = stackFrame.args[0].jsObject();
    Structure* structure = o->structure();
    JSPropertyNameIterator* jsPropertyNameIterator = structure->enumerationCache();
    if (!jsPropertyNameIterator || jsPropertyNameIterator->cachedPrototypeChain() != structure->prototypeChain(callFrame))
        jsPropertyNameIterator = JSPropertyNameIterator::create(callFrame, o);
    return jsPropertyNameIterator;
}

DEFINE_STUB_FUNCTION(int, has_property)
{
    STUB_INIT_STACK_FRAME(stackFrame);

    JSObject* base = stackFrame.args[0].jsObject();
    JSString* property = stackFrame.args[1].jsString();
    int result = base->hasProperty(stackFrame.callFrame, Identifier(stackFrame.callFrame, property->value(stackFrame.callFrame)));
    CHECK_FOR_EXCEPTION_AT_END();
    return result;
}

DEFINE_STUB_FUNCTION(void, op_push_with_scope)
{
    STUB_INIT_STACK_FRAME(stackFrame);

    JSObject* o = stackFrame.args[0].jsValue().toObject(stackFrame.callFrame);
    CHECK_FOR_EXCEPTION_VOID();
    stackFrame.callFrame->setScope(JSWithScope::create(stackFrame.callFrame, o));
}

DEFINE_STUB_FUNCTION(void, op_pop_scope)
{
    STUB_INIT_STACK_FRAME(stackFrame);

    stackFrame.callFrame->setScope(stackFrame.callFrame->scope()->next());
}

DEFINE_STUB_FUNCTION(void, op_push_name_scope)
{
    STUB_INIT_STACK_FRAME(stackFrame);

    JSNameScope* scope = JSNameScope::create(stackFrame.callFrame, stackFrame.args[0].identifier(), stackFrame.args[1].jsValue(), stackFrame.args[2].int32());

    CallFrame* callFrame = stackFrame.callFrame;
    callFrame->setScope(scope);
}

DEFINE_STUB_FUNCTION(void, op_put_by_index)
{
    STUB_INIT_STACK_FRAME(stackFrame);

    CallFrame* callFrame = stackFrame.callFrame;
    unsigned property = stackFrame.args[1].int32();

    JSValue arrayValue = stackFrame.args[0].jsValue();
    ASSERT(isJSArray(arrayValue));
    asArray(arrayValue)->putDirectIndex(callFrame, property, stackFrame.args[2].jsValue());
}

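// The op_switch_* stubs map a switch scrutinee to a jump target through the
// CodeBlock's jump tables, falling back to the table's default target when
// the scrutinee doesn't fit (a non-integral double for op_switch_imm, or a
// string that isn't exactly one character for op_switch_char).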
DEFINE_STUB_FUNCTION(void*, op_switch_imm)
{
    STUB_INIT_STACK_FRAME(stackFrame);

    JSValue scrutinee = stackFrame.args[0].jsValue();
    unsigned tableIndex = stackFrame.args[1].int32();
    CallFrame* callFrame = stackFrame.callFrame;
    CodeBlock* codeBlock = callFrame->codeBlock();

    if (scrutinee.isInt32())
        return codeBlock->switchJumpTable(tableIndex).ctiForValue(scrutinee.asInt32()).executableAddress();
    if (scrutinee.isDouble() && scrutinee.asDouble() == static_cast<int32_t>(scrutinee.asDouble()))
        return codeBlock->switchJumpTable(tableIndex).ctiForValue(static_cast<int32_t>(scrutinee.asDouble())).executableAddress();
    return codeBlock->switchJumpTable(tableIndex).ctiDefault.executableAddress();
}

DEFINE_STUB_FUNCTION(void*, op_switch_char)
{
    STUB_INIT_STACK_FRAME(stackFrame);

    JSValue scrutinee = stackFrame.args[0].jsValue();
    unsigned tableIndex = stackFrame.args[1].int32();
    CallFrame* callFrame = stackFrame.callFrame;
    CodeBlock* codeBlock = callFrame->codeBlock();

    void* result = codeBlock->switchJumpTable(tableIndex).ctiDefault.executableAddress();

    if (scrutinee.isString()) {
        StringImpl* value = asString(scrutinee)->value(callFrame).impl();
        if (value->length() == 1)
            result = codeBlock->switchJumpTable(tableIndex).ctiForValue((*value)[0]).executableAddress();
    }

    CHECK_FOR_EXCEPTION_AT_END();
    return result;
}

DEFINE_STUB_FUNCTION(void*, op_switch_string)
{
    STUB_INIT_STACK_FRAME(stackFrame);

    JSValue scrutinee = stackFrame.args[0].jsValue();
    unsigned tableIndex = stackFrame.args[1].int32();
    CallFrame* callFrame = stackFrame.callFrame;
    CodeBlock* codeBlock = callFrame->codeBlock();

    void* result = codeBlock->stringSwitchJumpTable(tableIndex).ctiDefault.executableAddress();

    if (scrutinee.isString()) {
        StringImpl* value = asString(scrutinee)->value(callFrame).impl();
        result = codeBlock->stringSwitchJumpTable(tableIndex).ctiForValue(value).executableAddress();
    }

    CHECK_FOR_EXCEPTION_AT_END();
    return result;
}

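// Installs an accessor property: the getter and setter (each either a
// function object or undefined, but not both undefined) are packed into a
// GetterSetter cell and stored directly on the base object.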
DEFINE_STUB_FUNCTION(void, op_put_getter_setter)
{
    STUB_INIT_STACK_FRAME(stackFrame);

    CallFrame* callFrame = stackFrame.callFrame;

    ASSERT(stackFrame.args[0].jsValue().isObject());
    JSObject* baseObj = asObject(stackFrame.args[0].jsValue());

    GetterSetter* accessor = GetterSetter::create(callFrame);

    JSValue getter = stackFrame.args[2].jsValue();
    JSValue setter = stackFrame.args[3].jsValue();
    ASSERT(getter.isObject() || getter.isUndefined());
    ASSERT(setter.isObject() || setter.isUndefined());
    ASSERT(getter.isObject() || setter.isObject());

    if (!getter.isUndefined())
        accessor->setGetter(callFrame->vm(), asObject(getter));
    if (!setter.isUndefined())
        accessor->setSetter(callFrame->vm(), asObject(setter));
    baseObj->putDirectAccessor(callFrame, stackFrame.args[1].identifier(), accessor, Accessor);
}

DEFINE_STUB_FUNCTION(void, op_throw_static_error)
{
    STUB_INIT_STACK_FRAME(stackFrame);

    CallFrame* callFrame = stackFrame.callFrame;
    String message = errorDescriptionForValue(callFrame, stackFrame.args[0].jsValue())->value(callFrame);
    if (stackFrame.args[1].asInt32)
        stackFrame.vm->exception = createReferenceError(callFrame, message);
    else
        stackFrame.vm->exception = createTypeError(callFrame, message);
    VM_THROW_EXCEPTION_AT_END();
}

DEFINE_STUB_FUNCTION(void, op_debug)
{
    STUB_INIT_STACK_FRAME(stackFrame);

    CallFrame* callFrame = stackFrame.callFrame;

    int debugHookID = stackFrame.args[0].int32();
    int firstLine = stackFrame.args[1].int32();
    int lastLine = stackFrame.args[2].int32();
    int column = stackFrame.args[3].int32();

    stackFrame.vm->interpreter->debug(callFrame, static_cast<DebugHookID>(debugHookID), firstLine, lastLine, column);
}

DEFINE_STUB_FUNCTION(void*, vm_throw)
{
    STUB_INIT_STACK_FRAME(stackFrame);
    VM* vm = stackFrame.vm;
    ExceptionHandler handler = jitThrow(vm, stackFrame.callFrame, vm->exception, vm->exceptionLocation);
    STUB_SET_RETURN_ADDRESS(handler.catchRoutine);
    return handler.callFrame;
}

#if USE(JSVALUE32_64)
EncodedExceptionHandler JIT_STUB cti_vm_handle_exception(CallFrame* callFrame)
{
    ASSERT(!callFrame->hasHostCallFrameFlag());
    if (!callFrame) {
        // The entire stack has already been unwound. Nothing more to handle.
        return encode(uncaughtExceptionHandler());
    }

    VM* vm = callFrame->codeBlock()->vm();
    vm->topCallFrame = callFrame;
    return encode(jitThrowNew(vm, callFrame, vm->exception));
}
#else
ExceptionHandler JIT_STUB cti_vm_handle_exception(CallFrame* callFrame)
{
    ASSERT(!callFrame->hasHostCallFrameFlag());
    if (!callFrame) {
        // The entire stack has already been unwound. Nothing more to handle.
        return uncaughtExceptionHandler();
    }

    VM* vm = callFrame->codeBlock()->vm();
    vm->topCallFrame = callFrame;
    return jitThrowNew(vm, callFrame, vm->exception);
}
#endif

DEFINE_STUB_FUNCTION(EncodedJSValue, to_object)
{
    STUB_INIT_STACK_FRAME(stackFrame);

    CallFrame* callFrame = stackFrame.callFrame;
    return JSValue::encode(stackFrame.args[0].jsValue().toObject(callFrame));
}

DEFINE_STUB_FUNCTION(EncodedJSValue, op_resolve_scope)
{
    STUB_INIT_STACK_FRAME(stackFrame);
    ExecState* exec = stackFrame.callFrame;
    Instruction* pc = stackFrame.args[0].pc();

    const Identifier& ident = exec->codeBlock()->identifier(pc[2].u.operand);
    return JSValue::encode(JSScope::resolve(exec, exec->scope(), ident));
}

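// op_get_from_scope and op_put_to_scope take their operands straight from the
// instruction stream (scope register, identifier index, and a
// ResolveModeAndType in pc[4]) and, for global-property resolves, cache the
// scope's Structure and property offset back into pc[5]/pc[6] under the
// CodeBlock's lock so the patched fast path can reuse them.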
DEFINE_STUB_FUNCTION(EncodedJSValue, op_get_from_scope)
{
    STUB_INIT_STACK_FRAME(stackFrame);
    ExecState* exec = stackFrame.callFrame;
    Instruction* pc = stackFrame.args[0].pc();

    const Identifier& ident = exec->codeBlock()->identifier(pc[3].u.operand);
    JSObject* scope = jsCast<JSObject*>(exec->uncheckedR(pc[2].u.operand).jsValue());
    ResolveModeAndType modeAndType(pc[4].u.operand);

    PropertySlot slot(scope);
    if (!scope->getPropertySlot(exec, ident, slot)) {
        if (modeAndType.mode() == ThrowIfNotFound) {
            throwError(exec, createUndefinedVariableError(exec, ident));
            VM_THROW_EXCEPTION();
        }
        return JSValue::encode(jsUndefined());
    }

    // Covers implicit globals. Since they don't exist until they first execute, we didn't know how to cache them at compile time.
    if (slot.isCacheableValue() && slot.slotBase() == scope && scope->structure()->propertyAccessesAreCacheable()) {
        if (modeAndType.type() == GlobalProperty || modeAndType.type() == GlobalPropertyWithVarInjectionChecks) {
            CodeBlock* codeBlock = exec->codeBlock();
            ConcurrentJITLocker locker(codeBlock->m_lock);
            pc[5].u.structure.set(exec->vm(), codeBlock->ownerExecutable(), scope->structure());
            pc[6].u.operand = slot.cachedOffset();
        }
    }

    return JSValue::encode(slot.getValue(exec, ident));
}

DEFINE_STUB_FUNCTION(void, op_put_to_scope)
{
    STUB_INIT_STACK_FRAME(stackFrame);
    ExecState* exec = stackFrame.callFrame;
    Instruction* pc = stackFrame.args[0].pc();

    CodeBlock* codeBlock = exec->codeBlock();
    const Identifier& ident = codeBlock->identifier(pc[2].u.operand);
    JSObject* scope = jsCast<JSObject*>(exec->uncheckedR(pc[1].u.operand).jsValue());
    JSValue value = exec->r(pc[3].u.operand).jsValue();
    ResolveModeAndType modeAndType = ResolveModeAndType(pc[4].u.operand);

    if (modeAndType.mode() == ThrowIfNotFound && !scope->hasProperty(exec, ident)) {
        throwError(exec, createUndefinedVariableError(exec, ident));
        VM_THROW_EXCEPTION_AT_END();
        return;
    }

    PutPropertySlot slot(codeBlock->isStrictMode());
    scope->methodTable()->put(scope, exec, ident, value, slot);

    // Covers implicit globals. Since they don't exist until they first execute, we didn't know how to cache them at compile time.
    if (modeAndType.type() == GlobalProperty || modeAndType.type() == GlobalPropertyWithVarInjectionChecks) {
        if (slot.isCacheable() && slot.base() == scope && scope->structure()->propertyAccessesAreCacheable()) {
            ConcurrentJITLocker locker(codeBlock->m_lock);
            pc[5].u.structure.set(exec->vm(), codeBlock->ownerExecutable(), scope->structure());
            pc[6].u.operand = slot.cachedOffset();
        }
    }
}

} // namespace JSC

#endif // ENABLE(JIT)