2 * Copyright (C) 2008, 2009, 2013 Apple Inc. All rights reserved.
3 * Copyright (C) 2008 Cameron Zwarich <cwzwarich@uwaterloo.ca>
4 * Copyright (C) Research In Motion Limited 2010, 2011. All rights reserved.
6 * Redistribution and use in source and binary forms, with or without
7 * modification, are permitted provided that the following conditions
10 * 1. Redistributions of source code must retain the above copyright
11 * notice, this list of conditions and the following disclaimer.
12 * 2. Redistributions in binary form must reproduce the above copyright
13 * notice, this list of conditions and the following disclaimer in the
14 * documentation and/or other materials provided with the distribution.
15 * 3. Neither the name of Apple Computer, Inc. ("Apple") nor the names of
16 * its contributors may be used to endorse or promote products derived
17 * from this software without specific prior written permission.
19 * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
20 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
21 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
22 * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
23 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
24 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
25 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
26 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
27 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
28 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
36 #include "Arguments.h"
37 #include "ArrayConstructor.h"
38 #include "CallFrame.h"
39 #include "CallFrameInlines.h"
40 #include "CodeBlock.h"
41 #include "CodeProfiling.h"
42 #include "CommonSlowPaths.h"
43 #include "DFGOSREntry.h"
44 #include "DFGWorklist.h"
47 #include "ErrorInstance.h"
48 #include "ExceptionHelpers.h"
49 #include "GetterSetter.h"
51 #include <wtf/InlineASM.h>
53 #include "JITExceptions.h"
54 #include "JSActivation.h"
56 #include "JSFunction.h"
57 #include "JSGlobalObjectFunctions.h"
58 #include "JSNameScope.h"
59 #include "JSNotAnObject.h"
60 #include "JSPropertyNameIterator.h"
62 #include "JSWithScope.h"
63 #include "LegacyProfiler.h"
64 #include "NameInstance.h"
65 #include "ObjectConstructor.h"
66 #include "ObjectPrototype.h"
67 #include "Operations.h"
69 #include "RegExpObject.h"
70 #include "RegExpPrototype.h"
72 #include "RepatchBuffer.h"
73 #include "SamplingTool.h"
74 #include "SlowPathCall.h"
76 #include "StructureRareDataInlines.h"
77 #include <wtf/StdLibExtras.h>
83 #if CPU(ARM_TRADITIONAL)
84 #include "JITStubsARM.h"
86 #include "JITStubsARMv7.h"
88 #include "JITStubsMIPS.h"
90 #include "JITStubsSH4.h"
92 #include "JITStubsX86.h"
94 #include "JITStubsX86_64.h"
96 #error "JIT not supported on this platform."
// Opcode-sampling hook: when OPCODE_SAMPLING is compiled in, CTI_SAMPLER
// resolves to the interpreter's SamplingTool; otherwise it is 0 and the
// sampling calls compile away. NOTE(review): the #else/#endif lines are not
// visible in this excerpt.
101 #if ENABLE(OPCODE_SAMPLING)
102 #define CTI_SAMPLER stackFrame.vm->interpreter->sampler()
104 #define CTI_SAMPLER 0
// Runs the per-CPU JIT stub layout assertions (offsets baked into the
// hand-written assembly trampolines). Bails out early when this VM cannot use
// the JIT at all. NOTE(review): the surrounding #if CPU(...) lines for ARMv7,
// MIPS and SH4 are not visible in this excerpt.
107 void performPlatformSpecificJITAssertions(VM* vm)
109 if (!vm->canUseJIT())
113 performARMv7JITAssertions();
114 #elif CPU(ARM_TRADITIONAL)
115 performARMJITAssertions();
117 performMIPSJITAssertions();
119 performSH4JITAssertions();
// Attempts to install an inline cache for a put_by_id at the call site
// identified by |returnAddress|. On every uncacheable case it repatches the
// call to the matching generic stub (direct vs. non-direct) so the slow path
// stops being re-entered. Holds the code block's ConcurrentJITLocker for the
// whole attempt. NOTE(review): the early-return lines after each repatch are
// not visible in this excerpt.
123 NEVER_INLINE static void tryCachePutByID(CallFrame* callFrame, CodeBlock* codeBlock, ReturnAddressPtr returnAddress, JSValue baseValue, const PutPropertySlot& slot, StructureStubInfo* stubInfo, bool direct)
125 ConcurrentJITLocker locker(codeBlock->m_lock);
127 // The interpreter checks for recursion here; I do not believe this can occur in CTI.
129 if (!baseValue.isCell())
132 // Uncacheable: give up.
133 if (!slot.isCacheable()) {
134 ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(direct ? cti_op_put_by_id_direct_generic : cti_op_put_by_id_generic));
138 JSCell* baseCell = baseValue.asCell();
139 Structure* structure = baseCell->structure();
// Uncacheable dictionaries and types that prohibit property caching can
// never be specialized.
141 if (structure->isUncacheableDictionary() || structure->typeInfo().prohibitsPropertyCaching()) {
142 ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(direct ? cti_op_put_by_id_direct_generic : cti_op_put_by_id_generic));
146 // If baseCell != base, then baseCell must be a proxy for another object.
147 if (baseCell != slot.base()) {
148 ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(direct ? cti_op_put_by_id_direct_generic : cti_op_put_by_id_generic));
152 // Cache hit: Specialize instruction and ref Structures.
154 // Structure transition, cache transition info
155 if (slot.type() == PutPropertySlot::NewProperty) {
156 if (structure->isDictionary()) {
157 ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(direct ? cti_op_put_by_id_direct_generic : cti_op_put_by_id_generic));
161 // put_by_id_transition checks the prototype chain for setters.
162 if (normalizePrototypeChain(callFrame, baseCell) == InvalidPrototypeChain) {
163 ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(direct ? cti_op_put_by_id_direct_generic : cti_op_put_by_id_generic))
167 StructureChain* prototypeChain = structure->prototypeChain(callFrame);
168 ASSERT(structure->previousID()->transitionWatchpointSetHasBeenInvalidated());
// New property added: compile a transition stub from previousID() to the
// new structure, carrying the prototype chain for setter checks.
169 stubInfo->initPutByIdTransition(callFrame->vm(), codeBlock->ownerExecutable(), structure->previousID(), structure, prototypeChain, direct);
170 JIT::compilePutByIdTransition(callFrame->scope()->vm(), codeBlock, stubInfo, structure->previousID(), structure, slot.cachedOffset(), prototypeChain, returnAddress, direct);
// Existing property: patch the fast path into a plain replace at the
// cached offset.
174 stubInfo->initPutByIdReplace(callFrame->vm(), codeBlock->ownerExecutable(), structure);
176 JIT::patchPutByIdReplace(codeBlock, stubInfo, structure, slot.cachedOffset(), returnAddress, direct);
// Attempts to install an inline cache for a get_by_id at |returnAddress|:
// self access, array/string length, prototype access, or prototype-chain
// access. Uncacheable cases set access_get_by_id_generic and repatch to the
// generic stub. Holds the code block's ConcurrentJITLocker throughout.
// NOTE(review): the early-return lines after the repatch calls are not
// visible in this excerpt.
179 NEVER_INLINE static void tryCacheGetByID(CallFrame* callFrame, CodeBlock* codeBlock, ReturnAddressPtr returnAddress, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo* stubInfo)
181 ConcurrentJITLocker locker(codeBlock->m_lock);
183 // FIXME: Write a test that proves we need to check for recursion here just
184 // like the interpreter does, then add a check for recursion.
186 // FIXME: Cache property access for immediates.
187 if (!baseValue.isCell()) {
188 ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(cti_op_get_by_id_generic));
192 VM* vm = &callFrame->vm();
// Special-case array.length: patch in the dedicated array-length accessor.
194 if (isJSArray(baseValue) && propertyName == callFrame->propertyNames().length) {
195 JIT::compilePatchGetArrayLength(callFrame->scope()->vm(), codeBlock, returnAddress);
199 if (isJSString(baseValue) && propertyName == callFrame->propertyNames().length) {
200 // The tradeoff of compiling an patched inline string length access routine does not seem
201 // to pay off, so we currently only do this for arrays.
202 ctiPatchCallByReturnAddress(codeBlock, returnAddress, vm->getCTIStub(stringLengthTrampolineGenerator).code());
206 // Uncacheable: give up.
207 if (!slot.isCacheable()) {
208 stubInfo->accessType = access_get_by_id_generic;
209 ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(cti_op_get_by_id_generic));
213 JSCell* baseCell = baseValue.asCell();
214 Structure* structure = baseCell->structure();
216 if (structure->isUncacheableDictionary() || structure->typeInfo().prohibitsPropertyCaching()) {
217 stubInfo->accessType = access_get_by_id_generic;
218 ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(cti_op_get_by_id_generic));
222 // Cache hit: Specialize instruction and ref Structures.
// Property lives on the base object itself: patch a self access, provided
// the offset fits the compact patched-load encoding.
224 if (slot.slotBase() == baseValue) {
225 RELEASE_ASSERT(stubInfo->accessType == access_unset);
226 if (!slot.isCacheableValue() || !MacroAssembler::isCompactPtrAlignedAddressOffset(maxOffsetRelativeToPatchedStorage(slot.cachedOffset())))
227 ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(cti_op_get_by_id_self_fail));
229 JIT::patchGetByIdSelf(codeBlock, stubInfo, structure, slot.cachedOffset(), returnAddress);
230 stubInfo->initGetByIdSelf(callFrame->vm(), codeBlock->ownerExecutable(), structure);
235 if (structure->isDictionary()) {
236 stubInfo->accessType = access_get_by_id_generic;
237 ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(cti_op_get_by_id_generic));
// Property lives on the direct prototype: compile a proto-access stub.
241 if (slot.slotBase() == structure->prototypeForLookup(callFrame)) {
242 JSObject* slotBaseObject = asObject(slot.slotBase());
243 size_t offset = slot.cachedOffset();
245 if (structure->typeInfo().hasImpureGetOwnPropertySlot()) {
246 stubInfo->accessType = access_get_by_id_generic;
247 ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(cti_op_get_by_id_generic));
251 // Since we're accessing a prototype in a loop, it's a good bet that it
252 // should not be treated as a dictionary.
253 if (slotBaseObject->structure()->isDictionary()) {
// Flattening changes the layout, so re-fetch the property's offset.
254 slotBaseObject->flattenDictionaryObject(callFrame->vm());
255 offset = slotBaseObject->structure()->get(callFrame->vm(), propertyName);
258 stubInfo->initGetByIdProto(callFrame->vm(), codeBlock->ownerExecutable(), structure, slotBaseObject->structure(), slot.isCacheableValue());
260 ASSERT(!structure->isDictionary());
261 ASSERT(!slotBaseObject->structure()->isDictionary());
262 JIT::compileGetByIdProto(callFrame->scope()->vm(), callFrame, codeBlock, stubInfo, structure, slotBaseObject->structure(), propertyName, slot, offset, returnAddress);
// Property lives further down the prototype chain: compile a chain stub
// after validating/normalizing the chain.
266 PropertyOffset offset = slot.cachedOffset();
267 size_t count = normalizePrototypeChainForChainAccess(callFrame, baseValue, slot.slotBase(), propertyName, offset);
268 if (count == InvalidPrototypeChain) {
269 stubInfo->accessType = access_get_by_id_generic;
270 ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(cti_op_get_by_id_generic));
274 StructureChain* prototypeChain = structure->prototypeChain(callFrame);
275 stubInfo->initGetByIdChain(callFrame->vm(), codeBlock->ownerExecutable(), structure, prototypeChain, count, slot.isCacheableValue());
276 JIT::compileGetByIdChain(callFrame->scope()->vm(), callFrame, codeBlock, stubInfo, structure, prototypeChain, count, propertyName, slot, offset, returnAddress);
// Dummy function whose address is planted as the fake return address by
// StackHack (below) so stack-logging tools attribute JIT-stub frames to a
// recognizable symbol. NOTE(review): the function body is not visible in
// this excerpt.
283 static void jscGeneratedNativeCode()
285 // When executing a JIT stub function (which might do an allocation), we hack the return address
286 // to pretend to be executing this function, to keep stack logging tools from blowing out
// StackHack members (RAII): on construction, saves the stub's real return
// address and, when code profiling is active, substitutes the address of
// jscGeneratedNativeCode; the destructor restores the saved address.
// NOTE(review): the enclosing struct declaration is not visible in this
// excerpt; the CodeProfiling::enabled() polarity below cannot be fully
// confirmed without the missing surrounding lines.
293 ALWAYS_INLINE StackHack(JITStackFrame& stackFrame)
294 : stackFrame(stackFrame)
295 , savedReturnAddress(*stackFrame.returnAddressSlot())
297 if (!CodeProfiling::enabled())
298 *stackFrame.returnAddressSlot() = ReturnAddressPtr(FunctionPtr(jscGeneratedNativeCode));
// Restore the genuine return address before the stub returns.
301 ALWAYS_INLINE ~StackHack()
303 *stackFrame.returnAddressSlot() = savedReturnAddress;
306 JITStackFrame& stackFrame;
307 ReturnAddressPtr savedReturnAddress;
// Two variants of the stub prologue macros: the first set routes the return
// address through a StackHack (so STUB_RETURN_ADDRESS reads the saved copy);
// the second set accesses the JITStackFrame's return-address slot directly.
// NOTE(review): the #if/#else/#endif lines selecting between them are not
// visible in this excerpt.
310 #define STUB_INIT_STACK_FRAME(stackFrame) JITStackFrame& stackFrame = *reinterpret_cast_ptr<JITStackFrame*>(STUB_ARGS); StackHack stackHack(stackFrame)
311 #define STUB_SET_RETURN_ADDRESS(returnAddress) stackHack.savedReturnAddress = ReturnAddressPtr(returnAddress)
312 #define STUB_RETURN_ADDRESS stackHack.savedReturnAddress
// Non-StackHack variants: operate on the frame's return-address slot itself.
316 #define STUB_INIT_STACK_FRAME(stackFrame) JITStackFrame& stackFrame = *reinterpret_cast_ptr<JITStackFrame*>(STUB_ARGS)
317 #define STUB_SET_RETURN_ADDRESS(returnAddress) *stackFrame.returnAddressSlot() = ReturnAddressPtr(returnAddress)
318 #define STUB_RETURN_ADDRESS *stackFrame.returnAddressSlot()
322 // The reason this is not inlined is to avoid having to do a PIC branch
323 // to get the address of the ctiVMThrowTrampoline function. It's also
324 // good to keep the code size down by leaving as much of the exception
325 // handling code out of line as possible.
// Records where the exception was raised and redirects the stub's return
// address to ctiVMThrowTrampoline, so returning from the stub enters the
// throw machinery instead of the patched call site.
326 static NEVER_INLINE void returnToThrowTrampoline(VM* vm, ReturnAddressPtr exceptionLocation, ReturnAddressPtr& returnAddressSlot)
328 RELEASE_ASSERT(vm->exception());
329 vm->exceptionLocation = exceptionLocation;
330 returnAddressSlot = ReturnAddressPtr(FunctionPtr(ctiVMThrowTrampoline));
// Exception plumbing macros used by the cti_* stubs below: each checks
// stackFrame.vm->exception() and, when set, reroutes the stub's return
// address through returnToThrowTrampoline() via VM_THROW_EXCEPTION_AT_END().
// NOTE(review): the do { } while (0) wrappers and return statements of these
// multi-line macros are not visible in this excerpt; no comments are inserted
// between the lines because they are backslash continuations.
333 #define VM_THROW_EXCEPTION() \
335 VM_THROW_EXCEPTION_AT_END(); \
338 #define VM_THROW_EXCEPTION_AT_END() \
340 returnToThrowTrampoline(stackFrame.vm, STUB_RETURN_ADDRESS, STUB_RETURN_ADDRESS);\
343 #define CHECK_FOR_EXCEPTION() \
345 if (UNLIKELY(stackFrame.vm->exception())) \
346 VM_THROW_EXCEPTION(); \
348 #define CHECK_FOR_EXCEPTION_AT_END() \
350 if (UNLIKELY(stackFrame.vm->exception())) \
351 VM_THROW_EXCEPTION_AT_END(); \
353 #define CHECK_FOR_EXCEPTION_VOID() \
355 if (UNLIKELY(stackFrame.vm->exception())) { \
356 VM_THROW_EXCEPTION_AT_END(); \
// ErrorFunctor interface members: abstract callable that builds an error
// JSValue for a given ExecState. NOTE(review): the enclosing class
// declaration is not visible in this excerpt.
363 virtual ~ErrorFunctor() { }
364 virtual JSValue operator()(ExecState*) = 0;
// ErrorFunctor that wraps a factory taking only the ExecState (e.g.
// createStackOverflowError). NOTE(review): the member declaration and
// closing brace are not visible in this excerpt.
367 class ErrorWithExecFunctor : public ErrorFunctor {
369 typedef JSObject* (*Factory)(ExecState* exec);
371 ErrorWithExecFunctor(Factory factory)
375 JSValue operator()(ExecState* exec)
377 return m_factory(exec);
// ErrorFunctor that wraps a factory taking the ExecState plus a captured
// callee value. NOTE(review): the member declarations and closing brace are
// not visible in this excerpt.
384 class ErrorWithExecAndCalleeFunctor : public ErrorFunctor {
386 typedef JSObject* (*Factory)(ExecState* exec, JSValue callee);
388 ErrorWithExecAndCalleeFunctor(Factory factory, JSValue callee)
389 : m_factory(factory), m_callee(callee)
392 JSValue operator()(ExecState* exec)
394 return m_factory(exec, m_callee);
401 // Helper function for JIT stubs that may throw an exception in the middle of
402 // processing a function call. This function rolls back the stack to
403 // our caller, so exception processing can proceed from a valid state.
// Rewinds jitStackFrame/topCallFrame to the caller of |newCallFrame|,
// optionally throws an error built by |createError|, and reroutes the return
// address through the throw trampoline. NOTE(review): the guard around the
// createError call and the final return statement are not visible in this
// excerpt.
404 template<typename T> static T throwExceptionFromOpCall(JITStackFrame& jitStackFrame, CallFrame* newCallFrame, ReturnAddressPtr& returnAddressSlot, ErrorFunctor* createError = 0)
406 CallFrame* callFrame = newCallFrame->callerFrame()->removeHostCallFrameFlag();
407 jitStackFrame.callFrame = callFrame;
408 callFrame->vm().topCallFrame = callFrame;
410 callFrame->vm().throwException(callFrame, (*createError)(callFrame));
411 ASSERT(callFrame->vm().exception());
412 returnToThrowTrampoline(&callFrame->vm(), ReturnAddressPtr(newCallFrame->returnPC()), returnAddressSlot);
416 // If the CPU specific header does not provide an implementation, use the default one here.
417 #ifndef DEFINE_STUB_FUNCTION
// Expands to the signature of a cti_* stub callable from JIT code.
418 #define DEFINE_STUB_FUNCTION(rtype, op) rtype JIT_STUB cti_##op(STUB_ARGS_DECLARATION)
// Slow path for the watchdog-timer check: if the watchdog has fired, throws
// a TerminatedExecutionException.
421 DEFINE_STUB_FUNCTION(void, handle_watchdog_timer)
423 STUB_INIT_STACK_FRAME(stackFrame);
424 CallFrame* callFrame = stackFrame.callFrame;
425 VM* vm = stackFrame.vm;
426 if (UNLIKELY(vm->watchdog.didFire(callFrame))) {
427 vm->throwException(callFrame, createTerminatedExecutionException(vm));
428 VM_THROW_EXCEPTION_AT_END();
// Grows the JS stack to cover this frame's callee registers; on failure
// throws a stack-overflow error via throwExceptionFromOpCall. NOTE(review):
// the success-path return is not visible in this excerpt.
433 DEFINE_STUB_FUNCTION(void*, stack_check)
435 STUB_INIT_STACK_FRAME(stackFrame);
436 CallFrame* callFrame = stackFrame.callFrame;
438 if (UNLIKELY(!stackFrame.stack->grow(&callFrame->registers()[callFrame->codeBlock()->m_numCalleeRegisters]))) {
439 ErrorWithExecFunctor functor = ErrorWithExecFunctor(createStackOverflowError);
440 return throwExceptionFromOpCall<void*>(stackFrame, callFrame, STUB_RETURN_ADDRESS, &functor);
// Allocates an empty JSObject using the Structure passed in args[0].
446 DEFINE_STUB_FUNCTION(JSObject*, op_new_object)
448 STUB_INIT_STACK_FRAME(stackFrame);
450 return constructEmptyObject(stackFrame.callFrame, stackFrame.args[0].structure());
// Fully generic put_by_id: args[0].put(args[1], args[2]) with no inline
// caching; call sites are repatched here when caching gave up.
453 DEFINE_STUB_FUNCTION(void, op_put_by_id_generic)
455 STUB_INIT_STACK_FRAME(stackFrame);
457 PutPropertySlot slot(
458 stackFrame.callFrame->codeBlock()->isStrictMode(),
459 stackFrame.callFrame->codeBlock()->putByIdContext());
460 stackFrame.args[0].jsValue().put(stackFrame.callFrame, stackFrame.args[1].identifier(), stackFrame.args[2].jsValue(), slot);
461 CHECK_FOR_EXCEPTION_AT_END();
// Fully generic direct put_by_id: putDirect on the base object (bypasses the
// prototype chain / setters); no inline caching.
464 DEFINE_STUB_FUNCTION(void, op_put_by_id_direct_generic)
466 STUB_INIT_STACK_FRAME(stackFrame);
468 PutPropertySlot slot(
469 stackFrame.callFrame->codeBlock()->isStrictMode(),
470 stackFrame.callFrame->codeBlock()->putByIdContext());
471 JSValue baseValue = stackFrame.args[0].jsValue();
472 ASSERT(baseValue.isObject());
473 asObject(baseValue)->putDirect(stackFrame.callFrame->vm(), stackFrame.args[1].identifier(), stackFrame.args[2].jsValue(), slot);
474 CHECK_FOR_EXCEPTION_AT_END();
// Fully generic get_by_id: plain property lookup with no inline caching;
// call sites are repatched here when caching gave up.
477 DEFINE_STUB_FUNCTION(EncodedJSValue, op_get_by_id_generic)
479 STUB_INIT_STACK_FRAME(stackFrame);
481 CallFrame* callFrame = stackFrame.callFrame;
482 Identifier& ident = stackFrame.args[1].identifier();
484 JSValue baseValue = stackFrame.args[0].jsValue();
485 PropertySlot slot(baseValue);
486 JSValue result = baseValue.get(callFrame, ident, slot);
488 CHECK_FOR_EXCEPTION_AT_END();
489 return JSValue::encode(result);
// put_by_id slow path that additionally tries to install an inline cache.
// Caching is attempted only if the put itself did not change the stub's
// accessType (re-entrancy guard via the accessType snapshot).
492 DEFINE_STUB_FUNCTION(void, op_put_by_id)
494 STUB_INIT_STACK_FRAME(stackFrame);
495 CallFrame* callFrame = stackFrame.callFrame;
496 Identifier& ident = stackFrame.args[1].identifier();
498 CodeBlock* codeBlock = stackFrame.callFrame->codeBlock();
499 StructureStubInfo* stubInfo = &codeBlock->getStubInfo(STUB_RETURN_ADDRESS);
500 AccessType accessType = static_cast<AccessType>(stubInfo->accessType);
502 PutPropertySlot slot(
503 callFrame->codeBlock()->isStrictMode(),
504 callFrame->codeBlock()->putByIdContext());
505 stackFrame.args[0].jsValue().put(callFrame, ident, stackFrame.args[2].jsValue(), slot);
// Only cache if the stub is still in the state we sampled before the put.
507 if (accessType == static_cast<AccessType>(stubInfo->accessType)) {
509 tryCachePutByID(callFrame, codeBlock, STUB_RETURN_ADDRESS, stackFrame.args[0].jsValue(), slot, stubInfo, false);
512 CHECK_FOR_EXCEPTION_AT_END();
// Direct put_by_id slow path (putDirect, bypassing the prototype chain) that
// additionally tries to install an inline cache, guarded by the same
// accessType snapshot as cti_op_put_by_id.
515 DEFINE_STUB_FUNCTION(void, op_put_by_id_direct)
517 STUB_INIT_STACK_FRAME(stackFrame);
518 CallFrame* callFrame = stackFrame.callFrame;
519 Identifier& ident = stackFrame.args[1].identifier();
521 CodeBlock* codeBlock = stackFrame.callFrame->codeBlock();
522 StructureStubInfo* stubInfo = &codeBlock->getStubInfo(STUB_RETURN_ADDRESS);
523 AccessType accessType = static_cast<AccessType>(stubInfo->accessType);
525 PutPropertySlot slot(
526 callFrame->codeBlock()->isStrictMode(),
527 callFrame->codeBlock()->putByIdContext());
528 JSValue baseValue = stackFrame.args[0].jsValue();
529 ASSERT(baseValue.isObject());
531 asObject(baseValue)->putDirect(callFrame->vm(), ident, stackFrame.args[2].jsValue(), slot);
// Only cache if the stub is still in the state we sampled before the put.
533 if (accessType == static_cast<AccessType>(stubInfo->accessType)) {
535 tryCachePutByID(callFrame, codeBlock, STUB_RETURN_ADDRESS, stackFrame.args[0].jsValue(), slot, stubInfo, true);
538 CHECK_FOR_EXCEPTION_AT_END();
// put_by_id slow path used after a cache attempt has failed: performs the
// put with no further caching attempt.
541 DEFINE_STUB_FUNCTION(void, op_put_by_id_fail)
543 STUB_INIT_STACK_FRAME(stackFrame);
545 CallFrame* callFrame = stackFrame.callFrame;
546 Identifier& ident = stackFrame.args[1].identifier();
548 PutPropertySlot slot(
549 callFrame->codeBlock()->isStrictMode(),
550 callFrame->codeBlock()->putByIdContext());
551 stackFrame.args[0].jsValue().put(callFrame, ident, stackFrame.args[2].jsValue(), slot);
553 CHECK_FOR_EXCEPTION_AT_END();
// Direct put_by_id slow path used after a cache attempt has failed: putDirect
// with no further caching attempt.
556 DEFINE_STUB_FUNCTION(void, op_put_by_id_direct_fail)
558 STUB_INIT_STACK_FRAME(stackFrame);
560 CallFrame* callFrame = stackFrame.callFrame;
561 Identifier& ident = stackFrame.args[1].identifier();
563 PutPropertySlot slot(
564 callFrame->codeBlock()->isStrictMode(),
565 callFrame->codeBlock()->putByIdContext());
566 JSValue baseValue = stackFrame.args[0].jsValue();
567 ASSERT(baseValue.isObject());
568 asObject(baseValue)->putDirect(callFrame->vm(), ident, stackFrame.args[2].jsValue(), slot);
570 CHECK_FOR_EXCEPTION_AT_END();
// Out-of-line helper for a cached put_by_id structure transition that needs
// more out-of-line property storage: grows the butterfly from args[3]
// (oldSize) to the new structure's capacity and installs the new structure.
// NOTE(review): the return statement is not visible in this excerpt.
573 DEFINE_STUB_FUNCTION(JSObject*, op_put_by_id_transition_realloc)
575 STUB_INIT_STACK_FRAME(stackFrame);
577 JSValue baseValue = stackFrame.args[0].jsValue();
578 int32_t oldSize = stackFrame.args[3].int32();
579 Structure* newStructure = stackFrame.args[4].structure();
580 int32_t newSize = newStructure->outOfLineCapacity();
582 ASSERT(oldSize >= 0);
583 ASSERT(newSize > oldSize);
585 ASSERT(baseValue.isObject());
586 JSObject* base = asObject(baseValue);
587 VM& vm = *stackFrame.vm;
588 Butterfly* butterfly = base->growOutOfLineStorage(vm, oldSize, newSize);
// Install structure and storage together so the object is never observed in
// a mixed state.
589 base->setStructureAndButterfly(vm, newStructure, butterfly);
// get_by_id slow path that attempts to install an inline cache via
// tryCacheGetByID, but only after the stub has been hit once (seenOnce) and
// only if the get itself did not change the stub's accessType.
594 DEFINE_STUB_FUNCTION(EncodedJSValue, op_get_by_id)
596 STUB_INIT_STACK_FRAME(stackFrame);
597 CallFrame* callFrame = stackFrame.callFrame;
598 Identifier& ident = stackFrame.args[1].identifier();
600 CodeBlock* codeBlock = stackFrame.callFrame->codeBlock();
601 StructureStubInfo* stubInfo = &codeBlock->getStubInfo(STUB_RETURN_ADDRESS);
602 AccessType accessType = static_cast<AccessType>(stubInfo->accessType);
604 JSValue baseValue = stackFrame.args[0].jsValue();
605 PropertySlot slot(baseValue);
606 JSValue result = baseValue.get(callFrame, ident, slot);
// Re-entrancy guard: the get may have mutated the stub already.
608 if (accessType != static_cast<AccessType>(stubInfo->accessType))
609 return JSValue::encode(result);
// Defer caching until the second hit so one-shot accesses stay cheap.
611 if (!stubInfo->seenOnce())
614 tryCacheGetByID(callFrame, codeBlock, STUB_RETURN_ADDRESS, baseValue, ident, slot, stubInfo);
616 CHECK_FOR_EXCEPTION_AT_END();
617 return JSValue::encode(result);
// Called when a monomorphic self-access get_by_id cache misses: performs the
// get, then (under the code block's lock) upgrades the cache to a
// polymorphic self-access list, appending the new structure. When the list
// fills up (POLYMORPHIC_LIST_CACHE_SIZE), the call site is repatched to the
// generic stub.
620 DEFINE_STUB_FUNCTION(EncodedJSValue, op_get_by_id_self_fail)
622 STUB_INIT_STACK_FRAME(stackFrame);
624 CallFrame* callFrame = stackFrame.callFrame;
625 Identifier& ident = stackFrame.args[1].identifier();
627 CodeBlock* codeBlock = callFrame->codeBlock();
628 StructureStubInfo* stubInfo = &codeBlock->getStubInfo(STUB_RETURN_ADDRESS);
629 AccessType accessType = static_cast<AccessType>(stubInfo->accessType);
631 JSValue baseValue = stackFrame.args[0].jsValue();
632 PropertySlot slot(baseValue);
633 JSValue result = baseValue.get(callFrame, ident, slot);
// Re-entrancy guard: the get may have mutated the stub already.
635 if (accessType != static_cast<AccessType>(stubInfo->accessType))
636 return JSValue::encode(result);
638 CHECK_FOR_EXCEPTION();
640 ConcurrentJITLocker locker(codeBlock->m_lock);
// Only a cacheable value on the base object itself qualifies for the
// polymorphic self list.
642 if (baseValue.isCell()
643 && slot.isCacheable()
644 && !baseValue.asCell()->structure()->isUncacheableDictionary()
645 && slot.slotBase() == baseValue) {
647 PolymorphicAccessStructureList* polymorphicStructureList;
650 if (stubInfo->accessType == access_unset)
651 stubInfo->initGetByIdSelf(callFrame->vm(), codeBlock->ownerExecutable(), baseValue.asCell()->structure());
// Promote a monomorphic self cache into a one-entry polymorphic list.
653 if (stubInfo->accessType == access_get_by_id_self) {
654 ASSERT(!stubInfo->stubRoutine);
655 polymorphicStructureList = new PolymorphicAccessStructureList(callFrame->vm(), codeBlock->ownerExecutable(), 0, stubInfo->u.getByIdSelf.baseObjectStructure.get(), true);
656 stubInfo->initGetByIdSelfList(polymorphicStructureList, 1);
658 polymorphicStructureList = stubInfo->u.getByIdSelfList.structureList;
659 listIndex = stubInfo->u.getByIdSelfList.listSize;
661 if (listIndex < POLYMORPHIC_LIST_CACHE_SIZE) {
662 stubInfo->u.getByIdSelfList.listSize++;
663 JIT::compileGetByIdSelfList(callFrame->scope()->vm(), codeBlock, stubInfo, polymorphicStructureList, listIndex, baseValue.asCell()->structure(), ident, slot, slot.cachedOffset());
// List is now full: fall back to the generic stub from here on.
665 if (listIndex == (POLYMORPHIC_LIST_CACHE_SIZE - 1))
666 ctiPatchCallByReturnAddress(codeBlock, STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id_generic));
669 ctiPatchCallByReturnAddress(callFrame->codeBlock(), STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id_generic));
670 return JSValue::encode(result);
// Returns the PolymorphicAccessStructureList for a get_by_id stub, creating
// it if the stub is still monomorphic: a proto or chain stub is converted
// into a two-entry list (seeding it with the existing stub routine), while
// an existing proto-list has its size bumped. |listIndex| receives the slot
// the caller should fill. NOTE(review): the break statements between the
// switch cases are not visible in this excerpt.
673 static PolymorphicAccessStructureList* getPolymorphicAccessStructureListSlot(VM& vm, ScriptExecutable* owner, StructureStubInfo* stubInfo, int& listIndex)
675 PolymorphicAccessStructureList* prototypeStructureList = 0;
678 switch (stubInfo->accessType) {
679 case access_get_by_id_proto:
680 prototypeStructureList = new PolymorphicAccessStructureList(vm, owner, stubInfo->stubRoutine, stubInfo->u.getByIdProto.baseObjectStructure.get(), stubInfo->u.getByIdProto.prototypeStructure.get(), true);
681 stubInfo->stubRoutine.clear();
682 stubInfo->initGetByIdProtoList(prototypeStructureList, 2);
684 case access_get_by_id_chain:
685 prototypeStructureList = new PolymorphicAccessStructureList(vm, owner, stubInfo->stubRoutine, stubInfo->u.getByIdChain.baseObjectStructure.get(), stubInfo->u.getByIdChain.chain.get(), true);
686 stubInfo->stubRoutine.clear();
687 stubInfo->initGetByIdProtoList(prototypeStructureList, 2);
689 case access_get_by_id_proto_list:
690 prototypeStructureList = stubInfo->u.getByIdProtoList.structureList;
691 listIndex = stubInfo->u.getByIdProtoList.listSize;
// Reserve the next slot only while there is still room in the list.
692 if (listIndex < POLYMORPHIC_LIST_CACHE_SIZE)
693 stubInfo->u.getByIdProtoList.listSize++;
696 RELEASE_ASSERT_NOT_REACHED();
699 ASSERT(listIndex <= POLYMORPHIC_LIST_CACHE_SIZE);
700 return prototypeStructureList;
// Invokes a cached JS getter (args[1]) on the base object (args[0]); on
// exception, reroutes to the throw trampoline using the return address
// passed in args[2].
703 DEFINE_STUB_FUNCTION(EncodedJSValue, op_get_by_id_getter_stub)
705 STUB_INIT_STACK_FRAME(stackFrame);
706 CallFrame* callFrame = stackFrame.callFrame;
707 JSValue result = callGetter(callFrame, stackFrame.args[1].jsObject(), stackFrame.args[0].jsObject());
708 if (callFrame->hadException())
709 returnToThrowTrampoline(&callFrame->vm(), stackFrame.args[2].returnAddress(), STUB_RETURN_ADDRESS);
711 return JSValue::encode(result);
// Invokes a cached custom native getter (GetValueFunc in args[1]) on the
// slot base (args[0]) for identifier args[2]; on exception, reroutes to the
// throw trampoline using the return address in args[3].
714 DEFINE_STUB_FUNCTION(EncodedJSValue, op_get_by_id_custom_stub)
716 STUB_INIT_STACK_FRAME(stackFrame);
717 CallFrame* callFrame = stackFrame.callFrame;
718 JSObject* slotBase = stackFrame.args[0].jsObject();
719 PropertySlot::GetValueFunc getter = reinterpret_cast<PropertySlot::GetValueFunc>(stackFrame.args[1].asPointer);
720 const Identifier& ident = stackFrame.args[2].identifier();
721 JSValue result = getter(callFrame, slotBase, ident);
722 if (callFrame->hadException())
723 returnToThrowTrampoline(&callFrame->vm(), stackFrame.args[3].returnAddress(), STUB_RETURN_ADDRESS);
725 return JSValue::encode(result);
// Called when a prototype/chain get_by_id cache misses: performs the get,
// then (under the code block's lock) appends a new proto- or chain-access
// case to the polymorphic list. Uncacheable situations repatch the call site
// to cti_op_get_by_id_proto_fail; a full list repatches to
// cti_op_get_by_id_proto_list_full.
728 DEFINE_STUB_FUNCTION(EncodedJSValue, op_get_by_id_proto_list)
730 STUB_INIT_STACK_FRAME(stackFrame);
732 CallFrame* callFrame = stackFrame.callFrame;
733 const Identifier& propertyName = stackFrame.args[1].identifier();
735 CodeBlock* codeBlock = callFrame->codeBlock();
736 StructureStubInfo* stubInfo = &codeBlock->getStubInfo(STUB_RETURN_ADDRESS);
737 AccessType accessType = static_cast<AccessType>(stubInfo->accessType);
739 JSValue baseValue = stackFrame.args[0].jsValue();
740 PropertySlot slot(baseValue);
741 JSValue result = baseValue.get(callFrame, propertyName, slot);
743 CHECK_FOR_EXCEPTION();
// Give up if the stub changed underneath us or the access is uncacheable.
745 if (accessType != static_cast<AccessType>(stubInfo->accessType)
746 || !baseValue.isCell()
747 || !slot.isCacheable()
748 || baseValue.asCell()->structure()->isDictionary()
749 || baseValue.asCell()->structure()->typeInfo().prohibitsPropertyCaching()) {
750 ctiPatchCallByReturnAddress(callFrame->codeBlock(), STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id_proto_fail));
751 return JSValue::encode(result);
754 ConcurrentJITLocker locker(codeBlock->m_lock);
756 Structure* structure = baseValue.asCell()->structure();
758 JSObject* slotBaseObject = asObject(slot.slotBase());
760 PropertyOffset offset = slot.cachedOffset();
// Self accesses do not belong in a proto list; hand back to the fail stub.
762 if (slot.slotBase() == baseValue)
763 ctiPatchCallByReturnAddress(codeBlock, STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id_proto_fail));
// Direct-prototype hit: append a proto-access case.
764 else if (slot.slotBase() == baseValue.asCell()->structure()->prototypeForLookup(callFrame)) {
765 ASSERT(!baseValue.asCell()->structure()->isDictionary());
767 if (baseValue.asCell()->structure()->typeInfo().hasImpureGetOwnPropertySlot()) {
768 ctiPatchCallByReturnAddress(codeBlock, STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id_proto_fail));
769 return JSValue::encode(result);
772 // Since we're accessing a prototype in a loop, it's a good bet that it
773 // should not be treated as a dictionary.
774 if (slotBaseObject->structure()->isDictionary()) {
// Flattening changes the layout, so re-fetch the property's offset.
775 slotBaseObject->flattenDictionaryObject(callFrame->vm());
776 offset = slotBaseObject->structure()->get(callFrame->vm(), propertyName);
780 PolymorphicAccessStructureList* prototypeStructureList = getPolymorphicAccessStructureListSlot(callFrame->vm(), codeBlock->ownerExecutable(), stubInfo, listIndex);
781 if (listIndex < POLYMORPHIC_LIST_CACHE_SIZE) {
782 JIT::compileGetByIdProtoList(callFrame->scope()->vm(), callFrame, codeBlock, stubInfo, prototypeStructureList, listIndex, structure, slotBaseObject->structure(), propertyName, slot, offset);
// List is now full: route future misses to the list-full stub.
784 if (listIndex == (POLYMORPHIC_LIST_CACHE_SIZE - 1))
785 ctiPatchCallByReturnAddress(codeBlock, STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id_proto_list_full));
// Deeper prototype-chain hit: append a chain-access case after validating
// the chain.
788 size_t count = normalizePrototypeChainForChainAccess(callFrame, baseValue, slot.slotBase(), propertyName, offset);
789 if (count == InvalidPrototypeChain) {
790 ctiPatchCallByReturnAddress(codeBlock, STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id_proto_fail));
791 return JSValue::encode(result);
794 ASSERT(!baseValue.asCell()->structure()->isDictionary());
796 PolymorphicAccessStructureList* prototypeStructureList = getPolymorphicAccessStructureListSlot(callFrame->vm(), codeBlock->ownerExecutable(), stubInfo, listIndex);
798 if (listIndex < POLYMORPHIC_LIST_CACHE_SIZE) {
799 StructureChain* protoChain = structure->prototypeChain(callFrame);
800 JIT::compileGetByIdChainList(callFrame->scope()->vm(), callFrame, codeBlock, stubInfo, prototypeStructureList, listIndex, structure, protoChain, count, propertyName, slot, offset);
802 if (listIndex == (POLYMORPHIC_LIST_CACHE_SIZE - 1))
803 ctiPatchCallByReturnAddress(codeBlock, STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id_proto_list_full));
807 return JSValue::encode(result);
// Terminal slow path once the polymorphic proto list is full: plain get with
// no further caching.
810 DEFINE_STUB_FUNCTION(EncodedJSValue, op_get_by_id_proto_list_full)
812 STUB_INIT_STACK_FRAME(stackFrame);
814 JSValue baseValue = stackFrame.args[0].jsValue();
815 PropertySlot slot(baseValue);
816 JSValue result = baseValue.get(stackFrame.callFrame, stackFrame.args[1].identifier(), slot);
818 CHECK_FOR_EXCEPTION_AT_END();
819 return JSValue::encode(result);
// Terminal slow path after a proto-cache attempt failed: plain get with no
// further caching.
822 DEFINE_STUB_FUNCTION(EncodedJSValue, op_get_by_id_proto_fail)
824 STUB_INIT_STACK_FRAME(stackFrame);
826 JSValue baseValue = stackFrame.args[0].jsValue();
827 PropertySlot slot(baseValue);
828 JSValue result = baseValue.get(stackFrame.callFrame, stackFrame.args[1].identifier(), slot);
830 CHECK_FOR_EXCEPTION_AT_END();
831 return JSValue::encode(result);
// Terminal slow path after an array-length cache failed (base was no longer
// an array): plain get with no further caching.
834 DEFINE_STUB_FUNCTION(EncodedJSValue, op_get_by_id_array_fail)
836 STUB_INIT_STACK_FRAME(stackFrame);
838 JSValue baseValue = stackFrame.args[0].jsValue();
839 PropertySlot slot(baseValue);
840 JSValue result = baseValue.get(stackFrame.callFrame, stackFrame.args[1].identifier(), slot);
842 CHECK_FOR_EXCEPTION_AT_END();
843 return JSValue::encode(result);
// Terminal slow path after a string-length cache failed (base was no longer
// a string): plain get with no further caching.
846 DEFINE_STUB_FUNCTION(EncodedJSValue, op_get_by_id_string_fail)
848 STUB_INIT_STACK_FRAME(stackFrame);
850 JSValue baseValue = stackFrame.args[0].jsValue();
851 PropertySlot slot(baseValue);
852 JSValue result = baseValue.get(stackFrame.callFrame, stackFrame.args[1].identifier(), slot);
854 CHECK_FOR_EXCEPTION_AT_END();
855 return JSValue::encode(result);
// Slow path for the check_has_instance opcode: if the RHS (args[1])
// implements a custom hasInstance, dispatch to it and return the boolean;
// if the RHS is not an object at all, throw an invalid-parameter TypeError
// for "instanceof". NOTE(review): the control flow between these two cases
// is partly on lines not visible in this excerpt.
858 DEFINE_STUB_FUNCTION(EncodedJSValue, op_check_has_instance)
860 STUB_INIT_STACK_FRAME(stackFrame);
862 CallFrame* callFrame = stackFrame.callFrame;
863 JSValue value = stackFrame.args[0].jsValue();
864 JSValue baseVal = stackFrame.args[1].jsValue();
866 if (baseVal.isObject()) {
867 JSObject* baseObject = asObject(baseVal);
// The default-hasInstance case is handled on the fast path; only custom
// implementations should reach this stub.
868 ASSERT(!baseObject->structure()->typeInfo().implementsDefaultHasInstance());
869 if (baseObject->structure()->typeInfo().implementsHasInstance()) {
870 bool result = baseObject->methodTable()->customHasInstance(baseObject, callFrame, value);
871 CHECK_FOR_EXCEPTION_AT_END();
872 return JSValue::encode(jsBoolean(result));
876 stackFrame.vm->throwException(callFrame, createInvalidParameterError(callFrame, "instanceof", baseVal));
877 VM_THROW_EXCEPTION_AT_END();
878 return JSValue::encode(JSValue());
// Tier-up slow path: decides whether to start, complete, or act on a DFG
// compilation of this code block, and whether to OSR-enter the optimized
// version at 'bytecodeIndex'. (DFG-only; see the #endif that follows.)
882 DEFINE_STUB_FUNCTION(void, optimize)
884 STUB_INIT_STACK_FRAME(stackFrame);
886 // Defer GC so that it doesn't run between when we enter into this slow path and
887 // when we figure out the state of our code block. This prevents a number of
888 // awkward reentrancy scenarios, including:
890 // - The optimized version of our code block being jettisoned by GC right after
891 // we concluded that we wanted to use it.
893 // - An optimized version of our code block being installed just as we decided
894 // that it wasn't ready yet.
896 // This still leaves the following: anytime we return from cti_optimize, we may
897 // GC, and the GC may either jettison the optimized version of our code block,
898 // or it may install the optimized version of our code block even though we
899 // concluded that it wasn't ready yet.
901 // Note that jettisoning won't happen if we already initiated OSR, because in
902 // that case we would have already planted the optimized code block into the JS
904 DeferGC deferGC(stackFrame.vm->heap);
906 CallFrame* callFrame = stackFrame.callFrame;
907 CodeBlock* codeBlock = callFrame->codeBlock();
908 unsigned bytecodeIndex = stackFrame.args[0].int32();
911 // If we're attempting to OSR from a loop, assume that this should be
912 // separately optimized.
913 codeBlock->m_shouldAlwaysBeInlined = false;
916 if (Options::verboseOSR()) {
918 *codeBlock, ": Entered optimize with bytecodeIndex = ", bytecodeIndex,
919 ", executeCounter = ", codeBlock->jitExecuteCounter(),
920 ", optimizationDelayCounter = ", codeBlock->reoptimizationRetryCounter(),
922 if (codeBlock->hasOptimizedReplacement())
923 dataLog(codeBlock->replacement()->osrExitCounter());
// Not hot enough yet: keep running baseline code and profiling.
929 if (!codeBlock->checkIfOptimizationThresholdReached()) {
930 codeBlock->updateAllPredictions();
931 if (Options::verboseOSR())
932 dataLog("Choosing not to optimize ", *codeBlock, " yet, because the threshold hasn't been reached.\n");
// Blocks marked always-inlined are not compiled standalone; back off.
936 if (codeBlock->m_shouldAlwaysBeInlined) {
937 codeBlock->updateAllPredictions();
938 codeBlock->optimizeAfterWarmUp();
939 if (Options::verboseOSR())
940 dataLog("Choosing not to optimize ", *codeBlock, " yet, because m_shouldAlwaysBeInlined == true.\n");
944 // We cannot be in the process of asynchronous compilation and also have an optimized
947 !stackFrame.vm->worklist
948 || !(stackFrame.vm->worklist->compilationState(codeBlock) != DFG::Worklist::NotKnown
949 && codeBlock->hasOptimizedReplacement()));
951 DFG::Worklist::State worklistState;
952 if (stackFrame.vm->worklist) {
953 // The call to DFG::Worklist::completeAllReadyPlansForVM() will complete all ready
954 // (i.e. compiled) code blocks. But if it completes ours, we also need to know
955 // what the result was so that we don't plow ahead and attempt OSR or immediate
956 // reoptimization. This will have already also set the appropriate JIT execution
957 // count threshold depending on what happened, so if the compilation was anything
958 // but successful we just want to return early. See the case for worklistState ==
959 // DFG::Worklist::Compiled, below.
961 // Note that we could have alternatively just called Worklist::compilationState()
962 // here, and if it returned Compiled, we could have then called
963 // completeAndScheduleOSR() below. But that would have meant that it could take
964 // longer for code blocks to be completed: they would only complete when *their*
965 // execution count trigger fired; but that could take a while since the firing is
966 // racy. It could also mean that code blocks that never run again after being
967 // compiled would sit on the worklist until next GC. That's fine, but it's
968 // probably a waste of memory. Our goal here is to complete code blocks as soon as
969 // possible in order to minimize the chances of us executing baseline code after
970 // optimized code is already available.
973 stackFrame.vm->worklist->completeAllReadyPlansForVM(*stackFrame.vm, codeBlock);
975 worklistState = DFG::Worklist::NotKnown;
// Still compiling asynchronously: defer and try again later.
977 if (worklistState == DFG::Worklist::Compiling) {
978 // We cannot be in the process of asynchronous compilation and also have an optimized
980 RELEASE_ASSERT(!codeBlock->hasOptimizedReplacement());
981 codeBlock->setOptimizationThresholdBasedOnCompilationResult(CompilationDeferred);
985 if (worklistState == DFG::Worklist::Compiled) {
986 // If we don't have an optimized replacement but we did just get compiled, then
987 // the compilation failed or was invalidated, in which case the execution count
988 // thresholds have already been set appropriately by
989 // CodeBlock::setOptimizationThresholdBasedOnCompilationResult() and we have
990 // nothing left to do.
991 if (!codeBlock->hasOptimizedReplacement()) {
992 codeBlock->updateAllPredictions();
993 if (Options::verboseOSR())
994 dataLog("Code block ", *codeBlock, " was compiled but it doesn't have an optimized replacement.\n");
997 } else if (codeBlock->hasOptimizedReplacement()) {
998 if (Options::verboseOSR())
999 dataLog("Considering OSR ", *codeBlock, " -> ", *codeBlock->replacement(), ".\n");
1000 // If we have an optimized replacement, then it must be the case that we entered
1001 // cti_optimize from a loop. That's because is there's an optimized replacement,
1002 // then all calls to this function will be relinked to the replacement and so
1003 // the prologue OSR will never fire.
1005 // This is an interesting threshold check. Consider that a function OSR exits
1006 // in the middle of a loop, while having a relatively low exit count. The exit
1007 // will reset the execution counter to some target threshold, meaning that this
1008 // code won't be reached until that loop heats up for >=1000 executions. But then
1009 // we do a second check here, to see if we should either reoptimize, or just
1010 // attempt OSR entry. Hence it might even be correct for
1011 // shouldReoptimizeFromLoopNow() to always return true. But we make it do some
1012 // additional checking anyway, to reduce the amount of recompilation thrashing.
1013 if (codeBlock->replacement()->shouldReoptimizeFromLoopNow()) {
1014 if (Options::verboseOSR()) {
1016 "Triggering reoptimization of ", *codeBlock,
1017 "(", *codeBlock->replacement(), ") (in loop).\n");
1019 codeBlock->reoptimize();
// Profiling isn't rich enough yet to justify compiling; wait.
1023 if (!codeBlock->shouldOptimizeNow()) {
1024 if (Options::verboseOSR()) {
1026 "Delaying optimization for ", *codeBlock,
1027 " because of insufficient profiling.\n");
1032 if (Options::verboseOSR())
1033 dataLog("Triggering optimized compilation of ", *codeBlock, "\n");
1035 JSScope* scope = callFrame->scope();
1036 CompilationResult result;
1037 JSObject* error = codeBlock->compileOptimized(callFrame, scope, result, bytecodeIndex);
1038 if (Options::verboseOSR()) {
1039 dataLog("Optimizing compilation of ", *codeBlock, " result: ", result, "\n");
1041 dataLog("WARNING: optimized compilation failed with a JS error.\n");
1044 codeBlock->setOptimizationThresholdBasedOnCompilationResult(result);
1045 if (result != CompilationSuccessful)
1049 CodeBlock* optimizedCodeBlock = codeBlock->replacement();
1050 ASSERT(JITCode::isOptimizingJIT(optimizedCodeBlock->jitType()));
1052 if (optimizedCodeBlock->jitType() == JITCode::FTLJIT) {
1053 // FTL JIT doesn't support OSR entry yet.
1054 // https://bugs.webkit.org/show_bug.cgi?id=113625
1056 // Don't attempt OSR entry again.
1057 codeBlock->dontOptimizeAnytimeSoon();
// On successful OSR preparation, redirect the return address so execution
// resumes in the optimized code.
1061 if (void* address = DFG::prepareOSREntry(callFrame, optimizedCodeBlock, bytecodeIndex)) {
1062 if (Options::verboseOSR()) {
1064 "Performing OSR ", *codeBlock, " -> ", *optimizedCodeBlock, ", address ",
1065 RawPointer((STUB_RETURN_ADDRESS).value()), " -> ", RawPointer(address), ".\n");
1068 codeBlock->optimizeSoon();
1069 STUB_SET_RETURN_ADDRESS(address);
1073 if (Options::verboseOSR()) {
1075 "Optimizing ", *codeBlock, " -> ", *codeBlock->replacement(),
1076 " succeeded, OSR failed, after a delay of ",
1077 codeBlock->optimizationDelayCounter(), ".\n");
1080 // Count the OSR failure as a speculation failure. If this happens a lot, then
1082 optimizedCodeBlock->countOSRExit();
1084 // We are a lot more conservative about triggering reoptimization after OSR failure than
1085 // before it. If we enter the optimize_from_loop trigger with a bucket full of fail
1086 // already, then we really would like to reoptimize immediately. But this case covers
1087 // something else: there weren't many (or any) speculation failures before, but we just
1088 // failed to enter the speculative code because some variable had the wrong value or
1089 // because the OSR code decided for any spurious reason that it did not want to OSR
1090 // right now. So, we only trigger reoptimization only upon the more conservative (non-loop)
1091 // reoptimization trigger.
1092 if (optimizedCodeBlock->shouldReoptimizeNow()) {
1093 if (Options::verboseOSR()) {
1095 "Triggering reoptimization of ", *codeBlock, " -> ",
1096 *codeBlock->replacement(), " (after OSR fail).\n");
1098 codeBlock->reoptimize();
1102 // OSR failed this time, but it might succeed next time! Let the code run a bit
1103 // longer and then try again.
1104 codeBlock->optimizeAfterWarmUp();
1106 #endif // ENABLE(DFG_JIT)
1108 DEFINE_STUB_FUNCTION(EncodedJSValue, op_instanceof)
1110 STUB_INIT_STACK_FRAME(stackFrame);
1112 CallFrame* callFrame = stackFrame.callFrame;
1113 JSValue value = stackFrame.args[0].jsValue();
1114 JSValue proto = stackFrame.args[1].jsValue();
1116 ASSERT(!value.isObject() || !proto.isObject());
1118 bool result = JSObject::defaultHasInstance(callFrame, value, proto);
1119 CHECK_FOR_EXCEPTION_AT_END();
1120 return JSValue::encode(jsBoolean(result));
1123 DEFINE_STUB_FUNCTION(EncodedJSValue, op_del_by_id)
1125 STUB_INIT_STACK_FRAME(stackFrame);
1127 CallFrame* callFrame = stackFrame.callFrame;
1129 JSObject* baseObj = stackFrame.args[0].jsValue().toObject(callFrame);
1131 bool couldDelete = baseObj->methodTable()->deleteProperty(baseObj, callFrame, stackFrame.args[1].identifier());
1132 JSValue result = jsBoolean(couldDelete);
1133 if (!couldDelete && callFrame->codeBlock()->isStrictMode())
1134 stackFrame.vm->throwException(stackFrame.callFrame, createTypeError(stackFrame.callFrame, "Unable to delete property."));
1136 CHECK_FOR_EXCEPTION_AT_END();
1137 return JSValue::encode(result);
1140 DEFINE_STUB_FUNCTION(JSObject*, op_new_func)
1142 STUB_INIT_STACK_FRAME(stackFrame);
1144 ASSERT(stackFrame.callFrame->codeBlock()->codeType() != FunctionCode || !stackFrame.callFrame->codeBlock()->needsFullScopeChain() || stackFrame.callFrame->uncheckedR(stackFrame.callFrame->codeBlock()->activationRegister()).jsValue());
1145 return JSFunction::create(stackFrame.callFrame, stackFrame.args[0].function(), stackFrame.callFrame->scope());
// Compile the JS callee for the given specialization kind (call vs
// construct); on compile error, throws the error on the VM. Shared helper
// for the two cti_*_jitCompile stubs below.
1148 inline void* jitCompileFor(CallFrame* callFrame, CodeSpecializationKind kind)
1150 // This function is called by cti_op_call_jitCompile() and
1151 // cti_op_construct_jitCompile() JIT glue trampolines to compile the
1152 // callee function that we want to call. Both cti glue trampolines are
1153 // called by JIT'ed code which has pushed a frame and initialized most of
1154 // the frame content except for the codeBlock.
1156 // Normally, the prologue of the callee is supposed to set the frame's cb
1157 // pointer to the cb of the callee. But in this case, the callee code does
1158 // not exist yet until it is compiled below. The compilation process will
1159 // allocate memory which may trigger a GC. The GC, in turn, will scan the
1160 // JSStack, and will expect the frame's cb to either be valid or 0. If
1161 // we don't initialize it, the GC will be accessing invalid memory and may
1164 // Hence, we should nullify it here before proceeding with the compilation.
1165 callFrame->setCodeBlock(0);
1167 JSFunction* function = jsCast<JSFunction*>(callFrame->callee());
// Host (native) functions never take this path.
1168 ASSERT(!function->isHostFunction());
1169 FunctionExecutable* executable = function->jsExecutable();
1170 JSScope* callDataScopeChain = function->scope();
1171 JSObject* error = executable->compileFor(callFrame, callDataScopeChain, kind);
1174 callFrame->vm().throwException(callFrame, error);
// Stub: compile the JS callee for a regular call (via jitCompileFor) and
// hand any resulting exception to the op_call exception machinery.
1178 DEFINE_STUB_FUNCTION(void*, op_call_jitCompile)
1180 STUB_INIT_STACK_FRAME(stackFrame);
1182 #if !ASSERT_DISABLED
// Sanity check: the callee really is a JS function.
1184 ASSERT(stackFrame.callFrame->callee()->methodTable()->getCallData(stackFrame.callFrame->callee(), callData) == CallTypeJS);
1187 CallFrame* callFrame = stackFrame.callFrame;
1188 void* result = jitCompileFor(callFrame, CodeForCall);
1190 return throwExceptionFromOpCall<void*>(stackFrame, callFrame, STUB_RETURN_ADDRESS);
// Stub: compile the JS callee for construction (via jitCompileFor) and hand
// any resulting exception to the op_call exception machinery.
1195 DEFINE_STUB_FUNCTION(void*, op_construct_jitCompile)
1197 STUB_INIT_STACK_FRAME(stackFrame);
1199 #if !ASSERT_DISABLED
// Sanity check: the callee really is a JS constructor.
1200 ConstructData constructData;
1201 ASSERT(jsCast<JSFunction*>(stackFrame.callFrame->callee())->methodTable()->getConstructData(stackFrame.callFrame->callee(), constructData) == ConstructTypeJS);
1204 CallFrame* callFrame = stackFrame.callFrame;
1205 void* result = jitCompileFor(callFrame, CodeForConstruct);
1207 return throwExceptionFromOpCall<void*>(stackFrame, callFrame, STUB_RETURN_ADDRESS);
1212 DEFINE_STUB_FUNCTION(int, op_call_arityCheck)
1214 STUB_INIT_STACK_FRAME(stackFrame);
1216 CallFrame* callFrame = stackFrame.callFrame;
1218 int missingArgCount = CommonSlowPaths::arityCheckFor(callFrame, stackFrame.stack, CodeForCall);
1219 if (missingArgCount < 0) {
1220 ErrorWithExecFunctor functor = ErrorWithExecFunctor(createStackOverflowError);
1221 return throwExceptionFromOpCall<int>(stackFrame, callFrame, STUB_RETURN_ADDRESS, &functor);
1223 return missingArgCount;
1226 DEFINE_STUB_FUNCTION(int, op_construct_arityCheck)
1228 STUB_INIT_STACK_FRAME(stackFrame);
1230 CallFrame* callFrame = stackFrame.callFrame;
1232 int missingArgCount = CommonSlowPaths::arityCheckFor(callFrame, stackFrame.stack, CodeForConstruct);
1233 if (missingArgCount < 0) {
1234 ErrorWithExecFunctor functor = ErrorWithExecFunctor(createStackOverflowError);
1235 return throwExceptionFromOpCall<int>(stackFrame, callFrame, STUB_RETURN_ADDRESS, &functor);
1237 return missingArgCount;
// Lazily compile (if needed) and link the callee of the current call site,
// returning the machine-code entry point to jump to. Shared helper for
// cti_vm_lazyLinkCall and cti_vm_lazyLinkConstruct.
1240 inline void* lazyLinkFor(CallFrame* callFrame, CodeSpecializationKind kind)
1242 JSFunction* callee = jsCast<JSFunction*>(callFrame->callee());
1243 ExecutableBase* executable = callee->executable();
1245 MacroAssemblerCodePtr codePtr;
1246 CodeBlock* codeBlock = 0;
// The caller's CallLinkInfo is located by the return PC of this call.
1247 CallLinkInfo* callLinkInfo = &callFrame->callerFrame()->codeBlock()->getCallLinkInfo(callFrame->returnPC());
1249 // This function is called by cti_vm_lazyLinkCall() and
1250 // cti_lazyLinkConstruct JIT glue trampolines to link the callee function
1251 // that we want to call. Both cti glue trampolines are called by JIT'ed
1252 // code which has pushed a frame and initialized most of the frame content
1253 // except for the codeBlock.
1255 // Normally, the prologue of the callee is supposed to set the frame's cb
1256 // field to the cb of the callee. But in this case, the callee may not
1257 // exist yet, and if not, it will be generated in the compilation below.
1258 // The compilation will allocate memory which may trigger a GC. The GC, in
1259 // turn, will scan the JSStack, and will expect the frame's cb to be valid
1260 // or 0. If we don't initialize it, the GC will be accessing invalid
1261 // memory and may crash.
1263 // Hence, we should nullify it here before proceeding with the compilation.
1264 callFrame->setCodeBlock(0);
1266 if (executable->isHostFunction())
1267 codePtr = executable->generatedJITCodeFor(kind)->addressForCall();
1269 FunctionExecutable* functionExecutable = static_cast<FunctionExecutable*>(executable);
1270 if (JSObject* error = functionExecutable->compileFor(callFrame, callee->scope(), kind)) {
1271 callFrame->vm().throwException(callFrame, error);
1274 codeBlock = &functionExecutable->generatedBytecodeFor(kind);
// Under-arity and varargs calls must go through the arity-check entry point.
1275 if (callFrame->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters())
1276 || callLinkInfo->callType == CallLinkInfo::CallVarargs)
1277 codePtr = functionExecutable->generatedJITCodeWithArityCheckFor(kind);
1279 codePtr = functionExecutable->generatedJITCodeFor(kind)->addressForCall();
// Patch the caller's call site under the code block's lock.
1282 ConcurrentJITLocker locker(callFrame->callerFrame()->codeBlock()->m_lock);
1283 if (!callLinkInfo->seenOnce())
1284 callLinkInfo->setSeen();
1286 JIT::linkFor(callFrame->callerFrame(), callee, callFrame->callerFrame()->codeBlock(), codeBlock, codePtr, callLinkInfo, &callFrame->vm(), kind);
1288 return codePtr.executableAddress();
// Stub: lazily link a call site for a regular call, returning the callee's
// entry point; forwards any pending exception to the op_call exception path.
1291 DEFINE_STUB_FUNCTION(void*, vm_lazyLinkCall)
1293 STUB_INIT_STACK_FRAME(stackFrame);
1295 CallFrame* callFrame = stackFrame.callFrame;
1296 void* result = lazyLinkFor(callFrame, CodeForCall);
1298 return throwExceptionFromOpCall<void*>(stackFrame, callFrame, STUB_RETURN_ADDRESS);
// Stub: the monomorphic call cache missed because a different closure (same
// or different executable/structure) arrived. Either upgrade the call site
// to a closure-call stub or fall back to the generic slow call.
1303 DEFINE_STUB_FUNCTION(void*, vm_lazyLinkClosureCall)
1305 STUB_INIT_STACK_FRAME(stackFrame);
1307 CallFrame* callFrame = stackFrame.callFrame;
1309 CodeBlock* callerCodeBlock = callFrame->callerFrame()->codeBlock();
1310 VM* vm = callerCodeBlock->vm();
1311 CallLinkInfo* callLinkInfo = &callerCodeBlock->getCallLinkInfo(callFrame->returnPC());
1312 JSFunction* callee = jsCast<JSFunction*>(callFrame->callee());
1313 ExecutableBase* executable = callee->executable();
1314 Structure* structure = callee->structure();
// This path is only taken for an already-linked Call site whose cached
// callee differs from the current one.
1316 ASSERT(callLinkInfo->callType == CallLinkInfo::Call);
1317 ASSERT(callLinkInfo->isLinked());
1318 ASSERT(callLinkInfo->callee);
1319 ASSERT(callee != callLinkInfo->callee.get());
1321 bool shouldLink = false;
1322 CodeBlock* calleeCodeBlock = 0;
1323 MacroAssemblerCodePtr codePtr;
// Same executable and structure as the cached callee: a closure-call stub
// can handle this call site.
1325 if (executable == callLinkInfo->callee.get()->executable()
1326 && structure == callLinkInfo->callee.get()->structure()) {
1330 ASSERT(executable->hasJITCodeForCall());
1331 codePtr = executable->generatedJITCodeForCall()->addressForCall();
1332 if (!callee->executable()->isHostFunction()) {
1333 calleeCodeBlock = &jsCast<FunctionExecutable*>(executable)->generatedBytecodeForCall();
// Under-arity calls must enter through the arity-check thunk.
1334 if (callFrame->argumentCountIncludingThis() < static_cast<size_t>(calleeCodeBlock->numParameters())) {
1336 codePtr = executable->generatedJITCodeWithArityCheckFor(CodeForCall);
1339 } else if (callee->isHostFunction())
1340 codePtr = executable->generatedJITCodeForCall()->addressForCall();
1342 // Need to clear the code block before compilation, because compilation can GC.
1343 callFrame->setCodeBlock(0);
1345 FunctionExecutable* functionExecutable = jsCast<FunctionExecutable*>(executable);
1346 JSScope* scopeChain = callee->scope();
1347 JSObject* error = functionExecutable->compileFor(callFrame, scopeChain, CodeForCall);
1349 callFrame->vm().throwException(callFrame, error);
1353 codePtr = functionExecutable->generatedJITCodeWithArityCheckFor(CodeForCall);
// Patch the caller: either install the closure-call stub or relink the
// call site to the generic virtual-call slow path.
1358 ConcurrentJITLocker locker(callerCodeBlock->m_lock);
1359 JIT::compileClosureCall(vm, callLinkInfo, callerCodeBlock, calleeCodeBlock, structure, executable, codePtr);
1360 callLinkInfo->hasSeenClosure = true;
1362 JIT::linkSlowCall(callerCodeBlock, callLinkInfo);
1364 return codePtr.executableAddress();
// Stub: lazily link a call site for construction, returning the callee's
// entry point; forwards any pending exception to the op_call exception path.
1367 DEFINE_STUB_FUNCTION(void*, vm_lazyLinkConstruct)
1369 STUB_INIT_STACK_FRAME(stackFrame);
1371 CallFrame* callFrame = stackFrame.callFrame;
1372 void* result = lazyLinkFor(callFrame, CodeForConstruct);
1374 return throwExceptionFromOpCall<void*>(stackFrame, callFrame, STUB_RETURN_ADDRESS);
// Stub: allocate a JSActivation for the current frame and install it as the
// frame's scope.
1379 DEFINE_STUB_FUNCTION(JSObject*, op_push_activation)
1381 STUB_INIT_STACK_FRAME(stackFrame);
1383 JSActivation* activation = JSActivation::create(stackFrame.callFrame->vm(), stackFrame.callFrame, stackFrame.callFrame->codeBlock());
1384 stackFrame.callFrame->setScope(activation);
// Stub: call a callee that is not a JS function — dispatch to a host
// (native) function, or throw "not a function" for anything uncallable.
1388 DEFINE_STUB_FUNCTION(EncodedJSValue, op_call_NotJSFunction)
1390 STUB_INIT_STACK_FRAME(stackFrame);
1392 CallFrame* callFrame = stackFrame.callFrame;
1394 JSValue callee = callFrame->calleeAsValue();
1397 CallType callType = getCallData(callee, callData);
// JS-function callees are handled elsewhere; only host/none reach here.
1399 ASSERT(callType != CallTypeJS);
1400 if (callType != CallTypeHost) {
1401 ASSERT(callType == CallTypeNone);
1402 ErrorWithExecAndCalleeFunctor functor = ErrorWithExecAndCalleeFunctor(createNotAFunctionError, callee);
1403 return throwExceptionFromOpCall<EncodedJSValue>(stackFrame, callFrame, STUB_RETURN_ADDRESS, &functor);
1406 EncodedJSValue returnValue;
// The sampling-tool record scopes the native call for profiling builds.
1408 SamplingTool::CallRecord callRecord(CTI_SAMPLER, true);
1409 returnValue = callData.native.function(callFrame);
1412 if (stackFrame.vm->exception())
1413 return throwExceptionFromOpCall<EncodedJSValue>(stackFrame, callFrame, STUB_RETURN_ADDRESS);
1418 DEFINE_STUB_FUNCTION(EncodedJSValue, op_create_arguments)
1420 STUB_INIT_STACK_FRAME(stackFrame);
1422 Arguments* arguments = Arguments::create(*stackFrame.vm, stackFrame.callFrame);
1423 return JSValue::encode(JSValue(arguments));
1426 DEFINE_STUB_FUNCTION(void, op_tear_off_activation)
1428 STUB_INIT_STACK_FRAME(stackFrame);
1430 ASSERT(stackFrame.callFrame->codeBlock()->needsFullScopeChain());
1431 jsCast<JSActivation*>(stackFrame.args[0].jsValue())->tearOff(*stackFrame.vm);
// Stub: detach the Arguments object from the stack frame; if an activation
// exists, route the tear-off through it so shared locals stay consistent.
1434 DEFINE_STUB_FUNCTION(void, op_tear_off_arguments)
1436 STUB_INIT_STACK_FRAME(stackFrame);
1438 CallFrame* callFrame = stackFrame.callFrame;
1439 ASSERT(callFrame->codeBlock()->usesArguments());
1440 Arguments* arguments = jsCast<Arguments*>(stackFrame.args[0].jsValue());
1441 if (JSValue activationValue = stackFrame.args[1].jsValue()) {
1442 arguments->didTearOffActivation(callFrame, jsCast<JSActivation*>(activationValue));
1445 arguments->tearOff(callFrame);
1448 DEFINE_STUB_FUNCTION(void, op_profile_will_call)
1450 STUB_INIT_STACK_FRAME(stackFrame);
1452 if (LegacyProfiler* profiler = stackFrame.vm->enabledProfiler())
1453 profiler->willExecute(stackFrame.callFrame, stackFrame.args[0].jsValue());
1456 DEFINE_STUB_FUNCTION(void, op_profile_did_call)
1458 STUB_INIT_STACK_FRAME(stackFrame);
1460 if (LegacyProfiler* profiler = stackFrame.vm->enabledProfiler())
1461 profiler->didExecute(stackFrame.callFrame, stackFrame.args[0].jsValue());
1464 DEFINE_STUB_FUNCTION(JSObject*, op_new_array)
1466 STUB_INIT_STACK_FRAME(stackFrame);
1468 return constructArray(stackFrame.callFrame, stackFrame.args[2].arrayAllocationProfile(), reinterpret_cast<JSValue*>(&stackFrame.callFrame->registers()[stackFrame.args[0].int32()]), stackFrame.args[1].int32());
1471 DEFINE_STUB_FUNCTION(JSObject*, op_new_array_with_size)
1473 STUB_INIT_STACK_FRAME(stackFrame);
1475 return constructArrayWithSizeQuirk(stackFrame.callFrame, stackFrame.args[1].arrayAllocationProfile(), stackFrame.callFrame->lexicalGlobalObject(), stackFrame.args[0].jsValue());
1478 DEFINE_STUB_FUNCTION(JSObject*, op_new_array_buffer)
1480 STUB_INIT_STACK_FRAME(stackFrame);
1482 return constructArray(stackFrame.callFrame, stackFrame.args[2].arrayAllocationProfile(), stackFrame.callFrame->codeBlock()->constantBuffer(stackFrame.args[0].int32()), stackFrame.args[1].int32());
// Stub: construct with a callee that is not a JS function — dispatch to a
// host constructor, or throw "not a constructor" for anything else.
1485 DEFINE_STUB_FUNCTION(EncodedJSValue, op_construct_NotJSConstruct)
1487 STUB_INIT_STACK_FRAME(stackFrame);
1489 CallFrame* callFrame = stackFrame.callFrame;
1490 JSValue callee = callFrame->calleeAsValue();
1492 ConstructData constructData;
1493 ConstructType constructType = getConstructData(callee, constructData);
// JS-function constructors are handled elsewhere; only host/none reach here.
1495 ASSERT(constructType != ConstructTypeJS);
1496 if (constructType != ConstructTypeHost) {
1497 ASSERT(constructType == ConstructTypeNone);
1498 ErrorWithExecAndCalleeFunctor functor = ErrorWithExecAndCalleeFunctor(createNotAConstructorError, callee);
1499 return throwExceptionFromOpCall<EncodedJSValue>(stackFrame, callFrame, STUB_RETURN_ADDRESS, &functor);
1502 EncodedJSValue returnValue;
// The sampling-tool record scopes the native call for profiling builds.
1504 SamplingTool::CallRecord callRecord(CTI_SAMPLER, true);
1505 returnValue = constructData.native.function(callFrame);
1508 if (stackFrame.vm->exception())
1509 return throwExceptionFromOpCall<EncodedJSValue>(stackFrame, callFrame, STUB_RETURN_ADDRESS);
// Shared get_by_val helper: fast paths for cell-with-string-key, indexed
// string access (patching the call site to the string-specialized stub),
// uint32 indexing, and private names, before the fully generic lookup.
1514 static JSValue getByVal(
1515 CallFrame* callFrame, JSValue baseValue, JSValue subscript, ReturnAddressPtr returnAddress)
1517 if (LIKELY(baseValue.isCell() && subscript.isString())) {
1518 if (JSValue result = baseValue.asCell()->fastGetOwnProperty(callFrame, asString(subscript)->value(callFrame)))
1522 if (subscript.isUInt32()) {
1523 uint32_t i = subscript.asUInt32();
1524 if (isJSString(baseValue) && asString(baseValue)->canGetIndex(i)) {
// Indexed string access: retarget this call site at the string stub.
1525 ctiPatchCallByReturnAddress(callFrame->codeBlock(), returnAddress, FunctionPtr(cti_op_get_by_val_string));
1526 return asString(baseValue)->getIndex(callFrame, i);
1528 return baseValue.get(callFrame, i);
1531 if (isName(subscript))
1532 return baseValue.get(callFrame, jsCast<NameInstance*>(subscript.asCell())->privateName());
// Generic case: ToString the subscript and do a normal property get.
1534 Identifier property(callFrame, subscript.toString(callFrame)->value(callFrame));
1535 return baseValue.get(callFrame, property);
// Patchable get_by_val slow path: for object bases with int32 subscripts,
// try to compile a specialized array access for this site; if the site
// keeps failing, relink it permanently to the generic stub.
1538 DEFINE_STUB_FUNCTION(EncodedJSValue, op_get_by_val)
1540 STUB_INIT_STACK_FRAME(stackFrame);
1542 CallFrame* callFrame = stackFrame.callFrame;
1544 JSValue baseValue = stackFrame.args[0].jsValue();
1545 JSValue subscript = stackFrame.args[1].jsValue();
1547 if (baseValue.isObject() && subscript.isInt32()) {
1548 // See if it's worth optimizing this at all.
1549 JSObject* object = asObject(baseValue);
1550 bool didOptimize = false;
// Locate this site's ByValInfo via the current bytecode offset.
1552 unsigned bytecodeOffset = callFrame->locationAsBytecodeOffset();
1553 ASSERT(bytecodeOffset);
1554 ByValInfo& byValInfo = callFrame->codeBlock()->getByValInfo(bytecodeOffset - 1);
1555 ASSERT(!byValInfo.stubRoutine);
1557 if (hasOptimizableIndexing(object->structure())) {
1558 // Attempt to optimize.
1559 JITArrayMode arrayMode = jitArrayModeForStructure(object->structure());
1560 if (arrayMode != byValInfo.arrayMode) {
1561 JIT::compileGetByVal(&callFrame->vm(), callFrame->codeBlock(), &byValInfo, STUB_RETURN_ADDRESS, arrayMode);
1567 // If we take slow path more than 10 times without patching then make sure we
1568 // never make that mistake again. Or, if we failed to patch and we have some object
1569 // that intercepts indexed get, then don't even wait until 10 times. For cases
1570 // where we see non-index-intercepting objects, this gives 10 iterations worth of
1571 // opportunity for us to observe that the get_by_val may be polymorphic.
1572 if (++byValInfo.slowPathCount >= 10
1573 || object->structure()->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero()) {
1574 // Don't ever try to optimize.
1575 RepatchBuffer repatchBuffer(callFrame->codeBlock());
1576 repatchBuffer.relinkCallerToFunction(STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_val_generic));
// Whatever patching happened, perform the actual access this time around.
1581 JSValue result = getByVal(callFrame, baseValue, subscript, STUB_RETURN_ADDRESS);
1582 CHECK_FOR_EXCEPTION();
1583 return JSValue::encode(result);
1586 DEFINE_STUB_FUNCTION(EncodedJSValue, op_get_by_val_generic)
1588 STUB_INIT_STACK_FRAME(stackFrame);
1590 CallFrame* callFrame = stackFrame.callFrame;
1592 JSValue baseValue = stackFrame.args[0].jsValue();
1593 JSValue subscript = stackFrame.args[1].jsValue();
1595 JSValue result = getByVal(callFrame, baseValue, subscript, STUB_RETURN_ADDRESS);
1596 CHECK_FOR_EXCEPTION();
1597 return JSValue::encode(result);
// String-specialized get_by_val slow path (installed by getByVal above):
// fast indexed access into strings; if the base stops being a string, the
// call site is patched back to the regular get_by_val stub.
1600 DEFINE_STUB_FUNCTION(EncodedJSValue, op_get_by_val_string)
1602 STUB_INIT_STACK_FRAME(stackFrame);
1604 CallFrame* callFrame = stackFrame.callFrame;
1606 JSValue baseValue = stackFrame.args[0].jsValue();
1607 JSValue subscript = stackFrame.args[1].jsValue();
1611 if (LIKELY(subscript.isUInt32())) {
1612 uint32_t i = subscript.asUInt32();
1613 if (isJSString(baseValue) && asString(baseValue)->canGetIndex(i))
1614 result = asString(baseValue)->getIndex(callFrame, i);
1616 result = baseValue.get(callFrame, i);
// Base is no longer a string: undo the specialization for this site.
1617 if (!isJSString(baseValue))
1618 ctiPatchCallByReturnAddress(callFrame->codeBlock(), STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_val));
1620 } else if (isName(subscript))
1621 result = baseValue.get(callFrame, jsCast<NameInstance*>(subscript.asCell())->privateName());
// Generic case: ToString the subscript and do a normal property get.
1623 Identifier property(callFrame, subscript.toString(callFrame)->value(callFrame));
1624 result = baseValue.get(callFrame, property);
1627 CHECK_FOR_EXCEPTION_AT_END();
1628 return JSValue::encode(result);
// Shared put_by_val helper: fast paths for uint32 indices (including the
// quick indexed-storage case) and private names, before the generic
// ToString-and-put path.
1631 static void putByVal(CallFrame* callFrame, JSValue baseValue, JSValue subscript, JSValue value)
1633 if (LIKELY(subscript.isUInt32())) {
1634 uint32_t i = subscript.asUInt32();
1635 if (baseValue.isObject()) {
1636 JSObject* object = asObject(baseValue);
// Quick path writes directly into indexed storage when possible.
1637 if (object->canSetIndexQuickly(i))
1638 object->setIndexQuickly(callFrame->vm(), i, value);
1640 object->methodTable()->putByIndex(object, callFrame, i, value, callFrame->codeBlock()->isStrictMode());
1642 baseValue.putByIndex(callFrame, i, value, callFrame->codeBlock()->isStrictMode());
1643 } else if (isName(subscript)) {
1644 PutPropertySlot slot(callFrame->codeBlock()->isStrictMode());
1645 baseValue.put(callFrame, jsCast<NameInstance*>(subscript.asCell())->privateName(), value, slot);
1647 Identifier property(callFrame, subscript.toString(callFrame)->value(callFrame));
1648 if (!callFrame->vm().exception()) { // Don't put to an object if toString threw an exception.
1649 PutPropertySlot slot(callFrame->codeBlock()->isStrictMode());
1650 baseValue.put(callFrame, property, value, slot);
// Patchable put_by_val slow path: mirrors cti_op_get_by_val — for object
// bases with int32 subscripts, try to compile a specialized array store for
// this site; persistently failing sites get relinked to the generic stub.
1655 DEFINE_STUB_FUNCTION(void, op_put_by_val)
1657 STUB_INIT_STACK_FRAME(stackFrame);
1659 CallFrame* callFrame = stackFrame.callFrame;
1661 JSValue baseValue = stackFrame.args[0].jsValue();
1662 JSValue subscript = stackFrame.args[1].jsValue();
1663 JSValue value = stackFrame.args[2].jsValue();
1665 if (baseValue.isObject() && subscript.isInt32()) {
1666 // See if it's worth optimizing at all.
1667 JSObject* object = asObject(baseValue);
1668 bool didOptimize = false;
// Locate this site's ByValInfo via the current bytecode offset.
1670 unsigned bytecodeOffset = callFrame->locationAsBytecodeOffset();
1671 ASSERT(bytecodeOffset);
1672 ByValInfo& byValInfo = callFrame->codeBlock()->getByValInfo(bytecodeOffset - 1);
1673 ASSERT(!byValInfo.stubRoutine);
1675 if (hasOptimizableIndexing(object->structure())) {
1676 // Attempt to optimize.
1677 JITArrayMode arrayMode = jitArrayModeForStructure(object->structure());
1678 if (arrayMode != byValInfo.arrayMode) {
1679 JIT::compilePutByVal(&callFrame->vm(), callFrame->codeBlock(), &byValInfo, STUB_RETURN_ADDRESS, arrayMode);
1685 // If we take slow path more than 10 times without patching then make sure we
1686 // never make that mistake again. Or, if we failed to patch and we have some object
1687 // that intercepts indexed get, then don't even wait until 10 times. For cases
1688 // where we see non-index-intercepting objects, this gives 10 iterations worth of
1689 // opportunity for us to observe that the get_by_val may be polymorphic.
1690 if (++byValInfo.slowPathCount >= 10
1691 || object->structure()->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero()) {
1692 // Don't ever try to optimize.
1693 RepatchBuffer repatchBuffer(callFrame->codeBlock());
1694 repatchBuffer.relinkCallerToFunction(STUB_RETURN_ADDRESS, FunctionPtr(cti_op_put_by_val_generic));
// Whatever patching happened, perform the actual store this time around.
1699 putByVal(callFrame, baseValue, subscript, value);
1701 CHECK_FOR_EXCEPTION_AT_END();
1704 DEFINE_STUB_FUNCTION(void, op_put_by_val_generic)
1706 STUB_INIT_STACK_FRAME(stackFrame);
1708 CallFrame* callFrame = stackFrame.callFrame;
1710 JSValue baseValue = stackFrame.args[0].jsValue();
1711 JSValue subscript = stackFrame.args[1].jsValue();
1712 JSValue value = stackFrame.args[2].jsValue();
1714 putByVal(callFrame, baseValue, subscript, value);
1716 CHECK_FOR_EXCEPTION_AT_END();
// Stub: materialize the outgoing frame for a varargs call by spreading the
// 'arguments' value onto the stack starting at firstFreeRegister.
1719 DEFINE_STUB_FUNCTION(void*, op_load_varargs)
1721 STUB_INIT_STACK_FRAME(stackFrame);
1723 CallFrame* callFrame = stackFrame.callFrame;
1724 JSStack* stack = stackFrame.stack;
1725 JSValue thisValue = stackFrame.args[0].jsValue();
1726 JSValue arguments = stackFrame.args[1].jsValue();
1727 int firstFreeRegister = stackFrame.args[2].int32();
1729 CallFrame* newCallFrame = loadVarargs(callFrame, stack, thisValue, arguments, firstFreeRegister);
1731 VM_THROW_EXCEPTION();
1732 return newCallFrame;
// Stub: evaluate 'src1 < src2' (abstract relational comparison, leftFirst)
// for the jless branch opcode.
1735 DEFINE_STUB_FUNCTION(int, op_jless)
1737 STUB_INIT_STACK_FRAME(stackFrame);
1739 JSValue src1 = stackFrame.args[0].jsValue();
1740 JSValue src2 = stackFrame.args[1].jsValue();
1741 CallFrame* callFrame = stackFrame.callFrame;
1743 bool result = jsLess<true>(callFrame, src1, src2);
1744 CHECK_FOR_EXCEPTION_AT_END();
// Stub: evaluate 'src1 <= src2' for the jlesseq branch opcode.
1748 DEFINE_STUB_FUNCTION(int, op_jlesseq)
1750 STUB_INIT_STACK_FRAME(stackFrame);
1752 JSValue src1 = stackFrame.args[0].jsValue();
1753 JSValue src2 = stackFrame.args[1].jsValue();
1754 CallFrame* callFrame = stackFrame.callFrame;
1756 bool result = jsLessEq<true>(callFrame, src1, src2);
1757 CHECK_FOR_EXCEPTION_AT_END();
// Stub: evaluate 'src1 > src2' for the jgreater branch opcode, expressed as
// 'src2 < src1' with the operand-evaluation order flag flipped.
1761 DEFINE_STUB_FUNCTION(int, op_jgreater)
1763 STUB_INIT_STACK_FRAME(stackFrame);
1765 JSValue src1 = stackFrame.args[0].jsValue();
1766 JSValue src2 = stackFrame.args[1].jsValue();
1767 CallFrame* callFrame = stackFrame.callFrame;
1769 bool result = jsLess<false>(callFrame, src2, src1);
1770 CHECK_FOR_EXCEPTION_AT_END();
// Slow path for op_jgreatereq: implements src1 >= src2 as jsLessEq(src2, src1)
// with swapped operands, mirroring op_jgreater above.
1774 DEFINE_STUB_FUNCTION(int, op_jgreatereq)
1776 STUB_INIT_STACK_FRAME(stackFrame);
1778 JSValue src1 = stackFrame.args[0].jsValue();
1779 JSValue src2 = stackFrame.args[1].jsValue();
1780 CallFrame* callFrame = stackFrame.callFrame;
1782 bool result = jsLessEq<false>(callFrame, src2, src1);
1783 CHECK_FOR_EXCEPTION_AT_END();
// Slow path for op_jtrue: coerces the operand with ToBoolean and returns the
// result as an int for the JIT'ed conditional branch.
1787 DEFINE_STUB_FUNCTION(int, op_jtrue)
1789 STUB_INIT_STACK_FRAME(stackFrame);
1791 JSValue src1 = stackFrame.args[0].jsValue();
1793 bool result = src1.toBoolean(stackFrame.callFrame);
1794 CHECK_FOR_EXCEPTION_AT_END();
// Slow path for loose equality (==, the ES5 11.9.3 Abstract Equality
// Comparison). On JSVALUE32_64 the algorithm is hand-unrolled inline below;
// on other value representations it defers to JSValue::equalSlowCaseInline.
// NOTE(review): several closing braces / else-branches are elided in this
// excerpt; the comments below describe only the visible cases.
1798 DEFINE_STUB_FUNCTION(int, op_eq)
1800 STUB_INIT_STACK_FRAME(stackFrame);
1802 JSValue src1 = stackFrame.args[0].jsValue();
1803 JSValue src2 = stackFrame.args[1].jsValue();
1805 #if USE(JSVALUE32_64)
// undefined == x: true for null/undefined, and for cells whose Structure
// masquerades as undefined (e.g. document.all emulation).
1807 if (src2.isUndefined()) {
1808 return src1.isNull() ||
1809 (src1.isCell() && src1.asCell()->structure()->masqueradesAsUndefined(stackFrame.callFrame->lexicalGlobalObject()))
1810 || src1.isUndefined();
// null == x: symmetric to the undefined case above.
1813 if (src2.isNull()) {
1814 return src1.isUndefined() ||
1815 (src1.isCell() && src1.asCell()->structure()->masqueradesAsUndefined(stackFrame.callFrame->lexicalGlobalObject()))
// Numeric cases: int32/double fast paths, otherwise ToNumber (which may throw).
1819 if (src1.isInt32()) {
1820 if (src2.isDouble())
1821 return src1.asInt32() == src2.asDouble();
1822 double d = src2.toNumber(stackFrame.callFrame);
1823 CHECK_FOR_EXCEPTION();
1824 return src1.asInt32() == d;
1827 if (src1.isDouble()) {
1829 return src1.asDouble() == src2.asInt32();
1830 double d = src2.toNumber(stackFrame.callFrame);
1831 CHECK_FOR_EXCEPTION();
1832 return src1.asDouble() == d;
// Boolean lhs: coerce rhs with ToNumber and compare against 1.0 / 0.0.
1835 if (src1.isTrue()) {
1838 double d = src2.toNumber(stackFrame.callFrame);
1839 CHECK_FOR_EXCEPTION();
1843 if (src1.isFalse()) {
1846 double d = src2.toNumber(stackFrame.callFrame);
1847 CHECK_FOR_EXCEPTION();
// Remaining undefined/null lhs cases: only equal to a masquerading cell.
1851 if (src1.isUndefined())
1852 return src2.isCell() && src2.asCell()->structure()->masqueradesAsUndefined(stackFrame.callFrame->lexicalGlobalObject());
1855 return src2.isCell() && src2.asCell()->structure()->masqueradesAsUndefined(stackFrame.callFrame->lexicalGlobalObject());
// From here src1 is a cell. String lhs: compare numerically against numeric
// rhs (jsToNumber on the string's contents), or string-to-string below.
1857 JSCell* cell1 = src1.asCell();
1859 if (cell1->isString()) {
1861 return jsToNumber(jsCast<JSString*>(cell1)->value(stackFrame.callFrame)) == src2.asInt32();
1863 if (src2.isDouble())
1864 return jsToNumber(jsCast<JSString*>(cell1)->value(stackFrame.callFrame)) == src2.asDouble();
1867 return jsToNumber(jsCast<JSString*>(cell1)->value(stackFrame.callFrame)) == 1.0;
1870 return jsToNumber(jsCast<JSString*>(cell1)->value(stackFrame.callFrame)) == 0.0;
1872 JSCell* cell2 = src2.asCell();
1873 if (cell2->isString())
1874 return jsCast<JSString*>(cell1)->value(stackFrame.callFrame) == jsCast<JSString*>(cell2)->value(stackFrame.callFrame);
// Object rhs against string lhs: ToPrimitive the object and retry.
1876 src2 = asObject(cell2)->toPrimitive(stackFrame.callFrame);
1877 CHECK_FOR_EXCEPTION();
// Object lhs: identical objects are equal; otherwise ToPrimitive and retry.
1881 if (src2.isObject())
1882 return asObject(cell1) == asObject(src2);
1883 src1 = asObject(cell1)->toPrimitive(stackFrame.callFrame);
1884 CHECK_FOR_EXCEPTION();
1887 #else // USE(JSVALUE32_64)
// Non-32_64 representations: single shared slow-case implementation.
1888 CallFrame* callFrame = stackFrame.callFrame;
1890 bool result = JSValue::equalSlowCaseInline(callFrame, src1, src2);
1891 CHECK_FOR_EXCEPTION_AT_END();
1893 #endif // USE(JSVALUE32_64)
// Equality of two values statically known to be JSStrings (JSVALUE32_64 only;
// on other representations this stub must never be reached).
1896 DEFINE_STUB_FUNCTION(int, op_eq_strings)
1898 #if USE(JSVALUE32_64)
1899 STUB_INIT_STACK_FRAME(stackFrame);
1901 JSString* string1 = stackFrame.args[0].jsString();
1902 JSString* string2 = stackFrame.args[1].jsString();
1904 ASSERT(string1->isString());
1905 ASSERT(string2->isString());
// value() resolves ropes; compares the underlying WTF::Strings by content.
1906 return string1->value(stackFrame.callFrame) == string2->value(stackFrame.callFrame);
1909 RELEASE_ASSERT_NOT_REACHED();
// Materializes a function expression: creates a JSFunction from the
// FunctionExecutable baked into the instruction, closing over the current scope.
1914 DEFINE_STUB_FUNCTION(JSObject*, op_new_func_exp)
1916 STUB_INIT_STACK_FRAME(stackFrame);
1917 CallFrame* callFrame = stackFrame.callFrame;
1919 FunctionExecutable* function = stackFrame.args[0].function();
1920 JSFunction* func = JSFunction::create(callFrame, function, callFrame->scope());
// Sanity check: if this frame needs a full scope chain, its activation must
// already have been created before any closure captures the scope.
1921 ASSERT(callFrame->codeBlock()->codeType() != FunctionCode || !callFrame->codeBlock()->needsFullScopeChain() || callFrame->uncheckedR(callFrame->codeBlock()->activationRegister()).jsValue());
// Materializes a RegExp literal. An invalid RegExp (bad flags) throws a
// SyntaxError instead of producing an object.
1926 DEFINE_STUB_FUNCTION(JSObject*, op_new_regexp)
1928 STUB_INIT_STACK_FRAME(stackFrame);
1930 CallFrame* callFrame = stackFrame.callFrame;
1932 RegExp* regExp = stackFrame.args[0].regExp();
1933 if (!regExp->isValid()) {
1934 stackFrame.vm->throwException(callFrame, createSyntaxError(callFrame, "Invalid flags supplied to RegExp constructor."));
1935 VM_THROW_EXCEPTION();
1938 return RegExpObject::create(*stackFrame.vm, stackFrame.callFrame->lexicalGlobalObject(), stackFrame.callFrame->lexicalGlobalObject()->regExpStructure(), regExp);
// Slow path for calling eval. Completes the partially-built callee frame
// (scope, return PC, null CodeBlock), then runs the interpreter's eval() only
// when the callee really is the global eval function; otherwise returns the
// empty JSValue so the JIT performs an ordinary call.
1941 DEFINE_STUB_FUNCTION(EncodedJSValue, op_call_eval)
1943 STUB_INIT_STACK_FRAME(stackFrame);
1945 CallFrame* callFrame = stackFrame.callFrame;
1946 CallFrame* callerFrame = callFrame->callerFrame();
// Same activation-register invariant as op_new_func_exp, on the caller frame.
1947 ASSERT(callFrame->callerFrame()->codeBlock()->codeType() != FunctionCode
1948 || !callFrame->callerFrame()->codeBlock()->needsFullScopeChain()
1949 || callFrame->callerFrame()->uncheckedR(callFrame->callerFrame()->codeBlock()->activationRegister()).jsValue());
1951 callFrame->setScope(callerFrame->scope());
1952 callFrame->setReturnPC(static_cast<Instruction*>((STUB_RETURN_ADDRESS).value()));
1953 callFrame->setCodeBlock(0);
// Not the real global eval: signal the JIT to fall back to a normal call.
1955 if (!isHostFunction(callFrame->calleeAsValue(), globalFuncEval))
1956 return JSValue::encode(JSValue());
1958 JSValue result = eval(callFrame);
1959 if (stackFrame.vm->exception())
1960 return throwExceptionFromOpCall<EncodedJSValue>(stackFrame, callFrame, STUB_RETURN_ADDRESS);
1962 return JSValue::encode(result);
// Implements the 'throw' opcode: records the thrown value, locates the handler
// via jitThrow, redirects the stub return address to the catch routine, and
// returns the frame the handler runs in.
1965 DEFINE_STUB_FUNCTION(void*, op_throw)
1967 STUB_INIT_STACK_FRAME(stackFrame);
// NOTE(review): the same value is passed to both throwException() and
// jitThrow() — this looks like the exception is recorded twice; confirm
// jitThrow tolerates (or expects) an already-pending exception.
1968 stackFrame.vm->throwException(stackFrame.callFrame, stackFrame.args[0].jsValue());
1969 ExceptionHandler handler = jitThrow(stackFrame.vm, stackFrame.callFrame, stackFrame.args[0].jsValue(), STUB_RETURN_ADDRESS);
1970 STUB_SET_RETURN_ADDRESS(handler.catchRoutine);
1971 return handler.callFrame;
// for-in support: returns the property-name iterator for an object, reusing
// the Structure's enumeration cache when its cached prototype chain is still
// the object's current one; otherwise builds a fresh iterator.
1974 DEFINE_STUB_FUNCTION(JSPropertyNameIterator*, op_get_pnames)
1976 STUB_INIT_STACK_FRAME(stackFrame);
1978 CallFrame* callFrame = stackFrame.callFrame;
1979 JSObject* o = stackFrame.args[0].jsObject();
1980 Structure* structure = o->structure();
1981 JSPropertyNameIterator* jsPropertyNameIterator = structure->enumerationCache();
1982 if (!jsPropertyNameIterator || jsPropertyNameIterator->cachedPrototypeChain() != structure->prototypeChain(callFrame))
1983 jsPropertyNameIterator = JSPropertyNameIterator::create(callFrame, o);
1984 return jsPropertyNameIterator;
// for-in filtering: reports whether 'base' (still) has the named property,
// walking the prototype chain via hasProperty. May throw (e.g. via proxies
// or getters resolving ropes), hence the exception check.
1987 DEFINE_STUB_FUNCTION(int, has_property)
1989 STUB_INIT_STACK_FRAME(stackFrame);
1991 JSObject* base = stackFrame.args[0].jsObject();
1992 JSString* property = stackFrame.args[1].jsString();
1993 int result = base->hasProperty(stackFrame.callFrame, Identifier(stackFrame.callFrame, property->value(stackFrame.callFrame)));
1994 CHECK_FOR_EXCEPTION_AT_END();
// 'with' statement entry: ToObject the operand (may throw) and push a
// JSWithScope wrapping it onto the frame's scope chain.
1998 DEFINE_STUB_FUNCTION(void, op_push_with_scope)
2000 STUB_INIT_STACK_FRAME(stackFrame);
2002 JSObject* o = stackFrame.args[0].jsValue().toObject(stackFrame.callFrame);
2003 CHECK_FOR_EXCEPTION_VOID();
2004 stackFrame.callFrame->setScope(JSWithScope::create(stackFrame.callFrame, o));
// Pops the innermost scope (e.g. on 'with'/catch exit) by stepping the frame's
// scope pointer to the next link in the chain.
2007 DEFINE_STUB_FUNCTION(void, op_pop_scope)
2009 STUB_INIT_STACK_FRAME(stackFrame);
2011 stackFrame.callFrame->setScope(stackFrame.callFrame->scope()->next());
// Pushes a single-binding JSNameScope (used for catch variables / named
// function expressions): args are the identifier, its initial value, and the
// property attributes as an int32.
2014 DEFINE_STUB_FUNCTION(void, op_push_name_scope)
2016 STUB_INIT_STACK_FRAME(stackFrame);
2018 JSNameScope* scope = JSNameScope::create(stackFrame.callFrame, stackFrame.args[0].identifier(), stackFrame.args[1].jsValue(), stackFrame.args[2].int32());
2020 CallFrame* callFrame = stackFrame.callFrame;
2021 callFrame->setScope(scope);
// Stores into a known JSArray at a constant index (array-literal
// initialization path): bypasses setters via putDirectIndex.
2024 DEFINE_STUB_FUNCTION(void, op_put_by_index)
2026 STUB_INIT_STACK_FRAME(stackFrame);
2028 CallFrame* callFrame = stackFrame.callFrame;
2029 unsigned property = stackFrame.args[1].int32();
2031 JSValue arrayValue = stackFrame.args[0].jsValue();
// The bytecode only emits this opcode for arrays, so a plain ASSERT suffices.
2032 ASSERT(isJSArray(arrayValue));
2033 asArray(arrayValue)->putDirectIndex(callFrame, property, stackFrame.args[2].jsValue());
// Immediate (integer) switch: returns the jump-table target for an int32
// scrutinee, or for a double that is exactly an int32; anything else takes the
// default target.
2036 DEFINE_STUB_FUNCTION(void*, op_switch_imm)
2038 STUB_INIT_STACK_FRAME(stackFrame);
2040 JSValue scrutinee = stackFrame.args[0].jsValue();
2041 unsigned tableIndex = stackFrame.args[1].int32();
2042 CallFrame* callFrame = stackFrame.callFrame;
2043 CodeBlock* codeBlock = callFrame->codeBlock();
2045 if (scrutinee.isInt32())
2046 return codeBlock->switchJumpTable(tableIndex).ctiForValue(scrutinee.asInt32()).executableAddress();
// Integral doubles (e.g. 3.0) must hit the same case as their int32 form.
2047 if (scrutinee.isDouble() && scrutinee.asDouble() == static_cast<int32_t>(scrutinee.asDouble()))
2048 return codeBlock->switchJumpTable(tableIndex).ctiForValue(static_cast<int32_t>(scrutinee.asDouble())).executableAddress();
2049 return codeBlock->switchJumpTable(tableIndex).ctiDefault.executableAddress();
// Character switch: a single-character string scrutinee selects a jump-table
// target keyed by its code unit; everything else falls through to the default.
2052 DEFINE_STUB_FUNCTION(void*, op_switch_char)
2054 STUB_INIT_STACK_FRAME(stackFrame);
2056 JSValue scrutinee = stackFrame.args[0].jsValue();
2057 unsigned tableIndex = stackFrame.args[1].int32();
2058 CallFrame* callFrame = stackFrame.callFrame;
2059 CodeBlock* codeBlock = callFrame->codeBlock();
2061 void* result = codeBlock->switchJumpTable(tableIndex).ctiDefault.executableAddress();
2063 if (scrutinee.isString()) {
// value() may resolve a rope, which can allocate and throw — hence the
// exception check below even though this looks like pure lookup.
2064 StringImpl* value = asString(scrutinee)->value(callFrame).impl();
2065 if (value->length() == 1)
2066 result = codeBlock->switchJumpTable(tableIndex).ctiForValue((*value)[0]).executableAddress();
2069 CHECK_FOR_EXCEPTION_AT_END();
// String switch: looks the scrutinee's StringImpl up in the string jump table;
// non-strings take the default target.
2073 DEFINE_STUB_FUNCTION(void*, op_switch_string)
2075 STUB_INIT_STACK_FRAME(stackFrame);
2077 JSValue scrutinee = stackFrame.args[0].jsValue();
2078 unsigned tableIndex = stackFrame.args[1].int32();
2079 CallFrame* callFrame = stackFrame.callFrame;
2080 CodeBlock* codeBlock = callFrame->codeBlock();
2082 void* result = codeBlock->stringSwitchJumpTable(tableIndex).ctiDefault.executableAddress();
2084 if (scrutinee.isString()) {
// Rope resolution in value() can throw; checked at the end.
2085 StringImpl* value = asString(scrutinee)->value(callFrame).impl();
2086 result = codeBlock->stringSwitchJumpTable(tableIndex).ctiForValue(value).executableAddress();
2089 CHECK_FOR_EXCEPTION_AT_END();
// Installs an accessor property: wraps the getter and/or setter objects in a
// GetterSetter cell and defines it directly on the base object.
// args: [0] base object, [1] property identifier, [2] getter, [3] setter.
2093 DEFINE_STUB_FUNCTION(void, op_put_getter_setter)
2095 STUB_INIT_STACK_FRAME(stackFrame);
2097 CallFrame* callFrame = stackFrame.callFrame;
2099 ASSERT(stackFrame.args[0].jsValue().isObject());
2100 JSObject* baseObj = asObject(stackFrame.args[0].jsValue());
2102 GetterSetter* accessor = GetterSetter::create(callFrame);
2104 JSValue getter = stackFrame.args[2].jsValue();
2105 JSValue setter = stackFrame.args[3].jsValue();
// Each slot is either a function object or undefined, and at least one is set.
2106 ASSERT(getter.isObject() || getter.isUndefined());
2107 ASSERT(setter.isObject() || setter.isUndefined());
2108 ASSERT(getter.isObject() || setter.isObject());
2110 if (!getter.isUndefined())
2111 accessor->setGetter(callFrame->vm(), asObject(getter));
2112 if (!setter.isUndefined())
2113 accessor->setSetter(callFrame->vm(), asObject(setter));
2114 baseObj->putDirectAccessor(callFrame, stackFrame.args[1].identifier(), accessor, Accessor);
// Throws a statically-detected error. args[0] is the value whose description
// becomes the message; args[1] (as int32) selects ReferenceError (non-zero)
// versus TypeError (zero).
2117 DEFINE_STUB_FUNCTION(void, op_throw_static_error)
2119 STUB_INIT_STACK_FRAME(stackFrame);
2121 CallFrame* callFrame = stackFrame.callFrame;
2122 String message = errorDescriptionForValue(callFrame, stackFrame.args[0].jsValue())->value(callFrame);
2123 if (stackFrame.args[1].asInt32)
2124 stackFrame.vm->throwException(callFrame, createReferenceError(callFrame, message));
2126 stackFrame.vm->throwException(callFrame, createTypeError(callFrame, message));
2127 VM_THROW_EXCEPTION_AT_END();
// Debugger hook: forwards the hook id and source position (first/last line,
// column) from the instruction stream to the interpreter's debug() entry.
2130 DEFINE_STUB_FUNCTION(void, op_debug)
2132 STUB_INIT_STACK_FRAME(stackFrame);
2134 CallFrame* callFrame = stackFrame.callFrame;
2136 int debugHookID = stackFrame.args[0].int32();
2137 int firstLine = stackFrame.args[1].int32();
2138 int lastLine = stackFrame.args[2].int32();
2139 int column = stackFrame.args[3].int32();
2141 stackFrame.vm->interpreter->debug(callFrame, static_cast<DebugHookID>(debugHookID), firstLine, lastLine, column);
// Rethrow path for an exception already recorded on the VM (vm->exception(),
// thrown at vm->exceptionLocation): finds the handler and redirects the stub
// return address to the catch routine, like op_throw.
2144 DEFINE_STUB_FUNCTION(void*, vm_throw)
2146 STUB_INIT_STACK_FRAME(stackFrame);
2147 VM* vm = stackFrame.vm;
2148 ExceptionHandler handler = jitThrow(vm, stackFrame.callFrame, vm->exception(), vm->exceptionLocation);
2149 STUB_SET_RETURN_ADDRESS(handler.catchRoutine);
2150 return handler.callFrame;
2153 #if USE(JSVALUE32_64)
// JSVALUE32_64 exception trampoline: returns an *encoded* handler (the 32-bit
// ABI can't return the two-word ExceptionHandler in registers directly).
2154 EncodedExceptionHandler JIT_STUB cti_vm_handle_exception(CallFrame* callFrame)
2156 ASSERT(!callFrame->hasHostCallFrameFlag());
// NOTE(review): this early return is clearly inside a guard (presumably
// 'if (!callFrame)') whose condition line is elided in this excerpt — confirm.
2158 // The entire stack has already been unwound. Nothing more to handle.
2159 return encode(uncaughtExceptionHandler());
2162 VM* vm = callFrame->codeBlock()->vm();
2163 vm->topCallFrame = callFrame;
2164 return encode(jitThrowNew(vm, callFrame, vm->exception()));
// 64-bit variant of the exception trampoline: same logic as above but returns
// the ExceptionHandler struct directly.
2167 ExceptionHandler JIT_STUB cti_vm_handle_exception(CallFrame* callFrame)
2169 ASSERT(!callFrame->hasHostCallFrameFlag());
// NOTE(review): early return below is inside an elided guard (presumably
// 'if (!callFrame)') — confirm against the full file.
2171 // The entire stack has already been unwound. Nothing more to handle.
2172 return uncaughtExceptionHandler();
2175 VM* vm = callFrame->codeBlock()->vm();
2176 vm->topCallFrame = callFrame;
2177 return jitThrowNew(vm, callFrame, vm->exception());
// ToObject coercion stub: wraps the operand as an object (may throw for
// null/undefined) and returns it encoded.
2181 DEFINE_STUB_FUNCTION(EncodedJSValue, to_object)
2183 STUB_INIT_STACK_FRAME(stackFrame);
2185 CallFrame* callFrame = stackFrame.callFrame;
2186 return JSValue::encode(stackFrame.args[0].jsValue().toObject(callFrame));
// op_resolve_scope: walks the current scope chain for the identifier stored
// in the instruction's operand 2 and returns the scope object that binds it.
2189 DEFINE_STUB_FUNCTION(EncodedJSValue, op_resolve_scope)
2191 STUB_INIT_STACK_FRAME(stackFrame);
2192 ExecState* exec = stackFrame.callFrame;
// args[0] is the raw bytecode PC; operands are read straight from it.
2193 Instruction* pc = stackFrame.args[0].pc();
2195 const Identifier& ident = exec->codeBlock()->identifier(pc[2].u.operand);
2196 return JSValue::encode(JSScope::resolve(exec, exec->scope(), ident));
// op_get_from_scope: reads a variable from an already-resolved scope object.
// Bytecode layout read here: pc[2] = scope register, pc[3] = identifier index,
// pc[4] = ResolveModeAndType, pc[5]/pc[6] = structure/offset inline cache.
2199 DEFINE_STUB_FUNCTION(EncodedJSValue, op_get_from_scope)
2201 STUB_INIT_STACK_FRAME(stackFrame);
2202 ExecState* exec = stackFrame.callFrame;
2203 Instruction* pc = stackFrame.args[0].pc();
2205 const Identifier& ident = exec->codeBlock()->identifier(pc[3].u.operand);
2206 JSObject* scope = jsCast<JSObject*>(exec->uncheckedR(pc[2].u.operand).jsValue());
2207 ResolveModeAndType modeAndType(pc[4].u.operand);
2209 PropertySlot slot(scope);
2210 if (!scope->getPropertySlot(exec, ident, slot)) {
// Missing binding: ReferenceError under ThrowIfNotFound, undefined otherwise.
2211 if (modeAndType.mode() == ThrowIfNotFound) {
2212 exec->vm().throwException(exec, createUndefinedVariableError(exec, ident));
2213 VM_THROW_EXCEPTION();
2215 return JSValue::encode(jsUndefined());
2218 // Covers implicit globals. Since they don't exist until they first execute, we didn't know how to cache them at compile time.
2219 if (slot.isCacheableValue() && slot.slotBase() == scope && scope->structure()->propertyAccessesAreCacheable()) {
2220 if (modeAndType.type() == GlobalProperty || modeAndType.type() == GlobalPropertyWithVarInjectionChecks) {
// Patch the structure and offset into the instruction stream under the
// CodeBlock lock so concurrent JIT threads see a consistent cache.
2221 CodeBlock* codeBlock = exec->codeBlock();
2222 ConcurrentJITLocker locker(codeBlock->m_lock);
2223 pc[5].u.structure.set(exec->vm(), codeBlock->ownerExecutable(), scope->structure());
2224 pc[6].u.operand = slot.cachedOffset();
2228 return JSValue::encode(slot.getValue(exec, ident));
// op_put_to_scope: writes a variable into an already-resolved scope object.
// Bytecode layout read here: pc[1] = scope register, pc[2] = identifier index,
// pc[3] = value register, pc[4] = ResolveModeAndType, pc[5]/pc[6] = inline cache.
2231 DEFINE_STUB_FUNCTION(void, op_put_to_scope)
2233 STUB_INIT_STACK_FRAME(stackFrame);
2234 ExecState* exec = stackFrame.callFrame;
2235 Instruction* pc = stackFrame.args[0].pc();
2237 CodeBlock* codeBlock = exec->codeBlock();
2238 const Identifier& ident = codeBlock->identifier(pc[2].u.operand);
2239 JSObject* scope = jsCast<JSObject*>(exec->uncheckedR(pc[1].u.operand).jsValue());
2240 JSValue value = exec->r(pc[3].u.operand).jsValue();
2241 ResolveModeAndType modeAndType = ResolveModeAndType(pc[4].u.operand);
// Strict-mode-style resolve: assigning to a missing binding is a ReferenceError.
2243 if (modeAndType.mode() == ThrowIfNotFound && !scope->hasProperty(exec, ident)) {
2244 exec->vm().throwException(exec, createUndefinedVariableError(exec, ident));
2245 VM_THROW_EXCEPTION_AT_END();
2249 PutPropertySlot slot(codeBlock->isStrictMode());
2250 scope->methodTable()->put(scope, exec, ident, value, slot);
// put() may run setters; propagate any exception they raised.
2252 if (exec->vm().exception()) {
2253 VM_THROW_EXCEPTION_AT_END();
2257 // Covers implicit globals. Since they don't exist until they first execute, we didn't know how to cache them at compile time.
2258 if (modeAndType.type() == GlobalProperty || modeAndType.type() == GlobalPropertyWithVarInjectionChecks) {
2259 if (slot.isCacheable() && slot.base() == scope && scope->structure()->propertyAccessesAreCacheable()) {
// Cache structure + offset into the instruction stream under the CodeBlock
// lock, mirroring the read-side caching in op_get_from_scope.
2260 ConcurrentJITLocker locker(codeBlock->m_lock);
2261 pc[5].u.structure.set(exec->vm(), codeBlock->ownerExecutable(), scope->structure());
2262 pc[6].u.operand = slot.cachedOffset();
2269 #endif // ENABLE(JIT)