/*
 * Copyright (C) 2008, 2009 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"

#if ENABLE(JIT)
#include "JIT.h"

// This probably does not belong here; adding it here for now as a quick Windows build fix.
#if ENABLE(ASSEMBLER) && CPU(X86) && !OS(MAC_OS_X)
#include "MacroAssembler.h"
JSC::MacroAssemblerX86Common::SSE2CheckState JSC::MacroAssemblerX86Common::s_sse2CheckState = NotCheckedSSE2;
#endif

#include "CodeBlock.h"
#include "Interpreter.h"
#include "JITInlineMethods.h"
#include "JITStubCall.h"
#include "JSArray.h"
#include "JSFunction.h"
#include "LinkBuffer.h"
#include "RepatchBuffer.h"
#include "ResultType.h"
#include "SamplingTool.h"

#ifndef NDEBUG
#include <stdio.h>
#endif

using namespace std;

namespace JSC {

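// Helpers used to patch a (near) call site, identified by the return address it
// returns to, so that it calls a different trampoline or function.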
void ctiPatchNearCallByReturnAddress(CodeBlock* codeblock, ReturnAddressPtr returnAddress, MacroAssemblerCodePtr newCalleeFunction)
{
    RepatchBuffer repatchBuffer(codeblock);
    repatchBuffer.relinkNearCallerToTrampoline(returnAddress, newCalleeFunction);
}

void ctiPatchCallByReturnAddress(CodeBlock* codeblock, ReturnAddressPtr returnAddress, MacroAssemblerCodePtr newCalleeFunction)
{
    RepatchBuffer repatchBuffer(codeblock);
    repatchBuffer.relinkCallerToTrampoline(returnAddress, newCalleeFunction);
}

void ctiPatchCallByReturnAddress(CodeBlock* codeblock, ReturnAddressPtr returnAddress, FunctionPtr newCalleeFunction)
{
    RepatchBuffer repatchBuffer(codeblock);
    repatchBuffer.relinkCallerToFunction(returnAddress, newCalleeFunction);
}

JIT::JIT(JSGlobalData* globalData, CodeBlock* codeBlock, void* linkerOffset)
    : m_interpreter(globalData->interpreter)
    , m_globalData(globalData)
    , m_codeBlock(codeBlock)
    , m_labels(codeBlock ? codeBlock->instructions().size() : 0)
    , m_propertyAccessCompilationInfo(codeBlock ? codeBlock->numberOfStructureStubInfos() : 0)
    , m_callStructureStubCompilationInfo(codeBlock ? codeBlock->numberOfCallLinkInfos() : 0)
    , m_bytecodeOffset((unsigned)-1)
#if USE(JSVALUE32_64)
    , m_jumpTargetIndex(0)
    , m_mappedBytecodeOffset((unsigned)-1)
    , m_mappedVirtualRegisterIndex((unsigned)-1)
    , m_mappedTag((RegisterID)-1)
    , m_mappedPayload((RegisterID)-1)
#else
    , m_lastResultBytecodeRegister(std::numeric_limits<int>::max())
    , m_jumpTargetsPosition(0)
#endif
    , m_linkerOffset(linkerOffset)
{
}

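// Emit a timeout check: decrement timeoutCheckRegister and, when it reaches zero,
// call the cti_timeout_check stub, which stores its result back into the register.
// With JSVALUE32_64 the cached result registers are saved and reloaded around the
// stub call; with JSVALUE64 the cached result is simply killed.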
#if USE(JSVALUE32_64)
void JIT::emitTimeoutCheck()
{
    Jump skipTimeout = branchSub32(NonZero, Imm32(1), timeoutCheckRegister);
    JITStubCall stubCall(this, cti_timeout_check);
    stubCall.addArgument(regT1, regT0); // Save the last result registers.
    stubCall.call(timeoutCheckRegister);
    stubCall.getArgument(0, regT1, regT0); // Reload the last result registers.
    skipTimeout.link(this);
}
#else
void JIT::emitTimeoutCheck()
{
    Jump skipTimeout = branchSub32(NonZero, Imm32(1), timeoutCheckRegister);
    JITStubCall(this, cti_timeout_check).call(timeoutCheckRegister);
    skipTimeout.link(this);

    killLastResultRegister();
}
#endif

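// The macros below expand to the cases of the opcode switches in
// privateCompileMainPass() and privateCompileSlowCases(). DEFINE_BINARY_OP and
// DEFINE_UNARY_OP compile an opcode as a generic call to its cti_* stub, while
// DEFINE_OP and DEFINE_SLOWCASE_OP dispatch to the per-opcode emit_* and
// emitSlow_* functions.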
#define NEXT_OPCODE(name) \
    m_bytecodeOffset += OPCODE_LENGTH(name); \
    break;

#if USE(JSVALUE32_64)
#define DEFINE_BINARY_OP(name) \
    case name: { \
        JITStubCall stubCall(this, cti_##name); \
        stubCall.addArgument(currentInstruction[2].u.operand); \
        stubCall.addArgument(currentInstruction[3].u.operand); \
        stubCall.call(currentInstruction[1].u.operand); \
        NEXT_OPCODE(name); \
    }

#define DEFINE_UNARY_OP(name) \
    case name: { \
        JITStubCall stubCall(this, cti_##name); \
        stubCall.addArgument(currentInstruction[2].u.operand); \
        stubCall.call(currentInstruction[1].u.operand); \
        NEXT_OPCODE(name); \
    }

#else // USE(JSVALUE32_64)

#define DEFINE_BINARY_OP(name) \
    case name: { \
        JITStubCall stubCall(this, cti_##name); \
        stubCall.addArgument(currentInstruction[2].u.operand, regT2); \
        stubCall.addArgument(currentInstruction[3].u.operand, regT2); \
        stubCall.call(currentInstruction[1].u.operand); \
        NEXT_OPCODE(name); \
    }

#define DEFINE_UNARY_OP(name) \
    case name: { \
        JITStubCall stubCall(this, cti_##name); \
        stubCall.addArgument(currentInstruction[2].u.operand, regT2); \
        stubCall.call(currentInstruction[1].u.operand); \
        NEXT_OPCODE(name); \
    }
#endif // USE(JSVALUE32_64)

#define DEFINE_OP(name) \
    case name: { \
        emit_##name(currentInstruction); \
        NEXT_OPCODE(name); \
    }

#define DEFINE_SLOWCASE_OP(name) \
    case name: { \
        emitSlow_##name(currentInstruction, iter); \
        NEXT_OPCODE(name); \
    }

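// First pass: walk the bytecode in order, record a label for each instruction,
// and emit the fast-path code for every opcode. Slow-path jumps taken out of
// the fast paths accumulate in m_slowCases for privateCompileSlowCases().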
void JIT::privateCompileMainPass()
{
    Instruction* instructionsBegin = m_codeBlock->instructions().begin();
    unsigned instructionCount = m_codeBlock->instructions().size();

    m_propertyAccessInstructionIndex = 0;
    m_globalResolveInfoIndex = 0;
    m_callLinkInfoIndex = 0;

    for (m_bytecodeOffset = 0; m_bytecodeOffset < instructionCount; ) {
        Instruction* currentInstruction = instructionsBegin + m_bytecodeOffset;
        ASSERT_WITH_MESSAGE(m_interpreter->isOpcode(currentInstruction->u.opcode), "privateCompileMainPass gone bad @ %d", m_bytecodeOffset);

#if ENABLE(OPCODE_SAMPLING)
        if (m_bytecodeOffset > 0) // Avoid the overhead of sampling op_enter twice.
            sampleInstruction(currentInstruction);
#endif

#if USE(JSVALUE64)
        if (m_labels[m_bytecodeOffset].isUsed())
            killLastResultRegister();
#endif

        m_labels[m_bytecodeOffset] = label();

        switch (m_interpreter->getOpcodeID(currentInstruction->u.opcode)) {
        DEFINE_BINARY_OP(op_del_by_val)
        DEFINE_BINARY_OP(op_in)
        DEFINE_BINARY_OP(op_less)
        DEFINE_BINARY_OP(op_lesseq)
        DEFINE_UNARY_OP(op_is_boolean)
        DEFINE_UNARY_OP(op_is_function)
        DEFINE_UNARY_OP(op_is_number)
        DEFINE_UNARY_OP(op_is_object)
        DEFINE_UNARY_OP(op_is_string)
        DEFINE_UNARY_OP(op_is_undefined)
#if USE(JSVALUE64)
        DEFINE_UNARY_OP(op_negate)
#endif
        DEFINE_UNARY_OP(op_typeof)

        DEFINE_OP(op_add)
        DEFINE_OP(op_bitand)
        DEFINE_OP(op_bitnot)
        DEFINE_OP(op_bitor)
        DEFINE_OP(op_bitxor)
        DEFINE_OP(op_call)
        DEFINE_OP(op_call_eval)
        DEFINE_OP(op_call_varargs)
        DEFINE_OP(op_catch)
        DEFINE_OP(op_construct)
        DEFINE_OP(op_get_callee)
        DEFINE_OP(op_create_this)
        DEFINE_OP(op_convert_this)
        DEFINE_OP(op_convert_this_strict)
        DEFINE_OP(op_init_lazy_reg)
        DEFINE_OP(op_create_arguments)
        DEFINE_OP(op_debug)
        DEFINE_OP(op_del_by_id)
        DEFINE_OP(op_div)
        DEFINE_OP(op_end)
        DEFINE_OP(op_enter)
        DEFINE_OP(op_create_activation)
        DEFINE_OP(op_eq)
        DEFINE_OP(op_eq_null)
        DEFINE_OP(op_get_by_id)
        DEFINE_OP(op_get_arguments_length)
        DEFINE_OP(op_get_by_val)
        DEFINE_OP(op_get_argument_by_val)
        DEFINE_OP(op_get_by_pname)
        DEFINE_OP(op_get_global_var)
        DEFINE_OP(op_get_pnames)
        DEFINE_OP(op_get_scoped_var)
        DEFINE_OP(op_check_has_instance)
        DEFINE_OP(op_instanceof)
        DEFINE_OP(op_jeq_null)
        DEFINE_OP(op_jfalse)
        DEFINE_OP(op_jmp)
        DEFINE_OP(op_jmp_scopes)
        DEFINE_OP(op_jneq_null)
        DEFINE_OP(op_jneq_ptr)
        DEFINE_OP(op_jnless)
        DEFINE_OP(op_jless)
        DEFINE_OP(op_jlesseq)
        DEFINE_OP(op_jnlesseq)
        DEFINE_OP(op_jsr)
        DEFINE_OP(op_jtrue)
        DEFINE_OP(op_load_varargs)
        DEFINE_OP(op_loop)
        DEFINE_OP(op_loop_if_less)
        DEFINE_OP(op_loop_if_lesseq)
        DEFINE_OP(op_loop_if_true)
        DEFINE_OP(op_loop_if_false)
        DEFINE_OP(op_lshift)
        DEFINE_OP(op_method_check)
        DEFINE_OP(op_mod)
        DEFINE_OP(op_mov)
        DEFINE_OP(op_mul)
#if USE(JSVALUE32_64)
        DEFINE_OP(op_negate)
#endif
        DEFINE_OP(op_neq)
        DEFINE_OP(op_neq_null)
        DEFINE_OP(op_new_array)
        DEFINE_OP(op_new_func)
        DEFINE_OP(op_new_func_exp)
        DEFINE_OP(op_new_object)
        DEFINE_OP(op_new_regexp)
        DEFINE_OP(op_next_pname)
        DEFINE_OP(op_not)
        DEFINE_OP(op_nstricteq)
        DEFINE_OP(op_pop_scope)
        DEFINE_OP(op_post_dec)
        DEFINE_OP(op_post_inc)
        DEFINE_OP(op_pre_dec)
        DEFINE_OP(op_pre_inc)
        DEFINE_OP(op_profile_did_call)
        DEFINE_OP(op_profile_will_call)
        DEFINE_OP(op_push_new_scope)
        DEFINE_OP(op_push_scope)
        DEFINE_OP(op_put_by_id)
        DEFINE_OP(op_put_by_index)
        DEFINE_OP(op_put_by_val)
        DEFINE_OP(op_put_getter)
        DEFINE_OP(op_put_global_var)
        DEFINE_OP(op_put_scoped_var)
        DEFINE_OP(op_put_setter)
        DEFINE_OP(op_resolve)
        DEFINE_OP(op_resolve_base)
        DEFINE_OP(op_ensure_property_exists)
        DEFINE_OP(op_resolve_global)
        DEFINE_OP(op_resolve_global_dynamic)
        DEFINE_OP(op_resolve_skip)
        DEFINE_OP(op_resolve_with_base)
        DEFINE_OP(op_ret)
        DEFINE_OP(op_call_put_result)
        DEFINE_OP(op_ret_object_or_this)
        DEFINE_OP(op_rshift)
        DEFINE_OP(op_urshift)
        DEFINE_OP(op_sret)
        DEFINE_OP(op_strcat)
        DEFINE_OP(op_stricteq)
        DEFINE_OP(op_sub)
        DEFINE_OP(op_switch_char)
        DEFINE_OP(op_switch_imm)
        DEFINE_OP(op_switch_string)
        DEFINE_OP(op_tear_off_activation)
        DEFINE_OP(op_tear_off_arguments)
        DEFINE_OP(op_throw)
        DEFINE_OP(op_throw_reference_error)
        DEFINE_OP(op_to_jsnumber)
        DEFINE_OP(op_to_primitive)

        case op_get_array_length:
        case op_get_by_id_chain:
        case op_get_by_id_generic:
        case op_get_by_id_proto:
        case op_get_by_id_proto_list:
        case op_get_by_id_self:
        case op_get_by_id_self_list:
        case op_get_by_id_getter_chain:
        case op_get_by_id_getter_proto:
        case op_get_by_id_getter_proto_list:
        case op_get_by_id_getter_self:
        case op_get_by_id_getter_self_list:
        case op_get_by_id_custom_chain:
        case op_get_by_id_custom_proto:
        case op_get_by_id_custom_proto_list:
        case op_get_by_id_custom_self:
        case op_get_by_id_custom_self_list:
        case op_get_string_length:
        case op_put_by_id_generic:
        case op_put_by_id_replace:
        case op_put_by_id_transition:
            ASSERT_NOT_REACHED();
        }
    }

    ASSERT(m_propertyAccessInstructionIndex == m_codeBlock->numberOfStructureStubInfos());
    ASSERT(m_callLinkInfoIndex == m_codeBlock->numberOfCallLinkInfos());

#ifndef NDEBUG
    // Reset this, in order to guard its use with ASSERTs.
    m_bytecodeOffset = (unsigned)-1;
#endif
}

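// Second pass: every bytecode offset now has a label, so resolve the jumps
// recorded during the main pass.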
void JIT::privateCompileLinkPass()
{
    unsigned jmpTableCount = m_jmpTable.size();
    for (unsigned i = 0; i < jmpTableCount; ++i)
        m_jmpTable[i].from.linkTo(m_labels[m_jmpTable[i].toBytecodeOffset], this);
    m_jmpTable.clear();
}

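// Third pass: emit the out-of-line code for the slow cases recorded during the
// main pass. All slow-case jumps for one bytecode instruction are linked here,
// and each slow path ends by jumping back to the corresponding fast path.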
void JIT::privateCompileSlowCases()
{
    Instruction* instructionsBegin = m_codeBlock->instructions().begin();

    m_propertyAccessInstructionIndex = 0;
    m_globalResolveInfoIndex = 0;
    m_callLinkInfoIndex = 0;

    for (Vector<SlowCaseEntry>::iterator iter = m_slowCases.begin(); iter != m_slowCases.end();) {
#if USE(JSVALUE64)
        killLastResultRegister();
#endif

        m_bytecodeOffset = iter->to;
#ifndef NDEBUG
        unsigned firstTo = m_bytecodeOffset;
#endif
        Instruction* currentInstruction = instructionsBegin + m_bytecodeOffset;

        switch (m_interpreter->getOpcodeID(currentInstruction->u.opcode)) {
        DEFINE_SLOWCASE_OP(op_add)
        DEFINE_SLOWCASE_OP(op_bitand)
        DEFINE_SLOWCASE_OP(op_bitnot)
        DEFINE_SLOWCASE_OP(op_bitor)
        DEFINE_SLOWCASE_OP(op_bitxor)
        DEFINE_SLOWCASE_OP(op_call)
        DEFINE_SLOWCASE_OP(op_call_eval)
        DEFINE_SLOWCASE_OP(op_call_varargs)
        DEFINE_SLOWCASE_OP(op_construct)
        DEFINE_SLOWCASE_OP(op_convert_this)
        DEFINE_SLOWCASE_OP(op_convert_this_strict)
        DEFINE_SLOWCASE_OP(op_div)
        DEFINE_SLOWCASE_OP(op_eq)
        DEFINE_SLOWCASE_OP(op_get_by_id)
        DEFINE_SLOWCASE_OP(op_get_arguments_length)
        DEFINE_SLOWCASE_OP(op_get_by_val)
        DEFINE_SLOWCASE_OP(op_get_argument_by_val)
        DEFINE_SLOWCASE_OP(op_get_by_pname)
        DEFINE_SLOWCASE_OP(op_check_has_instance)
        DEFINE_SLOWCASE_OP(op_instanceof)
        DEFINE_SLOWCASE_OP(op_jfalse)
        DEFINE_SLOWCASE_OP(op_jnless)
        DEFINE_SLOWCASE_OP(op_jless)
        DEFINE_SLOWCASE_OP(op_jlesseq)
        DEFINE_SLOWCASE_OP(op_jnlesseq)
        DEFINE_SLOWCASE_OP(op_jtrue)
        DEFINE_SLOWCASE_OP(op_load_varargs)
        DEFINE_SLOWCASE_OP(op_loop_if_less)
        DEFINE_SLOWCASE_OP(op_loop_if_lesseq)
        DEFINE_SLOWCASE_OP(op_loop_if_true)
        DEFINE_SLOWCASE_OP(op_loop_if_false)
        DEFINE_SLOWCASE_OP(op_lshift)
        DEFINE_SLOWCASE_OP(op_method_check)
        DEFINE_SLOWCASE_OP(op_mod)
        DEFINE_SLOWCASE_OP(op_mul)
#if USE(JSVALUE32_64)
        DEFINE_SLOWCASE_OP(op_negate)
#endif
        DEFINE_SLOWCASE_OP(op_neq)
        DEFINE_SLOWCASE_OP(op_not)
        DEFINE_SLOWCASE_OP(op_nstricteq)
        DEFINE_SLOWCASE_OP(op_post_dec)
        DEFINE_SLOWCASE_OP(op_post_inc)
        DEFINE_SLOWCASE_OP(op_pre_dec)
        DEFINE_SLOWCASE_OP(op_pre_inc)
        DEFINE_SLOWCASE_OP(op_put_by_id)
        DEFINE_SLOWCASE_OP(op_put_by_val)
        DEFINE_SLOWCASE_OP(op_resolve_global)
        DEFINE_SLOWCASE_OP(op_resolve_global_dynamic)
        DEFINE_SLOWCASE_OP(op_rshift)
        DEFINE_SLOWCASE_OP(op_urshift)
        DEFINE_SLOWCASE_OP(op_stricteq)
        DEFINE_SLOWCASE_OP(op_sub)
        DEFINE_SLOWCASE_OP(op_to_jsnumber)
        DEFINE_SLOWCASE_OP(op_to_primitive)
        default:
            ASSERT_NOT_REACHED();
        }

        ASSERT_WITH_MESSAGE(iter == m_slowCases.end() || firstTo != iter->to, "Not enough jumps linked in slow case codegen.");
        ASSERT_WITH_MESSAGE(firstTo == (iter - 1)->to, "Too many jumps linked in slow case codegen.");

        emitJumpSlowToHot(jump(), 0);
    }

#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    ASSERT(m_propertyAccessInstructionIndex == m_codeBlock->numberOfStructureStubInfos());
#endif
    ASSERT(m_callLinkInfoIndex == m_codeBlock->numberOfCallLinkInfos());

#ifndef NDEBUG
    // Reset this, in order to guard its use with ASSERTs.
    m_bytecodeOffset = (unsigned)-1;
#endif
}

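// Compile this CodeBlock: emit the prologue (including, for function code, the
// register file check and the arity check), run the three compilation passes,
// then link the result into executable memory through a LinkBuffer and record
// the patching metadata (switch tables, exception handlers, stub info) on the
// CodeBlock.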
JITCode JIT::privateCompile(CodePtr* functionEntryArityCheck)
{
    // Could use a pop_m, but would need to offset the following instruction if so.
    preserveReturnAddressAfterCall(regT2);
    emitPutToCallFrameHeader(regT2, RegisterFile::ReturnPC);

    Label beginLabel(this);

    sampleCodeBlock(m_codeBlock);
#if ENABLE(OPCODE_SAMPLING)
    sampleInstruction(m_codeBlock->instructions().begin());
#endif

    Jump registerFileCheck;
    if (m_codeBlock->codeType() == FunctionCode) {
        // In the case of a fast linked call, we do not set this up in the caller.
        emitPutImmediateToCallFrameHeader(m_codeBlock, RegisterFile::CodeBlock);

        addPtr(Imm32(m_codeBlock->m_numCalleeRegisters * sizeof(Register)), callFrameRegister, regT1);
        registerFileCheck = branchPtr(Below, AbsoluteAddress(&m_globalData->interpreter->registerFile().m_end), regT1);
    }

    Label functionBody = label();

    privateCompileMainPass();
    privateCompileLinkPass();
    privateCompileSlowCases();

    Label arityCheck;
    if (m_codeBlock->codeType() == FunctionCode) {
        registerFileCheck.link(this);
        m_bytecodeOffset = 0;
        JITStubCall(this, cti_register_file_check).call();
#ifndef NDEBUG
        m_bytecodeOffset = (unsigned)-1; // Reset this, in order to guard its use with ASSERTs.
#endif
        jump(functionBody);

        arityCheck = label();
        preserveReturnAddressAfterCall(regT2);
        emitPutToCallFrameHeader(regT2, RegisterFile::ReturnPC);
        branch32(Equal, regT1, Imm32(m_codeBlock->m_numParameters)).linkTo(beginLabel, this);
        restoreArgumentReference();

        JITStubCall(this, m_codeBlock->m_isConstructor ? cti_op_construct_arityCheck : cti_op_call_arityCheck).call(callFrameRegister);

        jump(beginLabel);
    }

    ASSERT(m_jmpTable.isEmpty());

    LinkBuffer patchBuffer(this, m_globalData->executableAllocator.poolForSize(m_assembler.size()), m_linkerOffset);

    // Translate vPC offsets into addresses in JIT generated code, for switch tables.
    for (unsigned i = 0; i < m_switches.size(); ++i) {
        SwitchRecord record = m_switches[i];
        unsigned bytecodeOffset = record.bytecodeOffset;

        if (record.type != SwitchRecord::String) {
            ASSERT(record.type == SwitchRecord::Immediate || record.type == SwitchRecord::Character);
            ASSERT(record.jumpTable.simpleJumpTable->branchOffsets.size() == record.jumpTable.simpleJumpTable->ctiOffsets.size());

            record.jumpTable.simpleJumpTable->ctiDefault = patchBuffer.locationOf(m_labels[bytecodeOffset + record.defaultOffset]);

            for (unsigned j = 0; j < record.jumpTable.simpleJumpTable->branchOffsets.size(); ++j) {
                unsigned offset = record.jumpTable.simpleJumpTable->branchOffsets[j];
                record.jumpTable.simpleJumpTable->ctiOffsets[j] = offset ? patchBuffer.locationOf(m_labels[bytecodeOffset + offset]) : record.jumpTable.simpleJumpTable->ctiDefault;
            }
        } else {
            ASSERT(record.type == SwitchRecord::String);

            record.jumpTable.stringJumpTable->ctiDefault = patchBuffer.locationOf(m_labels[bytecodeOffset + record.defaultOffset]);

            StringJumpTable::StringOffsetTable::iterator end = record.jumpTable.stringJumpTable->offsetTable.end();
            for (StringJumpTable::StringOffsetTable::iterator it = record.jumpTable.stringJumpTable->offsetTable.begin(); it != end; ++it) {
                unsigned offset = it->second.branchOffset;
                it->second.ctiOffset = offset ? patchBuffer.locationOf(m_labels[bytecodeOffset + offset]) : record.jumpTable.stringJumpTable->ctiDefault;
            }
        }
    }

    for (size_t i = 0; i < m_codeBlock->numberOfExceptionHandlers(); ++i) {
        HandlerInfo& handler = m_codeBlock->exceptionHandler(i);
        handler.nativeCode = patchBuffer.locationOf(m_labels[handler.target]);
    }

    for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) {
        if (iter->to)
            patchBuffer.link(iter->from, FunctionPtr(iter->to));
    }

    if (m_codeBlock->needsCallReturnIndices()) {
        m_codeBlock->callReturnIndexVector().reserveCapacity(m_calls.size());
        for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter)
            m_codeBlock->callReturnIndexVector().append(CallReturnOffsetToBytecodeOffset(patchBuffer.returnAddressOffset(iter->from), iter->bytecodeOffset));
    }

    // Link absolute addresses for jsr.
    for (Vector<JSRInfo>::iterator iter = m_jsrSites.begin(); iter != m_jsrSites.end(); ++iter)
        patchBuffer.patch(iter->storeLocation, patchBuffer.locationOf(iter->target).executableAddress());

#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    for (unsigned i = 0; i < m_codeBlock->numberOfStructureStubInfos(); ++i) {
        StructureStubInfo& info = m_codeBlock->structureStubInfo(i);
        info.callReturnLocation = patchBuffer.locationOf(m_propertyAccessCompilationInfo[i].callReturnLocation);
        info.hotPathBegin = patchBuffer.locationOf(m_propertyAccessCompilationInfo[i].hotPathBegin);
    }
#endif
#if ENABLE(JIT_OPTIMIZE_CALL)
    for (unsigned i = 0; i < m_codeBlock->numberOfCallLinkInfos(); ++i) {
        CallLinkInfo& info = m_codeBlock->callLinkInfo(i);
        info.ownerCodeBlock = m_codeBlock;
        info.callReturnLocation = patchBuffer.locationOfNearCall(m_callStructureStubCompilationInfo[i].callReturnLocation);
        info.hotPathBegin = patchBuffer.locationOf(m_callStructureStubCompilationInfo[i].hotPathBegin);
        info.hotPathOther = patchBuffer.locationOfNearCall(m_callStructureStubCompilationInfo[i].hotPathOther);
    }
#endif
    unsigned methodCallCount = m_methodCallCompilationInfo.size();
    m_codeBlock->addMethodCallLinkInfos(methodCallCount);
    for (unsigned i = 0; i < methodCallCount; ++i) {
        MethodCallLinkInfo& info = m_codeBlock->methodCallLinkInfo(i);
        info.structureLabel = patchBuffer.locationOf(m_methodCallCompilationInfo[i].structureToCompare);
        info.callReturnLocation = m_codeBlock->structureStubInfo(m_methodCallCompilationInfo[i].propertyAccessIndex).callReturnLocation;
    }

    if (m_codeBlock->codeType() == FunctionCode && functionEntryArityCheck)
        *functionEntryArityCheck = patchBuffer.locationOf(arityCheck);

    return patchBuffer.finalizeCode();
}

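// JSVALUE64 only: read or write a JSVariableObject register slot, following the
// d->registers indirection to reach the register array.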
#if USE(JSVALUE64)
void JIT::emitGetVariableObjectRegister(RegisterID variableObject, int index, RegisterID dst)
{
    loadPtr(Address(variableObject, OBJECT_OFFSETOF(JSVariableObject, d)), dst);
    loadPtr(Address(dst, OBJECT_OFFSETOF(JSVariableObject::JSVariableObjectData, registers)), dst);
    loadPtr(Address(dst, index * sizeof(Register)), dst);
}

void JIT::emitPutVariableObjectRegister(RegisterID src, RegisterID variableObject, int index)
{
    loadPtr(Address(variableObject, OBJECT_OFFSETOF(JSVariableObject, d)), variableObject);
    loadPtr(Address(variableObject, OBJECT_OFFSETOF(JSVariableObject::JSVariableObjectData, registers)), variableObject);
    storePtr(src, Address(variableObject, index * sizeof(Register)));
}
#endif

#if ENABLE(JIT_OPTIMIZE_CALL)
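// Call-site linking. Roughly: a linked call embeds a pointer to the callee
// JSFunction at hotPathBegin (as a check) and a near call to the callee's code
// at hotPathOther; unlinking resets the embedded pointer so the check can no
// longer match.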
void JIT::unlinkCallOrConstruct(CallLinkInfo* callLinkInfo)
{
    // When the JSFunction is deleted, the pointer embedded in the instruction stream will no longer
    // be valid (and, if a new JSFunction happened to be constructed at the same location, we could
    // get a false-positive match). Reset the check so it no longer matches.
    RepatchBuffer repatchBuffer(callLinkInfo->ownerCodeBlock);
#if USE(JSVALUE32_64)
    repatchBuffer.repatch(callLinkInfo->hotPathBegin, 0);
#else
    repatchBuffer.repatch(callLinkInfo->hotPathBegin, JSValue::encode(JSValue()));
#endif
}

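// Link a call site directly to the callee's compiled code. Linking only happens
// when the argument count matches the callee's parameter count (or the callee is
// a native function, with no CodeBlock); either way, the slow-path call is
// repatched to the virtual call thunk so we do not keep trying to link.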
void JIT::linkCall(JSFunction* callee, CodeBlock* callerCodeBlock, CodeBlock* calleeCodeBlock, JIT::CodePtr code, CallLinkInfo* callLinkInfo, int callerArgCount, JSGlobalData* globalData)
{
    RepatchBuffer repatchBuffer(callerCodeBlock);

    // Currently we only link calls with the exact number of arguments.
    // If this is a native call, calleeCodeBlock is null, so the number of parameters is unimportant.
    if (!calleeCodeBlock || (callerArgCount == calleeCodeBlock->m_numParameters)) {
        ASSERT(!callLinkInfo->isLinked());

        if (calleeCodeBlock)
            calleeCodeBlock->addCaller(callLinkInfo);

        repatchBuffer.repatch(callLinkInfo->hotPathBegin, callee);
        repatchBuffer.relink(callLinkInfo->hotPathOther, code);
    }

    // Patch the call so we do not continue to try to link.
    repatchBuffer.relink(callLinkInfo->callReturnLocation, globalData->jitStubs->ctiVirtualCall());
}

void JIT::linkConstruct(JSFunction* callee, CodeBlock* callerCodeBlock, CodeBlock* calleeCodeBlock, JIT::CodePtr code, CallLinkInfo* callLinkInfo, int callerArgCount, JSGlobalData* globalData)
{
    RepatchBuffer repatchBuffer(callerCodeBlock);

    // Currently we only link calls with the exact number of arguments.
    // If this is a native call, calleeCodeBlock is null, so the number of parameters is unimportant.
    if (!calleeCodeBlock || (callerArgCount == calleeCodeBlock->m_numParameters)) {
        ASSERT(!callLinkInfo->isLinked());

        if (calleeCodeBlock)
            calleeCodeBlock->addCaller(callLinkInfo);

        repatchBuffer.repatch(callLinkInfo->hotPathBegin, callee);
        repatchBuffer.relink(callLinkInfo->hotPathOther, code);
    }

    // Patch the call so we do not continue to try to link.
    repatchBuffer.relink(callLinkInfo->callReturnLocation, globalData->jitStubs->ctiVirtualConstruct());
}
#endif // ENABLE(JIT_OPTIMIZE_CALL)

} // namespace JSC

#endif // ENABLE(JIT)