1 /* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*-
2 * vim: set ts=4 sw=4 et tw=99:
4 * ***** BEGIN LICENSE BLOCK *****
5 * Version: MPL 1.1/GPL 2.0/LGPL 2.1
7 * The contents of this file are subject to the Mozilla Public License Version
8 * 1.1 (the "License"); you may not use this file except in compliance with
9 * the License. You may obtain a copy of the License at
10 * http://www.mozilla.org/MPL/
12 * Software distributed under the License is distributed on an "AS IS" basis,
13 * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
14 * for the specific language governing rights and limitations under the
17 * The Original Code is Mozilla SpiderMonkey JavaScript 1.9 code, released
20 * The Initial Developer of the Original Code is
21 * Brendan Eich <brendan@mozilla.org>
24 * Andreas Gal <gal@mozilla.com>
25 * Mike Shaver <shaver@mozilla.org>
26 * David Anderson <danderson@mozilla.com>
28 * Alternatively, the contents of this file may be used under the terms of
29 * either of the GNU General Public License Version 2 or later (the "GPL"),
30 * or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
31 * in which case the provisions of the GPL or the LGPL are applicable instead
32 * of those above. If you wish to allow use of your version of this file only
33 * under the terms of either the GPL or the LGPL, and not to allow others to
34 * use your version of this file under the terms of the MPL, indicate your
35 * decision by deleting the provisions above and replace them with the notice
36 * and other provisions required by the GPL or the LGPL. If you do not delete
37 * the provisions above, a recipient may use your version of this file under
38 * the terms of any one of the MPL, the GPL or the LGPL.
40 * ***** END LICENSE BLOCK ***** */
43 #include "jsbit.h" // low-level (NSPR-based) headers next
45 #include <math.h> // standard headers next
47 #if defined(_MSC_VER) || defined(__MINGW32__)
50 #define alloca _alloca
58 #include "nanojit/nanojit.h"
59 #include "jsapi.h" // higher-level library and API headers
63 #include "jscompartment.h"
76 #include "jsstaticcheck.h"
80 #include "jstypedarray.h"
82 #include "jsatominlines.h"
83 #include "jscntxtinlines.h"
84 #include "jsfuninlines.h"
85 #include "jsinterpinlines.h"
86 #include "jspropertycacheinlines.h"
87 #include "jsobjinlines.h"
88 #include "jsscopeinlines.h"
89 #include "jsscriptinlines.h"
90 #include "jscntxtinlines.h"
91 #include "jsopcodeinlines.h"
94 #include "methodjit/MethodJIT.h"
97 #include "jsautooplen.h" // generated headers last
98 #include "imacros.c.out"
100 #if defined(NANOJIT_ARM) && defined(__GNUC__) && defined(AVMPLUS_LINUX)
103 #include <sys/types.h>
104 #include <sys/stat.h>
105 #include <sys/mman.h>
/*
 * Return the printable name of a side-exit type.  The name table is built
 * by stringifying every entry of JS_TM_EXITCODES with the temporary
 * MAKE_EXIT_STRING macro, so it stays in sync with the ExitType enum.
 * NOTE(review): this listing is elided (braces/lines missing); only
 * comments were added, the code text is unchanged.
 */
114 getExitName(ExitType type)
116     static const char* exitNames[] =
118 #define MAKE_EXIT_STRING(x) #x,
119     JS_TM_EXITCODES(MAKE_EXIT_STRING)
120 #undef MAKE_EXIT_STRING
/* Debug-only bounds check: 'type' must be a valid index into exitNames. */
124     JS_ASSERT(type < TOTAL_EXIT_TYPES);
126     return exitNames[type];
133 using namespace js::gc;
134 using namespace js::tjit;
137 * This macro is just like JS_NOT_REACHED but it exists in non-debug builds
138 * too. Its presence indicates shortcomings in jstracer's handling of some
140 * - OOM failures in constructors, which lack a return value to pass back a
141 * failure code (though it can and should be done indirectly).
142 * - OOM failures in the "infallible" allocators used for Nanojit.
144 * FIXME: bug 624590 is open to fix these problems.
146 #define OUT_OF_MEMORY_ABORT(msg) JS_Assert(msg, __FILE__, __LINE__);
148 /* Implement embedder-specific nanojit members. */
151 * Nanojit requires infallible allocations most of the time. We satisfy this
152 * by reserving some space in each allocator which is used as a fallback if
153  * js_calloc() fails. Ideally this reserve space should be big enough to
154 * allow for all infallible requests made to the allocator until the next OOM
155 * check occurs, but it turns out that's impossible to guarantee (though it
156 * should be unlikely). So we abort if the reserve runs out; this is better
157 * than allowing memory errors to occur.
159 * The space calculations are as follows... between OOM checks, each
160 * VMAllocator can do (ie. has been seen to do) the following maximum
161 * allocations on 64-bits:
163 * - dataAlloc: 31 minimum-sized chunks (MIN_CHUNK_SZB) in assm->compile()
164 * (though arbitrarily more could occur due to LabelStateMap additions done
165 * when handling labels): 62,248 bytes. This one is the most likely to
168 * - traceAlloc: 1 minimum-sized chunk: 2,008 bytes.
170 * - tempAlloc: 1 LIR code chunk (CHUNK_SZB) and 5 minimum-sized chunks for
171 * sundry small allocations: 18,048 bytes.
173 * The reserve sizes are chosen by exceeding this by a reasonable amount.
174 * Reserves for 32-bits are slightly more than half, because most of the
175 * allocated space is used to hold pointers.
177 * FIXME: Bug 624590 is open to get rid of all this.
/*
 * Fallback reserve sizes for the three VMAllocators (see the block comment
 * above).  Sized in machine words, so 32-bit builds reserve roughly half
 * the bytes of 64-bit builds, matching the mostly-pointers payload.
 */
179 static const size_t DataReserveSize = 12500 * sizeof(uintptr_t);
180 static const size_t TraceReserveSize = 5000 * sizeof(uintptr_t);
181 static const size_t TempReserveSize = 1000 * sizeof(uintptr_t);
/*
 * Embedder-supplied chunk allocator for nanojit::Allocator.  Tries
 * js_calloc() first; on failure marks the owning VMAllocator out-of-memory
 * and (for infallible requests) hands out zeroed space from the allocator's
 * pre-reserved fallback region, aborting if that reserve is exhausted.
 * NOTE(review): listing is elided (some branches/lines missing); comments
 * only, code text unchanged.
 */
184 nanojit::Allocator::allocChunk(size_t nbytes, bool fallible)
186 VMAllocator *vma = (VMAllocator*)this;
188 * Nb: it's conceivable that request 1 might fail (in which case
189 * mOutOfMemory will be set) and then request 2 succeeds. The subsequent
190 * OOM check will still fail, which is what we want, and the success of
191 * request 2 makes it less likely that the reserve space will overflow.
193 void *p = js_calloc(nbytes);
195 vma->mSize += nbytes;
/* js_calloc failed: remember it so the next OOM check aborts recording. */
197 vma->mOutOfMemory = true;
/* Infallible path: bump-allocate from the reserve region instead. */
199 p = (void *)vma->mReserveCurr;
200 vma->mReserveCurr += nbytes;
201 if (vma->mReserveCurr > vma->mReserveLimit)
202 OUT_OF_MEMORY_ABORT("nanojit::Allocator::allocChunk: out of memory");
/* Reserve memory is not pre-zeroed the way js_calloc memory is. */
203 memset(p, 0, nbytes);
204 vma->mSize += nbytes;
/*
 * Free a chunk previously returned by allocChunk.  Chunks that fall inside
 * the reserve region were never heap-allocated, so only pointers outside
 * [mReserve, mReserveLimit) are handed back to the heap free routine.
 * NOTE(review): the actual free call is elided from this listing.
 */
211 nanojit::Allocator::freeChunk(void *p) {
212 VMAllocator *vma = (VMAllocator*)this;
213 if (p < vma->mReserve || uintptr_t(p) >= vma->mReserveLimit)
/*
 * Hook called after the allocator is reset: clear the OOM flag and rewind
 * the reserve bump-pointer to the start of the reserve region so the
 * fallback space is fully available again.
 */
218 nanojit::Allocator::postReset() {
219 VMAllocator *vma = (VMAllocator*)this;
220 vma->mOutOfMemory = false;
222 vma->mReserveCurr = uintptr_t(vma->mReserve);
/*
 * StackFilter callback: recover the VMSideExit attached to a guard
 * instruction's record.  (Return value computation is elided from this
 * listing; comments only.)
 */
226 StackFilter::getTop(LIns* guard)
228 VMSideExit* e = (VMSideExit*)guard->record()->exit;
232 #if defined NJ_VERBOSE
/*
 * Verbose-mode pretty-printers for guard instructions.  formatGuardExit
 * renders the side-exit metadata (exit/pc/imacpc pointers, stack and
 * rstack adjustments, exit-type name, and - when fragment profiling is
 * on - the guard's profiling ID); formatGuard/formatGuardXov prepend the
 * LIR opcode and operand(s).  NOTE(review): listing is elided; comments
 * only, code text unchanged.
 */
234 formatGuardExit(InsBuf *buf, LIns *ins)
236 VMSideExit *x = (VMSideExit *)ins->record()->exit;
238 if (LogController.lcbits & LC_FragProfile)
239 VMPI_snprintf(b1.buf, b1.len, " (GuardID=%03d)", ins->record()->profGuardID);
242 VMPI_snprintf(buf->buf, buf->len,
243 " -> exit=%p pc=%p imacpc=%p sp%+ld rp%+ld %s%s",
249 getExitName(x->exitType),
/* Guard with zero or one operand. */
254 LInsPrinter::formatGuard(InsBuf *buf, LIns *ins)
258 formatGuardExit(&b3, ins);
259 VMPI_snprintf(buf->buf, buf->len,
262 lirNames[ins->opcode()],
263 ins->oprnd1() ? formatRef(&b2, ins->oprnd1()) : "",
/* Overflow-checking guard: always has two operands. */
268 LInsPrinter::formatGuardXov(InsBuf *buf, LIns *ins)
272 formatGuardExit(&b4, ins);
273 VMPI_snprintf(buf->buf, buf->len,
276 lirNames[ins->opcode()],
277 formatRef(&b2, ins->oprnd1()),
278 formatRef(&b3, ins->oprnd2()),
/*
 * Human-readable names for each access-set (ACCSET_*) bit, indexed by bit
 * position; used by the LIR printer.  The static assert below ties the
 * array length to TM_NUM_USED_ACCS plus the final "?!" sentinel, so adding
 * an access set without a name fails at compile time.
 */
283 nanojit::LInsPrinter::accNames[] = {
284 "state", // (1 << 0) == ACCSET_STATE
285 "sp", // (1 << 1) == ACCSET_STACK
286 "rp", // (1 << 2) == ACCSET_RSTACK
287 "cx", // (1 << 3) == ACCSET_CX
288 "tm", // (1 << 4) == ACCSET_TM
289 "eos", // (1 << 5) == ACCSET_EOS
290 "alloc", // (1 << 6) == ACCSET_ALLOC
291 "regs", // (1 << 7) == ACCSET_FRAMEREGS
292 "sf", // (1 << 8) == ACCSET_STACKFRAME
293 "rt", // (1 << 9) == ACCSET_RUNTIME
295 "objclasp", // (1 << 10) == ACCSET_OBJ_CLASP
296 "objflags", // (1 << 11) == ACCSET_OBJ_FLAGS
297 "objshape", // (1 << 12) == ACCSET_OBJ_SHAPE
298 "objproto", // (1 << 13) == ACCSET_OBJ_PROTO
299 "objparent", // (1 << 14) == ACCSET_OBJ_PARENT
300 "objprivate", // (1 << 15) == ACCSET_OBJ_PRIVATE
301 "objcapacity", // (1 << 16) == ACCSET_OBJ_CAPACITY
302 "objslots", // (1 << 17) == ACCSET_OBJ_SLOTS
304 "slots", // (1 << 18) == ACCSET_SLOTS
305 "tarray", // (1 << 19) == ACCSET_TARRAY
306 "tdata", // (1 << 20) == ACCSET_TARRAY_DATA
307 "iter", // (1 << 21) == ACCSET_ITER
308 "iterprops", // (1 << 22) == ACCSET_ITER_PROPS
309 "str", // (1 << 23) == ACCSET_STRING
310 "strmchars", // (1 << 24) == ACCSET_STRING_MCHARS
311 "typemap", // (1 << 25) == ACCSET_TYPEMAP
312 "fcslots", // (1 << 26) == ACCSET_FCSLOTS
313 "argsdata", // (1 << 27) == ACCSET_ARGS_DATA
315 "?!" // this entry should never be used, have it just in case
318 JS_STATIC_ASSERT(JS_ARRAY_LENGTH(nanojit::LInsPrinter::accNames) == TM_NUM_USED_ACCS + 1);
321 } /* namespace nanojit */
323 JS_DEFINE_CALLINFO_2(extern, STRING, js_IntToString, CONTEXT, INT32, 1, nanojit::ACCSET_NONE)
327 using namespace nanojit;
/*
 * Abort-recording helpers for E4X values: recording cannot continue once an
 * XML object is seen, so these macros bail out with the given status.
 * NOTE(review): in the #else (no-XML) branch the macro defined is
 * RETURN_IF_XML(val, ret) rather than RETURN_VALUE_IF_XML(val, ret); if
 * that is what the full source says, non-XML builds leave
 * RETURN_VALUE_IF_XML undefined for the two wrappers below and redefine
 * RETURN_IF_XML with a different arity.  Looks like a naming bug - confirm
 * against a build with JS_HAS_XML_SUPPORT disabled (the #if structure is
 * partly elided from this listing).
 */
329 #if JS_HAS_XML_SUPPORT
330 #define RETURN_VALUE_IF_XML(val, ret) \
332 if (!val.isPrimitive() && val.toObject().isXML()) \
333 RETURN_VALUE("xml detected", ret); \
336 #define RETURN_IF_XML(val, ret) ((void) 0)
339 #define RETURN_IF_XML_A(val) RETURN_VALUE_IF_XML(val, ARECORD_STOP)
340 #define RETURN_IF_XML(val) RETURN_VALUE_IF_XML(val, RECORD_STOP)
342 JS_STATIC_ASSERT(sizeof(JSValueType) == 1);
343 JS_STATIC_ASSERT(offsetof(TraceNativeStorage, stack_global_buf) % 16 == 0);
345 /* Map to translate a type tag into a printable representation. */
/*
 * Map a JSValueType tag to a single debug-spew character.  Note the
 * deliberately distinct letters: 'O' is a known non-function object while
 * '!' is a generic JSVAL_TYPE_OBJECT that has not been split into
 * FUNOBJ/NONFUNOBJ; lowercase 's'/'o' are the or-null variants.
 * (Default/closing lines are elided from this listing.)
 */
348 TypeToChar(JSValueType type)
351 case JSVAL_TYPE_DOUBLE: return 'D';
352 case JSVAL_TYPE_INT32: return 'I';
353 case JSVAL_TYPE_STRING: return 'S';
354 case JSVAL_TYPE_OBJECT: return '!';
355 case JSVAL_TYPE_BOOLEAN: return 'B';
356 case JSVAL_TYPE_NULL: return 'N';
357 case JSVAL_TYPE_UNDEFINED: return 'U';
358 case JSVAL_TYPE_MAGIC: return 'M';
359 case JSVAL_TYPE_FUNOBJ: return 'F';
360 case JSVAL_TYPE_NONFUNOBJ: return 'O';
361 case JSVAL_TYPE_BOXED: return '#';
362 case JSVAL_TYPE_STRORNULL: return 's';
363 case JSVAL_TYPE_OBJORNULL: return 'o';
/*
 * Like TypeToChar, but classifies a live Value: objects are split into
 * 'F' (function) / 'O' (non-function) by inspecting the object itself.
 * (Fallthrough return for unmatched values is elided from this listing.)
 */
369 ValueToTypeChar(const Value &v)
371 if (v.isInt32()) return 'I';
372 if (v.isDouble()) return 'D';
373 if (v.isString()) return 'S';
374 if (v.isObject()) return v.toObject().isFunction() ? 'F' : 'O';
375 if (v.isBoolean()) return 'B';
376 if (v.isNull()) return 'N';
377 if (v.isUndefined()) return 'U';
378 if (v.isMagic()) return 'M';
384 /* Blacklist parameters. */
387 * Number of iterations of a loop where we start tracing. That is, we don't
388 * start tracing until the beginning of the HOTLOOP-th iteration.
392 /* Attempt recording this many times before blacklisting permanently. */
393 #define BL_ATTEMPTS 2
395 /* Skip this many hits before attempting recording again, after an aborted attempt. */
396 #define BL_BACKOFF 32
399  * If, after running a trace LOOP_CHECK_ITERS times, it hasn't done MIN_LOOP_ITERS
400 * iterations, we blacklist it.
402 #define MIN_LOOP_ITERS 200
403 #define LOOP_CHECK_ITERS 10
406 #define LOOP_COUNT_MAX 100000000
408 #define LOOP_COUNT_MAX MIN_LOOP_ITERS
411 /* Number of times we wait to exit on a side exit before we try to extend the tree. */
414 /* Number of times we try to extend the tree along a side exit. */
417 /* Maximum number of peer trees allowed. */
420 /* Max call depths for inlining. */
421 #define MAX_CALLDEPTH 10
423 /* Max number of slots in a table-switch. */
424 #define MAX_TABLE_SWITCH 256
426 /* Max memory needed to rebuild the interpreter stack when falling off trace. */
427 #define MAX_INTERP_STACK_BYTES \
428 (MAX_NATIVE_STACK_SLOTS * sizeof(Value) + \
429 MAX_CALL_STACK_ENTRIES * sizeof(JSInlineFrame) + \
430 sizeof(JSInlineFrame)) /* possibly slow native frame at top of stack */
432 /* Max number of branches per tree. */
433 #define MAX_BRANCHES 32
435 #define CHECK_STATUS(expr) \
437 RecordingStatus _status = (expr); \
438 if (_status != RECORD_CONTINUE) \
442 #define CHECK_STATUS_A(expr) \
444 AbortableRecordingStatus _status = InjectStatus((expr)); \
445 if (_status != ARECORD_CONTINUE) \
450 #define RETURN_VALUE(msg, value) \
452 debug_only_printf(LC_TMAbort, "trace stopped: %d: %s\n", __LINE__, (msg)); \
456 #define RETURN_VALUE(msg, value) return (value)
459 #define RETURN_STOP(msg) RETURN_VALUE(msg, RECORD_STOP)
460 #define RETURN_STOP_A(msg) RETURN_VALUE(msg, ARECORD_STOP)
461 #define RETURN_ERROR(msg) RETURN_VALUE(msg, RECORD_ERROR)
462 #define RETURN_ERROR_A(msg) RETURN_VALUE(msg, ARECORD_ERROR)
/*
 * JIT statistics plumbing.  jitstats.tbl is X-macro-expanded three times:
 * once to declare a uint64 counter per stat, once to build STAT<x>ID enum
 * tinyids, and once to build the JSPropertySpec table that exposes each
 * counter as a read-only property on the jitstats object.
 */
466 #define JITSTAT(x) uint64 x;
467 #include "jitstats.tbl"
469 } jitstats = { 0LL, };
/* Counters must pack exactly, with no padding, for the table indexing. */
471 JS_STATIC_ASSERT(sizeof(jitstats) % sizeof(uint64) == 0);
474 #define JITSTAT(x) STAT ## x ## ID,
475 #include "jitstats.tbl"
/* Getter for the synthetic "onTrace" property: are we on trace right now? */
481 jitstats_getOnTrace(JSContext *cx, JSObject *obj, jsid id, jsval *vp)
483 *vp = BOOLEAN_TO_JSVAL(JS_ON_TRACE(cx));
487 static JSPropertySpec jitstats_props[] = {
488 #define JITSTAT(x) { #x, STAT ## x ## ID, JSPROP_ENUMERATE | JSPROP_READONLY | JSPROP_PERMANENT },
489 #include "jitstats.tbl"
491 { "onTrace", 0, JSPROP_ENUMERATE | JSPROP_READONLY | JSPROP_PERMANENT, jitstats_getOnTrace, NULL },
/*
 * Shared property getter for the jitstats class.  String ids handle the
 * special pseudo-stats ("HOTLOOP", "profiler"); integer ids index into the
 * jitstats counter struct via the JITSTAT switch.  Counters that fit in a
 * JS int are returned as ints, larger ones as decimal strings.
 * NOTE(review): listing is elided (early returns/braces missing); comments
 * only, code text unchanged.
 */
496 jitstats_getProperty(JSContext *cx, JSObject *obj, jsid id, jsval *vp)
500 if (JSID_IS_STRING(id)) {
501 JSAtom* str = JSID_TO_ATOM(id);
502 if (StringEqualsAscii(str, "HOTLOOP")) {
503 *vp = INT_TO_JSVAL(HOTLOOP);
508 if (StringEqualsAscii(str, "profiler")) {
509 *vp = BOOLEAN_TO_JSVAL(cx->profilingEnabled);
516 index = JSID_TO_INT(id);
520 #define JITSTAT(x) case STAT ## x ## ID: result = jitstats.x; break;
521 #include "jitstats.tbl"
/* Small enough for a tagged int? */
528 if (result < JSVAL_INT_MAX) {
529 *vp = INT_TO_JSVAL(jsint(result));
/* Otherwise format the 64-bit counter as a decimal string. */
533 JS_snprintf(retstr, sizeof retstr, "%llu", result);
534 *vp = STRING_TO_JSVAL(JS_NewStringCopyZ(cx, retstr));
/*
 * Class for the global jitstats object: all properties are served by
 * jitstats_getProperty; everything else uses the stock stubs.
 */
538 JSClass jitstats_class = {
541 JS_PropertyStub, JS_PropertyStub,
542 jitstats_getProperty, JS_StrictPropertyStub,
543 JS_EnumerateStub, JS_ResolveStub,
544 JS_ConvertStub, NULL,
545 JSCLASS_NO_OPTIONAL_MEMBERS
/* Install the jitstats class (and its property table) on a global object. */
549 InitJITStatsClass(JSContext *cx, JSObject *glob)
551 JS_InitClass(cx, glob, NULL, &jitstats_class, NULL, 0, jitstats_props, NULL, NULL, NULL);
554 #define AUDIT(x) (jitstats.x++)
556 #define AUDIT(x) ((void)0)
557 #endif /* JS_JIT_SPEW */
559 static avmplus::AvmCore s_core = avmplus::AvmCore();
560 static avmplus::AvmCore* core = &s_core;
564 DumpPeerStability(TraceMonitor* tm, const void* ip, JSObject* globalObj, uint32 globalShape, uint32 argc);
568 * We really need a better way to configure the JIT. Shaver, where is
569 * my fancy JIT object?
571 * NB: this is raced on, if jstracer.cpp should ever be running MT.
572 * I think it's harmless tho.
574 static bool did_we_check_processor_features = false;
576 /* ------ Debug logging control ------ */
579 * All the logging control stuff lives in here. It is shared between
580 * all threads, but I think that's OK.
582 LogControl LogController;
587 * NB: this is raced on too, if jstracer.cpp should ever be running MT.
590 static bool did_we_set_up_debug_logging = false;
/*
 * One-time setup of LogController.lcbits from the TMFLAGS environment
 * variable.  Recognizes the legacy $TRACEMONKEY variable only to print a
 * migration notice, prints a usage summary for TMFLAGS=help, and otherwise
 * decodes comma-separated flag names with strstr (a cheap hack: any
 * substring match counts).  NOTE(review): listing is elided (exit paths
 * and some output lines missing); comments only, code text unchanged.
 */
593 InitJITLogController()
598 LogController.lcbits = 0;
/* Legacy variable: tell the user it moved, don't honor it. */
600 tm = getenv("TRACEMONKEY");
604 "The environment variable $TRACEMONKEY has been replaced by $TMFLAGS.\n"
605 "Try 'TMFLAGS=help js -j' for a list of options.\n"
610 tmf = getenv("TMFLAGS");
613 /* Using strstr() is really a cheap hack as far as flag decoding goes. */
614 if (strstr(tmf, "help")) {
617 "usage: TMFLAGS=option,option,option,... where options can be:\n"
619 " help show this message\n"
620 " ------ options for jstracer & jsregexp ------\n"
621 " minimal ultra-minimalist output; try this first\n"
622 " full everything except 'treevis' and 'fragprofile'\n"
623 " tracer tracer lifetime (FIXME:better description)\n"
624 " recorder trace recording stuff (FIXME:better description)\n"
625 " abort show trace recording aborts\n"
626 " stats show trace recording stats\n"
627 " regexp show compilation & entry for regexps\n"
628 " profiler show loop profiles as they are profiled\n"
629 " treevis spew that tracevis/tree.py can parse\n"
630 " ------ options for Nanojit ------\n"
631 " fragprofile count entries and exits for each fragment\n"
632 " liveness show LIR liveness at start of reader pipeline\n"
633 " readlir show LIR as it enters the reader pipeline\n"
634 " aftersf show LIR after StackFilter\n"
635 " afterdce show LIR after dead code elimination\n"
636 " native show native code (interleaved with 'afterdce')\n"
637 " nativebytes show native code bytes in 'native' output\n"
638 " regalloc show regalloc state in 'native' output\n"
639 " activation show activation state in 'native' output\n"
648 /* flags for jstracer.cpp */
649 if (strstr(tmf, "minimal") || strstr(tmf, "full")) bits |= LC_TMMinimal;
650 if (strstr(tmf, "tracer") || strstr(tmf, "full")) bits |= LC_TMTracer;
651 if (strstr(tmf, "recorder") || strstr(tmf, "full")) bits |= LC_TMRecorder;
652 if (strstr(tmf, "abort") || strstr(tmf, "full")) bits |= LC_TMAbort;
653 if (strstr(tmf, "stats") || strstr(tmf, "full")) bits |= LC_TMStats;
654 if (strstr(tmf, "profiler") || strstr(tmf, "full")) bits |= LC_TMProfiler;
655 if (strstr(tmf, "treevis")) bits |= LC_TMTreeVis;
657 /* flags for nanojit */
658 if (strstr(tmf, "fragprofile")) bits |= LC_FragProfile;
659 if (strstr(tmf, "liveness") || strstr(tmf, "full")) bits |= LC_Liveness;
660 if (strstr(tmf, "readlir") || strstr(tmf, "full")) bits |= LC_ReadLIR;
661 if (strstr(tmf, "aftersf") || strstr(tmf, "full")) bits |= LC_AfterSF;
662 if (strstr(tmf, "afterdce") || strstr(tmf, "full")) bits |= LC_AfterDCE;
663 if (strstr(tmf, "native") || strstr(tmf, "full")) bits |= LC_Native;
664 if (strstr(tmf, "nativebytes")|| strstr(tmf, "full")) bits |= LC_Bytes;
665 if (strstr(tmf, "regalloc") || strstr(tmf, "full")) bits |= LC_RegAlloc;
666 if (strstr(tmf, "activation") || strstr(tmf, "full")) bits |= LC_Activation;
668 LogController.lcbits = bits;
674 /* ------------------ Frag-level profiling support ------------------ */
679 * All the allocations done by this profile data-collection and
680 * display machinery, are done in TraceMonitor::profAlloc. That is
681 * emptied out at the end of FinishJIT. It has a lifetime from
682 * InitJIT to FinishJIT, which exactly matches the span
683 * js_FragProfiling_init to js_FragProfiling_showResults.
/*
 * Reverse a singly-linked Seq<T> in place by re-pointing each tail link;
 * returns the new head.  (Loop body is partly elided from this listing.)
 */
687 Seq<T>* reverseInPlace(Seq<T>* seq)
692 Seq<T>* next = curr->tail;
700 // The number of top blocks to show in the profile
701 #define N_TOP_BLOCKS 50
703 // Contains profile info for a single guard
705 uint32_t guardID; // identifying number
706 uint32_t count; // count.
710 uint32_t count; // entry count for this Fragment
711 uint32_t nStaticExits; // statically: the number of exits
712 size_t nCodeBytes; // statically: the number of insn bytes in the main fragment
713 size_t nExitBytes; // statically: the number of insn bytes in the exit paths
714 Seq<GuardPI>* guards; // guards, each with its own count
715 uint32_t largestGuardID; // that exists in .guards
/*
 * Harvest profiling data from a Fragment at the end of its life: copy its
 * entry count, static sizes and per-guard exit counts into a FragPI and
 * file it in tm->profTab under the fragment's unique profFragID.  No-op
 * unless fragment profiling (LC_FragProfile) is enabled.
 * NOTE(review): listing is elided (early return, FragPI initializer tail,
 * loop braces missing); comments only, code text unchanged.
 */
719 FragProfiling_FragFinalizer(Fragment* f, TraceMonitor* tm)
721 // Recover profiling data from 'f', which is logically at the end
722 // of its useful lifetime.
723 if (!(LogController.lcbits & LC_FragProfile))
727 // Valid profFragIDs start at 1
728 NanoAssert(f->profFragID >= 1);
729 // Should be called exactly once per Fragment. This will assert if
730 // you issue the same FragID to more than one Fragment.
731 NanoAssert(!tm->profTab->containsKey(f->profFragID));
733 FragPI pi = { f->profCount,
739 // Begin sanity check on the guards
740 SeqBuilder<GuardPI> guardsBuilder(*tm->profAlloc);
743 uint32_t sumOfDynExits = 0;
744 for (gr = f->guardsForFrag; gr; gr = gr->nextInFrag) {
746 // Also copy the data into our auxiliary structure.
747 // f->guardsForFrag is in reverse order, and so this
748 // copy preserves that ordering (->add adds at end).
749 // Valid profGuardIDs start at 1.
750 NanoAssert(gr->profGuardID > 0);
751 sumOfDynExits += gr->profCount;
752 GuardPI gpi = { gr->profGuardID, gr->profCount };
753 guardsBuilder.add(gpi);
754 if (gr->profGuardID > pi.largestGuardID)
755 pi.largestGuardID = gr->profGuardID;
757 pi.guards = guardsBuilder.get();
758 // And put the guard list in forwards order
759 pi.guards = reverseInPlace(pi.guards);
761 // Why is this so? Because nGs is the number of guards
762 // at the time the LIR was generated, whereas f->nStaticExits
763 // is the number of them observed by the time it makes it
764 // through to the assembler. It can be the case that LIR
765 // optimisation removes redundant guards; hence we expect
766 // nGs to always be the same or higher.
767 NanoAssert(nGs >= f->nStaticExits);
769 // Also we can assert that the sum of the exit counts
770 // can't exceed the entry count. It'd be nice to assert that
771 // they are exactly equal, but we can't because we don't know
772 // how many times we got to the end of the trace.
773 NanoAssert(f->profCount >= sumOfDynExits);
775 // End sanity check on guards
777 tm->profTab->put(f->profFragID, pi);
/*
 * Dump the fragment-profiling report at JIT shutdown: selects the
 * N_TOP_BLOCKS hottest fragments by entry count (simple insertion into a
 * fixed-size top-list), prints a self/cumulative count table, then for each
 * hot fragment breaks its entry count down by side-exit (guard) counts,
 * with the residue reported as "Looped" (fell off the end of the trace).
 * NOTE(review): listing is elided (loop braces, some printf arguments,
 * variable decls missing); comments only, code text unchanged.
 */
781 FragProfiling_showResults(TraceMonitor* tm)
783 uint32_t topFragID[N_TOP_BLOCKS];
784 FragPI topPI[N_TOP_BLOCKS];
785 uint64_t totCount = 0, cumulCount;
787 size_t totCodeB = 0, totExitB = 0;
788 PodArrayZero(topFragID);
/* Pass 1: scan every profiled fragment, maintaining the top-N list. */
790 FragStatsMap::Iter iter(*tm->profTab);
791 while (iter.next()) {
792 uint32_t fragID = iter.key();
793 FragPI pi = iter.value();
794 uint32_t count = pi.count;
795 totCount += (uint64_t)count;
796 /* Find the rank for this entry, in tops */
797 int r = N_TOP_BLOCKS-1;
801 if (topFragID[r] == 0) {
805 if (count > topPI[r].count) {
812 NanoAssert(r >= 0 && r <= N_TOP_BLOCKS);
813 /* This entry should be placed at topPI[r], and entries
814 at higher numbered slots moved up one. */
815 if (r < N_TOP_BLOCKS) {
816 for (int s = N_TOP_BLOCKS-1; s > r; s--) {
817 topFragID[s] = topFragID[s-1];
818 topPI[s] = topPI[s-1];
820 topFragID[r] = fragID;
/* Pass 2: print the per-fragment entry-count table. */
825 LogController.printf(
826 "\n----------------- Per-fragment execution counts ------------------\n");
827 LogController.printf(
828 "\nTotal count = %llu\n\n", (unsigned long long int)totCount);
830 LogController.printf(
831 " Entry counts Entry counts ----- Static -----\n");
832 LogController.printf(
833 " ------Self------ ----Cumulative--- Exits Cbytes Xbytes FragID\n");
834 LogController.printf("\n");
837 totCount = 1; /* avoid division by zero */
840 for (r = 0; r < N_TOP_BLOCKS; r++) {
841 if (topFragID[r] == 0)
843 cumulCount += (uint64_t)topPI[r].count;
844 LogController.printf("%3d: %5.2f%% %9u %6.2f%% %9llu"
845 " %3d %5u %5u %06u\n",
847 (double)topPI[r].count * 100.0 / (double)totCount,
849 (double)cumulCount * 100.0 / (double)totCount,
850 (unsigned long long int)cumulCount,
851 topPI[r].nStaticExits,
852 (unsigned int)topPI[r].nCodeBytes,
853 (unsigned int)topPI[r].nExitBytes,
855 totSE += (uint32_t)topPI[r].nStaticExits;
856 totCodeB += topPI[r].nCodeBytes;
857 totExitB += topPI[r].nExitBytes;
859 LogController.printf("\nTotal displayed code bytes = %u, "
861 "Total displayed static exits = %d\n\n",
862 (unsigned int)totCodeB, (unsigned int)totExitB, totSE);
/* Pass 3: per-fragment breakdown of where executions left the trace. */
864 LogController.printf("Analysis by exit counts\n\n");
866 for (r = 0; r < N_TOP_BLOCKS; r++) {
867 if (topFragID[r] == 0)
869 LogController.printf("FragID=%06u, total count %u:\n", topFragID[r],
871 uint32_t madeItToEnd = topPI[r].count;
872 uint32_t totThisFrag = topPI[r].count;
873 if (totThisFrag == 0)
876 // visit the guards, in forward order
877 for (Seq<GuardPI>* guards = topPI[r].guards; guards; guards = guards->tail) {
878 gpi = (*guards).head;
881 madeItToEnd -= gpi.count;
882 LogController.printf(" GuardID=%03u %7u (%5.2f%%)\n",
883 gpi.guardID, gpi.count,
884 100.0 * (double)gpi.count / (double)totThisFrag);
886 LogController.printf(" Looped (%03u) %7u (%5.2f%%)\n",
887 topPI[r].largestGuardID+1,
889 100.0 * (double)madeItToEnd / (double)totThisFrag);
890 NanoAssert(madeItToEnd <= topPI[r].count); // else unsigned underflow
891 LogController.printf("\n");
899 /* ----------------------------------------------------------------- */
/*
 * Runtime helper invoked from JITted code by TraceRecorder::tprint: a
 * miniature printf over an array of doubles.  Each conversion pulls the
 * next argv slot via GET_ARG (reinterpreting the double's bits through the
 * union 'u' as int halves, a string pointer, a C string, or a Value*).
 * Unknown or malformed conversions print a bracketed diagnostic instead of
 * failing.  NOTE(review): listing is elided (switch skeleton, union decl,
 * several cases missing); comments only, code text unchanged.
 */
902 static JSBool FASTCALL
903 PrintOnTrace(char* format, uint32 argc, double *argv)
916 #define GET_ARG() JS_BEGIN_MACRO \
917 if (argi >= argc) { \
918 fprintf(out, "[too few args for format]"); \
921 u.d = argv[argi++]; \
927 for (char *p = format; *p; ++p) {
934 fprintf(out, "[trailing %%]");
/* Raw dump of both 32-bit halves plus the double interpretation. */
941 fprintf(out, "[%u:%u 0x%x:0x%x %f]", u.i.lo, u.i.hi, u.i.lo, u.i.hi, u.d);
945 fprintf(out, "%d", u.i.lo);
949 fprintf(out, "%u", u.i.lo);
953 fprintf(out, "%x", u.i.lo);
957 fprintf(out, "%f", u.d);
/* String case: defend against garbage pointers and ropes. */
966 size_t length = u.s->length();
967 // protect against massive spew if u.s is a bad pointer.
968 if (length > 1 << 16)
971 fprintf(out, "<rope>");
974 const jschar *chars = u.s->nonRopeChars();
975 for (unsigned i = 0; i < length; ++i) {
976 jschar co = chars[i];
980 fprintf(out, "\\u%02x", co);
982 fprintf(out, "\\u%04x", co);
988 fprintf(out, "%s", u.cstr);
992 Value *v = (Value *) u.i.lo;
997 fprintf(out, "[invalid %%%c]", *p);
1006 JS_DEFINE_CALLINFO_3(extern, BOOL, PrintOnTrace, CHARPTR, UINT32, DOUBLEPTR, 0, ACCSET_STORE_ANY)
1008 // This version is not intended to be called directly: usually it is easier to
1009 // use one of the other overloads.
/*
 * Core of the on-trace printf facility: copies the format string and an
 * argument buffer into trace-lifetime storage (traceAlloc), emits stores of
 * each LIR operand into that buffer, then emits a call to PrintOnTrace and
 * guards on its (boolean) result.
 */
1011 TraceRecorder::tprint(const char *format, int count, nanojit::LIns *insa[])
1013 size_t size = strlen(format) + 1;
1014 char* data = (char*) traceMonitor->traceAlloc->alloc(size);
1015 memcpy(data, format, size);
1017 double *args = (double*) traceMonitor->traceAlloc->alloc(count * sizeof(double));
1018 LIns* argsp_ins = w.nameImmpNonGC(args);
1019 for (int i = 0; i < count; ++i)
1020 w.stTprintArg(insa, argsp_ins, i);
1022 LIns* args_ins[] = { w.nameImmpNonGC(args), w.nameImmi(count), w.nameImmpNonGC(data) };
1023 LIns* call_ins = w.call(&PrintOnTrace_ci, args_ins);
/* Leave trace (MISMATCH_EXIT) if PrintOnTrace reported failure. */
1024 guard(false, w.eqi0(call_ins), MISMATCH_EXIT);
1027 // Generate a 'printf'-type call from trace for debugging.
/*
 * Convenience overloads for 0..6 LIR operands; each just packs its
 * arguments into a stack array and forwards to the core tprint above.
 */
1029 TraceRecorder::tprint(const char *format)
1031 LIns* insa[] = { NULL };
1032 tprint(format, 0, insa);
1036 TraceRecorder::tprint(const char *format, LIns *ins)
1038 LIns* insa[] = { ins };
1039 tprint(format, 1, insa);
1043 TraceRecorder::tprint(const char *format, LIns *ins1, LIns *ins2)
1045 LIns* insa[] = { ins1, ins2 };
1046 tprint(format, 2, insa);
1050 TraceRecorder::tprint(const char *format, LIns *ins1, LIns *ins2, LIns *ins3)
1052 LIns* insa[] = { ins1, ins2, ins3 };
1053 tprint(format, 3, insa);
1057 TraceRecorder::tprint(const char *format, LIns *ins1, LIns *ins2, LIns *ins3, LIns *ins4)
1059 LIns* insa[] = { ins1, ins2, ins3, ins4 };
1060 tprint(format, 4, insa);
1064 TraceRecorder::tprint(const char *format, LIns *ins1, LIns *ins2, LIns *ins3, LIns *ins4,
1067 LIns* insa[] = { ins1, ins2, ins3, ins4, ins5 };
1068 tprint(format, 5, insa);
1072 TraceRecorder::tprint(const char *format, LIns *ins1, LIns *ins2, LIns *ins3, LIns *ins4,
1073 LIns *ins5, LIns *ins6)
1075 LIns* insa[] = { ins1, ins2, ins3, ins4, ins5, ins6 };
1076 tprint(format, 6, insa);
/*
 * Tracker: maps interpreter addresses to the LIR instruction currently
 * holding that location's value.  Addresses are bucketed into fixed-size
 * "tracker pages" (a linked list of TrackerPage, each with a base address
 * and an LIns* map indexed by word offset within the page).
 * NOTE(review): listing is elided (braces, list-walk/cleanup lines
 * missing); comments only, code text unchanged.
 */
1091 Tracker::getTrackerPageBase(const void* v) const
1093 return jsuword(v) & ~TRACKER_PAGE_MASK;
/* Word index of 'v' within its page (>> 2: map is keyed per 4-byte slot). */
1097 Tracker::getTrackerPageOffset(const void* v) const
1099 return (jsuword(v) & TRACKER_PAGE_MASK) >> 2;
/* Linear search of the page list for the page covering 'v', or NULL. */
1102 struct Tracker::TrackerPage*
1103 Tracker::findTrackerPage(const void* v) const
1105 jsuword base = getTrackerPageBase(v);
1106 struct Tracker::TrackerPage* p = pagelist;
1108 if (p->base == base)
/* Allocate (zeroed) and link a new page covering 'v'. */
1115 struct Tracker::TrackerPage*
1116 Tracker::addTrackerPage(const void* v)
1118 jsuword base = getTrackerPageBase(v);
1119 struct TrackerPage* p = (struct TrackerPage*) js_calloc(sizeof(*p));
/* (Clear path) unlink pages from the head of the list. */
1130 TrackerPage* p = pagelist;
1131 pagelist = pagelist->next;
1137 Tracker::has(const void *v) const
1139 return get(v) != NULL;
/* NULL if no page covers 'v' (elided) or the slot is empty. */
1143 Tracker::get(const void* v) const
1145 struct Tracker::TrackerPage* p = findTrackerPage(v);
1148 return p->map[getTrackerPageOffset(v)];
/* Bind address 'v' to instruction 'i', creating its page on demand. */
1152 Tracker::set(const void* v, LIns* i)
1154 struct Tracker::TrackerPage* p = findTrackerPage(v);
1156 p = addTrackerPage(v);
1157 p->map[getTrackerPageOffset(v)] = i;
/*
 * hasInt32Repr: true if the numeric Value can be represented exactly as a
 * signed 32-bit int (int32 values, or doubles passing JSDOUBLE_IS_INT32).
 * asInt32: extract that int32, asserting representability in debug builds.
 * NOTE(review): listing is elided (the int32 fast-path lines are missing);
 * comments only, code text unchanged.
 */
1161 hasInt32Repr(const Value &v)
1168 return JSDOUBLE_IS_INT32(v.toDouble(), &_);
1172 asInt32(const Value &v)
1174 JS_ASSERT(v.isNumber());
1179 JS_ASSERT(JSDOUBLE_IS_INT32(v.toDouble(), &_));
1181 return jsint(v.toDouble());
1185 * Return JSVAL_TYPE_DOUBLE for all numbers (int and double). Split
1186 * JSVAL_TYPE_OBJECT into JSVAL_TYPE_FUNOBJ and JSVAL_TYPE_NONFUNOBJ.
1187 * Otherwise, just return the value's type.
1189 static inline JSValueType
1190 getPromotedType(const Value &v)
1193 return JSVAL_TYPE_DOUBLE;
1195 return v.toObject().isFunction() ? JSVAL_TYPE_FUNOBJ : JSVAL_TYPE_NONFUNOBJ;
1196 return v.extractNonDoubleObjectTraceType();
1200 * Return JSVAL_TYPE_INT32 for all whole numbers that fit into signed 32-bit.
1201 * Split JSVAL_TYPE_OBJECT into JSVAL_TYPE_FUNOBJ and JSVAL_TYPE_NONFUNOBJ.
1202 * Otherwise, just return the value's type.
1204 static inline JSValueType
1205 getCoercedType(const Value &v)
/* Numbers: demote to INT32 when exactly representable, else DOUBLE. */
1209 return (v.isInt32() || JSDOUBLE_IS_INT32(v.toDouble(), &_))
1211 : JSVAL_TYPE_DOUBLE;
1214 return v.toObject().isFunction() ? JSVAL_TYPE_FUNOBJ : JSVAL_TYPE_NONFUNOBJ;
1215 return v.extractNonDoubleObjectTraceType();
/*
 * Classify a frame-object slot pointer ('p' is either the frame's scope
 * chain slot or its args slot).  The scope chain is always a non-function
 * object; the args slot traces as NONFUNOBJ only when an args object has
 * been created, otherwise NULL.  isFrameObjPtrTraceType recognizes exactly
 * those two possible results.
 */
1218 static inline JSValueType
1219 getFrameObjPtrTraceType(void *p, JSStackFrame *fp)
1221 if (p == fp->addressOfScopeChain()) {
1222 JS_ASSERT(*(JSObject **)p != NULL);
1223 return JSVAL_TYPE_NONFUNOBJ;
/* Otherwise 'p' must be the frame's args slot. */
1225 JS_ASSERT(p == fp->addressOfArgs());
1226 return fp->hasArgsObj() ? JSVAL_TYPE_NONFUNOBJ : JSVAL_TYPE_NULL;
1230 isFrameObjPtrTraceType(JSValueType t)
1232 return t == JSVAL_TYPE_NULL || t == JSVAL_TYPE_NONFUNOBJ;
1235 /* Constant seed and accumulate step borrowed from the DJB hash. */
/* Table sizes must be powers of two so '& MASK' is a valid modulus. */
1237 const uintptr_t ORACLE_MASK = ORACLE_SIZE - 1;
1238 JS_STATIC_ASSERT((ORACLE_MASK & ORACLE_SIZE) == 0);
1240 const uintptr_t FRAGMENT_TABLE_MASK = FRAGMENT_TABLE_SIZE - 1;
1241 JS_STATIC_ASSERT((FRAGMENT_TABLE_MASK & FRAGMENT_TABLE_SIZE) == 0);
1243 const uintptr_t HASH_SEED = 5381;
/* One DJB step: h = h*33 + i, kept within 'mask'. */
1246 HashAccum(uintptr_t& h, uintptr_t i, uintptr_t mask)
1248 h = ((h << 5) + h + (mask & i)) & mask;
/* Oracle-table index for a stack slot: hashes script, pc and slot number. */
1251 static JS_REQUIRES_STACK inline int
1252 StackSlotHash(JSContext* cx, unsigned slot, const void* pc)
1254 uintptr_t h = HASH_SEED;
1255 HashAccum(h, uintptr_t(cx->fp()->script()), ORACLE_MASK);
1256 HashAccum(h, uintptr_t(pc), ORACLE_MASK);
1257 HashAccum(h, uintptr_t(slot), ORACLE_MASK);
/* Oracle-table index for a global slot: hashes script, global shape, slot. */
1261 static JS_REQUIRES_STACK inline int
1262 GlobalSlotHash(JSContext* cx, unsigned slot)
1264 uintptr_t h = HASH_SEED;
1265 JSStackFrame* fp = cx->fp();
1270 HashAccum(h, uintptr_t(fp->maybeScript()), ORACLE_MASK);
1271 HashAccum(h, uintptr_t(fp->scopeChain().getGlobal()->shape()), ORACLE_MASK);
1272 HashAccum(h, uintptr_t(slot), ORACLE_MASK);
/* Cheap pc-only hash used by the per-instruction oracle bitsets. */
1277 PCHash(jsbytecode* pc)
1279 return int(uintptr_t(pc) & ORACLE_MASK);
/*
 * Oracle: conservative bitsets recording slots / bytecode locations that
 * must not be demoted to int (or that need the slow -0 test).  Because
 * entries are hash-indexed, collisions only cause over-conservative
 * answers, never incorrect ones.
 */
1284 /* Grow the oracle bitsets to their (fixed) size here, once. */
1285 _stackDontDemote.set(ORACLE_SIZE-1);
1286 _globalDontDemote.set(ORACLE_SIZE-1);
1290 /* Tell the oracle that a certain global variable should not be demoted. */
1291 JS_REQUIRES_STACK void
1292 Oracle::markGlobalSlotUndemotable(JSContext* cx, unsigned slot)
1294 _globalDontDemote.set(GlobalSlotHash(cx, slot));
1297 /* Consult with the oracle whether we shouldn't demote a certain global variable. */
1298 JS_REQUIRES_STACK bool
1299 Oracle::isGlobalSlotUndemotable(JSContext* cx, unsigned slot) const
1301 return _globalDontDemote.get(GlobalSlotHash(cx, slot));
1304 /* Tell the oracle that a certain slot at a certain stack slot should not be demoted. */
1305 JS_REQUIRES_STACK void
1306 Oracle::markStackSlotUndemotable(JSContext* cx, unsigned slot, const void* pc)
1308 _stackDontDemote.set(StackSlotHash(cx, slot, pc));
/* Convenience overload: use the current interpreter pc. */
1311 JS_REQUIRES_STACK void
1312 Oracle::markStackSlotUndemotable(JSContext* cx, unsigned slot)
1314 markStackSlotUndemotable(cx, slot, cx->regs->pc);
1317 /* Consult with the oracle whether we shouldn't demote a certain slot. */
1318 JS_REQUIRES_STACK bool
1319 Oracle::isStackSlotUndemotable(JSContext* cx, unsigned slot, const void* pc) const
1321 return _stackDontDemote.get(StackSlotHash(cx, slot, pc));
1324 JS_REQUIRES_STACK bool
1325 Oracle::isStackSlotUndemotable(JSContext* cx, unsigned slot) const
1327 return isStackSlotUndemotable(cx, slot, cx->regs->pc);
1330 /* Tell the oracle that a certain slot at a certain bytecode location should not be demoted. */
1332 Oracle::markInstructionUndemotable(jsbytecode* pc)
1334 _pcDontDemote.set(PCHash(pc));
1337 /* Consult with the oracle whether we shouldn't demote a certain bytecode location. */
1339 Oracle::isInstructionUndemotable(jsbytecode* pc) const
1341 return _pcDontDemote.get(PCHash(pc));
1344 /* Tell the oracle that the instruction at bytecode location should use a stronger (slower) test for -0. */
1346 Oracle::markInstructionSlowZeroTest(jsbytecode* pc)
1348 _pcSlowZeroTest.set(PCHash(pc));
1351 /* Consult with the oracle whether we should use a stronger (slower) test for -0. */
1353 Oracle::isInstructionSlowZeroTest(jsbytecode* pc) const
1355 return _pcSlowZeroTest.get(PCHash(pc));
/* Forget all recorded demotability hints (e.g. on flush). */
1359 Oracle::clearDemotability()
1361 _stackDontDemote.reset();
1362 _globalDontDemote.reset();
1363 _pcDontDemote.reset();
1364 _pcSlowZeroTest.reset();
/*
 * Mark a fragment slot undemotable. The combined typemap index |slot| is
 * split: indices below f->nStackTypes are stack slots; the remainder are
 * globals, translated through f->globalSlots to a real global slot number.
 */
1367 JS_REQUIRES_STACK void
1368 TraceRecorder::markSlotUndemotable(LinkableFragment* f, unsigned slot)
1370 if (slot < f->nStackTypes) {
1371 traceMonitor->oracle->markStackSlotUndemotable(cx, slot);
1375 uint16* gslots = f->globalSlots->data();
1376 traceMonitor->oracle->markGlobalSlotUndemotable(cx, gslots[slot - f->nStackTypes]);
/* Same as above, but attribute the stack-slot mark to an explicit pc. */
1379 JS_REQUIRES_STACK void
1380 TraceRecorder::markSlotUndemotable(LinkableFragment* f, unsigned slot, const void* pc)
1382 if (slot < f->nStackTypes) {
1383 traceMonitor->oracle->markStackSlotUndemotable(cx, slot, pc);
1387 uint16* gslots = f->globalSlots->data();
1388 traceMonitor->oracle->markGlobalSlotUndemotable(cx, gslots[slot - f->nStackTypes]);
/*
 * Query form of the above. A null |oracle| answers "undemotable" for
 * every slot (the !oracle short-circuit), i.e. no speculation at all.
 */
1391 static JS_REQUIRES_STACK bool
1392 IsSlotUndemotable(Oracle* oracle, JSContext* cx, LinkableFragment* f, unsigned slot, const void* ip)
1394 if (slot < f->nStackTypes)
1395 return !oracle || oracle->isStackSlotUndemotable(cx, slot, ip);
1397 uint16* gslots = f->globalSlots->data();
1398 return !oracle || oracle->isGlobalSlotUndemotable(cx, gslots[slot - f->nStackTypes]);
/*
 * Hash-consing cache for FrameInfo records. memoize() returns a canonical,
 * allocator-owned copy of a FrameInfo (header plus its trailing typemap of
 * callerHeight JSValueType entries), so equal frames share one instance.
 */
1401 class FrameInfoCache
1405 typedef FrameInfo *Lookup;
/* Byte-wise rotate-xor hash over the header and the trailing typemap. */
1406 static HashNumber hash(const FrameInfo* fi) {
1407 size_t len = sizeof(FrameInfo) + fi->callerHeight * sizeof(JSValueType);
1409 const unsigned char *s = (const unsigned char*)fi;
1410 for (size_t i = 0; i < len; i++, s++)
1411 h = JS_ROTATE_LEFT32(h, 4) ^ *s;
/* Equality: header bytes first, then the variable-length typemap. */
1415 static bool match(const FrameInfo* fi1, const FrameInfo* fi2) {
1416 if (memcmp(fi1, fi2, sizeof(FrameInfo)) != 0)
1418 return memcmp(fi1->get_typemap(), fi2->get_typemap(),
1419 fi1->callerHeight * sizeof(JSValueType)) == 0;
1423 typedef HashSet<FrameInfo *, HashPolicy, SystemAllocPolicy> FrameSet;
1426 VMAllocator *allocator;
1430 FrameInfoCache(VMAllocator *allocator);
/* Return the canonical copy of |fi|, allocating one on first sight.
   NOTE(review): the elided lines presumably add |n| to |set| and handle
   allocation failure -- confirm in the full source. */
1436 FrameInfo *memoize(FrameInfo *fi) {
1437 FrameSet::AddPtr p = set.lookupForAdd(fi);
1439 FrameInfo* n = (FrameInfo*)
1440 allocator->alloc(sizeof(FrameInfo) + fi->callerHeight * sizeof(JSValueType));
1441 memcpy(n, fi, sizeof(FrameInfo) + fi->callerHeight * sizeof(JSValueType));
/* Constructor: abort on OOM rather than run with an unusable set. */
1450 FrameInfoCache::FrameInfoCache(VMAllocator *allocator)
1451 : allocator(allocator)
1454 OUT_OF_MEMORY_ABORT("FrameInfoCache::FrameInfoCache(): out of memory");
1457 #define PC_HASH_COUNT 1024
/*
 * Blacklist a loop header so the interpreter stops trying to trace it.
 * The assert shows |pc| must hold JSOP_TRACE or JSOP_NOTRACE; the elided
 * body presumably rewrites the opcode to JSOP_NOTRACE -- TODO confirm.
 */
1460 Blacklist(jsbytecode* pc)
1463 JS_ASSERT(*pc == JSOP_TRACE || *pc == JSOP_NOTRACE);
/* Undo a blacklisting: restore JSOP_TRACE and clear the method-JIT hint. */
1468 Unblacklist(JSScript *script, jsbytecode *pc)
1470 JS_ASSERT(*pc == JSOP_NOTRACE || *pc == JSOP_TRACE);
1471 if (*pc == JSOP_NOTRACE) {
1475 /* This code takes care of unblacklisting in the method JIT. */
1476 js::mjit::ResetTraceHint(script, pc, GET_UINT16(pc), false);
/*
 * Test whether |pc| is blacklisted, either directly (JSOP_NOTRACE) or,
 * for a JSOP_CALL, via a JSOP_NOTRACE on the immediately following op.
 */
1482 IsBlacklisted(jsbytecode* pc)
1484 if (*pc == JSOP_NOTRACE)
1486 if (*pc == JSOP_CALL)
1487 return *(pc + JSOP_CALL_LENGTH) == JSOP_NOTRACE;
/*
 * Penalize a failed recording attempt at |pc|: bump its per-pc attempt
 * counter (blacklisting once it exceeds BL_ATTEMPTS * MAXPEERS), and when
 * a tree is supplied, also push its hit counter back by BL_BACKOFF.
 */
1492 Backoff(TraceMonitor *tm, jsbytecode* pc, Fragment* tree = NULL)
1494 /* N.B. This code path cannot assume the recorder is/is not alive. */
1495 RecordAttemptMap &table = *tm->recordAttempts;
1496 if (RecordAttemptMap::AddPtr p = table.lookupForAdd(pc)) {
1497 if (p->value++ > (BL_ATTEMPTS * MAXPEERS)) {
/* First failure at this pc: seed the counter at zero. */
1503 table.add(p, pc, 0);
1507 tree->hits() -= BL_BACKOFF;
1510 * In case there is no entry or no table (due to OOM) or some
1511 * serious imbalance in the recording-attempt distribution on a
1512 * multitree, give each tree another chance to blacklist here as
1515 if (++tree->recordAttempts > BL_ATTEMPTS)
/* Zero the per-pc attempt counter, e.g. after a successful compile. */
1521 ResetRecordingAttempts(TraceMonitor *tm, jsbytecode* pc)
1523 RecordAttemptMap &table = *tm->recordAttempts;
1524 if (RecordAttemptMap::Ptr p = table.lookup(pc))
/*
 * Bucket index for the vmfragments table: hash of the loop ip, the global
 * object, its shape, and the entry-frame argc -- the full identity tuple
 * of a tree (see entryFrameArgc's comment below on why argc is included).
 */
1528 static inline size_t
1529 FragmentHash(const void *ip, JSObject* globalObj, uint32 globalShape, uint32 argc)
1531 uintptr_t h = HASH_SEED;
1532 HashAccum(h, uintptr_t(ip), FRAGMENT_TABLE_MASK);
1533 HashAccum(h, uintptr_t(globalObj), FRAGMENT_TABLE_MASK);
1534 HashAccum(h, uintptr_t(globalShape), FRAGMENT_TABLE_MASK);
1535 HashAccum(h, uintptr_t(argc), FRAGMENT_TABLE_MASK);
/*
 * Walk the hash bucket for (ip, globalObj, globalShape, argc). Outputs:
 * |firstInBucket| is the bucket head; |prevTreeNextp| is the link slot
 * where a matching tree was found (so *prevTreeNextp is the match) or,
 * if no match, the bucket's terminating link slot for insertion.
 */
1540 RawLookupFirstPeer(TraceMonitor* tm, const void *ip, JSObject* globalObj,
1541 uint32 globalShape, uint32 argc,
1542 TreeFragment*& firstInBucket, TreeFragment**& prevTreeNextp)
1544 size_t h = FragmentHash(ip, globalObj, globalShape, argc);
1545 TreeFragment** ppf = &tm->vmfragments[h];
1546 firstInBucket = *ppf;
1547 for (; TreeFragment* pf = *ppf; ppf = &pf->next) {
1548 if (pf->globalObj == globalObj &&
1549 pf->globalShape == globalShape &&
1552 prevTreeNextp = ppf;
1556 prevTreeNextp = ppf;
/* Find the first peer tree for this identity tuple, or NULL. */
1560 static TreeFragment*
1561 LookupLoop(TraceMonitor* tm, const void *ip, JSObject* globalObj,
1562 uint32 globalShape, uint32 argc)
1564 TreeFragment *_, **prevTreeNextp;
1565 RawLookupFirstPeer(tm, ip, globalObj, globalShape, argc, _, prevTreeNextp);
1566 return *prevTreeNextp;
/*
 * Find the tree for this identity tuple, or create a fresh TreeFragment,
 * link it into the vmfragments bucket, and start its peer list. A nonzero
 * profFragID is assigned only when fragment profiling is enabled.
 */
1569 static TreeFragment*
1570 LookupOrAddLoop(TraceMonitor* tm, const void *ip, JSObject* globalObj,
1571 uint32 globalShape, uint32 argc)
1573 TreeFragment *firstInBucket, **prevTreeNextp;
1574 RawLookupFirstPeer(tm, ip, globalObj, globalShape, argc, firstInBucket, prevTreeNextp);
1575 if (TreeFragment *f = *prevTreeNextp)
1579 uint32_t profFragID = (LogController.lcbits & LC_FragProfile)
1580 ? (++(tm->lastFragID)) : 0;
/* Placement-new into the monitor's data allocator; freed wholesale with it. */
1582 TreeFragment* f = new (*tm->dataAlloc) TreeFragment(ip, tm->dataAlloc, tm->oracle,
1583 globalObj, globalShape,
1584 argc verbose_only(, profFragID));
1585 f->root = f; /* f is the root of a new tree */
1586 *prevTreeNextp = f; /* insert f at the end of the vmfragments bucket-list */
1588 f->first = f; /* initialize peer-list at f */
/*
 * Create a new tree with the same identity tuple as |peer| and splice it
 * into |peer|'s peer list (peers: same loop, different entry typemaps).
 */
1593 static TreeFragment*
1594 AddNewPeerToPeerList(TraceMonitor* tm, TreeFragment* peer)
1598 uint32_t profFragID = (LogController.lcbits & LC_FragProfile)
1599 ? (++(tm->lastFragID)) : 0;
1601 TreeFragment* f = new (*tm->dataAlloc) TreeFragment(peer->ip, tm->dataAlloc, tm->oracle,
1602 peer->globalObj, peer->globalShape,
1603 peer->argc verbose_only(, profFragID));
1604 f->root = f; /* f is the root of a new tree */
1605 f->first = peer->first; /* add f to peer list */
1606 f->peer = peer->peer;
1608 /* only the |first| Fragment of a peer list needs a valid |next| field */
1609 debug_only(f->next = (TreeFragment*)0xcdcdcdcd);
/*
 * (Re)initialize a tree before recording: clear dependency lists, capture
 * the entry typemap from the current VM state, record debug/source info,
 * and compute the native stack layout at the entry point.
 */
1613 JS_REQUIRES_STACK void
1614 TreeFragment::initialize(JSContext* cx, SlotList *globalSlots, bool speculate)
1616 this->dependentTrees.clear();
1617 this->linkedTrees.clear();
1618 this->globalSlots = globalSlots;
1620 /* Capture the coerced type of each active slot in the type map. */
1621 this->typeMap.captureTypes(cx, globalObj, *globalSlots, 0 /* callDepth */, speculate);
/* typeMap holds stack types followed by global types; split point is here. */
1622 this->nStackTypes = this->typeMap.length() - globalSlots->length();
1623 this->spOffsetAtEntry = cx->regs->sp - cx->fp()->base();
1626 this->treeFileName = cx->fp()->script()->filename;
1627 this->treeLineNumber = js_FramePCToLineNumber(cx, cx->fp());
1628 this->treePCOffset = FramePCOffset(cx, cx->fp());
1630 this->script = cx->fp()->script();
1631 this->gcthings.clear();
1632 this->shapes.clear();
1633 this->unstableExits = NULL;
1634 this->sideExits.clear();
1636 /* Determine the native frame layout at the entry point. */
1637 this->nativeStackBase = (nStackTypes - (cx->regs->sp - cx->fp()->base())) *
1639 this->maxNativeStackSlots = nStackTypes;
1640 this->maxCallDepth = 0;
/*
 * Unlink |exit| from the singly-linked unstableExits list; asserts if the
 * exit is not present. |tail| tracks the link slot to rewrite on a match.
 */
1646 TreeFragment::removeUnstableExit(VMSideExit* exit)
1648 /* Now erase this exit from the unstable exit list. */
1649 UnstableExit** tail = &this->unstableExits;
1650 for (UnstableExit* uexit = this->unstableExits; uexit != NULL; uexit = uexit->next) {
1651 if (uexit->exit == exit) {
1652 *tail = uexit->next;
1655 tail = &uexit->next;
1657 JS_NOT_REACHED("exit not in unstable exit list");
/*
 * Debug check: no two compiled peers of |f|'s loop may share an entry
 * typemap, since identical typemaps should have been a single tree.
 */
1663 AssertTreeIsUnique(TraceMonitor* tm, TreeFragment* f)
1665 JS_ASSERT(f->root == f);
1668 * Check for duplicate entry type maps. This is always wrong and hints at
1669 * trace explosion since we are trying to stabilize something without
1670 * properly connecting peer edges.
1672 for (TreeFragment* peer = LookupLoop(tm, f->ip, f->globalObj, f->globalShape, f->argc);
1674 peer = peer->peer) {
1675 if (!peer->code() || peer == f)
1677 JS_ASSERT(!f->typeMap.matches(peer->typeMap));
/*
 * Give a blacklisted loop header another chance to compile: clear the
 * blacklist bit and attempt counters, then re-arm every peer at the loop
 * (recordAttempts decremented, hit counter forced to HOTLOOP).
 */
1683 AttemptCompilation(TraceMonitor *tm, JSObject* globalObj,
1684 JSScript* script, jsbytecode* pc, uint32 argc)
1686 /* If we already permanently blacklisted the location, undo that. */
1687 Unblacklist(script, pc);
1688 ResetRecordingAttempts(tm, pc);
1690 /* Breathe new life into all peer fragments at the designated loop header. */
1691 TreeFragment* f = LookupLoop(tm, pc, globalObj, globalObj->shape(), argc);
1694 * If the global object's shape changed, we can't easily find the
1695 * corresponding loop header via a hash table lookup. In this
1696 * we simply bail here and hope that the fragment has another
1697 * outstanding compilation attempt. This case is extremely rare.
1701 JS_ASSERT(f->root == f);
1704 JS_ASSERT(f->root == f);
1705 --f->recordAttempts;
1706 f->hits() = HOTLOOP;
/* Return the CallInfo of a double-returning call instruction, else NULL. */
1711 static const CallInfo *
1712 fcallinfo(LIns *ins)
1714 return ins->isop(LIR_calld) ? ins->callInfo() : NULL;
1718 * Determine whether this operand is guaranteed to not overflow the specified
1719 * integer operation.
/*
 * Uses interval arithmetic (to depth 3) on the operand ranges: an op needs
 * an overflow guard iff the result interval may have overflowed, and a
 * multiply additionally needs a -0 guard when 0 * negative is possible.
 */
1722 ChecksRequired(LOpcode op, LIns* op1, LIns* op2,
1723 bool* needsOverflowCheck, bool* needsNegZeroCheck)
1725 Interval x = Interval::of(op1, 3);
1726 Interval y = Interval::of(op2, 3);
1731 z = Interval::add(x, y);
1732 *needsNegZeroCheck = false;
1736 z = Interval::sub(x, y);
1737 *needsNegZeroCheck = false;
1741 z = Interval::mul(x, y);
1742 // A would-be negative zero result can only occur if we have
1743 // mul(0, -n) or mul(-n, 0), where n != 0. In particular, a multiply
1744 // where one operand is a positive immediate cannot result in negative
1747 // This assumes that -0 cannot be an operand; if one had occurred we
1748 // would have already exited the trace in order to promote the
1749 // computation back to doubles.
1750 *needsNegZeroCheck = (x.canBeZero() && y.canBeNegative()) ||
1751 (y.canBeZero() && x.canBeNegative());
1756 JS_NOT_REACHED("needsOverflowCheck");
1759 *needsOverflowCheck = z.hasOverflowed;
1763 * JSStackFrame::numActualArgs is only defined for function frames. Since the
1764 * actual arguments of the entry frame are kept on trace, argc is included in
1765 * the tuple identifying a fragment so that two fragments for the same loop but
1766 * recorded with different number of actual arguments are treated as two
1767 * completely separate trees. For this particular use, we define the number of
1768 * actuals for global and eval frames to be 0.
1771 entryFrameArgc(JSContext *cx)
1773 JSStackFrame *fp = cx->fp();
1774 return fp->isGlobalFrame() || fp->isEvalFrame() ? 0 : fp->numActualArgs();
/*
 * Visit |fp|'s stack slots up to and including the args of the callee
 * frame |next|. Fast path: no overflow args, one contiguous run up to
 * next->formalArgsEnd(). Slow path: visit stack, then callee/this plus
 * formals, then only the overflowing actuals, to touch each arg once.
 */
1777 template <typename Visitor>
1778 static JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
1779 VisitStackAndArgs(Visitor &visitor, JSStackFrame *fp, JSStackFrame *next, Value *stack)
1781 if (JS_LIKELY(!next->hasOverflowArgs()))
1782 return visitor.visitStackSlots(stack, next->formalArgsEnd() - stack, fp);
1785 * In the case of nactual > nformal, the formals are copied by the VM onto
1786 * the top of the stack. We only want to mark the formals once, so we
1787 * carefully mark only the canonical actual arguments (as defined by
1788 * JSStackFrame::canonicalActualArg).
1790 uintN nactual = next->numActualArgs();
1791 Value *actuals = next->actualArgs();
1792 size_t nstack = (actuals - 2 /* callee,this */) - stack;
1793 if (!visitor.visitStackSlots(stack, nstack, fp))
1795 uintN nformal = next->numFormalArgs();
1796 Value *formals = next->formalArgs();
1797 if (!visitor.visitStackSlots(formals - 2, 2 + nformal, fp))
1799 return visitor.visitStackSlots(actuals + nformal, nactual - nformal, fp);
1803 * Visit the values in the given JSStackFrame that the tracer cares about. This
1804 * visitor function is (implicitly) the primary definition of the native stack
1805 * area layout. There are a few other independent pieces of code that must be
1806 * maintained to assume the same layout. They are marked like this:
1808 * Duplicate native stack layout computation: see VisitFrameSlots header comment.
/*
 * Recurses from the entry frame (depth 0) outward: each frame is visited
 * after its caller, so slots appear in native-stack order. |next| is the
 * callee frame of |fp|, or NULL for the innermost (cx->fp()) frame, in
 * which case the live slots run up to cx->regs->sp.
 */
1810 template <typename Visitor>
1811 static JS_REQUIRES_STACK bool
1812 VisitFrameSlots(Visitor &visitor, JSContext *cx, unsigned depth, JSStackFrame *fp,
1815 JS_ASSERT_IF(!next, cx->fp() == fp);
1817 if (depth > 0 && !VisitFrameSlots(visitor, cx, depth-1, fp->prev(), fp))
/* Global frame: slots start past the script's fixed slots. */
1821 if (fp->isGlobalFrame()) {
1822 visitor.setStackSlotKind("global");
1823 Value *base = fp->slots() + fp->globalScript()->nfixed;
1825 return VisitStackAndArgs(visitor, fp, next, base);
1826 return visitor.visitStackSlots(base, cx->regs->sp - base, fp);
/* Eval frame: visit its callee/this pair explicitly. */
1829 if (JS_UNLIKELY(fp->isEvalFrame())) {
1830 visitor.setStackSlotKind("eval");
1831 if (!visitor.visitStackSlots(&fp->calleeValue(), 2, fp))
1835 * Only the bottom function frame must visit its arguments; for all
1836 * other frames, arguments are visited by the prev-frame.
1838 visitor.setStackSlotKind("args");
1839 uintN nformal = fp->numFormalArgs();
1840 if (!visitor.visitStackSlots(fp->formalArgs() - 2, 2 + nformal, fp))
1842 if (JS_UNLIKELY(fp->hasOverflowArgs())) {
1843 if (!visitor.visitStackSlots(fp->actualArgs() + nformal,
1844 fp->numActualArgs() - nformal, fp))
1850 JS_ASSERT(fp->isFunctionFrame());
1853 * We keep two members of JSStackFrame on trace: the args obj pointer and
1854 * the scope chain pointer. The visitor must take care not to treat these
1855 * as js::Value-typed variables, since they are unboxed pointers.
1856 * Moreover, JSStackFrame compresses the args obj pointer with nactual, so
1857 * fp->addressOfArgs() is not really a JSObject**: the visitor must treat
1858 * !fp->hasArgsObj() as a null args obj pointer. Hence, visitFrameObjPtr
1859 * is only passed a void *.
1861 visitor.setStackSlotKind("arguments");
1862 if (!visitor.visitFrameObjPtr(fp->addressOfArgs(), fp))
1864 visitor.setStackSlotKind("scopeChain");
1865 if (!visitor.visitFrameObjPtr(fp->addressOfScopeChain(), fp))
1868 visitor.setStackSlotKind("slots");
1870 return VisitStackAndArgs(visitor, fp, next, fp->slots());
1871 return visitor.visitStackSlots(fp->slots(), cx->regs->sp - fp->slots(), fp);
1874 // Number of native frame slots used for 'special' values between args and vars.
1875 // Currently the two values are |arguments| (args object) and |scopeChain|.
1876 const int SPECIAL_FRAME_SLOTS = 2;
/* Visit every stack slot from the entry frame down to cx->fp(). */
1878 template <typename Visitor>
1879 static JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
1880 VisitStackSlots(Visitor &visitor, JSContext *cx, unsigned callDepth)
1882 return VisitFrameSlots(visitor, cx, callDepth, cx->fp(), NULL);
/* Visit the tracked global slots listed in |gslots| on |globalObj|. */
1885 template <typename Visitor>
1886 static JS_REQUIRES_STACK JS_ALWAYS_INLINE void
1887 VisitGlobalSlots(Visitor &visitor, JSContext *cx, JSObject *globalObj,
1888 unsigned ngslots, uint16 *gslots)
1890 for (unsigned n = 0; n < ngslots; ++n) {
1891 unsigned slot = gslots[n];
1892 visitor.visitGlobalSlot(&globalObj->getSlotRef(slot), n, slot);
1896 class AdjustCallerTypeVisitor;
/* Convenience overload: take the global object from the current frame. */
1898 template <typename Visitor>
1899 static JS_REQUIRES_STACK JS_ALWAYS_INLINE void
1900 VisitGlobalSlots(Visitor &visitor, JSContext *cx, SlotList &gslots)
1902 VisitGlobalSlots(visitor, cx, cx->fp()->scopeChain().getGlobal(),
1903 gslots.length(), gslots.data());
/* Visit all stack slots, then (if the stack walk completed) all globals. */
1907 template <typename Visitor>
1908 static JS_REQUIRES_STACK JS_ALWAYS_INLINE void
1909 VisitSlots(Visitor& visitor, JSContext* cx, JSObject* globalObj,
1910 unsigned callDepth, unsigned ngslots, uint16* gslots)
1912 if (VisitStackSlots(visitor, cx, callDepth))
1913 VisitGlobalSlots(visitor, cx, globalObj, ngslots, gslots);
/* Overload: global object taken from the current frame's scope chain. */
1916 template <typename Visitor>
1917 static JS_REQUIRES_STACK JS_ALWAYS_INLINE void
1918 VisitSlots(Visitor& visitor, JSContext* cx, unsigned callDepth,
1919 unsigned ngslots, uint16* gslots)
1921 VisitSlots(visitor, cx, cx->fp()->scopeChain().getGlobal(),
1922 callDepth, ngslots, gslots);
/* Overload: global slots supplied as a SlotList. */
1925 template <typename Visitor>
1926 static JS_REQUIRES_STACK JS_ALWAYS_INLINE void
1927 VisitSlots(Visitor &visitor, JSContext *cx, JSObject *globalObj,
1928 unsigned callDepth, const SlotList& slots)
1930 VisitSlots(visitor, cx, globalObj, callDepth, slots.length(),
/* Overload: SlotList plus implicit global object. */
1934 template <typename Visitor>
1935 static JS_REQUIRES_STACK JS_ALWAYS_INLINE void
1936 VisitSlots(Visitor &visitor, JSContext *cx, unsigned callDepth,
1937 const SlotList& slots)
1939 VisitSlots(visitor, cx, cx->fp()->scopeChain().getGlobal(),
1940 callDepth, slots.length(), slots.data());
/*
 * Base class for slot visitors: carries the current slot-kind label used
 * by debug spew; compiles away to nothing in non-JS_JIT_SPEW builds.
 */
1944 class SlotVisitorBase {
1945 #if defined JS_JIT_SPEW
1947 char const *mStackSlotKind;
1949 SlotVisitorBase() : mStackSlotKind(NULL) {}
1950 JS_ALWAYS_INLINE const char *stackSlotKind() { return mStackSlotKind; }
1951 JS_ALWAYS_INLINE void setStackSlotKind(char const *k) {
1956 JS_ALWAYS_INLINE const char *stackSlotKind() { return NULL; }
1957 JS_ALWAYS_INLINE void setStackSlotKind(char const *k) {}
/*
 * Counts native-stack slots visited, optionally stopping early when the
 * address |mStop| is reached (used to find a slot's native-stack offset).
 */
1961 struct CountSlotsVisitor : public SlotVisitorBase
1967 JS_ALWAYS_INLINE CountSlotsVisitor(const void* stop = NULL) :
1973 JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
1974 visitStackSlots(Value *vp, size_t count, JSStackFrame* fp) {
/* If the stop address falls inside this run, count the prefix and halt. */
1977 if (mStop && size_t(((const Value *)mStop) - vp) < count) {
1978 mCount += size_t(((const Value *)mStop) - vp);
1986 JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
1987 visitFrameObjPtr(void* p, JSStackFrame* fp) {
1990 if (mStop && mStop == p) {
1998 JS_ALWAYS_INLINE unsigned count() {
2002 JS_ALWAYS_INLINE bool stopped() {
/*
 * Slot count contributed by |next|'s callee/this/args region starting at
 * |stack|; mirrors the two cases of VisitStackAndArgs.
 */
2007 static JS_REQUIRES_STACK JS_ALWAYS_INLINE unsigned
2008 CountStackAndArgs(JSStackFrame *next, Value *stack)
2010 if (JS_LIKELY(!next->hasOverflowArgs()))
2011 return (Value *)next - stack;
2012 size_t nvals = (next->formalArgs() - 2 /* callee, this */) - stack;
2013 JS_ASSERT(nvals == unsigned((next->actualArgs() - 2) - stack) + (2 + next->numActualArgs()));
/* Slots preceding a frame's fixed slots: callee/this, args, specials. */
2017 static JS_ALWAYS_INLINE uintN
2018 NumSlotsBeforeFixed(JSStackFrame *fp)
2020 uintN numArgs = fp->isEvalFrame() ? 0 : Max(fp->numActualArgs(), fp->numFormalArgs());
2021 return 2 + numArgs + SPECIAL_FRAME_SLOTS;
2025 * Calculate the total number of native frame slots we need from this frame all
2026 * the way back to the entry frame, including the current stack usage.
2028 * Duplicate native stack layout computation: see VisitFrameSlots header comment.
2030 JS_REQUIRES_STACK unsigned
2031 NativeStackSlots(JSContext *cx, unsigned callDepth)
2033 JSStackFrame *fp = cx->fp();
2034 JSStackFrame *next = NULL;
2036 unsigned depth = callDepth;
/* Walk from the innermost frame outward, accumulating per-frame slots. */
2038 for (; depth > 0; --depth, next = fp, fp = fp->prev()) {
2039 JS_ASSERT(fp->isFunctionFrame() && !fp->isEvalFrame());
2040 slots += SPECIAL_FRAME_SLOTS;
2042 slots += CountStackAndArgs(next, fp->slots());
2044 slots += cx->regs->sp - fp->slots();
/* Entry (outermost traced) frame: include its pre-fixed slots too. */
2048 if (fp->isGlobalFrame()) {
2049 start = fp->slots() + fp->globalScript()->nfixed;
2051 start = fp->slots();
2052 slots += NumSlotsBeforeFixed(fp);
2055 slots += CountStackAndArgs(next, start);
2057 slots += cx->regs->sp - start;
/* Debug cross-check against the visitor-based count. */
2060 CountSlotsVisitor visitor;
2061 VisitStackSlots(visitor, cx, callDepth);
2062 JS_ASSERT(visitor.count() == slots && !visitor.stopped());
/*
 * Visitor that records the coerced type of every visited slot into a
 * typemap array. With speculation enabled it keeps int32 types unless the
 * oracle says the slot is undemotable; with mOracle == NULL (no
 * speculation) every int32 is widened to double up front.
 */
2067 class CaptureTypesVisitor : public SlotVisitorBase
2070 JSValueType* mTypeMap;
2075 JS_ALWAYS_INLINE CaptureTypesVisitor(JSContext* cx, Oracle *oracle,
2076 JSValueType* typeMap, bool speculate)
2080 mOracle(speculate ? oracle : NULL)
2083 JS_REQUIRES_STACK JS_ALWAYS_INLINE void
2084 visitGlobalSlot(Value *vp, unsigned n, unsigned slot) {
2085 JSValueType type = getCoercedType(*vp);
2086 if (type == JSVAL_TYPE_INT32 && (!mOracle || mOracle->isGlobalSlotUndemotable(mCx, slot)))
2087 type = JSVAL_TYPE_DOUBLE;
2088 JS_ASSERT(type != JSVAL_TYPE_BOXED);
2089 debug_only_printf(LC_TMTracer,
2090 "capture type global%d: %c\n",
2091 n, TypeToChar(type));
2095 JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
2096 visitStackSlots(Value *vp, int count, JSStackFrame* fp) {
2097 for (int i = 0; i < count; ++i) {
2098 JSValueType type = getCoercedType(vp[i]);
/* length() is the running typemap index, i.e. this slot's stack index. */
2099 if (type == JSVAL_TYPE_INT32 && (!mOracle || mOracle->isStackSlotUndemotable(mCx, length())))
2100 type = JSVAL_TYPE_DOUBLE;
2101 JS_ASSERT(type != JSVAL_TYPE_BOXED);
2102 debug_only_printf(LC_TMTracer,
2103 "capture type %s%d: %c\n",
2104 stackSlotKind(), i, TypeToChar(type));
2110 JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
2111 visitFrameObjPtr(void* p, JSStackFrame* fp) {
2112 JSValueType type = getFrameObjPtrTraceType(p, fp);
2113 debug_only_printf(LC_TMTracer,
2114 "capture type %s%d: %c\n",
2115 stackSlotKind(), 0, TypeToChar(type));
/* Number of typemap entries written so far. */
2120 JS_ALWAYS_INLINE uintptr_t length() {
2121 return mPtr - mTypeMap;
/* Build a typemap as stack types followed by global types (tree layout). */
2126 TypeMap::set(unsigned stackSlots, unsigned ngslots,
2127 const JSValueType* stackTypeMap, const JSValueType* globalTypeMap)
2129 setLength(ngslots + stackSlots);
2130 memcpy(data(), stackTypeMap, stackSlots * sizeof(JSValueType));
2131 memcpy(data() + stackSlots, globalTypeMap, ngslots * sizeof(JSValueType));
2135 * Capture the type map for the selected slots of the global object and currently pending
/* Sizes the map to stack-slot count plus tracked globals, then fills it
   via CaptureTypesVisitor; the assert checks every entry was written. */
2138 JS_REQUIRES_STACK void
2139 TypeMap::captureTypes(JSContext* cx, JSObject* globalObj, SlotList& slots, unsigned callDepth,
2142 setLength(NativeStackSlots(cx, callDepth) + slots.length());
2143 CaptureTypesVisitor visitor(cx, oracle, data(), speculate);
2144 VisitSlots(visitor, cx, globalObj, callDepth, slots);
2145 JS_ASSERT(visitor.length() == length());
/* Append types only for globals tracked since this map was captured. */
2148 JS_REQUIRES_STACK void
2149 TypeMap::captureMissingGlobalTypes(JSContext* cx,
2150 JSObject* globalObj, SlotList& slots, unsigned stackSlots,
2153 unsigned oldSlots = length() - stackSlots;
2154 int diff = slots.length() - oldSlots;
2155 JS_ASSERT(diff >= 0);
2156 setLength(length() + diff);
2157 CaptureTypesVisitor visitor(cx, oracle, data() + stackSlots + oldSlots, speculate);
2158 VisitGlobalSlots(visitor, cx, globalObj, diff, slots.data() + oldSlots);
2161 /* Compare this type map to another one and see whether they match. */
2163 TypeMap::matches(TypeMap& other) const
2165 if (length() != other.length())
2167 return !memcmp(data(), other.data(), length());
/* Append |numSlots| raw type entries from |other| onto this map. */
2171 TypeMap::fromRaw(JSValueType* other, unsigned numSlots)
2173 unsigned oldLength = length();
2174 setLength(length() + numSlots);
2175 for (unsigned i = 0; i < numSlots; i++)
2176 get(oldLength + i) = other[i];
2180 * Use the provided storage area to create a new type map that contains the
2181 * partial type map with the rest of it filled up from the complete type
2185 MergeTypeMaps(JSValueType** partial, unsigned* plength, JSValueType* complete, unsigned clength, JSValueType* mem)
2187 unsigned l = *plength;
2188 JS_ASSERT(l < clength);
2189 memcpy(mem, *partial, l * sizeof(JSValueType));
2190 memcpy(mem + l, complete + l, (clength - l) * sizeof(JSValueType));
2196 * Specializes a tree to any specifically missing globals, including any
/* Extend |root|'s typemap with the given global types, then recurse into
   dependent and linked trees that are also short on global types. */
2199 static JS_REQUIRES_STACK void
2200 SpecializeTreesToLateGlobals(JSContext* cx, TreeFragment* root, JSValueType* globalTypeMap,
2201 unsigned numGlobalSlots)
2203 for (unsigned i = root->nGlobalTypes(); i < numGlobalSlots; i++)
2204 root->typeMap.add(globalTypeMap[i]);
2206 JS_ASSERT(root->nGlobalTypes() == numGlobalSlots);
2208 for (unsigned i = 0; i < root->dependentTrees.length(); i++) {
2209 TreeFragment* tree = root->dependentTrees[i];
2210 if (tree->code() && tree->nGlobalTypes() < numGlobalSlots)
2211 SpecializeTreesToLateGlobals(cx, tree, globalTypeMap, numGlobalSlots);
2213 for (unsigned i = 0; i < root->linkedTrees.length(); i++) {
2214 TreeFragment* tree = root->linkedTrees[i];
2215 if (tree->code() && tree->nGlobalTypes() < numGlobalSlots)
2216 SpecializeTreesToLateGlobals(cx, tree, globalTypeMap, numGlobalSlots);
2220 /* Specializes a tree to any missing globals, including any dependent trees. */
2221 static JS_REQUIRES_STACK void
2222 SpecializeTreesToMissingGlobals(JSContext* cx, JSObject* globalObj, TreeFragment* root)
2224 /* If we already have a bunch of peer trees, try to be as generic as possible. */
2226 for (TreeFragment *f = root->first; f; f = f->peer, ++count);
/* Speculate (keep int32 types) only while the peer count is low; near
   MAXPEERS we go generic to stop minting yet more type-specialized peers. */
2227 bool speculate = count < MAXPEERS-1;
2229 root->typeMap.captureMissingGlobalTypes(cx, globalObj, *root->globalSlots, root->nStackTypes,
2231 JS_ASSERT(root->globalSlots->length() == root->typeMap.length() - root->nStackTypes);
2233 SpecializeTreesToLateGlobals(cx, root, root->globalTypeMap(), root->nGlobalTypes());
/* Forward declaration; defined later in the file (not visible here). */
2237 ResetJITImpl(JSContext* cx, TraceMonitor *tm);
/* TraceVis builds log the reset reason before delegating to ResetJITImpl;
   non-TraceVis builds alias ResetJIT directly to ResetJITImpl below. */
2240 static JS_INLINE void
2241 ResetJIT(JSContext* cx, TraceMonitor *tm, TraceVisFlushReason r)
2243 LogTraceVisEvent(cx, S_RESET, r);
2244 ResetJITImpl(cx, tm);
2247 # define ResetJIT(cx, tm, reason) ResetJITImpl(cx, tm)
/* Flush all compiled traces, recording the reason as an OOM flush. */
2251 FlushJITCache(JSContext *cx, TraceMonitor *tm)
2253 ResetJIT(cx, tm, FR_OOM);
2257 TrashTree(TreeFragment* f);
/*
 * Set up a recording session for |fragment|: initialize recorder state,
 * reset recycled-fragment fields, build the LIR writer pipeline, import
 * the entry typemap, and emit the per-loop preamble (interrupt-flag poll
 * and, under the method JIT, a loop-iteration counter). |anchor| is the
 * side exit we are extending from, or NULL when recording a tree root;
 * |speculate| controls whether the oracle is consulted at all.
 */
2260 TraceRecorder::TraceRecorder(JSContext* cx, TraceMonitor *tm,
2261 VMSideExit* anchor, VMFragment* fragment,
2262 unsigned stackSlots, unsigned ngslots, JSValueType* typeMap,
2263 VMSideExit* innermost, JSScript* outerScript, jsbytecode* outerPC,
2264 uint32 outerArgc, bool speculate)
2267 oracle(speculate ? tm->oracle : NULL),
2269 tree(fragment->root),
2270 globalObj(tree->globalObj),
2271 outerScript(outerScript),
2273 outerArgc(outerArgc),
2279 importTypeMap(&tempAlloc(), tm->oracle),
2280 lirbuf(new (tempAlloc()) LirBuffer(tempAlloc())),
2281 mark(*traceMonitor->traceAlloc),
2282 numSideExitsBefore(tree->sideExits.length()),
2284 nativeFrameTracker(),
2286 callDepth(anchor ? anchor->calldepth : 0),
2287 atoms(FrameAtomBase(cx, cx->fp())),
2288 consts(JSScript::isValidOffset(cx->fp()->script()->constOffset)
2289 ? cx->fp()->script()->consts()->vector
2291 strictModeCode_ins(NULL),
2292 cfgMerges(&tempAlloc()),
2294 whichTreesToTrash(&tempAlloc()),
2295 guardedShapeTable(cx),
2299 addPropShapeBefore(NULL),
2302 native_rval_ins(NULL),
2304 pendingSpecializedNative(NULL),
2305 pendingUnboxSlot(NULL),
2306 pendingGuardCondition(NULL),
2307 pendingGlobalSlotsToSet(cx),
2309 generatedSpecializedNative(),
2311 w(&tempAlloc(), lirbuf)
/* Sanity: recording must start at the fragment's loop header pc, with the
   expected global object (which must have its own shape). */
2313 JS_ASSERT(globalObj == cx->fp()->scopeChain().getGlobal());
2314 JS_ASSERT(globalObj->hasOwnShape());
2315 JS_ASSERT(cx->regs->pc == (jsbytecode*)fragment->ip);
2318 if (TRACE_PROFILER(cx))
/* Exactly one activity (recording) may own this thread's compartment. */
2322 JS_ASSERT(JS_THREAD_DATA(cx)->onTraceCompartment == NULL);
2323 JS_ASSERT(JS_THREAD_DATA(cx)->profilingCompartment == NULL);
2324 JS_ASSERT(JS_THREAD_DATA(cx)->recordingCompartment == NULL);
2325 JS_THREAD_DATA(cx)->recordingCompartment = cx->compartment;
2328 lirbuf->printer = new (tempAlloc()) LInsPrinter(tempAlloc(), TM_NUM_USED_ACCS);
2332 * Reset the fragment state we care about in case we got a recycled
2333 * fragment. This includes resetting any profiling data we might have
2336 fragment->lastIns = NULL;
2337 fragment->setCode(NULL);
2338 fragment->lirbuf = lirbuf;
2339 verbose_only( fragment->profCount = 0; )
2340 verbose_only( fragment->nStaticExits = 0; )
2341 verbose_only( fragment->nCodeBytes = 0; )
2342 verbose_only( fragment->nExitBytes = 0; )
2343 verbose_only( fragment->guardNumberer = 1; )
2344 verbose_only( fragment->guardsForFrag = NULL; )
2345 verbose_only( fragment->loopLabel = NULL; )
2348 * Don't change fragment->profFragID, though. Once the identity of the
2349 * Fragment is set up (for profiling purposes), we can't change it.
2352 if (!guardedShapeTable.init())
2353 OUT_OF_MEMORY_ABORT("TraceRecorder::TraceRecorder: out of memory");
2356 debug_only_print0(LC_TMMinimal, "\n");
2357 debug_only_printf(LC_TMMinimal, "Recording starting from %s:%u@%u (FragID=%06u)\n",
2358 tree->treeFileName, tree->treeLineNumber, tree->treePCOffset,
2359 fragment->profFragID);
2361 debug_only_printf(LC_TMTracer, "globalObj=%p, shape=%d\n",
2362 (void*)this->globalObj, this->globalObj->shape());
2363 debug_only_printf(LC_TMTreeVis, "TREEVIS RECORD FRAG=%p ANCHOR=%p\n", (void*)fragment,
2367 /* This creates the LIR writer pipeline. */
2368 w.init(&LogController);
2372 for (int i = 0; i < NumSavedRegs; ++i)
2375 for (int i = 0; i < NumSavedRegs; ++i)
2376 w.name(lirbuf->savedRegs[i], regNames[REGNUM(Assembler::savedRegs[i])]);
/* The single trace parameter is the TracerState pointer ("state"). */
2379 lirbuf->state = w.name(w.paramp(0, 0), "state");
/* Root fragments get a loop label to branch back to at the loop edge. */
2381 if (fragment == fragment->root) {
2382 w.comment("begin-loop");
2383 InitConst(loopLabel) = w.label();
2385 w.comment("begin-setup");
2387 // if profiling, drop a label, so the assembler knows to put a
2388 // frag-entry-counter increment at this point. If there's a
2389 // loopLabel, use that; else we'll have to make a dummy label
2390 // especially for this purpose.
2391 verbose_only( if (LogController.lcbits & LC_FragProfile) {
2392 LIns* entryLabel = NULL;
2393 if (fragment == fragment->root) {
2394 entryLabel = loopLabel;
2396 entryLabel = w.label();
2398 NanoAssert(entryLabel);
2399 NanoAssert(!fragment->loopLabel);
2400 fragment->loopLabel = entryLabel;
/* Load the well-known TracerState fields into named LIR values. */
2403 lirbuf->sp = w.name(w.ldpStateField(sp), "sp");
2404 lirbuf->rp = w.name(w.ldpStateField(rp), "rp");
2405 InitConst(cx_ins) = w.name(w.ldpStateField(cx), "cx");
2406 InitConst(eos_ins) = w.name(w.ldpStateField(eos), "eos");
2407 InitConst(eor_ins) = w.name(w.ldpStateField(eor), "eor");
2409 strictModeCode_ins = w.name(w.immi(cx->fp()->script()->strictModeCode), "strict");
2411 /* If we came from exit, we might not have enough global types. */
2412 if (tree->globalSlots->length() > tree->nGlobalTypes())
2413 SpecializeTreesToMissingGlobals(cx, globalObj, tree);
2415 /* read into registers all values on the stack and all globals we know so far */
2416 import(tree, lirbuf->sp, stackSlots, ngslots, callDepth, typeMap);
2418 if (fragment == fragment->root) {
2420 * We poll the operation callback request flag. It is updated asynchronously whenever
2421 * the callback is to be invoked. We can use w.nameImmpNonGC here as JIT-ed code is per
2422 * thread and cannot outlive the corresponding JSThreadData.
2424 w.comment("begin-interruptFlags-check");
2425 /* FIXME: See bug 621140 for moving interruptCounter to the compartment. */
2426 #ifdef JS_THREADSAFE
2427 void *interrupt = (void*) &cx->runtime->interruptCounter;
2429 void *interrupt = (void*) &JS_THREAD_DATA(cx)->interruptFlags;
2431 LIns* flagptr = w.nameImmpNonGC(interrupt);
2432 LIns* x = w.ldiVolatile(flagptr);
/* Leave the trace (TIMEOUT_EXIT) whenever the interrupt flag is set. */
2433 guard(true, w.eqi0(x), TIMEOUT_EXIT);
2434 w.comment("end-interruptFlags-check");
2437 * Count the number of iterations run by a trace, so that we can blacklist if
2438 * the trace runs too few iterations to be worthwhile. Do this only if the methodjit
2439 * is on--otherwise we must try to trace as much as possible.
2442 if (cx->methodJitEnabled) {
2443 w.comment("begin-count-loop-iterations");
2444 LIns* counterPtr = w.nameImmpNonGC((void *) &traceMonitor->iterationCounter);
2445 LIns* counterValue = w.ldiVolatile(counterPtr);
2446 LIns* test = w.ltiN(counterValue, LOOP_COUNT_MAX);
/* Stop incrementing once the counter saturates at LOOP_COUNT_MAX. */
2447 LIns *branch = w.jfUnoptimizable(test);
2449 * stiVolatile() uses ACCSET_STORE_ANY; If LICM is implemented
2450 * (bug 545406) this counter will need its own region.
2452 w.stiVolatile(w.addi(counterValue, w.immi(1)), counterPtr);
2454 w.comment("end-count-loop-iterations");
2460 * If we are attached to a tree call guard, make sure the guard the inner
2461 * tree exited from is what we expect it to be.
2463 if (anchor && anchor->exitType == NESTED_EXIT) {
2464 LIns* nested_ins = w.ldpStateField(outermostTreeExitGuard);
2465 guard(true, w.eqp(nested_ins, w.nameImmpNonGC(innermost)), NESTED_EXIT);
2468 w.comment("end-setup");
// Destructor: releases per-recording state. Callers must already have
// detached this recorder from the monitor (asserted below) before deleting.
// NOTE(review): this numbered listing elides short lines (braces, #ifdef
// guards); in particular TrashTree(fragment->root) below is presumably
// behind a condition elided here -- confirm against the full source.
2471 TraceRecorder::~TraceRecorder()
2473 /* Should already have been adjusted by callers before calling delete. */
2474 JS_ASSERT(traceMonitor->recorder != this);
// Clear the per-thread record of which compartment is being recorded.
2476 JS_ASSERT(JS_THREAD_DATA(cx)->profilingCompartment == NULL);
2477 JS_ASSERT(&JS_THREAD_DATA(cx)->recordingCompartment->traceMonitor == traceMonitor);
2478 JS_THREAD_DATA(cx)->recordingCompartment = NULL;
2481 TrashTree(fragment->root);
// Discard any trees this recording marked as invalid.
2483 for (unsigned int i = 0; i < whichTreesToTrash.length(); i++)
2484 TrashTree(whichTreesToTrash[i]);
2486 /* Purge the tempAlloc used during recording. */
2487 tempAlloc().reset();
2489 forgetGuardedShapes();
// Returns true if any of the monitor's three allocators (long-lived data,
// recording-scratch, trace-lifetime) has hit its out-of-memory flag.
2493 TraceMonitor::outOfMemory() const
2495 return dataAlloc->outOfMemory() ||
2496 tempAlloc->outOfMemory() ||
2497 traceAlloc->outOfMemory();
2501 * This function destroys the recorder after a successful recording, possibly
2502 * starting a suspended outer recorder.
2504 AbortableRecordingStatus
2505 TraceRecorder::finishSuccessfully()
// Preconditions: not profiling, we are the active recorder, and the
// fragment has been fully assembled (lastIns and code present).
2507 JS_ASSERT(!traceMonitor->profile);
2508 JS_ASSERT(traceMonitor->recorder == this);
2509 JS_ASSERT(fragment->lastIns && fragment->code());
2511 AUDIT(traceCompleted);
2514 /* Grab local copies of members needed after destruction of |this|. */
2515 JSContext* localcx = cx;
2516 TraceMonitor* localtm = traceMonitor;
2518 localtm->recorder = NULL;
2519 /* We can't (easily) use js_delete() here because the constructor is private. */
2520 this->~TraceRecorder();
// From here on, only the local copies may be used -- |this| is gone.
2523 /* Catch OOM that occurred during recording. */
2524 if (localtm->outOfMemory() || OverfullJITCache(localcx, localtm)) {
2525 ResetJIT(localcx, localtm, FR_OOM);
2526 return ARECORD_ABORTED;
2528 return ARECORD_COMPLETED;
2531 /* This function aborts a recorder and any pending outer recorders. */
2532 JS_REQUIRES_STACK TraceRecorder::AbortResult
2533 TraceRecorder::finishAbort(const char* reason)
2535 JS_ASSERT(!traceMonitor->profile);
2536 JS_ASSERT(traceMonitor->recorder == this);
2538 AUDIT(recorderAborted);
// Log where the aborted tree lives and where recording stopped; |reason|
// is included (the elided printf arguments presumably pass it).
2540 debug_only_printf(LC_TMMinimal | LC_TMAbort,
2541 "Abort recording of tree %s:%d@%d at %s:%d@%d: %s.\n",
2543 tree->treeLineNumber,
2545 cx->fp()->script()->filename,
2546 js_FramePCToLineNumber(cx, cx->fp()),
2547 FramePCOffset(cx, cx->fp()),
// Penalize this trace point so we don't immediately retry a failing record.
2550 Backoff(traceMonitor, (jsbytecode*) fragment->root->ip, fragment->root);
2553 * If this is the primary trace and we didn't succeed compiling, trash the
2554 * tree. Otherwise, remove the VMSideExits we added while recording, which
2555 * are about to be invalid.
2557 * BIG FAT WARNING: resetting the length is only a valid strategy as long as
2558 * there may be only one recorder active for a single TreeInfo at a time.
2559 * Otherwise, we may be throwing away another recorder's valid side exits.
2561 if (fragment->root == fragment) {
2562 TrashTree(fragment->toTreeFragment());
2564 JS_ASSERT(numSideExitsBefore <= fragment->root->sideExits.length());
2565 fragment->root->sideExits.setLength(numSideExitsBefore);
2568 /* Grab local copies of members needed after destruction of |this|. */
2569 JSContext* localcx = cx;
2570 TraceMonitor* localtm = traceMonitor;
2572 localtm->recorder = NULL;
2573 /* We can't (easily) use js_delete() here because the constructor is private. */
2574 this->~TraceRecorder();
// |this| is destroyed; use only the local copies below.
2577 /* Catch OOM that occurred during recording. */
2578 if (localtm->outOfMemory() || OverfullJITCache(localcx, localtm)) {
2579 ResetJIT(localcx, localtm, FR_OOM);
2582 return NORMAL_ABORT;
// The w_immp*GC helpers below emit an immediate pointer into the LIR while
// registering the referenced GC thing (object/function/string/shape/id) with
// the tree, so the tree keeps it alive / can be invalidated if it dies.
2586 TraceRecorder::w_immpObjGC(JSObject* obj)
2589 tree->gcthings.addUnique(ObjectValue(*obj));
2590 return w.immpNonGC((void*)obj);
// Same as w_immpObjGC, but for function objects.
2594 TraceRecorder::w_immpFunGC(JSFunction* fun)
2597 tree->gcthings.addUnique(ObjectValue(*fun));
2598 return w.immpNonGC((void*)fun);
// Immediate string pointer; string is rooted via the tree's gcthings list.
2602 TraceRecorder::w_immpStrGC(JSString* str)
2605 tree->gcthings.addUnique(StringValue(str));
2606 return w.immpNonGC((void*)str);
// Shapes are tracked in a separate per-tree list (tree->shapes).
2610 TraceRecorder::w_immpShapeGC(const Shape* shape)
2613 tree->shapes.addUnique(shape);
2614 return w.immpNonGC((void*)shape);
// jsids are only registered when they actually hold a GC thing.
2618 TraceRecorder::w_immpIdGC(jsid id)
2620 if (JSID_IS_GCTHING(id))
2621 tree->gcthings.addUnique(IdToValue(id));
2622 return w.immpNonGC((void*)JSID_BITS(id));
// Map a tracked global jsval address to its slot index in the global object.
2626 TraceRecorder::nativeGlobalSlot(const Value* p) const
2628 JS_ASSERT(isGlobal(p));
2629 return ptrdiff_t(p - globalObj->slots);
2632 /* Determine the offset in the native global frame for a jsval we track. */
2634 TraceRecorder::nativeGlobalOffset(const Value* p) const
// Native global frame stores one double-sized cell per slot.
2636 return nativeGlobalSlot(p) * sizeof(double);
2639 /* Determine whether a value is a global stack slot. */
2641 TraceRecorder::isGlobal(const Value* p) const
// Unsigned-compare trick: addresses below slots wrap to a huge size_t.
2643 return (size_t(p - globalObj->slots) < globalObj->numSlots());
// Untyped-pointer convenience wrapper around isGlobal().
2647 TraceRecorder::isVoidPtrGlobal(const void* p) const
2649 return isGlobal((const Value *)p);
2653 * Return the offset in the native stack for the given jsval. More formally,
2654 * |p| must be the address of a jsval that is represented in the native stack
2655 * area. The return value is the offset, from TracerState::stackBase, in bytes,
2656 * where the native representation of |*p| is stored. To get the offset
2657 * relative to TracerState::sp, subtract TreeFragment::nativeStackBase.
2659 JS_REQUIRES_STACK ptrdiff_t
2660 TraceRecorder::nativeStackOffsetImpl(const void* p) const
// Count how many native-stack slots precede |p| across the visited frames.
2662 CountSlotsVisitor visitor(p);
2663 VisitStackSlots(visitor, cx, callDepth);
2664 size_t offset = visitor.count() * sizeof(double);
2667 * If it's not in a pending frame, it must be on the stack of the current
2668 * frame above sp but below fp->slots() + script->nslots.
2670 if (!visitor.stopped()) {
2671 const Value *vp = (const Value *)p;
2672 JS_ASSERT(size_t(vp - cx->fp()->slots()) < cx->fp()->numSlots());
// vp is above regs->sp here, so the added delta is the distance past sp.
2673 offset += size_t(vp - cx->regs->sp) * sizeof(double);
// Typed wrapper: byte offset of a Value within the native stack area.
2678 JS_REQUIRES_STACK inline ptrdiff_t
2679 TraceRecorder::nativeStackOffset(const Value* p) const
2681 return nativeStackOffsetImpl(p);
// Slot (double-sized cell) index rather than byte offset.
2684 JS_REQUIRES_STACK inline ptrdiff_t
2685 TraceRecorder::nativeStackSlotImpl(const void* p) const
2687 return nativeStackOffsetImpl(p) / sizeof(double);
// Typed wrapper over nativeStackSlotImpl.
2690 JS_REQUIRES_STACK inline ptrdiff_t
2691 TraceRecorder::nativeStackSlot(const Value* p) const
2693 return nativeStackSlotImpl(p);
2697 * Return the offset, from TracerState:sp, for the given jsval. Shorthand for:
2698 * -TreeFragment::nativeStackBase + nativeStackOffset(p).
2700 inline JS_REQUIRES_STACK ptrdiff_t
2701 TraceRecorder::nativespOffsetImpl(const void* p) const
2703 return -tree->nativeStackBase + nativeStackOffsetImpl(p);
// Typed wrapper over nativespOffsetImpl.
2706 inline JS_REQUIRES_STACK ptrdiff_t
2707 TraceRecorder::nativespOffset(const Value* p) const
2709 return nativespOffsetImpl(p);
2712 /* Track the maximum number of native frame slots we need during execution. */
2714 TraceRecorder::trackNativeStackUse(unsigned slots)
// High-water mark: only ever grows.
2716 if (slots > tree->maxNativeStackSlots)
2717 tree->maxNativeStackSlots = slots;
2721 * Unbox a jsval into a slot. Slots are wide enough to hold double values
2722 * directly (instead of storing a pointer to them). We assert instead of
2723 * type checking. The caller must ensure the types are compatible.
2726 ValueToNative(const Value &v, JSValueType type, double* slot)
2728 JS_ASSERT(type <= JSVAL_UPPER_INCL_TYPE_OF_BOXABLE_SET);
// Non-numeric boxable types copy their raw payload; int32 narrows (possibly
// from a double that holds an integral value); everything else stores double.
2729 if (type > JSVAL_UPPER_INCL_TYPE_OF_NUMBER_SET)
2730 v.unboxNonDoubleTo((uint64 *)slot);
2731 else if (type == JSVAL_TYPE_INT32)
2732 *(int32_t *)slot = v.isInt32() ? v.toInt32() : (int32_t)v.toDouble();
2734 *(double *)slot = v.toNumber();
// NOTE(review): the case labels below belong to a debug-only type-spew
// switch whose `switch` header and `break;` lines are elided from this
// numbered listing -- confirm against the full source.
2739 case JSVAL_TYPE_NONFUNOBJ: {
2740 JS_ASSERT(!IsFunctionObject(v));
2741 debug_only_printf(LC_TMTracer,
2742 "object<%p:%s> ", (void*)*(JSObject **)slot,
2743 v.toObject().getClass()->name);
2747 case JSVAL_TYPE_INT32:
2748 JS_ASSERT(v.isInt32() || (v.isDouble() && JSDOUBLE_IS_INT32(v.toDouble(), &_)));
2749 debug_only_printf(LC_TMTracer, "int<%d> ", *(jsint *)slot);
2752 case JSVAL_TYPE_DOUBLE:
2753 JS_ASSERT(v.isNumber());
2754 debug_only_printf(LC_TMTracer, "double<%g> ", *(jsdouble *)slot);
2757 case JSVAL_TYPE_BOXED:
// Boxed jsvals never appear in entry type maps.
2758 JS_NOT_REACHED("found jsval type in an entry type map");
2761 case JSVAL_TYPE_STRING:
2762 JS_ASSERT(v.isString());
2763 debug_only_printf(LC_TMTracer, "string<%p> ", (void*)*(JSString**)slot);
2766 case JSVAL_TYPE_NULL:
2767 JS_ASSERT(v.isNull());
2768 debug_only_print0(LC_TMTracer, "null ");
2771 case JSVAL_TYPE_BOOLEAN:
2772 JS_ASSERT(v.isBoolean());
2773 debug_only_printf(LC_TMTracer, "special<%d> ", *(JSBool*)slot);
2776 case JSVAL_TYPE_UNDEFINED:
2777 JS_ASSERT(v.isUndefined());
2778 debug_only_print0(LC_TMTracer, "undefined ");
2781 case JSVAL_TYPE_MAGIC:
2782 JS_ASSERT(v.isMagic());
2783 debug_only_print0(LC_TMTracer, "hole ");
2786 case JSVAL_TYPE_FUNOBJ: {
2787 JS_ASSERT(IsFunctionObject(v));
2788 JSFunction* fun = GET_FUNCTION_PRIVATE(cx, &v.toObject());
2789 #if defined JS_JIT_SPEW
2790 if (LogController.lcbits & LC_TMTracer) {
// Print the function's name when it has one, else "unnamed".
2793 JS_PutEscapedFlatString(funName, sizeof funName, ATOM_TO_STRING(fun->atom), 0);
2795 strcpy(funName, "unnamed");
2796 LogController.printf("function<%p:%s> ", (void*)*(JSObject **)slot, funName);
2802 JS_NOT_REACHED("unexpected type");
// Throw away all compiled traces and reset the monitor to a pristine state.
// Must not run while a recorder or profile is active (asserted below).
2809 TraceMonitor::flush()
2811 /* flush should only be called after all recorders have been aborted. */
2812 JS_ASSERT(!recorder);
2813 JS_ASSERT(!profile);
2814 AUDIT(cacheFlushed);
2816 // recover profiling data from expiring Fragments
// Walk every hash bucket, every tree, and every peer of each tree.
2818 for (size_t i = 0; i < FRAGMENT_TABLE_SIZE; ++i) {
2819 for (TreeFragment *f = vmfragments[i]; f; f = f->next) {
2820 JS_ASSERT(f->root == f);
2821 for (TreeFragment *p = f; p; p = p->peer)
2822 FragProfiling_FragFinalizer(p, this);
// Also finalize side-branch fragments (verbose/profiling builds).
2828 for (Seq<Fragment*>* f = branches; f; f = f->tail)
2829 FragProfiling_FragFinalizer(f->head, this);
// Tell the methodjit to forget per-loop trace hints before we drop profiles.
2836 for (LoopProfileMap::Enum e(*loopProfiles); !e.empty(); e.popFront()) {
2837 jsbytecode *pc = e.front().key;
2838 LoopProfile *prof = e.front().value;
2839 /* This code takes care of resetting all methodjit state. */
2840 js::mjit::ResetTraceHint(prof->entryScript, pc, GET_UINT16(pc), true);
2845 frameCache->reset();
2847 traceAlloc->reset();
2851 loopProfiles->clear();
// Reinitialize per-global-shape state; -1 marks a slot as unused.
2853 for (size_t i = 0; i < MONITOR_N_GLOBAL_STATES; ++i) {
2854 globalStates[i].globalShape = -1;
2855 globalStates[i].globalSlots = new (*dataAlloc) SlotList(dataAlloc);
// Rebuild the assembler in the freshly-reset data allocator.
2858 assembler = new (*dataAlloc) Assembler(*codeAlloc, *dataAlloc, *dataAlloc, core,
2859 &LogController, avmplus::AvmCore::config);
2860 verbose_only( branches = NULL; )
2862 PodArrayZero(vmfragments);
2863 tracedScripts.clear();
2865 needFlush = JS_FALSE;
// Returns whether |shape| will be collected by the current GC.
// NOTE(review): the early-return body for the compartment-GC case is elided
// from this listing; only the final !marked() test is visible.
2869 IsShapeAboutToBeFinalized(JSContext *cx, const js::Shape *shape)
2871 JSRuntime *rt = cx->runtime;
2872 if (rt->gcCurrentCompartment != NULL)
2875 return !shape->marked();
// Returns whether tree |f| references any GC thing (global, gcthings list
// entry, or shape) that the current GC is about to finalize. The early
// `return true;` lines inside each check are elided from this listing.
2879 HasUnreachableGCThings(JSContext *cx, TreeFragment *f)
2882 * We do not check here for dead scripts as JSScript is not a GC thing.
2883 * Instead PurgeScriptFragments is used to remove dead script fragments.
2886 if (IsAboutToBeFinalized(cx, f->globalObj))
2888 Value* vp = f->gcthings.data();
2889 for (unsigned len = f->gcthings.length(); len; --len) {
2891 JS_ASSERT(v.isMarkable());
2892 if (IsAboutToBeFinalized(cx, v.toGCThing()))
// Shapes are tracked separately from ordinary GC things.
2895 const Shape** shapep = f->shapes.data();
2896 for (unsigned len = f->shapes.length(); len; --len) {
2897 const Shape* shape = *shapep++;
2898 if (IsShapeAboutToBeFinalized(cx, shape))
// GC sweep hook: unlink every tree whose GC things are about to die, and
// abort the active recording if its tree is among them.
2905 TraceMonitor::sweep(JSContext *cx)
2907 JS_ASSERT(!ontrace());
2908 debug_only_print0(LC_TMTracer, "Purging fragments with dead things");
2910 bool shouldAbortRecording = false;
2911 TreeFragment *recorderTree = NULL;
// Presumably guarded by `if (recorder)` (elided in this listing).
2913 recorderTree = recorder->getTree();
2914 shouldAbortRecording = HasUnreachableGCThings(cx, recorderTree);
2917 for (size_t i = 0; i < FRAGMENT_TABLE_SIZE; ++i) {
2918 TreeFragment** fragp = &vmfragments[i];
2919 while (TreeFragment* frag = *fragp) {
2920 TreeFragment* peer = frag;
// Scan the peer list for any dead GC thing (loop header elided here).
2922 if (HasUnreachableGCThings(cx, peer))
2927 debug_only_printf(LC_TMTracer,
2928 "TreeFragment peer %p has dead gc thing."
2929 "Disconnecting tree %p with ip %p\n",
2930 (void *) peer, (void *) frag, frag->ip);
2931 JS_ASSERT(frag->root == frag);
// Unlink |frag| from the bucket chain; *fragp is not advanced so the
// next iteration examines the fragment that replaced it.
2932 *fragp = frag->next;
2934 verbose_only( FragProfiling_FragFinalizer(frag, this); );
2935 if (recorderTree == frag)
2936 shouldAbortRecording = true;
2941 fragp = &frag->next;
2946 if (shouldAbortRecording)
2947 recorder->finishAbort("dead GC things");
// GC mark hook: keep alive the native-call argument values of every
// TracerState on the (linked) stack of active trace activations.
2951 TraceMonitor::mark(JSTracer *trc)
2953 TracerState* state = tracerState;
// Loop over the TracerState chain (loop header elided in this listing).
2955 if (state->nativeVp)
2956 MarkValueRange(trc, state->nativeVpLen, state->nativeVp, "nativeVp");
2957 state = state->prev;
2962 * Box a value from the native stack back into the Value format.
2965 NativeToValue(JSContext* cx, Value& v, JSValueType type, double* slot)
// Doubles are stored in-place; other boxable types carry a raw payload;
// STRORNULL/OBJORNULL distinguish the NULL case explicitly; BOXED slots
// already hold a full Value (same size as double, see static assert).
2967 if (type == JSVAL_TYPE_DOUBLE) {
2969 } else if (JS_LIKELY(type <= JSVAL_UPPER_INCL_TYPE_OF_BOXABLE_SET)) {
2970 v.boxNonDoubleFrom(type, (uint64 *)slot);
2971 } else if (type == JSVAL_TYPE_STRORNULL) {
2972 JSString *str = *(JSString **)slot;
2973 v = str ? StringValue(str) : NullValue();
2974 } else if (type == JSVAL_TYPE_OBJORNULL) {
2975 JSObject *obj = *(JSObject **)slot;
2976 v = obj ? ObjectValue(*obj) : NullValue();
2978 JS_ASSERT(type == JSVAL_TYPE_BOXED);
2979 JS_STATIC_ASSERT(sizeof(Value) == sizeof(double));
// NOTE(review): the case labels below belong to a debug-only type-spew
// switch whose header and break lines are elided from this listing.
2985 case JSVAL_TYPE_NONFUNOBJ:
2986 JS_ASSERT(!IsFunctionObject(v));
2987 debug_only_printf(LC_TMTracer,
2989 (void*) &v.toObject(),
2990 v.toObject().getClass()->name);
2992 case JSVAL_TYPE_INT32:
2993 debug_only_printf(LC_TMTracer, "int<%d> ", v.toInt32());
2995 case JSVAL_TYPE_DOUBLE:
2996 debug_only_printf(LC_TMTracer, "double<%g> ", v.toNumber());
2998 case JSVAL_TYPE_STRING:
2999 debug_only_printf(LC_TMTracer, "string<%p> ", (void*)v.toString());
3001 case JSVAL_TYPE_NULL:
3002 JS_ASSERT(v.isNull());
3003 debug_only_print0(LC_TMTracer, "null ");
3005 case JSVAL_TYPE_BOOLEAN:
3006 debug_only_printf(LC_TMTracer, "bool<%d> ", v.toBoolean());
3008 case JSVAL_TYPE_UNDEFINED:
3009 JS_ASSERT(v.isUndefined());
3010 debug_only_print0(LC_TMTracer, "undefined ");
3012 case JSVAL_TYPE_MAGIC:
3013 debug_only_printf(LC_TMTracer, "magic<%d> ", v.whyMagic());
3015 case JSVAL_TYPE_FUNOBJ:
3016 JS_ASSERT(IsFunctionObject(v));
3017 #if defined JS_JIT_SPEW
3018 if (LogController.lcbits & LC_TMTracer) {
3019 JSFunction* fun = GET_FUNCTION_PRIVATE(cx, &v.toObject());
3022 JS_PutEscapedFlatString(funName, sizeof funName, ATOM_TO_STRING(fun->atom), 0);
3024 strcpy(funName, "unnamed");
3025 LogController.printf("function<%p:%s> ", (void*) &v.toObject(), funName);
3029 case JSVAL_TYPE_STRORNULL:
3030 debug_only_printf(LC_TMTracer, "nullablestr<%p> ", v.isNull() ? NULL : (void *)v.toString());
3032 case JSVAL_TYPE_OBJORNULL:
// NOTE(review): "nullablestr" for an object looks like a copy/paste from
// the STRORNULL case; "nullableobj" was presumably intended (debug spew
// only, so left untouched here).
3033 debug_only_printf(LC_TMTracer, "nullablestr<%p> ", v.isNull() ? NULL : (void *)&v.toObject());
3035 case JSVAL_TYPE_BOXED:
3036 debug_only_printf(LC_TMTracer, "box<%llx> ", (long long unsigned int)v.asRawBits());
3039 JS_NOT_REACHED("unexpected type");
// Externally-linked wrapper around NativeToValue (for callers outside this
// translation unit).
3046 ExternNativeToValue(JSContext* cx, Value& v, JSValueType type, double* slot)
3048 return NativeToValue(cx, v, type, slot);
// Visitor that unboxes interpreter Values into the native global and stack
// areas, consuming one type-map entry per slot (see ValueToNative).
3051 class BuildNativeFrameVisitor : public SlotVisitorBase
3054 JSValueType *mTypeMap;
3058 BuildNativeFrameVisitor(JSContext *cx,
3059 JSValueType *typemap,
// Unbox one global slot into the native global area at |slot|.
3068 JS_REQUIRES_STACK JS_ALWAYS_INLINE void
3069 visitGlobalSlot(Value *vp, unsigned n, unsigned slot) {
3070 debug_only_printf(LC_TMTracer, "global%d: ", n);
3071 ValueToNative(*vp, *mTypeMap++, &mGlobal[slot]);
// Unbox |count| consecutive stack slots; advances both cursors.
3074 JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
3075 visitStackSlots(Value *vp, int count, JSStackFrame* fp) {
3076 for (int i = 0; i < count; ++i) {
3077 debug_only_printf(LC_TMTracer, "%s%d: ", stackSlotKind(), i);
3078 ValueToNative(*vp++, *mTypeMap++, mStack++);
// Frame-object slots (scope chain / args obj) are stored as raw JSObject*.
3083 JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
3084 visitFrameObjPtr(void* p, JSStackFrame* fp) {
3085 debug_only_printf(LC_TMTracer, "%s%d: ", stackSlotKind(), 0);
3086 if (p == fp->addressOfScopeChain())
3087 *(JSObject **)mStack = &fp->scopeChain();
3089 *(JSObject **)mStack = fp->hasArgsObj() ? &fp->argsObj() : NULL;
// Type map says NULL or a non-function object; verify in debug spew.
3091 if (*mTypeMap == JSVAL_TYPE_NULL) {
3092 JS_ASSERT(*(JSObject **)mStack == NULL);
3093 debug_only_print0(LC_TMTracer, "null ");
3095 JS_ASSERT(*mTypeMap == JSVAL_TYPE_NONFUNOBJ);
3096 JS_ASSERT(!(*(JSObject **)p)->isFunction());
3097 debug_only_printf(LC_TMTracer,
3098 "object<%p:%s> ", *(void **)p,
3099 (*(JSObject **)p)->getClass()->name);
// Unbox all tracked globals and stack slots into the native areas used by
// trace execution, driven by BuildNativeFrameVisitor.
3108 static JS_REQUIRES_STACK void
3109 BuildNativeFrame(JSContext *cx, JSObject *globalObj, unsigned callDepth,
3110 unsigned ngslots, uint16 *gslots,
3111 JSValueType *typeMap, double *global, double *stack)
3113 BuildNativeFrameVisitor visitor(cx, typeMap, global, stack);
3114 VisitSlots(visitor, cx, globalObj, callDepth, ngslots, gslots);
3115 debug_only_print0(LC_TMTracer, "\n");
// Visitor that boxes native global slots back into interpreter Values,
// consuming one type-map entry per slot (inverse of BuildNativeFrameVisitor).
3118 class FlushNativeGlobalFrameVisitor : public SlotVisitorBase
3121 JSValueType *mTypeMap;
3124 FlushNativeGlobalFrameVisitor(JSContext *cx,
3125 JSValueType *typeMap,
// Box the native cell at mGlobal[slot] back into *vp.
3132 JS_REQUIRES_STACK JS_ALWAYS_INLINE void
3133 visitGlobalSlot(Value *vp, unsigned n, unsigned slot) {
3134 debug_only_printf(LC_TMTracer, "global%d=", n);
// GC quota must already be waived by the caller while we re-box values.
3135 JS_ASSERT(JS_THREAD_DATA(mCx)->waiveGCQuota);
3136 NativeToValue(mCx, *vp, *mTypeMap++, &mGlobal[slot]);
// Visitor that boxes native stack slots back into interpreter frames and
// reattaches frame objects (args object, scope chain) to their JSStackFrame.
3140 class FlushNativeStackFrameVisitor : public SlotVisitorBase
3143 const JSValueType *mInitTypeMap;
3144 const JSValueType *mTypeMap;
3147 FlushNativeStackFrameVisitor(JSContext *cx,
3148 const JSValueType *typeMap,
3151 mInitTypeMap(typeMap),
// Current type-map cursor; FlushNativeStackFrame uses (cursor - start)
// to report how many slots were consumed.
3156 const JSValueType* getTypeMap()
// Box |count| consecutive native stack cells back into Values.
3161 JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
3162 visitStackSlots(Value *vp, size_t count, JSStackFrame* fp) {
3163 JS_ASSERT(JS_THREAD_DATA(mCx)->waiveGCQuota);
3164 for (size_t i = 0; i < count; ++i) {
3165 debug_only_printf(LC_TMTracer, "%s%u=", stackSlotKind(), unsigned(i));
3166 NativeToValue(mCx, *vp, *mTypeMap, mStack);
// Restore a frame-object slot: either the args object or the scope chain.
3174 JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
3175 visitFrameObjPtr(void* p, JSStackFrame* fp) {
3176 JS_ASSERT(JS_THREAD_DATA(mCx)->waiveGCQuota);
3177 debug_only_printf(LC_TMTracer, "%s%u=", stackSlotKind(), 0);
3178 JSObject *frameobj = *(JSObject **)mStack;
3179 JS_ASSERT((frameobj == NULL) == (*mTypeMap == JSVAL_TYPE_NULL));
3180 if (p == fp->addressOfArgs()) {
// Non-NULL args object: wire it to the frame; normal (non-strict)
// arguments objects also point back at the frame via their private.
3182 JS_ASSERT_IF(fp->hasArgsObj(), frameobj == &fp->argsObj());
3183 fp->setArgsObj(*frameobj);
3184 JS_ASSERT(frameobj->isArguments());
3185 if (frameobj->isNormalArguments())
3186 frameobj->setPrivate(fp);
3188 JS_ASSERT(!frameobj->getPrivate());
3189 debug_only_printf(LC_TMTracer,
3193 JS_ASSERT(!fp->hasArgsObj());
3194 debug_only_print0(LC_TMTracer,
3197 /* else, SynthesizeFrame has initialized fp->args.nactual */
3199 JS_ASSERT(p == fp->addressOfScopeChain());
// A Call object freshly created on trace (no private yet, callee
// matches) must be claimed by this frame; otherwise just set the chain.
3200 if (frameobj->isCall() &&
3201 !frameobj->getPrivate() &&
3202 fp->maybeCallee() == frameobj->getCallObjCallee())
3204 JS_ASSERT(&fp->scopeChain() == JSStackFrame::sInvalidScopeChain);
3205 frameobj->setPrivate(fp);
3206 fp->setScopeChainAndCallObj(*frameobj);
3208 fp->setScopeChainNoCallObj(*frameobj);
3210 debug_only_printf(LC_TMTracer,
// Debug spew of the restored slot's type.
3215 JSValueType type = *mTypeMap;
3216 if (type == JSVAL_TYPE_NULL) {
3217 debug_only_print0(LC_TMTracer, "null ");
3219 JS_ASSERT(type == JSVAL_TYPE_NONFUNOBJ);
3220 JS_ASSERT(!frameobj->isFunction());
3221 debug_only_printf(LC_TMTracer,
3224 frameobj->getClass()->name);
3233 /* Box the given native frame into a JS frame. This is infallible. */
3234 static JS_REQUIRES_STACK void
3235 FlushNativeGlobalFrame(JSContext *cx, JSObject *globalObj, double *global, unsigned ngslots,
3236 uint16 *gslots, JSValueType *typemap)
// Box every tracked global slot back into the global object's slots.
3238 FlushNativeGlobalFrameVisitor visitor(cx, typemap, global);
3239 VisitGlobalSlots(visitor, cx, globalObj, ngslots, gslots);
3240 debug_only_print0(LC_TMTracer, "\n");
3244 * Returns the number of values on the native stack, excluding the innermost
3245 * frame. This walks all FrameInfos on the native frame stack and sums the
3246 * slot usage of each frame.
3249 StackDepthFromCallStack(TracerState* state, uint32 callDepth)
3251 int32 nativeStackFramePos = 0;
3253 // Duplicate native stack layout computation: see VisitFrameSlots header comment.
// Sum callerHeight over FrameInfos from the base up to |callDepth|.
3254 for (FrameInfo** fip = state->callstackBase; fip < state->rp + callDepth; fip++)
3255 nativeStackFramePos += (*fip)->callerHeight;
3256 return nativeStackFramePos;
3260 * Generic function to read upvars on trace from slots of active frames.
3261 * T Traits type parameter. Must provide static functions:
3262 * interp_get(fp, slot) Read the value out of an interpreter frame.
3263 * native_slot(argc, slot) Return the position of the desired value in the on-trace
3264 * stack frame (with position 0 being callee).
3266 * upvarLevel Static level of the function containing the upvar definition
3267 * slot Identifies the value to get. The meaning is defined by the traits type.
3268 * callDepth Call depth of current point relative to trace entry
3270 template<typename T>
3272 GetUpvarOnTrace(JSContext* cx, uint32 upvarLevel, int32 slot, uint32 callDepth, double* result)
3274 TracerState* state = JS_TRACE_MONITOR_ON_TRACE(cx)->tracerState;
3275 FrameInfo** fip = state->rp + callDepth;
3278 * First search the FrameInfo call stack for an entry containing our
3279 * upvar, namely one with level == upvarLevel. The first FrameInfo is a
3280 * transition from the entry frame to some callee. However, it is not
3281 * known (from looking at the FrameInfo) whether the entry frame had a
3282 * callee. Rather than special-case this or insert more logic into the
3283 * loop, instead just stop before that FrameInfo (i.e. |> base| instead of
3284 * |>= base|), and let the code after the loop handle it.
3286 int32 stackOffset = StackDepthFromCallStack(state, callDepth);
3287 while (--fip > state->callstackBase) {
3288 FrameInfo* fi = *fip;
3291 * The loop starts aligned to the top of the stack, so move down to the first meaningful
3292 * callee. Then read the callee directly from the frame.
3294 stackOffset -= fi->callerHeight;
3295 JSObject* callee = *(JSObject**)(&state->stackBase[stackOffset]);
3296 JSFunction* fun = GET_FUNCTION_PRIVATE(cx, callee);
3297 uintN calleeLevel = fun->u.i.script->staticLevel;
3298 if (calleeLevel == upvarLevel) {
3300 * Now find the upvar's value in the native stack. stackOffset is
3301 * the offset of the start of the activation record corresponding
3302 * to *fip in the native stack.
3304 uint32 native_slot = T::native_slot(fi->callerArgc, slot);
3305 *result = state->stackBase[stackOffset + native_slot];
3306 return fi->get_typemap()[native_slot];
3310 // Next search the trace entry frame, which is not in the FrameInfo stack.
3311 if (state->outermostTree->script->staticLevel == upvarLevel) {
3312 uint32 argc = state->outermostTree->argc;
3313 uint32 native_slot = T::native_slot(argc, slot);
3314 *result = state->stackBase[native_slot];
3315 return state->callstackBase[0]->get_typemap()[native_slot];
3319 * If we did not find the upvar in the frames for the active traces,
3320 * then we simply get the value from the interpreter state.
3322 JS_ASSERT(upvarLevel < UpvarCookie::UPVAR_LEVEL_LIMIT);
3323 JSStackFrame* fp = cx->findFrameAtLevel(upvarLevel);
3324 Value v = T::interp_get(fp, slot);
// Unbox into |result|; the coerced type is (presumably) returned to the
// caller -- the return statement is elided from this listing.
3325 JSValueType type = getCoercedType(v);
3326 ValueToNative(v, type, result);
3330 // For this traits type, 'slot' is the argument index, which may be -2 for callee.
3331 struct UpvarArgTraits {
3332 static Value interp_get(JSStackFrame* fp, int32 slot) {
3333 return fp->formalArg(slot);
// On-trace layout puts callee and |this| before the formal args.
3336 static uint32 native_slot(uint32 argc, int32 slot) {
3337 return 2 /*callee,this*/ + slot;
// Entry point: read an upvar that is a formal argument.
3342 GetUpvarArgOnTrace(JSContext* cx, uint32 upvarLevel, int32 slot, uint32 callDepth, double* result)
3344 return GetUpvarOnTrace<UpvarArgTraits>(cx, upvarLevel, slot, callDepth, result);
3347 // For this traits type, 'slot' is an index into the local slots array.
3348 struct UpvarVarTraits {
3349 static Value interp_get(JSStackFrame* fp, int32 slot) {
3350 return fp->slots()[slot];
// Locals follow callee, this, arguments, scopeChain, and the args.
3353 static uint32 native_slot(uint32 argc, int32 slot) {
3354 return 4 /*callee,this,arguments,scopeChain*/ + argc + slot;
// Entry point: read an upvar that is a local variable.
3359 GetUpvarVarOnTrace(JSContext* cx, uint32 upvarLevel, int32 slot, uint32 callDepth, double* result)
3361 return GetUpvarOnTrace<UpvarVarTraits>(cx, upvarLevel, slot, callDepth, result);
3365 * For this traits type, 'slot' is an index into the stack area (within slots,
3366 * after nfixed) of a frame with no function. (On trace, the top-level frame is
3367 * the only one that can have no function.)
3369 struct UpvarStackTraits {
3370 static Value interp_get(JSStackFrame* fp, int32 slot) {
3371 return fp->slots()[slot + fp->numFixed()];
3374 static uint32 native_slot(uint32 argc, int32 slot) {
3376 * Locals are not imported by the tracer when the frame has no
3377 * function, so we do not add fp->getFixedCount().
3379 JS_ASSERT(argc == 0);
// Entry point: read an upvar from a functionless (top-level) frame.
3385 GetUpvarStackOnTrace(JSContext* cx, uint32 upvarLevel, int32 slot, uint32 callDepth,
3388 return GetUpvarOnTrace<UpvarStackTraits>(cx, upvarLevel, slot, callDepth, result);
3391 // Parameters needed to access a value from a closure on trace.
// NOTE(review): the member declarations are elided from this listing; at
// minimum GetFromClosure reads cv->callDepth and cv->slot -- confirm the
// full member list against the original source.
3392 struct ClosureVarInfo
3401 * Generic function to read upvars from Call objects of active heavyweight functions.
3402 * call Callee Function object in which the upvar is accessed.
3404 template<typename T>
3406 GetFromClosure(JSContext* cx, JSObject* call, const ClosureVarInfo* cv, double* result)
3408 JS_ASSERT(call->isCall());
3411 TracerState* state = JS_TRACE_MONITOR_ON_TRACE(cx)->tracerState;
3412 FrameInfo** fip = state->rp + cv->callDepth;
3413 int32 stackOffset = StackDepthFromCallStack(state, cv->callDepth);
// Walk the FrameInfo stack the same way GetUpvarOnTrace does; this loop
// only exists to assert that |call| is never an on-trace callee.
3414 while (--fip > state->callstackBase) {
3415 FrameInfo* fi = *fip;
3418 * The loop starts aligned to the top of the stack, so move down to the first meaningful
3419 * callee. Then read the callee directly from the frame.
3421 stackOffset -= fi->callerHeight;
3422 JSObject* callee = *(JSObject**)(&state->stackBase[stackOffset]);
3423 if (callee == call) {
3424 // This is not reachable as long as the tracer guards on the identity of the callee's
3425 // parent when making a call:
3427 // - We can only reach this point if we execute JSOP_LAMBDA on trace, then call the
3428 // function created by the lambda, and then execute a JSOP_NAME on trace.
3429 // - Each time we execute JSOP_LAMBDA we get a function with a different parent.
3430 // - When we execute the call to the new function, we exit trace because the parent
3432 JS_NOT_REACHED("JSOP_NAME variable found in outer trace");
3437 // We already guarded on trace that we aren't touching an outer tree's entry frame
3438 VOUCH_DOES_NOT_REQUIRE_STACK();
// If the Call object still has a live frame, read from the frame;
// otherwise read the slot stored on the Call object itself.
3439 JSStackFrame* fp = (JSStackFrame*) call->getPrivate();
3440 JS_ASSERT(fp != cx->fp());
3444 v = T::get_slot(fp, cv->slot);
3447 * Get the value from the object. We know we have a Call object, and
3448 * that our slot index is fine, so don't monkey around with calling the
3449 * property getter (which just looks in the slot) or calling
3450 * js_GetReservedSlot. Just get the slot directly. Note the static
3451 * asserts in jsfun.cpp which make sure Call objects use slots.
3453 JS_ASSERT(cv->slot < T::slot_count(call));
3454 v = T::get_slot(call, cv->slot);
// Unbox into |result|; the coerced type is (presumably) the return value
// -- the return statement is elided from this listing.
3456 JSValueType type = getCoercedType(v);
3457 ValueToNative(v, type, result);
// Traits for reading a closed-over formal argument out of either a live
// stack frame or a Call object's reserved slots.
3461 struct ArgClosureTraits
3463 // Get the right frame slots to use our slot index with.
3464 // See also UpvarArgTraits.
3465 static inline Value get_slot(JSStackFrame* fp, unsigned slot) {
3466 JS_ASSERT(slot < fp->numFormalArgs());
3467 return fp->formalArg(slot);
3470 // Get the right object slots to use our slot index with.
3471 static inline Value get_slot(JSObject* obj, unsigned slot) {
3472 return obj->getSlot(slot_offset(obj) + slot);
3475 // Get the offset of our object slots from the object's slots pointer.
3476 static inline uint32 slot_offset(JSObject* obj) {
3477 return JSObject::CALL_RESERVED_SLOTS;
3480 // Get the maximum slot index of this type that should be allowed
3481 static inline uint16 slot_count(JSObject* obj) {
3482 return obj->getCallObjCalleeFunction()->nargs;
// Entry point: read a closed-over argument via ArgClosureTraits.
3490 GetClosureArg(JSContext* cx, JSObject* callee, const ClosureVarInfo* cv, double* result)
3492 return GetFromClosure<ArgClosureTraits>(cx, callee, cv, result);
// Traits for reading a closed-over local variable; var slots live after the
// argument slots in a Call object.
3495 struct VarClosureTraits
3497 // See also UpvarVarTraits.
3498 static inline Value get_slot(JSStackFrame* fp, unsigned slot) {
3499 JS_ASSERT(slot < fp->fun()->script()->bindings.countVars());
3500 return fp->slots()[slot];
3503 static inline Value get_slot(JSObject* obj, unsigned slot) {
3504 return obj->getSlot(slot_offset(obj) + slot);
3507 static inline uint32 slot_offset(JSObject* obj) {
3508 return JSObject::CALL_RESERVED_SLOTS +
3509 obj->getCallObjCalleeFunction()->nargs;
3512 static inline uint16 slot_count(JSObject* obj) {
3513 return obj->getCallObjCalleeFunction()->script()->bindings.countVars();
// Entry point: read a closed-over variable via VarClosureTraits.
3521 GetClosureVar(JSContext* cx, JSObject* callee, const ClosureVarInfo* cv, double* result)
3523 return GetFromClosure<VarClosureTraits>(cx, callee, cv, result);
3527 * Box the given native stack frame into the virtual machine stack. This
3530 * @param callDepth the distance between the entry frame into our trace and
3531 * cx->fp() when we make this call. If this is not called as a
3532 * result of a nested exit, callDepth is 0.
3533 * @param mp an array of JSValueType that indicate what the types of the things
3535 * @param np pointer to the native stack. We want to copy values from here to
3536 * the JS stack as needed.
3537 * @return the number of things we popped off of np.
3539 static JS_REQUIRES_STACK int
3540 FlushNativeStackFrame(JSContext* cx, unsigned callDepth, const JSValueType* mp, double* np)
3542 /* Root all string and object references first (we don't need to call the GC for this). */
3543 FlushNativeStackFrameVisitor visitor(cx, mp, np);
3544 VisitStackSlots(visitor, cx, callDepth);
3546 debug_only_print0(LC_TMTracer, "\n");
// Slots consumed = distance the visitor's type-map cursor advanced.
3547 return visitor.getTypeMap() - mp;
3550 /* Emit load instructions onto the trace that read the initial stack state. */
3551 JS_REQUIRES_STACK void
3552 TraceRecorder::importImpl(Address addr, const void* p, JSValueType t,
3553 const char *prefix, uintN index, JSStackFrame *fp)
// Choose the load form by type. NOTE(review): several branches' load
// statements are elided from this numbered listing.
3556 if (t == JSVAL_TYPE_INT32) { /* demoted */
3557 JS_ASSERT(hasInt32Repr(*(const Value *)p));
3560 * Ok, we have a valid demotion attempt pending, so insert an integer
3561 * read and promote it to double since all arithmetic operations expect
3562 * to see doubles on entry. The first op to use this slot will emit a
3563 * d2i cast which will cancel out the i2d we insert here.
3568 JS_ASSERT_IF(t != JSVAL_TYPE_BOXED && !isFrameObjPtrTraceType(t),
3569 ((const Value *)p)->isNumber() == (t == JSVAL_TYPE_DOUBLE));
3570 if (t == JSVAL_TYPE_DOUBLE) {
3572 } else if (t == JSVAL_TYPE_BOOLEAN) {
3574 } else if (t == JSVAL_TYPE_UNDEFINED) {
3575 ins = w.immiUndefined();
3576 } else if (t == JSVAL_TYPE_MAGIC) {
// Record the emitted instruction for |p| in the tracker.
3582 checkForGlobalObjectReallocation();
3583 tracker.set(p, ins);
// Debug-only: build a human-readable name for the imported slot.
3587 JS_ASSERT(strlen(prefix) < 11);
3589 jsuword* localNames = NULL;
3590 const char* funName = NULL;
3591 JSAutoByteString funNameBytes;
// For args ('a'/argv) and vars ('v'/vars), pull names from the function's
// bindings, allocated from cx->tempPool (released below).
3592 if (*prefix == 'a' || *prefix == 'v') {
3593 mark = JS_ARENA_MARK(&cx->tempPool);
3594 JSFunction *fun = fp->fun();
3595 Bindings &bindings = fun->script()->bindings;
3596 if (bindings.hasLocalNames())
3597 localNames = bindings.getLocalNameArray(cx, &cx->tempPool);
3599 ? js_AtomToPrintableString(cx, fun->atom, &funNameBytes)
3602 if (!strcmp(prefix, "argv")) {
3603 if (index < fp->numFormalArgs()) {
3604 JSAtom *atom = JS_LOCAL_NAME_TO_ATOM(localNames[index]);
3605 JSAutoByteString atomBytes;
3606 JS_snprintf(name, sizeof name, "$%s.%s", funName,
3607 js_AtomToPrintableString(cx, atom, &atomBytes));
3609 JS_snprintf(name, sizeof name, "$%s.<arg%d>", funName, index);
3611 } else if (!strcmp(prefix, "vars")) {
// Local-variable names follow the formals in the local-name array.
3612 JSAtom *atom = JS_LOCAL_NAME_TO_ATOM(localNames[fp->numFormalArgs() + index]);
3613 JSAutoByteString atomBytes;
3614 JS_snprintf(name, sizeof name, "$%s.%s", funName,
3615 js_AtomToPrintableString(cx, atom, &atomBytes));
3617 JS_snprintf(name, sizeof name, "$%s%d", prefix, index);
3621 JS_ARENA_RELEASE(&cx->tempPool, mark);
3624 debug_only_printf(LC_TMTracer, "import vp=%p name=%s type=%c\n",
3625 p, name, TypeToChar(t));
// Type-safe wrapper: import a Value* by forwarding to importImpl.
3629 JS_REQUIRES_STACK void
3630 TraceRecorder::import(Address addr, const Value* p, JSValueType t,
3631 const char *prefix, uintN index, JSStackFrame *fp)
3633 return importImpl(addr, p, t, prefix, index, fp);
// Stack-slot visitor that eagerly imports and unboxes any slot whose typemap
// entry is JSVAL_TYPE_BOXED; other slots are left for lazy import.  Each slot
// advances the native-stack offset by sizeof(double), since every slot
// occupies one double-sized cell in the native frame.
3636 class ImportBoxedStackSlotVisitor : public SlotVisitorBase
3638 TraceRecorder &mRecorder;
3640 ptrdiff_t mStackOffset;
3641 JSValueType *mTypemap;
3644 ImportBoxedStackSlotVisitor(TraceRecorder &recorder,
3646 ptrdiff_t stackOffset,
3647 JSValueType *typemap) :
3648 mRecorder(recorder),
3650 mStackOffset(stackOffset),
3654 JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
3655 visitStackSlots(Value *vp, size_t count, JSStackFrame* fp) {
3656 for (size_t i = 0; i < count; ++i) {
3657 if (*mTypemap == JSVAL_TYPE_BOXED) {
// Import the boxed word, then unbox it (guarding via a copy of the
// recorder's anchor exit) and record the unboxed value for vp.
3658 mRecorder.import(StackAddress(mBase, mStackOffset), vp, JSVAL_TYPE_BOXED,
3660 LIns *vp_ins = mRecorder.unbox_value(*vp,
3661 StackAddress(mBase, mStackOffset),
3662 mRecorder.copy(mRecorder.anchor));
3663 mRecorder.set(vp, vp_ins);
3667 mStackOffset += sizeof(double);
3672 JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
3673 visitFrameObjPtr(void* p, JSStackFrame *fp) {
// Frame object pointers (scope chain / args obj) are never boxed.
3674 JS_ASSERT(*mTypemap != JSVAL_TYPE_BOXED);
3676 mStackOffset += sizeof(double);
// Prepare lazy importing for a trace activation: complete a possibly-partial
// global type map from the tree's entry map, eagerly unbox any boxed stack
// slots, and stash the stack/global type maps for later lazy imports.
3681 JS_REQUIRES_STACK void
3682 TraceRecorder::import(TreeFragment* tree, LIns* sp, unsigned stackSlots, unsigned ngslots,
3683 unsigned callDepth, JSValueType* typeMap)
3686 * If we get a partial list that doesn't have all the types (i.e. recording
3687 * from a side exit that was recorded but we added more global slots
3688 * later), merge the missing types from the entry type map. This is safe
3689 * because at the loop edge we verify that we have compatible types for all
3690 * globals (entry type and loop edge type match). While a different trace
3691 * of the tree might have had a guard with a different type map for these
3692 * slots we just filled in here (the guard we continue from didn't know
3693 * about them), since we didn't take that particular guard the only way we
3694 * could have ended up here is if that other trace had at its end a
3695 * compatible type distribution with the entry map. Since that's exactly
3696 * what we used to fill in the types our current side exit didn't provide,
3697 * this is always safe to do.
// The global portion of the incoming typemap follows the stack portion.
3700 JSValueType* globalTypeMap = typeMap + stackSlots;
3701 unsigned length = tree->nGlobalTypes();
3704 * This is potentially the typemap of the side exit and thus shorter than
3705 * the tree's global type map.
3707 if (ngslots < length) {
3708 MergeTypeMaps(&globalTypeMap /* out param */, &ngslots /* out param */,
3709 tree->globalTypeMap(), length,
3710 (JSValueType*)alloca(sizeof(JSValueType) * length));
3712 JS_ASSERT(ngslots == tree->nGlobalTypes());
3715 * Check whether there are any values on the stack we have to unbox and do
3716 * that first before we waste any time fetching the state from the stack.
3718 ImportBoxedStackSlotVisitor boxedStackVisitor(*this, sp, -tree->nativeStackBase, typeMap);
3719 VisitStackSlots(boxedStackVisitor, cx, callDepth);
3722 * Remember the import type map so we can lazily import later whatever
3725 importTypeMap.set(importStackSlots = stackSlots,
3726 importGlobalSlots = ngslots,
3727 typeMap, globalTypeMap);
// Decide whether a property described by `shape` on `obj` is safe to access
// from trace: stub (default) setter, writable when the current op writes,
// stub getter unless this is a pure JOF_SET, and a slot actually present
// on the object.  RETURN_VALUE logs the reason and returns false.
3730 JS_REQUIRES_STACK bool
3731 TraceRecorder::isValidSlot(JSObject *obj, const Shape* shape)
// Is the current bytecode op a write (set / incdec / for-in assignment)?
3733 uint32 setflags = (js_CodeSpec[*cx->regs->pc].format & (JOF_SET | JOF_INCDEC | JOF_FOR));
3736 if (!shape->hasDefaultSetter())
3737 RETURN_VALUE("non-stub setter", false);
3738 if (!shape->writable())
3739 RETURN_VALUE("writing to a read-only property", false);
3742 /* This check applies even when setflags == 0. */
3743 if (setflags != JOF_SET && !shape->hasDefaultGetter()) {
3744 JS_ASSERT(!shape->isMethod());
3745 RETURN_VALUE("non-stub getter", false);
3748 if (!obj->containsSlot(shape->slot))
3749 RETURN_VALUE("invalid-slot obj property", false);
3754 /* Lazily import a global slot if we don't already have it in the tracker. */
// Interns the slot in the tree's global-slot list if it isn't there yet
// (choosing a coerced type, double if the oracle says it is undemotable),
// then imports the value from the global area (eos) of the native frame.
3755 JS_REQUIRES_STACK void
3756 TraceRecorder::importGlobalSlot(unsigned slot)
// Global slot numbers are stored in a 16-bit table; see lazilyImportGlobalSlot.
3758 JS_ASSERT(slot == uint16(slot));
3759 JS_ASSERT(globalObj->numSlots() <= MAX_GLOBAL_SLOTS);
3761 Value* vp = &globalObj->getSlotRef(slot);
3762 JS_ASSERT(!known(vp));
3764 /* Add the slot to the list of interned global slots. */
3766 int index = tree->globalSlots->offsetOf(uint16(slot));
3768 type = getCoercedType(*vp);
3769 if (type == JSVAL_TYPE_INT32 && (!oracle || oracle->isGlobalSlotUndemotable(cx, slot)))
3770 type = JSVAL_TYPE_DOUBLE;
3771 index = (int)tree->globalSlots->length();
3772 tree->globalSlots->add(uint16(slot));
3773 tree->typeMap.add(type);
// Other trees sharing this global object must learn about the new slot.
3774 SpecializeTreesToMissingGlobals(cx, globalObj, tree);
3775 JS_ASSERT(tree->nGlobalTypes() == tree->globalSlots->length());
// Already-interned slot: its type lives after the stack types in the map.
3777 type = importTypeMap[importStackSlots + index];
3779 import(EosAddress(eos_ins, slot * sizeof(double)), vp, type, "global", index, NULL);
3782 /* Lazily import a global slot if we don't already have it in the tracker. */
// Returns whether the slot is (now) traceable; bails out for slot numbers
// that overflow 16 bits or for oversized global objects.
3783 JS_REQUIRES_STACK bool
3784 TraceRecorder::lazilyImportGlobalSlot(unsigned slot)
3786 if (slot != uint16(slot)) /* we use a table of 16-bit ints, bail out if that's not enough */
3789 * If the global object grows too large, alloca in ExecuteTree might fail,
3790 * so abort tracing on global objects with unreasonably many slots.
3792 if (globalObj->numSlots() > MAX_GLOBAL_SLOTS)
3794 Value* vp = &globalObj->getSlotRef(slot);
3796 return true; /* we already have it */
3797 importGlobalSlot(slot);
3801 /* Write back a value onto the stack or global frames. */
// `base` must be the native stack pointer (lirbuf->sp) or the global area
// (eos_ins).  Global writes also enqueue the slot number so the interpreter
// state can be synced later.  A promoted int32 is optionally demoted first.
3803 TraceRecorder::writeBack(LIns* ins, LIns* base, ptrdiff_t offset, bool shouldDemoteToInt32)
3806 * Sink all type casts targeting the stack into the side exit by simply storing the original
3807 * (uncasted) value. Each guard generates the side exit map based on the types of the
3808 * last stores to every stack location, so it's safe to not perform them on-trace.
3810 JS_ASSERT(base == lirbuf->sp || base == eos_ins);
3811 if (shouldDemoteToInt32 && IsPromotedInt32(ins))
3812 ins = w.demoteToInt32(ins);
3815 if (base == lirbuf->sp) {
3816 addr = StackAddress(base, offset);
3818 addr = EosAddress(base, offset);
// Each global slot occupies one double-sized cell in the global area.
3819 unsigned slot = unsigned(offset / sizeof(double));
3820 (void)pendingGlobalSlotsToSet.append(slot); /* OOM is safe. */
3822 return w.st(ins, addr);
3825 /* Update the tracker, then issue a write back store. */
// Record `i` as the current LIR value of interpreter location `p` and store
// it into the native frame.  The first write to `p` computes the frame offset
// directly; subsequent writes reuse (and assert-check) the base/displacement
// of the previous store remembered in nativeFrameTracker.
3826 JS_REQUIRES_STACK void
3827 TraceRecorder::setImpl(void* p, LIns* i, bool shouldDemoteToInt32)
3829 JS_ASSERT(i != NULL);
3830 checkForGlobalObjectReallocation();
3834 * If we are writing to this location for the first time, calculate the
3835 * offset into the native frame manually. Otherwise just look up the last
3836 * load or store associated with the same source address (p) and use the
3839 LIns* x = nativeFrameTracker.get(p);
3841 if (isVoidPtrGlobal(p))
3842 x = writeBack(i, eos_ins, nativeGlobalOffset((Value *)p), shouldDemoteToInt32);
3844 x = writeBack(i, lirbuf->sp, nativespOffsetImpl(p), shouldDemoteToInt32);
3845 nativeFrameTracker.set(p, x);
// The remembered instruction must be a store; recover its base+displacement.
3847 #if defined NANOJIT_64BIT
3848 JS_ASSERT( x->isop(LIR_stq) || x->isop(LIR_sti) || x->isop(LIR_std));
3850 JS_ASSERT( x->isop(LIR_sti) || x->isop(LIR_std));
3854 LIns *base = x->oprnd2();
// Stores through addp(base, imm) fold the displacement into the address.
3855 if (base->isop(LIR_addp) && base->oprnd2()->isImmP()) {
3856 disp = ptrdiff_t(base->oprnd2()->immP());
3857 base = base->oprnd1();
3862 JS_ASSERT(base == lirbuf->sp || base == eos_ins);
3863 JS_ASSERT(disp == ((base == lirbuf->sp)
3864 ? nativespOffsetImpl(p)
3865 : nativeGlobalOffset((Value *)p)));
3867 writeBack(i, base, disp, shouldDemoteToInt32);
// Typed wrappers around setImpl for Value slots and frame-object pointers.
3871 JS_REQUIRES_STACK inline void
3872 TraceRecorder::set(Value* p, LIns* i, bool shouldDemoteToInt32)
3874 return setImpl(p, i, shouldDemoteToInt32);
3877 JS_REQUIRES_STACK void
3878 TraceRecorder::setFrameObjPtr(void* p, LIns* i, bool shouldDemoteToInt32)
// p must be a frame's scope-chain or args-obj address (see isValidFrameObjPtr).
3880 JS_ASSERT(isValidFrameObjPtr(p));
3881 return setImpl(p, i, shouldDemoteToInt32);
// Return the tracked LIR value for p, or attempt a lazy import when p is a
// live stack slot of the current trace activation.
3884 JS_REQUIRES_STACK LIns*
3885 TraceRecorder::attemptImport(const Value* p)
3887 if (LIns* i = getFromTracker(p))
3890 /* If the variable was not known, it could require a lazy import. */
3891 CountSlotsVisitor countVisitor(p);
3892 VisitStackSlots(countVisitor, cx, callDepth);
// p is importable if the visitor reached it, or it lies within fp's slots.
3894 if (countVisitor.stopped() || size_t(p - cx->fp()->slots()) < cx->fp()->numSlots())
// Tracker lookup (no lazy import); returns NULL if p has not been imported.
3900 inline nanojit::LIns*
3901 TraceRecorder::getFromTrackerImpl(const void* p)
// Tracker keys are interpreter addresses, so re-map first if the global
// object's slots were reallocated.
3903 checkForGlobalObjectReallocation();
3904 return tracker.get(p);
3907 inline nanojit::LIns*
3908 TraceRecorder::getFromTracker(const Value* p)
3910 return getFromTrackerImpl(p);
// Tracker lookup with lazy import on a miss: globals go through
// importGlobalSlot, stack slots through importImpl with the type recorded
// in importTypeMap.
3913 JS_REQUIRES_STACK LIns*
3914 TraceRecorder::getImpl(const void *p)
3916 LIns* x = getFromTrackerImpl(p);
3919 if (isVoidPtrGlobal(p)) {
3920 unsigned slot = nativeGlobalSlot((const Value *)p);
// The slot must already be interned in the tree's global-slot list.
3921 JS_ASSERT(tree->globalSlots->offsetOf(uint16(slot)) != -1);
3922 importGlobalSlot(slot);
3924 unsigned slot = nativeStackSlotImpl(p);
3925 JSValueType type = importTypeMap[slot];
3926 importImpl(StackAddress(lirbuf->sp, -tree->nativeStackBase + slot * sizeof(jsdouble)),
3927 p, type, "stack", slot, cx->fp());
3929 JS_ASSERT(knownImpl(p));
3930 return tracker.get(p);
// Public typed lookup; body elided in this excerpt (presumably forwards to
// getImpl — TODO confirm against the full source).
3933 JS_REQUIRES_STACK LIns*
3934 TraceRecorder::get(const Value *p)
// True iff p is the scope-chain or args-obj address of some frame on the
// current frame chain.
3941 TraceRecorder::isValidFrameObjPtr(void *p)
3943 JSStackFrame *fp = cx->fp();
3944 for (; fp; fp = fp->prev()) {
3945 if (fp->addressOfScopeChain() == p || fp->addressOfArgs() == p)
// Look up (or lazily import) the LIR value for a frame-object pointer slot.
3952 JS_REQUIRES_STACK LIns*
3953 TraceRecorder::getFrameObjPtr(void *p)
3955 JS_ASSERT(isValidFrameObjPtr(p));
// Build a LIR expression for the native-frame address of interpreter slot p:
// globals are addressed off eos_ins, stack slots off the native sp.
3959 JS_REQUIRES_STACK LIns*
3960 TraceRecorder::addr(Value* p)
3963 ? w.addp(eos_ins, w.nameImmw(nativeGlobalOffset(p)))
3964 : w.addp(lirbuf->sp, w.nameImmw(nativespOffset(p)));
// Membership test: has this interpreter address already been imported?
3967 JS_REQUIRES_STACK inline bool
3968 TraceRecorder::knownImpl(const void* p)
3970 checkForGlobalObjectReallocation();
3971 return tracker.has(p);
// Typed wrappers for Value slots and object-pointer slots.
3974 JS_REQUIRES_STACK inline bool
3975 TraceRecorder::known(const Value* vp)
3977 return knownImpl(vp);
3980 JS_REQUIRES_STACK inline bool
3981 TraceRecorder::known(JSObject** p)
3983 return knownImpl(p);
3987 * The slots of the global object are sometimes reallocated by the interpreter.
3988 * This function checks for that condition and re-maps the entries of the tracker
3991 JS_REQUIRES_STACK void
3992 TraceRecorder::checkForGlobalObjectReallocationHelper()
3994 debug_only_print0(LC_TMTracer, "globalObj->slots relocated, updating tracker\n");
3995 Value* src = global_slots;
3996 Value* dst = globalObj->getSlots();
3997 jsuint length = globalObj->capacity;
// Snapshot all tracker entries keyed by the old slot addresses...
3998 LIns** map = (LIns**)alloca(sizeof(LIns*) * length);
3999 for (jsuint n = 0; n < length; ++n) {
4000 map[n] = tracker.get(src);
4001 tracker.set(src++, NULL);
// ...then re-insert them under the new addresses and remember the new base.
4003 for (jsuint n = 0; n < length; ++n)
4004 tracker.set(dst++, map[n]);
4005 global_slots = globalObj->getSlots();
4008 /* Determine whether the current branch is a loop edge (taken or not taken). */
// A loop edge is a (possibly extended, JUMPX) jump whose target is the loop
// header; short-circuit ops (AND/OR and their X forms) are the only other
// opcodes expected here.
4009 static JS_REQUIRES_STACK bool
4010 IsLoopEdge(jsbytecode* pc, jsbytecode* header)
4015 return ((pc + GET_JUMP_OFFSET(pc)) == header);
4018 return ((pc + GET_JUMPX_OFFSET(pc)) == header);
4020 JS_ASSERT((*pc == JSOP_AND) || (*pc == JSOP_ANDX) ||
4021 (*pc == JSOP_OR) || (*pc == JSOP_ORX));
// Global-slot visitor used by adjustCallerTypes: when the inner tree expects
// a double but the outer trace holds a promoted int32, store the full double
// back to the global area and tell the oracle to stop demoting that slot.
4026 class AdjustCallerGlobalTypesVisitor : public SlotVisitorBase
4028 TraceRecorder &mRecorder;
4030 nanojit::LirBuffer *mLirbuf;
4031 JSValueType *mTypeMap;
4033 AdjustCallerGlobalTypesVisitor(TraceRecorder &recorder,
4034 JSValueType *typeMap) :
4035 mRecorder(recorder),
4037 mLirbuf(mRecorder.lirbuf),
// Returns the cursor, i.e. one past the last typemap entry consumed.
4041 JSValueType* getTypeMap()
4046 JS_REQUIRES_STACK JS_ALWAYS_INLINE void
4047 visitGlobalSlot(Value *vp, unsigned n, unsigned slot) {
4048 LIns *ins = mRecorder.get(vp);
4049 bool isPromote = IsPromotedInt32(ins);
4050 if (isPromote && *mTypeMap == JSVAL_TYPE_DOUBLE) {
4051 mRecorder.w.st(mRecorder.get(vp),
4052 EosAddress(mRecorder.eos_ins, mRecorder.nativeGlobalOffset(vp)));
4054 * Aggressively undo speculation so the inner tree will compile
4057 mRecorder.traceMonitor->oracle->markGlobalSlotUndemotable(mCx, slot);
// A non-promoted value with an int32 typemap entry would be irreconcilable.
4059 JS_ASSERT(!(!isPromote && *mTypeMap == JSVAL_TYPE_INT32));
// Stack-slot counterpart of AdjustCallerGlobalTypesVisitor: writes back the
// double form and marks the slot undemotable when the inner tree wants a
// double where the outer trace holds a promoted int32.
4064 class AdjustCallerStackTypesVisitor : public SlotVisitorBase
4066 TraceRecorder &mRecorder;
4068 nanojit::LirBuffer *mLirbuf;
4070 JSValueType *mTypeMap;
4072 AdjustCallerStackTypesVisitor(TraceRecorder &recorder,
4073 JSValueType *typeMap) :
4074 mRecorder(recorder),
4076 mLirbuf(mRecorder.lirbuf),
// Returns the cursor, i.e. one past the last typemap entry consumed.
4081 JSValueType* getTypeMap()
4086 JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
4087 visitStackSlots(Value *vp, size_t count, JSStackFrame* fp) {
4088 /* N.B. vp may actually point to a JSObject*. */
4089 for (size_t i = 0; i < count; ++i) {
4090 LIns *ins = mRecorder.get(vp);
4091 bool isPromote = IsPromotedInt32(ins);
4092 if (isPromote && *mTypeMap == JSVAL_TYPE_DOUBLE) {
4093 mRecorder.w.st(ins, StackAddress(mLirbuf->sp, mRecorder.nativespOffset(vp)));
4095 * Aggressively undo speculation so the inner tree will compile
4098 mRecorder.traceMonitor->oracle->markStackSlotUndemotable(mCx, mSlotnum);
4100 JS_ASSERT(!(!isPromote && *mTypeMap == JSVAL_TYPE_INT32));
4108 JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
4109 visitFrameObjPtr(void* p, JSStackFrame* fp) {
4110 JS_ASSERT(*mTypeMap != JSVAL_TYPE_BOXED);
4118 * Promote slots if necessary to match the called tree's type map. This
4119 * function is infallible and must only be called if we are certain that it is
4120 * possible to reconcile the types for each slot in the inner and outer trees.
4122 JS_REQUIRES_STACK void
4123 TraceRecorder::adjustCallerTypes(TreeFragment* f)
// Walk globals against the callee's global type map, then the stack against
// its stack type map (callDepth 0: only the current frame's slots).
4125 AdjustCallerGlobalTypesVisitor globalVisitor(*this, f->globalTypeMap());
4126 VisitGlobalSlots(globalVisitor, cx, *tree->globalSlots);
4128 AdjustCallerStackTypesVisitor stackVisitor(*this, f->stackTypeMap());
4129 VisitStackSlots(stackVisitor, cx, 0);
4131 JS_ASSERT(f == f->root);
// Compute the trace type to record for slot vp in a side-exit typemap.
// Numbers report int32 vs. double based on the last LIR representation
// (tracker entry if present, otherwise the import typemap); objects are
// split into function/non-function; other values use their intrinsic tag.
4134 JS_REQUIRES_STACK inline JSValueType
4135 TraceRecorder::determineSlotType(Value* vp)
4137 if (vp->isNumber()) {
4138 LIns *i = getFromTracker(vp);
4141 t = IsPromotedInt32(i) ? JSVAL_TYPE_INT32 : JSVAL_TYPE_DOUBLE;
4142 } else if (isGlobal(vp)) {
4143 int offset = tree->globalSlots->offsetOf(uint16(nativeGlobalSlot(vp)));
4144 JS_ASSERT(offset != -1);
// Global types follow the stack types in the import typemap.
4145 t = importTypeMap[importStackSlots + offset];
4147 t = importTypeMap[nativeStackSlot(vp)];
4149 JS_ASSERT_IF(t == JSVAL_TYPE_INT32, hasInt32Repr(*vp));
4154 return vp->toObject().isFunction() ? JSVAL_TYPE_FUNOBJ : JSVAL_TYPE_NONFUNOBJ;
4155 return vp->extractNonDoubleObjectTraceType();
// Slot visitor that fills a typemap buffer by calling determineSlotType on
// every global and stack slot (frame-object pointers get their own tag).
4158 class DetermineTypesVisitor : public SlotVisitorBase
4160 TraceRecorder &mRecorder;
4161 JSValueType *mTypeMap;
4163 DetermineTypesVisitor(TraceRecorder &recorder,
4164 JSValueType *typeMap) :
4165 mRecorder(recorder),
4169 JS_REQUIRES_STACK JS_ALWAYS_INLINE void
4170 visitGlobalSlot(Value *vp, unsigned n, unsigned slot) {
4171 *mTypeMap++ = mRecorder.determineSlotType(vp);
4174 JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
4175 visitStackSlots(Value *vp, size_t count, JSStackFrame* fp) {
4176 for (size_t i = 0; i < count; ++i)
4177 *mTypeMap++ = mRecorder.determineSlotType(vp++);
4181 JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
4182 visitFrameObjPtr(void* p, JSStackFrame* fp) {
4183 *mTypeMap++ = getFrameObjPtrTraceType(p, fp);
// Returns the cursor, i.e. one past the last entry written.
4187 JSValueType* getTypeMap()
4193 #if defined JS_JIT_SPEW
// Debug-only: emit a TREEVIS line describing a side exit (location plus the
// per-slot type characters for its stack and global typemaps).
4194 JS_REQUIRES_STACK static void
4195 TreevisLogExit(JSContext* cx, VMSideExit* exit)
4197 debug_only_printf(LC_TMTreeVis, "TREEVIS ADDEXIT EXIT=%p TYPE=%s FRAG=%p PC=%p FILE=\"%s\""
4198 " LINE=%d OFFS=%d", (void*)exit, getExitName(exit->exitType),
4199 (void*)exit->from, (void*)cx->regs->pc, cx->fp()->script()->filename,
4200 js_FramePCToLineNumber(cx, cx->fp()), FramePCOffset(cx, cx->fp()));
4201 debug_only_print0(LC_TMTreeVis, " STACK=\"");
4202 for (unsigned i = 0; i < exit->numStackSlots; i++)
4203 debug_only_printf(LC_TMTreeVis, "%c", TypeToChar(exit->stackTypeMap()[i]));
4204 debug_only_print0(LC_TMTreeVis, "\" GLOBALS=\"");
4205 for (unsigned i = 0; i < exit->numGlobalSlots; i++)
4206 debug_only_printf(LC_TMTreeVis, "%c", TypeToChar(exit->globalTypeMap()[i]));
4207 debug_only_print0(LC_TMTreeVis, "\"\n");
// Capture a side exit describing the current interpreter state: compute the
// per-slot typemap, patch in pending boxed/nullable results, reuse an
// existing matching LOOP_EXIT if one exists, otherwise allocate and fill a
// new VMSideExit.  NOTE(review): some interior lines are elided in this
// excerpt.
4211 JS_REQUIRES_STACK VMSideExit*
4212 TraceRecorder::snapshot(ExitType exitType)
4214 JSStackFrame* const fp = cx->fp();
4215 JSFrameRegs* const regs = cx->regs;
4216 jsbytecode* pc = regs->pc;
4219 * Check for a return-value opcode that needs to restart at the next
4222 const JSCodeSpec& cs = js_CodeSpec[*pc];
4225 * When calling a _FAIL native, make the snapshot's pc point to the next
4226 * instruction after the CALL or APPLY. Even on failure, a _FAIL native
4227 * must not be called again from the interpreter.
4229 bool resumeAfter = (pendingSpecializedNative &&
4230 JSTN_ERRTYPE(pendingSpecializedNative) == FAIL_STATUS);
4232 JS_ASSERT(*pc == JSOP_CALL || *pc == JSOP_FUNAPPLY || *pc == JSOP_FUNCALL ||
4233 *pc == JSOP_NEW || *pc == JSOP_SETPROP || *pc == JSOP_SETNAME);
4236 MUST_FLOW_THROUGH("restore_pc");
4240 * Generate the entry map for the (possibly advanced) pc and stash it in
4243 unsigned stackSlots = NativeStackSlots(cx, callDepth);
4246 * It's sufficient to track the native stack use here since all stores
4247 * above the stack watermark defined by guards are killed.
4249 trackNativeStackUse(stackSlots + 1);
4251 /* Capture the type map into a temporary location. */
4252 unsigned ngslots = tree->globalSlots->length();
4253 unsigned typemap_size = (stackSlots + ngslots) * sizeof(JSValueType);
4255 /* Use the recorder-local temporary type map. */
4256 JSValueType* typemap = NULL;
4257 if (tempTypeMap.resize(typemap_size))
4258 typemap = tempTypeMap.begin(); /* crash if resize() fails. */
4261 * Determine the type of a store by looking at the current type of the
4262 * actual value the interpreter is using. For numbers we have to check what
4263 * kind of store we used last (integer or double) to figure out what the
4264 * side exit should reflect in its typemap.
4266 DetermineTypesVisitor detVisitor(*this, typemap);
4267 VisitSlots(detVisitor, cx, callDepth, ngslots,
4268 tree->globalSlots->data());
4269 JS_ASSERT(unsigned(detVisitor.getTypeMap() - typemap) ==
4270 ngslots + stackSlots);
4273 * If this snapshot is for a side exit that leaves a boxed Value result on
4274 * the stack, make a note of this in the typemap. Examples include the
4275 * builtinStatus guard after calling a _FAIL builtin, a JSFastNative, or
4276 * GetPropertyByName; and the type guard in unbox_value after such a call
4277 * (also at the beginning of a trace branched from such a type guard).
4279 if (pendingUnboxSlot ||
4280 (pendingSpecializedNative && (pendingSpecializedNative->flags & JSTN_UNBOX_AFTER))) {
4281 unsigned pos = stackSlots - 1;
4282 if (pendingUnboxSlot == cx->regs->sp - 2)
4283 pos = stackSlots - 2;
4284 typemap[pos] = JSVAL_TYPE_BOXED;
4285 } else if (pendingSpecializedNative &&
4286 (pendingSpecializedNative->flags & JSTN_RETURN_NULLABLE_STR)) {
4287 typemap[stackSlots - 1] = JSVAL_TYPE_STRORNULL;
4288 } else if (pendingSpecializedNative &&
4289 (pendingSpecializedNative->flags & JSTN_RETURN_NULLABLE_OBJ)) {
4290 typemap[stackSlots - 1] = JSVAL_TYPE_OBJORNULL;
4293 /* Now restore the original pc (after which early returns are ok). */
4295 MUST_FLOW_LABEL(restore_pc);
4296 regs->pc = pc - cs.length;
4299 * If we take a snapshot on a goto, advance to the target address. This
4300 * avoids inner trees returning on a break goto, which the outer
4301 * recorder then would confuse with a break in the outer tree.
4303 if (*pc == JSOP_GOTO)
4304 pc += GET_JUMP_OFFSET(pc);
4305 else if (*pc == JSOP_GOTOX)
4306 pc += GET_JUMPX_OFFSET(pc);
4310 * Check if we already have a matching side exit; if so we can return that
4311 * side exit instead of creating a new one.
4313 VMSideExit** exits = tree->sideExits.data();
4314 unsigned nexits = tree->sideExits.length();
4315 if (exitType == LOOP_EXIT) {
4316 for (unsigned n = 0; n < nexits; ++n) {
4317 VMSideExit* e = exits[n];
// An exit matches if it covers the same pc/imacro pc and its full typemap
// (stack + globals) is identical.
4318 if (e->pc == pc && (e->imacpc == fp->maybeImacropc()) &&
4319 ngslots == e->numGlobalSlots &&
4320 !memcmp(exits[n]->fullTypeMap(), typemap, typemap_size)) {
4321 AUDIT(mergedLoopExits);
4322 #if defined JS_JIT_SPEW
4323 TreevisLogExit(cx, e);
4330 /* We couldn't find a matching side exit, so create a new one. */
4331 VMSideExit* exit = (VMSideExit*)
4332 traceAlloc().alloc(sizeof(VMSideExit) + (stackSlots + ngslots) * sizeof(JSValueType));
4334 /* Setup side exit structure. */
4335 exit->from = fragment;
4336 exit->calldepth = callDepth;
4337 exit->numGlobalSlots = ngslots;
4338 exit->numStackSlots = stackSlots;
4339 exit->numStackSlotsBelowCurrentFrame = cx->fp()->isFunctionFrame() ?
4340 nativeStackOffset(&cx->fp()->calleeValue()) / sizeof(double) :
4342 exit->exitType = exitType;
4344 exit->imacpc = fp->maybeImacropc();
4345 exit->sp_adj = (stackSlots * sizeof(double)) - tree->nativeStackBase;
4346 exit->rp_adj = exit->calldepth * sizeof(FrameInfo*);
4347 exit->lookupFlags = js_InferFlags(cx, 0);
4348 memcpy(exit->fullTypeMap(), typemap, typemap_size);
4350 #if defined JS_JIT_SPEW
4351 TreevisLogExit(cx, exit);
// Allocate a GuardRecord for `exit` and, when profiling, number it and link
// it onto the fragment's guard list.
4356 JS_REQUIRES_STACK GuardRecord*
4357 TraceRecorder::createGuardRecord(VMSideExit* exit)
4360 // For debug builds, place the guard records in a longer lasting
4361 // pool. This is because the fragment profiler will look at them
4362 // relatively late in the day, after they would have been freed,
4363 // in some cases, had they been allocated in traceAlloc().
4364 GuardRecord* gr = new (dataAlloc()) GuardRecord();
4366 // The standard place (for production builds).
4367 GuardRecord* gr = new (traceAlloc()) GuardRecord();
4373 // gr->profCount is calloc'd to zero
4375 gr->profGuardID = fragment->guardNumberer++;
4376 gr->nextInFrag = fragment->guardsForFrag;
4377 fragment->guardsForFrag = gr;
4383 /* Test if 'ins' is in a form that can be used as a guard/branch condition. */
// Valid conditions are comparisons or the constants 0/1.
4387 return ins->isCmp() || ins->isImmI(0) || ins->isImmI(1);
4390 /* Ensure 'ins' is in a form suitable for a guard/branch condition. */
// Non-conditions are rewritten as an equality-with-zero test (integer or
// pointer as appropriate), inverting the expected sense via *cond.
4392 TraceRecorder::ensureCond(LIns** ins, bool* cond)
4394 if (!isCond(*ins)) {
4396 *ins = (*ins)->isI() ? w.eqi0(*ins) : w.eqp0(*ins);
4401 * Emit a guard for condition (cond), expecting to evaluate to boolean result
4402 * (expected) and using the supplied side exit if the condition doesn't hold.
4404 * Callers shouldn't generate guards that always exit (which can occur due to
4405 * optimization of the guard condition) because it's bad for both compile-time
4406 * speed (all the code generated after the guard is dead) and run-time speed
4407 * (fragments that always exit are slow). This function has two modes for
4408 * handling an always-exit guard; which mode is used depends on the value of
4409 * abortIfAlwaysExits:
4411 * - abortIfAlwaysExits == false: This is the default mode. If the guard
4412 * will always exit, we assert (in debug builds) as a signal that we are
4413 * generating bad traces. (In optimized builds that lack assertions the
4414 * guard will be generated correctly, so the code will be slow but safe.) In
4415 * this mode, the caller is responsible for not generating an always-exit
4416 * guard. The return value will always be RECORD_CONTINUE, so the caller
4417 * need not check it.
4419 * - abortIfAlwaysExits == true: If the guard will always exit, we abort
4420 * recording and return RECORD_STOP; otherwise we generate the guard
4421 * normally and return RECORD_CONTINUE. This mode can be used when the
4422 * caller doesn't know ahead of time whether the guard will always exit. In
4423 * this mode, the caller must check the return value.
4425 JS_REQUIRES_STACK RecordingStatus
4426 TraceRecorder::guard(bool expected, LIns* cond, VMSideExit* exit,
4427 bool abortIfAlwaysExits/* = false */)
// Loop exits are tracked on the tree so snapshot() can merge against them.
4429 if (exit->exitType == LOOP_EXIT)
4430 tree->sideExits.add(exit);
4432 JS_ASSERT(isCond(cond));
// A constant condition opposite to `expected` means the guard always exits.
4434 if ((cond->isImmI(0) && expected) || (cond->isImmI(1) && !expected)) {
4435 if (abortIfAlwaysExits) {
4436 /* The guard always exits, the caller must check for an abort. */
4437 RETURN_STOP("Constantly false guard detected");
4440 * If you hit this assertion, first decide if you want recording to
4441 * abort in the case where the guard always exits. If not, find a way
4442 * to detect that case and avoid calling guard(). Otherwise, change
4443 * the invocation of guard() so it passes in abortIfAlwaysExits=true,
4444 * and have the caller check the return value, eg. using
4445 * CHECK_STATUS(). (In optimized builds, we'll fall through to the
4446 * insGuard() below and an always-exits guard will be inserted, which
4447 * is correct but sub-optimal.)
4449 JS_NOT_REACHED("unexpected constantly false guard detected");
4453 * Nb: if the guard is never taken, no instruction will be created and
4454 * insGuard() will return NULL. This is a good thing.
4456 GuardRecord* guardRec = createGuardRecord(exit);
// xf exits when the condition is false; xt when it is true.
4457 expected ? w.xf(cond, guardRec) : w.xt(cond, guardRec);
4458 return RECORD_CONTINUE;
4462 * Emit a guard for condition (cond), expecting to evaluate to boolean result
4463 * (expected) and generate a side exit with type exitType to jump to if the
4464 * condition does not hold.
4466 JS_REQUIRES_STACK RecordingStatus
4467 TraceRecorder::guard(bool expected, LIns* cond, ExitType exitType,
4468 bool abortIfAlwaysExits/* = false */)
// Convenience overload: snapshot the current state, then delegate.
4470 return guard(expected, cond, snapshot(exitType), abortIfAlwaysExits);
4474 * Emit a guarded 32-bit integer arithmetic operation op(d0, d1),
4475 * using the supplied side exit if it overflows.
4477 JS_REQUIRES_STACK LIns*
4478 TraceRecorder::guard_xov(LOpcode op, LIns* d0, LIns* d1, VMSideExit* exit)
4480 JS_ASSERT(exit->exitType == OVERFLOW_EXIT);
4482 GuardRecord* guardRec = createGuardRecord(exit);
// Overflow-checking add/sub/mul; anything else is a caller bug.
4485 return w.addxovi(d0, d1, guardRec);
4487 return w.subxovi(d0, d1, guardRec);
4489 return w.mulxovi(d0, d1, guardRec);
4493 JS_NOT_REACHED("unexpected opcode");
// Clone a side exit (including its full typemap) for reuse from the current
// fragment, resetting per-exit linkage (guards/from/target).
4497 JS_REQUIRES_STACK VMSideExit*
4498 TraceRecorder::copy(VMSideExit* copy)
4500 size_t typemap_size = copy->numGlobalSlots + copy->numStackSlots;
4501 VMSideExit* exit = (VMSideExit*)
4502 traceAlloc().alloc(sizeof(VMSideExit) + typemap_size * sizeof(JSValueType));
4504 /* Copy side exit structure. */
4505 memcpy(exit, copy, sizeof(VMSideExit) + typemap_size * sizeof(JSValueType));
4506 exit->guards = NULL;
4507 exit->from = fragment;
4508 exit->target = NULL;
// Like guard(): loop exits must be registered on the tree.
4510 if (exit->exitType == LOOP_EXIT)
4511 tree->sideExits.add(exit);
4512 #if defined JS_JIT_SPEW
4513 TreevisLogExit(cx, exit);
4519 * Determine whether any context associated with the same thread as cx is
4520 * executing native code.
4523 ProhibitFlush(TraceMonitor *tm)
4525 return !!tm->tracerState; // don't flush if we're running a trace
// Flush the JIT cache for cx's trace monitor: abort any active recording,
// and defer the flush (setting needFlush) if a trace is currently running.
4529 ResetJITImpl(JSContext* cx, TraceMonitor* tm)
4531 if (!cx->traceJitEnabled)
4533 debug_only_print0(LC_TMTracer, "Flushing cache.\n");
4535 JS_ASSERT_NOT_ON_TRACE(cx);
4536 AbortRecording(cx, "flush cache");
4542 if (ProhibitFlush(tm)) {
4543 debug_only_print0(LC_TMTracer, "Deferring JIT flush due to deep bail.\n");
4544 tm->needFlush = JS_TRUE;
4550 /* Compile the current fragment. */
// Bails out on pending flush / excessive stack use, runs the nanojit
// assembler, blacklists the tree ip on assembler error, and patches the
// anchor exit to jump into the new code.  NOTE(review): some interior lines
// are elided in this excerpt.
4551 JS_REQUIRES_STACK AbortableRecordingStatus
4552 TraceRecorder::compile()
4555 TraceVisStateObj tvso(cx, S_COMPILE);
4558 if (traceMonitor->needFlush) {
4559 ResetJIT(cx, traceMonitor, FR_DEEP_BAIL);
4560 return ARECORD_ABORTED;
4562 if (tree->maxNativeStackSlots >= MAX_NATIVE_STACK_SLOTS) {
4563 debug_only_print0(LC_TMTracer, "Blacklist: excessive stack use.\n");
4564 Blacklist((jsbytecode*)tree->ip);
4565 return ARECORD_STOP;
// Count this branch against the tree (case exits don't count).
4567 if (anchor && anchor->exitType != CASE_EXIT)
4568 ++tree->branchCount;
4570 return ARECORD_STOP;
4572 /* :TODO: windows support */
4573 #if defined DEBUG && !defined WIN32
4574 /* Associate a filename and line number with the fragment. */
4575 const char* filename = cx->fp()->script()->filename;
4576 char* label = (char*)js_malloc((filename ? strlen(filename) : 7) + 16);
4578 sprintf(label, "%s:%u", filename ? filename : "<stdin>",
4579 js_FramePCToLineNumber(cx, cx->fp()));
4580 lirbuf->printer->addrNameMap->addAddrRange(fragment, sizeof(Fragment), 0, label);
4585 Assembler *assm = traceMonitor->assembler;
4586 JS_ASSERT(!assm->error());
4587 assm->compile(fragment, tempAlloc(), /*optimize*/true verbose_only(, lirbuf->printer));
4589 if (assm->error()) {
// Clear the error so the assembler can be reused, then blacklist this tree.
4590 assm->setError(nanojit::None);
4591 debug_only_print0(LC_TMTracer, "Blacklisted: error during compilation\n");
4592 Blacklist((jsbytecode*)tree->ip);
4593 return ARECORD_STOP;
4597 return ARECORD_STOP;
4598 ResetRecordingAttempts(traceMonitor, (jsbytecode*)fragment->ip);
4599 ResetRecordingAttempts(traceMonitor, (jsbytecode*)tree->ip);
// Case exits carry switch info that the patcher needs.
4602 if (anchor->exitType == CASE_EXIT)
4603 assm->patch(anchor, anchor->switchInfo);
4606 assm->patch(anchor);
4608 JS_ASSERT(fragment->code());
4609 JS_ASSERT_IF(fragment == fragment->root, fragment->root == tree);
4611 return ARECORD_CONTINUE;
// Link side exit `exit` to tree `target` and record the cross-tree
// dependency (unless the exit already belongs to the target's own tree).
4615 JoinPeers(Assembler* assm, VMSideExit* exit, TreeFragment* target)
4617 exit->target = target;
4620 debug_only_printf(LC_TMTreeVis, "TREEVIS JOIN ANCHOR=%p FRAG=%p\n", (void*)exit, (void*)target);
4622 if (exit->root() == target)
// target must be recompiled if exit's tree changes, and vice versa.
4625 target->dependentTrees.addUnique(exit->root());
4626 exit->root()->linkedTrees.addUnique(target);
4629 /* Results of trying to connect an arbitrary type A with arbitrary type B */
// Consumed by SlotMap::checkType/adjustType below.
4630 enum TypeCheckResult
4632 TypeCheck_Okay, /* Okay: same type */
4633 TypeCheck_Promote, /* Okay: Type A needs d2i() */
4634 TypeCheck_Demote, /* Okay: Type A needs i2d() */
4635 TypeCheck_Undemote, /* Bad: Slot is undemotable */
4636 TypeCheck_Bad /* Bad: incompatible types */
4639 class SlotMap : public SlotVisitorBase
4645 : vp(NULL), isPromotedInt32(false), lastCheck(TypeCheck_Bad)
4647 SlotInfo(Value* vp, bool isPromotedInt32)
4648 : vp(vp), isPromotedInt32(isPromotedInt32), lastCheck(TypeCheck_Bad),
4649 type(getCoercedType(*vp))
4651 SlotInfo(JSValueType t)
4652 : vp(NULL), isPromotedInt32(false), lastCheck(TypeCheck_Bad), type(t)
4654 SlotInfo(Value* vp, JSValueType t)
4655 : vp(vp), isPromotedInt32(t == JSVAL_TYPE_INT32), lastCheck(TypeCheck_Bad), type(t)
4658 bool isPromotedInt32;
4659 TypeCheckResult lastCheck;
4663 SlotMap(TraceRecorder& rec)
4674 JS_REQUIRES_STACK JS_ALWAYS_INLINE void
4675 visitGlobalSlot(Value *vp, unsigned n, unsigned slot)
4680 JS_ALWAYS_INLINE SlotMap::SlotInfo&
4681 operator [](unsigned i)
4686 JS_ALWAYS_INLINE SlotMap::SlotInfo&
4692 JS_ALWAYS_INLINE unsigned
4695 return slots.length();
4699 * Possible return states:
4701 * TypeConsensus_Okay: All types are compatible. Caller must go through slot list and handle
4703 * TypeConsensus_Bad: Types are not compatible. Individual type check results are undefined.
4704 * TypeConsensus_Undemotes: Types would be compatible if slots were marked as undemotable
4705 * before recording began. Caller can go through slot list and mark
4706 * such slots as undemotable.
4708 JS_REQUIRES_STACK TypeConsensus
4709 checkTypes(LinkableFragment* f)
4711 if (length() != f->typeMap.length())
4712 return TypeConsensus_Bad;
4714 bool has_undemotes = false;
4715 for (unsigned i = 0; i < length(); i++) {
4716 TypeCheckResult result = checkType(i, f->typeMap[i]);
4717 if (result == TypeCheck_Bad)
4718 return TypeConsensus_Bad;
4719 if (result == TypeCheck_Undemote)
4720 has_undemotes = true;
4721 slots[i].lastCheck = result;
4724 return TypeConsensus_Undemotes;
4725 return TypeConsensus_Okay;
/*
 * SlotMap::addSlot(Value* vp): record a stack/global slot, computing whether
 * its current LIR value is a promoted int32. If the slot is untracked, the
 * answer is read from the import typemap instead (global vs. stack offset).
 * NOTE(review): lines are missing between the embedded numbers (e.g. 4739-4740,
 * 4742-4744), so the else-branches and RHS continuations are incomplete here.
 */
4728 JS_REQUIRES_STACK JS_ALWAYS_INLINE void
4731 bool isPromotedInt32 = false;
/* Only number-typed values can be int32-promoted. */
4732 if (vp->isNumber()) {
4733 if (LIns* i = mRecorder.getFromTracker(vp)) {
4734 isPromotedInt32 = IsPromotedInt32(i);
4735 } else if (mRecorder.isGlobal(vp)) {
/* Untracked global: consult the imported global typemap entry. */
4736 int offset = mRecorder.tree->globalSlots->offsetOf(uint16(mRecorder.nativeGlobalSlot(vp)));
4737 JS_ASSERT(offset != -1);
4738 isPromotedInt32 = mRecorder.importTypeMap[mRecorder.importStackSlots + offset] ==
/* Untracked stack slot: consult the imported stack typemap entry. */
4741 isPromotedInt32 = mRecorder.importTypeMap[mRecorder.nativeStackSlot(vp)] ==
4745 slots.add(SlotInfo(vp, isPromotedInt32));
/* addSlot(JSValueType): record a typed slot with no interpreter location. */
4748 JS_REQUIRES_STACK JS_ALWAYS_INLINE void
4749 addSlot(JSValueType t)
4751 slots.add(SlotInfo(NULL, t));
/* addSlot(Value*, JSValueType): record a slot with an explicit type. */
4754 JS_REQUIRES_STACK JS_ALWAYS_INLINE void
4755 addSlot(Value *vp, JSValueType t)
4757 slots.add(SlotInfo(vp, t));
/*
 * SlotMap::markUndemotes: after a checkTypes() that reported undemotes,
 * tell the oracle every slot whose last check was TypeCheck_Undemote.
 */
4760 JS_REQUIRES_STACK void
4763 for (unsigned i = 0; i < length(); i++) {
4764 if (get(i).lastCheck == TypeCheck_Undemote)
4765 mRecorder.markSlotUndemotable(mRecorder.tree, i);
/*
 * adjustTypes: apply the pending per-slot promote/demote decisions recorded
 * by checkTypes(); delegates each slot to adjustType() below.
 */
4769 JS_REQUIRES_STACK virtual void
4772 for (unsigned i = 0; i < length(); i++)
/*
 * adjustType: reconcile one slot. Promote is assert-only (the value must
 * already be a promoted int32); Demote pins the final i2d conversion.
 */
4777 JS_REQUIRES_STACK virtual void
4778 adjustType(SlotInfo& info) {
4779 JS_ASSERT(info.lastCheck != TypeCheck_Undemote && info.lastCheck != TypeCheck_Bad);
4781 if (info.lastCheck == TypeCheck_Promote) {
4782 JS_ASSERT(info.type == JSVAL_TYPE_INT32 || info.type == JSVAL_TYPE_DOUBLE);
4784 * This should only happen if the slot has a trivial conversion, i.e.
4785 * IsPromotedInt32() is true. We check this.
4787 * Note that getFromTracker() will return NULL if the slot was
4788 * never used, in which case we don't do the check. We could
4789 * instead call mRecorder.get(info.vp) and always check, but
4790 * get() has side-effects, which is not good in an assertion.
4791 * Not checking unused slots isn't so bad.
4793 LIns* ins = mRecorder.getFromTrackerImpl(info.vp);
4794 JS_ASSERT_IF(ins, IsPromotedInt32(ins));
4797 if (info.lastCheck == TypeCheck_Demote) {
4798 JS_ASSERT(info.type == JSVAL_TYPE_INT32 || info.type == JSVAL_TYPE_DOUBLE);
4799 JS_ASSERT(mRecorder.getImpl(info.vp)->isD());
4801 /* Never demote this final i2d. */
4802 mRecorder.setImpl(info.vp, mRecorder.getImpl(info.vp), false);
/*
 * SlotMap::checkType: compare recorded slot i against typemap entry t and
 * classify the relationship (Okay / Promote / Demote / Undemote / Bad).
 * NOTE(review): the switch header (line 4817) is missing from this excerpt;
 * the cases below presumably switch on t — confirm against the full file.
 */
4808 checkType(unsigned i, JSValueType t)
4810 debug_only_printf(LC_TMTracer,
4811 "checkType slot %d: interp=%c typemap=%c isNum=%d isPromotedInt32=%d\n",
4813 TypeToChar(slots[i].type),
4815 slots[i].type == JSVAL_TYPE_INT32 || slots[i].type == JSVAL_TYPE_DOUBLE,
4816 slots[i].isPromotedInt32);
4818 case JSVAL_TYPE_INT32:
4819 if (slots[i].type != JSVAL_TYPE_INT32 && slots[i].type != JSVAL_TYPE_DOUBLE)
4820 return TypeCheck_Bad; /* Not a number? Type mismatch. */
4821 /* This is always a type mismatch, we can't close a double to an int. */
4822 if (!slots[i].isPromotedInt32)
4823 return TypeCheck_Undemote;
4824 /* Looks good, slot is an int32, the last instruction should be promotable. */
4825 JS_ASSERT_IF(slots[i].vp,
4826 hasInt32Repr(*(const Value *)slots[i].vp) && slots[i].isPromotedInt32);
4827 return slots[i].vp ? TypeCheck_Promote : TypeCheck_Okay;
4828 case JSVAL_TYPE_DOUBLE:
4829 if (slots[i].type != JSVAL_TYPE_INT32 && slots[i].type != JSVAL_TYPE_DOUBLE)
4830 return TypeCheck_Bad; /* Not a number? Type mismatch. */
/* Int32 that must widen: demotable only if there is an interpreter slot. */
4831 if (slots[i].isPromotedInt32)
4832 return slots[i].vp ? TypeCheck_Demote : TypeCheck_Bad;
4833 return TypeCheck_Okay;
/* Default case: non-numeric types must match exactly. */
4835 return slots[i].type == t ? TypeCheck_Okay : TypeCheck_Bad;
4837 JS_NOT_REACHED("shouldn't fall through type check switch");
/* SlotMap data members: the owning recorder and the recorded slot list. */
4840 TraceRecorder& mRecorder;
4842 Queue<SlotInfo> slots;
/*
 * DefaultSlotMap: the standard SlotMap used by closeLoop(). Visits every
 * stack slot and frame-object pointer, recording each with its trace type.
 * NOTE(review): visitor bodies and the class's closing brace fall in gaps
 * of this excerpt.
 */
4845 class DefaultSlotMap : public SlotMap
4848 DefaultSlotMap(TraceRecorder& tr) : SlotMap(tr)
4852 virtual ~DefaultSlotMap()
/* Record each of the `count` stack values for frame fp. */
4856 JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
4857 visitStackSlots(Value *vp, size_t count, JSStackFrame* fp)
4859 for (size_t i = 0; i < count; i++)
/* Frame-object pointers carry no Value*; record type only. */
4864 JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
4865 visitFrameObjPtr(void* p, JSStackFrame* fp)
4867 addSlot(getFrameObjPtrTraceType(p, fp));
/*
 * selfTypeStability: check whether the loop being closed is type-stable
 * against its own tree. On an Undemotes verdict, mark the offending slots
 * undemotable so the next recording attempt can succeed.
 * NOTE(review): the final return (after markUndemotes) falls in a gap here.
 */
4872 JS_REQUIRES_STACK TypeConsensus
4873 TraceRecorder::selfTypeStability(SlotMap& slotMap)
4875 debug_only_printf(LC_TMTracer, "Checking type stability against self=%p\n", (void*)fragment);
4876 TypeConsensus consensus = slotMap.checkTypes(tree);
4878 /* Best case: loop jumps back to its own header */
4879 if (consensus == TypeConsensus_Okay)
4880 return TypeConsensus_Okay;
4883 * If the only thing keeping this loop from being stable is undemotions, then mark relevant
4884 * slots as undemotable.
4886 if (consensus == TypeConsensus_Undemotes)
4887 slotMap.markUndemotes();
/*
 * peerTypeStability: when the loop is not stable against itself, scan the
 * peer fragments at the same ip for one it IS stable against. Returns Okay
 * if a compatible peer was found (outparam set in a line elided here),
 * Undemotes if peers only failed on undemotable slots, else Bad.
 */
4892 JS_REQUIRES_STACK TypeConsensus
4893 TraceRecorder::peerTypeStability(SlotMap& slotMap, const void* ip, TreeFragment** pPeer)
4895 JS_ASSERT(tree->first == LookupLoop(traceMonitor, ip, tree->globalObj, tree->globalShape, tree->argc));
4897 /* See if there are any peers that would make this stable */
4898 bool onlyUndemotes = false;
4899 for (TreeFragment *peer = tree->first; peer != NULL; peer = peer->peer) {
/* Skip uncompiled peers and the fragment being recorded itself. */
4900 if (!peer->code() || peer == fragment)
4902 debug_only_printf(LC_TMTracer, "Checking type stability against peer=%p\n", (void*)peer);
4903 TypeConsensus consensus = slotMap.checkTypes(peer);
4904 if (consensus == TypeConsensus_Okay) {
4907 * Return this even though there will be linkage; the trace itself is not stable.
4908 * Caller should inspect ppeer to check for a compatible peer.
4910 return TypeConsensus_Okay;
4912 if (consensus == TypeConsensus_Undemotes)
4913 onlyUndemotes = true;
4916 return onlyUndemotes ? TypeConsensus_Undemotes : TypeConsensus_Bad;
/*
 * TraceRecorder::closeLoop: finish recording a loop trace. Determines type
 * stability (self, then peers), applies pending promotes/demotes, emits the
 * closing guard or loop-back jump, compiles, and links edges into the tree.
 * NOTE(review): this excerpt is non-contiguous; several braces/else lines
 * fall in gaps of the embedded numbering.
 */
4920 * Complete and compile a trace and link it to the existing tree if
4921 * appropriate. Returns ARECORD_ABORTED or ARECORD_STOP, depending on whether
4922 * the recorder was deleted. Outparam is always set.
4924 JS_REQUIRES_STACK AbortableRecordingStatus
4925 TraceRecorder::closeLoop()
4927 VMSideExit *exit = snapshot(UNSTABLE_LOOP_EXIT);
/* Collect every live slot (stack + globals) into the slot map. */
4929 DefaultSlotMap slotMap(*this);
4930 VisitSlots(slotMap, cx, 0, *tree->globalSlots);
4933 * We should have arrived back at the loop header, and hence we don't want
4934 * to be in an imacro here and the opcode should be either JSOP_TRACE or, in
4935 * case this loop was blacklisted in the meantime, JSOP_NOTRACE.
4937 JS_ASSERT(*cx->regs->pc == JSOP_TRACE || *cx->regs->pc == JSOP_NOTRACE);
4938 JS_ASSERT(!cx->fp()->hasImacropc());
/* Unbalanced call depth at the loop edge: likely recursion, give up. */
4940 if (callDepth != 0) {
4941 debug_only_print0(LC_TMTracer,
4942 "Blacklisted: stack depth mismatch, possible recursion.\n");
4943 Blacklist((jsbytecode*)tree->ip);
4945 return ARECORD_STOP;
4948 JS_ASSERT(exit->numStackSlots == tree->nStackTypes);
4949 JS_ASSERT(fragment->root == tree);
4950 JS_ASSERT(!trashSelf);
4952 TreeFragment* peer = NULL;
/* Stability check: self first, then fall back to scanning peers. */
4954 TypeConsensus consensus = selfTypeStability(slotMap);
4955 if (consensus != TypeConsensus_Okay) {
4956 TypeConsensus peerConsensus = peerTypeStability(slotMap, tree->ip, &peer);
4957 /* If there was a semblance of a stable peer (even if not linkable), keep the result. */
4958 if (peerConsensus != TypeConsensus_Bad)
4959 consensus = peerConsensus;
4963 if (consensus != TypeConsensus_Okay || peer)
4964 AUDIT(unstableLoopVariable);
4968 * This exit is indeed linkable to something now. Process any promote or
4969 * demotes that are pending in the slot map.
4971 if (consensus == TypeConsensus_Okay)
4972 slotMap.adjustTypes();
/* Type-unstable (or peer-targeted) close: end the trace with a guard. */
4974 if (consensus != TypeConsensus_Okay || peer) {
4975 fragment->lastIns = w.x(createGuardRecord(exit));
4977 /* If there is a peer, there must have been an "Okay" consensus. */
4978 JS_ASSERT_IF(peer, consensus == TypeConsensus_Okay);
4980 /* Compile as a type-unstable loop, and hope for a connection later. */
4983 * If such a fragment does not exist, let's compile the loop ahead
4984 * of time anyway. Later, if the loop becomes type stable, we will
4985 * connect these two fragments together.
4987 debug_only_print0(LC_TMTracer,
4988 "Trace has unstable loop variable with no stable peer, "
4989 "compiling anyway.\n");
4990 UnstableExit* uexit = new (traceAlloc()) UnstableExit;
4991 uexit->fragment = fragment;
/* Prepend this exit to the tree's unstable-exit list. */
4993 uexit->next = tree->unstableExits;
4994 tree->unstableExits = uexit;
/* Stable against a peer: jump straight into the peer's code. */
4996 JS_ASSERT(peer->code());
4997 exit->target = peer;
4998 debug_only_printf(LC_TMTracer,
4999 "Joining type-unstable trace to target fragment %p.\n",
/* Record the dependency both ways so invalidation propagates. */
5001 peer->dependentTrees.addUnique(tree);
5002 tree->linkedTrees.addUnique(peer);
5005 exit->exitType = LOOP_EXIT;
5006 debug_only_printf(LC_TMTreeVis, "TREEVIS CHANGEEXIT EXIT=%p TYPE=%s\n", (void*)exit,
5007 getExitName(LOOP_EXIT));
5009 JS_ASSERT((fragment == fragment->root) == !!loopLabel);
/* Self-stable loop: emit the loop-back edge to our own header. */
5012 w.comment("end-loop");
5013 w.livep(lirbuf->state);
5016 exit->target = tree;
5018 * This guard is dead code. However, it must be present because it
5019 * can keep alive values on the stack. Without it, StackFilter can
5020 * remove some stack stores that it shouldn't. See bug 582766 comment
5023 fragment->lastIns = w.x(createGuardRecord(exit));
5026 CHECK_STATUS_A(compile());
5028 debug_only_printf(LC_TMTreeVis, "TREEVIS CLOSELOOP EXIT=%p PEER=%p\n", (void*)exit, (void*)peer);
5030 JS_ASSERT(LookupLoop(traceMonitor, tree->ip, tree->globalObj, tree->globalShape, tree->argc) ==
5032 JS_ASSERT(tree->first);
/* Try to connect previously-recorded unstable exits to this new tree. */
5035 joinEdgesToEntry(peer);
5037 debug_only_stmt(DumpPeerStability(traceMonitor, peer->ip, peer->globalObj,
5038 peer->globalShape, peer->argc);)
5040 debug_only_print0(LC_TMTracer,
5041 "updating specializations on dependent and linked trees\n");
5043 SpecializeTreesToMissingGlobals(cx, globalObj, tree);
5046 * If this is a newly formed tree, and the outer tree has not been compiled yet, we
5047 * should try to compile the outer tree again.
5050 AttemptCompilation(traceMonitor, globalObj, outerScript, outerPC, outerArgc);
5052 debug_only_printf(LC_TMMinimal,
5053 "Recording completed at %s:%u@%u via closeLoop (FragID=%06u)\n",
5054 cx->fp()->script()->filename,
5055 js_FramePCToLineNumber(cx, cx->fp()),
5056 FramePCOffset(cx, cx->fp()),
5057 fragment->profFragID);
5058 debug_only_print0(LC_TMMinimal, "\n");
5061 return finishSuccessfully();
/*
 * FullMapFromExit: rebuild a complete typemap (stack types, then global
 * types) from a side exit, appending any globals that were specialized at
 * the tree root after the exit was taken.
 */
5065 FullMapFromExit(TypeMap& typeMap, VMSideExit* exit)
5067 typeMap.setLength(0);
5068 typeMap.fromRaw(exit->stackTypeMap(), exit->numStackSlots);
5069 typeMap.fromRaw(exit->globalTypeMap(), exit->numGlobalSlots);
5070 /* Include globals that were later specialized at the root of the tree. */
5071 if (exit->numGlobalSlots < exit->root()->nGlobalTypes()) {
5072 typeMap.fromRaw(exit->root()->globalTypeMap() + exit->numGlobalSlots,
5073 exit->root()->nGlobalTypes() - exit->numGlobalSlots);
/*
 * TypeMapLinkability: decide whether an exit typemap can be linked to a
 * peer's entry typemap. Exact matches are Okay; an int32 slot feeding a
 * double slot that the oracle says is undemotable yields Undemotes; any
 * other mismatch is Bad.
 * NOTE(review): the closing lines returning `consensus` fall in a gap here.
 */
5077 static JS_REQUIRES_STACK TypeConsensus
5078 TypeMapLinkability(JSContext* cx, TraceMonitor *tm, const TypeMap& typeMap, TreeFragment* peer)
5080 const TypeMap& peerMap = peer->typeMap;
5081 unsigned minSlots = JS_MIN(typeMap.length(), peerMap.length());
5082 TypeConsensus consensus = TypeConsensus_Okay;
5083 for (unsigned i = 0; i < minSlots; i++) {
5084 if (typeMap[i] == peerMap[i])
5086 if (typeMap[i] == JSVAL_TYPE_INT32 && peerMap[i] == JSVAL_TYPE_DOUBLE &&
5087 IsSlotUndemotable(tm->oracle, cx, peer, i, peer->ip)) {
5088 consensus = TypeConsensus_Undemotes;
5090 return TypeConsensus_Bad;
/*
 * findUndemotesInTypemaps: collect the slots where `typeMap` has int32 but
 * fragment f expects double, mark each undemotable on f, and return how
 * many were found (non-zero means f should be re-recorded/trashed).
 */
5096 JS_REQUIRES_STACK unsigned
5097 TraceRecorder::findUndemotesInTypemaps(const TypeMap& typeMap, LinkableFragment* f,
5098 Queue<unsigned>& undemotes)
5100 undemotes.setLength(0);
5101 unsigned minSlots = JS_MIN(typeMap.length(), f->typeMap.length());
5102 for (unsigned i = 0; i < minSlots; i++) {
/* int32 -> double disagreement is the undemote case we are hunting. */
5103 if (typeMap[i] == JSVAL_TYPE_INT32 && f->typeMap[i] == JSVAL_TYPE_DOUBLE) {
5105 } else if (typeMap[i] != f->typeMap[i]) {
5109 for (unsigned i = 0; i < undemotes.length(); i++)
5110 markSlotUndemotable(f, undemotes[i]);
5111 return undemotes.length();
/*
 * joinEdgesToEntry: walk every peer's unstable exits and, where the exit's
 * typemap is now compatible with this tree, patch the exit to jump into the
 * tree (removing it from the unstable list). Incompatible int32/double
 * slots are marked undemotable and the owning tree is queued for trashing.
 */
5114 JS_REQUIRES_STACK void
5115 TraceRecorder::joinEdgesToEntry(TreeFragment* peer_root)
/* Only root fragments link unstable exits. */
5117 if (fragment->root != fragment)
5120 TypeMap typeMap(NULL, traceMonitor->oracle);
5121 Queue<unsigned> undemotes(NULL);
5123 for (TreeFragment* peer = peer_root; peer; peer = peer->peer) {
5126 UnstableExit* uexit = peer->unstableExits;
5127 while (uexit != NULL) {
5128 /* Build the full typemap for this unstable exit */
5129 FullMapFromExit(typeMap, uexit->exit);
5130 /* Check its compatibility against this tree */
5131 TypeConsensus consensus = TypeMapLinkability(cx, traceMonitor, typeMap, tree);
5132 JS_ASSERT_IF(consensus == TypeConsensus_Okay, peer != fragment);
5133 if (consensus == TypeConsensus_Okay) {
5134 debug_only_printf(LC_TMTracer,
5135 "Joining type-stable trace to target exit %p->%p.\n",
5136 (void*)uexit->fragment, (void*)uexit->exit);
5139 * See bug 531513. Before linking these trees, make sure the
5140 * peer's dependency graph is up to date.
5142 TreeFragment* from = uexit->exit->root();
5143 if (from->nGlobalTypes() < tree->nGlobalTypes()) {
5144 SpecializeTreesToLateGlobals(cx, from, tree->globalTypeMap(),
5145 tree->nGlobalTypes());
5148 /* It's okay! Link together and remove the unstable exit. */
5149 JS_ASSERT(tree == fragment);
5150 JoinPeers(traceMonitor->assembler, uexit->exit, tree);
/* removeUnstableExit returns the next node in the list. */
5151 uexit = peer->removeUnstableExit(uexit->exit);
5153 /* Check for int32->double slots that suggest trashing. */
5154 if (findUndemotesInTypemaps(typeMap, tree, undemotes)) {
5155 JS_ASSERT(peer == uexit->fragment->root);
5156 if (fragment == peer)
5159 whichTreesToTrash.addUnique(uexit->fragment->root);
5162 uexit = uexit->next;
/* endLoop(): convenience overload — end the trace at a fresh LOOP_EXIT snapshot. */
5168 JS_REQUIRES_STACK AbortableRecordingStatus
5169 TraceRecorder::endLoop()
5171 return endLoop(snapshot(LOOP_EXIT));
/*
 * endLoop(exit): terminate the trace with an always-taken exit guard,
 * compile it, link edges, and finish the recording session.
 */
5174 /* Emit an always-exit guard and compile the tree (used for break statements). */
5175 JS_REQUIRES_STACK AbortableRecordingStatus
5176 TraceRecorder::endLoop(VMSideExit* exit)
5178 JS_ASSERT(fragment->root == tree);
/* Same recursion bail-out as closeLoop(). */
5180 if (callDepth != 0) {
5181 debug_only_print0(LC_TMTracer, "Blacklisted: stack depth mismatch, possible recursion.\n");
5182 Blacklist((jsbytecode*)tree->ip);
5184 return ARECORD_STOP;
5187 fragment->lastIns = w.x(createGuardRecord(exit));
5189 CHECK_STATUS_A(compile());
5191 debug_only_printf(LC_TMTreeVis, "TREEVIS ENDLOOP EXIT=%p\n", (void*)exit);
5193 JS_ASSERT(LookupLoop(traceMonitor, tree->ip, tree->globalObj, tree->globalShape, tree->argc) ==
5196 joinEdgesToEntry(tree->first);
5198 debug_only_stmt(DumpPeerStability(traceMonitor, tree->ip, tree->globalObj,
5199 tree->globalShape, tree->argc);)
5202 * Note: this must always be done, in case we added new globals on trace
5203 * and haven't yet propagated those to linked and dependent trees.
5205 debug_only_print0(LC_TMTracer,
5206 "updating specializations on dependent and linked trees\n");
5208 SpecializeTreesToMissingGlobals(cx, globalObj, fragment->root);
5211 * If this is a newly formed tree, and the outer tree has not been compiled
5212 * yet, we should try to compile the outer tree again.
5215 AttemptCompilation(traceMonitor, globalObj, outerScript, outerPC, outerArgc);
5217 debug_only_printf(LC_TMMinimal,
5218 "Recording completed at %s:%u@%u via endLoop (FragID=%06u)\n",
5219 cx->fp()->script()->filename,
5220 js_FramePCToLineNumber(cx, cx->fp()),
5221 FramePCOffset(cx, cx->fp()),
5222 fragment->profFragID);
5223 debug_only_print0(LC_TMTracer, "\n");
5226 return finishSuccessfully();
/*
 * prepareTreeCall: before calling into an inner tree, guard native stack
 * and call-stack headroom, and (when inlined, callDepth > 0) shift sp/rp
 * so the inner tree sees the frame layout it expects.
 */
5229 /* Emit code to adjust the stack to match the inner tree's stack expectations. */
5230 JS_REQUIRES_STACK void
5231 TraceRecorder::prepareTreeCall(TreeFragment* inner)
5233 VMSideExit* exit = snapshot(OOM_EXIT);
5236 * The inner tree expects to be called from the current frame. If the outer
5237 * tree (this trace) is currently inside a function inlining code
5238 * (calldepth > 0), we have to advance the native stack pointer such that
5239 * we match what the inner trace expects to see. We move it back when we
5240 * come out of the inner tree call.
5242 if (callDepth > 0) {
5244 * Calculate the amount we have to lift the native stack pointer by to
5245 * compensate for any outer frames that the inner tree doesn't expect
5246 * but the outer tree has.
5248 ptrdiff_t sp_adj = nativeStackOffset(&cx->fp()->calleeValue());
5250 /* Calculate the amount we have to lift the call stack by. */
5251 ptrdiff_t rp_adj = callDepth * sizeof(FrameInfo*);
5254 * Guard that we have enough stack space for the tree we are trying to
5255 * call on top of the new value for sp.
5257 debug_only_printf(LC_TMTracer,
5258 "sp_adj=%lld outer=%lld inner=%lld\n",
5259 (long long int)sp_adj,
5260 (long long int)tree->nativeStackBase,
5261 (long long int)inner->nativeStackBase);
5262 ptrdiff_t sp_offset =
5263 - tree->nativeStackBase /* rebase sp to beginning of outer tree's stack */
5264 + sp_adj /* adjust for stack in outer frame inner tree can't see */
5265 + inner->maxNativeStackSlots * sizeof(double); /* plus the inner tree's stack */
5266 LIns* sp_top = w.addp(lirbuf->sp, w.nameImmw(sp_offset));
/* Exit (OOM_EXIT) if the worst-case inner stack would overflow eos. */
5267 guard(true, w.ltp(sp_top, eos_ins), exit);
5269 /* Guard that we have enough call stack space. */
5270 ptrdiff_t rp_offset = rp_adj + inner->maxCallDepth * sizeof(FrameInfo*);
5271 LIns* rp_top = w.addp(lirbuf->rp, w.nameImmw(rp_offset));
5272 guard(true, w.ltp(rp_top, eor_ins), exit);
/*
 * NOTE(review): the line starting this second sp_offset computation
 * (5273-5274) is missing from this excerpt.
 */
5275 - tree->nativeStackBase /* rebase sp to beginning of outer tree's stack */
5276 + sp_adj /* adjust for stack in outer frame inner tree can't see */
5277 + inner->nativeStackBase; /* plus the inner tree's stack base */
5278 /* We have enough space, so adjust sp and rp to their new level. */
5279 w.stStateField(w.addp(lirbuf->sp, w.nameImmw(sp_offset)), sp);
5280 w.stStateField(w.addp(lirbuf->rp, w.nameImmw(rp_adj)), rp);
5284 * The inner tree will probably access stack slots. So tell nanojit not to
5285 * discard or defer stack writes before emitting the call tree code.
5287 * (The ExitType of this snapshot is nugatory. The exit can't be taken.)
5289 w.xbarrier(createGuardRecord(exit));
/*
 * ClearSlotsVisitor: slot visitor that wipes the tracker entry (sets it to
 * NULL) for every visited stack slot and frame-object pointer. Used after
 * an inner tree call, which may have modified any tracked slot.
 */
5292 class ClearSlotsVisitor : public SlotVisitorBase
5296 ClearSlotsVisitor(Tracker &tracker)
5300 JS_ALWAYS_INLINE bool
5301 visitStackSlots(Value *vp, size_t count, JSStackFrame *) {
5302 for (Value *vpend = vp + count; vp != vpend; ++vp)
5303 tracker.set(vp, NULL);
5307 JS_ALWAYS_INLINE bool
5308 visitFrameObjPtr(void *p, JSStackFrame *) {
5309 tracker.set(p, NULL);
/*
 * BuildGlobalTypeMapFromInnerTree: append the global typemap of an inner
 * tree's exit, padded with any globals the inner tree's root specialized
 * beyond what the exit itself recorded.
 */
5315 BuildGlobalTypeMapFromInnerTree(Queue<JSValueType>& typeMap, VMSideExit* inner)
5318 unsigned initialSlots = typeMap.length();
5320 /* First, use the innermost exit's global typemap. */
5321 typeMap.add(inner->globalTypeMap(), inner->numGlobalSlots);
5323 /* Add missing global types from the innermost exit's tree. */
5324 TreeFragment* innerFrag = inner->root();
5325 unsigned slots = inner->numGlobalSlots;
5326 if (slots < innerFrag->nGlobalTypes()) {
5327 typeMap.add(innerFrag->globalTypeMap() + slots, innerFrag->nGlobalTypes() - slots);
5328 slots = innerFrag->nGlobalTypes();
5330 JS_ASSERT(typeMap.length() - initialSlots == slots);
/*
 * emitTreeCall: emit the LIR that invokes a compiled inner tree, handles
 * nested-exit unwinding, flushes the tracker, re-imports typemaps from the
 * inner exit, restores sp/rp, and guards that at runtime we exit the inner
 * tree through the same side exit observed during recording.
 */
5334 /* Record a call to an inner tree. */
5335 JS_REQUIRES_STACK void
5336 TraceRecorder::emitTreeCall(TreeFragment* inner, VMSideExit* exit)
5338 /* Invoke the inner tree. */
5339 LIns* args[] = { lirbuf->state }; /* reverse order */
5340 /* Construct a call info structure for the target tree. */
5341 CallInfo* ci = new (traceAlloc()) CallInfo();
5342 ci->_address = uintptr_t(inner->code());
5343 JS_ASSERT(ci->_address);
/* Signature: pointer fn(pointer) — the tree takes the TracerState. */
5344 ci->_typesig = CallInfo::typeSig1(ARGTYPE_P, ARGTYPE_P);
5346 ci->_storeAccSet = ACCSET_STORE_ANY;
5347 ci->_abi = ABI_FASTCALL;
5349 ci->_name = "fragment";
5351 LIns* rec = w.call(ci, args);
5352 LIns* lr = w.ldpGuardRecordExit(rec);
/* Branch if the inner tree exited through a NESTED_EXIT guard. */
5353 LIns* nested = w.jtUnoptimizable(w.eqiN(w.ldiVMSideExitField(lr, exitType), NESTED_EXIT));
5356 * If the tree exits on a regular (non-nested) guard, keep updating lastTreeExitGuard
5357 * with that guard. If we mismatch on a tree call guard, this will contain the last
5358 * non-nested guard we encountered, which is the innermost loop or branch guard.
5360 w.stStateField(lr, lastTreeExitGuard);
5361 LIns* done1 = w.j(NULL);
5364 * The tree exited on a nested guard. This only occurs once a tree call guard mismatches
5365 * and we unwind the tree call stack. We store the first (innermost) tree call guard in state
5366 * and we will try to grow the outer tree the failing call was in starting at that guard.
5369 LIns* done2 = w.jfUnoptimizable(w.eqp0(w.ldpStateField(lastTreeCallGuard)));
5370 w.stStateField(lr, lastTreeCallGuard);
/* rp + calldepth * sizeof(FrameInfo*): shift amount is 2 (32-bit) or 3 (64-bit). */
5371 w.stStateField(w.addp(w.ldpStateField(rp),
5372 w.i2p(w.lshiN(w.ldiVMSideExitField(lr, calldepth),
5373 sizeof(void*) == 4 ? 2 : 3))),
5375 w.label(done1, done2);
5378 * Keep updating outermostTreeExit so that TracerState always contains the most recent
5381 w.stStateField(lr, outermostTreeExitGuard);
5383 /* Read back all registers, in case the called tree changed any of them. */
/* Debug-only sanity pass: no slot of the exit typemaps may still be boxed. */
5387 map = exit->globalTypeMap();
5388 for (i = 0; i < exit->numGlobalSlots; i++)
5389 JS_ASSERT(map[i] != JSVAL_TYPE_BOXED);
5390 map = exit->stackTypeMap();
5391 for (i = 0; i < exit->numStackSlots; i++)
5392 JS_ASSERT(map[i] != JSVAL_TYPE_BOXED);
5395 /* The inner tree may modify currently-tracked upvars, so flush everything. */
5396 ClearSlotsVisitor visitor(tracker);
5397 VisitStackSlots(visitor, cx, callDepth);
5398 SlotList& gslots = *tree->globalSlots;
5399 for (unsigned i = 0; i < gslots.length(); i++) {
5400 unsigned slot = gslots[i];
5401 Value* vp = &globalObj->getSlotRef(slot);
5402 tracker.set(vp, NULL);
5405 /* Set stack slots from the innermost frame. */
5406 importTypeMap.setLength(NativeStackSlots(cx, callDepth));
5407 unsigned startOfInnerFrame = importTypeMap.length() - exit->numStackSlots;
5408 for (unsigned i = 0; i < exit->numStackSlots; i++)
5409 importTypeMap[startOfInnerFrame + i] = exit->stackTypeMap()[i];
5410 importStackSlots = importTypeMap.length();
5411 JS_ASSERT(importStackSlots == NativeStackSlots(cx, callDepth));
5414 * Bug 502604 - It is illegal to extend from the outer typemap without
5415 * first extending from the inner. Make a new typemap here.
5417 BuildGlobalTypeMapFromInnerTree(importTypeMap, exit);
5419 importGlobalSlots = importTypeMap.length() - importStackSlots;
5420 JS_ASSERT(importGlobalSlots == tree->globalSlots->length());
5422 /* Restore sp and rp to their original values (we still have them in a register). */
5423 if (callDepth > 0) {
5424 w.stStateField(lirbuf->sp, sp);
5425 w.stStateField(lirbuf->rp, rp);
5429 * Guard that we come out of the inner tree along the same side exit we came out when
5430 * we called the inner tree at recording time.
5432 VMSideExit* nestedExit = snapshot(NESTED_EXIT);
5433 JS_ASSERT(exit->exitType == LOOP_EXIT);
5434 guard(true, w.eqp(lr, w.nameImmpNonGC(exit)), nestedExit);
5435 debug_only_printf(LC_TMTreeVis, "TREEVIS TREECALL INNER=%p EXIT=%p GUARD=%p\n", (void*)inner,
5436 (void*)nestedExit, (void*)exit);
5438 /* Register us as a dependent tree of the inner tree. */
5439 inner->dependentTrees.addUnique(fragment->root);
5440 tree->linkedTrees.addUnique(inner);
/*
 * trackCfgMerges: at an IFEQ/IFEQX, record the bytecode address where the
 * if / if-else control flow re-merges, read from the source notes.
 */
5443 /* Add a if/if-else control-flow merge point to the list of known merge points. */
5444 JS_REQUIRES_STACK void
5445 TraceRecorder::trackCfgMerges(jsbytecode* pc)
5447 /* If we hit the beginning of an if/if-else, then keep track of the merge point after it. */
5448 JS_ASSERT((*pc == JSOP_IFEQ) || (*pc == JSOP_IFEQX));
5449 jssrcnote* sn = js_GetSrcNote(cx->fp()->script(), pc);
5451 if (SN_TYPE(sn) == SRC_IF) {
/* Plain if: merge point is the branch target itself. */
5452 cfgMerges.add((*pc == JSOP_IFEQ)
5453 ? pc + GET_JUMP_OFFSET(pc)
5454 : pc + GET_JUMPX_OFFSET(pc));
5455 } else if (SN_TYPE(sn) == SRC_IF_ELSE)
/* if-else: merge point is encoded in the source note's first offset. */
5456 cfgMerges.add(pc + js_GetSrcNoteOffset(sn, 0));
/*
 * emitIf: emit the guard for a conditional branch. On a loop edge the
 * guard becomes a LOOP_EXIT (inverted when the branch walks out of the
 * loop); otherwise it is an ordinary BRANCH_EXIT. Constant conditions emit
 * no guard and only record pendingLoop.
 */
5461 * Invert the direction of the guard if this is a loop edge that is not
5462 * taken (thin loop).
5464 JS_REQUIRES_STACK void
5465 TraceRecorder::emitIf(jsbytecode* pc, bool cond, LIns* x)
5468 JS_ASSERT(isCond(x));
5469 if (IsLoopEdge(pc, (jsbytecode*)tree->ip)) {
5470 exitType = LOOP_EXIT;
5473 * If we are about to walk out of the loop, generate code for the
5474 * inverse loop condition, pretending we recorded the case that stays
5477 if ((*pc == JSOP_IFEQ || *pc == JSOP_IFEQX) == cond) {
5478 JS_ASSERT(*pc == JSOP_IFNE || *pc == JSOP_IFNEX || *pc == JSOP_IFEQ || *pc == JSOP_IFEQX);
5479 debug_only_print0(LC_TMTracer,
5480 "Walking out of the loop, terminating it anyway.\n");
5485 * Conditional guards do not have to be emitted if the condition is
5486 * constant. We make a note whether the loop condition is true or false
5487 * here, so we later know whether to emit a loop edge or a loop end.
5490 pendingLoop = (x->immI() == int32(cond));
5494 exitType = BRANCH_EXIT;
/*
 * NOTE(review): a guard on constant conditions is skipped via lines elided
 * from this excerpt (e.g. 5488-5489, 5495-5496); this call runs only for
 * non-constant conditions — confirm against the full file.
 */
5497 guard(cond, x, exitType);
/*
 * fuseIf: when the comparison is immediately consumed by IFEQ/IFNE, emit
 * the branch guard here (fused with the comparison).
 */
5500 /* Emit code for a fused IFEQ/IFNE. */
5501 JS_REQUIRES_STACK void
5502 TraceRecorder::fuseIf(jsbytecode* pc, bool cond, LIns* x)
5504 if (*pc == JSOP_IFEQ || *pc == JSOP_IFNE) {
5505 emitIf(pc, cond, x);
/* The IFEQ-specific follow-up (line 5507) is elided from this excerpt. */
5506 if (*pc == JSOP_IFEQ)
/*
 * checkTraceEnd: if pc is a loop edge back to the trace header, temporarily
 * rewrite cx->regs to look like we are at the loop header (balanced stack),
 * run closeLoop(), then restore the saved regs through a context pointer
 * saved before closeLoop() may delete `this`.
 */
5511 /* Check whether we have reached the end of the trace. */
5512 JS_REQUIRES_STACK AbortableRecordingStatus
5513 TraceRecorder::checkTraceEnd(jsbytecode *pc)
5515 if (IsLoopEdge(pc, (jsbytecode*)tree->ip)) {
5517 * If we compile a loop, the trace should have a zero stack balance at
5518 * the loop edge. Currently we are parked on a comparison op or
5519 * IFNE/IFEQ, so advance pc to the loop header and adjust the stack
5520 * pointer and pretend we have reached the loop header.
5523 JS_ASSERT(!cx->fp()->hasImacropc() && (pc == cx->regs->pc || pc == cx->regs->pc + 1));
5524 JSFrameRegs orig = *cx->regs;
5526 cx->regs->pc = (jsbytecode*)tree->ip;
5527 cx->regs->sp = cx->fp()->base() + tree->spOffsetAtEntry;
/* closeLoop() can delete the recorder; keep a local cx to restore regs. */
5529 JSContext* localcx = cx;
5530 AbortableRecordingStatus ars = closeLoop();
5531 *localcx->regs = orig;
5537 return ARECORD_CONTINUE;
/*
 * CheckGlobalObjectShape: verify the global object still matches what the
 * JIT expects (shape, slot count, uniqueness); with a recorder active,
 * compare against the recorder's tree, otherwise find or allocate a
 * tracked GlobalState entry. Resets the JIT on mismatch/overflow.
 */
5541 * Check whether the shape of the global object has changed. The return value
5542 * indicates whether the recorder is still active. If 'false', any active
5543 * recording has been aborted and the JIT may have been reset.
5545 static JS_REQUIRES_STACK bool
5546 CheckGlobalObjectShape(JSContext* cx, TraceMonitor* tm, JSObject* globalObj,
5547 uint32 *shape = NULL, SlotList** slots = NULL)
/* A pending flush request takes priority over everything else. */
5549 if (tm->needFlush) {
5550 ResetJIT(cx, tm, FR_DEEP_BAIL);
5554 if (globalObj->numSlots() > MAX_GLOBAL_SLOTS) {
5556 AbortRecording(cx, "too many slots in global object");
5561 * The global object must have a unique shape. That way, if an operand
5562 * isn't the global at record time, a shape guard suffices to ensure
5563 * that it isn't the global at run time.
5565 if (!globalObj->hasOwnShape()) {
5566 if (!globalObj->globalObjectOwnShapeChange(cx)) {
5567 debug_only_print0(LC_TMTracer,
5568 "Can't record: failed to give globalObj a unique shape.\n");
5573 uint32 globalShape = globalObj->shape();
/* Recorder active: validate against the tree being recorded. */
5576 TreeFragment* root = tm->recorder->getFragment()->root;
5578 /* Check the global shape matches the recorder's treeinfo's shape. */
5579 if (globalObj != root->globalObj || globalShape != root->globalShape) {
5580 AUDIT(globalShapeMismatchAtEntry);
5581 debug_only_printf(LC_TMTracer,
5582 "Global object/shape mismatch (%p/%u vs. %p/%u), flushing cache.\n",
5583 (void*)globalObj, globalShape, (void*)root->globalObj,
5585 Backoff(tm, (jsbytecode*) root->ip);
5586 ResetJIT(cx, tm, FR_GLOBAL_SHAPE_MISMATCH);
/* Outparams are optional (NULL defaults); gaps elide the null checks. */
5590 *shape = globalShape;
5592 *slots = root->globalSlots;
5596 /* No recorder, search for a tracked global-state (or allocate one). */
5597 for (size_t i = 0; i < MONITOR_N_GLOBAL_STATES; ++i) {
5598 GlobalState &state = tm->globalStates[i];
/* uint32(-1) marks a free GlobalState entry: claim it. */
5600 if (state.globalShape == uint32(-1)) {
5601 state.globalObj = globalObj;
5602 state.globalShape = globalShape;
5603 JS_ASSERT(state.globalSlots);
5604 JS_ASSERT(state.globalSlots->length() == 0);
5607 if (state.globalObj == globalObj && state.globalShape == globalShape) {
5609 *shape = globalShape;
5611 *slots = state.globalSlots;
5616 /* No currently-tracked-global found and no room to allocate, abort. */
5617 AUDIT(globalShapeMismatchAtEntry);
5618 debug_only_printf(LC_TMTracer,
5619 "No global slotlist for global shape %u, flushing cache.\n",
5621 ResetJIT(cx, tm, FR_GLOBALS_FULL);
/*
 * startRecorder: allocate and install a new TraceRecorder on the monitor.
 * On allocation failure or cache overflow, resets the JIT and reports
 * failure to the caller.
 */
5626 * Return whether or not the recorder could be started. If 'false', the JIT has
5627 * been reset in response to an OOM.
5629 bool JS_REQUIRES_STACK
5630 TraceRecorder::startRecorder(JSContext* cx, TraceMonitor *tm, VMSideExit* anchor, VMFragment* f,
5631 unsigned stackSlots, unsigned ngslots,
5632 JSValueType* typeMap, VMSideExit* expectedInnerExit,
5633 JSScript* outerScript, jsbytecode* outerPC, uint32 outerArgc,
5636 JS_ASSERT(!tm->needFlush);
5637 JS_ASSERT_IF(cx->fp()->hasImacropc(), f->root != f);
5639 /* We can't (easily) use js_new() here because the constructor is private. */
5640 void *memory = js_malloc(sizeof(TraceRecorder));
/* Placement-new only if the raw allocation succeeded. */
5641 tm->recorder = memory
5642 ? new(memory) TraceRecorder(cx, tm, anchor, f, stackSlots, ngslots, typeMap,
5643 expectedInnerExit, outerScript, outerPC, outerArgc,
5647 if (!tm->recorder || tm->outOfMemory() || OverfullJITCache(cx, tm)) {
5648 ResetJIT(cx, tm, FR_OOM);
/*
 * TrashTree: invalidate a compiled tree root and recursively trash every
 * tree that depends on it or links to it.
 */
5656 TrashTree(TreeFragment* f)
5658 JS_ASSERT(f == f->root);
5659 debug_only_printf(LC_TMTreeVis, "TREEVIS TRASH FRAG=%p\n", (void*)f);
5663 AUDIT(treesTrashed);
5664 debug_only_print0(LC_TMTracer, "Trashing tree info.\n");
/* Recursion into dependents/linked trees happens on lines elided here. */
5666 TreeFragment** data = f->dependentTrees.data();
5667 unsigned length = f->dependentTrees.length();
5668 for (unsigned n = 0; n < length; ++n)
5670 data = f->linkedTrees.data();
5671 length = f->linkedTrees.length();
5672 for (unsigned n = 0; n < length; ++n)
/*
 * SynthesizeFrame: reconstruct an interpreter stack frame for a call that
 * happened on trace, mimicking JSOP_CALL: fix the prev frame's sp, set up
 * argc/flags, push and initialize the inline frame, poison values that
 * FlushNativeStackFrame will fill in, and fire the debugger call hook.
 */
5677 SynthesizeFrame(JSContext* cx, const FrameInfo& fi, JSObject* callee)
5679 VOUCH_DOES_NOT_REQUIRE_STACK();
5681 /* Assert that we have a correct sp distance from cx->fp()->slots in fi. */
5682 JSStackFrame* const fp = cx->fp();
5683 JS_ASSERT_IF(!fi.imacpc,
5684 js_ReconstructStackDepth(cx, fp->script(), fi.pc) ==
5685 uintN(fi.spdist - fp->numFixed()));
5687 /* Use the just-flushed prev-frame to get the callee function. */
5688 JSFunction* newfun = callee->getFunctionPrivate();
5689 JSScript* newscript = newfun->script();
5691 /* Fill in the prev-frame's sp. */
5692 JSFrameRegs *regs = cx->regs;
5693 regs->sp = fp->slots() + fi.spdist;
5696 fp->setImacropc(fi.imacpc);
5698 /* Set argc/flags then mimic JSOP_CALL. */
5699 uintN argc = fi.get_argc();
/*
 * NOTE(review): `JSFRAME_CONSTRUCTING | JSFRAME_CONSTRUCTING` ORs the same
 * flag with itself — redundant at best, a typo at worst (a second, distinct
 * flag was presumably intended). Confirm against upstream jstracer.cpp.
 */
5700 uint32 flags = fi.is_constructing ()
5701 ? JSFRAME_CONSTRUCTING | JSFRAME_CONSTRUCTING
5704 /* Get pointer to new/frame/slots, prepare arguments. */
5705 StackSpace &stack = cx->stack();
5706 JSStackFrame *newfp = stack.getInlineFrame(cx, regs->sp, argc, newfun,
5709 /* Initialize frame; do not need to initialize locals. */
5710 newfp->initCallFrame(cx, *callee, newfun, argc, flags);
5713 /* The stack is conservatively marked, so we can leave non-canonical args uninitialized. */
5714 if (newfp->hasOverflowArgs()) {
5715 Value *beg = newfp->actualArgs() - 2;
5716 Value *end = newfp->actualArgs() + newfp->numFormalArgs();
5717 for (Value *p = beg; p != end; ++p)
5718 p->setMagic(JS_ARG_POISON);
5721 /* These should be initialized by FlushNativeStackFrame. */
5722 newfp->thisValue().setMagic(JS_THIS_POISON);
5723 newfp->setScopeChainNoCallObj(*JSStackFrame::sInvalidScopeChain);
5726 /* Officially push the frame. */
5727 stack.pushInlineFrame(cx, newscript, newfp, cx->regs);
5729 /* Call object will be set by FlushNativeStackFrame. */
5731 /* Call the debugger hook if present. */
5732 JSInterpreterHook hook = cx->debugHooks->callHook;
5734 newfp->setHookData(hook(cx, newfp, JS_TRUE, 0,
5735 cx->debugHooks->callHookData));
/*
 * RecordTree: begin recording a brand-new tree at a loop header. Finds or
 * allocates a peer fragment, re-validates the global shape, checks memory
 * headroom, initializes the fragment's typemaps, and starts the recorder.
 */
5739 static JS_REQUIRES_STACK bool
5740 RecordTree(JSContext* cx, TraceMonitor* tm, TreeFragment* first,
5741 JSScript* outerScript, jsbytecode* outerPC,
5742 uint32 outerArgc, SlotList* globalSlots)
5744 /* Try to find an unused peer fragment, or allocate a new one. */
5745 JS_ASSERT(first->first == first);
5746 TreeFragment* f = NULL;
5748 for (TreeFragment* peer = first; peer; peer = peer->peer, ++count) {
/* No free peer found: append a fresh one to the peer list. */
5753 f = AddNewPeerToPeerList(tm, first);
5754 JS_ASSERT(f->root == f);
5756 /* Disable speculation if we are starting to accumulate a lot of trees. */
5757 bool speculate = count < MAXPEERS-1;
5759 /* save a local copy for use after JIT flush */
5760 const void* localRootIP = f->root->ip;
5762 /* Make sure the global type map didn't change on us. */
5763 if (!CheckGlobalObjectShape(cx, tm, f->globalObj)) {
5764 Backoff(tm, (jsbytecode*) localRootIP);
5768 AUDIT(recorderStarted);
/* Bail out before recording if the code cache is already strained. */
5770 if (tm->outOfMemory() ||
5771 OverfullJITCache(cx, tm) ||
5772 !tm->tracedScripts.put(cx->fp()->script()))
5774 if (!OverfullJITCache(cx, tm))
5775 js_ReportOutOfMemory(cx);
5776 Backoff(tm, (jsbytecode*) f->root->ip);
5777 ResetJIT(cx, tm, FR_OOM);
5778 debug_only_print0(LC_TMTracer,
5779 "Out of memory recording new tree, flushing cache.\n");
5783 JS_ASSERT(!f->code());
5785 f->initialize(cx, globalSlots, speculate);
5788 AssertTreeIsUnique(tm, f);
/* TreeVis debug output: dump the new tree's stack and global typemaps. */
5791 debug_only_printf(LC_TMTreeVis, "TREEVIS CREATETREE ROOT=%p PC=%p FILE=\"%s\" LINE=%d OFFS=%d",
5792 (void*)f, f->ip, f->treeFileName, f->treeLineNumber,
5793 FramePCOffset(cx, cx->fp()));
5794 debug_only_print0(LC_TMTreeVis, " STACK=\"");
5795 for (unsigned i = 0; i < f->nStackTypes; i++)
5796 debug_only_printf(LC_TMTreeVis, "%c", TypeToChar(f->typeMap[i]));
5797 debug_only_print0(LC_TMTreeVis, "\" GLOBALS=\"");
5798 for (unsigned i = 0; i < f->nGlobalTypes(); i++)
5799 debug_only_printf(LC_TMTreeVis, "%c", TypeToChar(f->typeMap[f->nStackTypes + i]));
5800 debug_only_print0(LC_TMTreeVis, "\"\n");
5803 /* Recording primary trace. */
5804 return TraceRecorder::startRecorder(cx, tm, NULL, f, f->nStackTypes,
5805 f->globalSlots->length(),
5806 f->typeMap.data(), NULL,
5807 outerScript, outerPC, outerArgc, speculate);
/*
 * FindLoopEdgeTarget: given an UNSTABLE_LOOP_EXIT, look for a peer tree whose
 * entry type map the exit's type map can be linked to.  Doubles observed at
 * the exit are marked undemotable in the oracle first, so future recordings
 * stop trying to keep those slots as int32.  On a usable match the peer is
 * returned through |peerp| (assignment elided from this excerpt); returns
 * TypeConsensus_Bad if no peer matches.
 */
5810 static JS_REQUIRES_STACK TypeConsensus
5811 FindLoopEdgeTarget(JSContext* cx, TraceMonitor* tm, VMSideExit* exit, TreeFragment** peerp)
5813 TreeFragment* from = exit->root();
5815 JS_ASSERT(from->code());
5816 Oracle* oracle = tm->oracle;
/* Build the full (stack + global) type map as observed at |exit|. */
5818 TypeMap typeMap(NULL, oracle);
5819 FullMapFromExit(typeMap, exit);
5820 JS_ASSERT(typeMap.length() - exit->numStackSlots == from->nGlobalTypes());
5822 /* Mark all double slots as undemotable */
5823 uint16* gslots = from->globalSlots->data();
5824 for (unsigned i = 0; i < typeMap.length(); i++) {
5825 if (typeMap[i] == JSVAL_TYPE_DOUBLE) {
5826 if (i < from->nStackTypes)
5827 oracle->markStackSlotUndemotable(cx, i, from->ip);
5828 else if (i >= exit->numStackSlots)
5829 oracle->markGlobalSlotUndemotable(cx, gslots[i - exit->numStackSlots]);
5833 JS_ASSERT(exit->exitType == UNSTABLE_LOOP_EXIT);
/* Walk the peer list looking for a tree this exit can link to. */
5835 TreeFragment* firstPeer = from->first;
5837 for (TreeFragment* peer = firstPeer; peer; peer = peer->peer) {
5840 JS_ASSERT(peer->argc == from->argc);
5841 JS_ASSERT(exit->numStackSlots == peer->nStackTypes);
5842 TypeConsensus consensus = TypeMapLinkability(cx, tm, typeMap, peer);
5843 if (consensus == TypeConsensus_Okay || consensus == TypeConsensus_Undemotes) {
5849 return TypeConsensus_Bad;
/*
 * AttemptToStabilizeTree: called when a tree left through an unstable loop
 * exit.  If a type-compatible peer exists, patch the exit to jump into that
 * peer (stabilizing the loop); otherwise fall through and record a fresh
 * tree with the types seen at the exit.  Returns the result of RecordTree
 * on the recording path (early-return lines are elided from this excerpt).
 */
5852 static JS_REQUIRES_STACK bool
5853 AttemptToStabilizeTree(JSContext* cx, TraceMonitor* tm, JSObject* globalObj, VMSideExit* exit,
5854 JSScript* outerScript, jsbytecode* outerPC, uint32 outerArgc)
5856 if (tm->needFlush) {
5857 ResetJIT(cx, tm, FR_DEEP_BAIL);
5861 TreeFragment* from = exit->root();
5863 TreeFragment* peer = NULL;
5864 TypeConsensus consensus = FindLoopEdgeTarget(cx, tm, exit, &peer);
5865 if (consensus == TypeConsensus_Okay) {
5866 JS_ASSERT(from->globalSlots == peer->globalSlots);
5867 JS_ASSERT_IF(exit->exitType == UNSTABLE_LOOP_EXIT,
5868 from->nStackTypes == peer->nStackTypes);
5869 JS_ASSERT(exit->numStackSlots == peer->nStackTypes);
5870 /* Patch this exit to its peer */
5871 JoinPeers(tm->assembler, exit, peer);
5873 * Update peer global types. The |from| fragment should already be updated because it is on
5874 * the execution path, and somehow connected to the entry trace.
5876 if (peer->nGlobalTypes() < peer->globalSlots->length())
5877 SpecializeTreesToMissingGlobals(cx, globalObj, peer);
5878 JS_ASSERT(from->nGlobalTypes() == from->globalSlots->length());
5879 /* This exit is no longer unstable, so remove it. */
5880 if (exit->exitType == UNSTABLE_LOOP_EXIT)
5881 from->removeUnstableExit(exit);
5882 debug_only_stmt(DumpPeerStability(tm, peer->ip, globalObj, from->globalShape, from->argc);)
5884 } else if (consensus == TypeConsensus_Undemotes) {
5885 /* The original tree is unconnectable, so trash it. */
5890 SlotList *globalSlots = from->globalSlots;
5892 JS_ASSERT(from == from->root);
5894 /* If this tree has been blacklisted, don't try to record a new one. */
5895 if (*(jsbytecode*)from->ip == JSOP_NOTRACE)
5898 return RecordTree(cx, tm, from->first, outerScript, outerPC, outerArgc, globalSlots);
/*
 * CreateBranchFragment: allocate (from the trace monitor's data allocator)
 * a new VMFragment for a branch trace hanging off |root| at side exit
 * |anchor|, starting at the current pc.  A fragment-profiling ID is assigned
 * only when LC_FragProfile logging is enabled.  The return of |f| is elided
 * from this excerpt.
 */
5901 static JS_REQUIRES_STACK VMFragment*
5902 CreateBranchFragment(JSContext* cx, TraceMonitor* tm, TreeFragment* root, VMSideExit* anchor)
5905 uint32_t profFragID = (LogController.lcbits & LC_FragProfile)
5906 ? (++(tm->lastFragID)) : 0;
5909 VMFragment* f = new (*tm->dataAlloc) VMFragment(cx->regs->pc verbose_only(, profFragID));
5911 debug_only_printf(LC_TMTreeVis, "TREEVIS CREATEBRANCH ROOT=%p FRAG=%p PC=%p FILE=\"%s\""
5912 " LINE=%d ANCHOR=%p OFFS=%d\n",
5913 (void*)root, (void*)f, (void*)cx->regs->pc, cx->fp()->script()->filename,
5914 js_FramePCToLineNumber(cx, cx->fp()), (void*)anchor,
5915 FramePCOffset(cx, cx->fp()));
/* In verbose builds, prepend the new fragment to the monitor's branch list. */
5916 verbose_only( tm->branches = new (*tm->dataAlloc) Seq<Fragment*>(f, tm->branches); )
/*
 * AttemptToExtendTree: grow an existing tree by attaching a branch trace at
 * side exit |anchor|.  If |exitedFrom| is non-NULL we exited through a
 * nesting guard and must merge its type map with the anchor's; otherwise the
 * anchor's own type map is used directly.  Recording of the branch starts
 * only once the exit is hot (or when invoked for an outer PC).  |tvso|, when
 * present, receives a TraceVis failure reason on each bail-out path.
 * Several brace/#ifdef lines are elided from this excerpt.
 */
5924 static JS_REQUIRES_STACK bool
5925 AttemptToExtendTree(JSContext* cx, TraceMonitor* tm, VMSideExit* anchor, VMSideExit* exitedFrom,
5926 JSScript *outerScript, jsbytecode* outerPC
5928 , TraceVisStateObj* tvso = NULL
5932 JS_ASSERT(!tm->recorder);
5934 if (tm->needFlush) {
5935 ResetJIT(cx, tm, FR_DEEP_BAIL);
5937 if (tvso) tvso->r = R_FAIL_EXTEND_FLUSH;
5942 TreeFragment* f = anchor->root();
5943 JS_ASSERT(f->code());
5946 * Don't grow trees above a certain size to avoid code explosion due to
5949 if (f->branchCount >= MAX_BRANCHES) {
5951 if (cx->methodJitEnabled && cx->profilingEnabled)
5952 Blacklist((jsbytecode *)f->ip);
5955 if (tvso) tvso->r = R_FAIL_EXTEND_MAX_BRANCHES;
/* Reuse the fragment already attached to this exit, if any. */
5960 VMFragment* c = (VMFragment*)anchor->target;
5962 c = CreateBranchFragment(cx, tm, f, anchor);
5965 * If we are recycling a fragment, it might have a different ip so reset it
5966 * here. This can happen when attaching a branch to a NESTED_EXIT, which
5967 * might extend along separate paths (i.e. after the loop edge, and after a
5968 * return statement).
5970 c->ip = cx->regs->pc;
5971 JS_ASSERT(c->root == f);
5974 debug_only_printf(LC_TMTracer,
5975 "trying to attach another branch to the tree (hits = %d)\n", c->hits());
/* Hotness gate: CASE_EXITs scale the cap by the number of switch cases. */
5977 int32_t& hits = c->hits();
5978 int32_t maxHits = HOTEXIT + MAXEXIT;
5979 if (anchor->exitType == CASE_EXIT)
5980 maxHits *= anchor->switchInfo->count;
5981 if (outerPC || (hits++ >= HOTEXIT && hits <= maxHits)) {
5982 /* start tracing secondary trace from this point */
5983 unsigned stackSlots;
5985 JSValueType* typeMap;
5986 TypeMap fullMap(NULL, tm->oracle);
5989 * If we are coming straight from a simple side exit, just use that
5990 * exit's type map as starting point.
5992 ngslots = anchor->numGlobalSlots;
5993 stackSlots = anchor->numStackSlots;
5994 typeMap = anchor->fullTypeMap();
5997 * If we side-exited on a loop exit and continue on a nesting
5998 * guard, the nesting guard (anchor) has the type information for
5999 * everything below the current scope, and the actual guard we
6000 * exited from has the types for everything in the current scope
6001 * (and whatever it inlined). We have to merge those maps here.
6003 VMSideExit* e1 = anchor;
6004 VMSideExit* e2 = exitedFrom;
6005 fullMap.add(e1->stackTypeMap(), e1->numStackSlotsBelowCurrentFrame);
6006 fullMap.add(e2->stackTypeMap(), e2->numStackSlots);
6007 stackSlots = fullMap.length();
6008 ngslots = BuildGlobalTypeMapFromInnerTree(fullMap, e2);
6009 JS_ASSERT(ngslots >= e1->numGlobalSlots); // inner tree must have all globals
6010 JS_ASSERT(ngslots == fullMap.length() - stackSlots);
6011 typeMap = fullMap.data();
6013 JS_ASSERT(ngslots >= anchor->numGlobalSlots);
6014 bool rv = TraceRecorder::startRecorder(cx, tm, anchor, c, stackSlots, ngslots, typeMap,
6015 exitedFrom, outerScript, outerPC, f->argc,
6019 tvso->r = R_FAIL_EXTEND_START;
6024 if (tvso) tvso->r = R_FAIL_EXTEND_COLD;
/* Forward declaration; defined further down in this file. */
6029 static JS_REQUIRES_STACK bool
6030 ExecuteTree(JSContext* cx, TraceMonitor* tm, TreeFragment* f, uintN& inlineCallCount,
6031 VMSideExit** innermostNestedGuardp, VMSideExit** lrp);
6033 static inline MonitorResult
6034 RecordingIfTrue(bool b)
6036 return b ? MONITOR_RECORDING : MONITOR_NOT_RECORDING;
6040 * A postcondition of recordLoopEdge is that if recordLoopEdge does not return
6041 * MONITOR_RECORDING, the recording has been aborted.
6043 JS_REQUIRES_STACK MonitorResult
6044 TraceRecorder::recordLoopEdge(JSContext* cx, TraceRecorder* r, uintN& inlineCallCount)
6046 TraceMonitor* tm = r->traceMonitor;
6048 /* Process needFlush and deep abort requests. */
6049 if (tm->needFlush) {
6050 ResetJIT(cx, tm, FR_DEEP_BAIL);
6051 return MONITOR_NOT_RECORDING;
6054 JS_ASSERT(r->fragment && !r->fragment->lastIns);
/* Look up (or create) the tree anchored at the current loop-edge pc. */
6055 TreeFragment* root = r->fragment->root;
6056 TreeFragment* first = LookupOrAddLoop(tm, cx->regs->pc, root->globalObj,
6057 root->globalShape, entryFrameArgc(cx));
6060 * Make sure the shape of the global object still matches (this might flush
6063 JSObject* globalObj = cx->fp()->scopeChain().getGlobal();
6064 uint32 globalShape = -1;
6065 SlotList* globalSlots = NULL;
6066 if (!CheckGlobalObjectShape(cx, tm, globalObj, &globalShape, &globalSlots)) {
6067 JS_ASSERT(!tm->recorder);
6068 return MONITOR_NOT_RECORDING;
6071 debug_only_printf(LC_TMTracer,
6072 "Looking for type-compatible peer (%s:%d@%d)\n",
6073 cx->fp()->script()->filename,
6074 js_FramePCToLineNumber(cx, cx->fp()),
6075 FramePCOffset(cx, cx->fp()));
6077 // Find a matching inner tree. If none can be found, compile one.
6078 TreeFragment* f = r->findNestedCompatiblePeer(first);
6079 if (!f || !f->code()) {
6080 AUDIT(noCompatInnerTrees);
6082 TreeFragment* outerFragment = root;
6083 JSScript* outerScript = outerFragment->script;
6084 jsbytecode* outerPC = (jsbytecode*) outerFragment->ip;
6085 uint32 outerArgc = outerFragment->argc;
6086 JS_ASSERT(entryFrameArgc(cx) == first->argc);
/* Abort the outer recording first, then record a compatible inner tree. */
6088 if (AbortRecording(cx, "No compatible inner tree") == JIT_RESET)
6089 return MONITOR_NOT_RECORDING;
6091 return RecordingIfTrue(RecordTree(cx, tm, first,
6092 outerScript, outerPC, outerArgc, globalSlots));
/* A compatible compiled inner tree exists: try to call it from the outer trace. */
6095 AbortableRecordingStatus status = r->attemptTreeCall(f, inlineCallCount);
6096 if (status == ARECORD_CONTINUE)
6097 return MONITOR_RECORDING;
6098 if (status == ARECORD_ERROR) {
6100 AbortRecording(cx, "Error returned while recording loop edge");
6101 return MONITOR_ERROR;
6103 JS_ASSERT(status == ARECORD_ABORTED && !tm->recorder);
6104 return MONITOR_NOT_RECORDING;
/*
 * attemptTreeCall: while recording an outer trace, execute the compiled
 * inner tree |f| and react to how it exited: emit a nested tree call
 * (ARECORD_CONTINUE), abort and grow/stabilize the inner tree, or abort
 * outright.  Several case labels and braces are elided from this excerpt.
 */
6107 JS_REQUIRES_STACK AbortableRecordingStatus
6108 TraceRecorder::attemptTreeCall(TreeFragment* f, uintN& inlineCallCount)
6110 adjustCallerTypes(f);
6114 uintN oldInlineCallCount = inlineCallCount;
/* |this| may die inside ExecuteTree; keep locals for use afterwards. */
6117 JSContext *localCx = cx;
6118 TraceMonitor *localtm = traceMonitor;
6120 // Refresh the import type map so the tracker can reimport values after the
6121 // call with their correct types. The inner tree must not change the type of
6122 // any variable in a frame above the current one (i.e., upvars).
6124 // Note that DetermineTypesVisitor may call determineSlotType, which may
6125 // read from the (current, stale) import type map, but this is safe here.
6126 // The reason is that determineSlotType will read the import type map only
6127 // if there is not a tracker instruction for that value, which means that
6128 // value has not been written yet, so that type map entry is up to date.
6129 importTypeMap.setLength(NativeStackSlots(cx, callDepth));
6130 DetermineTypesVisitor visitor(*this, importTypeMap.data());
6131 VisitStackSlots(visitor, cx, callDepth);
6133 VMSideExit* innermostNestedGuard = NULL;
6135 bool ok = ExecuteTree(cx, traceMonitor, f, inlineCallCount, &innermostNestedGuard, &lr);
6138 * If ExecuteTree reentered the interpreter, it may have killed |this|
6139 * and/or caused an error, which must be propagated.
6141 JS_ASSERT_IF(localtm->recorder, localtm->recorder == this);
6143 return ARECORD_ERROR;
6144 if (!localtm->recorder)
6145 return ARECORD_ABORTED;
6148 AbortRecording(cx, "Couldn't call inner tree");
6149 return ARECORD_ABORTED;
6152 TreeFragment* outerFragment = tree;
6153 JSScript* outerScript = outerFragment->script;
6154 jsbytecode* outerPC = (jsbytecode*) outerFragment->ip;
6155 switch (lr->exitType) {
6157 /* If the inner tree exited on an unknown loop exit, grow the tree around it. */
6158 if (innermostNestedGuard) {
6159 if (AbortRecording(cx, "Inner tree took different side exit, abort current "
6160 "recording and grow nesting tree") == JIT_RESET) {
6161 return ARECORD_ABORTED;
6163 return AttemptToExtendTree(localCx, localtm,
6164 innermostNestedGuard, lr, outerScript, outerPC)
6169 JS_ASSERT(oldInlineCallCount == inlineCallCount);
6171 /* Emit a call to the inner tree and continue recording the outer tree trace. */
6172 emitTreeCall(f, lr);
6173 return ARECORD_CONTINUE;
6175 case UNSTABLE_LOOP_EXIT:
6177 /* Abort recording so the inner loop can become type stable. */
6178 JSObject* _globalObj = globalObj;
6179 if (AbortRecording(cx, "Inner tree is trying to stabilize, "
6180 "abort outer recording") == JIT_RESET) {
6181 return ARECORD_ABORTED;
6183 return AttemptToStabilizeTree(localCx, localtm, _globalObj, lr, outerScript, outerPC,
6184 outerFragment->argc)
/* Feed overflow/zero-test exits back into the oracle before regrowing. */
6191 if (lr->exitType == MUL_ZERO_EXIT)
6192 traceMonitor->oracle->markInstructionSlowZeroTest(cx->regs->pc);
6194 traceMonitor->oracle->markInstructionUndemotable(cx->regs->pc);
6198 /* Abort recording the outer tree, extend the inner tree. */
6199 if (AbortRecording(cx, "Inner tree is trying to grow, "
6200 "abort outer recording") == JIT_RESET) {
6201 return ARECORD_ABORTED;
6203 return AttemptToExtendTree(localCx, localtm, lr, NULL, outerScript, outerPC)
6208 JS_NOT_REACHED("NESTED_EXIT should be replaced by innermost side exit");
6210 debug_only_printf(LC_TMTracer, "exit_type=%s\n", getExitName(lr->exitType));
6211 AbortRecording(cx, "Inner tree not suitable for calling");
6212 return ARECORD_ABORTED;
/*
 * IsEntryTypeCompatible: can value |v| enter a trace slot recorded with type
 * |type|?  int32 values fit int32 or double slots; a double fits an int32
 * slot only if it is exactly representable as int32; objects must match
 * fun/non-fun object flavor.  (Return-type line and the |ok| declaration are
 * elided from this excerpt; the result is presumably returned as |ok|.)
 */
6217 IsEntryTypeCompatible(const Value &v, JSValueType type)
6221 JS_ASSERT(type <= JSVAL_UPPER_INCL_TYPE_OF_BOXABLE_SET);
6222 JS_ASSERT(type != JSVAL_TYPE_OBJECT); /* JSVAL_TYPE_OBJECT does not belong in a type map */
6225 ok = (type == JSVAL_TYPE_INT32 || type == JSVAL_TYPE_DOUBLE);
6227 } else if (v.isDouble()) {
6229 ok = (type == JSVAL_TYPE_DOUBLE) ||
6230 (type == JSVAL_TYPE_INT32 && JSDOUBLE_IS_INT32(v.toDouble(), &_));
6232 } else if (v.isObject()) {
6233 ok = v.toObject().isFunction()
6234 ? type == JSVAL_TYPE_FUNOBJ
6235 : type == JSVAL_TYPE_NONFUNOBJ;
/* Non-double, non-object primitives must match their trace type exactly. */
6238 ok = v.extractNonDoubleObjectTraceType() == type;
6241 char ttag = TypeToChar(type);
6242 char vtag = ValueToTypeChar(v);
6243 debug_only_printf(LC_TMTracer, "%c/%c ", vtag, ttag);
6245 debug_only_printf(LC_TMTracer, "%s", "(incompatible types)");
/*
 * IsFrameObjPtrTypeCompatible: type-check the two per-frame object slots.
 * |p| is either the frame's scope-chain slot (always a non-function object)
 * or its args-object slot (non-function object iff an args object exists,
 * otherwise null).
 */
6251 IsFrameObjPtrTypeCompatible(void *p, JSStackFrame *fp, JSValueType type)
6253 debug_only_printf(LC_TMTracer, "%c/%c ", TypeToChar(type),
6254 (p == fp->addressOfScopeChain() || fp->hasArgsObj())
6255 ? TypeToChar(JSVAL_TYPE_NONFUNOBJ)
6256 : TypeToChar(JSVAL_TYPE_NULL));
6257 if (p == fp->addressOfScopeChain())
6258 return type == JSVAL_TYPE_NONFUNOBJ;
6259 JS_ASSERT(p == fp->addressOfArgs());
6260 JS_ASSERT(type == JSVAL_TYPE_NONFUNOBJ || type == JSVAL_TYPE_NULL);
6261 return fp->hasArgsObj() == (type == JSVAL_TYPE_NONFUNOBJ);
/*
 * TypeCompatibilityVisitor: slot visitor used by findNestedCompatiblePeer to
 * test whether current interpreter values can enter a candidate inner tree
 * whose entry types are in |mTypeMap|.  Mismatching int32/double slots are
 * reported to the oracle as undemotable so the next compilation widens them.
 * (Member list, mOk bookkeeping, and the class tail are elided from this
 * excerpt.)
 */
6264 class TypeCompatibilityVisitor : public SlotVisitorBase
6266 TraceRecorder &mRecorder;
6269 JSValueType *mTypeMap;
6270 unsigned mStackSlotNum;
6273 TypeCompatibilityVisitor (TraceRecorder &recorder,
6274 JSValueType *typeMap) :
6275 mRecorder(recorder),
6277 mOracle(recorder.traceMonitor->oracle),
6283 JS_REQUIRES_STACK JS_ALWAYS_INLINE void
6284 visitGlobalSlot(Value *vp, unsigned n, unsigned slot) {
6285 debug_only_printf(LC_TMTracer, "global%d=", n);
6286 if (!IsEntryTypeCompatible(*vp, *mTypeMap)) {
6288 } else if (!IsPromotedInt32(mRecorder.get(vp)) && *mTypeMap == JSVAL_TYPE_INT32) {
6289 mOracle->markGlobalSlotUndemotable(mCx, slot);
6291 } else if (vp->isInt32() && *mTypeMap == JSVAL_TYPE_DOUBLE) {
6292 mOracle->markGlobalSlotUndemotable(mCx, slot);
6298 * For the below two methods, one may be inclined to 'return false' early
6299 * when mOk is set to 'false'. Don't do that. It is very important to run
6300 * through the whole list to let all mis-matching slots get marked
6301 * undemotable in the oracle.
6304 JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
6305 visitStackSlots(Value *vp, size_t count, JSStackFrame* fp) {
6306 for (size_t i = 0; i < count; ++i) {
6307 debug_only_printf(LC_TMTracer, "%s%u=", stackSlotKind(), unsigned(i));
6308 if (!IsEntryTypeCompatible(*vp, *mTypeMap)) {
6310 } else if (!IsPromotedInt32(mRecorder.get(vp)) && *mTypeMap == JSVAL_TYPE_INT32) {
6311 mOracle->markStackSlotUndemotable(mCx, mStackSlotNum);
6313 } else if (vp->isInt32() && *mTypeMap == JSVAL_TYPE_DOUBLE) {
6314 mOracle->markStackSlotUndemotable(mCx, mStackSlotNum);
6323 JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
6324 visitFrameObjPtr(void* p, JSStackFrame* fp) {
6325 debug_only_printf(LC_TMTracer, "%s%u=", stackSlotKind(), 0);
6326 if (!IsFrameObjPtrTypeCompatible(p, fp, *mTypeMap))
/*
 * findNestedCompatiblePeer: scan the peer list starting at |f| for an inner
 * tree whose entry type map matches the recorder's current state, extending
 * each candidate's global type map first if the outer tree tracks more
 * globals.  (The match/return lines are elided from this excerpt.)
 */
6338 JS_REQUIRES_STACK TreeFragment*
6339 TraceRecorder::findNestedCompatiblePeer(TreeFragment* f)
6341 unsigned int ngslots = tree->globalSlots->length();
6343 for (; f != NULL; f = f->peer) {
6347 debug_only_printf(LC_TMTracer, "checking nested types %p: ", (void*)f);
6349 if (ngslots > f->nGlobalTypes())
6350 SpecializeTreesToMissingGlobals(cx, globalObj, f);
6353 * Determine whether the typemap of the inner tree matches the outer
6354 * tree's current state. If the inner tree expects an integer, but the
6355 * outer tree doesn't guarantee an integer for that slot, we mark the
6356 * slot undemotable and mismatch here. This will force a new tree to be
6357 * compiled that accepts a double for the slot. If the inner tree
6358 * expects a double, but the outer tree has an integer, we can proceed,
6359 * but we mark the location undemotable.
6361 TypeCompatibilityVisitor visitor(*this, f->typeMap.data());
6362 VisitSlots(visitor, cx, 0, *tree->globalSlots);
6364 debug_only_printf(LC_TMTracer, " %s\n", visitor.isOk() ? "match" : "");
/*
 * CheckEntryTypeVisitor: slot visitor used by CheckEntryTypes to verify that
 * every current value is compatible with the corresponding entry in a tree's
 * type map.  Unlike TypeCompatibilityVisitor it does not consult the oracle;
 * it only accumulates an overall ok/not-ok verdict.  (mOk member and
 * accessor are elided from this excerpt.)
 */
6372 class CheckEntryTypeVisitor : public SlotVisitorBase
6375 JSValueType *mTypeMap;
6377 CheckEntryTypeVisitor(JSValueType *typeMap) :
/* The 0xCD assertion guards against reading the debug fill pattern. */
6382 JS_ALWAYS_INLINE void checkSlot(const Value &v, char const *name, int i) {
6383 debug_only_printf(LC_TMTracer, "%s%d=", name, i);
6384 JS_ASSERT(*(uint8_t*)mTypeMap != 0xCD);
6385 mOk = IsEntryTypeCompatible(v, *mTypeMap++);
6388 JS_REQUIRES_STACK JS_ALWAYS_INLINE void
6389 visitGlobalSlot(Value *vp, unsigned n, unsigned slot) {
6391 checkSlot(*vp, "global", n);
6394 JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
6395 visitStackSlots(Value *vp, size_t count, JSStackFrame* fp) {
6396 for (size_t i = 0; i < count; ++i) {
6399 checkSlot(*vp++, stackSlotKind(), i);
6404 JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
6405 visitFrameObjPtr(void* p, JSStackFrame *fp) {
6406 debug_only_printf(LC_TMTracer, "%s%d=", stackSlotKind(), 0);
6407 JS_ASSERT(*(uint8_t*)mTypeMap != 0xCD);
6408 return mOk = IsFrameObjPtrTypeCompatible(p, fp, *mTypeMap++);
6417 * Check if types are usable for trace execution.
6419 * @param cx Context.
6420 * @param f Tree of peer we're testing.
6421 * @return True if compatible (with or without demotions), false otherwise.
6423 static JS_REQUIRES_STACK bool
6424 CheckEntryTypes(JSContext* cx, JSObject* globalObj, TreeFragment* f)
6426 unsigned int ngslots = f->globalSlots->length();
6428 JS_ASSERT(f->nStackTypes == NativeStackSlots(cx, 0));
/* Extend the tree's global type map if new globals appeared since compile. */
6430 if (ngslots > f->nGlobalTypes())
6431 SpecializeTreesToMissingGlobals(cx, globalObj, f);
6433 JS_ASSERT(f->typeMap.length() == NativeStackSlots(cx, 0) + ngslots);
6434 JS_ASSERT(f->typeMap.length() == f->nStackTypes + ngslots);
6435 JS_ASSERT(f->nGlobalTypes() == ngslots);
6437 CheckEntryTypeVisitor visitor(f->typeMap.data());
6438 VisitSlots(visitor, cx, 0, *f->globalSlots);
6440 debug_only_print0(LC_TMTracer, "\n");
6441 return visitor.isOk();
6445 * Find an acceptable entry tree given a PC.
6447 * @param cx Context.
6448 * @param globalObj Global object.
6449 * @param f First peer fragment.
6450 * @param nodemote If true, will try to find a peer that does not require demotion.
      (NOTE(review): stale doc — there is no |nodemote| parameter in the signature below.)
6451 * @out count Number of fragments consulted.
6453 static JS_REQUIRES_STACK TreeFragment*
6454 FindVMCompatiblePeer(JSContext* cx, JSObject* globalObj, TreeFragment* f, uintN& count)
6457 for (; f != NULL; f = f->peer) {
6460 debug_only_printf(LC_TMTracer,
6461 "checking vm types %p (ip: %p): ", (void*)f, f->ip);
6462 if (CheckEntryTypes(cx, globalObj, f))
6470 * For the native stacks and global frame, reuse the storage in |tm->storage|.
6471 * This reuse depends on the invariant that only one trace uses |tm->storage|
6472 * at a time. This is subtly correct in case of deep bail; see the comment
6473 * about "clobbering deep bails" in DeepBail.
6476 TracerState::TracerState(JSContext* cx, TraceMonitor* tm, TreeFragment* f,
6477 uintN& inlineCallCount, VMSideExit** innermostNestedGuardp)
/* Native stack / call stack windows come from the shared tm->storage area. */
6480 stackBase(tm->storage->stack()),
6481 sp(stackBase + f->nativeStackBase / sizeof(double)),
6482 eos(tm->storage->global()),
6483 callstackBase(tm->storage->callstack()),
6486 eor(callstackBase + JS_MIN(MAX_CALL_STACK_ENTRIES,
6487 JS_MAX_INLINE_CALL_COUNT - inlineCallCount)),
6488 lastTreeExitGuard(NULL),
6489 lastTreeCallGuard(NULL),
6490 rpAtLastTreeCall(NULL),
6492 inlineCallCountp(&inlineCallCount),
6493 innermostNestedGuardp(innermostNestedGuardp),
6494 #ifdef EXECUTE_TREE_TIMER
/* Link this state into the monitor's tracer-state stack. */
6500 JS_ASSERT(!tm->tracecx);
6502 prev = tm->tracerState;
6503 tm->tracerState = this;
6506 if (TRACE_PROFILER(cx))
6510 JS_ASSERT(JS_THREAD_DATA(cx)->onTraceCompartment == NULL);
6511 JS_ASSERT(JS_THREAD_DATA(cx)->recordingCompartment == NULL ||
6512 JS_THREAD_DATA(cx)->recordingCompartment == cx->compartment);
6513 JS_ASSERT(JS_THREAD_DATA(cx)->profilingCompartment == NULL);
6514 JS_THREAD_DATA(cx)->onTraceCompartment = cx->compartment;
6516 JS_ASSERT(eos == stackBase + MAX_NATIVE_STACK_SLOTS);
6517 JS_ASSERT(sp < eos);
6520 * inlineCallCount has already been incremented, if being invoked from
6521 * EnterFrame. It is okay to have a 0-frame restriction since the JIT
6522 * might not need any frames.
6524 JS_ASSERT(inlineCallCount <= JS_MAX_INLINE_CALL_COUNT);
6528 * Cannot 0xCD-fill global frame since it may overwrite a bailed outer
6529 * ExecuteTree's 0xdeadbeefdeadbeef marker.
6531 memset(tm->storage->stack(), 0xCD, MAX_NATIVE_STACK_SLOTS * sizeof(double));
6532 memset(tm->storage->callstack(), 0xCD, MAX_CALL_STACK_ENTRIES * sizeof(FrameInfo*));
/*
 * ~TracerState: unlink this state from the trace monitor and clear the
 * tracing context, restoring the previous (outer) tracer state if any.
 */
6537 TracerState::~TracerState()
6539 JS_ASSERT(!nativeVp);
6541 if (traceMonitor->tracecx) {
6542 /* If we didn't already deep-bail... */
6543 JS_ASSERT(JS_THREAD_DATA(cx)->recordingCompartment == NULL ||
6544 JS_THREAD_DATA(cx)->recordingCompartment == cx->compartment);
6545 JS_ASSERT(JS_THREAD_DATA(cx)->profilingCompartment == NULL);
6546 JS_ASSERT(JS_THREAD_DATA(cx)->onTraceCompartment == cx->compartment);
6547 JS_THREAD_DATA(cx)->onTraceCompartment = NULL;
6550 traceMonitor->tracerState = prev;
6551 traceMonitor->tracecx = NULL;
6554 /* Call |f|, return the exit taken. */
/*
 * Invokes the compiled native code through a union that reinterprets the
 * fragment's code pointer as a FASTCALL function taking the TracerState.
 * (The |u.code = ...| assignment and |rec| declaration are elided from this
 * excerpt.)
 */
6555 static JS_ALWAYS_INLINE VMSideExit*
6556 ExecuteTrace(JSContext* cx, TraceMonitor* tm, Fragment* f, TracerState& state)
6558 JS_ASSERT(!tm->bailExit);
6560 JS_ASSERT(!TRACE_PROFILER(cx));
6562 union { NIns *code; GuardRecord* (FASTCALL *func)(TracerState*); } u;
6565 #if defined(JS_NO_FASTCALL) && defined(NANOJIT_IA32)
6566 SIMULATE_FASTCALL(rec, state, NULL, u.func);
6568 rec = u.func(&state);
6570 JS_ASSERT(!tm->bailExit);
6571 return (VMSideExit*)rec->exit;
6574 /* Check whether our assumptions about the incoming scope-chain are upheld. */
/* Blacklists the tree's root PC and fails when either assumption is violated. */
6575 static JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
6576 ScopeChainCheck(JSContext* cx, TreeFragment* f)
6578 JS_ASSERT(f->globalObj == cx->fp()->scopeChain().getGlobal());
6581 * The JIT records and expects to execute with two scope-chain
6582 * assumptions baked-in:
6584 * 1. That the bottom of the scope chain is global, in the sense of
6585 * JSCLASS_IS_GLOBAL.
6587 * 2. That the scope chain between fp and the global is free of
6588 * "unusual" native objects such as HTML forms or other funny
6591 * #2 is checked here while following the scope-chain links, via
6592 * js_IsCacheableNonGlobalScope, which consults a whitelist of known
6593 * class types; once a global is found, it's checked for #1. Failing
6594 * either check causes an early return from execution.
6596 JSObject* child = &cx->fp()->scopeChain();
6597 while (JSObject* parent = child->getParent()) {
6598 if (!IsCacheableNonGlobalScope(child)) {
6599 debug_only_print0(LC_TMTracer,"Blacklist: non-cacheable object on scope chain.\n");
6600 Blacklist((jsbytecode*) f->root->ip);
6605 JS_ASSERT(child == f->globalObj);
6607 if (!f->globalObj->isGlobal()) {
6608 debug_only_print0(LC_TMTracer, "Blacklist: non-global at root of scope chain.\n");
6609 Blacklist((jsbytecode*) f->root->ip);
/*
 * Result of LeaveTree; ExecuteTree below compares against NO_DEEP_BAIL.
 * (Enumerator list is elided from this excerpt.)
 */
6616 enum LEAVE_TREE_STATUS {
/* Forward declaration; definition follows ExecuteTree. */
6621 static LEAVE_TREE_STATUS
6622 LeaveTree(TraceMonitor *tm, TracerState&, VMSideExit *lr);
6624 /* Return false if the interpreter should goto error.  */
/*
 * ExecuteTree: run the compiled tree |f| to a side exit.  Validates scope
 * chain and stack headroom, marshals interpreter state into the native
 * stack/global buffers, calls the native code, then restores interpreter
 * state via LeaveTree.  The taken exit is returned through |lrp| and any
 * innermost nested guard through |innermostNestedGuardp|.
 * Several brace/#ifdef lines are elided from this excerpt.
 */
6625 static JS_REQUIRES_STACK bool
6626 ExecuteTree(JSContext* cx, TraceMonitor* tm, TreeFragment* f, uintN& inlineCallCount,
6627 VMSideExit** innermostNestedGuardp, VMSideExit **lrp)
6630 TraceVisStateObj tvso(cx, S_EXECUTE);
6632 JS_ASSERT(f->root == f && f->code());
6634 if (!ScopeChainCheck(cx, f) || !cx->stack().ensureEnoughSpaceToEnterTrace() ||
6635 inlineCallCount + f->maxCallDepth > JS_MAX_INLINE_CALL_COUNT) {
6640 /* Make sure the global object is sane. */
6641 JS_ASSERT(f->globalObj->numSlots() <= MAX_GLOBAL_SLOTS);
6642 JS_ASSERT(f->nGlobalTypes() == f->globalSlots->length());
6643 JS_ASSERT_IF(f->globalSlots->length() != 0,
6644 f->globalObj->shape() == f->globalShape);
6646 /* Initialize trace state. */
6647 TracerState state(cx, tm, f, inlineCallCount, innermostNestedGuardp);
6648 double* stack = tm->storage->stack();
6649 double* global = tm->storage->global();
6650 JSObject* globalObj = f->globalObj;
6651 unsigned ngslots = f->globalSlots->length();
6652 uint16* gslots = f->globalSlots->data();
/* Box interpreter values into the native (unboxed) stack/global buffers. */
6654 BuildNativeFrame(cx, globalObj, 0 /* callDepth */, ngslots, gslots,
6655 f->typeMap.data(), global, stack);
6657 AUDIT(traceTriggered);
6658 debug_only_printf(LC_TMTracer, "entering trace at %s:%u@%u, execs: %u code: %p\n",
6659 cx->fp()->script()->filename,
6660 js_FramePCToLineNumber(cx, cx->fp()),
6661 FramePCOffset(cx, cx->fp()),
/* Debug canary just past the live globals; checked after LeaveTree. */
6665 debug_only_stmt(uint32 globalSlots = globalObj->numSlots();)
6666 debug_only_stmt(*(uint64*)&tm->storage->global()[globalSlots] = 0xdeadbeefdeadbeefLL;)
6668 /* Execute trace. */
6669 tm->iterationCounter = 0;
6670 debug_only(int64 t0 = PRMJ_Now();)
6672 VMSideExit* lr = (TraceVisStateObj(cx, S_NATIVE), ExecuteTrace(cx, tm, f, state));
6674 VMSideExit* lr = ExecuteTrace(cx, tm, f, state);
6676 debug_only(int64 t1 = PRMJ_Now();)
6678 JS_ASSERT_IF(lr->exitType == LOOP_EXIT, !lr->calldepth);
6680 /* Restore interpreter state. */
6682 LEAVE_TREE_STATUS lts =
6684 LeaveTree(tm, state, lr);
6686 JS_ASSERT_IF(lts == NO_DEEP_BAIL,
6687 *(uint64*)&tm->storage->global()[globalSlots] == 0xdeadbeefdeadbeefLL);
6690 *lrp = state.innermost;
6691 bool ok = !(state.builtinStatus & BUILTIN_ERROR);
6692 JS_ASSERT_IF(cx->isExceptionPending(), !ok);
6694 size_t iters = tm->iterationCounter;
6700 JSStackFrame *fp = cx->fp();
6701 const char *prefix = "";
6702 if (iters == LOOP_COUNT_MAX)
6704 debug_only_printf(LC_TMMinimal, " [%.3f ms] Tree at line %u executed for %s%u iterations;"
6705 " executed %u times; leave for %s at %s:%u (%s)\n",
6706 double(t1-t0) / PRMJ_USEC_PER_MSEC,
6707 f->treeLineNumber, prefix, (uintN)iters, f->execs,
6708 getExitName(lr->exitType),
6709 fp->script()->filename,
6710 js_FramePCToLineNumber(cx, fp),
6711 js_CodeName[fp->hasImacropc() ? *fp->imacropc() : *cx->regs->pc]);
/* Under method JIT, blacklist trees that keep exiting after few iterations. */
6715 if (cx->methodJitEnabled) {
6716 if (lr->exitType == LOOP_EXIT && f->iters < MIN_LOOP_ITERS
6717 && f->execs >= LOOP_CHECK_ITERS)
6719 debug_only_printf(LC_TMMinimal, " Blacklisting at line %u (executed only %d iters)\n",
6720 f->treeLineNumber, f->iters);
6721 Blacklist((jsbytecode *)f->ip);
6731 Guardian(bool *flagp) {
6732 this->flagp = flagp;
6743 static JS_FORCES_STACK LEAVE_TREE_STATUS
6744 LeaveTree(TraceMonitor *tm, TracerState& state, VMSideExit* lr)
6746 VOUCH_DOES_NOT_REQUIRE_STACK();
6748 JSContext* cx = state.cx;
6750 /* Temporary waive the soft GC quota to make sure LeaveTree() doesn't fail. */
6751 Guardian waiver(&JS_THREAD_DATA(cx)->waiveGCQuota);
6753 FrameInfo** callstack = state.callstackBase;
6754 double* stack = state.stackBase;
6757 * Except if we find that this is a nested bailout, the guard the call
6758 * returned is the one we have to use to adjust pc and sp.
6760 VMSideExit* innermost = lr;
6763 * While executing a tree we do not update state.sp and state.rp even if
6764 * they grow. Instead, guards tell us by how much sp and rp should be
6765 * incremented in case of a side exit. When calling a nested tree, however,
6766 * we actively adjust sp and rp. If we have such frames from outer trees on
6767 * the stack, then rp will have been adjusted. Before we can process the
6768 * stack of the frames of the tree we directly exited from, we have to
6769 * first work our way through the outer frames and generate interpreter
6770 * frames for them. Once the call stack (rp) is empty, we can process the
6771 * final frames (which again are not directly visible and only the guard we
6772 * exited on will tells us about).
6774 FrameInfo** rp = (FrameInfo**)state.rp;
6775 if (lr->exitType == NESTED_EXIT) {
6776 VMSideExit* nested = state.lastTreeCallGuard;
6779 * If lastTreeCallGuard is not set in state, we only have a single
6780 * level of nesting in this exit, so lr itself is the innermost and
6781 * outermost nested guard, and hence we set nested to lr. The
6782 * calldepth of the innermost guard is not added to state.rp, so we
6783 * do it here manually. For a nesting depth greater than 1 the
6784 * call tree code already added the innermost guard's calldepth
6785 * to state.rpAtLastTreeCall.
6788 rp += lr->calldepth;
6791 * During unwinding state.rp gets overwritten at every step and we
6792 * restore it here to its state at the innermost nested guard. The
6793 * builtin already added the calldepth of that innermost guard to
6796 rp = (FrameInfo**)state.rpAtLastTreeCall;
6798 innermost = state.lastTreeExitGuard;
6799 if (state.innermostNestedGuardp)
6800 *state.innermostNestedGuardp = nested;
6802 JS_ASSERT(nested->exitType == NESTED_EXIT);
6803 JS_ASSERT(state.lastTreeExitGuard);
6804 JS_ASSERT(state.lastTreeExitGuard->exitType != NESTED_EXIT);
6807 int32_t bs = state.builtinStatus;
6808 bool bailed = innermost->exitType == STATUS_EXIT && (bs & BUILTIN_BAILED);
6813 * A _FAIL native already called LeaveTree once. At that time we
6814 * reconstructed the interpreter stack, in pre-call state, with pc
6815 * pointing to the op that triggered the call. Then we continued in
6818 if (!(bs & BUILTIN_ERROR)) {
6820 * The builtin or native deep-bailed but finished successfully
6821 * (no exception or error).
6823 * After it returned, the JIT code stored the results of the
6824 * builtin or native at the top of the native stack and then
6825 * immediately flunked the guard on state->builtinStatus.
6827 * Now LeaveTree has been called again from the tail of
6828 * ExecuteTree. We are about to return to the interpreter. Adjust
6829 * the top stack frame to resume on the next op.
6831 JSFrameRegs* regs = cx->regs;
6832 JSOp op = (JSOp) *regs->pc;
6835 * JSOP_SETELEM can be coalesced with a JSOP_POP in the interpeter.
6836 * Since this doesn't re-enter the recorder, the post-state snapshot
6837 * is invalid. Fix it up here.
6839 if (op == JSOP_SETELEM && JSOp(regs->pc[JSOP_SETELEM_LENGTH]) == JSOP_POP) {
6840 regs->sp -= js_CodeSpec[JSOP_SETELEM].nuses;
6841 regs->sp += js_CodeSpec[JSOP_SETELEM].ndefs;
6842 regs->pc += JSOP_SETELEM_LENGTH;
6846 const JSCodeSpec& cs = js_CodeSpec[op];
6847 regs->sp -= (cs.format & JOF_INVOKE) ? GET_ARGC(regs->pc) + 2 : cs.nuses;
6848 regs->sp += cs.ndefs;
6849 regs->pc += cs.length;
6850 JS_ASSERT_IF(!cx->fp()->hasImacropc(),
6851 cx->fp()->slots() + cx->fp()->numFixed() +
6852 js_ReconstructStackDepth(cx, cx->fp()->script(), regs->pc) ==
6856 * If there's a tree call around the point that we deep exited at,
6857 * then state.sp and state.rp were restored to their original
6858 * values before the tree call and sp might be less than deepBailSp,
6859 * which we sampled when we were told to deep bail.
6861 JS_ASSERT(state.deepBailSp >= state.stackBase && state.sp <= state.deepBailSp);
6864 * As explained above, the JIT code stored a result value or values
6865 * on the native stack. Transfer them to the interpreter stack now.
6866 * (Some opcodes, like JSOP_CALLELEM, produce two values, hence the
6869 JSValueType* typeMap = innermost->stackTypeMap();
6870 for (int i = 1; i <= cs.ndefs; i++) {
6873 typeMap[innermost->numStackSlots - i],
6874 (jsdouble *) state.deepBailSp
6875 + innermost->sp_adj / sizeof(jsdouble) - i);
6881 while (callstack < rp) {
6882 FrameInfo* fi = *callstack;
6883 /* Peek at the callee native slot in the not-yet-synthesized prev frame. */
6884 JSObject* callee = *(JSObject**)&stack[fi->callerHeight];
6887 * Flush the slots for cx->fp() (which will become cx->fp()->prev after
6888 * SynthesizeFrame). Since a frame's arguments (including callee
6889 * and thisv) are part of the frame, we only want to flush up to the
6890 * next frame's arguments, so set cx->regs->sp to to not include said
6891 * arguments. The upcoming call to SynthesizeFrame will reset regs->sp
6892 * to its correct value.
6894 cx->regs->sp = cx->fp()->slots() + (fi->spdist - (2 + fi->get_argc()));
6895 int slots = FlushNativeStackFrame(cx, 0 /* callDepth */, fi->get_typemap(), stack);
6897 /* Finish initializing cx->fp() and push a new cx->fp(). */
6898 SynthesizeFrame(cx, *fi, callee);
6900 JSStackFrame* fp = cx->fp();
6901 debug_only_printf(LC_TMTracer,
6902 "synthesized deep frame for %s:%u@%u, slots=%d, fi=%p\n",
6903 fp->script()->filename,
6904 js_FramePCToLineNumber(cx, fp),
6905 FramePCOffset(cx, fp),
6910 * Keep track of the additional frames we put on the interpreter stack
6911 * and the native stack slots we consumed.
6913 ++*state.inlineCallCountp;
6919 * We already synthesized the frames around the innermost guard. Here we
6920 * just deal with additional frames inside the tree we are bailing out
6923 JS_ASSERT(rp == callstack);
6924 unsigned calldepth = innermost->calldepth;
6925 unsigned calleeOffset = 0;
6926 for (unsigned n = 0; n < calldepth; ++n) {
6927 /* Peek at the callee native slot in the not-yet-synthesized prev frame. */
6928 calleeOffset += callstack[n]->callerHeight;
6929 JSObject* callee = *(JSObject**)&stack[calleeOffset];
6931 /* Reconstruct the frame. */
6932 SynthesizeFrame(cx, *callstack[n], callee);
6933 ++*state.inlineCallCountp;
6935 JSStackFrame* fp = cx->fp();
6936 debug_only_printf(LC_TMTracer,
6937 "synthesized shallow frame for %s:%u@%u\n",
6938 fp->script()->filename, js_FramePCToLineNumber(cx, fp),
6939 FramePCOffset(cx, fp));
6944 * Adjust sp and pc relative to the tree we exited from (not the tree we
6945 * entered into). These are our final values for sp and pc since
6946 * SynthesizeFrame has already taken care of all frames in between.
6948 JSStackFrame* const fp = cx->fp();
6951 * If we are not exiting from an inlined frame, the state->sp is spbase.
6952 * Otherwise spbase is whatever slots frames around us consume.
6954 cx->regs->pc = innermost->pc;
6955 if (innermost->imacpc)
6956 fp->setImacropc(innermost->imacpc);
6958 fp->clearImacropc();
6961 * Set cx->regs->regs for the top frame. Since the top frame does not have a
6962 * FrameInfo (a FrameInfo is only pushed for calls), we basically need to
6963 * compute the offset from fp->slots() to the top of the stack based on the
6964 * number of native slots allocated for this function.
6966 * Duplicate native stack layout computation: see VisitFrameSlots header comment.
6968 uintN slotOffset = innermost->numStackSlots - innermost->numStackSlotsBelowCurrentFrame;
6969 if (fp->isGlobalFrame()) {
6970 /* Global nfixed slots are not kept on the native stack, so add them back. */
6971 slotOffset += fp->globalScript()->nfixed;
6973 /* A frame's native slots includes args and frame ptrs, so strip them off. */
6974 slotOffset -= NumSlotsBeforeFixed(fp);
6976 cx->regs->sp = fp->slots() + slotOffset;
6978 /* Assert that we computed sp correctly. */
6979 JS_ASSERT_IF(!fp->hasImacropc(),
6980 fp->slots() + fp->numFixed() +
6981 js_ReconstructStackDepth(cx, fp->script(), cx->regs->pc) == cx->regs->sp);
6983 #ifdef EXECUTE_TREE_TIMER
6984 uint64 cycles = rdtsc() - state.startTime;
6985 #elif defined(JS_JIT_SPEW)
6988 debug_only_printf(LC_TMTracer,
6989 "leaving trace at %s:%u@%u, op=%s, lr=%p, exitType=%s, sp=%lld, "
6990 "calldepth=%d, cycles=%llu\n",
6991 fp->script()->filename,
6992 js_FramePCToLineNumber(cx, fp),
6993 FramePCOffset(cx, fp),
6994 js_CodeName[fp->hasImacropc() ? *fp->imacropc() : *cx->regs->pc],
6996 getExitName(lr->exitType),
6997 (long long int)(cx->regs->sp - fp->base()),
6999 (unsigned long long int)cycles);
7004 FlushNativeStackFrame(cx, innermost->calldepth, innermost->stackTypeMap(), stack);
7005 JS_ASSERT(unsigned(slots) == innermost->numStackSlots);
7008 * If this trace is part of a tree, later branches might have added
7009 * additional globals for which we don't have any type information
7010 * available in the side exit. We merge in this information from the entry
7011 * type-map. See also the comment in the constructor of TraceRecorder
7012 * regarding why this is always safe to do.
7014 TreeFragment* outermostTree = state.outermostTree;
7015 uint16* gslots = outermostTree->globalSlots->data();
7016 unsigned ngslots = outermostTree->globalSlots->length();
7017 JS_ASSERT(ngslots == outermostTree->nGlobalTypes());
7018 JSValueType* globalTypeMap;
7020 /* Are there enough globals? */
7021 TypeMap& typeMap = *tm->cachedTempTypeMap;
7023 if (innermost->numGlobalSlots == ngslots) {
7024 /* Yes. This is the ideal fast path. */
7025 globalTypeMap = innermost->globalTypeMap();
7028 * No. Merge the typemap of the innermost entry and exit together. This
7029 * should always work because it is invalid for nested trees or linked
7030 * trees to have incompatible types. Thus, whenever a new global type
7031 * is lazily added into a tree, all dependent and linked trees are
7032 * immediately specialized (see bug 476653).
7034 JS_ASSERT(innermost->root()->nGlobalTypes() == ngslots);
7035 JS_ASSERT(innermost->root()->nGlobalTypes() > innermost->numGlobalSlots);
7036 typeMap.ensure(ngslots);
7038 unsigned check_ngslots =
7040 BuildGlobalTypeMapFromInnerTree(typeMap, innermost);
7041 JS_ASSERT(check_ngslots == ngslots);
7042 globalTypeMap = typeMap.data();
7045 /* Write back interned globals. */
7046 JS_ASSERT(state.eos == state.stackBase + MAX_NATIVE_STACK_SLOTS);
7047 JSObject* globalObj = outermostTree->globalObj;
7048 FlushNativeGlobalFrame(cx, globalObj, state.eos, ngslots, gslots, globalTypeMap);
7051 if (innermost->exitType != TIMEOUT_EXIT)
7052 AUDIT(sideExitIntoInterpreter);
7054 AUDIT(timeoutIntoInterpreter);
7057 state.innermost = innermost;
7058 return NO_DEEP_BAIL;
/*
 * Map a pc sitting on a loop header (asserted to be JSOP_TRACE or
 * JSOP_NOTRACE) to the bytecode at the bottom of that loop, using offset 0
 * of the header's source note.
 * NOTE(review): this view is elided -- the return type and any handling of a
 * missing source note are not visible here; confirm against the full file.
 */
7062 GetLoopBottom(JSContext *cx, jsbytecode *pc)
7064     JS_ASSERT(*pc == JSOP_TRACE || *pc == JSOP_NOTRACE);
7065     JSScript *script = cx->fp()->script();
7066     jssrcnote *sn = js_GetSrcNote(script, pc);
// Offset 0 of the loop's source note is the distance to the loop bottom.
7069     return pc + js_GetSrcNoteOffset(sn, 0);
/*
 * Debug-only sanity check: assert that the current pc (or imacro pc, if we
 * are inside an imacro) lies within the loop of the tree being recorded,
 * i.e. between the tree entry ip and the loop bottom.
 */
7072 JS_ALWAYS_INLINE void
7073 TraceRecorder::assertInsideLoop()
7076     /* Asserts at callDepth == 0 will catch problems at the call op. */
7080     jsbytecode *pc = cx->regs->fp->hasImacropc() ? cx->regs->fp->imacropc() : cx->regs->pc;
7081     jsbytecode *beg = (jsbytecode *)tree->ip;
7082     jsbytecode *end = GetLoopBottom(cx, beg);
7085      * In some cases (continue in a while loop), we jump to the goto
7086      * immediately preceding a loop (the one that jumps to the loop
// Hence the lower bound is relaxed by one JSOP_GOTO_LENGTH before the header.
7089     JS_ASSERT(pc >= beg - JSOP_GOTO_LENGTH && pc <= end);
/*
 * Monitor a loop edge. If a recorder is active, try to close the loop or
 * record the edge; otherwise look up (or create) a tree for this loop
 * header, execute a compatible peer tree if one exists, and react to its
 * side exit (stabilize, extend, or return to the interpreter).
 * Returns MONITOR_RECORDING / MONITOR_NOT_RECORDING / MONITOR_ERROR.
 * NOTE(review): elided view -- many braces, else-branches and case labels
 * of the exit-type switch are not visible here.
 */
7093 JS_REQUIRES_STACK MonitorResult
7094 RecordLoopEdge(JSContext* cx, TraceMonitor* tm, uintN& inlineCallCount)
7097     TraceVisStateObj tvso(cx, S_MONITOR);
7100     /* Is the recorder currently active? */
7102         tm->recorder->assertInsideLoop();
7103         jsbytecode* pc = cx->regs->pc;
// Back at the entry ip of the tree being recorded: try to close the loop.
7104         if (pc == tm->recorder->tree->ip) {
7105             AbortableRecordingStatus status = tm->recorder->closeLoop();
7106             if (status != ARECORD_COMPLETED) {
7108                     AbortRecording(cx, "closeLoop failed");
7109                 return MONITOR_NOT_RECORDING;
// Otherwise record this loop edge (may call into an inner tree).
7112             MonitorResult r = TraceRecorder::recordLoopEdge(cx, tm->recorder, inlineCallCount);
7113             JS_ASSERT((r == MONITOR_RECORDING) == (tm->recorder != NULL));
7114             if (r == MONITOR_RECORDING || r == MONITOR_ERROR)
7118              * recordLoopEdge will invoke an inner tree if we have a matching
7119              * one. If we arrive here, that tree didn't run to completion and
7120              * instead we mis-matched or the inner tree took a side exit other than
7121              * the loop exit. We are thus no longer guaranteed to be parked on the
7122              * same loop header RecordLoopEdge was called for. In fact, this
7123              * might not even be a loop header at all. Hence if the program counter
7124              * no longer hovers over the inner loop header, return to the
7125              * interpreter and do not attempt to trigger or record a new tree at
7128             if (pc != cx->regs->pc) {
7130                 tvso.r = R_INNER_SIDE_EXIT;
7132                 return MONITOR_NOT_RECORDING;
7136     JS_ASSERT(!tm->recorder);
7139      * Make sure the shape of the global object still matches (this might flush
7142     JSObject* globalObj = cx->fp()->scopeChain().getGlobal();
7143     uint32 globalShape = -1;
7144     SlotList* globalSlots = NULL;
7146     if (!CheckGlobalObjectShape(cx, tm, globalObj, &globalShape, &globalSlots)) {
// Global shape mismatch: back off from this pc to avoid repeated attempts.
7147         Backoff(tm, cx->regs->pc);
7148         return MONITOR_NOT_RECORDING;
7151     /* Do not enter the JIT code with a pending operation callback. */
7152     if (JS_THREAD_DATA(cx)->interruptFlags) {
7154             tvso.r = R_CALLBACK_PENDING;
7156         return MONITOR_NOT_RECORDING;
7159     jsbytecode* pc = cx->regs->pc;
7160     uint32 argc = entryFrameArgc(cx);
7162     TreeFragment* f = LookupOrAddLoop(tm, pc, globalObj, globalShape, argc);
7165      * If we have no code in the anchor and no peers, we definitively won't be
7166      * able to activate any trees, so start compiling.
7168     if (!f->code() && !f->peer) {
// Require the loop to become hot (HOTLOOP hits) before compiling it.
7170         if (++f->hits() < HOTLOOP) {
7172             tvso.r = f->hits() < 1 ? R_BACKED_OFF : R_COLD;
7174             return MONITOR_NOT_RECORDING;
7177         if (!ScopeChainCheck(cx, f)) {
7179             tvso.r = R_FAIL_SCOPE_CHAIN_CHECK;
7181             return MONITOR_NOT_RECORDING;
7185          * We can give RecordTree the root peer. If that peer is already taken,
7186          * it will walk the peer list and find us a free slot or allocate a new
7189         bool rv = RecordTree(cx, tm, f->first, NULL, NULL, 0, globalSlots);
7192             tvso.r = R_FAIL_RECORD_TREE;
7194         return RecordingIfTrue(rv);
7197     debug_only_printf(LC_TMTracer,
7198                       "Looking for compat peer %d@%d, from %p (ip: %p)\n",
7199                       js_FramePCToLineNumber(cx, cx->fp()),
7200                       FramePCOffset(cx, cx->fp()), (void*)f, f->ip);
// Search peers of this loop for a tree whose typemap matches current state.
7203     TreeFragment* match = FindVMCompatiblePeer(cx, globalObj, f, count);
7205         if (count < MAXPEERS)
7209          * If we hit the max peers ceiling, don't try to lookup fragments all
7210          * the time. That's expensive. This must be a rather type-unstable loop.
7212         debug_only_print0(LC_TMTracer, "Blacklisted: too many peer trees.\n");
7213         Blacklist((jsbytecode*) f->root->ip);
7215             tvso.r = R_MAX_PEERS;
7217         return MONITOR_NOT_RECORDING;
7220     VMSideExit* lr = NULL;
7221     VMSideExit* innermostNestedGuard = NULL;
7223     if (!ExecuteTree(cx, tm, match, inlineCallCount, &innermostNestedGuard, &lr))
7224         return MONITOR_ERROR;
7228             tvso.r = R_FAIL_EXECUTE_TREE;
7230         return MONITOR_NOT_RECORDING;
7234      * If we exit on a branch, or on a tree call guard, try to grow the inner
7235      * tree (in case of a branch exit), or the tree nested around the tree we
7236      * exited from (in case of the tree call guard).
7239     switch (lr->exitType) {
7240       case UNSTABLE_LOOP_EXIT:
7241           rv = AttemptToStabilizeTree(cx, tm, globalObj, lr, NULL, NULL, 0);
7244               tvso.r = R_FAIL_STABILIZE;
7246           return RecordingIfTrue(rv);
// Tell the oracle which pcs demand slow zero tests / must not be demoted.
7250         if (lr->exitType == MUL_ZERO_EXIT)
7251             tm->oracle->markInstructionSlowZeroTest(cx->regs->pc);
7253             tm->oracle->markInstructionUndemotable(cx->regs->pc);
7257         rv = AttemptToExtendTree(cx, tm, lr, NULL, NULL, NULL
7262         return RecordingIfTrue(rv);
7265         if (innermostNestedGuard) {
7266             rv = AttemptToExtendTree(cx, tm, innermostNestedGuard, lr, NULL, NULL
7271             return RecordingIfTrue(rv);
7274             tvso.r = R_NO_EXTEND_OUTER;
7276         return MONITOR_NOT_RECORDING;
7280           tvso.r = R_MISMATCH_EXIT;
7281       return MONITOR_NOT_RECORDING;
7283           tvso.r = R_OOM_EXIT;
7284       return MONITOR_NOT_RECORDING;
7286           tvso.r = R_TIMEOUT_EXIT;
7287       return MONITOR_NOT_RECORDING;
7288       case DEEP_BAIL_EXIT:
7289           tvso.r = R_DEEP_BAIL_EXIT;
7290       return MONITOR_NOT_RECORDING;
7292           tvso.r = R_STATUS_EXIT;
7293       return MONITOR_NOT_RECORDING;
7298          * No, this was an unusual exit (i.e. out of memory/GC), so just resume
7302         tvso.r = R_OTHER_EXIT;
7304     return MONITOR_NOT_RECORDING;
/*
 * Per-opcode recording hook, called by the interpreter for each bytecode
 * while a recorder is active. Handles flush requests, flushes one-shot
 * pending state (guards, unbox requests), dispatches to the per-opcode
 * record_JSOP_* method via jsopcode.tbl, and translates the resulting
 * AbortableRecordingStatus (aborting/resetting the JIT as needed).
 * NOTE(review): elided view -- the switch scaffolding and several closing
 * braces are not visible here.
 */
7308 JS_REQUIRES_STACK AbortableRecordingStatus
7309 TraceRecorder::monitorRecording(JSOp op)
7311     JS_ASSERT(!addPropShapeBefore);
7313     JS_ASSERT(traceMonitor == &cx->compartment->traceMonitor);
// Keep local copies: |this| may be deleted by the record_* call below.
7315     TraceMonitor &localtm = *traceMonitor;
7316     debug_only_stmt( JSContext *localcx = cx; )
7318     JS_ASSERT(!localtm.profile);
7320     /* Process needFlush requests now. */
7321     if (localtm.needFlush) {
7322         ResetJIT(cx, &localtm, FR_DEEP_BAIL);
7323         return ARECORD_ABORTED;
7325     JS_ASSERT(!fragment->lastIns);
7328      * Clear one-shot state used to communicate between record_JSOP_CALL and post-
7329      * opcode-case-guts record hook (record_NativeCallComplete).
7331     pendingSpecializedNative = NULL;
7333     pendingGlobalSlotsToSet.clear();
7335     /* Handle one-shot request from finishGetProp or INSTANCEOF to snapshot post-op state and guard. */
7336     if (pendingGuardCondition) {
7337         LIns* cond = pendingGuardCondition;
7338         bool expected = true;
7340         /* Put 'cond' in a form suitable for a guard/branch condition if it's not already. */
7341         ensureCond(&cond, &expected);
7342         guard(expected, cond, STATUS_EXIT);
7343         pendingGuardCondition = NULL;
7346     /* Handle one-shot request to unbox the result of a property get or ObjectToIterator. */
7347     if (pendingUnboxSlot) {
7348         LIns* val_ins = get(pendingUnboxSlot);
7350          * We need to know from where to unbox the value. Since pendingUnboxSlot
7351          * is only set in finishGetProp, we can depend on LIns* tracked for
7352          * pendingUnboxSlot to have this information.
7354         LIns* unboxed_ins = unbox_value(*pendingUnboxSlot,
7355                                         AnyAddress(val_ins->oprnd1(), val_ins->disp()),
7356                                         snapshot(BRANCH_EXIT));
7357         set(pendingUnboxSlot, unboxed_ins);
7358         pendingUnboxSlot = 0;
// Debug spew: disassemble the bytecode being recorded.
7362         if (LogController.lcbits & LC_TMRecorder) {
7363             debug_only_print0(LC_TMRecorder, "\n");
7364             js_Disassemble1(cx, cx->fp()->script(), cx->regs->pc,
7365                             cx->fp()->hasImacropc()
7366                             ? 0 : cx->regs->pc - cx->fp()->script()->code,
7367                             !cx->fp()->hasImacropc(), stdout);
7372      * If op is not a break or a return from a loop, continue recording and
7373      * follow the trace. We check for imacro-calling bytecodes inside each
7374      * switch case to resolve the if (JSOP_IS_IMACOP(x)) conditions at compile
7378     AbortableRecordingStatus status;
7380     bool wasInImacro = (cx->fp()->hasImacropc());
7384         AbortRecording(cx, "unsupported opcode");
7385         status = ARECORD_ERROR;
// One switch case per opcode, generated from jsopcode.tbl.
7387 # define OPDEF(op,val,name,token,length,nuses,ndefs,prec,format)              \
7390         status = this->record_##op();                                         \
7392 # include "jsopcode.tbl"
7396     /* N.B. |this| may have been deleted. */
7398     if (!JSOP_IS_IMACOP(op)) {
7399         JS_ASSERT(status != ARECORD_IMACRO);
7400         JS_ASSERT_IF(!wasInImacro, !localcx->fp()->hasImacropc());
7403     if (localtm.recorder) {
7404         JS_ASSERT(status != ARECORD_ABORTED);
7405         JS_ASSERT(localtm.recorder == this);
7407         /* |this| recorder completed, but a new one started; keep recording. */
7408         if (status == ARECORD_COMPLETED)
7409             return ARECORD_CONTINUE;
7411         /* Handle lazy aborts; propagate the 'error' status. */
7412         if (StatusAbortsRecorderIfActive(status)) {
7413             AbortRecording(cx, js_CodeName[op]);
7414             return status == ARECORD_ERROR ? ARECORD_ERROR : ARECORD_ABORTED;
7417         if (outOfMemory() || OverfullJITCache(cx, &localtm)) {
7418             ResetJIT(cx, &localtm, FR_OOM);
7421              * If the status returned was ARECORD_IMACRO, then we just
7422              * changed cx->regs, we need to tell the interpreter to sync
7423              * its local variables.
7425             return status == ARECORD_IMACRO ? ARECORD_IMACRO_ABORTED : ARECORD_ABORTED;
7428     JS_ASSERT(status == ARECORD_COMPLETED ||
7429               status == ARECORD_ABORTED ||
7430               status == ARECORD_ERROR);
/*
 * Abort the active recording, passing |reason| to finishAbort.
 * NOTE(review): two return paths are visible; the branch between them
 * (presumably a DEBUG/non-DEBUG split, where the "[no reason]" path drops
 * the reason string) is elided from this view -- confirm in the full file.
 */
7435 JS_REQUIRES_STACK TraceRecorder::AbortResult
7436 AbortRecording(JSContext* cx, const char* reason)
7439     JS_ASSERT(TRACE_RECORDER(cx));
7440     return TRACE_RECORDER(cx)->finishAbort(reason);
7442     return TRACE_RECORDER(cx)->finishAbort("[no reason]");
#if defined NANOJIT_IA32
/*
 * Runtime SSE2 detection for x86 (name inferred from the CheckForSSE2()
 * call site in InitJIT). Honors the X86_FORCE_SSE2 environment override,
 * otherwise executes CPUID leaf 1 and tests the SSE2 feature flag
 * (EDX bit 26). Per-compiler inline-asm variants follow.
 */
7450     char *c = getenv("X86_FORCE_SSE2");
7452         return (!strcmp(c, "true") ||
7457 #if defined _MSC_VER
7466 #elif defined __GNUC__
// On gcc we must preserve ebx (PIC register), so stash it in esi around CPUID.
7467     asm("xchg %%esi, %%ebx\n" /* we can't clobber ebx on gcc (PIC register) */
7468         "mov $0x01, %%eax\n"
7471         "xchg %%esi, %%ebx\n"
7473         : /* We have no inputs */
7474         : "%eax", "%esi", "%ecx", "%edx"
7476 #elif defined __SUNPRO_C || defined __SUNPRO_CC
7478         "mov $0x01, %%eax\n"
7482         : /* We have no inputs */
// CPUID.1:EDX bit 26 is the SSE2 feature flag.
7486     return (features & (1<<26)) != 0;
#if defined(NANOJIT_ARM)
/*
 * ARM CPU feature detection. Two strategies are visible in this (elided)
 * region:
 *  - WinCE: probe architecture level by executing trial instructions
 *    (js_arm_try_armv5/6/7_op, js_arm_try_vfp_op from jswince.asm) under
 *    SEH, catching EXCEPTION_ILLEGAL_INSTRUCTION; debugger exception
 *    notifications are suppressed around the probes via TLSSLOT_KERNEL.
 *  - Linux/gcc: parse /proc/cpuinfo and /proc/self/auxv (AT_HWCAP,
 *    AT_PLATFORM) for architecture version, VFP, NEON and iWMMXt, with
 *    ARM_FORCE_* environment overrides and a scratchbox escape hatch.
 * Fallback stubs assume ARMv4 with no VFP.
 */
7492 #if defined(_MSC_VER) && defined(WINCE)
7494 // these come in from jswince.asm
7495 extern "C" int js_arm_try_armv5_op();
7496 extern "C" int js_arm_try_armv6_op();
7497 extern "C" int js_arm_try_armv7_op();
7498 extern "C" int js_arm_try_vfp_op();
// Start from the ARMv4 baseline and bump the level for each probe that runs.
7503     unsigned int arch = 4;
7505         js_arm_try_armv5_op();
7507         js_arm_try_armv6_op();
7509         js_arm_try_armv7_op();
7511     } __except(GetExceptionCode() == EXCEPTION_ILLEGAL_INSTRUCTION) {
7519 #ifdef WINCE_WINDOWS_MOBILE
7524         js_arm_try_vfp_op();
7526     } __except(GetExceptionCode() == EXCEPTION_ILLEGAL_INSTRUCTION) {
7533 #define HAVE_ENABLE_DISABLE_DEBUGGER_EXCEPTIONS 1
7535 /* See "Suppressing Exception Notifications while Debugging", at
7536  * http://msdn.microsoft.com/en-us/library/ms924252.aspx
7539 disable_debugger_exceptions()
7541     // 2 == TLSSLOT_KERNEL
7542     DWORD kctrl = (DWORD) TlsGetValue(2);
7543     // 0x12 = TLSKERN_NOFAULT | TLSKERN_NOFAULTMSG
7545     TlsSetValue(2, (LPVOID) kctrl);
7549 enable_debugger_exceptions()
7551     // 2 == TLSSLOT_KERNEL
7552     DWORD kctrl = (DWORD) TlsGetValue(2);
7553     // 0x12 = TLSKERN_NOFAULT | TLSKERN_NOFAULTMSG
7555     TlsSetValue(2, (LPVOID) kctrl);
7558 #elif defined(__GNUC__) && defined(AVMPLUS_LINUX)
7560 // Assume ARMv4 by default.
7561 static unsigned int arm_arch = 4;
7562 static bool arm_has_vfp = false;
7563 static bool arm_has_neon = false;
7564 static bool arm_has_iwmmxt = false;
7565 static bool arm_tests_initialized = false;
7568 // we're actually reading /proc/cpuinfo, but oh well
7574     const char* ver_token = "CPU architecture: ";
7575     FILE* f = fopen("/proc/cpuinfo", "r");
// NOTE(review): fopen/fread results are not checked in the visible lines.
7576     fread(buf, sizeof(char), 1024, f);
7578     pos = strstr(buf, ver_token);
7580         int ver = *(pos + strlen(ver_token)) - '0';
7583     arm_has_neon = strstr(buf, "neon") != NULL;
7584     arm_has_vfp = strstr(buf, "vfp") != NULL;
7585     arm_has_iwmmxt = strstr(buf, "iwmmxt") != NULL;
7586     arm_tests_initialized = true;
// Preferred path: read hwcap/platform records from the ELF auxiliary vector.
7597     fd = open("/proc/self/auxv", O_RDONLY);
7599         while (read(fd, &aux, sizeof(Elf32_auxv_t))) {
7600             if (aux.a_type == AT_HWCAP) {
7601                 uint32_t hwcap = aux.a_un.a_val;
7602                 if (getenv("ARM_FORCE_HWCAP"))
7603                     hwcap = strtoul(getenv("ARM_FORCE_HWCAP"), NULL, 0);
7604                 else if (getenv("_SBOX_DIR"))
7605                     continue;  // Ignore the rest, if we're running in scratchbox
7606                 // hardcode these values to avoid depending on specific versions
7607                 // of the hwcap header, e.g. HWCAP_NEON
7608                 arm_has_vfp = (hwcap & 64) != 0;
7609                 arm_has_iwmmxt = (hwcap & 512) != 0;
7610                 // this flag is only present on kernel 2.6.29
7611                 arm_has_neon = (hwcap & 4096) != 0;
7612             } else if (aux.a_type == AT_PLATFORM) {
7613                 const char *plat = (const char*) aux.a_un.a_val;
7614                 if (getenv("ARM_FORCE_PLATFORM"))
7615                     plat = getenv("ARM_FORCE_PLATFORM");
7616                 else if (getenv("_SBOX_DIR"))
7617                     continue;  // Ignore the rest, if we're running in scratchbox
7618                 // The platform string has the form "v[0-9][lb]". The "l" or "b" indicate little-
7619                 // or big-endian variants and the digit indicates the version of the platform.
7620                 // We can only accept ARMv4 and above, but allow anything up to ARMv9 for future
7621                 // processors. Architectures newer than ARMv7 are assumed to be
7622                 // backwards-compatible with ARMv7.
7623                 if ((plat[0] == 'v') &&
7624                     (plat[1] >= '4') && (plat[1] <= '9') &&
7625                     ((plat[2] == 'l') || (plat[2] == 'b')))
7627                     arm_arch = plat[1] - '0';
7633     // if we don't have 2.6.29, we have to do this hack; set
7634     // the env var to trust HWCAP.
7635     if (!getenv("ARM_TRUST_HWCAP") && (arm_arch >= 7))
7636         arm_has_neon = true;
7639     arm_tests_initialized = true;
// Lazy accessors: run the detection once, then serve cached results.
7647     if (!arm_tests_initialized)
7656     if (!arm_tests_initialized)
7663 #warning Not sure how to check for architecture variant on your platform. Assuming ARMv4.
7665 arm_check_arch() { return 4; }
7667 arm_check_vfp() { return false; }
7670 #ifndef HAVE_ENABLE_DISABLE_DEBUGGER_EXCEPTIONS
// No-op stubs for platforms without the WinCE debugger-exception toggles.
7672 enable_debugger_exceptions() { }
7674 disable_debugger_exceptions() { }
7677 #endif /* NANOJIT_ARM */
/*
 * Set the per-thread cap on JIT code-cache size; OverfullJITCache compares
 * the combined allocator sizes against this limit.
 */
7684 SetMaxCodeCacheBytes(JSContext* cx, uint32 bytes)
7690     JS_THREAD_DATA(cx)->maxCodeCacheBytes = bytes;
/*
 * Initialize a TraceMonitor: debug logging / fragment profiling, one-time
 * CPU feature detection (SSE2 on x86, VFP/arch level on ARM), and the
 * monitor's allocators, caches and tables. On allocation failure, control
 * transfers to an (elided) error label; see the note before FinishJIT about
 * cleanup responsibilities.
 * NOTE(review): the return type/value and the error label are not visible
 * in this elided view.
 */
7694 InitJIT(TraceMonitor *tm)
7696 #if defined JS_JIT_SPEW
7697     tm->profAlloc = NULL;
7698     /* Set up debug logging. */
7699     if (!did_we_set_up_debug_logging) {
7700         InitJITLogController();
7701         did_we_set_up_debug_logging = true;
7703     /* Set up fragprofiling, if required. */
7704     if (LogController.lcbits & LC_FragProfile) {
7705         tm->profAlloc = js_new<VMAllocator>((char*)NULL, 0); /* no reserve needed in debug builds */
7708         tm->profTab = new (*tm->profAlloc) FragStatsMap(*tm->profAlloc);
7712     PodZero(&LogController);
// One-time, process-wide CPU feature detection.
7715     if (!did_we_check_processor_features) {
7716 #if defined NANOJIT_IA32
7717         avmplus::AvmCore::config.i386_use_cmov =
7718             avmplus::AvmCore::config.i386_sse2 = CheckForSSE2();
7719         avmplus::AvmCore::config.i386_fixed_esp = true;
7721 #if defined NANOJIT_ARM
// Suppress WinCE debugger exception notifications around the trial-instruction probes.
7723         disable_debugger_exceptions();
7725         bool            arm_vfp     = arm_check_vfp();
7726         unsigned int    arm_arch    = arm_check_arch();
7728         enable_debugger_exceptions();
7730         avmplus::AvmCore::config.arm_vfp        = arm_vfp;
7731         avmplus::AvmCore::config.soft_float     = !arm_vfp;
7732         avmplus::AvmCore::config.arm_arch       = arm_arch;
7734         // Sanity-check the configuration detection.
7735         // * We don't understand architectures prior to ARMv4.
7736         JS_ASSERT(arm_arch >= 4);
7738         did_we_check_processor_features = true;
7741 #define CHECK_ALLOC(lhs, rhs) \
7742     do { lhs = (rhs); if (!lhs) goto error; } while (0)
7744     CHECK_ALLOC(tm->oracle, js_new<Oracle>());
7748     CHECK_ALLOC(tm->recordAttempts, js_new<RecordAttemptMap>());
7749     if (!tm->recordAttempts->init(PC_HASH_COUNT))
7752     CHECK_ALLOC(tm->loopProfiles, js_new<LoopProfileMap>());
7753     if (!tm->loopProfiles->init(PC_HASH_COUNT))
// Reserve backing memory up front so the VMAllocators own their reserves.
7758     char *dataReserve, *traceReserve, *tempReserve;
7759     CHECK_ALLOC(dataReserve, (char *)js_malloc(DataReserveSize));
7760     CHECK_ALLOC(traceReserve, (char *)js_malloc(TraceReserveSize));
7761     CHECK_ALLOC(tempReserve, (char *)js_malloc(TempReserveSize));
7762     CHECK_ALLOC(tm->dataAlloc, js_new<VMAllocator>(dataReserve, DataReserveSize));
7763     CHECK_ALLOC(tm->traceAlloc, js_new<VMAllocator>(traceReserve, TraceReserveSize));
7764     CHECK_ALLOC(tm->tempAlloc, js_new<VMAllocator>(tempReserve, TempReserveSize));
7765     CHECK_ALLOC(tm->codeAlloc, js_new<CodeAlloc>());
7766     CHECK_ALLOC(tm->frameCache, js_new<FrameInfoCache>(tm->dataAlloc));
7767     CHECK_ALLOC(tm->storage, js_new<TraceNativeStorage>());
7768     CHECK_ALLOC(tm->cachedTempTypeMap, js_new<TypeMap>((Allocator*)NULL, tm->oracle));
7770     verbose_only( tm->branches = NULL; )
7773     debug_only(PodZero(&jitstats));
7777     /* Architecture properties used by test cases. */
7778     jitstats.archIsIA32 = 0;
7779     jitstats.archIs64BIT = 0;
7780     jitstats.archIsARM = 0;
7781     jitstats.archIsSPARC = 0;
7782     jitstats.archIsPPC = 0;
7783 #if defined NANOJIT_IA32
7784     jitstats.archIsIA32 = 1;
7786 #if defined NANOJIT_64BIT
7787     jitstats.archIs64BIT = 1;
7789 #if defined NANOJIT_ARM
7790     jitstats.archIsARM = 1;
7792 #if defined NANOJIT_SPARC
7793     jitstats.archIsSPARC = 1;
7795 #if defined NANOJIT_PPC
7796     jitstats.archIsPPC = 1;
7798 #if defined NANOJIT_X64
7799     jitstats.archIsAMD64 = 1;
7803     if (!tm->tracedScripts.init())
7808     /* On error, don't rely on the compartment destructor being called. */
7814  * NB: FinishJIT needs to work even when InitJIT fails. Each pointer must be
7815  * checked before it's dereferenced, as it may not have been allocated.
/*
 * Tear down a TraceMonitor: dump jitstats (debug builds), free the tables
 * and oracle, finalize fragment-profiling data, and delete every allocator
 * and cache owned by the monitor, nulling each pointer as it goes.
 */
7818 FinishJIT(TraceMonitor *tm)
7820     JS_ASSERT(!tm->recorder);
7821     JS_ASSERT(!tm->profile);
// Debug-build stats dump, generated from jitstats.tbl.
7824     if (jitstats.recorderStarted) {
7826         debug_only_print0(LC_TMStats, "recorder");
7827 #define RECORDER_JITSTAT(_ident, _name)                             \
7828         debug_only_printf(LC_TMStats, "%c " _name "(%llu)", sep,    \
7829                           (unsigned long long int)jitstats._ident); \
7831 #define JITSTAT(x) /* nothing */
7832 #include "jitstats.tbl"
7834 #undef RECORDER_JITSTAT
7835         debug_only_print0(LC_TMStats, "\n");
7838         debug_only_print0(LC_TMStats, "monitor");
7839 #define MONITOR_JITSTAT(_ident, _name)                              \
7840         debug_only_printf(LC_TMStats, "%c " _name "(%llu)", sep,    \
7841                           (unsigned long long int)jitstats._ident); \
7843 #define JITSTAT(x) /* nothing */
7844 #include "jitstats.tbl"
7846 #undef MONITOR_JITSTAT
7847         debug_only_print0(LC_TMStats, "\n");
7851     js_delete(tm->recordAttempts);
7852     js_delete(tm->loopProfiles);
7853     js_delete(tm->oracle);
7856     // Recover profiling data from expiring Fragments, and display
7858     if (LogController.lcbits & LC_FragProfile) {
7860         for (Seq<Fragment*>* f = tm->branches; f; f = f->tail)
7861             FragProfiling_FragFinalizer(f->head, tm);
// Finalize every tree fragment and all its peers before showing results.
7863         for (size_t i = 0; i < FRAGMENT_TABLE_SIZE; ++i) {
7864             for (TreeFragment *f = tm->vmfragments[i]; f; f = f->next) {
7865                 JS_ASSERT(f->root == f);
7866                 for (TreeFragment *p = f; p; p = p->peer)
7867                     FragProfiling_FragFinalizer(p, tm);
7872             FragProfiling_showResults(tm);
7873         js_delete(tm->profAlloc);
7876         NanoAssert(!tm->profTab);
7877         NanoAssert(!tm->profAlloc);
7881     PodArrayZero(tm->vmfragments);
// Delete-and-null each owned allocator/cache so a double FinishJIT is safe.
7883     js_delete(tm->frameCache);
7884     tm->frameCache = NULL;
7886     js_delete(tm->codeAlloc);
7887     tm->codeAlloc = NULL;
7889     js_delete(tm->dataAlloc);
7890     tm->dataAlloc = NULL;
7892     js_delete(tm->traceAlloc);
7893     tm->traceAlloc = NULL;
7895     js_delete(tm->tempAlloc);
7896     tm->tempAlloc = NULL;
7898     js_delete(tm->storage);
7901     js_delete(tm->cachedTempTypeMap);
7902     tm->cachedTempTypeMap = NULL;
/*
 * Remove all trace-monitor state associated with |script| before it is
 * destroyed/GC'd: loop profiles, the traced-scripts entry, every
 * TreeFragment whose ip lies inside the script's bytecode, and record
 * attempts keyed by pcs in the script.
 */
7905 JS_REQUIRES_STACK void
7906 PurgeScriptFragments(TraceMonitor* tm, JSScript* script)
7908     debug_only_printf(LC_TMTracer,
7909                       "Purging fragments for JSScript %p.\n", (void*)script);
7911     /* A recorder script is being evaluated and can not be destroyed or GC-ed. */
7912     JS_ASSERT_IF(tm->recorder,
7913                  JS_UPTRDIFF(tm->recorder->getTree()->ip, script->code) >= script->length);
// JS_UPTRDIFF(pc, code) < length is the in-script membership test used throughout.
7915     for (LoopProfileMap::Enum e(*tm->loopProfiles); !e.empty(); e.popFront()) {
7916         if (JS_UPTRDIFF(e.front().key, script->code) < script->length)
7920     TracedScriptSet::Ptr found = tm->tracedScripts.lookup(script);
7923         tm->tracedScripts.remove(found);
7925     for (size_t i = 0; i < FRAGMENT_TABLE_SIZE; ++i) {
7926         TreeFragment** fragp = &tm->vmfragments[i];
7927         while (TreeFragment* frag = *fragp) {
7928             if (JS_UPTRDIFF(frag->ip, script->code) < script->length) {
7929                 /* This fragment is associated with the script. */
7930                 debug_only_printf(LC_TMTracer,
7931                                   "Disconnecting TreeFragment %p "
7932                                   "with ip %p, in range [%p,%p).\n",
7933                                   (void*)frag, frag->ip, script->code,
7934                                   script->code + script->length);
// Unlink the fragment from the hash chain, then finalize it and its peers.
7936                 JS_ASSERT(frag->root == frag);
7937                 *fragp = frag->next;
7939                     verbose_only( FragProfiling_FragFinalizer(frag, tm); )
7941                 } while ((frag = frag->peer) != NULL);
7944             fragp = &frag->next;
7948     RecordAttemptMap &table = *tm->recordAttempts;
7949     for (RecordAttemptMap::Enum e(table); !e.empty(); e.popFront()) {
7950         if (JS_UPTRDIFF(e.front().key, script->code) < script->length)
/*
 * Return true when the monitor's combined code/data/trace allocator usage
 * exceeds the per-thread maxCodeCacheBytes limit ("condition 2" below);
 * true OS-level OOM ("condition 1") is tracked separately via the
 * allocators' outOfMemory flag. Callers purge the cache on a true return.
 */
7956 OverfullJITCache(JSContext *cx, TraceMonitor* tm)
7959      * You might imagine the outOfMemory flag on the allocator is sufficient
7960      * to model the notion of "running out of memory", but there are actually
7961      * two separate issues involved:
7963      *  1. The process truly running out of memory: malloc() or mmap()
7966      *  2. The limit we put on the "intended size" of the tracemonkey code
7967      *     cache, in pages, has been exceeded.
7969      * Condition 1 doesn't happen very often, but we're obliged to try to
7970      * safely shut down and signal the rest of spidermonkey when it
7971      * does. Condition 2 happens quite regularly.
7973      * Presently, the code in this file doesn't check the outOfMemory condition
7974      * often enough, and frequently misuses the unchecked results of
7975      * lirbuffer insertions on the assumption that it will notice the
7976      * outOfMemory flag "soon enough" when it returns to the monitorRecording
7977      * function. This turns out to be a false assumption if we use outOfMemory
7978      * to signal condition 2: we regularly provoke "passing our intended
7979      * size" and regularly fail to notice it in time to prevent writing
7980      * over the end of an artificially self-limited LIR buffer.
7982      * To mitigate, though not completely solve, this problem, we're
7983      * modeling the two forms of memory exhaustion *separately* for the
7984      * time being: condition 1 is handled by the outOfMemory flag inside
7985      * nanojit, and condition 2 is being handled independently *here*. So
7986      * we construct our allocators to use all available memory they like,
7987      * and only report outOfMemory to us when there is literally no OS memory
7988      * left. Merely purging our cache when we hit our highwater mark is
7989      * handled by the (few) callers of this function.
7992     jsuint maxsz = JS_THREAD_DATA(cx)->maxCodeCacheBytes;
7993     return (tm->codeAlloc->size() + tm->dataAlloc->size() + tm->traceAlloc->size() > maxsz);
/*
 * Called from a _FAIL native while JIT code is on the stack: leave the
 * trace immediately. Reconstructs the interpreter state via LeaveTree,
 * clears bailExit, sets BUILTIN_BAILED in builtinStatus (which the trace
 * will observe at its next STATUS_EXIT guard), and samples deepBailSp so
 * LeaveTree later knows which native-stack slots are still valid to copy.
 */
7996 JS_FORCES_STACK JS_FRIEND_API(void)
7997 DeepBail(JSContext *cx)
7999     JS_ASSERT(JS_ON_TRACE(cx));
8002      * Exactly one context on the current thread is on trace. Find out which
8003      * one. (Most callers cannot guarantee that it's cx.)
8005     TraceMonitor *tm = JS_TRACE_MONITOR_ON_TRACE(cx);
8007     JS_ASSERT(JS_THREAD_DATA(cx)->profilingCompartment == NULL);
// We are no longer executing trace code on this thread.
8008     JS_THREAD_DATA(cx)->onTraceCompartment = NULL;
8010     /* It's a bug if a non-FAIL_STATUS builtin gets here. */
8011     JS_ASSERT(tm->bailExit);
8014     debug_only_print0(LC_TMTracer, "Deep bail.\n");
8015     LeaveTree(tm, *tm->tracerState, tm->bailExit);
8016     tm->bailExit = NULL;
8018     TracerState* state = tm->tracerState;
8019     state->builtinStatus |= BUILTIN_BAILED;
8022      * Between now and the LeaveTree in ExecuteTree, |tm->storage| may be
8023      * reused if another trace executes before the currently executing native
8024      * returns. If this happens, at least some of the native stack will be
8025      * clobbered, potentially all of it. This is called a clobbering deep bail.
8027      * The nested trace will complete before we return to the deep-bailed one,
8028      * hence the invariant is maintained that only one trace uses |tm->storage|
8031      * When we return to the deep-bailed trace, it will very soon reach a
8032      * STATUS_EXIT guard and bail out. Most of the native stack will just be
8033      * thrown away. However, LeaveTree will copy a few slots from the top of
8034      * the native stack to the interpreter stack--only those slots written by
8035      * the current bytecode instruction. To make sure LeaveTree has correct
8036      * data to copy from the native stack to the operand stack, we have this
8037      * rule: every caller of enterDeepBailCall must ensure that between the
8038      * deep bail call and the STATUS_EXIT guard, all those slots are written.
8040      * The rule is a bit subtle. For example, JSOP_MOREITER uses a slot which
8041      * it never writes to; in order to satisfy the above rule,
8042      * record_JSOP_MOREITER emits code to write the value back to the slot
8045     state->deepBailSp = state->sp;
// argval: reference to the n-th formal argument of the current frame
// (asserted in range).
8048 JS_REQUIRES_STACK Value&
8049 TraceRecorder::argval(unsigned n) const
8051 JS_ASSERT(n < cx->fp()->numFormalArgs());
8052 return cx->fp()->formalArg(n);
// varval: reference to the n-th local slot of the current frame
// (asserted in range).
8055 JS_REQUIRES_STACK Value&
8056 TraceRecorder::varval(unsigned n) const
8058 JS_ASSERT(n < cx->fp()->numSlots());
8059 return cx->fp()->slots()[n];
// stackval: reference to the operand-stack value at offset n from sp;
// n is typically negative (e.g. -1 is the top of stack).
8062 JS_REQUIRES_STACK Value&
8063 TraceRecorder::stackval(int n) const
8065 return cx->regs->sp[n];
// updateAtoms(): refresh the recorder's cached atom base, constant pool, and
// strict-mode LIR constant from the current frame's script. The consts
// pointer is cleared (elided branch -- presumably 0/NULL) when inside an
// imacro or when the script has no valid const section; TODO confirm the
// elided true-arm at original line 8074.
8068 JS_REQUIRES_STACK void
8069 TraceRecorder::updateAtoms()
8071 JSScript *script = cx->fp()->script();
8072 atoms = FrameAtomBase(cx, cx->fp());
8073 consts = (cx->fp()->hasImacropc() || !JSScript::isValidOffset(script->constOffset))
8075 : script->consts()->vector;
8076 strictModeCode_ins = w.name(w.immi(script->strictModeCode), "strict");
// updateAtoms(script): same caches, but sourced directly from the given
// script rather than the current frame.
8079 JS_REQUIRES_STACK void
8080 TraceRecorder::updateAtoms(JSScript *script)
8082 atoms = script->atomMap.vector;
8083 consts = JSScript::isValidOffset(script->constOffset) ? script->consts()->vector : 0;
8084 strictModeCode_ins = w.name(w.immi(script->strictModeCode), "strict");
8088 * Generate LIR to compute the scope chain.
8090 JS_REQUIRES_STACK LIns*
8091 TraceRecorder::scopeChain()
// Function frames read the scope chain off the frame object; otherwise we
// fall back to the scope chain captured at trace entry.
8093 return cx->fp()->isFunctionFrame()
8094 ? getFrameObjPtr(cx->fp()->addressOfScopeChain())
8095 : entryScopeChain();
8099 * Generate LIR to compute the scope chain on entry to the trace. This is
8100 * generally useful only for getting to the global object, because only
8101 * the global object is guaranteed to be present.
8103 JS_REQUIRES_STACK LIns*
8104 TraceRecorder::entryScopeChain() const
8106 return w.ldpStackFrameScopeChain(entryFrameIns());
8110 * Generate LIR to compute the stack frame on entry to the trace.
8112 JS_REQUIRES_STACK LIns*
8113 TraceRecorder::entryFrameIns() const
// Loads cx->regs, then the fp field off it, entirely in emitted LIR.
8115 return w.ldpFrameFp(w.ldpContextField(regs));
8119 * Return the frame of a call object if that frame is part of the current
8120 * trace. |depthp| is an optional outparam: if it is non-null, it will be
8121 * filled in with the depth of the call object's frame relevant to cx->fp().
// NOTE(review): the loop body here is heavily elided (original lines
// 8129-8139 missing); the visible code walks fp->prev() up to callDepth
// frames, presumably comparing each against |ofp| -- confirm against the
// full source.
8123 JS_REQUIRES_STACK JSStackFrame*
8124 TraceRecorder::frameIfInRange(JSObject* obj, unsigned* depthp) const
8126 JSStackFrame* ofp = (JSStackFrame*) obj->getPrivate();
8127 JSStackFrame* fp = cx->fp();
8128 for (unsigned depth = 0; depth <= callDepth; ++depth) {
8134 if (!(fp = fp->prev()))
// CallInfo descriptors for the GetClosureVar/GetClosureArg trace-time
// helpers used by callProp() below; both return UINT32 and take
// (JSContext*, JSObject*, ClosureVarInfo*, double*).
8140 JS_DEFINE_CALLINFO_4(extern, UINT32, GetClosureVar, CONTEXT, OBJECT, CVIPTR, DOUBLEPTR,
8141 0, ACCSET_STORE_ANY)
8142 JS_DEFINE_CALLINFO_4(extern, UINT32, GetClosureArg, CONTEXT, OBJECT, CVIPTR, DOUBLEPTR,
8143 0, ACCSET_STORE_ANY)
8146 * Search the scope chain for a property lookup operation at the current PC and
8147 * generate LIR to access the given property. Return RECORD_CONTINUE on success,
8148 * otherwise abort and return RECORD_STOP. There are 3 outparams:
8150 * vp the address of the current property value
8151 * ins LIR instruction representing the property value on trace
8152 * NameResult describes how to look up name; see comment for NameResult in jstracer.h
8154 JS_REQUIRES_STACK AbortableRecordingStatus
8155 TraceRecorder::scopeChainProp(JSObject* chainHead, Value*& vp, LIns*& ins, NameResult& nr,
8156 JSObject** scopeObjp)
8158 JS_ASSERT(chainHead == &cx->fp()->scopeChain());
8159 JS_ASSERT(chainHead != globalObj);
// Keep a local reference: js_FindProperty below can reenter the interpreter
// and tear down this recorder.
8161 TraceMonitor &localtm = *traceMonitor;
8163 JSAtom* atom = atoms[GET_INDEX(cx->regs->pc)];
8166 JSObject *obj = chainHead;
8167 if (!js_FindProperty(cx, ATOM_TO_JSID(atom), &obj, &obj2, &prop))
8168 RETURN_ERROR_A("error in js_FindProperty");
8170 /* js_FindProperty can reenter the interpreter and kill |this|. */
8171 if (!localtm.recorder)
8172 return ARECORD_ABORTED;
8175 RETURN_STOP_A("failed to find name in non-global scope chain");
8180 if (obj == globalObj) {
8181 // Even if the property is on the global object, we must guard against
8182 // the creation of properties that shadow the property in the middle
8183 // of the scope chain.
8185 if (cx->fp()->isFunctionFrame()) {
8186 // Skip any Call object when inside a function. Any reference to a
8187 // Call name the compiler resolves statically and we do not need
8188 // to match shapes of the Call objects.
8189 chainHead = cx->fp()->callee().getParent();
8190 head_ins = w.ldpObjParent(get(&cx->fp()->calleeValue()));
8192 head_ins = scopeChain();
8195 CHECK_STATUS_A(traverseScopeChain(chainHead, head_ins, obj, obj_ins));
8198 RETURN_STOP_A("prototype property");
// Global-object hit: the slot must be valid and lazily importable before
// we can hand back a direct pointer into the global's slots.
8200 Shape* shape = (Shape*) prop;
8201 if (!isValidSlot(obj, shape))
8202 return ARECORD_STOP;
8203 if (!lazilyImportGlobalSlot(shape->slot))
8204 RETURN_STOP_A("lazy import of global slot failed");
8205 vp = &obj->getSlotRef(shape->slot);
8208 return ARECORD_CONTINUE;
// Non-global hit: only own properties of active Call objects are traceable.
8211 if (obj == obj2 && obj->isCall()) {
8212 AbortableRecordingStatus status =
8213 InjectStatus(callProp(obj, prop, ATOM_TO_JSID(atom), vp, ins, nr));
8217 RETURN_STOP_A("fp->scopeChain is not global or active call object");
8221 * Generate LIR to access a property of a Call object.
// Three cases are visible below (with elided glue between them):
// 1) the Call's frame is on-trace: read directly from the stack frame;
// 2) the Call is off-trace but its frame may appear in an outer trace:
//    call GetClosureArg/GetClosureVar at runtime and type-guard the result;
// 3) writes to read-only or dynamic properties are rejected up front.
8223 JS_REQUIRES_STACK RecordingStatus
8224 TraceRecorder::callProp(JSObject* obj, JSProperty* prop, jsid id, Value*& vp,
8225 LIns*& ins, NameResult& nr)
8227 Shape *shape = (Shape*) prop;
8229 JSOp op = JSOp(*cx->regs->pc);
8230 uint32 setflags = (js_CodeSpec[op].format & (JOF_SET | JOF_INCDEC | JOF_FOR));
8231 if (setflags && !shape->writable())
8232 RETURN_STOP("writing to a read-only property");
// shortid encodes the arg/var index for Call-object properties.
8234 uintN slot = uint16(shape->shortid);
8237 JSStackFrame* cfp = (JSStackFrame*) obj->getPrivate();
8239 if (shape->getterOp() == GetCallArg) {
8240 JS_ASSERT(slot < cfp->numFormalArgs());
8241 vp = &cfp->formalArg(slot);
8243 } else if (shape->getterOp() == GetCallVar ||
8244 shape->getterOp() == GetCallVarChecked) {
8245 JS_ASSERT(slot < cfp->numSlots());
8246 vp = &cfp->slots()[slot];
8249 RETURN_STOP("dynamic property of Call object");
8252 // Now assert that our use of shape->shortid was in fact kosher.
8253 JS_ASSERT(shape->hasShortID());
8255 if (frameIfInRange(obj)) {
8256 // At this point we are guaranteed to be looking at an active call oject
8257 // whose properties are stored in the corresponding JSStackFrame.
8260 return RECORD_CONTINUE;
8263 // Call objects do not yet have shape->isMethod() properties, but they
8264 // should. See bug 514046, for which this code is future-proof. Remove
8265 // this comment when that bug is fixed (so, FIXME: 514046).
8269 js_GetPropertyHelper(cx, obj, shape->id,
8270 (op == JSOP_CALLNAME)
8271 ? JSGET_NO_METHOD_BARRIER
8272 : JSGET_METHOD_BARRIER,
8278 JSObject* parent = cx->fp()->callee().getParent();
8279 LIns* parent_ins = w.ldpObjParent(get(&cx->fp()->calleeValue()));
8280 CHECK_STATUS(traverseScopeChain(parent, parent_ins, obj, obj_ins));
8283 // Because the parent guard in guardCallee ensures this Call object
8284 // will be the same object now and on trace, and because once a Call
8285 // object loses its frame it never regains one, on trace we will also
8286 // have a null private in the Call object. So all we need to do is
8287 // write the value to the Call object's slot.
8288 if (shape->getterOp() == GetCallArg) {
8289 JS_ASSERT(slot < ArgClosureTraits::slot_count(obj));
8290 slot += ArgClosureTraits::slot_offset(obj);
8291 } else if (shape->getterOp() == GetCallVar ||
8292 shape->getterOp() == GetCallVarChecked) {
8293 JS_ASSERT(slot < VarClosureTraits::slot_count(obj));
8294 slot += VarClosureTraits::slot_offset(obj);
8296 RETURN_STOP("dynamic property of Call object");
8299 // Now assert that our use of shape->shortid was in fact kosher.
8300 JS_ASSERT(shape->hasShortID());
8302 ins = unbox_slot(obj, obj_ins, slot, snapshot(BRANCH_EXIT));
8304 ClosureVarInfo* cv = new (traceAlloc()) ClosureVarInfo();
8307 cv->callDepth = callDepth;
8310 // Even though the frame is out of range, later we might be called as an
8311 // inner trace such that the target variable is defined in the outer trace
8312 // entry frame. For simplicity, we just fall off trace.
8314 w.eqp(entryFrameIns(), w.ldpObjPrivate(obj_ins)),
// Out-param buffer the closure helper writes the double result into.
8317 LIns* outp = w.allocp(sizeof(double));
8320 w.nameImmpNonGC(cv),
8325 if (shape->getterOp() == GetCallArg) {
8326 ci = &GetClosureArg_ci;
8327 } else if (shape->getterOp() == GetCallVar ||
8328 shape->getterOp() == GetCallVarChecked) {
8329 ci = &GetClosureVar_ci;
8331 RETURN_STOP("dynamic property of Call object");
8334 // Now assert that our use of shape->shortid was in fact kosher.
8335 JS_ASSERT(shape->hasShortID());
8337 LIns* call_ins = w.call(ci, args);
// The helper returns the value's type tag; guard it matches the type seen
// at record time so the unboxed load below stays type-stable.
8339 JSValueType type = getCoercedType(nr.v);
8341 w.name(w.eqi(call_ins, w.immi(type)), "guard(type-stable name access)"),
8343 ins = stackLoad(AllocSlotsAddress(outp), type);
8347 nr.obj_ins = obj_ins;
8349 return RECORD_CONTINUE;
// Tracker accessors: map interpreter argument/local/stack slots to their
// LIR instructions (get) and back (set). The setter bodies for arg() and
// var() are elided in this excerpt; by symmetry with stack() they
// presumably call set() -- confirm against the full source.
8352 JS_REQUIRES_STACK LIns*
8353 TraceRecorder::arg(unsigned n)
8355 return get(&argval(n));
8358 JS_REQUIRES_STACK void
8359 TraceRecorder::arg(unsigned n, LIns* i)
8364 JS_REQUIRES_STACK LIns*
8365 TraceRecorder::var(unsigned n)
8367 return get(&varval(n));
8370 JS_REQUIRES_STACK void
8371 TraceRecorder::var(unsigned n, LIns* i)
8376 JS_REQUIRES_STACK LIns*
8377 TraceRecorder::stack(int n)
8379 return get(&stackval(n));
8382 JS_REQUIRES_STACK void
8383 TraceRecorder::stack(int n, LIns* i)
8385 set(&stackval(n), i);
8388 /* Leave trace iff one operand is negative and the other is non-negative. */
// For each operand: if it is a record-time immediate, assert it is already
// non-negative; otherwise emit a runtime guard (d < 0 exits). The
// surrounding if/else structure is elided in this excerpt.
8389 JS_REQUIRES_STACK void
8390 TraceRecorder::guardNonNeg(LIns* d0, LIns* d1, VMSideExit* exit)
8393 JS_ASSERT(d0->immI() >= 0);
8395 guard(false, w.ltiN(d0, 0), exit);
8398 JS_ASSERT(d1->immI() >= 0);
8400 guard(false, w.ltiN(d1, 0), exit);
// alu: emit LIR for a binary arithmetic op. v0/v1 are the record-time
// double values of the operands, s0/s1 their LIR instructions. Tries to
// demote the operation to int32 (with overflow/neg-zero guards) when both
// operands are promoted int32s, the oracle allows it, and the record-time
// result is itself an integer; otherwise falls back to double math
// (LIR_modd goes through the js_dmod helper).
8403 JS_REQUIRES_STACK LIns*
8404 TraceRecorder::alu(LOpcode v, jsdouble v0, jsdouble v1, LIns* s0, LIns* s1)
8407 * To even consider this operation for demotion, both operands have to be
8408 * integers and the oracle must not give us a negative hint for the
8411 if (!oracle || oracle->isInstructionUndemotable(cx->regs->pc) ||
8412 !IsPromotedInt32(s0) || !IsPromotedInt32(s1)) {
8414 if (v == LIR_modd) {
8415 LIns* args[] = { s1, s0 };
8416 return w.call(&js_dmod_ci, args);
8418 LIns* result = w.ins2(v, s0, s1);
8419 JS_ASSERT_IF(s0->isImmD() && s1->isImmD(), result->isImmD());
// (elided) record-time evaluation of the result r for the demotion checks
// below; division/modulus special cases follow.
8433 if (r == 0.0 && (v0 < 0.0 || v1 < 0.0))
8436 #if defined NANOJIT_IA32 || defined NANOJIT_X64
8443 if (v0 < 0 || v1 == 0 || (s1->isImmD() && v1 < 0))
8445 r = js_dmod(v0, v1);
8453 * The result must be an integer at record time, otherwise there is no
8454 * point in trying to demote it.
8456 if (jsint(r) != r || JSDOUBLE_IS_NEGZERO(r))
8459 LIns* d0 = w.demoteToInt32(s0);
8460 LIns* d1 = w.demoteToInt32(s1);
8463 * Speculatively emit an integer operation, betting that at runtime we
8464 * will get integer results again.
8466 VMSideExit* exit = NULL;
8469 #if defined NANOJIT_IA32 || defined NANOJIT_X64
// Constant-fold when both demoted operands are immediates.
8471 if (d0->isImmI() && d1->isImmI())
8472 return w.i2d(w.immi(jsint(r)));
8474 exit = snapshot(OVERFLOW_EXIT);
8477 * If the divisor is greater than zero its always safe to execute
8478 * the division. If not, we have to make sure we are not running
8479 * into -2147483648 / -1, because it can raise an overflow exception.
8481 if (!d1->isImmI()) {
8482 if (MaybeBranch mbr = w.jt(w.gtiN(d1, 0))) {
8483 guard(false, w.eqi0(d1), exit);
8484 guard(true, w.eqi0(w.andi(w.eqiN(d0, 0x80000000),
8485 w.eqiN(d1, -1))), exit);
8489 if (d1->immI() == -1)
8490 guard(false, w.eqiN(d0, 0x80000000), exit);
8493 result = w.divi(d0, d1);
8495 /* As long as the modulus is zero, the result is an integer. */
8496 guard(true, w.eqi0(w.modi(result)), exit);
8498 /* Don't lose a -0. */
8499 guard(false, w.eqi0(result), exit);
8503 if (d0->isImmI() && d1->isImmI())
8504 return w.i2d(w.immi(jsint(r)));
8506 exit = snapshot(OVERFLOW_EXIT);
8508 /* Make sure we don't trigger division by zero at runtime. */
8510 guard(false, w.eqi0(d1), exit);
8512 result = w.modi(w.divi(d0, d1));
8514 /* If the result is not 0, it is always within the integer domain. */
8515 if (MaybeBranch mbr = w.jf(w.eqi0(result))) {
8517 * If the result is zero, we must exit if the lhs is negative since
8518 * the result is -0 in this case, which is not in the integer domain.
8520 guard(false, w.ltiN(d0, 0), exit);
// Remaining ops (add/sub/mul) are demoted to their int32 forms.
8528 v = arithOpcodeD2I(v);
8529 JS_ASSERT(v == LIR_addi || v == LIR_muli || v == LIR_subi);
8532 * If the operands guarantee that the result will be an integer (e.g.
8533 * z = x * y with 0 <= (x|y) <= 0xffff guarantees z <= fffe0001), we
8534 * don't have to guard against an overflow. Otherwise we emit a guard
8535 * that will inform the oracle and cause a non-demoted trace to be
8536 * attached that uses floating-point math for this operation.
8538 bool needsOverflowCheck = true, needsNegZeroCheck = true;
8539 ChecksRequired(v, d0, d1, &needsOverflowCheck, &needsNegZeroCheck);
8540 if (needsOverflowCheck) {
8541 exit = snapshot(OVERFLOW_EXIT);
8542 result = guard_xov(v, d0, d1, exit);
8544 result = w.ins2(v, d0, d1);
8546 if (needsNegZeroCheck) {
8547 JS_ASSERT(v == LIR_muli);
8549 * Make sure we don't lose a -0. We exit if the result is zero and if
8550 * either operand is negative. We start out using a weaker guard, checking
8551 * if either argument is negative. If this ever fails, we recompile with
8552 * a stronger, but slower, guard.
8554 if (v0 < 0.0 || v1 < 0.0
8555 || !oracle || oracle->isInstructionSlowZeroTest(cx->regs->pc))
8558 exit = snapshot(OVERFLOW_EXIT);
8561 w.eqi0(w.andi(w.eqi0(result),
8562 w.ori(w.ltiN(d0, 0),
8566 guardNonNeg(d0, d1, snapshot(MUL_ZERO_EXIT));
8571 JS_ASSERT_IF(d0->isImmI() && d1->isImmI(), result->isImmI(jsint(r)));
8572 return w.i2d(result);
// d2i: emit LIR converting a double to an ECMA int32. Folds immediates,
// strips redundant i2d/ui2d round-trips, demotes add/sub of promoted
// int32s, and rewrites UnboxDouble/StringToNumber calls into their int32
// counterparts; otherwise falls back to a runtime conversion helper.
// (The return-type line above this signature is elided in this excerpt.)
8576 TraceRecorder::d2i(LIns* d, bool resultCanBeImpreciseIfFractional)
8579 return w.immi(js_DoubleToECMAInt32(d->immD()));
8580 if (d->isop(LIR_i2d) || d->isop(LIR_ui2d)) {
8581 // The d2i(i2d(i)) case is obviously a no-op. (Unlike i2d(d2i(d))!)
8582 // The d2i(ui2d(ui)) case is less obvious, but it is also a no-op.
8583 // For example, 4294967295U has the bit pattern 0xffffffff, and
8584 // d2i(ui2d(4294967295U)) is -1, which also has the bit pattern
8585 // 0xffffffff. Another way to think about it: d2i(ui2d(ui)) is
8586 // equivalent to ui2i(ui); ui2i doesn't exist, but it would be a
8588 // (Note that the above reasoning depends on the fact that d2i()
8589 // always succeeds, ie. it never aborts).
8592 if (d->isop(LIR_addd) || d->isop(LIR_subd)) {
8593 // If 'i32ad' and 'i32bd' are integral doubles that fit in int32s, and
8594 // 'i32ai' and 'i32bi' are int32s with the equivalent values, then
8597 // d2i(addd(i32ad, i32bd)) == addi(i32ai, i32bi)
8599 // If the RHS doesn't overflow, this is obvious. If it does overflow,
8600 // the result will truncate. And the LHS will truncate in exactly the
8601 // same way. So they're always equal.
8602 LIns* lhs = d->oprnd1();
8603 LIns* rhs = d->oprnd2();
8604 if (IsPromotedInt32(lhs) && IsPromotedInt32(rhs))
8605 return w.ins2(arithOpcodeD2I(d->opcode()), w.demoteToInt32(lhs), w.demoteToInt32(rhs));
8608 const CallInfo* ci = d->callInfo();
8609 if (ci == &js_UnboxDouble_ci) {
// 32-bit: value is a (tag, payload) pair; 64-bit: a single word.
8610 #if JS_BITS_PER_WORD == 32
8611 LIns *tag_ins = d->callArgN(0);
8612 LIns *payload_ins = d->callArgN(1);
8613 LIns* args[] = { payload_ins, tag_ins };
8614 return w.call(&js_UnboxInt32_ci, args);
8616 LIns* val_ins = d->callArgN(0);
8617 LIns* args[] = { val_ins };
8618 return w.call(&js_UnboxInt32_ci, args);
8621 if (ci == &js_StringToNumber_ci) {
8622 LIns* ok_ins = w.allocp(sizeof(JSBool));
8623 LIns* args[] = { ok_ins, d->callArgN(1), d->callArgN(0) };
8624 LIns* ret_ins = w.call(&js_StringToInt32_ci, args);
8626 w.name(w.eqi0(w.ldiAlloc(ok_ins)), "guard(oom)"),
8631 return resultCanBeImpreciseIfFractional
8633 : w.call(&js_DoubleToInt32_ci, &d);
// d2u: double -> ECMA uint32 counterpart of d2i. Folds immediates and
// i2d/ui2d round-trips; otherwise calls the runtime helper. (Return-type
// line and the round-trip return statement are elided in this excerpt.)
8637 TraceRecorder::d2u(LIns* d)
8640 return w.immi(js_DoubleToECMAUint32(d->immD()));
8641 if (d->isop(LIR_i2d) || d->isop(LIR_ui2d))
8643 return w.call(&js_DoubleToUint32_ci, &d);
// makeNumberInt32: produce an int32 LIR value from a double one, guarding
// at runtime that the double is integral (round-trip compare); aborts if
// the guard would always fail.
8646 JS_REQUIRES_STACK RecordingStatus
8647 TraceRecorder::makeNumberInt32(LIns* d, LIns** out)
8649 JS_ASSERT(d->isD());
8650 if (IsPromotedInt32(d)) {
8651 *out = w.demoteToInt32(d);
8652 return RECORD_CONTINUE;
8655 // This means "convert double to int if it's integral, otherwise
8656 // exit". We first convert the double to an int, then convert it back
8657 // and exit if the two doubles don't match. If 'f' is a non-integral
8658 // immediate we'll end up aborting.
8659 *out = d2i(d, /* resultCanBeImpreciseIfFractional = */true);
8660 return guard(true, w.eqd(d, w.i2d(*out)), MISMATCH_EXIT, /* abortIfAlwaysExits = */true);
// makeNumberUint32: unsigned analogue of the above (the d2u call that
// fills *out is elided at original line 8676).
8663 JS_REQUIRES_STACK RecordingStatus
8664 TraceRecorder::makeNumberUint32(LIns* d, LIns** out)
8666 JS_ASSERT(d->isD());
8667 if (IsPromotedUint32(d)) {
8668 *out = w.demoteToUint32(d);
8669 return RECORD_CONTINUE;
8672 // This means "convert double to uint if it's integral, otherwise
8673 // exit". We first convert the double to an unsigned int, then
8674 // convert it back and exit if the two doubles don't match. If
8675 // 'f' is a non-integral immediate we'll end up aborting.
8677 return guard(true, w.eqd(d, w.ui2d(*out)), MISMATCH_EXIT, /* abortIfAlwaysExits = */true);
// stringify: emit LIR that converts a primitive value to a JSString.
// Numbers and booleans call runtime helpers (OOM-guarded); undefined and
// null fold to their atom strings; objects must be handled by the caller
// via an imacro.
8680 JS_REQUIRES_STACK LIns*
8681 TraceRecorder::stringify(const Value& v)
8683 LIns* v_ins = get(&v);
8687 LIns* args[] = { v_ins, cx_ins };
8690 ci = &js_NumberToString_ci;
8691 } else if (v.isUndefined()) {
8692 return w.immpAtomGC(cx->runtime->atomState.typeAtoms[JSTYPE_VOID]);
8693 } else if (v.isBoolean()) {
8694 ci = &js_BooleanIntToString_ci;
8697 * Callers must deal with non-primitive (non-null object) values by
8698 * calling an imacro. We don't try to guess about which imacro, with
8699 * what valueOf hint, here.
8701 JS_ASSERT(v.isNull());
8702 return w.immpAtomGC(cx->runtime->atomState.nullAtom);
8705 v_ins = w.call(ci, args);
// A null return from the helper means OOM; exit trace in that case.
8706 guard(false, w.eqp0(v_ins), OOM_EXIT);
// canCallImacro: imacros cannot nest; callable only when the frame is not
// already executing one.
8710 JS_REQUIRES_STACK bool
8711 TraceRecorder::canCallImacro() const
8713 /* We cannot nest imacros. */
8714 return !cx->fp()->hasImacropc();
// callImacro: checked entry point; stops recording instead of nesting.
8717 JS_REQUIRES_STACK RecordingStatus
8718 TraceRecorder::callImacro(jsbytecode* imacro)
8720 return canCallImacro() ? callImacroInfallibly(imacro) : RECORD_STOP;
// callImacroInfallibly: saves the current pc as the imacro return point and
// redirects execution into the imacro bytecode (pc redirect elided here).
8723 JS_REQUIRES_STACK RecordingStatus
8724 TraceRecorder::callImacroInfallibly(jsbytecode* imacro)
8726 JSStackFrame* fp = cx->fp();
8727 JS_ASSERT(!fp->hasImacropc());
8728 JSFrameRegs* regs = cx->regs;
8729 fp->setImacropc(regs->pc);
8732 return RECORD_IMACRO;
// ifop: record a conditional branch. Computes both the record-time truth
// value (cond) and a LIR boolean (x) implementing ECMA ToBoolean for the
// value's type, then emits the fused guard/branch and checks whether the
// trace should end at this pc.
8735 JS_REQUIRES_STACK AbortableRecordingStatus
8736 TraceRecorder::ifop()
8738 Value& v = stackval(-1);
8739 LIns* v_ins = get(&v);
8743 if (v.isNull() || v.isUndefined()) {
8746 } else if (!v.isPrimitive()) {
8749 } else if (v.isBoolean()) {
8750 /* Test for boolean is true, negate later if we are testing for false. */
8752 x = w.eqiN(v_ins, 1);
8753 } else if (v.isNumber()) {
8754 jsdouble d = v.toNumber();
// A number is truthy iff it is neither NaN nor zero; the double-negated
// LIR below computes exactly that (eqd(v,v) is the not-NaN test).
8755 cond = !JSDOUBLE_IS_NaN(d) && d;
8756 x = w.eqi0(w.eqi0(w.andi(w.eqd(v_ins, v_ins), w.eqi0(w.eqd0(v_ins)))));
8757 } else if (v.isString()) {
8758 cond = v.toString()->length() != 0;
8759 x = w.eqi0(w.eqp0(w.getStringLength(v_ins)));
8761 JS_NOT_REACHED("ifop");
8762 return ARECORD_STOP;
8765 jsbytecode* pc = cx->regs->pc;
8766 emitIf(pc, cond, x);
8767 return checkTraceEnd(pc);
8772 * Record LIR for a tableswitch or tableswitchx op. We record LIR only the
8773 * "first" time we hit the op. Later, when we start traces after exiting that
8774 * trace, we just patch.
8776 JS_REQUIRES_STACK AbortableRecordingStatus
8777 TraceRecorder::tableswitch()
8779 Value& v = stackval(-1);
8781 /* No need to guard if the condition can't match any of the cases. */
8783 return ARECORD_CONTINUE;
8785 /* No need to guard if the condition is constant. */
8786 LIns* v_ins = d2i(get(&v));
8787 if (v_ins->isImmI())
8788 return ARECORD_CONTINUE;
8790 jsbytecode* pc = cx->regs->pc;
8791 /* Starting a new trace after exiting a trace via switch. */
8793 (anchor->exitType == CASE_EXIT || anchor->exitType == DEFAULT_EXIT) &&
8794 fragment->ip == pc) {
8795 return ARECORD_CONTINUE;
// Decode the low/high case bounds; TABLESWITCHX uses wider jump offsets.
8800 if (*pc == JSOP_TABLESWITCH) {
8801 pc += JUMP_OFFSET_LEN;
8802 low = GET_JUMP_OFFSET(pc);
8803 pc += JUMP_OFFSET_LEN;
8804 high = GET_JUMP_OFFSET(pc);
8806 pc += JUMPX_OFFSET_LEN;
8807 low = GET_JUMP_OFFSET(pc);
8808 pc += JUMP_OFFSET_LEN;
8809 high = GET_JUMP_OFFSET(pc);
8813 * If there are no cases, this is a no-op. The default case immediately
8814 * follows in the bytecode and is always taken, so we need no special
8815 * action to handle it.
8817 int count = high + 1 - low;
8818 JS_ASSERT(count >= 0);
8820 return ARECORD_CONTINUE;
8822 /* Cap maximum table-switch size for modesty. */
8823 if (count > MAX_TABLE_SWITCH)
8824 return InjectStatus(switchop());
8826 /* Generate switch LIR. */
8827 SwitchInfo* si = new (traceAlloc()) SwitchInfo();
8830 si->index = (uint32) -1;
// Normalize the switch value to [0, count) and guard the range; values
// outside it take the DEFAULT_EXIT.
8831 LIns* diff = w.subi(v_ins, w.immi(low));
8832 LIns* cmp = w.ltui(diff, w.immi(si->count));
8833 guard(true, cmp, DEFAULT_EXIT);
8834 // We use AnyAddress; it's imprecise but this case is rare and not worth its
8835 // own access region.
8836 w.st(diff, AnyAddress(w.immpNonGC(&si->index)));
8837 VMSideExit* exit = snapshot(CASE_EXIT);
8838 exit->switchInfo = si;
// xtbl ends the fragment: each case exit will be patched in as traces for
// the individual cases are recorded.
8839 LIns* guardIns = w.xtbl(diff, createGuardRecord(exit));
8840 fragment->lastIns = guardIns;
8841 CHECK_STATUS_A(compile());
8842 return finishSuccessfully();
// switchop: fallback for switches recorded as a chain of equality guards.
// Guards that the switch operand equals the record-time value (numeric,
// string, or boolean); undefined needs no guard; objects/null stop
// recording.
8846 JS_REQUIRES_STACK RecordingStatus
8847 TraceRecorder::switchop()
8849 Value& v = stackval(-1);
8850 LIns* v_ins = get(&v);
8852 /* No need to guard if the condition is constant. */
8853 if (v_ins->isImmAny())
8854 return RECORD_CONTINUE;
8856 jsdouble d = v.toNumber();
8858 w.name(w.eqd(v_ins, w.immd(d)), "guard(switch on numeric)"),
8860 /* abortIfAlwaysExits = */true);
8861 } else if (v.isString()) {
8862 LIns* args[] = { w.immpStrGC(v.toString()), v_ins, cx_ins };
8863 LIns* equal_rval = w.call(&js_EqualStringsOnTrace_ci, args);
// JS_NEITHER from the helper signals OOM; guard that separately from the
// actual string-equality result.
8865 w.name(w.eqiN(equal_rval, JS_NEITHER), "guard(oom)"),
8868 w.name(w.eqi0(equal_rval), "guard(switch on string)"),
8870 } else if (v.isBoolean()) {
8872 w.name(w.eqi(v_ins, w.immi(v.isTrue())), "guard(switch on boolean)"),
8874 } else if (v.isUndefined()) {
8875 // This is a unit type, so no guard is needed.
8877 RETURN_STOP("switch on object or null");
8879 return RECORD_CONTINUE;
// inc(v, incr, pre): increment/decrement the given value in place on trace,
// delegating to the richer overload below (the extra locals at original
// lines 8886/8888 are elided here).
8882 JS_REQUIRES_STACK RecordingStatus
8883 TraceRecorder::inc(Value& v, jsint incr, bool pre)
8885 LIns* v_ins = get(&v);
8887 CHECK_STATUS(inc(v, v_ins, dummy, incr, pre));
8889 return RECORD_CONTINUE;
8893 * On exit, v_ins is the incremented unboxed value, and the appropriate value
8894 * (pre- or post-increment as described by pre) is stacked. v_out is set to
8895 * the value corresponding to v_ins.
8897 JS_REQUIRES_STACK RecordingStatus
8898 TraceRecorder::inc(const Value &v, LIns*& v_ins, Value &v_out, jsint incr, bool pre)
8901 CHECK_STATUS(incHelper(v, v_ins, v_out, v_after, incr));
8903 const JSCodeSpec& cs = js_CodeSpec[*cx->regs->pc];
8904 JS_ASSERT(cs.ndefs == 1);
// Pre-increment pushes the new value; post-increment pushes the old one.
8905 stack(-cs.nuses, pre ? v_after : v_ins);
8907 return RECORD_CONTINUE;
8911 * Do an increment operation without storing anything to the stack.
8913 * v_after is an out param whose value corresponds to the instruction the
8914 * v_ins_after out param gets set to.
8916 JS_REQUIRES_STACK RecordingStatus
8917 TraceRecorder::incHelper(const Value &v, LIns*& v_ins, Value &v_after,
8918 LIns*& v_ins_after, jsint incr)
8920 // FIXME: Bug 606071 on making this work for objects.
8921 if (!v.isPrimitive())
8922 RETURN_STOP("can inc primitives only");
8924 // We need to modify |v_ins| the same way relational() modifies
// ToNumber coercions: undefined -> NaN, null -> 0, boolean -> int-to-double,
// string -> runtime StringToNumber call (OOM-guarded).
8926 if (v.isUndefined()) {
8927 v_ins_after = w.immd(js_NaN);
8928 v_after.setDouble(js_NaN);
8929 v_ins = w.immd(js_NaN);
8930 } else if (v.isNull()) {
8931 v_ins_after = w.immd(incr);
8932 v_after.setDouble(incr);
8933 v_ins = w.immd(0.0);
8935 if (v.isBoolean()) {
8936 v_ins = w.i2d(v_ins);
8937 } else if (v.isString()) {
8938 LIns* ok_ins = w.allocp(sizeof(JSBool));
8939 LIns* args[] = { ok_ins, v_ins, cx_ins };
8940 v_ins = w.call(&js_StringToNumber_ci, args);
8942 w.name(w.eqi0(w.ldiAlloc(ok_ins)), "guard(oom)"),
8945 JS_ASSERT(v.isNumber());
// Compute the record-time numeric value, then emit the add via alu() so
// the usual int32-demotion machinery applies.
8949 AutoValueRooter tvr(cx);
8951 ValueToNumber(cx, tvr.value(), &num);
8952 v_ins_after = alu(LIR_addd, num, incr, v_ins, w.immd(incr));
8953 v_after.setDouble(num + incr);
8956 return RECORD_CONTINUE;
// incProp: record ++/-- on an object property: resolve the property to a
// slot, increment the unboxed value, and store it back into the object's
// slot on trace.
8959 JS_REQUIRES_STACK AbortableRecordingStatus
8960 TraceRecorder::incProp(jsint incr, bool pre)
8962 Value& l = stackval(-1);
8963 if (l.isPrimitive())
8964 RETURN_STOP_A("incProp on primitive");
8966 JSObject* obj = &l.toObject();
8967 LIns* obj_ins = get(&l);
8971 CHECK_STATUS_A(prop(obj, obj_ins, &slot, &v_ins, NULL));
8973 if (slot == SHAPE_INVALID_SLOT)
8974 RETURN_STOP_A("incProp on invalid slot");
8976 Value& v = obj->getSlotRef(slot);
8978 CHECK_STATUS_A(inc(v, v_ins, v_after, incr, pre));
8980 LIns* slots_ins = NULL;
8981 stobj_set_slot(obj, obj_ins, slot, slots_ins, v_after, v_ins);
8982 return ARECORD_CONTINUE;
// incElem: record ++/-- on obj[index]. Fast path: dense array with int32
// index -- guard density, load the element, increment, and box it back into
// the slot. Anything else (or a hole) goes through the matching
// inc/dec-elem imacro.
8985 JS_REQUIRES_STACK RecordingStatus
8986 TraceRecorder::incElem(jsint incr, bool pre)
8988 Value& r = stackval(-1);
8989 Value& l = stackval(-2);
8994 if (!l.isPrimitive() && l.toObject().isDenseArray() && r.isInt32()) {
8995 guardDenseArray(get(&l), MISMATCH_EXIT);
8996 CHECK_STATUS(denseArrayElement(l, r, vp, v_ins, addr_ins, snapshot(BRANCH_EXIT)));
8997 if (!addr_ins) // if we read a hole, abort
9000 CHECK_STATUS(inc(*vp, v_ins, v_after, incr, pre));
9001 box_value_into(v_after, v_ins, DSlotsAddress(addr_ins));
9002 return RECORD_CONTINUE;
// Imacro selection: incr sign picks inc vs dec, pre picks operator order.
9005 return callImacro((incr == 1)
9006 ? pre ? incelem_imacros.incelem : incelem_imacros.eleminc
9007 : pre ? decelem_imacros.decelem : decelem_imacros.elemdec);
// EvalCmp(op, l, r): record-time evaluation of a numeric comparison.
// NOTE(review): the body (original lines 9012-9030, presumably a switch on
// op) is elided in this excerpt; only the unreachable-default assert is
// visible. The return-type line above the signature is also elided.
9011 EvalCmp(LOpcode op, double l, double r)
9031 JS_NOT_REACHED("unexpected comparison op");
// EvalCmp(cx, op, l, r, ret): string variant -- equality uses EqualStrings,
// ordering compares CompareStrings' result against 0 via the numeric
// overload. Returns false on OOM (per the callers' RETURN_ERROR handling).
9038 EvalCmp(JSContext *cx, LOpcode op, JSString* l, JSString* r, JSBool *ret)
9041 return EqualStrings(cx, l, r, ret);
9043 if (!CompareStrings(cx, l, r, &cmp))
9045 *ret = EvalCmp(op, cmp, 0);
// strictEquality: record === / !== . If the promoted types differ the
// result is statically known; otherwise compare per type (doubles with
// eqd, strings via the OOM-guarded runtime helper, objects/null by
// pointer, the rest as int32), computing the record-time result in |cond|
// and guarding only when both outcomes are reachable.
9049 JS_REQUIRES_STACK RecordingStatus
9050 TraceRecorder::strictEquality(bool equal, bool cmpCase)
9052 Value& r = stackval(-1);
9053 Value& l = stackval(-2);
9054 LIns* l_ins = get(&l);
9055 LIns* r_ins = get(&r);
9059 JSValueType ltag = getPromotedType(l);
9060 if (ltag != getPromotedType(r)) {
9063 } else if (ltag == JSVAL_TYPE_STRING) {
9064 LIns* args[] = { r_ins, l_ins, cx_ins };
9065 LIns* equal_ins = w.call(&js_EqualStringsOnTrace_ci, args);
9067 w.name(w.eqiN(equal_ins, JS_NEITHER), "guard(oom)"),
9069 x = w.eqiN(equal_ins, equal);
9070 if (!EqualStrings(cx, l.toString(), r.toString(), &cond))
9071 RETURN_ERROR("oom");
9073 if (ltag == JSVAL_TYPE_DOUBLE)
9074 x = w.eqd(l_ins, r_ins);
9075 else if (ltag == JSVAL_TYPE_NULL || ltag == JSVAL_TYPE_NONFUNOBJ || ltag == JSVAL_TYPE_FUNOBJ)
9076 x = w.eqp(l_ins, r_ins);
9078 x = w.eqi(l_ins, r_ins);
9081 cond = (ltag == JSVAL_TYPE_DOUBLE)
9082 ? l.toNumber() == r.toNumber()
// Fold in whether we are recording == or != semantics.
9085 cond = (!!cond == equal);
9088 /* Only guard if the same path may not always be taken. */
9090 guard(cond, x, BRANCH_EXIT);
9091 return RECORD_CONTINUE;
9095 return RECORD_CONTINUE;
// equality: record loose == / != on the top two stack values by delegating
// to equalityHelper with the lvalue doubling as the result slot.
9098 JS_REQUIRES_STACK AbortableRecordingStatus
9099 TraceRecorder::equality(bool negate, bool tryBranchAfterCond)
9101 Value& rval = stackval(-1);
9102 Value& lval = stackval(-2);
9103 LIns* l_ins = get(&lval);
9104 LIns* r_ins = get(&rval);
9106 return equalityHelper(lval, rval, l_ins, r_ins, negate, tryBranchAfterCond, lval);
// equalityHelper: record ECMA 11.9.3 loose equality. Same-type pairs are
// compared directly (objects must not define extended equality; 1-char
// non-rope strings compare by character, other strings via the runtime
// helper); mixed boolean/number/string pairs are coerced and the function
// recurses; value/object pairs go through the equality imacros. The
// record-time result lands in |cond|, then the comparison is optionally
// fused with the following branch.
9109 JS_REQUIRES_STACK AbortableRecordingStatus
9110 TraceRecorder::equalityHelper(Value& l, Value& r, LIns* l_ins, LIns* r_ins,
9111 bool negate, bool tryBranchAfterCond,
9114 LOpcode op = LIR_eqi;
9116 LIns* args[] = { NULL, NULL, NULL };
9119 * The if chain below closely mirrors that found in 11.9.3, in general
9120 * deviating from that ordering of ifs only to account for SpiderMonkey's
9121 * conflation of booleans and undefined and for the possibility of
9122 * confusing objects and null. Note carefully the spec-mandated recursion
9123 * in the final else clause, which terminates because Number == T recurs
9124 * only if T is Object, but that must recur again to convert Object to
9125 * primitive, and ToPrimitive throws if the object cannot be converted to
9126 * a primitive value (which would terminate recursion).
9129 if (getPromotedType(l) == getPromotedType(r)) {
9130 if (l.isUndefined() || l.isNull()) {
9134 } else if (l.isObject()) {
9135 if (l.toObject().getClass()->ext.equality)
9136 RETURN_STOP_A("Can't trace extended class equality operator");
// Guard the object doesn't acquire an equality hook on trace.
9137 LIns* flags_ins = w.ldiObjFlags(l_ins);
9138 LIns* flag_ins = w.andi(flags_ins, w.nameImmui(JSObject::HAS_EQUALITY));
9139 guard(true, w.eqi0(flag_ins), BRANCH_EXIT);
9143 } else if (l.isBoolean()) {
9144 JS_ASSERT(r.isBoolean());
9146 } else if (l.isString()) {
9147 JSString *l_str = l.toString();
9148 JSString *r_str = r.toString();
9149 if (!l_str->isRope() && !r_str->isRope() && l_str->length() == 1 && r_str->length() == 1) {
// Fast path: guard both lengths are 1 and compare the single chars.
9150 VMSideExit *exit = snapshot(BRANCH_EXIT);
9151 LIns *c = w.immw(1);
9152 guard(true, w.eqp(w.getStringLength(l_ins), c), exit);
9153 guard(true, w.eqp(w.getStringLength(r_ins), c), exit);
9154 l_ins = w.getStringChar(l_ins, w.immpNonGC(0));
9155 r_ins = w.getStringChar(r_ins, w.immpNonGC(0));
9157 args[0] = r_ins, args[1] = l_ins, args[2] = cx_ins;
9158 LIns *equal_ins = w.call(&js_EqualStringsOnTrace_ci, args);
9160 w.name(w.eqiN(equal_ins, JS_NEITHER), "guard(oom)"),
9165 if (!EqualStrings(cx, l.toString(), r.toString(), &cond))
9166 RETURN_ERROR_A("oom");
9168 JS_ASSERT(l.isNumber() && r.isNumber());
9169 cond = (l.toNumber() == r.toNumber());
// Mixed-type cases: null==undefined folds; number<->string coerces the
// string side via the OOM-guarded runtime helper.
9172 } else if (l.isNull() && r.isUndefined()) {
9173 l_ins = w.immiUndefined();
9175 } else if (l.isUndefined() && r.isNull()) {
9176 r_ins = w.immiUndefined();
9178 } else if (l.isNumber() && r.isString()) {
9179 LIns* ok_ins = w.allocp(sizeof(JSBool));
9180 args[0] = ok_ins, args[1] = r_ins, args[2] = cx_ins;
9181 r_ins = w.call(&js_StringToNumber_ci, args);
9183 w.name(w.eqi0(w.ldiAlloc(ok_ins)), "guard(oom)"),
9186 double d = js_StringToNumber(cx, r.toString(), &ok);
9188 RETURN_ERROR_A("oom");
9189 cond = (l.toNumber() == d);
9191 } else if (l.isString() && r.isNumber()) {
9192 LIns* ok_ins = w.allocp(sizeof(JSBool));
9193 args[0] = ok_ins, args[1] = l_ins, args[2] = cx_ins;
9194 l_ins = w.call(&js_StringToNumber_ci, args);
9196 w.name(w.eqi0(w.ldiAlloc(ok_ins)), "guard(oom)"),
9199 double d = js_StringToNumber(cx, l.toString(), &ok);
9201 RETURN_ERROR_A("oom");
9202 cond = (d == r.toNumber());
9205 // Below we may assign to l or r, which modifies the interpreter state.
9206 // This is fine as long as we also update the tracker.
9207 if (l.isBoolean()) {
9208 l_ins = w.i2d(l_ins);
9210 l.setInt32(l.isTrue());
9211 return equalityHelper(l, r, l_ins, r_ins, negate,
9212 tryBranchAfterCond, rval);
9214 if (r.isBoolean()) {
9215 r_ins = w.i2d(r_ins);
9217 r.setInt32(r.isTrue());
9218 return equalityHelper(l, r, l_ins, r_ins, negate,
9219 tryBranchAfterCond, rval);
9221 if ((l.isString() || l.isNumber()) && !r.isPrimitive()) {
9222 CHECK_STATUS_A(guardNativeConversion(r));
9223 return InjectStatus(callImacro(equality_imacros.any_obj));
9225 if (!l.isPrimitive() && (r.isString() || r.isNumber())) {
9226 CHECK_STATUS_A(guardNativeConversion(l));
9227 return InjectStatus(callImacro(equality_imacros.obj_any));
9235 /* If the operands aren't numbers, compare them as integers. */
9236 LIns* x = w.ins2(op, l_ins, r_ins);
9242 jsbytecode* pc = cx->regs->pc;
9245 * Don't guard if the same path is always taken. If it isn't, we have to
9246 * fuse comparisons and the following branch, because the interpreter does
9249 if (tryBranchAfterCond)
9250 fuseIf(pc + 1, cond, x);
9253 * There is no need to write out the result of this comparison if the trace
9254 * ends on this operation.
9256 if (pc[1] == JSOP_IFNE || pc[1] == JSOP_IFEQ)
9257 CHECK_STATUS_A(checkTraceEnd(pc + 1));
9260 * We update the stack after the guard. This is safe since the guard bails
9261 * out at the comparison and the interpreter will therefore re-execute the
9262 * comparison. This way the value of the condition doesn't have to be
9263 * calculated and saved on the stack in most cases.
9267 return ARECORD_CONTINUE;
9270 JS_REQUIRES_STACK AbortableRecordingStatus
9271 TraceRecorder::relational(LOpcode op, bool tryBranchAfterCond)
9273 Value& r = stackval(-1);
9274 Value& l = stackval(-2);
9277 LIns* l_ins = get(&l);
9278 LIns* r_ins = get(&r);
9280 jsdouble lnum, rnum;
9283 * 11.8.5 if either argument is an object with a function-valued valueOf
9284 * property; if both arguments are objects with non-function-valued valueOf
9285 * properties, abort.
9287 if (!l.isPrimitive()) {
9288 CHECK_STATUS_A(guardNativeConversion(l));
9289 if (!r.isPrimitive()) {
9290 CHECK_STATUS_A(guardNativeConversion(r));
9291 return InjectStatus(callImacro(binary_imacros.obj_obj));
9293 return InjectStatus(callImacro(binary_imacros.obj_any));
9295 if (!r.isPrimitive()) {
9296 CHECK_STATUS_A(guardNativeConversion(r));
9297 return InjectStatus(callImacro(binary_imacros.any_obj));
9300 /* 11.8.5 steps 3, 16-21. */
9301 if (l.isString() && r.isString()) {
9302 LIns* args[] = { r_ins, l_ins, cx_ins };
9303 LIns* result_ins = w.call(&js_CompareStringsOnTrace_ci, args);
9305 w.name(w.eqiN(result_ins, INT32_MIN), "guard(oom)"),
9309 if (!EvalCmp(cx, op, l.toString(), r.toString(), &cond))
9310 RETURN_ERROR_A("oom");
9314 /* 11.8.5 steps 4-5. */
9315 if (!l.isNumber()) {
9316 if (l.isBoolean()) {
9317 l_ins = w.i2d(l_ins);
9318 } else if (l.isUndefined()) {
9319 l_ins = w.immd(js_NaN);
9320 } else if (l.isString()) {
9321 LIns* ok_ins = w.allocp(sizeof(JSBool));
9322 LIns* args[] = { ok_ins, l_ins, cx_ins };
9323 l_ins = w.call(&js_StringToNumber_ci, args);
9325 w.name(w.eqi0(w.ldiAlloc(ok_ins)), "guard(oom)"),
9327 } else if (l.isNull()) {
9328 l_ins = w.immd(0.0);
9330 JS_NOT_REACHED("JSVAL_IS_NUMBER if int/double, objects should "
9331 "have been handled at start of method");
9332 RETURN_STOP_A("safety belt");
9335 if (!r.isNumber()) {
9336 if (r.isBoolean()) {
9337 r_ins = w.i2d(r_ins);
9338 } else if (r.isUndefined()) {
9339 r_ins = w.immd(js_NaN);
9340 } else if (r.isString()) {
9341 LIns* ok_ins = w.allocp(sizeof(JSBool));
9342 LIns* args[] = { ok_ins, r_ins, cx_ins };
9343 r_ins = w.call(&js_StringToNumber_ci, args);
9345 w.name(w.eqi0(w.ldiAlloc(ok_ins)), "guard(oom)"),
9347 } else if (r.isNull()) {
9348 r_ins = w.immd(0.0);
9350 JS_NOT_REACHED("JSVAL_IS_NUMBER if int/double, objects should "
9351 "have been handled at start of method");
9352 RETURN_STOP_A("safety belt");
9356 AutoValueRooter tvr(cx);
9358 ValueToNumber(cx, tvr.value(), &lnum);
9360 ValueToNumber(cx, tvr.value(), &rnum);
9362 cond = EvalCmp(op, lnum, rnum);
9365 /* 11.8.5 steps 6-15. */
9368 * If the result is not a number or it's not a quad, we must use an integer
9372 JS_ASSERT(isCmpDOpcode(op));
9373 op = cmpOpcodeD2I(op);
9375 x = w.ins2(op, l_ins, r_ins);
9377 jsbytecode* pc = cx->regs->pc;
9380 * Don't guard if the same path is always taken. If it isn't, we have to
9381 * fuse comparisons and the following branch, because the interpreter does
9384 if (tryBranchAfterCond)
9385 fuseIf(pc + 1, cond, x);
9388 * There is no need to write out the result of this comparison if the trace
9389 * ends on this operation.
9391 if (pc[1] == JSOP_IFNE || pc[1] == JSOP_IFEQ)
9392 CHECK_STATUS_A(checkTraceEnd(pc + 1));
9395 * We update the stack after the guard. This is safe since the guard bails
9396 * out at the comparison and the interpreter will therefore re-execute the
9397 * comparison. This way the value of the condition doesn't have to be
9398 * calculated and saved on the stack in most cases.
9402 return ARECORD_CONTINUE;
// Record a unary LIR operation on the top-of-stack value. intop notes whether
// the opcode produces an integer-typed result (retTypes[op] == LTy_I).
// NOTE(review): most of this method's body is elided from this excerpt.
9405 JS_REQUIRES_STACK RecordingStatus
9406 TraceRecorder::unary(LOpcode op)
9408 Value& v = stackval(-1);
9409 bool intop = retTypes[op] == LTy_I;
9418 return RECORD_CONTINUE;
// Record a generic binary operation on the two topmost stack values. Objects
// defer to imacros; booleans/undefined/strings are coerced to numbers (with
// OOM guards around on-trace string->number conversion); integer-typed
// opcodes demote/promote around the ALU op.
// NOTE(review): some lines (guard heads, braces) appear elided here.
9423 JS_REQUIRES_STACK RecordingStatus
9424 TraceRecorder::binary(LOpcode op)
9426 Value& r = stackval(-1);
9427 Value& l = stackval(-2);
9429 if (!l.isPrimitive()) {
9430 CHECK_STATUS(guardNativeConversion(l));
9431 if (!r.isPrimitive()) {
9432 CHECK_STATUS(guardNativeConversion(r));
9433 return callImacro(binary_imacros.obj_obj);
9435 return callImacro(binary_imacros.obj_any);
9437 if (!r.isPrimitive()) {
9438 CHECK_STATUS(guardNativeConversion(r));
9439 return callImacro(binary_imacros.any_obj);
9442 bool intop = retTypes[op] == LTy_I;
// Record-time numeric values, used to compute the constant-folded result.
9446 bool leftIsNumber = l.isNumber();
9447 jsdouble lnum = leftIsNumber ? l.toNumber() : 0;
9449 bool rightIsNumber = r.isNumber();
9450 jsdouble rnum = rightIsNumber ? r.toNumber() : 0;
// String left operand: convert on trace; JSOP_ADD string concat is handled
// elsewhere, hence the assertion.
9453 NanoAssert(op != LIR_addd); // LIR_addd/IS_STRING case handled by record_JSOP_ADD()
9454 LIns* ok_ins = w.allocp(sizeof(JSBool));
9455 LIns* args[] = { ok_ins, a, cx_ins };
9456 a = w.call(&js_StringToNumber_ci, args);
9458 w.name(w.eqi0(w.ldiAlloc(ok_ins)), "guard(oom)"),
9461 lnum = js_StringToNumber(cx, l.toString(), &ok);
9463 RETURN_ERROR("oom");
9464 leftIsNumber = true;
// String right operand: same conversion path.
9467 NanoAssert(op != LIR_addd); // LIR_addd/IS_STRING case handled by record_JSOP_ADD()
9468 LIns* ok_ins = w.allocp(sizeof(JSBool));
9469 LIns* args[] = { ok_ins, b, cx_ins };
9470 b = w.call(&js_StringToNumber_ci, args);
9472 w.name(w.eqi0(w.ldiAlloc(ok_ins)), "guard(oom)"),
9475 rnum = js_StringToNumber(cx, r.toString(), &ok);
9477 RETURN_ERROR("oom");
9478 rightIsNumber = true;
// Boolean/undefined operands coerce trivially to numbers.
9480 if (l.isBoolean()) {
9482 lnum = l.toBoolean();
9483 leftIsNumber = true;
9484 } else if (l.isUndefined()) {
9487 leftIsNumber = true;
9489 if (r.isBoolean()) {
9491 rnum = r.toBoolean();
9492 rightIsNumber = true;
9493 } else if (r.isUndefined()) {
9496 rightIsNumber = true;
// Both numeric: demote for integer ops (unsigned for LIR_rshui), run the
// ALU helper, then promote the result back to double.
9498 if (leftIsNumber && rightIsNumber) {
9500 a = (op == LIR_rshui) ? d2u(a) : d2i(a);
9503 a = alu(op, lnum, rnum, a, b);
9505 a = (op == LIR_rshui) ? w.ui2d(a) : w.i2d(a);
9507 return RECORD_CONTINUE;
// Debug-only (DEBUG_notme && XP_UNIX) shape dumper: appends a description of
// obj's shape lineage to /tmp/shapes.dump for offline inspection.
9512 #if defined DEBUG_notme && defined XP_UNIX
// Lazily-opened dump file shared by all DumpShape calls.
9515 static FILE* shapefp = NULL;
9518 DumpShape(JSObject* obj, const char* prefix)
9521 shapefp = fopen("/tmp/shapes.dump", "w");
9526 fprintf(shapefp, "\n%s: shape %u flags %x\n", prefix, obj->shape(), obj->flags);
// Walk every property in the object's shape lineage.
9527 for (Shape::Range r = obj->lastProperty()->all(); !r.empty(); r.popFront()) {
9528 const Shape &shape = r.front();
9530 if (JSID_IS_ATOM(shape.id)) {
9532 JS_PutString(JSID_TO_STRING(shape.id), shapefp);
9534 JS_ASSERT(!JSID_IS_OBJECT(shape.id));
9535 fprintf(shapefp, " %d", JSID_TO_INT(shape.id));
9537 fprintf(shapefp, " %u %p %p %x %x %d\n",
9538 shape.slot, shape.getter, shape.setter, shape.attrs, shape.flags, shape.shortid);
// Debug-only: dump every object currently in the recorder's guarded-shape
// table, tagged with the given prefix.
9544 TraceRecorder::dumpGuardedShapes(const char* prefix)
9546 for (GuardedShapeTable::Range r = guardedShapeTable.all(); !r.empty(); r.popFront())
9547 DumpShape(r.front().value, prefix);
9549 #endif /* DEBUG_notme && XP_UNIX */
// Emit (at most once per (obj_ins, obj) pair) a guard that obj's shape equals
// the given shape. The global object is special-cased with a cheaper identity
// guard, since its shape is held invariant while recording.
9551 JS_REQUIRES_STACK RecordingStatus
9552 TraceRecorder::guardShape(LIns* obj_ins, JSObject* obj, uint32 shape, const char* guardName,
9555 // Test (with add if missing) for a remembered guard for (obj_ins, obj).
9556 GuardedShapeTable::AddPtr p = guardedShapeTable.lookupForAdd(obj_ins);
// Already guarded on this ins: nothing more to emit.
9558 JS_ASSERT(p->value == obj);
9559 return RECORD_CONTINUE;
9561 if (!guardedShapeTable.add(p, obj_ins, obj))
9562 return RECORD_ERROR;
9564 if (obj == globalObj) {
9565 // In this case checking object identity is equivalent and faster.
9567 w.name(w.eqp(obj_ins, w.immpObjGC(globalObj)), "guard_global"),
9569 return RECORD_CONTINUE;
9572 #if defined DEBUG_notme && defined XP_UNIX
9573 DumpShape(obj, "guard");
9574 fprintf(shapefp, "for obj_ins %p\n", obj_ins);
9577 // Finally, emit the shape guard.
9578 guard(true, w.name(w.eqiN(w.ldiObjShape(obj_ins), shape), guardName), exit);
9579 return RECORD_CONTINUE;
// Drop any guarded-shape table entries recorded for the given object, e.g.
// after an operation that may have changed its shape.
// NOTE(review): the entry-removal line appears elided from this excerpt.
9583 TraceRecorder::forgetGuardedShapesForObject(JSObject* obj)
9585 for (GuardedShapeTable::Enum e(guardedShapeTable); !e.empty(); e.popFront()) {
9586 if (e.front().value == obj) {
9587 #if defined DEBUG_notme && defined XP_UNIX
9588 DumpShape(entry->obj, "forget");
// Forget all remembered shape guards; called when an operation (e.g. a
// property lookup) may have changed arbitrary object shapes.
9596 TraceRecorder::forgetGuardedShapes()
9598 #if defined DEBUG_notme && defined XP_UNIX
9599 dumpGuardedShapes("forget-all");
9601 guardedShapeTable.clear();
// Probe (and on miss, pre-fill) the property cache for the current get-style
// opcode, returning the holder object via obj2 and the cached value via
// pcval. Dense arrays skip one proto hop, mirroring the interpreter. May
// reenter the interpreter and abort recording.
// NOTE(review): several lines are elided from this excerpt; comments describe
// only the visible code.
9604 JS_REQUIRES_STACK AbortableRecordingStatus
9605 TraceRecorder::test_property_cache(JSObject* obj, LIns* obj_ins, JSObject*& obj2, PCVal& pcval)
9607 jsbytecode* pc = cx->regs->pc;
// Set-style opcodes must not reach this path.
9608 JS_ASSERT(*pc != JSOP_INITPROP && *pc != JSOP_INITMETHOD &&
9609 *pc != JSOP_SETNAME && *pc != JSOP_SETPROP && *pc != JSOP_SETMETHOD);
9611 // Mimic the interpreter's special case for dense arrays by skipping up one
9612 // hop along the proto chain when accessing a named (not indexed) property,
9613 // typically to find Array.prototype methods.
9614 JSObject* aobj = obj;
9615 if (obj->isDenseArray()) {
9616 guardDenseArray(obj_ins, BRANCH_EXIT);
9617 aobj = obj->getProto();
9618 obj_ins = w.ldpObjProto(obj_ins);
9621 if (!aobj->isNative())
9622 RETURN_STOP_A("non-native object");
9625 PropertyCacheEntry* entry;
9626 JS_PROPERTY_CACHE(cx).test(cx, pc, aobj, obj2, entry, atom);
9628 // Miss: pre-fill the cache for the interpreter, as well as for our needs.
9629 // FIXME: bug 458271.
9630 jsid id = ATOM_TO_JSID(atom);
9632 // The lookup below may change object shapes.
9633 forgetGuardedShapes();
9636 if (JOF_OPMODE(*pc) == JOF_NAME) {
9637 JS_ASSERT(aobj == obj);
// Snapshot the monitor so we can detect recorder death across reentry.
9639 TraceMonitor &localtm = *traceMonitor;
9640 entry = js_FindPropertyHelper(cx, id, true, &obj, &obj2, &prop);
9642 RETURN_ERROR_A("error in js_FindPropertyHelper");
9644 /* js_FindPropertyHelper can reenter the interpreter and kill |this|. */
9645 if (!localtm.recorder)
9646 return ARECORD_ABORTED;
9648 if (entry == JS_NO_PROP_CACHE_FILL)
9649 RETURN_STOP_A("cannot cache name");
9651 TraceMonitor &localtm = *traceMonitor;
9652 int protoIndex = js_LookupPropertyWithFlags(cx, aobj, id,
9657 RETURN_ERROR_A("error in js_LookupPropertyWithFlags");
9659 /* js_LookupPropertyWithFlags can reenter the interpreter and kill |this|. */
9660 if (!localtm.recorder)
9661 return ARECORD_ABORTED;
9664 if (!obj2->isNative())
9665 RETURN_STOP_A("property found on non-native object");
9666 entry = JS_PROPERTY_CACHE(cx).fill(cx, aobj, 0, protoIndex, obj2,
9669 if (entry == JS_NO_PROP_CACHE_FILL)
9676 // Propagate obj from js_FindPropertyHelper to record_JSOP_BINDNAME
9677 // via our obj2 out-parameter. If we are recording JSOP_SETNAME and
9678 // the global it's assigning does not yet exist, create it.
9681 // Use a null pcval to return "no such property" to our caller.
9683 return ARECORD_CONTINUE;
9687 RETURN_STOP_A("failed to fill property cache");
9690 #ifdef JS_THREADSAFE
9691 // There's a potential race in any JS_THREADSAFE embedding that's nuts
9692 // enough to share mutable objects on the scope or proto chain, but we
9693 // don't care about such insane embeddings. Anyway, the (scope, proto)
9694 // entry->vcap coordinates must reach obj2 from aobj at this point.
9695 JS_ASSERT(cx->thread->data.requestDepth);
// Hit (or successful fill): emit guards validating the cache entry on trace.
9698 return InjectStatus(guardPropertyCacheHit(obj_ins, aobj, obj2, entry, pcval));
// Emit the guards that make a property-cache hit valid on trace: kshape (or
// global identity) on the start object, a proto-hazard-shape guard when the
// entry adds a property, and a vshape guard on the holder when the hit went
// up the scope/proto chain. On success, pcval receives the cached word.
9701 JS_REQUIRES_STACK RecordingStatus
9702 TraceRecorder::guardPropertyCacheHit(LIns* obj_ins,
9705 PropertyCacheEntry* entry,
9708 VMSideExit* exit = snapshot(BRANCH_EXIT);
9710 uint32 vshape = entry->vshape();
9712 // Special case for the global object, which may be aliased to get a property value.
9713 // To catch cross-global property accesses we must check against globalObj identity.
9714 // But a JOF_NAME mode opcode needs no guard, as we ensure the global object's shape
9715 // never changes, and name ops can't reach across a global object ('with' aborts).
9716 if (aobj == globalObj) {
9717 if (entry->adding())
9718 RETURN_STOP("adding a property to the global object");
9720 JSOp op = js_GetOpcode(cx, cx->fp()->script(), cx->regs->pc);
9721 if (JOF_OPMODE(op) != JOF_NAME) {
9723 w.name(w.eqp(obj_ins, w.immpObjGC(globalObj)), "guard_global"),
// Non-global start object: guard on the entry's key shape.
9727 CHECK_STATUS(guardShape(obj_ins, aobj, entry->kshape, "guard_kshape", exit));
// Adding entries are only valid while no prototype acquired a conflicting
// property; guard on the runtime's proto-hazard shape.
9730 if (entry->adding()) {
9732 w.ldiRuntimeProtoHazardShape(w.ldpConstContextField(runtime));
9735 w.name(w.eqiN(vshape_ins, vshape), "guard_protoHazardShape"),
9739 // For any hit that goes up the scope and/or proto chains, we will need to
9740 // guard on the shape of the object containing the property.
9741 if (entry->vcapTag() >= 1) {
9742 JS_ASSERT(obj2->shape() == vshape);
9743 if (obj2 == globalObj)
9744 RETURN_STOP("hitting the global object via a prototype chain");
9747 if (entry->vcapTag() == 1) {
9748 // Duplicate the special case in PropertyCache::test.
9749 obj2_ins = w.ldpObjProto(obj_ins);
9750 guard(false, w.eqp0(obj2_ins), exit);
9752 obj2_ins = w.immpObjGC(obj2);
9754 CHECK_STATUS(guardShape(obj2_ins, obj2, vshape, "guard_vshape", exit));
9757 pcval = entry->vword;
9758 return RECORD_CONTINUE;
// Box v (represented on trace by v_ins) into the object's fixed slot `slot`.
9762 TraceRecorder::stobj_set_fslot(LIns *obj_ins, unsigned slot, const Value &v, LIns* v_ins)
9764 box_value_into(v, v_ins, FSlotsAddress(obj_ins, slot));
// Box v into the object's dynamic (heap-allocated) slot `slot`; loads the
// slots pointer and caches it in slots_ins for reuse by the caller.
9768 TraceRecorder::stobj_set_dslot(LIns *obj_ins, unsigned slot, LIns*& slots_ins,
9769 const Value &v, LIns* v_ins)
9772 slots_ins = w.ldpObjSlots(obj_ins);
9773 box_value_into(v, v_ins, DSlotsAddress(slots_ins, slot));
// Store v into obj's slot `slot`, choosing fixed vs. dynamic storage based on
// the record-time object; a prior shape guard keeps that choice valid.
9777 TraceRecorder::stobj_set_slot(JSObject *obj, LIns* obj_ins, unsigned slot, LIns*& slots_ins,
9778 const Value &v, LIns* v_ins)
9781 * A shape guard must have already been generated for obj, which will
9782 * ensure that future objects have the same number of fixed slots.
9784 if (!obj->hasSlotsArray()) {
9785 JS_ASSERT(slot < obj->numSlots());
9786 stobj_set_fslot(obj_ins, slot, v, v_ins);
9788 stobj_set_dslot(obj_ins, slot, slots_ins, v, v_ins);
// Load and unbox the value in obj's slot `slot`, picking fixed vs. dynamic
// addressing the same way stobj_set_slot does.
9793 TraceRecorder::unbox_slot(JSObject *obj, LIns *obj_ins, uint32 slot, VMSideExit *exit)
9795 /* Same guarantee about fixed slots as stobj_set_slot. */
9796 Address addr = (!obj->hasSlotsArray())
9797 ? (Address)FSlotsAddress(obj_ins, slot)
9798 : (Address)DSlotsAddress(w.ldpObjSlots(obj_ins), slot);
9800 return unbox_value(obj->getSlot(slot), addr, exit);
// 32-bit value representation: tag and payload are stored as two words.
9803 #if JS_BITS_PER_WORD == 32
// Store the boxed `undefined` value at addr (tag + zero payload).
9806 TraceRecorder::box_undefined_into(Address addr)
9808 w.stiValueTag(w.nameImmui(JSVAL_TAG_UNDEFINED), addr);
9809 w.stiValuePayload(w.immi(0), addr);
// Store the boxed `null` value at addr (tag + zero payload).
9813 TraceRecorder::box_null_into(Address addr)
9815 w.stiValueTag(w.nameImmui(JSVAL_TAG_NULL), addr);
9816 w.stiValuePayload(w.immi(0), addr);
// Guard the boxed value at addr is a number (int32 or double tag) and return
// it as a double via the js_UnboxDouble helper.
9820 TraceRecorder::unbox_number_as_double(Address addr, LIns *tag_ins, VMSideExit *exit)
9822 guard(true, w.leui(tag_ins, w.nameImmui(JSVAL_UPPER_INCL_TAG_OF_NUMBER_SET)), exit);
9823 LIns *val_ins = w.ldiValuePayload(addr);
9824 LIns* args[] = { val_ins, tag_ins };
9825 return w.call(&js_UnboxDouble_ci, args);
// Unbox a value known (at record time) to be neither a double nor an object:
// undefined/null become immediates, other types load the payload word; a tag
// guard pins the type on trace.
9829 TraceRecorder::unbox_non_double_object(Address addr, LIns* tag_ins,
9830 JSValueType type, VMSideExit* exit)
9833 if (type == JSVAL_TYPE_UNDEFINED) {
9834 val_ins = w.immiUndefined();
9835 } else if (type == JSVAL_TYPE_NULL) {
9836 val_ins = w.immpNull();
9838 JS_ASSERT(type == JSVAL_TYPE_INT32 || type == JSVAL_TYPE_OBJECT ||
9839 type == JSVAL_TYPE_STRING || type == JSVAL_TYPE_BOOLEAN ||
9840 type == JSVAL_TYPE_MAGIC);
9841 val_ins = w.ldiValuePayload(addr);
// Guard the runtime tag matches the record-time type.
9844 guard(true, w.eqi(tag_ins, w.nameImmui(JSVAL_TYPE_TO_TAG(type))), exit);
// Unbox an object value, guarding both the object tag and whether the payload
// is (or is not) a function object, matching the requested trace type.
9849 TraceRecorder::unbox_object(Address addr, LIns* tag_ins, JSValueType type, VMSideExit* exit)
9851 JS_ASSERT(type == JSVAL_TYPE_FUNOBJ || type == JSVAL_TYPE_NONFUNOBJ);
9852 guard(true, w.name(w.eqi(tag_ins, w.nameImmui(JSVAL_TAG_OBJECT)), "isObj"), exit);
9853 LIns *payload_ins = w.ldiValuePayload(addr);
9854 if (type == JSVAL_TYPE_FUNOBJ)
9855 guardClass(payload_ins, &js_FunctionClass, exit, LOAD_NORMAL);
9857 guardNotClass(payload_ins, &js_FunctionClass, exit, LOAD_NORMAL);
// Unbox the value at addr according to its record-time type v, emitting the
// appropriate type guard; force_double treats any number as a double.
// NOTE(review): some branch lines appear elided from this excerpt.
9862 TraceRecorder::unbox_value(const Value &v, Address addr, VMSideExit *exit, bool force_double)
9864 LIns *tag_ins = w.ldiValueTag(addr);
9866 if (v.isNumber() && force_double)
9867 return unbox_number_as_double(addr, tag_ins, exit);
9870 guard(true, w.name(w.eqi(tag_ins, w.nameImmui(JSVAL_TAG_INT32)), "isInt"), exit);
9871 return w.i2d(w.ldiValuePayload(addr));
// Doubles have tags below JSVAL_TAG_CLEAR on 32-bit.
9875 guard(true, w.name(w.ltui(tag_ins, w.nameImmui(JSVAL_TAG_CLEAR)), "isDouble"), exit);
9880 JSValueType type = v.toObject().isFunction() ? JSVAL_TYPE_FUNOBJ : JSVAL_TYPE_NONFUNOBJ;
9881 return unbox_object(addr, tag_ins, type, exit);
9884 JSValueType type = v.extractNonDoubleObjectTraceType();
9885 return unbox_non_double_object(addr, tag_ins, type, exit);
// Unconditionally load the payload as an object pointer and separately return
// an "is object" predicate instruction; no guard is emitted here.
9889 TraceRecorder::unbox_any_object(Address addr, LIns **obj_ins, LIns **is_obj_ins)
9891 LIns *tag_ins = w.ldiValueTag(addr);
9892 *is_obj_ins = w.eqi(tag_ins, w.nameImmui(JSVAL_TAG_OBJECT));
9893 *obj_ins = w.ldiValuePayload(addr);
// Emit a predicate that is non-zero iff the boxed value at addr is `true`
// (boolean tag AND non-zero payload).
9897 TraceRecorder::is_boxed_true(Address addr)
9899 LIns *tag_ins = w.ldiValueTag(addr);
9900 LIns *bool_ins = w.eqi(tag_ins, w.nameImmui(JSVAL_TAG_BOOLEAN));
9901 LIns *payload_ins = w.ldiValuePayload(addr);
9902 return w.gtiN(w.andi(bool_ins, payload_ins), 0);
// Emit a predicate testing for the magic tag; on 32-bit the `why` payload is
// not compared, only the tag.
9906 TraceRecorder::is_boxed_magic(Address addr, JSWhyMagic why)
9908 LIns *tag_ins = w.ldiValueTag(addr);
9909 return w.eqi(tag_ins, w.nameImmui(JSVAL_TAG_MAGIC));
// Box the value v (trace representation v_ins) into the two words at addr.
// Numbers re-use the original tag/payload when v_ins came straight from
// js_UnboxDouble, demote provable int32 doubles, and otherwise store a double.
// NOTE(review): a few lines appear elided from this excerpt.
9913 TraceRecorder::box_value_into(const Value &v, LIns *v_ins, Address addr)
9916 JS_ASSERT(v_ins->isD());
9917 if (fcallinfo(v_ins) == &js_UnboxDouble_ci) {
// Round-trip: re-store the exact tag/payload the unbox call consumed.
9918 w.stiValueTag(v_ins->callArgN(0), addr);
9919 w.stiValuePayload(v_ins->callArgN(1), addr);
9920 } else if (IsPromotedInt32(v_ins)) {
// The double is provably an int32; store it in int form.
9921 LIns *int_ins = w.demoteToInt32(v_ins);
9922 w.stiValueTag(w.nameImmui(JSVAL_TAG_INT32), addr);
9923 w.stiValuePayload(int_ins, addr);
9930 if (v.isUndefined()) {
9931 box_undefined_into(addr);
9932 } else if (v.isNull()) {
9933 box_null_into(addr);
9935 JSValueTag tag = v.isObject() ? JSVAL_TAG_OBJECT : v.extractNonDoubleObjectTraceTag();
9936 w.stiValueTag(w.nameImmui(tag), addr);
9937 w.stiValuePayload(v_ins, addr);
// On 32-bit, boxing for a native call goes through a stack allocation.
9942 TraceRecorder::box_value_for_native_call(const Value &v, LIns *v_ins)
9944 return box_value_into_alloc(v, v_ins);
// 64-bit value representation: a value is a single tagged quadword.
9947 #elif JS_BITS_PER_WORD == 64
// Store the boxed `undefined` value at addr as one 64-bit word.
9950 TraceRecorder::box_undefined_into(Address addr)
9952 w.stq(w.nameImmq(JSVAL_BITS(JSVAL_VOID)), addr);
// Predicate: the boxed quadword's tag (upper bits beyond JSVAL_TAG_SHIFT)
// equals the tag for the given non-double, non-object type.
9956 TraceRecorder::non_double_object_value_has_type(LIns *v_ins, JSValueType type)
9958 return w.eqi(w.q2i(w.rshuqN(v_ins, JSVAL_TAG_SHIFT)),
9959 w.nameImmui(JSVAL_TYPE_TO_TAG(type)));
// Strip the tag bits from a boxed quadword, leaving the payload pointer.
9963 TraceRecorder::unpack_ptr(LIns *v_ins)
9965 return w.andq(v_ins, w.nameImmq(JSVAL_PAYLOAD_MASK));
// Guard the boxed quadword is a number (below the number-set tag boundary)
// and return it as a double via js_UnboxDouble.
9969 TraceRecorder::unbox_number_as_double(LIns *v_ins, VMSideExit *exit)
9972 w.ltuq(v_ins, w.nameImmq(JSVAL_UPPER_EXCL_SHIFTED_TAG_OF_NUMBER_SET)),
9974 LIns* args[] = { v_ins };
9975 return w.call(&js_UnboxDouble_ci, args);
// Unbox a non-double, non-object value from its boxed quadword: immediates
// for undefined/null, pointer-payload types via unpack_ptr, else the low
// 32 bits; a tag guard pins the record-time type.
9978 inline nanojit::LIns*
9979 TraceRecorder::unbox_non_double_object(LIns* v_ins, JSValueType type, VMSideExit* exit)
9981 JS_ASSERT(type <= JSVAL_UPPER_INCL_TYPE_OF_VALUE_SET);
9983 if (type == JSVAL_TYPE_UNDEFINED) {
9984 unboxed_ins = w.immiUndefined();
9985 } else if (type == JSVAL_TYPE_NULL) {
9986 unboxed_ins = w.immpNull();
9987 } else if (type >= JSVAL_LOWER_INCL_TYPE_OF_PTR_PAYLOAD_SET) {
9988 unboxed_ins = unpack_ptr(v_ins);
9990 JS_ASSERT(type == JSVAL_TYPE_INT32 || type == JSVAL_TYPE_BOOLEAN || type == JSVAL_TYPE_MAGIC);
9991 unboxed_ins = w.q2i(v_ins);
9994 guard(true, non_double_object_value_has_type(v_ins, type), exit);
// Unbox an object: object tags sort highest, so a single unsigned >= compare
// guards objectness; then guard function vs. non-function class.
9999 TraceRecorder::unbox_object(LIns* v_ins, JSValueType type, VMSideExit* exit)
10001 JS_STATIC_ASSERT(JSVAL_TYPE_OBJECT == JSVAL_UPPER_INCL_TYPE_OF_VALUE_SET);
10002 JS_ASSERT(type == JSVAL_TYPE_FUNOBJ || type == JSVAL_TYPE_NONFUNOBJ);
10004 w.geuq(v_ins, w.nameImmq(JSVAL_SHIFTED_TAG_OBJECT)),
10006 v_ins = unpack_ptr(v_ins);
10007 if (type == JSVAL_TYPE_FUNOBJ)
10008 guardClass(v_ins, &js_FunctionClass, exit, LOAD_NORMAL);
10010 guardNotClass(v_ins, &js_FunctionClass, exit, LOAD_NORMAL);
// Unbox the value at addr according to its record-time type v; force_double
// treats any number as a double. Dispatches to the specialized unboxers.
10015 TraceRecorder::unbox_value(const Value &v, Address addr, VMSideExit *exit, bool force_double)
10017 LIns *v_ins = w.ldq(addr);
10019 if (v.isNumber() && force_double)
10020 return unbox_number_as_double(v_ins, exit);
10023 guard(true, non_double_object_value_has_type(v_ins, JSVAL_TYPE_INT32), exit);
10024 return w.i2d(w.q2i(v_ins));
10027 if (v.isDouble()) {
// Doubles occupy the lowest tag range; reinterpret the bits as a double.
10028 guard(true, w.leuq(v_ins, w.nameImmq(JSVAL_SHIFTED_TAG_MAX_DOUBLE)), exit);
10029 return w.qasd(v_ins);
10032 if (v.isObject()) {
10033 JSValueType type = v.toObject().isFunction() ? JSVAL_TYPE_FUNOBJ : JSVAL_TYPE_NONFUNOBJ;
10034 return unbox_object(v_ins, type, exit);
10037 JSValueType type = v.extractNonDoubleObjectTraceType();
10038 return unbox_non_double_object(v_ins, type, exit);
// Load the payload as an object pointer and return an "is object" predicate
// separately; no guard is emitted here.
10042 TraceRecorder::unbox_any_object(Address addr, LIns **obj_ins, LIns **is_obj_ins)
10044 JS_STATIC_ASSERT(JSVAL_TYPE_OBJECT == JSVAL_UPPER_INCL_TYPE_OF_VALUE_SET);
10045 LIns *v_ins = w.ldq(addr);
10046 *is_obj_ins = w.geuq(v_ins, w.nameImmq(JSVAL_TYPE_OBJECT));
10047 *obj_ins = unpack_ptr(v_ins);
// Predicate: the boxed quadword at addr is exactly the boxed `true` value.
10051 TraceRecorder::is_boxed_true(Address addr)
10053 LIns *v_ins = w.ldq(addr);
10054 return w.eqq(v_ins, w.immq(JSVAL_BITS(JSVAL_TRUE)));
// Predicate: the boxed quadword equals the magic value with payload `why`
// (unlike the 32-bit version, the payload is compared too).
10058 TraceRecorder::is_boxed_magic(Address addr, JSWhyMagic why)
10060 LIns *v_ins = w.ldq(addr);
10061 return w.eqq(v_ins, w.nameImmq(BUILD_JSVAL(JSVAL_TAG_MAGIC, why)));
// Produce the boxed quadword for v (trace representation v_ins): reuse the
// original box when v_ins came from js_UnboxDouble, demote provable int32
// doubles, use immediates for null/undefined, and otherwise OR in the
// shifted tag over a pointer or zero-extended 32-bit payload.
10065 TraceRecorder::box_value_for_native_call(const Value &v, LIns *v_ins)
10067 if (v.isNumber()) {
10068 JS_ASSERT(v_ins->isD());
10069 if (fcallinfo(v_ins) == &js_UnboxDouble_ci)
10070 return v_ins->callArgN(0);
10071 if (IsPromotedInt32(v_ins)) {
10072 return w.orq(w.ui2uq(w.demoteToInt32(v_ins)),
10073 w.nameImmq(JSVAL_SHIFTED_TAG_INT32));
// Pure double: its bit pattern is already a valid boxed value.
10075 return w.dasq(v_ins);
10079 return w.nameImmq(JSVAL_BITS(JSVAL_NULL));
10080 if (v.isUndefined())
10081 return w.nameImmq(JSVAL_BITS(JSVAL_VOID));
10083 JSValueTag tag = v.isObject() ? JSVAL_TAG_OBJECT : v.extractNonDoubleObjectTraceTag();
10084 uint64 shiftedTag = ((uint64)tag) << JSVAL_TAG_SHIFT;
10085 LIns *shiftedTag_ins = w.nameImmq(shiftedTag);
10087 if (v.hasPtrPayload())
10088 return w.orq(v_ins, shiftedTag_ins);
10089 return w.orq(w.ui2uq(v_ins), shiftedTag_ins);
// Box v and store the resulting quadword at addr.
10093 TraceRecorder::box_value_into(const Value &v, LIns *v_ins, Address addr)
10095 LIns *boxed_ins = box_value_for_native_call(v, v_ins);
10096 w.st(boxed_ins, addr);
10099 #endif /* JS_BITS_PER_WORD */
// Box v into a fresh stack allocation sized for one Value; word-size agnostic.
10102 TraceRecorder::box_value_into_alloc(const Value &v, LIns *v_ins)
10104 LIns *alloc_ins = w.allocp(sizeof(Value));
10105 box_value_into(v, v_ins, AllocSlotsAddress(alloc_ins));
// Predicate: the jsid's type bits are zero, i.e. it is a string id.
10110 TraceRecorder::is_string_id(LIns *id_ins)
10112 return w.eqp0(w.andp(id_ins, w.nameImmw(JSID_TYPE_MASK)));
// String ids carry a zero type tag, so unboxing is the identity.
10116 TraceRecorder::unbox_string_id(LIns *id_ins)
10118 JS_STATIC_ASSERT(JSID_TYPE_STRING == 0);
// Recover the integer from an int jsid by shifting off the low tag bit.
10123 TraceRecorder::unbox_int_id(LIns *id_ins)
10125 return w.rshiN(w.p2i(id_ins), 1);
// Produce the LIR value for |this| in the current frame. Global frames burn
// the (trace-constant) this object into the trace; function frames use the
// tracked thisv when already computed, abort on wrapped primitives, and
// otherwise compute |this| now (globalObj) and write it back on trace.
10128 JS_REQUIRES_STACK RecordingStatus
10129 TraceRecorder::getThis(LIns*& this_ins)
10131 JSStackFrame *fp = cx->fp();
10133 if (fp->isGlobalFrame()) {
10134 // Top-level code. It is an invariant of the interpreter that fp->thisv
10135 // is non-null. Furthermore, we would not be recording if globalObj
10136 // were not at the end of the scope chain, so `this` can only be one
10137 // object, which we can burn into the trace.
10138 JS_ASSERT(!fp->thisValue().isPrimitive());
10141 JSObject *obj = globalObj->thisObject(cx);
10143 RETURN_ERROR("thisObject hook failed");
10144 JS_ASSERT(&fp->thisValue().toObject() == obj);
10147 this_ins = w.immpObjGC(&fp->thisValue().toObject());
10148 return RECORD_CONTINUE;
10151 JS_ASSERT(fp->callee().getGlobal() == globalObj);
10152 Value& thisv = fp->thisValue();
10154 if (thisv.isObject() || fp->fun()->inStrictMode()) {
10156 * fp->thisValue() has already been computed. Since the
10157 * type-specialization of traces distinguishes between computed and
10158 * uncomputed |this|, the same will be true at run time (or we
10159 * won't get this far).
10161 this_ins = get(&fp->thisValue());
10162 return RECORD_CONTINUE;
10165 /* Don't bother tracing calls on wrapped primitive |this| values. */
10166 if (!thisv.isNullOrUndefined())
10167 RETURN_STOP("wrapping primitive |this|");
10170 * Compute 'this' now. The result is globalObj->thisObject(), which is
10171 * trace-constant. getThisObject writes back to fp->thisValue(), so do
10172 * the same on trace.
10174 if (!fp->computeThis(cx))
10175 RETURN_ERROR("computeThis failed");
10177 /* thisv is a reference, so it'll see the newly computed |this|. */
10178 this_ins = w.immpObjGC(globalObj);
10179 set(&thisv, this_ins);
10180 return RECORD_CONTINUE;
// Emit a guard that obj_ins's class pointer equals (cond=true) or differs
// from (cond=false) clasp. Debug builds name the LIR for readability.
10183 JS_REQUIRES_STACK void
10184 TraceRecorder::guardClassHelper(bool cond, LIns* obj_ins, Class* clasp, VMSideExit* exit,
10187 LIns* class_ins = w.ldpObjClasp(obj_ins, loadQual);
10191 JS_snprintf(namebuf, sizeof namebuf, "%s_clasp", clasp->name);
10192 LIns* clasp_ins = w.name(w.immpNonGC(clasp), namebuf);
10193 JS_snprintf(namebuf, sizeof namebuf, "guard(class is %s)", clasp->name);
10194 LIns* cmp_ins = w.name(w.eqp(class_ins, clasp_ins), namebuf);
10196 LIns* clasp_ins = w.immpNonGC(clasp);
10197 LIns* cmp_ins = w.eqp(class_ins, clasp_ins);
10199 guard(cond, cmp_ins, exit);
// Guard that obj_ins's class IS clasp.
10202 JS_REQUIRES_STACK void
10203 TraceRecorder::guardClass(LIns* obj_ins, Class* clasp, VMSideExit* exit, LoadQual loadQual)
10205 guardClassHelper(true, obj_ins, clasp, exit, loadQual);
// Guard that obj_ins's class is NOT clasp.
10208 JS_REQUIRES_STACK void
10209 TraceRecorder::guardNotClass(LIns* obj_ins, Class* clasp, VMSideExit* exit, LoadQual loadQual)
10211 guardClassHelper(false, obj_ins, clasp, exit, loadQual);
// Guard obj_ins is a dense array, exiting via a fresh snapshot of exitType.
10214 JS_REQUIRES_STACK void
10215 TraceRecorder::guardDenseArray(LIns* obj_ins, ExitType exitType)
10217 guardClass(obj_ins, &js_ArrayClass, snapshot(exitType), LOAD_NORMAL);
// Guard obj_ins is a dense array, exiting via the supplied side exit.
10220 JS_REQUIRES_STACK void
10221 TraceRecorder::guardDenseArray(LIns* obj_ins, VMSideExit* exit)
10223 guardClass(obj_ins, &js_ArrayClass, exit, LOAD_NORMAL);
// Load obj's prototype (record-time and on-trace), guard on whether it is
// null, and return the record-time proto via the out-parameters.
10226 JS_REQUIRES_STACK bool
10227 TraceRecorder::guardHasPrototype(JSObject* obj, LIns* obj_ins,
10228 JSObject** pobj, LIns** pobj_ins,
10231 *pobj = obj->getProto();
10232 *pobj_ins = w.ldpObjProto(obj_ins);
10234 bool cond = *pobj == NULL;
10235 guard(cond, w.name(w.eqp0(*pobj_ins), "guard(proto-not-null)"), exit);
// Guard that no object on obj's prototype chain has indexed properties that
// could show through holes in a dense array. Implemented as shape guards up
// the chain, since adding indexed props or mutating __proto__ changes shapes.
10239 JS_REQUIRES_STACK RecordingStatus
10240 TraceRecorder::guardPrototypeHasNoIndexedProperties(JSObject* obj, LIns* obj_ins, VMSideExit *exit)
10243 * Guard that no object along the prototype chain has any indexed
10244 * properties which might become visible through holes in the array.
10246 if (js_PrototypeHasIndexedProperties(cx, obj))
10247 return RECORD_STOP;
10249 JS_ASSERT(obj->isDenseArray());
10252 * Changing __proto__ on a dense array makes it slow, so we can just bake in
10253 * the current prototype as the first prototype to test. This avoids an
10254 * extra load when running the trace.
10256 obj = obj->getProto();
10259 obj_ins = w.immpObjGC(obj);
10262 * Changing __proto__ on a native object changes its shape, and adding
10263 * indexed properties changes shapes too. And non-native objects never pass
10264 * shape guards. So it's enough to just guard on shapes up the proto chain;
10265 * any change to the proto chain length will make us fail a guard before we
10266 * run off the end of the proto chain.
10269 CHECK_STATUS(guardShape(obj_ins, obj, obj->shape(), "guard(shape)", exit));
10270 obj = obj->getProto();
10271 obj_ins = w.ldpObjProto(obj_ins);
10274 return RECORD_CONTINUE;
10278 * Guard that the object stored in v has the ECMA standard [[DefaultValue]]
10279 * method. Several imacros require this.
10281 JS_REQUIRES_STACK RecordingStatus
10282 TraceRecorder::guardNativeConversion(Value& v)
10284 JSObject* obj = &v.toObject();
10285 LIns* obj_ins = get(&v);
10287 ConvertOp convert = obj->getClass()->convert;
// Only the stub convert hook and js_TryValueOf have standard semantics.
10288 if (convert != Valueify(JS_ConvertStub) && convert != js_TryValueOf)
10289 RETURN_STOP("operand has convert hook");
10291 VMSideExit* exit = snapshot(BRANCH_EXIT);
10292 if (obj->isNative()) {
10293 // The common case. Guard on shape rather than class because it'll
10294 // often be free: we're about to do a shape guard anyway to get the
10295 // .valueOf property of this object, and this guard will be cached.
10296 CHECK_STATUS(guardShape(obj_ins, obj, obj->shape(),
10297 "guardNativeConversion", exit));
10299 // We could specialize to guard on just JSClass.convert, but a mere
10300 // class guard is simpler and slightly faster.
10301 guardClass(obj_ins, obj->getClass(), snapshot(MISMATCH_EXIT), LOAD_NORMAL);
10303 return RECORD_CONTINUE;
// Purge native-tracker entries for the frame being returned from: visit the
// stack slots up to sp, then manually clear the remainder up to nslots.
10306 JS_REQUIRES_STACK void
10307 TraceRecorder::clearReturningFrameFromNativeTracker()
10310 * Clear all tracker entries associated with the frame for the same reason
10311 * described in record_EnterFrame. Reuse the generic visitor to avoid
10312 * duplicating logic. The generic visitor stops at 'sp', whereas we need to
10313 * clear up to script->nslots, so finish the job manually.
10315 ClearSlotsVisitor visitor(nativeFrameTracker);
10316 VisitStackSlots(visitor, cx, 0);
10317 Value *vp = cx->regs->sp;
10318 Value *vpend = cx->fp()->slots() + cx->fp()->script()->nslots;
10319 for (; vp < vpend; ++vp)
10320 nativeFrameTracker.set(vp, NULL);
// Functor that boxes each visited argument into successive Value-sized slots
// at the base address addr. NOTE(review): the struct header line appears
// elided from this excerpt.
10326 BoxArg(TraceRecorder *tr, Address addr)
10327 : tr(tr), addr(addr) {}
10330 void operator()(uintN argi, Value *src) {
10331 tr->box_value_into(*src, tr->get(src), OffsetAddress(addr, argi * sizeof(Value)));
* If we have created an |arguments| object for the frame, we must copy the
10337 * argument values into the object as properties in case it is used after
10338 * this frame returns.
// Flush traced argument and local values into the frame's activation objects
// (arguments object and/or heavyweight call object) before the frame returns,
// via js_PutArgumentsOnTrace / js_PutCallObjectOnTrace.
10340 JS_REQUIRES_STACK AbortableRecordingStatus
10341 TraceRecorder::putActivationObjects()
10343 JSStackFrame *const fp = cx->fp();
10344 bool have_args = fp->hasArgsObj() && !fp->argsObj().isStrictArguments();
10345 bool have_call = fp->isFunctionFrame() && fp->fun()->isHeavyweight();
10347 if (!have_args && !have_call)
10348 return ARECORD_CONTINUE;
10350 if (have_args && !fp->script()->usesArguments) {
10352 * have_args is true, so |arguments| has been accessed, but
10353 * usesArguments is false, so there's no statically visible access.
10354 * It must have been a dodgy access like |f["arguments"]|; just
10355 * abort. (In the case where the record-time property name is not
10356 * "arguments" but a later run-time property name is, we wouldn't have
10357 * emitted the call to js_PutArgumentsOnTrace(), and js_GetArgsValue()
10358 * will deep bail asking for the top JSStackFrame.)
10360 RETURN_STOP_A("dodgy arguments access");
10363 uintN nformal = fp->numFormalArgs();
10364 uintN nactual = fp->numActualArgs();
// Box the widest set of args either consumer needs.
10365 uintN nargs = have_args && have_call ? Max(nformal, nactual)
10366 : have_args ? nactual : nformal;
10370 args_ins = w.allocp(nargs * sizeof(Value));
10371 /* Don't copy all the actuals if we are only boxing for the callobj. */
10372 Address addr = AllocSlotsAddress(args_ins);
10373 if (nargs == nactual)
10374 fp->forEachCanonicalActualArg(BoxArg(this, addr));
10376 fp->forEachFormalArg(BoxArg(this, addr));
10378 args_ins = w.immpNonGC(0);
10382 LIns* argsobj_ins = getFrameObjPtr(fp->addressOfArgs());
10383 LIns* args[] = { args_ins, argsobj_ins, cx_ins };
10384 w.call(&js_PutArgumentsOnTrace_ci, args);
// Heavyweight function: also box the local variable slots for the callobj.
10388 int nslots = fp->fun()->script()->bindings.countVars();
10391 slots_ins = w.allocp(sizeof(Value) * nslots);
10392 for (int i = 0; i < nslots; ++i) {
10393 box_value_into(fp->slots()[i], get(&fp->slots()[i]),
10394 AllocSlotsAddress(slots_ins, i));
10397 slots_ins = w.immpNonGC(0);
10400 LIns* scopeChain_ins = getFrameObjPtr(fp->addressOfScopeChain());
10401 LIns* args[] = { slots_ins, w.nameImmi(nslots), args_ins,
10402 w.nameImmi(fp->numFormalArgs()), scopeChain_ins, cx_ins };
10403 w.call(&js_PutCallObjectOnTrace_ci, args);
10406 return ARECORD_CONTINUE;
// Recorder hook fired when tracing enters a newly pushed (inlined) callee
// frame. Bumps callDepth (aborting past MAX_CALLDEPTH), clears the
// native-stack trackers for args/fixed-slot/stack positions of the new frame,
// initializes the frame's argsObj and scopeChain slots to null on trace, and
// for heavyweight functions emits a call to create a Call object on trace.
// Aborts recording for named-lambda heavyweights and for self-recursive
// inlining beyond one level.
10409 JS_REQUIRES_STACK AbortableRecordingStatus
10410 TraceRecorder::record_EnterFrame()
10412 JSStackFrame* const fp = cx->fp();
10414 if (++callDepth >= MAX_CALLDEPTH)
10415 RETURN_STOP_A("exceeded maximum call depth");
10417 debug_only_stmt(JSAutoByteString funBytes);
10418 debug_only_printf(LC_TMTracer, "EnterFrame %s, callDepth=%d\n",
10419 cx->fp()->fun()->atom ?
10420 js_AtomToPrintableString(cx, cx->fp()->fun()->atom, &funBytes) :
10424 if (LogController.lcbits & LC_TMRecorder) {
10425 js_Disassemble(cx, cx->fp()->script(), JS_TRUE, stdout);
10426 debug_only_print0(LC_TMTracer, "----\n");
10429 LIns* void_ins = w.immiUndefined();
10431 // Before we enter this frame, we need to clear out any dangling insns left
10432 // in the tracer. While we also clear when returning from a function, it is
10433 // possible to have the following sequence of stack usage:
10435 // [fp1]***************** push
10437 // [fp1]*****[fp2] call
10438 // [fp1]*****[fp2]*** push
10440 // Duplicate native stack layout computation: see VisitFrameSlots header comment.
10442 // args: carefully copy stack layout
10443 uintN nactual = fp->numActualArgs();
10444 uintN nformal = fp->numFormalArgs();
10445 if (nactual < nformal) {
10446 // Fill in missing with void.
10447 JS_ASSERT(fp->actualArgs() == fp->formalArgs());
10448 Value *beg = fp->formalArgs() + nactual;
10449 Value *end = fp->formalArgsEnd();
10450 for (Value *vp = beg; vp != end; ++vp) {
10451 nativeFrameTracker.set(vp, NULL);
10454 } else if (nactual > nformal) {
10455 // Although the VM clones the formal args to the top of the stack, due
10456 // to the fact that we only track the canonical arguments (in argument
10457 // order), the native stack offset of the arguments doesn't change. The
10458 // only thing that changes is which js::Value* in the tracker maps to
10459 // that slot. Thus, all we need to do here is fixup the trackers, not
10460 // emit any actual copying on trace.
10461 JS_ASSERT(fp->actualArgs() != fp->formalArgs());
10462 JS_ASSERT(fp->hasOverflowArgs());
// NOTE(review): the -2 below appears to cover the callee/this slots that
// precede the argument vector — confirm against JSStackFrame layout.
10463 Value *srcbeg = fp->actualArgs() - 2;
10464 Value *srcend = fp->actualArgs() + nformal;
10465 Value *dstbeg = fp->formalArgs() - 2;
10466 for (Value *src = srcbeg, *dst = dstbeg; src != srcend; ++src, ++dst) {
10467 nativeFrameTracker.set(dst, NULL);
10468 tracker.set(dst, tracker.get(src));
10469 nativeFrameTracker.set(src, NULL);
10470 tracker.set(src, NULL);
10474 // argsObj: clear and set to null
10475 nativeFrameTracker.set(fp->addressOfArgs(), NULL);
10476 setFrameObjPtr(fp->addressOfArgs(), w.immpNull());
10478 // scopeChain: clear, initialize before snapshot, set below
10479 nativeFrameTracker.set(fp->addressOfScopeChain(), NULL);
10480 setFrameObjPtr(fp->addressOfScopeChain(), w.immpNull());
10482 // nfixed: clear and set to undefined
10483 Value *vp = fp->slots();
10484 Value *vpstop = vp + fp->numFixed();
10485 for (; vp < vpstop; ++vp) {
10486 nativeFrameTracker.set(vp, NULL);
10490 // nfixed to nslots: clear
10492 vpstop = fp->slots() + fp->numSlots();
10493 for (; vp < vpstop; ++vp)
10494 nativeFrameTracker.set(vp, NULL);
10496 LIns* callee_ins = get(&cx->fp()->calleeValue());
10497 LIns* scopeChain_ins = w.ldpObjParent(callee_ins);
10499 // set scopeChain for real
10500 if (cx->fp()->fun()->isHeavyweight()) {
10501 if (js_IsNamedLambda(cx->fp()->fun()))
10502 RETURN_STOP_A("can't call named lambda heavyweight on trace");
10504 LIns* fun_ins = w.nameImmpNonGC(cx->fp()->fun());
10506 LIns* args[] = { scopeChain_ins, callee_ins, fun_ins, cx_ins };
10507 LIns* call_ins = w.call(&js_CreateCallObjectOnTrace_ci, args);
// A null return from js_CreateCallObjectOnTrace means OOM: exit the trace.
10508 guard(false, w.eqp0(call_ins), OOM_EXIT);
10510 setFrameObjPtr(fp->addressOfScopeChain(), call_ins);
10512 setFrameObjPtr(fp->addressOfScopeChain(), scopeChain_ins);
10515 /* Try inlining one level in case this recursion doesn't go too deep. */
10516 if (fp->script() == fp->prev()->script() &&
10517 fp->prev()->prev() && fp->prev()->prev()->script() == fp->script()) {
10518 RETURN_STOP_A("recursion started inlining");
10521 return ARECORD_CONTINUE;
// Recorder hook fired after the interpreter pops an inlined callee frame.
// Decrements callDepth — aborting if we return out of the frame in which
// tracing started — then publishes rval_ins as the call expression's result
// at stack slot -1 of the (now current) caller frame.
10524 JS_REQUIRES_STACK AbortableRecordingStatus
10525 TraceRecorder::record_LeaveFrame()
10527 debug_only_stmt(JSStackFrame *fp = cx->fp();)
10529 JS_ASSERT(js_CodeSpec[js_GetOpcode(cx, fp->script(),
10530 cx->regs->pc)].length == JSOP_CALL_LENGTH);
10532 if (callDepth-- <= 0)
10533 RETURN_STOP_A("returned out of a loop we started tracing");
10535 // LeaveFrame gets called after the interpreter popped the frame and
10536 // stored rval, so cx->fp() not cx->fp()->prev, and -1 not 0.
10538 set(&stackval(-1), rval_ins);
10539 return ARECORD_CONTINUE;
// JSOP_PUSH: push the value |undefined| onto the tracked operand stack.
10542 JS_REQUIRES_STACK AbortableRecordingStatus
10543 TraceRecorder::record_JSOP_PUSH()
10545 stack(0, w.immiUndefined());
10546 return ARECORD_CONTINUE;
// JSOP_POPV: pop the top of stack and store it as the entry frame's return
// value (boxed into JSStackFrame::rval on trace).
10549 JS_REQUIRES_STACK AbortableRecordingStatus
10550 TraceRecorder::record_JSOP_POPV()
10552 Value& rval = stackval(-1);
10554 // Store it in cx->fp()->rval. NB: Tricky dependencies. cx->fp() is the right
10555 // frame because POPV appears only in global and eval code and we don't
10556 // trace JSOP_EVAL or leaving the frame where tracing started.
10557 LIns *fp_ins = entryFrameIns();
10558 box_value_into(rval, get(&rval), StackFrameAddress(fp_ins,
10559 JSStackFrame::offsetOfReturnValue()));
10560 return ARECORD_CONTINUE;
// JSOP_ENTERWITH is not traceable: abort recording unconditionally.
10563 JS_REQUIRES_STACK AbortableRecordingStatus
10564 TraceRecorder::record_JSOP_ENTERWITH()
10566 return ARECORD_STOP;
// JSOP_LEAVEWITH is not traceable: abort recording unconditionally.
10569 JS_REQUIRES_STACK AbortableRecordingStatus
10570 TraceRecorder::record_JSOP_LEAVEWITH()
10572 return ARECORD_STOP;
// Trace-callable helper that fires the Probes enter/exit hooks for a JS
// function call when MOZ_TRACE_JSCALLS instrumentation is compiled in.
// |enter| selects between Probes::enterJSFun and Probes::exitJSFun.
// NOTE(review): the return value convention (presumably JS_TRUE on success)
// is checked by the guard emitted at the call site — confirm against Probes.
10575 static JSBool JS_FASTCALL
10576 functionProbe(JSContext *cx, JSFunction *fun, int enter)
10578 #ifdef MOZ_TRACE_JSCALLS
10579 JSScript *script = fun ? FUN_SCRIPT(fun) : NULL;
10581 Probes::enterJSFun(cx, fun, script, enter);
10583 Probes::exitJSFun(cx, fun, script, enter);
// CallInfo descriptor so the probe can be invoked from generated LIR.
10588 JS_DEFINE_CALLINFO_3(static, BOOL, functionProbe, CONTEXT, FUNCTION, INT32, 0, ACCSET_ALL)
// JSOP_RETURN: at callDepth 0 a return terminates the loop being traced;
// otherwise put the activation objects (args/Call) back, fire the exit probe
// if call tracking is active, and capture the return value in rval_ins
// (using |this| instead when a constructor returns a primitive).
10590 JS_REQUIRES_STACK AbortableRecordingStatus
10591 TraceRecorder::record_JSOP_RETURN()
10593 /* A return from callDepth 0 terminates the current loop, except for recursion. */
10594 if (callDepth == 0) {
10595 AUDIT(returnLoopExits);
10599 CHECK_STATUS_A(putActivationObjects());
10601 if (Probes::callTrackingActive(cx)) {
10602 LIns* args[] = { w.immi(0), w.nameImmpNonGC(cx->fp()->fun()), cx_ins };
10603 LIns* call_ins = w.call(&functionProbe_ci, args);
10604 guard(false, w.eqi0(call_ins), MISMATCH_EXIT);
10607 /* If we inlined this function call, make the return value available to the caller code. */
10608 Value& rval = stackval(-1);
10609 JSStackFrame *fp = cx->fp();
// Per ES3/ES5 [[Construct]], a constructor returning a primitive yields |this|.
10610 if (fp->isConstructing() && rval.isPrimitive()) {
10611 rval_ins = get(&fp->thisValue());
10613 rval_ins = get(&rval);
10615 debug_only_stmt(JSAutoByteString funBytes);
10616 debug_only_printf(LC_TMTracer,
10617 "returning from %s\n",
10619 js_AtomToPrintableString(cx, fp->fun()->atom, &funBytes) :
10621 clearReturningFrameFromNativeTracker();
10623 return ARECORD_CONTINUE;
// JSOP_GOTO: use source notes to distinguish loop exits (break/continue to an
// outer loop) from benign downward gotos; labeled breaks abort recording.
10626 JS_REQUIRES_STACK AbortableRecordingStatus
10627 TraceRecorder::record_JSOP_GOTO()
10630 * If we hit a break or a continue to an outer loop, end the loop and
10631 * generate an always-taken loop exit guard. For other downward gotos
10632 * (like if/else) continue recording.
10634 jssrcnote* sn = js_GetSrcNote(cx->fp()->script(), cx->regs->pc);
10637 if (SN_TYPE(sn) == SRC_BREAK) {
10638 AUDIT(breakLoopExits);
10643 * Tracing labeled break isn't impossible, but does require potentially
10644 * fixing up the block chain. See bug 616119.
10646 if (SN_TYPE(sn) == SRC_BREAK2LABEL || SN_TYPE(sn) == SRC_CONT2LABEL)
10647 RETURN_STOP_A("labeled break");
10649 return ARECORD_CONTINUE;
// JSOP_IFEQ: conditional-branch recorder; notes control-flow merge points so
// later opcodes can be recorded correctly.
10652 JS_REQUIRES_STACK AbortableRecordingStatus
10653 TraceRecorder::record_JSOP_IFEQ()
10655 trackCfgMerges(cx->regs->pc);
// JSOP_IFNE: conditional-branch recorder (counterpart of JSOP_IFEQ).
10659 JS_REQUIRES_STACK AbortableRecordingStatus
10660 TraceRecorder::record_JSOP_IFNE()
// Emit LIR that creates an |arguments| object on trace for the current frame
// by calling js_NewArgumentsOnTrace(cx, global, argc, callee); a null result
// means OOM and exits the trace. Returns the LIR instruction holding the new
// arguments object.
10666 TraceRecorder::newArguments(LIns* callee_ins)
10668 LIns* global_ins = w.immpObjGC(globalObj);
10669 LIns* argc_ins = w.nameImmi(cx->fp()->numActualArgs());
10671 LIns* args[] = { callee_ins, argc_ins, global_ins, cx_ins };
10672 LIns* argsobj_ins = w.call(&js_NewArgumentsOnTrace_ci, args);
10673 guard(false, w.eqp0(argsobj_ins), OOM_EXIT);
10675 return argsobj_ins;
// JSOP_ARGUMENTS: push the frame's |arguments| object, lazily creating it on
// trace. Aborts if |arguments| was overridden or the function is strict.
// If the frame's args slot is a known immediate (set to null by EnterFrame on
// this trace) we create unconditionally; otherwise we emit a runtime
// null-check and create only when not yet present.
10678 JS_REQUIRES_STACK AbortableRecordingStatus
10679 TraceRecorder::record_JSOP_ARGUMENTS()
10681 JSStackFrame* const fp = cx->fp();
10683 /* In an eval, 'arguments' will be a BINDNAME, which we don't trace. */
10684 JS_ASSERT(!fp->isEvalFrame());
10686 if (fp->hasOverriddenArgs())
10687 RETURN_STOP_A("Can't trace |arguments| if |arguments| is assigned to");
10688 if (fp->fun()->inStrictMode())
10689 RETURN_STOP_A("Can't trace strict-mode arguments");
10691 LIns* a_ins = getFrameObjPtr(fp->addressOfArgs());
10693 LIns* callee_ins = get(&fp->calleeValue());
10694 if (a_ins->isImmP()) {
10695 // |arguments| is set to 0 by EnterFrame on this trace, so call to create it.
10696 args_ins = newArguments(callee_ins);
10698 // Generate LIR to create arguments only if it has not already been created.
// The result is staged through a stack slot (mem_ins) so both branches of
// the runtime null-check can store into the same location.
10700 LIns* mem_ins = w.allocp(sizeof(JSObject *));
10702 LIns* isZero_ins = w.eqp0(a_ins);
// Constant-fold the check when the comparison is a compile-time immediate.
10703 if (isZero_ins->isImmI(0)) {
10704 w.stAlloc(a_ins, mem_ins);
10705 } else if (isZero_ins->isImmI(1)) {
10706 LIns* call_ins = newArguments(callee_ins);
10707 w.stAlloc(call_ins, mem_ins);
10709 LIns* br1 = w.jtUnoptimizable(isZero_ins);
10710 w.stAlloc(a_ins, mem_ins);
10711 LIns* br2 = w.j(NULL);
10714 LIns* call_ins = newArguments(callee_ins);
10715 w.stAlloc(call_ins, mem_ins);
10718 args_ins = w.ldpAlloc(mem_ins);
10721 stack(0, args_ins);
10722 setFrameObjPtr(fp->addressOfArgs(), args_ins);
10723 return ARECORD_CONTINUE;
// JSOP_DUP: duplicate the top-of-stack value's tracked instruction.
10726 JS_REQUIRES_STACK AbortableRecordingStatus
10727 TraceRecorder::record_JSOP_DUP()
10729 stack(0, get(&stackval(-1)));
10730 return ARECORD_CONTINUE;
// JSOP_DUP2: duplicate the top two stack values, preserving their order.
10733 JS_REQUIRES_STACK AbortableRecordingStatus
10734 TraceRecorder::record_JSOP_DUP2()
10736 stack(0, get(&stackval(-2)));
10737 stack(1, get(&stackval(-1)));
10738 return ARECORD_CONTINUE;
// JSOP_SWAP: exchange the tracked instructions of the top two stack values.
// Both operands are loaded before any store so neither read sees the other's
// update.
10741 JS_REQUIRES_STACK AbortableRecordingStatus
10742 TraceRecorder::record_JSOP_SWAP()
10744 Value& l = stackval(-2);
10745 Value& r = stackval(-1);
10746 LIns* l_ins = get(&l);
10747 LIns* r_ins = get(&r);
10750 return ARECORD_CONTINUE;
// JSOP_PICK: move the value n-down from the top of the stack to the top,
// shifting the intervening values down by one slot. |n| comes from the
// opcode's immediate operand (pc[1]).
10753 JS_REQUIRES_STACK AbortableRecordingStatus
10754 TraceRecorder::record_JSOP_PICK()
10756 Value* sp = cx->regs->sp;
10757 jsint n = cx->regs->pc[1];
10758 JS_ASSERT(sp - (n+1) >= cx->fp()->base());
10759 LIns* top = get(sp - (n+1));
10760 for (jsint i = 0; i < n; ++i)
10761 set(sp - (n+1) + i, get(sp - n + i));
10763 return ARECORD_CONTINUE;
// JSOP_SETCONST is not traceable: abort recording unconditionally.
10766 JS_REQUIRES_STACK AbortableRecordingStatus
10767 TraceRecorder::record_JSOP_SETCONST()
10769 return ARECORD_STOP;
// JSOP_BITOR: record as a 32-bit integer OR (LIR_ori).
10772 JS_REQUIRES_STACK AbortableRecordingStatus
10773 TraceRecorder::record_JSOP_BITOR()
10775 return InjectStatus(binary(LIR_ori));
// JSOP_BITXOR: record as a 32-bit integer XOR (LIR_xori).
10778 JS_REQUIRES_STACK AbortableRecordingStatus
10779 TraceRecorder::record_JSOP_BITXOR()
10781 return InjectStatus(binary(LIR_xori));
// JSOP_BITAND: record as a 32-bit integer AND (LIR_andi).
10784 JS_REQUIRES_STACK AbortableRecordingStatus
10785 TraceRecorder::record_JSOP_BITAND()
10787 return InjectStatus(binary(LIR_andi));
// JSOP_EQ: record loose equality via the shared equality() helper.
10790 JS_REQUIRES_STACK AbortableRecordingStatus
10791 TraceRecorder::record_JSOP_EQ()
10793 return equality(false, true);
// JSOP_NE: record loose inequality; first argument negates the comparison.
10796 JS_REQUIRES_STACK AbortableRecordingStatus
10797 TraceRecorder::record_JSOP_NE()
10799 return equality(true, true);
// JSOP_LT: record a relational compare using double less-than (LIR_ltd).
10802 JS_REQUIRES_STACK AbortableRecordingStatus
10803 TraceRecorder::record_JSOP_LT()
10805 return relational(LIR_ltd, true);
// JSOP_LE: record a relational compare using double less-or-equal (LIR_led).
10808 JS_REQUIRES_STACK AbortableRecordingStatus
10809 TraceRecorder::record_JSOP_LE()
10811 return relational(LIR_led, true);
// JSOP_GT: record a relational compare using double greater-than (LIR_gtd).
10814 JS_REQUIRES_STACK AbortableRecordingStatus
10815 TraceRecorder::record_JSOP_GT()
10817 return relational(LIR_gtd, true);
// JSOP_GE: record a relational compare using double greater-or-equal (LIR_ged).
10820 JS_REQUIRES_STACK AbortableRecordingStatus
10821 TraceRecorder::record_JSOP_GE()
10823 return relational(LIR_ged, true);
// JSOP_LSH: record as a 32-bit left shift (LIR_lshi).
10826 JS_REQUIRES_STACK AbortableRecordingStatus
10827 TraceRecorder::record_JSOP_LSH()
10829 return InjectStatus(binary(LIR_lshi));
// JSOP_RSH: record as a signed (arithmetic) 32-bit right shift (LIR_rshi).
10832 JS_REQUIRES_STACK AbortableRecordingStatus
10833 TraceRecorder::record_JSOP_RSH()
10835 return InjectStatus(binary(LIR_rshi));
// JSOP_URSH: record as an unsigned (logical) 32-bit right shift (LIR_rshui).
10838 JS_REQUIRES_STACK AbortableRecordingStatus
10839 TraceRecorder::record_JSOP_URSH()
10841 return InjectStatus(binary(LIR_rshui));
// JSOP_ADD: if either operand is an object, guard its conversion and defer to
// an imacro to run valueOf/toString; if either operand is a string, emit a
// string concatenation; otherwise record a numeric add (LIR_addd).
10844 JS_REQUIRES_STACK AbortableRecordingStatus
10845 TraceRecorder::record_JSOP_ADD()
10847 Value& r = stackval(-1);
10848 Value& l = stackval(-2);
10850 if (!l.isPrimitive()) {
10851 CHECK_STATUS_A(guardNativeConversion(l));
10852 if (!r.isPrimitive()) {
10853 CHECK_STATUS_A(guardNativeConversion(r));
10854 return InjectStatus(callImacro(add_imacros.obj_obj));
10856 return InjectStatus(callImacro(add_imacros.obj_any));
10858 if (!r.isPrimitive()) {
10859 CHECK_STATUS_A(guardNativeConversion(r));
10860 return InjectStatus(callImacro(add_imacros.any_obj));
10863 if (l.isString() || r.isString()) {
10864 LIns* args[] = { stringify(r), stringify(l), cx_ins };
10865 LIns* concat = w.call(&js_ConcatStrings_ci, args);
// js_ConcatStrings returns null on OOM; exit the trace in that case.
10866 guard(false, w.eqp0(concat), OOM_EXIT);
10868 return ARECORD_CONTINUE;
10871 return InjectStatus(binary(LIR_addd));
// JSOP_SUB: record as a double subtraction (LIR_subd).
10874 JS_REQUIRES_STACK AbortableRecordingStatus
10875 TraceRecorder::record_JSOP_SUB()
10877 return InjectStatus(binary(LIR_subd));
// JSOP_MUL: record as a double multiplication (LIR_muld).
10880 JS_REQUIRES_STACK AbortableRecordingStatus
10881 TraceRecorder::record_JSOP_MUL()
10883 return InjectStatus(binary(LIR_muld));
// JSOP_DIV: record as a double division (LIR_divd).
10886 JS_REQUIRES_STACK AbortableRecordingStatus
10887 TraceRecorder::record_JSOP_DIV()
10889 return InjectStatus(binary(LIR_divd));
// JSOP_MOD: record as a double modulus (LIR_modd).
10892 JS_REQUIRES_STACK AbortableRecordingStatus
10893 TraceRecorder::record_JSOP_MOD()
10895 return InjectStatus(binary(LIR_modd));
// JSOP_NOT: logical negation specialized per operand type — boolean/undefined
// compare against 1; numbers are false iff non-zero and not NaN; objects are
// false iff null; strings are false iff empty (zero length).
10898 JS_REQUIRES_STACK AbortableRecordingStatus
10899 TraceRecorder::record_JSOP_NOT()
10901 Value& v = stackval(-1);
10902 if (v.isBoolean() || v.isUndefined()) {
10903 set(&v, w.eqi0(w.eqiN(get(&v), 1)));
10904 return ARECORD_CONTINUE;
10906 if (v.isNumber()) {
10907 LIns* v_ins = get(&v);
// Result is (v == 0) || isnan(v): the eqd(v,v) term is false only for NaN.
10908 set(&v, w.ori(w.eqd0(v_ins), w.eqi0(w.eqd(v_ins, v_ins))));
10909 return ARECORD_CONTINUE;
10911 if (v.isObjectOrNull()) {
10912 set(&v, w.eqp0(get(&v)));
10913 return ARECORD_CONTINUE;
10915 JS_ASSERT(v.isString());
10916 set(&v, w.eqp0(w.getStringLength(get(&v))));
10917 return ARECORD_CONTINUE;
// JSOP_BITNOT: record as a 32-bit integer bitwise NOT (LIR_noti).
10920 JS_REQUIRES_STACK AbortableRecordingStatus
10921 TraceRecorder::record_JSOP_BITNOT()
10923 return InjectStatus(unary(LIR_noti));
// JSOP_NEG: unary minus. Objects defer to the imacro path; promoted-int32
// numbers negate in integer arithmetic with an overflow guard (taking care
// never to produce -0 as an int); undefined yields NaN; strings are converted
// via js_StringToNumber then negated; booleans are converted to double first.
10926 JS_REQUIRES_STACK AbortableRecordingStatus
10927 TraceRecorder::record_JSOP_NEG()
10929 Value& v = stackval(-1);
10931 if (!v.isPrimitive()) {
10932 CHECK_STATUS_A(guardNativeConversion(v));
10933 return InjectStatus(callImacro(unary_imacros.sign));
10936 if (v.isNumber()) {
10940 * If we're a promoted integer, we have to watch out for 0s since -0 is
10941 * a double. Only follow this path if we're not an integer that's 0 and
10942 * we're not a double that's zero.
10945 !oracle->isInstructionUndemotable(cx->regs->pc) &&
10946 IsPromotedInt32(a) &&
10947 (!v.isInt32() || v.toInt32() != 0) &&
10948 (!v.isDouble() || v.toDouble() != 0) &&
10949 -v.toNumber() == (int)-v.toNumber())
10951 VMSideExit* exit = snapshot(OVERFLOW_EXIT);
// Negate as 0 - x with an overflow-checked subtract (guard_xov).
10952 a = guard_xov(LIR_subi, w.immi(0), w.demoteToInt32(a), exit);
10953 if (!a->isImmI() && a->isop(LIR_subxovi)) {
10954 guard(false, w.eqiN(a, 0), exit); // make sure we don't lose a -0
10962 return ARECORD_CONTINUE;
10966 set(&v, w.immd(-0.0));
10967 return ARECORD_CONTINUE;
10970 if (v.isUndefined()) {
10971 set(&v, w.immd(js_NaN));
10972 return ARECORD_CONTINUE;
10975 if (v.isString()) {
10976 LIns* ok_ins = w.allocp(sizeof(JSBool));
10977 LIns* args[] = { ok_ins, get(&v), cx_ins };
10978 LIns* num_ins = w.call(&js_StringToNumber_ci, args);
10980 w.name(w.eqi0(w.ldiAlloc(ok_ins)), "guard(oom)"),
10982 set(&v, w.negd(num_ins));
10983 return ARECORD_CONTINUE;
10986 JS_ASSERT(v.isBoolean());
10987 set(&v, w.negd(w.i2d(get(&v))));
10988 return ARECORD_CONTINUE;
// JSOP_POS: unary plus (ToNumber). Objects defer to the imacro path;
// undefined yields NaN; strings are converted via js_StringToNumber with an
// OOM guard; booleans are widened to double. Numbers pass through unchanged.
10991 JS_REQUIRES_STACK AbortableRecordingStatus
10992 TraceRecorder::record_JSOP_POS()
10994 Value& v = stackval(-1);
10996 if (!v.isPrimitive()) {
10997 CHECK_STATUS_A(guardNativeConversion(v));
10998 return InjectStatus(callImacro(unary_imacros.sign));
11002 return ARECORD_CONTINUE;
11005 set(&v, w.immd(0));
11006 return ARECORD_CONTINUE;
11008 if (v.isUndefined()) {
11009 set(&v, w.immd(js_NaN));
11010 return ARECORD_CONTINUE;
11013 if (v.isString()) {
11014 LIns* ok_ins = w.allocp(sizeof(JSBool));
11015 LIns* args[] = { ok_ins, get(&v), cx_ins };
11016 LIns* num_ins = w.call(&js_StringToNumber_ci, args);
11018 w.name(w.eqi0(w.ldiAlloc(ok_ins)), "guard(oom)"),
11021 return ARECORD_CONTINUE;
11024 JS_ASSERT(v.isBoolean());
11025 set(&v, w.i2d(get(&v)));
11026 return ARECORD_CONTINUE;
// JSOP_PRIMTOP: no-op on trace (see comment below).
11029 JS_REQUIRES_STACK AbortableRecordingStatus
11030 TraceRecorder::record_JSOP_PRIMTOP()
11032 // Either this opcode does nothing or we couldn't have traced here, because
11033 // we'd have thrown an exception -- so do nothing if we actually hit this.
11034 return ARECORD_CONTINUE;
// JSOP_OBJTOP: no-op on trace except that XML objects abort recording.
11037 JS_REQUIRES_STACK AbortableRecordingStatus
11038 TraceRecorder::record_JSOP_OBJTOP()
11040 Value& v = stackval(-1);
11041 RETURN_IF_XML_A(v);
11042 return ARECORD_CONTINUE;
// Resolve |ctor.prototype| at record time for a constructor built by
// js_InitClass, and emit it as an immediate GC-pointer (proto_ins). The
// property is READONLY|PERMANENT, so the record-time value is stable and the
// lookup cannot trigger a re-entrant recorder (asserted via localtm).
11046 TraceRecorder::getClassPrototype(JSObject* ctor, LIns*& proto_ins)
11048 // ctor must be a function created via js_InitClass.
11050 Class *clasp = FUN_CLASP(GET_FUNCTION_PRIVATE(cx, ctor));
11053 TraceMonitor &localtm = *traceMonitor;
11057 if (!ctor->getProperty(cx, ATOM_TO_JSID(cx->runtime->atomState.classPrototypeAtom), &pval))
11058 RETURN_ERROR("error getting prototype from constructor");
11060 // ctor.prototype is a permanent data property, so this lookup cannot have
11062 JS_ASSERT(localtm.recorder);
11067 ok = JS_GetPropertyAttributes(cx, ctor, js_class_prototype_str, &attrs, &found);
11070 JS_ASSERT((~attrs & (JSPROP_READONLY | JSPROP_PERMANENT)) == 0);
11073 // Since ctor was built by js_InitClass, we can assert (rather than check)
11074 // that pval is usable.
11075 JS_ASSERT(!pval.isPrimitive());
11076 JSObject *proto = &pval.toObject();
11077 JS_ASSERT_IF(clasp != &js_ArrayClass, proto->emptyShapes[0]->getClass() == clasp);
11079 proto_ins = w.immpObjGC(proto);
11080 return RECORD_CONTINUE;
// Overload keyed by JSProtoKey: fetch the class prototype for |key| from the
// global object via js_GetClassPrototype and emit it as an immediate
// GC-pointer. Asserts the lookup did not re-enter the recorder and that
// native protos carry a matching emptyShape.
11084 TraceRecorder::getClassPrototype(JSProtoKey key, LIns*& proto_ins)
11087 TraceMonitor &localtm = *traceMonitor;
11091 if (!js_GetClassPrototype(cx, globalObj, key, &proto))
11092 RETURN_ERROR("error in js_GetClassPrototype");
11094 // This should not have reentered.
11095 JS_ASSERT(localtm.recorder);
11098 /* Double-check that a native proto has a matching emptyShape. */
11099 if (key != JSProto_Array) {
11100 JS_ASSERT(proto->isNative());
11101 JS_ASSERT(proto->emptyShapes);
11102 EmptyShape *empty = proto->emptyShapes[0];
11104 JS_ASSERT(JSCLASS_CACHED_PROTO_KEY(empty->getClass()) == key);
11108 proto_ins = w.immpObjGC(proto);
11109 return RECORD_CONTINUE;
// Sentinel pendingSpecializedNative value meaning "native call already fully
// handled; NativeCallComplete should not run its completion callback".
11112 #define IGNORE_NATIVE_CALL_COMPLETE_CALLBACK ((JSSpecializedNative*)1)
// Record |new String(v)|: non-primitive arguments go through the imacro path
// (to run conversion hooks); otherwise stringify the argument and call the
// js_String_tn constructor helper on trace, guarding against OOM.
11115 TraceRecorder::newString(JSObject* ctor, uint32 argc, Value* argv, Value* rval)
11117 JS_ASSERT(argc == 1);
11119 if (!argv[0].isPrimitive()) {
11120 CHECK_STATUS(guardNativeConversion(argv[0]));
11121 return callImacro(new_imacros.String);
11125 CHECK_STATUS(getClassPrototype(ctor, proto_ins));
11127 LIns* args[] = { stringify(argv[0]), proto_ins, cx_ins };
11128 LIns* obj_ins = w.call(&js_String_tn_ci, args);
11129 guard(false, w.eqp0(obj_ins), OOM_EXIT);
11131 set(rval, obj_ins);
11132 pendingSpecializedNative = IGNORE_NATIVE_CALL_COMPLETE_CALLBACK;
11133 return RECORD_CONTINUE;
// Record |new Array(...)| with three cases: no arguments -> empty dense
// array; a single numeric argument -> unallocated dense array of that length
// (aborting if the double doesn't fit a uint32); otherwise an allocated dense
// array whose slots are filled from the boxed arguments. Every allocation is
// OOM-guarded.
11137 TraceRecorder::newArray(JSObject* ctor, uint32 argc, Value* argv, Value* rval)
11140 CHECK_STATUS(getClassPrototype(ctor, proto_ins));
11144 LIns *args[] = { proto_ins, cx_ins };
11145 arr_ins = w.call(&js::NewDenseEmptyArray_ci, args);
11146 guard(false, w.eqp0(arr_ins), OOM_EXIT);
11148 } else if (argc == 1 && argv[0].isNumber()) {
11149 /* Abort on RangeError if the double doesn't fit in a uint. */
11151 CHECK_STATUS(makeNumberUint32(get(argv), &len_ins));
11152 LIns *args[] = { proto_ins, len_ins, cx_ins };
11153 arr_ins = w.call(&js::NewDenseUnallocatedArray_ci, args);
11154 guard(false, w.eqp0(arr_ins), OOM_EXIT);
11157 LIns *args[] = { proto_ins, w.nameImmi(argc), cx_ins };
11158 arr_ins = w.call(&js::NewDenseAllocatedArray_ci, args);
11159 guard(false, w.eqp0(arr_ins), OOM_EXIT);
11161 // arr->slots[i] = box_jsval(vp[i]); for i in 0..argc
11162 LIns *slots_ins = NULL;
11163 for (uint32 i = 0; i < argc && !outOfMemory(); i++) {
11164 stobj_set_dslot(arr_ins, i, slots_ins, argv[i], get(&argv[i]));
11168 set(rval, arr_ins);
11169 pendingSpecializedNative = IGNORE_NATIVE_CALL_COMPLETE_CALLBACK;
11170 return RECORD_CONTINUE;
// Fold a native hook's JSBool result into cx->builtinStatus on trace: OR in
// BUILTIN_ERROR exactly when ok_ins is JS_FALSE, then store the updated
// status back to the interpreter state.
11173 JS_REQUIRES_STACK void
11174 TraceRecorder::propagateFailureToBuiltinStatus(LIns* ok_ins, LIns*& status_ins)
11177 * Check the boolean return value (ok_ins) of a native JSNative,
11178 * JSFastNative, or JSPropertyOp hook for failure. On failure, set the
11179 * BUILTIN_ERROR bit of cx->builtinStatus.
11181 * If the return value (ok_ins) is true, status' == status. Otherwise
11182 * status' = status | BUILTIN_ERROR. We calculate (rval&1)^1, which is 1
11183 * if rval is JS_FALSE (error), and then shift that by 1, which is the log2
11184 * of BUILTIN_ERROR.
11186 JS_STATIC_ASSERT(((JS_TRUE & 1) ^ 1) << 1 == 0);
11187 JS_STATIC_ASSERT(((JS_FALSE & 1) ^ 1) << 1 == BUILTIN_ERROR);
11188 status_ins = w.ori(status_ins, w.lshiN(w.xoriN(w.andiN(ok_ins, 1), 1), 1));
11189 w.stStateField(status_ins, builtinStatus);
// Emit a deep-bail-capable call to a Shape's native getter or setter op
// (setflag selects which). The boxed value buffer (addr_boxed_val_ins, an
// alloca) is published through nativeVp/nativeVpLen around the call, the
// CallInfo is built dynamically from the shape's op address, and on return
// the call's success is folded into builtinStatus and guarded.
11192 JS_REQUIRES_STACK void
11193 TraceRecorder::emitNativePropertyOp(const Shape* shape, LIns* obj_ins,
11194 bool setflag, LIns* addr_boxed_val_ins)
11196 JS_ASSERT(addr_boxed_val_ins->isop(LIR_allocp));
11197 JS_ASSERT(setflag ? !shape->hasSetterValue() : !shape->hasGetterValue());
11198 JS_ASSERT(setflag ? !shape->hasDefaultSetter() : !shape->hasDefaultGetterOrIsMethod());
11200 enterDeepBailCall();
11202 w.stStateField(addr_boxed_val_ins, nativeVp);
11203 w.stStateField(w.immi(1), nativeVpLen);
11205 CallInfo* ci = new (traceAlloc()) CallInfo();
11206 /* Setters and getters have their initial arguments in common. */
// possibleArgs is laid out so setters use all five entries while getters
// skip the leading (strict-mode) slot via |args = possibleArgs + 1|.
11207 LIns* possibleArgs[] = { NULL, NULL, w.immpIdGC(SHAPE_USERID(shape)), obj_ins, cx_ins };
11210 ci->_address = uintptr_t(shape->setterOp());
11211 ci->_typesig = CallInfo::typeSig5(ARGTYPE_I, ARGTYPE_P, ARGTYPE_P, ARGTYPE_P, ARGTYPE_B,
11213 possibleArgs[0] = addr_boxed_val_ins;
11214 possibleArgs[1] = strictModeCode_ins;
11215 args = possibleArgs;
11217 ci->_address = uintptr_t(shape->getterOp());
11218 ci->_typesig = CallInfo::typeSig4(ARGTYPE_I, ARGTYPE_P, ARGTYPE_P, ARGTYPE_P, ARGTYPE_P);
11219 possibleArgs[1] = addr_boxed_val_ins;
11220 args = possibleArgs + 1;
11223 ci->_storeAccSet = ACCSET_STORE_ANY;
11224 ci->_abi = ABI_CDECL;
11226 ci->_name = "JSPropertyOp";
11228 LIns* ok_ins = w.call(ci, args);
11230 // Cleanup. Immediately clear nativeVp before we might deep bail.
11231 w.stStateField(w.immpNull(), nativeVp);
11232 leaveDeepBailCall();
11234 // Guard that the call succeeded and builtinStatus is still 0.
11235 // If the native op succeeds but we deep-bail here, the result value is
11236 // lost! Therefore this can only be used for setters of shared properties.
11237 // In that case we ignore the result value anyway.
11238 LIns* status_ins = w.ldiStateField(builtinStatus);
11239 propagateFailureToBuiltinStatus(ok_ins, status_ins);
11240 guard(true, w.eqi0(status_ins), STATUS_EXIT);
// Emit the actual LIR call for a specialized native. FAIL_STATUS natives get
// a deep-bail snapshot first; after the call the vp root is cleared, the
// result is guarded according to the native's error convention (null pointer,
// negative double, or JS_NEITHER), stored as the call expression's result,
// and pendingSpecializedNative is set so NativeCallComplete can finish up.
11243 JS_REQUIRES_STACK RecordingStatus
11244 TraceRecorder::emitNativeCall(JSSpecializedNative* sn, uintN argc, LIns* args[], bool rooted)
11246 if (JSTN_ERRTYPE(sn) == FAIL_STATUS) {
11247 // This needs to capture the pre-call state of the stack. So do not set
11248 // pendingSpecializedNative before taking this snapshot.
11249 JS_ASSERT(!pendingSpecializedNative);
11251 // Take snapshot for DeepBail and store it in tm->bailExit.
11252 enterDeepBailCall();
11255 LIns* res_ins = w.call(sn->builtin, args);
11257 // Immediately unroot the vp as soon we return since we might deep bail next.
11259 w.stStateField(w.immpNull(), nativeVp);
11261 rval_ins = res_ins;
11262 switch (JSTN_ERRTYPE(sn)) {
11264 guard(false, w.eqp0(res_ins), OOM_EXIT);
11267 res_ins = w.i2d(res_ins);
11268 guard(false, w.ltdN(res_ins, 0), OOM_EXIT);
11271 guard(false, w.eqiN(res_ins, JS_NEITHER), OOM_EXIT);
// Store the result into the callee slot (stack depth 2 + argc below top).
11276 set(&stackval(0 - (2 + argc)), res_ins);
11279 * The return value will be processed by NativeCallComplete since
11280 * we have to know the actual return value type for calls that return
11283 pendingSpecializedNative = sn;
11285 return RECORD_CONTINUE;
11289 * Check whether we have a specialized implementation for this native
// Walk the trace-info specialization table for a native, matching the
// constructing flag, argument count, and each prefix/argument type code
// against the record-time values; on the first full match, build the LIR
// argument vector and emit the call via emitNativeCall. Returns RECORD_STOP
// if no specialization matches.
11292 JS_REQUIRES_STACK RecordingStatus
11293 TraceRecorder::callSpecializedNative(JSNativeTraceInfo *trcinfo, uintN argc,
11296 JSStackFrame* const fp = cx->fp();
11297 jsbytecode *pc = cx->regs->pc;
11299 Value& fval = stackval(0 - (2 + argc));
11300 Value& tval = stackval(0 - (1 + argc));
11302 LIns* this_ins = get(&tval);
11304 LIns* args[nanojit::MAXARGS];
11305 JSSpecializedNative *sn = trcinfo->specializations;
11308 if (((sn->flags & JSTN_CONSTRUCTOR) != 0) != constructing)
11311 uintN knownargc = strlen(sn->argtypes);
11312 if (argc != knownargc)
// Prefix codes describe up to 3 implicit leading arguments; args is filled
// from the end (argp walks backward) to match the CallInfo convention.
11315 intN prefixc = strlen(sn->prefix);
11316 JS_ASSERT(prefixc <= 3);
11317 LIns** argp = &args[argc + prefixc - 1];
11321 memset(args, 0xCD, sizeof(args));
11325 for (i = prefixc; i--; ) {
11326 argtype = sn->prefix[i];
11327 if (argtype == 'C') {
11329 } else if (argtype == 'T') { /* this, as an object */
11330 if (tval.isPrimitive())
11331 goto next_specialization;
11333 } else if (argtype == 'S') { /* this, as a string */
11334 if (!tval.isString())
11335 goto next_specialization;
11337 } else if (argtype == 'f') {
11338 *argp = w.immpObjGC(&fval.toObject());
11339 } else if (argtype == 'p') {
11340 CHECK_STATUS(getClassPrototype(&fval.toObject(), *argp));
11341 } else if (argtype == 'R') {
11342 *argp = w.nameImmpNonGC(cx->runtime);
11343 } else if (argtype == 'P') {
11344 // FIXME: Set pc to imacpc when recording JSOP_CALL inside the
11345 // JSOP_GETELEM imacro (bug 476559).
11346 if ((*pc == JSOP_CALL) &&
11347 fp->hasImacropc() && *fp->imacropc() == JSOP_GETELEM)
11348 *argp = w.nameImmpNonGC(fp->imacropc());
11350 *argp = w.nameImmpNonGC(pc);
11351 } else if (argtype == 'D') { /* this, as a number */
11352 if (!tval.isNumber())
11353 goto next_specialization;
11355 } else if (argtype == 'M') {
11356 MathCache *mathCache = GetMathCache(cx);
11358 return RECORD_ERROR;
11359 *argp = w.nameImmpNonGC(mathCache);
11361 JS_NOT_REACHED("unknown prefix arg type");
11366 for (i = knownargc; i--; ) {
11367 Value& arg = stackval(0 - (i + 1));
11370 argtype = sn->argtypes[i];
11371 if (argtype == 'd' || argtype == 'i') {
11372 if (!arg.isNumber())
11373 goto next_specialization;
11374 if (argtype == 'i')
11375 *argp = d2i(*argp);
11376 } else if (argtype == 'o') {
11377 if (arg.isPrimitive())
11378 goto next_specialization;
11379 } else if (argtype == 's') {
11380 if (!arg.isString())
11381 goto next_specialization;
11382 } else if (argtype == 'r') {
11383 if (!VALUE_IS_REGEXP(cx, arg))
11384 goto next_specialization;
11385 } else if (argtype == 'f') {
11386 if (!IsFunctionObject(arg))
11387 goto next_specialization;
11388 } else if (argtype == 'v') {
11389 *argp = box_value_for_native_call(arg, *argp);
11391 goto next_specialization;
// Debug check that the 0xCD poison fill was fully overwritten.
11396 JS_ASSERT(args[0] != (LIns *)0xcdcdcdcd);
11398 return emitNativeCall(sn, argc, args, false);
11400 next_specialization:;
11401 } while ((sn++)->flags & JSTN_MORE);
11403 return RECORD_STOP;
// Compute ceil(x) and report via the return value whether the result fits in
// an int32; on success the integer result is stored through *out.
11406 static JSBool FASTCALL
11407 ceilReturningInt(jsdouble x, int32 *out)
11409 jsdouble r = js_math_ceil_impl(x);
11410 return JSDOUBLE_IS_INT32(r, out);
// Compute floor(x) and report via the return value whether the result fits in
// an int32; on success the integer result is stored through *out.
11413 static JSBool FASTCALL
11414 floorReturningInt(jsdouble x, int32 *out)
11416 jsdouble r = js_math_floor_impl(x);
11417 return JSDOUBLE_IS_INT32(r, out);
// Compute round(x) and report via the return value whether the result fits in
// an int32; on success the integer result is stored through *out.
11420 static JSBool FASTCALL
11421 roundReturningInt(jsdouble x, int32 *out)
11423 jsdouble r = js_math_round_impl(x);
11424 return JSDOUBLE_IS_INT32(r, out);
// Trace-callable CallInfo descriptors for the three helpers above.
11428 * These functions store into their second argument, so they need to
11429 * be annotated accordingly. To be future-proof, we use ACCSET_STORE_ANY
11430 * so that new callers don't have to remember to update the annotation.
11432 JS_DEFINE_CALLINFO_2(static, BOOL, ceilReturningInt, DOUBLE, INT32PTR, 0, ACCSET_STORE_ANY)
11433 JS_DEFINE_CALLINFO_2(static, BOOL, floorReturningInt, DOUBLE, INT32PTR, 0, ACCSET_STORE_ANY)
11434 JS_DEFINE_CALLINFO_2(static, BOOL, roundReturningInt, DOUBLE, INT32PTR, 0, ACCSET_STORE_ANY)
// Emit a call to one of the *ReturningInt helpers (passed as |ci|): the
// helper writes its int32 result through an alloca'd out-slot; a guard exits
// on OVERFLOW_EXIT when the result doesn't fit an int32; otherwise the loaded
// integer is widened to double and stored as the call's result. The math-
// native completion callback is suppressed since the call is fully handled.
11436 JS_REQUIRES_STACK RecordingStatus
11437 TraceRecorder::callFloatReturningInt(uintN argc, const nanojit::CallInfo *ci)
11439 Value& arg = stackval(-1);
11440 LIns* resptr_ins = w.allocp(sizeof(int32));
11441 LIns* args[] = { resptr_ins, get(&arg) };
11442 LIns* fits_ins = w.call(ci, args);
11444 guard(false, w.eqi0(fits_ins), OVERFLOW_EXIT);
11446 LIns* res_ins = w.ldiAlloc(resptr_ins);
11448 set(&stackval(0 - (2 + argc)), w.i2d(res_ins));
11450 pendingSpecializedNative = IGNORE_NATIVE_CALL_COMPLETE_CALLBACK;
11452 return RECORD_CONTINUE;
11455 JS_REQUIRES_STACK RecordingStatus
11456 TraceRecorder::callNative(uintN argc, JSOp mode)
11460 JS_ASSERT(mode == JSOP_CALL || mode == JSOP_NEW || mode == JSOP_FUNAPPLY ||
11461 mode == JSOP_FUNCALL);
11463 Value* vp = &stackval(0 - (2 + argc));
11464 JSObject* funobj = &vp[0].toObject();
11465 JSFunction* fun = funobj->getFunctionPrivate();
11466 JS_ASSERT(fun->isNative());
11467 Native native = fun->u.n.native;
11471 if (vp[2].isNumber() && mode == JSOP_CALL) {
11472 if (native == js_math_ceil || native == js_math_floor || native == js_math_round) {
11473 LIns* a = get(&vp[2]);
11475 if (IsPromotedInt32OrUint32(a)) {
11477 pendingSpecializedNative = IGNORE_NATIVE_CALL_COMPLETE_CALLBACK;
11478 return RECORD_CONTINUE;
11480 if (native == js_math_floor) {
11481 if (floorReturningInt(vp[2].toNumber(), &result))
11482 return callFloatReturningInt(argc, &floorReturningInt_ci);
11483 } else if (native == js_math_ceil) {
11484 if (ceilReturningInt(vp[2].toNumber(), &result))
11485 return callFloatReturningInt(argc, &ceilReturningInt_ci);
11486 } else if (native == js_math_round) {
11487 if (roundReturningInt(vp[2].toNumber(), &result))
11488 return callFloatReturningInt(argc, &roundReturningInt_ci);
11490 } else if (native == js_math_abs) {
11491 LIns* a = get(&vp[2]);
11492 if (IsPromotedInt32(a) && vp[2].toNumber() != INT_MIN) {
11493 a = w.demoteToInt32(a);
11494 /* abs(INT_MIN) can't be done using integers; exit if we see it. */
11495 LIns* intMin_ins = w.name(w.immi(0x80000000), "INT_MIN");
11496 LIns* isIntMin_ins = w.name(w.eqi(a, intMin_ins), "isIntMin");
11497 guard(false, isIntMin_ins, MISMATCH_EXIT);
11498 LIns* neg_ins = w.negi(a);
11499 LIns* isNeg_ins = w.name(w.ltiN(a, 0), "isNeg");
11500 LIns* abs_ins = w.name(w.cmovi(isNeg_ins, neg_ins, a), "abs");
11501 set(&vp[0], w.i2d(abs_ins));
11502 pendingSpecializedNative = IGNORE_NATIVE_CALL_COMPLETE_CALLBACK;
11503 return RECORD_CONTINUE;
11506 if (vp[1].isString()) {
11507 JSString *str = vp[1].toString();
11508 if (native == js_str_charAt) {
11509 jsdouble i = vp[2].toNumber();
11510 if (JSDOUBLE_IS_NaN(i))
11512 if (i < 0 || i >= str->length())
11513 RETURN_STOP("charAt out of bounds");
11514 LIns* str_ins = get(&vp[1]);
11515 LIns* idx_ins = get(&vp[2]);
11517 CHECK_STATUS(getCharAt(str, str_ins, idx_ins, mode, &char_ins));
11518 set(&vp[0], char_ins);
11519 pendingSpecializedNative = IGNORE_NATIVE_CALL_COMPLETE_CALLBACK;
11520 return RECORD_CONTINUE;
11521 } else if (native == js_str_charCodeAt) {
11522 jsdouble i = vp[2].toNumber();
11523 if (JSDOUBLE_IS_NaN(i))
11525 if (i < 0 || i >= str->length())
11526 RETURN_STOP("charCodeAt out of bounds");
11527 LIns* str_ins = get(&vp[1]);
11528 LIns* idx_ins = get(&vp[2]);
11529 LIns* charCode_ins;
11530 CHECK_STATUS(getCharCodeAt(str, str_ins, idx_ins, &charCode_ins));
11531 set(&vp[0], charCode_ins);
11532 pendingSpecializedNative = IGNORE_NATIVE_CALL_COMPLETE_CALLBACK;
11533 return RECORD_CONTINUE;
11536 } else if (vp[2].isString() && mode == JSOP_CALL) {
11537 if (native == js_regexp_exec) {
11538 jsbytecode *pc = cx->regs->pc;
11540 * If we see any of these sequences, the result is unused:
11542 * - call / trace / pop
11544 * If we see any of these sequences, the result is only tested for nullness:
11546 * - call / trace / ifeq
11547 * - call / not / ifeq
11548 * - call / trace / not / ifeq
11550 * In either case, we replace the call to RegExp.exec() on the
11551 * stack with a call to RegExp.test() because "r.exec(s) !=
11552 * null" is equivalent to "r.test(s)". This avoids building
11553 * the result array, which can be expensive. This requires
11554 * that RegExp.prototype.test() hasn't been changed; we check this.
11556 if (pc[0] == JSOP_CALL) {
11557 if ((pc[JSOP_CALL_LENGTH] == JSOP_POP) ||
11558 (pc[JSOP_CALL_LENGTH] == JSOP_TRACE &&
11559 pc[JSOP_CALL_LENGTH + JSOP_TRACE_LENGTH] == JSOP_POP) ||
11560 (pc[JSOP_CALL_LENGTH] == JSOP_IFEQ) ||
11561 (pc[JSOP_CALL_LENGTH] == JSOP_TRACE &&
11562 pc[JSOP_CALL_LENGTH + JSOP_TRACE_LENGTH] == JSOP_IFEQ) ||
11563 (pc[JSOP_CALL_LENGTH] == JSOP_NOT &&
11564 pc[JSOP_CALL_LENGTH + JSOP_NOT_LENGTH] == JSOP_IFEQ) ||
11565 (pc[JSOP_CALL_LENGTH] == JSOP_TRACE &&
11566 pc[JSOP_CALL_LENGTH + JSOP_TRACE_LENGTH] == JSOP_NOT &&
11567 pc[JSOP_CALL_LENGTH + JSOP_TRACE_LENGTH + JSOP_NOT_LENGTH] == JSOP_IFEQ))
11570 jsid id = ATOM_TO_JSID(cx->runtime->atomState.testAtom);
11571 /* Get RegExp.prototype.test() and check it hasn't been changed. */
11572 if (js_GetClassPrototype(cx, NULL, JSProto_RegExp, &proto)) {
11573 if (JSObject *tmp = HasNativeMethod(proto, id, js_regexp_test)) {
11574 vp[0] = ObjectValue(*tmp);
11576 fun = tmp->getFunctionPrivate();
11577 native = js_regexp_test;
11587 if (vp[2].isNumber() && vp[3].isNumber() && mode == JSOP_CALL &&
11588 (native == js_math_min || native == js_math_max)) {
11589 LIns* a = get(&vp[2]);
11590 LIns* b = get(&vp[3]);
11591 if (IsPromotedInt32(a) && IsPromotedInt32(b)) {
11592 a = w.demoteToInt32(a);
11593 b = w.demoteToInt32(b);
11594 LIns* cmp = (native == js_math_min) ? w.lti(a, b) : w.gti(a, b);
11595 set(&vp[0], w.i2d(w.cmovi(cmp, a, b)));
11596 pendingSpecializedNative = IGNORE_NATIVE_CALL_COMPLETE_CALLBACK;
11597 return RECORD_CONTINUE;
11599 if (IsPromotedUint32(a) && IsPromotedUint32(b)) {
11600 a = w.demoteToUint32(a);
11601 b = w.demoteToUint32(b);
11602 LIns* cmp = (native == js_math_min) ? w.ltui(a, b) : w.gtui(a, b);
11603 set(&vp[0], w.ui2d(w.cmovi(cmp, a, b)));
11604 pendingSpecializedNative = IGNORE_NATIVE_CALL_COMPLETE_CALLBACK;
11605 return RECORD_CONTINUE;
11611 if (fun->flags & JSFUN_TRCINFO) {
11612 JSNativeTraceInfo *trcinfo = FUN_TRCINFO(fun);
11613 JS_ASSERT(trcinfo && fun->u.n.native == trcinfo->native);
11615 /* Try to call a type specialized version of the native. */
11616 if (trcinfo->specializations) {
11617 RecordingStatus status = callSpecializedNative(trcinfo, argc, mode == JSOP_NEW);
11618 if (status != RECORD_STOP)
11623 if (native == js_fun_apply || native == js_fun_call)
11624 RETURN_STOP("trying to call native apply or call");
11626 // Allocate the vp vector and emit code to root it.
11627 uintN vplen = 2 + argc;
11628 LIns* invokevp_ins = w.allocp(vplen * sizeof(Value));
11630 // vp[0] is the callee.
11631 box_value_into(vp[0], w.immpObjGC(funobj), AllocSlotsAddress(invokevp_ins));
11633 // Calculate |this|.
11635 if (mode == JSOP_NEW) {
11636 Class* clasp = fun->u.n.clasp;
11637 JS_ASSERT(clasp != &js_SlowArrayClass);
11639 clasp = &js_ObjectClass;
11640 JS_ASSERT(((jsuword) clasp & 3) == 0);
11642 // Abort on |new Function|. (FIXME: This restriction might not
11643 // unnecessary now that the constructor creates the new function object
11645 if (clasp == &js_FunctionClass)
11646 RETURN_STOP("new Function");
11648 if (!clasp->isNative())
11649 RETURN_STOP("new with non-native ops");
11651 // Don't trace |new Math.sin(0)|.
11652 if (!fun->isConstructor())
11653 RETURN_STOP("new with non-constructor native function");
11655 vp[1].setMagicWithObjectOrNullPayload(NULL);
11656 newobj_ins = w.immpMagicNull();
11658 /* Treat this as a regular call, the constructor will behave correctly. */
11660 this_ins = newobj_ins;
11662 this_ins = get(&vp[1]);
11664 set(&vp[1], this_ins);
11665 box_value_into(vp[1], this_ins, AllocSlotsAddress(invokevp_ins, 1));
11668 for (uintN n = 2; n < 2 + argc; n++) {
11669 box_value_into(vp[n], get(&vp[n]), AllocSlotsAddress(invokevp_ins, n));
11670 // For a very long argument list we might run out of LIR space, so
11671 // check inside the loop.
11673 RETURN_STOP("out of memory in argument list");
11676 // Populate extra slots, including the return value slot for a slow native.
11677 if (2 + argc < vplen) {
11678 for (uintN n = 2 + argc; n < vplen; n++) {
11679 box_undefined_into(AllocSlotsAddress(invokevp_ins, n));
11681 RETURN_STOP("out of memory in extra slots");
11685 // Set up arguments for the JSNative or JSFastNative.
11686 if (mode == JSOP_NEW)
11687 RETURN_STOP("untraceable fast native constructor");
11688 native_rval_ins = invokevp_ins;
11689 args[0] = invokevp_ins;
11690 args[1] = w.immi(argc);
11692 uint32 typesig = CallInfo::typeSig3(ARGTYPE_I, ARGTYPE_P, ARGTYPE_I, ARGTYPE_P);
11694 // Generate CallInfo and a JSSpecializedNative structure on the fly.
11695 // Do not use JSTN_UNBOX_AFTER for mode JSOP_NEW because
11696 // record_NativeCallComplete unboxes the result specially.
11698 CallInfo* ci = new (traceAlloc()) CallInfo();
11699 ci->_address = uintptr_t(fun->u.n.native);
11701 ci->_storeAccSet = ACCSET_STORE_ANY;
11702 ci->_abi = ABI_CDECL;
11703 ci->_typesig = typesig;
11705 ci->_name = js_anonymous_str;
11707 JSAutoByteString bytes(cx, ATOM_TO_STRING(fun->atom));
11709 size_t n = strlen(bytes.ptr()) + 1;
11710 char *buffer = new (traceAlloc()) char[n];
11711 memcpy(buffer, bytes.ptr(), n);
11712 ci->_name = buffer;
11717 // Generate a JSSpecializedNative structure on the fly.
11718 generatedSpecializedNative.builtin = ci;
11719 generatedSpecializedNative.flags = FAIL_STATUS | ((mode == JSOP_NEW)
11721 : JSTN_UNBOX_AFTER);
11722 generatedSpecializedNative.prefix = NULL;
11723 generatedSpecializedNative.argtypes = NULL;
11725 // We only have to ensure that the values we wrote into the stack buffer
11726 // are rooted if we actually make it to the call, so only set nativeVp and
11727 // nativeVpLen immediately before emitting the call code. This way we avoid
11728 // leaving trace with a bogus nativeVp because we fall off trace while unboxing
11729 // values into the stack buffer.
11730 w.stStateField(w.nameImmi(vplen), nativeVpLen);
11731 w.stStateField(invokevp_ins, nativeVp);
11733 // argc is the original argc here. It is used to calculate where to place
11734 // the return value.
11735 return emitNativeCall(&generatedSpecializedNative, argc, args, true);
/*
 * Record a function call at the current pc. |argc| is the argument count and
 * |mode| is the calling opcode (JSOP_CALL, JSOP_NEW, ...). Dispatches to
 * specialized recorders for interpreted calls and the Array/String natives,
 * falling back to the generic native-call recorder; RETURN_STOP aborts
 * recording of this trace.
 */
11738 JS_REQUIRES_STACK RecordingStatus
11739 TraceRecorder::functionCall(uintN argc, JSOp mode)
// The callee sits below |this| and the arguments on the operand stack.
11741 Value& fval = stackval(0 - (2 + argc));
11742 JS_ASSERT(&fval >= cx->fp()->base());
11744 if (!IsFunctionObject(fval))
11745 RETURN_STOP("callee is not a function");
11747 Value& tval = stackval(0 - (1 + argc));
11750 * If callee is not constant, it's a shapeless call and we have to guard
11751 * explicitly that we will get this callee again at runtime.
11753 if (!get(&fval)->isImmP())
11754 CHECK_STATUS(guardCallee(fval));
11757 * Require that the callee be a function object, to avoid guarding on its
11758 * class here. We know if the callee and this were pushed by JSOP_CALLNAME
11759 * or JSOP_CALLPROP that callee is a *particular* function, since these hit
11760 * the property cache and guard on the object (this) in which the callee
11761 * was found. So it's sufficient to test here that the particular function
11762 * is interpreted, not guard on that condition.
11764 * Bytecode sequences that push shapeless callees must guard on the callee
11765 * class being Function and the function being interpreted.
11767 JSFunction* fun = GET_FUNCTION_PRIVATE(cx, &fval.toObject());
// Emit an "enter function" probe (first arg 1) when call tracking is
// active, and guard that the probe call reported success.
11769 if (Probes::callTrackingActive(cx)) {
11770 JSScript *script = FUN_SCRIPT(fun);
11771 if (!script || !script->isEmpty()) {
11772 LIns* args[] = { w.immi(1), w.nameImmpNonGC(fun), cx_ins };
11773 LIns* call_ins = w.call(&functionProbe_ci, args);
11774 guard(false, w.eqi0(call_ins), MISMATCH_EXIT);
11778 if (FUN_INTERPRETED(fun))
11779 return interpretedFunctionCall(fval, fun, argc, mode == JSOP_NEW);
// Specialized paths for the Array and (1-argument) String natives.
11781 Native native = fun->maybeNative();
11782 Value* argv = &tval + 1;
11783 if (native == js_Array)
11784 return newArray(&fval.toObject(), argc, argv, &fval);
11785 if (native == js_String && argc == 1) {
11786 if (mode == JSOP_NEW)
11787 return newString(&fval.toObject(), 1, argv, &fval);
11788 if (!argv[0].isPrimitive()) {
11789 CHECK_STATUS(guardNativeConversion(argv[0]));
11790 return callImacro(call_imacros.String);
11792 set(&fval, stringify(argv[0]));
11793 pendingSpecializedNative = IGNORE_NATIVE_CALL_COMPLETE_CALLBACK;
11794 return RECORD_CONTINUE;
// Generic native call, followed by the matching "leave function" probe
// (first arg 0) when call tracking is active.
11797 RecordingStatus rs = callNative(argc, mode);
11798 if (Probes::callTrackingActive(cx)) {
11799 LIns* args[] = { w.immi(0), w.nameImmpNonGC(fun), cx_ins };
11800 LIns* call_ins = w.call(&functionProbe_ci, args);
11801 guard(false, w.eqi0(call_ins), MISMATCH_EXIT);
11806 JS_REQUIRES_STACK AbortableRecordingStatus
11807 TraceRecorder::record_JSOP_NEW()
11809 uintN argc = GET_ARGC(cx->regs->pc);
11810 cx->assertValidStackDepth(argc + 2);
11811 return InjectStatus(functionCall(argc, JSOP_NEW));
11814 JS_REQUIRES_STACK AbortableRecordingStatus
11815 TraceRecorder::record_JSOP_DELNAME()
11817 return ARECORD_STOP;
/*
 * On-trace builtin backing element deletion with an int32 key: performs
 * |delete obj[i]| on behalf of a recorded trace. Leaves trace first if obj
 * is the global or an arguments object; failures are reported to the trace
 * monitor via SetBuiltinError. Returns the boolean result of the delete.
 */
11820 static JSBool JS_FASTCALL
11821 DeleteIntKey(JSContext* cx, JSObject* obj, int32 i, JSBool strict)
11823 TraceMonitor *tm = JS_TRACE_MONITOR_ON_TRACE(cx);
11825 LeaveTraceIfGlobalObject(cx, obj);
11826 LeaveTraceIfArgumentsObject(cx, obj);
11827 Value v = BooleanValue(false);
// Small ints embed directly in a jsid; otherwise fall back to stringifying
// the index, which can fail (OOM) and sets the builtin error.
11829 if (INT_FITS_IN_JSID(i)) {
11830 id = INT_TO_JSID(i);
11832 if (!js_ValueToStringId(cx, Int32Value(i), &id)) {
11833 SetBuiltinError(tm);
11838 if (!obj->deleteProperty(cx, id, &v, strict))
11839 SetBuiltinError(tm);
11840 return v.toBoolean();
// BOOL_FAIL: a FAIL-able builtin; callers must guard on the builtin status.
11842 JS_DEFINE_CALLINFO_4(extern, BOOL_FAIL, DeleteIntKey, CONTEXT, OBJECT, INT32, BOOL,
11843 0, ACCSET_STORE_ANY)
/*
 * On-trace builtin backing property/element deletion with a string key:
 * performs |delete obj[str]| on behalf of a recorded trace. Mirrors
 * DeleteIntKey; failures are reported via SetBuiltinError.
 */
11845 static JSBool JS_FASTCALL
11846 DeleteStrKey(JSContext* cx, JSObject* obj, JSString* str, JSBool strict)
11848 TraceMonitor *tm = JS_TRACE_MONITOR_ON_TRACE(cx);
11850 LeaveTraceIfGlobalObject(cx, obj);
11851 LeaveTraceIfArgumentsObject(cx, obj);
11852 Value v = BooleanValue(false);
11856 * NB: JSOP_DELPROP does not need js_ValueToStringId to atomize, but (see
11857 * jsatominlines.h) that helper early-returns if the computed property name
11858 * string is already atomized, and we are *not* on a perf-critical path!
11860 if (!js_ValueToStringId(cx, StringValue(str), &id) || !obj->deleteProperty(cx, id, &v, strict))
11861 SetBuiltinError(tm);
11862 return v.toBoolean();
// BOOL_FAIL: a FAIL-able builtin; callers must guard on the builtin status.
11864 JS_DEFINE_CALLINFO_4(extern, BOOL_FAIL, DeleteStrKey, CONTEXT, OBJECT, STRING, BOOL,
11865 0, ACCSET_STORE_ANY)
/*
 * Record |delete obj.prop|: emits a deep-bailing call to the DeleteStrKey
 * builtin with the property atom, guards on the builtin status, and replaces
 * the base value on the stack with the boolean result.
 */
11867 JS_REQUIRES_STACK AbortableRecordingStatus
11868 TraceRecorder::record_JSOP_DELPROP()
11870 Value& lval = stackval(-1);
11871 if (lval.isPrimitive())
11872 RETURN_STOP_A("JSOP_DELPROP on primitive base expression");
11873 if (&lval.toObject() == globalObj)
11874 RETURN_STOP_A("JSOP_DELPROP on global property");
11876 JSAtom* atom = atoms[GET_INDEX(cx->regs->pc)];
// The builtin can deep-bail (e.g. via a delete hook), so bracket the call
// with enter/leaveDeepBailCall and guard that the builtin succeeded.
11878 enterDeepBailCall();
11879 LIns* args[] = { strictModeCode_ins, w.immpAtomGC(atom), get(&lval), cx_ins };
11880 LIns* rval_ins = w.call(&DeleteStrKey_ci, args);
11882 LIns* status_ins = w.ldiStateField(builtinStatus);
11883 pendingGuardCondition = w.eqi0(status_ins);
11884 leaveDeepBailCall();
11886 set(&lval, rval_ins);
11887 return ARECORD_CONTINUE;
/*
 * Record |delete obj[idx]|: dispatches to the DeleteIntKey or DeleteStrKey
 * builtin depending on the index type, guards on the builtin status, and
 * replaces the base value on the stack with the boolean result.
 */
11890 JS_REQUIRES_STACK AbortableRecordingStatus
11891 TraceRecorder::record_JSOP_DELELEM()
11893 Value& lval = stackval(-2);
11894 if (lval.isPrimitive())
11895 RETURN_STOP_A("JSOP_DELELEM on primitive base expression");
11896 if (&lval.toObject() == globalObj)
11897 RETURN_STOP_A("JSOP_DELELEM on global property");
11898 if (lval.toObject().isArguments())
11899 RETURN_STOP_A("JSOP_DELELEM on the |arguments| object");
11901 Value& idx = stackval(-1);
// Both builtins can deep-bail, so bracket with enter/leaveDeepBailCall.
11904 enterDeepBailCall();
11905 if (hasInt32Repr(idx)) {
11907 CHECK_STATUS_A(makeNumberInt32(get(&idx), &num_ins));
11908 LIns* args[] = { strictModeCode_ins, num_ins, get(&lval), cx_ins };
11909 rval_ins = w.call(&DeleteIntKey_ci, args);
11910 } else if (idx.isString()) {
11911 LIns* args[] = { strictModeCode_ins, get(&idx), get(&lval), cx_ins };
11912 rval_ins = w.call(&DeleteStrKey_ci, args);
11914 RETURN_STOP_A("JSOP_DELELEM on non-int, non-string index");
11917 LIns* status_ins = w.ldiStateField(builtinStatus);
11918 pendingGuardCondition = w.eqi0(status_ins);
11919 leaveDeepBailCall();
11921 set(&lval, rval_ins);
11922 return ARECORD_CONTINUE;
/*
 * Record |typeof expr|: for primitives the type-name atom is known at record
 * time and emitted as an immediate; for objects, function-ness is checked at
 * record time, and other objects call js_TypeOfObject at run time.
 */
11925 JS_REQUIRES_STACK AbortableRecordingStatus
11926 TraceRecorder::record_JSOP_TYPEOF()
11928 Value& r = stackval(-1);
11930 if (r.isString()) {
11931 type = w.immpAtomGC(cx->runtime->atomState.typeAtoms[JSTYPE_STRING]);
11932 } else if (r.isNumber()) {
11933 type = w.immpAtomGC(cx->runtime->atomState.typeAtoms[JSTYPE_NUMBER]);
11934 } else if (r.isUndefined()) {
11935 type = w.immpAtomGC(cx->runtime->atomState.typeAtoms[JSTYPE_VOID]);
11936 } else if (r.isBoolean()) {
11937 type = w.immpAtomGC(cx->runtime->atomState.typeAtoms[JSTYPE_BOOLEAN]);
11938 } else if (r.isNull()) {
// typeof null is "object" per the language, hence JSTYPE_OBJECT here.
11939 type = w.immpAtomGC(cx->runtime->atomState.typeAtoms[JSTYPE_OBJECT]);
11941 if (r.toObject().isFunction()) {
11942 type = w.immpAtomGC(cx->runtime->atomState.typeAtoms[JSTYPE_FUNCTION]);
// Non-function object: the answer can depend on the object's class, so
// compute it at run time with the js_TypeOfObject builtin.
11944 LIns* args[] = { get(&r), cx_ins };
11945 type = w.call(&js_TypeOfObject_ci, args);
11949 return ARECORD_CONTINUE;
11952 JS_REQUIRES_STACK AbortableRecordingStatus
11953 TraceRecorder::record_JSOP_VOID()
11955 stack(-1, w.immiUndefined());
11956 return ARECORD_CONTINUE;
// Record pre-increment of a name (++x).
// NOTE(review): the body is elided in this excerpt; by symmetry with
// record_JSOP_DECNAME below it should delegate to incName(1) — confirm
// against the full source.
11959 JS_REQUIRES_STACK AbortableRecordingStatus
11960 TraceRecorder::record_JSOP_INCNAME()
// Record pre-increment of a property (++obj.prop).
// NOTE(review): the body is elided in this excerpt; by symmetry with
// record_JSOP_DECPROP below it should delegate to incProp(1) — confirm
// against the full source.
11965 JS_REQUIRES_STACK AbortableRecordingStatus
11966 TraceRecorder::record_JSOP_INCPROP()
11971 JS_REQUIRES_STACK AbortableRecordingStatus
11972 TraceRecorder::record_JSOP_INCELEM()
11974 return InjectStatus(incElem(1));
11977 JS_REQUIRES_STACK AbortableRecordingStatus
11978 TraceRecorder::record_JSOP_DECNAME()
11980 return incName(-1);
11983 JS_REQUIRES_STACK AbortableRecordingStatus
11984 TraceRecorder::record_JSOP_DECPROP()
11986 return incProp(-1);
11989 JS_REQUIRES_STACK AbortableRecordingStatus
11990 TraceRecorder::record_JSOP_DECELEM()
11992 return InjectStatus(incElem(-1));
/*
 * Record an increment/decrement of a name. |incr| is +1 or -1; |pre| selects
 * whether the pre- or post-operation value is pushed. Tracked locations are
 * updated in the tracker; otherwise the value lives in a Call object and is
 * written back via setCallProp.
 */
11995 JS_REQUIRES_STACK AbortableRecordingStatus
11996 TraceRecorder::incName(jsint incr, bool pre)
12003 CHECK_STATUS_A(name(vp, v_ins, nr));
12004 Value v = nr.tracked ? *vp : nr.v;
// incHelper produces both the incremented value and its LIR instruction.
12006 CHECK_STATUS_A(incHelper(v, v_ins, v_after, v_ins_after, incr));
// pre-op pushes the new value, post-op pushes the original.
12007 LIns* v_ins_result = pre ? v_ins_after : v_ins;
12009 set(vp, v_ins_after);
12010 stack(0, v_ins_result);
12011 return ARECORD_CONTINUE;
// Untracked name: only Call objects are supported; write the new value
// back through the Call-object setter path.
12014 if (!nr.obj->isCall())
12015 RETURN_STOP_A("incName on unsupported object class");
12017 CHECK_STATUS_A(setCallProp(nr.obj, nr.obj_ins, nr.shape, v_ins_after, v_after));
12018 stack(0, v_ins_result);
12019 return ARECORD_CONTINUE;
12022 JS_REQUIRES_STACK AbortableRecordingStatus
12023 TraceRecorder::record_JSOP_NAMEINC()
12025 return incName(1, false);
12028 JS_REQUIRES_STACK AbortableRecordingStatus
12029 TraceRecorder::record_JSOP_PROPINC()
12031 return incProp(1, false);
12034 // XXX consolidate with record_JSOP_GETELEM code...
12035 JS_REQUIRES_STACK AbortableRecordingStatus
12036 TraceRecorder::record_JSOP_ELEMINC()
12038 return InjectStatus(incElem(1, false));
12041 JS_REQUIRES_STACK AbortableRecordingStatus
12042 TraceRecorder::record_JSOP_NAMEDEC()
12044 return incName(-1, false);
12047 JS_REQUIRES_STACK AbortableRecordingStatus
12048 TraceRecorder::record_JSOP_PROPDEC()
12050 return incProp(-1, false);
12053 JS_REQUIRES_STACK AbortableRecordingStatus
12054 TraceRecorder::record_JSOP_ELEMDEC()
12056 return InjectStatus(incElem(-1, false));
12059 JS_REQUIRES_STACK AbortableRecordingStatus
12060 TraceRecorder::record_JSOP_GETPROP()
12062 return getProp(stackval(-1));
12066 * If possible, lookup obj[id] without calling any resolve hooks or touching
12067 * any non-native objects, store the results in *pobjp and *shapep (NULL if no
12068 * such property exists), and return true.
12070 * If a safe lookup is not possible, return false; *pobjp and *shapep are
12074 SafeLookup(JSContext *cx, JSObject* obj, jsid id, JSObject** pobjp, const Shape** shapep)
12077 // Avoid non-native lookupProperty hooks.
12078 if (obj->getOps()->lookupProperty)
// Hit: a native lookup found the shape on this object (holder/shape are
// reported through the out-params; those lines are elided here).
12081 if (const Shape *shape = obj->nativeLookup(id)) {
12087 // Avoid resolve hooks.
12088 if (obj->getClass()->resolve != JS_ResolveStub)
// Walk up the prototype chain until it ends.
12090 } while ((obj = obj->getProto()) != NULL);
12097 * Lookup the property for the SETPROP/SETNAME/SETMETHOD instruction at pc.
12098 * Emit guards to ensure that the result at run time is the same.
12100 JS_REQUIRES_STACK RecordingStatus
12101 TraceRecorder::lookupForSetPropertyOp(JSObject* obj, LIns* obj_ins, jsid id,
12102 bool* safep, JSObject** pobjp, const Shape** shapep)
12104 // We could consult the property cache here, but the contract for
12105 // PropertyCache::testForSet is intricate enough that it's a lot less code
12106 // to do a SafeLookup.
12107 *safep = SafeLookup(cx, obj, id, pobjp, shapep);
12109 return RECORD_CONTINUE;
12111 VMSideExit *exit = snapshot(BRANCH_EXIT);
// Guard the starting object's shape, and (if the property was found on a
// different, non-global holder) the holder's shape as well.
12113 CHECK_STATUS(guardShape(obj_ins, obj, obj->shape(), "guard_kshape", exit));
12114 if (obj != *pobjp && *pobjp != globalObj) {
12115 CHECK_STATUS(guardShape(w.immpObjGC(*pobjp), *pobjp, (*pobjp)->shape(),
12116 "guard_vshape", exit));
// Property not found anywhere: guard every shape on the prototype chain so
// the miss stays a miss at run time.
12120 if (obj != globalObj)
12121 CHECK_STATUS(guardShape(obj_ins, obj, obj->shape(), "guard_proto_chain", exit));
12122 obj = obj->getProto();
12125 obj_ins = w.immpObjGC(obj);
12128 return RECORD_CONTINUE;
/*
 * On-trace builtin: invoke the object's method write barrier for a store to
 * |slot| of value |*v| (needed because storing over a method-valued property
 * may rebrand the object). Returns false on failure.
 */
12131 static JSBool FASTCALL
12132 MethodWriteBarrier(JSContext* cx, JSObject* obj, uint32 slot, const Value* v)
12135 TraceMonitor *tm = JS_TRACE_MONITOR_ON_TRACE(cx);
12138 bool ok = obj->methodWriteBarrier(cx, slot, *v);
12139 JS_ASSERT(WasBuiltinSuccessful(tm));
12142 JS_DEFINE_CALLINFO_4(static, BOOL_FAIL, MethodWriteBarrier, CONTEXT, OBJECT, UINT32, CVALUEPTR,
12143 0, ACCSET_STORE_ANY)
12145 /* Emit a specialized, inlined copy of js_NativeSet. */
/*
 * |obj|/|obj_ins| is the object being written, |shape| the (record-time)
 * property, and |v|/|v_ins| the value being stored. RETURN_STOP aborts
 * recording for cases js_NativeSet handles but the tracer cannot.
 */
12146 JS_REQUIRES_STACK RecordingStatus
12147 TraceRecorder::nativeSet(JSObject* obj, LIns* obj_ins, const Shape* shape,
12148 const Value &v, LIns* v_ins)
12150 uint32 slot = shape->slot;
12151 JS_ASSERT((slot != SHAPE_INVALID_SLOT) == shape->hasSlot());
12152 JS_ASSERT_IF(shape->hasSlot(), obj->nativeContains(*shape));
12155 * We do not trace assignment to properties that have both a non-default
12156 * setter and a slot, for several reasons.
12158 * First, that would require sampling rt->propertyRemovals before and after
12159 * (see js_NativeSet), and even more code to handle the case where the two
12160 * samples differ. A mere guard is not enough, because you can't just bail
12161 * off trace in the middle of a property assignment without storing the
12162 * value and making the stack right.
12164 * If obj is the global object, there are two additional problems. We would
12165 * have to emit still more code to store the result in the object (not the
12166 * native global frame) if the setter returned successfully after
12167 * deep-bailing. And we would have to cope if the run-time type of the
12168 * setter's return value differed from the record-time type of v, in which
12169 * case unboxing would fail and, having called a native setter, we could
12170 * not just retry the instruction in the interpreter.
12172 * If obj is branded, we would have a similar problem recovering from a
12173 * failed call to MethodWriteBarrier.
12175 if (!shape->hasDefaultSetter() && slot != SHAPE_INVALID_SLOT)
12176 RETURN_STOP("can't trace set of property with setter and slot");
12178 // These two cases are strict-mode errors and can't be traced.
12179 if (shape->hasGetterValue() && shape->hasDefaultSetter())
12180 RETURN_STOP("can't set a property that has only a getter");
12181 if (shape->isDataDescriptor() && !shape->writable())
12182 RETURN_STOP("can't assign to readonly property");
12184 // Call the setter, if any.
12185 if (!shape->hasDefaultSetter()) {
12186 if (shape->hasSetterValue())
12187 RETURN_STOP("can't trace JavaScript function setter yet");
12188 emitNativePropertyOp(shape, obj_ins, true, box_value_into_alloc(v, v_ins));
12191 if (slot != SHAPE_INVALID_SLOT) {
12192 if (obj->brandedOrHasMethodBarrier()) {
12193 if (obj == globalObj) {
12194 // Because the trace is type-specialized to the global object's
12195 // slots, no run-time check is needed. Avoid recording a global
12196 // shape change, though.
12197 JS_ASSERT(obj->nativeContains(*shape));
12198 if (IsFunctionObject(obj->getSlot(slot)))
12199 RETURN_STOP("can't trace set of function-valued global property");
12201 // Setting a function-valued property might need to rebrand the
12202 // object. Call the method write barrier. Note that even if the
12203 // property is not function-valued now, it might be on trace.
12204 enterDeepBailCall();
12205 LIns* args[] = {box_value_into_alloc(v, v_ins), w.immi(slot), obj_ins, cx_ins};
12206 LIns* ok_ins = w.call(&MethodWriteBarrier_ci, args);
12207 guard(false, w.eqi0(ok_ins), OOM_EXIT);
12208 leaveDeepBailCall();
12212 // Store the value.
12213 if (obj == globalObj) {
// Global slots are stored via the tracker; the slot must be imported.
12214 if (!lazilyImportGlobalSlot(slot))
12215 RETURN_STOP("lazy import of global slot failed");
12216 set(&obj->getSlotRef(slot), v_ins);
12218 LIns* slots_ins = NULL;
12219 stobj_set_slot(obj, obj_ins, slot, slots_ins, v, v_ins);
12223 return RECORD_CONTINUE;
/*
 * Prepare to record the addition of a new data property to |obj|. Rejects
 * the cases the tracer cannot handle; on success, records the pre-addition
 * shape in addPropShapeBefore so record_AddProperty can verify it later.
 */
12226 JS_REQUIRES_STACK RecordingStatus
12227 TraceRecorder::addDataProperty(JSObject* obj)
12229 if (!obj->isExtensible())
12230 RETURN_STOP("assignment adds property to non-extensible object");
12232 // If obj is the global, the global shape is about to change. Note also
12233 // that since we do not record this case, SETNAME and SETPROP are identical
12234 // as far as the tracer is concerned. (js_CheckUndeclaredVarAssignment
12235 // distinguishes the two, in the interpreter.)
12236 if (obj == globalObj)
12237 RETURN_STOP("set new property of global object"); // global shape change
12239 // js_AddProperty does not call the addProperty hook.
12240 Class* clasp = obj->getClass();
12241 if (clasp->addProperty != Valueify(JS_PropertyStub))
12242 RETURN_STOP("set new property of object with addProperty hook");
12244 // See comment in TR::nativeSet about why we do not support setting a
12245 // property that has both a setter and a slot.
12246 if (clasp->setProperty != Valueify(JS_StrictPropertyStub))
12247 RETURN_STOP("set new property with setter and slot");
12250 addPropShapeBefore = obj->lastProperty();
12252 return RECORD_CONTINUE;
/*
 * Called after the interpreter has added a new property to |obj| (set up by
 * addDataProperty). Verifies the shape transition, emits the on-trace
 * js_Add{,Atom}Property call plus the slot store, and finishes the SET
 * opcode's stack shuffle.
 */
12255 JS_REQUIRES_STACK AbortableRecordingStatus
12256 TraceRecorder::record_AddProperty(JSObject *obj)
12258 Value& objv = stackval(-2);
12259 JS_ASSERT(&objv.toObject() == obj);
12260 LIns* obj_ins = get(&objv);
12261 Value& v = stackval(-1);
12262 LIns* v_ins = get(&v);
12263 const Shape* shape = obj->lastProperty();
12265 if (!shape->hasDefaultSetter()) {
12266 JS_ASSERT(IsWatchedProperty(cx, shape));
12267 RETURN_STOP_A("assignment adds property with watchpoint");
// Debug checks: the new last shape must extend the shape recorded by
// addDataProperty, and must be a plain writable data property.
12271 JS_ASSERT(addPropShapeBefore);
12272 if (obj->inDictionaryMode())
12273 JS_ASSERT(shape->previous()->matches(addPropShapeBefore));
12275 JS_ASSERT(shape->previous() == addPropShapeBefore);
12276 JS_ASSERT(shape->isDataDescriptor());
12277 JS_ASSERT(shape->hasDefaultSetter());
12278 addPropShapeBefore = NULL;
12281 if (obj->inDictionaryMode())
12282 RETURN_STOP_A("assignment adds property to dictionary"); // FIXME: bug 625900
12284 // On trace, call js_Add{,Atom}Property to do the dirty work.
12285 LIns* args[] = { w.immpShapeGC(shape), obj_ins, cx_ins };
12286 jsbytecode op = *cx->regs->pc;
12287 bool isDefinitelyAtom = (op == JSOP_SETPROP);
12288 const CallInfo *ci = isDefinitelyAtom ? &js_AddAtomProperty_ci : &js_AddProperty_ci;
12289 LIns* ok_ins = w.call(ci, args);
12290 guard(false, w.eqi0(ok_ins), OOM_EXIT);
12292 // Box the value and store it in the new slot.
12293 CHECK_STATUS_A(InjectStatus(nativeSet(obj, obj_ins, shape, v, v_ins)));
12295 // Finish off a SET instruction by moving sp[-1] to sp[-2].
12296 if (op == JSOP_SETPROP || op == JSOP_SETNAME || op == JSOP_SETMETHOD)
12298 return ARECORD_CONTINUE;
/*
 * Write |v|/|v_ins| into a tracked stack location (|stackVp|) belonging to
 * an enclosing frame. Only allowed when the types agree, or when the slot is
 * a double and the new value is a promoted int32 (stored without promotion).
 */
12301 JS_REQUIRES_STACK RecordingStatus
12302 TraceRecorder::setUpwardTrackedVar(Value* stackVp, const Value &v, LIns* v_ins)
12304 JSValueType stackT = determineSlotType(stackVp);
12305 JSValueType otherT = getCoercedType(v);
12307 bool promote = true;
12309 if (stackT != otherT) {
// double slot receiving a promoted int32 is the one mismatch we accept.
12310 if (stackT == JSVAL_TYPE_DOUBLE && otherT == JSVAL_TYPE_INT32 && IsPromotedInt32(v_ins))
12313 RETURN_STOP("can't trace this upvar mutation");
12316 set(stackVp, v_ins, promote);
12318 return RECORD_CONTINUE;
/*
 * Record a store to an arg or var of a Call object. Three cases, in order:
 * (1) the Call's frame is on the trace stack — update the tracker;
 * (2) the Call has no live frame (null private) — store into its dslots;
 * (3) the frame exists but is off-trace — call the SetCallArg/Var builtins.
 */
12321 JS_REQUIRES_STACK RecordingStatus
12322 TraceRecorder::setCallProp(JSObject *callobj, LIns *callobj_ins, const Shape *shape,
12323 LIns *v_ins, const Value &v)
12325 // Set variables in on-trace-stack call objects by updating the tracker.
12326 JSStackFrame *fp = frameIfInRange(callobj);
12328 if (shape->setterOp() == SetCallArg) {
12329 JS_ASSERT(shape->hasShortID());
12330 uintN slot = uint16(shape->shortid);
12331 Value *vp2 = &fp->formalArg(slot);
12332 CHECK_STATUS(setUpwardTrackedVar(vp2, v, v_ins));
12333 return RECORD_CONTINUE;
12335 if (shape->setterOp() == SetCallVar) {
12336 JS_ASSERT(shape->hasShortID());
12337 uintN slot = uint16(shape->shortid);
12338 Value *vp2 = &fp->slots()[slot];
12339 CHECK_STATUS(setUpwardTrackedVar(vp2, v, v_ins));
12340 return RECORD_CONTINUE;
12342 RETURN_STOP("can't trace special CallClass setter");
12345 if (!callobj->getPrivate()) {
12346 // Because the parent guard in guardCallee ensures this Call object
12347 // will be the same object now and on trace, and because once a Call
12348 // object loses its frame it never regains one, on trace we will also
12349 // have a null private in the Call object. So all we need to do is
12350 // write the value to the Call object's slot.
12351 intN slot = uint16(shape->shortid);
12352 if (shape->setterOp() == SetCallArg) {
12353 JS_ASSERT(slot < ArgClosureTraits::slot_count(callobj));
12354 slot += ArgClosureTraits::slot_offset(callobj);
12355 } else if (shape->setterOp() == SetCallVar) {
12356 JS_ASSERT(slot < VarClosureTraits::slot_count(callobj));
12357 slot += VarClosureTraits::slot_offset(callobj);
12359 RETURN_STOP("can't trace special CallClass setter");
12362 // Now assert that the shortid get we did above was ok. Have to do it
12363 // after the RETURN_STOP above, since in that case we may in fact not
12364 // have a valid shortid; but we don't use it in that case anyway.
12365 JS_ASSERT(shape->hasShortID());
12367 LIns* slots_ins = NULL;
12368 stobj_set_dslot(callobj_ins, slot, slots_ins, v, v_ins);
12369 return RECORD_CONTINUE;
12372 // This is the hard case: we have a JSStackFrame private, but it's not in
12373 // range. During trace execution we may or may not have a JSStackFrame
12374 // anymore. Call the standard builtins, which handle that situation.
12376 // Set variables in off-trace-stack call objects by calling standard builtins.
12377 const CallInfo* ci = NULL;
12378 if (shape->setterOp() == SetCallArg)
12379 ci = &js_SetCallArg_ci;
12380 else if (shape->setterOp() == SetCallVar)
12381 ci = &js_SetCallVar_ci;
12383 RETURN_STOP("can't trace special CallClass setter");
12385 // Even though the frame is out of range, later we might be called as an
12386 // inner trace such that the target variable is defined in the outer trace
12387 // entry frame. For simplicity, we just fall off trace.
12389 w.eqp(entryFrameIns(), w.ldpObjPrivate(callobj_ins)),
12393 box_value_for_native_call(v, v_ins),
12394 w.nameImmw(JSID_BITS(SHAPE_USERID(shape))),
12398 LIns* call_ins = w.call(ci, args);
// Builtins return a status int; a zero result means failure.
12399 guard(false, w.name(w.eqi0(call_ins), "guard(set upvar)"), STATUS_EXIT);
12401 return RECORD_CONTINUE;
12405 * Emit a specialized, inlined copy of js_SetPropertyHelper for the current
12406 * instruction. On success, *deferredp is true if a call to record_AddProperty
/* ... is expected to complete the set (the new-property case). */
12409 JS_REQUIRES_STACK RecordingStatus
12410 TraceRecorder::setProperty(JSObject* obj, LIns* obj_ins, const Value &v, LIns* v_ins,
12413 *deferredp = false;
12415 JSAtom *atom = atoms[GET_INDEX(cx->regs->pc)];
12416 jsid id = ATOM_TO_JSID(atom);
12418 if (obj->getOps()->setProperty)
12419 RETURN_STOP("non-native object"); // FIXME: bug 625900
12423 const Shape* shape;
12424 CHECK_STATUS(lookupForSetPropertyOp(obj, obj_ins, id, &safe, &pobj, &shape));
12426 RETURN_STOP("setprop: lookup fail"); // FIXME: bug 625900
12428 // Handle Call objects specially. The Call objects we create on trace are
12429 // not fully populated until we leave trace. Calling the setter on such an
12430 // object wouldn't work.
12432 return setCallProp(obj, obj_ins, shape, v_ins, v);
12434 // Handle setting a property that is not found on obj or anywhere on its
12435 // the prototype chain.
12438 return addDataProperty(obj);
12441 // Check whether we can assign to/over the existing property.
12442 if (shape->isAccessorDescriptor()) {
12443 if (shape->hasDefaultSetter())
12444 RETURN_STOP("setting accessor property with no setter");
12445 } else if (!shape->writable()) {
12446 RETURN_STOP("setting readonly data property");
12449 // Handle setting an existing own property.
12451 if (*cx->regs->pc == JSOP_SETMETHOD) {
// Re-assigning the identical method object is a no-op.
12452 if (shape->isMethod() && &shape->methodObject() == &v.toObject())
12453 return RECORD_CONTINUE;
12454 RETURN_STOP("setmethod: property exists");
12456 return nativeSet(obj, obj_ins, shape, v, v_ins);
12459 // If shape is an inherited non-SHARED property, we will add a new,
12460 // shadowing data property.
12461 if (shape->hasSlot()) {
12462 // Avoid being tripped up by legacy special case for shortids, where
12463 // the new shadowing data property inherits the setter.
12464 if (shape->hasShortID() && !shape->hasDefaultSetter())
12465 RETURN_STOP("shadowing assignment with shortid");
12467 return addDataProperty(obj);
12470 // Handle setting an inherited SHARED property.
12471 // If it has the default setter, the assignment is a no-op.
12472 if (shape->hasDefaultSetter() && !shape->hasGetterValue())
12473 return RECORD_CONTINUE;
12474 return nativeSet(obj, obj_ins, shape, v, v_ins);
12477 /* Record a JSOP_SET{PROP,NAME,METHOD} instruction. */
/*
 * Reads the base object at sp[-2] and the value at sp[-1], delegates the
 * actual set to setProperty, and (unless deferred to record_AddProperty)
 * finishes the opcode's stack shuffle.
 */
12478 JS_REQUIRES_STACK RecordingStatus
12479 TraceRecorder::recordSetPropertyOp()
12481 Value& l = stackval(-2);
12483 RETURN_STOP("set property of primitive");
12484 JSObject* obj = &l.toObject();
12485 LIns* obj_ins = get(&l);
12487 Value& r = stackval(-1);
12488 LIns* r_ins = get(&r);
12491 CHECK_STATUS(setProperty(obj, obj_ins, r, r_ins, &deferred));
12493 // Finish off a SET instruction by moving sp[-1] to sp[-2]. But if
12494 // record_AddProperty is going to be called, we're not done with sp[-2] yet,
12495 // so delay this move until the end of record_AddProperty.
12498 return RECORD_CONTINUE;
12501 JS_REQUIRES_STACK AbortableRecordingStatus
12502 TraceRecorder::record_JSOP_SETPROP()
12504 return InjectStatus(recordSetPropertyOp());
12507 JS_REQUIRES_STACK AbortableRecordingStatus
12508 TraceRecorder::record_JSOP_SETMETHOD()
12510 return InjectStatus(recordSetPropertyOp());
12513 JS_REQUIRES_STACK AbortableRecordingStatus
12514 TraceRecorder::record_JSOP_SETNAME()
12516 return InjectStatus(recordSetPropertyOp());
/*
 * Record JSOP_INITPROP/JSOP_INITMETHOD (|op|) inside an object initializer:
 * the fresh object is at sp[-2] and the value at sp[-1]. Unlike the SET
 * opcodes, INIT opcodes do not write back to the stack.
 */
12519 JS_REQUIRES_STACK RecordingStatus
12520 TraceRecorder::recordInitPropertyOp(jsbytecode op)
12522 Value& l = stackval(-2);
12523 JSObject* obj = &l.toObject();
12524 LIns* obj_ins = get(&l);
12525 JS_ASSERT(obj->getClass() == &js_ObjectClass);
12527 Value& v = stackval(-1);
12528 LIns* v_ins = get(&v);
12530 JSAtom* atom = atoms[GET_INDEX(cx->regs->pc)];
12531 jsid id = js_CheckForStringIndex(ATOM_TO_JSID(atom));
12533 // If obj already has this property (because JSOP_NEWOBJECT already set its
12534 // shape or because the id appears more than once in the initializer), just
12535 // set it. The existing property can't be an accessor property: we wouldn't
12536 // get here, as JSOP_SETTER can't be recorded.
12537 if (const Shape* shape = obj->nativeLookup(id)) {
12538 // Don't assign a bare (non-cloned) function to an ordinary or method
12539 // property. The opposite case, assigning some other value to a method,
12540 // is OK. nativeSet emits code that trips the write barrier.
12541 if (op == JSOP_INITMETHOD)
12542 RETURN_STOP("initmethod: property exists");
12543 JS_ASSERT(shape->isDataDescriptor());
12544 JS_ASSERT(shape->hasSlot());
12545 JS_ASSERT(shape->hasDefaultSetter());
12546 return nativeSet(obj, obj_ins, shape, v, v_ins);
12549 // Duplicate the interpreter's special treatment of __proto__. Unlike the
12550 // SET opcodes, JSOP_INIT{PROP,METHOD} do not write to the stack.
12551 if (atom == cx->runtime->atomState.protoAtom) {
12553 return setProperty(obj, obj_ins, v, v_ins, &deferred);
12556 // Define a new property.
12557 return addDataProperty(obj);
// JSOP_INITPROP: thin wrapper delegating to the shared INIT-op recorder.
12560 JS_REQUIRES_STACK AbortableRecordingStatus
12561 TraceRecorder::record_JSOP_INITPROP()
12563 return InjectStatus(recordInitPropertyOp(JSOP_INITPROP));
// JSOP_INITMETHOD: thin wrapper delegating to the shared INIT-op recorder.
12566 JS_REQUIRES_STACK AbortableRecordingStatus
12567 TraceRecorder::record_JSOP_INITMETHOD()
12569 return InjectStatus(recordInitPropertyOp(JSOP_INITMETHOD));
// Prepare for a call that may deep-bail: publish a DEEP_BAIL_EXIT snapshot
// in tm->bailExit, emit a stack barrier so nanojit flushes stack writes,
// and drop guarded-shape knowledge (a deep bail may reshape objects).
// Presumably returns 'exit'; the return statement is not visible in this
// gapped listing -- confirm against the full source.
12572 JS_REQUIRES_STACK VMSideExit*
12573 TraceRecorder::enterDeepBailCall()
12575 // Take snapshot for DeepBail and store it in tm->bailExit.
12576 VMSideExit* exit = snapshot(DEEP_BAIL_EXIT);
12577 w.stTraceMonitorField(w.nameImmpNonGC(exit), bailExit);
12579 // Tell nanojit not to discard or defer stack writes before this call.
12580 w.xbarrier(createGuardRecord(exit));
12582 // Forget about guarded shapes, since deep bailers can reshape the world.
12583 forgetGuardedShapes();
// Counterpart to enterDeepBailCall(): null out tm->bailExit once the
// potentially-deep-bailing call is done, so a stale exit is never used.
12587 JS_REQUIRES_STACK void
12588 TraceRecorder::leaveDeepBailCall()
12590 // Keep tm->bailExit null when it's invalid.
12591 w.stTraceMonitorField(w.immpNull(), bailExit);
// Common tail for the GetProperty* builtins: load the boxed result from the
// vp allocation into the tracker slot(s), then arrange for monitorRecording
// to (a) guard on ok_ins and (b) unbox the pending result at outp.
12594 JS_REQUIRES_STACK void
12595 TraceRecorder::finishGetProp(LIns* obj_ins, LIns* vp_ins, LIns* ok_ins, Value* outp)
12597 // Store the boxed result (and this-object, if JOF_CALLOP) before the
12598 // guard. The deep-bail case requires this. If the property get fails,
12599 // these slots will be ignored anyway.
12600 // N.B. monitorRecording expects get(outp)->isLoad()
12601 JS_ASSERT(vp_ins->isop(LIR_allocp));
12602 LIns* result_ins = w.lddAlloc(vp_ins);
12603 set(outp, result_ins);
12604 if (js_CodeSpec[*cx->regs->pc].format & JOF_CALLOP)
12605 set(outp + 1, obj_ins);
12607 // We need to guard on ok_ins, but this requires a snapshot of the state
12608 // after this op. monitorRecording will do it for us.
12609 pendingGuardCondition = ok_ins;
12611 // Note there is a boxed result sitting on the stack. The caller must leave
12612 // it there for the time being, since the return type is not yet
12613 // known. monitorRecording will emit the code to unbox it.
12614 pendingUnboxSlot = outp;
// Convert a rooted string to a jsid, atomizing it if needed; the atomized
// string is written back through *namep so the caller's GC root stays live.
// NOTE(review): the return-type line and error-path lines are missing from
// this gapped listing; presumably returns a bool/JSBool success flag --
// confirm against the full source.
12618 RootedStringToId(JSContext* cx, JSString** namep, jsid* idp)
12620 JSString* name = *namep;
12621 if (name->isAtomized()) {
12622 *idp = INTERNED_STRING_TO_JSID(name);
12626 JSAtom* atom = js_AtomizeString(cx, name, 0);
12629 *namep = ATOM_TO_STRING(atom); /* write back to GC root */
12630 *idp = ATOM_TO_JSID(atom);
// A tiny fixed-capacity polymorphic inline cache mapping (shape, id) pairs
// to slot numbers, used by GetPropertyByName below. scan() does a linear
// probe of the filled entries; update() appends if capacity allows.
// NOTE(review): struct bodies are partially elided in this gapped listing
// (PICTableEntry fields, scan()'s return statements, entryCount member).
12634 static const size_t PIC_TABLE_ENTRY_COUNT = 32;
12636 struct PICTableEntry
12645 PICTable() : entryCount(0) {}
12647 PICTableEntry entries[PIC_TABLE_ENTRY_COUNT];
12650 bool scan(uint32 shape, jsid id, uint32 *slotOut) {
12651 for (size_t i = 0; i < entryCount; ++i) {
12652 PICTableEntry &entry = entries[i];
12653 if (entry.shape == shape && entry.id == id) {
12654 *slotOut = entry.slot;
12661 void update(uint32 shape, jsid id, uint32 slot) {
12662 if (entryCount >= PIC_TABLE_ENTRY_COUNT)
12664 PICTableEntry &newEntry = entries[entryCount++];
12665 newEntry.shape = shape;
12667 newEntry.slot = slot;
// On-trace builtin: look up obj[*namep]. Tries, in order: the object's own
// getProperty op, the per-callsite PIC table, then the generic helper
// js_GetPropertyHelperWithShape (updating the PIC on a cacheable hit).
// Errors are reported via SetBuiltinError + WasBuiltinSuccessful.
12671 static JSBool FASTCALL
12672 GetPropertyByName(JSContext* cx, JSObject* obj, JSString** namep, Value* vp, PICTable *picTable)
12674 TraceMonitor *tm = JS_TRACE_MONITOR_ON_TRACE(cx);
12676 LeaveTraceIfGlobalObject(cx, obj);
12679 if (!RootedStringToId(cx, namep, &id)) {
12680 SetBuiltinError(tm);
12684 /* Delegate to the op, if present. */
12685 PropertyIdOp op = obj->getOps()->getProperty;
12687 bool result = op(cx, obj, obj, id, vp);
12689 SetBuiltinError(tm);
12690 return WasBuiltinSuccessful(tm);
12693 /* Try to hit in the cache. */
12695 if (picTable->scan(obj->shape(), id, &slot)) {
12696 *vp = obj->getSlot(slot);
12697 return WasBuiltinSuccessful(tm);
12700 const Shape *shape;
12702 if (!js_GetPropertyHelperWithShape(cx, obj, obj, id, JSGET_METHOD_BARRIER, vp, &shape,
12704 SetBuiltinError(tm);
12708 /* Only update the table when the object is the holder of the property. */
12709 if (obj == holder && shape->hasSlot() && shape->hasDefaultGetter()) {
12711 * Note: we insert the non-normalized id into the table so you don't need to
12712 * normalize it before hitting in the table (faster lookup).
12714 picTable->update(obj->shape(), id, shape->slot);
12717 return WasBuiltinSuccessful(tm);
12719 JS_DEFINE_CALLINFO_5(static, BOOL_FAIL, GetPropertyByName, CONTEXT, OBJECT, STRINGPTR, VALUEPTR,
12721 0, ACCSET_STORE_ANY)
12723 // Convert the value in a slot to a string and store the resulting string back
12724 // in the slot (typically in order to root it).
// Stringifies *vp (asserted primitive, so js_ValueToString cannot re-enter
// arbitrary script) and updates both the interpreter slot and the tracker.
12725 JS_REQUIRES_STACK RecordingStatus
12726 TraceRecorder::primitiveToStringInPlace(Value* vp)
12729 JS_ASSERT(v.isPrimitive());
12731 if (!v.isString()) {
12732 // v is not a string. Turn it into one. js_ValueToString is safe
12733 // because v is not an object.
12735 TraceMonitor *localtm = traceMonitor;
12737 JSString *str = js_ValueToString(cx, v);
12738 JS_ASSERT(localtm->recorder == this);
12740 RETURN_ERROR("failed to stringify element id");
12742 set(vp, stringify(*vp));
12744 // Write the string back to the stack to save the interpreter some work
12745 // and to ensure snapshots get the correct type for this slot.
12748 return RECORD_CONTINUE;
// Record a property get keyed by a (possibly non-string) primitive name:
// stringify the id in place, then emit a deep-bail-protected call to the
// GetPropertyByName builtin with a freshly allocated PIC table.
12751 JS_REQUIRES_STACK RecordingStatus
12752 TraceRecorder::getPropertyByName(LIns* obj_ins, Value* idvalp, Value* outp)
12754 CHECK_STATUS(primitiveToStringInPlace(idvalp));
12755 enterDeepBailCall();
12757 // Call GetPropertyByName. The vp parameter points to stack because this is
12758 // what the interpreter currently does. obj and id are rooted on the
12759 // interpreter stack, but the slot at vp is not a root.
12760 LIns* vp_ins = w.name(w.allocp(sizeof(Value)), "vp");
12761 LIns* idvalp_ins = w.name(addr(idvalp), "idvalp");
12762 PICTable *picTable = new (traceAlloc()) PICTable();
12763 LIns* pic_ins = w.nameImmpNonGC(picTable);
12764 LIns* args[] = {pic_ins, vp_ins, idvalp_ins, obj_ins, cx_ins};
12765 LIns* ok_ins = w.call(&GetPropertyByName_ci, args);
12767 // GetPropertyByName can assign to *idvalp, so the tracker has an incorrect
12768 // entry for that address. Correct it. (If the value in the address is
12769 // never used again, the usual case, Nanojit will kill this load.)
12770 // The Address could be made more precise with some effort (idvalp_ins may
12771 // be a stack location), but it's not worth it because this case is rare.
12772 tracker.set(idvalp, w.ldp(AnyAddress(idvalp_ins)));
12774 finishGetProp(obj_ins, vp_ins, ok_ins, outp);
12775 leaveDeepBailCall();
12776 return RECORD_CONTINUE;
// On-trace builtin: obj[index] for an int32 index, via js_Int32ToId and the
// generic getProperty path. Errors are reported via SetBuiltinError.
12779 static JSBool FASTCALL
12780 GetPropertyByIndex(JSContext* cx, JSObject* obj, int32 index, Value* vp)
12782 TraceMonitor *tm = JS_TRACE_MONITOR_ON_TRACE(cx);
12784 LeaveTraceIfGlobalObject(cx, obj);
12786 AutoIdRooter idr(cx);
12787 if (!js_Int32ToId(cx, index, idr.addr()) || !obj->getProperty(cx, idr.id(), vp)) {
12788 SetBuiltinError(tm);
12791 return WasBuiltinSuccessful(tm);
12793 JS_DEFINE_CALLINFO_4(static, BOOL_FAIL, GetPropertyByIndex, CONTEXT, OBJECT, INT32, VALUEPTR, 0,
// Record an indexed property get: coerce the index to int32 and emit a
// deep-bail-protected call to the GetPropertyByIndex builtin.
12796 JS_REQUIRES_STACK RecordingStatus
12797 TraceRecorder::getPropertyByIndex(LIns* obj_ins, LIns* index_ins, Value* outp)
12799 CHECK_STATUS(makeNumberInt32(index_ins, &index_ins));
12801 // See note in getPropertyByName about vp.
12802 enterDeepBailCall();
12803 LIns* vp_ins = w.name(w.allocp(sizeof(Value)), "vp");
12804 LIns* args[] = {vp_ins, index_ins, obj_ins, cx_ins};
12805 LIns* ok_ins = w.call(&GetPropertyByIndex_ci, args);
12806 finishGetProp(obj_ins, vp_ins, ok_ins, outp);
12807 leaveDeepBailCall();
12808 return RECORD_CONTINUE;
// On-trace builtin: obj[id] for a precomputed jsid, via the generic
// getProperty path. Errors are reported via SetBuiltinError.
12811 static JSBool FASTCALL
12812 GetPropertyById(JSContext* cx, JSObject* obj, jsid id, Value* vp)
12814 TraceMonitor *tm = JS_TRACE_MONITOR_ON_TRACE(cx);
12816 LeaveTraceIfGlobalObject(cx, obj);
12817 if (!obj->getProperty(cx, id, vp)) {
12818 SetBuiltinError(tm);
12821 return WasBuiltinSuccessful(tm);
12823 JS_DEFINE_CALLINFO_4(static, BOOL_FAIL, GetPropertyById, CONTEXT, OBJECT, JSID, VALUEPTR,
12824 0, ACCSET_STORE_ANY)
// Record a property get whose id comes from the current bytecode: special
// case JSOP_LENGTH, otherwise decode the atom from a JOF_ATOM or
// JOF_SLOTATOM operand, then call the GetPropertyById builtin.
12826 JS_REQUIRES_STACK RecordingStatus
12827 TraceRecorder::getPropertyById(LIns* obj_ins, Value* outp)
12831 jsbytecode* pc = cx->regs->pc;
12832 const JSCodeSpec& cs = js_CodeSpec[*pc];
12833 if (*pc == JSOP_LENGTH) {
12834 atom = cx->runtime->atomState.lengthAtom;
12835 } else if (JOF_TYPE(cs.format) == JOF_ATOM) {
12836 atom = atoms[GET_INDEX(pc)];
12838 JS_ASSERT(JOF_TYPE(cs.format) == JOF_SLOTATOM);
12839 atom = atoms[GET_INDEX(pc + SLOTNO_LEN)];
// jsid must be pointer-sized so it can be passed as an immediate word below.
12842 JS_STATIC_ASSERT(sizeof(jsid) == sizeof(void *));
12843 jsid id = ATOM_TO_JSID(atom);
12845 // Call GetPropertyById. See note in getPropertyByName about vp.
12846 enterDeepBailCall();
12847 LIns* vp_ins = w.name(w.allocp(sizeof(Value)), "vp");
12848 LIns* args[] = {vp_ins, w.nameImmw(JSID_BITS(id)), obj_ins, cx_ins};
12849 LIns* ok_ins = w.call(&GetPropertyById_ci, args);
12850 finishGetProp(obj_ins, vp_ins, ok_ins, outp);
12851 leaveDeepBailCall();
12852 return RECORD_CONTINUE;
12855 /* Manually inlined, specialized copy of js_NativeGet. */
// On-trace builtin: invoke a native (C) getter op directly on obj, after
// asserting the shape is still the one that would be found by lookup.
12856 static JSBool FASTCALL
12857 GetPropertyWithNativeGetter(JSContext* cx, JSObject* obj, Shape* shape, Value* vp)
12859 TraceMonitor *tm = JS_TRACE_MONITOR_ON_TRACE(cx);
12861 LeaveTraceIfGlobalObject(cx, obj);
12865 const Shape* shape2;
12866 JS_ASSERT_IF(SafeLookup(cx, obj, shape->id, &pobj, &shape2), shape == shape2);
12869 // Shape::get contains a special case for With objects. We can elide it
12870 // here because With objects are, we claim, never on the operand stack
12871 // while recording.
12872 JS_ASSERT(obj->getClass() != &js_WithClass);
12874 vp->setUndefined();
12875 if (!shape->getterOp()(cx, obj, SHAPE_USERID(shape), vp)) {
12876 SetBuiltinError(tm);
12879 return WasBuiltinSuccessful(tm);
12881 JS_DEFINE_CALLINFO_4(static, BOOL_FAIL, GetPropertyWithNativeGetter,
12882 CONTEXT, OBJECT, SHAPE, VALUEPTR, 0, ACCSET_STORE_ANY)
// Record a get through a native (slotless, non-default) getter by calling
// the GetPropertyWithNativeGetter builtin under deep-bail protection.
12884 JS_REQUIRES_STACK RecordingStatus
12885 TraceRecorder::getPropertyWithNativeGetter(LIns* obj_ins, const Shape* shape, Value* outp)
12887 JS_ASSERT(!shape->hasGetterValue());
12888 JS_ASSERT(shape->slot == SHAPE_INVALID_SLOT);
12889 JS_ASSERT(!shape->hasDefaultGetterOrIsMethod());
12891 // Call GetPropertyWithNativeGetter. See note in getPropertyByName about vp.
12892 // FIXME - We should call the getter directly. Using a builtin function for
12893 // now because it buys some extra asserts. See bug 508310.
12894 enterDeepBailCall();
12895 LIns* vp_ins = w.name(w.allocp(sizeof(Value)), "vp");
12896 LIns* args[] = {vp_ins, w.nameImmpNonGC(shape), obj_ins, cx_ins};
12897 LIns* ok_ins = w.call(&GetPropertyWithNativeGetter_ci, args);
12898 finishGetProp(obj_ins, vp_ins, ok_ins, outp);
12899 leaveDeepBailCall();
12900 return RECORD_CONTINUE;
// Record a get through a scripted (JS) getter by rewriting the operand
// stack into a call frame and running an imacro; the stack layout differs
// per opcode (GETPROP-style, CALLPROP, GET{THIS,ARG,LOCAL}PROP).
// NOTE(review): case labels for the first switch arm are elided in this
// gapped listing.
12903 JS_REQUIRES_STACK RecordingStatus
12904 TraceRecorder::getPropertyWithScriptGetter(JSObject *obj, LIns* obj_ins, const Shape* shape)
12906 if (!canCallImacro())
12907 RETURN_STOP("cannot trace script getter, already in imacro");
12909 // Rearrange the stack in preparation for the imacro, taking care to adjust
12910 // the interpreter state and the tracker in the same way. This adjustment
12911 // is noted in imacros.jsasm with .fixup tags.
12912 Value getter = shape->getterValue();
12913 Value*& sp = cx->regs->sp;
12914 switch (*cx->regs->pc) {
12918 set(&sp[-1], get(&sp[-2]));
12920 set(&sp[-2], w.immpObjGC(&getter.toObject()));
12921 return callImacroInfallibly(getprop_imacros.scriptgetter);
12923 case JSOP_CALLPROP:
12926 set(&sp[-2], w.immpObjGC(&getter.toObject()));
12928 set(&sp[-1], get(&sp[-3]));
12929 return callImacroInfallibly(callprop_imacros.scriptgetter);
12931 case JSOP_GETTHISPROP:
12932 case JSOP_GETARGPROP:
12933 case JSOP_GETLOCALPROP:
12936 set(&sp[-2], w.immpObjGC(&getter.toObject()));
12937 sp[-1] = ObjectValue(*obj);
12938 set(&sp[-1], obj_ins);
12939 return callImacroInfallibly(getthisprop_imacros.scriptgetter);
12942 RETURN_STOP("cannot trace script getter for this opcode");
// Record str.charCodeAt(idx): flatten the string if it is a rope (guarding
// on the flatten call's success), guard idx < length, then load the char
// and widen it to a double.
12946 JS_REQUIRES_STACK RecordingStatus
12947 TraceRecorder::getCharCodeAt(JSString *str, LIns* str_ins, LIns* idx_ins, LIns** out)
12949 CHECK_STATUS(makeNumberInt32(idx_ins, &idx_ins));
12950 idx_ins = w.ui2p(idx_ins);
12951 LIns *lengthAndFlags_ins = w.ldpStringLengthAndFlags(str_ins);
12952 if (MaybeBranch mbr = w.jt(w.eqp0(w.andp(lengthAndFlags_ins, w.nameImmw(JSString::ROPE_BIT)))))
12954 LIns *args[] = { str_ins, cx_ins };
12955 LIns *ok_ins = w.call(&js_Flatten_ci, args);
12956 guard(false, w.eqi0(ok_ins), OOM_EXIT);
12961 w.ltup(idx_ins, w.rshupN(lengthAndFlags_ins, JSString::LENGTH_SHIFT)),
12962 snapshot(MISMATCH_EXIT));
12963 *out = w.i2d(w.getStringChar(str_ins, idx_ins));
12964 return RECORD_CONTINUE;
// The shift below (4 or 5) encodes sizeof(JSString); lock that assumption in.
12967 JS_STATIC_ASSERT(sizeof(JSString) == 16 || sizeof(JSString) == 32);
// Map a single char (guarded < UNIT_STRING_LIMIT) to its interned
// one-character string by indexing into JSString::unitStringTable.
12970 JS_REQUIRES_STACK LIns*
12971 TraceRecorder::getUnitString(LIns* str_ins, LIns* idx_ins)
12973 LIns *ch_ins = w.getStringChar(str_ins, idx_ins);
12974 guard(true, w.ltuiN(ch_ins, UNIT_STRING_LIMIT), MISMATCH_EXIT);
12975 return w.addp(w.nameImmpNonGC(JSString::unitStringTable),
12976 w.lshpN(w.ui2p(ch_ins), (sizeof(JSString) == 16) ? 4 : 5));
// Record str.charAt(idx) / str[idx]: flatten ropes, then either guard the
// index in range (JSOP_GETELEM) or emit a diamond yielding the empty string
// for out-of-range indices (charAt semantics), using a stack phi slot.
12979 JS_REQUIRES_STACK RecordingStatus
12980 TraceRecorder::getCharAt(JSString *str, LIns* str_ins, LIns* idx_ins, JSOp mode, LIns** out)
12982 CHECK_STATUS(makeNumberInt32(idx_ins, &idx_ins));
12983 idx_ins = w.ui2p(idx_ins);
12984 LIns *lengthAndFlags_ins = w.ldpStringLengthAndFlags(str_ins);
12985 if (MaybeBranch mbr = w.jt(w.eqp0(w.andp(lengthAndFlags_ins,
12986 w.nameImmw(JSString::ROPE_BIT)))))
12988 LIns *args[] = { str_ins, cx_ins };
12989 LIns *ok_ins = w.call(&js_Flatten_ci, args);
12990 guard(false, w.eqi0(ok_ins), OOM_EXIT);
12994 LIns* inRange = w.ltup(idx_ins, w.rshupN(lengthAndFlags_ins, JSString::LENGTH_SHIFT));
12996 if (mode == JSOP_GETELEM) {
12997 guard(true, inRange, MISMATCH_EXIT);
12999 *out = getUnitString(str_ins, idx_ins);
13001 LIns *phi_ins = w.allocp(sizeof(JSString *));
13002 w.stAlloc(w.nameImmpNonGC(cx->runtime->emptyString), phi_ins);
13004 if (MaybeBranch mbr = w.jf(inRange)) {
13005 LIns *unitstr_ins = getUnitString(str_ins, idx_ins);
13006 w.stAlloc(unitstr_ins, phi_ins);
13009 *out = w.ldpAlloc(phi_ins);
13011 return RECORD_CONTINUE;
13014 // Typed array tracing depends on EXPANDED_LOADSTORE and F2I
13015 #if NJ_EXPANDED_LOADSTORE_SUPPORTED && NJ_F2I_SUPPORTED
13016 static bool OkToTraceTypedArrays = true;
// NOTE(review): the #else/#endif lines are elided in this gapped listing;
// the 'false' arm presumably sits under an #else -- confirm in full source.
13018 static bool OkToTraceTypedArrays = false;
// Emit a guard that the arguments-object slot at idx is not the
// JS_ARGS_HOLE magic value (i.e. the argument was not deleted).
13021 JS_REQUIRES_STACK void
13022 TraceRecorder::guardNotHole(LIns *argsobj_ins, LIns *idx_ins)
13024 // vp = &argsobj->slots[JSSLOT_ARGS_DATA].slots[idx]
13025 LIns* argsData_ins = w.getObjPrivatizedSlot(argsobj_ins, JSObject::JSSLOT_ARGS_DATA);
13026 LIns* slotOffset_ins = w.addp(w.nameImmw(offsetof(ArgumentsData, slots)),
13027 w.ui2p(w.muliN(idx_ins, sizeof(Value))));
13028 LIns* vp_ins = w.addp(argsData_ins, slotOffset_ins);
13031 w.name(is_boxed_magic(ArgsSlotOffsetAddress(vp_ins), JS_ARGS_HOLE),
13032 "guard(not deleted arg)"),
// Record JSOP_GETELEM/JSOP_CALLELEM: dispatch on the lvalue/index kinds --
// string[int], named (non-int) index, arguments object, dense array,
// typed array, and the generic indexed-get fallback.
// NOTE(review): this listing has gaps; several guards, declarations and
// closing braces are not visible here.
13036 JS_REQUIRES_STACK AbortableRecordingStatus
13037 TraceRecorder::record_JSOP_GETELEM()
13039 bool call = *cx->regs->pc == JSOP_CALLELEM;
13041 Value& idx = stackval(-1);
13042 Value& lval = stackval(-2);
13044 LIns* obj_ins = get(&lval);
13045 LIns* idx_ins = get(&idx);
13047 // Special case for array-like access of strings.
13048 if (lval.isString() && hasInt32Repr(idx)) {
13050 RETURN_STOP_A("JSOP_CALLELEM on a string");
13051 int i = asInt32(idx);
13052 if (size_t(i) >= lval.toString()->length())
13053 RETURN_STOP_A("Invalid string index in JSOP_GETELEM");
13055 CHECK_STATUS_A(getCharAt(lval.toString(), obj_ins, idx_ins, JSOP_GETELEM, &char_ins));
13056 set(&lval, char_ins);
13057 return ARECORD_CONTINUE;
13060 if (lval.isPrimitive())
// NOTE(review): "JSOP_GETLEM" below is a typo for "JSOP_GETELEM" in the
// abort-reason string; fixing it changes a runtime string, so flagging only.
13061 RETURN_STOP_A("JSOP_GETLEM on a primitive");
13062 RETURN_IF_XML_A(lval);
13064 JSObject* obj = &lval.toObject();
13065 if (obj == globalObj)
13066 RETURN_STOP_A("JSOP_GETELEM on global");
13069 /* Property access using a string name or something we have to stringify. */
13070 if (!idx.isInt32()) {
13071 if (!idx.isPrimitive())
13072 RETURN_STOP_A("object used as index");
13074 return InjectStatus(getPropertyByName(obj_ins, &idx, &lval));
13077 if (obj->isArguments()) {
13078 // Don't even try to record if out of range or reading a deleted arg
13079 int32 int_idx = idx.toInt32();
13080 if (int_idx < 0 || int_idx >= (int32)obj->getArgsInitialLength())
13081 RETURN_STOP_A("cannot trace arguments with out of range index");
13082 if (obj->getArgsElement(int_idx).isMagic(JS_ARGS_HOLE))
13083 RETURN_STOP_A("reading deleted args element");
13085 // Only trace reading arguments out of active, tracked frame
13087 JSStackFrame *afp = guardArguments(obj, obj_ins, &depth);
13089 Value* vp = &afp->canonicalActualArg(int_idx);
13090 if (idx_ins->isImmD()) {
13091 JS_ASSERT(int_idx == (int32)idx_ins->immD());
13092 guardNotHole(obj_ins, w.nameImmi(int_idx));
13095 // If the index is not a constant expression, we generate LIR to load the value from
13096 // the native stack area. The guard on js_ArgumentClass above ensures the up-to-date
13097 // value has been written back to the native stack area.
13098 CHECK_STATUS_A(makeNumberInt32(idx_ins, &idx_ins));
13101 * For small nactual,
13102 * 0 <= int_idx < nactual iff unsigned(int_idx) < unsigned(nactual).
13105 w.name(w.ltui(idx_ins, w.nameImmui(afp->numActualArgs())),
13106 "guard(upvar index in range)"),
13109 guardNotHole(obj_ins, idx_ins);
13111 JSValueType type = getCoercedType(*vp);
13113 // Guard that the argument has the same type on trace as during recording.
13116 // In this case, we are in the same frame where the arguments object was created.
13117 // The entry type map is not necessarily up-to-date, so we capture a new type map
13118 // for this point in the code.
13119 unsigned stackSlots = NativeStackSlots(cx, 0 /* callDepth */);
13120 JSValueType* typemap = new (traceAlloc()) JSValueType[stackSlots];
13121 DetermineTypesVisitor detVisitor(*this, typemap);
13122 VisitStackSlots(detVisitor, cx, 0);
13123 typemap_ins = w.nameImmpNonGC(typemap + 2 /* callee, this */);
13125 // In this case, we are in a deeper frame from where the arguments object was
13126 // created. The type map at the point of the call out from the creation frame
13128 // Note: this relies on the assumption that we abort on setting an element of
13129 // an arguments object in any deeper frame.
13130 LIns* fip_ins = w.ldpRstack(lirbuf->rp, (callDepth-depth)*sizeof(FrameInfo*));
13131 typemap_ins = w.addp(fip_ins, w.nameImmw(sizeof(FrameInfo) + 2/*callee,this*/ * sizeof(JSValueType)));
13134 LIns* type_ins = w.lduc2uiConstTypeMapEntry(typemap_ins, idx_ins);
13136 w.name(w.eqi(type_ins, w.immi(type)), "guard(type-stable upvar)"),
13139 // Read the value out of the native stack area.
13140 size_t stackOffset = nativespOffset(&afp->canonicalActualArg(0));
13141 LIns* args_addr_ins = w.addp(lirbuf->sp, w.nameImmw(stackOffset));
13142 LIns* argi_addr_ins = w.addp(args_addr_ins,
13143 w.ui2p(w.muli(idx_ins, w.nameImmi(sizeof(double)))));
13145 // The Address could be more precise, but ValidateWriter
13146 // doesn't recognise the complex expression involving 'sp' as
13147 // an stack access, and it's not worth the effort to be
13148 // more precise because this case is rare.
13149 v_ins = stackLoad(AnyAddress(argi_addr_ins), type);
13154 set(&idx, obj_ins);
13155 return ARECORD_CONTINUE;
13157 RETURN_STOP_A("can't reach arguments object's frame");
13160 if (obj->isDenseArray()) {
13161 // Fast path for dense arrays accessed with a integer index.
13165 VMSideExit* branchExit = snapshot(BRANCH_EXIT);
13166 guardDenseArray(obj_ins, branchExit);
13167 CHECK_STATUS_A(denseArrayElement(lval, idx, vp, v_ins, addr_ins, branchExit));
13170 set(&idx, obj_ins);
13171 return ARECORD_CONTINUE;
13174 if (OkToTraceTypedArrays && js_IsTypedArray(obj)) {
13175 // Fast path for typed arrays accessed with a integer index.
13177 guardClass(obj_ins, obj->getClass(), snapshot(BRANCH_EXIT), LOAD_CONST);
13178 CHECK_STATUS_A(typedArrayElement(lval, idx, vp, v_ins));
13181 set(&idx, obj_ins);
13182 return ARECORD_CONTINUE;
13185 return InjectStatus(getPropertyByIndex(obj_ins, idx_ins, &lval));
13188 /* Functions used by JSOP_SETELEM */
// On-trace builtin: obj[*namep] = *vp via setProperty, honoring strict mode.
13190 static JSBool FASTCALL
13191 SetPropertyByName(JSContext* cx, JSObject* obj, JSString** namep, Value* vp, JSBool strict)
13193 TraceMonitor *tm = JS_TRACE_MONITOR_ON_TRACE(cx);
13195 LeaveTraceIfGlobalObject(cx, obj);
13198 if (!RootedStringToId(cx, namep, &id) || !obj->setProperty(cx, id, vp, strict)) {
13199 SetBuiltinError(tm);
13202 return WasBuiltinSuccessful(tm);
13204 JS_DEFINE_CALLINFO_5(static, BOOL_FAIL, SetPropertyByName,
13205 CONTEXT, OBJECT, STRINGPTR, VALUEPTR, BOOL,
13206 0, ACCSET_STORE_ANY)
// On-trace builtin: define obj[*namep] as an enumerable data property
// (initializer semantics -- defineProperty, not setProperty).
13208 static JSBool FASTCALL
13209 InitPropertyByName(JSContext* cx, JSObject* obj, JSString** namep, ValueArgType arg)
13211 TraceMonitor *tm = JS_TRACE_MONITOR_ON_TRACE(cx);
13213 LeaveTraceIfGlobalObject(cx, obj);
13216 if (!RootedStringToId(cx, namep, &id) ||
13217 !obj->defineProperty(cx, id, ValueArgToConstRef(arg), NULL, NULL, JSPROP_ENUMERATE)) {
13218 SetBuiltinError(tm);
13221 return WasBuiltinSuccessful(tm);
13223 JS_DEFINE_CALLINFO_4(static, BOOL_FAIL, InitPropertyByName, CONTEXT, OBJECT, STRINGPTR, VALUE,
13224 0, ACCSET_STORE_ANY)
// Record a named element store: stringify the id in place, then call either
// InitPropertyByName (initializers) or SetPropertyByName (assignments)
// under deep-bail protection, guarding on the builtin's success.
13226 JS_REQUIRES_STACK RecordingStatus
13227 TraceRecorder::initOrSetPropertyByName(LIns* obj_ins, Value* idvalp, Value* rvalp, bool init)
13229 CHECK_STATUS(primitiveToStringInPlace(idvalp));
13232 LIns* v_ins = box_value_for_native_call(*rvalp, get(rvalp));
13233 enterDeepBailCall();
13234 LIns* idvalp_ins = w.name(addr(idvalp), "idvalp");
13235 LIns* args[] = {v_ins, idvalp_ins, obj_ins, cx_ins};
13236 pendingGuardCondition = w.call(&InitPropertyByName_ci, args);
13238 // See note in getPropertyByName about vp.
13239 LIns* vp_ins = box_value_into_alloc(*rvalp, get(rvalp));
13240 enterDeepBailCall();
13241 LIns* idvalp_ins = w.name(addr(idvalp), "idvalp");
13242 LIns* args[] = { strictModeCode_ins, vp_ins, idvalp_ins, obj_ins, cx_ins };
13243 pendingGuardCondition = w.call(&SetPropertyByName_ci, args);
13246 leaveDeepBailCall();
13247 return RECORD_CONTINUE;
// On-trace builtin: obj[index] = *vp for an int32 index, via setProperty.
13250 static JSBool FASTCALL
13251 SetPropertyByIndex(JSContext* cx, JSObject* obj, int32 index, Value* vp, JSBool strict)
13253 TraceMonitor *tm = JS_TRACE_MONITOR_ON_TRACE(cx);
13255 LeaveTraceIfGlobalObject(cx, obj);
13257 AutoIdRooter idr(cx);
13258 if (!js_Int32ToId(cx, index, idr.addr()) || !obj->setProperty(cx, idr.id(), vp, strict)) {
13259 SetBuiltinError(tm);
13262 return WasBuiltinSuccessful(tm);
13264 JS_DEFINE_CALLINFO_5(static, BOOL_FAIL, SetPropertyByIndex, CONTEXT, OBJECT, INT32, VALUEPTR, BOOL,
13265 0, ACCSET_STORE_ANY)
// On-trace builtin: define obj[index] as an enumerable data property
// (initializer semantics) for an int32 index.
13267 static JSBool FASTCALL
13268 InitPropertyByIndex(JSContext* cx, JSObject* obj, int32 index, ValueArgType arg)
13270 TraceMonitor *tm = JS_TRACE_MONITOR_ON_TRACE(cx);
13272 LeaveTraceIfGlobalObject(cx, obj);
13274 AutoIdRooter idr(cx);
13275 if (!js_Int32ToId(cx, index, idr.addr()) ||
13276 !obj->defineProperty(cx, idr.id(), ValueArgToConstRef(arg), NULL, NULL, JSPROP_ENUMERATE)) {
13277 SetBuiltinError(tm);
13280 return WasBuiltinSuccessful(tm);
13282 JS_DEFINE_CALLINFO_4(static, BOOL_FAIL, InitPropertyByIndex, CONTEXT, OBJECT, INT32, VALUE,
13283 0, ACCSET_STORE_ANY)
// Record an int-indexed element store: coerce the index to int32, then call
// either InitPropertyByIndex (initializers) or SetPropertyByIndex
// (assignments) under deep-bail protection, guarding on success.
13285 JS_REQUIRES_STACK RecordingStatus
13286 TraceRecorder::initOrSetPropertyByIndex(LIns* obj_ins, LIns* index_ins, Value* rvalp, bool init)
13288 CHECK_STATUS(makeNumberInt32(index_ins, &index_ins));
13291 LIns* rval_ins = box_value_for_native_call(*rvalp, get(rvalp));
13292 enterDeepBailCall();
13293 LIns* args[] = {rval_ins, index_ins, obj_ins, cx_ins};
13294 pendingGuardCondition = w.call(&InitPropertyByIndex_ci, args);
13296 // See note in getPropertyByName about vp.
13297 LIns* vp_ins = box_value_into_alloc(*rvalp, get(rvalp));
13298 enterDeepBailCall();
13299 LIns* args[] = {strictModeCode_ins, vp_ins, index_ins, obj_ins, cx_ins};
13300 pendingGuardCondition = w.call(&SetPropertyByIndex_ci, args);
13303 leaveDeepBailCall();
13304 return RECORD_CONTINUE;
// Shared recorder for JSOP_SETELEM/JSOP_INITELEM (stack offsets are passed
// in so ENUMELEM-style layouts also work). Dispatches on the receiver and
// index kinds: named store, typed-array store (with per-element-type value
// coercion and store width), generic indexed store, and the dense-array
// fast path with capacity-growth and hole-overwrite diamonds.
// NOTE(review): this listing has gaps; several guards, break statements and
// closing braces are not visible here.
13307 JS_REQUIRES_STACK AbortableRecordingStatus
13308 TraceRecorder::setElem(int lval_spindex, int idx_spindex, int v_spindex)
13310 Value& v = stackval(v_spindex);
13311 Value& idx = stackval(idx_spindex);
13312 Value& lval = stackval(lval_spindex);
13314 if (lval.isPrimitive())
13315 RETURN_STOP_A("left JSOP_SETELEM operand is not an object");
13316 RETURN_IF_XML_A(lval);
13318 JSObject* obj = &lval.toObject();
13319 LIns* obj_ins = get(&lval);
13320 LIns* idx_ins = get(&idx);
13321 LIns* v_ins = get(&v);
13323 if (obj->isArguments())
13324 RETURN_STOP_A("can't trace setting elements of the |arguments| object");
13326 if (obj == globalObj)
13327 RETURN_STOP_A("can't trace setting elements on the global object");
13329 if (!idx.isInt32()) {
13330 if (!idx.isPrimitive())
13331 RETURN_STOP_A("non-primitive index");
13332 CHECK_STATUS_A(initOrSetPropertyByName(obj_ins, &idx, &v,
13333 *cx->regs->pc == JSOP_INITELEM));
13334 } else if (OkToTraceTypedArrays && js_IsTypedArray(obj)) {
13335 // Fast path: assigning to element of typed array.
13336 VMSideExit* branchExit = snapshot(BRANCH_EXIT);
13338 // Ensure array is a typed array and is the same type as what was written
13339 guardClass(obj_ins, obj->getClass(), branchExit, LOAD_CONST);
13341 js::TypedArray* tarray = js::TypedArray::fromJSObject(obj);
13343 LIns* priv_ins = w.ldpObjPrivate(obj_ins);
13345 // The index was on the stack and is therefore a LIR float; force it to
13347 CHECK_STATUS_A(makeNumberInt32(idx_ins, &idx_ins));
13349 // Ensure idx >= 0 && idx < length (by using uint32)
13350 CHECK_STATUS_A(guard(true,
13351 w.name(w.ltui(idx_ins, w.ldiConstTypedArrayLength(priv_ins)),
13353 OVERFLOW_EXIT, /* abortIfAlwaysExits = */true));
13355 // We're now ready to store
13356 LIns* data_ins = w.ldpConstTypedArrayData(priv_ins);
13357 LIns* pidx_ins = w.ui2p(idx_ins);
13358 LIns* typed_v_ins = v_ins;
13360 // If it's not a number, convert objects to NaN,
13361 // null to 0, and call StringToNumber or BooleanOrUndefinedToNumber
13363 if (!v.isNumber()) {
13365 typed_v_ins = w.immd(0);
13366 } else if (v.isUndefined()) {
13367 typed_v_ins = w.immd(js_NaN);
13368 } else if (v.isString()) {
13369 LIns* ok_ins = w.allocp(sizeof(JSBool));
13370 LIns* args[] = { ok_ins, typed_v_ins, cx_ins };
13371 typed_v_ins = w.call(&js_StringToNumber_ci, args);
13373 w.name(w.eqi0(w.ldiAlloc(ok_ins)), "guard(oom)"),
13375 } else if (v.isBoolean()) {
13376 JS_ASSERT(v.isBoolean());
13377 typed_v_ins = w.i2d(typed_v_ins);
13379 typed_v_ins = w.immd(js_NaN);
// First switch: coerce the double value to the element's integer/clamped
// representation; float element types keep the double unchanged.
13383 switch (tarray->type) {
13384 case js::TypedArray::TYPE_INT8:
13385 case js::TypedArray::TYPE_INT16:
13386 case js::TypedArray::TYPE_INT32:
13387 typed_v_ins = d2i(typed_v_ins);
13389 case js::TypedArray::TYPE_UINT8:
13390 case js::TypedArray::TYPE_UINT16:
13391 case js::TypedArray::TYPE_UINT32:
13392 typed_v_ins = d2u(typed_v_ins);
13394 case js::TypedArray::TYPE_UINT8_CLAMPED:
13395 if (IsPromotedInt32(typed_v_ins)) {
13396 typed_v_ins = w.demoteToInt32(typed_v_ins);
13397 typed_v_ins = w.cmovi(w.ltiN(typed_v_ins, 0),
13399 w.cmovi(w.gtiN(typed_v_ins, 0xff),
13403 typed_v_ins = w.call(&js_TypedArray_uint8_clamp_double_ci, &typed_v_ins);
13406 case js::TypedArray::TYPE_FLOAT32:
13407 case js::TypedArray::TYPE_FLOAT64:
13408 // Do nothing, this is already a float
13411 JS_NOT_REACHED("Unknown typed array type in tracer");
// Second switch: emit the store with the width matching the element type.
13414 switch (tarray->type) {
13415 case js::TypedArray::TYPE_INT8:
13416 case js::TypedArray::TYPE_UINT8_CLAMPED:
13417 case js::TypedArray::TYPE_UINT8:
13418 w.sti2cTypedArrayElement(typed_v_ins, data_ins, pidx_ins);
13420 case js::TypedArray::TYPE_INT16:
13421 case js::TypedArray::TYPE_UINT16:
13422 w.sti2sTypedArrayElement(typed_v_ins, data_ins, pidx_ins);
13424 case js::TypedArray::TYPE_INT32:
13425 case js::TypedArray::TYPE_UINT32:
13426 w.stiTypedArrayElement(typed_v_ins, data_ins, pidx_ins);
13428 case js::TypedArray::TYPE_FLOAT32:
13429 w.std2fTypedArrayElement(typed_v_ins, data_ins, pidx_ins);
13431 case js::TypedArray::TYPE_FLOAT64:
13432 w.stdTypedArrayElement(typed_v_ins, data_ins, pidx_ins);
13435 JS_NOT_REACHED("Unknown typed array type in tracer");
13437 } else if (idx.toInt32() < 0 || !obj->isDenseArray()) {
13438 CHECK_STATUS_A(initOrSetPropertyByIndex(obj_ins, idx_ins, &v,
13439 *cx->regs->pc == JSOP_INITELEM));
13441 // Fast path: assigning to element of dense array.
13442 VMSideExit* branchExit = snapshot(BRANCH_EXIT);
13443 VMSideExit* mismatchExit = snapshot(MISMATCH_EXIT);
13445 // Make sure the array is actually dense.
13446 if (!obj->isDenseArray())
13447 return ARECORD_STOP;
13448 guardDenseArray(obj_ins, branchExit);
13450 // The index was on the stack and is therefore a LIR float. Force it to
13452 CHECK_STATUS_A(makeNumberInt32(idx_ins, &idx_ins));
13454 if (!js_EnsureDenseArrayCapacity(cx, obj, idx.toInt32()))
13455 RETURN_STOP_A("couldn't ensure dense array capacity for setelem");
13457 // Grow the array if the index exceeds the capacity. This happens
13458 // rarely, eg. less than 1% of the time in SunSpider.
13459 LIns* capacity_ins = w.ldiDenseArrayCapacity(obj_ins);
13461 * It's important that CSE works across this control-flow diamond
13462 * because it really helps series of interleaved GETELEM and SETELEM
13463 * operations. Likewise with the diamond below.
13465 w.pauseAddingCSEValues();
13466 if (MaybeBranch mbr = w.jt(w.name(w.ltui(idx_ins, capacity_ins), "inRange"))) {
13467 LIns* args[] = { idx_ins, obj_ins, cx_ins };
13468 LIns* res_ins = w.call(&js_EnsureDenseArrayCapacity_ci, args);
13469 guard(false, w.eqi0(res_ins), mismatchExit);
13472 w.resumeAddingCSEValues();
13474 // Get the address of the element.
13475 LIns *elemp_ins = w.name(w.getDslotAddress(obj_ins, idx_ins), "elemp");
13477 // If we are overwriting a hole:
13478 // - Guard that we don't have any indexed properties along the prototype chain.
13479 // - Check if the length has changed; if so, update it to index+1.
13480 // This happens moderately often, eg. close to 10% of the time in
13481 // SunSpider, and for some benchmarks it's close to 100%.
13482 Address dslotAddr = DSlotsAddress(elemp_ins);
13483 LIns* isHole_ins = w.name(is_boxed_magic(dslotAddr, JS_ARRAY_HOLE),
13485 w.pauseAddingCSEValues();
13486 if (MaybeBranch mbr1 = w.jf(isHole_ins)) {
13488 * It's important that this use branchExit, not mismatchExit, since
13489 * changes to shapes should just mean we compile a new branch, not
13490 * throw the whole trace away.
13492 CHECK_STATUS_A(guardPrototypeHasNoIndexedProperties(obj, obj_ins, branchExit));
13493 LIns* length_ins = w.lduiObjPrivate(obj_ins);
13494 if (MaybeBranch mbr2 = w.jt(w.ltui(idx_ins, length_ins))) {
13495 LIns* newLength_ins = w.name(w.addiN(idx_ins, 1), "newLength");
13496 w.stuiObjPrivate(obj_ins, newLength_ins);
13501 w.resumeAddingCSEValues();
13503 // Right, actually set the element.
13504 box_value_into(v, v_ins, dslotAddr);
13507 jsbytecode* pc = cx->regs->pc;
13508 if (*pc == JSOP_SETELEM && pc[JSOP_SETELEM_LENGTH] != JSOP_POP)
13511 return ARECORD_CONTINUE;
// Record JSOP_SETELEM by delegating to setElem() with the stack offsets of
// the object (-3), the index (-2), and the value being stored (-1).
13514 JS_REQUIRES_STACK AbortableRecordingStatus
13515 TraceRecorder::record_JSOP_SETELEM()
13517 return setElem(-3, -2, -1);
// Trace-callable helper: returns true iff |obj|'s global object is
// |globalObj|. Emitted on trace (see record_JSOP_CALLNAME) to guard that a
// non-constant callee does not cross globals.
13520 static JSBool FASTCALL
13521 CheckSameGlobal(JSObject *obj, JSObject *globalObj)
13523 return obj->getGlobal() == globalObj;
// CallInfo descriptor allowing the recorder to emit calls to CheckSameGlobal.
13525 JS_DEFINE_CALLINFO_2(static, BOOL, CheckSameGlobal, OBJECT, OBJECT, 0, ACCSET_STORE_ANY)
// Record JSOP_CALLNAME: resolve a named callee either on the scope chain or
// on the global object, and push the function object plus an undefined |this|.
// (Several local declarations -- vp, funobj, funobj_ins, nr, obj2, pcval --
// live on lines elided from this excerpt.)
13527 JS_REQUIRES_STACK AbortableRecordingStatus
13528 TraceRecorder::record_JSOP_CALLNAME()
13530 JSObject* scopeObj = &cx->fp()->scopeChain();
// Non-global scope: look the name up along the scope chain and insist the
// result is a function object.
13533 if (scopeObj != globalObj) {
13536 CHECK_STATUS_A(scopeChainProp(scopeObj, vp, funobj_ins, nr, &scopeObj));
13539 if (!vp->isObject())
13540 RETURN_STOP_A("callee is not an object");
13541 funobj = &vp->toObject();
13542 if (!funobj->isFunction())
13543 RETURN_STOP_A("callee is not a function");
// Global scope: consult the property cache, which guards shape for us.
13545 LIns* obj_ins = w.immpObjGC(globalObj);
13549 CHECK_STATUS_A(test_property_cache(scopeObj, obj_ins, obj2, pcval));
13551 if (pcval.isNull() || !pcval.isFunObj())
13552 RETURN_STOP_A("callee is not a function");
13554 funobj = &pcval.toFunObj();
13555 funobj_ins = w.immpObjGC(funobj);
13558 // Detect crossed globals early. The interpreter could decide to compute
13559 // a non-Undefined |this| value, and we want to make sure that we'll (1)
13560 // abort in this case, and (2) bail out early if a callee will need special
13561 // |this| computation. Note that if (scopeObj != globalObj),
13562 // scopeChainProp() guarantees that scopeObj is a cacheable scope.
13563 if (scopeObj == globalObj) {
13564 JSFunction *fun = funobj->getFunctionPrivate();
13565 if (!fun->isInterpreted() || !fun->inStrictMode()) {
13566 if (funobj->getGlobal() != globalObj)
13567 RETURN_STOP_A("callee crosses globals");
13569 // If the funobj is not constant, we need may a guard that the
13570 // callee will not cross globals. This is only the case for non-
13571 // compile-and-go trees.
13572 if (!funobj_ins->isImmP() && !tree->script->compileAndGo) {
13573 LIns* args[] = { w.immpObjGC(globalObj), funobj_ins };
13574 guard(false, w.eqi0(w.call(&CheckSameGlobal_ci, args)), MISMATCH_EXIT);
// Push callee and the implicit undefined |this| expected by JSOP_CALLNAME.
13579 stack(0, funobj_ins);
13580 stack(1, w.immiUndefined());
13581 return ARECORD_CONTINUE;
// CallInfo descriptors for the out-of-line upvar readers used by
// TraceRecorder::upvar() below: one each for formal arguments, local
// variables, and stack (eval/global frame) slots.
13584 JS_DEFINE_CALLINFO_5(extern, UINT32, GetUpvarArgOnTrace, CONTEXT, UINT32, INT32, UINT32,
13585 DOUBLEPTR, 0, ACCSET_STORE_ANY)
13586 JS_DEFINE_CALLINFO_5(extern, UINT32, GetUpvarVarOnTrace, CONTEXT, UINT32, INT32, UINT32,
13587 DOUBLEPTR, 0, ACCSET_STORE_ANY)
13588 JS_DEFINE_CALLINFO_5(extern, UINT32, GetUpvarStackOnTrace, CONTEXT, UINT32, INT32, UINT32,
13589 DOUBLEPTR, 0, ACCSET_STORE_ANY)
13592 * Record LIR to get the given upvar. Return the LIR instruction for the upvar
13593 * value. NULL is returned only on a can't-happen condition with an invalid
13594 * typemap. The value of the upvar is returned as v.
13596 JS_REQUIRES_STACK LIns*
13597 TraceRecorder::upvar(JSScript* script, JSUpvarArray* uva, uintN index, Value& v)
13600 * Try to find the upvar in the current trace's tracker. For &vr to be
13601 * the address of the jsval found in js::GetUpvar, we must initialize
13602 * vr directly with the result, so it is a reference to the same location.
13603 * It does not work to assign the result to v, because v is an already
13604 * existing reference that points to something else.
13606 UpvarCookie cookie = uva->vector[index];
13607 const Value& vr = GetUpvar(cx, script->staticLevel, cookie);
// Fast path: the upvar's location is already tracked on this trace.
13610 if (LIns* ins = attemptImport(&vr))
13614 * The upvar is not in the current trace, so get the upvar value exactly as
13615 * the interpreter does and unbox.
13617 uint32 level = script->staticLevel - cookie.level();
13618 uint32 cookieSlot = cookie.slot();
13619 JSStackFrame* fp = cx->findFrameAtLevel(level);
13620 const CallInfo* ci;
// Select the out-of-line reader matching where the slot lives: stack slot
// for non-function/eval frames, formal arg, callee, or local var. (The
// |slot| assignments for the first three cases are elided from this excerpt.)
13622 if (!fp->isFunctionFrame() || fp->isEvalFrame()) {
13623 ci = &GetUpvarStackOnTrace_ci;
13625 } else if (cookieSlot < fp->numFormalArgs()) {
13626 ci = &GetUpvarArgOnTrace_ci;
13628 } else if (cookieSlot == UpvarCookie::CALLEE_SLOT) {
13629 ci = &GetUpvarArgOnTrace_ci;
13632 ci = &GetUpvarVarOnTrace_ci;
13633 slot = cookieSlot - fp->numFormalArgs();
// The callee writes the raw value into this scratch double slot; guard that
// the observed type stays stable, then load it back with that type.
13636 LIns* outp = w.allocp(sizeof(double));
13639 w.nameImmi(callDepth),
13644 LIns* call_ins = w.call(ci, args);
13645 JSValueType type = getCoercedType(v);
13647 w.name(w.eqi(call_ins, w.immi(type)), "guard(type-stable upvar)"),
13649 return stackLoad(AllocSlotsAddress(outp), type);
13653 * Generate LIR to load a value from the native stack. This method ensures that
13654 * the correct LIR load operator is used.
13657 TraceRecorder::stackLoad(Address addr, uint8 type)
// Dispatch on the unboxed JSValueType stored at |addr|:
// doubles load as-is, pointers (objects/strings/null) as words, int32 is
// widened to double, and small immediates (boolean/undefined/magic) as i32.
13660 case JSVAL_TYPE_DOUBLE:
13661 return w.ldd(addr);
13662 case JSVAL_TYPE_NONFUNOBJ:
13663 case JSVAL_TYPE_STRING:
13664 case JSVAL_TYPE_FUNOBJ:
13665 case JSVAL_TYPE_NULL:
13666 return w.ldp(addr);
13667 case JSVAL_TYPE_INT32:
13668 return w.i2d(w.ldi(addr));
13669 case JSVAL_TYPE_BOOLEAN:
13670 case JSVAL_TYPE_UNDEFINED:
13671 case JSVAL_TYPE_MAGIC:
13672 return w.ldi(addr);
// Boxed jsvals must never appear in an upvar type map.
13673 case JSVAL_TYPE_BOXED:
13675 JS_NOT_REACHED("found jsval type in an upvar type map entry");
// Record JSOP_GETFCSLOT: read a flat-closure upvar out of the callee's
// private upvars array and unbox it onto the trace stack.
13680 JS_REQUIRES_STACK AbortableRecordingStatus
13681 TraceRecorder::record_JSOP_GETFCSLOT()
13683 JSObject& callee = cx->fp()->callee();
13684 LIns* callee_ins = get(&cx->fp()->calleeValue());
// Load the upvars vector stored in the callee's reserved slot.
13686 LIns* upvars_ins = w.getObjPrivatizedSlot(callee_ins, JSObject::JSSLOT_FLAT_CLOSURE_UPVARS);
13688 unsigned index = GET_UINT16(cx->regs->pc);
// Unbox with a BRANCH_EXIT guard so a type change compiles a new branch
// rather than invalidating the trace.
13689 LIns *v_ins = unbox_value(callee.getFlatClosureUpvar(index),
13690 FCSlotsAddress(upvars_ins, index),
13691 snapshot(BRANCH_EXIT));
13693 return ARECORD_CONTINUE;
// Record JSOP_CALLFCSLOT: same as GETFCSLOT, plus push the implicit
// undefined |this| the call form expects.
13696 JS_REQUIRES_STACK AbortableRecordingStatus
13697 TraceRecorder::record_JSOP_CALLFCSLOT()
13699 CHECK_STATUS_A(record_JSOP_GETFCSLOT());
13700 stack(1, w.immiUndefined());
13701 return ARECORD_CONTINUE;
// Emit guards pinning down the identity (and, for closures, the parent
// scope) of |callee| so the inlined call remains valid on later executions.
13704 JS_REQUIRES_STACK RecordingStatus
13705 TraceRecorder::guardCallee(Value& callee)
13707 JSObject& callee_obj = callee.toObject();
13708 JS_ASSERT(callee_obj.isFunction());
13709 JSFunction* callee_fun = (JSFunction*) callee_obj.getPrivate();
13712 * First, guard on the callee's function (JSFunction*) identity. This is
13713 * necessary since tracing always inlines function calls. But note that
13714 * TR::functionCall avoids calling TR::guardCallee for constant methods
13715 * (those hit in the property cache from JSOP_CALLPROP).
13717 VMSideExit* branchExit = snapshot(BRANCH_EXIT);
13718 LIns* callee_ins = get(&callee);
// Keep the callee alive for the lifetime of the trace.
13719 tree->gcthings.addUnique(callee);
13722 w.eqp(w.ldpObjPrivate(callee_ins), w.nameImmpNonGC(callee_fun)),
13726 * Second, consider guarding on the parent scope of the callee.
13728 * As long as we guard on parent scope, we are guaranteed when recording
13729 * variable accesses for a Call object having no private data that we can
13730 * emit code that avoids checking for an active JSStackFrame for the Call
13731 * object (which would hold fresh variable values -- the Call object's
13732 * slots would be stale until the stack frame is popped). This is because
13733 * Call objects can't pick up a new stack frame in their private slot once
13734 * they have none. TR::callProp and TR::setCallProp depend on this fact and
13735 * document where; if this guard is removed make sure to fix those methods.
13736 * Search for the "parent guard" comments in them.
13738 * In general, a loop in an escaping function scoped by Call objects could
13739 * be traced before the function has returned, and the trace then triggered
13740 * after, or vice versa. The function must escape, i.e., be a "funarg", or
13741 * else there's no need to guard callee parent at all. So once we know (by
13742 * static analysis) that a function may escape, we cannot avoid guarding on
13743 * either the private data of the Call object or the Call object itself, if
13744 * we wish to optimize for the particular deactivated stack frame (null
13745 * private data) case as noted above.
13747 if (callee_fun->isInterpreted() &&
13748 (!FUN_NULL_CLOSURE(callee_fun) || callee_fun->script()->bindings.hasUpvars())) {
13749 JSObject* parent = callee_obj.getParent();
13751 if (parent != globalObj) {
13752 if (!parent->isCall())
13753 RETURN_STOP("closure scoped by neither the global object nor a Call object");
13756 w.eqp(w.ldpObjParent(callee_ins), w.immpObjGC(parent)),
13760 return RECORD_CONTINUE;
13764 * Prepare the given |arguments| object to be accessed on trace. If the return
13765 * value is non-NULL, then the given |arguments| object refers to a frame on
13766 * the current trace and is guaranteed to refer to the same frame on trace for
13767 * all later executions.
13769 JS_REQUIRES_STACK JSStackFrame *
13770 TraceRecorder::guardArguments(JSObject *obj, LIns* obj_ins, unsigned *depthp)
13772 JS_ASSERT(obj->isArguments());
// NULL if obj's frame is not within the trace's call depth (the early-return
// for that case is elided from this excerpt).
13774 JSStackFrame *afp = frameIfInRange(obj, depthp);
13778 VMSideExit *exit = snapshot(MISMATCH_EXIT);
13779 guardClass(obj_ins, obj->getClass(), exit, LOAD_CONST);
// Guard that the frame's args object on trace is the very object we saw
// while recording.
13781 LIns* args_ins = getFrameObjPtr(afp->addressOfArgs());
13782 LIns* cmp = w.eqp(args_ins, obj_ins);
13783 guard(true, cmp, exit);
// Emit code to create the |this| object for |new ctor(...)| on trace,
// returning it in *thisobj_insp. Aborts recording for cases the tracer
// cannot model (Function.prototype itself, bound functions).
13787 JS_REQUIRES_STACK RecordingStatus
13788 TraceRecorder::createThis(JSObject& ctor, LIns* ctor_ins, LIns** thisobj_insp)
13790 JS_ASSERT(ctor.getFunctionPrivate()->isInterpreted());
13791 if (ctor.getFunctionPrivate()->isFunctionPrototype())
13792 RETURN_STOP("new Function.prototype");
13793 if (ctor.isBoundFunction())
13794 RETURN_STOP("new applied to bound function");
13796 // Given the above conditions, ctor.prototype is a non-configurable data
13797 // property with a slot.
13798 const Shape *shape = LookupInterpretedFunctionPrototype(cx, &ctor);
13800 RETURN_ERROR("new f: error resolving f.prototype");
13802 // At run time ctor might be a different instance of the same function. Its
13803 // .prototype property might not be resolved yet. Guard on the function
13804 // object's shape to make sure .prototype is there.
13806 // However, if ctor_ins is constant, which is usual, we don't need to
13807 // guard: .prototype is non-configurable, and an object's non-configurable
13808 // data properties always stay in the same slot for the life of the object.
13809 if (!ctor_ins->isImmP())
13810 guardShape(ctor_ins, &ctor, ctor.shape(), "ctor_shape", snapshot(MISMATCH_EXIT))
13812 // Pass the slot of ctor.prototype to js_CreateThisFromTrace. We can only
13813 // bake the slot into the trace, not the value, since .prototype is
13815 uintN protoSlot = shape->slot;
13816 LIns* args[] = { w.nameImmw(protoSlot), ctor_ins, cx_ins };
13817 *thisobj_insp = w.call(&js_CreateThisFromTrace_ci, args);
// A null result means the allocation failed: leave trace via OOM_EXIT.
13818 guard(false, w.eqp0(*thisobj_insp), OOM_EXIT);
13819 return RECORD_CONTINUE;
// Record an inlined call to an interpreted function: build the FrameInfo
// describing the caller's stack layout and push it on the trace's rp stack.
13822 JS_REQUIRES_STACK RecordingStatus
13823 TraceRecorder::interpretedFunctionCall(Value& fval, JSFunction* fun, uintN argc, bool constructing)
13826 * The function's identity (JSFunction and therefore JSScript) is guarded,
13827 * so we can optimize away the function call if the corresponding script is
13828 * empty. No need to worry about crossing globals or relocating argv, even,
13831 if (fun->script()->isEmpty()) {
// For |new f()| with empty f the result is the new object; otherwise the
// call's result is undefined.
13834 CHECK_STATUS(createThis(fval.toObject(), get(&fval), &rval_ins));
13836 rval_ins = w.immiUndefined();
13837 stack(-2 - argc, rval_ins);
13838 return RECORD_CONTINUE;
13841 if (fval.toObject().getGlobal() != globalObj)
13842 RETURN_STOP("JSOP_CALL or JSOP_NEW crosses global scopes");
13844 JSStackFrame* const fp = cx->fp();
13846 if (constructing) {
13848 CHECK_STATUS(createThis(fval.toObject(), get(&fval), &thisobj_ins));
13849 stack(-int(argc) - 1, thisobj_ins);
13852 // Generate a type map for the outgoing frame and stash it in the LIR
13853 unsigned stackSlots = NativeStackSlots(cx, 0 /* callDepth */);
// FrameInfo is followed in memory by its typemap (one byte per slot).
13854 FrameInfo* fi = (FrameInfo*)
13855 tempAlloc().alloc(sizeof(FrameInfo) + stackSlots * sizeof(JSValueType));
13856 JSValueType* typemap = (JSValueType*)(fi + 1);
13858 DetermineTypesVisitor detVisitor(*this, typemap);
13859 VisitStackSlots(detVisitor, cx, 0);
13861 JS_ASSERT(argc < FrameInfo::CONSTRUCTING_FLAG);
// Keep the callee alive for the trace, and describe the caller frame.
13863 tree->gcthings.addUnique(fval);
13864 fi->pc = cx->regs->pc;
13865 fi->imacpc = fp->maybeImacropc();
13866 fi->spdist = cx->regs->sp - fp->slots();
13867 fi->set_argc(uint16(argc), constructing);
13868 fi->callerHeight = stackSlots - (2 + argc);
13869 fi->callerArgc = fp->isGlobalFrame() || fp->isEvalFrame() ? 0 : fp->numActualArgs();
13871 if (callDepth >= tree->maxCallDepth)
13872 tree->maxCallDepth = callDepth + 1;
// Deduplicate FrameInfos across the tree; memoize may fail on OOM.
13874 fi = traceMonitor->frameCache->memoize(fi);
13876 RETURN_STOP("out of memory");
13877 w.stRstack(w.nameImmpNonGC(fi), lirbuf->rp, callDepth * sizeof(FrameInfo*));
13879 #if defined JS_JIT_SPEW
13880 debug_only_printf(LC_TMTracer, "iFC frameinfo=%p, stack=%d, map=", (void*)fi,
13882 for (unsigned i = 0; i < fi->callerHeight; i++)
13883 debug_only_printf(LC_TMTracer, "%c", TypeToChar(fi->get_typemap()[i]));
13884 debug_only_print0(LC_TMTracer, "\n");
// Switch atom table to the callee script's atoms for subsequent recording.
13887 updateAtoms(fun->u.i.script);
13888 return RECORD_CONTINUE;
13892 * We implement JSOP_FUNAPPLY/JSOP_FUNCALL using imacros
// Determine the call mode for |fp|: if we are inside a fun.apply/fun.call
// imacro, the mode reflects that opcode. (The return statements are elided
// from this excerpt.)
13895 GetCallMode(JSStackFrame *fp)
13897 if (fp->hasImacropc()) {
13898 JSOp op = (JSOp) *fp->imacropc();
13899 if (op == JSOP_FUNAPPLY || op == JSOP_FUNCALL)
// Record JSOP_CALL: delegate to functionCall with the argc from the current
// pc and the call mode derived from any active imacro.
13905 JS_REQUIRES_STACK AbortableRecordingStatus
13906 TraceRecorder::record_JSOP_CALL()
13908 uintN argc = GET_ARGC(cx->regs->pc);
13909 cx->assertValidStackDepth(argc + 2);
13910 return InjectStatus(functionCall(argc, GetCallMode(cx->fp())));
// Imacro dispatch tables for fun.apply/fun.call, indexed by the number of
// arguments being forwarded (0..8). record_JSOP_FUNAPPLY bounds-checks
// against JS_ARRAY_LENGTH of these tables before indexing.
13913 static jsbytecode* funapply_imacro_table[] = {
13914 funapply_imacros.apply0,
13915 funapply_imacros.apply1,
13916 funapply_imacros.apply2,
13917 funapply_imacros.apply3,
13918 funapply_imacros.apply4,
13919 funapply_imacros.apply5,
13920 funapply_imacros.apply6,
13921 funapply_imacros.apply7,
13922 funapply_imacros.apply8
13925 static jsbytecode* funcall_imacro_table[] = {
13926 funcall_imacros.call0,
13927 funcall_imacros.call1,
13928 funcall_imacros.call2,
13929 funcall_imacros.call3,
13930 funcall_imacros.call4,
13931 funcall_imacros.call5,
13932 funcall_imacros.call6,
13933 funcall_imacros.call7,
13934 funcall_imacros.call8
// Record JSOP_FUNCALL: shares the FUNAPPLY recorder, which distinguishes
// apply from call by inspecting the native being invoked.
13937 JS_REQUIRES_STACK AbortableRecordingStatus
13938 TraceRecorder::record_JSOP_FUNCALL()
13940 return record_JSOP_FUNAPPLY();
// Record JSOP_FUNAPPLY (and, via record_JSOP_FUNCALL, JSOP_FUNCALL): when
// the callee is the native fun.apply or fun.call, rewrite the call through
// an argc-specialized imacro; otherwise fall back to record_JSOP_CALL.
13943 JS_REQUIRES_STACK AbortableRecordingStatus
13944 TraceRecorder::record_JSOP_FUNAPPLY()
13946 jsbytecode *pc = cx->regs->pc;
13947 uintN argc = GET_ARGC(pc);
13948 cx->assertValidStackDepth(argc + 2);
13950 Value* vp = cx->regs->sp - (argc + 2);
13952 JSObject* aobj = NULL;
13953 LIns* aobj_ins = NULL;
13955 JS_ASSERT(!cx->fp()->hasImacropc());
13957 if (!IsFunctionObject(vp[0]))
13958 return record_JSOP_CALL();
13959 RETURN_IF_XML_A(vp[0]);
13961 JSObject* obj = &vp[0].toObject();
13962 JSFunction* fun = GET_FUNCTION_PRIVATE(cx, obj);
13963 if (FUN_INTERPRETED(fun))
13964 return record_JSOP_CALL();
// Only the real fun.apply / fun.call natives get the imacro treatment.
13966 bool apply = fun->u.n.native == js_fun_apply;
13967 if (!apply && fun->u.n.native != js_fun_call)
13968 return record_JSOP_CALL();
13971 * We don't trace apply and call with a primitive 'this', which is the
13972 * first positional parameter, unless 'this' is null. That's ok.
13974 if (argc > 0 && !vp[2].isObjectOrNull())
13975 return record_JSOP_CALL();
13978 * Guard on the identity of this, which is the function we are applying.
13980 if (!IsFunctionObject(vp[1]))
13981 RETURN_STOP_A("callee is not a function");
13982 CHECK_STATUS_A(guardCallee(vp[1]));
13984 if (apply && argc >= 2) {
13986 RETURN_STOP_A("apply with excess arguments");
13987 if (vp[3].isPrimitive())
13988 RETURN_STOP_A("arguments parameter of apply is primitive");
13989 aobj = &vp[3].toObject();
13990 aobj_ins = get(&vp[3]);
13993 * We trace dense arrays and arguments objects. The code we generate
13994 * for apply uses imacros to handle a specific number of arguments.
13996 if (aobj->isDenseArray()) {
13997 guardDenseArray(aobj_ins, MISMATCH_EXIT);
// Guard that the array length seen on trace matches the recorded length,
// since the chosen imacro is specialized to it.
13998 length = aobj->getArrayLength();
14000 w.eqiN(w.lduiObjPrivate(aobj_ins), length),
14002 } else if (aobj->isArguments()) {
14004 JSStackFrame *afp = guardArguments(aobj, aobj_ins, &depth);
14006 RETURN_STOP_A("can't reach arguments object's frame");
14007 if (aobj->isArgsLengthOverridden())
14008 RETURN_STOP_A("can't trace arguments with overridden length");
14009 guardArgsLengthNotAssigned(aobj_ins);
14010 length = afp->numActualArgs();
14012 RETURN_STOP_A("arguments parameter of apply is not a dense array or argments object");
14015 if (length >= JS_ARRAY_LENGTH(funapply_imacro_table))
14016 RETURN_STOP_A("too many arguments to apply");
14018 return InjectStatus(callImacro(funapply_imacro_table[length]));
14021 if (argc >= JS_ARRAY_LENGTH(funcall_imacro_table))
14022 RETURN_STOP_A("too many arguments to call");
14024 return InjectStatus(callImacro(funcall_imacro_table[argc]));
// Post-call fixup after a traceable native has executed on trace: check the
// builtin's error status, unbox or coerce its return value as dictated by
// pendingSpecializedNative's flags, and update the tracked stack value.
14027 JS_REQUIRES_STACK AbortableRecordingStatus
14028 TraceRecorder::record_NativeCallComplete()
14030 if (pendingSpecializedNative == IGNORE_NATIVE_CALL_COMPLETE_CALLBACK)
14031 return ARECORD_CONTINUE;
14034 JS_ASSERT(pendingSpecializedNative);
14035 jsbytecode* pc = cx->regs->pc;
14036 JS_ASSERT(*pc == JSOP_CALL || *pc == JSOP_FUNCALL || *pc == JSOP_FUNAPPLY ||
14037 *pc == JSOP_NEW || *pc == JSOP_SETPROP);
14040 Value& v = stackval(-1);
14041 LIns* v_ins = get(&v);
14044 * At this point the generated code has already called the native function
14045 * and we can no longer fail back to the original pc location (JSOP_CALL)
14046 * because that would cause the interpreter to re-execute the native
14047 * function, which might have side effects.
14049 * Instead, the snapshot() call below sees that we are currently parked on
14050 * a traceable native's JSOP_CALL instruction, and it will advance the pc
14051 * to restore by the length of the current opcode. If the native's return
14052 * type is jsval, snapshot() will also indicate in the type map that the
14053 * element on top of the stack is a boxed value which doesn't need to be
14054 * boxed if the type guard generated by unbox_value() fails.
14057 if (JSTN_ERRTYPE(pendingSpecializedNative) == FAIL_STATUS) {
14058 leaveDeepBailCall();
14060 LIns* status = w.ldiStateField(builtinStatus);
14061 if (pendingSpecializedNative == &generatedSpecializedNative) {
14062 LIns* ok_ins = v_ins;
14065 * If we run a generic traceable native, the return value is in the argument
14066 * vector for native function calls. The actual return value of the native is a JSBool
14067 * indicating the error status.
14070 Address nativeRvalAddr = AllocSlotsAddress(native_rval_ins);
14071 if (pendingSpecializedNative->flags & JSTN_CONSTRUCTOR) {
14075 // v_ins := the object payload from native_rval_ins
14076 // cond_ins := true if native_rval_ins contains a JSObject*
14077 unbox_any_object(nativeRvalAddr, &v_ins, &cond_ins);
14078 // x := v_ins if native_rval_ins contains a JSObject*, NULL otherwise
14079 x = w.cmovp(cond_ins, v_ins, w.immw(0));
14080 // v_ins := newobj_ins if native_rval_ins doesn't contain a JSObject*,
14081 // the object payload from native_rval_ins otherwise
14082 v_ins = w.cmovp(w.eqp0(x), newobj_ins, x);
14084 v_ins = w.ldd(nativeRvalAddr);
// Fold the native's JSBool result into the builtin status word, then exit
// the trace if the status indicates failure.
14088 propagateFailureToBuiltinStatus(ok_ins, status);
14090 guard(true, w.eqi0(status), STATUS_EXIT);
14093 if (pendingSpecializedNative->flags & JSTN_UNBOX_AFTER) {
14095 * If we side exit on the unboxing code due to a type change, make sure that the boxed
14096 * value is actually currently associated with that location, and that we are talking
14097 * about the top of the stack here, which is where we expected boxed values.
14099 JS_ASSERT(&v == &cx->regs->sp[-1] && get(&v) == v_ins);
14100 set(&v, unbox_value(v, AllocSlotsAddress(native_rval_ins), snapshot(BRANCH_EXIT)));
14101 } else if (pendingSpecializedNative->flags &
14102 (JSTN_RETURN_NULLABLE_STR | JSTN_RETURN_NULLABLE_OBJ)) {
// Nullable returns: guard on the observed nullness of the result.
14104 w.name(w.eqp0(v_ins), "guard(nullness)"),
14106 } else if (JSTN_ERRTYPE(pendingSpecializedNative) == FAIL_NEG) {
14107 /* Already added i2d in functionCall. */
14108 JS_ASSERT(v.isNumber());
14110 /* Convert the result to double if the builtin returns int32. */
14111 if (v.isNumber() &&
14112 pendingSpecializedNative->builtin->returnType() == ARGTYPE_I) {
14113 set(&v, w.i2d(v_ins));
14117 // We'll null pendingSpecializedNative in monitorRecording, on the next op
14118 // cycle. There must be a next op since the stack is non-empty.
14119 return ARECORD_CONTINUE;
// Resolve a name reference for the current opcode. Global slot accesses are
// resolved via the property cache and lazily imported; anything on a
// non-global scope chain is delegated to scopeChainProp.
14122 JS_REQUIRES_STACK AbortableRecordingStatus
14123 TraceRecorder::name(Value*& vp, LIns*& ins, NameResult& nr)
14125 JSObject* obj = &cx->fp()->scopeChain();
14126 JSOp op = JSOp(*cx->regs->pc);
// GNAME ops (e.g. JSOP_GETGNAME) always refer to the global object.
14127 if (js_CodeSpec[op].format & JOF_GNAME)
14128 obj = obj->getGlobal();
14129 if (obj != globalObj)
14130 return scopeChainProp(obj, vp, ins, nr);
14132 /* Can't use prop here, because we don't want unboxing from global slots. */
14133 LIns* obj_ins = w.immpObjGC(globalObj);
14140 * Property cache ensures that we are dealing with an existing property,
14141 * and guards the shape for us.
14143 CHECK_STATUS_A(test_property_cache(obj, obj_ins, obj2, pcval));
14145 /* Abort if property doesn't exist (interpreter will report an error.) */
14146 if (pcval.isNull())
14147 RETURN_STOP_A("named property not found");
14149 /* Insist on obj being the directly addressed object. */
14151 RETURN_STOP_A("name() hit prototype chain");
14153 /* Don't trace getter or setter calls, our caller wants a direct slot. */
14154 if (pcval.isShape()) {
14155 const Shape* shape = pcval.toShape();
14156 if (!isValidSlot(obj, shape))
14157 RETURN_STOP_A("name() not accessing a valid slot");
14158 slot = shape->slot;
14160 if (!pcval.isSlot())
14161 RETURN_STOP_A("PCE is not a slot");
14162 slot = pcval.toSlot();
// Make sure the global slot is tracked by the trace before handing out a
// direct pointer to it.
14165 if (!lazilyImportGlobalSlot(slot))
14166 RETURN_STOP_A("lazy import of global slot failed");
14168 vp = &obj->getSlotRef(slot);
14171 return ARECORD_CONTINUE;
// Trace-callable wrapper around JSObject::methodReadBarrier: clones a joined
// method function when its value is read as a plain property. Rooted via
// AutoValueRooter because the barrier can allocate (deep-bail capable,
// hence OBJECT_FAIL in the CallInfo below).
14174 static JSObject* FASTCALL
14175 MethodReadBarrier(JSContext* cx, JSObject* obj, Shape* shape, JSObject* funobj)
14177 Value v = ObjectValue(*funobj);
14178 AutoValueRooter tvr(cx, v);
14180 if (!obj->methodReadBarrier(cx, *shape, tvr.addr()))
14182 return &tvr.value().toObject();
14184 JS_DEFINE_CALLINFO_4(static, OBJECT_FAIL, MethodReadBarrier, CONTEXT, OBJECT, SHAPE, OBJECT,
14185 0, ACCSET_STORE_ANY)
14188 * Get a property. The current opcode has JOF_ATOM.
14190 * There are two modes. The caller must pass nonnull pointers for either outp
14191 * or both slotp and v_insp. In the latter case, we require a plain old
14192 * property with a slot; if the property turns out to be anything else, abort
14193 * tracing (rather than emit a call to a native getter or GetAnyProperty).
14195 JS_REQUIRES_STACK AbortableRecordingStatus
14196 TraceRecorder::prop(JSObject* obj, LIns* obj_ins, uint32 *slotp, LIns** v_insp, Value *outp)
14199 * Insist that obj have js_SetProperty as its set object-op. This suffices
14200 * to prevent a rogue obj from being used on-trace (loaded via obj_ins),
14201 * because we will guard on shape (or else global object identity) and any
14202 * object not having the same op must have a different class, and therefore
14203 * must differ in its shape (or not be the global object).
14205 if (!obj->isDenseArray() && obj->getOps()->getProperty)
14206 RETURN_STOP_A("non-dense-array, non-native js::ObjectOps::getProperty");
14208 JS_ASSERT((slotp && v_insp && !outp) || (!slotp && !v_insp && outp));
14211 * Property cache ensures that we are dealing with an existing property,
14212 * and guards the shape for us.
14216 CHECK_STATUS_A(test_property_cache(obj, obj_ins, obj2, pcval));
14218 /* Check for nonexistent property reference, which results in undefined. */
14219 if (pcval.isNull()) {
// In slot mode (slotp/v_insp) a missing property cannot be modeled; the
// abort path below applies in that case.
14221 RETURN_STOP_A("property not found");
14224 * We could specialize to guard on just JSClass.getProperty, but a mere
14225 * class guard is simpler and slightly faster.
14227 if (obj->getClass()->getProperty != Valueify(JS_PropertyStub)) {
14228 RETURN_STOP_A("can't trace through access to undefined property if "
14229 "JSClass.getProperty hook isn't stubbed");
14231 guardClass(obj_ins, obj->getClass(), snapshot(MISMATCH_EXIT), LOAD_NORMAL);
14234 * This trace will be valid as long as neither the object nor any object
14235 * on its prototype chain changes shape.
14237 * FIXME: This loop can become a single shape guard once bug 497789 has
// Walk the prototype chain, shape-guarding each native object (or
// class-guarding dense arrays), so any shape change exits the trace.
14240 VMSideExit* exit = snapshot(BRANCH_EXIT);
14242 if (obj->isNative()) {
14243 CHECK_STATUS_A(guardShape(obj_ins, obj, obj->shape(), "guard(shape)", exit));
14244 } else if (obj->isDenseArray()) {
14245 guardDenseArray(obj_ins, exit);
14247 RETURN_STOP_A("non-native object involved in undefined property access");
14249 } while (guardHasPrototype(obj, obj_ins, &obj, &obj_ins, exit));
14251 set(outp, w.immiUndefined());
14252 return ARECORD_CONTINUE;
14255 return InjectStatus(propTail(obj, obj_ins, obj2, pcval, slotp, v_insp, outp));
// Continuation of prop(): given the property-cache result (obj2/pcval),
// validate the found shape or slot, teleport to the prototype holder if
// needed, load and unbox the value, and apply the method read barrier.
14258 JS_REQUIRES_STACK RecordingStatus
14259 TraceRecorder::propTail(JSObject* obj, LIns* obj_ins, JSObject* obj2, PCVal pcval,
14260 uint32 *slotp, LIns** v_insp, Value *outp)
14262 const JSCodeSpec& cs = js_CodeSpec[*cx->regs->pc];
14263 uint32 setflags = (cs.format & (JOF_INCDEC | JOF_FOR));
14264 JS_ASSERT(!(cs.format & JOF_SET));
14266 const Shape* shape;
14270 if (pcval.isShape()) {
14271 shape = pcval.toShape();
14272 JS_ASSERT(obj2->nativeContains(*shape));
// Writes (inc/dec, for-in) through non-default or readonly setters are
// not traceable.
14274 if (setflags && !shape->hasDefaultSetter())
14275 RETURN_STOP("non-stub setter");
14276 if (setflags && !shape->writable())
14277 RETURN_STOP("writing to a readonly property");
14278 if (!shape->hasDefaultGetterOrIsMethod()) {
14280 RETURN_STOP("can't trace non-stub getter for this opcode");
14281 if (shape->hasGetterValue())
14282 return getPropertyWithScriptGetter(obj, obj_ins, shape);
14283 if (shape->slot == SHAPE_INVALID_SLOT)
14284 return getPropertyWithNativeGetter(obj_ins, shape, outp);
14285 return getPropertyById(obj_ins, outp);
14287 if (!obj2->containsSlot(shape->slot))
14288 RETURN_STOP("no valid slot");
14289 slot = shape->slot;
14290 isMethod = shape->isMethod();
14291 JS_ASSERT_IF(isMethod, obj2->hasMethodBarrier());
14293 if (!pcval.isSlot())
14294 RETURN_STOP("PCE is not a slot");
14295 slot = pcval.toSlot();
14300 /* We have a slot. Check whether it is direct or in a prototype. */
14303 RETURN_STOP("JOF_INCDEC|JOF_FOR opcode hit prototype chain");
14306 * We're getting a prototype property. Two cases:
14308 * 1. If obj2 is obj's immediate prototype we must walk up from obj,
14309 * since direct and immediate-prototype cache hits key on obj's shape,
14310 * not its identity.
14312 * 2. Otherwise obj2 is higher up the prototype chain and we've keyed
14313 * on obj's identity, and since setting __proto__ reshapes all objects
14314 * along the old prototype chain, then provided we shape-guard obj2,
14315 * we can "teleport" directly to obj2 by embedding it as a constant
14316 * (this constant object instruction will be CSE'ed with the constant
14317 * emitted by test_property_cache, whose shape is guarded).
14319 obj_ins = (obj2 == obj->getProto()) ? w.ldpObjProto(obj_ins) : w.immpObjGC(obj2);
// Load the value: global slots come from the trace's imported globals,
// other slots are unboxed from the object's dslots.
14324 if (obj2 == globalObj) {
14326 RETURN_STOP("get global method");
14327 if (!lazilyImportGlobalSlot(slot))
14328 RETURN_STOP("lazy import of global slot failed");
14329 v_ins = get(&globalObj->getSlotRef(slot));
14331 v_ins = unbox_slot(obj, obj_ins, slot, snapshot(BRANCH_EXIT));
14335 * Joined function object stored as a method must be cloned when extracted
14336 * as a property value other than a callee. Note that shapes cover method
14337 * value as well as other property attributes and order, so this condition
14338 * is trace-invariant.
14340 * We do not impose the method read barrier if in an imacro, assuming any
14341 * property gets it does (e.g., for 'toString' from JSOP_NEW) will not be
14342 * leaked to the calling script.
14344 if (isMethod && !cx->fp()->hasImacropc()) {
14345 enterDeepBailCall();
14346 LIns* args[] = { v_ins, w.immpShapeGC(shape), obj_ins, cx_ins };
14347 v_ins = w.call(&MethodReadBarrier_ci, args);
14348 leaveDeepBailCall();
14357 return RECORD_CONTINUE;
14361 * When we end up with a hole, read it as undefined, and make sure to set
14362 * addr_ins to null.
14364 JS_REQUIRES_STACK RecordingStatus
14365 TraceRecorder::denseArrayElement(Value& oval, Value& ival, Value*& vp, LIns*& v_ins,
14366 LIns*& addr_ins, VMSideExit* branchExit)
14368 JS_ASSERT(oval.isObject() && ival.isInt32());
14370 JSObject* obj = &oval.toObject();
14371 LIns* obj_ins = get(&oval);
14372 jsint idx = ival.toInt32();
14374 CHECK_STATUS(makeNumberInt32(get(&ival), &idx_ins));
14377 * Arrays have both a length and a capacity, but we only need to check
14378 * |index < capacity|; in the case where |length < index < capacity|
14379 * the entries [length..capacity-1] will have already been marked as
14380 * holes by resizeDenseArrayElements() so we can read them and get
14381 * the correct value.
14383 LIns* capacity_ins = w.ldiDenseArrayCapacity(obj_ins);
14384 jsuint capacity = obj->getDenseArrayCapacity();
14385 bool within = (jsuint(idx) < capacity);
// Out-of-bounds at record time: guard that it stays out-of-bounds and read
// the value as undefined (no indexed properties on the proto chain allowed).
14387 /* If not idx < capacity, stay on trace (and read value as undefined). */
14388 guard(true, w.geui(idx_ins, capacity_ins), branchExit);
14390 CHECK_STATUS(guardPrototypeHasNoIndexedProperties(obj, obj_ins, snapshot(MISMATCH_EXIT)));
14392 v_ins = w.immiUndefined();
14394 return RECORD_CONTINUE;
14397 /* Guard that index is within capacity. */
14398 guard(true, w.name(w.ltui(idx_ins, capacity_ins), "inRange"), branchExit);
14400 /* Load the value and guard on its type to unbox it. */
14401 vp = &obj->slots[jsuint(idx)];
14402 JS_ASSERT(sizeof(Value) == 8); // The |3| in the following statement requires this.
14403 addr_ins = w.name(w.getDslotAddress(obj_ins, idx_ins), "elemp");
14404 v_ins = unbox_value(*vp, DSlotsAddress(addr_ins), branchExit);
14406 /* Don't let the hole value escape. Turn it into an undefined. */
14407 if (vp->isMagic()) {
14408 CHECK_STATUS(guardPrototypeHasNoIndexedProperties(obj, obj_ins, snapshot(MISMATCH_EXIT)));
14409 v_ins = w.immiUndefined();
14412 return RECORD_CONTINUE;
14415 /* See comments in TypedArrayTemplate<double>::copyIndexToValue. */
// Replace any NaN bit pattern loaded from a typed array with the engine's
// canonical js_NaN, so the boxed-value NaN invariant is preserved.
14417 TraceRecorder::canonicalizeNaNs(LIns *dval_ins)
14419 /* NaNs are the only floating point values that do not == themselves. */
14420 LIns *isnonnan_ins = w.eqd(dval_ins, dval_ins);
14421 return w.cmovd(isnonnan_ins, dval_ins, w.immd(js_NaN));
// Record an element read from a typed array: range-check the index against
// the array length, then emit a load specialized to the element type and
// widen the result to a double (canonicalizing NaNs for float types).
14424 JS_REQUIRES_STACK AbortableRecordingStatus
14425 TraceRecorder::typedArrayElement(Value& oval, Value& ival, Value*& vp, LIns*& v_ins)
14427 JS_ASSERT(oval.isObject() && ival.isInt32());
14429 JSObject* obj = &oval.toObject();
14430 LIns* obj_ins = get(&oval);
14431 jsint idx = ival.toInt32();
14433 CHECK_STATUS_A(makeNumberInt32(get(&ival), &idx_ins));
14434 LIns* pidx_ins = w.ui2p(idx_ins);
14436 js::TypedArray* tarray = js::TypedArray::fromJSObject(obj);
14439 /* priv_ins will load the TypedArray* */
14440 LIns* priv_ins = w.ldpObjPrivate(obj_ins);
14442 /* for out-of-range, do the same thing that the interpreter does, which is return undefined */
14443 if ((jsuint) idx >= tarray->length) {
14444 CHECK_STATUS_A(guard(false,
14445 w.ltui(idx_ins, w.ldiConstTypedArrayLength(priv_ins)),
14447 /* abortIfAlwaysExits = */true));
14448 v_ins = w.immiUndefined();
14449 return ARECORD_CONTINUE;
14453 * Ensure idx < length
14455 * NOTE! mLength is uint32, but it's guaranteed to fit in a Value
14456 * int, so we can treat it as either signed or unsigned.
14457 * If the index happens to be negative, when it's treated as
14458 * unsigned it'll be a very large int, and thus won't be less than
14462 w.name(w.ltui(idx_ins, w.ldiConstTypedArrayLength(priv_ins)), "inRange"),
14465 /* We are now ready to load. Do a different type of load
14466 * depending on what type of thing we're loading. */
14467 LIns* data_ins = w.ldpConstTypedArrayData(priv_ins);
14469 switch (tarray->type) {
14470 case js::TypedArray::TYPE_INT8:
14471 v_ins = w.i2d(w.ldc2iTypedArrayElement(data_ins, pidx_ins));
14473 case js::TypedArray::TYPE_UINT8:
14474 case js::TypedArray::TYPE_UINT8_CLAMPED:
14475 // i2d on purpose here: it's safe, because an 8-bit uint is guaranteed
14476 // to fit in a 32-bit int, and i2d gets more optimization than ui2d.
14477 v_ins = w.i2d(w.lduc2uiTypedArrayElement(data_ins, pidx_ins));
14479 case js::TypedArray::TYPE_INT16:
14480 v_ins = w.i2d(w.lds2iTypedArrayElement(data_ins, pidx_ins));
14482 case js::TypedArray::TYPE_UINT16:
14483 // i2d on purpose here: it's safe, because a 16-bit uint is guaranteed
14484 // to fit in a 32-bit int, and i2d gets more optimization than ui2d.
14485 v_ins = w.i2d(w.ldus2uiTypedArrayElement(data_ins, pidx_ins));
14487 case js::TypedArray::TYPE_INT32:
14488 v_ins = w.i2d(w.ldiTypedArrayElement(data_ins, pidx_ins));
14490 case js::TypedArray::TYPE_UINT32:
14491 v_ins = w.ui2d(w.ldiTypedArrayElement(data_ins, pidx_ins));
14493 case js::TypedArray::TYPE_FLOAT32:
14494 v_ins = canonicalizeNaNs(w.ldf2dTypedArrayElement(data_ins, pidx_ins));
14496 case js::TypedArray::TYPE_FLOAT64:
14497 v_ins = canonicalizeNaNs(w.lddTypedArrayElement(data_ins, pidx_ins));
14500 JS_NOT_REACHED("Unknown typed array type in tracer");
14503 return ARECORD_CONTINUE;
14506 JS_REQUIRES_STACK AbortableRecordingStatus
14507 TraceRecorder::getProp(JSObject* obj, LIns* obj_ins)
14509 JSOp op = JSOp(*cx->regs->pc);
14510 const JSCodeSpec& cs = js_CodeSpec[op];
14512 JS_ASSERT(cs.ndefs == 1);
14513 return prop(obj, obj_ins, NULL, NULL, &stackval(-cs.nuses));
14516 JS_REQUIRES_STACK AbortableRecordingStatus
14517 TraceRecorder::getProp(Value& v)
14519 if (v.isPrimitive())
14520 RETURN_STOP_A("primitive lhs");
14522 return getProp(&v.toObject(), get(&v));
14525 JS_REQUIRES_STACK AbortableRecordingStatus
14526 TraceRecorder::record_JSOP_NAME()
14531 CHECK_STATUS_A(name(vp, v_ins, nr));
14533 return ARECORD_CONTINUE;
14536 JS_REQUIRES_STACK AbortableRecordingStatus
14537 TraceRecorder::record_JSOP_DOUBLE()
14539 double d = consts[GET_INDEX(cx->regs->pc)].toDouble();
14540 stack(0, w.immd(d));
14541 return ARECORD_CONTINUE;
14544 JS_REQUIRES_STACK AbortableRecordingStatus
14545 TraceRecorder::record_JSOP_STRING()
14547 JSAtom* atom = atoms[GET_INDEX(cx->regs->pc)];
14548 stack(0, w.immpAtomGC(atom));
14549 return ARECORD_CONTINUE;
14552 JS_REQUIRES_STACK AbortableRecordingStatus
14553 TraceRecorder::record_JSOP_ZERO()
14555 stack(0, w.immd(0));
14556 return ARECORD_CONTINUE;
14559 JS_REQUIRES_STACK AbortableRecordingStatus
14560 TraceRecorder::record_JSOP_ONE()
14562 stack(0, w.immd(1));
14563 return ARECORD_CONTINUE;
14566 JS_REQUIRES_STACK AbortableRecordingStatus
14567 TraceRecorder::record_JSOP_NULL()
14569 stack(0, w.immpNull());
14570 return ARECORD_CONTINUE;
14573 JS_REQUIRES_STACK AbortableRecordingStatus
14574 TraceRecorder::record_JSOP_THIS()
14577 CHECK_STATUS_A(getThis(this_ins));
14578 stack(0, this_ins);
14579 return ARECORD_CONTINUE;
14582 JS_REQUIRES_STACK AbortableRecordingStatus
14583 TraceRecorder::record_JSOP_FALSE()
14585 stack(0, w.immi(0));
14586 return ARECORD_CONTINUE;
14589 JS_REQUIRES_STACK AbortableRecordingStatus
14590 TraceRecorder::record_JSOP_TRUE()
14592 stack(0, w.immi(1));
14593 return ARECORD_CONTINUE;
14596 JS_REQUIRES_STACK AbortableRecordingStatus
14597 TraceRecorder::record_JSOP_OR()
14602 JS_REQUIRES_STACK AbortableRecordingStatus
14603 TraceRecorder::record_JSOP_AND()
14608 JS_REQUIRES_STACK AbortableRecordingStatus
14609 TraceRecorder::record_JSOP_TABLESWITCH()
14611 #ifdef NANOJIT_IA32
14612 /* Handle tableswitches specially -- prepare a jump table if needed. */
14613 return tableswitch();
14615 return InjectStatus(switchop());
14619 JS_REQUIRES_STACK AbortableRecordingStatus
14620 TraceRecorder::record_JSOP_LOOKUPSWITCH()
14622 return InjectStatus(switchop());
14625 JS_REQUIRES_STACK AbortableRecordingStatus
14626 TraceRecorder::record_JSOP_STRICTEQ()
14628 CHECK_STATUS_A(strictEquality(true, false));
14629 return ARECORD_CONTINUE;
14632 JS_REQUIRES_STACK AbortableRecordingStatus
14633 TraceRecorder::record_JSOP_STRICTNE()
14635 CHECK_STATUS_A(strictEquality(false, false));
14636 return ARECORD_CONTINUE;
14639 JS_REQUIRES_STACK AbortableRecordingStatus
14640 TraceRecorder::record_JSOP_OBJECT()
14642 JSStackFrame* const fp = cx->fp();
14643 JSScript* script = fp->script();
14644 unsigned index = atoms - script->atomMap.vector + GET_INDEX(cx->regs->pc);
14647 obj = script->getObject(index);
14648 stack(0, w.immpObjGC(obj));
14649 return ARECORD_CONTINUE;
14652 JS_REQUIRES_STACK AbortableRecordingStatus
14653 TraceRecorder::record_JSOP_POP()
14655 return ARECORD_CONTINUE;
14658 JS_REQUIRES_STACK AbortableRecordingStatus
14659 TraceRecorder::record_JSOP_TRAP()
14661 return ARECORD_STOP;
14664 JS_REQUIRES_STACK AbortableRecordingStatus
14665 TraceRecorder::record_JSOP_GETARG()
14667 stack(0, arg(GET_ARGNO(cx->regs->pc)));
14668 return ARECORD_CONTINUE;
14671 JS_REQUIRES_STACK AbortableRecordingStatus
14672 TraceRecorder::record_JSOP_SETARG()
14674 arg(GET_ARGNO(cx->regs->pc), stack(-1));
14675 return ARECORD_CONTINUE;
14678 JS_REQUIRES_STACK AbortableRecordingStatus
14679 TraceRecorder::record_JSOP_GETLOCAL()
14681 stack(0, var(GET_SLOTNO(cx->regs->pc)));
14682 return ARECORD_CONTINUE;
14685 JS_REQUIRES_STACK AbortableRecordingStatus
14686 TraceRecorder::record_JSOP_SETLOCAL()
14688 var(GET_SLOTNO(cx->regs->pc), stack(-1));
14689 return ARECORD_CONTINUE;
14692 JS_REQUIRES_STACK AbortableRecordingStatus
14693 TraceRecorder::record_JSOP_UINT16()
14695 stack(0, w.immd(GET_UINT16(cx->regs->pc)));
14696 return ARECORD_CONTINUE;
14699 JS_REQUIRES_STACK AbortableRecordingStatus
14700 TraceRecorder::record_JSOP_NEWINIT()
14705 JSProtoKey key = JSProtoKey(cx->regs->pc[1]);
14708 CHECK_STATUS_A(getClassPrototype(key, proto_ins));
14711 if (key == JSProto_Array) {
14712 LIns *args[] = { proto_ins, cx_ins };
14713 v_ins = w.call(&NewDenseEmptyArray_ci, args);
14715 LIns *args[] = { w.immpNull(), proto_ins, cx_ins };
14716 v_ins = w.call(&js_InitializerObject_ci, args);
14718 guard(false, w.eqp0(v_ins), OOM_EXIT);
14720 return ARECORD_CONTINUE;
14723 JS_REQUIRES_STACK AbortableRecordingStatus
14724 TraceRecorder::record_JSOP_NEWARRAY()
14729 CHECK_STATUS_A(getClassPrototype(JSProto_Array, proto_ins));
14731 unsigned count = GET_UINT24(cx->regs->pc);
14732 LIns *args[] = { proto_ins, w.immi(count), cx_ins };
14733 LIns *v_ins = w.call(&NewDenseAllocatedArray_ci, args);
14735 guard(false, w.eqp0(v_ins), OOM_EXIT);
14737 return ARECORD_CONTINUE;
14740 JS_REQUIRES_STACK AbortableRecordingStatus
14741 TraceRecorder::record_JSOP_NEWOBJECT()
14746 CHECK_STATUS_A(getClassPrototype(JSProto_Object, proto_ins));
14748 JSObject* baseobj = cx->fp()->script()->getObject(getFullIndex(0));
14750 LIns *args[] = { w.immpObjGC(baseobj), proto_ins, cx_ins };
14751 LIns *v_ins = w.call(&js_InitializerObject_ci, args);
14753 guard(false, w.eqp0(v_ins), OOM_EXIT);
14755 return ARECORD_CONTINUE;
14758 JS_REQUIRES_STACK AbortableRecordingStatus
14759 TraceRecorder::record_JSOP_ENDINIT()
14762 if (initDepth == 0)
14763 hadNewInit = false;
14766 Value& v = stackval(-1);
14767 JS_ASSERT(!v.isPrimitive());
14769 return ARECORD_CONTINUE;
14772 JS_REQUIRES_STACK AbortableRecordingStatus
14773 TraceRecorder::record_JSOP_INITELEM()
14775 Value& v = stackval(-1);
14776 Value& idx = stackval(-2);
14777 Value& lval = stackval(-3);
14779 // The object is either a dense Array or an Object. Only handle the dense case here.
14780 // Also skip array initializers which might be unoptimized NEWINIT initializers.
14781 if (!lval.toObject().isDenseArray() || hadNewInit)
14782 return setElem(-3, -2, -1);
14784 // The index is always the same constant integer.
14785 JS_ASSERT(idx.isInt32());
14787 // Nothing to do for holes, the array's length has already been set.
14788 if (v.isMagic(JS_ARRAY_HOLE))
14789 return ARECORD_CONTINUE;
14791 LIns* obj_ins = get(&lval);
14792 LIns* v_ins = get(&v);
14794 // Set the element.
14795 LIns *slots_ins = w.ldpObjSlots(obj_ins);
14796 box_value_into(v, v_ins, DSlotsAddress(slots_ins, idx.toInt32()));
14798 return ARECORD_CONTINUE;
14801 JS_REQUIRES_STACK AbortableRecordingStatus
14802 TraceRecorder::record_JSOP_DEFSHARP()
14804 return ARECORD_STOP;
14807 JS_REQUIRES_STACK AbortableRecordingStatus
14808 TraceRecorder::record_JSOP_USESHARP()
14810 return ARECORD_STOP;
14813 JS_REQUIRES_STACK AbortableRecordingStatus
14814 TraceRecorder::record_JSOP_INCARG()
14816 return InjectStatus(inc(argval(GET_ARGNO(cx->regs->pc)), 1));
14819 JS_REQUIRES_STACK AbortableRecordingStatus
14820 TraceRecorder::record_JSOP_INCLOCAL()
14822 return InjectStatus(inc(varval(GET_SLOTNO(cx->regs->pc)), 1));
14825 JS_REQUIRES_STACK AbortableRecordingStatus
14826 TraceRecorder::record_JSOP_DECARG()
14828 return InjectStatus(inc(argval(GET_ARGNO(cx->regs->pc)), -1));
14831 JS_REQUIRES_STACK AbortableRecordingStatus
14832 TraceRecorder::record_JSOP_DECLOCAL()
14834 return InjectStatus(inc(varval(GET_SLOTNO(cx->regs->pc)), -1));
14837 JS_REQUIRES_STACK AbortableRecordingStatus
14838 TraceRecorder::record_JSOP_ARGINC()
14840 return InjectStatus(inc(argval(GET_ARGNO(cx->regs->pc)), 1, false));
14843 JS_REQUIRES_STACK AbortableRecordingStatus
14844 TraceRecorder::record_JSOP_LOCALINC()
14846 return InjectStatus(inc(varval(GET_SLOTNO(cx->regs->pc)), 1, false));
14849 JS_REQUIRES_STACK AbortableRecordingStatus
14850 TraceRecorder::record_JSOP_ARGDEC()
14852 return InjectStatus(inc(argval(GET_ARGNO(cx->regs->pc)), -1, false));
14855 JS_REQUIRES_STACK AbortableRecordingStatus
14856 TraceRecorder::record_JSOP_LOCALDEC()
14858 return InjectStatus(inc(varval(GET_SLOTNO(cx->regs->pc)), -1, false));
14861 JS_REQUIRES_STACK AbortableRecordingStatus
14862 TraceRecorder::record_JSOP_IMACOP()
14864 JS_ASSERT(cx->fp()->hasImacropc());
14865 return ARECORD_CONTINUE;
14868 static JSBool FASTCALL
14869 ObjectToIterator(JSContext* cx, JSObject *obj, int32 flags, Value* vp)
14871 TraceMonitor *tm = JS_TRACE_MONITOR_ON_TRACE(cx);
14873 vp->setObject(*obj);
14874 bool ok = js_ValueToIterator(cx, flags, vp);
14876 SetBuiltinError(tm);
14879 return WasBuiltinSuccessful(tm);
14881 JS_DEFINE_CALLINFO_4(static, BOOL_FAIL, ObjectToIterator, CONTEXT, OBJECT, INT32, VALUEPTR,
14882 0, ACCSET_STORE_ANY)
14884 JS_REQUIRES_STACK AbortableRecordingStatus
14885 TraceRecorder::record_JSOP_ITER()
14887 Value& v = stackval(-1);
14888 if (v.isPrimitive())
14889 RETURN_STOP_A("for-in on a primitive value");
14891 RETURN_IF_XML_A(v);
14893 LIns *obj_ins = get(&v);
14894 jsuint flags = cx->regs->pc[1];
14896 enterDeepBailCall();
14898 LIns* vp_ins = w.allocp(sizeof(Value));
14899 LIns* args[] = { vp_ins, w.immi(flags), obj_ins, cx_ins };
14900 LIns* ok_ins = w.call(&ObjectToIterator_ci, args);
14902 // We need to guard on ok_ins, but this requires a snapshot of the state
14903 // after this op. monitorRecording will do it for us.
14904 pendingGuardCondition = ok_ins;
14906 // ObjectToIterator can deep-bail without throwing, leaving a value of
14907 // unknown type in *vp (it can be either a function or a non-function
14908 // object). Use the same mechanism as finishGetProp to arrange for
14909 // LeaveTree to deal with this value.
14910 pendingUnboxSlot = cx->regs->sp - 1;
14911 set(pendingUnboxSlot, w.name(w.lddAlloc(vp_ins), "iterval"));
14913 leaveDeepBailCall();
14915 return ARECORD_CONTINUE;
14918 static JSBool FASTCALL
14919 IteratorMore(JSContext *cx, JSObject *iterobj, Value *vp)
14921 TraceMonitor *tm = JS_TRACE_MONITOR_ON_TRACE(cx);
14923 if (!js_IteratorMore(cx, iterobj, vp)) {
14924 SetBuiltinError(tm);
14927 return WasBuiltinSuccessful(tm);
14929 JS_DEFINE_CALLINFO_3(extern, BOOL_FAIL, IteratorMore, CONTEXT, OBJECT, VALUEPTR,
14930 0, ACCSET_STORE_ANY)
14932 JS_REQUIRES_STACK AbortableRecordingStatus
14933 TraceRecorder::record_JSOP_MOREITER()
14935 Value& iterobj_val = stackval(-1);
14936 if (iterobj_val.isPrimitive())
14937 RETURN_STOP_A("for-in on a primitive value");
14939 RETURN_IF_XML_A(iterobj_val);
14941 JSObject* iterobj = &iterobj_val.toObject();
14942 LIns* iterobj_ins = get(&iterobj_val);
14946 * JSOP_FOR* already guards on this, but in certain rare cases we might
14947 * record misformed loop traces. Note that it's not necessary to guard on
14948 * ni->flags (nor do we in unboxNextValue), because the different
14949 * iteration type will guarantee a different entry typemap.
14951 if (iterobj->hasClass(&js_IteratorClass)) {
14952 guardClass(iterobj_ins, &js_IteratorClass, snapshot(BRANCH_EXIT), LOAD_NORMAL);
14954 NativeIterator *ni = (NativeIterator *) iterobj->getPrivate();
14955 if (ni->isKeyIter()) {
14956 LIns *ni_ins = w.ldpObjPrivate(iterobj_ins);
14957 LIns *cursor_ins = w.ldpIterCursor(ni_ins);
14958 LIns *end_ins = w.ldpIterEnd(ni_ins);
14960 cond_ins = w.ltp(cursor_ins, end_ins);
14961 stack(0, cond_ins);
14962 return ARECORD_CONTINUE;
14965 guardNotClass(iterobj_ins, &js_IteratorClass, snapshot(BRANCH_EXIT), LOAD_NORMAL);
14968 enterDeepBailCall();
14970 LIns* vp_ins = w.allocp(sizeof(Value));
14971 LIns* args[] = { vp_ins, iterobj_ins, cx_ins };
14972 pendingGuardCondition = w.call(&IteratorMore_ci, args);
14974 leaveDeepBailCall();
14976 cond_ins = is_boxed_true(AllocSlotsAddress(vp_ins));
14977 stack(0, cond_ins);
14979 // Write this value back even though we haven't changed it.
14980 // See the comment in DeepBail about "clobbering deep bails".
14981 stack(-1, iterobj_ins);
14983 return ARECORD_CONTINUE;
14986 static JSBool FASTCALL
14987 CloseIterator(JSContext *cx, JSObject *iterobj)
14989 TraceMonitor *tm = JS_TRACE_MONITOR_ON_TRACE(cx);
14991 if (!js_CloseIterator(cx, iterobj)) {
14992 SetBuiltinError(tm);
14995 return WasBuiltinSuccessful(tm);
14997 JS_DEFINE_CALLINFO_2(extern, BOOL_FAIL, CloseIterator, CONTEXT, OBJECT, 0, ACCSET_STORE_ANY)
14999 JS_REQUIRES_STACK AbortableRecordingStatus
15000 TraceRecorder::record_JSOP_ENDITER()
15002 JS_ASSERT(!stackval(-1).isPrimitive());
15004 enterDeepBailCall();
15006 LIns* args[] = { stack(-1), cx_ins };
15007 LIns* ok_ins = w.call(&CloseIterator_ci, args);
15009 // We need to guard on ok_ins, but this requires a snapshot of the state
15010 // after this op. monitorRecording will do it for us.
15011 pendingGuardCondition = ok_ins;
15013 leaveDeepBailCall();
15015 return ARECORD_CONTINUE;
/*
 * Store the magic value |why| into the Value at |addr|.  On 32-bit targets
 * the payload and tag words are written separately; on 64-bit targets a
 * single boxed quadword is written.
 */
#if JS_BITS_PER_WORD == 32
JS_REQUIRES_STACK void
TraceRecorder::storeMagic(JSWhyMagic why, Address addr)
{
    w.stiValuePayload(w.immpMagicWhy(why), addr);
    /*
     * Bug fix: the tag word is a JSValueTag, not a JSWhyMagic payload, so it
     * must be stored as a plain unsigned immediate rather than via
     * immpMagicWhy(JSVAL_TAG_MAGIC).
     */
    w.stiValueTag(w.nameImmui(JSVAL_TAG_MAGIC), addr);
}
#elif JS_BITS_PER_WORD == 64
JS_REQUIRES_STACK void
TraceRecorder::storeMagic(JSWhyMagic why, Address addr)
{
    LIns *magic = w.nameImmq(BUILD_JSVAL(JSVAL_TAG_MAGIC, why));
    w.stq(magic, addr);
}
#endif
15034 JS_REQUIRES_STACK AbortableRecordingStatus
15035 TraceRecorder::unboxNextValue(LIns* &v_ins)
15037 Value &iterobj_val = stackval(-1);
15038 JSObject *iterobj = &iterobj_val.toObject();
15039 LIns* iterobj_ins = get(&iterobj_val);
15041 if (iterobj->hasClass(&js_IteratorClass)) {
15042 guardClass(iterobj_ins, &js_IteratorClass, snapshot(BRANCH_EXIT), LOAD_NORMAL);
15043 NativeIterator *ni = (NativeIterator *) iterobj->getPrivate();
15045 LIns *ni_ins = w.ldpObjPrivate(iterobj_ins);
15046 LIns *cursor_ins = w.ldpIterCursor(ni_ins);
15048 /* Emit code to stringify the id if necessary. */
15049 Address cursorAddr = IterPropsAddress(cursor_ins);
15050 if (ni->isKeyIter()) {
15051 /* Read the next id from the iterator. */
15052 jsid id = *ni->current();
15053 LIns *id_ins = w.name(w.ldp(cursorAddr), "id");
15056 * Most iterations over object properties never have to actually deal with
15057 * any numeric properties, so we guard here instead of branching.
15059 guard(JSID_IS_STRING(id), is_string_id(id_ins), BRANCH_EXIT);
15061 if (JSID_IS_STRING(id)) {
15062 v_ins = unbox_string_id(id_ins);
15063 } else if (JSID_IS_INT(id)) {
15064 /* id is an integer, convert to a string. */
15065 LIns *id_to_int_ins = unbox_int_id(id_ins);
15066 LIns* args[] = { id_to_int_ins, cx_ins };
15067 v_ins = w.call(&js_IntToString_ci, args);
15068 guard(false, w.eqp0(v_ins), OOM_EXIT);
15070 #if JS_HAS_XML_SUPPORT
15071 JS_ASSERT(JSID_IS_OBJECT(id));
15072 JS_ASSERT(JSID_TO_OBJECT(id)->isXMLId());
15073 RETURN_STOP_A("iterated over a property with an XML id");
15075 JS_NEVER_REACHED("unboxNextValue");
15079 /* Increment the cursor by one jsid and store it back. */
15080 cursor_ins = w.addp(cursor_ins, w.nameImmw(sizeof(jsid)));
15081 w.stpIterCursor(cursor_ins, ni_ins);
15082 return ARECORD_CONTINUE;
15085 guardNotClass(iterobj_ins, &js_IteratorClass, snapshot(BRANCH_EXIT), LOAD_NORMAL);
15089 Address iterValueAddr = CxAddress(iterValue);
15090 v_ins = unbox_value(cx->iterValue, iterValueAddr, snapshot(BRANCH_EXIT));
15091 storeMagic(JS_NO_ITER_VALUE, iterValueAddr);
15093 return ARECORD_CONTINUE;
15096 JS_REQUIRES_STACK AbortableRecordingStatus
15097 TraceRecorder::record_JSOP_FORNAME()
15102 CHECK_STATUS_A(name(vp, x_ins, nr));
15104 RETURN_STOP_A("forname on non-tracked value not supported");
15106 CHECK_STATUS_A(unboxNextValue(v_ins));
15108 return ARECORD_CONTINUE;
15111 JS_REQUIRES_STACK AbortableRecordingStatus
15112 TraceRecorder::record_JSOP_FORGNAME()
15114 return record_JSOP_FORNAME();
15117 JS_REQUIRES_STACK AbortableRecordingStatus
15118 TraceRecorder::record_JSOP_FORPROP()
15120 return ARECORD_STOP;
15123 JS_REQUIRES_STACK AbortableRecordingStatus
15124 TraceRecorder::record_JSOP_FORELEM()
15127 CHECK_STATUS_A(unboxNextValue(v_ins));
15129 return ARECORD_CONTINUE;
15132 JS_REQUIRES_STACK AbortableRecordingStatus
15133 TraceRecorder::record_JSOP_FORARG()
15136 CHECK_STATUS_A(unboxNextValue(v_ins));
15137 arg(GET_ARGNO(cx->regs->pc), v_ins);
15138 return ARECORD_CONTINUE;
15141 JS_REQUIRES_STACK AbortableRecordingStatus
15142 TraceRecorder::record_JSOP_FORLOCAL()
15145 CHECK_STATUS_A(unboxNextValue(v_ins));
15146 var(GET_SLOTNO(cx->regs->pc), v_ins);
15147 return ARECORD_CONTINUE;
15150 JS_REQUIRES_STACK AbortableRecordingStatus
15151 TraceRecorder::record_JSOP_POPN()
15153 return ARECORD_CONTINUE;
15157 IsFindableCallObj(JSObject *obj)
15159 return obj->isCall() &&
15160 (obj->callIsForEval() || obj->getCallObjCalleeFunction()->isHeavyweight());
15164 * Generate LIR to reach |obj2| from |obj| by traversing the scope chain. The
15165 * generated code also ensures that any call objects found have not changed shape.
15167 * obj starting object
15168 * obj_ins LIR instruction representing obj
15169 * targetObj end object for traversal
15170 * targetIns [out] LIR instruction representing obj2
15172 JS_REQUIRES_STACK RecordingStatus
15173 TraceRecorder::traverseScopeChain(JSObject *obj, LIns *obj_ins, JSObject *targetObj,
15176 VMSideExit* exit = NULL;
15179 * Scope chains are often left "incomplete", and reified lazily when
15180 * necessary, since doing so is expensive. When creating null and flat
15181 * closures on trace (the only kinds supported), the global object is
15182 * hardcoded as the parent, since reifying the scope chain on trace
15183 * would be extremely difficult. This is because block objects need frame
15184 * pointers, which do not exist on trace, and thus would require magic
15185 * similar to arguments objects or reification of stack frames. Luckily,
15186 * for null and flat closures, these blocks are unnecessary.
15188 * The problem, as exposed by bug 523793, is that this means creating a
15189 * fixed traversal on trace can be inconsistent with the shorter scope
15190 * chain used when executing a trace. To address this, perform an initial
15191 * sweep of the scope chain to make sure that if there is a heavyweight
15192 * function with a call object, and there is also a block object, the
15193 * trace is safely aborted.
15195 * If there is no call object, we must have arrived at the global object,
15196 * and can bypass the scope chain traversal completely.
15198 bool foundCallObj = false;
15199 bool foundBlockObj = false;
15200 JSObject* searchObj = obj;
15203 if (searchObj != globalObj) {
15204 if (searchObj->isBlock())
15205 foundBlockObj = true;
15206 else if (IsFindableCallObj(searchObj))
15207 foundCallObj = true;
15210 if (searchObj == targetObj)
15213 searchObj = searchObj->getParent();
15215 RETURN_STOP("cannot traverse this scope chain on trace");
15218 if (!foundCallObj) {
15219 JS_ASSERT(targetObj == globalObj);
15220 targetIns = w.nameImmpNonGC(globalObj);
15221 return RECORD_CONTINUE;
15225 RETURN_STOP("cannot traverse this scope chain on trace");
15227 /* There was a call object, or should be a call object now. */
15229 if (obj != globalObj) {
15230 if (!IsCacheableNonGlobalScope(obj))
15231 RETURN_STOP("scope chain lookup crosses non-cacheable object");
15233 // We must guard on the shape of all call objects for heavyweight functions
15234 // that we traverse on the scope chain: if the shape changes, a variable with
15235 // the same name may have been inserted in the scope chain.
15236 if (IsFindableCallObj(obj)) {
15238 exit = snapshot(BRANCH_EXIT);
15240 w.name(w.eqiN(w.ldiObjShape(obj_ins), obj->shape()), "guard_shape"),
15245 JS_ASSERT(!obj->isBlock());
15247 if (obj == targetObj)
15250 obj = obj->getParent();
15251 obj_ins = w.ldpObjParent(obj_ins);
15254 targetIns = obj_ins;
15255 return RECORD_CONTINUE;
15258 JS_REQUIRES_STACK AbortableRecordingStatus
15259 TraceRecorder::record_JSOP_BINDNAME()
15261 TraceMonitor *localtm = traceMonitor;
15262 JSStackFrame* const fp = cx->fp();
15265 if (!fp->isFunctionFrame()) {
15266 obj = &fp->scopeChain();
15269 JSStackFrame *fp2 = fp;
15273 * In global code, fp->scopeChain can only contain blocks whose values
15274 * are still on the stack. We never use BINDNAME to refer to these.
15276 while (obj->isBlock()) {
15277 // The block's values are still on the stack.
15279 // NB: fp2 can't be a generator frame, because !fp->hasFunction.
15280 while (obj->getPrivate() != fp2) {
15281 JS_ASSERT(fp2->isEvalOrDebuggerFrame());
15284 JS_NOT_REACHED("bad stack frame");
15287 obj = obj->getParent();
15288 // Blocks always have parents.
15293 * If this is a strict mode eval frame, we will have a Call object for
15294 * it. For now just don't trace this case.
15296 if (obj != globalObj) {
15297 JS_ASSERT(obj->isCall());
15298 JS_ASSERT(obj->callIsForEval());
15299 RETURN_STOP_A("BINDNAME within strict eval code");
15303 * The trace is specialized to this global object. Furthermore, we know
15304 * it is the sole 'global' object on the scope chain: we set globalObj
15305 * to the scope chain element with no parent, and we reached it
15306 * starting from the function closure or the current scopeChain, so
15307 * there is nothing inner to it. Therefore this must be the right base
15310 stack(0, w.immpObjGC(obj));
15311 return ARECORD_CONTINUE;
15314 // We can't trace BINDNAME in functions that contain direct calls to eval,
15315 // as they might add bindings which previously-traced references would have
15317 if (JSFUN_HEAVYWEIGHT_TEST(fp->fun()->flags))
15318 RETURN_STOP_A("BINDNAME in heavyweight function.");
15320 // We don't have the scope chain on trace, so instead we get a start object
15321 // that is on the scope chain and doesn't skip the target object (the one
15322 // that contains the property).
15323 Value *callee = &cx->fp()->calleeValue();
15324 obj = callee->toObject().getParent();
15325 if (obj == globalObj) {
15326 stack(0, w.immpObjGC(obj));
15327 return ARECORD_CONTINUE;
15329 LIns *obj_ins = w.ldpObjParent(get(callee));
15331 // Find the target object.
15332 JSAtom *atom = atoms[GET_INDEX(cx->regs->pc)];
15333 jsid id = ATOM_TO_JSID(atom);
15334 JSObject *obj2 = js_FindIdentifierBase(cx, &fp->scopeChain(), id);
15336 RETURN_ERROR_A("error in js_FindIdentifierBase");
15337 if (!localtm->recorder)
15338 return ARECORD_ABORTED;
15339 if (obj2 != globalObj && !obj2->isCall())
15340 RETURN_STOP_A("BINDNAME on non-global, non-call object");
15342 // Generate LIR to get to the target object from the start object.
15344 CHECK_STATUS_A(traverseScopeChain(obj, obj_ins, obj2, obj2_ins));
15346 // If |obj2| is the global object, we can refer to it directly instead of walking up
15347 // the scope chain. There may still be guards on intervening call objects.
15348 stack(0, obj2 == globalObj ? w.immpObjGC(obj2) : obj2_ins);
15349 return ARECORD_CONTINUE;
15352 JS_REQUIRES_STACK AbortableRecordingStatus
15353 TraceRecorder::record_JSOP_THROW()
15355 return ARECORD_STOP;
15358 JS_REQUIRES_STACK AbortableRecordingStatus
15359 TraceRecorder::record_JSOP_IN()
15361 Value& rval = stackval(-1);
15362 Value& lval = stackval(-2);
15364 if (rval.isPrimitive())
15365 RETURN_STOP_A("JSOP_IN on non-object right operand");
15366 JSObject* obj = &rval.toObject();
15367 LIns* obj_ins = get(&rval);
15371 if (lval.isInt32()) {
15372 if (!js_Int32ToId(cx, lval.toInt32(), &id))
15373 RETURN_ERROR_A("OOM converting left operand of JSOP_IN to string");
15375 if (obj->isDenseArray()) {
15376 // Fast path for dense arrays
15377 VMSideExit* branchExit = snapshot(BRANCH_EXIT);
15378 guardDenseArray(obj_ins, branchExit);
15380 // If our proto has indexed props, all bets are off on our
15381 // "false" values and out-of-bounds access. Just guard on
15383 CHECK_STATUS_A(guardPrototypeHasNoIndexedProperties(obj, obj_ins,
15384 snapshot(MISMATCH_EXIT)));
15387 CHECK_STATUS_A(makeNumberInt32(get(&lval), &idx_ins));
15388 idx_ins = w.name(idx_ins, "index");
15389 LIns* capacity_ins = w.ldiDenseArrayCapacity(obj_ins);
15390 LIns* inRange = w.ltui(idx_ins, capacity_ins);
15392 if (jsuint(lval.toInt32()) < obj->getDenseArrayCapacity()) {
15393 guard(true, inRange, branchExit);
15395 LIns *elem_ins = w.getDslotAddress(obj_ins, idx_ins);
15396 // Need to make sure we don't have a hole
15397 LIns *is_hole_ins =
15398 is_boxed_magic(DSlotsAddress(elem_ins), JS_ARRAY_HOLE);
15400 // Set x to true (index in our array) if is_hole_ins == 0
15401 x = w.eqi0(is_hole_ins);
15403 guard(false, inRange, branchExit);
15408 CHECK_STATUS_A(makeNumberInt32(get(&lval), &num_ins));
15409 LIns* args[] = { num_ins, obj_ins, cx_ins };
15410 x = w.call(&js_HasNamedPropertyInt32_ci, args);
15412 } else if (lval.isString()) {
15413 if (!js_ValueToStringId(cx, lval, &id))
15414 RETURN_ERROR_A("left operand of JSOP_IN didn't convert to a string-id");
15415 LIns* args[] = { get(&lval), obj_ins, cx_ins };
15416 x = w.call(&js_HasNamedProperty_ci, args);
15418 RETURN_STOP_A("string or integer expected");
15421 guard(false, w.eqiN(x, JS_NEITHER), OOM_EXIT);
15424 TraceMonitor &localtm = *traceMonitor;
15428 JSBool ok = obj->lookupProperty(cx, id, &obj2, &prop);
15431 RETURN_ERROR_A("obj->lookupProperty failed in JSOP_IN");
15433 /* lookupProperty can reenter the interpreter and kill |this|. */
15434 if (!localtm.recorder)
15435 return ARECORD_ABORTED;
15437 bool cond = prop != NULL;
15440 * The interpreter fuses comparisons and the following branch, so we have
15441 * to do that here as well.
15443 jsbytecode *pc = cx->regs->pc;
15444 fuseIf(pc + 1, cond, x);
15446 /* If the branch was to a loop header, we may need to close it. */
15447 if (pc[1] == JSOP_IFNE || pc[1] == JSOP_IFEQ)
15448 CHECK_STATUS_A(checkTraceEnd(pc + 1));
15451 * We update the stack after the guard. This is safe since the guard bails
15452 * out at the comparison and the interpreter will therefore re-execute the
15453 * comparison. This way the value of the condition doesn't have to be
15454 * calculated and saved on the stack in most cases.
15457 return ARECORD_CONTINUE;
15460 static JSBool FASTCALL
15461 HasInstanceOnTrace(JSContext* cx, JSObject* ctor, ValueArgType arg)
15463 TraceMonitor *tm = JS_TRACE_MONITOR_ON_TRACE(cx);
15465 const Value &argref = ValueArgToConstRef(arg);
15466 JSBool result = JS_FALSE;
15467 if (!HasInstance(cx, ctor, &argref, &result))
15468 SetBuiltinError(tm);
15471 JS_DEFINE_CALLINFO_3(static, BOOL_FAIL, HasInstanceOnTrace, CONTEXT, OBJECT, VALUE,
15472 0, ACCSET_STORE_ANY)
15474 JS_REQUIRES_STACK AbortableRecordingStatus
15475 TraceRecorder::record_JSOP_INSTANCEOF()
15477 // If the rhs isn't an object, we are headed for a TypeError.
15478 Value& ctor = stackval(-1);
15479 if (ctor.isPrimitive())
15480 RETURN_STOP_A("non-object on rhs of instanceof");
15482 Value& val = stackval(-2);
15483 LIns* val_ins = box_value_for_native_call(val, get(&val));
15485 enterDeepBailCall();
15486 LIns* args[] = {val_ins, get(&ctor), cx_ins};
15487 stack(-2, w.call(&HasInstanceOnTrace_ci, args));
15488 LIns* status_ins = w.ldiStateField(builtinStatus);
15489 pendingGuardCondition = w.eqi0(status_ins);
15490 leaveDeepBailCall();
15492 return ARECORD_CONTINUE;
15495 JS_REQUIRES_STACK AbortableRecordingStatus
15496 TraceRecorder::record_JSOP_DEBUGGER()
15498 return ARECORD_STOP;
15501 JS_REQUIRES_STACK AbortableRecordingStatus
15502 TraceRecorder::record_JSOP_GOSUB()
15504 return ARECORD_STOP;
15507 JS_REQUIRES_STACK AbortableRecordingStatus
15508 TraceRecorder::record_JSOP_RETSUB()
15510 return ARECORD_STOP;
15513 JS_REQUIRES_STACK AbortableRecordingStatus
15514 TraceRecorder::record_JSOP_EXCEPTION()
15516 return ARECORD_STOP;
15519 JS_REQUIRES_STACK AbortableRecordingStatus
15520 TraceRecorder::record_JSOP_LINENO()
15522 return ARECORD_CONTINUE;
15525 JS_REQUIRES_STACK AbortableRecordingStatus
15526 TraceRecorder::record_JSOP_BLOCKCHAIN()
15528 return ARECORD_CONTINUE;
15531 JS_REQUIRES_STACK AbortableRecordingStatus
15532 TraceRecorder::record_JSOP_NULLBLOCKCHAIN()
15534 return ARECORD_CONTINUE;
15537 JS_REQUIRES_STACK AbortableRecordingStatus
15538 TraceRecorder::record_JSOP_CONDSWITCH()
15540 return ARECORD_CONTINUE;
15543 JS_REQUIRES_STACK AbortableRecordingStatus
15544 TraceRecorder::record_JSOP_CASE()
15546 CHECK_STATUS_A(strictEquality(true, true));
15547 return ARECORD_CONTINUE;
15550 JS_REQUIRES_STACK AbortableRecordingStatus
15551 TraceRecorder::record_JSOP_DEFAULT()
15553 return ARECORD_CONTINUE;
15556 JS_REQUIRES_STACK AbortableRecordingStatus
15557 TraceRecorder::record_JSOP_EVAL()
15559 return ARECORD_STOP;
15562 JS_REQUIRES_STACK AbortableRecordingStatus
15563 TraceRecorder::record_JSOP_ENUMELEM()
15566 * To quote from jsinterp.cpp's JSOP_ENUMELEM case:
15567 * Funky: the value to set is under the [obj, id] pair.
15569 return setElem(-2, -1, -3);
15572 JS_REQUIRES_STACK AbortableRecordingStatus
15573 TraceRecorder::record_JSOP_GETTER()
15575 return ARECORD_STOP;
15578 JS_REQUIRES_STACK AbortableRecordingStatus
15579 TraceRecorder::record_JSOP_SETTER()
15581 return ARECORD_STOP;
15584 JS_REQUIRES_STACK AbortableRecordingStatus
15585 TraceRecorder::record_JSOP_DEFFUN()
15587 return ARECORD_STOP;
15590 JS_REQUIRES_STACK AbortableRecordingStatus
15591 TraceRecorder::record_JSOP_DEFFUN_FC()
15593 return ARECORD_STOP;
15596 JS_REQUIRES_STACK AbortableRecordingStatus
15597 TraceRecorder::record_JSOP_DEFCONST()
15599 return ARECORD_STOP;
15602 JS_REQUIRES_STACK AbortableRecordingStatus
15603 TraceRecorder::record_JSOP_DEFVAR()
15605 return ARECORD_STOP;
15609 TraceRecorder::getFullIndex(ptrdiff_t pcoff)
15611 jsatomid index = GET_INDEX(cx->regs->pc + pcoff);
15612 index += atoms - cx->fp()->script()->atomMap.vector;
15616 JS_REQUIRES_STACK AbortableRecordingStatus
15617 TraceRecorder::record_JSOP_LAMBDA()
15620 fun = cx->fp()->script()->getFunction(getFullIndex());
15622 if (FUN_NULL_CLOSURE(fun) && FUN_OBJECT(fun)->getParent() != globalObj)
15623 RETURN_STOP_A("Null closure function object parent must be global object");
15626 * Emit code to clone a null closure parented by this recorder's global
15627 * object, in order to preserve function object evaluation rules observable
15628 * via identity and mutation. But don't clone if our result is consumed by
15629 * JSOP_SETMETHOD or JSOP_INITMETHOD, since we optimize away the clone for
15630 * these combinations and clone only if the "method value" escapes.
15632 * See jsinterp.cpp, the JSOP_LAMBDA null closure case. The JSOP_SETMETHOD and
15633 * JSOP_INITMETHOD logic governing the early ARECORD_CONTINUE returns below
15634 * must agree with the corresponding break-from-do-while(0) logic there.
15636 if (FUN_NULL_CLOSURE(fun) && FUN_OBJECT(fun)->getParent() == &cx->fp()->scopeChain()) {
15637 jsbytecode *pc2 = AdvanceOverBlockchainOp(cx->regs->pc + JSOP_LAMBDA_LENGTH);
15638 JSOp op2 = JSOp(*pc2);
15640 if (op2 == JSOP_INITMETHOD) {
15641 stack(0, w.immpObjGC(FUN_OBJECT(fun)));
15642 return ARECORD_CONTINUE;
15645 if (op2 == JSOP_SETMETHOD) {
15646 Value lval = stackval(-1);
15648 if (!lval.isPrimitive() && lval.toObject().canHaveMethodBarrier()) {
15649 stack(0, w.immpObjGC(FUN_OBJECT(fun)));
15650 return ARECORD_CONTINUE;
15652 } else if (fun->joinable()) {
15653 if (op2 == JSOP_CALL) {
15655 * Array.prototype.sort and String.prototype.replace are
15656 * optimized as if they are special form. We know that they
15657 * won't leak the joined function object in obj, therefore
15658 * we don't need to clone that compiler- created function
15659 * object for identity/mutation reasons.
15661 int iargc = GET_ARGC(pc2);
15664 * Note that we have not yet pushed obj as the final argument,
15665 * so regs.sp[1 - (iargc + 2)], and not regs.sp[-(iargc + 2)],
15666 * is the callee for this JSOP_CALL.
15668 const Value &cref = cx->regs->sp[1 - (iargc + 2)];
15671 if (IsFunctionObject(cref, &callee)) {
15672 JSFunction *calleeFun = callee->getFunctionPrivate();
15673 Native native = calleeFun->maybeNative();
15675 if ((iargc == 1 && native == array_sort) ||
15676 (iargc == 2 && native == str_replace)) {
15677 stack(0, w.immpObjGC(FUN_OBJECT(fun)));
15678 return ARECORD_CONTINUE;
15681 } else if (op2 == JSOP_NULL) {
15682 pc2 += JSOP_NULL_LENGTH;
15685 if (op2 == JSOP_CALL && GET_ARGC(pc2) == 0) {
15686 stack(0, w.immpObjGC(FUN_OBJECT(fun)));
15687 return ARECORD_CONTINUE;
15693 CHECK_STATUS_A(getClassPrototype(JSProto_Function, proto_ins));
15695 LIns* args[] = { w.immpObjGC(globalObj), proto_ins, w.immpFunGC(fun), cx_ins };
15696 LIns* x = w.call(&js_NewNullClosure_ci, args);
15698 return ARECORD_CONTINUE;
15701 if (GetBlockChainFast(cx, cx->fp(), JSOP_LAMBDA, JSOP_LAMBDA_LENGTH))
15702 RETURN_STOP_A("Unable to trace creating lambda in let");
15705 CHECK_STATUS_A(getClassPrototype(JSProto_Function, proto_ins));
15706 LIns* scopeChain_ins = scopeChain();
15707 JS_ASSERT(scopeChain_ins);
15708 LIns* args[] = { proto_ins, scopeChain_ins, w.nameImmpNonGC(fun), cx_ins };
15709 LIns* call_ins = w.call(&js_CloneFunctionObject_ci, args);
15711 w.name(w.eqp0(call_ins), "guard(js_CloneFunctionObject)"),
15713 stack(0, call_ins);
15715 return ARECORD_CONTINUE;
15718 JS_REQUIRES_STACK AbortableRecordingStatus
15719 TraceRecorder::record_JSOP_LAMBDA_FC()
15722 fun = cx->fp()->script()->getFunction(getFullIndex());
15724 if (FUN_OBJECT(fun)->getParent() != globalObj)
15725 return ARECORD_STOP;
15727 if (GetBlockChainFast(cx, cx->fp(), JSOP_LAMBDA_FC, JSOP_LAMBDA_FC_LENGTH))
15728 RETURN_STOP_A("Unable to trace creating lambda in let");
15730 LIns* args[] = { scopeChain(), w.immpFunGC(fun), cx_ins };
15731 LIns* closure_ins = w.call(&js_AllocFlatClosure_ci, args);
15733 w.name(w.eqp(closure_ins, w.immpNull()), "guard(js_AllocFlatClosure)"),
15736 JSScript *script = fun->script();
15737 if (script->bindings.hasUpvars()) {
15738 JSUpvarArray *uva = script->upvars();
15739 LIns* upvars_ins = w.getObjPrivatizedSlot(closure_ins,
15740 JSObject::JSSLOT_FLAT_CLOSURE_UPVARS);
15742 for (uint32 i = 0, n = uva->length; i < n; i++) {
15744 LIns* v_ins = upvar(script, uva, i, v);
15746 return ARECORD_STOP;
15748 box_value_into(v, v_ins, FCSlotsAddress(upvars_ins, i));
15752 stack(0, closure_ins);
15753 return ARECORD_CONTINUE;
15756 JS_REQUIRES_STACK AbortableRecordingStatus
15757 TraceRecorder::record_JSOP_CALLEE()
15759 stack(0, get(&cx->fp()->calleeValue()));
15760 return ARECORD_CONTINUE;
15763 JS_REQUIRES_STACK AbortableRecordingStatus
15764 TraceRecorder::record_JSOP_SETLOCALPOP()
15766 var(GET_SLOTNO(cx->regs->pc), stack(-1));
15767 return ARECORD_CONTINUE;
15770 JS_REQUIRES_STACK AbortableRecordingStatus
15771 TraceRecorder::record_JSOP_IFPRIMTOP()
15773 // Traces are type-specialized, including null vs. object, so we need do
15774 // nothing here. The upstream unbox_value called after valueOf or toString
15775 // from an imacro (e.g.) will fork the trace for us, allowing us to just
15776 // follow along mindlessly :-).
15777 return ARECORD_CONTINUE;
15780 JS_REQUIRES_STACK AbortableRecordingStatus
15781 TraceRecorder::record_JSOP_SETCALL()
15783 return ARECORD_STOP;
15786 JS_REQUIRES_STACK AbortableRecordingStatus
15787 TraceRecorder::record_JSOP_TRY()
15789 return ARECORD_CONTINUE;
15792 JS_REQUIRES_STACK AbortableRecordingStatus
15793 TraceRecorder::record_JSOP_FINALLY()
15795 return ARECORD_CONTINUE;
15798 JS_REQUIRES_STACK AbortableRecordingStatus
15799 TraceRecorder::record_JSOP_NOP()
15801 return ARECORD_CONTINUE;
15804 JS_REQUIRES_STACK AbortableRecordingStatus
15805 TraceRecorder::record_JSOP_ARGSUB()
15807 JSStackFrame* const fp = cx->fp();
15810 * The arguments object or its absence in the frame is part of the typemap,
15811 * so a record-time check suffices here. We don't bother tracing ARGSUB in
15812 * the case of an arguments object exising, because ARGSUB and to a lesser
15813 * extent ARGCNT are emitted to avoid arguments object creation.
15815 if (!fp->hasArgsObj() && !fp->fun()->isHeavyweight()) {
15816 uintN slot = GET_ARGNO(cx->regs->pc);
15817 if (slot >= fp->numActualArgs())
15818 RETURN_STOP_A("can't trace out-of-range arguments");
15820 stack(0, get(&cx->fp()->canonicalActualArg(slot)));
15821 return ARECORD_CONTINUE;
15823 RETURN_STOP_A("can't trace JSOP_ARGSUB hard case");
15826 JS_REQUIRES_STACK LIns*
15827 TraceRecorder::guardArgsLengthNotAssigned(LIns* argsobj_ins)
15829 // The following implements JSObject::isArgsLengthOverridden on trace.
15830 // ARGS_LENGTH_OVERRIDDEN_BIT is set if length was overridden.
15831 LIns *len_ins = w.getArgsLength(argsobj_ins);
15832 LIns *ovr_ins = w.andi(len_ins, w.nameImmi(JSObject::ARGS_LENGTH_OVERRIDDEN_BIT));
15833 guard(true, w.eqi0(ovr_ins), MISMATCH_EXIT);
15837 JS_REQUIRES_STACK AbortableRecordingStatus
15838 TraceRecorder::record_JSOP_ARGCNT()
15840 JSStackFrame * const fp = cx->fp();
15842 if (fp->fun()->flags & JSFUN_HEAVYWEIGHT)
15843 RETURN_STOP_A("can't trace heavyweight JSOP_ARGCNT");
15845 // argc is fixed on trace, so ideally we would simply generate LIR for
15846 // constant argc. But the user can mutate arguments.length in the
15847 // interpreter, so we have to check for that in the trace entry frame.
15848 // We also have to check that arguments.length has not been mutated
15849 // at record time, because if so we will generate incorrect constant
15850 // LIR, which will assert in alu().
15851 if (fp->hasArgsObj() && fp->argsObj().isArgsLengthOverridden())
15852 RETURN_STOP_A("can't trace JSOP_ARGCNT if arguments.length has been modified");
15853 LIns *a_ins = getFrameObjPtr(fp->addressOfArgs());
15854 if (callDepth == 0) {
15855 if (MaybeBranch mbr = w.jt(w.eqp0(a_ins))) {
15856 guardArgsLengthNotAssigned(a_ins);
15860 stack(0, w.immd(fp->numActualArgs()));
15861 return ARECORD_CONTINUE;
15864 JS_REQUIRES_STACK AbortableRecordingStatus
15865 TraceRecorder::record_DefLocalFunSetSlot(uint32 slot, JSObject* obj)
15867 JSFunction* fun = GET_FUNCTION_PRIVATE(cx, obj);
15869 if (FUN_NULL_CLOSURE(fun) && FUN_OBJECT(fun)->getParent() == globalObj) {
15871 CHECK_STATUS_A(getClassPrototype(JSProto_Function, proto_ins));
15873 LIns* args[] = { w.immpObjGC(globalObj), proto_ins, w.immpFunGC(fun), cx_ins };
15874 LIns* x = w.call(&js_NewNullClosure_ci, args);
15876 return ARECORD_CONTINUE;
15879 return ARECORD_STOP;
15882 JS_REQUIRES_STACK AbortableRecordingStatus
15883 TraceRecorder::record_JSOP_DEFLOCALFUN()
15885 return ARECORD_CONTINUE;
15888 JS_REQUIRES_STACK AbortableRecordingStatus
15889 TraceRecorder::record_JSOP_DEFLOCALFUN_FC()
15891 return ARECORD_CONTINUE;
15894 JS_REQUIRES_STACK AbortableRecordingStatus
15895 TraceRecorder::record_JSOP_GOTOX()
15897 return record_JSOP_GOTO();
15900 JS_REQUIRES_STACK AbortableRecordingStatus
15901 TraceRecorder::record_JSOP_IFEQX()
15903 return record_JSOP_IFEQ();
15906 JS_REQUIRES_STACK AbortableRecordingStatus
15907 TraceRecorder::record_JSOP_IFNEX()
15909 return record_JSOP_IFNE();
15912 JS_REQUIRES_STACK AbortableRecordingStatus
15913 TraceRecorder::record_JSOP_ORX()
15915 return record_JSOP_OR();
15918 JS_REQUIRES_STACK AbortableRecordingStatus
15919 TraceRecorder::record_JSOP_ANDX()
15921 return record_JSOP_AND();
15924 JS_REQUIRES_STACK AbortableRecordingStatus
15925 TraceRecorder::record_JSOP_GOSUBX()
15927 return record_JSOP_GOSUB();
15930 JS_REQUIRES_STACK AbortableRecordingStatus
15931 TraceRecorder::record_JSOP_CASEX()
15933 CHECK_STATUS_A(strictEquality(true, true));
15934 return ARECORD_CONTINUE;
15937 JS_REQUIRES_STACK AbortableRecordingStatus
15938 TraceRecorder::record_JSOP_DEFAULTX()
15940 return ARECORD_CONTINUE;
15943 JS_REQUIRES_STACK AbortableRecordingStatus
15944 TraceRecorder::record_JSOP_TABLESWITCHX()
15946 return record_JSOP_TABLESWITCH();
15949 JS_REQUIRES_STACK AbortableRecordingStatus
15950 TraceRecorder::record_JSOP_LOOKUPSWITCHX()
15952 return InjectStatus(switchop());
15955 JS_REQUIRES_STACK AbortableRecordingStatus
15956 TraceRecorder::record_JSOP_BACKPATCH()
15958 return ARECORD_CONTINUE;
15961 JS_REQUIRES_STACK AbortableRecordingStatus
15962 TraceRecorder::record_JSOP_BACKPATCH_POP()
15964 return ARECORD_CONTINUE;
15967 JS_REQUIRES_STACK AbortableRecordingStatus
15968 TraceRecorder::record_JSOP_THROWING()
15970 return ARECORD_STOP;
15973 JS_REQUIRES_STACK AbortableRecordingStatus
15974 TraceRecorder::record_JSOP_SETRVAL()
15976 // If we implement this, we need to update JSOP_STOP.
15977 return ARECORD_STOP;
15980 JS_REQUIRES_STACK AbortableRecordingStatus
15981 TraceRecorder::record_JSOP_RETRVAL()
15983 return ARECORD_STOP;
15986 JS_REQUIRES_STACK AbortableRecordingStatus
15987 TraceRecorder::record_JSOP_REGEXP()
15989 JSStackFrame* const fp = cx->fp();
15990 JSScript* script = fp->script();
15991 unsigned index = atoms - script->atomMap.vector + GET_INDEX(cx->regs->pc);
15994 CHECK_STATUS_A(getClassPrototype(JSProto_RegExp, proto_ins));
15998 w.immpObjGC(script->getRegExp(index)),
16001 LIns* regex_ins = w.call(&js_CloneRegExpObject_ci, args);
16002 guard(false, w.eqp0(regex_ins), OOM_EXIT);
16004 stack(0, regex_ins);
16005 return ARECORD_CONTINUE;
16008 // begin JS_HAS_XML_SUPPORT
16010 JS_REQUIRES_STACK AbortableRecordingStatus
16011 TraceRecorder::record_JSOP_DEFXMLNS()
16013 return ARECORD_STOP;
16016 JS_REQUIRES_STACK AbortableRecordingStatus
16017 TraceRecorder::record_JSOP_ANYNAME()
16019 return ARECORD_STOP;
16022 JS_REQUIRES_STACK AbortableRecordingStatus
16023 TraceRecorder::record_JSOP_QNAMEPART()
16025 return record_JSOP_STRING();
16028 JS_REQUIRES_STACK AbortableRecordingStatus
16029 TraceRecorder::record_JSOP_QNAMECONST()
16031 return ARECORD_STOP;
16034 JS_REQUIRES_STACK AbortableRecordingStatus
16035 TraceRecorder::record_JSOP_QNAME()
16037 return ARECORD_STOP;
16040 JS_REQUIRES_STACK AbortableRecordingStatus
16041 TraceRecorder::record_JSOP_TOATTRNAME()
16043 return ARECORD_STOP;
16046 JS_REQUIRES_STACK AbortableRecordingStatus
16047 TraceRecorder::record_JSOP_TOATTRVAL()
16049 return ARECORD_STOP;
16052 JS_REQUIRES_STACK AbortableRecordingStatus
16053 TraceRecorder::record_JSOP_ADDATTRNAME()
16055 return ARECORD_STOP;
16058 JS_REQUIRES_STACK AbortableRecordingStatus
16059 TraceRecorder::record_JSOP_ADDATTRVAL()
16061 return ARECORD_STOP;
16064 JS_REQUIRES_STACK AbortableRecordingStatus
16065 TraceRecorder::record_JSOP_BINDXMLNAME()
16067 return ARECORD_STOP;
16070 JS_REQUIRES_STACK AbortableRecordingStatus
16071 TraceRecorder::record_JSOP_SETXMLNAME()
16073 return ARECORD_STOP;
16076 JS_REQUIRES_STACK AbortableRecordingStatus
16077 TraceRecorder::record_JSOP_XMLNAME()
16079 return ARECORD_STOP;
16082 JS_REQUIRES_STACK AbortableRecordingStatus
16083 TraceRecorder::record_JSOP_DESCENDANTS()
16085 return ARECORD_STOP;
16088 JS_REQUIRES_STACK AbortableRecordingStatus
16089 TraceRecorder::record_JSOP_FILTER()
16091 return ARECORD_STOP;
16094 JS_REQUIRES_STACK AbortableRecordingStatus
16095 TraceRecorder::record_JSOP_ENDFILTER()
16097 return ARECORD_STOP;
16100 JS_REQUIRES_STACK AbortableRecordingStatus
16101 TraceRecorder::record_JSOP_TOXML()
16103 return ARECORD_STOP;
16106 JS_REQUIRES_STACK AbortableRecordingStatus
16107 TraceRecorder::record_JSOP_TOXMLLIST()
16109 return ARECORD_STOP;
16112 JS_REQUIRES_STACK AbortableRecordingStatus
16113 TraceRecorder::record_JSOP_XMLTAGEXPR()
16115 return ARECORD_STOP;
16118 JS_REQUIRES_STACK AbortableRecordingStatus
16119 TraceRecorder::record_JSOP_XMLELTEXPR()
16121 return ARECORD_STOP;
16124 JS_REQUIRES_STACK AbortableRecordingStatus
16125 TraceRecorder::record_JSOP_XMLCDATA()
16127 return ARECORD_STOP;
16130 JS_REQUIRES_STACK AbortableRecordingStatus
16131 TraceRecorder::record_JSOP_XMLCOMMENT()
16133 return ARECORD_STOP;
16136 JS_REQUIRES_STACK AbortableRecordingStatus
16137 TraceRecorder::record_JSOP_XMLPI()
16139 return ARECORD_STOP;
16142 JS_REQUIRES_STACK AbortableRecordingStatus
16143 TraceRecorder::record_JSOP_GETFUNNS()
16145 return ARECORD_STOP;
16148 JS_REQUIRES_STACK AbortableRecordingStatus
16149 TraceRecorder::record_JSOP_STARTXML()
16151 return ARECORD_STOP;
16154 JS_REQUIRES_STACK AbortableRecordingStatus
16155 TraceRecorder::record_JSOP_STARTXMLEXPR()
16157 return ARECORD_STOP;
16160 // end JS_HAS_XML_SUPPORT
16162 JS_REQUIRES_STACK AbortableRecordingStatus
16163 TraceRecorder::record_JSOP_CALLPROP()
16165 Value& l = stackval(-1);
16169 if (!l.isPrimitive()) {
16170 obj = &l.toObject();
16172 this_ins = obj_ins; // |this| for subsequent call
16174 JSProtoKey protoKey;
16175 debug_only_stmt(const char* protoname = NULL;)
16176 if (l.isString()) {
16177 protoKey = JSProto_String;
16178 debug_only_stmt(protoname = "String.prototype";)
16179 } else if (l.isNumber()) {
16180 protoKey = JSProto_Number;
16181 debug_only_stmt(protoname = "Number.prototype";)
16182 } else if (l.isBoolean()) {
16183 protoKey = JSProto_Boolean;
16184 debug_only_stmt(protoname = "Boolean.prototype";)
16186 JS_ASSERT(l.isNull() || l.isUndefined());
16187 RETURN_STOP_A("callprop on null or void");
16190 if (!js_GetClassPrototype(cx, NULL, protoKey, &obj))
16191 RETURN_ERROR_A("GetClassPrototype failed!");
16193 obj_ins = w.immpObjGC(obj);
16194 debug_only_stmt(obj_ins = w.name(obj_ins, protoname);)
16195 this_ins = get(&l); // use primitive as |this|
16200 CHECK_STATUS_A(test_property_cache(obj, obj_ins, obj2, pcval));
16202 if (pcval.isNull())
16203 RETURN_STOP_A("callprop of missing method");
16205 if (pcval.isFunObj()) {
16206 if (l.isPrimitive()) {
16207 JSFunction* fun = GET_FUNCTION_PRIVATE(cx, &pcval.toFunObj());
16208 if (fun->isInterpreted() && !fun->inStrictMode())
16209 RETURN_STOP_A("callee does not accept primitive |this|");
16211 set(&l, w.immpObjGC(&pcval.toFunObj()));
16213 if (l.isPrimitive())
16214 RETURN_STOP_A("callprop of primitive method");
16215 JS_ASSERT_IF(pcval.isShape(), !pcval.toShape()->isMethod());
16216 CHECK_STATUS_A(propTail(obj, obj_ins, obj2, pcval, NULL, NULL, &l));
16218 stack(0, this_ins);
16219 return ARECORD_CONTINUE;
16222 JS_REQUIRES_STACK AbortableRecordingStatus
16223 TraceRecorder::record_JSOP_DELDESC()
16225 return ARECORD_STOP;
16228 JS_REQUIRES_STACK AbortableRecordingStatus
16229 TraceRecorder::record_JSOP_UINT24()
16231 stack(0, w.immd(GET_UINT24(cx->regs->pc)));
16232 return ARECORD_CONTINUE;
16235 JS_REQUIRES_STACK AbortableRecordingStatus
16236 TraceRecorder::record_JSOP_INDEXBASE()
16238 atoms += GET_INDEXBASE(cx->regs->pc);
16239 return ARECORD_CONTINUE;
16242 JS_REQUIRES_STACK AbortableRecordingStatus
16243 TraceRecorder::record_JSOP_RESETBASE()
16246 return ARECORD_CONTINUE;
16249 JS_REQUIRES_STACK AbortableRecordingStatus
16250 TraceRecorder::record_JSOP_RESETBASE0()
16253 return ARECORD_CONTINUE;
16256 JS_REQUIRES_STACK AbortableRecordingStatus
16257 TraceRecorder::record_JSOP_CALLELEM()
16259 return record_JSOP_GETELEM();
16262 JS_REQUIRES_STACK AbortableRecordingStatus
16263 TraceRecorder::record_JSOP_STOP()
16265 JSStackFrame *fp = cx->fp();
16267 /* A return from callDepth 0 terminates the current loop, except for recursion. */
16268 if (callDepth == 0 && !fp->hasImacropc()) {
16269 AUDIT(returnLoopExits);
16273 if (fp->hasImacropc()) {
16275 * End of imacro, so return true to the interpreter immediately. The
16276 * interpreter's JSOP_STOP case will return from the imacro, back to
16277 * the pc after the calling op, still in the same JSStackFrame.
16279 updateAtoms(fp->script());
16280 return ARECORD_CONTINUE;
16283 CHECK_STATUS_A(putActivationObjects());
16285 if (Probes::callTrackingActive(cx)) {
16286 LIns* args[] = { w.immi(0), w.nameImmpNonGC(cx->fp()->fun()), cx_ins };
16287 LIns* call_ins = w.call(&functionProbe_ci, args);
16288 guard(false, w.eqi0(call_ins), MISMATCH_EXIT);
16292 * We know falling off the end of a constructor returns the new object that
16293 * was passed in via fp->argv[-1], while falling off the end of a function
16294 * returns undefined.
16296 * NB: we do not support script rval (eval, API users who want the result
16297 * of the last expression-statement, debugger API calls).
16299 if (fp->isConstructing()) {
16300 rval_ins = get(&fp->thisValue());
16302 rval_ins = w.immiUndefined();
16304 clearReturningFrameFromNativeTracker();
16305 return ARECORD_CONTINUE;
16308 JS_REQUIRES_STACK AbortableRecordingStatus
16309 TraceRecorder::record_JSOP_GETXPROP()
16311 Value& l = stackval(-1);
16312 if (l.isPrimitive())
16313 RETURN_STOP_A("primitive-this for GETXPROP?");
16318 CHECK_STATUS_A(name(vp, v_ins, nr));
16320 return ARECORD_CONTINUE;
16323 JS_REQUIRES_STACK AbortableRecordingStatus
16324 TraceRecorder::record_JSOP_CALLXMLNAME()
16326 return ARECORD_STOP;
16329 JS_REQUIRES_STACK AbortableRecordingStatus
16330 TraceRecorder::record_JSOP_TYPEOFEXPR()
16332 return record_JSOP_TYPEOF();
16335 JS_REQUIRES_STACK AbortableRecordingStatus
16336 TraceRecorder::record_JSOP_ENTERBLOCK()
16339 obj = cx->fp()->script()->getObject(getFullIndex(0));
16341 LIns* void_ins = w.immiUndefined();
16342 for (int i = 0, n = OBJ_BLOCK_COUNT(cx, obj); i < n; i++)
16343 stack(i, void_ins);
16344 return ARECORD_CONTINUE;
16347 JS_REQUIRES_STACK AbortableRecordingStatus
16348 TraceRecorder::record_JSOP_LEAVEBLOCK()
16350 return ARECORD_CONTINUE;
16353 JS_REQUIRES_STACK AbortableRecordingStatus
16354 TraceRecorder::record_JSOP_GENERATOR()
16356 return ARECORD_STOP;
16359 JS_REQUIRES_STACK AbortableRecordingStatus
16360 TraceRecorder::record_JSOP_YIELD()
16362 return ARECORD_STOP;
16365 JS_REQUIRES_STACK AbortableRecordingStatus
16366 TraceRecorder::record_JSOP_ARRAYPUSH()
16368 uint32_t slot = GET_UINT16(cx->regs->pc);
16369 JS_ASSERT(cx->fp()->numFixed() <= slot);
16370 JS_ASSERT(cx->fp()->slots() + slot < cx->regs->sp - 1);
16371 Value &arrayval = cx->fp()->slots()[slot];
16372 JS_ASSERT(arrayval.isObject());
16373 LIns *array_ins = get(&arrayval);
16374 Value &elt = stackval(-1);
16375 LIns *elt_ins = box_value_for_native_call(elt, get(&elt));
16377 enterDeepBailCall();
16379 LIns *args[] = { elt_ins, array_ins, cx_ins };
16380 pendingGuardCondition = w.call(&js_ArrayCompPush_tn_ci, args);
16382 leaveDeepBailCall();
16383 return ARECORD_CONTINUE;
16386 JS_REQUIRES_STACK AbortableRecordingStatus
16387 TraceRecorder::record_JSOP_ENUMCONSTELEM()
16389 return ARECORD_STOP;
16392 JS_REQUIRES_STACK AbortableRecordingStatus
16393 TraceRecorder::record_JSOP_LEAVEBLOCKEXPR()
16395 LIns* v_ins = stack(-1);
16396 int n = -1 - GET_UINT16(cx->regs->pc);
16398 return ARECORD_CONTINUE;
16401 JS_REQUIRES_STACK AbortableRecordingStatus
16402 TraceRecorder::record_JSOP_GETTHISPROP()
16406 CHECK_STATUS_A(getThis(this_ins));
16409 * It's safe to just use cx->fp->thisValue() here because getThis() returns
16410 * ARECORD_STOP or ARECORD_ERROR if thisv is not available.
16412 const Value &thisv = cx->fp()->thisValue();
16413 if (!thisv.isObject())
16414 RETURN_STOP_A("primitive this for GETTHISPROP");
16416 CHECK_STATUS_A(getProp(&thisv.toObject(), this_ins));
16417 return ARECORD_CONTINUE;
16420 JS_REQUIRES_STACK AbortableRecordingStatus
16421 TraceRecorder::record_JSOP_GETARGPROP()
16423 return getProp(argval(GET_ARGNO(cx->regs->pc)));
16426 JS_REQUIRES_STACK AbortableRecordingStatus
16427 TraceRecorder::record_JSOP_GETLOCALPROP()
16429 return getProp(varval(GET_SLOTNO(cx->regs->pc)));
16432 JS_REQUIRES_STACK AbortableRecordingStatus
16433 TraceRecorder::record_JSOP_INDEXBASE1()
16436 return ARECORD_CONTINUE;
16439 JS_REQUIRES_STACK AbortableRecordingStatus
16440 TraceRecorder::record_JSOP_INDEXBASE2()
16443 return ARECORD_CONTINUE;
16446 JS_REQUIRES_STACK AbortableRecordingStatus
16447 TraceRecorder::record_JSOP_INDEXBASE3()
16450 return ARECORD_CONTINUE;
16453 JS_REQUIRES_STACK AbortableRecordingStatus
16454 TraceRecorder::record_JSOP_CALLLOCAL()
16456 uintN slot = GET_SLOTNO(cx->regs->pc);
16457 stack(0, var(slot));
16458 stack(1, w.immiUndefined());
16459 return ARECORD_CONTINUE;
16462 JS_REQUIRES_STACK AbortableRecordingStatus
16463 TraceRecorder::record_JSOP_CALLARG()
16465 uintN slot = GET_ARGNO(cx->regs->pc);
16466 stack(0, arg(slot));
16467 stack(1, w.immiUndefined());
16468 return ARECORD_CONTINUE;
16471 JS_REQUIRES_STACK AbortableRecordingStatus
16472 TraceRecorder::record_JSOP_BINDGNAME()
16474 stack(0, w.immpObjGC(globalObj));
16475 return ARECORD_CONTINUE;
16478 JS_REQUIRES_STACK AbortableRecordingStatus
16479 TraceRecorder::record_JSOP_INT8()
16481 stack(0, w.immd(GET_INT8(cx->regs->pc)));
16482 return ARECORD_CONTINUE;
16485 JS_REQUIRES_STACK AbortableRecordingStatus
16486 TraceRecorder::record_JSOP_INT32()
16488 stack(0, w.immd(GET_INT32(cx->regs->pc)));
16489 return ARECORD_CONTINUE;
16492 JS_REQUIRES_STACK AbortableRecordingStatus
16493 TraceRecorder::record_JSOP_LENGTH()
16495 Value& l = stackval(-1);
16496 if (l.isPrimitive()) {
16498 RETURN_STOP_A("non-string primitive JSOP_LENGTH unsupported");
16499 set(&l, w.i2d(w.p2i(w.getStringLength(get(&l)))));
16500 return ARECORD_CONTINUE;
16503 JSObject* obj = &l.toObject();
16504 LIns* obj_ins = get(&l);
16506 if (obj->isArguments()) {
16508 JSStackFrame *afp = guardArguments(obj, obj_ins, &depth);
16510 RETURN_STOP_A("can't reach arguments object's frame");
16512 // We must both check at record time and guard at run time that
16513 // arguments.length has not been reassigned, redefined or deleted.
16514 if (obj->isArgsLengthOverridden())
16515 RETURN_STOP_A("can't trace JSOP_ARGCNT if arguments.length has been modified");
16516 LIns* slot_ins = guardArgsLengthNotAssigned(obj_ins);
16518 // slot_ins is the value from the slot; right-shift to get the length
16519 // (see JSObject::getArgsInitialLength in jsfun.cpp).
16520 LIns* v_ins = w.i2d(w.rshiN(slot_ins, JSObject::ARGS_PACKED_BITS_COUNT));
16522 return ARECORD_CONTINUE;
16526 if (obj->isArray()) {
16527 if (obj->isDenseArray()) {
16528 guardDenseArray(obj_ins, BRANCH_EXIT);
16530 JS_ASSERT(obj->isSlowArray());
16531 guardClass(obj_ins, &js_SlowArrayClass, snapshot(BRANCH_EXIT), LOAD_NORMAL);
16533 v_ins = w.lduiObjPrivate(obj_ins);
16534 if (obj->getArrayLength() <= JSVAL_INT_MAX) {
16535 guard(true, w.leui(v_ins, w.immi(JSVAL_INT_MAX)), BRANCH_EXIT);
16536 v_ins = w.i2d(v_ins);
16538 v_ins = w.ui2d(v_ins);
16540 } else if (OkToTraceTypedArrays && js_IsTypedArray(obj)) {
16541 // Ensure array is a typed array and is the same type as what was written
16542 guardClass(obj_ins, obj->getClass(), snapshot(BRANCH_EXIT), LOAD_NORMAL);
16543 v_ins = w.i2d(w.ldiConstTypedArrayLength(w.ldpObjPrivate(obj_ins)));
16545 if (!obj->isNative())
16546 RETURN_STOP_A("can't trace length property access on non-array, non-native object");
16547 return getProp(obj, obj_ins);
16550 return ARECORD_CONTINUE;
16553 JS_REQUIRES_STACK AbortableRecordingStatus
16554 TraceRecorder::record_JSOP_HOLE()
16556 stack(0, w.immpMagicWhy(JS_ARRAY_HOLE));
16557 return ARECORD_CONTINUE;
16560 AbortableRecordingStatus
16561 TraceRecorder::record_JSOP_TRACE()
16563 return ARECORD_CONTINUE;
16566 AbortableRecordingStatus
16567 TraceRecorder::record_JSOP_NOTRACE()
16569 return ARECORD_CONTINUE;
16573 js_Unbrand(JSContext *cx, JSObject *obj)
16575 return obj->unbrand(cx);
16578 JS_DEFINE_CALLINFO_2(extern, BOOL, js_Unbrand, CONTEXT, OBJECT, 0, ACCSET_STORE_ANY)
16580 JS_REQUIRES_STACK AbortableRecordingStatus
16581 TraceRecorder::record_JSOP_UNBRAND()
16583 LIns* args_ins[] = { stack(-1), cx_ins };
16584 LIns* call_ins = w.call(&js_Unbrand_ci, args_ins);
16585 guard(false, w.eqi0(call_ins), OOM_EXIT);
16586 return ARECORD_CONTINUE;
16589 JS_REQUIRES_STACK AbortableRecordingStatus
16590 TraceRecorder::record_JSOP_UNBRANDTHIS()
16592 /* In case of primitive this, do nothing. */
16593 JSStackFrame *fp = cx->fp();
16594 if (fp->fun()->inStrictMode() && !fp->thisValue().isObject())
16595 return ARECORD_CONTINUE;
16598 RecordingStatus status = getThis(this_ins);
16599 if (status != RECORD_CONTINUE)
16600 return InjectStatus(status);
16602 LIns* args_ins[] = { this_ins, cx_ins };
16603 LIns* call_ins = w.call(&js_Unbrand_ci, args_ins);
16604 guard(false, w.eqi0(call_ins), OOM_EXIT);
16605 return ARECORD_CONTINUE;
16608 JS_REQUIRES_STACK AbortableRecordingStatus
16609 TraceRecorder::record_JSOP_SHARPINIT()
16611 return ARECORD_STOP;
16614 JS_REQUIRES_STACK AbortableRecordingStatus
16615 TraceRecorder::record_JSOP_GETGLOBAL()
16617 uint32 slot = cx->fp()->script()->getGlobalSlot(GET_SLOTNO(cx->regs->pc));
16618 if (!lazilyImportGlobalSlot(slot))
16619 RETURN_STOP_A("lazy import of global slot failed");
16621 stack(0, get(&globalObj->getSlotRef(slot)));
16622 return ARECORD_CONTINUE;
16625 JS_REQUIRES_STACK AbortableRecordingStatus
16626 TraceRecorder::record_JSOP_CALLGLOBAL()
16628 uint32 slot = cx->fp()->script()->getGlobalSlot(GET_SLOTNO(cx->regs->pc));
16629 if (!lazilyImportGlobalSlot(slot))
16630 RETURN_STOP_A("lazy import of global slot failed");
16632 Value &v = globalObj->getSlotRef(slot);
16634 stack(1, w.immiUndefined());
16635 return ARECORD_CONTINUE;
16638 JS_REQUIRES_STACK AbortableRecordingStatus
16639 TraceRecorder::record_JSOP_GETGNAME()
16641 return record_JSOP_NAME();
16644 JS_REQUIRES_STACK AbortableRecordingStatus
16645 TraceRecorder::record_JSOP_SETGNAME()
16647 return record_JSOP_SETNAME();
16650 JS_REQUIRES_STACK AbortableRecordingStatus
16651 TraceRecorder::record_JSOP_GNAMEDEC()
16653 return record_JSOP_NAMEDEC();
16656 JS_REQUIRES_STACK AbortableRecordingStatus
16657 TraceRecorder::record_JSOP_GNAMEINC()
16659 return record_JSOP_NAMEINC();
16662 JS_REQUIRES_STACK AbortableRecordingStatus
16663 TraceRecorder::record_JSOP_DECGNAME()
16665 return record_JSOP_DECNAME();
16668 JS_REQUIRES_STACK AbortableRecordingStatus
16669 TraceRecorder::record_JSOP_INCGNAME()
16671 return record_JSOP_INCNAME();
16674 JS_REQUIRES_STACK AbortableRecordingStatus
16675 TraceRecorder::record_JSOP_CALLGNAME()
16677 return record_JSOP_CALLNAME();
16680 #define DBG_STUB(OP) \
16681 JS_REQUIRES_STACK AbortableRecordingStatus \
16682 TraceRecorder::record_##OP() \
16684 RETURN_STOP_A("can't trace " #OP); \
16687 DBG_STUB(JSOP_GETUPVAR_DBG)
16688 DBG_STUB(JSOP_CALLUPVAR_DBG)
16689 DBG_STUB(JSOP_DEFFUN_DBGFC)
16690 DBG_STUB(JSOP_DEFLOCALFUN_DBGFC)
16691 DBG_STUB(JSOP_LAMBDA_DBGFC)
16695 * Print information about entry typemaps and unstable exits for all peers
16699 DumpPeerStability(TraceMonitor* tm, const void* ip, JSObject* globalObj, uint32 globalShape,
16703 bool looped = false;
16704 unsigned length = 0;
16706 for (f = LookupLoop(tm, ip, globalObj, globalShape, argc); f != NULL; f = f->peer) {
16709 debug_only_printf(LC_TMRecorder, "Stability of fragment %p:\nENTRY STACK=", (void*)f);
16711 JS_ASSERT(f->nStackTypes == length);
16712 for (unsigned i = 0; i < f->nStackTypes; i++)
16713 debug_only_printf(LC_TMRecorder, "%c", TypeToChar(f->stackTypeMap()[i]));
16714 debug_only_print0(LC_TMRecorder, " GLOBALS=");
16715 for (unsigned i = 0; i < f->nGlobalTypes(); i++)
16716 debug_only_printf(LC_TMRecorder, "%c", TypeToChar(f->globalTypeMap()[i]));
16717 debug_only_print0(LC_TMRecorder, "\n");
16718 UnstableExit* uexit = f->unstableExits;
16719 while (uexit != NULL) {
16720 debug_only_print0(LC_TMRecorder, "EXIT ");
16721 JSValueType* m = uexit->exit->fullTypeMap();
16722 debug_only_print0(LC_TMRecorder, "STACK=");
16723 for (unsigned i = 0; i < uexit->exit->numStackSlots; i++)
16724 debug_only_printf(LC_TMRecorder, "%c", TypeToChar(m[i]));
16725 debug_only_print0(LC_TMRecorder, " GLOBALS=");
16726 for (unsigned i = 0; i < uexit->exit->numGlobalSlots; i++) {
16727 debug_only_printf(LC_TMRecorder, "%c",
16728 TypeToChar(m[uexit->exit->numStackSlots + i]));
16730 debug_only_print0(LC_TMRecorder, "\n");
16731 uexit = uexit->next;
16733 length = f->nStackTypes;
#ifdef MOZ_TRACEVIS

FILE* traceVisLogFile = NULL;
JSHashTable *traceVisScriptTable = NULL;

/* Begin writing TraceVis records to |filename|; returns false on fopen failure. */
JS_FRIEND_API(bool)
StartTraceVis(const char* filename = "tracevis.dat")
{
    if (traceVisLogFile) {
        // If we're currently recording, first we must stop.
        StopTraceVis();
    }

    traceVisLogFile = fopen(filename, "wb");
    if (!traceVisLogFile)
        return false;

    return true;
}

/* JS-callable wrapper: startTraceVis([filename]). */
JS_FRIEND_API(JSBool)
StartTraceVisNative(JSContext *cx, uintN argc, jsval *vp)
{
    JSBool ok;

    if (argc > 0 && JSVAL_IS_STRING(JS_ARGV(cx, vp)[0])) {
        JSString *str = JSVAL_TO_STRING(JS_ARGV(cx, vp)[0]);
        char *filename = js_DeflateString(cx, str->chars(), str->length());
        if (!filename)
            return false;
        ok = StartTraceVis(filename);
        cx->free(filename);
    } else {
        ok = StartTraceVis();
    }

    if (ok) {
        fprintf(stderr, "started TraceVis recording\n");
        JS_SET_RVAL(cx, vp, JSVAL_VOID);
        return true;
    }

    JS_ReportError(cx, "failed to start TraceVis recording");
    return false;
}

/* Stop recording; returns false if no recording was in progress. */
JS_FRIEND_API(bool)
StopTraceVis()
{
    if (!traceVisLogFile)
        return false;

    fclose(traceVisLogFile); // not worth checking the result
    traceVisLogFile = NULL;

    return true;
}

/* JS-callable wrapper: stopTraceVis(). */
JS_FRIEND_API(JSBool)
StopTraceVisNative(JSContext *cx, uintN argc, jsval *vp)
{
    JSBool ok = StopTraceVis();

    if (ok) {
        fprintf(stderr, "stopped TraceVis recording\n");
        JS_SET_RVAL(cx, vp, JSVAL_VOID);
    } else {
        JS_ReportError(cx, "TraceVis isn't running");
    }

    return ok;
}

#endif /* MOZ_TRACEVIS */
16815 JS_REQUIRES_STACK void
16816 TraceRecorder::captureStackTypes(unsigned callDepth, JSValueType* typeMap)
16818 CaptureTypesVisitor capVisitor(cx, traceMonitor->oracle, typeMap, !!oracle);
16819 VisitStackSlots(capVisitor, cx, callDepth);
16822 JS_REQUIRES_STACK void
16823 TraceRecorder::determineGlobalTypes(JSValueType* typeMap)
16825 DetermineTypesVisitor detVisitor(*this, typeMap);
16826 VisitGlobalSlots(detVisitor, cx, *tree->globalSlots);
16829 #ifdef JS_METHODJIT
16831 class AutoRetBlacklist
16837 AutoRetBlacklist(jsbytecode* pc, bool* blacklist)
16838 : pc(pc), blacklist(blacklist)
16841 ~AutoRetBlacklist()
16843 *blacklist = IsBlacklisted(pc);
/*
 * Handle a TRACE-point hit: either execute an already-compiled compatible
 * tree peer, try to extend/stabilize it on certain side exits, or start
 * recording a new tree once the loop is hot enough.
 *
 * Returns TPA_Nothing when nothing was run, TPA_RanStuff after executing or
 * recording. |*blacklist| is updated on exit via AutoRetBlacklist.
 * |execAllowed| == false means a compiled trace exists but must not be used
 * (the profiler decided against it), so the loop is blacklisted instead.
 *
 * NOTE(review): this excerpt elides many lines (error paths, else-branches,
 * braces); comments below describe only what the visible lines establish.
 */
16847 JS_REQUIRES_STACK TracePointAction
16848 RecordTracePoint(JSContext* cx, TraceMonitor* tm,
16849 uintN& inlineCallCount, bool* blacklist, bool execAllowed)
16851 JSStackFrame* fp = cx->fp();
16852 jsbytecode* pc = cx->regs->pc;
/* Must not already be recording or profiling when we get here. */
16854 JS_ASSERT(!tm->recorder);
16855 JS_ASSERT(!tm->profile);
16857 JSObject* globalObj = cx->fp()->scopeChain().getGlobal();
16858 uint32 globalShape = -1;
16859 SlotList* globalSlots = NULL;
/* Ensures *blacklist reflects IsBlacklisted(pc) on every return path. */
16861 AutoRetBlacklist autoRetBlacklist(pc, blacklist);
16863 if (!CheckGlobalObjectShape(cx, tm, globalObj, &globalShape, &globalSlots)) {
16865 return TPA_Nothing;
16868 uint32 argc = entryFrameArgc(cx);
16869 TreeFragment* tree = LookupOrAddLoop(tm, pc, globalObj, globalShape, argc);
16871 debug_only_printf(LC_TMTracer,
16872 "Looking for compat peer %d@%d, from %p (ip: %p)\n",
16873 js_FramePCToLineNumber(cx, cx->fp()),
16874 FramePCOffset(cx, cx->fp()), (void*)tree, tree->ip);
/* A compiled peer may exist: try to find a type-compatible one and run it. */
16876 if (tree->code() || tree->peer) {
16878 TreeFragment* match = FindVMCompatiblePeer(cx, globalObj, tree, count);
16880 VMSideExit* lr = NULL;
16881 VMSideExit* innermostNestedGuard = NULL;
16883 if (!execAllowed) {
16884 /* We've already compiled a trace for it, but we don't want to use that trace. */
16885 Blacklist((jsbytecode*)tree->root->ip);
16886 return TPA_Nothing;
16889 /* Best case - just go and execute. */
16890 if (!ExecuteTree(cx, tm, match, inlineCallCount, &innermostNestedGuard, &lr))
16894 return TPA_Nothing;
/* Dispatch on why the executed trace exited. */
16896 switch (lr->exitType) {
16897 case UNSTABLE_LOOP_EXIT:
16898 if (!AttemptToStabilizeTree(cx, tm, globalObj, lr, NULL, NULL, 0))
16899 return TPA_RanStuff;
16902 case MUL_ZERO_EXIT:
16903 case OVERFLOW_EXIT:
/* Teach the oracle so re-recording avoids the same speculation. */
16904 if (lr->exitType == MUL_ZERO_EXIT)
16905 tm->oracle->markInstructionSlowZeroTest(cx->regs->pc);
16907 tm->oracle->markInstructionUndemotable(cx->regs->pc);
16911 if (!AttemptToExtendTree(cx, tm, lr, NULL, NULL, NULL))
16912 return TPA_RanStuff;
16916 if (!innermostNestedGuard)
16917 return TPA_RanStuff;
16918 if (!AttemptToExtendTree(cx, tm, innermostNestedGuard, lr, NULL, NULL))
16919 return TPA_RanStuff;
16923 return TPA_RanStuff;
16926 JS_ASSERT(tm->recorder);
/* Too many incompatible peers for this PC: give up on tracing it. */
16931 if (count >= MAXPEERS) {
16932 debug_only_print0(LC_TMTracer, "Blacklisted: too many peer trees.\n");
16933 Blacklist((jsbytecode*)tree->root->ip);
16934 return TPA_Nothing;
/* Not hot enough yet, or scope chain unsuitable, or recording failed. */
16938 if (++tree->hits() < HOTLOOP)
16939 return TPA_Nothing;
16940 if (!ScopeChainCheck(cx, tree))
16941 return TPA_Nothing;
16942 if (!RecordTree(cx, tm, tree->first, NULL, NULL, 0, globalSlots))
16943 return TPA_Nothing;
16946 JS_ASSERT(tm->recorder);
16948 /* Locked and loaded with a recorder. Ask the interperter to go run some code. */
16949 if (!Interpret(cx, fp, inlineCallCount, JSINTERP_RECORD))
16952 JS_ASSERT(!cx->isExceptionPending());
16954 return TPA_RanStuff;
/*
 * Construct a profile for the loop spanning [top, bottom] entered at
 * |entryfp|. NOTE(review): most of the member-init list is elided in this
 * excerpt; presumably it also calls reset() or zero-initializes counters.
 */
16957 LoopProfile::LoopProfile(TraceMonitor *tm, JSStackFrame *entryfp,
16958 jsbytecode *top, jsbytecode *bottom)
16959 : traceMonitor(tm),
16960 entryScript(entryfp->script()),
16966 unprofitable(false)
/*
 * Reset all profiling counters so the loop can be profiled afresh.
 * NOTE(review): several resets are elided in this excerpt (e.g. numAllOps,
 * numSelfOps, hits — TODO confirm against the full file).
 */
16972 LoopProfile::reset()
16978 numSelfOpsMult = 0;
16979 branchMultiplier = 1;
16981 maybeShortLoop = false;
16983 loopStackDepth = 0;
16986 PodArrayZero(allOps);
16987 PodArrayZero(selfOps);
/*
 * Called on each loop edge while profiling. If we're back at the profiled
 * loop's head, profiling of this loop is complete; otherwise record (or bump
 * the iteration count of) the inner loop whose edge we just took.
 * Always returns MONITOR_NOT_RECORDING: the profiler never starts recording
 * from a loop edge.
 */
16991 LoopProfile::profileLoopEdge(JSContext* cx, uintN& inlineCallCount)
16993 if (cx->regs->pc == top) {
16994 debug_only_print0(LC_TMProfiler, "Profiling complete (edge)\n");
16997 /* Record an inner loop invocation. */
16998 JSStackFrame *fp = cx->fp();
16999 jsbytecode *pc = cx->regs->pc;
17000 bool found = false;
17002 /* We started with the most deeply nested one first, since it gets hit most often.*/
17003 for (int i = int(numInnerLoops)-1; i >= 0; i--) {
17004 if (innerLoops[i].entryfp == fp && innerLoops[i].top == pc) {
17005 innerLoops[i].iters++;
/* New inner loop: remember it if there is still room in the table. */
17011 if (!found && numInnerLoops < PROFILE_MAX_INNER_LOOPS)
17012 innerLoops[numInnerLoops++] = InnerLoop(fp, pc, NULL);
17015 return MONITOR_NOT_RECORDING;
/* Hit threshold before a loop is profiled, and the cap on profiled ops. */
17019 static const uintN PROFILE_HOTLOOP = 61;
17020 static const uintN MAX_PROFILE_OPS = 4096;
/* Convenience overload: loop bottom for the current PC. */
17022 static jsbytecode *
17023 GetLoopBottom(JSContext *cx)
17025 return GetLoopBottom(cx, cx->regs->pc);
/*
 * Return the LoopProfile for the loop at the current PC, creating one in the
 * trace monitor's dataAlloc if needed. The fast path caches the profile in
 * the TRACE IC (|*traceData|/|*traceEpoch|); the slow path consults
 * tm->loopProfiles keyed by PC.
 */
17028 static LoopProfile *
17029 LookupOrAddProfile(JSContext *cx, TraceMonitor *tm, void** traceData, uintN *traceEpoch)
17034 * We try to keep a pointer to the loop profile inside the TRACE IC.
17035 * We also keep a pointer inside a hashtable for when we need to
17036 * look up nested loops (or when ICs are disabled).
17038 * Memory for the profile is allocated in the dataAlloc for the
17039 * trace monitor. Since this thing can get flushed periodically,
17040 * we use epochs to decide if the profile in the MIC is valid, as
17041 * follows. Every time the trace monitor is flushed,
17042 * |tm->flushEpoch| is incremented. When storing the profile in
17043 * the IC, we store the current |tm->flushEpoch| along with it.
17044 * Before pulling a profile out of the IC, we check that its
17045 * stored epoch is still up-to-date with |tm->flushEpoch|.
17046 * This ensures that no flush has happened in between.
/* Fast path: IC holds a profile and no flush invalidated it. */
17050 if (*traceData && *traceEpoch == tm->flushEpoch) {
17051 prof = (LoopProfile *)*traceData;
17053 jsbytecode* pc = cx->regs->pc;
17054 jsbytecode* bottom = GetLoopBottom(cx);
/* Placement-new into the monitor's arena; lifetime ends at next flush. */
17057 prof = new (*tm->dataAlloc) LoopProfile(tm, cx->fp(), pc, bottom);
17059 *traceEpoch = tm->flushEpoch;
17060 tm->loopProfiles->put(pc, prof);
/* No-IC path: look up (or add) the profile in the hashtable directly. */
17063 LoopProfileMap &table = *tm->loopProfiles;
17064 jsbytecode* pc = cx->regs->pc;
17065 if (LoopProfileMap::AddPtr p = table.lookupForAdd(pc)) {
17068 jsbytecode* bottom = GetLoopBottom(cx);
17071 prof = new (*tm->dataAlloc) LoopProfile(tm, cx->fp(), pc, bottom);
17072 table.add(p, pc, prof);
/*
 * Look up the profile for the loop headed at |pc|, or NULL if none exists.
 * Read-only counterpart of LookupOrAddProfile.
 */
17079 static LoopProfile *
17080 LookupLoopProfile(TraceMonitor *tm, jsbytecode *pc)
17082 LoopProfileMap &table = *tm->loopProfiles;
17083 if (LoopProfileMap::Ptr p = table.lookup(pc)) {
17084 JS_ASSERT(p->value->top == pc);
/*
 * Detach this profile from the thread and trace monitor, ending profiling.
 * May only be called when no compartment is being recorded.
 */
17091 LoopProfile::stopProfiling(JSContext *cx)
17093 JS_ASSERT(JS_THREAD_DATA(cx)->recordingCompartment == NULL);
17094 JS_THREAD_DATA(cx)->profilingCompartment = NULL;
17096 traceMonitor->profile = NULL;
/*
 * Method-JIT entry point for a TRACE point. If profiling is disabled, defer
 * straight to RecordTracePoint. Otherwise: look up/create the loop's
 * profile, wait until it is hot (PROFILE_HOTLOOP), and either act on an
 * already-made profiling decision or run the interpreter in
 * JSINTERP_PROFILE mode to gather one.
 *
 * NOTE(review): several lines are elided in this excerpt (error returns,
 * else-branches, braces).
 */
17099 JS_REQUIRES_STACK TracePointAction
17100 MonitorTracePoint(JSContext *cx, uintN& inlineCallCount, bool* blacklist,
17101 void** traceData, uintN *traceEpoch, uint32 *loopCounter, uint32 hits)
17103 TraceMonitor *tm = JS_TRACE_MONITOR_FROM_CONTEXT(cx);
17105 if (!cx->profilingEnabled)
17106 return RecordTracePoint(cx, tm, inlineCallCount, blacklist, true);
17108 *blacklist = false;
17111 * This is the only place where we check for re-entering the profiler.
17112 * The assumption is that MonitorTracePoint is the only place where we
17113 * start profiling. When we do so, we enter an interpreter frame with
17114 * JSINTERP_PROFILE mode. All other entry points to the profiler check
17115 * that the interpreter mode is JSINTERP_PROFILE. If it isn't, they
17118 if (TRACE_PROFILER(cx))
17119 return TPA_Nothing;
17121 jsbytecode* pc = cx->regs->pc;
17122 LoopProfile *prof = LookupOrAddProfile(cx, tm, traceData, traceEpoch);
17125 return TPA_Nothing;
/* Accumulate hits; do nothing until the loop is hot enough to profile. */
17128 prof->hits += hits;
17129 if (prof->hits < PROFILE_HOTLOOP)
17130 return TPA_Nothing;
17132 AutoRetBlacklist autoRetBlacklist(cx->regs->pc, blacklist);
/* Already profiled: honor the earlier trace/no-trace decision. */
17134 if (prof->profiled) {
17135 if (prof->traceOK) {
17136 return RecordTracePoint(cx, tm, inlineCallCount, blacklist, prof->execOK);
17138 return TPA_Nothing;
17142 debug_only_printf(LC_TMProfiler, "Profiling at line %d\n",
17143 js_FramePCToLineNumber(cx, cx->fp()));
/* Mark this thread/compartment as profiling, then run the interpreter. */
17145 tm->profile = prof;
17147 JS_ASSERT(JS_THREAD_DATA(cx)->profilingCompartment == NULL);
17148 JS_ASSERT(JS_THREAD_DATA(cx)->recordingCompartment == NULL);
17149 JS_THREAD_DATA(cx)->profilingCompartment = cx->compartment;
17151 if (!Interpret(cx, cx->fp(), inlineCallCount, JSINTERP_PROFILE))
17154 JS_ASSERT(!cx->isExceptionPending());
17156 /* Look it up again since a reset may have happened during Interpret. */
17157 prof = LookupLoopProfile(tm, pc);
17158 if (prof && prof->undecided) {
/* Defer the next profiling attempt for a while (counter threshold). */
17159 *loopCounter = 3000;
17163 return TPA_RanStuff;
/*
 * Returns true if pc is within the given loop.
 * If we're in a different script, then we must have come from
 * a call instruction within the loop (since we check if we're within
 * the loop before each instruction) so we're still in the loop.
 *
 * Note: fp > loop.entryfp relies on stack frames growing upward in memory
 * (deeper frames have higher addresses) — a deeper frame means we are
 * inside a call made from within the loop.
 */
17174 PCWithinLoop(JSStackFrame *fp, jsbytecode *pc, T& loop)
17176 return fp > loop.entryfp || (fp == loop.entryfp && pc >= loop.top && pc <= loop.bottom);
/*
 * Per-bytecode profiling hook. Classifies the opcode about to execute into
 * feature counters (float/int/bit math, calls, eval, array accesses, …),
 * tracks entry/exit of inner loops, estimates branchiness via
 * |branchMultiplier|, and maintains a small constant-tracking stack used to
 * spot short, constant-bounded loops. Returns ProfComplete when profiling
 * of this loop should stop (loop exited, op budget exceeded, nesting cap
 * hit), else ProfContinue.
 *
 * NOTE(review): this excerpt elides many lines (closing braces, several
 * else-branches and stack-model cases); comments describe only visible code.
 */
17179 LoopProfile::ProfileAction
17180 LoopProfile::profileOperation(JSContext* cx, JSOp op)
17182 TraceMonitor* tm = JS_TRACE_MONITOR_FROM_CONTEXT(cx);
17184 JS_ASSERT(tm == traceMonitor);
17185 JS_ASSERT(&entryScript->compartment->traceMonitor == tm);
17189 return ProfComplete;
17192 jsbytecode *pc = cx->regs->pc;
17193 JSStackFrame *fp = cx->fp();
17194 JSScript *script = fp->script();
/* Left the profiled loop entirely: finalize the decision. */
17196 if (!PCWithinLoop(fp, pc, *this)) {
17197 debug_only_printf(LC_TMProfiler, "Profiling complete (loop exit) at line %u\n",
17198 js_FramePCToLineNumber(cx, cx->fp()));
17199 tm->profile->decide(cx);
17201 return ProfComplete;
/* Pop inner loops we are no longer inside of. */
17204 while (loopStackDepth > 0 && !PCWithinLoop(fp, pc, loopStack[loopStackDepth-1])) {
17205 debug_only_print0(LC_TMProfiler, "Profiler: Exiting inner loop\n");
/* A TRACE/NOTRACE op that is not our own head marks an inner loop entry. */
17209 if (op == JSOP_TRACE || op == JSOP_NOTRACE) {
17210 if (pc != top && (loopStackDepth == 0 || pc != loopStack[loopStackDepth-1].top)) {
17211 if (loopStackDepth == PROFILE_MAX_INNER_LOOPS) {
17212 debug_only_print0(LC_TMProfiler, "Profiling complete (maxnest)\n");
17213 tm->profile->decide(cx);
17215 return ProfComplete;
17218 debug_only_printf(LC_TMProfiler, "Profiler: Entering inner loop at line %d\n",
17219 js_FramePCToLineNumber(cx, cx->fp()));
17220 loopStack[loopStackDepth++] = InnerLoop(fp, pc, GetLoopBottom(cx));
/* Ops executed directly in the profiled loop count as "self" ops. */
17225 if (loopStackDepth == 0) {
17227 numSelfOpsMult += branchMultiplier;
/* Classify arithmetic by operand types (float beats int if either side). */
17230 if (op == JSOP_ADD || op == JSOP_SUB || op == JSOP_MUL || op == JSOP_DIV) {
17231 Value& v1 = cx->regs->sp[-1];
17232 Value& v2 = cx->regs->sp[-2];
17234 /* If either operand is a double, treat it as a floating-point op. */
17235 if (v1.isDouble() || v2.isDouble())
17236 increment(OP_FLOAT);
17237 else if (v1.isInt32() || v2.isInt32())
17241 if (op == JSOP_EQ || op == JSOP_NE)
17244 if (op == JSOP_BITOR || op == JSOP_BITXOR || op == JSOP_BITAND
17245 || op == JSOP_LSH || op == JSOP_RSH || op == JSOP_URSH || op == JSOP_BITNOT)
17250 if (op == JSOP_EVAL)
17251 increment(OP_EVAL);
17253 if (op == JSOP_NEW)
/* Element accesses: distinguish typed arrays and dense-array reads. */
17256 if (op == JSOP_GETELEM || op == JSOP_SETELEM) {
17257 Value& lval = cx->regs->sp[op == JSOP_GETELEM ? -2 : -3];
17258 if (lval.isObject() && js_IsTypedArray(&lval.toObject()))
17259 increment(OP_TYPED_ARRAY);
17260 else if (lval.isObject() && lval.toObject().isDenseArray() && op == JSOP_GETELEM)
17261 increment(OP_ARRAY_READ);
17264 if (op == JSOP_CALL) {
17265 increment(OP_CALL);
17267 uintN argc = GET_ARGC(cx->regs->pc);
17268 Value &v = cx->regs->sp[-((int)argc + 2)];
/* Self-recursive interpreted calls and math natives get special weights. */
17270 if (IsFunctionObject(v, &callee)) {
17271 JSFunction *fun = callee->getFunctionPrivate();
17272 if (fun->isInterpreted()) {
17273 if (cx->fp()->isFunctionFrame() && fun == cx->fp()->fun())
17274 increment(OP_RECURSIVE);
17276 js::Native native = fun->u.n.native;
17277 if (js_IsMathFunction(JS_JSVALIFY_NATIVE(native)))
17278 increment(OP_FLOAT);
/* Polymorphic call sites and switches multiply the branchiness estimate. */
17283 if (op == JSOP_CALLPROP && loopStackDepth == 0)
17284 branchMultiplier *= mjit::GetCallTargetCount(script, pc);
17286 if (op == JSOP_TABLESWITCH) {
17287 jsint low = GET_JUMP_OFFSET(pc + JUMP_OFFSET_LEN);
17288 jsint high = GET_JUMP_OFFSET(pc + JUMP_OFFSET_LEN*2);
17289 branchMultiplier *= high - low + 1;
17292 if (op == JSOP_LOOKUPSWITCH)
17293 branchMultiplier *= GET_UINT16(pc + JUMP_OFFSET_LEN);
17295 if (numAllOps >= MAX_PROFILE_OPS) {
17296 debug_only_print0(LC_TMProfiler, "Profiling complete (maxops)\n");
17297 tm->profile->decide(cx);
17299 return ProfComplete;
17302 /* These are the places where the interpreter skips over branches. */
17303 jsbytecode *testPC = cx->regs->pc;
17304 if (op == JSOP_EQ || op == JSOP_NE || op == JSOP_LT || op == JSOP_GT
17305 || op == JSOP_LE || op == JSOP_GE || op == JSOP_IN || op == JSOP_MOREITER)
17307 const JSCodeSpec *cs = &js_CodeSpec[op];
17308 ptrdiff_t oplen = cs->length;
17309 JS_ASSERT(oplen != -1);
/* If the next op is a conditional jump fused with this compare, test it. */
17311 if (cx->regs->pc - script->code + oplen < ptrdiff_t(script->length))
17312 if (cx->regs->pc[oplen] == JSOP_IFEQ || cx->regs->pc[oplen] == JSOP_IFNE)
17313 testPC = cx->regs->pc + oplen;
17316 /* Check if we're exiting the loop being profiled. */
17317 JSOp testOp = js_GetOpcode(cx, script, testPC);
17318 if (testOp == JSOP_IFEQ || testOp == JSOP_IFNE || testOp == JSOP_GOTO
17319 || testOp == JSOP_AND || testOp == JSOP_OR)
17321 ptrdiff_t len = GET_JUMP_OFFSET(testPC);
/* A back-edge guarded by < / <= against a small bound suggests a short loop. */
17322 if (testPC + len == top && (op == JSOP_LT || op == JSOP_LE)) {
17323 StackValue v = stackAt(-1);
17324 if (v.hasValue && v.value < 8)
17328 if (testPC + len == top && (op == JSOP_LT || op == JSOP_LE)
17329 && cx->regs->sp[-2].isInt32() && cx->regs->sp[-2].toInt32() < 16)
17331 maybeShortLoop = true;
/* Forward conditional jumps on non-constant data double the branch estimate. */
17334 if (testOp != JSOP_GOTO && len > 0) {
17336 if (testOp == JSOP_IFEQ || testOp == JSOP_IFNE)
17337 isConst = stackAt(-1).isConst && stackAt(-2).isConst;
17339 isConst = stackAt(-1).isConst;
17341 increment(OP_FWDJUMP);
17342 if (loopStackDepth == 0 && !isConst)
17343 branchMultiplier *= 2;
/* Model of constant-ness of the operand stack for the checks above. */
17347 if (op == JSOP_INT8) {
17348 stackPush(StackValue(true, GET_INT8(cx->regs->pc)));
17349 } else if (op == JSOP_STRING) {
17350 stackPush(StackValue(true));
17351 } else if (op == JSOP_TYPEOF || op == JSOP_TYPEOFEXPR) {
17352 stackPush(StackValue(true));
17353 } else if (op == JSOP_EQ || op == JSOP_NE) {
17354 StackValue v1 = stackAt(-1);
17355 StackValue v2 = stackAt(-2);
17356 stackPush(StackValue(v1.isConst && v2.isConst));
17357 } else if (op == JSOP_AND) {
17358 bool b = !!js_ValueToBoolean(cx->regs->sp[-1]);
17359 StackValue v = stackAt(-1);
17366 return ProfContinue;
/*
 * Returns true if the loop would probably take a long time to
 * compile: too many ops, excessive branchiness, or (recursively, up to
 * |depth| levels) an inner loop that is itself expensive. An inner loop
 * with no profile is conservatively treated as expensive.
 */
17374 LoopProfile::isCompilationExpensive(JSContext *cx, uintN depth)
17382 /* Too many ops to compile? */
17383 if (numSelfOps == MAX_PROFILE_OPS)
17386 /* Is the code too branchy? */
17387 if (numSelfOpsMult > numSelfOps*100000)
17390 /* Ensure that inner loops aren't too expensive. */
17391 for (uintN i=0; i<numInnerLoops; i++) {
17392 LoopProfile *prof = LookupLoopProfile(traceMonitor, innerLoops[i].top);
17393 if (!prof || prof->isCompilationExpensive(cx, depth-1))
/*
 * This function recognizes loops that are short and that contain
 * jumps. The tracer does badly with these loops because it
 * needs to do a lot of side exits, which are somewhat
 * expensive. Also treats an inner loop with no profile, or one already
 * judged unprofitable, as making this loop unprofitable.
 */
17407 LoopProfile::isCompilationUnprofitable(JSContext *cx, uintN goodOps)
/* Few "good" ops plus forward jumps => likely side-exit heavy trace. */
17412 if (goodOps <= 22 && allOps[OP_FWDJUMP])
17415 /* Ensure that inner loops aren't fleeting. */
17416 for (uintN i=0; i<numInnerLoops; i++) {
17417 LoopProfile *prof = LookupLoopProfile(traceMonitor, innerLoops[i].top);
17418 if (!prof || prof->unprofitable)
17425 /* After profiling is done, this method decides whether to trace the loop. */
17427 LoopProfile::decide(JSContext *cx)
17429 bool wasUndecided = undecided;
17430 bool wasTraceOK = traceOK;
17437 uintN line = js_PCToLineNumber(cx, entryScript, top);
17439 debug_only_printf(LC_TMProfiler, "LOOP %s:%d\n", entryScript->filename, line);
/* Debug dump: nested loops and all collected feature counters. */
17441 for (uintN i=0; i<numInnerLoops; i++) {
17442 InnerLoop &loop = innerLoops[i];
17443 if (LoopProfile *prof = LookupLoopProfile(traceMonitor, loop.top)) {
17444 uintN line = js_PCToLineNumber(cx, prof->entryScript, prof->top);
17445 debug_only_printf(LC_TMProfiler, "NESTED %s:%d (%d iters)\n",
17446 prof->entryScript->filename, line, loop.iters);
17449 debug_only_printf(LC_TMProfiler, "FEATURE float %d\n", allOps[OP_FLOAT]);
17450 debug_only_printf(LC_TMProfiler, "FEATURE int %d\n", allOps[OP_INT]);
17451 debug_only_printf(LC_TMProfiler, "FEATURE bit %d\n", allOps[OP_BIT]);
17452 debug_only_printf(LC_TMProfiler, "FEATURE equality %d\n", allOps[OP_EQ]);
17453 debug_only_printf(LC_TMProfiler, "FEATURE eval %d\n", allOps[OP_EVAL]);
17454 debug_only_printf(LC_TMProfiler, "FEATURE new %d\n", allOps[OP_NEW]);
17455 debug_only_printf(LC_TMProfiler, "FEATURE call %d\n", allOps[OP_CALL]);
17456 debug_only_printf(LC_TMProfiler, "FEATURE arrayread %d\n", allOps[OP_ARRAY_READ]);
17457 debug_only_printf(LC_TMProfiler, "FEATURE typedarray %d\n", allOps[OP_TYPED_ARRAY]);
17458 debug_only_printf(LC_TMProfiler, "FEATURE fwdjump %d\n", allOps[OP_FWDJUMP]);
17459 debug_only_printf(LC_TMProfiler, "FEATURE recursive %d\n", allOps[OP_RECURSIVE]);
17460 debug_only_printf(LC_TMProfiler, "FEATURE shortLoop %d\n", shortLoop);
17461 debug_only_printf(LC_TMProfiler, "FEATURE maybeShortLoop %d\n", maybeShortLoop);
17462 debug_only_printf(LC_TMProfiler, "FEATURE numAllOps %d\n", numAllOps);
17463 debug_only_printf(LC_TMProfiler, "FEATURE selfOps %d\n", numSelfOps);
17464 debug_only_printf(LC_TMProfiler, "FEATURE selfOpsMult %g\n", numSelfOpsMult);
/* Hard disqualifiers: recursion, eval, deep nesting, shortness, cost. */
17467 if (count(OP_RECURSIVE)) {
17468 debug_only_print0(LC_TMProfiler, "NOTRACE: recursive\n");
17469 } else if (count(OP_EVAL)) {
17470 debug_only_print0(LC_TMProfiler, "NOTRACE: eval\n");
17471 } else if (numInnerLoops > 7) {
/* NOTE(review): message says ">3" but the condition above tests > 7 —
 * debug-message/condition mismatch; confirm intended threshold. */
17472 debug_only_print0(LC_TMProfiler, "NOTRACE: >3 inner loops\n");
17473 } else if (shortLoop) {
17474 debug_only_print0(LC_TMProfiler, "NOTRACE: short\n");
17475 } else if (isCompilationExpensive(cx, 4)) {
17476 debug_only_print0(LC_TMProfiler, "NOTRACE: expensive\n");
17477 } else if (maybeShortLoop && numInnerLoops < 2) {
17478 if (wasUndecided) {
17479 debug_only_print0(LC_TMProfiler, "NOTRACE: maybe short\n");
17481 debug_only_print0(LC_TMProfiler, "UNDECIDED: maybe short\n");
17482 undecided = true; /* Profile the loop again to see if it's still short. */
/* Otherwise, weigh features the tracer is good at against total op count. */
17487 /* The tracer handles these ops well because of type specialization. */
17488 goodOps += count(OP_FLOAT)*10 + count(OP_BIT)*11 + count(OP_INT)*5 + count(OP_EQ)*15;
17490 /* The tracer handles these ops well because of inlining. */
17491 goodOps += (count(OP_CALL) + count(OP_NEW))*20;
17493 /* The tracer specialized typed array access. */
17494 goodOps += count(OP_TYPED_ARRAY)*10;
17496 /* The methodjit is faster at array writes, but the tracer is faster for reads. */
17497 goodOps += count(OP_ARRAY_READ)*15;
17499 debug_only_printf(LC_TMProfiler, "FEATURE goodOps %u\n", goodOps);
17501 unprofitable = isCompilationUnprofitable(cx, goodOps);
17503 debug_only_print0(LC_TMProfiler, "NOTRACE: unprofitable\n");
17504 else if (goodOps >= numAllOps)
17508 debug_only_printf(LC_TMProfiler, "TRACE %s:%d = %d\n", entryScript->filename, line, traceOK);
17511 /* Unblacklist the inner loops. */
17512 for (uintN i=0; i<numInnerLoops; i++) {
17513 InnerLoop &loop = innerLoops[i];
17514 LoopProfile *prof = LookupLoopProfile(traceMonitor, loop.top);
17517 * Note that execOK for the inner loop is left unchanged. So even
17518 * if we trace the inner loop, we will never call that trace
17519 * on its own. We'll only call it from this trace.
17521 prof->traceOK = true;
17522 if (IsBlacklisted(loop.top)) {
17523 debug_only_printf(LC_TMProfiler, "Unblacklisting at %d\n",
17524 js_PCToLineNumber(cx, prof->entryScript, loop.top));
17525 Unblacklist(prof->entryScript, loop.top);
/* A loop once deemed traceable stays traceable across re-decisions. */
17532 traceOK = wasTraceOK || traceOK;
17534 if (!traceOK && !undecided) {
17535 debug_only_printf(LC_TMProfiler, "Blacklisting at %d\n", line);
17539 debug_only_print0(LC_TMProfiler, "\n");
/*
 * Loop-edge monitor (methodjit builds): route the edge to the active loop
 * profiler when in JSINTERP_PROFILE mode, otherwise to the trace recorder.
 */
17542 JS_REQUIRES_STACK MonitorResult
17543 MonitorLoopEdge(JSContext* cx, uintN& inlineCallCount, JSInterpMode interpMode)
17545 TraceMonitor *tm = JS_TRACE_MONITOR_FROM_CONTEXT(cx);
17546 if (interpMode == JSINTERP_PROFILE && tm->profile)
17547 return tm->profile->profileLoopEdge(cx, inlineCallCount);
17549 return RecordLoopEdge(cx, tm, inlineCallCount);
/*
 * Abort the in-progress profile: mark the loop as profiled but neither
 * traceable nor executable, then detach the profiler from the thread.
 */
17553 AbortProfiling(JSContext *cx)
17555 JS_ASSERT(TRACE_PROFILER(cx));
17556 LoopProfile *prof = TRACE_PROFILER(cx);
17558 debug_only_print0(LC_TMProfiler, "Profiling complete (aborted)\n");
17559 prof->profiled = true;
17560 prof->traceOK = false;
17561 prof->execOK = false;
17562 prof->stopProfiling(cx);
17565 #else /* JS_METHODJIT */
/* Non-methodjit builds have no profiler: always go to the trace recorder. */
17567 JS_REQUIRES_STACK MonitorResult
17568 MonitorLoopEdge(JSContext* cx, uintN& inlineCallCount, JSInterpMode interpMode)
17570 TraceMonitor *tm = JS_TRACE_MONITOR_FROM_CONTEXT(cx);
17571 return RecordLoopEdge(cx, tm, inlineCallCount);
17574 #endif /* JS_METHODJIT */
/*
 * Hotness threshold for the current configuration: the profiler threshold
 * when profiling is enabled. NOTE(review): the non-profiling return path is
 * elided in this excerpt (presumably HOTLOOP — confirm in full file).
 */
17577 GetHotloop(JSContext *cx)
17579 #ifdef JS_METHODJIT
17580 if (cx->profilingEnabled)
17581 return PROFILE_HOTLOOP;
17587 } /* namespace js */