1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
2 * vim: set ts=8 sw=4 et tw=99:
4 * ***** BEGIN LICENSE BLOCK *****
5 * Version: MPL 1.1/GPL 2.0/LGPL 2.1
7 * The contents of this file are subject to the Mozilla Public License Version
8 * 1.1 (the "License"); you may not use this file except in compliance with
9 * the License. You may obtain a copy of the License at
10 * http://www.mozilla.org/MPL/
12 * Software distributed under the License is distributed on an "AS IS" basis,
13 * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
14 * for the specific language governing rights and limitations under the
15 * License.
17 * The Original Code is Mozilla Communicator client code, released
18 * March 31, 1998.
20 * The Initial Developer of the Original Code is
21 * Netscape Communications Corporation.
22 * Portions created by the Initial Developer are Copyright (C) 1998
23 * the Initial Developer. All Rights Reserved.
27 * Alternatively, the contents of this file may be used under the terms of
28 * either of the GNU General Public License Version 2 or later (the "GPL"),
29 * or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
30 * in which case the provisions of the GPL or the LGPL are applicable instead
31 * of those above. If you wish to allow use of your version of this file only
32 * under the terms of either the GPL or the LGPL, and not to allow others to
33 * use your version of this file under the terms of the MPL, indicate your
34 * decision by deleting the provisions above and replace them with the notice
35 * and other provisions required by the GPL or the LGPL. If you do not delete
36 * the provisions above, a recipient may use your version of this file under
37 * the terms of any one of the MPL, the GPL or the LGPL.
39 * ***** END LICENSE BLOCK ***** */
42 * JS bytecode generation.
59 #include "jsversion.h"
69 #include "jsautooplen.h" // generated headers last
70 #include "jsstaticcheck.h"
72 #include "jsatominlines.h"
73 #include "jsobjinlines.h"
74 #include "jsscopeinlines.h"
75 #include "jsscriptinlines.h"
77 /* Allocation chunk counts, must be powers of two in general. */
78 #define BYTECODE_CHUNK 256 /* code allocation increment */
79 #define SRCNOTE_CHUNK 64 /* initial srcnote allocation increment */
80 #define TRYNOTE_CHUNK 64 /* trynote allocation increment */
82 /* Macros to compute byte sizes from typed element counts. */
83 #define BYTECODE_SIZE(n) ((n) * sizeof(jsbytecode))
84 #define SRCNOTE_SIZE(n) ((n) * sizeof(jssrcnote))
85 #define TRYNOTE_SIZE(n) ((n) * sizeof(JSTryNote))
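/*
 * Worked illustration of the growth math these constants feed (a sketch, not
 * part of the original flow): EmitCheck below keeps requests of at most one
 * chunk at BYTECODE_CHUNK and rounds anything larger up to the next power of
 * two, so, assuming sizeof(jsbytecode) == 1, a vector that must hold 300
 * bytecodes is sized as
 *
 *     length = JS_BIT(JS_CeilingLog2(300));   // 512 bytecodes
 *     incr   = BYTECODE_SIZE(length);         // 512 bytes requested from the pool
 */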
88 using namespace js::gc;
91 NewTryNote(JSContext *cx, JSCodeGenerator *cg, JSTryNoteKind kind,
92 uintN stackDepth, size_t start, size_t end);
95 EmitIndexOp(JSContext *cx, JSOp op, uintN index, JSCodeGenerator *cg);
98 EmitLeaveBlock(JSContext *cx, JSCodeGenerator *cg, JSOp op, JSObjectBox *box);
101 JSTreeContext::trace(JSTracer *trc)
106 JSCodeGenerator::JSCodeGenerator(Parser *parser,
107 JSArenaPool *cpool, JSArenaPool *npool,
109 : JSTreeContext(parser),
110 codePool(cpool), notePool(npool),
111 codeMark(JS_ARENA_MARK(cpool)), noteMark(JS_ARENA_MARK(npool)),
112 stackDepth(0), maxStackDepth(0),
113 ntrynotes(0), lastTryNode(NULL),
114 spanDeps(NULL), jumpTargets(NULL), jtFreeList(NULL),
115 numSpanDeps(0), numJumpTargets(0), spanDepTodo(0),
118 constMap(parser->context),
119 constList(parser->context),
120 globalUses(ContextAllocPolicy(parser->context)),
121 closedArgs(ContextAllocPolicy(parser->context)),
122 closedVars(ContextAllocPolicy(parser->context)),
125 flags = TCF_COMPILING;
126 memset(&prolog, 0, sizeof prolog);
127 memset(&main, 0, sizeof main);
129 firstLine = prolog.currentLine = main.currentLine = lineno;
130 prolog.noteMask = main.noteMask = SRCNOTE_CHUNK - 1;
131 memset(&upvarMap, 0, sizeof upvarMap);
134 bool JSCodeGenerator::init()
136 return constMap.init();
139 JSCodeGenerator::~JSCodeGenerator()
141 JS_ARENA_RELEASE(codePool, codeMark);
142 JS_ARENA_RELEASE(notePool, noteMark);
144 /* NB: non-null only after OOM. */
146 parser->context->free(spanDeps);
149 parser->context->free(upvarMap.vector);
153 EmitCheck(JSContext *cx, JSCodeGenerator *cg, JSOp op, ptrdiff_t delta)
155 jsbytecode *base, *limit, *next;
156 ptrdiff_t offset, length;
161 limit = CG_LIMIT(cg);
162 offset = next - base;
163 if (next + delta > limit) {
164 length = offset + delta;
165 length = (length <= BYTECODE_CHUNK)
166 ? BYTECODE_CHUNK
167 : JS_BIT(JS_CeilingLog2(length));
168 incr = BYTECODE_SIZE(length);
170 JS_ARENA_ALLOCATE_CAST(base, jsbytecode *, cg->codePool, incr);
172 size = BYTECODE_SIZE(limit - base);
174 JS_ARENA_GROW_CAST(base, jsbytecode *, cg->codePool, size, incr);
177 js_ReportOutOfScriptQuota(cx);
181 CG_LIMIT(cg) = base + length;
182 CG_NEXT(cg) = base + offset;
188 UpdateDepth(JSContext *cx, JSCodeGenerator *cg, ptrdiff_t target)
192 const JSCodeSpec *cs;
193 uintN extra, depth, nuses;
196 pc = CG_CODE(cg, target);
198 cs = &js_CodeSpec[op];
200 extern uint8 js_opcode2extra[];
201 extra = js_opcode2extra[op];
205 if ((cs->format & JOF_TMPSLOT_MASK) || extra) {
206 depth = (uintN) cg->stackDepth +
207 ((cs->format & JOF_TMPSLOT_MASK) >> JOF_TMPSLOT_SHIFT) +
208 extra;
209 /* :TODO: hack - remove later. */
228 if (depth > cg->maxStackDepth)
229 cg->maxStackDepth = depth;
232 nuses = js_GetStackUses(cs, op, pc);
233 cg->stackDepth -= nuses;
234 JS_ASSERT(cg->stackDepth >= 0);
235 if (cg->stackDepth < 0) {
239 JS_snprintf(numBuf, sizeof numBuf, "%d", target);
240 ts = &cg->parser->tokenStream;
241 JS_ReportErrorFlagsAndNumber(cx, JSREPORT_WARNING,
242 js_GetErrorMessage, NULL,
243 JSMSG_STACK_UNDERFLOW,
244 ts->getFilename() ? ts->getFilename() : "stdin",
251 /* We just executed IndexParsedObject */
252 JS_ASSERT(op == JSOP_ENTERBLOCK);
253 JS_ASSERT(nuses == 0);
254 blockObj = cg->objectList.lastbox->object;
255 JS_ASSERT(blockObj->isStaticBlock());
256 JS_ASSERT(blockObj->getSlot(JSSLOT_BLOCK_DEPTH).isUndefined());
258 OBJ_SET_BLOCK_DEPTH(cx, blockObj, cg->stackDepth);
259 ndefs = OBJ_BLOCK_COUNT(cx, blockObj);
261 cg->stackDepth += ndefs;
262 if ((uintN)cg->stackDepth > cg->maxStackDepth)
263 cg->maxStackDepth = cg->stackDepth;
267 js_Emit1(JSContext *cx, JSCodeGenerator *cg, JSOp op)
269 ptrdiff_t offset = EmitCheck(cx, cg, op, 1);
272 *CG_NEXT(cg)++ = (jsbytecode)op;
273 UpdateDepth(cx, cg, offset);
279 js_Emit2(JSContext *cx, JSCodeGenerator *cg, JSOp op, jsbytecode op1)
281 ptrdiff_t offset = EmitCheck(cx, cg, op, 2);
284 jsbytecode *next = CG_NEXT(cg);
285 next[0] = (jsbytecode)op;
287 CG_NEXT(cg) = next + 2;
288 UpdateDepth(cx, cg, offset);
294 js_Emit3(JSContext *cx, JSCodeGenerator *cg, JSOp op, jsbytecode op1,
297 ptrdiff_t offset = EmitCheck(cx, cg, op, 3);
300 jsbytecode *next = CG_NEXT(cg);
301 next[0] = (jsbytecode)op;
304 CG_NEXT(cg) = next + 3;
305 UpdateDepth(cx, cg, offset);
311 js_Emit5(JSContext *cx, JSCodeGenerator *cg, JSOp op, uint16 op1, uint16 op2)
313 ptrdiff_t offset = EmitCheck(cx, cg, op, 5);
316 jsbytecode *next = CG_NEXT(cg);
317 next[0] = (jsbytecode)op;
318 next[1] = UINT16_HI(op1);
319 next[2] = UINT16_LO(op1);
320 next[3] = UINT16_HI(op2);
321 next[4] = UINT16_LO(op2);
322 CG_NEXT(cg) = next + 5;
323 UpdateDepth(cx, cg, offset);
329 js_EmitN(JSContext *cx, JSCodeGenerator *cg, JSOp op, size_t extra)
331 ptrdiff_t length = 1 + (ptrdiff_t)extra;
332 ptrdiff_t offset = EmitCheck(cx, cg, op, length);
335 jsbytecode *next = CG_NEXT(cg);
336 *next = (jsbytecode)op;
337 memset(next + 1, 0, BYTECODE_SIZE(extra));
338 CG_NEXT(cg) = next + length;
341 * Don't UpdateDepth if op's use-count comes from the immediate
342 * operand yet to be stored in the extra bytes after op.
344 if (js_CodeSpec[op].nuses >= 0)
345 UpdateDepth(cx, cg, offset);
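/*
 * Usage sketch for the js_Emit* family (hypothetical caller, not from the
 * original file): each emitter returns the offset of the new instruction, or
 * -1 on error, so callers test for a negative result, e.g.
 *
 *     if (js_Emit1(cx, cg, JSOP_DUP) < 0)
 *         return JS_FALSE;
 *     if (js_Emit3(cx, cg, JSOP_GETARG, UINT16_HI(slot), UINT16_LO(slot)) < 0)
 *         return JS_FALSE;
 *
 * js_EmitN is the variant for opcodes whose immediates are filled in after
 * the fact; it zeroes the extra bytes and defers UpdateDepth when the use
 * count depends on those bytes.
 */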
350 /* XXX too many "... statement" L10N gaffes below -- fix via js.msg! */
351 const char js_with_statement_str[] = "with statement";
352 const char js_finally_block_str[] = "finally block";
353 const char js_script_str[] = "script";
355 static const char *statementName[] = {
356 "label statement", /* LABEL */
357 "if statement", /* IF */
358 "else statement", /* ELSE */
359 "destructuring body", /* BODY */
360 "switch statement", /* SWITCH */
362 js_with_statement_str, /* WITH */
363 "catch block", /* CATCH */
364 "try block", /* TRY */
365 js_finally_block_str, /* FINALLY */
366 js_finally_block_str, /* SUBROUTINE */
367 "do loop", /* DO_LOOP */
368 "for loop", /* FOR_LOOP */
369 "for/in loop", /* FOR_IN_LOOP */
370 "while loop", /* WHILE_LOOP */
373 JS_STATIC_ASSERT(JS_ARRAY_LENGTH(statementName) == STMT_LIMIT);
376 StatementName(JSCodeGenerator *cg)
379 return js_script_str;
380 return statementName[cg->topStmt->type];
384 ReportStatementTooLarge(JSContext *cx, JSCodeGenerator *cg)
386 JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL, JSMSG_NEED_DIET,
387 StatementName(cg));
391 Span-dependent instructions in JS bytecode consist of the jump (JOF_JUMP)
392 and switch (JOF_LOOKUPSWITCH, JOF_TABLESWITCH) format opcodes, subdivided
393 into unconditional (gotos and gosubs), and conditional jumps or branches
394 (which pop a value, test it, and jump depending on its value). Most jumps
395 have just one immediate operand, a signed offset from the jump opcode's pc
396 to the target bytecode. The lookup and table switch opcodes may contain
397 many jump offsets.
399 Mozilla bug #80981 (http://bugzilla.mozilla.org/show_bug.cgi?id=80981) was
400 fixed by adding extended "X" counterparts to the opcodes/formats (NB: X is
401 suffixed to prefer JSOP_ORX thereby avoiding a JSOP_XOR name collision for
402 the extended form of the JSOP_OR branch opcode). The unextended or short
403 formats have 16-bit signed immediate offset operands, the extended or long
404 formats have 32-bit signed immediates. The span-dependency problem consists
405 of selecting as few long instructions as possible, or about as few -- since
406 jumps can span other jumps, extending one jump may cause another to need to
407 be extended.
409 Most JS scripts are short, so need no extended jumps. We optimize for this
410 case by generating short jumps until we know a long jump is needed. After
411 that point, we keep generating short jumps, but each jump's 16-bit immediate
412 offset operand is actually an unsigned index into cg->spanDeps, an array of
413 JSSpanDep structs. Each struct tells the top offset in the script of the
414 opcode, the "before" offset of the jump (which will be the same as top for
415 simplex jumps, but which will index further into the bytecode array for a
416 non-initial jump offset in a lookup or table switch), the "after" offset
417 adjusted during span-dependent instruction selection (initially the same
418 value as the "before" offset), and the jump target (more below).
420 Since we generate cg->spanDeps lazily, from within js_SetJumpOffset, we must
421 ensure that all bytecode generated so far can be inspected to discover where
422 the jump offset immediate operands lie within CG_CODE(cg). But the bonus is
423 that we generate span-dependency records sorted by their offsets, so we can
424 binary-search when trying to find a JSSpanDep for a given bytecode offset,
425 or the nearest JSSpanDep at or above a given pc.
427 To avoid limiting scripts to 64K jumps, if the cg->spanDeps index overflows
428 65534, we store SPANDEP_INDEX_HUGE in the jump's immediate operand. This
429 tells us that we need to binary-search for the cg->spanDeps entry by the
430 jump opcode's bytecode offset (sd->before).
432 Jump targets need to be maintained in a data structure that lets us look
433 up an already-known target by its address (jumps may have a common target),
434 and that also lets us update the addresses (script-relative, a.k.a. absolute
435 offsets) of targets that come after a jump target (for when a jump below
436 that target needs to be extended). We use an AVL tree, implemented using
437 recursion, but with some tricky optimizations to its height-balancing code
438 (see http://www.cmcrossroads.com/bradapp/ftp/src/libs/C++/AvlTrees.html).
440 A final wrinkle: backpatch chains are linked by jump-to-jump offsets with
441 positive sign, even though they link "backward" (i.e., toward lower bytecode
442 address). We don't want to waste space and search time in the AVL tree for
443 such temporary backpatch deltas, so we use a single-bit wildcard scheme to
444 tag true JSJumpTarget pointers and encode untagged, signed (positive) deltas
445 in JSSpanDep.target pointers, depending on whether the JSSpanDep has a known
446 target, or is still awaiting backpatching.
448 Note that backpatch chains would present a problem for BuildSpanDepTable,
449 which inspects bytecode to build cg->spanDeps on demand, when the first
450 short jump offset overflows. To solve this temporary problem, we emit a
451 proxy bytecode (JSOP_BACKPATCH; JSOP_BACKPATCH_POP for branch ops) whose
452 nuses/ndefs counts help keep the stack balanced, but whose opcode format
453 distinguishes its backpatch delta immediate operand from a normal jump
454 offset operand.
455 */
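/*
 * Encoding sketch (illustrative, sizes per JUMP_OFFSET_LEN == 2 and
 * JUMPX_OFFSET_LEN == 4):
 *
 *     JSOP_GOTO  <hi> <lo>              ; 3 bytes, signed 16-bit span
 *     JSOP_GOTOX <b3> <b2> <b1> <b0>    ; 5 bytes, signed 32-bit span
 *
 * While cg->spanDeps is live, a short jump's 16-bit immediate holds an index
 * into cg->spanDeps (or SPANDEP_INDEX_HUGE) rather than a span; the true span
 * lives in the JSSpanDep/JSJumpTarget records until OptimizeSpanDeps writes
 * the final offsets back.
 */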
457 BalanceJumpTargets(JSJumpTarget **jtp)
459 JSJumpTarget *jt, *jt2, *root;
460 int dir, otherDir, heightChanged;
464 JS_ASSERT(jt->balance != 0);
466 if (jt->balance < -1) {
468 doubleRotate = (jt->kids[JT_LEFT]->balance > 0);
469 } else if (jt->balance > 1) {
471 doubleRotate = (jt->kids[JT_RIGHT]->balance < 0);
476 otherDir = JT_OTHER_DIR(dir);
478 jt2 = jt->kids[otherDir];
479 *jtp = root = jt2->kids[dir];
481 jt->kids[otherDir] = root->kids[dir];
482 root->kids[dir] = jt;
484 jt2->kids[dir] = root->kids[otherDir];
485 root->kids[otherDir] = jt2;
488 root->kids[JT_LEFT]->balance = -JS_MAX(root->balance, 0);
489 root->kids[JT_RIGHT]->balance = -JS_MIN(root->balance, 0);
492 *jtp = root = jt->kids[otherDir];
493 jt->kids[otherDir] = root->kids[dir];
494 root->kids[dir] = jt;
496 heightChanged = (root->balance != 0);
497 jt->balance = -((dir == JT_LEFT) ? --root->balance : ++root->balance);
500 return heightChanged;
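/*
 * Worked example (not from the original source): inserting jump-target
 * offsets 10, 20, 30 in that order leaves the node for 10 with balance +2 and
 * its right child (20) with balance +1, so doubleRotate is false and a single
 * rotation promotes 20 to the subtree root with 10 and 30 as balanced kids.
 */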
503 typedef struct AddJumpTargetArgs {
511 AddJumpTarget(AddJumpTargetArgs *args, JSJumpTarget **jtp)
518 JSCodeGenerator *cg = args->cg;
522 cg->jtFreeList = jt->kids[JT_LEFT];
524 JS_ARENA_ALLOCATE_CAST(jt, JSJumpTarget *, &args->cx->tempPool,
527 js_ReportOutOfScriptQuota(args->cx);
531 jt->offset = args->offset;
533 jt->kids[JT_LEFT] = jt->kids[JT_RIGHT] = NULL;
534 cg->numJumpTargets++;
540 if (jt->offset == args->offset) {
545 if (args->offset < jt->offset)
546 balanceDelta = -AddJumpTarget(args, &jt->kids[JT_LEFT]);
548 balanceDelta = AddJumpTarget(args, &jt->kids[JT_RIGHT]);
552 jt->balance += balanceDelta;
553 return (balanceDelta && jt->balance)
554 ? 1 - BalanceJumpTargets(jtp)
555 : 0;
559 static int AVLCheck(JSJumpTarget *jt)
564 JS_ASSERT(-1 <= jt->balance && jt->balance <= 1);
565 lh = AVLCheck(jt->kids[JT_LEFT]);
566 rh = AVLCheck(jt->kids[JT_RIGHT]);
567 JS_ASSERT(jt->balance == rh - lh);
568 return 1 + JS_MAX(lh, rh);
573 SetSpanDepTarget(JSContext *cx, JSCodeGenerator *cg, JSSpanDep *sd,
576 AddJumpTargetArgs args;
578 if (off < JUMPX_OFFSET_MIN || JUMPX_OFFSET_MAX < off) {
579 ReportStatementTooLarge(cx, cg);
585 args.offset = sd->top + off;
587 AddJumpTarget(&args, &cg->jumpTargets);
592 AVLCheck(cg->jumpTargets);
595 SD_SET_TARGET(sd, args.node);
599 #define SPANDEPS_MIN 256
600 #define SPANDEPS_SIZE(n) ((n) * sizeof(JSSpanDep))
601 #define SPANDEPS_SIZE_MIN SPANDEPS_SIZE(SPANDEPS_MIN)
604 AddSpanDep(JSContext *cx, JSCodeGenerator *cg, jsbytecode *pc, jsbytecode *pc2,
608 JSSpanDep *sdbase, *sd;
611 index = cg->numSpanDeps;
612 if (index + 1 == 0) {
613 ReportStatementTooLarge(cx, cg);
617 if ((index & (index - 1)) == 0 &&
618 (!(sdbase = cg->spanDeps) || index >= SPANDEPS_MIN)) {
619 size = sdbase ? SPANDEPS_SIZE(index) : SPANDEPS_SIZE_MIN / 2;
620 sdbase = (JSSpanDep *) cx->realloc(sdbase, size + size);
623 cg->spanDeps = sdbase;
626 cg->numSpanDeps = index + 1;
627 sd = cg->spanDeps + index;
628 sd->top = pc - CG_BASE(cg);
629 sd->offset = sd->before = pc2 - CG_BASE(cg);
631 if (js_CodeSpec[*pc].format & JOF_BACKPATCH) {
632 /* Jump offset will be backpatched if off is a non-zero "bpdelta". */
634 JS_ASSERT(off >= 1 + JUMP_OFFSET_LEN);
635 if (off > BPDELTA_MAX) {
636 ReportStatementTooLarge(cx, cg);
640 SD_SET_BPDELTA(sd, off);
641 } else if (off == 0) {
642 /* Jump offset will be patched directly, without backpatch chaining. */
643 SD_SET_TARGET(sd, 0);
645 /* The jump offset in off is non-zero, therefore it's already known. */
646 if (!SetSpanDepTarget(cx, cg, sd, off))
650 if (index > SPANDEP_INDEX_MAX)
651 index = SPANDEP_INDEX_HUGE;
652 SET_SPANDEP_INDEX(pc2, index);
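/*
 * Growth sketch (illustrative): the realloc above runs only when index is a
 * power of two, so the table is created with SPANDEPS_MIN (256) entries on
 * the first AddSpanDep and doubles to 512, 1024, ... entries as index crosses
 * 256, 512, ...; jumps whose index exceeds SPANDEP_INDEX_MAX store
 * SPANDEP_INDEX_HUGE in their immediate and are found again by binary search
 * on sd->before.
 */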
657 AddSwitchSpanDeps(JSContext *cx, JSCodeGenerator *cg, jsbytecode *pc)
663 uintN njumps, indexlen;
666 JS_ASSERT(op == JSOP_TABLESWITCH || op == JSOP_LOOKUPSWITCH);
668 off = GET_JUMP_OFFSET(pc2);
669 if (!AddSpanDep(cx, cg, pc, pc2, off))
671 pc2 += JUMP_OFFSET_LEN;
672 if (op == JSOP_TABLESWITCH) {
673 low = GET_JUMP_OFFSET(pc2);
674 pc2 += JUMP_OFFSET_LEN;
675 high = GET_JUMP_OFFSET(pc2);
676 pc2 += JUMP_OFFSET_LEN;
677 njumps = (uintN) (high - low + 1);
680 njumps = GET_UINT16(pc2);
682 indexlen = INDEX_LEN;
687 off = GET_JUMP_OFFSET(pc2);
688 if (!AddSpanDep(cx, cg, pc, pc2, off))
690 pc2 += JUMP_OFFSET_LEN;
696 BuildSpanDepTable(JSContext *cx, JSCodeGenerator *cg)
698 jsbytecode *pc, *end;
700 const JSCodeSpec *cs;
703 pc = CG_BASE(cg) + cg->spanDepTodo;
708 cs = &js_CodeSpec[op];
710 switch (JOF_TYPE(cs->format)) {
711 case JOF_TABLESWITCH:
712 case JOF_LOOKUPSWITCH:
713 pc = AddSwitchSpanDeps(cx, cg, pc);
719 off = GET_JUMP_OFFSET(pc);
720 if (!AddSpanDep(cx, cg, pc, pc, off))
733 GetSpanDep(JSCodeGenerator *cg, jsbytecode *pc)
740 index = GET_SPANDEP_INDEX(pc);
741 if (index != SPANDEP_INDEX_HUGE)
742 return cg->spanDeps + index;
744 offset = pc - CG_BASE(cg);
746 hi = cg->numSpanDeps - 1;
749 sd = cg->spanDeps + mid;
750 if (sd->before == offset)
752 if (sd->before < offset)
763 SetBackPatchDelta(JSContext *cx, JSCodeGenerator *cg, jsbytecode *pc,
768 JS_ASSERT(delta >= 1 + JUMP_OFFSET_LEN);
769 if (!cg->spanDeps && delta < JUMP_OFFSET_MAX) {
770 SET_JUMP_OFFSET(pc, delta);
774 if (delta > BPDELTA_MAX) {
775 ReportStatementTooLarge(cx, cg);
779 if (!cg->spanDeps && !BuildSpanDepTable(cx, cg))
782 sd = GetSpanDep(cg, pc);
783 JS_ASSERT(SD_GET_BPDELTA(sd) == 0);
784 SD_SET_BPDELTA(sd, delta);
789 UpdateJumpTargets(JSJumpTarget *jt, ptrdiff_t pivot, ptrdiff_t delta)
791 if (jt->offset > pivot) {
793 if (jt->kids[JT_LEFT])
794 UpdateJumpTargets(jt->kids[JT_LEFT], pivot, delta);
796 if (jt->kids[JT_RIGHT])
797 UpdateJumpTargets(jt->kids[JT_RIGHT], pivot, delta);
801 FindNearestSpanDep(JSCodeGenerator *cg, ptrdiff_t offset, int lo,
805 JSSpanDep *sdbase, *sd;
807 num = cg->numSpanDeps;
810 sdbase = cg->spanDeps;
814 if (sd->before == offset)
816 if (sd->before < offset)
824 JS_ASSERT(sd->before >= offset && (lo == 0 || sd[-1].before < offset));
829 FreeJumpTargets(JSCodeGenerator *cg, JSJumpTarget *jt)
831 if (jt->kids[JT_LEFT])
832 FreeJumpTargets(cg, jt->kids[JT_LEFT]);
833 if (jt->kids[JT_RIGHT])
834 FreeJumpTargets(cg, jt->kids[JT_RIGHT]);
835 jt->kids[JT_LEFT] = cg->jtFreeList;
840 OptimizeSpanDeps(JSContext *cx, JSCodeGenerator *cg)
842 jsbytecode *pc, *oldpc, *base, *limit, *next;
843 JSSpanDep *sd, *sd2, *sdbase, *sdlimit, *sdtop, guard;
844 ptrdiff_t offset, growth, delta, top, pivot, span, length, target;
849 jssrcnote *sn, *snlimit;
851 uintN i, n, noteIndex;
858 sdbase = cg->spanDeps;
859 sdlimit = sdbase + cg->numSpanDeps;
860 offset = CG_OFFSET(cg);
875 for (sd = sdbase; sd < sdlimit; sd++) {
876 JS_ASSERT(JT_HAS_TAG(sd->target));
879 if (sd->top != top) {
882 JS_ASSERT(top == sd->before);
886 type = JOF_OPTYPE(op);
887 if (JOF_TYPE_IS_EXTENDED_JUMP(type)) {
889 * We already extended all the jump offset operands for
890 * the opcode at sd->top. Jumps and branches have only
891 * one jump offset operand, but switches have many, all
892 * of which are adjacent in cg->spanDeps.
897 JS_ASSERT(type == JOF_JUMP ||
898 type == JOF_TABLESWITCH ||
899 type == JOF_LOOKUPSWITCH);
902 if (!JOF_TYPE_IS_EXTENDED_JUMP(type)) {
903 span = SD_SPAN(sd, pivot);
904 if (span < JUMP_OFFSET_MIN || JUMP_OFFSET_MAX < span) {
905 ptrdiff_t deltaFromTop = 0;
910 case JSOP_GOTO: op = JSOP_GOTOX; break;
911 case JSOP_IFEQ: op = JSOP_IFEQX; break;
912 case JSOP_IFNE: op = JSOP_IFNEX; break;
913 case JSOP_OR: op = JSOP_ORX; break;
914 case JSOP_AND: op = JSOP_ANDX; break;
915 case JSOP_GOSUB: op = JSOP_GOSUBX; break;
916 case JSOP_CASE: op = JSOP_CASEX; break;
917 case JSOP_DEFAULT: op = JSOP_DEFAULTX; break;
918 case JSOP_TABLESWITCH: op = JSOP_TABLESWITCHX; break;
919 case JSOP_LOOKUPSWITCH: op = JSOP_LOOKUPSWITCHX; break;
921 ReportStatementTooLarge(cx, cg);
924 *pc = (jsbytecode) op;
926 for (sd2 = sdtop; sd2 < sdlimit && sd2->top == top; sd2++) {
929 * sd2->offset already includes delta as it stood
930 * before we entered this loop, but it must also
931 * include the delta relative to top due to all the
932 * extended jump offset immediates for the opcode
933 * starting at top, which we extend in this loop.
935 * If there is only one extended jump offset, then
936 * sd2->offset won't change and this for loop will
939 sd2->offset += deltaFromTop;
940 deltaFromTop += JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN;
943 * sd2 comes after sd, and won't be revisited by
944 * the outer for loop, so we have to increase its
945 * offset by delta, not merely by deltaFromTop.
947 sd2->offset += delta;
950 delta += JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN;
951 UpdateJumpTargets(cg->jumpTargets, sd2->offset,
952 JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN);
964 TokenStream *ts = &cg->parser->tokenStream;
966 printf("%s:%u: %u/%u jumps extended in %d passes (%d=%d+%d)\n",
967 ts->filename ? ts->filename : "stdin", cg->firstLine,
968 growth / (JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN), cg->numSpanDeps,
969 passes, offset + growth, offset, growth);
973 * Ensure that we have room for the extended jumps, but don't round up
974 * to a power of two -- we're done generating code, so we cut to fit.
976 limit = CG_LIMIT(cg);
977 length = offset + growth;
978 next = base + length;
980 JS_ASSERT(length > BYTECODE_CHUNK);
981 size = BYTECODE_SIZE(limit - base);
982 incr = BYTECODE_SIZE(length) - size;
983 JS_ARENA_GROW_CAST(base, jsbytecode *, cg->codePool, size, incr);
985 js_ReportOutOfScriptQuota(cx);
989 CG_LIMIT(cg) = next = base + length;
994 * Set up a fake span dependency record to guard the end of the code
995 * being generated. This guard record is returned as a fencepost by
996 * FindNearestSpanDep if there is no real spandep at or above a given
997 * unextended code offset.
1000 guard.offset = offset + growth;
1001 guard.before = offset;
1002 guard.target = NULL;
1006 * Now work backwards through the span dependencies, copying chunks of
1007 * bytecode between each extended jump toward the end of the grown code
1008 * space, and restoring immediate offset operands for all jump bytecodes.
1009 * The first chunk of bytecodes, starting at base and ending at the first
1010 * extended jump offset (NB: this chunk includes the operation bytecode
1011 * just before that immediate jump offset), doesn't need to be copied.
1013 JS_ASSERT(sd == sdlimit);
1015 while (--sd >= sdbase) {
1016 if (sd->top != top) {
1018 op = (JSOp) base[top];
1019 type = JOF_OPTYPE(op);
1021 for (sd2 = sd - 1; sd2 >= sdbase && sd2->top == top; sd2--)
1024 pivot = sd2->offset;
1025 JS_ASSERT(top == sd2->before);
1028 oldpc = base + sd->before;
1029 span = SD_SPAN(sd, pivot);
1032 * If this jump didn't need to be extended, restore its span immediate
1033 * offset operand now, overwriting the index of sd within cg->spanDeps
1034 * that was stored temporarily after *pc when BuildSpanDepTable ran.
1036 * Note that span might fit in 16 bits even for an extended jump op,
1037 * if the op has multiple span operands, not all of which overflowed
1038 * (e.g. JSOP_LOOKUPSWITCH or JSOP_TABLESWITCH where some cases are in
1039 * range for a short jump, but others are not).
1041 if (!JOF_TYPE_IS_EXTENDED_JUMP(type)) {
1042 JS_ASSERT(JUMP_OFFSET_MIN <= span && span <= JUMP_OFFSET_MAX);
1043 SET_JUMP_OFFSET(oldpc, span);
1048 * Set up parameters needed to copy the next run of bytecode starting
1049 * at offset (which is a cursor into the unextended, original bytecode
1050 * vector), down to sd->before (a cursor of the same scale as offset,
1051 * it's the index of the original jump pc). Reuse delta to count the
1052 * nominal number of bytes to copy.
1054 pc = base + sd->offset;
1055 delta = offset - sd->before;
1056 JS_ASSERT(delta >= 1 + JUMP_OFFSET_LEN);
1059 * Don't bother copying the jump offset we're about to reset, but do
1060 * copy the bytecode at oldpc (which comes just before its immediate
1061 * jump offset operand), on the next iteration through the loop, by
1062 * including it in offset's new value.
1064 offset = sd->before + 1;
1065 size = BYTECODE_SIZE(delta - (1 + JUMP_OFFSET_LEN));
1067 memmove(pc + 1 + JUMPX_OFFSET_LEN,
1068 oldpc + 1 + JUMP_OFFSET_LEN,
1072 SET_JUMPX_OFFSET(pc, span);
1077 * Fix source note deltas. Don't hardwire the delta fixup adjustment,
1078 * even though currently it must be JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN
1079 * at each sd that moved. The future may bring different offset sizes
1080 * for span-dependent instruction operands. However, we fix only main
1081 * notes here, not prolog notes -- we know that prolog opcodes are not
1082 * span-dependent, and aren't likely ever to be.
1084 offset = growth = 0;
1086 for (sn = cg->main.notes, snlimit = sn + cg->main.noteCount;
1090 * Recall that the offset of a given note includes its delta, and
1091 * tells the offset of the annotated bytecode from the main entry
1092 * point of the script.
1094 offset += SN_DELTA(sn);
1095 while (sd < sdlimit && sd->before < offset) {
1097 * To compute the delta to add to sn, we need to look at the
1098 * spandep after sd, whose offset - (before + growth) tells by
1099 * how many bytes sd's instruction grew.
1104 delta = sd2->offset - (sd2->before + growth);
1106 JS_ASSERT(delta == JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN);
1107 sn = js_AddToSrcNoteDelta(cx, cg, sn, delta);
1110 snlimit = cg->main.notes + cg->main.noteCount;
1117 * If sn has span-dependent offset operands, check whether each
1118 * covers further span-dependencies, and increase those operands
1119 * accordingly. Some source notes measure offset not from the
1120 * annotated pc, but from that pc plus some small bias. NB: we
1121 * assume that spec->offsetBias can't itself span span-dependent
1122 * instructions!
1123 */
1124 spec = &js_SrcNoteSpec[SN_TYPE(sn)];
1125 if (spec->isSpanDep) {
1126 pivot = offset + spec->offsetBias;
1128 for (i = 0; i < n; i++) {
1129 span = js_GetSrcNoteOffset(sn, i);
1132 target = pivot + span * spec->isSpanDep;
1133 sd2 = FindNearestSpanDep(cg, target,
1140 * Increase target by sd2's before-vs-after offset delta,
1141 * which is absolute (i.e., relative to start of script,
1142 * as is target). Recompute the span by subtracting its
1143 * adjusted pivot from target.
1145 target += sd2->offset - sd2->before;
1146 span = target - (pivot + growth);
1147 span *= spec->isSpanDep;
1148 noteIndex = sn - cg->main.notes;
1149 if (!js_SetSrcNoteOffset(cx, cg, noteIndex, i, span))
1151 sn = cg->main.notes + noteIndex;
1152 snlimit = cg->main.notes + cg->main.noteCount;
1156 cg->main.lastNoteOffset += growth;
1159 * Fix try/catch notes (O(numTryNotes * log2(numSpanDeps)), but it's
1160 * not clear how we can beat that).
1162 for (tryNode = cg->lastTryNode; tryNode; tryNode = tryNode->prev) {
1164 * First, look for the nearest span dependency at/above tn->start.
1165 * There may not be any such spandep, in which case the guard will
1168 offset = tryNode->note.start;
1169 sd = FindNearestSpanDep(cg, offset, 0, &guard);
1170 delta = sd->offset - sd->before;
1171 tryNode->note.start = offset + delta;
1174 * Next, find the nearest spandep at/above tn->start + tn->length.
1175 * Use its delta minus tn->start's delta to increase tn->length.
1177 length = tryNode->note.length;
1178 sd2 = FindNearestSpanDep(cg, offset + length, sd - sdbase, &guard);
1180 tryNode->note.length =
1181 length + sd2->offset - sd2->before - delta;
1186 #ifdef DEBUG_brendan
1190 for (sd = sdbase; sd < sdlimit; sd++) {
1191 offset = sd->offset;
1193 /* NB: sd->top cursors into the original, unextended bytecode vector. */
1194 if (sd->top != top) {
1195 JS_ASSERT(top == -1 ||
1196 !JOF_TYPE_IS_EXTENDED_JUMP(type) ||
1200 JS_ASSERT(top == sd->before);
1201 op = (JSOp) base[offset];
1202 type = JOF_OPTYPE(op);
1203 JS_ASSERT(type == JOF_JUMP ||
1204 type == JOF_JUMPX ||
1205 type == JOF_TABLESWITCH ||
1206 type == JOF_TABLESWITCHX ||
1207 type == JOF_LOOKUPSWITCH ||
1208 type == JOF_LOOKUPSWITCHX);
1213 if (JOF_TYPE_IS_EXTENDED_JUMP(type)) {
1214 span = GET_JUMPX_OFFSET(pc);
1215 if (span < JUMP_OFFSET_MIN || JUMP_OFFSET_MAX < span) {
1218 JS_ASSERT(type == JOF_TABLESWITCHX ||
1219 type == JOF_LOOKUPSWITCHX);
1222 span = GET_JUMP_OFFSET(pc);
1224 JS_ASSERT(SD_SPAN(sd, pivot) == span);
1226 JS_ASSERT(!JOF_TYPE_IS_EXTENDED_JUMP(type) || bigspans != 0);
1231 * Reset so we optimize at most once -- cg may be used for further code
1232 * generation of successive, independent, top-level statements. No jump
1233 * can span top-level statements, because JS lacks goto.
1235 size = SPANDEPS_SIZE(JS_BIT(JS_CeilingLog2(cg->numSpanDeps)));
1236 cx->free(cg->spanDeps);
1237 cg->spanDeps = NULL;
1238 FreeJumpTargets(cg, cg->jumpTargets);
1239 cg->jumpTargets = NULL;
1240 cg->numSpanDeps = cg->numJumpTargets = 0;
1241 cg->spanDepTodo = CG_OFFSET(cg);
1246 EmitJump(JSContext *cx, JSCodeGenerator *cg, JSOp op, ptrdiff_t off)
1252 extend = off < JUMP_OFFSET_MIN || JUMP_OFFSET_MAX < off;
1253 if (extend && !cg->spanDeps && !BuildSpanDepTable(cx, cg))
1256 jmp = js_Emit3(cx, cg, op, JUMP_OFFSET_HI(off), JUMP_OFFSET_LO(off));
1257 if (jmp >= 0 && (extend || cg->spanDeps)) {
1258 pc = CG_CODE(cg, jmp);
1259 if (!AddSpanDep(cx, cg, pc, pc, off))
1266 GetJumpOffset(JSCodeGenerator *cg, jsbytecode *pc)
1273 return GET_JUMP_OFFSET(pc);
1275 sd = GetSpanDep(cg, pc);
1277 if (!JT_HAS_TAG(jt))
1278 return JT_TO_BPDELTA(jt);
1281 while (--sd >= cg->spanDeps && sd->top == top)
1284 return JT_CLR_TAG(jt)->offset - sd->offset;
1288 js_SetJumpOffset(JSContext *cx, JSCodeGenerator *cg, jsbytecode *pc,
1291 if (!cg->spanDeps) {
1292 if (JUMP_OFFSET_MIN <= off && off <= JUMP_OFFSET_MAX) {
1293 SET_JUMP_OFFSET(pc, off);
1297 if (!BuildSpanDepTable(cx, cg))
1301 return SetSpanDepTarget(cx, cg, GetSpanDep(cg, pc), off);
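/*
 * Usage sketch (hypothetical caller): the standard forward-jump pattern in
 * this file emits the jump with a zero span and patches it once the target
 * is known, bottoming out in js_SetJumpOffset above:
 *
 *     ptrdiff_t jmp = EmitJump(cx, cg, JSOP_IFEQ, 0);
 *     if (jmp < 0)
 *         return JS_FALSE;
 *     ...emit the consequent...
 *     if (!js_SetJumpOffset(cx, cg, CG_CODE(cg, jmp), CG_OFFSET(cg) - jmp))
 *         return JS_FALSE;
 *
 * (jsemit.h wraps this pattern in the CHECK_AND_SET_JUMP_OFFSET* macros used
 * elsewhere, e.g. in BackPatch below.)
 */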
1305 JSTreeContext::inStatement(JSStmtType type)
1307 for (JSStmtInfo *stmt = topStmt; stmt; stmt = stmt->down) {
1308 if (stmt->type == type)
1315 JSTreeContext::ensureSharpSlots()
1317 #if JS_HAS_SHARP_VARS
1318 JS_STATIC_ASSERT(SHARP_NSLOTS == 2);
1320 if (sharpSlotBase >= 0) {
1321 JS_ASSERT(flags & TCF_HAS_SHARPS);
1325 JS_ASSERT(!(flags & TCF_HAS_SHARPS));
1327 JSContext *cx = parser->context;
1328 JSAtom *sharpArrayAtom = js_Atomize(cx, "#array", 6, 0);
1329 JSAtom *sharpDepthAtom = js_Atomize(cx, "#depth", 6, 0);
1330 if (!sharpArrayAtom || !sharpDepthAtom)
1333 sharpSlotBase = bindings.countVars();
1334 if (!bindings.addVariable(cx, sharpArrayAtom))
1336 if (!bindings.addVariable(cx, sharpDepthAtom))
1340 * Compiler::compileScript will rebase immediate operands indexing
1341 * the sharp slots to come at the end of the global script's |nfixed|
1342 * slots storage, after gvars and regexps.
1346 flags |= TCF_HAS_SHARPS;
1352 JSTreeContext::skipSpansGenerator(unsigned skip)
1354 JSTreeContext *tc = this;
1355 for (unsigned i = 0; i < skip; ++i, tc = tc->parent) {
1358 if (tc->flags & TCF_FUN_IS_GENERATOR)
1365 js_PushStatement(JSTreeContext *tc, JSStmtInfo *stmt, JSStmtType type,
1370 stmt->blockid = tc->blockid();
1371 SET_STATEMENT_TOP(stmt, top);
1373 JS_ASSERT(!stmt->blockBox);
1374 stmt->down = tc->topStmt;
1376 if (STMT_LINKS_SCOPE(stmt)) {
1377 stmt->downScope = tc->topScopeStmt;
1378 tc->topScopeStmt = stmt;
1380 stmt->downScope = NULL;
1385 js_PushBlockScope(JSTreeContext *tc, JSStmtInfo *stmt, JSObjectBox *blockBox,
1388 js_PushStatement(tc, stmt, STMT_BLOCK, top);
1389 stmt->flags |= SIF_SCOPE;
1390 blockBox->parent = tc->blockChainBox;
1391 blockBox->object->setParent(tc->blockChain());
1392 stmt->downScope = tc->topScopeStmt;
1393 tc->topScopeStmt = stmt;
1394 tc->blockChainBox = blockBox;
1395 stmt->blockBox = blockBox;
1399 * Emit a backpatch op with offset pointing to the previous jump of this type,
1400 * so that we can walk back up the chain fixing up the op and jump offset.
1403 EmitBackPatchOp(JSContext *cx, JSCodeGenerator *cg, JSOp op, ptrdiff_t *lastp)
1405 ptrdiff_t offset, delta;
1407 offset = CG_OFFSET(cg);
1408 delta = offset - *lastp;
1409 *lastp = offset;
1410 JS_ASSERT(delta > 0);
1411 return EmitJump(cx, cg, op, delta);
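/*
 * Chain walk-through (illustrative): each backpatch op's immediate records
 * the distance back to the previous unresolved jump of the same kind, and
 * *lastp is advanced to the new jump's offset, so with the chain head
 * initialized to -1 three such jumps look like:
 *
 *     offset 10: JSOP_BACKPATCH <11>   ; 10 - (-1), end of chain
 *     offset 40: JSOP_BACKPATCH <30>   ; 40 - 10
 *     offset 55: JSOP_BACKPATCH <15>   ; 55 - 40, *lastp == 55
 *
 * BackPatch below starts at *lastp and follows these deltas until it reaches
 * CG_CODE(cg, -1), rewriting each op and its immediate with the real span.
 */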
1415 EmitTraceOp(JSContext *cx, JSCodeGenerator *cg)
1417 uint32 index = cg->traceIndex;
1418 if (index < UINT16_MAX)
1420 return js_Emit3(cx, cg, JSOP_TRACE, UINT16_HI(index), UINT16_LO(index));
1424 * Macro to emit a bytecode followed by a uint16 immediate operand stored in
1425 * big-endian order, used for arg and var numbers as well as for atomIndexes.
1426 * NB: We use cx and cg from our caller's lexical environment, and return
1427 * false on error.
1428 */
1429 #define EMIT_UINT16_IMM_OP(op, i) \
1431 if (js_Emit3(cx, cg, op, UINT16_HI(i), UINT16_LO(i)) < 0) \
1435 #define EMIT_UINT16PAIR_IMM_OP(op, i, j) \
1437 ptrdiff_t off_ = js_EmitN(cx, cg, op, 2 * UINT16_LEN); \
1440 jsbytecode *pc_ = CG_CODE(cg, off_); \
1441 SET_UINT16(pc_, i); \
1442 pc_ += UINT16_LEN; \
1443 SET_UINT16(pc_, j); \
1446 #define EMIT_UINT16_IN_PLACE(offset, op, i) \
1448 CG_CODE(cg, offset)[0] = op; \
1449 CG_CODE(cg, offset)[1] = UINT16_HI(i); \
1450 CG_CODE(cg, offset)[2] = UINT16_LO(i); \
1454 FlushPops(JSContext *cx, JSCodeGenerator *cg, intN *npops)
1456 JS_ASSERT(*npops != 0);
1457 if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
1459 EMIT_UINT16_IMM_OP(JSOP_POPN, *npops);
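/*
 * Expansion sketch (illustrative): the macros above assume cx and cg are in
 * scope and that the enclosing function returns JS_FALSE on failure. A use
 * such as EMIT_UINT16_IMM_OP(JSOP_GETARG, slot) is shorthand for
 *
 *     if (js_Emit3(cx, cg, JSOP_GETARG, UINT16_HI(slot), UINT16_LO(slot)) < 0)
 *         return JS_FALSE;
 *
 * i.e. the opcode followed by the big-endian halves of the 16-bit immediate.
 */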
1465 * Emit additional bytecode(s) for non-local jumps.
1468 EmitNonLocalJumpFixup(JSContext *cx, JSCodeGenerator *cg, JSStmtInfo *toStmt)
1474 * The non-local jump fixup we emit will unbalance cg->stackDepth, because
1475 * the fixup replicates balanced code such as JSOP_LEAVEWITH emitted at the
1476 * end of a with statement, so we save cg->stackDepth here and restore it
1477 * just before a successful return.
1479 depth = cg->stackDepth;
1482 #define FLUSH_POPS() if (npops && !FlushPops(cx, cg, &npops)) return JS_FALSE
1484 for (stmt = cg->topStmt; stmt != toStmt; stmt = stmt->down) {
1485 switch (stmt->type) {
1488 if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
1490 if (EmitBackPatchOp(cx, cg, JSOP_BACKPATCH, &GOSUBS(*stmt)) < 0)
1495 /* There's a With object on the stack that we need to pop. */
1497 if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
1499 if (js_Emit1(cx, cg, JSOP_LEAVEWITH) < 0)
1503 case STMT_FOR_IN_LOOP:
1505 * The iterator and the object being iterated need to be popped.
1508 if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
1510 if (js_Emit1(cx, cg, JSOP_ENDITER) < 0)
1514 case STMT_SUBROUTINE:
1516 * There's a [exception or hole, retsub pc-index] pair on the
1517 * stack that we need to pop.
1525 if (stmt->flags & SIF_SCOPE) {
1526 /* There is a Block object with locals on the stack to pop. */
1528 if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
1530 if (!EmitLeaveBlock(cx, cg, JSOP_LEAVEBLOCK, stmt->blockBox))
1536 cg->stackDepth = depth;
1543 EmitKnownBlockChain(JSContext *cx, JSCodeGenerator *cg, JSObjectBox *box)
1546 return EmitIndexOp(cx, JSOP_BLOCKCHAIN, box->index, cg);
1547 return js_Emit1(cx, cg, JSOP_NULLBLOCKCHAIN) >= 0;
1551 EmitBlockChain(JSContext *cx, JSCodeGenerator *cg)
1553 return EmitKnownBlockChain(cx, cg, cg->blockChainBox);
1557 EmitGoto(JSContext *cx, JSCodeGenerator *cg, JSStmtInfo *toStmt,
1558 ptrdiff_t *lastp, JSAtomListElement *label, JSSrcNoteType noteType)
1562 if (!EmitNonLocalJumpFixup(cx, cg, toStmt))
1566 index = js_NewSrcNote2(cx, cg, noteType, (ptrdiff_t) ALE_INDEX(label));
1567 else if (noteType != SRC_NULL)
1568 index = js_NewSrcNote(cx, cg, noteType);
1574 ptrdiff_t result = EmitBackPatchOp(cx, cg, JSOP_BACKPATCH, lastp);
1578 if (!EmitBlockChain(cx, cg))
1585 BackPatch(JSContext *cx, JSCodeGenerator *cg, ptrdiff_t last,
1586 jsbytecode *target, jsbytecode op)
1588 jsbytecode *pc, *stop;
1589 ptrdiff_t delta, span;
1591 pc = CG_CODE(cg, last);
1592 stop = CG_CODE(cg, -1);
1593 while (pc != stop) {
1594 delta = GetJumpOffset(cg, pc);
1596 CHECK_AND_SET_JUMP_OFFSET(cx, cg, pc, span);
1599 * Set *pc after jump offset in case bpdelta didn't overflow, but span
1600 * does (if so, CHECK_AND_SET_JUMP_OFFSET might call BuildSpanDepTable
1601 * and need to see the JSOP_BACKPATCH* op at *pc).
1610 js_PopStatement(JSTreeContext *tc)
1615 tc->topStmt = stmt->down;
1616 if (STMT_LINKS_SCOPE(stmt)) {
1617 tc->topScopeStmt = stmt->downScope;
1618 if (stmt->flags & SIF_SCOPE) {
1619 tc->blockChainBox = stmt->blockBox->parent;
1620 JS_SCOPE_DEPTH_METERING(--tc->scopeDepth);
1626 js_PopStatementCG(JSContext *cx, JSCodeGenerator *cg)
1631 if (!STMT_IS_TRYING(stmt) &&
1632 (!BackPatch(cx, cg, stmt->breaks, CG_NEXT(cg), JSOP_GOTO) ||
1633 !BackPatch(cx, cg, stmt->continues, CG_CODE(cg, stmt->update),
1637 js_PopStatement(cg);
1642 js_DefineCompileTimeConstant(JSContext *cx, JSCodeGenerator *cg, JSAtom *atom,
1645 /* XXX just do numbers for now */
1646 if (pn->pn_type == TOK_NUMBER) {
1647 if (!cg->constMap.put(atom, NumberValue(pn->pn_dval)))
1654 js_LexicalLookup(JSTreeContext *tc, JSAtom *atom, jsint *slotp, JSStmtInfo *stmt)
1657 stmt = tc->topScopeStmt;
1658 for (; stmt; stmt = stmt->downScope) {
1659 if (stmt->type == STMT_WITH)
1662 /* Skip "maybe scope" statements that don't contain let bindings. */
1663 if (!(stmt->flags & SIF_SCOPE))
1666 JSObject *obj = stmt->blockBox->object;
1667 JS_ASSERT(obj->isStaticBlock());
1669 const Shape *shape = obj->nativeLookup(ATOM_TO_JSID(atom));
1671 JS_ASSERT(shape->hasShortID());
1674 JS_ASSERT(obj->getSlot(JSSLOT_BLOCK_DEPTH).isInt32());
1675 *slotp = obj->getSlot(JSSLOT_BLOCK_DEPTH).toInt32() + shape->shortid;
1687 * The function sets *constp to the magic value JS_NO_CONSTANT when the atom
1688 * does not correspond to a name defining a constant.
1691 LookupCompileTimeConstant(JSContext *cx, JSCodeGenerator *cg, JSAtom *atom,
1698 * Chase down the cg stack, but only until we reach the outermost cg.
1699 * This enables propagating consts from top-level into switch cases in a
1700 * function compiled along with the top-level script.
1702 constp->setMagic(JS_NO_CONSTANT);
1704 if (cg->inFunction() || cg->compileAndGo()) {
1705 /* XXX this will need revising if 'const' becomes block-scoped. */
1706 stmt = js_LexicalLookup(cg, atom, NULL);
1710 if (JSCodeGenerator::ConstMap::Ptr p = cg->constMap.lookup(atom)) {
1711 JS_ASSERT(!p->value.isMagic(JS_NO_CONSTANT));
1717 * Try looking in the variable object for a direct property that
1718 * is readonly and permanent. We know such a property can't be
1719 * shadowed by another property on obj's prototype chain, or a
1720 * with object or catch variable; nor can prop's value be changed,
1721 * nor can prop be deleted.
1723 if (cg->inFunction()) {
1724 if (cg->bindings.hasBinding(cx, atom))
1727 JS_ASSERT(cg->compileAndGo());
1728 obj = cg->scopeChain();
1730 const Shape *shape = obj->nativeLookup(ATOM_TO_JSID(atom));
1733 * We're compiling code that will be executed immediately,
1734 * not re-executed against a different scope chain and/or
1735 * variable object. Therefore we can get constant values
1736 * from our variable object here.
1738 if (!shape->writable() && !shape->configurable() &&
1739 shape->hasDefaultGetter() && obj->containsSlot(shape->slot)) {
1740 *constp = obj->getSlot(shape->slot);
1748 } while (cg->parent && (cg = cg->parent->asCodeGenerator()));
1753 FitsWithoutBigIndex(uintN index)
1755 return index < JS_BIT(16);
1759 * Return JSOP_NOP to indicate that index fits 2 bytes and no index segment
1760 * reset instruction is necessary, JSOP_FALSE to indicate an error or either
1761 * JSOP_RESETBASE0 or JSOP_RESETBASE1 to indicate the reset bytecode to issue
1762 * after the main bytecode sequence.
1765 EmitBigIndexPrefix(JSContext *cx, JSCodeGenerator *cg, uintN index)
1770 * We have max 3 bytes for indexes and check for INDEX_LIMIT overflow only
1773 JS_STATIC_ASSERT(INDEX_LIMIT <= JS_BIT(24));
1774 JS_STATIC_ASSERT(INDEX_LIMIT >=
1775 (JSOP_INDEXBASE3 - JSOP_INDEXBASE1 + 2) << 16);
1777 if (FitsWithoutBigIndex(index))
1779 indexBase = index >> 16;
1780 if (indexBase <= JSOP_INDEXBASE3 - JSOP_INDEXBASE1 + 1) {
1781 if (js_Emit1(cx, cg, (JSOp)(JSOP_INDEXBASE1 + indexBase - 1)) < 0)
1783 return JSOP_RESETBASE0;
1786 if (index >= INDEX_LIMIT) {
1787 JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL,
1788 JSMSG_TOO_MANY_LITERALS);
1792 if (js_Emit2(cx, cg, JSOP_INDEXBASE, (JSOp)indexBase) < 0)
1794 return JSOP_RESETBASE;
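/*
 * Worked example (illustrative): for index 0x1ABCD, indexBase is 1, so
 * JSOP_INDEXBASE1 is emitted as a prefix, the main op carries the low 16 bits
 * (0xABCD), and the returned JSOP_RESETBASE0 must be emitted after it to
 * restore segment 0. Indexes below 1 << 16 need no prefix (JSOP_NOP is
 * returned); indexes at or above INDEX_LIMIT report JSMSG_TOO_MANY_LITERALS
 * and yield JSOP_FALSE.
 */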
1798 * Emit a bytecode and its 2-byte constant index immediate operand. If the
1799 * index requires more than 2 bytes, emit a prefix op whose 8-bit immediate
1800 * operand effectively extends the 16-bit immediate of the prefixed opcode,
1801 * by changing index "segment" (see jsinterp.c). We optimize segments 1-3
1802 * with single-byte JSOP_INDEXBASE[123] codes.
1804 * Such prefixing currently requires a suffix to restore the "zero segment"
1805 * register setting, but this could be optimized further.
1808 EmitIndexOp(JSContext *cx, JSOp op, uintN index, JSCodeGenerator *cg)
1812 bigSuffix = EmitBigIndexPrefix(cx, cg, index);
1813 if (bigSuffix == JSOP_FALSE)
1815 EMIT_UINT16_IMM_OP(op, index);
1816 return bigSuffix == JSOP_NOP || js_Emit1(cx, cg, bigSuffix) >= 0;
1820 * Slight sugar for EmitIndexOp, again accessing cx and cg from the macro
1821 * caller's lexical environment, and embedding a false return on error.
1823 #define EMIT_INDEX_OP(op, index) \
1825 if (!EmitIndexOp(cx, op, index, cg)) \
1830 EmitAtomOp(JSContext *cx, JSParseNode *pn, JSOp op, JSCodeGenerator *cg)
1832 JSAtomListElement *ale;
1834 JS_ASSERT(JOF_OPTYPE(op) == JOF_ATOM);
1835 if (op == JSOP_GETPROP &&
1836 pn->pn_atom == cx->runtime->atomState.lengthAtom) {
1837 return js_Emit1(cx, cg, JSOP_LENGTH) >= 0;
1839 ale = cg->atomList.add(cg->parser, pn->pn_atom);
1842 return EmitIndexOp(cx, op, ALE_INDEX(ale), cg);
1846 EmitObjectOp(JSContext *cx, JSObjectBox *objbox, JSOp op,
1847 JSCodeGenerator *cg)
1849 JS_ASSERT(JOF_OPTYPE(op) == JOF_OBJECT);
1850 return EmitIndexOp(cx, op, cg->objectList.index(objbox), cg);
1854 * What good are ARGNO_LEN and SLOTNO_LEN, you ask? The answer is that, apart
1855 * from EmitSlotIndexOp, they abstract out the detail that both are 2, and in
1856 * other parts of the code there's no necessary relationship between the two.
1857 * The abstraction cracks here in order to share EmitSlotIndexOp code among
1858 * the JSOP_DEFLOCALFUN and JSOP_GET{ARG,VAR,LOCAL}PROP cases.
1860 JS_STATIC_ASSERT(ARGNO_LEN == 2);
1861 JS_STATIC_ASSERT(SLOTNO_LEN == 2);
1864 EmitSlotIndexOp(JSContext *cx, JSOp op, uintN slot, uintN index,
1865 JSCodeGenerator *cg)
1871 JS_ASSERT(JOF_OPTYPE(op) == JOF_SLOTATOM ||
1872 JOF_OPTYPE(op) == JOF_SLOTOBJECT);
1873 bigSuffix = EmitBigIndexPrefix(cx, cg, index);
1874 if (bigSuffix == JSOP_FALSE)
1877 /* Emit [op, slot, index]. */
1878 off = js_EmitN(cx, cg, op, 2 + INDEX_LEN);
1881 pc = CG_CODE(cg, off);
1882 SET_UINT16(pc, slot);
1884 SET_INDEX(pc, index);
1885 return bigSuffix == JSOP_NOP || js_Emit1(cx, cg, bigSuffix) >= 0;
1889 JSCodeGenerator::shouldNoteClosedName(JSParseNode *pn)
1891 return !callsEval() && pn->pn_defn && pn->isClosed();
1895 * Adjust the slot for a block local to account for the number of variables
1896 * that share the same index space with locals. Due to the incremental code
1897 * generation for top-level script, we do the adjustment via code patching in
1898 * Compiler::compileScript; see comments there.
1900 * The function returns -1 on failures.
1903 AdjustBlockSlot(JSContext *cx, JSCodeGenerator *cg, jsint slot)
1905 JS_ASSERT((jsuint) slot < cg->maxStackDepth);
1906 if (cg->inFunction()) {
1907 slot += cg->bindings.countVars();
1908 if ((uintN) slot >= SLOTNO_LIMIT) {
1909 ReportCompileErrorNumber(cx, CG_TS(cg), NULL, JSREPORT_ERROR, JSMSG_TOO_MANY_LOCALS);
1917 EmitEnterBlock(JSContext *cx, JSParseNode *pn, JSCodeGenerator *cg)
1919 JS_ASSERT(PN_TYPE(pn) == TOK_LEXICALSCOPE);
1920 if (!EmitObjectOp(cx, pn->pn_objbox, JSOP_ENTERBLOCK, cg))
1923 JSObject *blockObj = pn->pn_objbox->object;
1924 jsint depth = AdjustBlockSlot(cx, cg, OBJ_BLOCK_DEPTH(cx, blockObj));
1928 uintN base = JSSLOT_FREE(&js_BlockClass);
1929 for (uintN slot = base, limit = base + OBJ_BLOCK_COUNT(cx, blockObj); slot < limit; slot++) {
1930 const Value &v = blockObj->getSlot(slot);
1932 /* Beware the empty destructuring dummy. */
1933 if (v.isUndefined()) {
1934 JS_ASSERT(slot + 1 <= limit);
1938 JSDefinition *dn = (JSDefinition *) v.toPrivate();
1939 JS_ASSERT(dn->pn_defn);
1940 JS_ASSERT(uintN(dn->frameSlot() + depth) < JS_BIT(16));
1941 dn->pn_cookie.set(dn->pn_cookie.level(), uint16(dn->frameSlot() + depth));
1943 for (JSParseNode *pnu = dn->dn_uses; pnu; pnu = pnu->pn_link) {
1944 JS_ASSERT(pnu->pn_lexdef == dn);
1945 JS_ASSERT(!(pnu->pn_dflags & PND_BOUND));
1946 JS_ASSERT(pnu->pn_cookie.isFree());
1951 * If this variable is closed over, and |eval| is not present, then
1952 * set a bit in dslots so the Method JIT can deoptimize this
1955 bool isClosed = cg->shouldNoteClosedName(dn);
1956 blockObj->setSlot(slot, BooleanValue(isClosed));
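/*
 * Worked example (illustrative): in a function that already has three vars, a
 * let block whose OBJ_BLOCK_DEPTH is 5 and which binds two names gets
 * AdjustBlockSlot(5) == 5 + 3 == 8, so the two definitions are rewritten to
 * frame slots 8 and 9 and their uses become JSOP_GETLOCAL/JSOP_SETLOCAL on
 * those slots, provided the result stays below SLOTNO_LIMIT.
 */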
1963 EmitLeaveBlock(JSContext *cx, JSCodeGenerator *cg, JSOp op,
1967 uintN count = OBJ_BLOCK_COUNT(cx, box->object);
1969 bigSuffix = EmitBigIndexPrefix(cx, cg, box->index);
1970 if (bigSuffix == JSOP_FALSE)
1972 if (js_Emit5(cx, cg, op, count, box->index) < 0)
1974 return bigSuffix == JSOP_NOP || js_Emit1(cx, cg, bigSuffix) >= 0;
1978 * Try to convert a *NAME op to a *GNAME op, which optimizes access to
1979 * undeclared globals. Return true if a conversion was made.
1981 * This conversion is not made if we are in strict mode. In eval code nested
1982 * within (strict mode) eval code, access to an undeclared "global" might
1983 * merely be to a binding local to that outer eval:
1987 * eval('var x = "eval"; eval("x");'); // 'eval', not 'global'
1989 * Outside eval code, access to an undeclared global is a strict mode error:
1994 * undeclared = 17; // throws ReferenceError
1999 TryConvertToGname(JSCodeGenerator *cg, JSParseNode *pn, JSOp *op)
2001 if (cg->compileAndGo() &&
2002 cg->compiler()->globalScope->globalObj &&
2003 !cg->mightAliasLocals() &&
2004 !pn->isDeoptimized() &&
2005 !(cg->flags & TCF_STRICT_MODE_CODE)) {
2007 case JSOP_NAME: *op = JSOP_GETGNAME; break;
2008 case JSOP_SETNAME: *op = JSOP_SETGNAME; break;
2009 case JSOP_INCNAME: *op = JSOP_INCGNAME; break;
2010 case JSOP_NAMEINC: *op = JSOP_GNAMEINC; break;
2011 case JSOP_DECNAME: *op = JSOP_DECGNAME; break;
2012 case JSOP_NAMEDEC: *op = JSOP_GNAMEDEC; break;
2013 case JSOP_FORNAME: *op = JSOP_FORGNAME; break;
2016 /* Not supported. */
2018 default: JS_NOT_REACHED("gname");
2025 // Binds a global, given a |dn| that is known to have the PND_GVAR bit, and a pn
2026 // that is |dn| or whose definition is |dn|. |pn->pn_cookie| is an outparam
2027 // that will be free (meaning no binding), or a slot number.
2029 BindKnownGlobal(JSContext *cx, JSCodeGenerator *cg, JSParseNode *dn, JSParseNode *pn, JSAtom *atom)
2031 // Cookie is an outparam; make sure caller knew to clear it.
2032 JS_ASSERT(pn->pn_cookie.isFree());
2034 if (cg->mightAliasLocals())
2037 GlobalScope *globalScope = cg->compiler()->globalScope;
2040 if (dn->pn_cookie.isFree()) {
2041 // The definition wasn't bound, so find its atom's index in the
2042 // mapping of defined globals.
2043 JSAtomListElement *ale = globalScope->names.lookup(atom);
2044 index = ALE_INDEX(ale);
2046 JSCodeGenerator *globalcg = globalScope->cg;
2048 // If the definition is bound, and we're in the same cg, we can re-use
2050 if (globalcg == cg) {
2051 pn->pn_cookie = dn->pn_cookie;
2052 pn->pn_dflags |= PND_BOUND;
2056 // Otherwise, find the atom's index by using the originating cg's
2057 // global use table.
2058 index = globalcg->globalUses[dn->pn_cookie.asInteger()].slot;
2061 if (!cg->addGlobalUse(atom, index, &pn->pn_cookie))
2064 if (!pn->pn_cookie.isFree())
2065 pn->pn_dflags |= PND_BOUND;
2070 // See BindKnownGlobal()'s comment.
2072 BindGlobal(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn, JSAtom *atom)
2074 pn->pn_cookie.makeFree();
2082 dn = (JSDefinition *)pn;
2085 // Only optimize for defined globals.
2086 if (!dn->isGlobal())
2089 return BindKnownGlobal(cx, cg, dn, pn, atom);
2093 * BindNameToSlot attempts to optimize name gets and sets to stack slot loads
2094 * and stores, given the compile-time information in cg and a TOK_NAME node pn.
2095 * It returns false on error, true on success.
2097 * The caller can inspect pn->pn_cookie for FREE_UPVAR_COOKIE to tell whether
2098 * optimization occurred, in which case BindNameToSlot also updated pn->pn_op.
2099 * If pn->pn_cookie is still FREE_UPVAR_COOKIE on return, pn->pn_op still may
2100 * have been optimized, e.g., from JSOP_NAME to JSOP_CALLEE. Whether or not
2101 * pn->pn_op was modified, if this function finds an argument or local variable
2102 * name, PND_CONST will be set in pn_dflags for read-only properties after a
2103 * successful return.
2105 * NB: if you add more opcodes specialized from JSOP_NAME, etc., don't forget
2106 * to update the TOK_FOR (for-in) and TOK_ASSIGN (op=, e.g. +=) special cases
2110 BindNameToSlot(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn)
2115 JSDefinition::Kind dn_kind;
2116 JSAtomListElement *ale;
2119 JS_ASSERT(pn->pn_type == TOK_NAME);
2121 /* Idempotency tests come first, since we may be called more than once. */
2122 if (pn->pn_dflags & PND_BOUND)
2125 /* No cookie initialized for these two, they're pre-bound by definition. */
2126 JS_ASSERT(pn->pn_op != JSOP_ARGUMENTS && pn->pn_op != JSOP_CALLEE);
2129 * The parser linked all uses (including forward references) to their
2130 * definitions, unless a with statement or direct eval intervened.
2133 JS_ASSERT(pn->pn_cookie.isFree());
2135 JS_ASSERT(dn->pn_defn);
2136 if (pn->isDeoptimized())
2138 pn->pn_dflags |= (dn->pn_dflags & PND_CONST);
2142 dn = (JSDefinition *) pn;
2149 JS_ASSERT(JOF_OPTYPE(op) == JOF_ATOM);
2151 UpvarCookie cookie = dn->pn_cookie;
2152 dn_kind = dn->kind();
2155 * Turn attempts to mutate const-declared bindings into get ops (for
2156 * pre-increment and pre-decrement ops, our caller will have to emit
2157 * JSOP_POS, JSOP_ONE, and JSOP_ADD as well).
2159 * Turn JSOP_DELNAME into JSOP_FALSE if dn is known, as all declared
2160 * bindings visible to the compiler are permanent in JS unless the
2161 * declaration originates at top level in eval code.
2168 if (dn_kind != JSDefinition::UNKNOWN) {
2169 if (cg->parser->callerFrame && dn->isTopLevel())
2170 JS_ASSERT(cg->compileAndGo());
2172 pn->pn_op = JSOP_FALSE;
2173 pn->pn_dflags |= PND_BOUND;
2179 pn->pn_op = op = JSOP_NAME;
2182 if (dn->isGlobal()) {
2183 if (op == JSOP_NAME) {
2185 * If the definition is a defined global, not potentially aliased
2186 * by a local variable, and not mutating the variable, try and
2187 * optimize to a fast, unguarded global access.
2189 if (!BindKnownGlobal(cx, cg, dn, pn, atom))
2191 if (!pn->pn_cookie.isFree()) {
2192 pn->pn_op = JSOP_GETGLOBAL;
2198 * The locally stored cookie here should really come from |pn|, not
2199 * |dn|. For example, we could have a SETGNAME op's lexdef be a
2200 * GETGLOBAL op, and their cookies have very different meanings. As
2201 * a workaround, just make the cookie free.
2206 if (cookie.isFree()) {
2207 JSStackFrame *caller = cg->parser->callerFrame;
2209 JS_ASSERT(cg->compileAndGo());
2212 * Don't generate upvars on the left side of a for loop. See
2215 if (cg->flags & TCF_IN_FOR_INIT)
2218 JS_ASSERT(caller->isScriptFrame());
2221 * If this is an eval in the global scope, then unbound variables
2222 * must be globals, so try to use GNAME ops.
2224 if (caller->isGlobalFrame() && TryConvertToGname(cg, pn, &op)) {
2225 ale = cg->atomList.add(cg->parser, atom);
2230 pn->pn_dflags |= PND_BOUND;
2235 * Out of tricks, so we must rely on PICs to optimize named
2236 * accesses from direct eval called from function code.
2241 /* Optimize accesses to undeclared globals. */
2242 if (!cg->mightAliasLocals() && !TryConvertToGname(cg, pn, &op))
2245 ale = cg->atomList.add(cg->parser, atom);
2250 pn->pn_dflags |= PND_BOUND;
2255 uint16 level = cookie.level();
2256 JS_ASSERT(cg->staticLevel >= level);
2258 const uintN skip = cg->staticLevel - level;
2260 JS_ASSERT(cg->inFunction());
2261 JS_ASSERT_IF(cookie.slot() != UpvarCookie::CALLEE_SLOT, cg->lexdeps.lookup(atom));
2262 JS_ASSERT(JOF_OPTYPE(op) == JOF_ATOM);
2263 JS_ASSERT(cg->fun()->u.i.skipmin <= skip);
2266 * If op is a mutating opcode, this upvar's lookup skips too many levels,
2267 * or the function is heavyweight, we fall back on JSOP_*NAME*.
2269 if (op != JSOP_NAME)
2271 if (level >= UpvarCookie::UPVAR_LEVEL_LIMIT)
2273 if (cg->flags & TCF_FUN_HEAVYWEIGHT)
2276 if (!cg->fun()->isFlatClosure())
2279 ale = cg->upvarList.lookup(atom);
2281 index = ALE_INDEX(ale);
2283 if (!cg->bindings.addUpvar(cx, atom))
2286 ale = cg->upvarList.add(cg->parser, atom);
2289 index = ALE_INDEX(ale);
2290 JS_ASSERT(index == cg->upvarList.count - 1);
2292 UpvarCookie *vector = cg->upvarMap.vector;
2293 uint32 length = cg->lexdeps.count;
2294 if (!vector || cg->upvarMap.length != length) {
2295 vector = (UpvarCookie *) js_realloc(vector, length * sizeof *vector);
2297 JS_ReportOutOfMemory(cx);
2300 cg->upvarMap.vector = vector;
2301 cg->upvarMap.length = length;
2304 uintN slot = cookie.slot();
2305 if (slot != UpvarCookie::CALLEE_SLOT && dn_kind != JSDefinition::ARG) {
2306 JSTreeContext *tc = cg;
2309 } while (tc->staticLevel != level);
2310 if (tc->inFunction())
2311 slot += tc->fun()->nargs;
2314 JS_ASSERT(index < cg->upvarMap.length);
2315 vector[index].set(skip, slot);
2318 pn->pn_op = JSOP_GETFCSLOT;
2319 JS_ASSERT((index & JS_BITMASK(16)) == index);
2320 pn->pn_cookie.set(0, index);
2321 pn->pn_dflags |= PND_BOUND;
2326 * We are compiling a function body and may be able to optimize name
2327 * to stack slot. Look for an argument or variable in the function and
2328 * rewrite pn_op and update pn accordingly.
2331 case JSDefinition::UNKNOWN:
2334 case JSDefinition::LET:
2336 case JSOP_NAME: op = JSOP_GETLOCAL; break;
2337 case JSOP_SETNAME: op = JSOP_SETLOCAL; break;
2338 case JSOP_INCNAME: op = JSOP_INCLOCAL; break;
2339 case JSOP_NAMEINC: op = JSOP_LOCALINC; break;
2340 case JSOP_DECNAME: op = JSOP_DECLOCAL; break;
2341 case JSOP_NAMEDEC: op = JSOP_LOCALDEC; break;
2342 case JSOP_FORNAME: op = JSOP_FORLOCAL; break;
2343 default: JS_NOT_REACHED("let");
2347 case JSDefinition::ARG:
2349 case JSOP_NAME: op = JSOP_GETARG; break;
2350 case JSOP_SETNAME: op = JSOP_SETARG; break;
2351 case JSOP_INCNAME: op = JSOP_INCARG; break;
2352 case JSOP_NAMEINC: op = JSOP_ARGINC; break;
2353 case JSOP_DECNAME: op = JSOP_DECARG; break;
2354 case JSOP_NAMEDEC: op = JSOP_ARGDEC; break;
2355 case JSOP_FORNAME: op = JSOP_FORARG; break;
2356 default: JS_NOT_REACHED("arg");
2358 JS_ASSERT(!pn->isConst());
2361 case JSDefinition::VAR:
2362 if (PN_OP(dn) == JSOP_CALLEE) {
2363 JS_ASSERT(op != JSOP_CALLEE);
2364 JS_ASSERT((cg->fun()->flags & JSFUN_LAMBDA) && atom == cg->fun()->atom);
2367 * Leave pn->pn_op == JSOP_NAME if cg->fun is heavyweight to
2368 * address two cases: a new binding introduced by eval, and
2369 * assignment to the name in strict mode.
2371 * var fun = (function f(s) { eval(s); return f; });
2372 * assertEq(fun("var f = 42"), 42);
2374 * ECMAScript specifies that a function expression's name is bound
2375 * in a lexical environment distinct from that used to bind its
2376 * named parameters, the arguments object, and its variables. The
2377 * new binding for "var f = 42" shadows the binding for the
2378 * function itself, so the name of the function will not refer to
2381 * (function f() { "use strict"; f = 12; })();
2383 * Outside strict mode, assignment to a function expression's name
2384 * has no effect. But in strict mode, this attempt to mutate an
2385 * immutable binding must throw a TypeError. We implement this by
2386 * not optimizing such assignments and by marking such functions as
2387 * heavyweight, ensuring that the function name is represented in
2388 * the scope chain so that assignment will throw a TypeError.
2390 JS_ASSERT(op != JSOP_DELNAME);
2391 if (!(cg->flags & TCF_FUN_HEAVYWEIGHT)) {
2393 pn->pn_dflags |= PND_CONST;
2397 pn->pn_dflags |= PND_BOUND;
2403 JS_ASSERT_IF(dn_kind != JSDefinition::FUNCTION,
2404 dn_kind == JSDefinition::VAR ||
2405 dn_kind == JSDefinition::CONST);
2407 case JSOP_NAME: op = JSOP_GETLOCAL; break;
2408 case JSOP_SETNAME: op = JSOP_SETLOCAL; break;
2409 case JSOP_SETCONST: op = JSOP_SETLOCAL; break;
2410 case JSOP_INCNAME: op = JSOP_INCLOCAL; break;
2411 case JSOP_NAMEINC: op = JSOP_LOCALINC; break;
2412 case JSOP_DECNAME: op = JSOP_DECLOCAL; break;
2413 case JSOP_NAMEDEC: op = JSOP_LOCALDEC; break;
2414 case JSOP_FORNAME: op = JSOP_FORLOCAL; break;
2415 default: JS_NOT_REACHED("local");
2417 JS_ASSERT_IF(dn_kind == JSDefinition::CONST, pn->pn_dflags & PND_CONST);
2421 JS_ASSERT(op != PN_OP(pn));
2423 pn->pn_cookie.set(0, cookie.slot());
2424 pn->pn_dflags |= PND_BOUND;
2429 JSCodeGenerator::addGlobalUse(JSAtom *atom, uint32 slot, UpvarCookie *cookie)
2431 JSAtomListElement *ale = globalMap.lookup(atom);
2433 cookie->set(0, uint16(ALE_INDEX(ale)));
2437 /* Don't bother encoding indexes that don't fit in 16 bits (>= UINT16_LIMIT). */
2438 if (globalUses.length() >= UINT16_LIMIT) {
2443 /* Find or add an existing atom table entry. */
2444 ale = atomList.add(parser, atom);
2448 cookie->set(0, globalUses.length());
2450 GlobalSlotArray::Entry entry = { ALE_INDEX(ale), slot };
2451 if (!globalUses.append(entry))
2454 ale = globalMap.add(parser, atom);
2458 ALE_SET_INDEX(ale, cookie->asInteger());
2463 * If pn contains a useful expression, return true with *answer set to true.
2464 * If pn contains a useless expression, return true with *answer set to false.
2465 * Return false on error.
2467 * The caller should initialize *answer to false and invoke this function on
2468 * an expression statement or similar subtree to decide whether the tree could
2469 * produce code that has any side effects. For an expression statement, we
2470 * define useless code as code with no side effects, because the main effect,
2471 * the value left on the stack after the code executes, will be discarded by a
2472 * pop bytecode.
2475 CheckSideEffects(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn,
2485 switch (pn->pn_arity) {
2488 * A named function, contrary to ES3, is no longer useful, because we
2489 * bind its name lexically (using JSOP_CALLEE) instead of creating an
2490 * Object instance and binding a readonly, permanent property in it
2491 * (the object and binding can be detected and hijacked or captured).
2492 * This is a bug fix to ES3; it is fixed in ES3.1 drafts.
2498 if (pn->pn_op == JSOP_NOP ||
2499 pn->pn_op == JSOP_OR || pn->pn_op == JSOP_AND ||
2500 pn->pn_op == JSOP_STRICTEQ || pn->pn_op == JSOP_STRICTNE) {
2502 * Non-operators along with ||, &&, ===, and !== never invoke
2503 * toString or valueOf.
2505 for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next)
2506 ok &= CheckSideEffects(cx, cg, pn2, answer);
2509 * All invocation operations (construct: TOK_NEW, call: TOK_LP)
2510 * are presumed to be useful, because they may have side effects
2511 * even if their main effect (their return value) is discarded.
2513 * TOK_LB binary trees of 3 or more nodes are flattened into lists
2514 * to avoid too much recursion. All such lists must be presumed
2515 * to be useful because each index operation could invoke a getter
2516 * (the JSOP_ARGUMENTS special case below, in the PN_BINARY case,
2517 * does not apply here: arguments[i][j] might invoke a getter).
2519 * Likewise, array and object initialisers may call prototype
2520 * setters (the __defineSetter__ built-in, and writable __proto__
2521 * on Array.prototype create this hazard). Initialiser list nodes
2522 * have JSOP_NEWINIT in their pn_op.
2529 ok = CheckSideEffects(cx, cg, pn->pn_kid1, answer) &&
2530 CheckSideEffects(cx, cg, pn->pn_kid2, answer) &&
2531 CheckSideEffects(cx, cg, pn->pn_kid3, answer);
2535 if (pn->pn_type == TOK_ASSIGN) {
2537 * Assignment is presumed to be useful, even if the next operation
2538 * is another assignment overwriting this one's ostensible effect,
2539 * because the left operand may be a property with a setter that
2540 * has side effects.
2542 * The only exception is assignment of a useless value to a const
2543 * declared in the function currently being compiled.
2546 if (pn2->pn_type != TOK_NAME) {
2549 if (!BindNameToSlot(cx, cg, pn2))
2551 if (!CheckSideEffects(cx, cg, pn->pn_right, answer))
2553 if (!*answer && (pn->pn_op != JSOP_NOP || !pn2->isConst()))
2557 if (pn->pn_op == JSOP_OR || pn->pn_op == JSOP_AND ||
2558 pn->pn_op == JSOP_STRICTEQ || pn->pn_op == JSOP_STRICTNE) {
2560 * ||, &&, ===, and !== do not convert their operands via
2561 * toString or valueOf method calls.
2563 ok = CheckSideEffects(cx, cg, pn->pn_left, answer) &&
2564 CheckSideEffects(cx, cg, pn->pn_right, answer);
2567 * We can't easily prove that neither operand ever denotes an
2568 * object with a toString or valueOf method.
2576 switch (pn->pn_type) {
2579 switch (pn2->pn_type) {
2581 if (!BindNameToSlot(cx, cg, pn2))
2583 if (pn2->isConst()) {
2589 #if JS_HAS_XML_SUPPORT
2594 /* All these delete addressing modes have effects too. */
2598 ok = CheckSideEffects(cx, cg, pn2, answer);
2604 if (pn->pn_op == JSOP_NOT) {
2605 /* ! does not convert its operand via toString or valueOf. */
2606 ok = CheckSideEffects(cx, cg, pn->pn_kid, answer);
2613 * All of TOK_INC, TOK_DEC, TOK_THROW, TOK_YIELD, and TOK_DEFSHARP
2614 * have direct effects. Of the remaining unary-arity node types,
2615 * we can't easily prove that the operand never denotes an object
2616 * with a toString or valueOf method.
2625 * Take care to avoid trying to bind a label name (labels, both for
2626 * statements and property values in object initialisers, have pn_op
2627 * defaulted to JSOP_NOP).
2629 if (pn->pn_type == TOK_NAME && pn->pn_op != JSOP_NOP) {
2630 if (!BindNameToSlot(cx, cg, pn))
2632 if (pn->pn_op != JSOP_ARGUMENTS && pn->pn_op != JSOP_CALLEE &&
2633 pn->pn_cookie.isFree()) {
2635 * Not an argument or local variable use, and not a use of an
2636 * unshadowed named function expression's given name, so this
2637 * expression could invoke a getter that has side effects.
2642 pn2 = pn->maybeExpr();
2643 if (pn->pn_type == TOK_DOT) {
2644 if (pn2->pn_type == TOK_NAME && !BindNameToSlot(cx, cg, pn2))
2646 if (!(pn2->pn_op == JSOP_ARGUMENTS &&
2647 pn->pn_atom == cx->runtime->atomState.lengthAtom)) {
2649 * Any dotted property reference could call a getter, except
2650 * for arguments.length where arguments is unambiguous.
2655 ok = CheckSideEffects(cx, cg, pn2, answer);
2659 ok = CheckSideEffects(cx, cg, pn->pn_tree, answer);
2663 if (pn->pn_type == TOK_DEBUGGER)
2671 EmitNameOp(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn,
2676 if (!BindNameToSlot(cx, cg, pn))
2686 op = JSOP_CALLGNAME;
2688 case JSOP_GETGLOBAL:
2689 op = JSOP_CALLGLOBAL;
2695 op = JSOP_CALLLOCAL;
2697 case JSOP_GETFCSLOT:
2698 op = JSOP_CALLFCSLOT;
2701 JS_ASSERT(op == JSOP_ARGUMENTS || op == JSOP_CALLEE);
2706 if (op == JSOP_ARGUMENTS || op == JSOP_CALLEE) {
2707 if (js_Emit1(cx, cg, op) < 0)
2709 if (callContext && js_Emit1(cx, cg, JSOP_PUSH) < 0)
2712 if (!pn->pn_cookie.isFree()) {
2713 EMIT_UINT16_IMM_OP(op, pn->pn_cookie.asInteger());
2715 if (!EmitAtomOp(cx, pn, op, cg))
2723 #if JS_HAS_XML_SUPPORT
2725 EmitXMLName(JSContext *cx, JSParseNode *pn, JSOp op, JSCodeGenerator *cg)
2730 JS_ASSERT(pn->pn_type == TOK_UNARYOP);
2731 JS_ASSERT(pn->pn_op == JSOP_XMLNAME);
2732 JS_ASSERT(op == JSOP_XMLNAME || op == JSOP_CALLXMLNAME);
2735 oldflags = cg->flags;
2736 cg->flags &= ~TCF_IN_FOR_INIT;
2737 if (!js_EmitTree(cx, cg, pn2))
2739 cg->flags |= oldflags & TCF_IN_FOR_INIT;
2740 if (js_NewSrcNote2(cx, cg, SRC_PCBASE,
2741 CG_OFFSET(cg) - pn2->pn_offset) < 0) {
2745 return js_Emit1(cx, cg, op) >= 0;
2750 EmitSpecialPropOp(JSContext *cx, JSParseNode *pn, JSOp op, JSCodeGenerator *cg)
2753 * Special case for obj.__proto__ to deoptimize away from fast paths in the
2754 * interpreter and trace recorder, which skip dense array instances by
2755 * going up to Array.prototype before looking up the property name.
2757 JSAtomListElement *ale = cg->atomList.add(cg->parser, pn->pn_atom);
2760 if (!EmitIndexOp(cx, JSOP_QNAMEPART, ALE_INDEX(ale), cg))
2762 if (js_Emit1(cx, cg, op) < 0)
2768 EmitPropOp(JSContext *cx, JSParseNode *pn, JSOp op, JSCodeGenerator *cg,
2771 JSParseNode *pn2, *pndot, *pnup, *pndown;
2774 JS_ASSERT(pn->pn_arity == PN_NAME);
2775 pn2 = pn->maybeExpr();
2777 /* Special case deoptimization for __proto__. */
2778 if ((op == JSOP_GETPROP || op == JSOP_CALLPROP) &&
2779 pn->pn_atom == cx->runtime->atomState.protoAtom) {
2780 if (pn2 && !js_EmitTree(cx, cg, pn2))
2782 return EmitSpecialPropOp(cx, pn, callContext ? JSOP_CALLELEM : JSOP_GETELEM, cg);
2786 JS_ASSERT(pn->pn_type == TOK_DOT);
2787 JS_ASSERT(op == JSOP_GETPROP);
2789 } else if (op == JSOP_GETPROP && pn->pn_type == TOK_DOT) {
2790 if (pn2->pn_op == JSOP_THIS) {
2791 if (pn->pn_atom != cx->runtime->atomState.lengthAtom) {
2792 /* Fast path for gets of |this.foo|. */
2793 return EmitAtomOp(cx, pn, JSOP_GETTHISPROP, cg);
2795 } else if (pn2->pn_type == TOK_NAME) {
2797 * Try to optimize:
2798 * - arguments.length into JSOP_ARGCNT
2799 * - argname.prop into JSOP_GETARGPROP
2800 * - localname.prop into JSOP_GETLOCALPROP
2801 * but don't do this if the property is 'length' -- prefer to emit
2802 * JSOP_GETARG, etc., and then JSOP_LENGTH.
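/*
 * Illustrative sketch of the optimizations just listed (names are
 * hypothetical, not from the original source): given
 *
 *   function f(a) { return arguments.length + a.x; }
 *
 * the arguments.length read becomes a single JSOP_ARGCNT and a.x can be
 * fetched with JSOP_GETARGPROP, whereas a.length still emits JSOP_GETARG
 * followed by JSOP_LENGTH, per the note above.
 */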
2804 if (!BindNameToSlot(cx, cg, pn2))
2806 if (pn->pn_atom == cx->runtime->atomState.lengthAtom) {
2807 if (pn2->pn_op == JSOP_ARGUMENTS)
2808 return js_Emit1(cx, cg, JSOP_ARGCNT) >= 0;
2810 switch (pn2->pn_op) {
2812 op = JSOP_GETARGPROP;
2815 op = JSOP_GETLOCALPROP;
2817 JSAtomListElement *ale;
2820 ale = cg->atomList.add(cg->parser, pn->pn_atom);
2823 atomIndex = ALE_INDEX(ale);
2824 return EmitSlotIndexOp(cx, op, pn2->pn_cookie.asInteger(), atomIndex, cg);
2834 * If the object operand is also a dotted property reference, reverse the
2835 * list linked via pn_expr temporarily so we can iterate over it from the
2836 * bottom up (reversing again as we go), to avoid excessive recursion.
2838 if (pn2->pn_type == TOK_DOT) {
2841 top = CG_OFFSET(cg);
2843 /* Reverse pndot->pn_expr to point up, not down. */
2844 pndot->pn_offset = top;
2845 JS_ASSERT(!pndot->pn_used);
2846 pndown = pndot->pn_expr;
2847 pndot->pn_expr = pnup;
2848 if (pndown->pn_type != TOK_DOT)
2854 /* pndown is a primary expression, not a dotted property reference. */
2855 if (!js_EmitTree(cx, cg, pndown))
2859 /* Walk back up the list, emitting annotated name ops. */
2860 if (js_NewSrcNote2(cx, cg, SRC_PCBASE,
2861 CG_OFFSET(cg) - pndown->pn_offset) < 0) {
2865 /* Special case deoptimization on __proto__, as above. */
2866 if (pndot->pn_arity == PN_NAME && pndot->pn_atom == cx->runtime->atomState.protoAtom) {
2867 if (!EmitSpecialPropOp(cx, pndot, JSOP_GETELEM, cg))
2869 } else if (!EmitAtomOp(cx, pndot, PN_OP(pndot), cg)) {
2873 /* Reverse the pn_expr link again. */
2874 pnup = pndot->pn_expr;
2875 pndot->pn_expr = pndown;
2877 } while ((pndot = pnup) != NULL);
2879 if (!js_EmitTree(cx, cg, pn2))
2883 if (js_NewSrcNote2(cx, cg, SRC_PCBASE,
2884 CG_OFFSET(cg) - pn2->pn_offset) < 0) {
2888 return EmitAtomOp(cx, pn, op, cg);
2892 EmitElemOp(JSContext *cx, JSParseNode *pn, JSOp op, JSCodeGenerator *cg)
2895 JSParseNode *left, *right, *next, ltmp, rtmp;
2898 top = CG_OFFSET(cg);
2899 if (pn->pn_arity == PN_LIST) {
2900 /* Left-associative operator chain to avoid too much recursion. */
2901 JS_ASSERT(pn->pn_op == JSOP_GETELEM);
2902 JS_ASSERT(pn->pn_count >= 3);
2905 next = left->pn_next;
2906 JS_ASSERT(next != right);
2909 * Try to optimize arguments[0][j]... into JSOP_ARGSUB<0> followed by
2910 * one or more index-expression/JSOP_GETELEM op pairs.
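/*
 * A minimal sketch of the effect (illustrative): when the guard below allows
 * it (non-strict code, or strict code that neither mutates a parameter nor
 * calls eval),
 *
 *   function g() { return arguments[0][2]; }
 *
 * starts with JSOP_ARGSUB<0> for arguments[0], then emits the index 2 and a
 * JSOP_GETELEM for the remaining subscript.
 */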
2912 if (left->pn_type == TOK_NAME && next->pn_type == TOK_NUMBER) {
2913 if (!BindNameToSlot(cx, cg, left))
2915 if (left->pn_op == JSOP_ARGUMENTS &&
2916 JSDOUBLE_IS_INT32(next->pn_dval, &slot) &&
2917 jsuint(slot) < JS_BIT(16) &&
2918 (!cg->inStrictMode() ||
2919 (!cg->mutatesParameter() && !cg->callsEval()))) {
2921 * arguments[i]() requires the arguments object as "this".
2922 * Check that we never generate a list for that usage.
2924 JS_ASSERT(op != JSOP_CALLELEM || next->pn_next);
2925 left->pn_offset = next->pn_offset = top;
2926 EMIT_UINT16_IMM_OP(JSOP_ARGSUB, (jsatomid)slot);
2928 next = left->pn_next;
2933 * Check whether we generated JSOP_ARGSUB, just above, and have only
2934 * one more index expression to emit. Given arguments[0][j], we must
2935 * skip the while loop altogether, falling through to emit code for j
2936 * (in the subtree referenced by right), followed by the annotated op,
2937 * at the bottom of this function.
2939 JS_ASSERT(next != right || pn->pn_count == 3);
2940 if (left == pn->pn_head) {
2941 if (!js_EmitTree(cx, cg, left))
2944 while (next != right) {
2945 if (!js_EmitTree(cx, cg, next))
2947 if (js_NewSrcNote2(cx, cg, SRC_PCBASE, CG_OFFSET(cg) - top) < 0)
2949 if (js_Emit1(cx, cg, JSOP_GETELEM) < 0)
2951 next = next->pn_next;
2954 if (pn->pn_arity == PN_NAME) {
2956 * Set left and right so pn appears to be a TOK_LB node, instead
2957 * of a TOK_DOT node. See the TOK_FOR/IN case in js_EmitTree, and
2958 * EmitDestructuringOps nearer below. In the destructuring case,
2959 * the base expression (pn_expr) of the name may be null, which
2960 * means we have to emit a JSOP_BINDNAME.
2962 left = pn->maybeExpr();
2965 left->pn_type = TOK_STRING;
2966 left->pn_op = JSOP_BINDNAME;
2967 left->pn_arity = PN_NULLARY;
2968 left->pn_pos = pn->pn_pos;
2969 left->pn_atom = pn->pn_atom;
2972 right->pn_type = TOK_STRING;
2973 right->pn_op = js_IsIdentifier(ATOM_TO_STRING(pn->pn_atom))
2976 right->pn_arity = PN_NULLARY;
2977 right->pn_pos = pn->pn_pos;
2978 right->pn_atom = pn->pn_atom;
2980 JS_ASSERT(pn->pn_arity == PN_BINARY);
2982 right = pn->pn_right;
2985 /* Try to optimize arguments[0] (e.g.) into JSOP_ARGSUB<0>. */
2986 if (op == JSOP_GETELEM &&
2987 left->pn_type == TOK_NAME &&
2988 right->pn_type == TOK_NUMBER) {
2989 if (!BindNameToSlot(cx, cg, left))
2991 if (left->pn_op == JSOP_ARGUMENTS &&
2992 JSDOUBLE_IS_INT32(right->pn_dval, &slot) &&
2993 jsuint(slot) < JS_BIT(16) &&
2994 (!cg->inStrictMode() ||
2995 (!cg->mutatesParameter() && !cg->callsEval()))) {
2996 left->pn_offset = right->pn_offset = top;
2997 EMIT_UINT16_IMM_OP(JSOP_ARGSUB, (jsatomid)slot);
3002 if (!js_EmitTree(cx, cg, left))
3006 /* The right side of the descendant operator is implicitly quoted. */
3007 JS_ASSERT(op != JSOP_DESCENDANTS || right->pn_type != TOK_STRING ||
3008 right->pn_op == JSOP_QNAMEPART);
3009 if (!js_EmitTree(cx, cg, right))
3011 if (js_NewSrcNote2(cx, cg, SRC_PCBASE, CG_OFFSET(cg) - top) < 0)
3013 return js_Emit1(cx, cg, op) >= 0;
3017 EmitNumberOp(JSContext *cx, jsdouble dval, JSCodeGenerator *cg)
3024 if (JSDOUBLE_IS_INT32(dval, &ival)) {
3026 return js_Emit1(cx, cg, JSOP_ZERO) >= 0;
3028 return js_Emit1(cx, cg, JSOP_ONE) >= 0;
3029 if ((jsint)(int8)ival == ival)
3030 return js_Emit2(cx, cg, JSOP_INT8, (jsbytecode)(int8)ival) >= 0;
3033 if (u < JS_BIT(16)) {
3034 EMIT_UINT16_IMM_OP(JSOP_UINT16, u);
3035 } else if (u < JS_BIT(24)) {
3036 off = js_EmitN(cx, cg, JSOP_UINT24, 3);
3039 pc = CG_CODE(cg, off);
3042 off = js_EmitN(cx, cg, JSOP_INT32, 4);
3045 pc = CG_CODE(cg, off);
3046 SET_INT32(pc, ival);
3051 if (!cg->constList.append(DoubleValue(dval)))
3054 return EmitIndexOp(cx, JSOP_DOUBLE, cg->constList.length() - 1, cg);
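/*
 * Worked examples of the encoding ladder above (illustrative): 0 and 1 get
 * the dedicated JSOP_ZERO/JSOP_ONE ops, -100 fits JSOP_INT8, 1000 fits
 * JSOP_UINT16, 100000 fits JSOP_UINT24, -1000 needs JSOP_INT32, and a
 * non-integer value such as 2.5 is appended to constList and referenced via
 * JSOP_DOUBLE.
 */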
3058 * To avoid bloating all parse nodes for the special case of switch, values are
3059 * allocated in the temp pool and pointed to by the parse node. These values
3060 * are not currently recycled (like parse nodes) and the temp pool is only
3061 * flushed at the end of compiling a script, so these values are technically
3062 * leaked. This would only be a problem for scripts containing a large number
3063 * of large switches, which seems unlikely.
3066 AllocateSwitchConstant(JSContext *cx)
3069 JS_ARENA_ALLOCATE_TYPE(pv, Value, &cx->tempPool);
3074 EmitSwitch(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn,
3075 JSStmtInfo *stmtInfo)
3078 JSBool ok, hasDefault, constPropagated;
3079 ptrdiff_t top, off, defaultOffset;
3080 JSParseNode *pn2, *pn3, *pn4;
3081 uint32 caseCount, tableLength;
3082 JSParseNode **table;
3083 int32_t i, low, high;
3084 JSAtomListElement *ale;
3086 size_t switchSize, tableSize;
3087 jsbytecode *pc, *savepc;
3088 #if JS_HAS_BLOCK_SCOPE
3092 /* Try the most optimal switch op first; fall back if the case values are not dense ints, or all the way to the general ECMAv2 scheme. */
3093 switchOp = JSOP_TABLESWITCH;
3095 hasDefault = constPropagated = JS_FALSE;
3099 * If the switch contains let variables scoped by its body, model the
3100 * resulting block on the stack first, before emitting the discriminant's
3101 * bytecode (in case the discriminant contains a stack-model dependency
3102 * such as a let expression).
3105 #if JS_HAS_BLOCK_SCOPE
3106 if (pn2->pn_type == TOK_LEXICALSCOPE) {
3108 * Push the body's block scope before discriminant code-gen for proper
3109 * static block scope linkage in case the discriminant contains a let
3110 * expression. The block's locals must lie under the discriminant on
3111 * the stack so that case-dispatch bytecodes can find the discriminant
3112 * on top of the stack.
3114 box = pn2->pn_objbox;
3115 js_PushBlockScope(cg, stmtInfo, box, -1);
3116 stmtInfo->type = STMT_SWITCH;
3118 /* Emit JSOP_ENTERBLOCK before code to evaluate the discriminant. */
3119 if (!EmitEnterBlock(cx, pn2, cg))
3123 * Pop the switch's statement info around discriminant code-gen. Note
3124 * how this leaves cg->blockChain referencing the switch's
3125 * block scope object, which is necessary for correct block parenting
3126 * in the case where the discriminant contains a let expression.
3128 cg->topStmt = stmtInfo->down;
3129 cg->topScopeStmt = stmtInfo->downScope;
3139 * Emit code for the discriminant first (or nearly first, in the case of a
3140 * switch whose body is a block scope).
3142 if (!js_EmitTree(cx, cg, pn->pn_left))
3145 /* Switch bytecodes run from here till end of final case. */
3146 top = CG_OFFSET(cg);
3147 #if !JS_HAS_BLOCK_SCOPE
3148 js_PushStatement(cg, stmtInfo, STMT_SWITCH, top);
3150 if (pn2->pn_type == TOK_LC) {
3151 js_PushStatement(cg, stmtInfo, STMT_SWITCH, top);
3153 /* Re-push the switch's statement info record. */
3154 cg->topStmt = cg->topScopeStmt = stmtInfo;
3155 cg->blockChainBox = stmtInfo->blockBox;
3157 /* Set the statement info record's idea of top. */
3158 stmtInfo->update = top;
3160 /* Advance pn2 to refer to the switch case list. */
3165 caseCount = pn2->pn_count;
3169 if (caseCount == 0 ||
3171 (hasDefault = (pn2->pn_head->pn_type == TOK_DEFAULT)))) {
3176 #define INTMAP_LENGTH 256
3177 jsbitmap intmap_space[INTMAP_LENGTH];
3178 jsbitmap *intmap = NULL;
3179 int32 intmap_bitlen = 0;
3181 low = JSVAL_INT_MAX;
3182 high = JSVAL_INT_MIN;
3184 for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
3185 if (pn3->pn_type == TOK_DEFAULT) {
3186 hasDefault = JS_TRUE;
3187 caseCount--; /* one of the "cases" was the default */
3191 JS_ASSERT(pn3->pn_type == TOK_CASE);
3192 if (switchOp == JSOP_CONDSWITCH)
3196 while (pn4->pn_type == TOK_RP)
3200 switch (pn4->pn_type) {
3202 constVal.setNumber(pn4->pn_dval);
3205 constVal.setString(ATOM_TO_STRING(pn4->pn_atom));
3208 if (!pn4->maybeExpr()) {
3209 ok = LookupCompileTimeConstant(cx, cg, pn4->pn_atom, &constVal);
3212 if (!constVal.isMagic(JS_NO_CONSTANT)) {
3213 if (constVal.isObject()) {
3215 * XXX JSOP_LOOKUPSWITCH does not support const-
3216 * propagated object values, see bug 407186.
3218 switchOp = JSOP_CONDSWITCH;
3221 constPropagated = JS_TRUE;
3227 if (pn4->pn_op == JSOP_TRUE) {
3228 constVal.setBoolean(true);
3231 if (pn4->pn_op == JSOP_FALSE) {
3232 constVal.setBoolean(false);
3235 if (pn4->pn_op == JSOP_NULL) {
3241 switchOp = JSOP_CONDSWITCH;
3244 JS_ASSERT(constVal.isPrimitive());
3246 pn3->pn_pval = AllocateSwitchConstant(cx);
3247 if (!pn3->pn_pval) {
3252 *pn3->pn_pval = constVal;
3254 if (switchOp != JSOP_TABLESWITCH)
3256 if (!pn3->pn_pval->isInt32()) {
3257 switchOp = JSOP_LOOKUPSWITCH;
3260 i = pn3->pn_pval->toInt32();
3261 if ((jsuint)(i + (jsint)JS_BIT(15)) >= (jsuint)JS_BIT(16)) {
3262 switchOp = JSOP_LOOKUPSWITCH;
3271 * Check for duplicates, which require a JSOP_LOOKUPSWITCH.
3272 * We bias i by 65536 if it's negative, and hope that's a rare
3273 * case (because it requires a malloc'd bitmap).
3277 if (i >= intmap_bitlen) {
3279 i < (INTMAP_LENGTH << JS_BITS_PER_WORD_LOG2)) {
3280 intmap = intmap_space;
3281 intmap_bitlen = INTMAP_LENGTH << JS_BITS_PER_WORD_LOG2;
3283 /* Just grab 8K for the worst-case bitmap. */
3284 intmap_bitlen = JS_BIT(16);
3285 intmap = (jsbitmap *)
3286 cx->malloc((JS_BIT(16) >> JS_BITS_PER_WORD_LOG2)
3287 * sizeof(jsbitmap));
3289 JS_ReportOutOfMemory(cx);
3293 memset(intmap, 0, intmap_bitlen >> JS_BITS_PER_BYTE_LOG2);
3295 if (JS_TEST_BIT(intmap, i)) {
3296 switchOp = JSOP_LOOKUPSWITCH;
3299 JS_SET_BIT(intmap, i);
3303 if (intmap && intmap != intmap_space)
3309 * Compute table length and select lookup instead if overlarge or
3310 * more than half-sparse.
3312 if (switchOp == JSOP_TABLESWITCH) {
3313 tableLength = (uint32)(high - low + 1);
3314 if (tableLength >= JS_BIT(16) || tableLength > 2 * caseCount)
3315 switchOp = JSOP_LOOKUPSWITCH;
3316 } else if (switchOp == JSOP_LOOKUPSWITCH) {
3318 * Lookup switch supports only atom indexes below 64K limit.
3319 * Conservatively estimate the maximum possible index during
3320 * switch generation and use conditional switch if it exceeds
3321 * the limit.
3323 if (caseCount + cg->constList.length() > JS_BIT(16))
3324 switchOp = JSOP_CONDSWITCH;
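/*
 * Illustrative examples of the resulting choice: a switch over small,
 * reasonably dense integer constants (case 1: case 2: case 5:) keeps
 * JSOP_TABLESWITCH; sparse or non-integer constants (case 1: case 1000000:,
 * or string cases) select JSOP_LOOKUPSWITCH; and any non-constant case
 * expression (case f():) forces JSOP_CONDSWITCH.
 */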
3329 * Emit a note with two offsets: first tells total switch code length,
3330 * second tells offset to first JSOP_CASE if condswitch.
3332 noteIndex = js_NewSrcNote3(cx, cg, SRC_SWITCH, 0, 0);
3336 if (switchOp == JSOP_CONDSWITCH) {
3338 * 0 bytes of immediate for unoptimized ECMAv2 switch.
3341 } else if (switchOp == JSOP_TABLESWITCH) {
3343 * 3 offsets (len, low, high) before the table, 1 per entry.
3345 switchSize = (size_t)(JUMP_OFFSET_LEN * (3 + tableLength));
3348 * JSOP_LOOKUPSWITCH:
3349 * 1 offset (len) and 1 atom index (npairs) before the table,
3350 * 1 atom index and 1 jump offset per entry.
3352 switchSize = (size_t)(JUMP_OFFSET_LEN + INDEX_LEN +
3353 (INDEX_LEN + JUMP_OFFSET_LEN) * caseCount);
3357 * Emit switchOp followed by switchSize bytes of jump or lookup table.
3359 * If switchOp is JSOP_LOOKUPSWITCH or JSOP_TABLESWITCH, it is crucial
3360 * to emit the immediate operand(s) by which bytecode readers such as
3361 * BuildSpanDepTable discover the length of the switch opcode *before*
3362 * calling js_SetJumpOffset (which may call BuildSpanDepTable). It's
3363 * also important to zero all unknown jump offset immediate operands,
3364 * so they can be converted to span dependencies with null targets to
3365 * be computed later (js_EmitN zeros switchSize bytes after switchOp).
3367 if (js_EmitN(cx, cg, switchOp, switchSize) < 0)
3371 if (switchOp == JSOP_CONDSWITCH) {
3372 intN caseNoteIndex = -1;
3373 JSBool beforeCases = JS_TRUE;
3375 /* Emit code for evaluating cases and jumping to case statements. */
3376 for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
3378 if (pn4 && !js_EmitTree(cx, cg, pn4))
3380 if (caseNoteIndex >= 0) {
3381 /* off is the previous JSOP_CASE's bytecode offset. */
3382 if (!js_SetSrcNoteOffset(cx, cg, (uintN)caseNoteIndex, 0,
3383 CG_OFFSET(cg) - off)) {
3388 JS_ASSERT(pn3->pn_type == TOK_DEFAULT);
3391 caseNoteIndex = js_NewSrcNote2(cx, cg, SRC_PCDELTA, 0);
3392 if (caseNoteIndex < 0)
3394 off = EmitJump(cx, cg, JSOP_CASE, 0);
3397 pn3->pn_offset = off;
3399 uintN noteCount, noteCountDelta;
3401 /* Switch note's second offset is to first JSOP_CASE. */
3402 noteCount = CG_NOTE_COUNT(cg);
3403 if (!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 1,
3407 noteCountDelta = CG_NOTE_COUNT(cg) - noteCount;
3408 if (noteCountDelta != 0)
3409 caseNoteIndex += noteCountDelta;
3410 beforeCases = JS_FALSE;
3415 * If we didn't have an explicit default (which could fall in between
3416 * cases, preventing us from fusing this js_SetSrcNoteOffset with the
3417 * call in the loop above), link the last case to the implicit default
3418 * for the decompiler.
3421 caseNoteIndex >= 0 &&
3422 !js_SetSrcNoteOffset(cx, cg, (uintN)caseNoteIndex, 0,
3423 CG_OFFSET(cg) - off)) {
3427 /* Emit default even if no explicit default statement. */
3428 defaultOffset = EmitJump(cx, cg, JSOP_DEFAULT, 0);
3429 if (defaultOffset < 0)
3432 pc = CG_CODE(cg, top + JUMP_OFFSET_LEN);
3434 if (switchOp == JSOP_TABLESWITCH) {
3435 /* Fill in switch bounds, which we know fit in 16-bit offsets. */
3436 SET_JUMP_OFFSET(pc, low);
3437 pc += JUMP_OFFSET_LEN;
3438 SET_JUMP_OFFSET(pc, high);
3439 pc += JUMP_OFFSET_LEN;
3442 * Use malloc to avoid arena bloat for programs with many switches.
3443 * We free table if non-null at label out, so all control flow must
3444 * exit this function through goto out or goto bad.
3446 if (tableLength != 0) {
3447 tableSize = (size_t)tableLength * sizeof *table;
3448 table = (JSParseNode **) cx->malloc(tableSize);
3451 memset(table, 0, tableSize);
3452 for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
3453 if (pn3->pn_type == TOK_DEFAULT)
3455 i = pn3->pn_pval->toInt32();
3457 JS_ASSERT((uint32)i < tableLength);
3462 JS_ASSERT(switchOp == JSOP_LOOKUPSWITCH);
3464 /* Fill in the number of cases. */
3465 SET_INDEX(pc, caseCount);
3470 * After this point, all control flow involving JSOP_TABLESWITCH
3471 * must set ok and goto out to exit this function. To keep things
3472 * simple, all switchOp cases exit that way.
3474 MUST_FLOW_THROUGH("out");
3477 * We have already generated at least one big jump so we must
3478 * explicitly add span dependencies for the switch jumps. When
3479 * called below, js_SetJumpOffset can only do it when patching
3480 * the first big jump or when cg->spanDeps is null.
3482 if (!AddSwitchSpanDeps(cx, cg, CG_CODE(cg, top)))
3486 if (constPropagated) {
3488 * Skip switchOp, as we are not setting jump offsets in the two
3489 * for loops below. We'll restore CG_NEXT(cg) from savepc after,
3490 * unless there was an error.
3492 savepc = CG_NEXT(cg);
3493 CG_NEXT(cg) = pc + 1;
3494 if (switchOp == JSOP_TABLESWITCH) {
3495 for (i = 0; i < (jsint)tableLength; i++) {
3498 (pn4 = pn3->pn_left) != NULL &&
3499 pn4->pn_type == TOK_NAME) {
3500 /* Note a propagated constant with the const's name. */
3501 JS_ASSERT(!pn4->maybeExpr());
3502 ale = cg->atomList.add(cg->parser, pn4->pn_atom);
3506 if (js_NewSrcNote2(cx, cg, SRC_LABEL, (ptrdiff_t)
3507 ALE_INDEX(ale)) < 0) {
3511 pc += JUMP_OFFSET_LEN;
3514 for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
3516 if (pn4 && pn4->pn_type == TOK_NAME) {
3517 /* Note a propagated constant with the const's name. */
3518 JS_ASSERT(!pn4->maybeExpr());
3519 ale = cg->atomList.add(cg->parser, pn4->pn_atom);
3523 if (js_NewSrcNote2(cx, cg, SRC_LABEL, (ptrdiff_t)
3524 ALE_INDEX(ale)) < 0) {
3528 pc += INDEX_LEN + JUMP_OFFSET_LEN;
3531 CG_NEXT(cg) = savepc;
3535 /* Emit code for each case's statements, copying pn_offset up to pn3. */
3536 for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
3537 if (switchOp == JSOP_CONDSWITCH && pn3->pn_type != TOK_DEFAULT)
3538 CHECK_AND_SET_JUMP_OFFSET_AT_CUSTOM(cx, cg, pn3->pn_offset, goto bad);
3539 pn4 = pn3->pn_right;
3540 ok = js_EmitTree(cx, cg, pn4);
3543 pn3->pn_offset = pn4->pn_offset;
3544 if (pn3->pn_type == TOK_DEFAULT)
3545 off = pn3->pn_offset - top;
3549 /* If no default case, offset for default is to end of switch. */
3550 off = CG_OFFSET(cg) - top;
3553 /* We better have set "off" by now. */
3554 JS_ASSERT(off != -1);
3556 /* Set the default offset (to end of switch if no default). */
3557 if (switchOp == JSOP_CONDSWITCH) {
3559 JS_ASSERT(defaultOffset != -1);
3560 ok = js_SetJumpOffset(cx, cg, CG_CODE(cg, defaultOffset),
3561 off - (defaultOffset - top));
3565 pc = CG_CODE(cg, top);
3566 ok = js_SetJumpOffset(cx, cg, pc, off);
3569 pc += JUMP_OFFSET_LEN;
3572 /* Set the SRC_SWITCH note's offset operand to tell end of switch. */
3573 off = CG_OFFSET(cg) - top;
3574 ok = js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0, off);
3578 if (switchOp == JSOP_TABLESWITCH) {
3579 /* Skip over the already-initialized switch bounds. */
3580 pc += 2 * JUMP_OFFSET_LEN;
3582 /* Fill in the jump table, if there is one. */
3583 for (i = 0; i < (jsint)tableLength; i++) {
3585 off = pn3 ? pn3->pn_offset - top : 0;
3586 ok = js_SetJumpOffset(cx, cg, pc, off);
3589 pc += JUMP_OFFSET_LEN;
3591 } else if (switchOp == JSOP_LOOKUPSWITCH) {
3592 /* Skip over the already-initialized number of cases. */
3595 for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
3596 if (pn3->pn_type == TOK_DEFAULT)
3598 if (!cg->constList.append(*pn3->pn_pval))
3600 SET_INDEX(pc, cg->constList.length() - 1);
3603 off = pn3->pn_offset - top;
3604 ok = js_SetJumpOffset(cx, cg, pc, off);
3607 pc += JUMP_OFFSET_LEN;
3615 ok = js_PopStatementCG(cx, cg);
3617 #if JS_HAS_BLOCK_SCOPE
3618 if (ok && pn->pn_right->pn_type == TOK_LEXICALSCOPE)
3619 ok = EmitLeaveBlock(cx, cg, JSOP_LEAVEBLOCK, box);
3630 js_EmitFunctionScript(JSContext *cx, JSCodeGenerator *cg, JSParseNode *body)
3633 * The decompiler has assumptions about what may occur immediately after
3634 * script->main (e.g., in the case of destructuring params). Thus, put the
3635 * following ops into the range [script->code, script->main). Note:
3636 * execution starts from script->code, so this has no semantic effect.
3639 if (cg->flags & TCF_FUN_IS_GENERATOR) {
3640 /* JSOP_GENERATOR must be the first instruction. */
3641 CG_SWITCH_TO_PROLOG(cg);
3642 JS_ASSERT(CG_NEXT(cg) == CG_BASE(cg));
3643 if (js_Emit1(cx, cg, JSOP_GENERATOR) < 0)
3645 CG_SWITCH_TO_MAIN(cg);
3649 * Strict mode functions' arguments objects copy initial parameter values.
3650 * We create arguments objects lazily -- but that doesn't work for strict
3651 * mode functions where a parameter might be modified and arguments might
3652 * be accessed. For such functions we synthesize an access to arguments to
3653 * initialize it with the original parameter values.
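/*
 * For example (illustrative):
 *
 *   function f(a) { "use strict"; a = 2; return arguments[0]; }
 *
 * must return the value originally passed for a, so the prolog ops emitted
 * below materialize the arguments object before the body can overwrite the
 * parameter.
 */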
3655 if (cg->needsEagerArguments()) {
3656 CG_SWITCH_TO_PROLOG(cg);
3657 if (js_Emit1(cx, cg, JSOP_ARGUMENTS) < 0 || js_Emit1(cx, cg, JSOP_POP) < 0)
3659 CG_SWITCH_TO_MAIN(cg);
3662 if (cg->flags & TCF_FUN_UNBRAND_THIS) {
3663 CG_SWITCH_TO_PROLOG(cg);
3664 if (js_Emit1(cx, cg, JSOP_UNBRANDTHIS) < 0)
3666 CG_SWITCH_TO_MAIN(cg);
3669 return js_EmitTree(cx, cg, body) &&
3670 js_Emit1(cx, cg, JSOP_STOP) >= 0 &&
3671 JSScript::NewScriptFromCG(cx, cg);
3674 /* A macro for inlining at the top of js_EmitTree (whence it came). */
3675 #define UPDATE_LINE_NUMBER_NOTES(cx, cg, line) \
3677 uintN line_ = (line); \
3678 uintN delta_ = line_ - CG_CURRENT_LINE(cg); \
3679 if (delta_ != 0) { \
3681 * Encode any change in the current source line number by using \
3682 * either several SRC_NEWLINE notes or just one SRC_SETLINE note, \
3683 * whichever consumes less space. \
3685 * NB: We handle backward line number deltas (possible with for \
3686 * loops where the update part is emitted after the body, but its \
3687 * line number is <= any line number in the body) here by letting \
3688 * unsigned delta_ wrap to a very large number, which triggers a \
3689 * SRC_SETLINE. \
3691 CG_CURRENT_LINE(cg) = line_; \
3692 if (delta_ >= (uintN)(2 + ((line_ > SN_3BYTE_OFFSET_MASK)<<1))) { \
3693 if (js_NewSrcNote2(cx, cg, SRC_SETLINE, (ptrdiff_t)line_) < 0)\
3697 if (js_NewSrcNote(cx, cg, SRC_NEWLINE) < 0) \
3699 } while (--delta_ != 0); \
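/*
 * A worked example of the rule above (illustrative): advancing by a single
 * line is cheapest as one SRC_NEWLINE note, whereas a large forward jump, or
 * any backward jump (whose unsigned delta wraps to a huge value), is encoded
 * as a single SRC_SETLINE note carrying the new line number.
 */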
3704 /* A function, so that we avoid macro-bloating all the other callsites. */
3706 UpdateLineNumberNotes(JSContext *cx, JSCodeGenerator *cg, uintN line)
3708 UPDATE_LINE_NUMBER_NOTES(cx, cg, line);
3713 MaybeEmitVarDecl(JSContext *cx, JSCodeGenerator *cg, JSOp prologOp,
3714 JSParseNode *pn, jsatomid *result)
3717 JSAtomListElement *ale;
3719 if (!pn->pn_cookie.isFree()) {
3720 atomIndex = (jsatomid) pn->pn_cookie.slot();
3722 ale = cg->atomList.add(cg->parser, pn->pn_atom);
3725 atomIndex = ALE_INDEX(ale);
3728 if (JOF_OPTYPE(pn->pn_op) == JOF_ATOM &&
3729 (!cg->inFunction() || (cg->flags & TCF_FUN_HEAVYWEIGHT)) &&
3730 !(pn->pn_dflags & PND_GVAR))
3732 CG_SWITCH_TO_PROLOG(cg);
3733 if (!UpdateLineNumberNotes(cx, cg, pn->pn_pos.begin.lineno))
3735 EMIT_INDEX_OP(prologOp, atomIndex);
3736 CG_SWITCH_TO_MAIN(cg);
3739 if (cg->inFunction() &&
3740 JOF_OPTYPE(pn->pn_op) == JOF_LOCAL &&
3741 pn->pn_cookie.slot() < cg->bindings.countVars() &&
3742 cg->shouldNoteClosedName(pn))
3744 if (!cg->closedVars.append(pn->pn_cookie.slot()))
3749 *result = atomIndex;
3753 #if JS_HAS_DESTRUCTURING
3756 (*DestructuringDeclEmitter)(JSContext *cx, JSCodeGenerator *cg, JSOp prologOp,
3760 EmitDestructuringDecl(JSContext *cx, JSCodeGenerator *cg, JSOp prologOp,
3763 JS_ASSERT(pn->pn_type == TOK_NAME);
3764 if (!BindNameToSlot(cx, cg, pn))
3767 JS_ASSERT(PN_OP(pn) != JSOP_ARGUMENTS && PN_OP(pn) != JSOP_CALLEE);
3768 return MaybeEmitVarDecl(cx, cg, prologOp, pn, NULL);
3772 EmitDestructuringDecls(JSContext *cx, JSCodeGenerator *cg, JSOp prologOp,
3775 JSParseNode *pn2, *pn3;
3776 DestructuringDeclEmitter emitter;
3778 if (pn->pn_type == TOK_RB) {
3779 for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next) {
3780 if (pn2->pn_type == TOK_COMMA)
3782 emitter = (pn2->pn_type == TOK_NAME)
3783 ? EmitDestructuringDecl
3784 : EmitDestructuringDecls;
3785 if (!emitter(cx, cg, prologOp, pn2))
3789 JS_ASSERT(pn->pn_type == TOK_RC);
3790 for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next) {
3791 pn3 = pn2->pn_right;
3792 emitter = (pn3->pn_type == TOK_NAME)
3793 ? EmitDestructuringDecl
3794 : EmitDestructuringDecls;
3795 if (!emitter(cx, cg, prologOp, pn3))
3803 EmitDestructuringOpsHelper(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn);
3806 EmitDestructuringLHS(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn)
3809 * Now emit the lvalue opcode sequence. If the lvalue is a nested
3810 * destructuring initialiser-form, call ourselves to handle it, then
3811 * pop the matched value. Otherwise emit an lvalue bytecode sequence
3812 * ending with a JSOP_ENUMELEM or equivalent op.
3814 if (pn->pn_type == TOK_RB || pn->pn_type == TOK_RC) {
3815 if (!EmitDestructuringOpsHelper(cx, cg, pn))
3817 if (js_Emit1(cx, cg, JSOP_POP) < 0)
3820 if (pn->pn_type == TOK_NAME) {
3821 if (!BindNameToSlot(cx, cg, pn))
3823 if (pn->isConst() && !pn->isInitialized())
3824 return js_Emit1(cx, cg, JSOP_POP) >= 0;
3827 switch (pn->pn_op) {
3831 * NB: pn is a PN_NAME node, not a PN_BINARY. Nevertheless,
3832 * we want to emit JSOP_ENUMELEM, which has format JOF_ELEM.
3833 * So here and for JSOP_ENUMCONSTELEM, we use EmitElemOp.
3835 if (!EmitElemOp(cx, pn, JSOP_ENUMELEM, cg))
3840 if (!EmitElemOp(cx, pn, JSOP_ENUMCONSTELEM, cg))
3846 jsuint slot = pn->pn_cookie.asInteger();
3847 EMIT_UINT16_IMM_OP(JSOP_SETLOCALPOP, slot);
3853 jsuint slot = pn->pn_cookie.asInteger();
3854 EMIT_UINT16_IMM_OP(PN_OP(pn), slot);
3855 if (js_Emit1(cx, cg, JSOP_POP) < 0)
3864 top = CG_OFFSET(cg);
3865 if (!js_EmitTree(cx, cg, pn))
3867 if (js_NewSrcNote2(cx, cg, SRC_PCBASE, CG_OFFSET(cg) - top) < 0)
3869 if (js_Emit1(cx, cg, JSOP_ENUMELEM) < 0)
3883 * Recursive helper for EmitDestructuringOps.
3885 * Given a value to destructure on the stack, walk over an object or array
3886 * initialiser at pn, emitting bytecodes to match property values and store
3887 * them in the lvalues identified by the matched property names.
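/*
 * For example (illustrative): destructuring
 *
 *   var [a, {x: b}] = rhs;
 *
 * dups the rhs value and fetches index 0 into a, dups again, fetches index 1
 * and recurses to match property x into b, leaving the stack at the same
 * depth it had on entry.
 */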
3890 EmitDestructuringOpsHelper(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn)
3893 JSParseNode *pn2, *pn3;
3897 intN stackDepth = cg->stackDepth;
3898 JS_ASSERT(stackDepth != 0);
3899 JS_ASSERT(pn->pn_arity == PN_LIST);
3900 JS_ASSERT(pn->pn_type == TOK_RB || pn->pn_type == TOK_RC);
3903 if (pn->pn_count == 0) {
3904 /* Emit a DUP;POP sequence for the decompiler. */
3905 return js_Emit1(cx, cg, JSOP_DUP) >= 0 &&
3906 js_Emit1(cx, cg, JSOP_POP) >= 0;
3910 for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next) {
3912 * Duplicate the value being destructured to use as a reference base.
3913 * If dup is not the first one, annotate it for the decompiler.
3915 if (pn2 != pn->pn_head && js_NewSrcNote(cx, cg, SRC_CONTINUE) < 0)
3917 if (js_Emit1(cx, cg, JSOP_DUP) < 0)
3921 * Now push the property name currently being matched, which is either
3922 * the array initialiser's current index, or the current property name
3923 * "label" on the left of a colon in the object initialiser. Set pn3
3924 * to the lvalue node, which is in the value-initializing position.
3927 if (pn->pn_type == TOK_RB) {
3928 if (!EmitNumberOp(cx, index, cg))
3932 JS_ASSERT(pn->pn_type == TOK_RC);
3933 JS_ASSERT(pn2->pn_type == TOK_COLON);
3935 if (pn3->pn_type == TOK_NUMBER) {
3937 * If we are emitting an object destructuring initialiser,
3938 * annotate the index op with SRC_INITPROP so we know we are
3939 * not decompiling an array initialiser.
3941 if (js_NewSrcNote(cx, cg, SRC_INITPROP) < 0)
3943 if (!EmitNumberOp(cx, pn3->pn_dval, cg))
3946 JS_ASSERT(pn3->pn_type == TOK_STRING ||
3947 pn3->pn_type == TOK_NAME);
3948 if (!EmitAtomOp(cx, pn3, JSOP_GETPROP, cg))
3950 doElemOp = JS_FALSE;
3952 pn3 = pn2->pn_right;
3957 * Ok, get the value of the matching property name. This leaves
3958 * that value on top of the value being destructured, so the stack
3959 * is one deeper than when we started.
3961 if (js_Emit1(cx, cg, JSOP_GETELEM) < 0)
3963 JS_ASSERT(cg->stackDepth == stackDepth + 1);
3966 /* Nullary comma node makes a hole in the array destructurer. */
3967 if (pn3->pn_type == TOK_COMMA && pn3->pn_arity == PN_NULLARY) {
3968 JS_ASSERT(pn->pn_type == TOK_RB);
3969 JS_ASSERT(pn2 == pn3);
3970 if (js_Emit1(cx, cg, JSOP_POP) < 0)
3973 if (!EmitDestructuringLHS(cx, cg, pn3))
3977 JS_ASSERT(cg->stackDepth == stackDepth);
3985 OpToDeclType(JSOp op)
3989 return SRC_DECL_LET;
3991 return SRC_DECL_CONST;
3993 return SRC_DECL_VAR;
3995 return SRC_DECL_NONE;
4000 EmitDestructuringOps(JSContext *cx, JSCodeGenerator *cg, JSOp prologOp,
4004 * If we're called from a variable declaration, help the decompiler by
4005 * annotating the first JSOP_DUP that EmitDestructuringOpsHelper emits.
4006 * If the destructuring initialiser is empty, our helper will emit a
4007 * JSOP_DUP followed by a JSOP_POP for the decompiler.
4009 if (js_NewSrcNote2(cx, cg, SRC_DESTRUCT, OpToDeclType(prologOp)) < 0)
4013 * Call our recursive helper to emit the destructuring assignments and
4014 * related stack manipulations.
4016 return EmitDestructuringOpsHelper(cx, cg, pn);
4020 EmitGroupAssignment(JSContext *cx, JSCodeGenerator *cg, JSOp prologOp,
4021 JSParseNode *lhs, JSParseNode *rhs)
4023 jsuint depth, limit, i, nslots;
4026 depth = limit = (uintN) cg->stackDepth;
4027 for (pn = rhs->pn_head; pn; pn = pn->pn_next) {
4028 if (limit == JS_BIT(16)) {
4029 ReportCompileErrorNumber(cx, CG_TS(cg), rhs, JSREPORT_ERROR, JSMSG_ARRAY_INIT_TOO_BIG);
4033 /* MaybeEmitGroupAssignment won't call us if rhs is holey. */
4034 JS_ASSERT(!(pn->pn_type == TOK_COMMA && pn->pn_arity == PN_NULLARY));
4035 if (!js_EmitTree(cx, cg, pn))
4040 if (js_NewSrcNote2(cx, cg, SRC_GROUPASSIGN, OpToDeclType(prologOp)) < 0)
4044 for (pn = lhs->pn_head; pn; pn = pn->pn_next, ++i) {
4045 /* MaybeEmitGroupAssignment requires lhs->pn_count <= rhs->pn_count. */
4046 JS_ASSERT(i < limit);
4047 jsint slot = AdjustBlockSlot(cx, cg, i);
4050 EMIT_UINT16_IMM_OP(JSOP_GETLOCAL, slot);
4052 if (pn->pn_type == TOK_COMMA && pn->pn_arity == PN_NULLARY) {
4053 if (js_Emit1(cx, cg, JSOP_POP) < 0)
4056 if (!EmitDestructuringLHS(cx, cg, pn))
4061 nslots = limit - depth;
4062 EMIT_UINT16_IMM_OP(JSOP_POPN, nslots);
4063 cg->stackDepth = (uintN) depth;
4068 * Helper called with pop out param initialized to a JSOP_POP* opcode. If we
4069 * can emit a group assignment sequence, which results in 0 stack depth delta,
4070 * we set *pop to JSOP_NOP so callers can veto emitting pn followed by a pop.
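/*
 * The classic beneficiary (illustrative) is a swap written as
 *
 *   [a, b] = [b, a];
 *
 * Both sides are hole-free array initialisers with lhs->pn_count <=
 * rhs->pn_count, so the right-hand values are pushed as temporaries, read
 * back with JSOP_GETLOCAL into the left-hand targets, and popped with
 * JSOP_POPN, for no net stack growth.
 */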
4073 MaybeEmitGroupAssignment(JSContext *cx, JSCodeGenerator *cg, JSOp prologOp,
4074 JSParseNode *pn, JSOp *pop)
4076 JSParseNode *lhs, *rhs;
4078 JS_ASSERT(pn->pn_type == TOK_ASSIGN);
4079 JS_ASSERT(*pop == JSOP_POP || *pop == JSOP_POPV);
4082 if (lhs->pn_type == TOK_RB && rhs->pn_type == TOK_RB &&
4083 !(rhs->pn_xflags & PNX_HOLEY) &&
4084 lhs->pn_count <= rhs->pn_count) {
4085 if (!EmitGroupAssignment(cx, cg, prologOp, lhs, rhs))
4092 #endif /* JS_HAS_DESTRUCTURING */
4095 EmitVariables(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn,
4096 JSBool inLetHead, ptrdiff_t *headNoteIndex)
4098 bool let, forInVar, first;
4099 #if JS_HAS_BLOCK_SCOPE
4100 bool forInLet, popScope;
4101 JSStmtInfo *stmt, *scopeStmt;
4103 ptrdiff_t off, noteIndex, tmp;
4104 JSParseNode *pn2, *pn3, *next;
4109 /* Default in case of JS_HAS_BLOCK_SCOPE early return, below. */
4110 *headNoteIndex = -1;
4113 * Let blocks and expressions have a parenthesized head in which the new
4114 * scope is not yet open. Initializer evaluation uses the parent node's
4115 * lexical scope. If popScope is true below, then we hide the top lexical
4116 * block from any calls to BindNameToSlot hiding in pn2->pn_expr so that
4117 * it won't find any names in the new let block.
4119 * The same goes for let declarations in the head of any kind of for loop.
4120 * Unlike a let declaration 'let x = i' within a block, where x is hoisted
4121 * to the start of the block, a 'for (let x = i...) ...' loop evaluates i
4122 * in the containing scope, and puts x in the loop body's scope.
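/*
 * For example (illustrative of the scoping described above):
 *
 *   var x = 1;
 *   let (y = x) { ... }          // y's initializer sees the enclosing x
 *   for (let x = x + 1; ;) ;     // the rhs x is the outer var x, not the
 *                                // new let binding for the loop body
 */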
4124 let = (pn->pn_op == JSOP_NOP);
4125 forInVar = (pn->pn_xflags & PNX_FORINVAR) != 0;
4126 #if JS_HAS_BLOCK_SCOPE
4127 forInLet = let && forInVar;
4128 popScope = (inLetHead || (let && (cg->flags & TCF_IN_FOR_INIT)));
4131 scopeStmt = cg->topScopeStmt;
4134 else stmt = scopeStmt = NULL; /* quell GCC overwarning */
4136 JS_ASSERT(!popScope || let);
4139 off = noteIndex = -1;
4140 for (pn2 = pn->pn_head; ; pn2 = next) {
4141 first = pn2 == pn->pn_head;
4142 next = pn2->pn_next;
4144 if (pn2->pn_type != TOK_NAME) {
4145 #if JS_HAS_DESTRUCTURING
4146 if (pn2->pn_type == TOK_RB || pn2->pn_type == TOK_RC) {
4148 * Emit variable binding ops, but not destructuring ops.
4149 * The parser (see Variables, jsparse.c) has ensured that
4150 * our caller will be the TOK_FOR/TOK_IN case in js_EmitTree,
4151 * and that case will emit the destructuring code only after
4152 * emitting an enumerating opcode and a branch that tests
4153 * whether the enumeration ended.
4155 JS_ASSERT(forInVar);
4156 JS_ASSERT(pn->pn_count == 1);
4157 if (!EmitDestructuringDecls(cx, cg, PN_OP(pn), pn2))
4164 * A destructuring initialiser assignment preceded by var will
4165 * never occur to the left of 'in' in a for-in loop. As with 'for
4166 * (var x = i in o)...', this will cause the entire 'var [a, b] =
4167 * i' to be hoisted out of the loop.
4169 JS_ASSERT(pn2->pn_type == TOK_ASSIGN);
4170 JS_ASSERT(!forInVar);
4173 * To allow the front end to rewrite var f = x; as f = x; when a
4174 * function f(){} precedes the var, detect simple name assignment
4175 * here and initialize the name.
4177 #if !JS_HAS_DESTRUCTURING
4178 JS_ASSERT(pn2->pn_left->pn_type == TOK_NAME);
4180 if (pn2->pn_left->pn_type == TOK_NAME)
4183 pn3 = pn2->pn_right;
4188 #if JS_HAS_DESTRUCTURING
4189 if (pn->pn_count == 1) {
4191 * If this is the only destructuring assignment in the list,
4192 * try to optimize to a group assignment. If we're in a let
4193 * head, pass JSOP_POP rather than the pseudo-prolog JSOP_NOP
4194 * in pn->pn_op, to suppress a second (and misplaced) 'let'.
4196 JS_ASSERT(noteIndex < 0 && !pn2->pn_next);
4198 if (!MaybeEmitGroupAssignment(cx, cg,
4199 inLetHead ? JSOP_POP : PN_OP(pn),
4203 if (op == JSOP_NOP) {
4204 pn->pn_xflags = (pn->pn_xflags & ~PNX_POPVAR) | PNX_GROUPINIT;
4210 if (!EmitDestructuringDecls(cx, cg, PN_OP(pn), pn3))
4213 if (!js_EmitTree(cx, cg, pn2->pn_right))
4217 * Veto pn->pn_op if inLetHead to avoid emitting a SRC_DESTRUCT
4218 * that's redundant with respect to the SRC_DECL/SRC_DECL_LET that
4219 * we will emit at the bottom of this function.
4221 if (!EmitDestructuringOps(cx, cg,
4222 inLetHead ? JSOP_POP : PN_OP(pn),
4231 * Load initializer early to share code above that jumps to do_name.
4232 * NB: if this var redeclares an existing binding, then pn2 is linked
4233 * on its definition's use-chain and pn_expr has been overlayed with
4234 * pn_lexdef.
4236 pn3 = pn2->maybeExpr();
4239 if (!BindNameToSlot(cx, cg, pn2))
4243 if (op == JSOP_ARGUMENTS) {
4244 /* JSOP_ARGUMENTS => no initializer */
4245 JS_ASSERT(!pn3 && !let);
4248 atomIndex = 0; /* quell GCC overwarning */
4251 JS_ASSERT(op != JSOP_CALLEE);
4252 JS_ASSERT(!pn2->pn_cookie.isFree() || !let);
4253 if (!MaybeEmitVarDecl(cx, cg, PN_OP(pn), pn2, &atomIndex))
4257 JS_ASSERT(!forInVar);
4258 if (op == JSOP_SETNAME) {
4260 EMIT_INDEX_OP(JSOP_BINDNAME, atomIndex);
4261 } else if (op == JSOP_SETGNAME) {
4263 EMIT_INDEX_OP(JSOP_BINDGNAME, atomIndex);
4265 if (pn->pn_op == JSOP_DEFCONST &&
4266 !js_DefineCompileTimeConstant(cx, cg, pn2->pn_atom, pn3)) {
4270 #if JS_HAS_BLOCK_SCOPE
4271 /* Evaluate expr in the outer lexical scope if requested. */
4273 cg->topStmt = stmt->down;
4274 cg->topScopeStmt = scopeStmt->downScope;
4278 oldflags = cg->flags;
4279 cg->flags &= ~TCF_IN_FOR_INIT;
4280 if (!js_EmitTree(cx, cg, pn3))
4282 cg->flags |= oldflags & TCF_IN_FOR_INIT;
4284 #if JS_HAS_BLOCK_SCOPE
4287 cg->topScopeStmt = scopeStmt;
4288 JS_ASSERT(cg->blockChainBox == scopeStmt->blockBox);
4295 * The parser rewrites 'for (var x = i in o)' to hoist 'var x = i' --
4296 * likewise 'for (let x = i in o)' becomes 'i; for (let x in o)' using
4297 * a TOK_SEQ node to make the two statements appear as one. Therefore
4298 * if this declaration is part of a for-in loop head, we do not need to
4299 * emit op or any source note. Our caller, the TOK_FOR/TOK_IN case in
4300 * js_EmitTree, will annotate appropriately.
4302 JS_ASSERT_IF(pn2->pn_defn, pn3 == pn2->pn_expr);
4304 JS_ASSERT(pn->pn_count == 1);
4311 js_NewSrcNote2(cx, cg, SRC_DECL,
4312 (pn->pn_op == JSOP_DEFCONST)
4314 : (pn->pn_op == JSOP_DEFVAR)
4316 : SRC_DECL_LET) < 0) {
4319 if (op == JSOP_ARGUMENTS) {
4320 if (js_Emit1(cx, cg, op) < 0)
4322 } else if (!pn2->pn_cookie.isFree()) {
4323 EMIT_UINT16_IMM_OP(op, atomIndex);
4325 EMIT_INDEX_OP(op, atomIndex);
4328 #if JS_HAS_DESTRUCTURING
4331 tmp = CG_OFFSET(cg);
4332 if (noteIndex >= 0) {
4333 if (!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0, tmp-off))
4339 noteIndex = js_NewSrcNote2(cx, cg, SRC_PCDELTA, 0);
4340 if (noteIndex < 0 || js_Emit1(cx, cg, JSOP_POP) < 0)
4344 /* If this is a let head, emit and return a srcnote on the pop. */
4346 *headNoteIndex = js_NewSrcNote(cx, cg, SRC_DECL);
4347 if (*headNoteIndex < 0)
4349 if (!(pn->pn_xflags & PNX_POPVAR))
4350 return js_Emit1(cx, cg, JSOP_NOP) >= 0;
4353 return !(pn->pn_xflags & PNX_POPVAR) || js_Emit1(cx, cg, JSOP_POP) >= 0;
4356 #if defined DEBUG_brendan || defined DEBUG_mrbkap
4358 GettableNoteForNextOp(JSCodeGenerator *cg)
4360 ptrdiff_t offset, target;
4361 jssrcnote *sn, *end;
4364 target = CG_OFFSET(cg);
4365 for (sn = CG_NOTES(cg), end = sn + CG_NOTE_COUNT(cg); sn < end;
4367 if (offset == target && SN_IS_GETTABLE(sn))
4369 offset += SN_DELTA(sn);
4375 /* Top-level named functions need a nop for decompilation. */
4377 EmitFunctionDefNop(JSContext *cx, JSCodeGenerator *cg, uintN index)
4379 return js_NewSrcNote2(cx, cg, SRC_FUNCDEF, (ptrdiff_t)index) >= 0 &&
4380 js_Emit1(cx, cg, JSOP_NOP) >= 0;
4384 EmitNewInit(JSContext *cx, JSCodeGenerator *cg, JSProtoKey key, JSParseNode *pn, int sharpnum)
4386 if (js_Emit3(cx, cg, JSOP_NEWINIT, (jsbytecode) key, 0) < 0)
4388 #if JS_HAS_SHARP_VARS
4389 if (cg->hasSharps()) {
4390 if (pn->pn_count != 0)
4391 EMIT_UINT16_IMM_OP(JSOP_SHARPINIT, cg->sharpSlotBase);
4393 EMIT_UINT16PAIR_IMM_OP(JSOP_DEFSHARP, cg->sharpSlotBase, sharpnum);
4395 JS_ASSERT(sharpnum < 0);
4402 EmitEndInit(JSContext *cx, JSCodeGenerator *cg, uint32 count)
4404 #if JS_HAS_SHARP_VARS
4405 /* Emit an op for sharp array cleanup and decompilation. */
4406 if (cg->hasSharps() && count != 0)
4407 EMIT_UINT16_IMM_OP(JSOP_SHARPINIT, cg->sharpSlotBase);
4409 return js_Emit1(cx, cg, JSOP_ENDINIT) >= 0;
4413 JSParseNode::getConstantValue(JSContext *cx, bool strictChecks, Value *vp)
4417 vp->setNumber(pn_dval);
4420 vp->setString(ATOM_TO_STRING(pn_atom));
4428 vp->setBoolean(false);
4431 vp->setBoolean(true);
4434 JS_NOT_REACHED("Unexpected node");
4438 JS_ASSERT((pn_op == JSOP_NEWINIT) && !(pn_xflags & PNX_NONCONST));
4440 JSObject *obj = NewDenseAllocatedArray(cx, pn_count);
4441 if (!obj || !obj->ensureSlots(cx, pn_count))
4445 for (JSParseNode *pn = pn_head; pn; idx++, pn = pn->pn_next) {
4447 if (!pn->getConstantValue(cx, strictChecks, &value))
4449 obj->setDenseArrayElement(idx, value);
4451 JS_ASSERT(idx == pn_count);
4453 vp->setObject(*obj);
4457 JS_ASSERT((pn_op == JSOP_NEWINIT) && !(pn_xflags & PNX_NONCONST));
4459 gc::FinalizeKind kind = GuessObjectGCKind(pn_count, false);
4460 JSObject *obj = NewBuiltinClassInstance(cx, &js_ObjectClass, kind);
4464 for (JSParseNode *pn = pn_head; pn; pn = pn->pn_next) {
4466 if (!pn->pn_right->getConstantValue(cx, strictChecks, &value))
4469 JSParseNode *pnid = pn->pn_left;
4470 if (pnid->pn_type == TOK_NUMBER) {
4471 Value idvalue = NumberValue(pnid->pn_dval);
4473 if (idvalue.isInt32() && INT_FITS_IN_JSID(idvalue.toInt32()))
4474 id = INT_TO_JSID(idvalue.toInt32());
4475 else if (!js_InternNonIntElementId(cx, obj, idvalue, &id))
4477 if (!obj->defineProperty(cx, id, value, NULL, NULL, JSPROP_ENUMERATE))
4480 JS_ASSERT(pnid->pn_type == TOK_NAME ||
4481 pnid->pn_type == TOK_STRING);
4482 jsid id = ATOM_TO_JSID(pnid->pn_atom);
4483 if (!((pnid->pn_atom == cx->runtime->atomState.protoAtom)
4484 ? js_SetPropertyHelper(cx, obj, id, 0, &value, strictChecks)
4485 : js_DefineNativeProperty(cx, obj, id, value, NULL, NULL,
4486 JSPROP_ENUMERATE, 0, 0, NULL, 0))) {
4492 vp->setObject(*obj);
4496 JS_NOT_REACHED("Unexpected node");
4502 EmitSingletonInitialiser(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn)
4505 if (!pn->getConstantValue(cx, cg->needStrictChecks(), &value))
4508 JS_ASSERT(value.isObject());
4509 JSObjectBox *objbox = cg->parser->newObjectBox(&value.toObject());
4513 return EmitObjectOp(cx, objbox, JSOP_OBJECT, cg);
4516 /* See the SRC_FOR source note offsetBias comments later in this file. */
4517 JS_STATIC_ASSERT(JSOP_NOP_LENGTH == 1);
4518 JS_STATIC_ASSERT(JSOP_POP_LENGTH == 1);
4520 class EmitLevelManager
4523 JSCodeGenerator *cg;
4526 EmitLevelManager(JSCodeGenerator *cg) : cg(cg) { cg->emitLevel++; }
4528 ~EmitLevelManager() { cg->emitLevel--; }
4532 js_EmitTree(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn)
4534 JSBool ok, useful, wantval;
4535 JSStmtInfo *stmt, stmtInfo;
4536 ptrdiff_t top, off, tmp, beq, jmp, tmp2, tmp3;
4537 JSParseNode *pn2, *pn3;
4539 JSAtomListElement *ale;
4542 ptrdiff_t noteIndex, noteIndex2;
4543 JSSrcNoteType noteType;
4548 EmitLevelManager elm(cg);
4549 #if JS_HAS_SHARP_VARS
4553 JS_CHECK_RECURSION(cx, return JS_FALSE);
4556 pn->pn_offset = top = CG_OFFSET(cg);
4558 /* Emit notes to tell the current bytecode's source line number. */
4559 UPDATE_LINE_NUMBER_NOTES(cx, cg, pn->pn_pos.begin.lineno);
4561 switch (pn->pn_type) {
4567 #if JS_HAS_XML_SUPPORT
4568 if (pn->pn_arity == PN_NULLARY) {
4569 if (js_Emit1(cx, cg, JSOP_GETFUNNS) < 0)
4575 fun = (JSFunction *) pn->pn_funbox->object;
4576 JS_ASSERT(FUN_INTERPRETED(fun));
4577 if (fun->u.i.script) {
4579 * This second pass is needed to emit JSOP_NOP with a source note
4580 * for the already-emitted function definition prolog opcode. See
4581 * comments in the TOK_LC case.
4583 JS_ASSERT(pn->pn_op == JSOP_NOP);
4584 JS_ASSERT(cg->inFunction());
4585 if (!EmitFunctionDefNop(cx, cg, pn->pn_index))
4590 JS_ASSERT_IF(pn->pn_funbox->tcflags & TCF_FUN_HEAVYWEIGHT,
4591 FUN_KIND(fun) == JSFUN_INTERPRETED);
4593 /* Generate code for the function's body. */
4594 void *cg2mark = JS_ARENA_MARK(cg->codePool);
4596 JS_ARENA_ALLOCATE_TYPE(cg2space, JSCodeGenerator, cg->codePool);
4598 js_ReportOutOfScriptQuota(cx);
4601 JSCodeGenerator *cg2 =
4602 new (cg2space) JSCodeGenerator(cg->parser,
4603 cg->codePool, cg->notePool,
4604 pn->pn_pos.begin.lineno);
4609 cg2->flags = pn->pn_funbox->tcflags | TCF_COMPILING | TCF_IN_FUNCTION |
4610 (cg->flags & TCF_FUN_MIGHT_ALIAS_LOCALS);
4611 cg2->bindings.transfer(cx, &pn->pn_funbox->bindings);
4612 #if JS_HAS_SHARP_VARS
4613 if (cg2->flags & TCF_HAS_SHARPS) {
4614 cg2->sharpSlotBase = cg2->bindings.sharpSlotBase(cx);
4615 if (cg2->sharpSlotBase < 0)
4619 cg2->setFunction(fun);
4620 cg2->funbox = pn->pn_funbox;
4624 * jsparse.cpp:SetStaticLevel limited static nesting depth to fit in 16
4625 * bits and to reserve the all-ones value, thereby reserving the magic
4626 * FREE_UPVAR_COOKIE value. Note the cg2->staticLevel assignment below.
4628 JS_ASSERT(cg->staticLevel < JS_BITMASK(16) - 1);
4629 cg2->staticLevel = cg->staticLevel + 1;
4631 /* We measured the max scope depth when we parsed the function. */
4632 JS_SCOPE_DEPTH_METERING(cg2->maxScopeDepth = uint16(-1));
4633 if (!js_EmitFunctionScript(cx, cg2, pn->pn_body))
4636 cg2->~JSCodeGenerator();
4637 JS_ARENA_RELEASE(cg->codePool, cg2mark);
4642 /* Make the function object a literal in the outer script's pool. */
4643 index = cg->objectList.index(pn->pn_funbox);
4645 /* Emit a bytecode pointing to the closure object in its immediate. */
4647 if (op != JSOP_NOP) {
4648 if ((pn->pn_funbox->tcflags & TCF_GENEXP_LAMBDA) &&
4649 js_NewSrcNote(cx, cg, SRC_GENEXP) < 0) {
4652 EMIT_INDEX_OP(op, index);
4654 /* Make blockChain determination quicker. */
4655 if (EmitBlockChain(cx, cg) < 0)
4661 * For a script we emit the code as we parse. Thus the bytecode for
4662 * top-level functions should go in the prolog to predefine their
4663 * names in the variable object before the already-generated main code
4664 * is executed. This extra work for top-level scripts is not necessary
4665 * when we emit the code for a function. It is fully parsed prior to
4666 * invocation of the emitter and calls to js_EmitTree for function
4667 * definitions can be scheduled before generating the rest of code.
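/*
 * For example (illustrative): at the top level of a script,
 *
 *   f();
 *   function f() {}
 *
 * works because the JSOP_DEFFUN for f is emitted into the prolog, which runs
 * before the main code containing the call.
 */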
4669 if (!cg->inFunction()) {
4670 JS_ASSERT(!cg->topStmt);
4671 if (!BindGlobal(cx, cg, pn, fun->atom))
4673 if (pn->pn_cookie.isFree()) {
4674 CG_SWITCH_TO_PROLOG(cg);
4675 op = FUN_FLAT_CLOSURE(fun) ? JSOP_DEFFUN_FC : JSOP_DEFFUN;
4676 EMIT_INDEX_OP(op, index);
4678 /* Make blockChain determination quicker. */
4679 if (EmitBlockChain(cx, cg) < 0)
4681 CG_SWITCH_TO_MAIN(cg);
4684 /* Emit NOP for the decompiler. */
4685 if (!EmitFunctionDefNop(cx, cg, index))
4691 cg->bindings.lookup(cx, fun->atom, &slot);
4692 JS_ASSERT(kind == VARIABLE || kind == CONSTANT);
4693 JS_ASSERT(index < JS_BIT(20));
4694 pn->pn_index = index;
4695 op = fun->isFlatClosure() ? JSOP_DEFLOCALFUN_FC : JSOP_DEFLOCALFUN;
4696 if (pn->isClosed() &&
4698 !cg->closedVars.append(pn->pn_cookie.slot())) {
4701 if (!EmitSlotIndexOp(cx, op, slot, index, cg))
4704 /* Make blockChain determination quicker. */
4705 if (EmitBlockChain(cx, cg) < 0)
4713 JSParseNode *pnlast = pn->last();
4714 for (JSParseNode *pn2 = pn->pn_head; pn2 != pnlast; pn2 = pn2->pn_next) {
4717 if (!BindNameToSlot(cx, cg, pn2))
4719 if (JOF_OPTYPE(pn2->pn_op) == JOF_QARG && cg->shouldNoteClosedName(pn2)) {
4720 if (!cg->closedArgs.append(pn2->pn_cookie.slot()))
4724 ok = js_EmitTree(cx, cg, pnlast);
4729 JS_ASSERT(cg->lexdeps.count == 0);
4730 JS_ASSERT(pn->pn_names.count != 0);
4731 cg->lexdeps = pn->pn_names;
4732 ok = js_EmitTree(cx, cg, pn->pn_tree);
4736 /* Initialize so we can detect else-if chains and avoid recursion. */
4737 stmtInfo.type = STMT_IF;
4742 /* Emit code for the condition before pushing stmtInfo. */
4743 if (!js_EmitTree(cx, cg, pn->pn_kid1))
4745 top = CG_OFFSET(cg);
4746 if (stmtInfo.type == STMT_IF) {
4747 js_PushStatement(cg, &stmtInfo, STMT_IF, top);
4750 * We came here from the goto further below that detects else-if
4751 * chains, so we must mutate stmtInfo back into a STMT_IF record.
4752 * Also (see below for why) we need a note offset for SRC_IF_ELSE
4753 * to help the decompiler. Actually, we need two offsets, one for
4754 * decompiling any else clause and the second for decompiling an
4755 * else-if chain without bracing, overindenting, or incorrectly
4756 * scoping let declarations.
4758 JS_ASSERT(stmtInfo.type == STMT_ELSE);
4759 stmtInfo.type = STMT_IF;
4760 stmtInfo.update = top;
4761 if (!js_SetSrcNoteOffset(cx, cg, noteIndex, 0, jmp - beq))
4763 if (!js_SetSrcNoteOffset(cx, cg, noteIndex, 1, top - beq))
4767 /* Emit an annotated branch-if-false around the then part. */
4769 noteIndex = js_NewSrcNote(cx, cg, pn3 ? SRC_IF_ELSE : SRC_IF);
4772 beq = EmitJump(cx, cg, JSOP_IFEQ, 0);
4776 /* Emit code for the then and optional else parts. */
4777 if (!js_EmitTree(cx, cg, pn->pn_kid2))
4780 /* Modify stmtInfo so we know we're in the else part. */
4781 stmtInfo.type = STMT_ELSE;
4784 * Emit a JSOP_BACKPATCH op to jump from the end of our then part
4785 * around the else part. The js_PopStatementCG call at the bottom
4786 * of this switch case will fix up the backpatch chain linked from
4787 * stmtInfo.breaks.
4789 jmp = EmitGoto(cx, cg, &stmtInfo, &stmtInfo.breaks, NULL, SRC_NULL);
4793 /* Ensure the branch-if-false comes here, then emit the else. */
4794 CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, beq);
4795 if (pn3->pn_type == TOK_IF) {
4800 if (!js_EmitTree(cx, cg, pn3))
4804 * Annotate SRC_IF_ELSE with the offset from branch to jump, for
4805 * the decompiler's benefit. We can't just "back up" from the pc
4806 * of the else clause, because we don't know whether an extended
4807 * jump was required to leap from the end of the then clause over
4810 if (!js_SetSrcNoteOffset(cx, cg, noteIndex, 0, jmp - beq))
4813 /* No else part, fixup the branch-if-false to come here. */
4814 CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, beq);
4816 ok = js_PopStatementCG(cx, cg);
4820 /* Out of line to avoid bloating js_EmitTree's stack frame size. */
4821 ok = EmitSwitch(cx, cg, pn, &stmtInfo);
4826 * Minimize bytecodes issued for one or more iterations by jumping to
4827 * the condition below the body and closing the loop if the condition
4828 * is true with a backward branch. For iteration count i:
4830 *  i    test at the top                  test at the bottom
4831 *  =    ===============                  ==================
4832 *  0    ifeq-pass                        goto; ifne-fail
4833 *  1    ifeq-fail; goto; ifne-pass       goto; ifne-pass; ifne-fail
4834 *  2    2*(ifeq-fail; goto); ifeq-pass   goto; 2*ifne-pass; ifne-fail
4836 *  N    N*(ifeq-fail; goto); ifeq-pass   goto; N*ifne-pass; ifne-fail
4838 * SpiderMonkey, pre-mozilla.org, emitted while parsing and so used
4839 * test at the top. When JSParseNode trees were added during the ES3
4840 * work (1998-9), the code generation scheme was not optimized, and
4841 * the decompiler continued to take advantage of the branch and jump
4842 * that bracketed the body. But given the SRC_WHILE note, it is easy
4843 * to support the more efficient scheme.
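 *
 * Concretely, the code emitted below has roughly this shape:
 *
 *     goto C            annotated SRC_WHILE
 *   T: trace            annotated SRC_TRACE
 *     <body>
 *   C: <cond>
 *     ifne T
 *
 * so each pass through the loop takes a single backward branch and the
 * condition is evaluated once more than the body runs.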
4845 js_PushStatement(cg, &stmtInfo, STMT_WHILE_LOOP, top);
4846 noteIndex = js_NewSrcNote(cx, cg, SRC_WHILE);
4849 jmp = EmitJump(cx, cg, JSOP_GOTO, 0);
4852 noteIndex2 = js_NewSrcNote(cx, cg, SRC_TRACE);
4855 top = EmitTraceOp(cx, cg);
4858 if (!js_EmitTree(cx, cg, pn->pn_right))
4860 CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, jmp);
4861 if (!js_EmitTree(cx, cg, pn->pn_left))
4863 beq = EmitJump(cx, cg, JSOP_IFNE, top - CG_OFFSET(cg));
4867 * Be careful: We must set noteIndex2 before noteIndex in case the noteIndex note gets bigger.
4870 if (!js_SetSrcNoteOffset(cx, cg, noteIndex2, 0, beq - top))
4872 if (!js_SetSrcNoteOffset(cx, cg, noteIndex, 0, beq - jmp))
4874 ok = js_PopStatementCG(cx, cg);
4878 /* Emit an annotated nop so we know to decompile a 'do' keyword. */
4879 noteIndex = js_NewSrcNote(cx, cg, SRC_WHILE);
4880 if (noteIndex < 0 || js_Emit1(cx, cg, JSOP_NOP) < 0)
4883 noteIndex2 = js_NewSrcNote(cx, cg, SRC_TRACE);
4887 /* Compile the loop body. */
4888 top = EmitTraceOp(cx, cg);
4891 js_PushStatement(cg, &stmtInfo, STMT_DO_LOOP, top);
4892 if (!js_EmitTree(cx, cg, pn->pn_left))
4895 /* Set loop and enclosing label update offsets, for continue. */
4898 stmt->update = CG_OFFSET(cg);
4899 } while ((stmt = stmt->down) != NULL && stmt->type == STMT_LABEL);
4901 /* Compile the loop condition, now that continues know where to go. */
4902 if (!js_EmitTree(cx, cg, pn->pn_right))
4906 * Since we use JSOP_IFNE for other purposes as well as for do-while
4907 * loops, we must store 1 + (beq - top) in the SRC_WHILE note offset,
4908 * and the decompiler must get that delta and decompile recursively.
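 *
 * Given that encoding, a note reader can recover the loop top as
 * top == beq - (offset - 1), where offset is the value stored in the
 * SRC_WHILE note.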
4910 beq = EmitJump(cx, cg, JSOP_IFNE, top - CG_OFFSET(cg));
4914 * Be careful: We must set noteIndex2 before noteIndex in case the noteIndex note gets bigger.
4917 if (!js_SetSrcNoteOffset(cx, cg, noteIndex2, 0, beq - top))
4919 if (!js_SetSrcNoteOffset(cx, cg, noteIndex, 0, 1 + (beq - top)))
4921 ok = js_PopStatementCG(cx, cg);
4925 beq = 0; /* suppress gcc warnings */
4928 js_PushStatement(cg, &stmtInfo, STMT_FOR_LOOP, top);
4930 if (pn2->pn_type == TOK_IN) {
4931 /* Set stmtInfo type for later testing. */
4932 stmtInfo.type = STMT_FOR_IN_LOOP;
4935 * If the left part is 'var x', emit code to define x if necessary
4936 * using a prolog opcode, but do not emit a pop. If the left part
4937 * is 'var x = i', emit prolog code to define x if necessary; then
4938 * emit code to evaluate i, assign the result to x, and pop the
4939 * result off the stack.
4941 * All the logic to do this is implemented in the outer switch's
4942 * TOK_VAR case, conditioned on pn_xflags flags set by the parser.
4944 * In the 'for (var x = i in o) ...' case, the js_EmitTree(...pn3)
4945 * called here will generate the proper note for the assignment
4946 * op that sets x = i, hoisting the initialized var declaration
4947 * out of the loop: 'var x = i; for (x in o) ...'.
4949 * In the 'for (var x in o) ...' case, nothing but the prolog op
4950 * (if needed) should be generated here; we must emit the note
4951 * just before the JSOP_FOR* opcode in the switch on pn3->pn_type
4952 * a bit below, so nothing is hoisted: 'for (var x in o) ...'.
4954 * A 'for (let x = i in o)' loop must not be hoisted, since in
4955 * this form the let variable is scoped by the loop body (but not
4956 * the head). The initializer expression i must be evaluated for
4957 * any side effects. So we hoist only i in the let case.
4960 type = PN_TYPE(pn3);
4961 cg->flags |= TCF_IN_FOR_INIT;
4962 if (TokenKindIsDecl(type) && !js_EmitTree(cx, cg, pn3))
4964 cg->flags &= ~TCF_IN_FOR_INIT;
4966 /* Compile the object expression to the right of 'in'. */
4967 if (!js_EmitTree(cx, cg, pn2->pn_right))
4971 * Emit a bytecode to convert top of stack value to the iterator
4972 * object depending on the loop variant (for-in, for-each-in, or
4973 * destructuring for-in).
4975 JS_ASSERT(pn->pn_op == JSOP_ITER);
4976 if (js_Emit2(cx, cg, JSOP_ITER, (uint8) pn->pn_iflags) < 0)
4979 /* Annotate so the decompiler can find the loop-closing jump. */
4980 noteIndex = js_NewSrcNote(cx, cg, SRC_FOR_IN);
4985 * Jump down to the loop condition to minimize overhead assuming at
4986 * least one iteration, as the other loop forms do.
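 *
 * The overall for-in shape emitted here is approximately:
 *
 *     <o>; iter <flags>       annotated SRC_FOR_IN
 *     goto C
 *   T: trace
 *     for*                    forname/forlocal/forprop/forelem, per lhs
 *     <body>
 *   C: moreiter
 *     ifne T
 *     enditer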
4988 jmp = EmitJump(cx, cg, JSOP_GOTO, 0);
4992 noteIndex2 = js_NewSrcNote(cx, cg, SRC_TRACE);
4996 top = CG_OFFSET(cg);
4997 SET_STATEMENT_TOP(&stmtInfo, top);
4998 if (EmitTraceOp(cx, cg) < 0)
5002 intN loopDepth = cg->stackDepth;
5006 * Compile a JSOP_FOR* bytecode based on the left hand side.
5008 * Initialize op to JSOP_SETNAME in case of |for ([a, b] in o)...|
5009 * or similar, to signify assignment, rather than declaration, to
5010 * the decompiler. EmitDestructuringOps takes a prolog bytecode
5011 * parameter and emits the appropriate source note, defaulting to
5012 * assignment, so JSOP_SETNAME is not critical here; many similar
5013 * ops could be used -- just not JSOP_NOP (which means 'let').
5017 #if JS_HAS_BLOCK_SCOPE
5021 JS_ASSERT(pn3->pn_arity == PN_LIST && pn3->pn_count == 1);
5023 #if JS_HAS_DESTRUCTURING
5024 if (pn3->pn_type == TOK_ASSIGN) {
5026 JS_ASSERT(pn3->pn_type == TOK_RB || pn3->pn_type == TOK_RC);
5028 if (pn3->pn_type == TOK_RB || pn3->pn_type == TOK_RC) {
5029 op = PN_OP(pn2->pn_left);
5030 goto destructuring_for;
5033 JS_ASSERT(pn3->pn_type == TOK_NAME);
5040 * Always annotate JSOP_FORLOCAL if given input of the form
5041 * 'for (let x in o)' -- the decompiler must not hoist the
5042 * 'let x' out of the loop head, or x will be bound in the
5043 * wrong scope. Likewise, but in this case only for the sake
5044 * of higher decompilation fidelity, do not hoist 'var x'
5045 * when given 'for (var x in o)'.
5048 #if JS_HAS_BLOCK_SCOPE
5051 (type == TOK_VAR && !pn3->maybeExpr())) &&
5052 js_NewSrcNote2(cx, cg, SRC_DECL,
5055 : SRC_DECL_LET) < 0) {
5058 UpvarCookie cookie = pn3->pn_cookie;
5059 if (!cookie.isFree()) {
5070 case JSOP_GETGLOBAL:
5075 JS_NOT_REACHED("unexpected opcode");
5078 pn3->pn_op = JSOP_FORNAME;
5079 if (!BindNameToSlot(cx, cg, pn3))
5082 cookie = pn3->pn_cookie;
5084 if (pn3->isConst()) {
5085 ReportCompileErrorNumber(cx, CG_TS(cg), pn3, JSREPORT_ERROR,
5086 JSMSG_BAD_FOR_LEFTSIDE);
5089 if (!cookie.isFree()) {
5090 atomIndex = (jsatomid) cookie.asInteger();
5091 EMIT_UINT16_IMM_OP(op, atomIndex);
5093 if (!EmitAtomOp(cx, pn3, op, cg))
5101 * 'for (o.p in q)' can use JSOP_FORPROP only if evaluating 'o'
5102 * has no side effects.
5105 if (!CheckSideEffects(cx, cg, pn3->expr(), &useful))
5108 if (!EmitPropOp(cx, pn3, JSOP_FORPROP, cg, JS_FALSE))
5114 #if JS_HAS_DESTRUCTURING
5118 if (js_Emit1(cx, cg, JSOP_FORELEM) < 0)
5120 JS_ASSERT(cg->stackDepth >= 2);
5122 #if JS_HAS_DESTRUCTURING
5123 if (pn3->pn_type == TOK_RB || pn3->pn_type == TOK_RC) {
5124 if (!EmitDestructuringOps(cx, cg, op, pn3))
5126 if (js_Emit1(cx, cg, JSOP_POP) < 0)
5130 if (pn3->pn_type == TOK_LP) {
5131 JS_ASSERT(pn3->pn_xflags & PNX_SETCALL);
5132 if (!js_EmitTree(cx, cg, pn3))
5134 if (js_Emit1(cx, cg, JSOP_ENUMELEM) < 0)
5137 #if JS_HAS_XML_SUPPORT
5138 if (pn3->pn_type == TOK_UNARYOP) {
5139 JS_ASSERT(pn3->pn_op == JSOP_BINDXMLNAME);
5140 if (!js_EmitTree(cx, cg, pn3))
5142 if (js_Emit1(cx, cg, JSOP_ENUMELEM) < 0)
5146 if (!EmitElemOp(cx, pn3, JSOP_ENUMELEM, cg))
5151 /* The stack should be balanced around the JSOP_FOR* opcode sequence. */
5152 JS_ASSERT(cg->stackDepth == loopDepth);
5154 tmp2 = CG_OFFSET(cg);
5156 /* Emit code for the loop body. */
5157 if (!js_EmitTree(cx, cg, pn->pn_right))
5160 /* Set loop and enclosing "update" offsets, for continue. */
5163 stmt->update = CG_OFFSET(cg);
5164 } while ((stmt = stmt->down) != NULL && stmt->type == STMT_LABEL);
5167 * Fixup the goto that starts the loop to jump down to JSOP_MOREITER.
5169 CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, jmp);
5170 if (js_Emit1(cx, cg, JSOP_MOREITER) < 0)
5172 beq = EmitJump(cx, cg, JSOP_IFNE, top - CG_OFFSET(cg));
5177 * Be careful: We must set noteIndex2 before noteIndex in case the noteIndex note gets bigger.
5180 if (!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex2, 0, beq - top))
5182 /* Set the first srcnote offset so we can find the start of the loop body. */
5183 if (!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0, tmp2 - jmp))
5185 /* Set the second srcnote offset so we can find the closing jump. */
5186 if (!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 1, beq - jmp))
5189 /* C-style for (init; cond; update) ... loop. */
5193 /* No initializer: emit an annotated nop for the decompiler. */
5196 cg->flags |= TCF_IN_FOR_INIT;
5197 #if JS_HAS_DESTRUCTURING
5198 if (pn3->pn_type == TOK_ASSIGN &&
5199 !MaybeEmitGroupAssignment(cx, cg, op, pn3, &op)) {
5203 if (op == JSOP_POP) {
5204 if (!js_EmitTree(cx, cg, pn3))
5206 if (TokenKindIsDecl(PN_TYPE(pn3))) {
5208 * Check whether a destructuring-initialized var decl
5209 * was optimized to a group assignment. If so, we do
5210 * not need to emit a pop below, so switch to a nop,
5211 * just for the decompiler.
5213 JS_ASSERT(pn3->pn_arity == PN_LIST);
5214 if (pn3->pn_xflags & PNX_GROUPINIT)
5218 cg->flags &= ~TCF_IN_FOR_INIT;
5222 * NB: the SRC_FOR note has offsetBias 1 (JSOP_{NOP,POP}_LENGTH).
5223 * Use tmp to hold the biased srcnote "top" offset, which differs
5224 * from the top local variable by the length of the JSOP_GOTO{,X}
5225 * emitted in between tmp and top if this loop has a condition.
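 *
 * The overall shape for "for (init; cond; update) body" is then
 * approximately:
 *
 *     <init>; pop (or nop)    annotated SRC_FOR
 *     goto C                  only when a condition is present
 *   T: trace
 *     <body>
 *   U: <update>; pop (or nop)
 *   C: <cond>; ifne T         or an unconditional goto T without a cond
 *
 * The SRC_FOR note's three offsets locate the condition, the update
 * part, and the loop-closing jump, as the js_SetSrcNoteOffset calls
 * below record.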
5227 noteIndex = js_NewSrcNote(cx, cg, SRC_FOR);
5228 if (noteIndex < 0 || js_Emit1(cx, cg, op) < 0)
5230 tmp = CG_OFFSET(cg);
5233 /* Goto the loop condition, which branches back to iterate. */
5234 jmp = EmitJump(cx, cg, JSOP_GOTO, 0);
5239 top = CG_OFFSET(cg);
5240 SET_STATEMENT_TOP(&stmtInfo, top);
5242 noteIndex2 = js_NewSrcNote(cx, cg, SRC_TRACE);
5246 /* Emit code for the loop body. */
5247 if (EmitTraceOp(cx, cg) < 0)
5249 if (!js_EmitTree(cx, cg, pn->pn_right))
5252 /* Set the second note offset so we can find the update part. */
5253 JS_ASSERT(noteIndex != -1);
5254 tmp2 = CG_OFFSET(cg);
5256 /* Set loop and enclosing "update" offsets, for continue. */
5259 stmt->update = CG_OFFSET(cg);
5260 } while ((stmt = stmt->down) != NULL && stmt->type == STMT_LABEL);
5262 /* Check for update code to do before the condition (if any). */
5266 #if JS_HAS_DESTRUCTURING
5267 if (pn3->pn_type == TOK_ASSIGN &&
5268 !MaybeEmitGroupAssignment(cx, cg, op, pn3, &op)) {
5272 if (op == JSOP_POP && !js_EmitTree(cx, cg, pn3))
5275 /* Always emit the POP or NOP, to help the decompiler. */
5276 if (js_Emit1(cx, cg, op) < 0)
5279 /* Restore the absolute line number for source note readers. */
5280 off = (ptrdiff_t) pn->pn_pos.end.lineno;
5281 if (CG_CURRENT_LINE(cg) != (uintN) off) {
5282 if (js_NewSrcNote2(cx, cg, SRC_SETLINE, off) < 0)
5284 CG_CURRENT_LINE(cg) = (uintN) off;
5288 tmp3 = CG_OFFSET(cg);
5291 /* Fix up the goto from top to target the loop condition. */
5292 JS_ASSERT(jmp >= 0);
5293 CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, jmp);
5295 if (!js_EmitTree(cx, cg, pn2->pn_kid2))
5300 * Be careful: We must set noteIndex2 before noteIndex in case the noteIndex note gets bigger.
5303 if (!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex2, 0,
5304 CG_OFFSET(cg) - top)) {
5307 /* Set the first note offset so we can find the loop condition. */
5308 if (!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0,
5312 if (!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 1,
5316 /* The third note offset helps us find the loop-closing jump. */
5317 if (!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 2,
5318 CG_OFFSET(cg) - tmp)) {
5323 beq = EmitJump(cx, cg, JSOP_IFNE, top - CG_OFFSET(cg));
5327 /* No loop condition -- emit the loop-closing jump. */
5328 jmp = EmitJump(cx, cg, JSOP_GOTO, top - CG_OFFSET(cg));
5334 /* Now fixup all breaks and continues (before for/in's JSOP_ENDITER). */
5335 if (!js_PopStatementCG(cx, cg))
5338 if (pn2->pn_type == TOK_IN) {
5339 if (!NewTryNote(cx, cg, JSTRY_ITER, cg->stackDepth, top, CG_OFFSET(cg)) ||
5340 js_Emit1(cx, cg, JSOP_ENDITER) < 0) {
5350 ale = cg->atomList.add(cg->parser, atom);
5353 while (stmt->type != STMT_LABEL || stmt->label != atom)
5355 noteType = SRC_BREAK2LABEL;
5358 while (!STMT_IS_LOOP(stmt) && stmt->type != STMT_SWITCH)
5360 noteType = (stmt->type == STMT_SWITCH) ? SRC_NULL : SRC_BREAK;
5363 if (EmitGoto(cx, cg, stmt, &stmt->breaks, ale, noteType) < 0)
5371 /* Find the loop statement enclosed by the matching label. */
5372 JSStmtInfo *loop = NULL;
5373 ale = cg->atomList.add(cg->parser, atom);
5376 while (stmt->type != STMT_LABEL || stmt->label != atom) {
5377 if (STMT_IS_LOOP(stmt))
5382 noteType = SRC_CONT2LABEL;
5385 while (!STMT_IS_LOOP(stmt))
5387 noteType = SRC_CONTINUE;
5390 if (EmitGoto(cx, cg, stmt, &stmt->continues, ale, noteType) < 0)
5395 if (!js_EmitTree(cx, cg, pn->pn_left))
5397 js_PushStatement(cg, &stmtInfo, STMT_WITH, CG_OFFSET(cg));
5398 if (js_Emit1(cx, cg, JSOP_ENTERWITH) < 0)
5401 /* Make blockChain determination quicker. */
5402 if (EmitBlockChain(cx, cg) < 0)
5404 if (!js_EmitTree(cx, cg, pn->pn_right))
5406 if (js_Emit1(cx, cg, JSOP_LEAVEWITH) < 0)
5408 ok = js_PopStatementCG(cx, cg);
5413 ptrdiff_t tryStart, tryEnd, catchJump, finallyStart;
5415 JSParseNode *lastCatch;
5420 * Push stmtInfo to track jumps-over-catches and gosubs-to-finally
5423 * When a finally block is active (STMT_FINALLY in our tree context),
5424 * non-local jumps (including jumps-over-catches) result in a GOSUB
5425 * being written into the bytecode stream and fixed-up later (c.f.
5426 * EmitBackPatchOp and BackPatch).
5428 js_PushStatement(cg, &stmtInfo,
5429 pn->pn_kid3 ? STMT_FINALLY : STMT_TRY,
5433 * Since an exception can be thrown at any place inside the try block,
5434 * we need to restore the stack and the scope chain before we transfer
5435 * the control to the exception handler.
5437 * For that we store in a try note associated with the catch or
5438 * finally block the stack depth upon the try entry. The interpreter
5439 * uses this depth to properly unwind the stack and the scope chain.
5441 depth = cg->stackDepth;
5443 /* Mark try location for decompilation, then emit try block. */
5444 if (js_Emit1(cx, cg, JSOP_TRY) < 0)
5446 tryStart = CG_OFFSET(cg);
5447 if (!js_EmitTree(cx, cg, pn->pn_kid1))
5449 JS_ASSERT(depth == cg->stackDepth);
5451 /* GOSUB to finally, if present. */
5453 if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
5455 jmp = EmitBackPatchOp(cx, cg, JSOP_BACKPATCH, &GOSUBS(stmtInfo));
5460 /* Emit (hidden) jump over catch and/or finally. */
5461 if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
5463 jmp = EmitBackPatchOp(cx, cg, JSOP_BACKPATCH, &catchJump);
5467 tryEnd = CG_OFFSET(cg);
5469 JSObjectBox *prevBox = NULL;
5470 /* If this try has a catch block, emit it. */
5474 uintN count = 0; /* previous catch block's population */
5477 * The emitted code for a catch block looks like:
5480 *   [throwing]                          only if 2nd+ catch block
5481 *   [leaveblock]                        only if 2nd+ catch block
5482 *   enterblock                          with SRC_CATCH
5484 *   [dup]                               only if catchguard
5485 *   setlocalpop <slot>                  or destructuring code
5486 *   [< catchguard code >]               if there's a catchguard
5487 *   [ifeq <offset to next catch block>]      " "
5488 *   [pop]                               only if catchguard
5489 *   < catch block contents >
5491 *   goto <end of catch blocks>          non-local; finally applies
5493 * If there's no catch block without a catchguard, the last
5494 * <offset to next catch block> points to rethrow code. This
5495 * code will [gosub] to the finally code if appropriate, and is
5496 * also used for the catch-all trynote for capturing exceptions
5497 * thrown from catch{} blocks.
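 *
 * For example, with SpiderMonkey's guarded catches:
 *
 *     try {
 *         ...
 *     } catch (e if e instanceof TypeError) {
 *         ...
 *     } catch (e) {
 *         ...
 *     }
 *
 * the first catch gets catchguard code and an ifeq to the second one,
 * while the final unguarded catch needs neither a guard test nor the
 * trailing rethrow sequence.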
5499 for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
5500 ptrdiff_t guardJump, catchNote;
5502 JS_ASSERT(cg->stackDepth == depth);
5503 guardJump = GUARDJUMP(stmtInfo);
5504 if (guardJump != -1) {
5505 if (EmitKnownBlockChain(cx, cg, prevBox) < 0)
5508 /* Fix up and clean up previous catch block. */
5509 CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, guardJump);
5512 * Account for JSOP_ENTERBLOCK (whose block object count
5513 * is saved below) and pushed exception object that we
5514 * still have after jumping from the previous guard.
5516 cg->stackDepth = depth + count + 1;
5519 * Move exception back to cx->exception to prepare for
5520 * the next catch. We hide [throwing] from the decompiler
5521 * since it compensates for the hidden JSOP_DUP at the
5522 * start of the previous guarded catch.
5524 if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0 ||
5525 js_Emit1(cx, cg, JSOP_THROWING) < 0) {
5528 if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
5530 if (!EmitLeaveBlock(cx, cg, JSOP_LEAVEBLOCK, prevBox))
5532 JS_ASSERT(cg->stackDepth == depth);
5536 * Annotate the JSOP_ENTERBLOCK that's about to be generated
5537 * by the call to js_EmitTree immediately below. Save this
5538 * source note's index in stmtInfo for use by the TOK_CATCH:
5539 * case, where the length of the catch guard is set as the note's offset.
5542 catchNote = js_NewSrcNote2(cx, cg, SRC_CATCH, 0);
5545 CATCHNOTE(stmtInfo) = catchNote;
5548 * Emit the lexical scope and catch body. Save the catch's
5549 * block object population via count, for use when targeting
5550 * guardJump at the next catch (the guard mismatch case).
5552 JS_ASSERT(pn3->pn_type == TOK_LEXICALSCOPE);
5553 count = OBJ_BLOCK_COUNT(cx, pn3->pn_objbox->object);
5554 prevBox = pn3->pn_objbox;
5555 if (!js_EmitTree(cx, cg, pn3))
5558 /* gosub <finally>, if required */
5560 jmp = EmitBackPatchOp(cx, cg, JSOP_BACKPATCH,
5564 JS_ASSERT(cg->stackDepth == depth);
5568 * Jump over the remaining catch blocks. This will get fixed
5569 * up to jump to after catch/finally.
5571 if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
5573 jmp = EmitBackPatchOp(cx, cg, JSOP_BACKPATCH, &catchJump);
5578 * Save a pointer to the last catch node to handle try-finally
5579 * and try-catch(guard)-finally special cases.
5581 lastCatch = pn3->expr();
5586 * Last catch guard jumps to the rethrow code sequence if none of the
5587 * guards match. Target guardJump at the beginning of the rethrow
5588 * sequence, just in case a guard expression throws and leaves the stack unbalanced.
5591 if (lastCatch && lastCatch->pn_kid2) {
5592 if (EmitKnownBlockChain(cx, cg, prevBox) < 0)
5595 CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, GUARDJUMP(stmtInfo));
5597 /* Sync the stack to take into account pushed exception. */
5598 JS_ASSERT(cg->stackDepth == depth);
5599 cg->stackDepth = depth + 1;
5602 * Rethrow the exception, delegating execution of the finally block,
5603 * if any, to the exception handler.
5605 if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0 ||
5606 js_Emit1(cx, cg, JSOP_THROW) < 0) {
5610 if (EmitBlockChain(cx, cg) < 0)
5614 JS_ASSERT(cg->stackDepth == depth);
5616 /* Emit finally handler if any. */
5617 finallyStart = 0; /* to quell GCC uninitialized warnings */
5620 * Fix up the gosubs that might have been emitted before non-local
5621 * jumps to the finally code.
5623 if (!BackPatch(cx, cg, GOSUBS(stmtInfo), CG_NEXT(cg), JSOP_GOSUB))
5626 finallyStart = CG_OFFSET(cg);
5628 /* Indicate that we're emitting a subroutine body. */
5629 stmtInfo.type = STMT_SUBROUTINE;
5630 if (!UpdateLineNumberNotes(cx, cg, pn->pn_kid3->pn_pos.begin.lineno))
5632 if (js_Emit1(cx, cg, JSOP_FINALLY) < 0 ||
5633 !js_EmitTree(cx, cg, pn->pn_kid3) ||
5634 js_Emit1(cx, cg, JSOP_RETSUB) < 0) {
5637 JS_ASSERT(cg->stackDepth == depth);
5639 if (!js_PopStatementCG(cx, cg))
5642 if (js_NewSrcNote(cx, cg, SRC_ENDBRACE) < 0 ||
5643 js_Emit1(cx, cg, JSOP_NOP) < 0) {
5647 /* Fix up the end-of-try/catch jumps to come here. */
5648 if (!BackPatch(cx, cg, catchJump, CG_NEXT(cg), JSOP_GOTO))
5652 * Add the try note last, to let post-order give us the right ordering
5653 * (first to last for a given nesting level, inner to outer by level).
5656 !NewTryNote(cx, cg, JSTRY_CATCH, depth, tryStart, tryEnd)) {
5661 * If we've got a finally, mark try+catch region with additional
5662 * trynote to catch exceptions (re)thrown from a catch block or
5663 * for the try{}finally{} case.
5666 !NewTryNote(cx, cg, JSTRY_FINALLY, depth, tryStart, finallyStart)) {
5674 ptrdiff_t catchStart, guardJump;
5678 * Morph STMT_BLOCK to STMT_CATCH, note the block entry code offset,
5679 * and save the block object atom.
5682 JS_ASSERT(stmt->type == STMT_BLOCK && (stmt->flags & SIF_SCOPE));
5683 stmt->type = STMT_CATCH;
5684 catchStart = stmt->update;
5685 blockObj = stmt->blockBox->object;
5687 /* Go up one statement info record to the TRY or FINALLY record. */
5689 JS_ASSERT(stmt->type == STMT_TRY || stmt->type == STMT_FINALLY);
5691 /* Pick up the pending exception and bind it to the catch variable. */
5692 if (js_Emit1(cx, cg, JSOP_EXCEPTION) < 0)
5696 * If there is a guard, dup the exception object so it can be used
5697 * later when rethrowing or in subsequent catch blocks.
5699 if (pn->pn_kid2 && js_Emit1(cx, cg, JSOP_DUP) < 0)
5703 switch (pn2->pn_type) {
5704 #if JS_HAS_DESTRUCTURING
5707 if (!EmitDestructuringOps(cx, cg, JSOP_NOP, pn2))
5709 if (js_Emit1(cx, cg, JSOP_POP) < 0)
5715 /* Inline and specialize BindNameToSlot for pn2. */
5716 JS_ASSERT(!pn2->pn_cookie.isFree());
5717 EMIT_UINT16_IMM_OP(JSOP_SETLOCALPOP, pn2->pn_cookie.asInteger());
5724 /* Emit the guard expression, if there is one. */
5726 if (!js_EmitTree(cx, cg, pn->pn_kid2))
5728 if (!js_SetSrcNoteOffset(cx, cg, CATCHNOTE(*stmt), 0,
5729 CG_OFFSET(cg) - catchStart)) {
5732 /* ifeq <next block> */
5733 guardJump = EmitJump(cx, cg, JSOP_IFEQ, 0);
5736 GUARDJUMP(*stmt) = guardJump;
5738 /* Pop duplicated exception object as we no longer need it. */
5739 if (js_Emit1(cx, cg, JSOP_POP) < 0)
5743 /* Emit the catch body. */
5744 if (!js_EmitTree(cx, cg, pn->pn_kid3))
5748 * Annotate the JSOP_LEAVEBLOCK that will be emitted as we unwind via
5749 * our TOK_LEXICALSCOPE parent, so the decompiler knows to pop.
5751 off = cg->stackDepth;
5752 if (js_NewSrcNote2(cx, cg, SRC_CATCH, off) < 0)
5758 if (!EmitVariables(cx, cg, pn, JS_FALSE, &noteIndex))
5763 /* Push a return value */
5766 if (!js_EmitTree(cx, cg, pn2))
5769 if (js_Emit1(cx, cg, JSOP_PUSH) < 0)
5774 * EmitNonLocalJumpFixup may add fixup bytecode to close open try
5775 * blocks having finally clauses and to exit intermingled let blocks.
5776 * We can't simply transfer control flow to our caller in that case,
5777 * because we must gosub to those finally clauses from inner to outer,
5778 * with the correct stack pointer (i.e., after popping any with,
5779 * for/in, etc., slots nested inside the finally's try).
5781 * In this case we mutate JSOP_RETURN into JSOP_SETRVAL and add an
5782 * extra JSOP_RETRVAL after the fixups.
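 *
 * For example, "try { return 1; } finally { f(); }" inside a function
 * stores 1 via JSOP_SETRVAL, gosubs to the finally block so f() runs,
 * and only then returns through the trailing JSOP_RETRVAL.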
5784 top = CG_OFFSET(cg);
5785 if (js_Emit1(cx, cg, JSOP_RETURN) < 0)
5787 if (!EmitNonLocalJumpFixup(cx, cg, NULL))
5789 if (top + JSOP_RETURN_LENGTH != CG_OFFSET(cg)) {
5790 CG_BASE(cg)[top] = JSOP_SETRVAL;
5791 if (js_Emit1(cx, cg, JSOP_RETRVAL) < 0)
5793 if (EmitBlockChain(cx, cg) < 0)
5798 #if JS_HAS_GENERATORS
5800 if (!cg->inFunction()) {
5801 ReportCompileErrorNumber(cx, CG_TS(cg), pn, JSREPORT_ERROR,
5802 JSMSG_BAD_RETURN_OR_YIELD,
5807 if (!js_EmitTree(cx, cg, pn->pn_kid))
5810 if (js_Emit1(cx, cg, JSOP_PUSH) < 0)
5813 if (pn->pn_hidden && js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
5815 if (js_Emit1(cx, cg, JSOP_YIELD) < 0)
5822 #if JS_HAS_XML_SUPPORT
5823 if (pn->pn_arity == PN_UNARY) {
5824 if (!js_EmitTree(cx, cg, pn->pn_kid))
5826 if (js_Emit1(cx, cg, PN_OP(pn)) < 0)
5832 JS_ASSERT(pn->pn_arity == PN_LIST);
5835 tmp = CG_OFFSET(cg);
5836 if (pn->pn_xflags & PNX_NEEDBRACES) {
5837 noteIndex = js_NewSrcNote2(cx, cg, SRC_BRACE, 0);
5838 if (noteIndex < 0 || js_Emit1(cx, cg, JSOP_NOP) < 0)
5842 js_PushStatement(cg, &stmtInfo, STMT_BLOCK, top);
5844 JSParseNode *pnchild = pn->pn_head;
5845 if (pn->pn_xflags & PNX_FUNCDEFS) {
5847 * This block contains top-level function definitions. To ensure
5848 * that we emit the bytecode defining them before the rest of the code
5849 * in the block, we use a separate pass over functions. During the
5850 * main pass later the emitter will add JSOP_NOP with source notes
5851 * for the function to preserve the original function's position when decompiling.
5854 * Currently this is used only for functions, as compile-as-we-go
5855 * mode for scripts does not allow separate emitter passes.
5857 JS_ASSERT(cg->inFunction());
5858 if (pn->pn_xflags & PNX_DESTRUCT) {
5860 * Assign the destructuring arguments before defining any
5861 * functions; see bug 419662.
5863 JS_ASSERT(pnchild->pn_type == TOK_SEMI);
5864 JS_ASSERT(pnchild->pn_kid->pn_type == TOK_VAR);
5865 if (!js_EmitTree(cx, cg, pnchild))
5867 pnchild = pnchild->pn_next;
5870 for (pn2 = pnchild; pn2; pn2 = pn2->pn_next) {
5871 if (pn2->pn_type == TOK_FUNCTION) {
5872 if (pn2->pn_op == JSOP_NOP) {
5873 if (!js_EmitTree(cx, cg, pn2))
5877 * JSOP_DEFFUN in a top-level block with function
5878 * definitions appears, for example, when "if (true)"
5879 * is optimized away from "if (true) function x() {}".
5882 JS_ASSERT(pn2->pn_op == JSOP_DEFFUN);
5887 for (pn2 = pnchild; pn2; pn2 = pn2->pn_next) {
5888 if (!js_EmitTree(cx, cg, pn2))
5892 if (noteIndex >= 0 &&
5893 !js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0,
5894 CG_OFFSET(cg) - tmp)) {
5898 ok = js_PopStatementCG(cx, cg);
5903 JS_ASSERT(pn->pn_arity == PN_LIST);
5904 js_PushStatement(cg, &stmtInfo, STMT_SEQ, top);
5905 for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next) {
5906 if (!js_EmitTree(cx, cg, pn2))
5909 ok = js_PopStatementCG(cx, cg);
5916 * Top-level or called-from-a-native JS_Execute/EvaluateScript,
5917 * debugger, and eval frames may need the value of the ultimate
5918 * expression statement as the script's result, despite the fact
5919 * that it appears useless to the compiler.
5921 * API users may also set the JSOPTION_NO_SCRIPT_RVAL option when
5922 * calling JS_Compile* to suppress JSOP_POPV.
5924 useful = wantval = !(cg->flags & (TCF_IN_FUNCTION | TCF_NO_SCRIPT_RVAL));
5926 /* Don't eliminate expressions with side effects. */
5928 if (!CheckSideEffects(cx, cg, pn2, &useful))
5933 * Don't eliminate apparently useless expressions if they are
5934 * labeled expression statements. The tc->topStmt->update test
5935 * catches the case where we are nesting in js_EmitTree for a
5936 * labeled compound statement.
5940 cg->topStmt->type == STMT_LABEL &&
5941 cg->topStmt->update >= CG_OFFSET(cg)) {
5946 /* Don't complain about directive prologue members; just don't emit their code. */
5947 if (!pn->isDirectivePrologueMember()) {
5948 CG_CURRENT_LINE(cg) = pn2->pn_pos.begin.lineno;
5949 if (!ReportCompileErrorNumber(cx, CG_TS(cg), pn2,
5950 JSREPORT_WARNING | JSREPORT_STRICT,
5951 JSMSG_USELESS_EXPR)) {
5956 op = wantval ? JSOP_POPV : JSOP_POP;
5957 #if JS_HAS_DESTRUCTURING
5959 pn2->pn_type == TOK_ASSIGN &&
5960 !MaybeEmitGroupAssignment(cx, cg, op, pn2, &op)) {
5964 if (op != JSOP_NOP) {
5966 * Specialize JSOP_SETPROP to JSOP_SETMETHOD to defer or
5967 * avoid null closure cloning. Do this only for assignment
5968 * statements that are not completion values wanted by a
5969 * script evaluator, to ensure that the joined function
5970 * can't escape directly.
5973 PN_TYPE(pn2) == TOK_ASSIGN &&
5974 PN_OP(pn2) == JSOP_NOP &&
5975 PN_OP(pn2->pn_left) == JSOP_SETPROP &&
5976 PN_OP(pn2->pn_right) == JSOP_LAMBDA &&
5977 pn2->pn_right->pn_funbox->joinable()) {
5978 pn2->pn_left->pn_op = JSOP_SETMETHOD;
5980 if (!js_EmitTree(cx, cg, pn2))
5982 if (js_Emit1(cx, cg, op) < 0)
5990 /* Emit an annotated nop so we know to decompile a label. */
5992 ale = cg->atomList.add(cg->parser, atom);
5996 noteType = (pn2->pn_type == TOK_LC ||
5997 (pn2->pn_type == TOK_LEXICALSCOPE &&
5998 pn2->expr()->pn_type == TOK_LC))
6001 noteIndex = js_NewSrcNote2(cx, cg, noteType,
6002 (ptrdiff_t) ALE_INDEX(ale));
6003 if (noteIndex < 0 ||
6004 js_Emit1(cx, cg, JSOP_NOP) < 0) {
6008 /* Emit code for the labeled statement. */
6009 js_PushStatement(cg, &stmtInfo, STMT_LABEL, CG_OFFSET(cg));
6010 stmtInfo.label = atom;
6011 if (!js_EmitTree(cx, cg, pn2))
6013 if (!js_PopStatementCG(cx, cg))
6016 /* If the statement was compound, emit a note for the end brace. */
6017 if (noteType == SRC_LABELBRACE) {
6018 if (js_NewSrcNote(cx, cg, SRC_ENDBRACE) < 0 ||
6019 js_Emit1(cx, cg, JSOP_NOP) < 0) {
6027 * Emit SRC_PCDELTA notes on each JSOP_POP between comma operands.
6028 * These notes help the decompiler bracket the bytecodes generated
6029 * from each sub-expression that follows a comma.
6031 off = noteIndex = -1;
6032 for (pn2 = pn->pn_head; ; pn2 = pn2->pn_next) {
6033 if (!js_EmitTree(cx, cg, pn2))
6035 tmp = CG_OFFSET(cg);
6036 if (noteIndex >= 0) {
6037 if (!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0, tmp-off))
6043 noteIndex = js_NewSrcNote2(cx, cg, SRC_PCDELTA, 0);
6044 if (noteIndex < 0 ||
6045 js_Emit1(cx, cg, JSOP_POP) < 0) {
6053 * Check left operand type and generate specialized code for it.
6054 * Specialize to avoid ECMA "reference type" values on the operand
6055 * stack, which impose pervasive runtime "GetValue" costs.
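 *
 * For instance, "a.b += x" comes out roughly as:
 *
 *     <a>; dup; getprop b       left-hand side evaluated once
 *     <x>; add                  annotated SRC_ASSIGNOP
 *     setprop b                 preceded by a SRC_PCBASE note
 *
 * rather than materializing a reference to a.b on the stack.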
6058 atomIndex = (jsatomid) -1; /* quell GCC overwarning */
6059 switch (PN_TYPE(pn2)) {
6061 if (!BindNameToSlot(cx, cg, pn2))
6063 if (!pn2->pn_cookie.isFree()) {
6064 atomIndex = (jsatomid) pn2->pn_cookie.asInteger();
6066 ale = cg->atomList.add(cg->parser, pn2->pn_atom);
6069 atomIndex = ALE_INDEX(ale);
6070 if (!pn2->isConst()) {
6071 JSOp op = PN_OP(pn2) == JSOP_SETGNAME ? JSOP_BINDGNAME : JSOP_BINDNAME;
6072 EMIT_INDEX_OP(op, atomIndex);
6077 if (!js_EmitTree(cx, cg, pn2->expr()))
6079 ale = cg->atomList.add(cg->parser, pn2->pn_atom);
6082 atomIndex = ALE_INDEX(ale);
6085 JS_ASSERT(pn2->pn_arity == PN_BINARY);
6086 if (!js_EmitTree(cx, cg, pn2->pn_left))
6088 if (!js_EmitTree(cx, cg, pn2->pn_right))
6091 #if JS_HAS_DESTRUCTURING
6097 if (!js_EmitTree(cx, cg, pn2))
6100 #if JS_HAS_XML_SUPPORT
6102 JS_ASSERT(pn2->pn_op == JSOP_SETXMLNAME);
6103 if (!js_EmitTree(cx, cg, pn2->pn_kid))
6105 if (js_Emit1(cx, cg, JSOP_BINDXMLNAME) < 0)
6114 if (op != JSOP_NOP) {
6115 switch (pn2->pn_type) {
6117 if (pn2->isConst()) {
6118 if (PN_OP(pn2) == JSOP_CALLEE) {
6119 if (js_Emit1(cx, cg, JSOP_CALLEE) < 0)
6122 EMIT_INDEX_OP(PN_OP(pn2), atomIndex);
6124 } else if (PN_OP(pn2) == JSOP_SETNAME) {
6125 if (js_Emit1(cx, cg, JSOP_DUP) < 0)
6127 EMIT_INDEX_OP(JSOP_GETXPROP, atomIndex);
6128 } else if (PN_OP(pn2) == JSOP_SETGNAME) {
6129 if (!BindGlobal(cx, cg, pn2, pn2->pn_atom))
6131 if (pn2->pn_cookie.isFree())
6132 EmitAtomOp(cx, pn2, JSOP_GETGNAME, cg);
6134 EMIT_UINT16_IMM_OP(JSOP_GETGLOBAL, pn2->pn_cookie.asInteger());
6136 EMIT_UINT16_IMM_OP((PN_OP(pn2) == JSOP_SETARG)
6143 if (js_Emit1(cx, cg, JSOP_DUP) < 0)
6145 if (pn2->pn_atom == cx->runtime->atomState.lengthAtom) {
6146 if (js_Emit1(cx, cg, JSOP_LENGTH) < 0)
6148 } else if (pn2->pn_atom == cx->runtime->atomState.protoAtom) {
6149 if (!EmitIndexOp(cx, JSOP_QNAMEPART, atomIndex, cg))
6151 if (js_Emit1(cx, cg, JSOP_GETELEM) < 0)
6154 EMIT_INDEX_OP(JSOP_GETPROP, atomIndex);
6159 #if JS_HAS_XML_SUPPORT
6162 if (js_Emit1(cx, cg, JSOP_DUP2) < 0)
6164 if (js_Emit1(cx, cg, JSOP_GETELEM) < 0)
6171 /* Now emit the right operand (it may affect the namespace). */
6172 if (!js_EmitTree(cx, cg, pn->pn_right))
6175 /* If += etc., emit the binary operator with a decompiler note. */
6176 if (op != JSOP_NOP) {
6178 * Take care to avoid SRC_ASSIGNOP if the left-hand side is a const
6179 * declared in the current compilation unit, as in this case (just
6180 * a bit further below) we will avoid emitting the assignment op.
6182 if (pn2->pn_type != TOK_NAME || !pn2->isConst()) {
6183 if (js_NewSrcNote(cx, cg, SRC_ASSIGNOP) < 0)
6186 if (js_Emit1(cx, cg, op) < 0)
6190 /* Left parts such as a.b.c and a[b].c need a decompiler note. */
6191 if (pn2->pn_type != TOK_NAME &&
6192 #if JS_HAS_DESTRUCTURING
6193 pn2->pn_type != TOK_RB &&
6194 pn2->pn_type != TOK_RC &&
6196 js_NewSrcNote2(cx, cg, SRC_PCBASE, CG_OFFSET(cg) - top) < 0) {
6200 /* Finally, emit the specialized assignment bytecode. */
6201 switch (pn2->pn_type) {
6207 EMIT_INDEX_OP(PN_OP(pn2), atomIndex);
6211 if (js_Emit1(cx, cg, JSOP_SETELEM) < 0)
6214 #if JS_HAS_DESTRUCTURING
6217 if (!EmitDestructuringOps(cx, cg, JSOP_SETNAME, pn2))
6221 #if JS_HAS_XML_SUPPORT
6223 if (js_Emit1(cx, cg, JSOP_SETXMLNAME) < 0)
6233 /* Emit the condition, then branch if false to the else part. */
6234 if (!js_EmitTree(cx, cg, pn->pn_kid1))
6236 noteIndex = js_NewSrcNote(cx, cg, SRC_COND);
6239 beq = EmitJump(cx, cg, JSOP_IFEQ, 0);
6240 if (beq < 0 || !js_EmitTree(cx, cg, pn->pn_kid2))
6243 /* Jump around else, fixup the branch, emit else, fixup jump. */
6244 jmp = EmitJump(cx, cg, JSOP_GOTO, 0);
6247 CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, beq);
6250 * Because each branch pushes a single value, but our stack budgeting
6251 * analysis ignores branches, we now have to adjust cg->stackDepth to
6252 * ignore the value pushed by the first branch. Execution will follow
6253 * only one path, so we must decrement cg->stackDepth.
6255 * Failing to do this will foil code, such as the try/catch/finally
6256 * exception handling code generator, that samples cg->stackDepth for
6257 * use at runtime (JSOP_SETSP), or in let expression and block code
6258 * generation, which must use the stack depth to compute local stack
6259 * indexes correctly.
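 *
 * For "a ? b : c", for instance, emitting b leaves the modelled depth
 * one higher than where c's code begins; the decrement below restores
 * the depth that c actually starts from, even though at runtime only
 * one of the two values is ever pushed.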
6261 JS_ASSERT(cg->stackDepth > 0);
6263 if (!js_EmitTree(cx, cg, pn->pn_kid3))
6265 CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, jmp);
6266 if (!js_SetSrcNoteOffset(cx, cg, noteIndex, 0, jmp - beq))
6273 * JSOP_OR converts the operand on the stack to boolean, and if true,
6274 * leaves the original operand value on the stack and jumps; otherwise
6275 * it pops and falls into the next bytecode, which evaluates the right
6276 * operand. The jump goes around the right operand evaluation.
6278 * JSOP_AND converts the operand on the stack to boolean, and if false,
6279 * leaves the original operand value on the stack and jumps; otherwise
6280 * it pops and falls into the right operand's bytecode.
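 *
 * For a chain like "a && b && c" (the PN_LIST form handled below), a
 * JSOP_BACKPATCH_POP is emitted after each non-final operand and the
 * jumps are linked into one backpatch chain, so any failing test jumps
 * straight past the rest of the chain with the deciding value left on
 * the stack.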
6282 if (pn->pn_arity == PN_BINARY) {
6283 if (!js_EmitTree(cx, cg, pn->pn_left))
6285 top = EmitJump(cx, cg, JSOP_BACKPATCH_POP, 0);
6288 if (!js_EmitTree(cx, cg, pn->pn_right))
6290 off = CG_OFFSET(cg);
6291 pc = CG_CODE(cg, top);
6292 CHECK_AND_SET_JUMP_OFFSET(cx, cg, pc, off - top);
6295 JS_ASSERT(pn->pn_arity == PN_LIST);
6296 JS_ASSERT(pn->pn_head->pn_next->pn_next);
6298 /* Left-associative operator chain: avoid too much recursion. */
6300 if (!js_EmitTree(cx, cg, pn2))
6302 top = EmitJump(cx, cg, JSOP_BACKPATCH_POP, 0);
6306 /* Emit nodes between the head and the tail. */
6308 while ((pn2 = pn2->pn_next)->pn_next) {
6309 if (!js_EmitTree(cx, cg, pn2))
6311 off = EmitJump(cx, cg, JSOP_BACKPATCH_POP, 0);
6314 if (!SetBackPatchDelta(cx, cg, CG_CODE(cg, jmp), off - jmp))
6319 if (!js_EmitTree(cx, cg, pn2))
6323 off = CG_OFFSET(cg);
6325 pc = CG_CODE(cg, top);
6326 tmp = GetJumpOffset(cg, pc);
6327 CHECK_AND_SET_JUMP_OFFSET(cx, cg, pc, off - top);
6330 } while ((pn2 = pn2->pn_next)->pn_next);
6341 case TOK_INSTANCEOF:
6346 if (pn->pn_arity == PN_LIST) {
6347 /* Left-associative operator chain: avoid too much recursion. */
6349 if (!js_EmitTree(cx, cg, pn2))
6352 while ((pn2 = pn2->pn_next) != NULL) {
6353 if (!js_EmitTree(cx, cg, pn2))
6355 if (js_Emit1(cx, cg, op) < 0)
6359 #if JS_HAS_XML_SUPPORT
6363 if (pn->pn_arity == PN_NAME) {
6364 if (!js_EmitTree(cx, cg, pn->expr()))
6366 if (!EmitAtomOp(cx, pn, PN_OP(pn), cg))
6372 * Binary :: has a right operand that brackets arbitrary code,
6373 * possibly including a let (a = b) ... expression. We must clear
6374 * TCF_IN_FOR_INIT to avoid mis-compiling such beasts.
6376 oldflags = cg->flags;
6377 cg->flags &= ~TCF_IN_FOR_INIT;
6380 /* Binary operators that evaluate both operands unconditionally. */
6381 if (!js_EmitTree(cx, cg, pn->pn_left))
6383 if (!js_EmitTree(cx, cg, pn->pn_right))
6385 #if JS_HAS_XML_SUPPORT
6386 cg->flags |= oldflags & TCF_IN_FOR_INIT;
6388 if (js_Emit1(cx, cg, PN_OP(pn)) < 0)
6394 #if JS_HAS_XML_SUPPORT
6397 JS_ASSERT(pn->pn_arity == PN_UNARY);
6404 /* Unary op, including unary +/-. */
6406 #if JS_HAS_XML_SUPPORT
6407 if (op == JSOP_XMLNAME) {
6408 if (!EmitXMLName(cx, pn, op, cg))
6415 if (op == JSOP_TYPEOF && pn2->pn_type != TOK_NAME)
6416 op = JSOP_TYPEOFEXPR;
6418 oldflags = cg->flags;
6419 cg->flags &= ~TCF_IN_FOR_INIT;
6420 if (!js_EmitTree(cx, cg, pn2))
6422 cg->flags |= oldflags & TCF_IN_FOR_INIT;
6423 if (js_Emit1(cx, cg, op) < 0)
6430 /* Emit lvalue-specialized code for ++/-- operators. */
6432 JS_ASSERT(pn2->pn_type != TOK_RP);
6434 switch (pn2->pn_type) {
6436 JS_ASSERT(pn2->pn_type == TOK_NAME);
6438 if (!BindNameToSlot(cx, cg, pn2))
6441 if (op == JSOP_CALLEE) {
6442 if (js_Emit1(cx, cg, op) < 0)
6444 } else if (!pn2->pn_cookie.isFree()) {
6445 atomIndex = (jsatomid) pn2->pn_cookie.asInteger();
6446 EMIT_UINT16_IMM_OP(op, atomIndex);
6448 JS_ASSERT(JOF_OPTYPE(op) == JOF_ATOM);
6449 if (!EmitAtomOp(cx, pn2, op, cg))
6453 if (pn2->isConst()) {
6454 if (js_Emit1(cx, cg, JSOP_POS) < 0)
6457 if (!(js_CodeSpec[op].format & JOF_POST)) {
6458 if (js_Emit1(cx, cg, JSOP_ONE) < 0)
6460 op = (js_CodeSpec[op].format & JOF_INC) ? JSOP_ADD : JSOP_SUB;
6461 if (js_Emit1(cx, cg, op) < 0)
6467 if (!EmitPropOp(cx, pn2, op, cg, JS_FALSE))
6471 if (!EmitElemOp(cx, pn2, op, cg))
6475 if (!js_EmitTree(cx, cg, pn2))
6477 if (js_NewSrcNote2(cx, cg, SRC_PCBASE,
6478 CG_OFFSET(cg) - pn2->pn_offset) < 0) {
6481 if (js_Emit1(cx, cg, op) < 0)
6484 #if JS_HAS_XML_SUPPORT
6486 JS_ASSERT(pn2->pn_op == JSOP_SETXMLNAME);
6487 if (!js_EmitTree(cx, cg, pn2->pn_kid))
6489 if (js_Emit1(cx, cg, JSOP_BINDXMLNAME) < 0)
6491 if (js_Emit1(cx, cg, op) < 0)
6500 * Under ECMA 3, deleting a non-reference returns true -- but alas we
6501 * must evaluate the operand if it appears it might have side effects.
6504 switch (pn2->pn_type) {
6506 if (!BindNameToSlot(cx, cg, pn2))
6509 if (op == JSOP_FALSE) {
6510 if (js_Emit1(cx, cg, op) < 0)
6513 if (!EmitAtomOp(cx, pn2, op, cg))
6518 if (!EmitPropOp(cx, pn2, JSOP_DELPROP, cg, JS_FALSE))
6521 #if JS_HAS_XML_SUPPORT
6523 if (!EmitElemOp(cx, pn2, JSOP_DELDESC, cg))
6528 if (!EmitElemOp(cx, pn2, JSOP_DELELEM, cg))
6533 * If useless, just emit JSOP_TRUE; otherwise convert delete foo()
6534 * to foo(), true (a comma expression, requiring SRC_PCDELTA).
6537 if (!CheckSideEffects(cx, cg, pn2, &useful))
6540 off = noteIndex = -1;
6542 JS_ASSERT_IF(pn2->pn_type == TOK_LP, !(pn2->pn_xflags & PNX_SETCALL));
6543 if (!js_EmitTree(cx, cg, pn2))
6545 off = CG_OFFSET(cg);
6546 noteIndex = js_NewSrcNote2(cx, cg, SRC_PCDELTA, 0);
6547 if (noteIndex < 0 || js_Emit1(cx, cg, JSOP_POP) < 0)
6550 if (js_Emit1(cx, cg, JSOP_TRUE) < 0)
6552 if (noteIndex >= 0) {
6553 tmp = CG_OFFSET(cg);
6554 if (!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0, tmp-off))
6560 #if JS_HAS_XML_SUPPORT
6562 if (!js_EmitTree(cx, cg, pn->pn_left))
6564 jmp = EmitJump(cx, cg, JSOP_FILTER, 0);
6567 top = EmitTraceOp(cx, cg);
6570 if (!js_EmitTree(cx, cg, pn->pn_right))
6572 CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, jmp);
6573 if (EmitJump(cx, cg, JSOP_ENDFILTER, top - CG_OFFSET(cg)) < 0)
6576 /* Make blockChain determination quicker. */
6577 if (EmitBlockChain(cx, cg) < 0)
6584 * Pop a stack operand, convert it to object, get a property named by
6585 * this bytecode's immediate-indexed atom operand, and push its value
6586 * (not a reference to it).
6588 ok = EmitPropOp(cx, pn, PN_OP(pn), cg, JS_FALSE);
6592 #if JS_HAS_XML_SUPPORT
6596 * Pop two operands, convert the left one to object and the right one
6597 * to property name (atom or tagged int), get the named property, and
6598 * push its value. Set the "obj" register to the result of ToObject
6599 * on the left operand.
6601 ok = EmitElemOp(cx, pn, PN_OP(pn), cg);
6607 bool callop = (PN_TYPE(pn) == TOK_LP);
6610 * Emit callable invocation or operator new (constructor call) code.
6611 * First, emit code for the left operand to evaluate the callable or
6612 * constructable object expression.
6614 * For operator new applied to expressions other than E4X ones, we emit
6615 * JSOP_GETPROP instead of JSOP_CALLPROP, etc. This is necessary to
6616 * interpose the lambda-initialized method read barrier -- see the code
6617 * in jsinterp.cpp for JSOP_LAMBDA followed by JSOP_{SET,INIT}PROP.
6619 * Then (or in a call case that has no explicit reference-base
6620 * object) we emit JSOP_PUSH to produce the |this| slot required
6621 * for calls (which non-strict mode functions will box into the global object).
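 *
 * As a rough example, "f(x, y)" emits a callname for f (pushing the
 * callee and the implicit |this| slot), the bytecode for x and y, and
 * then JSOP_CALL with an argc immediate of 2; "o.m(x)" uses callprop
 * instead so that o becomes |this|.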
6625 switch (pn2->pn_type) {
6627 if (!EmitNameOp(cx, cg, pn2, callop))
6631 if (!EmitPropOp(cx, pn2, PN_OP(pn2), cg, callop))
6635 JS_ASSERT(pn2->pn_op == JSOP_GETELEM);
6636 if (!EmitElemOp(cx, pn2, callop ? JSOP_CALLELEM : JSOP_GETELEM, cg))
6640 #if JS_HAS_XML_SUPPORT
6641 if (pn2->pn_op == JSOP_XMLNAME) {
6642 if (!EmitXMLName(cx, pn2, JSOP_CALLXMLNAME, cg))
6644 callop = true; /* suppress JSOP_PUSH after */
6650 if (!js_EmitTree(cx, cg, pn2))
6652 callop = false; /* trigger JSOP_PUSH after */
6655 if (!callop && js_Emit1(cx, cg, JSOP_PUSH) < 0)
6658 /* Remember start of callable-object bytecode for decompilation hint. */
6662 * Emit code for each argument in order, then emit the JSOP_*CALL or
6663 * JSOP_NEW bytecode with a two-byte immediate telling how many args
6664 * were pushed on the operand stack.
6666 uintN oldflags = cg->flags;
6667 cg->flags &= ~TCF_IN_FOR_INIT;
6668 for (pn3 = pn2->pn_next; pn3; pn3 = pn3->pn_next) {
6669 if (!js_EmitTree(cx, cg, pn3))
6672 cg->flags |= oldflags & TCF_IN_FOR_INIT;
6673 if (js_NewSrcNote2(cx, cg, SRC_PCBASE, CG_OFFSET(cg) - off) < 0)
6676 argc = pn->pn_count - 1;
6677 if (js_Emit3(cx, cg, PN_OP(pn), ARGC_HI(argc), ARGC_LO(argc)) < 0)
6679 if (PN_OP(pn) == JSOP_EVAL) {
6680 EMIT_UINT16_IMM_OP(JSOP_LINENO, pn->pn_pos.begin.lineno);
6681 if (EmitBlockChain(cx, cg) < 0)
6684 if (pn->pn_xflags & PNX_SETCALL) {
6685 if (js_Emit1(cx, cg, JSOP_SETCALL) < 0)
6691 case TOK_LEXICALSCOPE:
6693 JSObjectBox *objbox;
6695 objbox = pn->pn_objbox;
6696 js_PushBlockScope(cg, &stmtInfo, objbox, CG_OFFSET(cg));
6699 * If this lexical scope is not for a catch block, let block or let
6700 * expression, or any kind of for loop (where the scope starts in the
6701 * head after the first part if for (;;), else in the body if for-in);
6702 * and if our container is top-level but not a function body, or else
6703 * a block statement; then emit a SRC_BRACE note. All other container
6704 * statements get braces by default from the decompiler.
6707 type = PN_TYPE(pn->expr());
6708 if (type != TOK_CATCH && type != TOK_LET && type != TOK_FOR &&
6709 (!(stmt = stmtInfo.down)
6711 : stmt->type == STMT_BLOCK)) {
6712 #if defined DEBUG_brendan || defined DEBUG_mrbkap
6713 /* There must be no source note already output for the next op. */
6714 JS_ASSERT(CG_NOTE_COUNT(cg) == 0 ||
6715 CG_LAST_NOTE_OFFSET(cg) != CG_OFFSET(cg) ||
6716 !GettableNoteForNextOp(cg));
6718 noteIndex = js_NewSrcNote2(cx, cg, SRC_BRACE, 0);
6723 JS_ASSERT(CG_OFFSET(cg) == top);
6724 if (!EmitEnterBlock(cx, pn, cg))
6727 if (!js_EmitTree(cx, cg, pn->pn_expr))
6731 if (op == JSOP_LEAVEBLOCKEXPR) {
6732 if (js_NewSrcNote2(cx, cg, SRC_PCBASE, CG_OFFSET(cg) - top) < 0)
6735 if (noteIndex >= 0 &&
6736 !js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0,
6737 CG_OFFSET(cg) - top)) {
6742 /* Emit the JSOP_LEAVEBLOCK or JSOP_LEAVEBLOCKEXPR opcode. */
6743 if (!EmitLeaveBlock(cx, cg, op, objbox))
6746 ok = js_PopStatementCG(cx, cg);
6750 #if JS_HAS_BLOCK_SCOPE
6752 /* Let statements have their variable declarations on the left. */
6753 if (pn->pn_arity == PN_BINARY) {
6760 /* Non-null pn2 means that pn is the variable list from a let head. */
6761 JS_ASSERT(pn->pn_arity == PN_LIST);
6762 if (!EmitVariables(cx, cg, pn, pn2 != NULL, &noteIndex))
6765 /* Thus non-null pn2 is the body of the let block or expression. */
6766 tmp = CG_OFFSET(cg);
6767 if (pn2 && !js_EmitTree(cx, cg, pn2))
6770 if (noteIndex >= 0 &&
6771 !js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0,
6772 CG_OFFSET(cg) - tmp)) {
6776 #endif /* JS_HAS_BLOCK_SCOPE */
6778 #if JS_HAS_GENERATORS
6779 case TOK_ARRAYPUSH: {
6783 * The array object's stack index is in cg->arrayCompDepth. See below
6784 * under the array initialiser code generator for array comprehension special casing.
6787 if (!js_EmitTree(cx, cg, pn->pn_kid))
6789 slot = AdjustBlockSlot(cx, cg, cg->arrayCompDepth);
6792 EMIT_UINT16_IMM_OP(PN_OP(pn), slot);
6798 #if JS_HAS_GENERATORS
6802 * Emit code for [a, b, c] that is equivalent to constructing a new
6803 * array and in source order evaluating each element value and adding
6804 * it to the array, without invoking latent setters. We use the
6805 * JSOP_NEWINIT and JSOP_INITELEM bytecodes to ignore setters and to
6806 * avoid dup'ing and popping the array as each element is added, as
6807 * JSOP_SETELEM/JSOP_SETPROP would do.
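 *
 * Roughly, a non-constant "[a, , b]" (no sharps) comes out as:
 *
 *     newarray 3
 *     int 0; <a>; initelem
 *     int 1; hole; initelem
 *     int 2; <b>; initelem
 *     endinit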
6809 #if JS_HAS_SHARP_VARS
6814 #if JS_HAS_GENERATORS
6815 if (pn->pn_type == TOK_ARRAYCOMP) {
6818 if (!EmitNewInit(cx, cg, JSProto_Array, pn, sharpnum))
6822 * Pass the new array's stack index to the TOK_ARRAYPUSH case via
6823 * cg->arrayCompDepth, then simply traverse the TOK_FOR node and
6824 * its kids under pn2 to generate this comprehension.
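 *
 * E.g. "[x * x for (x in o)]" first creates the array, then compiles
 * the for-in loop whose body reaches the TOK_ARRAYPUSH case above and
 * appends each computed value to the array sitting at stack slot
 * cg->arrayCompDepth.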
6826 JS_ASSERT(cg->stackDepth > 0);
6827 saveDepth = cg->arrayCompDepth;
6828 cg->arrayCompDepth = (uint32) (cg->stackDepth - 1);
6829 if (!js_EmitTree(cx, cg, pn->pn_head))
6831 cg->arrayCompDepth = saveDepth;
6833 /* Emit the usual op needed for decompilation. */
6834 if (!EmitEndInit(cx, cg, 1))
6838 #endif /* JS_HAS_GENERATORS */
6840 if (!cg->hasSharps() && !(pn->pn_xflags & PNX_NONCONST) && cg->checkSingletonContext()) {
6841 if (!EmitSingletonInitialiser(cx, cg, pn))
6846 /* Use the slower NEWINIT for arrays in scripts containing sharps. */
6847 if (cg->hasSharps()) {
6848 if (!EmitNewInit(cx, cg, JSProto_Array, pn, sharpnum))
6851 ptrdiff_t off = js_EmitN(cx, cg, JSOP_NEWARRAY, 3);
6854 pc = CG_CODE(cg, off);
6855 SET_UINT24(pc, pn->pn_count);
6859 for (atomIndex = 0; pn2; atomIndex++, pn2 = pn2->pn_next) {
6860 if (!EmitNumberOp(cx, atomIndex, cg))
6862 if (pn2->pn_type == TOK_COMMA && pn2->pn_arity == PN_NULLARY) {
6863 if (js_Emit1(cx, cg, JSOP_HOLE) < 0)
6866 if (!js_EmitTree(cx, cg, pn2))
6869 if (js_Emit1(cx, cg, JSOP_INITELEM) < 0)
6872 JS_ASSERT(atomIndex == pn->pn_count);
6874 if (pn->pn_xflags & PNX_ENDCOMMA) {
6875 /* Emit a source note so we know to decompile an extra comma. */
6876 if (js_NewSrcNote(cx, cg, SRC_CONTINUE) < 0)
6881 * Emit an op to finish the array and, secondarily, to aid in sharp
6882 * array cleanup (if JS_HAS_SHARP_VARS) and decompilation.
6884 if (!EmitEndInit(cx, cg, atomIndex))
6889 #if JS_HAS_SHARP_VARS
6893 #if JS_HAS_DESTRUCTURING_SHORTHAND
6894 if (pn->pn_xflags & PNX_DESTRUCT) {
6895 ReportCompileErrorNumber(cx, CG_TS(cg), pn, JSREPORT_ERROR, JSMSG_BAD_OBJECT_INIT);
6900 if (!cg->hasSharps() && !(pn->pn_xflags & PNX_NONCONST) && cg->checkSingletonContext()) {
6901 if (!EmitSingletonInitialiser(cx, cg, pn))
6907 * Emit code for {p:a, '%q':b, 2:c} that is equivalent to constructing
6908 * a new object and in source order evaluating each property value and
6909 * adding the property to the object, without invoking latent setters.
6910 * We use the JSOP_NEWINIT and JSOP_INITELEM/JSOP_INITPROP bytecodes to
6911 * ignore setters and to avoid dup'ing and popping the object as each
6912 * property is added, as JSOP_SETELEM/JSOP_SETPROP would do.
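 *
 * Roughly, "{p: a, 2: c}" comes out as:
 *
 *     newinit Object            possibly rewritten to newobject below
 *     <a>; initprop p
 *     int 2; <c>; initelem      annotated SRC_INITPROP
 *     endinit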
6914 ptrdiff_t offset = CG_NEXT(cg) - CG_BASE(cg);
6915 if (!EmitNewInit(cx, cg, JSProto_Object, pn, sharpnum))
6919 * Try to construct the shape of the object as we go, so we can emit a
6920 * JSOP_NEWOBJECT with the final shape instead.
6922 JSObject *obj = NULL;
6923 if (!cg->hasSharps() && cg->compileAndGo()) {
6924 gc::FinalizeKind kind = GuessObjectGCKind(pn->pn_count, false);
6925 obj = NewBuiltinClassInstance(cx, &js_ObjectClass, kind);
6930 uintN methodInits = 0, slowMethodInits = 0;
6931 for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next) {
6932 /* Emit an index for t[2] for later consumption by JSOP_INITELEM. */
6934 if (pn3->pn_type == TOK_NUMBER) {
6935 if (!EmitNumberOp(cx, pn3->pn_dval, cg))
6939 /* Emit code for the property initializer. */
6940 if (!js_EmitTree(cx, cg, pn2->pn_right))
6944 if (op == JSOP_GETTER || op == JSOP_SETTER) {
6946 if (js_Emit1(cx, cg, op) < 0)
6950 /* Annotate JSOP_INITELEM so we decompile 2:c and not just c. */
6951 if (pn3->pn_type == TOK_NUMBER) {
6953 if (js_NewSrcNote(cx, cg, SRC_INITPROP) < 0)
6955 if (js_Emit1(cx, cg, JSOP_INITELEM) < 0)
6958 JS_ASSERT(pn3->pn_type == TOK_NAME ||
6959 pn3->pn_type == TOK_STRING);
6960 ale = cg->atomList.add(cg->parser, pn3->pn_atom);
6964 /* Check whether we can optimize to JSOP_INITMETHOD. */
6965 JSParseNode *init = pn2->pn_right;
6966 bool lambda = PN_OP(init) == JSOP_LAMBDA;
6969 if (op == JSOP_INITPROP && lambda && init->pn_funbox->joinable()) {
6971 op = JSOP_INITMETHOD;
6972 pn2->pn_op = uint8(op);
6975 * Disable NEWOBJECT on initializers that set __proto__, which has
6976 * a non-standard setter on objects.
6978 if (pn3->pn_atom == cx->runtime->atomState.protoAtom)
6986 JS_ASSERT(!obj->inDictionaryMode());
6987 if (!js_DefineNativeProperty(cx, obj, ATOM_TO_JSID(pn3->pn_atom),
6988 UndefinedValue(), NULL, NULL,
6989 JSPROP_ENUMERATE, 0, 0, NULL)) {
6992 if (obj->inDictionaryMode())
6996 EMIT_INDEX_OP(op, ALE_INDEX(ale));
7000 if (cg->funbox && cg->funbox->shouldUnbrand(methodInits, slowMethodInits)) {
7002 if (js_Emit1(cx, cg, JSOP_UNBRAND) < 0)
7005 if (!EmitEndInit(cx, cg, pn->pn_count))
7010 * The object survived and has a predictable shape. Update the original bytecode,
7011 * as long as we can do so without using a big index prefix/suffix.
7013 JSObjectBox *objbox = cg->parser->newObjectBox(obj);
7016 unsigned index = cg->objectList.index(objbox);
7017 if (FitsWithoutBigIndex(index))
7018 EMIT_UINT16_IN_PLACE(offset, JSOP_NEWOBJECT, uint16(index));
7024 #if JS_HAS_SHARP_VARS
7026 JS_ASSERT(cg->hasSharps());
7027 sharpnum = pn->pn_num;
7029 if (pn->pn_type == TOK_RB)
7031 # if JS_HAS_GENERATORS
7032 if (pn->pn_type == TOK_ARRAYCOMP)
7035 if (pn->pn_type == TOK_RC)
7036 goto do_emit_object;
7038 if (!js_EmitTree(cx, cg, pn))
7040 EMIT_UINT16PAIR_IMM_OP(JSOP_DEFSHARP, cg->sharpSlotBase, (jsatomid) sharpnum);
7044 JS_ASSERT(cg->hasSharps());
7045 EMIT_UINT16PAIR_IMM_OP(JSOP_USESHARP, cg->sharpSlotBase, (jsatomid) pn->pn_num);
7047 #endif /* JS_HAS_SHARP_VARS */
7051 * Cope with a left-over function definition that was replaced by a use
7052 * of a later function definition of the same name. See FunctionDef and
7053 * MakeDefIntoUse in jsparse.cpp.
7055 if (pn->pn_op == JSOP_NOP)
7057 if (!EmitNameOp(cx, cg, pn, JS_FALSE))
7061 #if JS_HAS_XML_SUPPORT
7066 case TOK_XMLCOMMENT:
7069 ok = EmitAtomOp(cx, pn, PN_OP(pn), cg);
7073 ok = EmitNumberOp(cx, pn->pn_dval, cg);
7077 JS_ASSERT(pn->pn_op == JSOP_REGEXP);
7078 ok = EmitIndexOp(cx, JSOP_REGEXP,
7079 cg->regexpList.index(pn->pn_objbox),
7084 #if JS_HAS_XML_SUPPORT
7088 if (js_Emit1(cx, cg, PN_OP(pn)) < 0)
7093 if (js_Emit1(cx, cg, JSOP_DEBUGGER) < 0)
7097 #if JS_HAS_XML_SUPPORT
7100 JS_ASSERT(PN_TYPE(pn) == TOK_XMLLIST || pn->pn_count != 0);
7101 switch (pn->pn_head ? PN_TYPE(pn->pn_head) : TOK_XMLLIST) {
7109 if (js_Emit1(cx, cg, JSOP_STARTXML) < 0)
7113 for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next) {
7114 if (pn2->pn_type == TOK_LC &&
7115 js_Emit1(cx, cg, JSOP_STARTXMLEXPR) < 0) {
7118 if (!js_EmitTree(cx, cg, pn2))
7120 if (pn2 != pn->pn_head && js_Emit1(cx, cg, JSOP_ADD) < 0)
7124 if (pn->pn_xflags & PNX_XMLROOT) {
7125 if (pn->pn_count == 0) {
7126 JS_ASSERT(pn->pn_type == TOK_XMLLIST);
7127 atom = cx->runtime->atomState.emptyAtom;
7128 ale = cg->atomList.add(cg->parser, atom);
7131 EMIT_INDEX_OP(JSOP_STRING, ALE_INDEX(ale));
7133 if (js_Emit1(cx, cg, PN_OP(pn)) < 0)
7138 JS_ASSERT(pn->pn_count != 0);
7148 if (js_Emit1(cx, cg, JSOP_STARTXML) < 0)
7151 ale = cg->atomList.add(cg->parser,
7152 (pn->pn_type == TOK_XMLETAGO)
7153 ? cx->runtime->atomState.etagoAtom
7154 : cx->runtime->atomState.stagoAtom);
7157 EMIT_INDEX_OP(JSOP_STRING, ALE_INDEX(ale));
7159 JS_ASSERT(pn->pn_count != 0);
7161 if (pn2->pn_type == TOK_LC && js_Emit1(cx, cg, JSOP_STARTXMLEXPR) < 0)
7163 if (!js_EmitTree(cx, cg, pn2))
7165 if (js_Emit1(cx, cg, JSOP_ADD) < 0)
7168 for (pn2 = pn2->pn_next, i = 0; pn2; pn2 = pn2->pn_next, i++) {
7169 if (pn2->pn_type == TOK_LC &&
7170 js_Emit1(cx, cg, JSOP_STARTXMLEXPR) < 0) {
7173 if (!js_EmitTree(cx, cg, pn2))
7175 if ((i & 1) && pn2->pn_type == TOK_LC) {
7176 if (js_Emit1(cx, cg, JSOP_TOATTRVAL) < 0)
7179 if (js_Emit1(cx, cg,
7180 (i & 1) ? JSOP_ADDATTRVAL : JSOP_ADDATTRNAME) < 0) {
7185 ale = cg->atomList.add(cg->parser,
7186 (pn->pn_type == TOK_XMLPTAGC)
7187 ? cx->runtime->atomState.ptagcAtom
7188 : cx->runtime->atomState.tagcAtom);
7191 EMIT_INDEX_OP(JSOP_STRING, ALE_INDEX(ale));
7192 if (js_Emit1(cx, cg, JSOP_ADD) < 0)
7195 if ((pn->pn_xflags & PNX_XMLROOT) && js_Emit1(cx, cg, PN_OP(pn)) < 0)
7201 if (pn->pn_arity == PN_LIST) {
7202 JS_ASSERT(pn->pn_count != 0);
7203 for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next) {
7204 if (pn2->pn_type == TOK_LC &&
7205 js_Emit1(cx, cg, JSOP_STARTXMLEXPR) < 0) {
7208 if (!js_EmitTree(cx, cg, pn2))
7210 if (pn2 != pn->pn_head && js_Emit1(cx, cg, JSOP_ADD) < 0)
7214 JS_ASSERT(pn->pn_arity == PN_NULLARY);
7215 ok = (pn->pn_op == JSOP_OBJECT)
7216 ? EmitObjectOp(cx, pn->pn_objbox, PN_OP(pn), cg)
7217 : EmitAtomOp(cx, pn, PN_OP(pn), cg);
7222 ale = cg->atomList.add(cg->parser, pn->pn_atom2);
7225 if (!EmitIndexOp(cx, JSOP_QNAMEPART, ALE_INDEX(ale), cg))
7227 if (!EmitAtomOp(cx, pn, JSOP_XMLPI, cg))
7230 #endif /* JS_HAS_XML_SUPPORT */
7236 /* cg->emitLevel == 1 means we're last on the stack, so finish up. */
7237 if (ok && cg->emitLevel == 1) {
7239 ok = OptimizeSpanDeps(cx, cg);
7240 if (!UpdateLineNumberNotes(cx, cg, pn->pn_pos.end.lineno))
7248 * We should try to get rid of offsetBias (always 0 or 1, where 1 is
7249 * JSOP_{NOP,POP}_LENGTH), which is used only by SRC_FOR and SRC_DECL.
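 *
 * Each initializer below supplies a JSSrcNoteSpec entry; the second
 * field is the note's arity (how many offset operands it carries),
 * which js_SrcNoteLength and js_GetSrcNoteOffset iterate over further
 * down in this file.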
7251 JS_FRIEND_DATA(JSSrcNoteSpec) js_SrcNoteSpec[] = {
7254 {"if-else", 2, 0, 1},
7257 {"continue", 0, 0, 0},
7259 {"pcdelta", 1, 0, 1},
7260 {"assignop", 0, 0, 0},
7263 {"hidden", 0, 0, 0},
7264 {"pcbase", 1, 0, -1},
7266 {"labelbrace", 1, 0, 0},
7267 {"endbrace", 0, 0, 0},
7268 {"break2label", 1, 0, 0},
7269 {"cont2label", 1, 0, 0},
7270 {"switch", 2, 0, 1},
7271 {"funcdef", 1, 0, 0},
7273 {"extended", -1, 0, 0},
7274 {"newline", 0, 0, 0},
7275 {"setline", 1, 0, 0},
7276 {"xdelta", 0, 0, 0},
7280 AllocSrcNote(JSContext *cx, JSCodeGenerator *cg)
7286 index = CG_NOTE_COUNT(cg);
7287 if (((uintN)index & CG_NOTE_MASK(cg)) == 0) {
7288 pool = cg->notePool;
7289 size = SRCNOTE_SIZE(CG_NOTE_MASK(cg) + 1);
7290 if (!CG_NOTES(cg)) {
7291 /* Allocate the first note array lazily; leave noteMask alone. */
7292 JS_ARENA_ALLOCATE_CAST(CG_NOTES(cg), jssrcnote *, pool, size);
7294 /* Grow by doubling note array size; update noteMask on success. */
7295 JS_ARENA_GROW_CAST(CG_NOTES(cg), jssrcnote *, pool, size, size);
7297 CG_NOTE_MASK(cg) = (CG_NOTE_MASK(cg) << 1) | 1;
7299 if (!CG_NOTES(cg)) {
7300 js_ReportOutOfScriptQuota(cx);
7305 CG_NOTE_COUNT(cg) = index + 1;
js_NewSrcNote(JSContext *cx, JSCodeGenerator *cg, JSSrcNoteType type)

    ptrdiff_t offset, delta, xdelta;

    /*
     * Claim a note slot in CG_NOTES(cg) by growing it if necessary and then
     * incrementing CG_NOTE_COUNT(cg).
     */
    index = AllocSrcNote(cx, cg);
    sn = &CG_NOTES(cg)[index];

    /*
     * Compute delta from the last annotated bytecode's offset. If it's too
     * big to fit in sn, allocate one or more xdelta notes and reset sn.
     */
    offset = CG_OFFSET(cg);
    delta = offset - CG_LAST_NOTE_OFFSET(cg);
    CG_LAST_NOTE_OFFSET(cg) = offset;
    if (delta >= SN_DELTA_LIMIT) {
            xdelta = JS_MIN(delta, SN_XDELTA_MASK);
            SN_MAKE_XDELTA(sn, xdelta);
            index = AllocSrcNote(cx, cg);
            sn = &CG_NOTES(cg)[index];
        } while (delta >= SN_DELTA_LIMIT);

    /*
     * Initialize type and delta, then allocate the minimum number of notes
     * needed for type's arity. Usually, we won't need more, but if an offset
     * does take two bytes, js_SetSrcNoteOffset will grow CG_NOTES(cg).
     */
    SN_MAKE_NOTE(sn, type, delta);
    for (n = (intN)js_SrcNoteSpec[type].arity; n > 0; n--) {
        if (js_NewSrcNote(cx, cg, SRC_NULL) < 0)
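/*
 * Worked example for the xdelta loop above, assuming the usual jsemit.h
 * encoding of a 3-bit delta in ordinary notes (SN_DELTA_LIMIT == 8) and a
 * 6-bit delta in SRC_XDELTA notes (SN_XDELTA_MASK == 63); those constants
 * are assumptions, not restated from this file. If 130 bytes of bytecode
 * were emitted since the last note, the loop first writes an xdelta of 63
 * (67 remaining), then another of 63 (4 remaining); 4 < 8, so the real note
 * carries the final delta of 4.
 */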
js_NewSrcNote2(JSContext *cx, JSCodeGenerator *cg, JSSrcNoteType type,

    index = js_NewSrcNote(cx, cg, type);
        if (!js_SetSrcNoteOffset(cx, cg, index, 0, offset))

js_NewSrcNote3(JSContext *cx, JSCodeGenerator *cg, JSSrcNoteType type,
               ptrdiff_t offset1, ptrdiff_t offset2)

    index = js_NewSrcNote(cx, cg, type);
        if (!js_SetSrcNoteOffset(cx, cg, index, 0, offset1))
        if (!js_SetSrcNoteOffset(cx, cg, index, 1, offset2))
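#if 0
/*
 * Illustrative caller sketch for the helpers above, not part of the original
 * emitter: it assumes an emitter routine with cx, cg, a saved bytecode offset
 * `top', and later-computed offsets `off1' and `off2' in scope. SRC_SWITCH is
 * used because js_SrcNoteSpec gives it an arity of 2; offsets are typically
 * back-patched once the sizes they describe are known.
 */
intN noteIndex = js_NewSrcNote2(cx, cg, SRC_SWITCH, 0);
if (noteIndex < 0)
    return JS_FALSE;
/* ... emit the statement's bytecode, computing off1 and off2 ... */
if (!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0, off1 - top) ||
    !js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 1, off2 - top)) {
    return JS_FALSE;
}
#endif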
GrowSrcNotes(JSContext *cx, JSCodeGenerator *cg)

    /* Grow by doubling note array size; update noteMask on success. */
    pool = cg->notePool;
    size = SRCNOTE_SIZE(CG_NOTE_MASK(cg) + 1);
    JS_ARENA_GROW_CAST(CG_NOTES(cg), jssrcnote *, pool, size, size);
    if (!CG_NOTES(cg)) {
        js_ReportOutOfScriptQuota(cx);
    CG_NOTE_MASK(cg) = (CG_NOTE_MASK(cg) << 1) | 1;
js_AddToSrcNoteDelta(JSContext *cx, JSCodeGenerator *cg, jssrcnote *sn,

    ptrdiff_t base, limit, newdelta, diff;

    /*
     * Called only from OptimizeSpanDeps and js_FinishTakingSrcNotes to add to
     * main script note deltas, and only by a small positive amount.
     */
    JS_ASSERT(cg->current == &cg->main);
    JS_ASSERT((unsigned) delta < (unsigned) SN_XDELTA_LIMIT);

    base = SN_DELTA(sn);
    limit = SN_IS_XDELTA(sn) ? SN_XDELTA_LIMIT : SN_DELTA_LIMIT;
    newdelta = base + delta;
    if (newdelta < limit) {
        SN_SET_DELTA(sn, newdelta);
        index = sn - cg->main.notes;
        if ((cg->main.noteCount & cg->main.noteMask) == 0) {
            if (!GrowSrcNotes(cx, cg))
            sn = cg->main.notes + index;
        diff = cg->main.noteCount - index;
        cg->main.noteCount++;
        memmove(sn + 1, sn, SRCNOTE_SIZE(diff));
        SN_MAKE_XDELTA(sn, delta);
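/*
 * In outline, the overflow path above makes room for a brand-new SRC_XDELTA
 * note in front of *sn when the existing note's delta field cannot absorb the
 * extra amount: it bumps noteCount, shifts the trailing notes up by one slot
 * with memmove, and encodes the added delta in the vacated slot. Each call
 * therefore adds at most one extra extended delta note.
 */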
JS_FRIEND_API(uintN)
js_SrcNoteLength(jssrcnote *sn)

    arity = (intN)js_SrcNoteSpec[SN_TYPE(sn)].arity;
    for (base = sn++; arity; sn++, arity--) {
        if (*sn & SN_3BYTE_OFFSET_FLAG)

JS_FRIEND_API(ptrdiff_t)
js_GetSrcNoteOffset(jssrcnote *sn, uintN which)

    /* Find the offset numbered which (i.e., skip exactly which offsets). */
    JS_ASSERT(SN_TYPE(sn) != SRC_XDELTA);
    JS_ASSERT((intN) which < js_SrcNoteSpec[SN_TYPE(sn)].arity);
    for (sn++; which; sn++, which--) {
        if (*sn & SN_3BYTE_OFFSET_FLAG)
    if (*sn & SN_3BYTE_OFFSET_FLAG) {
        return (ptrdiff_t)(((uint32)(sn[0] & SN_3BYTE_OFFSET_MASK) << 16)
    return (ptrdiff_t)*sn;
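/*
 * Offset encoding illustrated, assuming SN_3BYTE_OFFSET_FLAG == 0x80 and
 * SN_3BYTE_OFFSET_MASK == 0x7f as in jsemit.h: an offset of 0x42 fits in one
 * byte and is stored as 0x42, while an offset of 0x1234 needs the three-byte
 * form and is stored as 0x80 | 0x00, 0x12, 0x34, which the reader above
 * reassembles as ((0x00 << 16) | (0x12 << 8) | 0x34).
 */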
js_SetSrcNoteOffset(JSContext *cx, JSCodeGenerator *cg, uintN index,
                    uintN which, ptrdiff_t offset)

    if ((jsuword)offset >= (jsuword)((ptrdiff_t)SN_3BYTE_OFFSET_FLAG << 16)) {
        ReportStatementTooLarge(cx, cg);

    /* Find the offset numbered which (i.e., skip exactly which offsets). */
    sn = &CG_NOTES(cg)[index];
    JS_ASSERT(SN_TYPE(sn) != SRC_XDELTA);
    JS_ASSERT((intN) which < js_SrcNoteSpec[SN_TYPE(sn)].arity);
    for (sn++; which; sn++, which--) {
        if (*sn & SN_3BYTE_OFFSET_FLAG)

    /* See if the new offset requires three bytes. */
    if (offset > (ptrdiff_t)SN_3BYTE_OFFSET_MASK) {
        /* Maybe this offset was already set to a three-byte value. */
        if (!(*sn & SN_3BYTE_OFFSET_FLAG)) {
            /* Losing, need to insert another two bytes for this offset. */
            index = sn - CG_NOTES(cg);

            /*
             * Simultaneously test to see if the source note array must grow to
             * accommodate either the first or second byte of additional storage
             */
            if (((CG_NOTE_COUNT(cg) + 1) & CG_NOTE_MASK(cg)) <= 1) {
                if (!GrowSrcNotes(cx, cg))
                sn = CG_NOTES(cg) + index;
            CG_NOTE_COUNT(cg) += 2;

            diff = CG_NOTE_COUNT(cg) - (index + 3);
            JS_ASSERT(diff >= 0);
                memmove(sn + 3, sn + 1, SRCNOTE_SIZE(diff));
        *sn++ = (jssrcnote)(SN_3BYTE_OFFSET_FLAG | (offset >> 16));
        *sn++ = (jssrcnote)(offset >> 8);
    *sn = (jssrcnote)offset;
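/*
 * Summary of the widening path above: when a previously one-byte offset must
 * become three bytes, two fresh bytes are opened up at sn by shifting the
 * trailing SRCNOTE_SIZE(diff) bytes with memmove, growing the note arena
 * first if either of the new bytes would cross the power-of-two capacity
 * boundary, and only then are the flag/high, middle, and low bytes stored.
 */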
#define DEBUG_srcnotesize

#ifdef DEBUG_srcnotesize
static uint32 hist[NBINS];

void DumpSrcNoteSizeHist()
        fp = fopen("/tmp/srcnotes.hist", "w");
        setvbuf(fp, NULL, _IONBF, 0);
    fprintf(fp, "SrcNote size histogram:\n");
    for (i = 0; i < NBINS; i++) {
        fprintf(fp, "%4u %4u ", JS_BIT(i), hist[i]);
        for (n = (int) JS_HOWMANY(hist[i], 10); n > 0; --n)
/*
 * Fill in the storage at notes with prolog and main srcnotes; the space at
 * notes was allocated using the CG_COUNT_FINAL_SRCNOTES macro from jsemit.h.
 * SO DON'T CHANGE THIS FUNCTION WITHOUT AT LEAST CHECKING WHETHER jsemit.h's
 * CG_COUNT_FINAL_SRCNOTES MACRO NEEDS CORRESPONDING CHANGES!
 */
js_FinishTakingSrcNotes(JSContext *cx, JSCodeGenerator *cg, jssrcnote *notes)

    uintN prologCount, mainCount, totalCount;
    ptrdiff_t offset, delta;

    JS_ASSERT(cg->current == &cg->main);

    prologCount = cg->prolog.noteCount;
    if (prologCount && cg->prolog.currentLine != cg->firstLine) {
        CG_SWITCH_TO_PROLOG(cg);
        if (js_NewSrcNote2(cx, cg, SRC_SETLINE, (ptrdiff_t)cg->firstLine) < 0)
        prologCount = cg->prolog.noteCount;
        CG_SWITCH_TO_MAIN(cg);

        /*
         * Either no prolog srcnotes, or no line number change over prolog.
         * We don't need a SRC_SETLINE, but we may need to adjust the offset
         * of the first main note, by adding to its delta and possibly even
         * prepending SRC_XDELTA notes to it to account for prolog bytecodes
         * that came at and after the last annotated bytecode.
         */
        offset = CG_PROLOG_OFFSET(cg) - cg->prolog.lastNoteOffset;
        JS_ASSERT(offset >= 0);
        if (offset > 0 && cg->main.noteCount != 0) {
            /* NB: Use as much of the first main note's delta as we can. */
            sn = cg->main.notes;
            delta = SN_IS_XDELTA(sn)
                    ? SN_XDELTA_MASK - (*sn & SN_XDELTA_MASK)
                    : SN_DELTA_MASK - (*sn & SN_DELTA_MASK);
                if (!js_AddToSrcNoteDelta(cx, cg, sn, delta))
                delta = JS_MIN(offset, SN_XDELTA_MASK);
                sn = cg->main.notes;

    mainCount = cg->main.noteCount;
    totalCount = prologCount + mainCount;
    memcpy(notes, cg->prolog.notes, SRCNOTE_SIZE(prologCount));
    memcpy(notes + prologCount, cg->main.notes, SRCNOTE_SIZE(mainCount));
    SN_MAKE_TERMINATOR(&notes[totalCount]);
  { int bin = JS_CeilingLog2(totalCount);
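/*
 * Resulting layout, for orientation: the caller's notes buffer ends up as
 * [prolog notes][main notes][terminator], with the first main note's delta
 * (or prepended SRC_XDELTA notes) adjusted above so that deltas stay relative
 * to the prolog bytecode emitted after the last annotated prolog offset.
 */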
NewTryNote(JSContext *cx, JSCodeGenerator *cg, JSTryNoteKind kind,
           uintN stackDepth, size_t start, size_t end)

    JS_ASSERT((uintN)(uint16)stackDepth == stackDepth);
    JS_ASSERT(start <= end);
    JS_ASSERT((size_t)(uint32)start == start);
    JS_ASSERT((size_t)(uint32)end == end);

    JS_ARENA_ALLOCATE_TYPE(tryNode, JSTryNode, &cx->tempPool);
        js_ReportOutOfScriptQuota(cx);

    tryNode->note.kind = kind;
    tryNode->note.stackDepth = (uint16)stackDepth;
    tryNode->note.start = (uint32)start;
    tryNode->note.length = (uint32)(end - start);
    tryNode->prev = cg->lastTryNode;
    cg->lastTryNode = tryNode;

js_FinishTakingTryNotes(JSCodeGenerator *cg, JSTryNoteArray *array)

    JS_ASSERT(array->length > 0 && array->length == cg->ntrynotes);
    tn = array->vector + array->length;
    tryNode = cg->lastTryNode;
        *--tn = tryNode->note;
    } while ((tryNode = tryNode->prev) != NULL);
    JS_ASSERT(tn == array->vector);
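/*
 * Try notes are accumulated during emission as a singly linked list hanging
 * off cg->lastTryNode, newest first (see NewTryNote above). Copying them back
 * to front here therefore leaves array->vector in the order the try blocks
 * were emitted.
 */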
/*
 * Find the index of the given object for the code generator.
 *
 * Since the emitter refers to each parsed object only once, for the index we
 * use the number of already indexed objects. We also add the object to a list
 * to convert the list to a fixed-size array when we complete code generation,
 * see JSCGObjectList::finish below.
 *
 * Most of the objects go to JSCodeGenerator.objectList, but for regexps we use
 * a separate JSCodeGenerator.regexpList. In this way the emitted index can be
 * directly used to store and fetch a reference to a cloned RegExp object that
 * shares the same JSRegExp private data created for the object literal in
 * objbox. We need a cloned object to hold lastIndex and other direct properties
 * that should not be shared among threads sharing a precompiled function or
 * script.
 *
 * If the code being compiled is function code, allocate a reserved slot in
 * the cloned function object that shares its precompiled script with other
 * cloned function objects and with the compiler-created clone-parent. There
 * are nregexps = script->regexps()->length such reserved slots in each
 * function object cloned from fun->object. NB: during compilation, a funobj
 * slots element must never be allocated, because JSObject::allocSlot could
 * hand out one of the slots that should be given to a regexp clone.
 *
 * If the code being compiled is global code, the cloned regexps are stored in
 * the fp->vars slots, and to protect the regexp slots from GC we set fp->nvars to
 *
 * The slots initially contain undefined or null. We populate them lazily when
 * JSOP_REGEXP is executed for the first time.
 *
 * Why clone regexp objects? ECMA specifies that when a regular expression
 * literal is scanned, a RegExp object is created. In the spec, compilation
 * and execution happen indivisibly, but in this implementation and many of
 * its embeddings, code is precompiled early and re-executed in multiple
 * threads, or using multiple global objects, or both, for efficiency.
 *
 * In such cases, naively following ECMA leads to wrongful sharing of RegExp
 * objects, which makes for collisions on the lastIndex property (especially
 * for global regexps) and on any ad-hoc properties. Also, __proto__ refers to
 * the pre-compilation prototype, a pigeon-hole problem for instanceof tests.
 */
JSCGObjectList::index(JSObjectBox *objbox)

    JS_ASSERT(!objbox->emitLink);
    objbox->emitLink = lastbox;
    objbox->index = length++;
    return objbox->index;
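/*
 * The list is built LIFO: each new box is linked onto the front via emitLink
 * while its index is simply the running length. finish() below walks the
 * emitLink chain from the most recently added box and fills the output array
 * from the back, so the box given index i ends up at vector[i].
 */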
JSCGObjectList::finish(JSObjectArray *array)

    JSObjectBox *objbox;

    JS_ASSERT(length <= INDEX_LIMIT);
    JS_ASSERT(length == array->length);

    cursor = array->vector + array->length;
        JS_ASSERT(!*cursor);
        *cursor = objbox->object;
    } while ((objbox = objbox->emitLink) != NULL);
    JS_ASSERT(cursor == array->vector);
JSGCConstList::finish(JSConstArray *array)

    JS_ASSERT(array->length == list.length());
    Value *src = list.begin(), *srcend = list.end();
    Value *dst = array->vector;
    for (; src != srcend; ++src, ++dst)