/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
* vim: set ts=8 sts=4 et sw=4 tw=99:
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
/*
* JS bytecode generation.
*/
#include "frontend/BytecodeEmitter-inl.h"
#include "mozilla/DebugOnly.h"
#include "mozilla/FloatingPoint.h"
#include "mozilla/PodOperations.h"
#include <string.h>
#include "jstypes.h"
#include "jsutil.h"
#include "jsapi.h"
#include "jsatom.h"
#include "jscntxt.h"
#include "jsfun.h"
#include "jsnum.h"
#include "jsopcode.h"
#include "jsscript.h"
#include "frontend/Parser.h"
#include "frontend/TokenStream.h"
#include "jit/AsmJS.h"
#include "vm/Debugger.h"
#include "jsatominlines.h"
#include "jsobjinlines.h"
#include "jsscriptinlines.h"
#include "frontend/ParseMaps-inl.h"
#include "frontend/ParseNode-inl.h"
#include "frontend/SharedContext-inl.h"
using namespace js;
using namespace js::gc;
using namespace js::frontend;
using mozilla::DebugOnly;
using mozilla::DoubleIsInt32;
using mozilla::PodCopy;
static bool
SetSrcNoteOffset(JSContext *cx, BytecodeEmitter *bce, unsigned index, unsigned which, ptrdiff_t offset);
struct frontend::StmtInfoBCE : public StmtInfoBase
{
StmtInfoBCE *down; /* info for enclosing statement */
StmtInfoBCE *downScope; /* next enclosing lexical scope */
ptrdiff_t update; /* loop update offset (top if none) */
ptrdiff_t breaks; /* offset of last break in loop */
ptrdiff_t continues; /* offset of last continue in loop */
StmtInfoBCE(JSContext *cx) : StmtInfoBase(cx) {}
/*
* To reuse space, alias two of the ptrdiff_t fields for use during
* try/catch/finally code generation and backpatching.
*
* Only a loop, switch, or label statement info record can have breaks and
* continues, and only a for loop has an update backpatch chain, so it's
* safe to overlay these for the "trying" StmtTypes.
*/
ptrdiff_t &gosubs() {
JS_ASSERT(type == STMT_FINALLY);
return breaks;
}
ptrdiff_t &guardJump() {
JS_ASSERT(type == STMT_TRY || type == STMT_FINALLY);
return continues;
}
};
BytecodeEmitter::BytecodeEmitter(BytecodeEmitter *parent,
Parser<FullParseHandler> *parser, SharedContext *sc,
HandleScript script, bool insideEval, HandleScript evalCaller,
bool hasGlobalScope, uint32_t lineNum, EmitterMode emitterMode)
: sc(sc),
parent(parent),
script(sc->context, script),
prolog(sc->context, lineNum),
main(sc->context, lineNum),
current(&main),
parser(parser),
evalCaller(evalCaller),
topStmt(NULL),
topScopeStmt(NULL),
blockChain(sc->context),
atomIndices(sc->context),
firstLine(lineNum),
stackDepth(0), maxStackDepth(0),
tryNoteList(sc->context),
arrayCompDepth(0),
emitLevel(0),
constList(sc->context),
typesetCount(0),
hasSingletons(false),
emittingForInit(false),
emittingRunOnceLambda(false),
insideEval(insideEval),
hasGlobalScope(hasGlobalScope),
emitterMode(emitterMode)
{
JS_ASSERT_IF(evalCaller, insideEval);
}
bool
BytecodeEmitter::init()
{
return atomIndices.ensureMap(sc->context);
}
static ptrdiff_t
EmitCheck(JSContext *cx, BytecodeEmitter *bce, ptrdiff_t delta)
{
ptrdiff_t offset = bce->code().length();
// Start it off moderately large to avoid repeated resizings early on.
if (bce->code().capacity() == 0 && !bce->code().reserve(1024))
return -1;
jsbytecode dummy = 0;
if (!bce->code().appendN(dummy, delta)) {
js_ReportOutOfMemory(cx);
return -1;
}
return offset;
}
static StaticBlockObject &
CurrentBlock(StmtInfoBCE *topStmt)
{
JS_ASSERT(topStmt->type == STMT_BLOCK || topStmt->type == STMT_SWITCH);
JS_ASSERT(topStmt->blockObj->is<StaticBlockObject>());
return *topStmt->blockObj;
}
static void
UpdateDepth(JSContext *cx, BytecodeEmitter *bce, ptrdiff_t target)
{
jsbytecode *pc = bce->code(target);
JSOp op = (JSOp) *pc;
const JSCodeSpec *cs = &js_CodeSpec[op];
if (cs->format & JOF_TMPSLOT_MASK) {
/*
* An opcode may temporarily consume stack space during execution.
* Account for this in maxStackDepth separately from uses/defs here.
*/
unsigned depth = (unsigned) bce->stackDepth +
((cs->format & JOF_TMPSLOT_MASK) >> JOF_TMPSLOT_SHIFT);
if (depth > bce->maxStackDepth)
bce->maxStackDepth = depth;
}
/*
* Specially handle any case in which StackUses or StackDefs would call
* NumBlockSlots, since that requires a well-formed script. This allows us
* to safely pass NULL as the 'script' parameter to StackUses and StackDefs.
*/
int nuses, ndefs;
if (op == JSOP_ENTERBLOCK) {
nuses = 0;
ndefs = CurrentBlock(bce->topStmt).slotCount();
} else if (op == JSOP_ENTERLET0) {
nuses = ndefs = CurrentBlock(bce->topStmt).slotCount();
} else if (op == JSOP_ENTERLET1) {
nuses = ndefs = CurrentBlock(bce->topStmt).slotCount() + 1;
} else {
nuses = StackUses(NULL, pc);
ndefs = StackDefs(NULL, pc);
}
bce->stackDepth -= nuses;
JS_ASSERT(bce->stackDepth >= 0);
bce->stackDepth += ndefs;
if ((unsigned)bce->stackDepth > bce->maxStackDepth)
bce->maxStackDepth = bce->stackDepth;
}
ptrdiff_t
frontend::Emit1(JSContext *cx, BytecodeEmitter *bce, JSOp op)
{
ptrdiff_t offset = EmitCheck(cx, bce, 1);
if (offset < 0)
return -1;
jsbytecode *code = bce->code(offset);
code[0] = jsbytecode(op);
UpdateDepth(cx, bce, offset);
return offset;
}
ptrdiff_t
frontend::Emit2(JSContext *cx, BytecodeEmitter *bce, JSOp op, jsbytecode op1)
{
ptrdiff_t offset = EmitCheck(cx, bce, 2);
if (offset < 0)
return -1;
jsbytecode *code = bce->code(offset);
code[0] = jsbytecode(op);
code[1] = op1;
UpdateDepth(cx, bce, offset);
return offset;
}
ptrdiff_t
frontend::Emit3(JSContext *cx, BytecodeEmitter *bce, JSOp op, jsbytecode op1,
jsbytecode op2)
{
/* These should filter through EmitVarOp. */
JS_ASSERT(!IsArgOp(op));
JS_ASSERT(!IsLocalOp(op));
ptrdiff_t offset = EmitCheck(cx, bce, 3);
if (offset < 0)
return -1;
jsbytecode *code = bce->code(offset);
code[0] = jsbytecode(op);
code[1] = op1;
code[2] = op2;
UpdateDepth(cx, bce, offset);
return offset;
}
ptrdiff_t
frontend::EmitN(JSContext *cx, BytecodeEmitter *bce, JSOp op, size_t extra)
{
ptrdiff_t length = 1 + (ptrdiff_t)extra;
ptrdiff_t offset = EmitCheck(cx, bce, length);
if (offset < 0)
return -1;
jsbytecode *code = bce->code(offset);
code[0] = jsbytecode(op);
/* The remaining |extra| bytes are set by the caller */
/*
* Don't UpdateDepth if op's use-count comes from the immediate
* operand yet to be stored in the extra bytes after op.
*/
if (js_CodeSpec[op].nuses >= 0)
UpdateDepth(cx, bce, offset);
return offset;
}
static ptrdiff_t
EmitJump(JSContext *cx, BytecodeEmitter *bce, JSOp op, ptrdiff_t off)
{
ptrdiff_t offset = EmitCheck(cx, bce, 5);
if (offset < 0)
return -1;
jsbytecode *code = bce->code(offset);
code[0] = jsbytecode(op);
SET_JUMP_OFFSET(code, off);
UpdateDepth(cx, bce, offset);
return offset;
}
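/*
* Layout note: a jump op occupies five bytes -- the opcode followed by a
* signed 32-bit relative offset stored by SET_JUMP_OFFSET:
*
*   code[0]     JSOP_GOTO (for example)
*   code[1..4]  branch target, relative to the op
*
* hence the EmitCheck(cx, bce, 5) above.
*/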
/* XXX too many "... statement" L10N gaffes below -- fix via js.msg! */
const char js_with_statement_str[] = "with statement";
const char js_finally_block_str[] = "finally block";
const char js_script_str[] = "script";
static const char * const statementName[] = {
"label statement", /* LABEL */
"if statement", /* IF */
"else statement", /* ELSE */
"destructuring body", /* BODY */
"switch statement", /* SWITCH */
"block", /* BLOCK */
js_with_statement_str, /* WITH */
"catch block", /* CATCH */
"try block", /* TRY */
js_finally_block_str, /* FINALLY */
js_finally_block_str, /* SUBROUTINE */
"do loop", /* DO_LOOP */
"for loop", /* FOR_LOOP */
"for/in loop", /* FOR_IN_LOOP */
"while loop", /* WHILE_LOOP */
};
JS_STATIC_ASSERT(JS_ARRAY_LENGTH(statementName) == STMT_LIMIT);
static const char *
StatementName(StmtInfoBCE *topStmt)
{
if (!topStmt)
return js_script_str;
return statementName[topStmt->type];
}
static void
ReportStatementTooLarge(JSContext *cx, StmtInfoBCE *topStmt)
{
JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL, JSMSG_NEED_DIET,
StatementName(topStmt));
}
/*
* Emit a backpatch op with offset pointing to the previous jump of this type,
* so that we can walk back up the chain fixing up the op and jump offset.
*/
static ptrdiff_t
EmitBackPatchOp(JSContext *cx, BytecodeEmitter *bce, ptrdiff_t *lastp)
{
ptrdiff_t offset, delta;
offset = bce->offset();
delta = offset - *lastp;
*lastp = offset;
JS_ASSERT(delta > 0);
return EmitJump(cx, bce, JSOP_BACKPATCH, delta);
}
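/*
* Sketch of the chain this builds: if the previous backpatch op of this
* kind sits at offset 10 and the new one is emitted at offset 40, *lastp
* becomes 40 and the new op's operand holds delta 30. BackPatch() below
* recovers offset 10 via pc -= delta, rewriting each op in turn until it
* reaches the head of the chain.
*/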
/* Updates line number notes, not column notes. */
static inline bool
UpdateLineNumberNotes(JSContext *cx, BytecodeEmitter *bce, uint32_t offset)
{
TokenStream *ts = &bce->parser->tokenStream;
if (!ts->srcCoords.isOnThisLine(offset, bce->currentLine())) {
unsigned line = ts->srcCoords.lineNum(offset);
unsigned delta = line - bce->currentLine();
/*
* Encode any change in the current source line number by using
* either several SRC_NEWLINE notes or just one SRC_SETLINE note,
* whichever consumes less space.
*
* NB: We handle backward line number deltas (possible with for
* loops where the update part is emitted after the body, but its
* line number is <= any line number in the body) here by letting
* unsigned delta wrap to a very large number, which triggers a
* SRC_SETLINE.
*/
bce->current->currentLine = line;
bce->current->lastColumn = 0;
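/*
* Cost comparison for the threshold below (a sketch of source-note
* sizes): each SRC_NEWLINE note is one byte, while SRC_SETLINE costs two
* bytes, or four when the line number needs an extended offset
* (line > SN_3BYTE_OFFSET_MASK) -- hence the expression on the right of
* the >= test.
*/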
if (delta >= (unsigned)(2 + ((line > SN_3BYTE_OFFSET_MASK)<<1))) {
if (NewSrcNote2(cx, bce, SRC_SETLINE, (ptrdiff_t)line) < 0)
return false;
} else {
do {
if (NewSrcNote(cx, bce, SRC_NEWLINE) < 0)
return false;
} while (--delta != 0);
}
}
return true;
}
/* A function, so that we avoid macro-bloating all the other callsites. */
static bool
UpdateSourceCoordNotes(JSContext *cx, BytecodeEmitter *bce, uint32_t offset)
{
if (!UpdateLineNumberNotes(cx, bce, offset))
return false;
uint32_t columnIndex = bce->parser->tokenStream.srcCoords.columnIndex(offset);
ptrdiff_t colspan = ptrdiff_t(columnIndex) - ptrdiff_t(bce->current->lastColumn);
if (colspan != 0) {
if (colspan < 0) {
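// Negative spans wrap into the top half of the domain: e.g.
// (illustrative) moving from column 20 back to column 5 gives
// colspan -15, stored as SN_COLSPAN_DOMAIN - 15, which the note
// reader interprets as a negative span again.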
colspan += SN_COLSPAN_DOMAIN;
} else if (colspan >= SN_COLSPAN_DOMAIN / 2) {
// If the column span is so large that we can't store it, then just
// discard this information because column information would most
// likely be useless anyway once the column numbers are ~4000000.
// This has been known to happen with scripts that have been
// minified into a single line.
return true;
}
if (NewSrcNote2(cx, bce, SRC_COLSPAN, colspan) < 0)
return false;
bce->current->lastColumn = columnIndex;
}
return true;
}
static ptrdiff_t
EmitLoopHead(JSContext *cx, BytecodeEmitter *bce, ParseNode *nextpn)
{
if (nextpn) {
/*
* Try to give the JSOP_LOOPHEAD the same line number as the next
* instruction. nextpn is often a block, in which case the next
* instruction typically comes from the first statement inside.
*/
JS_ASSERT_IF(nextpn->isKind(PNK_STATEMENTLIST), nextpn->isArity(PN_LIST));
if (nextpn->isKind(PNK_STATEMENTLIST) && nextpn->pn_head)
nextpn = nextpn->pn_head;
if (!UpdateSourceCoordNotes(cx, bce, nextpn->pn_pos.begin))
return -1;
}
return Emit1(cx, bce, JSOP_LOOPHEAD);
}
static bool
EmitLoopEntry(JSContext *cx, BytecodeEmitter *bce, ParseNode *nextpn)
{
if (nextpn) {
/* Update the line number, as for LOOPHEAD. */
JS_ASSERT_IF(nextpn->isKind(PNK_STATEMENTLIST), nextpn->isArity(PN_LIST));
if (nextpn->isKind(PNK_STATEMENTLIST) && nextpn->pn_head)
nextpn = nextpn->pn_head;
if (!UpdateSourceCoordNotes(cx, bce, nextpn->pn_pos.begin))
return false;
}
/*
* Calculate loop depth. The value is just a hint, so stop counting
* (and cap the hint) once the nesting gets deep.
*/
uint32_t loopDepth = 0;
StmtInfoBCE *stmt = bce->topStmt;
while (stmt) {
if (stmt->isLoop()) {
loopDepth++;
if (loopDepth >= 5)
break;
}
stmt = stmt->down;
}
JS_ASSERT(loopDepth > 0);
return Emit2(cx, bce, JSOP_LOOPENTRY, uint8_t(loopDepth)) >= 0;
}
/*
* If op is JOF_TYPESET (see the type barriers comment in jsinfer.h), reserve
* a type set to store its result.
*/
static inline void
CheckTypeSet(JSContext *cx, BytecodeEmitter *bce, JSOp op)
{
if (js_CodeSpec[op].format & JOF_TYPESET) {
if (bce->typesetCount < UINT16_MAX)
bce->typesetCount++;
}
}
/*
* Macro to emit a bytecode followed by a uint16_t immediate operand stored in
* big-endian order, used for arg and var numbers as well as for atom indexes.
* NB: We use cx and bce from our caller's lexical environment, and return
* false on error.
*/
#define EMIT_UINT16_IMM_OP(op, i) \
JS_BEGIN_MACRO \
if (Emit3(cx, bce, op, UINT16_HI(i), UINT16_LO(i)) < 0) \
return false; \
CheckTypeSet(cx, bce, op); \
JS_END_MACRO
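/*
* For example (illustrative), EMIT_UINT16_IMM_OP(JSOP_POPN, 3) emits the
* three bytes [JSOP_POPN, 0x00, 0x03]: the opcode, then the operand's
* high and low bytes.
*/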
#define EMIT_UINT16PAIR_IMM_OP(op, i, j) \
JS_BEGIN_MACRO \
ptrdiff_t off_ = EmitN(cx, bce, op, 2 * UINT16_LEN); \
if (off_ < 0) \
return false; \
jsbytecode *pc_ = bce->code(off_); \
SET_UINT16(pc_, i); \
pc_ += UINT16_LEN; \
SET_UINT16(pc_, j); \
JS_END_MACRO
#define EMIT_UINT16_IN_PLACE(offset, op, i) \
JS_BEGIN_MACRO \
bce->code(offset)[0] = op; \
bce->code(offset)[1] = UINT16_HI(i); \
bce->code(offset)[2] = UINT16_LO(i); \
JS_END_MACRO
#define EMIT_UINT32_IN_PLACE(offset, op, i) \
JS_BEGIN_MACRO \
bce->code(offset)[0] = op; \
bce->code(offset)[1] = jsbytecode(i >> 24); \
bce->code(offset)[2] = jsbytecode(i >> 16); \
bce->code(offset)[3] = jsbytecode(i >> 8); \
bce->code(offset)[4] = jsbytecode(i); \
JS_END_MACRO
static bool
FlushPops(JSContext *cx, BytecodeEmitter *bce, int *npops)
{
JS_ASSERT(*npops != 0);
if (NewSrcNote(cx, bce, SRC_HIDDEN) < 0)
return false;
EMIT_UINT16_IMM_OP(JSOP_POPN, *npops);
*npops = 0;
return true;
}
static bool
PopIterator(JSContext *cx, BytecodeEmitter *bce)
{
if (NewSrcNote(cx, bce, SRC_HIDDEN) < 0)
return false;
if (Emit1(cx, bce, JSOP_ENDITER) < 0)
return false;
return true;
}
/*
* Emit additional bytecode(s) for non-local jumps.
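*
* For example (illustrative), a break that jumps out of a with statement
* must pop the scope object first:
*
*   label: with (o) { break label; }
*
* so the fixup emits a hidden JSOP_LEAVEWITH before the branch; likewise,
* a jump out of a for-in loop needs JSOP_ENDITER, and a jump across a
* finally is routed through the gosub backpatch chain.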
*/
static bool
EmitNonLocalJumpFixup(JSContext *cx, BytecodeEmitter *bce, StmtInfoBCE *toStmt)
{
/*
* The non-local jump fixup we emit will unbalance bce->stackDepth, because
* the fixup replicates balanced code such as JSOP_LEAVEWITH emitted at the
* end of a with statement, so we save bce->stackDepth here and restore it
* just before a successful return.
*/
int depth = bce->stackDepth;
int npops = 0;
#define FLUSH_POPS() if (npops && !FlushPops(cx, bce, &npops)) return false
for (StmtInfoBCE *stmt = bce->topStmt; stmt != toStmt; stmt = stmt->down) {
switch (stmt->type) {
case STMT_FINALLY:
FLUSH_POPS();
if (NewSrcNote(cx, bce, SRC_HIDDEN) < 0)
return false;
if (EmitBackPatchOp(cx, bce, &stmt->gosubs()) < 0)
return false;
break;
case STMT_WITH:
/* There's a With object on the stack that we need to pop. */
FLUSH_POPS();
if (NewSrcNote(cx, bce, SRC_HIDDEN) < 0)
return false;
if (Emit1(cx, bce, JSOP_LEAVEWITH) < 0)
return false;
break;
case STMT_FOR_IN_LOOP:
FLUSH_POPS();
if (!PopIterator(cx, bce))
return false;
break;
case STMT_SUBROUTINE:
/*
* There's an [exception or hole, retsub pc-index] pair on the
* stack that we need to pop.
*/
npops += 2;
break;
default:;
}
if (stmt->isBlockScope) {
FLUSH_POPS();
unsigned blockObjCount = stmt->blockObj->slotCount();
if (stmt->isForLetBlock) {
/*
* For a for-let-in statement, pushing/popping the block is
* interleaved with JSOP_(END)ITER. Just handle both together
* here and skip over the enclosing STMT_FOR_IN_LOOP.
*/
JS_ASSERT(stmt->down->type == STMT_FOR_IN_LOOP);
stmt = stmt->down;
if (stmt == toStmt)
break;
if (NewSrcNote(cx, bce, SRC_HIDDEN) < 0)
return false;
if (Emit1(cx, bce, JSOP_LEAVEFORLETIN) < 0)
return false;
if (!PopIterator(cx, bce))
return false;
if (NewSrcNote(cx, bce, SRC_HIDDEN) < 0)
return false;
EMIT_UINT16_IMM_OP(JSOP_POPN, blockObjCount);
} else {
/* There is a Block object with locals on the stack to pop. */
if (NewSrcNote(cx, bce, SRC_HIDDEN) < 0)
return false;
EMIT_UINT16_IMM_OP(JSOP_LEAVEBLOCK, blockObjCount);
}
}
}
FLUSH_POPS();
bce->stackDepth = depth;
return true;
#undef FLUSH_POPS
}
static const jsatomid INVALID_ATOMID = -1;
static ptrdiff_t
EmitGoto(JSContext *cx, BytecodeEmitter *bce, StmtInfoBCE *toStmt, ptrdiff_t *lastp,
SrcNoteType noteType = SRC_NULL)
{
if (!EmitNonLocalJumpFixup(cx, bce, toStmt))
return -1;
if (noteType != SRC_NULL) {
if (NewSrcNote(cx, bce, noteType) < 0)
return -1;
}
return EmitBackPatchOp(cx, bce, lastp);
}
static bool
BackPatch(JSContext *cx, BytecodeEmitter *bce, ptrdiff_t last, jsbytecode *target, jsbytecode op)
{
jsbytecode *pc, *stop;
ptrdiff_t delta, span;
pc = bce->code(last);
stop = bce->code(-1);
while (pc != stop) {
delta = GET_JUMP_OFFSET(pc);
span = target - pc;
SET_JUMP_OFFSET(pc, span);
*pc = op;
pc -= delta;
}
return true;
}
#define SET_STATEMENT_TOP(stmt, top) \
((stmt)->update = (top), (stmt)->breaks = (stmt)->continues = (-1))
static void
PushStatementBCE(BytecodeEmitter *bce, StmtInfoBCE *stmt, StmtType type, ptrdiff_t top)
{
SET_STATEMENT_TOP(stmt, top);
PushStatement(bce, stmt, type);
}
/*
* Return the enclosing lexical scope, which is the innermost enclosing static
* block object or compiler-created function.
*/
static JSObject *
EnclosingStaticScope(BytecodeEmitter *bce)
{
if (bce->blockChain)
return bce->blockChain;
if (!bce->sc->isFunctionBox()) {
JS_ASSERT(!bce->parent);
return NULL;
}
return bce->sc->asFunctionBox()->function();
}
// Push a block scope statement and link blockObj into bce->blockChain.
static void
PushBlockScopeBCE(BytecodeEmitter *bce, StmtInfoBCE *stmt, StaticBlockObject &blockObj,
ptrdiff_t top)
{
PushStatementBCE(bce, stmt, STMT_BLOCK, top);
blockObj.initEnclosingStaticScope(EnclosingStaticScope(bce));
FinishPushBlockScope(bce, stmt, blockObj);
}
// Patches |breaks| and |continues| unless the top statement info record
// represents a try-catch-finally suite. May fail if a jump offset overflows.
static bool
PopStatementBCE(JSContext *cx, BytecodeEmitter *bce)
{
StmtInfoBCE *stmt = bce->topStmt;
if (!stmt->isTrying() &&
(!BackPatch(cx, bce, stmt->breaks, bce->code().end(), JSOP_GOTO) ||
!BackPatch(cx, bce, stmt->continues, bce->code(stmt->update), JSOP_GOTO)))
{
return false;
}
FinishPopStatement(bce);
return true;
}
static bool
EmitIndex32(JSContext *cx, JSOp op, uint32_t index, BytecodeEmitter *bce)
{
const size_t len = 1 + UINT32_INDEX_LEN;
JS_ASSERT(len == size_t(js_CodeSpec[op].length));
ptrdiff_t offset = EmitCheck(cx, bce, len);
if (offset < 0)
return false;
jsbytecode *code = bce->code(offset);
code[0] = jsbytecode(op);
SET_UINT32_INDEX(code, index);
UpdateDepth(cx, bce, offset);
CheckTypeSet(cx, bce, op);
return true;
}
static bool
EmitIndexOp(JSContext *cx, JSOp op, uint32_t index, BytecodeEmitter *bce)
{
const size_t len = js_CodeSpec[op].length;
JS_ASSERT(len >= 1 + UINT32_INDEX_LEN);
ptrdiff_t offset = EmitCheck(cx, bce, len);
if (offset < 0)
return false;
jsbytecode *code = bce->code(offset);
code[0] = jsbytecode(op);
SET_UINT32_INDEX(code, index);
UpdateDepth(cx, bce, offset);
CheckTypeSet(cx, bce, op);
return true;
}
static bool
EmitAtomOp(JSContext *cx, JSAtom *atom, JSOp op, BytecodeEmitter *bce)
{
JS_ASSERT(JOF_OPTYPE(op) == JOF_ATOM);
if (op == JSOP_GETPROP && atom == cx->names().length) {
/* Specialize length accesses for the interpreter. */
op = JSOP_LENGTH;
}
jsatomid index;
if (!bce->makeAtomIndex(atom, &index))
return false;
return EmitIndexOp(cx, op, index, bce);
}
static bool
EmitAtomOp(JSContext *cx, ParseNode *pn, JSOp op, BytecodeEmitter *bce)
{
JS_ASSERT(pn->pn_atom != NULL);
return EmitAtomOp(cx, pn->pn_atom, op, bce);
}
static bool
EmitObjectOp(JSContext *cx, ObjectBox *objbox, JSOp op, BytecodeEmitter *bce)
{
JS_ASSERT(JOF_OPTYPE(op) == JOF_OBJECT);
return EmitIndex32(cx, op, bce->objectList.add(objbox), bce);
}
static bool
EmitRegExp(JSContext *cx, uint32_t index, BytecodeEmitter *bce)
{
return EmitIndex32(cx, JSOP_REGEXP, index, bce);
}
/*
* To catch accidental misuse, EMIT_UINT16_IMM_OP/Emit3 assert that they are
* not used to unconditionally emit JSOP_GETLOCAL. Variable access should
* instead be emitted using EmitVarOp. In special cases, when the caller
* definitely knows that a given local slot is unaliased, this function may be
* used as a non-asserting version of EMIT_UINT16_IMM_OP.
*/
static bool
EmitUnaliasedVarOp(JSContext *cx, JSOp op, uint16_t slot, BytecodeEmitter *bce)
{
JS_ASSERT(JOF_OPTYPE(op) != JOF_SCOPECOORD);
ptrdiff_t off = EmitN(cx, bce, op, sizeof(uint16_t));
if (off < 0)
return false;
SET_UINT16(bce->code(off), slot);
return true;
}
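/*
* Layout sketch of the immediate operand stored by EmitAliasedVarOp below:
*
*   uint16 hops; uint16 slot; uint32 maybeBlockIndex;
*
* where hops counts enclosing scope objects to skip and maybeBlockIndex is
* UINT32_MAX when no static block encloses the access.
*/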
static bool
EmitAliasedVarOp(JSContext *cx, JSOp op, ScopeCoordinate sc, BytecodeEmitter *bce)
{
JS_ASSERT(JOF_OPTYPE(op) == JOF_SCOPECOORD);
uint32_t maybeBlockIndex = UINT32_MAX;
if (bce->blockChain)
maybeBlockIndex = bce->objectList.indexOf(bce->blockChain);
unsigned n = 2 * sizeof(uint16_t) + sizeof(uint32_t);
JS_ASSERT(int(n) + 1 /* op */ == js_CodeSpec[op].length);
ptrdiff_t off = EmitN(cx, bce, op, n);
if (off < 0)
return false;
jsbytecode *pc = bce->code(off);
SET_UINT16(pc, sc.hops);
pc += sizeof(uint16_t);
SET_UINT16(pc, sc.slot);
pc += sizeof(uint16_t);
SET_UINT32_INDEX(pc, maybeBlockIndex);
CheckTypeSet(cx, bce, op);
return true;
}
static unsigned
ClonedBlockDepth(BytecodeEmitter *bce)
{
unsigned clonedBlockDepth = 0;
for (StaticBlockObject *b = bce->blockChain; b; b = b->enclosingBlock()) {
if (b->needsClone())
++clonedBlockDepth;
}
return clonedBlockDepth;
}
static bool
LookupAliasedName(HandleScript script, PropertyName *name, uint16_t *pslot)
{
/*
* Beware: BindingIter may contain more than one Binding for a given name
* (in the case of |function f(x,x) {}|) but only one will be aliased.
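*
* Illustrative example: in |function f(x, y)| with both formals aliased, x
* maps to slot CallObject::RESERVED_SLOTS and y to RESERVED_SLOTS + 1; the
* loop below numbers aliased bindings consecutively from that base.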
*/
unsigned slot = CallObject::RESERVED_SLOTS;
for (BindingIter bi(script); !bi.done(); bi++) {
if (bi->aliased()) {
if (bi->name() == name) {
*pslot = slot;
return true;
}
slot++;
}
}
return false;
}
static bool
EmitAliasedVarOp(JSContext *cx, JSOp op, ParseNode *pn, BytecodeEmitter *bce)
{
unsigned skippedScopes = 0;
BytecodeEmitter *bceOfDef = bce;
if (pn->isUsed()) {
/*
* As explained in BindNameToSlot, the 'level' of a use indicates how
* many function scopes (i.e., BytecodeEmitters) to skip to find the
* enclosing function scope of the definition being accessed.
*/
for (unsigned i = pn->pn_cookie.level(); i; i--) {
skippedScopes += ClonedBlockDepth(bceOfDef);
FunctionBox *funbox = bceOfDef->sc->asFunctionBox();
if (funbox->isHeavyweight()) {
skippedScopes++;
if (funbox->function()->isNamedLambda())
skippedScopes++;
}
bceOfDef = bceOfDef->parent;
}
} else {
JS_ASSERT(pn->isDefn());
JS_ASSERT(pn->pn_cookie.level() == bce->script->staticLevel);
}
ScopeCoordinate sc;
if (IsArgOp(pn->getOp())) {
sc.hops = skippedScopes + ClonedBlockDepth(bceOfDef);
JS_ALWAYS_TRUE(LookupAliasedName(bceOfDef->script, pn->name(), &sc.slot));
} else {
JS_ASSERT(IsLocalOp(pn->getOp()) || pn->isKind(PNK_FUNCTION));
unsigned local = pn->pn_cookie.slot();
if (local < bceOfDef->script->bindings.numVars()) {
sc.hops = skippedScopes + ClonedBlockDepth(bceOfDef);
JS_ALWAYS_TRUE(LookupAliasedName(bceOfDef->script, pn->name(), &sc.slot));
} else {
unsigned depth = local - bceOfDef->script->bindings.numVars();
StaticBlockObject *b = bceOfDef->blockChain;
while (!b->containsVarAtDepth(depth)) {
if (b->needsClone())
skippedScopes++;
b = b->enclosingBlock();
}
sc.hops = skippedScopes;
sc.slot = b->localIndexToSlot(bceOfDef->script->bindings, local);
}
}
return EmitAliasedVarOp(cx, op, sc, bce);
}
static bool
EmitVarOp(JSContext *cx, ParseNode *pn, JSOp op, BytecodeEmitter *bce)
{
JS_ASSERT(pn->isKind(PNK_FUNCTION) || pn->isKind(PNK_NAME));
JS_ASSERT(!pn->pn_cookie.isFree());
if (IsAliasedVarOp(op)) {
ScopeCoordinate sc;
sc.hops = pn->pn_cookie.level();
sc.slot = pn->pn_cookie.slot();
return EmitAliasedVarOp(cx, op, sc, bce);
}
JS_ASSERT_IF(pn->isKind(PNK_NAME), IsArgOp(op) || IsLocalOp(op));
if (!bce->isAliasedName(pn)) {
JS_ASSERT(pn->isUsed() || pn->isDefn());
JS_ASSERT_IF(pn->isUsed(), pn->pn_cookie.level() == 0);
JS_ASSERT_IF(pn->isDefn(), pn->pn_cookie.level() == bce->script->staticLevel);
return EmitUnaliasedVarOp(cx, op, pn->pn_cookie.slot(), bce);
}
switch (op) {
case JSOP_GETARG: case JSOP_GETLOCAL: op = JSOP_GETALIASEDVAR; break;
case JSOP_SETARG: case JSOP_SETLOCAL: op = JSOP_SETALIASEDVAR; break;
case JSOP_CALLARG: case JSOP_CALLLOCAL: op = JSOP_CALLALIASEDVAR; break;
default: JS_NOT_REACHED("unexpected var op");
}
return EmitAliasedVarOp(cx, op, pn, bce);
}
static JSOp
GetIncDecInfo(ParseNodeKind kind, bool *post)
{
JS_ASSERT(kind == PNK_POSTINCREMENT || kind == PNK_PREINCREMENT ||
kind == PNK_POSTDECREMENT || kind == PNK_PREDECREMENT);
*post = kind == PNK_POSTINCREMENT || kind == PNK_POSTDECREMENT;
return (kind == PNK_POSTINCREMENT || kind == PNK_PREINCREMENT) ? JSOP_ADD : JSOP_SUB;
}
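/*
* For example, GetIncDecInfo above maps |x++| (PNK_POSTINCREMENT) to
* *post = true with JSOP_ADD, and |--x| (PNK_PREDECREMENT) to
* *post = false with JSOP_SUB.
*/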
static bool
EmitVarIncDec(JSContext *cx, ParseNode *pn, BytecodeEmitter *bce)
{
JSOp op = pn->pn_kid->getOp();
JS_ASSERT(IsArgOp(op) || IsLocalOp(op) || IsAliasedVarOp(op));
JS_ASSERT(pn->pn_kid->isKind(PNK_NAME));
JS_ASSERT(!pn->pn_kid->pn_cookie.isFree());
bool post;
JSOp binop = GetIncDecInfo(pn->getKind(), &post);
JSOp getOp, setOp;
if (IsLocalOp(op)) {
getOp = JSOP_GETLOCAL;
setOp = JSOP_SETLOCAL;
} else if (IsArgOp(op)) {
getOp = JSOP_GETARG;
setOp = JSOP_SETARG;
} else {
getOp = JSOP_GETALIASEDVAR;
setOp = JSOP_SETALIASEDVAR;
}
if (!EmitVarOp(cx, pn->pn_kid, getOp, bce)) // V
return false;
if (Emit1(cx, bce, JSOP_POS) < 0) // N
return false;
if (post && Emit1(cx, bce, JSOP_DUP) < 0) // N? N
return false;
if (Emit1(cx, bce, JSOP_ONE) < 0) // N? N 1
return false;
if (Emit1(cx, bce, binop) < 0) // N? N+1
return false;
if (!EmitVarOp(cx, pn->pn_kid, setOp, bce)) // N? N+1
return false;
if (post && Emit1(cx, bce, JSOP_POP) < 0) // RESULT
return false;
return true;
}
bool
BytecodeEmitter::isAliasedName(ParseNode *pn)
{
Definition *dn = pn->resolve();
JS_ASSERT(dn->isDefn());
JS_ASSERT(!dn->isPlaceholder());
JS_ASSERT(dn->isBound());
/* If dn is in an enclosing function, it is definitely aliased. */
if (dn->pn_cookie.level() != script->staticLevel)
return true;
switch (dn->kind()) {
case Definition::LET:
/*
* There are two ways to alias a let variable: nested functions and
* dynamic scope operations. (This is overly conservative since the
* bindingsAccessedDynamically flag is function-wide.)
*/
return dn->isClosed() || sc->bindingsAccessedDynamically();
case Definition::ARG:
/*
* Consult the bindings, since they already record aliasing. We might
* be tempted to use the same definition as VAR/CONST/LET, but there is
* a problem caused by duplicate arguments: only the last argument with
* a given name is aliased. This is necessary to avoid generating a
* shape for the call object with more than one name for a given
* slot (which violates internal engine invariants). All this means that
* the '|| sc->bindingsAccessedDynamically' disjunct is incorrect since
* it will mark both parameters in function(x,x) as aliased.
*/
return script->formalIsAliased(pn->pn_cookie.slot());
case Definition::VAR:
case Definition::CONST:
return script->varIsAliased(pn->pn_cookie.slot());
case Definition::PLACEHOLDER:
case Definition::NAMED_LAMBDA:
case Definition::MISSING:
JS_NOT_REACHED("unexpected dn->kind");
}
return false;
}
/*
* Adjust the slot for a block local to account for the number of variables
* that share the same index space with locals. Due to the incremental code
* generation for top-level script, we do the adjustment via code patching in
* js::frontend::CompileScript; see comments there.
*
* Returns -1 on failure.
*/
static int
AdjustBlockSlot(JSContext *cx, BytecodeEmitter *bce, int slot)
{
JS_ASSERT((unsigned) slot < bce->maxStackDepth);
if (bce->sc->isFunctionBox()) {
slot += bce->script->bindings.numVars();
if ((unsigned) slot >= SLOTNO_LIMIT) {
bce->reportError(NULL, JSMSG_TOO_MANY_LOCALS);
slot = -1;
}
}
return slot;
}
static bool
EmitEnterBlock(JSContext *cx, BytecodeEmitter *bce, ParseNode *pn, JSOp op)
{
JS_ASSERT(pn->isKind(PNK_LEXICALSCOPE));
if (!EmitObjectOp(cx, pn->pn_objbox, op, bce))
return false;
Rooted<StaticBlockObject*> blockObj(cx, &pn->pn_objbox->object->as<StaticBlockObject>());
int depth = bce->stackDepth -
(blockObj->slotCount() + ((op == JSOP_ENTERLET1) ? 1 : 0));
JS_ASSERT(depth >= 0);
blockObj->setStackDepth(depth);
int depthPlusFixed = AdjustBlockSlot(cx, bce, depth);
if (depthPlusFixed < 0)
return false;
for (unsigned i = 0; i < blockObj->slotCount(); i++) {
Definition *dn = blockObj->maybeDefinitionParseNode(i);
/* Beware the empty destructuring dummy. */
if (!dn) {
blockObj->setAliased(i, bce->sc->bindingsAccessedDynamically());
continue;
}
JS_ASSERT(dn->isDefn());
JS_ASSERT(unsigned(dn->frameSlot() + depthPlusFixed) < JS_BIT(16));
if (!dn->pn_cookie.set(cx, dn->pn_cookie.level(),
uint16_t(dn->frameSlot() + depthPlusFixed)))
return false;
#ifdef DEBUG
for (ParseNode *pnu = dn->dn_uses; pnu; pnu = pnu->pn_link) {
JS_ASSERT(pnu->pn_lexdef == dn);
JS_ASSERT(!(pnu->pn_dflags & PND_BOUND));
JS_ASSERT(pnu->pn_cookie.isFree());
}
#endif
blockObj->setAliased(i, bce->isAliasedName(dn));
}
return true;
}
/*
* Try to convert a *NAME op with a free name to a more specialized GNAME,
* INTRINSIC or ALIASEDVAR op, which optimizes accesses to that name.
* Return true if a conversion was made.
*/
static bool
TryConvertFreeName(BytecodeEmitter *bce, ParseNode *pn)
{
/*
* In self-hosting mode, JSOP_*NAME is unconditionally converted to
* JSOP_*INTRINSIC. This causes lookups to be redirected to the special
* intrinsics holder in the global object, into which any missing values are
* cloned lazily upon first access.
*/
if (bce->emitterMode == BytecodeEmitter::SelfHosting) {
JSOp op;
switch (pn->getOp()) {
case JSOP_NAME: op = JSOP_GETINTRINSIC; break;
case JSOP_SETNAME: op = JSOP_SETINTRINSIC; break;
/* Other *NAME ops aren't (yet) supported in self-hosted code. */
default: JS_NOT_REACHED("intrinsic");
}
pn->setOp(op);
return true;
}
/*
* When parsing inner functions lazily, parse nodes for outer functions no
* longer exist and only the function's scope chain is available for
* resolving upvar accesses within the inner function.
*/
if (bce->emitterMode == BytecodeEmitter::LazyFunction) {
// The only statements within a lazy function which can push lexical
// scopes are try/catch blocks. Use generic ops in this case.
for (StmtInfoBCE *stmt = bce->topStmt; stmt; stmt = stmt->down) {
switch (stmt->type) {
case STMT_TRY:
case STMT_FINALLY:
return true;
default:;
}
}
size_t hops = 0;
FunctionBox *funbox = bce->sc->asFunctionBox();
if (funbox->hasExtensibleScope())
return false;
if (funbox->function()->isNamedLambda() && funbox->function()->atom() == pn->pn_atom)
return false;
if (funbox->isHeavyweight()) {
hops++;
if (funbox->function()->isNamedLambda())
hops++;
}
if (bce->script->directlyInsideEval)
return false;
RootedObject outerScope(bce->sc->context, bce->script->enclosingStaticScope());
for (StaticScopeIter ssi(bce->sc->context, outerScope); !ssi.done(); ssi++) {
if (ssi.type() != StaticScopeIter::FUNCTION) {
if (ssi.type() == StaticScopeIter::BLOCK) {
// Use generic ops if a catch block is encountered.
return false;
}
if (ssi.hasDynamicScopeObject())
hops++;
continue;
}
RootedScript script(bce->sc->context, ssi.funScript());
if (script->function()->atom() == pn->pn_atom)
return false;
if (ssi.hasDynamicScopeObject()) {
uint16_t slot;
if (LookupAliasedName(script, pn->pn_atom->asPropertyName(), &slot)) {
JSOp op;
switch (pn->getOp()) {
case JSOP_NAME: op = JSOP_GETALIASEDVAR; break;
case JSOP_SETNAME: op = JSOP_SETALIASEDVAR; break;
default: return false;
}
pn->setOp(op);
JS_ALWAYS_TRUE(pn->pn_cookie.set(bce->sc->context, hops, slot));
return true;
}
hops++;
}
if (script->funHasExtensibleScope || script->directlyInsideEval)
return false;
}
}
/*
* Try to convert free names in global scope to GNAME opcodes.
*
* This conversion is not made if we are in strict mode. In eval code nested
* within (strict mode) eval code, access to an undeclared "global" might
* merely be to a binding local to that outer eval:
*
* "use strict";
* var x = "global";
* eval('var x = "eval"; eval("x");'); // 'eval', not 'global'
*
* Outside eval code, access to an undeclared global is a strict mode error:
*
* "use strict";
* function foo()
* {
* undeclared = 17; // throws ReferenceError
* }
* foo();
*/
if (bce->script->compileAndGo &&
bce->hasGlobalScope &&
!(bce->sc->isFunctionBox() && bce->sc->asFunctionBox()->mightAliasLocals()) &&
!pn->isDeoptimized() &&
!(bce->sc->strict && bce->insideEval))
{
// If you change anything here, you might also need to change
// js::ReportIfUndeclaredVarAssignment.
JSOp op;
switch (pn->getOp()) {
case JSOP_NAME: op = JSOP_GETGNAME; break;
case JSOP_SETNAME: op = JSOP_SETGNAME; break;
case JSOP_SETCONST:
/* Not supported. */
return false;
default: JS_NOT_REACHED("gname");
}
pn->setOp(op);
return true;
}
return false;
}
/*
* BindNameToSlotHelper attempts to optimize name gets and sets to stack slot
* loads and stores, given the compile-time information in bce and a PNK_NAME
* node pn. It returns false on error, true on success.
*
* The caller can test pn->pn_cookie.isFree() to tell whether optimization
* occurred: if the cookie is no longer free, BindNameToSlotHelper also
* updated pn->pn_op. If pn->pn_cookie.isFree() is still true on return,
* pn->pn_op still may have
* been optimized, e.g., from JSOP_NAME to JSOP_CALLEE. Whether or not
* pn->pn_op was modified, if this function finds an argument or local variable
* name, PND_CONST will be set in pn_dflags for read-only properties after a
* successful return.
*
* NB: if you add more opcodes specialized from JSOP_NAME, etc., don't forget
* to update the special cases in EmitFor (for-in) and EmitAssignment (= and
* op=, e.g. +=).
*/
static bool
BindNameToSlotHelper(JSContext *cx, BytecodeEmitter *bce, ParseNode *pn)
{
JS_ASSERT(pn->isKind(PNK_NAME));
JS_ASSERT_IF(pn->isKind(PNK_FUNCTION), pn->isBound());
/* Don't attempt if 'pn' is already bound or deoptimized or a function. */
if (pn->isBound() || pn->isDeoptimized())
return true;
/* JSOP_CALLEE is pre-bound by definition. */
JSOp op = pn->getOp();
JS_ASSERT(op != JSOP_CALLEE);
JS_ASSERT(JOF_OPTYPE(op) == JOF_ATOM);
/*
* The parser already linked name uses to definitions when possible (where
* not prevented by non-lexical constructs like 'with' and 'eval').
*/
Definition *dn;
if (pn->isUsed()) {
JS_ASSERT(pn->pn_cookie.isFree());
dn = pn->pn_lexdef;
JS_ASSERT(dn->isDefn());
pn->pn_dflags |= (dn->pn_dflags & PND_CONST);
} else if (pn->isDefn()) {
dn = (Definition *) pn;
} else {
return true;
}
/*
* Turn attempts to mutate const-declared bindings into get ops (for
* pre-increment and pre-decrement ops, our caller will have to emit
* JSOP_POS, JSOP_ONE, and JSOP_ADD as well).
*
* Turn JSOP_DELNAME into JSOP_FALSE if dn is known, as all declared
* bindings visible to the compiler are permanent in JS unless the
* declaration originates at top level in eval code.
*/
switch (op) {
case JSOP_NAME:
case JSOP_SETCONST:
break;
default:
if (pn->isConst()) {
if (bce->sc->needStrictChecks()) {
JSAutoByteString name;
if (!js_AtomToPrintableString(cx, pn->pn_atom, &name) ||
!bce->reportStrictModeError(pn, JSMSG_READ_ONLY, name.ptr()))
{
return false;
}
}
pn->setOp(op = JSOP_NAME);
}
}
if (dn->pn_cookie.isFree()) {
if (HandleScript caller = bce->evalCaller) {
JS_ASSERT(bce->script->compileAndGo);
/*
* Don't generate upvars on the left side of a for loop. See
* bug 470758.
*/
if (bce->emittingForInit)
return true;
/*
* If this is an eval in the global scope, then unbound variables
* must be globals, so try to use GNAME ops.
*/
if (!caller->functionOrCallerFunction() && TryConvertFreeName(bce, pn)) {
pn->pn_dflags |= PND_BOUND;
return true;
}
/*
* Out of tricks, so we must rely on PICs to optimize named
* accesses from direct eval called from function code.
*/
return true;
}
/* Optimize accesses to undeclared globals. */
if (!TryConvertFreeName(bce, pn))
return true;
pn->pn_dflags |= PND_BOUND;
return true;
}
/*
* At this point, we are only dealing with uses that have already been
* bound to definitions via pn_lexdef. The rest of this routine converts
* the parse node of the use from its initial JSOP_*NAME* op to a LOCAL/ARG
* op. This requires setting the node's pn_cookie with a pair (level, slot)
* where 'level' is the number of function scopes between the use and the
* def and 'slot' is the index to emit as the immediate of the ARG/LOCAL
* op. For example, in this code:
*
* function(a,b,x) { return x }
* function(y) { function() { return y } }
*
* x will get (level = 0, slot = 2) and y will get (level = 1, slot = 0).
*/
JS_ASSERT(!pn->isDefn());
JS_ASSERT(pn->isUsed());
JS_ASSERT(pn->pn_lexdef);
JS_ASSERT(pn->pn_cookie.isFree());
/*
* We are compiling a function body and may be able to optimize name
* to stack slot. Look for an argument or variable in the function and
* rewrite pn_op and update pn accordingly.
*/
switch (dn->kind()) {
case Definition::ARG:
switch (op) {
case JSOP_NAME: op = JSOP_GETARG; break;
case JSOP_SETNAME: op = JSOP_SETARG; break;
default: JS_NOT_REACHED("arg");
}
JS_ASSERT(!pn->isConst());
break;
case Definition::VAR:
case Definition::CONST:
case Definition::LET:
switch (op) {
case JSOP_NAME: op = JSOP_GETLOCAL; break;
case JSOP_SETNAME: op = JSOP_SETLOCAL; break;
case JSOP_SETCONST: op = JSOP_SETLOCAL; break;
default: JS_NOT_REACHED("local");
}
break;
case Definition::NAMED_LAMBDA: {
JS_ASSERT(dn->isOp(JSOP_CALLEE));
JS_ASSERT(op != JSOP_CALLEE);
/*
* Currently, the ALIASEDVAR ops do not support accessing the
* callee of a DeclEnvObject, so use NAME.
*/
if (dn->pn_cookie.level() != bce->script->staticLevel)
return true;
DebugOnly<JSFunction *> fun = bce->sc->asFunctionBox()->function();
JS_ASSERT(fun->isLambda());
JS_ASSERT(pn->pn_atom == fun->atom());
/*
* Leave pn->isOp(JSOP_NAME) if bce->fun is heavyweight to
* address two cases: a new binding introduced by eval, and
* assignment to the name in strict mode.
*
* var fun = (function f(s) { eval(s); return f; });
* assertEq(fun("var f = 42"), 42);
*
* ECMAScript specifies that a function expression's name is bound
* in a lexical environment distinct from that used to bind its
* named parameters, the arguments object, and its variables. The
* new binding for "var f = 42" shadows the binding for the
* function itself, so the name of the function will not refer to
* the function.
*
* (function f() { "use strict"; f = 12; })();
*
* Outside strict mode, assignment to a function expression's name
* has no effect. But in strict mode, this attempt to mutate an
* immutable binding must throw a TypeError. We implement this by
* not optimizing such assignments and by marking such functions as
* heavyweight, ensuring that the function name is represented in
* the scope chain so that assignment will throw a TypeError.
*/
if (!bce->sc->asFunctionBox()->isHeavyweight()) {
op = JSOP_CALLEE;
pn->pn_dflags |= PND_CONST;
}
pn->setOp(op);
pn->pn_dflags |= PND_BOUND;
return true;
}
case Definition::PLACEHOLDER:
return true;
case Definition::MISSING:
JS_NOT_REACHED("missing");
}
/*
* The difference between the current static level and the static level of
* the definition is the number of function scopes between the current
* scope and dn's scope.
*/
unsigned skip = bce->script->staticLevel - dn->pn_cookie.level();
JS_ASSERT_IF(skip, dn->isClosed());
/*
* Explicitly disallow accessing var/let bindings in global scope from
* nested functions. The reason for this limitation is that, since the
* global script is not included in the static scope chain (1. because it
* has no object to stand in the static scope chain, 2. to minimize memory
* bloat where a single live function keeps its whole global script
* alive.), ScopeCoordinateToTypeSet is not able to find the var/let's
* associated types::TypeSet.
*/
if (skip) {
BytecodeEmitter *bceSkipped = bce;
for (unsigned i = 0; i < skip; i++)
bceSkipped = bceSkipped->parent;
if (!bceSkipped->sc->isFunctionBox())
return true;
}
JS_ASSERT(!pn->isOp(op));
pn->setOp(op);
if (!pn->pn_cookie.set(bce->sc->context, skip, dn->pn_cookie.slot()))
return false;
pn->pn_dflags |= PND_BOUND;
return true;
}
/*
* Attempts to bind the name, then checks that no dynamic scope lookup ops are
* emitted in self-hosting mode. NAME ops do lookups off the current scope chain,
* and we do not want to allow self-hosted code to use the dynamic scope.
*/
static bool
BindNameToSlot(JSContext *cx, BytecodeEmitter *bce, ParseNode *pn)
{
if (!BindNameToSlotHelper(cx, bce, pn))
return false;
if (bce->emitterMode == BytecodeEmitter::SelfHosting && !pn->isBound()) {
bce->reportError(pn, JSMSG_SELFHOSTED_UNBOUND_NAME);
return false;
}
return true;
}
/*
* If pn contains a useful expression, return true with *answer set to true.
* If pn contains a useless expression, return true with *answer set to false.
* Return false on error.
*
* The caller should initialize *answer to false and invoke this function on
* an expression statement or similar subtree to decide whether the tree could
* produce code that has any side effects. For an expression statement, we
* define useless code as code with no side effects, because the main effect,
* the value left on the stack after the code executes, will be discarded by a
* pop bytecode.
*/
static bool
CheckSideEffects(JSContext *cx, BytecodeEmitter *bce, ParseNode *pn, bool *answer)
{
if (!pn || *answer)
return true;
switch (pn->getArity()) {
case PN_CODE:
/*
* A named function, contrary to ES3, is no longer useful, because we
* bind its name lexically (using JSOP_CALLEE) instead of creating an
* Object instance and binding a readonly, permanent property in it
* (the object and binding can be detected and hijacked or captured).
* This is a bug fix to ES3; it is fixed in ES3.1 drafts.
*/
MOZ_ASSERT(*answer == false);
return true;
case PN_LIST:
if (pn->isOp(JSOP_NOP) || pn->isOp(JSOP_OR) || pn->isOp(JSOP_AND) ||
pn->isOp(JSOP_STRICTEQ) || pn->isOp(JSOP_STRICTNE)) {
/*
* Non-operators along with ||, &&, ===, and !== never invoke
* toString or valueOf.
*/
bool ok = true;
for (ParseNode *pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next)
ok &= CheckSideEffects(cx, bce, pn2, answer);
return ok;
}
if (pn->isKind(PNK_GENEXP)) {
/* Generator-expressions are harmless if the result is ignored. */
MOZ_ASSERT(*answer == false);
return true;
}
/*
* All invocation operations (construct: PNK_NEW, call: PNK_CALL)
* are presumed to be useful, because they may have side effects
* even if their main effect (their return value) is discarded.
*
* PNK_ELEM binary trees of 3+ nodes are flattened into lists to
* avoid too much recursion. All such lists must be presumed to be
* useful because each index operation could invoke a getter.
*
* Likewise, array and object initialisers may call prototype
* setters (the __defineSetter__ built-in, and writable __proto__
* on Array.prototype create this hazard). Initialiser list nodes
* have JSOP_NEWINIT in their pn_op.
*/
*answer = true;
return true;
case PN_TERNARY:
return CheckSideEffects(cx, bce, pn->pn_kid1, answer) &&
CheckSideEffects(cx, bce, pn->pn_kid2, answer) &&
CheckSideEffects(cx, bce, pn->pn_kid3, answer);
case PN_BINARY:
if (pn->isAssignment()) {
/*
* Assignment is presumed to be useful, even if the next operation
* is another assignment overwriting this one's ostensible effect,
* because the left operand may be a property with a setter that
* has side effects.
*
* The only exception is assignment of a useless value to a const
* declared in the function currently being compiled.
*/
ParseNode *pn2 = pn->pn_left;
if (!pn2->isKind(PNK_NAME)) {
*answer = true;
} else {
if (!BindNameToSlot(cx, bce, pn2))
return false;
if (!CheckSideEffects(cx, bce, pn->pn_right, answer))
return false;
if (!*answer && (!pn->isOp(JSOP_NOP) || !pn2->isConst()))
*answer = true;
}
return true;
}
if (pn->isOp(JSOP_OR) || pn->isOp(JSOP_AND) || pn->isOp(JSOP_STRICTEQ) ||
pn->isOp(JSOP_STRICTNE)) {
/*
* ||, &&, ===, and !== do not convert their operands via
* toString or valueOf method calls.
*/
return CheckSideEffects(cx, bce, pn->pn_left, answer) &&
CheckSideEffects(cx, bce, pn->pn_right, answer);
}
/*
* We can't easily prove that neither operand ever denotes an
* object with a toString or valueOf method.
*/
*answer = true;
return true;
case PN_UNARY:
switch (pn->getKind()) {
case PNK_DELETE:
{
ParseNode *pn2 = pn->pn_kid;
switch (pn2->getKind()) {
case PNK_NAME:
if (!BindNameToSlot(cx, bce, pn2))
return false;
if (pn2->isConst()) {
MOZ_ASSERT(*answer == false);
return true;
}
/* FALL THROUGH */
case PNK_DOT:
case PNK_CALL:
case PNK_ELEM:
/* All these delete addressing modes have effects too. */
*answer = true;
return true;
default:
return CheckSideEffects(cx, bce, pn2, answer);
}
MOZ_NOT_REACHED("We have a returning default case");
return false;
}
case PNK_TYPEOF:
case PNK_VOID:
case PNK_NOT:
case PNK_BITNOT:
if (pn->isOp(JSOP_NOT)) {
/* ! does not convert its operand via toString or valueOf. */
return CheckSideEffects(cx, bce, pn->pn_kid, answer);
}
/* FALL THROUGH */
default:
/*
* All of PNK_INC, PNK_DEC, PNK_THROW, and PNK_YIELD have direct
* effects. Of the remaining unary-arity node types, we can't
* easily prove that the operand never denotes an object with a
* toString or valueOf method.
*/
*answer = true;
return true;
}
MOZ_NOT_REACHED("We have a returning default case");
return false;
case PN_NAME:
/*
* Take care to avoid trying to bind a label name (labels, both for
* statements and property values in object initialisers, have pn_op
* defaulted to JSOP_NOP).
*/
if (pn->isKind(PNK_NAME) && !pn->isOp(JSOP_NOP)) {
if (!BindNameToSlot(cx, bce, pn))
return false;
if (!pn->isOp(JSOP_CALLEE) && pn->pn_cookie.isFree()) {
/*
* Not a use of an unshadowed named function expression's given
* name, so this expression could invoke a getter that has side
* effects.
*/
*answer = true;
}
}
if (pn->isKind(PNK_DOT)) {
/* Dotted property references in general can call getters. */
*answer = true;
}
return CheckSideEffects(cx, bce, pn->maybeExpr(), answer);
case PN_NULLARY:
if (pn->isKind(PNK_DEBUGGER))
*answer = true;
return true;
}
return true;
}
bool
BytecodeEmitter::isInLoop()
{
for (StmtInfoBCE *stmt = topStmt; stmt; stmt = stmt->down) {
if (stmt->isLoop())
return true;
}
return false;
}
bool
BytecodeEmitter::checkSingletonContext()
{
if (!script->compileAndGo || sc->isFunctionBox() || isInLoop())
return false;
hasSingletons = true;
return true;
}
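/*
* A name invoked as a callee may need an implicit |this| drawn from the
* scope object the name was found on, e.g. (illustrative):
*
*   with (o) f();   // f must be called with |this| === o
*
* needsImplicitThis() below reports whether such a with scope (static, or
* on a non-compileAndGo scope chain) could intervene, answering
* conservatively when it cannot rule one out.
*/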
bool
BytecodeEmitter::needsImplicitThis()
{
if (!script->compileAndGo)
return true;
if (sc->isModuleBox()) {
/* Modules can never occur inside a with-statement. */
return false;
}
if (sc->isFunctionBox()) {
if (sc->asFunctionBox()->inWith)
return true;
} else {
JSObject *scope = sc->asGlobalSharedContext()->scopeChain();
while (scope) {
if (scope->is<WithObject>())
return true;
scope = scope->enclosingScope();
}
}
for (StmtInfoBCE *stmt = topStmt; stmt; stmt = stmt->down) {
if (stmt->type == STMT_WITH)
return true;
}
return false;
}
void
BytecodeEmitter::tellDebuggerAboutCompiledScript(JSContext *cx)
{
RootedFunction function(cx, script->function());
CallNewScriptHook(cx, script, function);
if (!parent) {
GlobalObject *compileAndGoGlobal = NULL;
if (script->compileAndGo)
compileAndGoGlobal = &script->global();
Debugger::onNewScript(cx, script, compileAndGoGlobal);
}
}
bool
BytecodeEmitter::reportError(ParseNode *pn, unsigned errorNumber, ...)
{
TokenPos pos = pn ? pn->pn_pos : tokenStream()->currentToken().pos;
va_list args;
va_start(args, errorNumber);
bool result = tokenStream()->reportCompileErrorNumberVA(pos.begin, JSREPORT_ERROR,
errorNumber, args);
va_end(args);
return result;
}
bool
BytecodeEmitter::reportStrictWarning(ParseNode *pn, unsigned errorNumber, ...)
{
TokenPos pos = pn ? pn->pn_pos : tokenStream()->currentToken().pos;
va_list args;
va_start(args, errorNumber);
bool result = tokenStream()->reportStrictWarningErrorNumberVA(pos.begin, errorNumber, args);
va_end(args);
return result;
}
bool
BytecodeEmitter::reportStrictModeError(ParseNode *pn, unsigned errorNumber, ...)
{
TokenPos pos = pn ? pn->pn_pos : tokenStream()->currentToken().pos;
va_list args;
va_start(args, errorNumber);
bool result = tokenStream()->reportStrictModeErrorNumberVA(pos.begin, sc->strict,
errorNumber, args);
va_end(args);
return result;
}
static bool
EmitNameOp(JSContext *cx, BytecodeEmitter *bce, ParseNode *pn, bool callContext)
{
JSOp op;
if (!BindNameToSlot(cx, bce, pn))
return false;
op = pn->getOp();
if (callContext) {
switch (op) {
case JSOP_NAME:
op = JSOP_CALLNAME;
break;
case JSOP_GETINTRINSIC:
op = JSOP_CALLINTRINSIC;
break;
case JSOP_GETGNAME:
op = JSOP_CALLGNAME;
break;
case JSOP_GETARG:
op = JSOP_CALLARG;
break;
case JSOP_GETLOCAL:
op = JSOP_CALLLOCAL;
break;
case JSOP_GETALIASEDVAR:
op = JSOP_CALLALIASEDVAR;
break;
default:
JS_ASSERT(op == JSOP_CALLEE);
break;
}
}
if (op == JSOP_CALLEE) {
if (Emit1(cx, bce, op) < 0)
return false;
} else {
if (!pn->pn_cookie.isFree()) {
JS_ASSERT(JOF_OPTYPE(op) != JOF_ATOM);
if (!EmitVarOp(cx, pn, op, bce))
return false;
} else {
if (!EmitAtomOp(cx, pn, op, bce))
return false;
}
}
/* Need to provide a |this| value for the call. */
if (callContext) {
if (op == JSOP_CALLNAME && bce->needsImplicitThis()) {
if (!EmitAtomOp(cx, pn, JSOP_IMPLICITTHIS, bce))
return false;
} else {
if (Emit1(cx, bce, JSOP_UNDEFINED) < 0)
return false;
}
if (Emit1(cx, bce, JSOP_NOTEARG) < 0)
return false;
}
return true;
}
static inline bool
EmitElemOpBase(JSContext *cx, BytecodeEmitter *bce, JSOp op)
{
if (Emit1(cx, bce, op) < 0)
return false;
CheckTypeSet(cx, bce, op);
if (op == JSOP_CALLELEM) {
if (Emit1(cx, bce, JSOP_SWAP) < 0)
return false;
if (Emit1(cx, bce, JSOP_NOTEARG) < 0)
return false;
}
return true;
}
static bool
EmitPropLHS(JSContext *cx, ParseNode *pn, JSOp *op, BytecodeEmitter *bce, bool callContext)
{
ParseNode *pn2 = pn->maybeExpr();
if (callContext) {
JS_ASSERT(pn->isKind(PNK_DOT));
JS_ASSERT(*op == JSOP_GETPROP);
*op = JSOP_CALLPROP;
} else if (*op == JSOP_GETPROP && pn->isKind(PNK_DOT)) {
if (pn2->isKind(PNK_NAME)) {
if (!BindNameToSlot(cx, bce, pn2))
return false;
}
}
/*
* If the object operand is also a dotted property reference, reverse the
* list linked via pn_expr temporarily so we can iterate over it from the
* bottom up (reversing again as we go), to avoid excessive recursion.
*/
if (pn2->isKind(PNK_DOT)) {
ParseNode *pndot = pn2;
ParseNode *pnup = NULL, *pndown;
ptrdiff_t top = bce->offset();
for (;;) {
/* Reverse pndot->pn_expr to point up, not down. */
pndot->pn_offset = top;
JS_ASSERT(!pndot->isUsed());
pndown = pndot->pn_expr;
pndot->pn_expr = pnup;
if (!pndown->isKind(PNK_DOT))
break;
pnup = pndot;
pndot = pndown;
}
/* pndown is a primary expression, not a dotted property reference. */
if (!EmitTree(cx, bce, pndown))
return false;
do {
/* Walk back up the list, emitting annotated name ops. */
if (!EmitAtomOp(cx, pndot, pndot->getOp(), bce))
return false;
/* Reverse the pn_expr link again. */
pnup = pndot->pn_expr;
pndot->pn_expr = pndown;
pndown = pndot;
} while ((pndot = pnup) != NULL);
} else {
if (!EmitTree(cx, bce, pn2))
return false;
}
return true;
}
static bool
EmitPropOp(JSContext *cx, ParseNode *pn, JSOp requested, BytecodeEmitter *bce, bool callContext)
{
JS_ASSERT(pn->isArity(PN_NAME));
JSOp op = requested;
if (!EmitPropLHS(cx, pn, &op, bce, callContext))
return false;
if (op == JSOP_CALLPROP && Emit1(cx, bce, JSOP_DUP) < 0)
return false;
if (!EmitAtomOp(cx, pn, op, bce))
return false;
if (op == JSOP_CALLPROP && Emit1(cx, bce, JSOP_SWAP) < 0)
return false;
if (op == JSOP_CALLPROP && Emit1(cx, bce, JSOP_NOTEARG) < 0)
return false;
return true;
}
static bool
EmitPropIncDec(JSContext *cx, ParseNode *pn, BytecodeEmitter *bce)
{
JS_ASSERT(pn->pn_kid->getKind() == PNK_DOT);
bool post;
JSOp binop = GetIncDecInfo(pn->getKind(), &post);
JSOp get = JSOP_GETPROP;
if (!EmitPropLHS(cx, pn->pn_kid, &get, bce, false)) // OBJ
return false;
JS_ASSERT(get == JSOP_GETPROP);
if (Emit1(cx, bce, JSOP_DUP) < 0) // OBJ OBJ
return false;
if (!EmitAtomOp(cx, pn->pn_kid, JSOP_GETPROP, bce)) // OBJ V
return false;
if (Emit1(cx, bce, JSOP_POS) < 0) // OBJ N
return false;
if (post && Emit1(cx, bce, JSOP_DUP) < 0) // OBJ N? N
return false;
if (Emit1(cx, bce, JSOP_ONE) < 0) // OBJ N? N 1
return false;
if (Emit1(cx, bce, binop) < 0) // OBJ N? N+1
return false;
if (post) {
if (Emit2(cx, bce, JSOP_PICK, (jsbytecode)2) < 0) // N? N+1 OBJ
return false;
if (Emit1(cx, bce, JSOP_SWAP) < 0) // N? OBJ N+1
return false;
}
if (!EmitAtomOp(cx, pn->pn_kid, JSOP_SETPROP, bce)) // N? N+1
return false;
if (post && Emit1(cx, bce, JSOP_POP) < 0) // RESULT
return false;
return true;
}
static bool
EmitNameIncDec(JSContext *cx, ParseNode *pn, BytecodeEmitter *bce)
{
const JSCodeSpec *cs = &js_CodeSpec[pn->pn_kid->getOp()];
bool global = (cs->format & JOF_GNAME);
bool post;
JSOp binop = GetIncDecInfo(pn->getKind(), &post);
if (!EmitAtomOp(cx, pn->pn_kid, global ? JSOP_BINDGNAME : JSOP_BINDNAME, bce)) // OBJ
return false;
if (!EmitAtomOp(cx, pn->pn_kid, global ? JSOP_GETGNAME : JSOP_NAME, bce)) // OBJ V
return false;
if (Emit1(cx, bce, JSOP_POS) < 0) // OBJ N
return false;
if (post && Emit1(cx, bce, JSOP_DUP) < 0) // OBJ N? N
return false;
if (Emit1(cx, bce, JSOP_ONE) < 0) // OBJ N? N 1
return false;
if (Emit1(cx, bce, binop) < 0) // OBJ N? N+1
return false;
if (post) {
if (Emit2(cx, bce, JSOP_PICK, (jsbytecode)2) < 0) // N? N+1 OBJ
return false;
if (Emit1(cx, bce, JSOP_SWAP) < 0) // N? OBJ N+1
return false;
}
if (!EmitAtomOp(cx, pn->pn_kid, global ? JSOP_SETGNAME : JSOP_SETNAME, bce)) // N? N+1
return false;
if (post && Emit1(cx, bce, JSOP_POP) < 0) // RESULT
return false;
return true;
}
static bool
EmitElemOperands(JSContext *cx, ParseNode *pn, JSOp op, BytecodeEmitter *bce)
{
ParseNode *left, *right;
if (pn->isArity(PN_NAME)) {
/*
* Set left and right so pn appears to be a PNK_ELEM node, instead of
* a PNK_DOT node. See the PNK_FOR/IN case in EmitTree, and
* EmitDestructuringOps further below. In the destructuring case, the
* base expression (pn_expr) of the name may be null, which means we
* have to emit a JSOP_BINDNAME.
*/
left = pn->maybeExpr();
if (!left) {
left = bce->parser->handler.new_<NullaryNode>(
PNK_STRING, JSOP_BINDNAME, pn->pn_pos, pn->pn_atom);
if (!left)
return false;
}
right = bce->parser->handler.new_<NullaryNode>(
PNK_STRING, JSOP_STRING, pn->pn_pos, pn->pn_atom);
if (!right)
return false;
} else {
JS_ASSERT(pn->isArity(PN_BINARY));
left = pn->pn_left;
right = pn->pn_right;
}
if (op == JSOP_GETELEM && left->isKind(PNK_NAME) && right->isKind(PNK_NUMBER)) {
if (!BindNameToSlot(cx, bce, left))
return false;
}
if (!EmitTree(cx, bce, left))
return false;
if (op == JSOP_CALLELEM && Emit1(cx, bce, JSOP_DUP) < 0)
return false;
if (!EmitTree(cx, bce, right))
return false;
return true;
}
static bool
EmitElemOp(JSContext *cx, ParseNode *pn, JSOp op, BytecodeEmitter *bce)
{
return EmitElemOperands(cx, pn, op, bce) && EmitElemOpBase(cx, bce, op);
}
static bool
EmitElemIncDec(JSContext *cx, ParseNode *pn, BytecodeEmitter *bce)
{
JS_ASSERT(pn->pn_kid->getKind() == PNK_ELEM);
if (!EmitElemOperands(cx, pn->pn_kid, JSOP_GETELEM, bce))
return false;
bool post;
JSOp binop = GetIncDecInfo(pn->getKind(), &post);
/*
* We need to convert the key to an object id first, so that we do not do
* it inside both the GETELEM and the SETELEM.
*/
// OBJ KEY*
if (Emit1(cx, bce, JSOP_TOID) < 0) // OBJ KEY
return false;
if (Emit1(cx, bce, JSOP_DUP2) < 0) // OBJ KEY OBJ KEY
return false;
if (!EmitElemOpBase(cx, bce, JSOP_GETELEM)) // OBJ KEY V
return false;
if (Emit1(cx, bce, JSOP_POS) < 0) // OBJ KEY N
return false;
if (post && Emit1(cx, bce, JSOP_DUP) < 0) // OBJ KEY N? N
return false;
if (Emit1(cx, bce, JSOP_ONE) < 0) // OBJ KEY N? N 1
return false;
if (Emit1(cx, bce, binop) < 0) // OBJ KEY N? N+1
return false;
if (post) {
if (Emit2(cx, bce, JSOP_PICK, (jsbytecode)3) < 0) // KEY N N+1 OBJ
return false;
if (Emit2(cx, bce, JSOP_PICK, (jsbytecode)3) < 0) // N N+1 OBJ KEY
return false;
if (Emit2(cx, bce, JSOP_PICK, (jsbytecode)2) < 0) // N OBJ KEY N+1
return false;
}
if (!EmitElemOpBase(cx, bce, JSOP_SETELEM)) // N? N+1
return false;
if (post && Emit1(cx, bce, JSOP_POP) < 0) // RESULT
return false;
return true;
}
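/*
* Emit the most compact opcode available for a numeric literal (a sketch
* of the tiers below): JSOP_ZERO/JSOP_ONE for 0 and 1, then JSOP_INT8,
* JSOP_UINT16, JSOP_UINT24, and JSOP_INT32 for progressively larger
* integers, and finally a JSOP_DOUBLE constant-pool index for everything
* else.
*/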
static bool
EmitNumberOp(JSContext *cx, double dval, BytecodeEmitter *bce)
{
int32_t ival;
uint32_t u;
ptrdiff_t off;
jsbytecode *pc;
if (DoubleIsInt32(dval, &ival)) {
if (ival == 0)
return Emit1(cx, bce, JSOP_ZERO) >= 0;
if (ival == 1)
return Emit1(cx, bce, JSOP_ONE) >= 0;
if ((int)(int8_t)ival == ival)
return Emit2(cx, bce, JSOP_INT8, (jsbytecode)(int8_t)ival) >= 0;
u = (uint32_t)ival;
if (u < JS_BIT(16)) {
EMIT_UINT16_IMM_OP(JSOP_UINT16, u);
} else if (u < JS_BIT(24)) {
off = EmitN(cx, bce, JSOP_UINT24, 3);
if (off < 0)
return false;
pc = bce->code(off);
SET_UINT24(pc, u);
} else {
off = EmitN(cx, bce, JSOP_INT32, 4);
if (off < 0)
return false;
pc = bce->code(off);
SET_INT32(pc, ival);
}
return true;
}
if (!bce->constList.append(DoubleValue(dval)))
return false;
return EmitIndex32(cx, JSOP_DOUBLE, bce->constList.length() - 1, bce);
}
static inline void
SetJumpOffsetAt(BytecodeEmitter *bce, ptrdiff_t off)
{
SET_JUMP_OFFSET(bce->code(off), bce->offset() - off);
}
/*
* Using MOZ_NEVER_INLINE in here is a workaround for llvm.org/pr14047.
* LLVM is deciding to inline this function which uses a lot of stack space
* into EmitTree which is recursive and uses relatively little stack space.
*/
MOZ_NEVER_INLINE static bool
EmitSwitch(JSContext *cx, BytecodeEmitter *bce, ParseNode *pn)
{
JSOp switchOp;
bool hasDefault;
ptrdiff_t top, off, defaultOffset;
ParseNode *pn2, *pn3, *pn4;
int32_t low, high;
int noteIndex;
size_t switchSize;
jsbytecode *pc;
StmtInfoBCE stmtInfo(cx);
/* Try for most optimal, fall back if not dense ints. */
switchOp = JSOP_TABLESWITCH;
hasDefault = false;
defaultOffset = -1;
pn2 = pn->pn_right;
#if JS_HAS_BLOCK_SCOPE
/*
* If there are hoisted let declarations, their stack slots go under the
* discriminant's value so push their slots now and enter the block later.
*/
uint32_t blockObjCount = 0;
if (pn2->isKind(PNK_LEXICALSCOPE)) {
blockObjCount = pn2->pn_objbox->object->as<StaticBlockObject>().slotCount();
for (uint32_t i = 0; i < blockObjCount; ++i) {
if (Emit1(cx, bce, JSOP_UNDEFINED) < 0)
return false;
}
}
#endif
/* Push the discriminant. */
if (!EmitTree(cx, bce, pn->pn_left))
return false;
#if JS_HAS_BLOCK_SCOPE
if (pn2->isKind(PNK_LEXICALSCOPE)) {
PushBlockScopeBCE(bce, &stmtInfo, pn2->pn_objbox->object->as<StaticBlockObject>(), -1);
stmtInfo.type = STMT_SWITCH;
if (!EmitEnterBlock(cx, bce, pn2, JSOP_ENTERLET1))
return false;
}
#endif
/* Switch bytecodes run from here till end of final case. */
top = bce->offset();
#if !JS_HAS_BLOCK_SCOPE
PushStatementBCE(bce, &stmtInfo, STMT_SWITCH, top);
#else
if (pn2->isKind(PNK_STATEMENTLIST)) {
PushStatementBCE(bce, &stmtInfo, STMT_SWITCH, top);
} else {
/*
* Set the statement info record's idea of top, and reset top itself,
* to the current offset.
*/
stmtInfo.update = top = bce->offset();
/* Advance pn2 to refer to the switch case list. */
pn2 = pn2->expr();
}
#endif
uint32_t caseCount = pn2->pn_count;
uint32_t tableLength = 0;
ScopedJSFreePtr<ParseNode*> table(NULL);
if (caseCount > JS_BIT(16)) {
JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL,
JSMSG_TOO_MANY_CASES);
return false;
}
if (caseCount == 0 ||
(caseCount == 1 &&
(hasDefault = (pn2->pn_head->isKind(PNK_DEFAULT))))) {
caseCount = 0;
low = 0;
high = -1;
} else {
bool ok = true;
#define INTMAP_LENGTH 256
jsbitmap intmap_space[INTMAP_LENGTH];
jsbitmap *intmap = NULL;
int32_t intmap_bitlen = 0;
low = JSVAL_INT_MAX;
high = JSVAL_INT_MIN;
for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
if (pn3->isKind(PNK_DEFAULT)) {
hasDefault = true;
caseCount--; /* one of the "cases" was the default */
continue;
}
JS_ASSERT(pn3->isKind(PNK_CASE));
if (switchOp == JSOP_CONDSWITCH)
continue;
JS_ASSERT(switchOp == JSOP_TABLESWITCH);
pn4 = pn3->pn_left;
if (pn4->getKind() != PNK_NUMBER) {
switchOp = JSOP_CONDSWITCH;
continue;
}
int32_t i;
if (!DoubleIsInt32(pn4->pn_dval, &i)) {
switchOp = JSOP_CONDSWITCH;
continue;
}
if ((unsigned)(i + (int)JS_BIT(15)) >= (unsigned)JS_BIT(16)) {
switchOp = JSOP_CONDSWITCH;
continue;
}
if (i < low)
low = i;
if (high < i)
high = i;
/*
* Check for duplicates, which require a JSOP_CONDSWITCH.
* We bias i by 65536 if it's negative, and hope that's a rare
* case (because it requires a malloc'd bitmap).
*/
if (i < 0)
i += JS_BIT(16);
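// E.g. (illustrative) a 'case -1:' is tracked as bit 65535 of the bitmap.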
if (i >= intmap_bitlen) {
if (!intmap &&
i < (INTMAP_LENGTH << JS_BITS_PER_WORD_LOG2)) {
intmap = intmap_space;
intmap_bitlen = INTMAP_LENGTH << JS_BITS_PER_WORD_LOG2;
} else {
/* Just grab 8K for the worst-case bitmap. */
intmap_bitlen = JS_BIT(16);
intmap = cx->pod_malloc<jsbitmap>(JS_BIT(16) >> JS_BITS_PER_WORD_LOG2);
if (!intmap) {
JS_ReportOutOfMemory(cx);
return false;
}
}
memset(intmap, 0, intmap_bitlen >> JS_BITS_PER_BYTE_LOG2);
}
if (JS_TEST_BIT(intmap, i)) {
switchOp = JSOP_CONDSWITCH;
continue;
}
JS_SET_BIT(intmap, i);
}
if (intmap && intmap != intmap_space)
js_free(intmap);
if (!ok)
return false;
/*
* Compute table length and select condswitch instead if overlarge or
* more than half-sparse.
*/
if (switchOp == JSOP_TABLESWITCH) {
tableLength = (uint32_t)(high - low + 1);
if (tableLength >= JS_BIT(16) || tableLength > 2 * caseCount)
switchOp = JSOP_CONDSWITCH;
}
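/*
 * Illustrative: cases {1, 2, 3, 5} give tableLength 5 <= 2 * 4, so they
 * stay a tableswitch; cases {0, 100} give tableLength 101 > 2 * 2 and
 * fall back to condswitch.
 */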
}
/*
* The note has one or two offsets: first tells total switch code length;
* second (if condswitch) tells offset to first JSOP_CASE.
*/
if (switchOp == JSOP_CONDSWITCH) {
/* 0 bytes of immediate for unoptimized switch. */
switchSize = 0;
noteIndex = NewSrcNote3(cx, bce, SRC_CONDSWITCH, 0, 0);
} else {
JS_ASSERT(switchOp == JSOP_TABLESWITCH);
/* 3 offsets (len, low, high) before the table, 1 per entry. */
switchSize = (size_t)(JUMP_OFFSET_LEN * (3 + tableLength));
noteIndex = NewSrcNote2(cx, bce, SRC_TABLESWITCH, 0);
}
if (noteIndex < 0)
return false;
/* Emit switchOp followed by switchSize bytes of jump or lookup table. */
if (EmitN(cx, bce, switchOp, switchSize) < 0)
return false;
off = -1;
if (switchOp == JSOP_CONDSWITCH) {
int caseNoteIndex = -1;
bool beforeCases = true;
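/*
 * Condswitch shape (illustrative):
 *   <case1 expr>; case -> body1; <case2 expr>; case -> body2; ...;
 *   default -> default (or end); body1: ...; body2: ...
 * The discriminant is already on the stack; each JSOP_CASE compares it
 * against the just-evaluated case value and jumps to its body on a match.
 */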
/* Emit code for evaluating cases and jumping to case statements. */
for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
pn4 = pn3->pn_left;
if (pn4 && !EmitTree(cx, bce, pn4))
return false;
if (caseNoteIndex >= 0) {
/* off is the previous JSOP_CASE's bytecode offset. */
if (!SetSrcNoteOffset(cx, bce, (unsigned)caseNoteIndex, 0, bce->offset() - off))
return false;
}
if (!pn4) {
JS_ASSERT(pn3->isKind(PNK_DEFAULT));
continue;
}
caseNoteIndex = NewSrcNote2(cx, bce, SRC_NEXTCASE, 0);
if (caseNoteIndex < 0)
return false;
off = EmitJump(cx, bce, JSOP_CASE, 0);
if (off < 0)
return false;
pn3->pn_offset = off;
if (beforeCases) {
unsigned noteCount, noteCountDelta;
/* Switch note's second offset is to first JSOP_CASE. */
noteCount = bce->notes().length();
if (!SetSrcNoteOffset(cx, bce, (unsigned)noteIndex, 1, off - top))
return false;
noteCountDelta = bce->notes().length() - noteCount;
if (noteCountDelta != 0)
caseNoteIndex += noteCountDelta;
beforeCases = false;
}
}
/*
* If we didn't have an explicit default (which could fall in between
* cases, preventing us from fusing this SetSrcNoteOffset with the call
* in the loop above), link the last case to the implicit default for
* the decompiler.
*/
if (!hasDefault &&
caseNoteIndex >= 0 &&
!SetSrcNoteOffset(cx, bce, (unsigned)caseNoteIndex, 0, bce->offset() - off))
{
return false;
}
/* Emit default even if no explicit default statement. */
defaultOffset = EmitJump(cx, bce, JSOP_DEFAULT, 0);
if (defaultOffset < 0)
return false;
} else {
JS_ASSERT(switchOp == JSOP_TABLESWITCH);
pc = bce->code(top + JUMP_OFFSET_LEN);
/* Fill in switch bounds, which we know fit in 16-bit offsets. */
SET_JUMP_OFFSET(pc, low);
pc += JUMP_OFFSET_LEN;
SET_JUMP_OFFSET(pc, high);
pc += JUMP_OFFSET_LEN;
/*
* Use malloc to avoid arena bloat for programs with many switches.
* ScopedJSFreePtr takes care of freeing it on exit.
*/
if (tableLength != 0) {
table = cx->pod_calloc<ParseNode*>(tableLength);
if (!table)
return false;
for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
if (pn3->isKind(PNK_DEFAULT))
continue;
JS_ASSERT(pn3->isKind(PNK_CASE));
pn4 = pn3->pn_left;
JS_ASSERT(pn4->getKind() == PNK_NUMBER);
int32_t i = int32_t(pn4->pn_dval);
JS_ASSERT(double(i) == pn4->pn_dval);
i -= low;
JS_ASSERT(uint32_t(i) < tableLength);
table[i] = pn3;
}
}
}
/* Emit code for each case's statements, copying pn_offset up to pn3. */
for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
if (switchOp == JSOP_CONDSWITCH && !pn3->isKind(PNK_DEFAULT))
SetJumpOffsetAt(bce, pn3->pn_offset);
pn4 = pn3->pn_right;
if (!EmitTree(cx, bce, pn4))
return false;
pn3->pn_offset = pn4->pn_offset;
if (pn3->isKind(PNK_DEFAULT))
off = pn3->pn_offset - top;
}
if (!hasDefault) {
/* If no default case, offset for default is to end of switch. */
off = bce->offset() - top;
}
/* We'd better have set "off" by now. */
JS_ASSERT(off != -1);
/* Set the default offset (to end of switch if no default). */
if (switchOp == JSOP_CONDSWITCH) {
pc = NULL;
JS_ASSERT(defaultOffset != -1);
SET_JUMP_OFFSET(bce->code(defaultOffset), off - (defaultOffset - top));
} else {
pc = bce->code(top);
SET_JUMP_OFFSET(pc, off);
pc += JUMP_OFFSET_LEN;
}
/* Set the SRC_SWITCH note's offset operand to tell end of switch. */
off = bce->offset() - top;
if (!SetSrcNoteOffset(cx, bce, (unsigned)noteIndex, 0, off))
return false;
if (switchOp == JSOP_TABLESWITCH) {
/* Skip over the already-initialized switch bounds. */
pc += 2 * JUMP_OFFSET_LEN;
/* Fill in the jump table, if there is one. */
for (uint32_t i = 0; i < tableLength; i++) {
pn3 = table[i];
off = pn3 ? pn3->pn_offset - top : 0;
SET_JUMP_OFFSET(pc, off);
pc += JUMP_OFFSET_LEN;
}
}
if (!PopStatementBCE(cx, bce))
return false;
#if JS_HAS_BLOCK_SCOPE
if (pn->pn_right->isKind(PNK_LEXICALSCOPE))
EMIT_UINT16_IMM_OP(JSOP_LEAVEBLOCK, blockObjCount);
#endif
return true;
}
bool
frontend::EmitFunctionScript(JSContext *cx, BytecodeEmitter *bce, ParseNode *body)
{
/*
* The decompiler has assumptions about what may occur immediately after
* script->main (e.g., in the case of destructuring params). Thus, put the
* following ops into the range [script->code, script->main). Note:
* execution starts from script->code, so this has no semantic effect.
*/
FunctionBox *funbox = bce->sc->asFunctionBox();
if (funbox->argumentsHasLocalBinding()) {
JS_ASSERT(bce->offset() == 0); /* See JSScript::argumentsBytecode. */
bce->switchToProlog();
if (Emit1(cx, bce, JSOP_ARGUMENTS) < 0)
return false;
InternalBindingsHandle bindings(bce->script, &bce->script->bindings);
unsigned varIndex = Bindings::argumentsVarIndex(cx, bindings);
if (bce->script->varIsAliased(varIndex)) {
ScopeCoordinate sc;
sc.hops = 0;
JS_ALWAYS_TRUE(LookupAliasedName(bce->script, cx->names().arguments, &sc.slot));
if (!EmitAliasedVarOp(cx, JSOP_SETALIASEDVAR, sc, bce))
return false;
} else {
if (!EmitUnaliasedVarOp(cx, JSOP_SETLOCAL, varIndex, bce))
return false;
}
if (Emit1(cx, bce, JSOP_POP) < 0)
return false;
bce->switchToMain();
}
if (funbox->isGenerator()) {
bce->switchToProlog();
if (Emit1(cx, bce, JSOP_GENERATOR) < 0)
return false;
bce->switchToMain();
}
/*
* Emit a prologue for run-once scripts which will deoptimize JIT code if
* the script ends up running multiple times via foo.caller related
* shenanigans.
*/
bool runOnce = bce->parent &&
bce->parent->emittingRunOnceLambda &&
!funbox->argumentsHasLocalBinding() &&
!funbox->isGenerator();
if (runOnce) {
bce->switchToProlog();
if (Emit1(cx, bce, JSOP_RUNONCE) < 0)
return false;
bce->switchToMain();
}
if (!EmitTree(cx, bce, body))
return false;
/*
* Always end the script with a JSOP_STOP. Some other parts of the codebase
* depend on this opcode, e.g. js_InternalInterpret.
*/
if (Emit1(cx, bce, JSOP_STOP) < 0)
return false;
if (!JSScript::fullyInitFromEmitter(cx, bce->script, bce))
return false;
/*
* If this function is only expected to run once, mark the script so that
* initializers created within it may be given more precise types.
*/
if (runOnce) {
bce->script->treatAsRunOnce = true;
JS_ASSERT(!bce->script->hasRunOnce);
}
/* Link the function to its script so the debugger sees a valid fun->script(). */
RootedFunction fun(cx, bce->script->function());
JS_ASSERT(fun->isInterpreted());
fun->setScript(bce->script);
bce->tellDebuggerAboutCompiledScript(cx);
return true;
}
static bool
MaybeEmitVarDecl(JSContext *cx, BytecodeEmitter *bce, JSOp prologOp, ParseNode *pn,
jsatomid *result)
{
jsatomid atomIndex;
if (!pn->pn_cookie.isFree()) {
atomIndex = pn->pn_cookie.slot();
} else {
if (!bce->makeAtomIndex(pn->pn_atom, &atomIndex))
return false;
}
if (JOF_OPTYPE(pn->getOp()) == JOF_ATOM &&
(!bce->sc->isFunctionBox() || bce->sc->asFunctionBox()->isHeavyweight()))
{
bce->switchToProlog();
if (!UpdateSourceCoordNotes(cx, bce, pn->pn_pos.begin))
return false;
if (!EmitIndexOp(cx, prologOp, atomIndex, bce))
return false;
bce->switchToMain();
}
if (result)
*result = atomIndex;
return true;
}
/*
* This enum tells EmitVariables and the destructuring functions how to emit
* the given Parser::variables parse tree. In the base case, DefineVars, the
* caller only wants variables to be defined in the prologue (if necessary).
* For PushInitialValues, variable initializer expressions are evaluated and
* left on the stack. For InitializeVars, the initializer expressions' values
* are assigned (to local variables) and popped.
*/
enum VarEmitOption
{
DefineVars = 0,
PushInitialValues = 1,
InitializeVars = 2
};
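/*
 * Illustrative examples of the three options: 'var x = 1;' as a plain
 * statement uses InitializeVars; the head of 'for (var x in o)' uses
 * DefineVars, since the loop code assigns each enumeration value; a let
 * head such as 'let (x = 1) ...' uses PushInitialValues so each initial
 * value stays on the stack in its block slot.
 */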
#if JS_HAS_DESTRUCTURING
typedef bool
(*DestructuringDeclEmitter)(JSContext *cx, BytecodeEmitter *bce, JSOp prologOp, ParseNode *pn);
static bool
EmitDestructuringDecl(JSContext *cx, BytecodeEmitter *bce, JSOp prologOp, ParseNode *pn)
{
JS_ASSERT(pn->isKind(PNK_NAME));
if (!BindNameToSlot(cx, bce, pn))
return false;
JS_ASSERT(!pn->isOp(JSOP_CALLEE));
return MaybeEmitVarDecl(cx, bce, prologOp, pn, NULL);
}
static bool
EmitDestructuringDecls(JSContext *cx, BytecodeEmitter *bce, JSOp prologOp, ParseNode *pn)
{
ParseNode *pn2, *pn3;
DestructuringDeclEmitter emitter;
if (pn->isKind(PNK_ARRAY)) {
for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next) {
if (pn2->isKind(PNK_ELISION))
continue;
emitter = (pn2->isKind(PNK_NAME))
? EmitDestructuringDecl
: EmitDestructuringDecls;
if (!emitter(cx, bce, prologOp, pn2))
return false;
}
} else {
JS_ASSERT(pn->isKind(PNK_OBJECT));
for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next) {
pn3 = pn2->pn_right;
emitter = pn3->isKind(PNK_NAME) ? EmitDestructuringDecl : EmitDestructuringDecls;
if (!emitter(cx, bce, prologOp, pn3))
return false;
}
}
return true;
}
static bool
EmitDestructuringOpsHelper(JSContext *cx, BytecodeEmitter *bce, ParseNode *pn,
VarEmitOption emitOption);
/*
* EmitDestructuringLHS assumes the to-be-destructured value has been pushed on
* the stack and emits code to destructure a single lhs expression (either a
* name or a compound []/{} expression).
*
* If emitOption is InitializeVars, the to-be-destructured value is assigned to
* locals and ultimately the initial slot is popped (-1 total depth change).
*
* If emitOption is PushInitialValues, the to-be-destructured value is replaced
* with the initial values of the N (where 0 <= N) variables assigned in the
* lhs expression. (Same post-condition as EmitDestructuringOpsHelper)
*/
static bool
EmitDestructuringLHS(JSContext *cx, BytecodeEmitter *bce, ParseNode *pn, VarEmitOption emitOption)
{
JS_ASSERT(emitOption != DefineVars);
/*
* Now emit the lvalue opcode sequence. If the lvalue is a nested
* destructuring initialiser-form, call ourselves to handle it, then
* pop the matched value. Otherwise emit an lvalue bytecode sequence
* ending with a JSOP_ENUMELEM or equivalent op.
*/
if (pn->isKind(PNK_ARRAY) || pn->isKind(PNK_OBJECT)) {
if (!EmitDestructuringOpsHelper(cx, bce, pn, emitOption))
return false;
if (emitOption == InitializeVars) {
/*
* Per its post-condition, EmitDestructuringOpsHelper has left the
* to-be-destructured value on top of the stack.
*/
if (Emit1(cx, bce, JSOP_POP) < 0)
return false;
}
} else {
if (emitOption == PushInitialValues) {
/*
* The lhs is a simple name so the to-be-destructured value is
* its initial value and there is nothing to do.
*/
JS_ASSERT(pn->getOp() == JSOP_GETLOCAL);
JS_ASSERT(pn->pn_dflags & PND_BOUND);
return true;
}
/* All paths below must pop after assigning to the lhs. */
if (pn->isKind(PNK_NAME)) {
if (!BindNameToSlot(cx, bce, pn))
return false;
/* Allow 'const [x,y] = o', make 'const x,y; [x,y] = o' a nop. */
if (pn->isConst() && !pn->isDefn())
return Emit1(cx, bce, JSOP_POP) >= 0;
}
switch (pn->getOp()) {
case JSOP_SETNAME:
case JSOP_SETGNAME:
/*
* NB: pn is a PN_NAME node, not a PN_BINARY. Nevertheless,
* we want to emit JSOP_ENUMELEM, which has format JOF_ELEM.
* So here and for JSOP_ENUMCONSTELEM, we use EmitElemOp.
*/
if (!EmitElemOp(cx, pn, JSOP_ENUMELEM, bce))
return false;
break;
case JSOP_SETCONST:
if (!EmitElemOp(cx, pn, JSOP_ENUMCONSTELEM, bce))
return false;
break;
case JSOP_SETLOCAL:
case JSOP_SETARG:
if (!EmitVarOp(cx, pn, pn->getOp(), bce))
return false;
if (Emit1(cx, bce, JSOP_POP) < 0)
return false;
break;
case JSOP_CALL:
case JSOP_EVAL:
case JSOP_FUNCALL:
case JSOP_FUNAPPLY:
JS_ASSERT(pn->pn_xflags & PNX_SETCALL);
if (!EmitTree(cx, bce, pn))
return false;
/*
* We just emitted JSOP_SETCALL which will always throw.
* Pop the call return value and the RHS.
*/
if (Emit1(cx, bce, JSOP_POP) < 0)
return false;
if (Emit1(cx, bce, JSOP_POP) < 0)
return false;
break;
default:
{
if (!EmitTree(cx, bce, pn))
return false;
if (!EmitElemOpBase(cx, bce, JSOP_ENUMELEM))
return false;
break;
}
case JSOP_ENUMELEM:
JS_ASSERT(0);
}
}
return true;
}
/*
* Recursive helper for EmitDestructuringOps.
* EmitDestructuringOpsHelper assumes the to-be-destructured value has been
* pushed on the stack and emits code to destructure each part of a [] or {}
* lhs expression.
*
* If emitOption is InitializeVars, the initial to-be-destructured value is
* left untouched on the stack and the overall depth is not changed.
*
* If emitOption is PushInitialValues, the to-be-destructured value is replaced
* with the initial values of the N (where 0 <= N) variables assigned in the
* lhs expression. (Same post-condition as EmitDestructuringLHS)
*/
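/*
 * Sketch of the per-element sequence for InitializeVars (illustrative, for
 * an array pattern '[a, b]' destructuring a value V):
 *   dup            V V
 *   <index>        V V i
 *   getelem        V V[i]
 *   <assign; pop>  V
 * V stays on top for the next element and the overall depth is unchanged.
 */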
static bool
EmitDestructuringOpsHelper(JSContext *cx, BytecodeEmitter *bce, ParseNode *pn,
VarEmitOption emitOption)
{
JS_ASSERT(emitOption != DefineVars);
unsigned index;
ParseNode *pn2, *pn3;
bool doElemOp;
#ifdef DEBUG
int stackDepth = bce->stackDepth;
JS_ASSERT(stackDepth != 0);
JS_ASSERT(pn->isArity(PN_LIST));
JS_ASSERT(pn->isKind(PNK_ARRAY) || pn->isKind(PNK_OBJECT));
#endif
if (pn->pn_count == 0) {
/* Emit a DUP;POP sequence for the decompiler. */
if (Emit1(cx, bce, JSOP_DUP) < 0 || Emit1(cx, bce, JSOP_POP) < 0)
return false;
}
index = 0;
for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next) {
/* Duplicate the value being destructured to use as a reference base. */
if (Emit1(cx, bce, JSOP_DUP) < 0)
return false;
/*
* Now push the property name currently being matched, which is either
* the array initialiser's current index, or the current property name
* "label" on the left of a colon in the object initialiser. Set pn3
* to the lvalue node, which is in the value-initializing position.
*/
doElemOp = true;
if (pn->isKind(PNK_ARRAY)) {
if (!EmitNumberOp(cx, index, bce))
return false;
pn3 = pn2;
} else {
JS_ASSERT(pn->isKind(PNK_OBJECT));
JS_ASSERT(pn2->isKind(PNK_COLON));
pn3 = pn2->pn_left;
if (pn3->isKind(PNK_NUMBER)) {
if (!EmitNumberOp(cx, pn3->pn_dval, bce))
return false;
} else {
JS_ASSERT(pn3->isKind(PNK_STRING) || pn3->isKind(PNK_NAME));
if (!EmitAtomOp(cx, pn3, JSOP_GETPROP, bce))
return false;
doElemOp = false;
}
pn3 = pn2->pn_right;
}
if (doElemOp) {
/*
* Ok, get the value of the matching property name. This leaves
* that value on top of the value being destructured, so the stack
* is one deeper than when we started.
*/
if (!EmitElemOpBase(cx, bce, JSOP_GETELEM))
return false;
JS_ASSERT(bce->stackDepth >= stackDepth + 1);
}
/* Elision node makes a hole in the array destructurer. */
if (pn3->isKind(PNK_ELISION)) {
JS_ASSERT(pn->isKind(PNK_ARRAY));
JS_ASSERT(pn2 == pn3);
if (Emit1(cx, bce, JSOP_POP) < 0)
return false;
} else {
int depthBefore = bce->stackDepth;
if (!EmitDestructuringLHS(cx, bce, pn3, emitOption))
return false;
if (emitOption == PushInitialValues) {
/*
* After '[x,y]' in 'let ([[x,y], z] = o)', the stack is
* | to-be-decompiled-value | x | y |
* The goal is:
* | x | y | z |
* so emit a pick to produce the intermediate state
* | x | y | to-be-decompiled-value |
* before destructuring z. This gives the loop invariant that
* the to-be-decompiled-value is always on top of the stack.
*/
JS_ASSERT((bce->stackDepth - depthBefore) >= -1);
unsigned pickDistance = (unsigned)((bce->stackDepth + 1) - depthBefore);
if (pickDistance > 0) {
if (pickDistance > UINT8_MAX) {
bce->reportError(pn3, JSMSG_TOO_MANY_LOCALS);
return false;
}
if (Emit2(cx, bce, JSOP_PICK, (jsbytecode)pickDistance) < 0)
return false;
}
}
}
++index;
}
if (emitOption == PushInitialValues) {
/*
* Per the above loop invariant, to-be-decompiled-value is at the top
* of the stack. To achieve the post-condition, pop it.
*/
if (Emit1(cx, bce, JSOP_POP) < 0)
return false;
}
return true;
}
static bool
EmitDestructuringOps(JSContext *cx, BytecodeEmitter *bce, ParseNode *pn, bool isLet = false)
{
/*
* Call our recursive helper to emit the destructuring assignments and
* related stack manipulations.
*/
VarEmitOption emitOption = isLet ? PushInitialValues : InitializeVars;
return EmitDestructuringOpsHelper(cx, bce, pn, emitOption);
}
static bool
EmitGroupAssignment(JSContext *cx, BytecodeEmitter *bce, JSOp prologOp,
ParseNode *lhs, ParseNode *rhs)
{
unsigned depth, limit, i, nslots;
ParseNode *pn;
depth = limit = (unsigned) bce->stackDepth;
for (pn = rhs->pn_head; pn; pn = pn->pn_next) {
if (limit == JS_BIT(16)) {
bce->reportError(rhs, JSMSG_ARRAY_INIT_TOO_BIG);
return false;
}
/* MaybeEmitGroupAssignment won't call us if rhs is holey. */
JS_ASSERT(!pn->isKind(PNK_ELISION));
if (!EmitTree(cx, bce, pn))
return false;
++limit;
}
i = depth;
for (pn = lhs->pn_head; pn; pn = pn->pn_next, ++i) {
/* MaybeEmitGroupAssignment requires lhs->pn_count <= rhs->pn_count. */
JS_ASSERT(i < limit);
int slot = AdjustBlockSlot(cx, bce, i);
if (slot < 0)
return false;
if (!EmitUnaliasedVarOp(cx, JSOP_GETLOCAL, slot, bce))
return false;
if (pn->isKind(PNK_ELISION)) {
if (Emit1(cx, bce, JSOP_POP) < 0)
return false;
} else {
if (!EmitDestructuringLHS(cx, bce, pn, InitializeVars))
return false;
}
}
nslots = limit - depth;
EMIT_UINT16_IMM_OP(JSOP_POPN, nslots);
bce->stackDepth = (unsigned) depth;
return true;
}
enum GroupOption { GroupIsDecl, GroupIsNotDecl };
/*
* Helper called with pop out param initialized to a JSOP_POP* opcode. If we
* can emit a group assignment sequence, which results in 0 stack depth delta,
* we set *pop to JSOP_NOP so callers can veto emitting pn followed by a pop.
*/
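/*
 * E.g. (illustrative) '[x, y] = [a, b]' can skip allocating the rhs array:
 * push a, push b, then fetch each temporary with getlocal, assign it to the
 * corresponding lhs element, and popn the temporaries, for a net stack
 * depth change of 0.
 */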
static bool
MaybeEmitGroupAssignment(JSContext *cx, BytecodeEmitter *bce, JSOp prologOp, ParseNode *pn,
GroupOption groupOption, JSOp *pop)
{
JS_ASSERT(pn->isKind(PNK_ASSIGN));
JS_ASSERT(pn->isOp(JSOP_NOP));
JS_ASSERT(*pop == JSOP_POP || *pop == JSOP_POPV);
ParseNode *lhs = pn->pn_left;
ParseNode *rhs = pn->pn_right;
if (lhs->isKind(PNK_ARRAY) && rhs->isKind(PNK_ARRAY) &&
!(rhs->pn_xflags & PNX_SPECIALARRAYINIT) &&
lhs->pn_count <= rhs->pn_count)
{
if (groupOption == GroupIsDecl && !EmitDestructuringDecls(cx, bce, prologOp, lhs))
return false;
if (!EmitGroupAssignment(cx, bce, prologOp, lhs, rhs))
return false;
*pop = JSOP_NOP;
}
return true;
}
/*
* Like MaybeEmitGroupAssignment, but for 'let ([x,y] = [a,b]) ...'.
*
* Instead of issuing a sequence |dup|eval-rhs|set-lhs|pop| (which doesn't work
* since the bound vars don't yet have slots), just eval/push each rhs element,
* as EmitLet would do for 'let (x = a, y = b) ...'. While shorter, simpler, and
* more efficient than MaybeEmitGroupAssignment, this is harder to decompile,
* so we restrict ourselves to cases where the lhs and rhs are in 1:1
* correspondence and the lhs elements are simple names.
*/
static bool
MaybeEmitLetGroupDecl(JSContext *cx, BytecodeEmitter *bce, ParseNode *pn, JSOp *pop)
{
JS_ASSERT(pn->isKind(PNK_ASSIGN));
JS_ASSERT(pn->isOp(JSOP_NOP));
JS_ASSERT(*pop == JSOP_POP || *pop == JSOP_POPV);
ParseNode *lhs = pn->pn_left;
ParseNode *rhs = pn->pn_right;
if (lhs->isKind(PNK_ARRAY) && rhs->isKind(PNK_ARRAY) &&
!(rhs->pn_xflags & PNX_SPECIALARRAYINIT) &&
!(lhs->pn_xflags & PNX_SPECIALARRAYINIT) &&
lhs->pn_count == rhs->pn_count)
{
for (ParseNode *l = lhs->pn_head; l; l = l->pn_next) {
if (l->getOp() != JSOP_SETLOCAL)
return true;
}
for (ParseNode *r = rhs->pn_head; r; r = r->pn_next) {
if (!EmitTree(cx, bce, r))
return false;
}
*pop = JSOP_NOP;
}
return true;
}
#endif /* JS_HAS_DESTRUCTURING */
static bool
EmitVariables(JSContext *cx, BytecodeEmitter *bce, ParseNode *pn, VarEmitOption emitOption,
bool isLet = false)
{
JS_ASSERT(pn->isArity(PN_LIST));
JS_ASSERT(isLet == (emitOption == PushInitialValues));
ParseNode *next;
for (ParseNode *pn2 = pn->pn_head; ; pn2 = next) {
next = pn2->pn_next;
ParseNode *pn3;
if (!pn2->isKind(PNK_NAME)) {
#if JS_HAS_DESTRUCTURING
if (pn2->isKind(PNK_ARRAY) || pn2->isKind(PNK_OBJECT)) {
/*
* Emit variable binding ops, but not destructuring ops. The
* parser (see Parser::variables) has ensured that our caller
* will be the PNK_FOR/PNK_FORIN case in EmitTree, and that
* case will emit the destructuring code only after emitting an
* enumerating opcode and a branch that tests whether the
* enumeration ended.
*/
JS_ASSERT(emitOption == DefineVars);
JS_ASSERT(pn->pn_count == 1);
if (!EmitDestructuringDecls(cx, bce, pn->getOp(), pn2))
return false;
break;
}
#endif
/*
* A destructuring initialiser assignment preceded by var will
* never occur to the left of 'in' in a for-in loop. As with 'for
* (var x = i in o)...', this will cause the entire 'var [a, b] =
* i' to be hoisted out of the loop.
*/
JS_ASSERT(pn2->isKind(PNK_ASSIGN));
JS_ASSERT(pn2->isOp(JSOP_NOP));
JS_ASSERT(emitOption != DefineVars);
/*
* To allow the front end to rewrite var f = x; as f = x; when a
* function f(){} precedes the var, detect simple name assignment
* here and initialize the name.
*/
#if !JS_HAS_DESTRUCTURING
JS_ASSERT(pn2->pn_left->isKind(PNK_NAME));
#else
if (pn2->pn_left->isKind(PNK_NAME))
#endif
{
pn3 = pn2->pn_right;
pn2 = pn2->pn_left;
goto do_name;
}
#if JS_HAS_DESTRUCTURING
ptrdiff_t stackDepthBefore = bce->stackDepth;
JSOp op = JSOP_POP;
if (pn->pn_count == 1) {
/*
* If this is the only destructuring assignment in the list,
* try to optimize to a group assignment. If we're in a let
* head, pass JSOP_POP rather than the pseudo-prolog JSOP_NOP
* in pn->pn_op, to suppress a second (and misplaced) 'let'.
*/
JS_ASSERT(!pn2->pn_next);
if (isLet) {
if (!MaybeEmitLetGroupDecl(cx, bce, pn2, &op))
return false;
} else {
if (!MaybeEmitGroupAssignment(cx, bce, pn->getOp(), pn2, GroupIsDecl, &op))
return false;
}
}
if (op == JSOP_NOP) {
pn->pn_xflags = (pn->pn_xflags & ~PNX_POPVAR) | PNX_GROUPINIT;
} else {
pn3 = pn2->pn_left;
if (!EmitDestructuringDecls(cx, bce, pn->getOp(), pn3))
return false;
if (!EmitTree(cx, bce, pn2->pn_right))
return false;
if (!EmitDestructuringOps(cx, bce, pn3, isLet))
return false;
}
ptrdiff_t stackDepthAfter = bce->stackDepth;
/* Give let ([] = x) a slot (see CheckDestructuring). */
JS_ASSERT(stackDepthBefore <= stackDepthAfter);
if (isLet && stackDepthBefore == stackDepthAfter) {
if (Emit1(cx, bce, JSOP_UNDEFINED) < 0)
return false;
}
/* If we are not initializing, nothing to pop. */
if (emitOption != InitializeVars) {
if (next)
continue;
break;
}
goto emit_note_pop;
#endif
}
/*
* Load initializer early to share code above that jumps to do_name.
* NB: if this var redeclares an existing binding, then pn2 is linked
* on its definition's use-chain and pn_expr has been overlayed with
* pn_lexdef.
*/
pn3 = pn2->maybeExpr();
do_name:
if (!BindNameToSlot(cx, bce, pn2))
return false;
JSOp op;
op = pn2->getOp();
JS_ASSERT(op != JSOP_CALLEE);
JS_ASSERT(!pn2->pn_cookie.isFree() || !pn->isOp(JSOP_NOP));
jsatomid atomIndex;
if (!MaybeEmitVarDecl(cx, bce, pn->getOp(), pn2, &atomIndex))
return false;
if (pn3) {
JS_ASSERT(emitOption != DefineVars);
if (op == JSOP_SETNAME || op == JSOP_SETGNAME || op == JSOP_SETINTRINSIC) {
JS_ASSERT(emitOption != PushInitialValues);
JSOp bindOp;
if (op == JSOP_SETNAME)
bindOp = JSOP_BINDNAME;
else if (op == JSOP_SETGNAME)
bindOp = JSOP_BINDGNAME;
else
bindOp = JSOP_BINDINTRINSIC;
if (!EmitIndex32(cx, bindOp, atomIndex, bce))
return false;
}
bool oldEmittingForInit = bce->emittingForInit;
bce->emittingForInit = false;
if (!EmitTree(cx, bce, pn3))
return false;
bce->emittingForInit = oldEmittingForInit;
} else if (isLet) {
/* JSOP_ENTERLETx expects at least 1 slot to have been pushed. */
if (Emit1(cx, bce, JSOP_UNDEFINED) < 0)
return false;
}
/* If we are not initializing, nothing to pop. */
if (emitOption != InitializeVars) {
if (next)
continue;
break;
}
JS_ASSERT_IF(pn2->isDefn(), pn3 == pn2->pn_expr);
if (!pn2->pn_cookie.isFree()) {
if (!EmitVarOp(cx, pn2, op, bce))
return false;
} else {
if (!EmitIndexOp(cx, op, atomIndex, bce))
return false;
}
#if JS_HAS_DESTRUCTURING
emit_note_pop:
#endif
if (!next)
break;
if (Emit1(cx, bce, JSOP_POP) < 0)
return false;
}
if (pn->pn_xflags & PNX_POPVAR) {
if (Emit1(cx, bce, JSOP_POP) < 0)
return false;
}
return true;
}
static bool
EmitAssignment(JSContext *cx, BytecodeEmitter *bce, ParseNode *lhs, JSOp op, ParseNode *rhs)
{
/*
* Check left operand type and generate specialized code for it.
* Specialize to avoid ECMA "reference type" values on the operand
* stack, which impose pervasive runtime "GetValue" costs.
*/
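/*
 * E.g. (illustrative) the compound assignment 'a.b += 1' becomes roughly:
 *   <a>; dup; getprop b; one; add; setprop b
 * where the dup/getprop pair fetches the old value without re-evaluating a.
 */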
jsatomid atomIndex = (jsatomid) -1;
jsbytecode offset = 1;
switch (lhs->getKind()) {
case PNK_NAME:
if (!BindNameToSlot(cx, bce, lhs))
return false;
if (lhs->pn_cookie.isFree()) {
if (!bce->makeAtomIndex(lhs->pn_atom, &atomIndex))
return false;
if (!lhs->isConst()) {
JSOp bindOp;
if (lhs->isOp(JSOP_SETNAME))
bindOp = JSOP_BINDNAME;
else if (lhs->isOp(JSOP_SETGNAME))
bindOp = JSOP_BINDGNAME;
else
bindOp = JSOP_BINDINTRINSIC;
if (!EmitIndex32(cx, bindOp, atomIndex, bce))
return false;
offset++;
}
}
break;
case PNK_DOT:
if (!EmitTree(cx, bce, lhs->expr()))
return false;
offset++;
if (!bce->makeAtomIndex(lhs->pn_atom, &atomIndex))
return false;
break;
case PNK_ELEM:
JS_ASSERT(lhs->isArity(PN_BINARY));
if (!EmitTree(cx, bce, lhs->pn_left))
return false;
if (!EmitTree(cx, bce, lhs->pn_right))
return false;
offset += 2;
break;
#if JS_HAS_DESTRUCTURING
case PNK_ARRAY:
case PNK_OBJECT:
break;
#endif
case PNK_CALL:
JS_ASSERT(lhs->pn_xflags & PNX_SETCALL);
if (!EmitTree(cx, bce, lhs))
return false;
if (Emit1(cx, bce, JSOP_POP) < 0)
return false;
break;
default:
JS_ASSERT(0);
}
if (op != JSOP_NOP) {
JS_ASSERT(rhs);
switch (lhs->getKind()) {
case PNK_NAME:
if (lhs->isConst()) {
if (lhs->isOp(JSOP_CALLEE)) {
if (Emit1(cx, bce, JSOP_CALLEE) < 0)
return false;
} else if (lhs->isOp(JSOP_NAME) || lhs->isOp(JSOP_GETGNAME)) {
if (!EmitIndex32(cx, lhs->getOp(), atomIndex, bce))
return false;
} else {
JS_ASSERT(JOF_OPTYPE(lhs->getOp()) != JOF_ATOM);
if (!EmitVarOp(cx, lhs, lhs->getOp(), bce))
return false;
}
} else if (lhs->isOp(JSOP_SETNAME)) {
if (Emit1(cx, bce, JSOP_DUP) < 0)
return false;
if (!EmitIndex32(cx, JSOP_GETXPROP, atomIndex, bce))
return false;
} else if (lhs->isOp(JSOP_SETGNAME)) {
JS_ASSERT(lhs->pn_cookie.isFree());
if (!EmitAtomOp(cx, lhs, JSOP_GETGNAME, bce))
return false;
} else if (lhs->isOp(JSOP_SETINTRINSIC)) {
JS_ASSERT(lhs->pn_cookie.isFree());
if (!EmitAtomOp(cx, lhs, JSOP_GETINTRINSIC, bce))
return false;
} else {
JSOp op;
switch (lhs->getOp()) {
case JSOP_SETARG: op = JSOP_GETARG; break;
case JSOP_SETLOCAL: op = JSOP_GETLOCAL; break;
case JSOP_SETALIASEDVAR: op = JSOP_GETALIASEDVAR; break;
default: JS_NOT_REACHED("Bad op");
}
if (!EmitVarOp(cx, lhs, op, bce))
return false;
}
break;
case PNK_DOT: {
if (Emit1(cx, bce, JSOP_DUP) < 0)
return false;
bool isLength = (lhs->pn_atom == cx->names().length);
if (!EmitIndex32(cx, isLength ? JSOP_LENGTH : JSOP_GETPROP, atomIndex, bce))
return false;
break;
}
case PNK_ELEM:
if (Emit1(cx, bce, JSOP_DUP2) < 0)
return false;
if (!EmitElemOpBase(cx, bce, JSOP_GETELEM))
return false;
break;
case PNK_CALL:
/*
* We just emitted a JSOP_SETCALL (which will always throw) and
* popped the call's return value. Push a random value to make sure
* the stack depth is correct.
*/
JS_ASSERT(lhs->pn_xflags & PNX_SETCALL);
if (Emit1(cx, bce, JSOP_NULL) < 0)
return false;
break;
default:;
}
}
/* Now emit the right operand (it may affect the namespace). */
if (rhs) {
if (!EmitTree(cx, bce, rhs))
return false;
} else {
/*
* The value to assign is the next enumeration value in a for-in loop.
* That value is produced by a JSOP_ITERNEXT op, previously emitted.
* If offset == 1, that slot is already at the top of the
* stack. Otherwise, rearrange the stack to put that value on top.
*/
if (offset != 1 && Emit2(cx, bce, JSOP_PICK, offset - 1) < 0)
return false;
}
/* If += etc., emit the binary operator with a decompiler note. */
if (op != JSOP_NOP) {
/*
* Take care to avoid SRC_ASSIGNOP if the left-hand side is a const
* declared in the current compilation unit, as in this case (just
* a bit further below) we will avoid emitting the assignment op.
*/
if (!lhs->isKind(PNK_NAME) || !lhs->isConst()) {
if (NewSrcNote(cx, bce, SRC_ASSIGNOP) < 0)
return false;
}
if (Emit1(cx, bce, op) < 0)
return false;
}
/* Finally, emit the specialized assignment bytecode. */
switch (lhs->getKind()) {
case PNK_NAME:
if (lhs->isConst()) {
if (!rhs) {
bce->reportError(lhs, JSMSG_BAD_FOR_LEFTSIDE);
return false;
}
break;
}
if (lhs->isOp(JSOP_SETARG) || lhs->isOp(JSOP_SETLOCAL) || lhs->isOp(JSOP_SETALIASEDVAR)) {
if (!EmitVarOp(cx, lhs, lhs->getOp(), bce))
return false;
} else {
if (!EmitIndexOp(cx, lhs->getOp(), atomIndex, bce))
return false;
}
break;
case PNK_DOT:
if (!EmitIndexOp(cx, lhs->getOp(), atomIndex, bce))
return false;
break;
case PNK_CALL:
/* Do nothing. The JSOP_SETCALL we emitted will always throw. */
JS_ASSERT(lhs->pn_xflags & PNX_SETCALL);
break;
case PNK_ELEM:
if (Emit1(cx, bce, JSOP_SETELEM) < 0)
return false;
break;
#if JS_HAS_DESTRUCTURING
case PNK_ARRAY:
case PNK_OBJECT:
if (!EmitDestructuringOps(cx, bce, lhs))
return false;
break;
#endif
default:
JS_ASSERT(0);
}
return true;
}
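/*
 * Hand-emit JSOP_NEWINIT with its uint32 operand: the JSProtoKey (e.g.
 * JSProto_Array or JSProto_Object, judging from the callers) in the first
 * operand byte, with the remaining bytes zero.
 */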
static bool
EmitNewInit(JSContext *cx, BytecodeEmitter *bce, JSProtoKey key, ParseNode *pn)
{
const size_t len = 1 + UINT32_INDEX_LEN;
ptrdiff_t offset = EmitCheck(cx, bce, len);
if (offset < 0)
return false;
jsbytecode *code = bce->code(offset);
code[0] = JSOP_NEWINIT;
code[1] = jsbytecode(key);
code[2] = 0;
code[3] = 0;
code[4] = 0;
UpdateDepth(cx, bce, offset);
CheckTypeSet(cx, bce, JSOP_NEWINIT);
return true;
}
bool
ParseNode::getConstantValue(JSContext *cx, bool strictChecks, MutableHandleValue vp)
{
switch (getKind()) {
case PNK_NUMBER:
vp.setNumber(pn_dval);
return true;
case PNK_STRING:
vp.setString(pn_atom);
return true;
case PNK_TRUE:
vp.setBoolean(true);
return true;
case PNK_FALSE:
vp.setBoolean(false);
return true;
case PNK_NULL:
vp.setNull();
return true;
case PNK_SPREAD:
return false;
case PNK_ARRAY: {
JS_ASSERT(isOp(JSOP_NEWINIT) && !(pn_xflags & PNX_NONCONST));
RootedObject obj(cx, NewDenseAllocatedArray(cx, pn_count, NULL, MaybeSingletonObject));
if (!obj)
return false;
unsigned idx = 0;
RootedId id(cx);
RootedValue value(cx);
for (ParseNode *pn = pn_head; pn; idx++, pn = pn->pn_next) {
if (!pn->getConstantValue(cx, strictChecks, &value))
return false;
id = INT_TO_JSID(idx);
if (!JSObject::defineGeneric(cx, obj, id, value, NULL, NULL, JSPROP_ENUMERATE))
return false;
}
JS_ASSERT(idx == pn_count);
types::FixArrayType(cx, obj);
vp.setObject(*obj);
return true;
}
case PNK_OBJECT: {
JS_ASSERT(isOp(JSOP_NEWINIT) && !(pn_xflags & PNX_NONCONST));
gc::AllocKind kind = GuessObjectGCKind(pn_count);
RootedObject obj(cx, NewBuiltinClassInstance(cx, &ObjectClass, kind, MaybeSingletonObject));
if (!obj)
return false;
RootedValue value(cx), idvalue(cx);
for (ParseNode *pn = pn_head; pn; pn = pn->pn_next) {
if (!pn->pn_right->getConstantValue(cx, strictChecks, &value))
return false;
ParseNode *pnid = pn->pn_left;
if (pnid->isKind(PNK_NUMBER)) {
idvalue = NumberValue(pnid->pn_dval);
} else {
JS_ASSERT(pnid->isKind(PNK_NAME) || pnid->isKind(PNK_STRING));
JS_ASSERT(pnid->pn_atom != cx->names().proto);
idvalue = StringValue(pnid->pn_atom);
}
uint32_t index;
if (IsDefinitelyIndex(idvalue, &index)) {
if (!JSObject::defineElement(cx, obj, index, value, NULL, NULL,
JSPROP_ENUMERATE))
{
return false;
}
continue;
}
JSAtom *name = ToAtom<CanGC>(cx, idvalue);
if (!name)
return false;
if (name->isIndex(&index)) {
if (!JSObject::defineElement(cx, obj, index, value, NULL, NULL, JSPROP_ENUMERATE))
return false;
} else {
if (!JSObject::defineProperty(cx, obj, name->asPropertyName(), value, NULL, NULL,
JSPROP_ENUMERATE))
{
return false;
}
}
}
types::FixObjectType(cx, obj);
vp.setObject(*obj);
return true;
}
default:
JS_NOT_REACHED("Unexpected node");
}
return false;
}
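/*
 * For a constant initialiser, e.g. (illustrative) 'var a = [1, 2, 3];' in a
 * singleton context, build the object at compile time via getConstantValue
 * and emit a single JSOP_OBJECT instead of element-by-element init code.
 */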
static bool
EmitSingletonInitialiser(JSContext *cx, BytecodeEmitter *bce, ParseNode *pn)
{
RootedValue value(cx);
if (!pn->getConstantValue(cx, bce->sc->needStrictChecks(), &value))
return false;
JS_ASSERT(value.isObject());
ObjectBox *objbox = bce->parser->newObjectBox(&value.toObject());
if (!objbox)
return false;
return EmitObjectOp(cx, objbox, JSOP_OBJECT, bce);
}
/* See the SRC_FOR source note offsetBias comments later in this file. */
JS_STATIC_ASSERT(JSOP_NOP_LENGTH == 1);
JS_STATIC_ASSERT(JSOP_POP_LENGTH == 1);
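/* Scoped helper: bumps bce->emitLevel on construction and restores it on exit. */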
class EmitLevelManager
{
BytecodeEmitter *bce;
public:
EmitLevelManager(BytecodeEmitter *bce) : bce(bce) { bce->emitLevel++; }
~EmitLevelManager() { bce->emitLevel--; }
};
static bool
EmitCatch(JSContext *cx, BytecodeEmitter *bce, ParseNode *pn)
{
ptrdiff_t guardJump;
/*
* Morph STMT_BLOCK to STMT_CATCH, note the block entry code offset,
* and save the block object atom.
*/
StmtInfoBCE *stmt = bce->topStmt;
JS_ASSERT(stmt->type == STMT_BLOCK && stmt->isBlockScope);
stmt->type = STMT_CATCH;
/* Go up one statement info record to the TRY or FINALLY record. */
stmt = stmt->down;
JS_ASSERT(stmt->type == STMT_TRY || stmt->type == STMT_FINALLY);
/* Pick up the pending exception and bind it to the catch variable. */
if (Emit1(cx, bce, JSOP_EXCEPTION) < 0)
return false;
/*
* Dup the exception object if there is a guard for rethrowing to use
* it later when rethrowing or in other catches.
*/
if (pn->pn_kid2 && Emit1(cx, bce, JSOP_DUP) < 0)
return false;
ParseNode *pn2 = pn->pn_kid1;
switch (pn2->getKind()) {
#if JS_HAS_DESTRUCTURING
case PNK_ARRAY:
case PNK_OBJECT:
if (!EmitDestructuringOps(cx, bce, pn2))
return false;
if (Emit1(cx, bce, JSOP_POP) < 0)
return false;
break;
#endif
case PNK_NAME:
/* Inline and specialize BindNameToSlot for pn2. */
JS_ASSERT(!pn2->pn_cookie.isFree());
if (!EmitVarOp(cx, pn2, JSOP_SETLOCAL, bce))
return false;
if (Emit1(cx, bce, JSOP_POP) < 0)
return false;
break;
default:
JS_ASSERT(0);
}
/* Emit the guard expression, if there is one. */
if (pn->pn_kid2) {
if (!EmitTree(cx, bce, pn->pn_kid2))
return false;
/* ifeq <next block> */
guardJump = EmitJump(cx, bce, JSOP_IFEQ, 0);
if (guardJump < 0)
return false;
stmt->guardJump() = guardJump;
/* Pop duplicated exception object as we no longer need it. */
if (Emit1(cx, bce, JSOP_POP) < 0)
return false;
}
/* Emit the catch body. */
if (!EmitTree(cx, bce, pn->pn_kid3))
return false;
/*
* Annotate the JSOP_LEAVEBLOCK that will be emitted as we unwind via
* our PNK_LEXICALSCOPE parent, so the decompiler knows to pop.
*/
return NewSrcNote(cx, bce, SRC_CATCH) >= 0;
}
/*
* Using MOZ_NEVER_INLINE in here is a workaround for llvm.org/pr14047. See
* the comment on EmitSwitch.
*/
MOZ_NEVER_INLINE static bool
EmitTry(JSContext *cx, BytecodeEmitter *bce, ParseNode *pn)
{
StmtInfoBCE stmtInfo(cx);
ptrdiff_t catchJump = -1;
/*
* Push stmtInfo to track jumps-over-catches and gosubs-to-finally
* for later fixup.
*
* When a finally block is active (STMT_FINALLY in our parse context),
* non-local jumps (including jumps-over-catches) result in a GOSUB
* being written into the bytecode stream and fixed-up later (c.f.
* EmitBackPatchOp and BackPatch).
*/
PushStatementBCE(bce, &stmtInfo, pn->pn_kid3 ? STMT_FINALLY : STMT_TRY, bce->offset());
/*
* Since an exception can be thrown at any place inside the try block,
* we need to restore the stack and the scope chain before we transfer
* the control to the exception handler.
*
* For that we store in a try note associated with the catch or
* finally block the stack depth upon the try entry. The interpreter
* uses this depth to properly unwind the stack and the scope chain.
*/
int depth = bce->stackDepth;
/* Mark try location for decompilation, then emit try block. */
if (Emit1(cx, bce, JSOP_TRY) < 0)
return false;
ptrdiff_t tryStart = bce->offset();
if (!EmitTree(cx, bce, pn->pn_kid1))
return false;
JS_ASSERT(depth == bce->stackDepth);
/* GOSUB to finally, if present. */
if (pn->pn_kid3) {
if (NewSrcNote(cx, bce, SRC_HIDDEN) < 0)
return false;
if (EmitBackPatchOp(cx, bce, &stmtInfo.gosubs()) < 0)
return false;
}
/* Emit (hidden) jump over catch and/or finally. */
if (NewSrcNote(cx, bce, SRC_HIDDEN) < 0)
return false;
if (EmitBackPatchOp(cx, bce, &catchJump) < 0)
return false;
ptrdiff_t tryEnd = bce->offset();
/* If this try has a catch block, emit it. */
ParseNode *lastCatch = NULL;
if (ParseNode *pn2 = pn->pn_kid2) {
unsigned count = 0; /* previous catch block's population */
/*
* The emitted code for a catch block looks like:
*
* [throwing] only if 2nd+ catch block
* [leaveblock] only if 2nd+ catch block
* enterblock
* exception
* [dup] only if catchguard
* setlocalpop <slot> or destructuring code
* [< catchguard code >] if there's a catchguard
* [ifeq <offset to next catch block>] " "
* [pop] only if catchguard
* < catch block contents >
* leaveblock
* goto <end of catch blocks> non-local; finally applies
*
* If there's no catch block without a catchguard, the last
* <offset to next catch block> points to rethrow code. This
* code will [gosub] to the finally code if appropriate, and is
* also used for the catch-all trynote for capturing exceptions
* thrown from catch{} blocks.
*/
for (ParseNode *pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
ptrdiff_t guardJump;
JS_ASSERT(bce->stackDepth == depth);
guardJump = stmtInfo.guardJump();
if (guardJump != -1) {
/* Fix up and clean up previous catch block. */
SetJumpOffsetAt(bce, guardJump);
/*
* Account for JSOP_ENTERBLOCK (whose block object count
* is saved below) and the pushed exception object that we
* still have after jumping from the previous guard.
*/
bce->stackDepth = depth + count + 1;
/*
* Move exception back to cx->exception to prepare for
* the next catch. We hide [throwing] from the decompiler
* since it compensates for the hidden JSOP_DUP at the
* start of the previous guarded catch.
*/
if (NewSrcNote(cx, bce, SRC_HIDDEN) < 0 ||
Emit1(cx, bce, JSOP_THROWING) < 0) {
return false;
}
if (NewSrcNote(cx, bce, SRC_HIDDEN) < 0)
return false;
EMIT_UINT16_IMM_OP(JSOP_LEAVEBLOCK, count);
JS_ASSERT(bce->stackDepth == depth);
}
/*
* Emit the lexical scope and catch body. Save the catch's
* block object population via count, for use when targeting
* guardJump at the next catch (the guard mismatch case).
*/
JS_ASSERT(pn3->isKind(PNK_LEXICALSCOPE));
count = pn3->pn_objbox->object->as<StaticBlockObject>().slotCount();
if (!EmitTree(cx, bce, pn3))
return false;
/* gosub <finally>, if required */
if (pn->pn_kid3) {
if (EmitBackPatchOp(cx, bce, &stmtInfo.gosubs()) < 0)
return false;
JS_ASSERT(bce->stackDepth == depth);
}
/*
* Jump over the remaining catch blocks. This will get fixed
* up to jump to after catch/finally.
*/
if (NewSrcNote(cx, bce, SRC_HIDDEN) < 0)
return false;
if (EmitBackPatchOp(cx, bce, &catchJump) < 0)
return false;
/*
* Save a pointer to the last catch node to handle try-finally
* and try-catch(guard)-finally special cases.
*/
lastCatch = pn3->expr();
}
}
/*
* Last catch guard jumps to the rethrow code sequence if none of the
* guards match. Target guardJump at the beginning of the rethrow
* sequence, just in case a guard expression throws and leaves the
* stack unbalanced.
*/
if (lastCatch && lastCatch->pn_kid2) {
SetJumpOffsetAt(bce, stmtInfo.guardJump());
/* Sync the stack to account for the pushed exception. */
JS_ASSERT(bce->stackDepth == depth);
bce->stackDepth = depth + 1;
/*
* Rethrow the exception, delegating execution of the finally
* block, if any, to the exception handler.
*/
if (NewSrcNote(cx, bce, SRC_HIDDEN) < 0 || Emit1(cx, bce, JSOP_THROW) < 0)
return false;
}
JS_ASSERT(bce->stackDepth == depth);
/* Emit finally handler if any. */
ptrdiff_t finallyStart = 0; /* to quell GCC uninitialized warnings */
if (pn->pn_kid3) {
/*
* Fix up the gosubs that might have been emitted before non-local
* jumps to the finally code.
*/
if (!BackPatch(cx, bce, stmtInfo.gosubs(), bce->code().end(), JSOP_GOSUB))
return false;
finallyStart = bce->offset();
/* Indicate that we're emitting a subroutine body. */
stmtInfo.type = STMT_SUBROUTINE;
if (!UpdateSourceCoordNotes(cx, bce, pn->pn_kid3->pn_pos.begin))
return false;
if (Emit1(cx, bce, JSOP_FINALLY) < 0 ||
!EmitTree(cx, bce, pn->pn_kid3) ||
Emit1(cx, bce, JSOP_RETSUB) < 0)
{
return false;
}
JS_ASSERT(bce->stackDepth == depth);
}
if (!PopStatementBCE(cx, bce))
return false;
/* ReconstructPCStack needs a NOP here to mark the end of the last catch block. */
if (Emit1(cx, bce, JSOP_NOP) < 0)
return false;
/* Fix up the end-of-try/catch jumps to come here. */
if (!BackPatch(cx, bce, catchJump, bce->code().end(), JSOP_GOTO))
return false;
/*
* Add the try note last, to let post-order give us the right ordering
* (first to last for a given nesting level, inner to outer by level).
*/
if (pn->pn_kid2 && !bce->tryNoteList.append(JSTRY_CATCH, depth, tryStart, tryEnd))
return false;
/*
* If we've got a finally, mark try+catch region with additional
* trynote to catch exceptions (re)thrown from a catch block or
* for the try{}finally{} case.
*/
if (pn->pn_kid3 && !bce->tryNoteList.append(JSTRY_FINALLY, depth, tryStart, finallyStart))
return false;
return true;
}
static bool
EmitIf(JSContext *cx, BytecodeEmitter *bce, ParseNode *pn)
{
StmtInfoBCE stmtInfo(cx);
/* Initialize so we can detect else-if chains and avoid recursion. */
stmtInfo.type = STMT_IF;
ptrdiff_t beq = -1;
ptrdiff_t jmp = -1;
ptrdiff_t noteIndex = -1;
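/*
 * Shape of the emitted code (illustrative):
 *   <cond>; ifeq -> else; <then>; goto -> end; else: <else>; end:
 * Else-if chains loop back to if_again instead of recursing through
 * EmitTree.
 */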
if_again:
/* Emit code for the condition before pushing stmtInfo. */
if (!EmitTree(cx, bce, pn->pn_kid1))
return false;
ptrdiff_t top = bce->offset();
if (stmtInfo.type == STMT_IF) {
PushStatementBCE(bce, &stmtInfo, STMT_IF, top);
} else {
/*
* We came here from the goto further below that detects else-if
* chains, so we must mutate stmtInfo back into a STMT_IF record.
* Also we need a note offset for SRC_IF_ELSE to help IonMonkey.
*/
JS_ASSERT(stmtInfo.type == STMT_ELSE);
stmtInfo.type = STMT_IF;
stmtInfo.update = top;
if (!SetSrcNoteOffset(cx, bce, noteIndex, 0, jmp - beq))
return false;
}
/* Emit an annotated branch-if-false around the then part. */
ParseNode *pn3 = pn->pn_kid3;
noteIndex = NewSrcNote(cx, bce, pn3 ? SRC_IF_ELSE : SRC_IF);
if (noteIndex < 0)
return false;
beq = EmitJump(cx, bce, JSOP_IFEQ, 0);
if (beq < 0)
return false;
/* Emit code for the then and optional else parts. */
if (!EmitTree(cx, bce, pn->pn_kid2))
return false;
if (pn3) {
/* Modify stmtInfo so we know we're in the else part. */
stmtInfo.type = STMT_ELSE;
/*
* Emit a JSOP_BACKPATCH op to jump from the end of our then part
* around the else part. The PopStatementBCE call at the bottom of
* this function will fix up the backpatch chain linked from
* stmtInfo.breaks.
*/
jmp = EmitGoto(cx, bce, &stmtInfo, &stmtInfo.breaks);
if (jmp < 0)
return false;
/* Ensure the branch-if-false comes here, then emit the else. */
SetJumpOffsetAt(bce, beq);
if (pn3->isKind(PNK_IF)) {
pn = pn3;
goto if_again;
}
if (!EmitTree(cx, bce, pn3))
return false;
/*
* Annotate SRC_IF_ELSE with the offset from branch to jump, for
* IonMonkey's benefit. We can't just "back up" from the pc
* of the else clause, because we don't know whether an extended
* jump was required to leap from the end of the then clause over
* the else clause.
*/
if (!SetSrcNoteOffset(cx, bce, noteIndex, 0, jmp - beq))
return false;
} else {
/* No else part, fixup the branch-if-false to come here. */
SetJumpOffsetAt(bce, beq);
}
return PopStatementBCE(cx, bce);
}
#if JS_HAS_BLOCK_SCOPE
/*
* pnLet represents one of:
*
* let-expression: (let (x = y) EXPR)
* let-statement: let (x = y) { ... }
*
* For a let-expression 'let (x = a, [y,z] = b) e', EmitLet produces:
*
* bytecode stackDepth srcnotes
* evaluate a +1
* evaluate b +1
* dup +1
* destructure y
* pick 1
* dup +1
* destructure z
* pick 1
* pop -1
* enterlet0
* evaluate e +1
* leaveblockexpr -3
*
* Note that, since enterlet0 simply changes fp->blockChain and does not
* otherwise touch the stack, evaluation of the let-var initializers must leave
* the initial value in the let-var's future slot.
*/
/*
* Using MOZ_NEVER_INLINE in here is a workaround for llvm.org/pr14047. See
* the comment on EmitSwitch.
*/
MOZ_NEVER_INLINE static bool
EmitLet(JSContext *cx, BytecodeEmitter *bce, ParseNode *pnLet)
{
JS_ASSERT(pnLet->isArity(PN_BINARY));
ParseNode *varList = pnLet->pn_left;
JS_ASSERT(varList->isArity(PN_LIST));
ParseNode *letBody = pnLet->pn_right;
JS_ASSERT(letBody->isLet() && letBody->isKind(PNK_LEXICALSCOPE));
Rooted<StaticBlockObject*> blockObj(cx, &letBody->pn_objbox->object->as<StaticBlockObject>());
int letHeadDepth = bce->stackDepth;
if (!EmitVariables(cx, bce, varList, PushInitialValues, true))
return false;
/* Push storage for hoisted let decls (e.g. 'let (x) { let y }'). */
uint32_t alreadyPushed = unsigned(bce->stackDepth - letHeadDepth);
uint32_t blockObjCount = blockObj->slotCount();
for (uint32_t i = alreadyPushed; i < blockObjCount; ++i) {
if (Emit1(cx, bce, JSOP_UNDEFINED) < 0)
return false;
}
StmtInfoBCE stmtInfo(cx);
PushBlockScopeBCE(bce, &stmtInfo, *blockObj, bce->offset());
DebugOnly<ptrdiff_t> bodyBegin = bce->offset();
if (!EmitEnterBlock(cx, bce, letBody, JSOP_ENTERLET0))
return false;
if (!EmitTree(cx, bce, letBody->pn_expr))
return false;
JSOp leaveOp = letBody->getOp();
JS_ASSERT(leaveOp == JSOP_LEAVEBLOCK || leaveOp == JSOP_LEAVEBLOCKEXPR);
EMIT_UINT16_IMM_OP(leaveOp, blockObj->slotCount());
DebugOnly<ptrdiff_t> bodyEnd = bce->offset();
JS_ASSERT(bodyEnd > bodyBegin);
return PopStatementBCE(cx, bce);
}
#endif
/*
* Using MOZ_NEVER_INLINE in here is a workaround for llvm.org/pr14047. See
* the comment on EmitSwitch.
*/
MOZ_NEVER_INLINE static bool
EmitLexicalScope(JSContext *cx, BytecodeEmitter *bce, ParseNode *pn)
{
JS_ASSERT(pn->isKind(PNK_LEXICALSCOPE));
JS_ASSERT(pn->getOp() == JSOP_LEAVEBLOCK);
StmtInfoBCE stmtInfo(cx);
ObjectBox *objbox = pn->pn_objbox;
StaticBlockObject &blockObj = objbox->object->as<StaticBlockObject>();
size_t slots = blockObj.slotCount();
PushBlockScopeBCE(bce, &stmtInfo, blockObj, bce->offset());
if (!EmitEnterBlock(cx, bce, pn, JSOP_ENTERBLOCK))
return false;
if (!EmitTree(cx, bce, pn->pn_expr))
return false;
EMIT_UINT16_IMM_OP(JSOP_LEAVEBLOCK, slots);
return PopStatementBCE(cx, bce);
}
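/*
 * Emit 'with (o) body' as <o>; enterwith; <body>; leavewith, bracketed by a
 * STMT_WITH statement record.
 */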
static bool
EmitWith(JSContext *cx, BytecodeEmitter *bce, ParseNode *pn)
{
StmtInfoBCE stmtInfo(cx);
if (!EmitTree(cx, bce, pn->pn_left))
return false;
PushStatementBCE(bce, &stmtInfo, STMT_WITH, bce->offset());
if (Emit1(cx, bce, JSOP_ENTERWITH) < 0)
return false;
if (!EmitTree(cx, bce, pn->pn_right))
return false;
if (Emit1(cx, bce, JSOP_LEAVEWITH) < 0)
return false;
return PopStatementBCE(cx, bce);
}
static bool
EmitForIn(JSContext *cx, BytecodeEmitter *bce, ParseNode *pn, ptrdiff_t top)
{
StmtInfoBCE stmtInfo(cx);
PushStatementBCE(bce, &stmtInfo, STMT_FOR_IN_LOOP, top);
ParseNode *forHead = pn->pn_left;
ParseNode *forBody = pn->pn_right;
ParseNode *pn1 = forHead->pn_kid1;
bool letDecl = pn1 && pn1->isKind(PNK_LEXICALSCOPE);
JS_ASSERT_IF(letDecl, pn1->isLet());
Rooted<StaticBlockObject*>
blockObj(cx, letDecl ? &pn1->pn_objbox->object->as<StaticBlockObject>() : NULL);
uint32_t blockObjCount = blockObj ? blockObj->slotCount() : 0;
if (letDecl) {
/*
* The let's slot(s) will be under the iterator, but the block must not
* be entered (i.e. fp->blockChain set) until after evaluating the rhs.
* Thus, push to reserve space and enterblock after. The same argument
* applies when leaving the loop. Thus, a for-let-in loop looks like:
*
* push x N
* eval rhs
* iter
* enterlet1
* goto
* ... loop body
* ifne
* leaveforinlet
* enditer
* popn(N)
*/
for (uint32_t i = 0; i < blockObjCount; ++i) {
if (Emit1(cx, bce, JSOP_UNDEFINED) < 0)
return false;
}
}
/*
* If the left part is 'var x', emit code to define x if necessary
* using a prolog opcode, but do not emit a pop. If the left part was
* originally 'var x = i', the parser will have rewritten it; see
* Parser::forStatement. 'for (let x = i in o)' is mercifully banned.
*/
if (pn1) {
ParseNode *decl = letDecl ? pn1->pn_expr : pn1;
JS_ASSERT(decl->isKind(PNK_VAR) || decl->isKind(PNK_LET));
bce->emittingForInit = true;
if (!EmitVariables(cx, bce, decl, DefineVars))
return false;
bce->emittingForInit = false;
}
/* Compile the object expression to the right of 'in'. */
if (!EmitTree(cx, bce, forHead->pn_kid3))
return false;
/*
* Emit a bytecode to convert top of stack value to the iterator
* object depending on the loop variant (for-in, for-each-in, or
* destructuring for-in).
*/
JS_ASSERT(pn->isOp(JSOP_ITER));
if (Emit2(cx, bce, JSOP_ITER, (uint8_t) pn->pn_iflags) < 0)
return false;
/* Enter the block before the loop body, after evaluating the obj. */
StmtInfoBCE letStmt(cx);
if (letDecl) {
PushBlockScopeBCE(bce, &letStmt, *blockObj, bce->offset());
letStmt.isForLetBlock = true;
if (!EmitEnterBlock(cx, bce, pn1, JSOP_ENTERLET1))
return false;
}
/* Annotate so IonMonkey can find the loop-closing jump. */
int noteIndex = NewSrcNote(cx, bce, SRC_FOR_IN);
if (noteIndex < 0)
return false;
/*
* Jump down to the loop condition to minimize overhead assuming at
* least one iteration, as the other loop forms do.
*/
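/*
 * Plain for-in shape (illustrative):
 *   <obj>; iter; goto -> cond; top: iternext; <assign lhs>; pop;
 *   <body>; cond: moreiter; ifne -> top; enditer
 */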
ptrdiff_t jmp = EmitJump(cx, bce, JSOP_GOTO, 0);
if (jmp < 0)
return false;
top = bce->offset();
SET_STATEMENT_TOP(&stmtInfo, top);
if (EmitLoopHead(cx, bce, NULL) < 0)
return false;
#ifdef DEBUG
int loopDepth = bce->stackDepth;
#endif
/*
* Emit code to get the next enumeration value and assign it to the
* left hand side. The JSOP_POP after this assignment is annotated
* so that the decompiler can distinguish 'for (x in y)' from
* 'for (var x in y)'.
*/
if (Emit1(cx, bce, JSOP_ITERNEXT) < 0)
return false;
if (!EmitAssignment(cx, bce, forHead->pn_kid2, JSOP_NOP, NULL))
return false;
if (Emit1(cx, bce, JSOP_POP) < 0)
return false;
/* The stack should be balanced around the assignment opcode sequence. */
JS_ASSERT(bce->stackDepth == loopDepth);
/* Emit code for the loop body. */
if (!EmitTree(cx, bce, forBody))
return false;
/* Set loop and enclosing "update" offsets, for continue. */
StmtInfoBCE *stmt = &stmtInfo;
do {
stmt->update = bce->offset();
} while ((stmt = stmt->down) != NULL && stmt->type == STMT_LABEL);
/*
* Fixup the goto that starts the loop to jump down to JSOP_MOREITER.
*/
SetJumpOffsetAt(bce, jmp);
if (!EmitLoopEntry(cx, bce, NULL))
return false;
if (Emit1(cx, bce, JSOP_MOREITER) < 0)
return false;
ptrdiff_t beq = EmitJump(cx, bce, JSOP_IFNE, top - bce->offset());
if (beq < 0)
return false;
/* Set the srcnote offset so we can find the closing jump. */
if (!SetSrcNoteOffset(cx, bce, (unsigned)noteIndex, 0, beq - jmp))
return false;
/* Fixup breaks and continues before JSOP_ITER (and JSOP_LEAVEFORINLET). */
if (!PopStatementBCE(cx, bce))
return false;
if (letDecl) {
if (!PopStatementBCE(cx, bce))
return false;
if (Emit1(cx, bce, JSOP_LEAVEFORLETIN) < 0)
return false;
}
if (!bce->tryNoteList.append(JSTRY_ITER, bce->stackDepth, top, bce->offset()))
return false;
if (Emit1(cx, bce, JSOP_ENDITER) < 0)
return false;
if (letDecl)
EMIT_UINT16_IMM_OP(JSOP_POPN, blockObjCount);
return true;
}
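/*
 * Shape of the emitted C-style loop (illustrative):
 *   <init>; goto -> cond; top: <body>; <update>; cond: <cond>; ifne -> top
 * With no condition there is no goto, and the closing jump is an
 * unconditional goto back to top.
 */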
static bool
EmitNormalFor(JSContext *cx, BytecodeEmitter *bce, ParseNode *pn, ptrdiff_t top)
{
StmtInfoBCE stmtInfo(cx);
PushStatementBCE(bce, &stmtInfo, STMT_FOR_LOOP, top);
ParseNode *forHead = pn->pn_left;
ParseNode *forBody = pn->pn_right;
/* C-style for (init; cond; update) ... loop. */
JSOp op = JSOP_POP;
ParseNode *pn3 = forHead->pn_kid1;
if (!pn3) {
/* No initializer: emit an annotated nop for the decompiler. */
op = JSOP_NOP;
} else {
bce->emittingForInit = true;
#if JS_HAS_DESTRUCTURING
if (pn3->isKind(PNK_ASSIGN)) {
JS_ASSERT(pn3->isOp(JSOP_NOP));
if (!MaybeEmitGroupAssignment(cx, bce, op, pn3, GroupIsNotDecl, &op))
return false;
}
#endif
if (op == JSOP_POP) {
if (!UpdateSourceCoordNotes(cx, bce, pn3->pn_pos.begin))
return false;
if (!EmitTree(cx, bce, pn3))
return false;
if (pn3->isKind(PNK_VAR) || pn3->isKind(PNK_CONST) || pn3->isKind(PNK_LET)) {
/*
* Check whether a destructuring-initialized var decl
* was optimized to a group assignment. If so, we do
* not need to emit a pop below, so switch to a nop,
* just for the decompiler.
*/
JS_ASSERT(pn3->isArity(PN_LIST) || pn3->isArity(PN_BINARY));
if (pn3->pn_xflags & PNX_GROUPINIT)
op = JSOP_NOP;
}
}
bce->emittingForInit = false;
}
/*
* NB: the SRC_FOR note has offsetBias 1 (JSOP_{NOP,POP}_LENGTH).
* Use tmp to hold the biased srcnote "top" offset, which differs
* from the top local variable by the length of the JSOP_GOTO
* emitted in between tmp and top if this loop has a condition.
*/
int noteIndex = NewSrcNote(cx, bce, SRC_FOR);
if (noteIndex < 0 || Emit1(cx, bce, op) < 0)
return false;
ptrdiff_t tmp = bce->offset();
ptrdiff_t jmp = -1;
if (forHead->pn_kid2) {
/* Goto the loop condition, which branches back to iterate. */
jmp = EmitJump(cx, bce, JSOP_GOTO, 0);
if (jmp < 0)
return false;
} else {
if (op != JSOP_NOP && Emit1(cx, bce, JSOP_NOP) < 0)
return false;
}
top = bce->offset();
SET_STATEMENT_TOP(&stmtInfo, top);
/* Emit code for the loop body. */
if (EmitLoopHead(cx, bce, forBody) < 0)
return false;
if (jmp == -1 && !EmitLoopEntry(cx, bce, forBody))
return false;
if (!EmitTree(cx, bce, forBody))
return false;
/* Set the second note offset so we can find the update part. */
JS_ASSERT(noteIndex != -1);
ptrdiff_t tmp2 = bce->offset();
/* Set loop and enclosing "update" offsets, for continue. */
StmtInfoBCE *stmt = &stmtInfo;
do {
stmt->update = bce->offset();
} while ((stmt = stmt->down) != NULL && stmt->type == STMT_LABEL);
/* Check for update code to do before the condition (if any). */
pn3 = forHead->pn_kid3;
if (pn3) {
if (!UpdateSourceCoordNotes(cx, bce, pn3->pn_pos.begin))
return false;
op = JSOP_POP;
#if JS_HAS_DESTRUCTURING
if (pn3->isKind(PNK_ASSIGN)) {
JS_ASSERT(pn3->isOp(JSOP_NOP));
if (!MaybeEmitGroupAssignment(cx, bce, op, pn3, GroupIsNotDecl, &op))
return false;
}
#endif
if (op == JSOP_POP && !EmitTree(cx, bce, pn3))
return false;
/* Always emit the POP or NOP, to help the decompiler. */
if (Emit1(cx, bce, op) < 0)
return false;
/* Restore the absolute line number for source note readers. */
uint32_t lineNum = bce->parser->tokenStream.srcCoords.lineNum(pn->pn_pos.end);
if (bce->currentLine() != lineNum) {
if (NewSrcNote2(cx, bce, SRC_SETLINE, ptrdiff_t(lineNum)) < 0)
return false;
bce->current->currentLine = lineNum;
bce->current->lastColumn = 0;
}
}
ptrdiff_t tmp3 = bce->offset();
if (forHead->pn_kid2) {
/* Fix up the goto from top to target the loop condition. */
JS_ASSERT(jmp >= 0);
SetJumpOffsetAt(bce, jmp);
if (!EmitLoopEntry(cx, bce, forHead->pn_kid2))
return false;
if (!EmitTree(cx, bce, forHead->pn_kid2))
return false;
}
/* Set the first note offset so we can find the loop condition. */
if (!SetSrcNoteOffset(cx, bce, (unsigned)noteIndex, 0, tmp3 - tmp))
return false;
if (!SetSrcNoteOffset(cx, bce, (unsigned)noteIndex, 1, tmp2 - tmp))
return false;
/* The third note offset helps us find the loop-closing jump. */
if (!SetSrcNoteOffset(cx, bce, (unsigned)noteIndex, 2, bce->offset() - tmp))
return false;
/* If no loop condition, just emit a loop-closing jump. */
op = forHead->pn_kid2 ? JSOP_IFNE : JSOP_GOTO;
if (EmitJump(cx, bce, op, top - bce->offset()) < 0)
return false;
if (!bce->tryNoteList.append(JSTRY_LOOP, bce->stackDepth, top, bce->offset()))
return false;
/* Now fixup all breaks and continues. */
return PopStatementBCE(cx, bce);
}
static inline bool
EmitFor(JSContext *cx, BytecodeEmitter *bce, ParseNode *pn, ptrdiff_t top)
{
JS_ASSERT(pn->pn_left->isKind(PNK_FORIN) || pn->pn_left->isKind(PNK_FORHEAD));
return pn->pn_left->isKind(PNK_FORIN)
? EmitForIn(cx, bce, pn, top)
: EmitNormalFor(cx, bce, pn, top);
}
static JS_NEVER_INLINE bool
EmitFunc(JSContext *cx, BytecodeEmitter *bce, ParseNode *pn)
{
FunctionBox *funbox = pn->pn_funbox;
RootedFunction fun(cx, funbox->function());
if (fun->isNative()) {
JS_ASSERT(IsAsmJSModuleNative(fun->native()));
return true;
}
JS_ASSERT_IF(fun->isInterpretedLazy(), fun->lazyScript());
/*
* Set the EMITTEDFUNCTION flag in function definitions once they have been
* emitted. Function definitions that need hoisting to the top of the
* function will be seen by EmitFunc in two places.
*/
if (pn->pn_dflags & PND_EMITTEDFUNCTION) {
JS_ASSERT_IF(fun->hasScript(), fun->nonLazyScript());
JS_ASSERT(pn->functionIsHoisted());
JS_ASSERT(bce->sc->isFunctionBox());
return true;
}
pn->pn_dflags |= PND_EMITTEDFUNCTION;
/*
* Mark as singletons any function which will only be executed once, or
* which is inner to a lambda we only expect to run once. In the latter
* case, if the lambda runs multiple times then CloneFunctionObject will
* make a deep clone of its contents.
*/
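/*
* For example (illustrative only): in a run-once script,
* |(function() { function g() {} })()| makes the outer lambda a
* run-once lambda, so |g| is marked as a singleton here even though it
* is not itself in a singleton context.
*/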
bool singleton =
cx->typeInferenceEnabled() &&
bce->script->compileAndGo &&
(bce->checkSingletonContext() ||
(!bce->isInLoop() &&
bce->parent &&
bce->parent->emittingRunOnceLambda));
if (!JSFunction::setTypeForScriptedFunction(cx, fun, singleton))
return false;
if (fun->isInterpretedLazy()) {
if (!fun->lazyScript()->sourceObject()) {
JSObject *scope = bce->blockChain;
if (!scope && bce->sc->isFunctionBox())
scope = bce->sc->asFunctionBox()->function();
fun->lazyScript()->setParent(scope, bce->script->sourceObject(), bce->script->originPrincipals);
}
} else {
SharedContext *outersc = bce->sc;
if (outersc->isFunctionBox() && outersc->asFunctionBox()->mightAliasLocals())
funbox->setMightAliasLocals(); // inherit mightAliasLocals from parent
JS_ASSERT_IF(outersc->strict, funbox->strict);
// Inherit most things (principals, version, etc) from the parent.
Rooted<JSScript*> parent(cx, bce->script);
CompileOptions options(cx);
options.setPrincipals(parent->principals())
.setOriginPrincipals(parent->originPrincipals)
.setCompileAndGo(parent->compileAndGo)
.setSelfHostingMode(parent->selfHosted)
.setNoScriptRval(false)
.setVersion(parent->getVersion());
bool generateBytecode = true;
#ifdef JS_ION
if (funbox->useAsm) {
RootedFunction moduleFun(cx);
// In a function like this:
//
// function f() { "use asm"; ... }
//
// funbox->asmStart points to the '"', and funbox->bufEnd points
// one past the final '}'. We need to exclude that final '}',
// so we use |funbox->bufEnd - 1| below.
//
if (!CompileAsmJS(cx, *bce->tokenStream(), pn, options,
bce->script->scriptSource(), funbox->asmStart, funbox->bufEnd - 1,
&moduleFun))
return false;
if (moduleFun) {
funbox->object = moduleFun;
generateBytecode = false;
}
}
#endif
if (generateBytecode) {
Rooted<JSObject*> enclosingScope(cx, EnclosingStaticScope(bce));
Rooted<ScriptSourceObject *> sourceObject(cx, bce->script->sourceObject());
Rooted<JSScript*> script(cx, JSScript::Create(cx, enclosingScope, false, options,
parent->staticLevel + 1,
sourceObject,
funbox->bufStart, funbox->bufEnd));
if (!script)
return false;
script->bindings = funbox->bindings;
uint32_t lineNum = bce->parser->tokenStream.srcCoords.lineNum(pn->pn_pos.begin);
BytecodeEmitter bce2(bce, bce->parser, funbox, script, bce->insideEval,
bce->evalCaller, bce->hasGlobalScope, lineNum,
bce->emitterMode);
if (!bce2.init())
return false;
/* We measured the max scope depth when we parsed the function. */
if (!EmitFunctionScript(cx, &bce2, pn->pn_body))
return false;
if (funbox->usesArguments && funbox->usesApply)
script->usesArgumentsAndApply = true;
}
}
/* Make the function object a literal in the outer script's pool. */
unsigned index = bce->objectList.add(pn->pn_funbox);
/* Non-hoisted functions simply emit their respective op. */
if (!pn->functionIsHoisted())
return EmitIndex32(cx, pn->getOp(), index, bce);
/*
* For a script we emit the code as we parse. Thus the bytecode for
* top-level functions should go in the prolog to predefine their
* names in the variable object before the already-generated main code
* is executed. This extra work is not necessary when we emit the code
* for a function: the function is fully parsed prior to invocation of
* the emitter, so calls to EmitTree for function definitions can be
* scheduled before generating the rest of its code.
*/
if (!bce->sc->isFunctionBox()) {
JS_ASSERT(pn->pn_cookie.isFree());
JS_ASSERT(pn->getOp() == JSOP_NOP);
JS_ASSERT(!bce->topStmt);
bce->switchToProlog();
if (!EmitIndex32(cx, JSOP_DEFFUN, index, bce))
return false;
if (!UpdateSourceCoordNotes(cx, bce, pn->pn_pos.begin))
return false;
bce->switchToMain();
} else {
#ifdef DEBUG
BindingIter bi(bce->script);
while (bi->name() != fun->atom())
bi++;
JS_ASSERT(bi->kind() == VARIABLE || bi->kind() == CONSTANT || bi->kind() == ARGUMENT);
JS_ASSERT(bi.frameIndex() < JS_BIT(20));
#endif
pn->pn_index = index;
if (!EmitIndexOp(cx, JSOP_LAMBDA, index, bce))
return false;
JS_ASSERT(pn->getOp() == JSOP_GETLOCAL || pn->getOp() == JSOP_GETARG);
JSOp setOp = pn->getOp() == JSOP_GETLOCAL ? JSOP_SETLOCAL : JSOP_SETARG;
if (!EmitVarOp(cx, pn, setOp, bce))
return false;
if (Emit1(cx, bce, JSOP_POP) < 0)
return false;
}
return true;
}
static bool
EmitDo(JSContext *cx, BytecodeEmitter *bce, ParseNode *pn)
{
/* Emit an annotated nop so we know to decompile a 'do' keyword. */
ptrdiff_t noteIndex = NewSrcNote(cx, bce, SRC_WHILE);
if (noteIndex < 0 || Emit1(cx, bce, JSOP_NOP) < 0)
return false;
ptrdiff_t noteIndex2 = NewSrcNote(cx, bce, SRC_WHILE);
if (noteIndex2 < 0)
return false;
/* Compile the loop body. */
ptrdiff_t top = EmitLoopHead(cx, bce, pn->pn_left);
if (top < 0)
return false;
StmtInfoBCE stmtInfo(cx);
PushStatementBCE(bce, &stmtInfo, STMT_DO_LOOP, top);
if (!EmitLoopEntry(cx, bce, NULL))
return false;
if (!EmitTree(cx, bce, pn->pn_left))
return false;
/* Set loop and enclosing label update offsets, for continue. */
ptrdiff_t off = bce->offset();
StmtInfoBCE *stmt = &stmtInfo;
do {
stmt->update = off;
} while ((stmt = stmt->down) != NULL && stmt->type == STMT_LABEL);
/* Compile the loop condition, now that continues know where to go. */
if (!EmitTree(cx, bce, pn->pn_right))
return false;
/*
* Since we use JSOP_IFNE for other purposes as well as for do-while
* loops, we must store 1 + (off - top), the biased distance from the
* loop head to the condition, in the first SRC_WHILE note's offset,
* and the decompiler must get that delta and decompile recursively.
*/
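/*
* Illustrative shape (not verbatim) for |do body while (cond);|:
*
*     NOP                 (annotated with the two SRC_WHILE notes)
*   top:
*     <body>
*   off:                  (continue target)
*     <cond>
*     IFNE top            (beq, the loop-closing backward jump)
*/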
ptrdiff_t beq = EmitJump(cx, bce, JSOP_IFNE, top - bce->offset());
if (beq < 0)
return false;
if (!bce->tryNoteList.append(JSTRY_LOOP, bce->stackDepth, top, bce->offset()))
return false;
/*
* Be careful: We must set noteIndex2 before noteIndex in case the noteIndex
* note gets bigger.
*/
if (!SetSrcNoteOffset(cx, bce, noteIndex2, 0, beq - top))
return false;
if (!SetSrcNoteOffset(cx, bce, noteIndex, 0, 1 + (off - top)))
return false;
return PopStatementBCE(cx, bce);
}
static bool
EmitWhile(JSContext *cx, BytecodeEmitter *bce, ParseNode *pn, ptrdiff_t top)
{
/*
* Minimize bytecodes issued for one or more iterations by jumping to
* the condition below the body and closing the loop with a backward
* branch if the condition is true. For iteration count i:
*
* i test at the top test at the bottom
* = =============== ==================
* 0 ifeq-pass goto; ifne-fail
* 1 ifeq-fail; goto; ifne-pass goto; ifne-pass; ifne-fail
* 2 2*(ifeq-fail; goto); ifeq-pass goto; 2*ifne-pass; ifne-fail
* . . .
* N N*(ifeq-fail; goto); ifeq-pass goto; N*ifne-pass; ifne-fail
*/
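/*
* Illustrative shape (not verbatim) for |while (cond) body|:
*
*     GOTO cond           (jmp, annotated with SRC_WHILE)
*   top:
*     <body>
*   cond:
*     <cond>
*     IFNE top            (beq; the note offset is beq - jmp)
*/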
StmtInfoBCE stmtInfo(cx);
PushStatementBCE(bce, &stmtInfo, STMT_WHILE_LOOP, top);
ptrdiff_t noteIndex = NewSrcNote(cx, bce, SRC_WHILE);
if (noteIndex < 0)
return false;
ptrdiff_t jmp = EmitJump(cx, bce, JSOP_GOTO, 0);
if (jmp < 0)
return false;
top = EmitLoopHead(cx, bce, pn->pn_right);
if (top < 0)
return false;
if (!EmitTree(cx, bce, pn->pn_right))
return false;
SetJumpOffsetAt(bce, jmp);
if (!EmitLoopEntry(cx, bce, pn->pn_left))
return false;
if (!EmitTree(cx, bce, pn->pn_left))
return false;
ptrdiff_t beq = EmitJump(cx, bce, JSOP_IFNE, top - bce->offset());
if (beq < 0)
return false;
if (!bce->tryNoteList.append(JSTRY_LOOP, bce->stackDepth, top, bce->offset()))
return false;
if (!SetSrcNoteOffset(cx, bce, noteIndex, 0, beq - jmp))
return false;
return PopStatementBCE(cx, bce);
}
static bool
EmitBreak(JSContext *cx, BytecodeEmitter *bce, PropertyName *label)
{
StmtInfoBCE *stmt = bce->topStmt;
SrcNoteType noteType;
if (label) {
while (stmt->type != STMT_LABEL || stmt->label != label)
stmt = stmt->down;
noteType = SRC_BREAK2LABEL;
} else {
while (!stmt->isLoop() && stmt->type != STMT_SWITCH)
stmt = stmt->down;
noteType = (stmt->type == STMT_SWITCH) ? SRC_SWITCHBREAK : SRC_BREAK;
}
return EmitGoto(cx, bce, stmt, &stmt->breaks, noteType) >= 0;
}
static bool
EmitContinue(JSContext *cx, BytecodeEmitter *bce, PropertyName *label)
{
StmtInfoBCE *stmt = bce->topStmt;
if (label) {
/* Find the loop statement enclosed by the matching label. */
StmtInfoBCE *loop = NULL;
while (stmt->type != STMT_LABEL || stmt->label != label) {
if (stmt->isLoop())
loop = stmt;
stmt = stmt->down;
}
stmt = loop;
} else {
while (!stmt->isLoop())
stmt = stmt->down;
}
return EmitGoto(cx, bce, stmt, &stmt->continues, SRC_CONTINUE) >= 0;
}
static bool
EmitReturn(JSContext *cx, BytecodeEmitter *bce, ParseNode *pn)
{
if (!UpdateSourceCoordNotes(cx, bce, pn->pn_pos.begin))
return false;
/* Push a return value */
if (ParseNode *pn2 = pn->pn_kid) {
if (!EmitTree(cx, bce, pn2))
return false;
} else {
/* No explicit return value provided */
if (Emit1(cx, bce, JSOP_UNDEFINED) < 0)
return false;
}
/*
* EmitNonLocalJumpFixup may add fixup bytecode to close open try
* blocks having finally clauses and to exit intermingled let blocks.
* We can't simply transfer control flow to our caller in that case,
* because we must gosub to those finally clauses from inner to outer,
* with the correct stack pointer (i.e., after popping any with,
* for/in, etc., slots nested inside the finally's try).
*
* In this case we mutate JSOP_RETURN into JSOP_SETRVAL and add an
* extra JSOP_RETRVAL after the fixups.
*/
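/*
* Sketch (illustrative): for |try { return x; } finally { f(); }| the
* plain |<x> RETURN| grows a fixup, roughly:
*
*     <x> SETRVAL GOSUB finally ... RETRVAL
*
* because EmitNonLocalJumpFixup emits bytecode after the JSOP_RETURN,
* which is then rewritten to JSOP_SETRVAL below.
*/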
ptrdiff_t top = bce->offset();
if (Emit1(cx, bce, JSOP_RETURN) < 0)
return false;
if (!EmitNonLocalJumpFixup(cx, bce, NULL))
return false;
if (top + JSOP_RETURN_LENGTH != bce->offset()) {
bce->code()[top] = JSOP_SETRVAL;
if (Emit1(cx, bce, JSOP_RETRVAL) < 0)
return false;
}
return true;
}
static bool
EmitStatementList(JSContext *cx, BytecodeEmitter *bce, ParseNode *pn, ptrdiff_t top)
{
JS_ASSERT(pn->isArity(PN_LIST));
StmtInfoBCE stmtInfo(cx);
PushStatementBCE(bce, &stmtInfo, STMT_BLOCK, top);
ParseNode *pnchild = pn->pn_head;
if (pn->pn_xflags & PNX_DESTRUCT)
pnchild = pnchild->pn_next;
for (ParseNode *pn2 = pnchild; pn2; pn2 = pn2->pn_next) {
if (!EmitTree(cx, bce, pn2))
return false;
}
return PopStatementBCE(cx, bce);
}
static bool
EmitStatement(JSContext *cx, BytecodeEmitter *bce, ParseNode *pn)
{
JS_ASSERT(pn->isKind(PNK_SEMI));
ParseNode *pn2 = pn->pn_kid;
if (!pn2)
return true;
if (!UpdateSourceCoordNotes(cx, bce, pn->pn_pos.begin))
return false;
/*
* Top-level or called-from-a-native JS_Execute/EvaluateScript,
* debugger, and eval frames may need the value of the ultimate
* expression statement as the script's result, despite the fact
* that it appears useless to the compiler.
*
* API users may also set the JSOPTION_NO_SCRIPT_RVAL option when
* calling JS_Compile* to suppress JSOP_POPV.
*/
bool wantval = false;
bool useful = false;
if (bce->sc->isFunctionBox()) {
JS_ASSERT(!bce->script->noScriptRval);
} else {
useful = wantval = !bce->script->noScriptRval;
}
/* Don't eliminate expressions with side effects. */
if (!useful) {
if (!CheckSideEffects(cx, bce, pn2, &useful))
return false;
/*
* Don't eliminate apparently useless expressions if they are
* labeled expression statements. The bce->topStmt->update test
* catches the case where we are nesting in EmitTree for a labeled
* compound statement.
*/
if (bce->topStmt &&
bce->topStmt->type == STMT_LABEL &&
bce->topStmt->update >= bce->offset())
{
useful = true;
}
}
if (useful) {
JSOp op = wantval ? JSOP_POPV : JSOP_POP;
JS_ASSERT_IF(pn2->isKind(PNK_ASSIGN), pn2->isOp(JSOP_NOP));
#if JS_HAS_DESTRUCTURING
if (!wantval &&
pn2->isKind(PNK_ASSIGN) &&
!MaybeEmitGroupAssignment(cx, bce, op, pn2, GroupIsNotDecl, &op))
{
return false;
}
#endif
if (op != JSOP_NOP) {
if (!EmitTree(cx, bce, pn2))
return false;
if (Emit1(cx, bce, op) < 0)
return false;
}
} else if (!pn->isDirectivePrologueMember()) {
/* Don't complain about directive prologue members; just don't emit their code. */
bce->current->currentLine = bce->parser->tokenStream.srcCoords.lineNum(pn2->pn_pos.begin);
bce->current->lastColumn = 0;
if (!bce->reportStrictWarning(pn2, JSMSG_USELESS_EXPR))
return false;
}
return true;
}
static bool
EmitDelete(JSContext *cx, BytecodeEmitter *bce, ParseNode *pn)
{
/*
* Under ECMA 3, deleting a non-reference returns true -- but alas we
* must evaluate the operand if it appears it might have side effects.
*/
ParseNode *pn2 = pn->pn_kid;
switch (pn2->getKind()) {
case PNK_NAME:
{
if (!BindNameToSlot(cx, bce, pn2))
return false;
JSOp op = pn2->getOp();
if (op == JSOP_FALSE) {
if (Emit1(cx, bce, op) < 0)
return false;
} else {
if (!EmitAtomOp(cx, pn2, op, bce))
return false;
}
break;
}
case PNK_DOT:
if (!EmitPropOp(cx, pn2, JSOP_DELPROP, bce, false))
return false;
break;
case PNK_ELEM:
if (!EmitElemOp(cx, pn2, JSOP_DELELEM, bce))
return false;
break;
default:
{
/*
* If useless, just emit JSOP_TRUE; otherwise convert delete foo()
* to foo(), true (a comma expression).
*/
bool useful = false;
if (!CheckSideEffects(cx, bce, pn2, &useful))
return false;
if (useful) {
JS_ASSERT_IF(pn2->isKind(PNK_CALL), !(pn2->pn_xflags & PNX_SETCALL));
if (!EmitTree(cx, bce, pn2))
return false;
if (Emit1(cx, bce, JSOP_POP) < 0)
return false;
}
if (Emit1(cx, bce, JSOP_TRUE) < 0)
return false;
}
}
return true;
}
static bool
EmitCallOrNew(JSContext *cx, BytecodeEmitter *bce, ParseNode *pn)
{
bool callop = pn->isKind(PNK_CALL);
/*
* Emit callable invocation or operator new (constructor call) code.
* First, emit code for the left operand to evaluate the callable or
* constructable object expression.
*
* For operator new, we emit JSOP_GETPROP instead of JSOP_CALLPROP, etc.
* This is necessary to interpose the lambda-initialized method read
* barrier -- see the code in jsinterp.cpp for JSOP_LAMBDA followed by
* JSOP_{SET,INIT}PROP.
*
* Then (or in a call case that has no explicit reference-base
* object) we emit JSOP_UNDEFINED to produce the undefined |this|
* value required for calls (which non-strict mode functions
* will box into the global object).
*/
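/*
* Illustrative shape (not verbatim) for a call |(expr)(a, b)| whose
* callee is a general expression (the default case below):
*
*     <expr>              (the callee)
*     UNDEFINED NOTEARG   (the undefined |this| described above)
*     <a> NOTEARG
*     <b> NOTEARG
*     CALL 2              (argc as a two-byte immediate)
*/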
uint32_t argc = pn->pn_count - 1;
if (argc >= ARGC_LIMIT) {
JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL,
callop ? JSMSG_TOO_MANY_FUN_ARGS : JSMSG_TOO_MANY_CON_ARGS);
return false;
}
bool emitArgs = true;
ParseNode *pn2 = pn->pn_head;
switch (pn2->getKind()) {
case PNK_NAME:
if (bce->emitterMode == BytecodeEmitter::SelfHosting &&
pn2->name() == cx->names().callFunction)
{
/*
* Special-casing of callFunction to emit bytecode that directly
* invokes the callee with the correct |this| object and arguments.
* callFunction(fun, thisArg, ...args) thus becomes:
* - emit lookup for fun
* - emit lookup for thisArg
* - emit lookups for ...args
*
* argc is set to the number of actually emitted args, and the
* emission of args below is disabled by setting emitArgs to false.
*/
if (pn->pn_count < 3) {
bce->reportError(pn, JSMSG_MORE_ARGS_NEEDED, "callFunction", "1", "s");
return false;
}
ParseNode *funNode = pn2->pn_next;
if (!EmitTree(cx, bce, funNode))
return false;
ParseNode *thisArg = funNode->pn_next;
if (!EmitTree(cx, bce, thisArg))
return false;
if (Emit1(cx, bce, JSOP_NOTEARG) < 0)
return false;
bool oldEmittingForInit = bce->emittingForInit;
bce->emittingForInit = false;
for (ParseNode *argpn = thisArg->pn_next; argpn; argpn = argpn->pn_next) {
if (!EmitTree(cx, bce, argpn))
return false;
if (Emit1(cx, bce, JSOP_NOTEARG) < 0)
return false;
}
bce->emittingForInit = oldEmittingForInit;
argc -= 2;
emitArgs = false;
break;
}
if (!EmitNameOp(cx, bce, pn2, callop))
return false;
break;
case PNK_DOT:
if (!EmitPropOp(cx, pn2, pn2->getOp(), bce, callop))
return false;
break;
case PNK_ELEM:
JS_ASSERT(pn2->isOp(JSOP_GETELEM));
if (!EmitElemOp(cx, pn2, callop ? JSOP_CALLELEM : JSOP_GETELEM, bce))
return false;
break;
case PNK_FUNCTION:
/*
* Top level lambdas which are immediately invoked should be
* treated as only running once. Every time they execute we will
* create new types and scripts for their contents, to increase
* the quality of type information within them and enable more
* backend optimizations. Note that this does not depend on the
* lambda being invoked at most once (it may be named or be
* accessed via foo.caller indirection), as multiple executions
* will just cause the inner scripts to be repeatedly cloned.
*/
JS_ASSERT(!bce->emittingRunOnceLambda);
if (bce->checkSingletonContext()) {
bce->emittingRunOnceLambda = true;
if (!EmitTree(cx, bce, pn2))
return false;
bce->emittingRunOnceLambda = false;
} else {
if (!EmitTree(cx, bce, pn2))
return false;
}
callop = false;
break;
default:
if (!EmitTree(cx, bce, pn2))
return false;
callop = false; /* trigger JSOP_UNDEFINED after */
break;
}
if (!callop) {
JSOp thisop = pn->isKind(PNK_GENEXP) ? JSOP_THIS : JSOP_UNDEFINED;
if (Emit1(cx, bce, thisop) < 0)
return false;
if (Emit1(cx, bce, JSOP_NOTEARG) < 0)
return false;
}
if (emitArgs) {
/*
* Emit code for each argument in order, then emit the JSOP_*CALL or
* JSOP_NEW bytecode with a two-byte immediate telling how many args
* were pushed on the operand stack.
*/
bool oldEmittingForInit = bce->emittingForInit;
bce->emittingForInit = false;
for (ParseNode *pn3 = pn2->pn_next; pn3; pn3 = pn3->pn_next) {
if (!EmitTree(cx, bce, pn3))
return false;
if (Emit1(cx, bce, JSOP_NOTEARG) < 0)
return false;
}
bce->emittingForInit = oldEmittingForInit;
}
if (Emit3(cx, bce, pn->getOp(), ARGC_HI(argc), ARGC_LO(argc)) < 0)
return false;
CheckTypeSet(cx, bce, pn->getOp());
if (pn->isOp(JSOP_EVAL)) {
uint32_t lineNum = bce->parser->tokenStream.srcCoords.lineNum(pn->pn_pos.begin);
EMIT_UINT16_IMM_OP(JSOP_LINENO, lineNum);
}
if (pn->pn_xflags & PNX_SETCALL) {
if (Emit1(cx, bce, JSOP_SETCALL) < 0)
return false;
}
return true;
}
static bool
EmitLogical(JSContext *cx, BytecodeEmitter *bce, ParseNode *pn)
{
/*
* JSOP_OR converts the operand on the stack to boolean, leaves the original
* value on the stack and jumps if true; otherwise it falls into the next
* bytecode, which pops the left operand and then evaluates the right operand.
* The jump goes around the right operand evaluation.
*
* JSOP_AND converts the operand on the stack to boolean and jumps if false;
* otherwise it falls into the right operand's bytecode.
*/
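/*
* Illustrative shape (not verbatim) for |a || b|:
*
*     <a>
*     BACKPATCH done      (placeholder; rewritten to OR below)
*     POP                 (discard |a| when the jump is not taken)
*     <b>
*   done:
*/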
if (pn->isArity(PN_BINARY)) {
if (!EmitTree(cx, bce, pn->pn_left))
return false;
ptrdiff_t top = EmitJump(cx, bce, JSOP_BACKPATCH, 0);
if (top < 0)
return false;
if (Emit1(cx, bce, JSOP_POP) < 0)
return false;
if (!EmitTree(cx, bce, pn->pn_right))
return false;
ptrdiff_t off = bce->offset();
jsbytecode *pc = bce->code(top);
SET_JUMP_OFFSET(pc, off - top);
*pc = pn->getOp();
return true;
}
JS_ASSERT(pn->isArity(PN_LIST));
JS_ASSERT(pn->pn_head->pn_next->pn_next);
/* Left-associative operator chain: avoid too much recursion. */
ParseNode *pn2 = pn->pn_head;
if (!EmitTree(cx, bce, pn2))
return false;
ptrdiff_t top = EmitJump(cx, bce, JSOP_BACKPATCH, 0);
if (top < 0)
return false;
if (Emit1(cx, bce, JSOP_POP) < 0)
return false;
/* Emit nodes between the head and the tail. */
ptrdiff_t jmp = top;
while ((pn2 = pn2->pn_next)->pn_next) {
if (!EmitTree(cx, bce, pn2))
return false;
ptrdiff_t off = EmitJump(cx, bce, JSOP_BACKPATCH, 0);
if (off < 0)
return false;
if (Emit1(cx, bce, JSOP_POP) < 0)
return false;
SET_JUMP_OFFSET(bce->code(jmp), off - jmp);
jmp = off;
}
if (!EmitTree(cx, bce, pn2))
return false;
pn2 = pn->pn_head;
ptrdiff_t off = bce->offset();
do {
jsbytecode *pc = bce->code(top);
ptrdiff_t tmp = GET_JUMP_OFFSET(pc);
SET_JUMP_OFFSET(pc, off - top);
*pc = pn->getOp();
top += tmp;
} while ((pn2 = pn2->pn_next)->pn_next);
return true;
}
/*
* Using MOZ_NEVER_INLINE in here is a workaround for llvm.org/pr14047. See
* the comment on EmitSwitch.
*/
MOZ_NEVER_INLINE static bool
EmitIncOrDec(JSContext *cx, BytecodeEmitter *bce, ParseNode *pn)
{
/* Emit lvalue-specialized code for ++/-- operators. */
ParseNode *pn2 = pn->pn_kid;
switch (pn2->getKind()) {
case PNK_DOT:
if (!EmitPropIncDec(cx, pn, bce))
return false;
break;
case PNK_ELEM:
if (!EmitElemIncDec(cx, pn, bce))
return false;
break;
case PNK_CALL:
JS_ASSERT(pn2->pn_xflags & PNX_SETCALL);
if (!EmitTree(cx, bce, pn2))
return false;
break;
default:
JS_ASSERT(pn2->isKind(PNK_NAME));
pn2->setOp(JSOP_SETNAME);
if (!BindNameToSlot(cx, bce, pn2))
return false;
JSOp op = pn2->getOp();
bool maySet;
switch (op) {
case JSOP_SETLOCAL:
case JSOP_SETARG:
case JSOP_SETALIASEDVAR:
case JSOP_SETNAME:
case JSOP_SETGNAME:
maySet = true;
break;
default:
maySet = false;
}
if (op == JSOP_CALLEE) {
if (Emit1(cx, bce, op) < 0)
return false;
} else if (!pn2->pn_cookie.isFree()) {
if (maySet) {
if (!EmitVarIncDec(cx, pn, bce))
return false;
} else {
if (!EmitVarOp(cx, pn2, op, bce))
return false;
}
} else {
JS_ASSERT(JOF_OPTYPE(op) == JOF_ATOM);
if (maySet) {
if (!EmitNameIncDec(cx, pn, bce))
return false;
} else {
if (!EmitAtomOp(cx, pn2, op, bce))
return false;
}
break;
}
if (pn2->isConst()) {
if (Emit1(cx, bce, JSOP_POS) < 0)
return false;
bool post;
JSOp binop = GetIncDecInfo(pn->getKind(), &post);
if (!post) {
if (Emit1(cx, bce, JSOP_ONE) < 0)
return false;
if (Emit1(cx, bce, binop) < 0)
return false;
}
}
}
return true;
}
/*
* Using MOZ_NEVER_INLINE in here is a workaround for llvm.org/pr14047. See
* the comment on EmitSwitch.
*/
MOZ_NEVER_INLINE static bool
EmitLabeledStatement(JSContext *cx, BytecodeEmitter *bce, const LabeledStatement *pn)
{
/*
* Emit a JSOP_LABEL instruction. The argument is the offset to the statement
* following the labeled statement.
*/
jsatomid index;
if (!bce->makeAtomIndex(pn->label(), &index))
return false;
ptrdiff_t top = EmitJump(cx, bce, JSOP_LABEL, 0);
if (top < 0)
return false;
/* Emit code for the labeled statement. */
StmtInfoBCE stmtInfo(cx);
PushStatementBCE(bce, &stmtInfo, STMT_LABEL, bce->offset());
stmtInfo.label = pn->label();
if (!EmitTree(cx, bce, pn->statement()))
return false;
if (!PopStatementBCE(cx, bce))
return false;
/* Patch the JSOP_LABEL offset. */
SetJumpOffsetAt(bce, top);
return true;
}
static bool
EmitSyntheticStatements(JSContext *cx, BytecodeEmitter *bce, ParseNode *pn, ptrdiff_t top)
{
JS_ASSERT(pn->isArity(PN_LIST));
StmtInfoBCE stmtInfo(cx);
PushStatementBCE(bce, &stmtInfo, STMT_SEQ, top);
ParseNode *pn2 = pn->pn_head;
if (pn->pn_xflags & PNX_DESTRUCT)
pn2 = pn2->pn_next;
for (; pn2; pn2 = pn2->pn_next) {
if (!EmitTree(cx, bce, pn2))
return false;
}
return PopStatementBCE(cx, bce);
}
static bool
EmitConditionalExpression(JSContext *cx, BytecodeEmitter *bce, ConditionalExpression &conditional)
{
/* Emit the condition, then branch if false to the else part. */
if (!EmitTree(cx, bce, &conditional.condition()))
return false;
ptrdiff_t noteIndex = NewSrcNote(cx, bce, SRC_COND);
if (noteIndex < 0)
return false;
ptrdiff_t beq = EmitJump(cx, bce, JSOP_IFEQ, 0);
if (beq < 0 || !EmitTree(cx, bce, &conditional.thenExpression()))
return false;
/* Jump around else, fixup the branch, emit else, fixup jump. */
ptrdiff_t jmp = EmitJump(cx, bce, JSOP_GOTO, 0);
if (jmp < 0)
return false;
SetJumpOffsetAt(bce, beq);
/*
* Because each branch pushes a single value, but our stack budgeting
* analysis ignores branches, we now have to adjust bce->stackDepth to
* ignore the value pushed by the first branch. Execution will follow
* only one path, so we must decrement bce->stackDepth.
*
* Failing to do this will foil code, such as let expression and block
* code generation, which must use the stack depth to compute local
* stack indexes correctly.
*/
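/*
* Worked example (illustrative): for |c ? a : b| at depth D, emitting
* |a| leaves the depth at D+1; we decrement back to D here so that
* emitting |b| on the else path also ends at D+1, matching the single
* value the whole expression pushes.
*/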
JS_ASSERT(bce->stackDepth > 0);
bce->stackDepth--;
if (!EmitTree(cx, bce, &conditional.elseExpression()))
return false;
SetJumpOffsetAt(bce, jmp);
return SetSrcNoteOffset(cx, bce, noteIndex, 0, jmp - beq);
}
/*
* Using MOZ_NEVER_INLINE in here is a workaround for llvm.org/pr14047. See
* the comment on EmitSwitch.
*/
MOZ_NEVER_INLINE static bool
EmitObject(JSContext *cx, BytecodeEmitter *bce, ParseNode *pn)
{
#if JS_HAS_DESTRUCTURING_SHORTHAND
if (pn->pn_xflags & PNX_DESTRUCT) {
bce->reportError(pn, JSMSG_BAD_OBJECT_INIT);
return false;
}
#endif
if (!(pn->pn_xflags & PNX_NONCONST) && pn->pn_head && bce->checkSingletonContext())
return EmitSingletonInitialiser(cx, bce, pn);
/*
* Emit code for {p:a, '%q':b, 2:c} that is equivalent to constructing
* a new object and in source order evaluating each property value and
* adding the property to the object, without invoking latent setters.
* We use the JSOP_NEWINIT and JSOP_INITELEM/JSOP_INITPROP bytecodes to
* ignore setters and to avoid dup'ing and popping the object as each
* property is added, as JSOP_SETELEM/JSOP_SETPROP would do.
*/
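/*
* Illustrative shape (not verbatim) for |{p: a, 2: b}|:
*
*     NEWINIT             (possibly rewritten to NEWOBJECT below)
*     <a> INITPROP "p"
*     2 <b> INITELEM      (numeric keys push the index first)
*     ENDINIT
*/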
ptrdiff_t offset = bce->offset();
if (!EmitNewInit(cx, bce, JSProto_Object, pn))
return false;
/*
* Try to construct the shape of the object as we go, so we can emit a
* JSOP_NEWOBJECT with the final shape instead.
*/
RootedObject obj(cx);
if (bce->script->compileAndGo) {
gc::AllocKind kind = GuessObjectGCKind(pn->pn_count);
obj = NewBuiltinClassInstance(cx, &ObjectClass, kind);
if (!obj)
return false;
}
for (ParseNode *pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next) {
/* Emit an index for t[2] for later consumption by JSOP_INITELEM. */
ParseNode *pn3 = pn2->pn_left;
if (pn3->isKind(PNK_NUMBER)) {
if (!EmitNumberOp(cx, pn3->pn_dval, bce))
return false;
}
/* Emit code for the property initializer. */
if (!EmitTree(cx, bce, pn2->pn_right))
return false;
JSOp op = pn2->getOp();
JS_ASSERT(op == JSOP_INITPROP ||
op == JSOP_INITPROP_GETTER ||
op == JSOP_INITPROP_SETTER);
if (op == JSOP_INITPROP_GETTER || op == JSOP_INITPROP_SETTER)
obj = NULL;
if (pn3->isKind(PNK_NUMBER)) {
obj = NULL;
switch (op) {
case JSOP_INITPROP: op = JSOP_INITELEM; break;
case JSOP_INITPROP_GETTER: op = JSOP_INITELEM_GETTER; break;
case JSOP_INITPROP_SETTER: op = JSOP_INITELEM_SETTER; break;
default: JS_NOT_REACHED("Invalid op");
}
if (Emit1(cx, bce, op) < 0)
return false;
} else {
JS_ASSERT(pn3->isKind(PNK_NAME) || pn3->isKind(PNK_STRING));
jsatomid index;
if (!bce->makeAtomIndex(pn3->pn_atom, &index))
return false;
/*
* Disable NEWOBJECT on initializers that set __proto__, which has
* a non-standard setter on objects.
*/
if (pn3->pn_atom == cx->names().proto)
obj = NULL;
if (obj) {
JS_ASSERT(!obj->inDictionaryMode());
Rooted<jsid> id(cx, AtomToId(pn3->pn_atom));
RootedValue undefinedValue(cx, UndefinedValue());
if (!DefineNativeProperty(cx, obj, id, undefinedValue, NULL, NULL,
JSPROP_ENUMERATE, 0, 0))
{
return false;
}
if (obj->inDictionaryMode())
obj = NULL;
}
if (!EmitIndex32(cx, op, index, bce))
return false;
}
}
if (Emit1(cx, bce, JSOP_ENDINIT) < 0)
return false;
if (obj) {
/*
* The object survived and has a predictable shape: update the original
* bytecode.
*/
ObjectBox *objbox = bce->parser->newObjectBox(obj);
if (!objbox)
return false;
unsigned index = bce->objectList.add(objbox);
MOZ_STATIC_ASSERT(JSOP_NEWINIT_LENGTH == JSOP_NEWOBJECT_LENGTH,
"newinit and newobject must have equal length to edit in-place");
EMIT_UINT32_IN_PLACE(offset, JSOP_NEWOBJECT, uint32_t(index));
}
return true;
}
static bool
EmitArray(JSContext *cx, BytecodeEmitter *bce, ParseNode *pn)
{
/*
* Emit code for [a, b, c] that is equivalent to constructing a new
* array and in source order evaluating each element value and adding
* it to the array, without invoking latent setters. We use the
* JSOP_NEWINIT and JSOP_INITELEM_ARRAY bytecodes to ignore setters and
* to avoid dup'ing and popping the array as each element is added, as
* JSOP_SETELEM/JSOP_SETPROP would do.
*/
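/*
* Illustrative shape (not verbatim) for |[a, b]| with no spread:
*
*     NEWARRAY 2
*     <a> INITELEM_ARRAY 0
*     <b> INITELEM_ARRAY 1
*     ENDINIT
*/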
#if JS_HAS_GENERATORS
if (pn->isKind(PNK_ARRAYCOMP)) {
if (!EmitNewInit(cx, bce, JSProto_Array, pn))
return false;
/*
* Pass the new array's stack index to the PNK_ARRAYPUSH case via
* bce->arrayCompDepth, then simply traverse the PNK_FOR node and
* its kids under pn->pn_head to generate this comprehension.
*/
JS_ASSERT(bce->stackDepth > 0);
unsigned saveDepth = bce->arrayCompDepth;
bce->arrayCompDepth = (uint32_t) (bce->stackDepth - 1);
if (!EmitTree(cx, bce, pn->pn_head))
return false;
bce->arrayCompDepth = saveDepth;
/* Emit the usual op needed for decompilation. */
return Emit1(cx, bce, JSOP_ENDINIT) >= 0;
}
#endif /* JS_HAS_GENERATORS */
if (!(pn->pn_xflags & PNX_NONCONST) && pn->pn_head && bce->checkSingletonContext())
return EmitSingletonInitialiser(cx, bce, pn);
int32_t nspread = 0;
for (ParseNode *elt = pn->pn_head; elt; elt = elt->pn_next) {
if (elt->isKind(PNK_SPREAD))
nspread++;
}
ptrdiff_t off = EmitN(cx, bce, JSOP_NEWARRAY, 3);
if (off < 0)
return false;
CheckTypeSet(cx, bce, JSOP_NEWARRAY);
jsbytecode *pc = bce->code(off);
// For arrays with spread, this is a very pessimistic allocation, the
// minimum possible final size.
SET_UINT24(pc, pn->pn_count - nspread);
ParseNode *pn2 = pn->pn_head;
jsatomid atomIndex;
if (nspread && !EmitNumberOp(cx, 0, bce))
return false;
for (atomIndex = 0; pn2; atomIndex++, pn2 = pn2->pn_next) {
if (pn2->isKind(PNK_ELISION)) {
if (Emit1(cx, bce, JSOP_HOLE) < 0)
return false;
} else {
ParseNode *expr = pn2->isKind(PNK_SPREAD) ? pn2->pn_kid : pn2;
if (!EmitTree(cx, bce, expr))
return false;
}
if (pn2->isKind(PNK_SPREAD)) {
if (Emit1(cx, bce, JSOP_SPREAD) < 0)
return false;
} else if (nspread) {
if (Emit1(cx, bce, JSOP_INITELEM_INC) < 0)
return false;
} else {
off = EmitN(cx, bce, JSOP_INITELEM_ARRAY, 3);
if (off < 0)
return false;
SET_UINT24(bce->code(off), atomIndex);
}
}
JS_ASSERT(atomIndex == pn->pn_count);
if (nspread) {
if (Emit1(cx, bce, JSOP_POP) < 0)
return false;
}
/* Emit an op to finish the array and aid in decompilation. */
return Emit1(cx, bce, JSOP_ENDINIT) >= 0;
}
static bool
EmitUnary(JSContext *cx, BytecodeEmitter *bce, ParseNode *pn)
{
if (!UpdateSourceCoordNotes(cx, bce, pn->pn_pos.begin))
return false;
/* Unary op, including unary +/-. */
JSOp op = pn->getOp();
ParseNode *pn2 = pn->pn_kid;
if (op == JSOP_TYPEOF && !pn2->isKind(PNK_NAME))
op = JSOP_TYPEOFEXPR;
bool oldEmittingForInit = bce->emittingForInit;
bce->emittingForInit = false;
if (!EmitTree(cx, bce, pn2))
return false;
bce->emittingForInit = oldEmittingForInit;
return Emit1(cx, bce, op) >= 0;
}
static bool
EmitDefaults(JSContext *cx, BytecodeEmitter *bce, ParseNode *pn)
{
JS_ASSERT(pn->isKind(PNK_ARGSBODY));
ParseNode *arg, *pnlast = pn->last();
for (arg = pn->pn_head; arg != pnlast; arg = arg->pn_next) {
if (!(arg->pn_dflags & PND_DEFAULT) || !arg->isKind(PNK_NAME))
continue;
if (!BindNameToSlot(cx, bce, arg))
return false;
if (!EmitVarOp(cx, arg, JSOP_GETARG, bce))
return false;
if (Emit1(cx, bce, JSOP_UNDEFINED) < 0)
return false;
if (Emit1(cx, bce, JSOP_STRICTEQ) < 0)
return false;
// Emit source note to enable ion compilation.
if (NewSrcNote(cx, bce, SRC_IF) < 0)
return false;
ptrdiff_t jump = EmitJump(cx, bce, JSOP_IFEQ, 0);
if (jump < 0)
return false;
if (!EmitTree(cx, bce, arg->expr()))
return false;
if (!EmitVarOp(cx, arg, JSOP_SETARG, bce))
return false;
if (Emit1(cx, bce, JSOP_POP) < 0)
return false;
SET_JUMP_OFFSET(bce->code(jump), bce->offset() - jump);
}
return true;
}
bool
frontend::EmitTree(JSContext *cx, BytecodeEmitter *bce, ParseNode *pn)
{
JS_CHECK_RECURSION(cx, return false);
EmitLevelManager elm(bce);
bool ok = true;
ptrdiff_t top = bce->offset();
pn->pn_offset = top;
/* Emit notes to tell the current bytecode's source line number. */
if (!UpdateLineNumberNotes(cx, bce, pn->pn_pos.begin))
return false;
switch (pn->getKind()) {
case PNK_FUNCTION:
ok = EmitFunc(cx, bce, pn);
break;
case PNK_ARGSBODY:
{
RootedFunction fun(cx, bce->sc->asFunctionBox()->function());
ParseNode *pnlast = pn->last();
// Carefully emit everything in the right order:
// 1. Destructuring
// 2. Functions
// 3. Defaults
ParseNode *pnchild = pnlast->pn_head;
if (pnlast->pn_xflags & PNX_DESTRUCT) {
// Assign the destructuring arguments before defining any functions,
// see bug 419662.
JS_ASSERT(pnchild->isKind(PNK_SEMI));
JS_ASSERT(pnchild->pn_kid->isKind(PNK_VAR) || pnchild->pn_kid->isKind(PNK_CONST));
if (!EmitTree(cx, bce, pnchild))
return false;
pnchild = pnchild->pn_next;
}
if (pnlast->pn_xflags & PNX_FUNCDEFS) {
// This block contains top-level function definitions. To ensure
// that we emit the bytecode defining them before the rest of code
// in the block we use a separate pass over functions. During the
// main pass later the emitter will add JSOP_NOP with source notes
// for the function to preserve the original function's position
// when decompiling.
//
// Currently this is used only for functions, as compile-as-we-go
// mode for scripts does not allow separate emitter passes.
for (ParseNode *pn2 = pnchild; pn2; pn2 = pn2->pn_next) {
if (pn2->isKind(PNK_FUNCTION) && pn2->functionIsHoisted()) {
if (!EmitTree(cx, bce, pn2))
return false;
}
}
}
if (fun->hasDefaults()) {
ParseNode *rest = NULL;
bool restIsDefn = false;
if (fun->hasRest()) {
JS_ASSERT(!bce->sc->asFunctionBox()->argumentsHasLocalBinding());
// Defaults with a rest parameter need special handling. The
// rest parameter needs to be undefined while defaults are being
// processed. To do this, we create the rest argument and let it
// sit on the stack while processing defaults. The rest
// parameter's slot is set to undefined for the duration of
// default processing.
rest = pn->pn_head;
while (rest->pn_next != pnlast)
rest = rest->pn_next;
restIsDefn = rest->isDefn();
if (Emit1(cx, bce, JSOP_REST) < 0)
return false;
CheckTypeSet(cx, bce, JSOP_REST);
// Only set the rest parameter if it's not aliased by a nested
// function in the body.
if (restIsDefn) {
if (Emit1(cx, bce, JSOP_UNDEFINED) < 0)
return false;
if (!BindNameToSlot(cx, bce, rest))
return false;
if (!EmitVarOp(cx, rest, JSOP_SETARG, bce))
return false;
if (Emit1(cx, bce, JSOP_POP) < 0)
return false;
}
}
if (!EmitDefaults(cx, bce, pn))
return false;
if (fun->hasRest()) {
if (restIsDefn && !EmitVarOp(cx, rest, JSOP_SETARG, bce))
return false;
if (Emit1(cx, bce, JSOP_POP) < 0)
return false;
}
}
for (ParseNode *pn2 = pn->pn_head; pn2 != pnlast; pn2 = pn2->pn_next) {
// Only bind the parameter if it's not aliased by a nested function
// in the body.
if (!pn2->isDefn())
continue;
if (!BindNameToSlot(cx, bce, pn2))
return false;
if (pn2->pn_next == pnlast && fun->hasRest() && !fun->hasDefaults()) {
// Fill rest parameter. We handled the case with defaults above.
JS_ASSERT(!bce->sc->asFunctionBox()->argumentsHasLocalBinding());
bce->switchToProlog();
if (Emit1(cx, bce, JSOP_REST) < 0)
return false;
CheckTypeSet(cx, bce, JSOP_REST);
if (!EmitVarOp(cx, pn2, JSOP_SETARG, bce))
return false;
if (Emit1(cx, bce, JSOP_POP) < 0)
return false;
bce->switchToMain();
}
}
ok = EmitTree(cx, bce, pnlast);
break;
}
case PNK_IF:
ok = EmitIf(cx, bce, pn);
break;
case PNK_SWITCH:
ok = EmitSwitch(cx, bce, pn);
break;
case PNK_WHILE:
ok = EmitWhile(cx, bce, pn, top);
break;
case PNK_DOWHILE:
ok = EmitDo(cx, bce, pn);
break;
case PNK_FOR:
ok = EmitFor(cx, bce, pn, top);
break;
case PNK_BREAK:
ok = EmitBreak(cx, bce, pn->as<BreakStatement>().label());
break;
case PNK_CONTINUE:
ok = EmitContinue(cx, bce, pn->as<ContinueStatement>().label());
break;
case PNK_WITH:
ok = EmitWith(cx, bce, pn);
break;
case PNK_TRY:
if (!EmitTry(cx, bce, pn))
return false;
break;
case PNK_CATCH:
if (!EmitCatch(cx, bce, pn))
return false;
break;
case PNK_VAR:
case PNK_CONST:
if (!EmitVariables(cx, bce, pn, InitializeVars))
return false;
break;
case PNK_RETURN:
ok = EmitReturn(cx, bce, pn);
break;
#if JS_HAS_GENERATORS
case PNK_YIELD:
JS_ASSERT(bce->sc->isFunctionBox());
if (pn->pn_kid) {
if (!EmitTree(cx, bce, pn->pn_kid))
return false;
} else {
if (Emit1(cx, bce, JSOP_UNDEFINED) < 0)
return false;
}
if (pn->pn_hidden && NewSrcNote(cx, bce, SRC_HIDDEN) < 0)
return false;
if (Emit1(cx, bce, JSOP_YIELD) < 0)
return false;
break;
#endif
case PNK_STATEMENTLIST:
ok = EmitStatementList(cx, bce, pn, top);
break;
case PNK_SEQ:
ok = EmitSyntheticStatements(cx, bce, pn, top);
break;
case PNK_SEMI:
ok = EmitStatement(cx, bce, pn);
break;
case PNK_LABEL:
ok = EmitLabeledStatement(cx, bce, &pn->as<LabeledStatement>());
break;
case PNK_COMMA:
{
for (ParseNode *pn2 = pn->pn_head; ; pn2 = pn2->pn_next) {
if (!EmitTree(cx, bce, pn2))
return false;
if (!pn2->pn_next)
break;
if (Emit1(cx, bce, JSOP_POP) < 0)
return false;
}
break;
}
case PNK_ASSIGN:
case PNK_ADDASSIGN:
case PNK_SUBASSIGN:
case PNK_BITORASSIGN:
case PNK_BITXORASSIGN:
case PNK_BITANDASSIGN:
case PNK_LSHASSIGN:
case PNK_RSHASSIGN:
case PNK_URSHASSIGN:
case PNK_MULASSIGN:
case PNK_DIVASSIGN:
case PNK_MODASSIGN:
if (!EmitAssignment(cx, bce, pn->pn_left, pn->getOp(), pn->pn_right))
return false;
break;
case PNK_CONDITIONAL:
ok = EmitConditionalExpression(cx, bce, pn->as<ConditionalExpression>());
break;
case PNK_OR:
case PNK_AND:
ok = EmitLogical(cx, bce, pn);
break;
case PNK_ADD:
case PNK_SUB:
case PNK_BITOR:
case PNK_BITXOR:
case PNK_BITAND:
case PNK_STRICTEQ:
case PNK_EQ:
case PNK_STRICTNE:
case PNK_NE:
case PNK_LT:
case PNK_LE:
case PNK_GT:
case PNK_GE:
case PNK_IN:
case PNK_INSTANCEOF:
case PNK_LSH:
case PNK_RSH:
case PNK_URSH:
case PNK_STAR:
case PNK_DIV:
case PNK_MOD:
if (pn->isArity(PN_LIST)) {
/* Left-associative operator chain: avoid too much recursion. */
ParseNode *pn2 = pn->pn_head;
if (!EmitTree(cx, bce, pn2))
return false;
JSOp op = pn->getOp();
while ((pn2 = pn2->pn_next) != NULL) {
if (!EmitTree(cx, bce, pn2))
return false;
if (Emit1(cx, bce, op) < 0)
return false;
}
} else {
/* Binary operators that evaluate both operands unconditionally. */
if (!EmitTree(cx, bce, pn->pn_left))
return false;
if (!EmitTree(cx, bce, pn->pn_right))
return false;
if (Emit1(cx, bce, pn->getOp()) < 0)
return false;
}
break;
case PNK_THROW:
case PNK_TYPEOF:
case PNK_VOID:
case PNK_NOT:
case PNK_BITNOT:
case PNK_POS:
case PNK_NEG:
ok = EmitUnary(cx, bce, pn);
break;
case PNK_PREINCREMENT:
case PNK_PREDECREMENT:
case PNK_POSTINCREMENT:
case PNK_POSTDECREMENT:
ok = EmitIncOrDec(cx, bce, pn);
break;
case PNK_DELETE:
ok = EmitDelete(cx, bce, pn);
break;
case PNK_DOT:
/*
* Pop a stack operand, convert it to object, get a property named by
* this bytecode's immediate-indexed atom operand, and push its value
* (not a reference to it).
*/
ok = EmitPropOp(cx, pn, pn->getOp(), bce, false);
break;
case PNK_ELEM:
/*
* Pop two operands, convert the left one to object and the right one
* to property name (atom or tagged int), get the named property, and
* push its value. Set the "obj" register to the result of ToObject
* on the left operand.
*/
ok = EmitElemOp(cx, pn, pn->getOp(), bce);
break;
case PNK_NEW:
case PNK_CALL:
case PNK_GENEXP:
ok = EmitCallOrNew(cx, bce, pn);
break;
case PNK_LEXICALSCOPE:
ok = EmitLexicalScope(cx, bce, pn);
break;
#if JS_HAS_BLOCK_SCOPE
case PNK_LET:
ok = pn->isArity(PN_BINARY)
? EmitLet(cx, bce, pn)
: EmitVariables(cx, bce, pn, InitializeVars);
break;
#endif /* JS_HAS_BLOCK_SCOPE */
#if JS_HAS_GENERATORS
case PNK_ARRAYPUSH: {
int slot;
/*
* The array object's stack index is in bce->arrayCompDepth. See the
* array initialiser code generator (EmitArray, above) for the array
* comprehension special casing. Note that the array object is a pure
* stack value, unaliased by blocks, so we can EmitUnaliasedVarOp.
*/
if (!EmitTree(cx, bce, pn->pn_kid))
return false;
slot = AdjustBlockSlot(cx, bce, bce->arrayCompDepth);
if (slot < 0)
return false;
if (!EmitUnaliasedVarOp(cx, pn->getOp(), slot, bce))
return false;
break;
}
#endif
case PNK_ARRAY:
#if JS_HAS_GENERATORS
case PNK_ARRAYCOMP:
#endif
ok = EmitArray(cx, bce, pn);
break;
case PNK_OBJECT:
ok = EmitObject(cx, bce, pn);
break;
case PNK_NAME:
if (!EmitNameOp(cx, bce, pn, false))
return false;
break;
case PNK_STRING:
ok = EmitAtomOp(cx, pn, pn->getOp(), bce);
break;
case PNK_NUMBER:
ok = EmitNumberOp(cx, pn->pn_dval, bce);
break;
case PNK_REGEXP:
ok = EmitRegExp(cx, bce->regexpList.add(pn->as<RegExpLiteral>().objbox()), bce);
break;
case PNK_TRUE:
case PNK_FALSE:
case PNK_THIS:
case PNK_NULL:
if (Emit1(cx, bce, pn->getOp()) < 0)
return false;
break;
case PNK_DEBUGGER:
if (!UpdateSourceCoordNotes(cx, bce, pn->pn_pos.begin))
return false;
if (Emit1(cx, bce, JSOP_DEBUGGER) < 0)
return false;
break;
case PNK_NOP:
JS_ASSERT(pn->getArity() == PN_NULLARY);
break;
case PNK_MODULE:
// TODO: Add emitter support for modules
bce->reportError(NULL, JSMSG_SYNTAX_ERROR);
return false;
default:
JS_ASSERT(0);
}
/* bce->emitLevel == 1 means we're last on the stack, so finish up. */
if (ok && bce->emitLevel == 1) {
if (!UpdateSourceCoordNotes(cx, bce, pn->pn_pos.end))
return false;
}
return ok;
}
static int
AllocSrcNote(JSContext *cx, SrcNotesVector &notes)
{
// Start it off moderately large to avoid repeated resizings early on.
if (notes.capacity() == 0 && !notes.reserve(1024))
return -1;
jssrcnote dummy = 0;
if (!notes.append(dummy)) {
js_ReportOutOfMemory(cx);
return -1;
}
return notes.length() - 1;
}
int
frontend::NewSrcNote(JSContext *cx, BytecodeEmitter *bce, SrcNoteType type)
{
SrcNotesVector &notes = bce->notes();
int index;
index = AllocSrcNote(cx, notes);
if (index < 0)
return -1;
/*
* Compute delta from the last annotated bytecode's offset. If it's too
* big to fit in sn, allocate one or more xdelta notes and reset sn.
*/
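/*
* Worked example (illustrative): if delta does not fit in a note's own
* delta field, the loop below peels off xdelta notes, each carrying up
* to SN_XDELTA_MASK of the distance, until the remainder is below
* SN_DELTA_LIMIT and fits in the note created above.
*/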
ptrdiff_t offset = bce->offset();
ptrdiff_t delta = offset - bce->lastNoteOffset();
bce->current->lastNoteOffset = offset;
if (delta >= SN_DELTA_LIMIT) {
do {
ptrdiff_t xdelta = Min(delta, SN_XDELTA_MASK);
SN_MAKE_XDELTA(&notes[index], xdelta);
delta -= xdelta;
index = AllocSrcNote(cx, notes);
if (index < 0)
return -1;
} while (delta >= SN_DELTA_LIMIT);
}
/*
* Initialize type and delta, then allocate the minimum number of notes
* needed for type's arity. Usually we won't need more, but if an offset
* turns out to need three bytes, SetSrcNoteOffset will grow notes.
*/
SN_MAKE_NOTE(&notes[index], type, delta);
for (int n = (int)js_SrcNoteSpec[type].arity; n > 0; n--) {
if (NewSrcNote(cx, bce, SRC_NULL) < 0)
return -1;
}
return index;
}
int
frontend::NewSrcNote2(JSContext *cx, BytecodeEmitter *bce, SrcNoteType type, ptrdiff_t offset)
{
int index;
index = NewSrcNote(cx, bce, type);
if (index >= 0) {
if (!SetSrcNoteOffset(cx, bce, index, 0, offset))
return -1;
}
return index;
}
int
frontend::NewSrcNote3(JSContext *cx, BytecodeEmitter *bce, SrcNoteType type, ptrdiff_t offset1,
ptrdiff_t offset2)
{
int index;
index = NewSrcNote(cx, bce, type);
if (index >= 0) {
if (!SetSrcNoteOffset(cx, bce, index, 0, offset1))
return -1;
if (!SetSrcNoteOffset(cx, bce, index, 1, offset2))
return -1;
}
return index;
}
bool
frontend::AddToSrcNoteDelta(JSContext *cx, BytecodeEmitter *bce, jssrcnote *sn, ptrdiff_t delta)
{
/*
* Called only from FinishTakingSrcNotes to add to main script note
* deltas, and only by a small positive amount.
*/
JS_ASSERT(bce->current == &bce->main);
JS_ASSERT((unsigned) delta < (unsigned) SN_XDELTA_LIMIT);
ptrdiff_t base = SN_DELTA(sn);
ptrdiff_t limit = SN_IS_XDELTA(sn) ? SN_XDELTA_LIMIT : SN_DELTA_LIMIT;
ptrdiff_t newdelta = base + delta;
if (newdelta < limit) {
SN_SET_DELTA(sn, newdelta);
} else {
jssrcnote xdelta;
SN_MAKE_XDELTA(&xdelta, delta);
if (!(sn = bce->main.notes.insert(sn, xdelta)))
return false;
}
return true;
}
static bool
SetSrcNoteOffset(JSContext *cx, BytecodeEmitter *bce, unsigned index, unsigned which,
ptrdiff_t offset)
{
if (size_t(offset) > SN_MAX_OFFSET) {
ReportStatementTooLarge(cx, bce->topStmt);
return false;
}
SrcNotesVector &notes = bce->notes();
/* Find the offset numbered which (i.e., skip exactly which offsets). */
jssrcnote *sn = notes.begin() + index;
JS_ASSERT(SN_TYPE(sn) != SRC_XDELTA);
JS_ASSERT((int) which < js_SrcNoteSpec[SN_TYPE(sn)].arity);
for (sn++; which; sn++, which--) {
if (*sn & SN_3BYTE_OFFSET_FLAG)
sn += 2;
}
/*
* See if the new offset requires three bytes, either because it is too
* big or because the offset has already been inflated (in which case we
* must stay big so as not to break the srcnote encoding if this isn't
* the last srcnote).
*/
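/*
* Illustrative encoding: an inflated offset occupies three jssrcnote
* bytes, SN_3BYTE_OFFSET_FLAG | (offset >> 16), then (offset >> 8),
* then the low byte, exactly as the stores below write it.
*/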
if (offset > (ptrdiff_t)SN_3BYTE_OFFSET_MASK || (*sn & SN_3BYTE_OFFSET_FLAG)) {
/* Maybe this offset was already set to a three-byte value. */
if (!(*sn & SN_3BYTE_OFFSET_FLAG)) {
/* Insert two dummy bytes that will be overwritten shortly. */
jssrcnote dummy = 0;
if (!(sn = notes.insert(sn, dummy)) ||
!(sn = notes.insert(sn, dummy)))
{
js_ReportOutOfMemory(cx);
return false;
}
}
*sn++ = (jssrcnote)(SN_3BYTE_OFFSET_FLAG | (offset >> 16));
*sn++ = (jssrcnote)(offset >> 8);
}
*sn = (jssrcnote)offset;
return true;
}
#ifdef DEBUG_notme
#define DEBUG_srcnotesize
#endif
#ifdef DEBUG_srcnotesize
#define NBINS 10
static uint32_t hist[NBINS];
static void
DumpSrcNoteSizeHist()
{
static FILE *fp;
int i, n;
if (!fp) {
fp = fopen("/tmp/srcnotes.hist", "w");
if (!fp)
return;
setvbuf(fp, NULL, _IONBF, 0);
}
fprintf(fp, "SrcNote size histogram:\n");
for (i = 0; i < NBINS; i++) {
fprintf(fp, "%4u %4u ", JS_BIT(i), hist[i]);
for (n = (int) JS_HOWMANY(hist[i], 10); n > 0; --n)
fputc('*', fp);
fputc('\n', fp);
}
fputc('\n', fp);
}
#endif
/*
* Fill in the storage at notes with prolog and main srcnotes; the space at
* notes was allocated using the BytecodeEmitter::countFinalSourceNotes()
* method from BytecodeEmitter.h. SO DON'T CHANGE THIS FUNCTION WITHOUT AT
* LEAST CHECKING WHETHER BytecodeEmitter::countFinalSourceNotes() NEEDS
* CORRESPONDING CHANGES!
*/
bool
frontend::FinishTakingSrcNotes(JSContext *cx, BytecodeEmitter *bce, jssrcnote *notes)
{
JS_ASSERT(bce->current == &bce->main);
unsigned prologCount = bce->prolog.notes.length();
if (prologCount && bce->prolog.currentLine != bce->firstLine) {
bce->switchToProlog();
if (NewSrcNote2(cx, bce, SRC_SETLINE, (ptrdiff_t)bce->firstLine) < 0)
return false;
prologCount = bce->prolog.notes.length();
bce->switchToMain();
} else {
/*
* Either no prolog srcnotes, or no line number change over prolog.
* We don't need a SRC_SETLINE, but we may need to adjust the offset
* of the first main note, by adding to its delta and possibly even
* prepending SRC_XDELTA notes to it to account for prolog bytecodes
* that came at and after the last annotated bytecode.
*/
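/*
* Sketch (illustrative): if the prolog ends a few bytecodes past its
* last annotated offset, the loop below folds that distance into the
* first main note's delta, prepending SRC_XDELTA notes when a single
* delta field cannot absorb it all.
*/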
ptrdiff_t offset = bce->prologOffset() - bce->prolog.lastNoteOffset;
JS_ASSERT(offset >= 0);
if (offset > 0 && bce->main.notes.length() != 0) {
/* NB: Use as much of the first main note's delta as we can. */
jssrcnote *sn = bce->main.notes.begin();
ptrdiff_t delta = SN_IS_XDELTA(sn)
? SN_XDELTA_MASK - (*sn & SN_XDELTA_MASK)
: SN_DELTA_MASK - (*sn & SN_DELTA_MASK);
if (offset < delta)
delta = offset;
for (;;) {
if (!AddToSrcNoteDelta(cx, bce, sn, delta))
return false;
offset -= delta;
if (offset == 0)
break;
delta = Min(offset, SN_XDELTA_MASK);
sn = bce->main.notes.begin();
}
}
}
unsigned mainCount = bce->main.notes.length();
unsigned totalCount = prologCount + mainCount;
if (prologCount)
PodCopy(notes, bce->prolog.notes.begin(), prologCount);
PodCopy(notes + prologCount, bce->main.notes.begin(), mainCount);
SN_MAKE_TERMINATOR(&notes[totalCount]);
return true;
}
bool
CGTryNoteList::append(JSTryNoteKind kind, unsigned stackDepth, size_t start, size_t end)
{
JS_ASSERT(unsigned(uint16_t(stackDepth)) == stackDepth);
JS_ASSERT(start <= end);
JS_ASSERT(size_t(uint32_t(start)) == start);
JS_ASSERT(size_t(uint32_t(end)) == end);
JSTryNote note;
note.kind = kind;
note.stackDepth = uint16_t(stackDepth);
note.start = uint32_t(start);
note.length = uint32_t(end - start);
return list.append(note);
}
void
CGTryNoteList::finish(TryNoteArray *array)
{
JS_ASSERT(length() == array->length);
for (unsigned i = 0; i < length(); i++)
array->vector[i] = list[i];
}
/*
* Find the index of the given object for the code generator.
*
* Since the emitter refers to each parsed object only once, for the index we
* use the number of already indexed objects. We also add the object to a list
* that we convert to a fixed-size array when we complete code generation; see
* js::CGObjectList::finish below.
*
* Most of the objects go to BytecodeEmitter::objectList, but for regexps we use
* a separate BytecodeEmitter::regexpList. In this way the emitted index can
* be directly used to store and fetch a reference to a cloned RegExp object
* that shares the same JSRegExp private data created for the object literal in
* objbox. We need a cloned object to hold lastIndex and other direct
* properties that should not be shared among threads sharing a precompiled
* function or script.
*
* If the code being compiled is function code, allocate a reserved slot in
* the cloned function object that shares its precompiled script with other
* cloned function objects and with the compiler-created clone-parent. There
* are nregexps = script->regexps()->length such reserved slots in each
* function object cloned from fun->object. NB: during compilation, a funobj
* slots element must never be allocated, because JSObject::allocSlot could
* hand out one of the slots that should be given to a regexp clone.
*
* If the code being compiled is global code, the cloned regexps are stored in
* fp->vars slots, and to protect regexp slots from GC we set fp->nvars to
* nregexps.
*
* The slots initially contain undefined or null. We populate them lazily when
* JSOP_REGEXP is executed for the first time.
*
* Why clone regexp objects? ECMA specifies that when a regular expression
* literal is scanned, a RegExp object is created. In the spec, compilation
* and execution happen indivisibly, but in this implementation and many of
* its embeddings, code is precompiled early and re-executed in multiple
* threads, or using multiple global objects, or both, for efficiency.
*
* In such cases, naively following ECMA leads to wrongful sharing of RegExp
* objects, which makes for collisions on the lastIndex property (especially
* for global regexps) and on any ad-hoc properties. Also, __proto__ refers to
* the pre-compilation prototype, a pigeon-hole problem for instanceof tests.
*/
unsigned
CGObjectList::add(ObjectBox *objbox)
{
JS_ASSERT(!objbox->emitLink);
objbox->emitLink = lastbox;
lastbox = objbox;
return length++;
}
unsigned
CGObjectList::indexOf(JSObject *obj)
{
JS_ASSERT(length > 0);
unsigned index = length - 1;
for (ObjectBox *box = lastbox; box->object != obj; box = box->emitLink)
index--;
return index;
}
void
CGObjectList::finish(ObjectArray *array)
{
JS_ASSERT(length <= INDEX_LIMIT);
JS_ASSERT(length == array->length);
js::HeapPtrObject *cursor = array->vector + array->length;
ObjectBox *objbox = lastbox;
do {
--cursor;
JS_ASSERT(!*cursor);
*cursor = objbox->object;
} while ((objbox = objbox->emitLink) != NULL);
JS_ASSERT(cursor == array->vector);
}
void
CGConstList::finish(ConstArray *array)
{
JS_ASSERT(length() == array->length);
for (unsigned i = 0; i < length(); i++)
array->vector[i] = list[i];
}
/*
* We should try to get rid of offsetBias (always 0 or 1, where 1 is
* JSOP_{NOP,POP}_LENGTH), which is used only by SRC_FOR.
*/
const JSSrcNoteSpec js_SrcNoteSpec[] = {
/* 0 */ {"null", 0},
/* 1 */ {"if", 0},
/* 2 */ {"if-else", 1},
/* 3 */ {"cond", 1},
/* 4 */ {"for", 3},
/* 5 */ {"while", 1},
/* 6 */ {"for-in", 1},
/* 7 */ {"continue", 0},
/* 8 */ {"break", 0},
/* 9 */ {"break2label", 0},
/* 10 */ {"switchbreak", 0},
/* 11 */ {"tableswitch", 1},
/* 12 */ {"condswitch", 2},
/* 13 */ {"nextcase", 1},
/* 14 */ {"assignop", 0},
/* 15 */ {"hidden", 0},
/* 16 */ {"catch", 0},
/* 17 */ {"colspan", 1},
/* 18 */ {"newline", 0},
/* 19 */ {"setline", 1},
/* 20 */ {"unused20", 0},
/* 21 */ {"unused21", 0},
/* 22 */ {"unused22", 0},
/* 23 */ {"unused23", 0},
/* 24 */ {"xdelta", 0},
};
JS_FRIEND_API(unsigned)
js_SrcNoteLength(jssrcnote *sn)
{
unsigned arity;
jssrcnote *base;
arity = (int)js_SrcNoteSpec[SN_TYPE(sn)].arity;
for (base = sn++; arity; sn++, arity--) {
if (*sn & SN_3BYTE_OFFSET_FLAG)
sn += 2;
}
return sn - base;
}
JS_FRIEND_API(ptrdiff_t)
js_GetSrcNoteOffset(jssrcnote *sn, unsigned which)
{
/* Find the offset numbered which (i.e., skip exactly which offsets). */
JS_ASSERT(SN_TYPE(sn) != SRC_XDELTA);
JS_ASSERT((int) which < js_SrcNoteSpec[SN_TYPE(sn)].arity);
for (sn++; which; sn++, which--) {
if (*sn & SN_3BYTE_OFFSET_FLAG)
sn += 2;
}
if (*sn & SN_3BYTE_OFFSET_FLAG) {
return (ptrdiff_t)(((uint32_t)(sn[0] & SN_3BYTE_OFFSET_MASK) << 16)
| (sn[1] << 8)
| sn[2]);
}
return (ptrdiff_t)*sn;
}