blob: 0a6cc35ad1c9c4f75be95b2befd108c4cdcb6144 [file] [log] [blame]
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
* vim: set ts=8 sts=4 et sw=4 tw=99:
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
/*
* JS bytecode generation.
*/
#include "frontend/BytecodeEmitter.h"
#include "mozilla/ArrayUtils.h"
#include "mozilla/DebugOnly.h"
#include "mozilla/FloatingPoint.h"
#include "mozilla/Maybe.h"
#include "mozilla/PodOperations.h"
#include "mozilla/UniquePtr.h"
#include <string.h>
#include "jsapi.h"
#include "jsatom.h"
#include "jscntxt.h"
#include "jsfun.h"
#include "jsnum.h"
#include "jsopcode.h"
#include "jsscript.h"
#include "jstypes.h"
#include "jsutil.h"
#include "asmjs/AsmJSLink.h"
#include "frontend/Parser.h"
#include "frontend/TokenStream.h"
#include "vm/Debugger.h"
#include "vm/GeneratorObject.h"
#include "vm/Stack.h"
#include "jsatominlines.h"
#include "jsobjinlines.h"
#include "jsscriptinlines.h"
#include "frontend/ParseMaps-inl.h"
#include "frontend/ParseNode-inl.h"
#include "vm/NativeObject-inl.h"
#include "vm/ScopeObject-inl.h"
using namespace js;
using namespace js::gc;
using namespace js::frontend;
using mozilla::Maybe;
using mozilla::Some;
using mozilla::DebugOnly;
using mozilla::NumberIsInt32;
using mozilla::PodCopy;
using mozilla::UniquePtr;
// Per-statement bookkeeping for the bytecode emitter: enclosing-statement
// links plus the backpatch chains used to fix up break/continue jumps.
struct frontend::StmtInfoBCE : public StmtInfoBase
{
    StmtInfoBCE* enclosing;       // next-outer statement on the statement stack
    StmtInfoBCE* enclosingScope;  // presumably the next-outer scope-linking statement — TODO confirm

    ptrdiff_t update;             /* loop update offset (top if none) */
    ptrdiff_t breaks;             /* offset of last break in loop */
    ptrdiff_t continues;          /* offset of last continue in loop */
    uint32_t blockScopeIndex;     /* index of scope in BlockScopeArray */

    explicit StmtInfoBCE(ExclusiveContext* cx) : StmtInfoBase(cx) {}

    // Record the loop top and reset the break/continue backpatch chains
    // (-1 marks an empty chain; see backPatch()).
    void setTop(ptrdiff_t top) {
        update = top;
        breaks = -1;
        continues = -1;
    }

    /*
     * To reuse space, alias two of the ptrdiff_t fields for use during
     * try/catch/finally code generation and backpatching.
     *
     * Only a loop, switch, or label statement info record can have breaks and
     * continues, and only a for loop has an update backpatch chain, so it's
     * safe to overlay these for the "trying" StmtTypes.
     */
    ptrdiff_t& gosubs() {
        MOZ_ASSERT(type == StmtType::FINALLY);
        return breaks;
    }

    ptrdiff_t& guardJump() {
        MOZ_ASSERT(type == StmtType::TRY || type == StmtType::FINALLY);
        return continues;
    }
};
// Statement info for loops: adds the depth data needed to decide whether
// Ion may perform on-stack replacement (OSR) at this loop's entry.
struct frontend::LoopStmtInfo : public StmtInfoBCE
{
    int32_t stackDepth;  // Stack depth when this loop was pushed.
    uint32_t loopDepth;  // Loop depth.

    // Can we OSR into Ion from here? True unless there is non-loop state on the stack.
    bool canIonOsr;

    explicit LoopStmtInfo(ExclusiveContext* cx) : StmtInfoBCE(cx) {}

    // Checked downcast; only valid when |stmt| is actually a loop.
    static LoopStmtInfo* fromStmtInfo(StmtInfoBCE* stmt) {
        MOZ_ASSERT(stmt->isLoop());
        return static_cast<LoopStmtInfo*>(stmt);
    }
};
// Construct an emitter for one script/function. |parent| is the emitter of
// the enclosing compilation (or null); |prologue| and |main| are the two
// emit sections, with |current| starting at |main|.
BytecodeEmitter::BytecodeEmitter(BytecodeEmitter* parent,
                                 Parser<FullParseHandler>* parser, SharedContext* sc,
                                 HandleScript script, Handle<LazyScript*> lazyScript,
                                 bool insideEval, HandleScript evalCaller,
                                 bool insideNonGlobalEval, uint32_t lineNum,
                                 EmitterMode emitterMode)
  : sc(sc),
    cx(sc->context),
    parent(parent),
    script(cx, script),
    lazyScript(cx, lazyScript),
    prologue(cx, lineNum),
    main(cx, lineNum),
    current(&main),
    parser(parser),
    evalCaller(evalCaller),
    stmtStack(cx),
    atomIndices(cx),
    firstLine(lineNum),
    localsToFrameSlots_(cx),
    stackDepth(0), maxStackDepth(0),
    arrayCompDepth(0),
    emitLevel(0),
    constList(cx),
    tryNoteList(cx),
    blockScopeList(cx),
    yieldOffsetList(cx),
    typesetCount(0),
    hasSingletons(false),
    hasTryFinally(false),
    emittingForInit(false),
    emittingRunOnceLambda(false),
    insideEval(insideEval),
    insideNonGlobalEval(insideNonGlobalEval),
    insideModule(false),
    emitterMode(emitterMode)
{
    // An eval caller implies we are inside eval; a lazy-function emitter
    // must have the lazy script it re-compiles.
    MOZ_ASSERT_IF(evalCaller, insideEval);
    MOZ_ASSERT_IF(emitterMode == LazyFunction, lazyScript);
}
bool
BytecodeEmitter::init()
{
    // Fallible second-stage initialization: allocate the atom-to-index map
    // before any emission starts.
    if (!atomIndices.ensureMap(cx))
        return false;
    return true;
}
bool
BytecodeEmitter::updateLocalsToFrameSlots()
{
    // Assign stack slots to unaliased locals (aliased locals are stored in the
    // call object and don't need their own stack slots). We do this by filling
    // a Vector that can be used to map a local to its stack slot.

    if (localsToFrameSlots_.length() == script->bindings.numLocals()) {
        // CompileScript calls updateNumBlockScoped to update the block scope
        // depth. Do nothing if the depth didn't change.
        return true;
    }

    localsToFrameSlots_.clear();

    // Reserve up front so the appends below are infallible.
    if (!localsToFrameSlots_.reserve(script->bindings.numLocals()))
        return false;

    uint32_t slot = 0;
    for (BindingIter bi(script); !bi.done(); bi++) {
        // Arguments are not locals; skip them entirely.
        if (bi->kind() == Binding::ARGUMENT)
            continue;

        // Aliased locals get a sentinel (no frame slot); unaliased locals
        // get the next consecutive stack slot.
        if (bi->aliased())
            localsToFrameSlots_.infallibleAppend(UINT32_MAX);
        else
            localsToFrameSlots_.infallibleAppend(slot++);
    }

    // Block-scoped locals follow the body-level locals.
    for (size_t i = 0; i < script->bindings.numBlockScoped(); i++)
        localsToFrameSlots_.infallibleAppend(slot++);

    return true;
}
// Grow the bytecode vector by |delta| bytes and return (via |offset|) the
// offset at which the new bytes begin. Reports OOM on failure.
bool
BytecodeEmitter::emitCheck(ptrdiff_t delta, ptrdiff_t* offset)
{
    *offset = code().length();

    // Start it off moderately large to avoid repeated resizings early on.
    // ~98% of cases fit within 1024 bytes.
    if (code().capacity() == 0 && !code().reserve(1024)) {
        // Fix: report OOM here as well; previously this path returned
        // false silently, unlike the growBy failure path below.
        ReportOutOfMemory(cx);
        return false;
    }

    if (!code().growBy(delta)) {
        ReportOutOfMemory(cx);
        return false;
    }
    return true;
}
void
BytecodeEmitter::updateDepth(ptrdiff_t target)
{
    // Model the stack effect of the op just emitted at |target|, keeping
    // the running and maximum stack depths in sync.
    jsbytecode* pc = code(target);
    int uses = StackUses(nullptr, pc);
    int defs = StackDefs(nullptr, pc);

    stackDepth -= uses;
    MOZ_ASSERT(stackDepth >= 0);
    stackDepth += defs;

    if (uint32_t(stackDepth) > maxStackDepth)
        maxStackDepth = stackDepth;
}
#ifdef DEBUG
bool
BytecodeEmitter::checkStrictOrSloppy(JSOp op)
{
    // Strict-only ops must not appear in sloppy code, and vice versa.
    bool strict = sc->strict();
    if (IsCheckStrictOp(op) && !strict)
        return false;
    if (IsCheckSloppyOp(op) && strict)
        return false;
    return true;
}
#endif
// Emit a single opcode with no immediate operands.
bool
BytecodeEmitter::emit1(JSOp op)
{
    MOZ_ASSERT(checkStrictOrSloppy(op));

    ptrdiff_t where;
    if (!emitCheck(1, &where))
        return false;

    code(where)[0] = jsbytecode(op);
    updateDepth(where);
    return true;
}
// Emit an opcode followed by one immediate byte.
bool
BytecodeEmitter::emit2(JSOp op, uint8_t op1)
{
    MOZ_ASSERT(checkStrictOrSloppy(op));

    ptrdiff_t where;
    if (!emitCheck(2, &where))
        return false;

    jsbytecode* pc = code(where);
    pc[0] = jsbytecode(op);
    pc[1] = jsbytecode(op1);
    updateDepth(where);
    return true;
}
// Emit an opcode followed by two immediate bytes.
bool
BytecodeEmitter::emit3(JSOp op, jsbytecode op1, jsbytecode op2)
{
    MOZ_ASSERT(checkStrictOrSloppy(op));

    /* These should filter through emitVarOp. */
    MOZ_ASSERT(!IsArgOp(op));
    MOZ_ASSERT(!IsLocalOp(op));

    ptrdiff_t where;
    if (!emitCheck(3, &where))
        return false;

    jsbytecode* pc = code(where);
    pc[0] = jsbytecode(op);
    pc[1] = op1;
    pc[2] = op2;
    updateDepth(where);
    return true;
}
// Emit an opcode reserving |extra| operand bytes that the caller fills in.
bool
BytecodeEmitter::emitN(JSOp op, size_t extra, ptrdiff_t* offset)
{
    MOZ_ASSERT(checkStrictOrSloppy(op));

    const ptrdiff_t length = 1 + ptrdiff_t(extra);
    ptrdiff_t where;
    if (!emitCheck(length, &where))
        return false;

    code(where)[0] = jsbytecode(op);
    /* The remaining |extra| bytes are set by the caller */

    // Skip updateDepth when the op's use count comes from an immediate
    // operand that has not been written yet (nuses < 0).
    if (CodeSpec[op].nuses >= 0)
        updateDepth(where);

    if (offset)
        *offset = where;
    return true;
}
// Emit a 5-byte jump op with a signed 32-bit relative offset operand.
// Optionally returns the offset of the jump instruction itself.
bool
BytecodeEmitter::emitJump(JSOp op, ptrdiff_t off, ptrdiff_t* jumpOffset)
{
    ptrdiff_t where;
    if (!emitCheck(5, &where))
        return false;

    jsbytecode* pc = code(where);
    pc[0] = jsbytecode(op);
    SET_JUMP_OFFSET(pc, off);
    updateDepth(where);

    if (jumpOffset)
        *jumpOffset = where;
    return true;
}
// Emit a call-style op carrying a 16-bit argument count, attributing it to
// |pn|'s source position when a node is supplied.
bool
BytecodeEmitter::emitCall(JSOp op, uint16_t argc, ParseNode* pn)
{
    if (pn) {
        if (!updateSourceCoordNotes(pn->pn_pos.begin))
            return false;
    }
    return emit3(op, ARGC_HI(argc), ARGC_LO(argc));
}
bool
BytecodeEmitter::emitDupAt(unsigned slotFromTop)
{
MOZ_ASSERT(slotFromTop < unsigned(stackDepth));
if (slotFromTop >= JS_BIT(24)) {
reportError(nullptr, JSMSG_TOO_MANY_LOCALS);
return false;
}
ptrdiff_t off;
if (!emitN(JSOP_DUPAT, 3, &off))
return false;
jsbytecode* pc = code(off);
SET_UINT24(pc, slotFromTop);
return true;
}
/* XXX too many "... statement" L10N gaffes below -- fix via js.msg! */
const char js_with_statement_str[] = "with statement";
const char js_finally_block_str[]  = "finally block";

// Human-readable names for diagnostics, indexed by StmtType. Entry order
// must match the StmtType enum exactly (see static_assert below).
static const char * const statementName[] = {
    "label statement",       /* LABEL */
    "if statement",          /* IF */
    "else statement",        /* ELSE */
    "destructuring body",    /* BODY */
    "switch statement",      /* SWITCH */
    "block",                 /* BLOCK */
    js_with_statement_str,   /* WITH */
    "catch block",           /* CATCH */
    "try block",             /* TRY */
    js_finally_block_str,    /* FINALLY */
    js_finally_block_str,    /* SUBROUTINE */
    "do loop",               /* DO_LOOP */
    "for loop",              /* FOR_LOOP */
    "for/in loop",           /* FOR_IN_LOOP */
    "for/of loop",           /* FOR_OF_LOOP */
    "while loop",            /* WHILE_LOOP */
    "spread",                /* SPREAD */
};

static_assert(MOZ_ARRAY_LENGTH(statementName) == uint16_t(StmtType::LIMIT),
              "statementName array and StmtType enum must be consistent");
// Diagnostic name for |stmt|; a null statement means whole-script scope.
static const char*
StatementName(StmtInfoBCE* stmt)
{
    return stmt ? statementName[uint16_t(stmt->type)] : js_script_str;
}
// Report that |stmt| grew past the bytecode span limits.
static void
ReportStatementTooLarge(TokenStream& ts, StmtInfoBCE* stmt)
{
    const char* name = StatementName(stmt);
    ts.reportError(JSMSG_NEED_DIET, name);
}
/*
 * Emit a backpatch op with offset pointing to the previous jump of this type,
 * so that we can walk back up the chain fixing up the op and jump offset.
 */
bool
BytecodeEmitter::emitBackPatchOp(ptrdiff_t* lastp)
{
    // |*lastp| holds the offset of the previous jump in this chain. Encode
    // the distance back to it as this jump's operand, then make |*lastp|
    // point at the new jump. backPatch() later walks these deltas.
    ptrdiff_t delta = offset() - *lastp;
    *lastp = offset();
    MOZ_ASSERT(delta > 0);
    return emitJump(JSOP_BACKPATCH, delta);
}
// Byte length of an SRC_SETLINE note: one byte for the note itself plus a
// 1- or 4-byte operand depending on the magnitude of |line|.
static inline unsigned
LengthOfSetLine(unsigned line)
{
    if (line > SN_4BYTE_OFFSET_MASK)
        return 1 + 4;
    return 1 + 1;
}
/* Updates line number notes, not column notes. */
bool
BytecodeEmitter::updateLineNumberNotes(uint32_t offset)
{
    TokenStream* ts = &parser->tokenStream;
    bool onThisLine;

    // isOnThisLine is fallible (it can hit OOM computing line info).
    if (!ts->srcCoords.isOnThisLine(offset, currentLine(), &onThisLine))
        return ts->reportError(JSMSG_OUT_OF_MEMORY);

    if (!onThisLine) {
        unsigned line = ts->srcCoords.lineNum(offset);
        unsigned delta = line - currentLine();

        /*
         * Encode any change in the current source line number by using
         * either several SRC_NEWLINE notes or just one SRC_SETLINE note,
         * whichever consumes less space.
         *
         * NB: We handle backward line number deltas (possible with for
         * loops where the update part is emitted after the body, but its
         * line number is <= any line number in the body) here by letting
         * unsigned delta_ wrap to a very large number, which triggers a
         * SRC_SETLINE.
         */
        current->currentLine = line;
        current->lastColumn = 0;
        if (delta >= LengthOfSetLine(line)) {
            // One absolute SRC_SETLINE is cheaper than |delta| newlines.
            if (!newSrcNote2(SRC_SETLINE, ptrdiff_t(line)))
                return false;
        } else {
            // A short run of SRC_NEWLINE notes is cheaper.
            do {
                if (!newSrcNote(SRC_NEWLINE))
                    return false;
            } while (--delta != 0);
        }
    }
    return true;
}
/* Updates the line number and column number information in the source notes. */
bool
BytecodeEmitter::updateSourceCoordNotes(uint32_t offset)
{
    // Line notes first; they also reset current->lastColumn on a new line.
    if (!updateLineNumberNotes(offset))
        return false;

    uint32_t columnIndex = parser->tokenStream.srcCoords.columnIndex(offset);
    ptrdiff_t colspan = ptrdiff_t(columnIndex) - ptrdiff_t(current->lastColumn);
    if (colspan != 0) {
        // If the column span is so large that we can't store it, then just
        // discard this information. This can happen with minimized or otherwise
        // machine-generated code. Even gigantic column numbers are still
        // valuable if you have a source map to relate them to something real;
        // but it's better to fail soft here.
        if (!SN_REPRESENTABLE_COLSPAN(colspan))
            return true;
        if (!newSrcNote2(SRC_COLSPAN, SN_COLSPAN_TO_OFFSET(colspan)))
            return false;
        current->lastColumn = columnIndex;
    }
    return true;
}
bool
BytecodeEmitter::emitLoopHead(ParseNode* nextpn)
{
    if (nextpn) {
        /*
         * Try to give the JSOP_LOOPHEAD the same line number as the next
         * instruction. nextpn is often a block, in which case the next
         * instruction typically comes from the first statement inside.
         */
        MOZ_ASSERT_IF(nextpn->isKind(PNK_STATEMENTLIST), nextpn->isArity(PN_LIST));
        ParseNode* target = nextpn;
        if (target->isKind(PNK_STATEMENTLIST) && target->pn_head)
            target = target->pn_head;
        if (!updateSourceCoordNotes(target->pn_pos.begin))
            return false;
    }

    return emit1(JSOP_LOOPHEAD);
}
bool
BytecodeEmitter::emitLoopEntry(ParseNode* nextpn)
{
    if (nextpn) {
        /* Update the line number, as for LOOPHEAD. */
        MOZ_ASSERT_IF(nextpn->isKind(PNK_STATEMENTLIST), nextpn->isArity(PN_LIST));
        ParseNode* target = nextpn;
        if (target->isKind(PNK_STATEMENTLIST) && target->pn_head)
            target = target->pn_head;
        if (!updateSourceCoordNotes(target->pn_pos.begin))
            return false;
    }

    LoopStmtInfo* loop = LoopStmtInfo::fromStmtInfo(innermostStmt());
    MOZ_ASSERT(loop->loopDepth > 0);

    // Pack the loop nesting depth and Ion-OSR eligibility into the single
    // uint8 operand of JSOP_LOOPENTRY.
    uint8_t packed = PackLoopEntryDepthHintAndFlags(loop->loopDepth, loop->canIonOsr);
    return emit2(JSOP_LOOPENTRY, packed);
}
// Count ops that need a TypeSet entry, saturating at UINT16_MAX.
void
BytecodeEmitter::checkTypeSet(JSOp op)
{
    if ((CodeSpec[op].format & JOF_TYPESET) && typesetCount < UINT16_MAX)
        typesetCount++;
}
// Emit an op whose operand is a 16-bit unsigned value split across two
// immediate bytes.
bool
BytecodeEmitter::emitUint16Operand(JSOp op, uint32_t operand)
{
    MOZ_ASSERT(operand <= UINT16_MAX);

    bool ok = emit3(op, UINT16_HI(operand), UINT16_LO(operand));
    if (ok)
        checkTypeSet(op);
    return ok;
}
// Emit an op with a 4-byte unsigned immediate operand.
bool
BytecodeEmitter::emitUint32Operand(JSOp op, uint32_t operand)
{
    ptrdiff_t where;
    if (!emitN(op, 4, &where))
        return false;

    SET_UINT32(code(where), operand);
    checkTypeSet(op);
    return true;
}
bool
BytecodeEmitter::flushPops(int* npops)
{
MOZ_ASSERT(*npops != 0);
if (!emitUint16Operand(JSOP_POPN, *npops))
return false;
*npops = 0;
return true;
}
namespace {

// RAII helper for emitting the bytecode that a non-local jump (break,
// continue, return across scopes) needs: it pops scopes and iterators down
// to the jump target, records temporary block-scope notes for the popped
// scopes, and on destruction closes those notes and restores the emitter's
// stack depth.
class NonLocalExitScope {
    BytecodeEmitter* bce;
    const uint32_t savedScopeIndex;  // blockScopeList length at construction
    const int savedDepth;            // stack depth at construction
    uint32_t openScopeIndex;         // innermost open scope note, or UINT32_MAX

    NonLocalExitScope(const NonLocalExitScope&) = delete;

  public:
    explicit NonLocalExitScope(BytecodeEmitter* bce_)
      : bce(bce_),
        savedScopeIndex(bce->blockScopeList.length()),
        savedDepth(bce->stackDepth),
        openScopeIndex(UINT32_MAX)
    {
        if (StmtInfoBCE* stmt = bce->innermostScopeStmt())
            openScopeIndex = stmt->blockScopeIndex;
    }

    ~NonLocalExitScope() {
        // Close every scope note this helper opened and restore the
        // emitter's stack depth to its pre-jump value.
        for (uint32_t n = savedScopeIndex; n < bce->blockScopeList.length(); n++)
            bce->blockScopeList.recordEnd(n, bce->offset(), bce->inPrologue());
        bce->stackDepth = savedDepth;
    }

    // Record that the scope at |blockScopeIndex| is closed along the jump
    // path: append a fresh note for its enclosing scope, parented to the
    // currently-open note.
    bool popScopeForNonLocalExit(uint32_t blockScopeIndex) {
        uint32_t scopeObjectIndex = bce->blockScopeList.findEnclosingScope(blockScopeIndex);
        uint32_t parent = openScopeIndex;

        if (!bce->blockScopeList.append(scopeObjectIndex, bce->offset(), bce->inPrologue(), parent))
            return false;
        openScopeIndex = bce->blockScopeList.length() - 1;
        return true;
    }

    bool prepareForNonLocalJump(StmtInfoBCE* toStmt);
};

/*
 * Emit additional bytecode(s) for non-local jumps.
 */
bool
NonLocalExitScope::prepareForNonLocalJump(StmtInfoBCE* toStmt)
{
    int npops = 0;

// Pops are batched into one JSOP_POPN; flush before any op that must see
// the correct stack.
#define FLUSH_POPS() if (npops && !bce->flushPops(&npops)) return false

    // Walk outward from the innermost statement to (but not including) the
    // jump target, emitting the cleanup each statement kind requires.
    for (StmtInfoBCE* stmt = bce->innermostStmt(); stmt != toStmt; stmt = stmt->enclosing) {
        switch (stmt->type) {
          case StmtType::FINALLY:
            // Enter the finally block via its gosub backpatch chain.
            FLUSH_POPS();
            if (!bce->emitBackPatchOp(&stmt->gosubs()))
                return false;
            break;

          case StmtType::WITH:
            if (!bce->emit1(JSOP_LEAVEWITH))
                return false;
            MOZ_ASSERT(stmt->linksScope());
            if (!popScopeForNonLocalExit(stmt->blockScopeIndex))
                return false;
            break;

          case StmtType::FOR_OF_LOOP:
            npops += 2;
            break;

          case StmtType::FOR_IN_LOOP:
            /* The iterator and the current value are on the stack. */
            npops += 1;
            FLUSH_POPS();
            if (!bce->emit1(JSOP_ENDITER))
                return false;
            break;

          case StmtType::SPREAD:
            MOZ_ASSERT_UNREACHABLE("can't break/continue/return from inside a spread");
            break;

          case StmtType::SUBROUTINE:
            /*
             * There's a [exception or hole, retsub pc-index] pair and the
             * possible return value on the stack that we need to pop.
             */
            npops += 3;
            break;

          default:;
        }

        if (stmt->isBlockScope) {
            // Cloned (heap-allocated) blocks need a real pop; otherwise a
            // debug-only leave note suffices.
            StaticBlockObject& blockObj = stmt->staticBlock();
            if (blockObj.needsClone()) {
                if (!bce->emit1(JSOP_POPBLOCKSCOPE))
                    return false;
            } else {
                if (!bce->emit1(JSOP_DEBUGLEAVEBLOCK))
                    return false;
            }
            if (!popScopeForNonLocalExit(stmt->blockScopeIndex))
                return false;
        }
    }

    FLUSH_POPS();
    return true;

#undef FLUSH_POPS
}

}  // anonymous namespace
// Emit a non-local goto: clean up scopes/iterators between here and
// |toStmt|, optionally emit a source note, then emit a backpatched jump
// threaded onto |*lastp|.
bool
BytecodeEmitter::emitGoto(StmtInfoBCE* toStmt, ptrdiff_t* lastp, SrcNoteType noteType)
{
    NonLocalExitScope nle(this);

    if (!nle.prepareForNonLocalJump(toStmt))
        return false;

    if (noteType != SRC_NULL && !newSrcNote(noteType))
        return false;

    return emitBackPatchOp(lastp);
}
void
BytecodeEmitter::backPatch(ptrdiff_t last, jsbytecode* target, jsbytecode op)
{
    // Walk the backpatch chain starting at offset |last|. Each node is a
    // JSOP_BACKPATCH whose jump operand holds the delta back to the previous
    // node; code(-1) terminates the walk. Rewrite each node in place into
    // |op| with a real jump span to |target|.
    jsbytecode* pc = code(last);
    jsbytecode* stop = code(-1);
    while (pc != stop) {
        ptrdiff_t delta = GET_JUMP_OFFSET(pc);
        ptrdiff_t span = target - pc;
        SET_JUMP_OFFSET(pc, span);
        *pc = op;
        pc -= delta;
    }
}
// Shared push: initialize the statement's top offset / backpatch chains,
// then push it onto the statement stack.
void
BytecodeEmitter::pushStatementInner(StmtInfoBCE* stmt, StmtType type, ptrdiff_t top)
{
    stmt->setTop(top);
    stmtStack.push(stmt, type);
}
// Push a non-loop statement. The assert runs after the push, which is what
// assigns the statement's type.
void
BytecodeEmitter::pushStatement(StmtInfoBCE* stmt, StmtType type, ptrdiff_t top)
{
    pushStatementInner(stmt, type, top);
    MOZ_ASSERT(!stmt->isLoop());
}
void
BytecodeEmitter::pushLoopStatement(LoopStmtInfo* stmt, StmtType type, ptrdiff_t top)
{
    pushStatementInner(stmt, type, top);
    MOZ_ASSERT(stmt->isLoop());

    // Find the nearest enclosing loop, if any, to compute nesting depth.
    LoopStmtInfo* enclosingLoop = nullptr;
    for (StmtInfoBCE* outer = stmt->enclosing; outer; outer = outer->enclosing) {
        if (outer->isLoop()) {
            enclosingLoop = LoopStmtInfo::fromStmtInfo(outer);
            break;
        }
    }

    stmt->stackDepth = this->stackDepth;
    stmt->loopDepth = enclosingLoop ? enclosingLoop->loopDepth + 1 : 1;

    // Number of stack slots this kind of loop keeps live across iterations.
    int loopSlots;
    if (type == StmtType::SPREAD)
        loopSlots = 3;
    else if (type == StmtType::FOR_IN_LOOP || type == StmtType::FOR_OF_LOOP)
        loopSlots = 2;
    else
        loopSlots = 0;

    MOZ_ASSERT(loopSlots <= stmt->stackDepth);

    // Ion can OSR into this loop only if everything on the stack belongs to
    // loops — i.e. no other temporaries are live at the loop entry.
    if (enclosingLoop) {
        stmt->canIonOsr = (enclosingLoop->canIonOsr &&
                           stmt->stackDepth == enclosingLoop->stackDepth + loopSlots);
    } else {
        stmt->canIonOsr = stmt->stackDepth == loopSlots;
    }
}
// The innermost static scope: the innermost scope statement's scope if one
// is open, otherwise the compilation unit's own static scope.
JSObject*
BytecodeEmitter::innermostStaticScope() const
{
    StmtInfoBCE* stmt = innermostScopeStmt();
    if (!stmt)
        return sc->staticScope();
    return stmt->staticScope;
}
#ifdef DEBUG
// True iff every variable declared in |obj| is marked aliased.
static bool
AllLocalsAliased(StaticBlockObject& obj)
{
    unsigned count = obj.numVariables();
    for (unsigned i = 0; i < count; i++) {
        if (!obj.isAliased(i))
            return false;
    }
    return true;
}
#endif
// Assign a final slot to each binding in |blockObj|: aliased bindings get a
// slot in the cloned block object, unaliased ones a frame slot past all
// body-level locals. Also records each binding's aliased flag on the block.
bool
BytecodeEmitter::computeAliasedSlots(Handle<StaticBlockObject*> blockObj)
{
    uint32_t numAliased = script->bindings.numAliasedBodyLevelLocals();

    for (unsigned i = 0; i < blockObj->numVariables(); i++) {
        Definition* dn = blockObj->definitionParseNode(i);

        MOZ_ASSERT(dn->isDefn());

        uint32_t index = dn->pn_scopecoord.slot();
        uint32_t slot;
        if (isAliasedName(this, dn)) {
            // Aliased: live in the block object itself.
            slot = blockObj->blockIndexToSlot(index);
            blockObj->setAliased(i, true);
        } else {
            // blockIndexToLocalIndex returns the frame slot following the
            // unaliased locals. We add numAliased so that the slot value
            // comes after all (aliased and unaliased) body level locals.
            slot = numAliased + blockObj->blockIndexToLocalIndex(index);
            blockObj->setAliased(i, false);
        }

        if (!dn->pn_scopecoord.setSlot(parser->tokenStream, slot))
            return false;

#ifdef DEBUG
        // Uses of this definition must not be bound yet; they are resolved
        // later against the slot we just assigned.
        for (ParseNode* pnu = dn->dn_uses; pnu; pnu = pnu->pn_link) {
            MOZ_ASSERT(pnu->pn_lexdef == dn);
            MOZ_ASSERT(!(pnu->pn_dflags & PND_BOUND));
            MOZ_ASSERT(pnu->pn_scopecoord.isFree());
        }
#endif
    }

    MOZ_ASSERT_IF(sc->allLocalsAliased(), AllLocalsAliased(*blockObj));

    return true;
}
// Compute where |blockObj|'s locals start in the frame: after the function's
// fixed body-level locals and after any enclosing block's locals.
void
BytecodeEmitter::computeLocalOffset(Handle<StaticBlockObject*> blockObj)
{
    // Global code has no fixed body-level locals.
    unsigned nbodyfixed = !sc->isGlobalContext()
                          ? script->bindings.numUnaliasedBodyLevelLocals()
                          : 0;
    unsigned localOffset = nbodyfixed;

    // If a block scope encloses us, start after its locals instead.
    if (StmtInfoBCE* stmt = innermostScopeStmt()) {
        Rooted<NestedScopeObject*> outer(cx, stmt->staticScope);
        for (; outer; outer = outer->enclosingNestedScope()) {
            if (outer->is<StaticBlockObject>() && !IsStaticGlobalLexicalScope(outer)) {
                StaticBlockObject& outerBlock = outer->as<StaticBlockObject>();
                localOffset = outerBlock.localOffset() + outerBlock.numVariables();
                break;
            }
        }
    }

    MOZ_ASSERT(localOffset + blockObj->numVariables()
               <= nbodyfixed + script->bindings.numBlockScoped());

    blockObj->setLocalOffset(localOffset);
}
// ~ Nested Scopes ~
//
// A nested scope is a region of a compilation unit (function, script, or eval
// code) with an additional node on the scope chain. This node may either be a
// "with" object or a "block" object. "With" objects represent "with" scopes.
// Block objects represent lexical scopes, and contain named block-scoped
// bindings, for example "let" bindings or the exception in a catch block.
// Those variables may be local and thus accessible directly from the stack, or
// "aliased" (accessed by name from nested functions, or dynamically via nested
// "eval" or "with") and only accessible through the scope chain.
//
// All nested scopes are present on the "static scope chain". A nested scope
// that is a "with" scope will be present on the scope chain at run-time as
// well. A block scope may or may not have a corresponding link on the run-time
// scope chain; if no variable declared in the block scope is "aliased", then no
// scope chain node is allocated.
//
// To help debuggers, the bytecode emitter arranges to record the PC ranges
// comprehended by a nested scope, and ultimately attach them to the JSScript.
// An element in the "block scope array" specifies the PC range, and links to a
// NestedScopeObject in the object list of the script. That scope object is
// linked to the previous link in the static scope chain, if any. The static
// scope chain at any pre-retire PC can be retrieved using
// JSScript::getStaticScope(jsbytecode* pc).
//
// Block scopes store their locals in the fixed part of a stack frame, after the
// "fixed var" bindings. A fixed var binding is a "var" or legacy "const"
// binding that occurs in a function (as opposed to a script or in eval code).
// Only functions have fixed var bindings.
//
// To assist the debugger, we emit a DEBUGLEAVEBLOCK opcode before leaving a
// block scope, if the block has no aliased locals. This allows DebugScopes
// to invalidate any association between a debugger scope object, which can
// proxy access to unaliased stack locals, and the actual live frame. In
// normal, non-debug mode, this opcode does not cause any baseline code to be
// emitted.
//
// If the block has aliased locals, no DEBUGLEAVEBLOCK is emitted, and
// POPBLOCKSCOPE itself balances the debug scope mapping. This gets around a
// comedic situation where DEBUGLEAVEBLOCK may remove a block scope from the
// debug scope map, but the immediate following POPBLOCKSCOPE adds it back due
// to an onStep hook.
//
// Enter a nested scope with enterNestedScope. It will emit
// PUSHBLOCKSCOPE/ENTERWITH if needed, and arrange to record the PC bounds of
// the scope. Leave a nested scope with leaveNestedScope, which, for blocks,
// will emit DEBUGLEAVEBLOCK and may emit POPBLOCKSCOPE. (For "with" scopes it
// emits LEAVEWITH, of course.) Pass enterNestedScope a fresh StmtInfoBCE
// object, and pass that same object to the corresponding leaveNestedScope. If
// the statement is a block scope, pass StmtType::BLOCK as stmtType; otherwise for
// with scopes pass StmtType::WITH.
//
// Enter a block or with scope (see the "Nested Scopes" comment above):
// interns the scope object, emits PUSHBLOCKSCOPE/ENTERWITH as needed, opens
// a block-scope note, and links |stmt| as the innermost scope statement.
bool
BytecodeEmitter::enterNestedScope(StmtInfoBCE* stmt, ObjectBox* objbox, StmtType stmtType)
{
    Rooted<NestedScopeObject*> scopeObj(cx, &objbox->object->as<NestedScopeObject>());
    uint32_t scopeObjectIndex = objectList.add(objbox);

    switch (stmtType) {
      case StmtType::BLOCK: {
        Rooted<StaticBlockObject*> blockObj(cx, &scopeObj->as<StaticBlockObject>());

        computeLocalOffset(blockObj);

        if (!computeAliasedSlots(blockObj))
            return false;

        // Only blocks with aliased bindings exist on the runtime scope
        // chain and so need a PUSHBLOCKSCOPE.
        if (blockObj->needsClone()) {
            if (!emitInternedObjectOp(scopeObjectIndex, JSOP_PUSHBLOCKSCOPE))
                return false;
        }

        // Non-global block scopes are non-extensible. At this point the
        // Parser has added all bindings to the StaticBlockObject, so we make
        // it non-extensible.
        if (!blockObj->makeNonExtensible(cx))
            return false;
        break;
      }
      case StmtType::WITH:
        MOZ_ASSERT(scopeObj->is<StaticWithObject>());
        if (!emitInternedObjectOp(scopeObjectIndex, JSOP_ENTERWITH))
            return false;
        break;
      default:
        MOZ_CRASH("Unexpected scope statement");
    }

    // NOTE: the |stmt| inside this if deliberately shadows the parameter;
    // it is the previously-innermost scope statement, used as the parent
    // of the new block-scope note.
    uint32_t parent = BlockScopeNote::NoBlockScopeIndex;
    if (StmtInfoBCE* stmt = innermostScopeStmt())
        parent = stmt->blockScopeIndex;

    stmt->blockScopeIndex = blockScopeList.length();
    if (!blockScopeList.append(scopeObjectIndex, offset(), inPrologue(), parent))
        return false;

    pushStatement(stmt, stmtType, offset());
    scopeObj->initEnclosingScope(innermostStaticScope());
    stmtStack.linkAsInnermostScopeStmt(stmt, *scopeObj);
    MOZ_ASSERT(stmt->linksScope());
    stmt->isBlockScope = (stmtType == StmtType::BLOCK);

    return true;
}
// Patches |breaks| and |continues| unless the top statement info record
// represents a try-catch-finally suite.
void
BytecodeEmitter::popStatement()
{
    if (!innermostStmt()->isTrying()) {
        // Breaks jump to the end of emitted code; continues jump back to
        // the loop's update offset.
        backPatch(innermostStmt()->breaks, code().end(), JSOP_GOTO);
        backPatch(innermostStmt()->continues, code(innermostStmt()->update), JSOP_GOTO);
    }

    stmtStack.pop();
}
// Leave the scope entered by the matching enterNestedScope call: pops the
// statement, emits the appropriate leave op, and closes the scope note.
bool
BytecodeEmitter::leaveNestedScope(StmtInfoBCE* stmt)
{
    MOZ_ASSERT(stmt == innermostScopeStmt());
    MOZ_ASSERT(stmt->isBlockScope == !(stmt->type == StmtType::WITH));
    uint32_t blockScopeIndex = stmt->blockScopeIndex;

#ifdef DEBUG
    // The scope note must still be open and must refer to this statement's
    // static scope.
    MOZ_ASSERT(blockScopeList.list[blockScopeIndex].length == 0);
    uint32_t blockObjIndex = blockScopeList.list[blockScopeIndex].index;
    ObjectBox* blockObjBox = objectList.find(blockObjIndex);
    NestedScopeObject* staticScope = &blockObjBox->object->as<NestedScopeObject>();
    MOZ_ASSERT(stmt->staticScope == staticScope);
    MOZ_ASSERT_IF(!stmt->isBlockScope, staticScope->is<StaticWithObject>());
#endif

    popStatement();

    if (stmt->isBlockScope) {
        // Cloned blocks live on the scope chain and need a real pop; others
        // only need the debugger bookkeeping op.
        if (stmt->staticScope->as<StaticBlockObject>().needsClone()) {
            if (!emit1(JSOP_POPBLOCKSCOPE))
                return false;
        } else {
            if (!emit1(JSOP_DEBUGLEAVEBLOCK))
                return false;
        }
    } else {
        if (!emit1(JSOP_LEAVEWITH))
            return false;
    }

    blockScopeList.recordEnd(blockScopeIndex, offset(), inPrologue());

    return true;
}
// Emit an op whose only operand is a 4-byte index (the op's length must be
// exactly 1 + UINT32_INDEX_LEN).
bool
BytecodeEmitter::emitIndex32(JSOp op, uint32_t index)
{
    MOZ_ASSERT(checkStrictOrSloppy(op));

    const size_t len = 1 + UINT32_INDEX_LEN;
    MOZ_ASSERT(len == size_t(CodeSpec[op].length));

    ptrdiff_t where;
    if (!emitCheck(len, &where))
        return false;

    jsbytecode* pc = code(where);
    pc[0] = jsbytecode(op);
    SET_UINT32_INDEX(pc, index);
    updateDepth(where);
    checkTypeSet(op);
    return true;
}
// Emit an op with a 4-byte index operand; unlike emitIndex32 the op may
// carry additional operand bytes (len >= 1 + UINT32_INDEX_LEN).
bool
BytecodeEmitter::emitIndexOp(JSOp op, uint32_t index)
{
    MOZ_ASSERT(checkStrictOrSloppy(op));

    const size_t len = CodeSpec[op].length;
    MOZ_ASSERT(len >= 1 + UINT32_INDEX_LEN);

    ptrdiff_t where;
    if (!emitCheck(len, &where))
        return false;

    jsbytecode* pc = code(where);
    pc[0] = jsbytecode(op);
    SET_UINT32_INDEX(pc, index);
    updateDepth(where);
    checkTypeSet(op);
    return true;
}
bool
BytecodeEmitter::emitAtomOp(JSAtom* atom, JSOp op)
{
    MOZ_ASSERT(atom);
    MOZ_ASSERT(JOF_OPTYPE(op) == JOF_ATOM);

    // .generator lookups should be emitted as JSOP_GETALIASEDVAR instead of
    // JSOP_GETNAME etc, to bypass |with| objects on the scope chain.
    // It's safe to emit .this lookups though because |with| objects skip
    // those.
    MOZ_ASSERT_IF(op == JSOP_GETNAME || op == JSOP_GETGNAME,
                  !sc->isDotVariable(atom) || atom == cx->names().dotThis);

    /* Specialize length accesses for the interpreter. */
    JSOp actualOp = op;
    if (actualOp == JSOP_GETPROP && atom == cx->names().length)
        actualOp = JSOP_LENGTH;

    jsatomid index;
    if (!makeAtomIndex(atom, &index))
        return false;

    return emitIndexOp(actualOp, index);
}
// Convenience overload: emit an atom op using the node's atom.
bool
BytecodeEmitter::emitAtomOp(ParseNode* pn, JSOp op)
{
    JSAtom* atom = pn->pn_atom;
    MOZ_ASSERT(atom != nullptr);
    return emitAtomOp(atom, op);
}
// Emit an object-typed op referring to an already-interned object by its
// index in the emitter's object list.
bool
BytecodeEmitter::emitInternedObjectOp(uint32_t index, JSOp op)
{
    MOZ_ASSERT(JOF_OPTYPE(op) == JOF_OBJECT);
    MOZ_ASSERT(index < objectList.length);
    return emitIndex32(op, index);
}
// Intern |objbox| into the object list and emit an op referring to it.
bool
BytecodeEmitter::emitObjectOp(ObjectBox* objbox, JSOp op)
{
    return emitInternedObjectOp(objectList.add(objbox), op);
}
// Intern two adjacent objects; the op's operand is the index of the first,
// and the second is interned immediately after it.
bool
BytecodeEmitter::emitObjectPairOp(ObjectBox* objbox1, ObjectBox* objbox2, JSOp op)
{
    uint32_t index = objectList.add(objbox1);
    objectList.add(objbox2);
    return emitInternedObjectOp(index, op);
}
// Emit JSOP_REGEXP referring to the regexp at |index| in the regexp list.
bool
BytecodeEmitter::emitRegExp(uint32_t index)
{
    return emitIndex32(JSOP_REGEXP, index);
}
// Emit a local-slot op whose operand is a frame-local slot number.
bool
BytecodeEmitter::emitLocalOp(JSOp op, uint32_t slot)
{
    MOZ_ASSERT(JOF_OPTYPE(op) != JOF_SCOPECOORD);
    MOZ_ASSERT(IsLocalOp(op));

    ptrdiff_t where;
    if (!emitN(op, LOCALNO_LEN, &where))
        return false;

    SET_LOCALNO(code(where), slot);
    return true;
}
// Emit an op for an unaliased binding: either a frame-local op (after
// mapping the var index to its stack slot) or an argument op.
bool
BytecodeEmitter::emitUnaliasedVarOp(JSOp op, uint32_t slot, MaybeCheckLexical checkLexical)
{
    MOZ_ASSERT(JOF_OPTYPE(op) != JOF_SCOPECOORD);

    if (IsLocalOp(op)) {
        // Only unaliased locals have stack slots assigned to them. Convert the
        // var index (which includes unaliased and aliased locals) to the stack
        // slot index.
        MOZ_ASSERT(localsToFrameSlots_[slot] <= slot);
        slot = localsToFrameSlots_[slot];

        // For hoisted lexical uses, emit a TDZ check of the slot before the
        // access (INITLEXICAL itself never needs one).
        if (checkLexical) {
            MOZ_ASSERT(op != JSOP_INITLEXICAL);
            if (!emitLocalOp(JSOP_CHECKLEXICAL, slot))
                return false;
        }

        return emitLocalOp(op, slot);
    }

    MOZ_ASSERT(IsArgOp(op));
    ptrdiff_t off;
    if (!emitN(op, ARGNO_LEN, &off))
        return false;

    SET_ARGNO(code(off), slot);
    return true;
}
// Emit an op whose operand is a scope coordinate: a hop count up the scope
// chain followed by a slot number.
bool
BytecodeEmitter::emitScopeCoordOp(JSOp op, ScopeCoordinate sc)
{
    MOZ_ASSERT(JOF_OPTYPE(op) == JOF_SCOPECOORD);

    unsigned operandLen = SCOPECOORD_HOPS_LEN + SCOPECOORD_SLOT_LEN;
    MOZ_ASSERT(int(operandLen) + 1 /* op */ == CodeSpec[op].length);

    ptrdiff_t where;
    if (!emitN(op, operandLen, &where))
        return false;

    // Write the hops immediate, then the slot immediate right after it.
    jsbytecode* pc = code(where);
    SET_SCOPECOORD_HOPS(pc, sc.hops());
    SET_SCOPECOORD_SLOT(pc + SCOPECOORD_HOPS_LEN, sc.slot());

    checkTypeSet(op);
    return true;
}
// Emit an op for an aliased binding. For hoisted lexical uses, a TDZ check
// of the same coordinate is emitted before the actual access.
bool
BytecodeEmitter::emitAliasedVarOp(JSOp op, ScopeCoordinate sc, MaybeCheckLexical checkLexical)
{
    if (checkLexical) {
        // INITALIASEDLEXICAL itself never needs a dead-zone check.
        MOZ_ASSERT(op != JSOP_INITALIASEDLEXICAL);
        if (!emitScopeCoordOp(JSOP_CHECKALIASEDLEXICAL, sc))
            return false;
    }

    return emitScopeCoordOp(op, sc);
}
// Find the CallObject slot of the aliased binding |name| in |script|,
// storing it in |*pslot|. Returns false if |name| is not an aliased binding.
// When compiling a lazy function, |pn| (if non-null) is additionally flagged
// PND_LEXICAL if the free variable was recorded as a possibly-hoisted use of
// a lexical binding, so a dead-zone check gets emitted.
bool
BytecodeEmitter::lookupAliasedName(HandleScript script, PropertyName* name, uint32_t* pslot,
                                   ParseNode* pn)
{
    LazyScript::FreeVariable* freeVariables = nullptr;
    uint32_t lexicalBegin = 0;
    uint32_t numFreeVariables = 0;
    if (emitterMode == BytecodeEmitter::LazyFunction) {
        freeVariables = lazyScript->freeVariables();
        lexicalBegin = script->bindings.lexicalBegin();
        numFreeVariables = lazyScript->numFreeVariables();
    }

    /*
     * Beware: BindingIter may contain more than one Binding for a given name
     * (in the case of |function f(x,x) {}|) but only one will be aliased.
     */
    uint32_t bindingIndex = 0;
    uint32_t slot = CallObject::RESERVED_SLOTS;
    for (BindingIter bi(script); !bi.done(); bi++) {
        if (bi->aliased()) {
            if (bi->name() == name) {
                // Check if the free variable from a lazy script was marked as
                // a possible hoisted use and is a lexical binding. If so,
                // mark it as such so we emit a dead zone check.
                if (freeVariables) {
                    for (uint32_t i = 0; i < numFreeVariables; i++) {
                        if (freeVariables[i].atom() == name) {
                            if (freeVariables[i].isHoistedUse() && bindingIndex >= lexicalBegin) {
                                MOZ_ASSERT(pn);
                                MOZ_ASSERT(pn->isUsed());
                                pn->pn_dflags |= PND_LEXICAL;
                            }

                            break;
                        }
                    }
                }

                *pslot = slot;
                return true;
            }
            // Only aliased bindings consume CallObject slots.
            slot++;
        }
        bindingIndex++;
    }
    return false;
}
// Look up |name| among this script's own aliased bindings and store its
// CallObject slot into |sc|. Returns false if |name| is not aliased here.
bool
BytecodeEmitter::lookupAliasedNameSlot(PropertyName* name, ScopeCoordinate* sc)
{
    uint32_t slot;
    if (!lookupAliasedName(script, name, &slot))
        return false;

    sc->setSlot(slot);
    return true;
}
// Hoisted uses of lexical bindings require a temporal-dead-zone check.
static inline MaybeCheckLexical
NodeNeedsCheckLexical(ParseNode* pn)
{
    if (pn->isHoistedLexicalUse())
        return CheckLexical;
    return DontCheckLexical;
}
// Map a frame-slot (arg/local) op to its scope-chain (aliased) equivalent.
static inline JSOp
UnaliasedVarOpToAliasedVarOp(JSOp op)
{
    switch (op) {
      case JSOP_GETARG:
      case JSOP_GETLOCAL:
        return JSOP_GETALIASEDVAR;
      case JSOP_SETARG:
      case JSOP_SETLOCAL:
        return JSOP_SETALIASEDVAR;
      case JSOP_INITLEXICAL:
        return JSOP_INITALIASEDLEXICAL;
      default:
        MOZ_CRASH("unexpected var op");
    }
}
// If |pn| resolves to a |const| binding, rewrite writes into the throwing
// variants; reads and initializations pass through unchanged.
static inline JSOp
CheckSetConstOp(JSOp op, ParseNode* pn)
{
    if (!pn->resolve()->isConst())
        return op;

    switch (op) {
      case JSOP_GETLOCAL:
      case JSOP_GETALIASEDVAR:
      case JSOP_INITLEXICAL:
      case JSOP_INITALIASEDLEXICAL:
        return op;
      case JSOP_SETLOCAL:
        return JSOP_THROWSETCONST;
      case JSOP_SETALIASEDVAR:
        return JSOP_THROWSETALIASEDCONST;
      default:
        MOZ_CRASH("unexpected set var op");
    }
}
// Emit the op for a name or function node whose scope coordinate has been
// resolved, routing to the aliased (scope-chain) or unaliased (frame slot)
// emit path and applying const-write rewriting and TDZ checks as needed.
bool
BytecodeEmitter::emitVarOp(ParseNode* pn, JSOp op)
{
    MOZ_ASSERT(pn->isKind(PNK_FUNCTION) || pn->isKind(PNK_NAME));
    MOZ_ASSERT(!pn->pn_scopecoord.isFree());

    if (pn->isDefn()) {
        // The hop count needs to be computed even for definitions, due to
        // cases like
        //
        // function outer() {
        //   function inner() { x = 42; }
        //   try {
        //   } catch (e) {
        //     G = function () { e = 43; };
        //     var x;
        //   }
        // }
        //
        // The definition for x is emitted when the block scope for the catch
        // is innermost. Moreover, that block scope has aliased bindings, so
        // there is a non-0 hop count.
        if (pn->pn_scopecoord.isHopsUnknown()) {
            BytecodeEmitter* bceOfDef;
            uint32_t hops = computeHops(pn, &bceOfDef);
            MOZ_ASSERT(bceOfDef == this);
            if (!pn->pn_scopecoord.setHops(parser->tokenStream, hops))
                return false;
        }

#ifdef DEBUG
        // Re-derive the hop count and check it matches the stored one.
        BytecodeEmitter* bceOfDef;
        uint32_t hops = computeHops(pn, &bceOfDef);
        MOZ_ASSERT(bceOfDef == this);
        MOZ_ASSERT(hops == pn->pn_scopecoord.hops());
#endif

        if (!computeDefinitionIsAliased(this, pn->resolve(), &op))
            return false;
    }

    // Aliased names had their JSOp changed by bindNameToSlot or above.
    if (IsAliasedVarOp(op)) {
        ScopeCoordinate sc;
        sc.setHops(pn->pn_scopecoord.hops());
        sc.setSlot(pn->pn_scopecoord.slot());
        return emitAliasedVarOp(CheckSetConstOp(op, pn), sc, NodeNeedsCheckLexical(pn));
    }

#ifdef DEBUG
    BytecodeEmitter* bceOfDef;
    // Call computeHops to get bceOfDef.
    (void) computeHops(pn, &bceOfDef);
    MOZ_ASSERT(!isAliasedName(bceOfDef, pn));
#endif

    MOZ_ASSERT_IF(pn->isKind(PNK_NAME), IsArgOp(op) || IsLocalOp(op));
    MOZ_ASSERT(pn->isUsed() || pn->isDefn());
    return emitUnaliasedVarOp(CheckSetConstOp(op, pn), pn->pn_scopecoord.slot(),
                              NodeNeedsCheckLexical(pn));
}
static JSOp
GetIncDecInfo(ParseNodeKind kind, bool* post)
{
    // Classify an increment/decrement node: report through |post| whether it
    // is a postfix form, and return the arithmetic op it performs.
    MOZ_ASSERT(kind == PNK_POSTINCREMENT || kind == PNK_PREINCREMENT ||
               kind == PNK_POSTDECREMENT || kind == PNK_PREDECREMENT);
    bool isIncrement = kind == PNK_POSTINCREMENT || kind == PNK_PREINCREMENT;
    *post = kind == PNK_POSTINCREMENT || kind == PNK_POSTDECREMENT;
    return isIncrement ? JSOP_ADD : JSOP_SUB;
}
bool
BytecodeEmitter::emitVarIncDec(ParseNode* pn)
{
    // Emit ++/-- applied to an arg, local, or aliased variable. The stack
    // comments below show the postfix case, whose extra "N?" copy is the
    // expression's result.
    JSOp kidOp = pn->pn_kid->getOp();
    MOZ_ASSERT(IsArgOp(kidOp) || IsLocalOp(kidOp) || IsAliasedVarOp(kidOp));
    MOZ_ASSERT(pn->pn_kid->isKind(PNK_NAME));
    MOZ_ASSERT(!pn->pn_kid->pn_scopecoord.isFree());

    bool post;
    JSOp binop = GetIncDecInfo(pn->getKind(), &post);

    // Select the get/set pair that matches the kid's storage class.
    JSOp getOp;
    JSOp setOp;
    if (IsLocalOp(kidOp)) {
        getOp = JSOP_GETLOCAL;
        setOp = JSOP_SETLOCAL;
    } else if (IsArgOp(kidOp)) {
        getOp = JSOP_GETARG;
        setOp = JSOP_SETARG;
    } else {
        getOp = JSOP_GETALIASEDVAR;
        setOp = JSOP_SETALIASEDVAR;
    }

    if (!emitVarOp(pn->pn_kid, getOp))                      // V
        return false;
    if (!emit1(JSOP_POS))                                   // N
        return false;
    if (post && !emit1(JSOP_DUP))                           // N? N
        return false;
    if (!emit1(JSOP_ONE))                                   // N? N 1
        return false;
    if (!emit1(binop))                                      // N? N+1
        return false;
    if (!emitVarOp(pn->pn_kid, setOp))                      // N? N+1
        return false;
    if (post && !emit1(JSOP_POP))                           // RESULT
        return false;

    return true;
}
// Return whether the emitter is currently at body level, i.e. not nested
// inside any syntactic statement.
bool
BytecodeEmitter::atBodyLevel() const
{
    // 'eval' and non-syntactic scripts are always under an invisible lexical
    // scope, but since it is not syntactic, it should still be considered at
    // body level.
    if (sc->staticScope()->is<StaticEvalObject>()) {
        // Body level in eval means the innermost statement is that invisible
        // block itself (it has no enclosing statement).
        bool bl = !innermostStmt()->enclosing;
        MOZ_ASSERT_IF(bl, innermostStmt()->type == StmtType::BLOCK);
        MOZ_ASSERT_IF(bl, innermostStmt()->staticScope
                              ->as<StaticBlockObject>()
                              .enclosingStaticScope() == sc->staticScope());
        return bl;
    }
    return !innermostStmt() || sc->isModuleBox();
}
// Count the dynamic scope objects between the use site (this emitter's
// innermost static scope) and the scope holding |pn|'s definition. Also
// report, through |bceOfDefOut|, the emitter that contains the definition
// (|this| unless the definition lives in an enclosing function).
uint32_t
BytecodeEmitter::computeHops(ParseNode* pn, BytecodeEmitter** bceOfDefOut)
{
    Definition* dn = pn->resolve();
    MOZ_ASSERT(dn->isDefn());
    MOZ_ASSERT(!dn->isPlaceholder());
    MOZ_ASSERT(dn->isBound());

    uint32_t hops = 0;
    BytecodeEmitter* bceOfDef = this;
    StaticScopeIter<NoGC> ssi(innermostStaticScope());
    JSObject* defScope = blockScopeOfDef(dn);
    while (ssi.staticScope() != defScope) {
        // Only scopes that get a runtime scope object count as hops.
        if (ssi.hasSyntacticDynamicScopeObject())
            hops++;
        if (ssi.type() == StaticScopeIter<NoGC>::Function) {
            // Crossing a function boundary: the binding must be closed over,
            // and its definition belongs to an enclosing emitter.
            MOZ_ASSERT(dn->isClosed());
            bceOfDef = bceOfDef->parent;
        }
        ssi++;
    }
    *bceOfDefOut = bceOfDef;
    return hops;
}
// Return whether the binding used by |pn| (whose definition was emitted by
// |bceOfDef|) must be accessed through a scope object rather than a frame
// slot.
bool
BytecodeEmitter::isAliasedName(BytecodeEmitter* bceOfDef, ParseNode* pn)
{
    // If the definition is in another function, it's definitely aliased.
    if (bceOfDef != this)
        return true;

    Definition* dn = pn->resolve();
    switch (dn->kind()) {
      case Definition::LET:
      case Definition::CONSTANT:
        /*
         * There are two ways to alias a let variable: nested functions and
         * dynamic scope operations. (This is overly conservative since the
         * bindingsAccessedDynamically flag, checked by allLocalsAliased, is
         * function-wide.)
         *
         * In addition all locals in generators are marked as aliased, to ensure
         * that they are allocated on scope chains instead of on the stack. See
         * the definition of SharedContext::allLocalsAliased.
         */
        return dn->isClosed() || sc->allLocalsAliased();
      case Definition::ARG:
        /*
         * Consult the bindings, since they already record aliasing. We might
         * be tempted to use the same definition as VAR/CONST/LET, but there is
         * a problem caused by duplicate arguments: only the last argument with
         * a given name is aliased. This is necessary to avoid generating a
         * shape for the call object with with more than one name for a given
         * slot (which violates internal engine invariants). All this means that
         * the '|| sc->allLocalsAliased()' disjunct is incorrect since it will
         * mark both parameters in function(x,x) as aliased.
         */
        return script->formalIsAliased(pn->pn_scopecoord.slot());
      case Definition::VAR:
        MOZ_ASSERT_IF(sc->allLocalsAliased(), script->localIsAliased(pn->pn_scopecoord.slot()));
        return script->localIsAliased(pn->pn_scopecoord.slot());
      case Definition::PLACEHOLDER:
      case Definition::NAMED_LAMBDA:
      case Definition::MISSING:
      case Definition::IMPORT:
        MOZ_CRASH("unexpected dn->kind");
    }
    return false;
}
// Decide whether definition |dn| is aliased and, if so, rewrite |*op| to its
// aliased form and translate dn's frame slot to a scope-object slot. The
// result is cached on dn via PND_KNOWNALIASED so the translation happens at
// most once.
bool
BytecodeEmitter::computeDefinitionIsAliased(BytecodeEmitter* bceOfDef, Definition* dn, JSOp* op)
{
    if (dn->isKnownAliased()) {
        *op = UnaliasedVarOpToAliasedVarOp(*op);
    } else if (isAliasedName(bceOfDef, dn)) {
        // Translate the frame slot to a slot on the dynamic scope
        // object. Aliased block bindings do not need adjusting; see
        // computeAliasedSlots.
        uint32_t slot = dn->pn_scopecoord.slot();
        if (blockScopeOfDef(dn)->is<JSFunction>() ||
            blockScopeOfDef(dn)->is<ModuleObject>())
        {
            MOZ_ASSERT(IsArgOp(*op) || slot < bceOfDef->script->bindings.numBodyLevelLocals());
            MOZ_ALWAYS_TRUE(bceOfDef->lookupAliasedName(bceOfDef->script, dn->name(), &slot));
        }
        if (!dn->pn_scopecoord.setSlot(parser->tokenStream, slot))
            return false;

        *op = UnaliasedVarOpToAliasedVarOp(*op);

        // Mark the definition as having already computed alias information.
        dn->pn_dflags |= PND_KNOWNALIASED;
    }
    return true;
}
JSOp
BytecodeEmitter::strictifySetNameOp(JSOp op)
{
    // In strict mode code, upgrade name-assignment ops to their strict
    // variants; every other op is returned unchanged.
    if (sc->strict()) {
        if (op == JSOP_SETNAME)
            return JSOP_STRICTSETNAME;
        if (op == JSOP_SETGNAME)
            return JSOP_STRICTSETGNAME;
    }
    return op;
}
void
BytecodeEmitter::strictifySetNameNode(ParseNode* pn)
{
    // Rewrite the node's op in place to its strict-mode variant if needed.
    JSOp strictOp = strictifySetNameOp(pn->getOp());
    pn->setOp(strictOp);
}
/*
 * Try to convert a *NAME op with a free name to a more specialized GNAME,
 * INTRINSIC or ALIASEDVAR op, which optimize accesses on that name.
 * Return true if a conversion was made.
 */
bool
BytecodeEmitter::tryConvertFreeName(ParseNode* pn)
{
    /*
     * In self-hosting mode, JSOP_*NAME is unconditionally converted to
     * JSOP_*INTRINSIC. This causes lookups to be redirected to the special
     * intrinsics holder in the global object, into which any missing values are
     * cloned lazily upon first access.
     */
    if (emitterMode == BytecodeEmitter::SelfHosting) {
        JSOp op;
        switch (pn->getOp()) {
          case JSOP_GETNAME: op = JSOP_GETINTRINSIC; break;
          case JSOP_SETNAME: op = JSOP_SETINTRINSIC; break;
          /* Other *NAME ops aren't (yet) supported in self-hosted code. */
          default: MOZ_CRASH("intrinsic");
        }
        pn->setOp(op);
        return true;
    }

    /*
     * When parsing inner functions lazily, parse nodes for outer functions no
     * longer exist and only the function's scope chain is available for
     * resolving upvar accesses within the inner function.
     */
    if (emitterMode == BytecodeEmitter::LazyFunction) {
        // The only statements within a lazy function which can push lexical
        // scopes are try/catch blocks. Use generic ops in this case.
        for (StmtInfoBCE* stmt = innermostStmt(); stmt; stmt = stmt->enclosing) {
            if (stmt->type == StmtType::CATCH)
                return true;
        }

        // Walk the static scope chain and look for an aliased binding with
        // the name pn->pn_atom.
        uint32_t hops = 0;
        Maybe<uint32_t> slot;
        FunctionBox* funbox = sc->asFunctionBox();
        PropertyName* name = pn->pn_atom->asPropertyName();
        for (StaticScopeIter<NoGC> ssi(funbox->staticScope()); !ssi.done(); ssi++) {
            // Don't optimize names through non-global eval. For global eval
            // we can use GNAME ops.
            if (ssi.type() == StaticScopeIter<NoGC>::Eval) {
                if (ssi.eval().isNonGlobal())
                    return false;
                MOZ_ASSERT(!slot.isSome());
                break;
            }

            // Scopes without a runtime scope object hold no aliased bindings
            // and do not count as hops.
            if (!ssi.hasSyntacticDynamicScopeObject())
                continue;

            // Look up for name in function and block scopes.
            if (ssi.type() == StaticScopeIter<NoGC>::Function) {
                RootedScript funScript(cx, ssi.funScript());
                if (funScript->funHasExtensibleScope() || ssi.fun().atom() == pn->pn_atom)
                    return false;

                // Skip the current function, since we're trying to convert a
                // free name.
                if (script != funScript) {
                    uint32_t slot_;
                    if (lookupAliasedName(funScript, name, &slot_, pn)) {
                        slot = Some(slot_);
                        break;
                    }
                }
            } else if (ssi.type() == StaticScopeIter<NoGC>::Module) {
                RootedScript moduleScript(cx, ssi.moduleScript());
                uint32_t slot_;
                if (lookupAliasedName(moduleScript, name, &slot_, pn)) {
                    slot = Some(slot_);
                    break;
                }

                // Convert module import accesses to use JSOP_GETIMPORT.
                RootedModuleEnvironmentObject env(cx, ssi.module().environment());
                RootedPropertyName propName(cx, name);
                MOZ_ASSERT(env);
                if (env->hasImportBinding(propName)) {
                    if (pn->getOp() == JSOP_GETNAME) {
                        pn->setOp(JSOP_GETIMPORT);
                        return true;
                    }
                    return false;
                }
            } else if (ssi.type() == StaticScopeIter<NoGC>::Block) {
                RootedShape shape(cx, ssi.block().lookupAliasedName(name));
                if (shape) {
                    // Don't optimize setting a 'const' binding. Let the slow
                    // path do the error checking.
                    if (!shape->writable() && pn->getOp() == JSOP_SETNAME)
                        return false;
                    slot = Some(shape->slot());
                    pn->pn_dflags |= PND_LEXICAL;
                    break;
                }
            } else {
                MOZ_ASSERT(ssi.type() != StaticScopeIter<NoGC>::With);
            }

            hops++;
        }

        // If we found a scope binding name, convert the name op to an aliased
        // var op.
        if (slot.isSome()) {
            JSOp op;
            switch (pn->getOp()) {
              case JSOP_GETNAME: op = JSOP_GETALIASEDVAR; break;
              case JSOP_SETNAME: op = JSOP_SETALIASEDVAR; break;
              default: return false;
            }
            pn->setOp(op);
            MOZ_ALWAYS_TRUE(pn->pn_scopecoord.set(parser->tokenStream, hops, *slot));
            return true;
        }
    }

    // Unbound names aren't recognizable global-property references if the
    // script is inside a non-global eval call.
    if (insideNonGlobalEval)
        return false;

    // If we are inside a module then unbound names in a function may refer to
    // imports, so we can't use GNAME ops here.
    if (insideModule)
        return false;

    // Skip trying to use GNAME ops if we know our script has a non-syntactic
    // scope, since they'll just get treated as NAME ops anyway.
    if (script->hasNonSyntacticScope())
        return false;

    // Deoptimized names also aren't necessarily globals.
    if (pn->isDeoptimized())
        return false;

    if (sc->isFunctionBox()) {
        // Unbound names in function code may not be globals if new locals can
        // be added to this function (or an enclosing one) to alias a global
        // reference.
        FunctionBox* funbox = sc->asFunctionBox();
        if (funbox->mightAliasLocals())
            return false;
    }

    // If this is eval code, being evaluated inside strict mode eval code,
    // an "unbound" name might be a binding local to that outer eval:
    //
    //   var x = "GLOBAL";
    //   eval('"use strict"; ' +
    //        'var x; ' +
    //        'eval("print(x)");'); // "undefined", not "GLOBAL"
    //
    // Given the enclosing eval code's strictness and its bindings (neither is
    // readily available now), we could exactly check global-ness, but it's not
    // worth the trouble for doubly-nested eval code. So we conservatively
    // approximate. If the outer eval code is strict, then this eval code will
    // be: thus, don't optimize if we're compiling strict code inside an eval.
    //
    // Though actually, we don't even need an inner eval. We could just as well
    // have a lambda inside that outer strict mode eval and it would run into
    // the same issue.
    if (insideEval && sc->strict())
        return false;

    JSOp op;
    switch (pn->getOp()) {
      case JSOP_GETNAME: op = JSOP_GETGNAME; break;
      case JSOP_SETNAME: op = strictifySetNameOp(JSOP_SETGNAME); break;
      default: MOZ_CRASH("gname");
    }
    pn->setOp(op);
    MOZ_ASSERT_IF(op == JSOP_INITGLEXICAL,
                  IsStaticGlobalLexicalScope(blockScopeOfDef(pn->resolve())));
    return true;
}
/*
 * BindNameToSlotHelper attempts to optimize name gets and sets to stack slot
 * loads and stores, given the compile-time information in |this| and a PNK_NAME
 * node pn. It returns false on error, true on success.
 *
 * The caller can test pn->pn_scopecoord.isFree() to tell whether optimization
 * occurred, in which case bindNameToSlotHelper also updated pn->pn_op. If
 * pn->pn_cookie.isFree() is still true on return, pn->pn_op still may have
 * been optimized, e.g., from JSOP_GETNAME to JSOP_CALLEE. Whether or not
 * pn->pn_op was modified, if this function finds an argument or local variable
 * name, PND_CONST will be set in pn_dflags for read-only properties after a
 * successful return.
 *
 * NB: if you add more opcodes specialized from JSOP_GETNAME, etc., don't forget
 * to update the special cases in EmitFor (for-in) and emitAssignment (= and
 * op=, e.g. +=).
 */
bool
BytecodeEmitter::bindNameToSlotHelper(ParseNode* pn)
{
    MOZ_ASSERT(pn->isKind(PNK_NAME));

    /* Don't attempt if 'pn' is already bound or deoptimized or a function. */
    if (pn->isBound() || pn->isDeoptimized())
        return true;

    /* JSOP_CALLEE is pre-bound by definition. */
    JSOp op = pn->getOp();
    MOZ_ASSERT(op != JSOP_CALLEE);
    MOZ_ASSERT(JOF_OPTYPE(op) == JOF_ATOM);

    /*
     * The parser already linked name uses to definitions when (where not
     * prevented by non-lexical constructs like 'with' and 'eval').
     */
    Definition* dn;
    if (pn->isUsed()) {
        MOZ_ASSERT(pn->pn_scopecoord.isFree());
        dn = pn->pn_lexdef;
        MOZ_ASSERT(dn->isDefn());
        pn->pn_dflags |= (dn->pn_dflags & PND_CONST);
    } else if (pn->isDefn()) {
        dn = (Definition*) pn;
    } else {
        return true;
    }

    // A free scope coordinate means the definition itself was never bound to
    // a slot; only eval/global tricks remain.
    if (dn->pn_scopecoord.isFree()) {
        if (evalCaller) {
            MOZ_ASSERT(script->treatAsRunOnce() || sc->isFunctionBox());

            /*
             * Don't generate upvars on the left side of a for loop. See
             * bug 470758.
             */
            if (emittingForInit)
                return true;

            /*
             * If this is an eval in the global scope, then unbound variables
             * must be globals, so try to use GNAME ops.
             */
            if (!evalCaller->functionOrCallerFunction() && tryConvertFreeName(pn)) {
                pn->pn_dflags |= PND_BOUND;
                return true;
            }

            /*
             * Out of tricks, so we must rely on PICs to optimize named
             * accesses from direct eval called from function code.
             */
            return true;
        }

        /* Optimize accesses to undeclared globals. */
        if (!tryConvertFreeName(pn))
            return true;

        pn->pn_dflags |= PND_BOUND;
        return true;
    }

    /*
     * At this point, we are only dealing with uses that have already been
     * bound to definitions via pn_lexdef. The rest of this routine converts
     * the parse node of the use from its initial JSOP_*NAME* op to a
     * LOCAL/ARG op. This requires setting the node's pn_scopecoord with a
     * pair (hops, slot) where 'hops' is the number of dynamic scopes between
     * the use and the def and 'slot' is the index to emit as the immediate of
     * the ARG/LOCAL op. For example, in this code:
     *
     *   function(a,b,x) { return x }
     *   function(y) { function() { return y } }
     *
     * x will get (hops = 0, slot = 2) and y will get (hops = 1, slot = 0).
     */
    MOZ_ASSERT(!pn->isDefn());
    MOZ_ASSERT(pn->isUsed());
    MOZ_ASSERT(pn->pn_lexdef);
    MOZ_ASSERT(pn->pn_scopecoord.isFree());

    /*
     * We are compiling a function body and may be able to optimize name
     * to stack slot. Look for an argument or variable in the function and
     * rewrite pn_op and update pn accordingly.
     */
    switch (dn->kind()) {
      case Definition::ARG:
        switch (op) {
          case JSOP_GETNAME:
            op = JSOP_GETARG; break;
          case JSOP_SETNAME:
          case JSOP_STRICTSETNAME:
            op = JSOP_SETARG; break;
          default: MOZ_CRASH("arg");
        }
        MOZ_ASSERT(!pn->isConst());
        break;

      case Definition::VAR:
      case Definition::CONSTANT:
      case Definition::LET:
        switch (op) {
          case JSOP_GETNAME:
            op = JSOP_GETLOCAL; break;
          case JSOP_SETNAME:
          case JSOP_STRICTSETNAME:
            op = JSOP_SETLOCAL; break;
          default: MOZ_CRASH("local");
        }
        break;

      case Definition::NAMED_LAMBDA: {
        MOZ_ASSERT(dn->isOp(JSOP_CALLEE));
        MOZ_ASSERT(op != JSOP_CALLEE);

        /*
         * Currently, the ALIASEDVAR ops do not support accessing the
         * callee of a DeclEnvObject, so use NAME.
         */
        JSFunction* fun = sc->asFunctionBox()->function();
        if (blockScopeOfDef(dn) != fun)
            return true;

        MOZ_ASSERT(fun->isLambda());
        MOZ_ASSERT(pn->pn_atom == fun->atom());

        /*
         * Leave pn->isOp(JSOP_GETNAME) if this->fun needs a CallObject to
         * address two cases: a new binding introduced by eval, and
         * assignment to the name in strict mode.
         *
         *   var fun = (function f(s) { eval(s); return f; });
         *   assertEq(fun("var f = 42"), 42);
         *
         * ECMAScript specifies that a function expression's name is bound
         * in a lexical environment distinct from that used to bind its
         * named parameters, the arguments object, and its variables. The
         * new binding for "var f = 42" shadows the binding for the
         * function itself, so the name of the function will not refer to
         * the function.
         *
         *   (function f() { "use strict"; f = 12; })();
         *
         * Outside strict mode, assignment to a function expression's name
         * has no effect. But in strict mode, this attempt to mutate an
         * immutable binding must throw a TypeError. We implement this by
         * not optimizing such assignments and by marking such functions as
         * needsCallObject, ensuring that the function name is represented in
         * the scope chain so that assignment will throw a TypeError.
         */
        if (!sc->asFunctionBox()->needsCallObject()) {
            op = JSOP_CALLEE;
            pn->pn_dflags |= PND_CONST;
        }

        pn->setOp(op);
        pn->pn_dflags |= PND_BOUND;
        return true;
      }

      case Definition::PLACEHOLDER:
        return true;

      case Definition::IMPORT:
        if (op == JSOP_GETNAME)
            pn->setOp(JSOP_GETIMPORT);
        return true;

      case Definition::MISSING:
        MOZ_CRASH("unexpected definition kind");
    }

    // The hop count is the number of dynamic scopes during execution that must
    // be skipped to access the binding.
    BytecodeEmitter* bceOfDef;
    uint32_t slot = dn->pn_scopecoord.slot();
    uint32_t hops = computeHops(pn, &bceOfDef);

    /*
     * Explicitly disallow accessing var/let bindings in global scope from
     * nested functions. The reason for this limitation is that, since the
     * global script is not included in the static scope chain (1. because it
     * has no object to stand in the static scope chain, 2. to minimize memory
     * bloat where a single live function keeps its whole global script
     * alive.), ScopeCoordinateToTypeSet is not able to find the var/let's
     * associated TypeSet.
     */
    if (bceOfDef != this && bceOfDef->sc->isGlobalContext())
        return true;

    if (!pn->pn_scopecoord.set(parser->tokenStream, hops, slot))
        return false;

    if (!computeDefinitionIsAliased(bceOfDef, dn, &op))
        return false;

    // Re-set the slot on if it is aliased, since the slot would have been
    // translated on dn.
    if (IsAliasedVarOp(op)) {
        MOZ_ASSERT(dn->isKnownAliased());
        pn->pn_scopecoord.setSlot(parser->tokenStream, dn->pn_scopecoord.slot());
    }

    MOZ_ASSERT(!pn->isOp(op));
    pn->setOp(op);
    pn->pn_dflags |= PND_BOUND;
    return true;
}
/*
 * Attempts to bind the name, then checks that no dynamic scope lookup ops are
 * emitted in self-hosting mode. NAME ops do lookups off current scope chain,
 * and we do not want to allow self-hosted code to use the dynamic scope.
 */
bool
BytecodeEmitter::bindNameToSlot(ParseNode* pn)
{
    if (!bindNameToSlotHelper(pn))
        return false;

    // Normalize assignment ops for strict mode after binding.
    strictifySetNameNode(pn);

    bool selfHosting = emitterMode == BytecodeEmitter::SelfHosting;
    if (selfHosting && !pn->isBound()) {
        reportError(pn, JSMSG_SELFHOSTED_UNBOUND_NAME);
        return false;
    }
    return true;
}
/*
 * Determine whether evaluating |pn| could observably affect, or be affected
 * by, state outside the expression itself; the verdict is reported through
 * |*answer|. Returns false only on over-recursion. The analysis is
 * conservative: when in doubt, a node is reported as effectful.
 */
bool
BytecodeEmitter::checkSideEffects(ParseNode* pn, bool* answer)
{
    JS_CHECK_RECURSION(cx, return false);

  restart:

    switch (pn->getKind()) {
      // Trivial cases with no side effects.
      case PNK_NOP:
      case PNK_STRING:
      case PNK_TEMPLATE_STRING:
      case PNK_REGEXP:
      case PNK_TRUE:
      case PNK_FALSE:
      case PNK_NULL:
      case PNK_ELISION:
      case PNK_GENERATOR:
      case PNK_NUMBER:
      case PNK_OBJECT_PROPERTY_NAME:
        MOZ_ASSERT(pn->isArity(PN_NULLARY));
        *answer = false;
        return true;

      // |this| can throw in derived class constructors, including nested arrow
      // functions or eval.
      case PNK_THIS:
        MOZ_ASSERT(pn->isArity(PN_UNARY));
        *answer = sc->needsThisTDZChecks();
        return true;

      // Trivial binary nodes with more token pos holders.
      case PNK_NEWTARGET:
        MOZ_ASSERT(pn->isArity(PN_BINARY));
        MOZ_ASSERT(pn->pn_left->isKind(PNK_POSHOLDER));
        MOZ_ASSERT(pn->pn_right->isKind(PNK_POSHOLDER));
        *answer = false;
        return true;

      case PNK_BREAK:
      case PNK_CONTINUE:
      case PNK_DEBUGGER:
        MOZ_ASSERT(pn->isArity(PN_NULLARY));
        *answer = true;
        return true;

      // Watch out for getters!
      case PNK_DOT:
        MOZ_ASSERT(pn->isArity(PN_NAME));
        *answer = true;
        return true;

      // Unary cases with side effects only if the child has them.
      case PNK_TYPEOFEXPR:
      case PNK_VOID:
      case PNK_NOT:
      case PNK_COMPUTED_NAME:
        MOZ_ASSERT(pn->isArity(PN_UNARY));
        return checkSideEffects(pn->pn_kid, answer);

      // Looking up or evaluating the associated name could throw.
      case PNK_TYPEOFNAME:
        MOZ_ASSERT(pn->isArity(PN_UNARY));
        *answer = true;
        return true;

      // These unary cases have side effects on the enclosing object/array,
      // sure.  But that's not the question this function answers: it's
      // whether the operation may have a side effect on something *other* than
      // the result of the overall operation in which it's embedded.  The
      // answer to that is no, for an object literal having a mutated prototype
      // and an array comprehension containing no other effectful operations
      // only produce a value, without affecting anything else.
      case PNK_MUTATEPROTO:
      case PNK_ARRAYPUSH:
        MOZ_ASSERT(pn->isArity(PN_UNARY));
        return checkSideEffects(pn->pn_kid, answer);

      // Unary cases with obvious side effects.
      case PNK_PREINCREMENT:
      case PNK_POSTINCREMENT:
      case PNK_PREDECREMENT:
      case PNK_POSTDECREMENT:
      case PNK_THROW:
        MOZ_ASSERT(pn->isArity(PN_UNARY));
        *answer = true;
        return true;

      // These might invoke valueOf/toString, even with a subexpression without
      // side effects!  Consider |+{ valueOf: null, toString: null }|.
      case PNK_BITNOT:
      case PNK_POS:
      case PNK_NEG:
        MOZ_ASSERT(pn->isArity(PN_UNARY));
        *answer = true;
        return true;

      // This invokes the (user-controllable) iterator protocol.
      case PNK_SPREAD:
        MOZ_ASSERT(pn->isArity(PN_UNARY));
        *answer = true;
        return true;

      case PNK_YIELD_STAR:
      case PNK_YIELD:
        MOZ_ASSERT(pn->isArity(PN_BINARY));
        *answer = true;
        return true;

      // Deletion generally has side effects, even if isolated cases have none.
      case PNK_DELETENAME:
      case PNK_DELETEPROP:
      case PNK_DELETEELEM:
        MOZ_ASSERT(pn->isArity(PN_UNARY));
        *answer = true;
        return true;

      // Deletion of a non-Reference expression has side effects only through
      // evaluating the expression.
      case PNK_DELETEEXPR: {
        MOZ_ASSERT(pn->isArity(PN_UNARY));
        ParseNode* expr = pn->pn_kid;
        return checkSideEffects(expr, answer);
      }

      case PNK_SEMI:
        MOZ_ASSERT(pn->isArity(PN_UNARY));
        // An expression statement is only as effectful as its expression; an
        // empty statement has no effect.
        if (ParseNode* expr = pn->pn_kid)
            return checkSideEffects(expr, answer);
        *answer = false;
        return true;

      // Binary cases with obvious side effects.
      case PNK_ASSIGN:
      case PNK_ADDASSIGN:
      case PNK_SUBASSIGN:
      case PNK_BITORASSIGN:
      case PNK_BITXORASSIGN:
      case PNK_BITANDASSIGN:
      case PNK_LSHASSIGN:
      case PNK_RSHASSIGN:
      case PNK_URSHASSIGN:
      case PNK_MULASSIGN:
      case PNK_DIVASSIGN:
      case PNK_MODASSIGN:
      case PNK_POWASSIGN:
      case PNK_SETTHIS:
        MOZ_ASSERT(pn->isArity(PN_BINARY));
        *answer = true;
        return true;

      case PNK_STATEMENTLIST:
      case PNK_CATCHLIST:
      // Strict equality operations and logical operators are well-behaved and
      // perform no conversions.
      case PNK_OR:
      case PNK_AND:
      case PNK_STRICTEQ:
      case PNK_STRICTNE:
      // Any subexpression of a comma expression could be effectful.
      case PNK_COMMA:
        MOZ_ASSERT(pn->pn_count > 0);
      // Subcomponents of a literal may be effectful.
      case PNK_ARRAY:
      case PNK_OBJECT:
        MOZ_ASSERT(pn->isArity(PN_LIST));
        // Effectful as soon as any list element is effectful.
        for (ParseNode* item = pn->pn_head; item; item = item->pn_next) {
            if (!checkSideEffects(item, answer))
                return false;
            if (*answer)
                return true;
        }
        return true;

      // Most other binary operations (parsed as lists in SpiderMonkey) may
      // perform conversions triggering side effects.  Math operations perform
      // ToNumber and may fail invoking invalid user-defined toString/valueOf:
      // |5 < { toString: null }|.  |instanceof| throws if provided a
      // non-object constructor: |null instanceof null|.  |in| throws if given
      // a non-object RHS: |5 in null|.
      case PNK_BITOR:
      case PNK_BITXOR:
      case PNK_BITAND:
      case PNK_EQ:
      case PNK_NE:
      case PNK_LT:
      case PNK_LE:
      case PNK_GT:
      case PNK_GE:
      case PNK_INSTANCEOF:
      case PNK_IN:
      case PNK_LSH:
      case PNK_RSH:
      case PNK_URSH:
      case PNK_ADD:
      case PNK_SUB:
      case PNK_STAR:
      case PNK_DIV:
      case PNK_MOD:
      case PNK_POW:
        MOZ_ASSERT(pn->isArity(PN_LIST));
        MOZ_ASSERT(pn->pn_count >= 2);
        *answer = true;
        return true;

      case PNK_COLON:
      case PNK_CASE:
        MOZ_ASSERT(pn->isArity(PN_BINARY));
        if (!checkSideEffects(pn->pn_left, answer))
            return false;
        if (*answer)
            return true;
        return checkSideEffects(pn->pn_right, answer);

      // More getters.
      case PNK_ELEM:
        MOZ_ASSERT(pn->isArity(PN_BINARY));
        *answer = true;
        return true;

      // These affect visible names in this code, or in other code.
      case PNK_IMPORT:
      case PNK_EXPORT_FROM:
      case PNK_EXPORT_DEFAULT:
        MOZ_ASSERT(pn->isArity(PN_BINARY));
        *answer = true;
        return true;

      // Likewise.
      case PNK_EXPORT:
        MOZ_ASSERT(pn->isArity(PN_UNARY));
        *answer = true;
        return true;

      // Every part of a loop might be effect-free, but looping infinitely *is*
      // an effect.  (Language lawyer trivia: C++ says threads can be assumed
      // to exit or have side effects, C++14 [intro.multithread]p27, so a C++
      // implementation's equivalent of the below could set |*answer = false;|
      // if all loop sub-nodes set |*answer = false|!)
      case PNK_DOWHILE:
      case PNK_WHILE:
      case PNK_FOR:
      case PNK_COMPREHENSIONFOR:
        MOZ_ASSERT(pn->isArity(PN_BINARY));
        *answer = true;
        return true;

      // Declarations affect the name set of the relevant scope.
      case PNK_VAR:
      case PNK_CONST:
      case PNK_LET:
        MOZ_ASSERT(pn->isArity(PN_LIST));
        *answer = true;
        return true;

      case PNK_IF:
      case PNK_CONDITIONAL:
        MOZ_ASSERT(pn->isArity(PN_TERNARY));
        if (!checkSideEffects(pn->pn_kid1, answer))
            return false;
        if (*answer)
            return true;
        if (!checkSideEffects(pn->pn_kid2, answer))
            return false;
        if (*answer)
            return true;
        // Tail-recurse on the else branch via the restart label to limit
        // native stack use.
        if ((pn = pn->pn_kid3))
            goto restart;
        return true;

      // Function calls can invoke non-local code.
      case PNK_NEW:
      case PNK_CALL:
      case PNK_TAGGED_TEMPLATE:
      case PNK_SUPERCALL:
        MOZ_ASSERT(pn->isArity(PN_LIST));
        *answer = true;
        return true;

      // Classes typically introduce names.  Even if no name is introduced,
      // the heritage and/or class body (through computed property names)
      // usually have effects.
      case PNK_CLASS:
        MOZ_ASSERT(pn->isArity(PN_TERNARY));
        *answer = true;
        return true;

      // |with| calls |ToObject| on its expression and so throws if that value
      // is null/undefined.
      case PNK_WITH:
        MOZ_ASSERT(pn->isArity(PN_BINARY));
        *answer = true;
        return true;

      case PNK_RETURN:
        MOZ_ASSERT(pn->isArity(PN_BINARY));
        *answer = true;
        return true;

      case PNK_NAME:
        MOZ_ASSERT(pn->isArity(PN_NAME));
        *answer = true;
        return true;

      // Shorthands could trigger getters: the |x| in the object literal in
      // |with ({ get x() { throw 42; } }) ({ x });|, for example, triggers
      // one.  (Of course, it isn't necessary to use |with| for a shorthand to
      // trigger a getter.)
      case PNK_SHORTHAND:
        MOZ_ASSERT(pn->isArity(PN_BINARY));
        *answer = true;
        return true;

      case PNK_FUNCTION:
        MOZ_ASSERT(pn->isArity(PN_CODE));
        /*
         * A named function, contrary to ES3, is no longer effectful, because
         * we bind its name lexically (using JSOP_CALLEE) instead of creating
         * an Object instance and binding a readonly, permanent property in it
         * (the object and binding can be detected and hijacked or captured).
         * This is a bug fix to ES3; it is fixed in ES3.1 drafts.
         */
        *answer = false;
        return true;

      case PNK_MODULE:
        *answer = false;
        return true;

      // Generator expressions have no side effects on their own.
      case PNK_GENEXP:
        MOZ_ASSERT(pn->isArity(PN_LIST));
        *answer = false;
        return true;

      case PNK_TRY:
        MOZ_ASSERT(pn->isArity(PN_TERNARY));
        if (!checkSideEffects(pn->pn_kid1, answer))
            return false;
        if (*answer)
            return true;
        if (ParseNode* catchList = pn->pn_kid2) {
            MOZ_ASSERT(catchList->isKind(PNK_CATCHLIST));
            if (!checkSideEffects(catchList, answer))
                return false;
            if (*answer)
                return true;
        }
        if (ParseNode* finallyBlock = pn->pn_kid3) {
            if (!checkSideEffects(finallyBlock, answer))
                return false;
        }
        return true;

      case PNK_CATCH:
        MOZ_ASSERT(pn->isArity(PN_TERNARY));
        if (!checkSideEffects(pn->pn_kid1, answer))
            return false;
        if (*answer)
            return true;
        if (ParseNode* cond = pn->pn_kid2) {
            if (!checkSideEffects(cond, answer))
                return false;
            if (*answer)
                return true;
        }
        return checkSideEffects(pn->pn_kid3, answer);

      case PNK_SWITCH:
      case PNK_LETBLOCK:
        MOZ_ASSERT(pn->isArity(PN_BINARY));
        if (!checkSideEffects(pn->pn_left, answer))
            return false;
        return *answer || checkSideEffects(pn->pn_right, answer);

      case PNK_LABEL:
      case PNK_LEXICALSCOPE:
        MOZ_ASSERT(pn->isArity(PN_NAME));
        return checkSideEffects(pn->expr(), answer);

      // We could methodically check every interpolated expression, but it's
      // probably not worth the trouble.  Treat template strings as effect-free
      // only if they don't contain any substitutions.
      case PNK_TEMPLATE_STRING_LIST:
        MOZ_ASSERT(pn->isArity(PN_LIST));
        MOZ_ASSERT(pn->pn_count > 0);
        MOZ_ASSERT((pn->pn_count % 2) == 1,
                   "template strings must alternate template and substitution "
                   "parts");
        *answer = pn->pn_count > 1;
        return true;

      case PNK_ARRAYCOMP:
        MOZ_ASSERT(pn->isArity(PN_LIST));
        MOZ_ASSERT(pn->pn_count == 1);
        return checkSideEffects(pn->pn_head, answer);

      case PNK_ARGSBODY:
        *answer = true;
        return true;

      case PNK_FORIN:           // by PNK_FOR/PNK_COMPREHENSIONFOR
      case PNK_FOROF:           // by PNK_FOR/PNK_COMPREHENSIONFOR
      case PNK_FORHEAD:         // by PNK_FOR/PNK_COMPREHENSIONFOR
      case PNK_CLASSMETHOD:     // by PNK_CLASS
      case PNK_CLASSNAMES:      // by PNK_CLASS
      case PNK_CLASSMETHODLIST: // by PNK_CLASS
      case PNK_IMPORT_SPEC_LIST: // by PNK_IMPORT
      case PNK_IMPORT_SPEC:      // by PNK_IMPORT
      case PNK_EXPORT_BATCH_SPEC:// by PNK_EXPORT
      case PNK_EXPORT_SPEC_LIST: // by PNK_EXPORT
      case PNK_EXPORT_SPEC:      // by PNK_EXPORT
      case PNK_CALLSITEOBJ:      // by PNK_TAGGED_TEMPLATE
      case PNK_POSHOLDER:        // by PNK_NEWTARGET
      case PNK_SUPERBASE:        // by PNK_ELEM and others
        MOZ_CRASH("handled by parent nodes");

      case PNK_LIMIT: // invalid sentinel value
        MOZ_CRASH("invalid node kind");
    }

    MOZ_CRASH("invalid, unenumerated ParseNodeKind value encountered in "
              "BytecodeEmitter::checkSideEffects");
}
bool
BytecodeEmitter::isInLoop()
{
    // Scan the enclosing statement stack for any loop construct.
    StmtInfoBCE* stmt = innermostStmt();
    while (stmt) {
        if (stmt->isLoop())
            return true;
        stmt = stmt->enclosing;
    }
    return false;
}
bool
BytecodeEmitter::checkSingletonContext()
{
if (!script->treatAsRunOnce() || sc->isFunctionBox() || isInLoop())
return false;
hasSingletons = true;
return true;
}
bool
BytecodeEmitter::checkRunOnceContext()
{
    // Run-once contexts are singleton contexts plus run-once lambdas that
    // are not inside a loop.
    if (checkSingletonContext())
        return true;
    return !isInLoop() && isRunOnceLambda();
}
bool
BytecodeEmitter::needsImplicitThis()
{
    // An implicit |this| is needed when a 'with' encloses us, either in the
    // static scope chain or in the current statement stack.
    if (sc->inWith())
        return true;

    StmtInfoBCE* stmt = innermostStmt();
    for (; stmt; stmt = stmt->enclosing) {
        if (stmt->type == StmtType::WITH)
            return true;
    }
    return false;
}
void
BytecodeEmitter::tellDebuggerAboutCompiledScript(ExclusiveContext* cx)
{
    // Fire the Debugger onNewScript hook for top-level scripts compiled on
    // the main thread. When parsing off thread the resulting scripts need to
    // be handed to the debugger after rejoining to the main thread. Lazy
    // scripts are never top level (despite always being invoked with a
    // nullptr parent), and so the hook should never be fired for them.
    if (!cx->isJSContext())
        return;

    bool topLevel = emitterMode != LazyFunction && !parent;
    if (topLevel)
        Debugger::onNewScript(cx->asJSContext(), script);
}
inline TokenStream*
BytecodeEmitter::tokenStream()
{
    // The emitter shares its parser's token stream (used for error positions).
    TokenStream& ts = parser->tokenStream;
    return &ts;
}
bool
BytecodeEmitter::reportError(ParseNode* pn, unsigned errorNumber, ...)
{
    // Report a hard compile error at |pn|'s position, or at the current
    // token if no node is supplied.
    TokenPos pos = pn ? pn->pn_pos : tokenStream()->currentToken().pos;

    va_list ap;
    va_start(ap, errorNumber);
    bool ok = tokenStream()->reportCompileErrorNumberVA(pos.begin, JSREPORT_ERROR,
                                                        errorNumber, ap);
    va_end(ap);
    return ok;
}
/*
 * Report a strict warning.  The position comes from |pn| when non-null,
 * otherwise from the token stream's current token; varargs are forwarded
 * to the token stream's reporting machinery.
 */
bool
BytecodeEmitter::reportStrictWarning(ParseNode* pn, unsigned errorNumber, ...)
{
    TokenPos pos = pn ? pn->pn_pos : tokenStream()->currentToken().pos;
    va_list args;
    va_start(args, errorNumber);
    bool result = tokenStream()->reportStrictWarningErrorNumberVA(pos.begin, errorNumber, args);
    va_end(args);
    return result;
}
/*
 * Report a strict-mode error.  Like reportStrictWarning, but also passes the
 * current scope's strictness (sc->strict()) so the token stream can decide
 * how to treat the diagnostic.
 */
bool
BytecodeEmitter::reportStrictModeError(ParseNode* pn, unsigned errorNumber, ...)
{
    TokenPos pos = pn ? pn->pn_pos : tokenStream()->currentToken().pos;
    va_list args;
    va_start(args, errorNumber);
    bool result = tokenStream()->reportStrictModeErrorNumberVA(pos.begin, sc->strict(),
                                                               errorNumber, args);
    va_end(args);
    return result;
}
bool
BytecodeEmitter::emitNewInit(JSProtoKey key)
{
    // JSOP_NEWINIT occupies one opcode byte plus a uint32 operand; only the
    // first operand byte carries the proto key, the rest are zeroed.
    const size_t len = 1 + UINT32_INDEX_LEN;
    ptrdiff_t offset;
    if (!emitCheck(len, &offset))
        return false;

    jsbytecode* pc = this->code(offset);
    pc[0] = JSOP_NEWINIT;
    pc[1] = jsbytecode(key);
    for (size_t i = 2; i <= 4; i++)
        pc[i] = 0;

    updateDepth(offset);
    checkTypeSet(JSOP_NEWINIT);
    return true;
}
/*
 * Build a template object with the iterator-result shape ({value, done},
 * both enumerable, both initially undefined) and record it in the object
 * list; *shape receives the list index for use as a JSOP_NEWOBJECT operand.
 */
bool
BytecodeEmitter::iteratorResultShape(unsigned* shape)
{
    // No need to do any guessing for the object kind, since we know exactly how
    // many properties we plan to have.
    gc::AllocKind kind = gc::GetGCObjectKind(2);
    RootedPlainObject obj(cx, NewBuiltinClassInstance<PlainObject>(cx, kind, TenuredObject));
    if (!obj)
        return false;

    Rooted<jsid> value_id(cx, AtomToId(cx->names().value));
    Rooted<jsid> done_id(cx, AtomToId(cx->names().done));
    // Define 'value' first, then 'done', so the template's property order
    // matches the order emitFinishIteratorResult initializes them in.
    if (!NativeDefineProperty(cx, obj, value_id, UndefinedHandleValue, nullptr, nullptr,
                              JSPROP_ENUMERATE))
    {
        return false;
    }
    if (!NativeDefineProperty(cx, obj, done_id, UndefinedHandleValue, nullptr, nullptr,
                              JSPROP_ENUMERATE))
    {
        return false;
    }

    // Box the template object so the parser/emitter can reference it by index.
    ObjectBox* objbox = parser->newObjectBox(obj);
    if (!objbox)
        return false;

    *shape = objectList.add(objbox);
    return true;
}
bool
BytecodeEmitter::emitPrepareIteratorResult()
{
    // Push a fresh object with the canonical {value, done} result shape.
    unsigned shape;
    return iteratorResultShape(&shape) && emitIndex32(JSOP_NEWOBJECT, shape);
}
bool
BytecodeEmitter::emitFinishIteratorResult(bool done)
{
    // Initialize the 'value' and 'done' properties on the iterator-result
    // object.  Atom indices are created in the same order as before
    // (value, then done) so the emitted operands are unchanged.
    jsatomid value_id;
    if (!makeAtomIndex(cx->names().value, &value_id))
        return false;
    jsatomid done_id;
    if (!makeAtomIndex(cx->names().done, &done_id))
        return false;

    return emitIndex32(JSOP_INITPROP, value_id) &&
           emit1(done ? JSOP_TRUE : JSOP_FALSE) &&
           emitIndex32(JSOP_INITPROP, done_id);
}
/*
 * Emit the op for a name reference.  bindNameToSlot decides the final op and
 * (possibly) a scope coordinate; we then emit either a var-slot op, an
 * atom-indexed op, or JSOP_CALLEE.  When |callContext| is true, also push a
 * |this| value for the upcoming call.
 */
bool
BytecodeEmitter::emitNameOp(ParseNode* pn, bool callContext)
{
    if (!bindNameToSlot(pn))
        return false;

    JSOp op = pn->getOp();

    if (op == JSOP_CALLEE) {
        if (!emit1(op))
            return false;
    } else {
        if (!pn->pn_scopecoord.isFree()) {
            // The name resolved to a known scope slot; use the coordinate
            // form rather than an atom-indexed op.
            MOZ_ASSERT(JOF_OPTYPE(op) != JOF_ATOM);
            if (!emitVarOp(pn, op))
                return false;
        } else {
            if (!emitAtomOp(pn, op))
                return false;
        }
    }

    /* Need to provide |this| value for call */
    if (callContext) {
        if (op == JSOP_GETNAME || op == JSOP_GETGNAME) {
            // Dynamic name lookups need the implicit-this op so a 'with'
            // object (if any) supplies |this|; see needsImplicitThis.
            JSOp thisOp = needsImplicitThis() ? JSOP_IMPLICITTHIS : JSOP_GIMPLICITTHIS;
            if (!emitAtomOp(pn, thisOp))
                return false;
        } else {
            // Slot-resolved names can never have a 'with'-supplied |this|.
            if (!emit1(JSOP_UNDEFINED))
                return false;
        }
    }
    return true;
}
/*
 * Emit the left-hand side of a (non-super) property access: everything up to
 * but not including the final JSOP_GETPROP for |pn| itself.  A chain of dotted
 * accesses (a.b.c.d) is emitted iteratively rather than recursively.
 */
bool
BytecodeEmitter::emitPropLHS(ParseNode* pn)
{
    MOZ_ASSERT(pn->isKind(PNK_DOT));
    MOZ_ASSERT(!pn->as<PropertyAccess>().isSuper());

    ParseNode* pn2 = pn->maybeExpr();

    /*
     * If the object operand is also a dotted property reference, reverse the
     * list linked via pn_expr temporarily so we can iterate over it from the
     * bottom up (reversing again as we go), to avoid excessive recursion.
     */
    if (pn2->isKind(PNK_DOT) && !pn2->as<PropertyAccess>().isSuper()) {
        ParseNode* pndot = pn2;
        ParseNode* pnup = nullptr;
        ParseNode* pndown;
        for (;;) {
            /* Reverse pndot->pn_expr to point up, not down. */
            MOZ_ASSERT(!pndot->isUsed());
            pndown = pndot->pn_expr;
            pndot->pn_expr = pnup;
            // Stop at the first node that is not a plain dotted access;
            // super accesses are handled by their own emit path.
            if (!pndown->isKind(PNK_DOT) || pndown->as<PropertyAccess>().isSuper())
                break;
            pnup = pndot;
            pndot = pndown;
        }

        /* pndown is a primary expression, not a dotted property reference. */
        if (!emitTree(pndown))
            return false;

        do {
            /* Walk back up the list, emitting annotated name ops. */
            if (!emitAtomOp(pndot, JSOP_GETPROP))
                return false;

            /* Reverse the pn_expr link again. */
            pnup = pndot->pn_expr;
            pndot->pn_expr = pndown;
            pndown = pndot;
        } while ((pndot = pnup) != nullptr);
        return true;
    }

    // The non-optimized case.
    return emitTree(pn2);
}
bool
BytecodeEmitter::emitSuperPropLHS(ParseNode* superBase, bool isCall)
{
    // Push |this| for the super base, duplicating it for call sites, then
    // push the super-base object itself.
    if (!emitGetThisForSuperBase(superBase))
        return false;
    if (isCall) {
        if (!emit1(JSOP_DUP))
            return false;
    }
    return emit1(JSOP_SUPERBASE);
}
bool
BytecodeEmitter::emitPropOp(ParseNode* pn, JSOp op)
{
    MOZ_ASSERT(pn->isArity(PN_NAME));

    if (!emitPropLHS(pn))
        return false;

    // Call sites duplicate the receiver before the get, then swap so the
    // callee ends up above its |this| value.
    bool forCall = (op == JSOP_CALLPROP);
    if (forCall) {
        if (!emit1(JSOP_DUP))
            return false;
    }

    if (!emitAtomOp(pn, op))
        return false;

    return !forCall || emit1(JSOP_SWAP);
}
bool
BytecodeEmitter::emitSuperPropOp(ParseNode* pn, JSOp op, bool isCall)
{
    // Emit the super-property LHS, the property op itself, and for calls a
    // final swap to put the callee above its |this| value.
    ParseNode* expr = &pn->as<PropertyAccess>().expression();
    if (!emitSuperPropLHS(expr, isCall))
        return false;
    if (!emitAtomOp(pn, op))
        return false;
    return !isCall || emit1(JSOP_SWAP);
}
/*
 * Emit a property increment/decrement (++obj.prop, obj.prop--, etc.),
 * including the super.prop forms.  The trailing stack comments track the
 * operands; for the super case an extra THIS value sits under OBJ, which is
 * why the PICK depths differ by |isSuper|.
 */
bool
BytecodeEmitter::emitPropIncDec(ParseNode* pn)
{
    MOZ_ASSERT(pn->pn_kid->isKind(PNK_DOT));

    bool post;
    bool isSuper = pn->pn_kid->as<PropertyAccess>().isSuper();
    JSOp binop = GetIncDecInfo(pn->getKind(), &post);

    if (isSuper) {
        ParseNode* base = &pn->pn_kid->as<PropertyAccess>().expression();
        if (!emitSuperPropLHS(base))                // THIS OBJ
            return false;
        if (!emit1(JSOP_DUP2))                      // THIS OBJ THIS OBJ
            return false;
    } else {
        if (!emitPropLHS(pn->pn_kid))               // OBJ
            return false;
        if (!emit1(JSOP_DUP))                       // OBJ OBJ
            return false;
    }
    if (!emitAtomOp(pn->pn_kid, isSuper? JSOP_GETPROP_SUPER : JSOP_GETPROP)) // OBJ V
        return false;
    // JSOP_POS coerces the old value to a number before the arithmetic.
    if (!emit1(JSOP_POS))                           // OBJ N
        return false;
    // Postfix forms keep a copy of the original (coerced) value as the result.
    if (post && !emit1(JSOP_DUP))                   // OBJ N? N
        return false;
    if (!emit1(JSOP_ONE))                           // OBJ N? N 1
        return false;
    if (!emit1(binop))                              // OBJ N? N+1
        return false;

    if (post) {
        // Rearrange so the receiver(s) sit below the new value for the set.
        if (!emit2(JSOP_PICK, 2 + isSuper))         // N? N+1 OBJ
            return false;
        if (!emit1(JSOP_SWAP))                      // N? OBJ N+1
            return false;
        if (isSuper) {
            if (!emit2(JSOP_PICK, 3))               // N THIS N+1 OBJ
                return false;
            if (!emit1(JSOP_SWAP))                  // N THIS OBJ N+1
                return false;
        }
    }

    JSOp setOp = isSuper ? sc->strict() ? JSOP_STRICTSETPROP_SUPER : JSOP_SETPROP_SUPER
                         : sc->strict() ? JSOP_STRICTSETPROP : JSOP_SETPROP;
    if (!emitAtomOp(pn->pn_kid, setOp))             // N? N+1
        return false;
    // For postfix, discard the set's result, leaving the saved original value.
    if (post && !emit1(JSOP_POP))                   // RESULT
        return false;

    return true;
}
bool
BytecodeEmitter::emitNameIncDec(ParseNode* pn)
{
const JSCodeSpec* cs = &CodeSpec[pn->pn_kid->getOp()];
bool global = (cs->format & JOF_GNAME);
bool post;
JSOp binop = GetIncDecInfo(pn->getKind(), &post);
if (!emitAtomOp(pn->pn_kid, global ? JSOP_BINDGNAME : JSOP_BINDNAME)) // OBJ
return false;
if (!emitAtomOp(pn->pn_kid, global ? JSOP_GETGNAME : JSOP_GETNAME)) // OBJ V
return false;
if (!emit1(JSOP_POS)) // OBJ N
return false;
if (post && !emit1(JSOP_DUP)) // OBJ N? N
return false;
if (!emit1(JSOP_ONE)) // OBJ N? N 1
return false