// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/compiler/bytecode-graph-builder.h"
#include "src/ast/ast.h"
#include "src/codegen/source-position-table.h"
#include "src/codegen/tick-counter.h"
#include "src/compiler/access-builder.h"
#include "src/compiler/bytecode-analysis.h"
#include "src/compiler/compiler-source-position-table.h"
#include "src/compiler/js-heap-broker.h"
#include "src/compiler/linkage.h"
#include "src/compiler/node-matchers.h"
#include "src/compiler/operator-properties.h"
#include "src/compiler/simplified-operator.h"
#include "src/compiler/state-values-utils.h"
#include "src/interpreter/bytecode-array-iterator.h"
#include "src/interpreter/bytecode-flags.h"
#include "src/interpreter/bytecodes.h"
#include "src/objects/js-array-inl.h"
#include "src/objects/js-generator.h"
#include "src/objects/literal-objects-inl.h"
#include "src/objects/objects-inl.h"
#include "src/objects/smi.h"
#include "src/objects/template-objects.h"
namespace v8 {
namespace internal {
namespace compiler {
class BytecodeGraphBuilder {
public:
BytecodeGraphBuilder(JSHeapBroker* broker, Zone* local_zone,
NativeContextRef const& native_context,
SharedFunctionInfoRef const& shared_info,
FeedbackCellRef const& feedback_cell,
BailoutId osr_offset, JSGraph* jsgraph,
CallFrequency const& invocation_frequency,
SourcePositionTable* source_positions, int inlining_id,
CodeKind code_kind, BytecodeGraphBuilderFlags flags,
TickCounter* tick_counter);
BytecodeGraphBuilder(const BytecodeGraphBuilder&) = delete;
BytecodeGraphBuilder& operator=(const BytecodeGraphBuilder&) = delete;
// Creates a graph by visiting bytecodes.
void CreateGraph();
private:
class Environment;
class OsrIteratorState;
struct SubEnvironment;
void RemoveMergeEnvironmentsBeforeOffset(int limit_offset);
void AdvanceToOsrEntryAndPeelLoops();
// Advance {bytecode_iterator} to the given offset. If possible, also advance
// {source_position_iterator} while updating the source position table.
void AdvanceIteratorsTo(int bytecode_offset);
void VisitSingleBytecode();
void VisitBytecodes();
// Get or create the node that represents the outer function closure.
Node* GetFunctionClosure();
CodeKind code_kind() const { return code_kind_; }
bool native_context_independent() const {
return CodeKindIsNativeContextIndependentJSFunction(code_kind_);
}
bool is_turboprop() const { return code_kind_ == CodeKind::TURBOPROP; }
bool generate_full_feedback_collection() const {
// NCI code currently collects full feedback.
DCHECK_IMPLIES(native_context_independent(),
CollectFeedbackInGenericLowering());
return native_context_independent();
}
static JSTypeHintLowering::LoweringResult NoChange() {
return JSTypeHintLowering::LoweringResult::NoChange();
}
bool CanApplyTypeHintLowering(IrOpcode::Value opcode) const {
return !generate_full_feedback_collection() ||
!IrOpcode::IsFeedbackCollectingOpcode(opcode);
}
bool CanApplyTypeHintLowering(const Operator* op) const {
return CanApplyTypeHintLowering(static_cast<IrOpcode::Value>(op->opcode()));
}
// The node representing the current feedback vector is generated once prior
// to visiting bytecodes, and is later passed as input to other nodes that
// may need it.
// TODO(jgruber): Remove feedback_vector() and rename feedback_vector_node()
// to feedback_vector() once all uses of the direct heap object reference
// have been replaced with a Node* reference.
void CreateFeedbackVectorNode();
Node* BuildLoadFeedbackVector();
Node* feedback_vector_node() const {
DCHECK_NOT_NULL(feedback_vector_node_);
return feedback_vector_node_;
}
void CreateFeedbackCellNode();
Node* BuildLoadFeedbackCell();
Node* feedback_cell_node() const {
DCHECK_NOT_NULL(feedback_cell_node_);
return feedback_cell_node_;
}
// Same as above, but for the native context node.
void CreateNativeContextNode();
Node* BuildLoadNativeContext();
Node* native_context_node() const {
DCHECK_NOT_NULL(native_context_node_);
return native_context_node_;
}
Node* BuildLoadFeedbackCell(int index);
// Checks the optimization marker and potentially triggers compilation or
// installs the finished code object.
// Only relevant for specific code kinds (see CodeKindCanTierUp).
void MaybeBuildTierUpCheck();
// Like bytecode, NCI code must collect call feedback to preserve proper
// behavior of inlining heuristics when tiering up to Turbofan in the future.
// The invocation count (how often a particular JSFunction has been called)
// is tracked by the callee. For bytecode, this happens in the
// InterpreterEntryTrampoline; for NCI code it happens here in the prologue.
void MaybeBuildIncrementInvocationCount();
// Builder for loading a native context field.
Node* BuildLoadNativeContextField(int index);
// Helper function for creating a feedback source containing the type feedback
// vector and a feedback slot.
FeedbackSource CreateFeedbackSource(int slot_id);
FeedbackSource CreateFeedbackSource(FeedbackSlot slot);
void set_environment(Environment* env) { environment_ = env; }
const Environment* environment() const { return environment_; }
Environment* environment() { return environment_; }
// Node creation helpers
Node* NewNode(const Operator* op, bool incomplete = false) {
return MakeNode(op, 0, static_cast<Node**>(nullptr), incomplete);
}
template <class... Args>
Node* NewNode(const Operator* op, Node* n0, Args... nodes) {
Node* buffer[] = {n0, nodes...};
return MakeNode(op, arraysize(buffer), buffer);
}
// Helpers to create new control nodes.
Node* NewIfTrue() { return NewNode(common()->IfTrue()); }
Node* NewIfFalse() { return NewNode(common()->IfFalse()); }
Node* NewIfValue(int32_t value) { return NewNode(common()->IfValue(value)); }
Node* NewIfDefault() { return NewNode(common()->IfDefault()); }
Node* NewMerge() { return NewNode(common()->Merge(1), true); }
Node* NewLoop() { return NewNode(common()->Loop(1), true); }
Node* NewBranch(Node* condition, BranchHint hint = BranchHint::kNone,
IsSafetyCheck is_safety_check = IsSafetyCheck::kSafetyCheck) {
return NewNode(common()->Branch(hint, is_safety_check), condition);
}
Node* NewSwitch(Node* condition, int control_output_count) {
return NewNode(common()->Switch(control_output_count), condition);
}
// Creates a new Phi node having {count} input values.
Node* NewPhi(int count, Node* input, Node* control);
Node* NewEffectPhi(int count, Node* input, Node* control);
// Helpers for merging control, effect or value dependencies.
Node* MergeControl(Node* control, Node* other);
Node* MergeEffect(Node* effect, Node* other_effect, Node* control);
Node* MergeValue(Node* value, Node* other_value, Node* control);
// The main node creation chokepoint. Adds context, frame state, effect,
// and control dependencies depending on the operator.
Node* MakeNode(const Operator* op, int value_input_count,
Node* const* value_inputs, bool incomplete = false);
Node** EnsureInputBufferSize(int size);
Node* const* GetCallArgumentsFromRegisters(Node* callee, Node* receiver,
interpreter::Register first_arg,
int arg_count);
Node* const* ProcessCallVarArgs(ConvertReceiverMode receiver_mode,
Node* callee, interpreter::Register first_reg,
int arg_count);
Node* const* GetConstructArgumentsFromRegister(
Node* target, Node* new_target, interpreter::Register first_arg,
int arg_count);
Node* ProcessCallRuntimeArguments(const Operator* call_runtime_op,
interpreter::Register receiver,
size_t reg_count);
// Prepare information for eager deoptimization. This information is carried
// by dedicated {Checkpoint} nodes that are wired into the effect chain.
// Conceptually this frame state is "before" a given operation.
void PrepareEagerCheckpoint();
// Prepare information for lazy deoptimization. This information is attached
// to the given node, and the node's output value is combined into the frame
// state according to the given {OutputFrameStateCombine}.
// Conceptually this frame state is "after" a given operation.
void PrepareFrameState(Node* node, OutputFrameStateCombine combine);
void PrepareFrameState(Node* node, OutputFrameStateCombine combine,
BailoutId bailout_id);
void BuildCreateArguments(CreateArgumentsType type);
Node* BuildLoadGlobal(NameRef name, uint32_t feedback_slot_index,
TypeofMode typeof_mode);
enum class StoreMode {
// Check the prototype chain before storing.
kNormal,
// Store value to the receiver without checking the prototype chain.
kOwn,
};
void BuildNamedStore(StoreMode store_mode);
void BuildLdaLookupSlot(TypeofMode typeof_mode);
void BuildLdaLookupContextSlot(TypeofMode typeof_mode);
void BuildLdaLookupGlobalSlot(TypeofMode typeof_mode);
void BuildCallVarArgs(ConvertReceiverMode receiver_mode);
void BuildCall(ConvertReceiverMode receiver_mode, Node* const* args,
size_t arg_count, int slot_id);
void BuildCall(ConvertReceiverMode receiver_mode,
std::initializer_list<Node*> args, int slot_id) {
BuildCall(receiver_mode, args.begin(), args.size(), slot_id);
}
void BuildUnaryOp(const Operator* op);
void BuildBinaryOp(const Operator* op);
void BuildBinaryOpWithImmediate(const Operator* op);
void BuildCompareOp(const Operator* op);
void BuildDelete(LanguageMode language_mode);
void BuildCastOperator(const Operator* op);
void BuildHoleCheckAndThrow(Node* condition, Runtime::FunctionId runtime_id,
Node* name = nullptr);
// Optional early lowering to the simplified operator level. Note that
// the result has already been wired into the environment just like
// any other invocation of {NewNode} would do.
JSTypeHintLowering::LoweringResult TryBuildSimplifiedUnaryOp(
const Operator* op, Node* operand, FeedbackSlot slot);
JSTypeHintLowering::LoweringResult TryBuildSimplifiedBinaryOp(
const Operator* op, Node* left, Node* right, FeedbackSlot slot);
JSTypeHintLowering::LoweringResult TryBuildSimplifiedForInNext(
Node* receiver, Node* cache_array, Node* cache_type, Node* index,
FeedbackSlot slot);
JSTypeHintLowering::LoweringResult TryBuildSimplifiedForInPrepare(
Node* receiver, FeedbackSlot slot);
JSTypeHintLowering::LoweringResult TryBuildSimplifiedToNumber(
Node* input, FeedbackSlot slot);
JSTypeHintLowering::LoweringResult TryBuildSimplifiedCall(const Operator* op,
Node* const* args,
int arg_count,
FeedbackSlot slot);
JSTypeHintLowering::LoweringResult TryBuildSimplifiedConstruct(
const Operator* op, Node* const* args, int arg_count, FeedbackSlot slot);
JSTypeHintLowering::LoweringResult TryBuildSimplifiedGetIterator(
const Operator* op, Node* receiver, FeedbackSlot load_slot,
FeedbackSlot call_slot);
JSTypeHintLowering::LoweringResult TryBuildSimplifiedLoadNamed(
const Operator* op, FeedbackSlot slot);
JSTypeHintLowering::LoweringResult TryBuildSimplifiedLoadKeyed(
const Operator* op, Node* receiver, Node* key, FeedbackSlot slot);
JSTypeHintLowering::LoweringResult TryBuildSimplifiedStoreNamed(
const Operator* op, Node* receiver, Node* value, FeedbackSlot slot);
JSTypeHintLowering::LoweringResult TryBuildSimplifiedStoreKeyed(
const Operator* op, Node* receiver, Node* key, Node* value,
FeedbackSlot slot);
// Applies the given early reduction onto the current environment.
void ApplyEarlyReduction(JSTypeHintLowering::LoweringResult reduction);
// Check the context chain for extensions, for lookup fast paths.
Environment* CheckContextExtensions(uint32_t depth);
// Slow path taken when we cannot figure out the current scope info.
Environment* CheckContextExtensionsSlowPath(uint32_t depth);
// Helper function that tries to get the current scope info.
base::Optional<ScopeInfoRef> TryGetScopeInfo();
// Helper function to create a context extension check.
Environment* CheckContextExtensionAtDepth(Environment* slow_environment,
uint32_t depth);
// Helper function to create for-in mode from the recorded type feedback.
ForInMode GetForInMode(FeedbackSlot slot);
// Helper function to compute call frequency from the recorded type
// feedback. Returns unknown if invocation count is unknown. Returns 0 if
// feedback is insufficient.
CallFrequency ComputeCallFrequency(int slot_id) const;
// Helper function to extract the speculation mode from the recorded type
// feedback. Returns kDisallowSpeculation if feedback is insufficient.
SpeculationMode GetSpeculationMode(int slot_id) const;
// Helpers for building the implicit FunctionEntry and IterationBody
// StackChecks.
void BuildFunctionEntryStackCheck();
void BuildIterationBodyStackCheck();
// Control flow plumbing.
void BuildJump();
void BuildJumpIf(Node* condition);
void BuildJumpIfNot(Node* condition);
void BuildJumpIfEqual(Node* comperand);
void BuildJumpIfNotEqual(Node* comperand);
void BuildJumpIfTrue();
void BuildJumpIfFalse();
void BuildJumpIfToBooleanTrue();
void BuildJumpIfToBooleanFalse();
void BuildJumpIfNotHole();
void BuildJumpIfJSReceiver();
void BuildUpdateInterruptBudget(int delta);
void BuildSwitchOnSmi(Node* condition);
void BuildSwitchOnGeneratorState(
const ZoneVector<ResumeJumpTarget>& resume_jump_targets,
bool allow_fallthrough_on_executing);
// Simulates control flow by forward-propagating environments.
void MergeIntoSuccessorEnvironment(int target_offset);
void BuildLoopHeaderEnvironment(int current_offset);
void SwitchToMergeEnvironment(int current_offset);
// Simulates control flow that exits the function body.
void MergeControlToLeaveFunction(Node* exit);
// Builds loop exit nodes for every exited loop between the current bytecode
// offset and {target_offset}.
void BuildLoopExitsForBranch(int target_offset);
void BuildLoopExitsForFunctionExit(const BytecodeLivenessState* liveness);
void BuildLoopExitsUntilLoop(int loop_offset,
const BytecodeLivenessState* liveness);
// Helper for building a return (from an actual return or a suspend).
void BuildReturn(const BytecodeLivenessState* liveness);
// Simulates entry and exit of exception handlers.
void ExitThenEnterExceptionHandlers(int current_offset);
// Update the current position of the {SourcePositionTable} to that of the
// bytecode at {offset}, if any.
void UpdateSourcePosition(int offset);
// Growth increment for the temporary buffer used to construct input lists to
// new nodes.
static const int kInputBufferSizeIncrement = 64;
// An abstract representation for an exception handler that is being
// entered and exited while the graph builder is iterating over the
// underlying bytecode. The exception handlers within the bytecode are
// well scoped, hence will form a stack during iteration.
struct ExceptionHandler {
int start_offset_; // Start offset of the handled area in the bytecode.
int end_offset_; // End offset of the handled area in the bytecode.
int handler_offset_; // Handler entry offset within the bytecode.
int context_register_; // Index of register holding handler context.
};
Graph* graph() const { return jsgraph_->graph(); }
CommonOperatorBuilder* common() const { return jsgraph_->common(); }
Zone* graph_zone() const { return graph()->zone(); }
JSGraph* jsgraph() const { return jsgraph_; }
Isolate* isolate() const { return jsgraph_->isolate(); }
JSOperatorBuilder* javascript() const { return jsgraph_->javascript(); }
SimplifiedOperatorBuilder* simplified() const {
return jsgraph_->simplified();
}
Zone* local_zone() const { return local_zone_; }
BytecodeArrayRef bytecode_array() const {
return shared_info().GetBytecodeArray();
}
FeedbackVectorRef const& feedback_vector() const { return feedback_vector_; }
const JSTypeHintLowering& type_hint_lowering() const {
return type_hint_lowering_;
}
const FrameStateFunctionInfo* frame_state_function_info() const {
return frame_state_function_info_;
}
SourcePositionTableIterator& source_position_iterator() {
return *source_position_iterator_.get();
}
interpreter::BytecodeArrayIterator& bytecode_iterator() {
return bytecode_iterator_;
}
BytecodeAnalysis const& bytecode_analysis() const {
return bytecode_analysis_;
}
int currently_peeled_loop_offset() const {
return currently_peeled_loop_offset_;
}
void set_currently_peeled_loop_offset(int offset) {
currently_peeled_loop_offset_ = offset;
}
bool skip_first_stack_check() const { return skip_first_stack_check_; }
int current_exception_handler() const { return current_exception_handler_; }
void set_current_exception_handler(int index) {
current_exception_handler_ = index;
}
bool needs_eager_checkpoint() const { return needs_eager_checkpoint_; }
void mark_as_needing_eager_checkpoint(bool value) {
needs_eager_checkpoint_ = value;
}
JSHeapBroker* broker() const { return broker_; }
NativeContextRef native_context() const { return native_context_; }
SharedFunctionInfoRef shared_info() const { return shared_info_; }
bool should_disallow_heap_access() const {
return broker_->is_concurrent_inlining();
}
#define DECLARE_VISIT_BYTECODE(name, ...) void Visit##name();
BYTECODE_LIST(DECLARE_VISIT_BYTECODE)
#undef DECLARE_VISIT_BYTECODE
JSHeapBroker* const broker_;
Zone* const local_zone_;
JSGraph* const jsgraph_;
// The native context for which we optimize.
NativeContextRef const native_context_;
SharedFunctionInfoRef const shared_info_;
FeedbackCellRef const feedback_cell_;
FeedbackVectorRef const feedback_vector_;
CallFrequency const invocation_frequency_;
JSTypeHintLowering const type_hint_lowering_;
const FrameStateFunctionInfo* const frame_state_function_info_;
std::unique_ptr<SourcePositionTableIterator> source_position_iterator_;
interpreter::BytecodeArrayIterator bytecode_iterator_;
BytecodeAnalysis const& bytecode_analysis_;
Environment* environment_;
bool const osr_;
int currently_peeled_loop_offset_;
const bool skip_first_stack_check_;
// Merge environments are snapshots of the environment at points where the
// control flow merges. This models a forward data flow propagation of all
// values from all predecessors of the merge in question. They are indexed by
// the bytecode offset.
ZoneMap<int, Environment*> merge_environments_;
// Generator merge environments are snapshots of the current resume
// environment, tracing back through loop headers to the resume switch of a
// generator. They allow us to model a single resume jump as several switch
// statements across loop headers, keeping those loop headers reducible,
// without having to merge the "executing" environments of the generator into
// the "resuming" ones. They are indexed by the suspend id of the resume.
ZoneMap<int, Environment*> generator_merge_environments_;
// Exception handlers currently entered by the iteration.
ZoneStack<ExceptionHandler> exception_handlers_;
int current_exception_handler_;
// Temporary storage for building node input lists.
int input_buffer_size_;
Node** input_buffer_;
const CodeKind code_kind_;
Node* feedback_cell_node_;
Node* feedback_vector_node_;
Node* native_context_node_;
// Optimization to only create checkpoints when the current position in the
// control-flow is not effect-dominated by another checkpoint already. All
// operations that do not have observable side-effects can be re-evaluated.
bool needs_eager_checkpoint_;
// Nodes representing values in the activation record.
SetOncePointer<Node> function_closure_;
// Control nodes that exit the function body.
ZoneVector<Node*> exit_controls_;
StateValuesCache state_values_cache_;
// The source position table, to be populated.
SourcePositionTable* const source_positions_;
SourcePosition const start_position_;
TickCounter* const tick_counter_;
static constexpr int kBinaryOperationHintIndex = 1;
static constexpr int kBinaryOperationSmiHintIndex = 1;
static constexpr int kCompareOperationHintIndex = 1;
static constexpr int kCountOperationHintIndex = 0;
static constexpr int kUnaryOperationHintIndex = 0;
};
// The abstract execution environment simulates the content of the interpreter
// register file. The environment performs SSA-renaming of all tracked nodes at
// split and merge points in the control flow.
class BytecodeGraphBuilder::Environment : public ZoneObject {
public:
Environment(BytecodeGraphBuilder* builder, int register_count,
int parameter_count,
interpreter::Register incoming_new_target_or_generator,
Node* control_dependency);
// Specifies whether environment binding methods should attach frame state
// inputs to nodes representing the value being bound. This is done because
// the {OutputFrameStateCombine} is closely related to the binding method.
enum FrameStateAttachmentMode { kAttachFrameState, kDontAttachFrameState };
int parameter_count() const { return parameter_count_; }
int register_count() const { return register_count_; }
Node* LookupAccumulator() const;
Node* LookupRegister(interpreter::Register the_register) const;
Node* LookupGeneratorState() const;
void BindAccumulator(Node* node,
FrameStateAttachmentMode mode = kDontAttachFrameState);
void BindRegister(interpreter::Register the_register, Node* node,
FrameStateAttachmentMode mode = kDontAttachFrameState);
void BindRegistersToProjections(
interpreter::Register first_reg, Node* node,
FrameStateAttachmentMode mode = kDontAttachFrameState);
void BindGeneratorState(Node* node);
void RecordAfterState(Node* node,
FrameStateAttachmentMode mode = kDontAttachFrameState);
// Effect dependency tracked by this environment.
Node* GetEffectDependency() { return effect_dependency_; }
void UpdateEffectDependency(Node* dependency) {
effect_dependency_ = dependency;
}
// Preserve a checkpoint of the environment for the IR graph. Any
// further mutation of the environment will not affect checkpoints.
Node* Checkpoint(BailoutId bytecode_offset, OutputFrameStateCombine combine,
const BytecodeLivenessState* liveness);
// Control dependency tracked by this environment.
Node* GetControlDependency() const { return control_dependency_; }
void UpdateControlDependency(Node* dependency) {
control_dependency_ = dependency;
}
Node* Context() const { return context_; }
void SetContext(Node* new_context) { context_ = new_context; }
Environment* Copy();
void Merge(Environment* other, const BytecodeLivenessState* liveness);
void FillWithOsrValues();
void PrepareForLoop(const BytecodeLoopAssignments& assignments,
const BytecodeLivenessState* liveness);
void PrepareForLoopExit(Node* loop,
const BytecodeLoopAssignments& assignments,
const BytecodeLivenessState* liveness);
private:
friend Zone;
explicit Environment(const Environment* copy);
bool StateValuesRequireUpdate(Node** state_values, Node** values, int count);
void UpdateStateValues(Node** state_values, Node** values, int count);
Node* GetStateValuesFromCache(Node** values, int count,
const BitVector* liveness, int liveness_offset);
int RegisterToValuesIndex(interpreter::Register the_register) const;
Zone* zone() const { return builder_->local_zone(); }
Graph* graph() const { return builder_->graph(); }
CommonOperatorBuilder* common() const { return builder_->common(); }
BytecodeGraphBuilder* builder() const { return builder_; }
const NodeVector* values() const { return &values_; }
NodeVector* values() { return &values_; }
int register_base() const { return register_base_; }
int accumulator_base() const { return accumulator_base_; }
BytecodeGraphBuilder* builder_;
int register_count_;
int parameter_count_;
Node* context_;
Node* control_dependency_;
Node* effect_dependency_;
NodeVector values_;
Node* parameters_state_values_;
Node* generator_state_;
int register_base_;
int accumulator_base_;
};
// A helper for creating a temporary sub-environment for simple branches.
struct BytecodeGraphBuilder::SubEnvironment final {
public:
explicit SubEnvironment(BytecodeGraphBuilder* builder)
: builder_(builder), parent_(builder->environment()->Copy()) {}
~SubEnvironment() { builder_->set_environment(parent_); }
private:
BytecodeGraphBuilder* builder_;
BytecodeGraphBuilder::Environment* parent_;
};
// Issues:
// - Scopes - intimately tied to the AST. Need to evaluate what is needed.
// - Need to resolve closure parameter treatment.
BytecodeGraphBuilder::Environment::Environment(
BytecodeGraphBuilder* builder, int register_count, int parameter_count,
interpreter::Register incoming_new_target_or_generator,
Node* control_dependency)
: builder_(builder),
register_count_(register_count),
parameter_count_(parameter_count),
control_dependency_(control_dependency),
effect_dependency_(control_dependency),
values_(builder->local_zone()),
parameters_state_values_(nullptr),
generator_state_(nullptr) {
// The layout of values_ is:
//
// [receiver] [parameters] [registers] [accumulator]
//
// parameter[0] is the receiver (this), parameters 1..N are the
// parameters supplied to the method (arg0..argN-1). The accumulator
// is stored separately.
// Parameters including the receiver
for (int i = 0; i < parameter_count; i++) {
const char* debug_name = (i == 0) ? "%this" : nullptr;
const Operator* op = common()->Parameter(i, debug_name);
Node* parameter = builder->graph()->NewNode(op, graph()->start());
values()->push_back(parameter);
}
// Registers
register_base_ = static_cast<int>(values()->size());
Node* undefined_constant = builder->jsgraph()->UndefinedConstant();
values()->insert(values()->end(), register_count, undefined_constant);
// Accumulator
accumulator_base_ = static_cast<int>(values()->size());
values()->push_back(undefined_constant);
// Context
int context_index = Linkage::GetJSCallContextParamIndex(parameter_count);
const Operator* op = common()->Parameter(context_index, "%context");
context_ = builder->graph()->NewNode(op, graph()->start());
// Incoming new.target or generator register
if (incoming_new_target_or_generator.is_valid()) {
int new_target_index =
Linkage::GetJSCallNewTargetParamIndex(parameter_count);
const Operator* op = common()->Parameter(new_target_index, "%new.target");
Node* new_target_node = builder->graph()->NewNode(op, graph()->start());
int values_index = RegisterToValuesIndex(incoming_new_target_or_generator);
values()->at(values_index) = new_target_node;
}
}
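// Copy constructor used by Environment::Copy(): the values vector is
// duplicated in the builder's local zone, while the individual Node* entries
// are shared with the original environment.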
BytecodeGraphBuilder::Environment::Environment(
const BytecodeGraphBuilder::Environment* other)
: builder_(other->builder_),
register_count_(other->register_count_),
parameter_count_(other->parameter_count_),
context_(other->context_),
control_dependency_(other->control_dependency_),
effect_dependency_(other->effect_dependency_),
values_(other->zone()),
parameters_state_values_(other->parameters_state_values_),
generator_state_(other->generator_state_),
register_base_(other->register_base_),
accumulator_base_(other->accumulator_base_) {
values_ = other->values_;
}
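// Maps an interpreter register to its slot in the {values_} vector: parameter
// registers map into the leading parameter section, all other registers are
// offset by {register_base_}.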
int BytecodeGraphBuilder::Environment::RegisterToValuesIndex(
interpreter::Register the_register) const {
if (the_register.is_parameter()) {
return the_register.ToParameterIndex(parameter_count());
} else {
return the_register.index() + register_base();
}
}
Node* BytecodeGraphBuilder::Environment::LookupAccumulator() const {
return values()->at(accumulator_base_);
}
Node* BytecodeGraphBuilder::Environment::LookupGeneratorState() const {
DCHECK_NOT_NULL(generator_state_);
return generator_state_;
}
Node* BytecodeGraphBuilder::Environment::LookupRegister(
interpreter::Register the_register) const {
if (the_register.is_current_context()) {
return Context();
} else if (the_register.is_function_closure()) {
return builder()->GetFunctionClosure();
} else {
int values_index = RegisterToValuesIndex(the_register);
return values()->at(values_index);
}
}
void BytecodeGraphBuilder::Environment::BindAccumulator(
Node* node, FrameStateAttachmentMode mode) {
if (mode == FrameStateAttachmentMode::kAttachFrameState) {
builder()->PrepareFrameState(node, OutputFrameStateCombine::PokeAt(0));
}
values()->at(accumulator_base_) = node;
}
void BytecodeGraphBuilder::Environment::BindGeneratorState(Node* node) {
generator_state_ = node;
}
void BytecodeGraphBuilder::Environment::BindRegister(
interpreter::Register the_register, Node* node,
FrameStateAttachmentMode mode) {
int values_index = RegisterToValuesIndex(the_register);
if (mode == FrameStateAttachmentMode::kAttachFrameState) {
builder()->PrepareFrameState(node, OutputFrameStateCombine::PokeAt(
accumulator_base_ - values_index));
}
values()->at(values_index) = node;
}
void BytecodeGraphBuilder::Environment::BindRegistersToProjections(
interpreter::Register first_reg, Node* node,
FrameStateAttachmentMode mode) {
int values_index = RegisterToValuesIndex(first_reg);
if (mode == FrameStateAttachmentMode::kAttachFrameState) {
builder()->PrepareFrameState(node, OutputFrameStateCombine::PokeAt(
accumulator_base_ - values_index));
}
for (int i = 0; i < node->op()->ValueOutputCount(); i++) {
values()->at(values_index + i) =
builder()->NewNode(common()->Projection(i), node);
}
}
void BytecodeGraphBuilder::Environment::RecordAfterState(
Node* node, FrameStateAttachmentMode mode) {
if (mode == FrameStateAttachmentMode::kAttachFrameState) {
builder()->PrepareFrameState(node, OutputFrameStateCombine::Ignore());
}
}
BytecodeGraphBuilder::Environment* BytecodeGraphBuilder::Environment::Copy() {
return zone()->New<Environment>(this);
}
void BytecodeGraphBuilder::Environment::Merge(
BytecodeGraphBuilder::Environment* other,
const BytecodeLivenessState* liveness) {
// Create a merge of the control dependencies of both environments and update
// the current environment's control dependency accordingly.
Node* control = builder()->MergeControl(GetControlDependency(),
other->GetControlDependency());
UpdateControlDependency(control);
// Create a merge of the effect dependencies of both environments and update
// the current environment's effect dependency accordingly.
Node* effect = builder()->MergeEffect(GetEffectDependency(),
other->GetEffectDependency(), control);
UpdateEffectDependency(effect);
// Introduce Phi nodes for values that are live and have differing inputs at
// the merge point, potentially extending an existing Phi node if possible.
context_ = builder()->MergeValue(context_, other->context_, control);
for (int i = 0; i < parameter_count(); i++) {
values_[i] = builder()->MergeValue(values_[i], other->values_[i], control);
}
for (int i = 0; i < register_count(); i++) {
int index = register_base() + i;
if (liveness == nullptr || liveness->RegisterIsLive(i)) {
#if DEBUG
// We only do these DCHECKs when we are not in the resume path of a
// generator -- this is, when either there is no generator state at all,
// or the generator state is not the constant "executing" value.
if (generator_state_ == nullptr ||
NumberMatcher(generator_state_)
.Is(JSGeneratorObject::kGeneratorExecuting)) {
DCHECK_NE(values_[index], builder()->jsgraph()->OptimizedOutConstant());
DCHECK_NE(other->values_[index],
builder()->jsgraph()->OptimizedOutConstant());
}
#endif
values_[index] =
builder()->MergeValue(values_[index], other->values_[index], control);
} else {
values_[index] = builder()->jsgraph()->OptimizedOutConstant();
}
}
if (liveness == nullptr || liveness->AccumulatorIsLive()) {
DCHECK_NE(values_[accumulator_base()],
builder()->jsgraph()->OptimizedOutConstant());
DCHECK_NE(other->values_[accumulator_base()],
builder()->jsgraph()->OptimizedOutConstant());
values_[accumulator_base()] =
builder()->MergeValue(values_[accumulator_base()],
other->values_[accumulator_base()], control);
} else {
values_[accumulator_base()] = builder()->jsgraph()->OptimizedOutConstant();
}
if (generator_state_ != nullptr) {
DCHECK_NOT_NULL(other->generator_state_);
generator_state_ = builder()->MergeValue(generator_state_,
other->generator_state_, control);
}
}
void BytecodeGraphBuilder::Environment::PrepareForLoop(
const BytecodeLoopAssignments& assignments,
const BytecodeLivenessState* liveness) {
// Create a control node for the loop header.
Node* control = builder()->NewLoop();
// Create a Phi for external effects.
Node* effect = builder()->NewEffectPhi(1, GetEffectDependency(), control);
UpdateEffectDependency(effect);
// Create Phis for any values that are live on entry to the loop and may be
// updated by the end of the loop.
context_ = builder()->NewPhi(1, context_, control);
for (int i = 0; i < parameter_count(); i++) {
if (assignments.ContainsParameter(i)) {
values_[i] = builder()->NewPhi(1, values_[i], control);
}
}
for (int i = 0; i < register_count(); i++) {
if (assignments.ContainsLocal(i) &&
(liveness == nullptr || liveness->RegisterIsLive(i))) {
int index = register_base() + i;
values_[index] = builder()->NewPhi(1, values_[index], control);
}
}
// The accumulator should not be live on entry.
DCHECK_IMPLIES(liveness != nullptr, !liveness->AccumulatorIsLive());
if (generator_state_ != nullptr) {
generator_state_ = builder()->NewPhi(1, generator_state_, control);
}
// Connect to the loop end.
Node* terminate = builder()->graph()->NewNode(
builder()->common()->Terminate(), effect, control);
builder()->exit_controls_.push_back(terminate);
}
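// Replaces the context and all tracked values (parameters, registers and the
// accumulator) with OsrValue nodes, so that an OSR entry point picks up its
// state from the unoptimized frame.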
void BytecodeGraphBuilder::Environment::FillWithOsrValues() {
Node* start = graph()->start();
// Create OSR values for each environment value.
SetContext(graph()->NewNode(
common()->OsrValue(Linkage::kOsrContextSpillSlotIndex), start));
int size = static_cast<int>(values()->size());
for (int i = 0; i < size; i++) {
int idx = i; // Indexing scheme follows {StandardFrame}, adapt accordingly.
if (i >= register_base()) idx += InterpreterFrameConstants::kExtraSlotCount;
if (i >= accumulator_base()) idx = Linkage::kOsrAccumulatorRegisterIndex;
values()->at(i) = graph()->NewNode(common()->OsrValue(idx), start);
}
}
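// Returns true if the cached StateValues node is missing or if any of its
// inputs no longer matches the corresponding current value.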
bool BytecodeGraphBuilder::Environment::StateValuesRequireUpdate(
Node** state_values, Node** values, int count) {
if (*state_values == nullptr) {
return true;
}
Node::Inputs inputs = (*state_values)->inputs();
if (inputs.count() != count) return true;
for (int i = 0; i < count; i++) {
if (inputs[i] != values[i]) {
return true;
}
}
return false;
}
void BytecodeGraphBuilder::Environment::PrepareForLoopExit(
Node* loop, const BytecodeLoopAssignments& assignments,
const BytecodeLivenessState* liveness) {
DCHECK_EQ(loop->opcode(), IrOpcode::kLoop);
Node* control = GetControlDependency();
// Create the loop exit node.
Node* loop_exit = graph()->NewNode(common()->LoopExit(), control, loop);
UpdateControlDependency(loop_exit);
// Rename the effect.
Node* effect_rename = graph()->NewNode(common()->LoopExitEffect(),
GetEffectDependency(), loop_exit);
UpdateEffectDependency(effect_rename);
// TODO(jarin) We should also rename context here. However, unconditional
// renaming confuses global object and native context specialization.
// We should only rename if the context is assigned in the loop.
// Rename the environment values if they were assigned in the loop and are
// live after exiting the loop.
for (int i = 0; i < parameter_count(); i++) {
if (assignments.ContainsParameter(i)) {
Node* rename =
graph()->NewNode(common()->LoopExitValue(), values_[i], loop_exit);
values_[i] = rename;
}
}
for (int i = 0; i < register_count(); i++) {
if (assignments.ContainsLocal(i) &&
(liveness == nullptr || liveness->RegisterIsLive(i))) {
Node* rename = graph()->NewNode(common()->LoopExitValue(),
values_[register_base() + i], loop_exit);
values_[register_base() + i] = rename;
}
}
if (liveness == nullptr || liveness->AccumulatorIsLive()) {
Node* rename = graph()->NewNode(common()->LoopExitValue(),
values_[accumulator_base()], loop_exit);
values_[accumulator_base()] = rename;
}
if (generator_state_ != nullptr) {
generator_state_ = graph()->NewNode(common()->LoopExitValue(),
generator_state_, loop_exit);
}
}
void BytecodeGraphBuilder::Environment::UpdateStateValues(Node** state_values,
Node** values,
int count) {
if (StateValuesRequireUpdate(state_values, values, count)) {
const Operator* op = common()->StateValues(count, SparseInputMask::Dense());
(*state_values) = graph()->NewNode(op, count, values);
}
}
Node* BytecodeGraphBuilder::Environment::GetStateValuesFromCache(
Node** values, int count, const BitVector* liveness, int liveness_offset) {
return builder_->state_values_cache_.GetNodeForValues(
values, static_cast<size_t>(count), liveness, liveness_offset);
}
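// Builds the FrameState node for the given bailout point from the current
// parameter, register and accumulator values, honoring the provided liveness
// information where available.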
Node* BytecodeGraphBuilder::Environment::Checkpoint(
BailoutId bailout_id, OutputFrameStateCombine combine,
const BytecodeLivenessState* liveness) {
if (parameter_count() == register_count()) {
// Re-use the state-value cache if the number of local registers happens
// to match the parameter count.
parameters_state_values_ = GetStateValuesFromCache(
&values()->at(0), parameter_count(), nullptr, 0);
} else {
UpdateStateValues(&parameters_state_values_, &values()->at(0),
parameter_count());
}
Node* registers_state_values =
GetStateValuesFromCache(&values()->at(register_base()), register_count(),
liveness ? &liveness->bit_vector() : nullptr, 0);
bool accumulator_is_live = !liveness || liveness->AccumulatorIsLive();
Node* accumulator_state_value =
accumulator_is_live && combine != OutputFrameStateCombine::PokeAt(0)
? values()->at(accumulator_base())
: builder()->jsgraph()->OptimizedOutConstant();
const Operator* op = common()->FrameState(
bailout_id, combine, builder()->frame_state_function_info());
Node* result = graph()->NewNode(
op, parameters_state_values_, registers_state_values,
accumulator_state_value, Context(), builder()->GetFunctionClosure(),
builder()->graph()->start());
return result;
}
BytecodeGraphBuilder::BytecodeGraphBuilder(
JSHeapBroker* broker, Zone* local_zone,
NativeContextRef const& native_context,
SharedFunctionInfoRef const& shared_info,
FeedbackCellRef const& feedback_cell, BailoutId osr_offset,
JSGraph* jsgraph, CallFrequency const& invocation_frequency,
SourcePositionTable* source_positions, int inlining_id, CodeKind code_kind,
BytecodeGraphBuilderFlags flags, TickCounter* tick_counter)
: broker_(broker),
local_zone_(local_zone),
jsgraph_(jsgraph),
native_context_(native_context),
shared_info_(shared_info),
feedback_cell_(feedback_cell),
feedback_vector_(feedback_cell.value().AsFeedbackVector()),
invocation_frequency_(invocation_frequency),
type_hint_lowering_(
broker, jsgraph, feedback_vector_,
(flags & BytecodeGraphBuilderFlag::kBailoutOnUninitialized)
? JSTypeHintLowering::kBailoutOnUninitialized
: JSTypeHintLowering::kNoFlags),
frame_state_function_info_(common()->CreateFrameStateFunctionInfo(
FrameStateType::kInterpretedFunction,
bytecode_array().parameter_count(), bytecode_array().register_count(),
shared_info.object())),
source_position_iterator_(std::make_unique<SourcePositionTableIterator>(
bytecode_array().SourcePositionTable())),
bytecode_iterator_(
std::make_unique<OffHeapBytecodeArray>(bytecode_array())),
bytecode_analysis_(broker_->GetBytecodeAnalysis(
bytecode_array().object(), osr_offset,
flags & BytecodeGraphBuilderFlag::kAnalyzeEnvironmentLiveness,
should_disallow_heap_access()
? SerializationPolicy::kAssumeSerialized
: SerializationPolicy::kSerializeIfNeeded)),
environment_(nullptr),
osr_(!osr_offset.IsNone()),
currently_peeled_loop_offset_(-1),
skip_first_stack_check_(flags &
BytecodeGraphBuilderFlag::kSkipFirstStackCheck),
merge_environments_(local_zone),
generator_merge_environments_(local_zone),
exception_handlers_(local_zone),
current_exception_handler_(0),
input_buffer_size_(0),
input_buffer_(nullptr),
code_kind_(code_kind),
feedback_cell_node_(nullptr),
feedback_vector_node_(nullptr),
native_context_node_(nullptr),
needs_eager_checkpoint_(true),
exit_controls_(local_zone),
state_values_cache_(jsgraph),
source_positions_(source_positions),
start_position_(shared_info.StartPosition(), inlining_id),
tick_counter_(tick_counter) {}
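// Lazily creates and caches the Parameter node representing the closure of
// the function being compiled (Linkage::kJSCallClosureParamIndex).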
Node* BytecodeGraphBuilder::GetFunctionClosure() {
if (!function_closure_.is_set()) {
int index = Linkage::kJSCallClosureParamIndex;
const Operator* op = common()->Parameter(index, "%closure");
Node* node = NewNode(op, graph()->start());
function_closure_.set(node);
}
return function_closure_.get();
}
void BytecodeGraphBuilder::CreateFeedbackCellNode() {
DCHECK_NULL(feedback_cell_node_);
if (native_context_independent()) {
feedback_cell_node_ = BuildLoadFeedbackCell();
} else if (is_turboprop()) {
feedback_cell_node_ = jsgraph()->Constant(feedback_cell_);
}
}
Node* BytecodeGraphBuilder::BuildLoadFeedbackCell() {
DCHECK(native_context_independent());
DCHECK_NULL(feedback_cell_node_);
return NewNode(
simplified()->LoadField(AccessBuilder::ForJSFunctionFeedbackCell()),
GetFunctionClosure());
}
void BytecodeGraphBuilder::CreateFeedbackVectorNode() {
DCHECK_NULL(feedback_vector_node_);
feedback_vector_node_ = native_context_independent()
? BuildLoadFeedbackVector()
: jsgraph()->Constant(feedback_vector());
}
Node* BytecodeGraphBuilder::BuildLoadFeedbackVector() {
DCHECK(native_context_independent());
DCHECK_NULL(feedback_vector_node_);
// The feedback vector must exist and remain live while the generated code
// lives. Specifically that means it must be created when NCI code is
// installed, and must not be flushed.
return NewNode(simplified()->LoadField(AccessBuilder::ForFeedbackCellValue()),
feedback_cell_node());
}
Node* BytecodeGraphBuilder::BuildLoadFeedbackCell(int index) {
if (native_context_independent()) {
// TODO(jgruber,v8:8888): Assumes that the feedback vector has been
// allocated.
Node* closure_feedback_cell_array =
NewNode(simplified()->LoadField(
AccessBuilder::ForFeedbackVectorClosureFeedbackCellArray()),
feedback_vector_node());
return NewNode(
simplified()->LoadField(AccessBuilder::ForFixedArraySlot(index)),
closure_feedback_cell_array);
} else {
return jsgraph()->Constant(feedback_vector().GetClosureFeedbackCell(index));
}
}
void BytecodeGraphBuilder::CreateNativeContextNode() {
DCHECK_NULL(native_context_node_);
native_context_node_ = native_context_independent()
? BuildLoadNativeContext()
: jsgraph()->Constant(native_context());
}
Node* BytecodeGraphBuilder::BuildLoadNativeContext() {
DCHECK(native_context_independent());
DCHECK_NULL(native_context_node_);
Node* context_map = NewNode(simplified()->LoadField(AccessBuilder::ForMap()),
environment()->Context());
return NewNode(simplified()->LoadField(AccessBuilder::ForMapNativeContext()),
context_map);
}
void BytecodeGraphBuilder::MaybeBuildTierUpCheck() {
// For OSR we don't tier up, so we don't need to build this check. Also,
// tiering up currently tail-calls into the InterpreterEntryTrampoline (IET),
// and tail calls aren't supported with OSR. See AdjustStackPointerForTailCall.
if (!CodeKindCanTierUp(code_kind()) || osr_) return;
int parameter_count = bytecode_array().parameter_count();
Node* target = GetFunctionClosure();
Node* new_target = graph()->NewNode(
common()->Parameter(
Linkage::GetJSCallNewTargetParamIndex(parameter_count),
"%new.target"),
graph()->start());
Node* argc = graph()->NewNode(
common()->Parameter(Linkage::GetJSCallArgCountParamIndex(parameter_count),
"%argc"),
graph()->start());
DCHECK_EQ(environment()->Context()->opcode(), IrOpcode::kParameter);
Node* context = environment()->Context();
NewNode(simplified()->TierUpCheck(), feedback_vector_node(), target,
new_target, argc, context);
}
void BytecodeGraphBuilder::MaybeBuildIncrementInvocationCount() {
if (!generate_full_feedback_collection()) return;
Node* current_invocation_count =
NewNode(simplified()->LoadField(
AccessBuilder::ForFeedbackVectorInvocationCount()),
feedback_vector_node());
Node* next_invocation_count =
NewNode(simplified()->NumberAdd(), current_invocation_count,
jsgraph()->SmiConstant(1));
NewNode(simplified()->StoreField(
AccessBuilder::ForFeedbackVectorInvocationCount()),
feedback_vector_node(), next_invocation_count);
}
Node* BytecodeGraphBuilder::BuildLoadNativeContextField(int index) {
Node* result = NewNode(javascript()->LoadContext(0, index, true));
NodeProperties::ReplaceContextInput(result, native_context_node());
return result;
}
FeedbackSource BytecodeGraphBuilder::CreateFeedbackSource(int slot_id) {
return CreateFeedbackSource(FeedbackVector::ToSlot(slot_id));
}
FeedbackSource BytecodeGraphBuilder::CreateFeedbackSource(FeedbackSlot slot) {
return FeedbackSource(feedback_vector(), slot);
}
void BytecodeGraphBuilder::CreateGraph() {
DisallowHeapAccessIf disallow_heap_access(should_disallow_heap_access());
SourcePositionTable::Scope pos_scope(source_positions_, start_position_);
// Set up the basic structure of the graph. Outputs for {Start} are the formal
// parameters (including the receiver) plus new target, number of arguments,
// context and closure.
int start_output_arity = StartNode::OutputArityForFormalParameterCount(
bytecode_array().parameter_count());
graph()->SetStart(graph()->NewNode(common()->Start(start_output_arity)));
Environment env(this, bytecode_array().register_count(),
bytecode_array().parameter_count(),
bytecode_array().incoming_new_target_or_generator_register(),
graph()->start());
set_environment(&env);
CreateFeedbackCellNode();
CreateFeedbackVectorNode();
MaybeBuildTierUpCheck();
MaybeBuildIncrementInvocationCount();
CreateNativeContextNode();
VisitBytecodes();
// Finish the basic structure of the graph.
DCHECK_NE(0u, exit_controls_.size());
int const input_count = static_cast<int>(exit_controls_.size());
Node** const inputs = &exit_controls_.front();
Node* end = graph()->NewNode(common()->End(input_count), input_count, inputs);
graph()->SetEnd(end);
}
void BytecodeGraphBuilder::PrepareEagerCheckpoint() {
if (needs_eager_checkpoint()) {
// Create an explicit checkpoint node for before the operation. This only
// needs to happen if we aren't effect-dominated by a {Checkpoint} already.
mark_as_needing_eager_checkpoint(false);
Node* node = NewNode(common()->Checkpoint());
DCHECK_EQ(1, OperatorProperties::GetFrameStateInputCount(node->op()));
DCHECK_EQ(IrOpcode::kDead,
NodeProperties::GetFrameStateInput(node)->opcode());
BailoutId bailout_id(bytecode_iterator().current_offset());
const BytecodeLivenessState* liveness_before =
bytecode_analysis().GetInLivenessFor(
bytecode_iterator().current_offset());
Node* frame_state_before = environment()->Checkpoint(
bailout_id, OutputFrameStateCombine::Ignore(), liveness_before);
NodeProperties::ReplaceFrameStateInput(node, frame_state_before);
#ifdef DEBUG
} else {
// In case we skipped checkpoint creation above, we must be able to find an
// existing checkpoint that effect-dominates the nodes about to be created.
// Starting a search from the current effect-dependency has to succeed.
Node* effect = environment()->GetEffectDependency();
while (effect->opcode() != IrOpcode::kCheckpoint) {
DCHECK(effect->op()->HasProperty(Operator::kNoWrite));
DCHECK_EQ(1, effect->op()->EffectInputCount());
effect = NodeProperties::GetEffectInput(effect);
}
}
#else
}
#endif // DEBUG
}
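// Convenience overload that attaches the lazy-deopt frame state for the
// bytecode offset currently being visited.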
void BytecodeGraphBuilder::PrepareFrameState(Node* node,
OutputFrameStateCombine combine) {
if (OperatorProperties::HasFrameStateInput(node->op())) {
PrepareFrameState(node, combine,
BailoutId(bytecode_iterator().current_offset()));
}
}
void BytecodeGraphBuilder::PrepareFrameState(Node* node,
OutputFrameStateCombine combine,
BailoutId bailout_id) {
if (OperatorProperties::HasFrameStateInput(node->op())) {
// Add the frame state for after the operation. The node in question has
// already been created and had a {Dead} frame state input up until now.
DCHECK_EQ(1, OperatorProperties::GetFrameStateInputCount(node->op()));
DCHECK_EQ(IrOpcode::kDead,
NodeProperties::GetFrameStateInput(node)->opcode());
DCHECK_IMPLIES(bailout_id.ToInt() == kFunctionEntryBytecodeOffset,
bytecode_iterator().current_offset() == 0);
// If we have kFunctionEntryBytecodeOffset as the bailout_id, we want to get
// the liveness at the moment of function entry. This is the same as the IN
// liveness of the first actual bytecode.
const BytecodeLivenessState* liveness_after =
bailout_id.ToInt() == kFunctionEntryBytecodeOffset
? bytecode_analysis().GetInLivenessFor(0)
: bytecode_analysis().GetOutLivenessFor(bailout_id.ToInt());
Node* frame_state_after =
environment()->Checkpoint(bailout_id, combine, liveness_after);
NodeProperties::ReplaceFrameStateInput(node, frame_state_after);
}
}
void BytecodeGraphBuilder::AdvanceIteratorsTo(int bytecode_offset) {
for (; bytecode_iterator().current_offset() != bytecode_offset;
bytecode_iterator().Advance()) {
UpdateSourcePosition(bytecode_iterator().current_offset());
}
}
// Stores the state of the source position iterator and the index into the
// current exception handlers stack. During OSR graph generation we need to
// back up these states at the loop header offset of each outer loop that
// contains the OSR loop. The states are then restored while peeling the
// loops, so that both exception handling and synchronization with the source
// positions stay correct.
class BytecodeGraphBuilder::OsrIteratorState {
public:
explicit OsrIteratorState(BytecodeGraphBuilder* graph_builder)
: graph_builder_(graph_builder),
saved_states_(graph_builder->local_zone()) {}
void ProcessOsrPrelude() {
ZoneVector<int> outer_loop_offsets(graph_builder_->local_zone());
int osr_entry = graph_builder_->bytecode_analysis().osr_entry_point();
// We find here the outermost loop which contains the OSR loop.
int outermost_loop_offset = osr_entry;
while ((outermost_loop_offset = graph_builder_->bytecode_analysis()
.GetLoopInfoFor(outermost_loop_offset)
.parent_offset()) != -1) {
outer_loop_offsets.push_back(outermost_loop_offset);
}
outermost_loop_offset =
outer_loop_offsets.empty() ? osr_entry : outer_loop_offsets.back();
graph_builder_->AdvanceIteratorsTo(outermost_loop_offset);
// We save the iterator states at the loop header offsets of the outer loops
// (the ones containing the OSR loop). They will be used when jumping back in
// the bytecode.
for (ZoneVector<int>::const_reverse_iterator it =
outer_loop_offsets.crbegin();
it != outer_loop_offsets.crend(); ++it) {
graph_builder_->AdvanceIteratorsTo(*it);
graph_builder_->ExitThenEnterExceptionHandlers(
graph_builder_->bytecode_iterator().current_offset());
saved_states_.push(IteratorsStates(
graph_builder_->current_exception_handler(),
graph_builder_->source_position_iterator().GetState()));
}
// Finish by advancing to the OSR entry.
graph_builder_->AdvanceIteratorsTo(osr_entry);
// Enter all remaining exception handlers which end before the OSR loop, so
// that on the next call of VisitSingleBytecode they get popped from the
// exception handlers stack.
graph_builder_->ExitThenEnterExceptionHandlers(osr_entry);
graph_builder_->set_currently_peeled_loop_offset(
graph_builder_->bytecode_analysis()
.GetLoopInfoFor(osr_entry)
.parent_offset());
}
void RestoreState(int target_offset, int new_parent_offset) {
graph_builder_->bytecode_iterator().SetOffset(target_offset);
// In case of a return, we must not build loop exits for
// not-yet-built outer loops.
graph_builder_->set_currently_peeled_loop_offset(new_parent_offset);
IteratorsStates saved_state = saved_states_.top();
graph_builder_->source_position_iterator().RestoreState(
saved_state.source_iterator_state_);
graph_builder_->set_current_exception_handler(
saved_state.exception_handler_index_);
saved_states_.pop();
}
private:
struct IteratorsStates {
int exception_handler_index_;
SourcePositionTableIterator::IndexAndPositionState source_iterator_state_;
IteratorsStates(int exception_handler_index,
SourcePositionTableIterator::IndexAndPositionState
source_iterator_state)
: exception_handler_index_(exception_handler_index),
source_iterator_state_(source_iterator_state) {}
};
BytecodeGraphBuilder* graph_builder_;
ZoneStack<IteratorsStates> saved_states_;
};
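// Discards merge environments recorded at or before {limit_offset}. Used
// while peeling loops for OSR, where nodes for those offsets are about to be
// rebuilt.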
void BytecodeGraphBuilder::RemoveMergeEnvironmentsBeforeOffset(
int limit_offset) {
if (!merge_environments_.empty()) {
ZoneMap<int, Environment*>::iterator it = merge_environments_.begin();
ZoneMap<int, Environment*>::iterator stop_it = merge_environments_.end();
while (it != stop_it && it->first <= limit_offset) {
it = merge_environments_.erase(it);
}
}
}
void BytecodeGraphBuilder::BuildFunctionEntryStackCheck() {
if (!skip_first_stack_check()) {
Node* node =
NewNode(javascript()->StackCheck(StackCheckKind::kJSFunctionEntry));
PrepareFrameState(node, OutputFrameStateCombine::Ignore(),
BailoutId(kFunctionEntryBytecodeOffset));
}
}
void BytecodeGraphBuilder::BuildIterationBodyStackCheck() {
Node* node =
NewNode(javascript()->StackCheck(StackCheckKind::kJSIterationBody));
environment()->RecordAfterState(node, Environment::kAttachFrameState);
}
// We will iterate through the OSR loop, then its parent, and so on
// until we have reached the outermost loop containing the OSR loop. We do
// not generate nodes for anything before the outermost loop.
void BytecodeGraphBuilder::AdvanceToOsrEntryAndPeelLoops() {
OsrIteratorState iterator_states(this);
iterator_states.ProcessOsrPrelude();
int osr_entry = bytecode_analysis().osr_entry_point();
DCHECK_EQ(bytecode_iterator().current_offset(), osr_entry);
environment()->FillWithOsrValues();
// Suppose we have n nested loops, loop_0 being the outermost one, and
// loop_n being the OSR loop. We start iterating the bytecode at the header
// of loop_n (the OSR loop), and then we peel the part of the body of
// loop_{n-1} following the end of loop_n. We then rewind the iterator to
// the header of loop_{n-1}, and so on until we have partly peeled loop 0.
// The full loop_0 body will be generated with the rest of the function,
// outside the OSR generation.
// To do so, if we are visiting a loop, we continue to visit what's left
// of its parent, and then when reaching the parent's JumpLoop, we do not
// create any jump for that but rewind the bytecode iterator to visit the
// parent loop entirely, and so on.
int current_parent_offset =
bytecode_analysis().GetLoopInfoFor(osr_entry).parent_offset();
while (current_parent_offset != -1) {
const LoopInfo& current_parent_loop =
bytecode_analysis().GetLoopInfoFor(current_parent_offset);
// We iterate until the back edge of the parent loop, which we detect by
// the offset that the JumpLoop targets.
for (; !bytecode_iterator().done(); bytecode_iterator().Advance()) {
if (bytecode_iterator().current_bytecode() ==
interpreter::Bytecode::kJumpLoop &&
bytecode_iterator().GetJumpTargetOffset() == current_parent_offset) {
// Reached the end of the current parent loop.
break;
}
VisitSingleBytecode();
}
// Should have found the loop's jump target.
DCHECK(!bytecode_iterator().done());
// We also need to take care of the merge environments and exception
// handlers here because the omitted JumpLoop bytecode can still be the
// target of jumps or the first bytecode after a try block.
ExitThenEnterExceptionHandlers(bytecode_iterator().current_offset());
SwitchToMergeEnvironment(bytecode_iterator().current_offset());
// This jump is the jump of our parent loop, which is not yet created.
// So we do not build the jump nodes, but restore the bytecode and the
// SourcePosition iterators to the values they had when we were visiting
// the offset pointed at by the JumpLoop we've just reached.
// We have already built nodes for inner loops, but now we will
// iterate again over them and build new nodes corresponding to the same
// bytecode offsets. Any jump or reference to these inner loops must now
// point to the new nodes we will build, hence we clear the relevant part
// of the environment.
// Completely clearing the environment is not possible because merge
// environments for forward jumps out of the loop need to be preserved
// (e.g. a return or a labeled break in the middle of a loop).
RemoveMergeEnvironmentsBeforeOffset(bytecode_iterator().current_offset());
iterator_states.RestoreState(current_parent_offset,
current_parent_loop.parent_offset());
current_parent_offset = current_parent_loop.parent_offset();
}
}
void BytecodeGraphBuilder::VisitSingleBytecode() {
tick_counter_->TickAndMaybeEnterSafepoint();
int current_offset = bytecode_iterator().current_offset();
UpdateSourcePosition(current_offset);
ExitThenEnterExceptionHandlers(current_offset);
DCHECK_GE(exception_handlers_.empty() ? current_offset
: exception_handlers_.top().end_offset_,
current_offset);
SwitchToMergeEnvironment(current_offset);
if (environment() != nullptr) {
BuildLoopHeaderEnvironment(current_offset);
switch (bytecode_iterator().current_bytecode()) {
#define BYTECODE_CASE(name, ...) \
case interpreter::Bytecode::k##name: \
Visit##name(); \
break;
BYTECODE_LIST(BYTECODE_CASE)
#undef BYTECODE_CASE
}
}
}
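// Top-level bytecode walk: binds the generator state if there are resume
// targets, handles the OSR prelude (or the function-entry stack check), and
// then visits every remaining bytecode in order.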
void BytecodeGraphBuilder::VisitBytecodes() {
if (!bytecode_analysis().resume_jump_targets().empty()) {
environment()->BindGeneratorState(
jsgraph()->SmiConstant(JSGeneratorObject::kGeneratorExecuting));
}
if (osr_) {
// We peel the OSR loop and any outer loop containing it except that we
// leave the nodes corresponding to the whole outermost loop (including
// the last copies of the loops it contains) to be generated by the normal
// bytecode iteration below.
AdvanceToOsrEntryAndPeelLoops();
} else {
BuildFunctionEntryStackCheck();
}
bool has_one_shot_bytecode = false;
for (; !bytecode_iterator().done(); bytecode_iterator().Advance()) {
if (interpreter::Bytecodes::IsOneShotBytecode(
bytecode_iterator().current_bytecode())) {
has_one_shot_bytecode = true;
}
VisitSingleBytecode();
}
if (!should_disallow_heap_access() && has_one_shot_bytecode) {
// (For concurrent inlining this is done in the serializer instead.)
isolate()->CountUsage(
v8::Isolate::UseCounterFeature::kOptimizedFunctionWithOneShotBytecode);
}
DCHECK(exception_handlers_.empty());
}
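// The following visitors handle constant loads and plain register moves by
// updating the environment; no new graph nodes are created beyond the
// constants themselves.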
void BytecodeGraphBuilder::VisitLdaZero() {
Node* node = jsgraph()->ZeroConstant();
environment()->BindAccumulator(node);
}
void BytecodeGraphBuilder::VisitLdaSmi() {
Node* node = jsgraph()->Constant(bytecode_iterator().GetImmediateOperand(0));
environment()->BindAccumulator(node);
}
void BytecodeGraphBuilder::VisitLdaConstant() {
ObjectRef object(
broker(), bytecode_iterator().GetConstantForIndexOperand(0, isolate()));
Node* node = jsgraph()->Constant(object);
environment()->BindAccumulator(node);
}
void BytecodeGraphBuilder::VisitLdaUndefined() {
Node* node = jsgraph()->UndefinedConstant();
environment()->BindAccumulator(node);
}
void BytecodeGraphBuilder::VisitLdaNull() {
Node* node = jsgraph()->NullConstant();
environment()->BindAccumulator(node);
}
void BytecodeGraphBuilder::VisitLdaTheHole() {
Node* node = jsgraph()->TheHoleConstant();
environment()->BindAccumulator(node);
}
void BytecodeGraphBuilder::VisitLdaTrue() {
Node* node = jsgraph()->TrueConstant();
environment()->BindAccumulator(node);
}
void BytecodeGraphBuilder::VisitLdaFalse() {
Node* node = jsgraph()->FalseConstant();
environment()->BindAccumulator(node);
}
void BytecodeGraphBuilder::VisitLdar() {
Node* value =
environment()->LookupRegister(bytecode_iterator().GetRegisterOperand(0));
environment()->BindAccumulator(value);
}
void BytecodeGraphBuilder::VisitStar() {
Node* value = environment()->LookupAccumulator();
environment()->BindRegister(bytecode_iterator().GetRegisterOperand(0), value);
}
void BytecodeGraphBuilder::VisitMov() {
Node* value =
environment()->LookupRegister(bytecode_iterator().GetRegisterOperand(0));
environment()->BindRegister(bytecode_iterator().GetRegisterOperand(1), value);
}
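// Builds a JSLoadGlobal node for {name} using the feedback in
// {feedback_slot_index}; the feedback vector is passed as an explicit input.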
Node* BytecodeGraphBuilder::BuildLoadGlobal(NameRef name,
uint32_t feedback_slot_index,
TypeofMode typeof_mode) {
FeedbackSource feedback = CreateFeedbackSource(feedback_slot_index);
DCHECK(IsLoadGlobalICKind(broker()->GetFeedbackSlotKind(feedback)));
const Operator* op =
javascript()->LoadGlobal(name.object(), feedback, typeof_mode);
DCHECK(IrOpcode::IsFeedbackCollectingOpcode(op->opcode()));
return NewNode(op, feedback_vector_node());
}
void BytecodeGraphBuilder::VisitLdaGlobal() {
PrepareEagerCheckpoint();
NameRef name(broker(),
bytecode_iterator().GetConstantForIndexOperand(0, isolate()));
uint32_t feedback_slot_index = bytecode_iterator().GetIndexOperand(1);
Node* node =
BuildLoadGlobal(name, feedback_slot_index, TypeofMode::NOT_INSIDE_TYPEOF);
environment()->BindAccumulator(node, Environment::kAttachFrameState);
}
void BytecodeGraphBuilder::VisitLdaGlobalInsideTypeof() {
PrepareEagerCheckpoint();
NameRef name(broker(),
bytecode_iterator().GetConstantForIndexOperand(0, isolate()));
uint32_t feedback_slot_index = bytecode_iterator().GetIndexOperand(1);
Node* node =
BuildLoadGlobal(name, feedback_slot_index, TypeofMode::INSIDE_TYPEOF);
environment()->BindAccumulator(node, Environment::kAttachFrameState);
}
void BytecodeGraphBuilder::VisitStaGlobal() {
PrepareEagerCheckpoint();
NameRef name(broker(),
bytecode_iterator().GetConstantForIndexOperand(0, isolate()));
FeedbackSource feedback =
CreateFeedbackSource(bytecode_iterator().GetIndexOperand(1));
Node* value = environment()->LookupAccumulator();
LanguageMode language_mode =
GetLanguageModeFromSlotKind(broker()->GetFeedbackSlotKind(feedback));
const Operator* op =
javascript()->StoreGlobal(language_mode, name.object(), feedback);
DCHECK(IrOpcode::IsFeedbackCollectingOpcode(op->opcode()));
Node* node = NewNode(op, value, feedback_vector_node());
environment()->RecordAfterState(node, Environment::kAttachFrameState);
}
void BytecodeGraphBuilder::VisitStaInArrayLiteral() {
PrepareEagerCheckpoint();
Node* value = environment()->LookupAccumulator();
Node* array =
environment()->LookupRegister(bytecode_iterator().GetRegisterOperand(0));
Node* index =
environment()->LookupRegister(bytecode_iterator().GetRegisterOperand(1));
FeedbackSource feedback =
CreateFeedbackSource(bytecode_iterator().GetIndexOperand(2));
const Operator* op = javascript()->StoreInArrayLiteral(feedback);
JSTypeHintLowering::LoweringResult lowering =
TryBuildSimplifiedStoreKeyed(op, array, index, value, feedback.slot);
if (lowering.IsExit()) return;
Node* node = nullptr;
if (lowering.IsSideEffectFree()) {
node = lowering.value();
} else {
DCHECK(!lowering.Changed());
DCHECK(IrOpcode::IsFeedbackCollectingOpcode(op->opcode()));
node = NewNode(op, array, index, value, feedback_vector_node());
}
environment()->RecordAfterState(node, Environment::kAttachFrameState);
}
void BytecodeGraphBuilder::VisitStaDataPropertyInLiteral() {
PrepareEagerCheckpoint();
Node* object =
environment()->LookupRegister(bytecode_iterator().GetRegisterOperand(0));
Node* name =
environment()->LookupRegister(bytecode_iterator().GetRegisterOperand(1));
Node* value = environment()->LookupAccumulator();
int flags = bytecode_iterator().GetFlagOperand(2);
FeedbackSource feedback =
CreateFeedbackSource(bytecode_iterator().GetIndexOperand(3));
const Operator* op = javascript()->StoreDataPropertyInLiteral(feedback);
JSTypeHintLowering::LoweringResult lowering =
TryBuildSimplifiedStoreKeyed(op, object, name, value, feedback.slot);
if (lowering.IsExit()) return;
Node* node = nullptr;
if (lowering.IsSideEffectFree()) {
node = lowering.value();
} else {
DCHECK(!lowering.Changed());
DCHECK(IrOpcode::IsFeedbackCollectingOpcode(op->opcode()));
node = NewNode(op, object, name, value, jsgraph()->Constant(flags),
feedback_vector_node());
}
environment()->RecordAfterState(node, Environment::kAttachFrameState);
}
void BytecodeGraphBuilder::VisitCollectTypeProfile() {
PrepareEagerCheckpoint();
Node* position =
jsgraph()->Constant(bytecode_iterator().GetImmediateOperand(0));
Node* value = environment()->LookupAccumulator();
Node* vector = jsgraph()->Constant(feedback_vector());
const Operator* op = javascript()->CallRuntime(Runtime::kCollectTypeProfile);
Node* node = NewNode(op, position, value, vector);
environment()->RecordAfterState(node, Environment::kAttachFrameState);
}
void BytecodeGraphBuilder::VisitLdaContextSlot() {
const Operator* op = javascript()->LoadContext(
bytecode_iterator().GetUnsignedImmediateOperand(2),
bytecode_iterator().GetIndexOperand(1), false);
Node* node = NewNode(op);
Node* context =
environment()->LookupRegister(bytecode_iterator().GetRegisterOperand(0));
NodeProperties::ReplaceContextInput(node, context);
environment()->BindAccumulator(node);
}
void BytecodeGraphBuilder::VisitLdaImmutableContextSlot() {
const Operator* op = javascript()->LoadContext(
bytecode_iterator().GetUnsignedImmediateOperand(2),
bytecode_iterator().GetIndexOperand(1), true);
Node* node = NewNode(op);
Node* context =
environment()->LookupRegister(bytecode_iterator().GetRegisterOperand(0));
NodeProperties::ReplaceContextInput(node, context);
environment()->BindAccumulator(node);
}
void BytecodeGraphBuilder::VisitLdaCurrentContextSlot() {
const Operator* op = javascript()->LoadContext(
0, bytecode_iterator().GetIndexOperand(0), false);
Node* node = NewNode(op);
environment()->BindAccumulator(node);
}
void BytecodeGraphBuilder::VisitLdaImmutableCurrentContextSlot() {
const Operator* op = javascript()->LoadContext(
0, bytecode_iterator().GetIndexOperand(0), true);
Node* node = NewNode(op);
environment()->BindAccumulator(node);
}
void BytecodeGraphBuilder::VisitStaContextSlot() {
const Operator* op = javascript()->StoreContext(
bytecode_iterator().GetUnsignedImmediateOperand(2),
bytecode_iterator().GetIndexOperand(1));
Node* value = environment()->LookupAccumulator();
Node* node = NewNode(op, value);
Node* context =
environment()->LookupRegister(bytecode_iterator().GetRegisterOperand(0));
NodeProperties::ReplaceContextInput(node, context);
}
void BytecodeGraphBuilder::VisitStaCurrentContextSlot() {
const Operator* op =
javascript()->StoreContext(0, bytecode_iterator().GetIndexOperand(0));
Node* value = environment()->LookupAccumulator();
NewNode(op, value);
}
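// Builds a dynamic (lookup) slot load via the LoadLookupSlot* runtime
// functions. Lookup slots are typically needed when static scoping cannot be
// relied upon, e.g. in functions that contain a sloppy-mode 'eval'.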
void BytecodeGraphBuilder::BuildLdaLookupSlot(TypeofMode typeof_mode) {
PrepareEagerCheckpoint();
Node* name = jsgraph()->Constant(ObjectRef(
broker(), bytecode_iterator().GetConstantForIndexOperand(0, isolate())));
const Operator* op =
javascript()->CallRuntime(typeof_mode == TypeofMode::NOT_INSIDE_TYPEOF
? Runtime::kLoadLookupSlot
: Runtime::kLoadLookupSlotInsideTypeof);
Node* value = NewNode(op, name);
environment()->BindAccumulator(value, Environment::kAttachFrameState);
}
void BytecodeGraphBuilder::VisitLdaLookupSlot() {
BuildLdaLookupSlot(TypeofMode::NOT_INSIDE_TYPEOF);
}
void BytecodeGraphBuilder::VisitLdaLookupSlotInsideTypeof() {
BuildLdaLookupSlot(TypeofMode::INSIDE_TYPEOF);
}
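// Emits a check of the extension slot of the context at {depth}. If the slot
// is not undefined (i.e. the context has an extension object), control is
// merged into {slow_environment}, which is created on first use.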
BytecodeGraphBuilder::Environment*
BytecodeGraphBuilder::CheckContextExtensionAtDepth(
Environment* slow_environment, uint32_t depth) {
Node* extension_slot = NewNode(
javascript()->LoadContext(depth, Context::EXTENSION_INDEX, false));
Node* check_no_extension =
NewNode(simplified()->ReferenceEqual(), extension_slot,
jsgraph()->UndefinedConstant());
NewBranch(check_no_extension);
{
SubEnvironment sub_environment(this);
NewIfFalse();
// If there is an extension, merge into the slow path.
if (slow_environment == nullptr) {
slow_environment = environment();
NewMerge();
} else {
slow_environment->Merge(environment(),
bytecode_analysis().GetInLivenessFor(
bytecode_iterator().current_offset()));
}
}
NewIfTrue();
// Do nothing if there is no extension, eventually falling through to
// the fast path.
DCHECK_NOT_NULL(slow_environment);
return slow_environment;
}
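// Tries to recover the ScopeInfo of the current context from the node that
// created it. For the incoming context (a Parameter node), the ScopeInfo of
// the shared function info is used, adjusted to the outer scope if present.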
base::Optional<ScopeInfoRef> BytecodeGraphBuilder::TryGetScopeInfo() {
Node* context = environment()->Context();
switch (context->opcode()) {
case IrOpcode::kJSCreateFunctionContext:
return ScopeInfoRef(
broker(),
CreateFunctionContextParametersOf(context->op()).scope_info());
case IrOpcode::kJSCreateBlockContext:
case IrOpcode::kJSCreateCatchContext:
case IrOpcode::kJSCreateWithContext:
return ScopeInfoRef(broker(), ScopeInfoOf(context->op()));
case IrOpcode::kParameter: {
ScopeInfoRef scope_info = shared_info_.scope_info();
if (scope_info.HasOuterScopeInfo()) {
scope_info = scope_info.OuterScopeInfo();
}
return scope_info;
}
default:
return base::nullopt;
}
}
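// Walks the context chain up to {depth} and emits extension checks only for
// those scopes whose ScopeInfo says they can have an extension. Falls back to
// CheckContextExtensionsSlowPath when no ScopeInfo is available. Returns the
// merged slow-path environment, or nullptr if no checks were emitted.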
BytecodeGraphBuilder::Environment* BytecodeGraphBuilder::CheckContextExtensions(
uint32_t depth) {
base::Optional<ScopeInfoRef> maybe_scope_info = TryGetScopeInfo();
if (!maybe_scope_info.has_value()) {
return CheckContextExtensionsSlowPath(depth);
}
ScopeInfoRef scope_info = maybe_scope_info.value();
// We only need to check up to the last-but-one depth, because an eval
// in the same scope as the variable itself has no way of shadowing it.
Environment* slow_environment = nullptr;
for (uint32_t d = 0; d < depth; d++) {
if (scope_info.HasContextExtensionSlot()) {
slow_environment = CheckContextExtensionAtDepth(slow_environment, d);
}
DCHECK_IMPLIES(!scope_info.HasOuterScopeInfo(), d + 1 == depth);
if (scope_info.HasOuterScopeInfo()) {
scope_info = scope_info.OuterScopeInfo();
}
}
// The depth can be zero, in which case no slow-path checks are built, and
// the slow path environment can be null.
DCHECK_IMPLIES(slow_environment == nullptr, depth == 0);
return slow_environment;
}
BytecodeGraphBuilder::Environment*
BytecodeGraphBuilder::CheckContextExtensionsSlowPath(uint32_t depth) {
// Output environment for the paths on which some context has an extension.
Environment* slow_environment = nullptr;
// We only need to check up to the last-but-one depth, because an eval
// in the same scope as the variable itself has no way of shadowing it.
for (uint32_t d = 0; d < depth; d++) {
Node* has_extension = NewNode(javascript()->HasContextExtension(d));
Environment* undefined_extension_env;
NewBranch(has_extension);
{
SubEnvironment sub_environment(this);
NewIfTrue();
slow_environment = CheckContextExtensionAtDepth(slow_environment, d);
undefined_extension_env = environment();
}
NewIfFalse();
environment()->Merge(undefined_extension_env,
bytecode_analysis().GetInLivenessFor(
bytecode_iterator().current_offset()));
mark_as_needing_eager_checkpoint(true);
// Do nothing if there is no extension, eventually falling through to
// the fast path.
}
// The depth can be zero, in which case no slow-path checks are built, and
// the slow path environment can be null.
DCHECK_IMPLIES(slow_environment == nullptr, depth == 0);
return slow_environment;
}
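// Builds LdaLookupContextSlot(InsideTypeof): a fast context load guarded by
// the extension checks above, with a runtime LoadLookupSlot* call on the
// slow path, and a merge of both environments afterwards.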
void BytecodeGraphBuilder::BuildLdaLookupContextSlot(TypeofMode typeof_mode) {
uint32_t depth = bytecode_iterator().GetUnsignedImmediateOperand(2);
// Check whether any context on the chain up to {depth} has an extension.
Environment* slow_environment = CheckContextExtensions(depth);
// Fast path, do a context load.
{
uint32_t slot_index = bytecode_iterator().GetIndexOperand(1);
const Operator* op = javascript()->LoadContext(depth, slot_index, false);
environment()->BindAccumulator(NewNode(op));
}
// Only build the slow path if there were any slow-path checks.
if (slow_environment != nullptr) {
// Add a merge to the fast environment.
NewMerge();
Environment* fast_environment = environment();
// Slow path, do a runtime load lookup.
set_environment(slow_environment);
{
Node* name = jsgraph()->Constant(ObjectRef(
broker(),
bytecode_iterator().GetConstantForIndexOperand(0, isolate())));
const Operator* op =
javascript()->CallRuntime(typeof_mode == TypeofMode::NOT_INSIDE_TYPEOF
? Runtime::kLoadLookupSlot
: Runtime::kLoadLookupSlotInsideTypeof);
Node* value = NewNode(op, name);
environment()->BindAccumulator(value, Environment::kAttachFrameState);
}
fast_environment->Merge(environment(),
bytecode_analysis().GetOutLivenessFor(
bytecode_iterator().current_offset()));
set_environment(fast_environment);
mark_as_needing_eager_checkpoint(true);
}
}
void BytecodeGraphBuilder::VisitLdaLookupContextSlot() {
BuildLdaLookupContextSlot(TypeofMode::NOT_INSIDE_TYPEOF);
}
void BytecodeGraphBuilder::VisitLdaLookupContextSlotInsideTypeof() {
BuildLdaLookupContextSlot(TypeofMode::INSIDE_TYPEOF);
}
void BytecodeGraphBuilder::BuildLdaLookupGlobalSlot(TypeofMode typeof_mode) {
uint32_t depth = bytecode_iterator().GetUnsignedImmediateOperand(2);
// Check whether any context on the chain up to {depth} has an extension.
Environment* slow_environment = CheckContextExtensions(depth);
// Fast path, do a global load.
{
PrepareEagerCheckpoint();
NameRef name(broker(),
bytecode_iterator().GetConstantForIndexOperand(0, isolate()));
uint32_t feedback_slot_index = bytecode_iterator().GetIndexOperand(1);
Node* node = BuildLoadGlobal(name, feedback_slot_index, typeof_mode);
environment()->BindAccumulator(node, Environment::kAttachFrameState);
}
// Only build the slow path if there were any slow-path checks.
if (slow_environment != nullptr) {
// Add a merge to the fast environment.
NewMerge();
Environment* fast_environment = environment();
// Slow path, do a runtime load lookup.
set_environment(slow_environment);
{
Node* name = jsgraph()->Constant(NameRef(
broker(),
bytecode_iterator().GetConstantForIndexOperand(0, isolate())));
const Operator* op =
javascript()->CallRuntime(typeof_mode == TypeofMode::NOT_INSIDE_TYPEOF
? Runtime::kLoadLookupSlot
: Runtime::kLoadLookupSlotInsideTypeof);
Node* value = NewNode(op, name);
environment()->BindAccumulator(value, Environment::kAttachFrameState);
}
fast_environment->Merge(environment(),
bytecode_analysis().GetOutLivenessFor(
bytecode_iterator().current_offset()));
set_environment(fast_environment);
mark_as_needing_eager_checkpoint(true);
}
}
void BytecodeGraphBuilder::VisitLdaLookupGlobalSlot() {
BuildLdaLookupGlobalSlot(TypeofMode::NOT_INSIDE_TYPEOF);
}
void BytecodeGraphBuilder::VisitLdaLookupGlobalSlotInsideTypeof() {
BuildLdaLookupGlobalSlot(TypeofMode::INSIDE_TYPEOF);
}
void BytecodeGraphBuilder::VisitStaLookupSlot() {
PrepareEagerCheckpoint();
Node* value = environment()->LookupAccumulator();
Node* name = jsgraph()->Constant(ObjectRef(
broker(), bytecode_iterator().GetConstantForIndexOperand(0, isolate())));
int bytecode_flags = bytecode_iterator().GetFlagOperand(1);
LanguageMode language_mode = static_cast<LanguageMode>(
interpreter::StoreLookupSlotFlags::LanguageModeBit::decode(
bytecode_flags));
LookupHoistingMode lookup_hoisting_mode = static_cast<LookupHoistingMode>(
interpreter::StoreLookupSlotFlags::LookupHoistingModeBit::decode(
bytecode_flags));
DCHECK_IMPLIES(lookup_hoisting_mode == LookupHoistingMode::kLegacySloppy,
is_sloppy(language_mode));
const Operator* op = javascript()->CallRuntime(
is_strict(language_mode)
? Runtime::kStoreLookupSlot_Strict
: lookup_hoisting_mode == LookupHoistingMode::kLegacySloppy
? Runtime::kStoreLookupSlot_SloppyHoisting
: Runtime::kStoreLookupSlot_Sloppy);
Node* store = NewNode(op, name, value);
environment()->BindAccumulator(store, Environment::kAttachFrameState);
}
void BytecodeGraphBuilder::VisitLdaNamedProperty() {
PrepareEagerCheckpoint();
Node* object =
environment()->LookupRegister(bytecode_iterator().GetRegisterOperand(0));
NameRef name(broker(),
bytecode_iterator().GetConstantForIndexOperand(1, isolate()));
FeedbackSource feedback =
CreateFeedbackSource(bytecode_iterator().GetIndexOperand(2));
const Operator* op = javascript()->LoadNamed(name.object(), feedback);
JSTypeHintLowering::LoweringResult lowering =
TryBuildSimplifiedLoadNamed(op, feedback.slot);
if (lowering.IsExit()) return;
Node* node = nullptr;
if (lowering.IsSideEffectFree()) {
node = lowering.value();
} else {
DCHECK(!lowering.Changed());
DCHECK(IrOpcode::IsFeedbackCollectingOpcode(op->opcode()));
node = NewNode(op, object, feedback_vector_node());
}
environment()->BindAccumulator(node, Environment::kAttachFrameState);
}
void BytecodeGraphBuilder::VisitLdaNamedPropertyNoFeedback() {
PrepareEagerCheckpoint();
Node* object =
environment()->LookupRegister(bytecode_iterator().GetRegisterOperand(0));
NameRef name(broker(),
bytecode_iterator().GetConstantForIndexOperand(1, isolate()));
const Operator* op = javascript()->LoadNamed(name.object(), FeedbackSource());
DCHECK(IrOpcode::IsFeedbackCollectingOpcode(op->opcode()));
Node* node = NewNode(op, object, feedback_vector_node());
environment()->BindAccumulator(node, Environment::kAttachFrameState);
}
void BytecodeGraphBuilder::VisitLdaNamedPropertyFromSuper() {
PrepareEagerCheckpoint();
Node* receiver =
environment()->LookupRegister(bytecode_iterator().GetRegisterOperand(0));
Node* home_object = environment()->LookupAccumulator();
NameRef name(broker(),
bytecode_iterator().GetConstantForIndexOperand(1, isolate()));
FeedbackSource feedback =
CreateFeedbackSource(bytecode_iterator().GetIndexOperand(2));
const Operator* op =
javascript()->LoadNamedFromSuper(name.object(), feedback);
JSTypeHintLowering::LoweringResult lowering =
TryBuildSimplifiedLoadNamed(op, feedback.slot);
if (lowering.IsExit()) return;
Node* node = nullptr;
if (lowering.IsSideEffectFree()) {
node = lowering.value();
} else {
DCHECK(!lowering.Changed());
DCHECK(IrOpcode::IsFeedbackCollectingOpcode(op->opcode()));
node = NewNode(op, receiver, home_object, feedback_vector_node());
}
environment()->BindAccumulator(node, Environment::kAttachFrameState);
}
void BytecodeGraphBuilder::VisitLdaKeyedProperty() {
PrepareEagerCheckpoint();
Node* key = environment()->LookupAccumulator();
Node* object =
environment()->LookupRegister(bytecode_iterator().GetRegisterOperand(0));
FeedbackSource feedback =
CreateFeedbackSource(bytecode_iterator().GetIndexOperand(1));
const Operator* op = javascript()->LoadProperty(feedback);
JSTypeHintLowering::LoweringResult lowering =
TryBuildSimplifiedLoadKeyed(op, object, key, feedback.slot);
if (lowering.IsExit()) return;
Node* node = nullptr;
if (lowering.IsSideEffectFree()) {
node = lowering.value();
} else {
DCHECK(!lowering.Changed());
STATIC_ASSERT(JSLoadPropertyNode::ObjectIndex() == 0);
STATIC_ASSERT(JSLoadPropertyNode::KeyIndex() == 1);
STATIC_ASSERT(JSLoadPropertyNode::FeedbackVectorIndex() == 2);
DCHECK(IrOpcode::IsFeedbackCollectingOpcode(op->opcode()));
node = NewNode(op, object, key, feedback_vector_node());
}
environment()->BindAccumulator(node, Environment::kAttachFrameState);
}
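// Builds a named property store. StoreMode::kOwn corresponds to defining an
// own data property (e.g. in object literals), while StoreMode::kNormal is an
// ordinary named store honoring the language mode from the feedback slot.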
void BytecodeGraphBuilder::BuildNamedStore(StoreMode store_mode) {
PrepareEagerCheckpoint();
Node* value = environment()->LookupAccumulator();
Node* object =
environment()->LookupRegister(bytecode_iterator().GetRegisterOperand(0));
NameRef name(broker(),
bytecode_iterator().GetConstantForIndexOperand(1, isolate()));
FeedbackSource feedback =
CreateFeedbackSource(bytecode_iterator().GetIndexOperand(2));
const Operator* op;
if (store_mode == StoreMode::kOwn) {
DCHECK_EQ(FeedbackSlotKind::kStoreOwnNamed,
broker()->GetFeedbackSlotKind(feedback));
op = javascript()->StoreNamedOwn(name.object(), feedback);
} else {
DCHECK_EQ(StoreMode::kNormal, store_mode);
LanguageMode language_mode =
GetLanguageModeFromSlotKind(broker()->GetFeedbackSlotKind(feedback));
op = javascript()->StoreNamed(language_mode, name.object(), feedback);
}
JSTypeHintLowering::LoweringResult lowering =
TryBuildSimplifiedStoreNamed(op, object, value, feedback.slot);
if (lowering.IsExit()) return;
Node* node = nullptr;
if (lowering.IsSideEffectFree()) {
node = lowering.value();
} else {
DCHECK(!lowering.Changed());
DCHECK(IrOpcode::IsFeedbackCollectingOpcode(op->opcode()));
node = NewNode(op, object, value, feedback_vector_node());
}
environment()->RecordAfterState(node, Environment::kAttachFrameState);
}
void BytecodeGraphBuilder::VisitStaNamedProperty() {
BuildNamedStore(StoreMode::kNormal);
}
void BytecodeGraphBuilder::VisitStaNamedPropertyNoFeedback() {
PrepareEagerCheckpoint();
Node* value = environment()->LookupAccumulator();
Node* object =
environment()->LookupRegister(bytecode_iterator().GetRegisterOperand(0));
NameRef name(broker(),
bytecode_iterator().GetConstantForIndexOperand(1, isolate()));
LanguageMode language_mode =
static_cast<LanguageMode>(bytecode_iterator().GetFlagOperand(2));
const Operator* op =
javascript()->StoreNamed(language_mode, name.object(), FeedbackSource());
DCHECK(IrOpcode::IsFeedbackCollectingOpcode(op->opcode()));
Node* node = NewNode(op, object, value, feedback_vector_node());
environment()->RecordAfterState(node, Environment::kAttachFrameState);
}
void BytecodeGraphBuilder::VisitStaNamedOwnProperty() {
BuildNamedStore(StoreMode::kOwn);
}
void BytecodeGraphBuilder::VisitStaKeyedProperty() {
PrepareEagerCheckpoint();
Node* value = environment()->LookupAccumulator();
Node* object =
environment()->LookupRegister(bytecode_iterator().GetRegisterOperand(0));
Node* key =
environment()->LookupRegister(bytecode_iterator().GetRegisterOperand(1));
FeedbackSource source =
CreateFeedbackSource(bytecode_iterator().GetIndexOperand(2));
LanguageMode language_mode =
GetLanguageModeFromSlotKind(broker()->GetFeedbackSlotKind(source));
const Operator* op = javascript()->StoreProperty(language_mode, source);
JSTypeHintLowering::LoweringResult lowering =
TryBuildSimplifiedStoreKeyed(op, object, key, value, source.slot);
if (lowering.IsExit()) return;
Node* node = nullptr;
if (lowering.IsSideEffectFree()) {
node = lowering.value();
} else {
DCHECK(!lowering.Changed());
STATIC_ASSERT(JSStorePropertyNode::ObjectIndex() == 0);
STATIC_ASSERT(JSStorePropertyNode::KeyIndex() == 1);
STATIC_ASSERT(JSStorePropertyNode::ValueIndex() == 2);
STATIC_ASSERT(JSStorePropertyNode::FeedbackVectorIndex() == 3);
DCHECK(IrOpcode::IsFeedbackCollectingOpcode(op->opcode()));
node = NewNode(op, object, key, value, feedback_vector_node());
}
environment()->RecordAfterState(node, Environment::kAttachFrameState);
}
void BytecodeGraphBuilder::VisitLdaModuleVariable() {
int32_t cell_index = bytecode_iterator().GetImmediateOperand(0);
uint32_t depth = bytecode_iterator().GetUnsignedImmediateOperand(1);
Node* module =
NewNode(javascript()->LoadContext(depth, Context::EXTENSION_INDEX, true));
Node* value = NewNode(javascript()->LoadModule(cell_index), module);
environment()->BindAccumulator(value);
}
void BytecodeGraphBuilder::VisitStaModuleVariable() {
int32_t cell_index = bytecode_iterator().GetImmediateOperand(0);
uint32_t depth = bytecode_iterator().GetUnsignedImmediateOperand(1);
Node* module =
NewNode(javascript()->LoadContext(depth, Context::EXTENSION_INDEX, true));
Node* value = environment()->LookupAccumulator();
NewNode(javascript()->StoreModule(cell_index), module, value);
}
void BytecodeGraphBuilder::VisitPushContext() {
Node* new_context = environment()->LookupAccumulator();
environment()->BindRegister(bytecode_iterator().GetRegisterOperand(0),
environment()->Context());
environment()->SetContext(new_context);
}
void BytecodeGraphBuilder::VisitPopContext() {
Node* context =
environment()->LookupRegister(bytecode_iterator().GetRegisterOperand(0));
environment()->SetContext(context);
}
void BytecodeGraphBuilder::VisitCreateClosure() {
SharedFunctionInfoRef shared_info(
broker(), bytecode_iterator().GetConstantForIndexOperand(0, isolate()));
AllocationType allocation =
interpreter::CreateClosureFlags::PretenuredBit::decode(
bytecode_iterator().GetFlagOperand(2))
? AllocationType::kOld
: AllocationType::kYoung;
const Operator* op = javascript()->CreateClosure(
shared_info.object(),
jsgraph()->isolate()->builtins()->builtin_handle(Builtins::kCompileLazy),
allocation);
Node* closure = NewNode(
op, BuildLoadFeedbackCell(bytecode_iterator().GetIndexOperand(1)));
environment()->BindAccumulator(closure);
}
void BytecodeGraphBuilder::VisitCreateBlockContext() {
DisallowHeapAccessIf no_heap_access(should_disallow_heap_access());
ScopeInfoRef scope_info(
broker(), bytecode_iterator().GetConstantForIndexOperand(0, isolate()));
const Operator* op = javascript()->CreateBlockContext(scope_info.object());
Node* context = NewNode(op);
environment()->BindAccumulator(context);
}
void BytecodeGraphBuilder::VisitCreateFunctionContext() {
DisallowHeapAccessIf no_heap_access(should_disallow_heap_access());
ScopeInfoRef scope_info(
broker(), bytecode_iterator().GetConstantForIndexOperand(0, isolate()));
uint32_t slots = bytecode_iterator().GetUnsignedImmediateOperand(1);
const Operator* op = javascript()->CreateFunctionContext(
scope_info.object(), slots, FUNCTION_SCOPE);
Node* context = NewNode(op);
environment()->BindAccumulator(context);
}
void BytecodeGraphBuilder::VisitCreateEvalContext() {
DisallowHeapAccessIf no_heap_access(should_disallow_heap_access());
ScopeInfoRef scope_info(
broker(), bytecode_iterator().GetConstantForIndexOperand(0, isolate()));
uint32_t slots = bytecode_iterator().GetUnsignedImmediateOperand(1);
const Operator* op = javascript()->CreateFunctionContext(scope_info.object(),
slots, EVAL_SCOPE);
Node* context = NewNode(op);
environment()->BindAccumulator(context);
}
void BytecodeGraphBuilder::VisitCreateCatchContext() {
DisallowHeapAccessIf no_heap_access(should_disallow_heap_access());
interpreter::Register reg = bytecode_iterator().GetRegisterOperand(0);
Node* exception = environment()->LookupRegister(reg);
ScopeInfoRef scope_info(
broker(), bytecode_iterator().GetConstantForIndexOperand(1, isolate()));
const Operator* op = javascript()->CreateCatchContext(scope_info.object());
Node* context = NewNode(op, exception);
environment()->BindAccumulator(context);
}
void BytecodeGraphBuilder::VisitCreateWithContext() {
DisallowHeapAccessIf no_heap_access(should_disallow_heap_access());
Node* object =
environment()->LookupRegister(bytecode_iterator().GetRegisterOperand(0));
ScopeInfoRef scope_info(
broker(), bytecode_iterator().GetConstantForIndexOperand(1, isolate()));
const Operator* op = javascript()->CreateWithContext(scope_info.object());
Node* context = NewNode(op, object);
environment()->BindAccumulator(context);
}
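// Builds a JSCreateArguments node of the given {type} (mapped, unmapped, or
// rest) for the current function closure and binds it to the accumulator.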
void BytecodeGraphBuilder::BuildCreateArguments(CreateArgumentsType type) {
const Operator* op = javascript()->CreateArguments(type);
Node* object = NewNode(op, GetFunctionClosure());
environment()->BindAccumulator(object, Environment::kAttachFrameState);
}
void BytecodeGraphBuilder::VisitCreateMappedArguments() {
BuildCreateArguments(CreateArgumentsType::kMappedArguments);
}
void BytecodeGraphBuilder::VisitCreateUnmappedArguments() {
BuildCreateArguments(CreateArgumentsType::kUnmappedArguments);
}
void BytecodeGraphBuilder::VisitCreateRestParameter() {
BuildCreateArguments(CreateArgumentsType::kRestParameter);
}
void BytecodeGraphBuilder::VisitCreateRegExpLiteral() {
StringRef constant_pattern(
broker(), bytecode_iterator().GetConstantForIndexOperand(0, isolate()));
int const slot_id = bytecode_iterator().GetIndexOperand(1);
FeedbackSource pair = CreateFeedbackSource(slot_id);
int literal_flags = bytecode_iterator().GetFlagOperand(2);
STATIC_ASSERT(JSCreateLiteralRegExpNode::FeedbackVectorIndex() == 0);
const Operator* op = javascript()->CreateLiteralRegExp(
constant_pattern.object(), pair, literal_flags);
DCHECK(IrOpcode::IsFeedbackCollectingOpcode(op->opcode()));
Node* literal = NewNode(op, feedback_vector_node());
environment()->BindAccumulator(literal, Environment::kAttachFrameState);
}
void BytecodeGraphBuilder::VisitCreateArrayLiteral() {
ArrayBoilerplateDescriptionRef array_boilerplate_description(
broker(), bytecode_iterator().GetConstantForIndexOperand(0, isolate()));
int const slot_id = bytecode_iterator().GetIndexOperand(1);
FeedbackSource pair = CreateFeedbackSource(slot_id);
int bytecode_flags = bytecode_iterator().GetFlagOperand(2);
int literal_flags =
interpreter::CreateArrayLiteralFlags::FlagsBits::decode(bytecode_flags);
// Disable allocation site mementos. Only unoptimized code collects feedback
// about allocation sites; once the code is optimized we expect that data to
// have converged, so optimized code does not create mementos. We can revisit
// this when we have data to the contrary.
literal_flags |= ArrayLiteral::kDisableMementos;
int number_of_elements =
array_boilerplate_description.constants_elements_length();
STATIC_ASSERT(JSCreateLiteralArrayNode::FeedbackVectorIndex() == 0);
const Operator* op =
javascript()->CreateLiteralArray(array_boilerplate_description.object(),
pair, literal_flags, number_of_elements);
DCHECK(IrOpcode::IsFeedbackCollectingOpcode(op->opcode()));
Node* literal = NewNode(op, feedback_vector_node());
environment()->BindAccumulator(literal, Environment::kAttachFrameState);
}
void BytecodeGraphBuilder::VisitCreateEmptyArrayLiteral() {
int const slot_id = bytecode_iterator().GetIndexOperand(0);
FeedbackSource pair = CreateFeedbackSource(slot_id);
const Operator* op = javascript()->CreateEmptyLiteralArray(pair);
DCHECK(IrOpcode::IsFeedbackCollectingOpcode(op->opcode()));
Node* literal = NewNode(op, feedback_vector_node());
environment()->BindAccumulator(literal);
}
void BytecodeGraphBuilder::VisitCreateArrayFromIterable() {
Node* iterable = NewNode(javascript()->CreateArrayFromIterable(),
environment()->LookupAccumulator());
environment()->BindAccumulator(iterable, Environment::kAttachFrameState);
}
void BytecodeGraphBuilder::VisitCreateObjectLiteral() {
ObjectBoilerplateDescriptionRef constant_properties(
broker(), bytecode_iterator().GetConstantForIndexOperand(0, isolate()));
int const slot_id = bytecode_iterator().GetIndexOperand(1);
FeedbackSource pair = CreateFeedbackSource(slot_id);
int bytecode_flags = bytecode_iterator().GetFlagOperand(2);
int literal_flags =
interpreter::CreateObjectLiteralFlags::FlagsBits::decode(bytecode_flags);
int number_of_properties = constant_properties.size();
STATIC_ASSERT(JSCreateLiteralObjectNode::FeedbackVectorIndex() == 0);
const Operator* op = javascript()->CreateLiteralObject(
constant_properties.object(), pair, literal_flags, number_of_properties);
DCHECK(IrOpcode::IsFeedbackCollectingOpcode(op->opcode()));
Node* literal = NewNode(op, feedback_vector_node());
environment()->BindAccumulator(literal, Environment::kAttachFrameState);
}
void BytecodeGraphBuilder::VisitCreateEmptyObjectLiteral() {
Node* literal = NewNode(javascript()->CreateEmptyLiteralObject());
environment()->BindAccumulator(literal);
}
void BytecodeGraphBuilder::VisitCloneObject() {
PrepareEagerCheckpoint();
Node* source =
environment()->LookupRegister(bytecode_iterator().GetRegisterOperand(0));
int flags = bytecode_iterator().GetFlagOperand(1);
int slot = bytecode_iterator().GetIndexOperand(2);
const Operator* op =
javascript()->CloneObject(CreateFeedbackSource(slot), flags);
STATIC_ASSERT(JSCloneObjectNode::SourceIndex() == 0);
STATIC_ASSERT(JSCloneObjectNode::FeedbackVectorIndex() == 1);
DCHECK(IrOpcode::IsFeedbackCollectingOpcode(op->opcode()));
Node* value = NewNode(op, source, feedback_vector_node());
environment()->BindAccumulator(value, Environment::kAttachFrameState);
}
void BytecodeGraphBuilder::VisitGetTemplateObject() {
DisallowHeapAccessIf no_heap_access(should_disallow_heap_access());
FeedbackSource source =
CreateFeedbackSource(bytecode_iterator().GetIndexOperand(1));
TemplateObjectDescriptionRef description(
broker(), bytecode_iterator().GetConstantForIndexOperand(0, isolate()));
STATIC_ASSERT(JSGetTemplateObjectNode::FeedbackVectorIndex() == 0);
const Operator* op = javascript()->GetTemplateObject(
description.object(), shared_info().object(), source);
DCHECK(IrOpcode::IsFeedbackCollectingOpcode(op->opcode()));
Node* template_object = NewNode(op, feedback_vector_node());
environment()->BindAccumulator(template_object);
}
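// Collects the inputs for a JSCall node in the order expected by JSCallNode:
// target, receiver, the {arg_count} arguments from consecutive registers
// starting at {first_arg}, and finally the feedback vector.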
Node* const* BytecodeGraphBuilder::GetCallArgumentsFromRegisters(
Node* callee, Node* receiver, interpreter::Register first_arg,
int arg_count) {
const int arity = JSCallNode::ArityForArgc(arg_count);
Node** all = local_zone()->NewArray<Node*>(static_cast<size_t>(arity));
int cursor = 0;
STATIC_ASSERT(JSCallNode::TargetIndex() == 0);
STATIC_ASSERT(JSCallNode::ReceiverIndex() == 1);
STATIC_ASSERT(JSCallNode::FirstArgumentIndex() == 2);
STATIC_ASSERT(JSCallNode::kFeedbackVectorIsLastInput);
all[cursor++] = callee;
all[cursor++] = receiver;
// The function arguments are in consecutive registers.
const int arg_base = first_arg.index();
for (int i = 0; i < arg_count; ++i) {
all[cursor++] =
environment()->LookupRegister(interpreter::Register(arg_base + i));
}
all[cursor++] = feedback_vector_node();
DCHECK_EQ(cursor, arity);
return all;
}
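// Builds a JSCall node from the pre-collected {args}. Type hint lowering may
// replace the call with a simplified node (or an exit) based on the feedback
// in {slot_id}.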
void BytecodeGraphBuilder::BuildCall(ConvertReceiverMode receiver_mode,
Node* const* args, size_t arg_count,
int slot_id) {
DCHECK_EQ(interpreter::Bytecodes::GetReceiverMode(
bytecode_iterator().current_bytecode()),
receiver_mode);
PrepareEagerCheckpoint();
FeedbackSource feedback = CreateFeedbackSource(slot_id);
CallFrequency frequency = ComputeCallFrequency(slot_id);
SpeculationMode speculation_mode = GetSpeculationMode(slot_id);
const Operator* op =
javascript()->Call(arg_count, frequency, feedback, receiver_mode,
speculation_mode, CallFeedbackRelation::kRelated);
DCHECK(IrOpcode::IsFeedbackCollectingOpcode(op->opcode()));
JSTypeHintLowering::LoweringResult lowering = TryBuildSimplifiedCall(
op, args, static_cast<int>(arg_count), feedback.slot);
if (lowering.IsExit()) return;
Node* node = nullptr;
if (lowering.IsSideEffectFree()) {
node = lowering.value();
} else {
DCHECK(!lowering.Changed());
node = MakeNode(op, static_cast<int>(arg_count), args);
}
environment()->BindAccumulator(node, Environment::kAttachFrameState);
}
Node* const* BytecodeGraphBuilder::ProcessCallVarArgs(
ConvertReceiverMode receiver_mode, Node* callee,
interpreter::Register first_reg, int arg_count) {
DCHECK_GE(arg_count, 0);
Node* receiver_node;
interpreter::Register first_arg;
if (receiver_mode == ConvertReceiverMode::kNullOrUndefined) {
// The receiver is implicit (and undefined); the arguments are in
// consecutive registers.
receiver_node = jsgraph()->UndefinedConstant();
first_arg = first_reg;
} else {
// The receiver is in the first register, followed by the arguments in
// consecutive registers.
receiver_node = environment()->LookupRegister(first_reg);
first_arg = interpreter::Register(first_reg.index() + 1);
}
Node* const* call_args = GetCallArgumentsFromRegisters(callee, receiver_node,
first_arg, arg_count);
return call_args;
}
void BytecodeGraphBuilder::BuildCallVarArgs(ConvertReceiverMode receiver_mode) {
DCHECK_EQ(interpreter::Bytecodes::GetReceiverMode(
bytecode_iterator().current_bytecode()),
receiver_mode);
Node* callee =
environment()->LookupRegister(bytecode_iterator().GetRegisterOperand(0));
interpreter::Register first_reg = bytecode_iterator().GetRegisterOperand(1);
size_t reg_count = bytecode_iterator().GetRegisterCountOperand(2);
int const slot_id = bytecode_iterator().GetIndexOperand(3);
int arg_count = receiver_mode == ConvertReceiverMode::kNullOrUndefined
? static_cast<int>(reg_count)
: static_cast<int>(reg_count) - 1;
Node* const* call_args =
ProcessCallVarArgs(receiver_mode, callee, first_reg, arg_count);
BuildCall(receiver_mode, call_args, JSCallNode::ArityForArgc(arg_count),
slot_id);
}
void BytecodeGraphBuilder::VisitCallAnyReceiver() {
BuildCallVarArgs(ConvertReceiverMode::kAny);
}
void BytecodeGraphBuilder::VisitCallNoFeedback() {
DCHECK_EQ(interpreter::Bytecodes::GetReceiverMode(
bytecode_iterator().current_bytecode()),
ConvertReceiverMode::kAny);
PrepareEagerCheckpoint();
Node* callee =
environment()->LookupRegister(bytecode_iterator().GetRegisterOperand(0));
interpreter::Register first_reg = bytecode_iterator().GetRegisterOperand(1);
size_t reg_count = bytecode_iterator().GetRegisterCountOperand(2);
// The receiver is in the first register, followed by the arguments in
// consecutive registers.
int arg_count = static_cast<int>(reg_count) - 1;
int arity = JSCallNode::ArityForArgc(arg_count);
// Set the call frequency to a value below the minimum inlining frequency so
// that this one-shot call node is not inlined.
DCHECK(CallFrequency::kNoFeedbackCallFrequency < FLAG_min_inlining_frequency);
const Operator* call = javascript()->Call(
arity, CallFrequency(CallFrequency::kNoFeedbackCallFrequency));
Node* const* call_args = ProcessCallVarArgs(ConvertReceiverMode::kAny, callee,
first_reg, arg_count);
Node* value = MakeNode(call, arity, call_args);
environment()->BindAccumulator(value, Environment::kAttachFrameState);
}
void BytecodeGraphBuilder::VisitCallProperty() {
BuildCallVarArgs(ConvertReceiverMode::kNotNullOrUndefined);
}
void BytecodeGraphBuilder::VisitCallProperty0() {
Node* callee =
environment()->LookupRegister(bytecode_iterator().GetRegisterOperand(0));
Node* receiver =
environment()->LookupRegister(bytecode_iterator().GetRegisterOperand(1));
int const slot_id = bytecode_iterator().GetIndexOperand(2);
BuildCall(ConvertReceiverMode::kNotNullOrUndefined,
{callee, receiver, feedback_vector_node()}, slot_id);
}
void BytecodeGraphBuilder::VisitCallProperty1() {
Node* callee =
environment()->LookupRegister(bytecode_iterator().GetRegisterOperand(0));
Node* receiver =
environment()->LookupRegister(bytecode_iterator().GetRegisterOperand(1));
Node* arg0 =
environment()->LookupRegister(bytecode_iterator().GetRegisterOperand(2));
int const slot_id = bytecode_iterator().GetIndexOperand(3);
BuildCall(ConvertReceiverMode::kNotNullOrUndefined,
{callee, receiver, arg0, feedback_vector_node()}, slot_id);
}
void BytecodeGraphBuilder::VisitCallProperty2() {
Node* callee =
environment()->LookupRegister(bytecode_iterator().GetRegisterOperand(0));
Node* receiver =
environment()->LookupRegister(bytecode_iterator().GetRegisterOperand(1));
Node* arg0 =
environment()->LookupRegister(bytecode_iterator().GetRegisterOperand(2));
Node* arg1 =
environment()->LookupRegister(bytecode_iterator().GetRegisterOperand(3));
int const slot_id = bytecode_iterator().GetIndexOperand(4);
BuildCall(ConvertReceiverMode::kNotNullOrUndefined,
{callee, receiver, arg0, arg1, feedback_vector_node()}, slot_id);
}
void BytecodeGraphBuilder::VisitCallUndefinedReceiver() {
BuildCallVarArgs(ConvertReceiverMode::kNullOrUndefined);
}
void BytecodeGraphBuilder::VisitCallUndefinedReceiver0() {
Node* callee =
environment()->LookupRegister(bytecode_iterator().GetRegisterOperand(0));
Node* receiver = jsgraph()->UndefinedConstant();
int const slot_id = bytecode_iterator().GetIndexOperand(1);
BuildCall(ConvertReceiverMode::kNullOrUndefined,
{callee, receiver, feedback_vector_node()}, slot_id);
}
void BytecodeGraphBuilder::VisitCallUndefinedReceiver1() {
Node* callee =
environment()->LookupRegister(bytecode_iterator().GetRegisterOperand(0));
Node* receiver = jsgraph()->UndefinedConstant();
Node* arg0 =
environment()->LookupRegister(bytecode_iterator().GetRegisterOperand(1));
int const slot_id = bytecode_iterator().GetIndexOperand(2);
BuildCall(ConvertReceiverMode::kNullOrUndefined,
{callee, receiver, arg0, feedback_vector_node()}, slot_id);
}
void BytecodeGraphBuilder::VisitCallUndefinedReceiver2() {
Node* callee =
environment()->LookupRegister(bytecode_iterator().GetRegisterOperand(0));
Node* receiver = jsgraph()->UndefinedConstant();
Node* arg0 =
environment()->LookupRegister(bytecode_iterator().GetRegisterOperand(1));
Node* arg1 =
environment()->LookupRegister(bytecode_iterator().GetRegisterOperand(2));
int const slot_id = bytecode_iterator().GetIndexOperand(3);
BuildCall(ConvertReceiverMode::kNullOrUndefined,
{callee, receiver, arg0, arg1, feedback_vector_node()}, slot_id);
}
void BytecodeGraphBuilder::VisitCallWithSpread() {
PrepareEagerCheckpoint();
Node* callee =
environment()->LookupRegister(bytecode_iterator().GetRegisterOperand(0));
interpreter::Register receiver = bytecode_iterator().GetRegisterOperand(1);
Node* receiver_node = environment()->LookupRegister(receiver);
size_t reg_count = bytecode_iterator().GetRegisterCountOperand(2);
interpreter::Register first_arg = interpreter::Register(receiver.index() + 1);
int arg_count = static_cast<int>(reg_count) - 1;
Node* const* args = GetCallArgumentsFromRegisters(callee, receiver_node,
first_arg, arg_count);
int const slot_id = bytecode_iterator().GetIndexOperand(3);
FeedbackSource feedback = CreateFeedbackSource(slot_id);
CallFrequency frequency = ComputeCallFrequency(slot_id);
SpeculationMode speculation_mode = GetSpeculationMode(slot_id);
const Operator* op = javascript()->CallWithSpread(
JSCallWithSpreadNode::ArityForArgc(arg_count), frequency, feedback,
speculation_mode);
DCHECK(IrOpcode::IsFeedbackCollectingOpcode(op->opcode()));
JSTypeHintLowering::LoweringResult lowering = TryBuildSimplifiedCall(
op, args, static_cast<int>(arg_count), feedback.slot);
if (lowering.IsExit()) return;
Node* node = nullptr;
if (lowering.IsSideEffectFree()) {
node = lowering.value();
} else {
DCHECK(!lowering.Changed());
node = MakeNode(op, JSCallWithSpreadNode::ArityForArgc(arg_count), args);
}
environment()->BindAccumulator(node, Environment::kAttachFrameState);
}
void BytecodeGraphBuilder::VisitCallJSRuntime() {
PrepareEagerCheckpoint();
Node* callee = BuildLoadNativeContextField(
bytecode_iterator().GetNativeContextIndexOperand(0));
interpreter::Register first_reg = bytecode_iterator().GetRegisterOperand(1);
size_t reg_count = bytecode_iterator().GetRegisterCountOperand(2);
int arg_count = static_cast<int>(reg_count);
int arity = JSCallNode::ArityForArgc(arg_count);
const Operator* call = javascript()->Call(arity);
Node* const* call_args = ProcessCallVarArgs(
ConvertReceiverMode::kNullOrUndefined, callee, first_reg, arg_count);
Node* value = MakeNode(call, arity, call_args);
environment()->BindAccumulator(value, Environment::kAttachFrameState);
}
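// Collects {reg_count} consecutive registers starting at {receiver} as the
// inputs of a runtime call node built from {call_runtime_op}.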
Node* BytecodeGraphBuilder::ProcessCallRuntimeArguments(
const Operator* call_runtime_op, interpreter::Register receiver,
size_t reg_count) {
int arg_count = static_cast<int>(reg_count);
// Runtime calls take no extra inputs, so the arity equals the argument count.
int arity = arg_count;
Node** all = local_zone()->NewArray<Node*>(static_cast<size_t>(arity));
int first_arg_index = receiver.index();
for (int i = 0; i < static_cast<int>(reg_count); ++i) {
all[i] = environment()->LookupRegister(
interpreter::Register(first_arg_index + i));
}
Node* value = MakeNode(call_runtime_op, arity, all);
return value;
}
void BytecodeGraphBuilder::VisitCallRuntime() {
PrepareEagerCheckpoint();
Runtime::FunctionId function_id = bytecode_iterator().GetRuntimeIdOperand(0);
interpreter::Register receiver = bytecode_iterator().GetRegisterOperand(1);
size_t reg_count = bytecode_iterator().GetRegisterCountOperand(2);
// Create node to perform the runtime call.
const Operator* call = javascript()->CallRuntime(function_id, reg_count);
Node* value = ProcessCallRuntimeArguments(call, receiver, reg_count);
environment()->BindAccumulator(value, Environment::kAttachFrameState);
// Connect to the end if {function_id} is non-returning.
if (Runtime::IsNonReturning(function_id)) {
// TODO(7099): Investigate if we need LoopExit node here.
Node* control = NewNode(common()->Throw());
MergeControlToLeaveFunction(control);
}
}
void BytecodeGraphBuilder::VisitCallRuntimeForPair() {
PrepareEagerCheckpoint();
Runtime::FunctionId function_id = bytecode_iterator().GetRuntimeIdOperand(0);
interpreter::Register receiver = bytecode_iterator().GetRegisterOperand(1);
size_t reg_count = bytecode_iterator().GetRegisterCountOperand(2);
interpreter::Register first_return =
bytecode_iterator().GetRegisterOperand(3);
// Create node to perform the runtime call.
const Operator* call = javascript()->CallRuntime(function_id, reg_count);
Node* return_pair = ProcessCallRuntimeArguments(call, receiver, reg_count);
environment()->BindRegistersToProjections(first_return, return_pair,
Environment::kAttachFrameState);
}
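// Collects the inputs for a JSConstruct node: the construct target, the
// new.target, the {arg_count} arguments from consecutive registers starting
// at {first_arg}, and the feedback vector last, in the order expected by
// JSConstructNode.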
Node* const* BytecodeGraphBuilder::GetConstructArgumentsFromRegister(
Node* target, Node* new_target, interpreter::Register first_arg,
int arg_count) {
const int arity = JSConstructNode::ArityForArgc(arg_count);
Node** all = local_zone()->NewArray<Node*>(static_cast<size_t>(arity));
int cursor = 0;
STATIC_ASSERT(JSConstructNode::TargetIndex() == 0);