// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/compiler/wasm-compiler.h"
#include <memory>
#include "src/base/optional.h"
#include "src/base/platform/elapsed-timer.h"
#include "src/base/platform/platform.h"
#include "src/base/small-vector.h"
#include "src/base/v8-fallthrough.h"
#include "src/codegen/assembler-inl.h"
#include "src/codegen/assembler.h"
#include "src/codegen/code-factory.h"
#include "src/codegen/compiler.h"
#include "src/codegen/interface-descriptors.h"
#include "src/codegen/machine-type.h"
#include "src/codegen/optimized-compilation-info.h"
#include "src/compiler/backend/code-generator.h"
#include "src/compiler/backend/instruction-selector.h"
#include "src/compiler/common-operator.h"
#include "src/compiler/compiler-source-position-table.h"
#include "src/compiler/diamond.h"
#include "src/compiler/graph-assembler.h"
#include "src/compiler/graph-visualizer.h"
#include "src/compiler/graph.h"
#include "src/compiler/int64-lowering.h"
#include "src/compiler/linkage.h"
#include "src/compiler/machine-operator.h"
#include "src/compiler/node-matchers.h"
#include "src/compiler/node-origin-table.h"
#include "src/compiler/node-properties.h"
#include "src/compiler/pipeline.h"
#include "src/compiler/simd-scalar-lowering.h"
#include "src/compiler/zone-stats.h"
#include "src/execution/isolate-inl.h"
#include "src/heap/factory.h"
#include "src/logging/counters.h"
#include "src/logging/log.h"
#include "src/objects/heap-number.h"
#include "src/roots/roots.h"
#include "src/tracing/trace-event.h"
#include "src/trap-handler/trap-handler.h"
#include "src/utils/vector.h"
#include "src/wasm/function-body-decoder-impl.h"
#include "src/wasm/function-compiler.h"
#include "src/wasm/graph-builder-interface.h"
#include "src/wasm/jump-table-assembler.h"
#include "src/wasm/memory-tracing.h"
#include "src/wasm/object-access.h"
#include "src/wasm/wasm-code-manager.h"
#include "src/wasm/wasm-constants.h"
#include "src/wasm/wasm-limits.h"
#include "src/wasm/wasm-linkage.h"
#include "src/wasm/wasm-module.h"
#include "src/wasm/wasm-objects-inl.h"
#include "src/wasm/wasm-opcodes-inl.h"
namespace v8 {
namespace internal {
namespace compiler {
namespace {
#define FATAL_UNSUPPORTED_OPCODE(opcode) \
FATAL("Unsupported opcode 0x%x:%s", (opcode), \
wasm::WasmOpcodes::OpcodeName(opcode));
MachineType assert_size(int expected_size, MachineType type) {
DCHECK_EQ(expected_size, ElementSizeInBytes(type.representation()));
return type;
}
#define WASM_INSTANCE_OBJECT_SIZE(name) \
(WasmInstanceObject::k##name##OffsetEnd - \
WasmInstanceObject::k##name##Offset + 1) // NOLINT(whitespace/indent)
#define WASM_INSTANCE_OBJECT_OFFSET(name) \
wasm::ObjectAccess::ToTagged(WasmInstanceObject::k##name##Offset)
// We would like to use gasm_->Call() to implement this macro,
// but this doesn't work currently when we try to call it from functions
// which set IfSuccess/IfFailure control paths (e.g. within Throw()).
// TODO(manoskouk): Maybe clean this up at some point?
#define CALL_BUILTIN(name, ...) \
SetEffect(graph()->NewNode( \
mcgraph()->common()->Call(GetBuiltinCallDescriptor<name##Descriptor>( \
this, StubCallMode::kCallBuiltinPointer)), \
GetBuiltinPointerTarget(Builtins::k##name), ##__VA_ARGS__, effect(), \
control()))
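// Illustrative expansion (hypothetical builtin name "Foo"): CALL_BUILTIN(Foo,
// arg) builds a Call node using FooDescriptor and the builtin pointer target
// Builtins::kFoo, appends the current effect and control as inputs, and
// registers the call as the new effect via SetEffect().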
#define LOAD_INSTANCE_FIELD(name, type) \
gasm_->Load(assert_size(WASM_INSTANCE_OBJECT_SIZE(name), type), \
instance_node_.get(), WASM_INSTANCE_OBJECT_OFFSET(name))
#define LOAD_FULL_POINTER(base_pointer, byte_offset) \
gasm_->Load(MachineType::Pointer(), base_pointer, byte_offset)
#define LOAD_TAGGED_POINTER(base_pointer, byte_offset) \
gasm_->Load(MachineType::TaggedPointer(), base_pointer, byte_offset)
#define LOAD_TAGGED_ANY(base_pointer, byte_offset) \
gasm_->Load(MachineType::AnyTagged(), base_pointer, byte_offset)
#define LOAD_FIXED_ARRAY_SLOT(array_node, index, type) \
gasm_->Load(type, array_node, \
wasm::ObjectAccess::ElementOffsetInTaggedFixedArray(index))
#define LOAD_FIXED_ARRAY_SLOT_SMI(array_node, index) \
LOAD_FIXED_ARRAY_SLOT(array_node, index, MachineType::TaggedSigned())
#define LOAD_FIXED_ARRAY_SLOT_PTR(array_node, index) \
LOAD_FIXED_ARRAY_SLOT(array_node, index, MachineType::TaggedPointer())
#define LOAD_FIXED_ARRAY_SLOT_ANY(array_node, index) \
LOAD_FIXED_ARRAY_SLOT(array_node, index, MachineType::AnyTagged())
#define STORE_RAW(base, offset, val, rep, barrier) \
STORE_RAW_NODE_OFFSET(base, gasm_->Int32Constant(offset), val, rep, barrier)
#define STORE_RAW_NODE_OFFSET(base, node_offset, val, rep, barrier) \
gasm_->Store(StoreRepresentation(rep, barrier), base, node_offset, val)
// This can be used to store tagged Smi values only.
#define STORE_FIXED_ARRAY_SLOT_SMI(array_node, index, value) \
STORE_RAW(array_node, \
wasm::ObjectAccess::ElementOffsetInTaggedFixedArray(index), value, \
MachineRepresentation::kTaggedSigned, kNoWriteBarrier)
// This can be used to store any tagged (Smi and HeapObject) value.
#define STORE_FIXED_ARRAY_SLOT_ANY(array_node, index, value) \
STORE_RAW(array_node, \
wasm::ObjectAccess::ElementOffsetInTaggedFixedArray(index), value, \
MachineRepresentation::kTagged, kFullWriteBarrier)
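// Note: the Smi-only store above can skip the write barrier (kNoWriteBarrier)
// because Smis are not heap pointers; storing an arbitrary tagged value may
// introduce a new heap reference and therefore needs kFullWriteBarrier.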
void EnsureEnd(MachineGraph* mcgraph) {
Graph* g = mcgraph->graph();
if (g->end() == nullptr) {
g->SetEnd(g->NewNode(mcgraph->common()->End(0)));
}
}
void MergeControlToEnd(MachineGraph* mcgraph, Node* node) {
EnsureEnd(mcgraph);
NodeProperties::MergeControlToEnd(mcgraph->graph(), mcgraph->common(), node);
}
bool ContainsSimd(const wasm::FunctionSig* sig) {
for (auto type : sig->all()) {
if (type == wasm::kWasmS128) return true;
}
return false;
}
bool ContainsInt64(const wasm::FunctionSig* sig) {
for (auto type : sig->all()) {
if (type == wasm::kWasmI64) return true;
}
return false;
}
template <typename BuiltinDescriptor>
CallDescriptor* GetBuiltinCallDescriptor(WasmGraphBuilder* builder,
StubCallMode stub_mode) {
BuiltinDescriptor interface_descriptor;
return Linkage::GetStubCallDescriptor(
builder->mcgraph()->zone(), // zone
interface_descriptor, // descriptor
interface_descriptor.GetStackParameterCount(), // stack parameter count
CallDescriptor::kNoFlags, // flags
Operator::kNoProperties, // properties
stub_mode); // stub call mode
}
} // namespace
class WasmGraphAssembler : public GraphAssembler {
public:
WasmGraphAssembler(MachineGraph* mcgraph, Zone* zone)
: GraphAssembler(mcgraph, zone) {}
};
WasmGraphBuilder::WasmGraphBuilder(
wasm::CompilationEnv* env, Zone* zone, MachineGraph* mcgraph,
const wasm::FunctionSig* sig,
compiler::SourcePositionTable* source_position_table)
: gasm_(std::make_unique<WasmGraphAssembler>(mcgraph, zone)),
zone_(zone),
mcgraph_(mcgraph),
env_(env),
has_simd_(ContainsSimd(sig)),
untrusted_code_mitigations_(FLAG_untrusted_code_mitigations),
sig_(sig),
source_position_table_(source_position_table) {
DCHECK_IMPLIES(use_trap_handler(), trap_handler::IsTrapHandlerEnabled());
DCHECK_NOT_NULL(mcgraph_);
}
// Destructor defined here, where the definition of {WasmGraphAssembler} is
// available.
WasmGraphBuilder::~WasmGraphBuilder() = default;
Node* WasmGraphBuilder::Error() { return mcgraph()->Dead(); }
Node* WasmGraphBuilder::Start(unsigned params) {
Node* start = graph()->NewNode(mcgraph()->common()->Start(params));
graph()->SetStart(start);
return start;
}
Node* WasmGraphBuilder::Param(unsigned index) {
return graph()->NewNode(mcgraph()->common()->Parameter(index),
graph()->start());
}
Node* WasmGraphBuilder::Loop(Node* entry) {
return graph()->NewNode(mcgraph()->common()->Loop(1), entry);
}
Node* WasmGraphBuilder::TerminateLoop(Node* effect, Node* control) {
Node* terminate =
graph()->NewNode(mcgraph()->common()->Terminate(), effect, control);
MergeControlToEnd(mcgraph(), terminate);
return terminate;
}
Node* WasmGraphBuilder::TerminateThrow(Node* effect, Node* control) {
Node* terminate =
graph()->NewNode(mcgraph()->common()->Throw(), effect, control);
MergeControlToEnd(mcgraph(), terminate);
return terminate;
}
bool WasmGraphBuilder::IsPhiWithMerge(Node* phi, Node* merge) {
return phi && IrOpcode::IsPhiOpcode(phi->opcode()) &&
NodeProperties::GetControlInput(phi) == merge;
}
bool WasmGraphBuilder::ThrowsException(Node* node, Node** if_success,
Node** if_exception) {
if (node->op()->HasProperty(compiler::Operator::kNoThrow)) {
return false;
}
*if_success = graph()->NewNode(mcgraph()->common()->IfSuccess(), node);
*if_exception =
graph()->NewNode(mcgraph()->common()->IfException(), node, node);
return true;
}
void WasmGraphBuilder::AppendToMerge(Node* merge, Node* from) {
DCHECK(IrOpcode::IsMergeOpcode(merge->opcode()));
merge->AppendInput(mcgraph()->zone(), from);
int new_size = merge->InputCount();
NodeProperties::ChangeOp(
merge, mcgraph()->common()->ResizeMergeOrPhi(merge->op(), new_size));
}
void WasmGraphBuilder::AppendToPhi(Node* phi, Node* from) {
DCHECK(IrOpcode::IsPhiOpcode(phi->opcode()));
int new_size = phi->InputCount();
phi->InsertInput(mcgraph()->zone(), phi->InputCount() - 1, from);
NodeProperties::ChangeOp(
phi, mcgraph()->common()->ResizeMergeOrPhi(phi->op(), new_size));
}
Node* WasmGraphBuilder::Merge(unsigned count, Node** controls) {
return graph()->NewNode(mcgraph()->common()->Merge(count), count, controls);
}
Node* WasmGraphBuilder::Phi(wasm::ValueType type, unsigned count,
Node** vals_and_control) {
DCHECK(IrOpcode::IsMergeOpcode(vals_and_control[count]->opcode()));
return graph()->NewNode(
mcgraph()->common()->Phi(type.machine_representation(), count), count + 1,
vals_and_control);
}
Node* WasmGraphBuilder::EffectPhi(unsigned count, Node** effects_and_control) {
DCHECK(IrOpcode::IsMergeOpcode(effects_and_control[count]->opcode()));
return graph()->NewNode(mcgraph()->common()->EffectPhi(count), count + 1,
effects_and_control);
}
Node* WasmGraphBuilder::RefNull() {
return LOAD_FULL_POINTER(
BuildLoadIsolateRoot(),
IsolateData::root_slot_offset(RootIndex::kNullValue));
}
Node* WasmGraphBuilder::RefFunc(uint32_t function_index) {
auto call_descriptor = GetBuiltinCallDescriptor<WasmRefFuncDescriptor>(
this, StubCallMode::kCallWasmRuntimeStub);
// A direct call to a wasm runtime stub defined in this module.
// Just encode the stub index. This will be patched at relocation.
Node* call_target = mcgraph()->RelocatableIntPtrConstant(
wasm::WasmCode::kWasmRefFunc, RelocInfo::WASM_STUB_CALL);
return SetEffectControl(graph()->NewNode(
mcgraph()->common()->Call(call_descriptor), call_target,
mcgraph()->Uint32Constant(function_index), effect(), control()));
}
Node* WasmGraphBuilder::RefAsNonNull(Node* arg,
wasm::WasmCodePosition position) {
TrapIfTrue(wasm::kTrapIllegalCast, gasm_->WordEqual(arg, RefNull()),
position);
return arg;
}
Node* WasmGraphBuilder::NoContextConstant() {
return mcgraph()->IntPtrConstant(0);
}
Node* WasmGraphBuilder::BuildLoadIsolateRoot() {
// The IsolateRoot is loaded from the instance node so that the generated
// code is Isolate independent. This can be overridden by setting a specific
// node in {isolate_root_node_} beforehand.
if (isolate_root_node_.is_set()) return isolate_root_node_.get();
return LOAD_INSTANCE_FIELD(IsolateRoot, MachineType::Pointer());
}
Node* WasmGraphBuilder::Int32Constant(int32_t value) {
return mcgraph()->Int32Constant(value);
}
Node* WasmGraphBuilder::Int64Constant(int64_t value) {
return mcgraph()->Int64Constant(value);
}
void WasmGraphBuilder::StackCheck(wasm::WasmCodePosition position) {
DCHECK_NOT_NULL(env_); // Wrappers don't get stack checks.
if (!FLAG_wasm_stack_checks || !env_->runtime_exception_support) {
return;
}
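// Load the address of the stack limit from the instance, then the limit
// itself, and compare it against the current stack pointer. Only on overflow
// (hinted as unlikely) do we call the WasmStackGuard runtime stub below.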
Node* limit_address = graph()->NewNode(
mcgraph()->machine()->Load(MachineType::Pointer()), instance_node_.get(),
mcgraph()->Int32Constant(WASM_INSTANCE_OBJECT_OFFSET(StackLimitAddress)),
effect(), control());
Node* limit = SetEffect(graph()->NewNode(
mcgraph()->machine()->Load(MachineType::Pointer()), limit_address,
mcgraph()->IntPtrConstant(0), limit_address, control()));
Node* check = SetEffect(graph()->NewNode(
mcgraph()->machine()->StackPointerGreaterThan(StackCheckKind::kWasm),
limit, effect()));
Diamond stack_check(graph(), mcgraph()->common(), check, BranchHint::kTrue);
stack_check.Chain(control());
if (stack_check_call_operator_ == nullptr) {
// Build and cache the stack check call operator and the constant
// representing the stack check code.
auto call_descriptor = Linkage::GetStubCallDescriptor(
mcgraph()->zone(), // zone
NoContextDescriptor{}, // descriptor
0, // stack parameter count
CallDescriptor::kNoFlags, // flags
Operator::kNoProperties, // properties
StubCallMode::kCallWasmRuntimeStub); // stub call mode
// A direct call to a wasm runtime stub defined in this module.
// Just encode the stub index. This will be patched at relocation.
stack_check_code_node_.set(mcgraph()->RelocatableIntPtrConstant(
wasm::WasmCode::kWasmStackGuard, RelocInfo::WASM_STUB_CALL));
stack_check_call_operator_ = mcgraph()->common()->Call(call_descriptor);
}
Node* call = graph()->NewNode(stack_check_call_operator_.get(),
stack_check_code_node_.get(), effect(),
stack_check.if_false);
SetSourcePosition(call, position);
Node* ephi = stack_check.EffectPhi(effect(), call);
SetEffectControl(ephi, stack_check.merge);
}
void WasmGraphBuilder::PatchInStackCheckIfNeeded() {
if (!needs_stack_check_) return;
Node* start = graph()->start();
// Place a stack check which uses a dummy node as control and effect.
Node* dummy = graph()->NewNode(mcgraph()->common()->Dead());
SetEffectControl(dummy);
// The function-prologue stack check is associated with position 0, which
// is never a position of any instruction in the function.
StackCheck(0);
// In testing, no stack checks were emitted. Nothing to rewire then.
if (effect() == dummy) return;
// Now patch all control uses of {start} to use {control} and all effect uses
// to use {effect} instead. Then rewire the dummy node to use start instead.
NodeProperties::ReplaceUses(start, start, effect(), control());
NodeProperties::ReplaceUses(dummy, nullptr, start, start);
}
Node* WasmGraphBuilder::Binop(wasm::WasmOpcode opcode, Node* left, Node* right,
wasm::WasmCodePosition position) {
const Operator* op;
MachineOperatorBuilder* m = mcgraph()->machine();
switch (opcode) {
case wasm::kExprI32Add:
op = m->Int32Add();
break;
case wasm::kExprI32Sub:
op = m->Int32Sub();
break;
case wasm::kExprI32Mul:
op = m->Int32Mul();
break;
case wasm::kExprI32DivS:
return BuildI32DivS(left, right, position);
case wasm::kExprI32DivU:
return BuildI32DivU(left, right, position);
case wasm::kExprI32RemS:
return BuildI32RemS(left, right, position);
case wasm::kExprI32RemU:
return BuildI32RemU(left, right, position);
case wasm::kExprI32And:
op = m->Word32And();
break;
case wasm::kExprI32Ior:
op = m->Word32Or();
break;
case wasm::kExprI32Xor:
op = m->Word32Xor();
break;
case wasm::kExprI32Shl:
op = m->Word32Shl();
right = MaskShiftCount32(right);
break;
case wasm::kExprI32ShrU:
op = m->Word32Shr();
right = MaskShiftCount32(right);
break;
case wasm::kExprI32ShrS:
op = m->Word32Sar();
right = MaskShiftCount32(right);
break;
case wasm::kExprI32Ror:
op = m->Word32Ror();
right = MaskShiftCount32(right);
break;
case wasm::kExprI32Rol:
if (m->Word32Rol().IsSupported()) {
op = m->Word32Rol().op();
right = MaskShiftCount32(right);
break;
}
return BuildI32Rol(left, right);
case wasm::kExprI32Eq:
op = m->Word32Equal();
break;
case wasm::kExprI32Ne:
return Invert(Binop(wasm::kExprI32Eq, left, right));
case wasm::kExprI32LtS:
op = m->Int32LessThan();
break;
case wasm::kExprI32LeS:
op = m->Int32LessThanOrEqual();
break;
case wasm::kExprI32LtU:
op = m->Uint32LessThan();
break;
case wasm::kExprI32LeU:
op = m->Uint32LessThanOrEqual();
break;
case wasm::kExprI32GtS:
op = m->Int32LessThan();
std::swap(left, right);
break;
case wasm::kExprI32GeS:
op = m->Int32LessThanOrEqual();
std::swap(left, right);
break;
case wasm::kExprI32GtU:
op = m->Uint32LessThan();
std::swap(left, right);
break;
case wasm::kExprI32GeU:
op = m->Uint32LessThanOrEqual();
std::swap(left, right);
break;
case wasm::kExprI64And:
op = m->Word64And();
break;
case wasm::kExprI64Add:
op = m->Int64Add();
break;
case wasm::kExprI64Sub:
op = m->Int64Sub();
break;
case wasm::kExprI64Mul:
op = m->Int64Mul();
break;
case wasm::kExprI64DivS:
return BuildI64DivS(left, right, position);
case wasm::kExprI64DivU:
return BuildI64DivU(left, right, position);
case wasm::kExprI64RemS:
return BuildI64RemS(left, right, position);
case wasm::kExprI64RemU:
return BuildI64RemU(left, right, position);
case wasm::kExprI64Ior:
op = m->Word64Or();
break;
case wasm::kExprI64Xor:
op = m->Word64Xor();
break;
case wasm::kExprI64Shl:
op = m->Word64Shl();
right = MaskShiftCount64(right);
break;
case wasm::kExprI64ShrU:
op = m->Word64Shr();
right = MaskShiftCount64(right);
break;
case wasm::kExprI64ShrS:
op = m->Word64Sar();
right = MaskShiftCount64(right);
break;
case wasm::kExprI64Eq:
op = m->Word64Equal();
break;
case wasm::kExprI64Ne:
return Invert(Binop(wasm::kExprI64Eq, left, right));
case wasm::kExprI64LtS:
op = m->Int64LessThan();
break;
case wasm::kExprI64LeS:
op = m->Int64LessThanOrEqual();
break;
case wasm::kExprI64LtU:
op = m->Uint64LessThan();
break;
case wasm::kExprI64LeU:
op = m->Uint64LessThanOrEqual();
break;
case wasm::kExprI64GtS:
op = m->Int64LessThan();
std::swap(left, right);
break;
case wasm::kExprI64GeS:
op = m->Int64LessThanOrEqual();
std::swap(left, right);
break;
case wasm::kExprI64GtU:
op = m->Uint64LessThan();
std::swap(left, right);
break;
case wasm::kExprI64GeU:
op = m->Uint64LessThanOrEqual();
std::swap(left, right);
break;
case wasm::kExprI64Ror:
op = m->Word64Ror();
right = MaskShiftCount64(right);
break;
case wasm::kExprI64Rol:
if (m->Word64Rol().IsSupported()) {
op = m->Word64Rol().op();
right = MaskShiftCount64(right);
break;
} else if (m->Word32Rol().IsSupported()) {
op = m->Word64Rol().placeholder();
break;
}
return BuildI64Rol(left, right);
case wasm::kExprF32CopySign:
return BuildF32CopySign(left, right);
case wasm::kExprF64CopySign:
return BuildF64CopySign(left, right);
case wasm::kExprF32Add:
op = m->Float32Add();
break;
case wasm::kExprF32Sub:
op = m->Float32Sub();
break;
case wasm::kExprF32Mul:
op = m->Float32Mul();
break;
case wasm::kExprF32Div:
op = m->Float32Div();
break;
case wasm::kExprF32Eq:
op = m->Float32Equal();
break;
case wasm::kExprF32Ne:
return Invert(Binop(wasm::kExprF32Eq, left, right));
case wasm::kExprF32Lt:
op = m->Float32LessThan();
break;
case wasm::kExprF32Ge:
op = m->Float32LessThanOrEqual();
std::swap(left, right);
break;
case wasm::kExprF32Gt:
op = m->Float32LessThan();
std::swap(left, right);
break;
case wasm::kExprF32Le:
op = m->Float32LessThanOrEqual();
break;
case wasm::kExprF64Add:
op = m->Float64Add();
break;
case wasm::kExprF64Sub:
op = m->Float64Sub();
break;
case wasm::kExprF64Mul:
op = m->Float64Mul();
break;
case wasm::kExprF64Div:
op = m->Float64Div();
break;
case wasm::kExprF64Eq:
op = m->Float64Equal();
break;
case wasm::kExprF64Ne:
return Invert(Binop(wasm::kExprF64Eq, left, right));
case wasm::kExprF64Lt:
op = m->Float64LessThan();
break;
case wasm::kExprF64Le:
op = m->Float64LessThanOrEqual();
break;
case wasm::kExprF64Gt:
op = m->Float64LessThan();
std::swap(left, right);
break;
case wasm::kExprF64Ge:
op = m->Float64LessThanOrEqual();
std::swap(left, right);
break;
case wasm::kExprF32Min:
op = m->Float32Min();
break;
case wasm::kExprF64Min:
op = m->Float64Min();
break;
case wasm::kExprF32Max:
op = m->Float32Max();
break;
case wasm::kExprF64Max:
op = m->Float64Max();
break;
case wasm::kExprF64Pow:
return BuildF64Pow(left, right);
case wasm::kExprF64Atan2:
op = m->Float64Atan2();
break;
case wasm::kExprF64Mod:
return BuildF64Mod(left, right);
case wasm::kExprRefEq:
return gasm_->TaggedEqual(left, right);
case wasm::kExprI32AsmjsDivS:
return BuildI32AsmjsDivS(left, right);
case wasm::kExprI32AsmjsDivU:
return BuildI32AsmjsDivU(left, right);
case wasm::kExprI32AsmjsRemS:
return BuildI32AsmjsRemS(left, right);
case wasm::kExprI32AsmjsRemU:
return BuildI32AsmjsRemU(left, right);
case wasm::kExprI32AsmjsStoreMem8:
return BuildAsmjsStoreMem(MachineType::Int8(), left, right);
case wasm::kExprI32AsmjsStoreMem16:
return BuildAsmjsStoreMem(MachineType::Int16(), left, right);
case wasm::kExprI32AsmjsStoreMem:
return BuildAsmjsStoreMem(MachineType::Int32(), left, right);
case wasm::kExprF32AsmjsStoreMem:
return BuildAsmjsStoreMem(MachineType::Float32(), left, right);
case wasm::kExprF64AsmjsStoreMem:
return BuildAsmjsStoreMem(MachineType::Float64(), left, right);
default:
FATAL_UNSUPPORTED_OPCODE(opcode);
}
return graph()->NewNode(op, left, right);
}
Node* WasmGraphBuilder::Unop(wasm::WasmOpcode opcode, Node* input,
wasm::WasmCodePosition position) {
const Operator* op;
MachineOperatorBuilder* m = mcgraph()->machine();
switch (opcode) {
case wasm::kExprI32Eqz:
op = m->Word32Equal();
return graph()->NewNode(op, input, mcgraph()->Int32Constant(0));
case wasm::kExprF32Abs:
op = m->Float32Abs();
break;
case wasm::kExprF32Neg: {
op = m->Float32Neg();
break;
}
case wasm::kExprF32Sqrt:
op = m->Float32Sqrt();
break;
case wasm::kExprF64Abs:
op = m->Float64Abs();
break;
case wasm::kExprF64Neg: {
op = m->Float64Neg();
break;
}
case wasm::kExprF64Sqrt:
op = m->Float64Sqrt();
break;
case wasm::kExprI32SConvertF32:
case wasm::kExprI32UConvertF32:
case wasm::kExprI32SConvertF64:
case wasm::kExprI32UConvertF64:
case wasm::kExprI32SConvertSatF64:
case wasm::kExprI32UConvertSatF64:
case wasm::kExprI32SConvertSatF32:
case wasm::kExprI32UConvertSatF32:
return BuildIntConvertFloat(input, position, opcode);
case wasm::kExprI32AsmjsSConvertF64:
return BuildI32AsmjsSConvertF64(input);
case wasm::kExprI32AsmjsUConvertF64:
return BuildI32AsmjsUConvertF64(input);
case wasm::kExprF32ConvertF64:
op = m->TruncateFloat64ToFloat32();
break;
case wasm::kExprF64SConvertI32:
op = m->ChangeInt32ToFloat64();
break;
case wasm::kExprF64UConvertI32:
op = m->ChangeUint32ToFloat64();
break;
case wasm::kExprF32SConvertI32:
op = m->RoundInt32ToFloat32();
break;
case wasm::kExprF32UConvertI32:
op = m->RoundUint32ToFloat32();
break;
case wasm::kExprI32AsmjsSConvertF32:
return BuildI32AsmjsSConvertF32(input);
case wasm::kExprI32AsmjsUConvertF32:
return BuildI32AsmjsUConvertF32(input);
case wasm::kExprF64ConvertF32:
op = m->ChangeFloat32ToFloat64();
break;
case wasm::kExprF32ReinterpretI32:
op = m->BitcastInt32ToFloat32();
break;
case wasm::kExprI32ReinterpretF32:
op = m->BitcastFloat32ToInt32();
break;
case wasm::kExprI32Clz:
op = m->Word32Clz();
break;
case wasm::kExprI32Ctz: {
if (m->Word32Ctz().IsSupported()) {
op = m->Word32Ctz().op();
break;
} else if (m->Word32ReverseBits().IsSupported()) {
Node* reversed = graph()->NewNode(m->Word32ReverseBits().op(), input);
Node* result = graph()->NewNode(m->Word32Clz(), reversed);
return result;
} else {
return BuildI32Ctz(input);
}
}
case wasm::kExprI32Popcnt: {
if (m->Word32Popcnt().IsSupported()) {
op = m->Word32Popcnt().op();
break;
} else {
return BuildI32Popcnt(input);
}
}
case wasm::kExprF32Floor: {
if (!m->Float32RoundDown().IsSupported()) return BuildF32Floor(input);
op = m->Float32RoundDown().op();
break;
}
case wasm::kExprF32Ceil: {
if (!m->Float32RoundUp().IsSupported()) return BuildF32Ceil(input);
op = m->Float32RoundUp().op();
break;
}
case wasm::kExprF32Trunc: {
if (!m->Float32RoundTruncate().IsSupported()) return BuildF32Trunc(input);
op = m->Float32RoundTruncate().op();
break;
}
case wasm::kExprF32NearestInt: {
if (!m->Float32RoundTiesEven().IsSupported())
return BuildF32NearestInt(input);
op = m->Float32RoundTiesEven().op();
break;
}
case wasm::kExprF64Floor: {
if (!m->Float64RoundDown().IsSupported()) return BuildF64Floor(input);
op = m->Float64RoundDown().op();
break;
}
case wasm::kExprF64Ceil: {
if (!m->Float64RoundUp().IsSupported()) return BuildF64Ceil(input);
op = m->Float64RoundUp().op();
break;
}
case wasm::kExprF64Trunc: {
if (!m->Float64RoundTruncate().IsSupported()) return BuildF64Trunc(input);
op = m->Float64RoundTruncate().op();
break;
}
case wasm::kExprF64NearestInt: {
if (!m->Float64RoundTiesEven().IsSupported())
return BuildF64NearestInt(input);
op = m->Float64RoundTiesEven().op();
break;
}
case wasm::kExprF64Acos: {
return BuildF64Acos(input);
}
case wasm::kExprF64Asin: {
return BuildF64Asin(input);
}
case wasm::kExprF64Atan:
op = m->Float64Atan();
break;
case wasm::kExprF64Cos: {
op = m->Float64Cos();
break;
}
case wasm::kExprF64Sin: {
op = m->Float64Sin();
break;
}
case wasm::kExprF64Tan: {
op = m->Float64Tan();
break;
}
case wasm::kExprF64Exp: {
op = m->Float64Exp();
break;
}
case wasm::kExprF64Log:
op = m->Float64Log();
break;
case wasm::kExprI32ConvertI64:
op = m->TruncateInt64ToInt32();
break;
case wasm::kExprI64SConvertI32:
op = m->ChangeInt32ToInt64();
break;
case wasm::kExprI64UConvertI32:
op = m->ChangeUint32ToUint64();
break;
case wasm::kExprF64ReinterpretI64:
op = m->BitcastInt64ToFloat64();
break;
case wasm::kExprI64ReinterpretF64:
op = m->BitcastFloat64ToInt64();
break;
case wasm::kExprI64Clz:
op = m->Word64Clz();
break;
case wasm::kExprI64Ctz: {
OptionalOperator ctz64 = m->Word64Ctz();
if (ctz64.IsSupported()) {
op = ctz64.op();
break;
} else if (m->Is32() && m->Word32Ctz().IsSupported()) {
op = ctz64.placeholder();
break;
} else if (m->Word64ReverseBits().IsSupported()) {
Node* reversed = graph()->NewNode(m->Word64ReverseBits().op(), input);
Node* result = graph()->NewNode(m->Word64Clz(), reversed);
return result;
} else {
return BuildI64Ctz(input);
}
}
case wasm::kExprI64Popcnt: {
OptionalOperator popcnt64 = m->Word64Popcnt();
if (popcnt64.IsSupported()) {
op = popcnt64.op();
} else if (m->Is32() && m->Word32Popcnt().IsSupported()) {
op = popcnt64.placeholder();
} else {
return BuildI64Popcnt(input);
}
break;
}
case wasm::kExprI64Eqz:
op = m->Word64Equal();
return graph()->NewNode(op, input, mcgraph()->Int64Constant(0));
case wasm::kExprF32SConvertI64:
if (m->Is32()) {
return BuildF32SConvertI64(input);
}
op = m->RoundInt64ToFloat32();
break;
case wasm::kExprF32UConvertI64:
if (m->Is32()) {
return BuildF32UConvertI64(input);
}
op = m->RoundUint64ToFloat32();
break;
case wasm::kExprF64SConvertI64:
if (m->Is32()) {
return BuildF64SConvertI64(input);
}
op = m->RoundInt64ToFloat64();
break;
case wasm::kExprF64UConvertI64:
if (m->Is32()) {
return BuildF64UConvertI64(input);
}
op = m->RoundUint64ToFloat64();
break;
case wasm::kExprI32SExtendI8:
op = m->SignExtendWord8ToInt32();
break;
case wasm::kExprI32SExtendI16:
op = m->SignExtendWord16ToInt32();
break;
case wasm::kExprI64SExtendI8:
op = m->SignExtendWord8ToInt64();
break;
case wasm::kExprI64SExtendI16:
op = m->SignExtendWord16ToInt64();
break;
case wasm::kExprI64SExtendI32:
op = m->SignExtendWord32ToInt64();
break;
case wasm::kExprI64SConvertF32:
case wasm::kExprI64UConvertF32:
case wasm::kExprI64SConvertF64:
case wasm::kExprI64UConvertF64:
case wasm::kExprI64SConvertSatF32:
case wasm::kExprI64UConvertSatF32:
case wasm::kExprI64SConvertSatF64:
case wasm::kExprI64UConvertSatF64:
return mcgraph()->machine()->Is32()
? BuildCcallConvertFloat(input, position, opcode)
: BuildIntConvertFloat(input, position, opcode);
case wasm::kExprRefIsNull:
return graph()->NewNode(m->WordEqual(), input, RefNull());
case wasm::kExprI32AsmjsLoadMem8S:
return BuildAsmjsLoadMem(MachineType::Int8(), input);
case wasm::kExprI32AsmjsLoadMem8U:
return BuildAsmjsLoadMem(MachineType::Uint8(), input);
case wasm::kExprI32AsmjsLoadMem16S:
return BuildAsmjsLoadMem(MachineType::Int16(), input);
case wasm::kExprI32AsmjsLoadMem16U:
return BuildAsmjsLoadMem(MachineType::Uint16(), input);
case wasm::kExprI32AsmjsLoadMem:
return BuildAsmjsLoadMem(MachineType::Int32(), input);
case wasm::kExprF32AsmjsLoadMem:
return BuildAsmjsLoadMem(MachineType::Float32(), input);
case wasm::kExprF64AsmjsLoadMem:
return BuildAsmjsLoadMem(MachineType::Float64(), input);
default:
FATAL_UNSUPPORTED_OPCODE(opcode);
}
return graph()->NewNode(op, input);
}
Node* WasmGraphBuilder::Float32Constant(float value) {
return mcgraph()->Float32Constant(value);
}
Node* WasmGraphBuilder::Float64Constant(double value) {
return mcgraph()->Float64Constant(value);
}
Node* WasmGraphBuilder::Simd128Constant(const uint8_t value[16]) {
has_simd_ = true;
return graph()->NewNode(mcgraph()->machine()->S128Const(value));
}
namespace {
Node* Branch(MachineGraph* mcgraph, Node* cond, Node** true_node,
Node** false_node, Node* control, BranchHint hint) {
DCHECK_NOT_NULL(cond);
DCHECK_NOT_NULL(control);
Node* branch =
mcgraph->graph()->NewNode(mcgraph->common()->Branch(hint), cond, control);
*true_node = mcgraph->graph()->NewNode(mcgraph->common()->IfTrue(), branch);
*false_node = mcgraph->graph()->NewNode(mcgraph->common()->IfFalse(), branch);
return branch;
}
} // namespace
Node* WasmGraphBuilder::BranchNoHint(Node* cond, Node** true_node,
Node** false_node) {
return Branch(mcgraph(), cond, true_node, false_node, control(),
BranchHint::kNone);
}
Node* WasmGraphBuilder::BranchExpectTrue(Node* cond, Node** true_node,
Node** false_node) {
return Branch(mcgraph(), cond, true_node, false_node, control(),
BranchHint::kTrue);
}
Node* WasmGraphBuilder::BranchExpectFalse(Node* cond, Node** true_node,
Node** false_node) {
return Branch(mcgraph(), cond, true_node, false_node, control(),
BranchHint::kFalse);
}
TrapId WasmGraphBuilder::GetTrapIdForTrap(wasm::TrapReason reason) {
// TODO(wasm): "!env_" should not happen when compiling an actual wasm
// function.
if (!env_ || !env_->runtime_exception_support) {
// We use TrapId::kInvalid as a marker to tell the code generator
// to generate a call to a testing c-function instead of a runtime
// stub. This code should only be called from a cctest.
return TrapId::kInvalid;
}
switch (reason) {
#define TRAPREASON_TO_TRAPID(name) \
case wasm::k##name: \
static_assert( \
static_cast<int>(TrapId::k##name) == wasm::WasmCode::kThrowWasm##name, \
"trap id mismatch"); \
return TrapId::k##name;
FOREACH_WASM_TRAPREASON(TRAPREASON_TO_TRAPID)
#undef TRAPREASON_TO_TRAPID
default:
UNREACHABLE();
}
}
Node* WasmGraphBuilder::TrapIfTrue(wasm::TrapReason reason, Node* cond,
wasm::WasmCodePosition position) {
TrapId trap_id = GetTrapIdForTrap(reason);
Node* node = SetControl(graph()->NewNode(mcgraph()->common()->TrapIf(trap_id),
cond, effect(), control()));
SetSourcePosition(node, position);
return node;
}
Node* WasmGraphBuilder::TrapIfFalse(wasm::TrapReason reason, Node* cond,
wasm::WasmCodePosition position) {
TrapId trap_id = GetTrapIdForTrap(reason);
Node* node = SetControl(graph()->NewNode(
mcgraph()->common()->TrapUnless(trap_id), cond, effect(), control()));
SetSourcePosition(node, position);
return node;
}
// Add a check that traps if {node} is equal to {val}.
Node* WasmGraphBuilder::TrapIfEq32(wasm::TrapReason reason, Node* node,
int32_t val,
wasm::WasmCodePosition position) {
Int32Matcher m(node);
if (m.HasResolvedValue() && !m.Is(val)) return graph()->start();
if (val == 0) {
return TrapIfFalse(reason, node, position);
} else {
return TrapIfTrue(reason,
graph()->NewNode(mcgraph()->machine()->Word32Equal(),
node, mcgraph()->Int32Constant(val)),
position);
}
}
// Add a check that traps if {node} is zero.
Node* WasmGraphBuilder::ZeroCheck32(wasm::TrapReason reason, Node* node,
wasm::WasmCodePosition position) {
return TrapIfEq32(reason, node, 0, position);
}
// Add a check that traps if {node} is equal to {val}.
Node* WasmGraphBuilder::TrapIfEq64(wasm::TrapReason reason, Node* node,
int64_t val,
wasm::WasmCodePosition position) {
Int64Matcher m(node);
if (m.HasResolvedValue() && !m.Is(val)) return graph()->start();
return TrapIfTrue(reason,
graph()->NewNode(mcgraph()->machine()->Word64Equal(), node,
mcgraph()->Int64Constant(val)),
position);
}
// Add a check that traps if {node} is zero.
Node* WasmGraphBuilder::ZeroCheck64(wasm::TrapReason reason, Node* node,
wasm::WasmCodePosition position) {
return TrapIfEq64(reason, node, 0, position);
}
Node* WasmGraphBuilder::Switch(unsigned count, Node* key) {
// The instruction selector will use {kArchTableSwitch} for large switches,
// which has limited input count, see {InstructionSelector::EmitTableSwitch}.
DCHECK_LE(count, Instruction::kMaxInputCount - 2); // value_range + 2
DCHECK_LE(count, wasm::kV8MaxWasmFunctionBrTableSize + 1); // plus IfDefault
return graph()->NewNode(mcgraph()->common()->Switch(count), key, control());
}
Node* WasmGraphBuilder::IfValue(int32_t value, Node* sw) {
DCHECK_EQ(IrOpcode::kSwitch, sw->opcode());
return graph()->NewNode(mcgraph()->common()->IfValue(value), sw);
}
Node* WasmGraphBuilder::IfDefault(Node* sw) {
DCHECK_EQ(IrOpcode::kSwitch, sw->opcode());
return graph()->NewNode(mcgraph()->common()->IfDefault(), sw);
}
Node* WasmGraphBuilder::Return(Vector<Node*> vals) {
unsigned count = static_cast<unsigned>(vals.size());
base::SmallVector<Node*, 8> buf(count + 3);
buf[0] = mcgraph()->Int32Constant(0);
if (count > 0) {
memcpy(buf.data() + 1, vals.begin(), sizeof(void*) * count);
}
buf[count + 1] = effect();
buf[count + 2] = control();
Node* ret = graph()->NewNode(mcgraph()->common()->Return(count), count + 3,
buf.data());
MergeControlToEnd(mcgraph(), ret);
return ret;
}
Node* WasmGraphBuilder::Trap(wasm::TrapReason reason,
wasm::WasmCodePosition position) {
TrapIfFalse(reason, Int32Constant(0), position);
Return(Vector<Node*>{});
return nullptr;
}
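// Wasm defines shift counts modulo the operand width. On targets whose shift
// instructions do not already mask the count (Word32ShiftIsSafe() is false),
// mask it explicitly; constant counts are folded directly.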
Node* WasmGraphBuilder::MaskShiftCount32(Node* node) {
static const int32_t kMask32 = 0x1F;
if (!mcgraph()->machine()->Word32ShiftIsSafe()) {
// Shifts by constants are so common we pattern-match them here.
Int32Matcher match(node);
if (match.HasResolvedValue()) {
int32_t masked = (match.ResolvedValue() & kMask32);
if (match.ResolvedValue() != masked)
node = mcgraph()->Int32Constant(masked);
} else {
node = graph()->NewNode(mcgraph()->machine()->Word32And(), node,
mcgraph()->Int32Constant(kMask32));
}
}
return node;
}
Node* WasmGraphBuilder::MaskShiftCount64(Node* node) {
static const int64_t kMask64 = 0x3F;
if (!mcgraph()->machine()->Word32ShiftIsSafe()) {
// Shifts by constants are so common we pattern-match them here.
Int64Matcher match(node);
if (match.HasResolvedValue()) {
int64_t masked = (match.ResolvedValue() & kMask64);
if (match.ResolvedValue() != masked)
node = mcgraph()->Int64Constant(masked);
} else {
node = graph()->NewNode(mcgraph()->machine()->Word64And(), node,
mcgraph()->Int64Constant(kMask64));
}
}
return node;
}
namespace {
bool ReverseBytesSupported(MachineOperatorBuilder* m, size_t size_in_bytes) {
switch (size_in_bytes) {
case 4:
case 16:
return true;
case 8:
return m->Is64();
default:
break;
}
return false;
}
} // namespace
Node* WasmGraphBuilder::BuildChangeEndiannessStore(
Node* node, MachineRepresentation mem_rep, wasm::ValueType wasmtype) {
Node* result;
Node* value = node;
MachineOperatorBuilder* m = mcgraph()->machine();
int valueSizeInBytes = wasmtype.element_size_bytes();
int valueSizeInBits = 8 * valueSizeInBytes;
bool isFloat = false;
switch (wasmtype.kind()) {
case wasm::ValueType::kF64:
value = graph()->NewNode(m->BitcastFloat64ToInt64(), node);
isFloat = true;
V8_FALLTHROUGH;
case wasm::ValueType::kI64:
result = mcgraph()->Int64Constant(0);
break;
case wasm::ValueType::kF32:
value = graph()->NewNode(m->BitcastFloat32ToInt32(), node);
isFloat = true;
V8_FALLTHROUGH;
case wasm::ValueType::kI32:
result = mcgraph()->Int32Constant(0);
break;
case wasm::ValueType::kS128:
DCHECK(ReverseBytesSupported(m, valueSizeInBytes));
break;
default:
UNREACHABLE();
}
if (mem_rep == MachineRepresentation::kWord8) {
// No need to change endianness for byte size, return original node
return node;
}
if (wasmtype == wasm::kWasmI64 && mem_rep < MachineRepresentation::kWord64) {
// In case we store the lower part of a WasmI64 expression, we can truncate
// the upper 32 bits.
value = graph()->NewNode(m->TruncateInt64ToInt32(), value);
valueSizeInBytes = wasm::kWasmI32.element_size_bytes();
valueSizeInBits = 8 * valueSizeInBytes;
if (mem_rep == MachineRepresentation::kWord16) {
value =
graph()->NewNode(m->Word32Shl(), value, mcgraph()->Int32Constant(16));
}
} else if (wasmtype == wasm::kWasmI32 &&
mem_rep == MachineRepresentation::kWord16) {
value =
graph()->NewNode(m->Word32Shl(), value, mcgraph()->Int32Constant(16));
}
int i;
uint32_t shiftCount;
if (ReverseBytesSupported(m, valueSizeInBytes)) {
switch (valueSizeInBytes) {
case 4:
result = graph()->NewNode(m->Word32ReverseBytes(), value);
break;
case 8:
result = graph()->NewNode(m->Word64ReverseBytes(), value);
break;
case 16:
result = graph()->NewNode(m->Simd128ReverseBytes(), value);
break;
default:
UNREACHABLE();
break;
}
} else {
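// Fallback byte swap: each iteration takes the byte at position {i} and the
// byte at the mirrored position, moves them to each other's position via
// opposite shifts plus a mask, and ORs both into {result}.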
for (i = 0, shiftCount = valueSizeInBits - 8; i < valueSizeInBits / 2;
i += 8, shiftCount -= 16) {
Node* shiftLower;
Node* shiftHigher;
Node* lowerByte;
Node* higherByte;
DCHECK_LT(0, shiftCount);
DCHECK_EQ(0, (shiftCount + 8) % 16);
if (valueSizeInBits > 32) {
shiftLower = graph()->NewNode(m->Word64Shl(), value,
mcgraph()->Int64Constant(shiftCount));
shiftHigher = graph()->NewNode(m->Word64Shr(), value,
mcgraph()->Int64Constant(shiftCount));
lowerByte = graph()->NewNode(
m->Word64And(), shiftLower,
mcgraph()->Int64Constant(static_cast<uint64_t>(0xFF)
<< (valueSizeInBits - 8 - i)));
higherByte = graph()->NewNode(
m->Word64And(), shiftHigher,
mcgraph()->Int64Constant(static_cast<uint64_t>(0xFF) << i));
result = graph()->NewNode(m->Word64Or(), result, lowerByte);
result = graph()->NewNode(m->Word64Or(), result, higherByte);
} else {
shiftLower = graph()->NewNode(m->Word32Shl(), value,
mcgraph()->Int32Constant(shiftCount));
shiftHigher = graph()->NewNode(m->Word32Shr(), value,
mcgraph()->Int32Constant(shiftCount));
lowerByte = graph()->NewNode(
m->Word32And(), shiftLower,
mcgraph()->Int32Constant(static_cast<uint32_t>(0xFF)
<< (valueSizeInBits - 8 - i)));
higherByte = graph()->NewNode(
m->Word32And(), shiftHigher,
mcgraph()->Int32Constant(static_cast<uint32_t>(0xFF) << i));
result = graph()->NewNode(m->Word32Or(), result, lowerByte);
result = graph()->NewNode(m->Word32Or(), result, higherByte);
}
}
}
if (isFloat) {
switch (wasmtype.kind()) {
case wasm::ValueType::kF64:
result = graph()->NewNode(m->BitcastInt64ToFloat64(), result);
break;
case wasm::ValueType::kF32:
result = graph()->NewNode(m->BitcastInt32ToFloat32(), result);
break;
default:
UNREACHABLE();
break;
}
}
return result;
}
Node* WasmGraphBuilder::BuildChangeEndiannessLoad(Node* node,
MachineType memtype,
wasm::ValueType wasmtype) {
Node* result;
Node* value = node;
MachineOperatorBuilder* m = mcgraph()->machine();
int valueSizeInBytes = ElementSizeInBytes(memtype.representation());
int valueSizeInBits = 8 * valueSizeInBytes;
bool isFloat = false;
switch (memtype.representation()) {
case MachineRepresentation::kFloat64:
value = graph()->NewNode(m->BitcastFloat64ToInt64(), node);
isFloat = true;
V8_FALLTHROUGH;
case MachineRepresentation::kWord64:
result = mcgraph()->Int64Constant(0);
break;
case MachineRepresentation::kFloat32:
value = graph()->NewNode(m->BitcastFloat32ToInt32(), node);
isFloat = true;
V8_FALLTHROUGH;
case MachineRepresentation::kWord32:
case MachineRepresentation::kWord16:
result = mcgraph()->Int32Constant(0);
break;
case MachineRepresentation::kWord8:
// No need to change endianness for byte size, return the original node.
return node;
case MachineRepresentation::kSimd128:
DCHECK(ReverseBytesSupported(m, valueSizeInBytes));
break;
default:
UNREACHABLE();
}
int i;
uint32_t shiftCount;
if (ReverseBytesSupported(m, valueSizeInBytes < 4 ? 4 : valueSizeInBytes)) {
switch (valueSizeInBytes) {
case 2:
result =
graph()->NewNode(m->Word32ReverseBytes(),
graph()->NewNode(m->Word32Shl(), value,
mcgraph()->Int32Constant(16)));
break;
case 4:
result = graph()->NewNode(m->Word32ReverseBytes(), value);
break;
case 8:
result = graph()->NewNode(m->Word64ReverseBytes(), value);
break;
case 16:
result = graph()->NewNode(m->Simd128ReverseBytes(), value);
break;
default:
UNREACHABLE();
}
} else {
for (i = 0, shiftCount = valueSizeInBits - 8; i < valueSizeInBits / 2;
i += 8, shiftCount -= 16) {
Node* shiftLower;
Node* shiftHigher;
Node* lowerByte;
Node* higherByte;
DCHECK_LT(0, shiftCount);
DCHECK_EQ(0, (shiftCount + 8) % 16);
if (valueSizeInBits > 32) {
shiftLower = graph()->NewNode(m->Word64Shl(), value,
mcgraph()->Int64Constant(shiftCount));
shiftHigher = graph()->NewNode(m->Word64Shr(), value,
mcgraph()->Int64Constant(shiftCount));
lowerByte = graph()->NewNode(
m->Word64And(), shiftLower,
mcgraph()->Int64Constant(static_cast<uint64_t>(0xFF)
<< (valueSizeInBits - 8 - i)));
higherByte = graph()->NewNode(
m->Word64And(), shiftHigher,
mcgraph()->Int64Constant(static_cast<uint64_t>(0xFF) << i));
result = graph()->NewNode(m->Word64Or(), result, lowerByte);
result = graph()->NewNode(m->Word64Or(), result, higherByte);
} else {
shiftLower = graph()->NewNode(m->Word32Shl(), value,
mcgraph()->Int32Constant(shiftCount));
shiftHigher = graph()->NewNode(m->Word32Shr(), value,
mcgraph()->Int32Constant(shiftCount));
lowerByte = graph()->NewNode(
m->Word32And(), shiftLower,
mcgraph()->Int32Constant(static_cast<uint32_t>(0xFF)
<< (valueSizeInBits - 8 - i)));
higherByte = graph()->NewNode(
m->Word32And(), shiftHigher,
mcgraph()->Int32Constant(static_cast<uint32_t>(0xFF) << i));
result = graph()->NewNode(m->Word32Or(), result, lowerByte);
result = graph()->NewNode(m->Word32Or(), result, higherByte);
}
}
}
if (isFloat) {
switch (memtype.representation()) {
case MachineRepresentation::kFloat64:
result = graph()->NewNode(m->BitcastInt64ToFloat64(), result);
break;
case MachineRepresentation::kFloat32:
result = graph()->NewNode(m->BitcastInt32ToFloat32(), result);
break;
default:
UNREACHABLE();
break;
}
}
// We need to sign extend the value
if (memtype.IsSigned()) {
DCHECK(!isFloat);
if (valueSizeInBits < 32) {
Node* shiftBitCount;
// Perform sign extension using the following trick:
// result = (x << (machine_width - type_width)) >> (machine_width - type_width)
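// For example, an 8-bit load of 0xFF sign-extended to 32 bits becomes
// (0xFF << 24) >> 24 (arithmetic shift) == -1.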
if (wasmtype == wasm::kWasmI64) {
shiftBitCount = mcgraph()->Int32Constant(64 - valueSizeInBits);
result = graph()->NewNode(
m->Word64Sar(),
graph()->NewNode(m->Word64Shl(),
graph()->NewNode(m->ChangeInt32ToInt64(), result),
shiftBitCount),
shiftBitCount);
} else if (wasmtype == wasm::kWasmI32) {
shiftBitCount = mcgraph()->Int32Constant(32 - valueSizeInBits);
result = graph()->NewNode(
m->Word32Sar(),
graph()->NewNode(m->Word32Shl(), result, shiftBitCount),
shiftBitCount);
}
}
}
return result;
}
Node* WasmGraphBuilder::BuildF32CopySign(Node* left, Node* right) {
Node* result = Unop(
wasm::kExprF32ReinterpretI32,
Binop(wasm::kExprI32Ior,
Binop(wasm::kExprI32And, Unop(wasm::kExprI32ReinterpretF32, left),
mcgraph()->Int32Constant(0x7FFFFFFF)),
Binop(wasm::kExprI32And, Unop(wasm::kExprI32ReinterpretF32, right),
mcgraph()->Int32Constant(0x80000000))));
return result;
}
Node* WasmGraphBuilder::BuildF64CopySign(Node* left, Node* right) {
if (mcgraph()->machine()->Is64()) {
return gasm_->BitcastInt64ToFloat64(gasm_->Word64Or(
gasm_->Word64And(gasm_->BitcastFloat64ToInt64(left),
gasm_->Int64Constant(0x7FFFFFFFFFFFFFFF)),
gasm_->Word64And(gasm_->BitcastFloat64ToInt64(right),
gasm_->Int64Constant(0x8000000000000000))));
}
DCHECK(mcgraph()->machine()->Is32());
Node* high_word_left = gasm_->Float64ExtractHighWord32(left);
Node* high_word_right = gasm_->Float64ExtractHighWord32(right);
Node* new_high_word = gasm_->Word32Or(
gasm_->Word32And(high_word_left, gasm_->Int32Constant(0x7FFFFFFF)),
gasm_->Word32And(high_word_right, gasm_->Int32Constant(0x80000000)));
return gasm_->Float64InsertHighWord32(left, new_high_word);
}
namespace {
MachineType IntConvertType(wasm::WasmOpcode opcode) {
switch (opcode) {
case wasm::kExprI32SConvertF32:
case wasm::kExprI32SConvertF64:
case wasm::kExprI32SConvertSatF32:
case wasm::kExprI32SConvertSatF64:
return MachineType::Int32();
case wasm::kExprI32UConvertF32:
case wasm::kExprI32UConvertF64:
case wasm::kExprI32UConvertSatF32:
case wasm::kExprI32UConvertSatF64:
return MachineType::Uint32();
case wasm::kExprI64SConvertF32:
case wasm::kExprI64SConvertF64:
case wasm::kExprI64SConvertSatF32:
case wasm::kExprI64SConvertSatF64:
return MachineType::Int64();
case wasm::kExprI64UConvertF32:
case wasm::kExprI64UConvertF64:
case wasm::kExprI64UConvertSatF32:
case wasm::kExprI64UConvertSatF64:
return MachineType::Uint64();
default:
UNREACHABLE();
}
}
MachineType FloatConvertType(wasm::WasmOpcode opcode) {
switch (opcode) {
case wasm::kExprI32SConvertF32:
case wasm::kExprI32UConvertF32:
case wasm::kExprI32SConvertSatF32:
case wasm::kExprI64SConvertF32:
case wasm::kExprI64UConvertF32:
case wasm::kExprI32UConvertSatF32:
case wasm::kExprI64SConvertSatF32:
case wasm::kExprI64UConvertSatF32:
return MachineType::Float32();
case wasm::kExprI32SConvertF64:
case wasm::kExprI32UConvertF64:
case wasm::kExprI64SConvertF64:
case wasm::kExprI64UConvertF64:
case wasm::kExprI32SConvertSatF64:
case wasm::kExprI32UConvertSatF64:
case wasm::kExprI64SConvertSatF64:
case wasm::kExprI64UConvertSatF64:
return MachineType::Float64();
default:
UNREACHABLE();
}
}
const Operator* ConvertOp(WasmGraphBuilder* builder, wasm::WasmOpcode opcode) {
switch (opcode) {
case wasm::kExprI32SConvertF32:
return builder->mcgraph()->machine()->TruncateFloat32ToInt32(
TruncateKind::kSetOverflowToMin);
case wasm::kExprI32SConvertSatF32:
return builder->mcgraph()->machine()->TruncateFloat32ToInt32(
TruncateKind::kArchitectureDefault);
case wasm::kExprI32UConvertF32:
return builder->mcgraph()->machine()->TruncateFloat32ToUint32(
TruncateKind::kSetOverflowToMin);
case wasm::kExprI32UConvertSatF32:
return builder->mcgraph()->machine()->TruncateFloat32ToUint32(
TruncateKind::kArchitectureDefault);
case wasm::kExprI32SConvertF64:
case wasm::kExprI32SConvertSatF64:
return builder->mcgraph()->machine()->ChangeFloat64ToInt32();
case wasm::kExprI32UConvertF64:
case wasm::kExprI32UConvertSatF64:
return builder->mcgraph()->machine()->TruncateFloat64ToUint32();
case wasm::kExprI64SConvertF32:
case wasm::kExprI64SConvertSatF32:
return builder->mcgraph()->machine()->TryTruncateFloat32ToInt64();
case wasm::kExprI64UConvertF32:
case wasm::kExprI64UConvertSatF32:
return builder->mcgraph()->machine()->TryTruncateFloat32ToUint64();
case wasm::kExprI64SConvertF64:
case wasm::kExprI64SConvertSatF64:
return builder->mcgraph()->machine()->TryTruncateFloat64ToInt64();
case wasm::kExprI64UConvertF64:
case wasm::kExprI64UConvertSatF64:
return builder->mcgraph()->machine()->TryTruncateFloat64ToUint64();
default:
UNREACHABLE();
}
}
wasm::WasmOpcode ConvertBackOp(wasm::WasmOpcode opcode) {
switch (opcode) {
case wasm::kExprI32SConvertF32:
case wasm::kExprI32SConvertSatF32:
return wasm::kExprF32SConvertI32;
case wasm::kExprI32UConvertF32:
case wasm::kExprI32UConvertSatF32:
return wasm::kExprF32UConvertI32;
case wasm::kExprI32SConvertF64:
case wasm::kExprI32SConvertSatF64:
return wasm::kExprF64SConvertI32;
case wasm::kExprI32UConvertF64:
case wasm::kExprI32UConvertSatF64:
return wasm::kExprF64UConvertI32;
default:
UNREACHABLE();
}
}
bool IsTrappingConvertOp(wasm::WasmOpcode opcode) {
switch (opcode) {
case wasm::kExprI32SConvertF32:
case wasm::kExprI32UConvertF32:
case wasm::kExprI32SConvertF64:
case wasm::kExprI32UConvertF64:
case wasm::kExprI64SConvertF32:
case wasm::kExprI64UConvertF32:
case wasm::kExprI64SConvertF64:
case wasm::kExprI64UConvertF64:
return true;
case wasm::kExprI32SConvertSatF64:
case wasm::kExprI32UConvertSatF64:
case wasm::kExprI32SConvertSatF32:
case wasm::kExprI32UConvertSatF32:
case wasm::kExprI64SConvertSatF32:
case wasm::kExprI64UConvertSatF32:
case wasm::kExprI64SConvertSatF64:
case wasm::kExprI64UConvertSatF64:
return false;
default:
UNREACHABLE();
}
}
Node* Zero(WasmGraphBuilder* builder, const MachineType& ty) {
switch (ty.representation()) {
case MachineRepresentation::kWord32:
return builder->Int32Constant(0);
case MachineRepresentation::kWord64:
return builder->Int64Constant(0);
case MachineRepresentation::kFloat32:
return builder->Float32Constant(0.0);
case MachineRepresentation::kFloat64:
return builder->Float64Constant(0.0);
default:
UNREACHABLE();
}
}
Node* Min(WasmGraphBuilder* builder, const MachineType& ty) {
switch (ty.semantic()) {
case MachineSemantic::kInt32:
return builder->Int32Constant(std::numeric_limits<int32_t>::min());
case MachineSemantic::kUint32:
return builder->Int32Constant(std::numeric_limits<uint32_t>::min());
case MachineSemantic::kInt64:
return builder->Int64Constant(std::numeric_limits<int64_t>::min());
case MachineSemantic::kUint64:
return builder->Int64Constant(std::numeric_limits<uint64_t>::min());
default:
UNREACHABLE();
}
}
Node* Max(WasmGraphBuilder* builder, const MachineType& ty) {
switch (ty.semantic()) {
case MachineSemantic::kInt32:
return builder->Int32Constant(std::numeric_limits<int32_t>::max());
case MachineSemantic::kUint32:
return builder->Int32Constant(std::numeric_limits<uint32_t>::max());
case MachineSemantic::kInt64:
return builder->Int64Constant(std::numeric_limits<int64_t>::max());
case MachineSemantic::kUint64:
return builder->Int64Constant(std::numeric_limits<uint64_t>::max());
default:
UNREACHABLE();
}
}
wasm::WasmOpcode TruncOp(const MachineType& ty) {
switch (ty.representation()) {
case MachineRepresentation::kFloat32:
return wasm::kExprF32Trunc;
case MachineRepresentation::kFloat64:
return wasm::kExprF64Trunc;
default:
UNREACHABLE();
}
}
wasm::WasmOpcode NeOp(const MachineType& ty) {
switch (ty.representation()) {
case MachineRepresentation::kFloat32:
return wasm::kExprF32Ne;
case MachineRepresentation::kFloat64:
return wasm::kExprF64Ne;
default:
UNREACHABLE();
}
}
wasm::WasmOpcode LtOp(const MachineType& ty) {
switch (ty.representation()) {
case MachineRepresentation::kFloat32:
return wasm::kExprF32Lt;
case MachineRepresentation::kFloat64:
return wasm::kExprF64Lt;
default:
UNREACHABLE();
}
}
Node* ConvertTrapTest(WasmGraphBuilder* builder, wasm::WasmOpcode opcode,
const MachineType& int_ty, const MachineType& float_ty,
Node* trunc, Node* converted_value) {
if (int_ty.representation() == MachineRepresentation::kWord32) {
Node* check = builder->Unop(ConvertBackOp(opcode), converted_value);
return builder->Binop(NeOp(float_ty), trunc, check);
}
return builder->graph()->NewNode(builder->mcgraph()->common()->Projection(1),
trunc, builder->graph()->start());
}
Node* ConvertSaturateTest(WasmGraphBuilder* builder, wasm::WasmOpcode opcode,
const MachineType& int_ty,
const MachineType& float_ty, Node* trunc,
Node* converted_value) {
Node* test = ConvertTrapTest(builder, opcode, int_ty, float_ty, trunc,
converted_value);
if (int_ty.representation() == MachineRepresentation::kWord64) {
test = builder->Binop(wasm::kExprI64Eq, test, builder->Int64Constant(0));
}
return test;
}
} // namespace
Node* WasmGraphBuilder::BuildIntConvertFloat(Node* input,
wasm::WasmCodePosition position,
wasm::WasmOpcode opcode) {
const MachineType int_ty = IntConvertType(opcode);
const MachineType float_ty = FloatConvertType(opcode);
const Operator* conv_op = ConvertOp(this, opcode);
Node* trunc = nullptr;
Node* converted_value = nullptr;
const bool is_int32 =
int_ty.representation() == MachineRepresentation::kWord32;
if (is_int32) {
trunc = Unop(TruncOp(float_ty), input);
converted_value = graph()->NewNode(conv_op, trunc);
} else {
trunc = graph()->NewNode(conv_op, input);
converted_value = graph()->NewNode(mcgraph()->common()->Projection(0),
trunc, graph()->start());
}
if (IsTrappingConvertOp(opcode)) {
Node* test =
ConvertTrapTest(this, opcode, int_ty, float_ty, trunc, converted_value);
if (is_int32) {
TrapIfTrue(wasm::kTrapFloatUnrepresentable, test, position);
} else {
ZeroCheck64(wasm::kTrapFloatUnrepresentable, test, position);
}
return converted_value;
}
if (mcgraph()->machine()->SatConversionIsSafe()) {
return converted_value;
}
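// Saturating fallback: NaN inputs produce 0, negative out-of-range inputs
// produce the integer type's minimum, positive out-of-range inputs produce
// its maximum, and in-range inputs keep the converted value.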
Node* test = ConvertSaturateTest(this, opcode, int_ty, float_ty, trunc,
converted_value);
Diamond tl_d(graph(), mcgraph()->common(), test, BranchHint::kFalse);
tl_d.Chain(control());
Node* nan_test = Binop(NeOp(float_ty), input, input);
Diamond nan_d(graph(), mcgraph()->common(), nan_test, BranchHint::kFalse);
nan_d.Nest(tl_d, true);
Node* neg_test = Binop(LtOp(float_ty), input, Zero(this, float_ty));
Diamond sat_d(graph(), mcgraph()->common(), neg_test, BranchHint::kNone);
sat_d.Nest(nan_d, false);
Node* sat_val =
sat_d.Phi(int_ty.representation(), Min(this, int_ty), Max(this, int_ty));
Node* nan_val =
nan_d.Phi(int_ty.representation(), Zero(this, int_ty), sat_val);
return tl_d.Phi(int_ty.representation(), nan_val, converted_value);
}
Node* WasmGraphBuilder::BuildI32AsmjsSConvertF32(Node* input) {
MachineOperatorBuilder* m = mcgraph()->machine();
// asm.js must use the wacky JS semantics.
input = graph()->NewNode(m->ChangeFloat32ToFloat64(), input);
return graph()->NewNode(m->TruncateFloat64ToWord32(), input);
}
Node* WasmGraphBuilder::BuildI32AsmjsSConvertF64(Node* input) {
MachineOperatorBuilder* m = mcgraph()->machine();
// asm.js must use the wacky JS semantics.
return graph()->NewNode(m->TruncateFloat64ToWord32(), input);
}
Node* WasmGraphBuilder::BuildI32AsmjsUConvertF32(Node* input) {
MachineOperatorBuilder* m = mcgraph()->machine();
// asm.js must use the wacky JS semantics.
input = graph()->NewNode(m->ChangeFloat32ToFloat64(), input);
return graph()->NewNode(m->TruncateFloat64ToWord32(), input);
}
Node* WasmGraphBuilder::BuildI32AsmjsUConvertF64(Node* input) {
MachineOperatorBuilder* m = mcgraph()->machine();
// asm.js must use the wacky JS semantics.
return graph()->NewNode(m->TruncateFloat64ToWord32(), input);
}
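// Helper for the ctz/popcnt fallbacks: spill {input} into a stack slot and
// call a C function that reads it through a pointer and returns the bit count
// as an int32.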
Node* WasmGraphBuilder::BuildBitCountingCall(Node* input, ExternalReference ref,
MachineRepresentation input_type) {
Node* stack_slot_param = StoreArgsInStackSlot({{input_type, input}});
MachineType sig_types[] = {MachineType::Int32(), MachineType::Pointer()};
MachineSignature sig(1, 1, sig_types);
Node* function = graph()->NewNode(mcgraph()->common()->ExternalConstant(ref));
return BuildCCall(&sig, function, stack_slot_param);
}
Node* WasmGraphBuilder::BuildI32Ctz(Node* input) {
return BuildBitCountingCall(input, ExternalReference::wasm_word32_ctz(),
MachineRepresentation::kWord32);
}
Node* WasmGraphBuilder::BuildI64Ctz(Node* input) {
return Unop(wasm::kExprI64UConvertI32,
BuildBitCountingCall(input, ExternalReference::wasm_word64_ctz(),
MachineRepresentation::kWord64));
}
Node* WasmGraphBuilder::BuildI32Popcnt(Node* input) {
return BuildBitCountingCall(input, ExternalReference::wasm_word32_popcnt(),
MachineRepresentation::kWord32);
}
Node* WasmGraphBuilder::BuildI64Popcnt(Node* input) {
return Unop(
wasm::kExprI64UConvertI32,
BuildBitCountingCall(input, ExternalReference::wasm_word64_popcnt(),
MachineRepresentation::kWord64));
}
Node* WasmGraphBuilder::BuildF32Trunc(Node* input) {
MachineType type = MachineType::Float32();
ExternalReference ref = ExternalReference::wasm_f32_trunc();
return BuildCFuncInstruction(ref, type, input);
}
Node* WasmGraphBuilder::BuildF32Floor(Node* input) {
MachineType type = MachineType::Float32();
ExternalReference ref = ExternalReference::wasm_f32_floor();
return BuildCFuncInstruction(ref, type, input);
}
Node* WasmGraphBuilder::BuildF32Ceil(Node* input) {
MachineType type = MachineType::Float32();
ExternalReference ref = ExternalReference::wasm_f32_ceil();
return BuildCFuncInstruction(ref, type, input);
}
Node* WasmGraphBuilder::BuildF32NearestInt(Node* input) {
MachineType type = MachineType::Float32();
ExternalReference ref = ExternalReference::wasm_f32_nearest_int();
return BuildCFuncInstruction(ref, type, input);
}
Node* WasmGraphBuilder::BuildF64Trunc(Node* input) {
MachineType type = MachineType::Float64();
ExternalReference ref = ExternalReference::wasm_f64_trunc();
return BuildCFuncInstruction(ref, type, input);
}
Node* WasmGraphBuilder::BuildF64Floor(Node* input) {
MachineType type = MachineType::Float64();
ExternalReference ref = ExternalReference::wasm_f64_floor();
return BuildCFuncInstruction(ref, type, input);
}
Node* WasmGraphBuilder::BuildF64Ceil(Node* input) {
MachineType type = MachineType::Float64();
ExternalReference ref = ExternalReference::wasm_f64_ceil();
return BuildCFuncInstruction(ref, type, input);
}
Node* WasmGraphBuilder::BuildF64NearestInt(Node* input) {
MachineType type = MachineType::Float64();
ExternalReference ref = ExternalReference::wasm_f64_nearest_int();
return BuildCFuncInstruction(ref, type, input);
}
Node* WasmGraphBuilder::BuildF64Acos(Node* input) {
MachineType type = MachineType::Float64();
ExternalReference ref = ExternalReference::f64_acos_wrapper_function();
return BuildCFuncInstruction(ref, type, input);
}
Node* WasmGraphBuilder::BuildF64Asin(Node* input) {
MachineType type = MachineType::Float64();
ExternalReference ref = ExternalReference::f64_asin_wrapper_function();
return BuildCFuncInstruction(ref, type, input);
}
Node* WasmGraphBuilder::BuildF64Pow(Node* left, Node* right) {
MachineType type = MachineType::Float64();
ExternalReference ref = ExternalReference::wasm_float64_pow();
return BuildCFuncInstruction(ref, type, left, right);
}
Node* WasmGraphBuilder::BuildF64Mod(Node* left, Node* right) {
MachineType type = MachineType::Float64();
ExternalReference ref = ExternalReference::f64_mod_wrapper_function();
return BuildCFuncInstruction(ref, type, left, right);
}
Node* WasmGraphBuilder::BuildCFuncInstruction(ExternalReference ref,
MachineType type, Node* input0,
Node* input1) {
// The instruction is implemented by a call to a C function that computes the
// result. The inputs are passed to the C function through a byte buffer that
// holds one or two values of the given machine type. We reserve this buffer
// as a stack slot, store the parameters into its slots, pass a pointer to the
// buffer to the C function, and read the return value back from the buffer
// after the call.
Node* stack_slot;
if (input1) {
stack_slot = StoreArgsInStackSlot(
{{type.representation(), input0}, {type.representation(), input1}});
} else {
stack_slot = StoreArgsInStackSlot({{type.representation(), input0}});
}
MachineType sig_types[] = {MachineType::Pointer()};
MachineSignature sig(0, 1, sig_types);
Node* function = graph()->NewNode(mcgraph()->common()->ExternalConstant(ref));
BuildCCall(&sig, function, stack_slot);
return SetEffect(graph()->NewNode(mcgraph()->machine()->Load(type),
stack_slot, mcgraph()->Int32Constant(0),
effect(), control()));
}
Node* WasmGraphBuilder::BuildF32SConvertI64(Node* input) {
// TODO(titzer/bradnelson): Check handling of asm.js case.
return BuildIntToFloatConversionInstruction(
input, ExternalReference::wasm_int64_to_float32(),
MachineRepresentation::kWord64, MachineType::Float32());
}
Node* WasmGraphBuilder::BuildF32UConvertI64(Node* input) {
// TODO(titzer/bradnelson): Check handling of asm.js case.
return BuildIntToFloatConversionInstruction(
input, ExternalReference::wasm_uint64_to_float32(),
MachineRepresentation::kWord64, MachineType::Float32());
}
Node* WasmGraphBuilder::BuildF64SConvertI64(Node* input) {
return BuildIntToFloatConversionInstruction(
input, ExternalReference::wasm_int64_to_float64(),
MachineRepresentation::kWord64, MachineType::Float64());
}
Node* WasmGraphBuilder::BuildF64UConvertI64(Node* input) {
return BuildIntToFloatConversionInstruction(
input, ExternalReference::wasm_uint64_to_float64(),
MachineRepresentation::kWord64, MachineType::Float64());
}
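// Generic helper for int-to-float conversions implemented via a C call: the
// integer input is stored into a stack slot large enough for both the
// parameter and the result, the C helper converts it in place, and the float
// result is loaded back from the slot.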
Node* WasmGraphBuilder::BuildIntToFloatConversionInstruction(
Node* input, ExternalReference ref,
MachineRepresentation parameter_representation,
const MachineType result_type) {
int stack_slot_size =
std::max(ElementSizeInBytes(parameter_representation),
ElementSizeInBytes(result_type.representation()));
Node* stack_slot =
graph()->NewNode(mcgraph()->machine()->StackSlot(stack_slot_size));
const Operator* store_op = mcgraph()->machine()->Store(
StoreRepresentation(parameter_representation, kNoWriteBarrier));
SetEffect(graph()->NewNode(store_op, stack_slot, mcgraph()->Int32Constant(0),
input, effect(), control()));
MachineType sig_types[] = {MachineType::Pointer()};
MachineSignature sig(0, 1, sig_types);
Node* function = graph()->NewNode(mcgraph()->common()->ExternalConstant(ref));
BuildCCall(&sig, function, stack_slot);
return SetEffect(graph()->NewNode(mcgraph()->machine()->Load(result_type),
stack_slot, mcgraph()->Int32Constant(0),
effect(), control()));
}
namespace {
ExternalReference convert_ccall_ref(WasmGraphBuilder* builder,
wasm::WasmOpcode opcode) {
switch (opcode) {
case wasm::kExprI64SConvertF32:
case wasm::kExprI64SConvertSatF32:
return ExternalReference::wasm_float32_to_int64();
case wasm::kExprI64UConvertF32:
case wasm::kExprI64UConvertSatF32:
return ExternalReference::wasm_float32_to_uint64();
case wasm::kExprI64SConvertF64:
case wasm::kExprI64SConvertSatF64:
return ExternalReference::wasm_float64_to_int64();
case wasm::kExprI64UConvertF64:
case wasm::kExprI64UConvertSatF64:
return ExternalReference::wasm_float64_to_uint64();
default:
UNREACHABLE();
}
}
} // namespace
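// Float-to-int64 conversions that go through C: the float input is spilled
// into a stack slot, and the C helper writes the converted integer back into
// the same slot, returning a non-zero value on success. Trapping opcodes trap
// if the conversion fails; saturating opcodes instead produce 0 for NaN
// inputs and the integer type's min/max for out-of-range inputs.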
Node* WasmGraphBuilder::BuildCcallConvertFloat(Node* input,
wasm::WasmCodePosition position,
wasm::WasmOpcode opcode) {
const MachineType int_ty = IntConvertType(opcode);
const MachineType float_ty = FloatConvertType(opcode);
ExternalReference call_ref = convert_ccall_ref(this, opcode);
int stack_slot_size = std::max(ElementSizeInBytes(int_ty.representation()),
ElementSizeInBytes(float_ty.representation()));
Node* stack_slot =
graph()->NewNode(mcgraph()->machine()->StackSlot(stack_slot_size));
const Operator* store_op = mcgraph()->machine()->Store(
StoreRepresentation(float_ty.representation(), kNoWriteBarrier));
SetEffect(graph()->NewNode(store_op, stack_slot, Int32Constant(0), input,
effect(), control()));
MachineType sig_types[] = {MachineType::Int32(), MachineType::Pointer()};
MachineSignature sig(1, 1, sig_types);
Node* function =
graph()->NewNode(mcgraph()->common()->ExternalConstant(call_ref));
Node* overflow = BuildCCall(&sig, function, stack_slot);
if (IsTrappingConvertOp(opcode)) {
ZeroCheck32(wasm::kTrapFloatUnrepresentable, overflow, position);
return SetEffect(graph()->NewNode(mcgraph()->machine()->Load(int_ty),
stack_slot, Int32Constant(0), effect(),
control()));
}
Node* test = Binop(wasm::kExprI32Eq, overflow, Int32Constant(0), position);
Diamond tl_d(graph(), mcgraph()->common(), test, BranchHint::kFalse);
tl_d.Chain(control());
Node* nan_test = Binop(NeOp(float_ty), input, input);
Diamond nan_d(graph(), mcgraph()->common(), nan_test, BranchHint::kFalse);
nan_d.Nest(tl_d, true);
Node* neg_test = Binop(LtOp(float_ty), input, Zero(this, float_ty));
Diamond sat_d(graph(), mcgraph()->common(), neg_test, BranchHint::kNone);
sat_d.Nest(nan_d, false);
Node* sat_val =
sat_d.Phi(int_ty.representation(), Min(this, int_ty), Max(this, int_ty));
Node* load =
SetEffect(graph()->NewNode(mcgraph()->machine()->Load(int_ty), stack_slot,
Int32Constant(0), effect(), control()));
Node* nan_val =
nan_d.Phi(int_ty.representation(), Zero(this, int_ty), sat_val);
return tl_d.Phi(int_ty.representation(), nan_val, load);
}
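// memory.grow is implemented as a call to the kWasmMemoryGrow runtime stub.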
Node* WasmGraphBuilder::MemoryGrow(Node* input) {
needs_stack_check_ = true;
WasmMemoryGrowDescriptor interface_descriptor;
auto call_descriptor = Linkage::GetStubCallDescriptor(
mcgraph()->zone(), // zone
interface_descriptor, // descriptor
interface_descriptor.GetStackParameterCount(), // stack parameter count
CallDescriptor::kNoFlags, // flags
Operator::kNoProperties, // properties
StubCallMode::kCallWasmRuntimeStub); // stub call mode
// A direct call to a wasm runtime stub defined in this module.
// Just encode the stub index. This will be patched at relocation.
Node* call_target = mcgraph()->RelocatableIntPtrConstant(
wasm::WasmCode::kWasmMemoryGrow, RelocInfo::WASM_STUB_CALL);
return SetEffectControl(
graph()->NewNode(mcgraph()->common()->Call(call_descriptor), call_target,
input, effect(), control()));
}
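// Creates an exception package via Runtime::kWasmThrowCreate, encodes all
// values into the package's values array (numeric values are split into
// 16-bit halves stored as Smis, references are stored directly), and finally
// calls the WasmThrow stub.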
Node* WasmGraphBuilder::Throw(uint32_t exception_index,
const wasm::WasmException* exception,
const Vector<Node*> values,
wasm::WasmCodePosition position) {
needs_stack_check_ = true;
uint32_t encoded_size = WasmExceptionPackage::GetEncodedSize(exception);
Node* create_parameters[] = {
LoadExceptionTagFromTable(exception_index),
BuildChangeUint31ToSmi(mcgraph()->Uint32Constant(encoded_size))};
Node* except_obj =
BuildCallToRuntime(Runtime::kWasmThrowCreate, create_parameters,
arraysize(create_parameters));
SetSourcePosition(except_obj, position);
Node* values_array = CALL_BUILTIN(
WasmGetOwnProperty, except_obj,
LOAD_FULL_POINTER(BuildLoadIsolateRoot(),
IsolateData::root_slot_offset(
RootIndex::kwasm_exception_values_symbol)),
LOAD_INSTANCE_FIELD(NativeContext, MachineType::TaggedPointer()));
uint32_t index = 0;
const wasm::WasmExceptionSig* sig = exception->sig;
MachineOperatorBuilder* m = mcgraph()->machine();
for (size_t i = 0; i < sig->parameter_count(); ++i) {
Node* value = values[i];
switch (sig->GetParam(i).kind()) {
case wasm::ValueType::kF32:
value = graph()->NewNode(m->BitcastFloat32ToInt32(), value);
V8_FALLTHROUGH;
case wasm::ValueType::kI32:
BuildEncodeException32BitValue(values_array, &index, value);
break;
case wasm::ValueType::kF64:
value = graph()->NewNode(m->BitcastFloat64ToInt64(), value);
V8_FALLTHROUGH;
case wasm::ValueType::kI64: {
Node* upper32 = graph()->NewNode(
m->TruncateInt64ToInt32(),
Binop(wasm::kExprI64ShrU, value, Int64Constant(32)));
BuildEncodeException32BitValue(values_array, &index, upper32);
Node* lower32 = graph()->NewNode(m->TruncateInt64ToInt32(), value);
BuildEncodeException32BitValue(values_array, &index, lower32);
break;
}
case wasm::ValueType::kS128:
BuildEncodeException32BitValue(
values_array, &index,
graph()->NewNode(m->I32x4ExtractLane(0), value));
BuildEncodeException32BitValue(
values_array, &index,
graph()->NewNode(m->I32x4ExtractLane(1), value));
BuildEncodeException32BitValue(
values_array, &index,
graph()->NewNode(m->I32x4ExtractLane(2), value));
BuildEncodeException32BitValue(
values_array, &index,
graph()->NewNode(m->I32x4ExtractLane(3), value));
break;
case wasm::ValueType::kRef:
case wasm::ValueType::kOptRef:
STORE_FIXED_ARRAY_SLOT_ANY(values_array, index, value);
++index;
break;
case wasm::ValueType::kRtt: // TODO(7748): Implement.
case wasm::ValueType::kI8:
case wasm::ValueType::kI16:
case wasm::ValueType::kStmt:
case wasm::ValueType::kBottom:
UNREACHABLE();
}
}
DCHECK_EQ(encoded_size, index);
WasmThrowDescriptor interface_descriptor;
auto call_descriptor = Linkage::GetStubCallDescriptor(
mcgraph()->zone(), interface_descriptor,
interface_descriptor.GetStackParameterCount(), CallDescriptor::kNoFlags,
Operator::kNoProperties, StubCallMode::kCallWasmRuntimeStub);
Node* call_target = mcgraph()->RelocatableIntPtrConstant(
wasm::WasmCode::kWasmThrow, RelocInfo::WASM_STUB_CALL);
Node* call = SetEffectControl(
graph()->NewNode(mcgraph()->common()->Call(call_descriptor), call_target,
except_obj, effect(), control()));
SetSourcePosition(call, position);
return call;
}
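// Encodes a 32-bit value into the values array as two Smis holding the upper
// and lower 16-bit halves, advancing the index past both slots.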
void WasmGraphBuilder::BuildEncodeException32BitValue(Node* values_array,
uint32_t* index,
Node* value) {
MachineOperatorBuilder* machine = mcgraph()->machine();
Node* upper_halfword_as_smi = BuildChangeUint31ToSmi(
graph()->NewNode(machine->Word32Shr(), value, Int32Constant(16)));
STORE_FIXED_ARRAY_SLOT_SMI(values_array, *index, upper_halfword_as_smi);
++(*index);
Node* lower_halfword_as_smi = BuildChangeUint31ToSmi(
graph()->NewNode(machine->Word32And(), value, Int32Constant(0xFFFFu)));
STORE_FIXED_ARRAY_SLOT_SMI(values_array, *index, lower_halfword_as_smi);
++(*index);
}
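// Reassembles a 32-bit value from the two 16-bit Smi halves written by
// BuildEncodeException32BitValue, advancing the index past both slots.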
Node* WasmGraphBuilder::BuildDecodeException32BitValue(Node* values_array,
uint32_t* index) {
MachineOperatorBuilder* machine = mcgraph()->machine();
Node* upper =
BuildChangeSmiToInt32(LOAD_FIXED_ARRAY_SLOT_SMI(values_array, *index));
(*index)++;
upper = graph()->NewNode(machine->Word32Shl(), upper, Int32Constant(16));
Node* lower =
BuildChangeSmiToInt32(LOAD_FIXED_ARRAY_SLOT_SMI(values_array, *index));
(*index)++;
Node* value = graph()->NewNode(machine->Word32Or(), upper, lower);
return value;
}
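// Reassembles a 64-bit value from two consecutively encoded 32-bit values
// (upper half first).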
Node* WasmGraphBuilder::BuildDecodeException64BitValue(Node* values_array,
uint32_t* index) {
Node* upper = Binop(wasm::kExprI64Shl,
Unop(wasm::kExprI64UConvertI32,
BuildDecodeException32BitValue(values_array, index)),
Int64Constant(32));
Node* lower = Unop(wasm::kExprI64UConvertI32,
BuildDecodeException32BitValue(values_array, index));
return Binop(wasm::kExprI64Ior, upper, lower);
}
Node* WasmGraphBuilder::Rethrow(Node* except_obj) {
// TODO(v8:8091): Currently the message of the original exception is not
// preserved when it is rethrown to the console. The pending message will need
// to be saved when the exception is caught and restored here when rethrowing.
WasmThrowDescriptor interface_descriptor;
auto call_descriptor = Linkage::GetStubCallDescriptor(
mcgraph()->zone(), interface_descriptor,
interface_descriptor.GetStackParameterCount(), CallDescriptor::kNoFlags,
Operator::kNoProperties, StubCallMode::kCallWasmRuntimeStub);
Node* call_target = mcgraph()->RelocatableIntPtrConstant(
wasm::WasmCode::kWasmRethrow, RelocInfo::WASM_STUB_CALL);
return gasm_->Call(call_descriptor, call_target, except_obj);
}
Node* WasmGraphBuilder::ExceptionTagEqual(Node* caught_tag,
Node* expected_tag) {
MachineOperatorBuilder* machine = mcgraph()->machine();
return graph()->NewNode(machine->WordEqual(), caught_tag, expected_tag);
}
Node* WasmGraphBuilder::LoadExceptionTagFromTable(uint32_t exception_index) {
Node* exceptions_table =
LOAD_INSTANCE_FIELD(ExceptionsTable, MachineType::TaggedPointer());
Node* tag = LOAD_FIXED_ARRAY_SLOT_PTR(exceptions_table, exception_index);
return tag;
}
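// Traps if the exception object is null, then reads its tag via the
// wasm_exception_tag_symbol property.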
Node* WasmGraphBuilder::GetExceptionTag(Node* except_obj,
wasm::WasmCodePosition position) {
TrapIfTrue(wasm::kTrapBrOnExnNull, gasm_->WordEqual(RefNull(), except_obj),
position);
return CALL_BUILTIN(
WasmGetOwnProperty, except_obj,
LOAD_FULL_POINTER(
BuildLoadIsolateRoot(),
IsolateData::root_slot_offset(RootIndex::kwasm_exception_tag_symbol)),
LOAD_INSTANCE_FIELD(NativeContext, MachineType::TaggedPointer()));
}
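// Decodes all values of a caught exception from its values array (the inverse
// of the encoding done in Throw()) and returns the values array.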
Node* WasmGraphBuilder::GetExceptionValues(Node* except_obj,
const wasm::WasmException* exception,
Vector<Node*> values) {
Node* values_array = CALL_BUILTIN(
WasmGetOwnProperty, except_obj,
LOAD_FULL_POINTER(BuildLoadIsolateRoot(),
IsolateData::root_slot_offset(
RootIndex::kwasm_exception_values_symbol)),
LOAD_INSTANCE_FIELD(NativeContext, MachineType::TaggedPointer()));
uint32_t index = 0;
const wasm::WasmExceptionSig* sig = exception->sig;
DCHECK_EQ(sig->parameter_count(), values.size());
for (size_t i = 0; i < sig->parameter_count(); ++i) {
Node* value;
switch (sig->GetParam(i).kind()) {
case wasm::ValueType::kI32:
value = BuildDecodeException32BitValue(values_array, &index);
break;
case wasm::ValueType::kI64:
value = BuildDecodeException64BitValue(values_array, &index);
break;
case wasm::ValueType::kF32: {
value = Unop(wasm::kExprF32ReinterpretI32,
BuildDecodeException32BitValue(values_array, &index));
break;
}
case wasm::ValueType::kF64: {
value = Unop(wasm::kExprF64ReinterpretI64,
BuildDecodeException64BitValue(values_array, &index));
break;
}
case wasm::ValueType::kS128:
value = graph()->NewNode(
mcgraph()->machine()->I32x4Splat(),
BuildDecodeException32BitValue(values_array, &index));
value = graph()->NewNode(
mcgraph()->machine()->I32x4ReplaceLane(1), value,
BuildDecodeException32BitValue(values_array, &index));
value = graph()->NewNode(
mcgraph()->machine()->I32x4ReplaceLane(2), value,
BuildDecodeException32BitValue(values_array, &index));
value = graph()->NewNode(
mcgraph()->machine()->I32x4ReplaceLane(3), value,
BuildDecodeException32BitValue(values_array, &index));
break;
case wasm::ValueType::kRef:
case wasm::ValueType::kOptRef:
value = LOAD_FIXED_ARRAY_SLOT_ANY(values_array, index);
++index;
break;
case wasm::ValueType::kRtt: // TODO(7748): Implement.
case wasm::ValueType::kI8:
case wasm::ValueType::kI16:
case wasm::ValueType::kStmt:
case wasm::ValueType::kBottom:
UNREACHABLE();
}
values[i] = value;
}
DCHECK_EQ(index, WasmExceptionPackage::GetEncodedSize(exception));
return values_array;
}
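// Signed i32 division traps on division by zero and on the unrepresentable
// case kMinInt / -1.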
Node* WasmGraphBuilder::BuildI32DivS(Node* left, Node* right,
wasm::WasmCodePosition position) {
MachineOperatorBuilder* m = mcgraph()->machine();
ZeroCheck32(wasm::kTrapDivByZero, right, position);
Node* before = control();
Node* denom_is_m1;
Node* denom_is_not_m1;
BranchExpectFalse(
graph()->NewNode(m->Word32Equal(), right, mcgraph()->Int32Constant(-1)),
&denom_is_m1, &denom_is_not_m1);
SetControl(denom_is_m1);
TrapIfEq32(wasm::kTrapDivUnrepresentable, left, kMinInt, position);
if (control() != denom_is_m1) {
SetControl(graph()->NewNode(mcgraph()->common()->Merge(2), denom_is_not_m1,
control()));
} else {
SetControl(before);
}
return graph()->NewNode(m->Int32Div(), left, right, control());
}
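// Signed i32 remainder traps on division by zero; a divisor of -1 produces 0
// directly to avoid the kMinInt % -1 overflow.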
Node* WasmGraphBuilder::BuildI32RemS(Node* left, Node* right,
wasm::WasmCodePosition position) {
MachineOperatorBuilder* m = mcgraph()->machine();
ZeroCheck32(wasm::kTrapRemByZero, right, position);
Diamond d(
graph(), mcgraph()->common(),
graph()->NewNode(m->Word32Equal(), right, mcgraph()->Int32Constant(-1)),
BranchHint::kFalse);
d.Chain(control());
return d.Phi(MachineRepresentation::kWord32, mcgraph()->Int32Constant(0),
graph()->NewNode(m->Int32Mod(), left, right, d.if_false));
}
Node* WasmGraphBuilder::BuildI32DivU(Node* left, Node* right,
wasm::WasmCodePosition position) {
MachineOperatorBuilder* m = mcgraph()->machine();
return graph()->NewNode(m->Uint32Div(), left, right,
ZeroCheck32(wasm::kTrapDivByZero, right, position));
}
Node* WasmGraphBuilder::BuildI32RemU(Node* left, Node* right,
wasm::WasmCodePosition position) {
MachineOperatorBuilder* m = mcgraph()->machine();
return graph()->NewNode(m->Uint32Mod(), left, right,
ZeroCheck32(wasm::kTrapRemByZero, right, position));
}
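// asm.js semantics: division never traps. Dividing by zero yields 0, and a
// divisor of -1 negates the dividend via 0 - left, so kMinInt / -1 wraps back
// to kMinInt instead of trapping.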
Node* WasmGraphBuilder::BuildI32AsmjsDivS(Node* left, Node* right) {
MachineOperatorBuilder* m = mcgraph()->machine();
Int32Matcher mr(right);
if (mr.HasResolvedValue()) {
if (mr.ResolvedValue() == 0) {
return mcgraph()->Int32Constant(0);
} else if (mr.ResolvedValue() == -1) {
// The result is the negation of the left input.
return graph()->NewNode(m->Int32Sub(), mcgraph()->Int32Constant(0), left);
}
return graph()->NewNode(m->Int32Div(), left, right, control());
}
// asm.js semantics return 0 on divide or mod by zero.
if (m->Int32DivIsSafe()) {
// The hardware instruction does the right thing (e.g. arm).
return graph()->NewNode(m->Int32Div(), left, right, graph()->start());
}
// Check denominator for zero.
Diamond z(
graph(), mcgraph()->common(),
graph()->NewNode(m->Word32Equal(), right, mcgraph()->Int32Constant(0)),
BranchHint::kFalse);
// Check denominator for -1 (to avoid the kMinInt / -1 case).
Diamond n(
graph(), mcgraph()->common(),
graph()->NewNode(m->Word32Equal(), right, mcgraph()->Int32Constant(-1)),
BranchHint::kFalse);
Node* div = graph()->NewNode(m->Int32Div(), left, right, z.if_false);
Node* neg =
graph()->NewNode(m->Int32Sub(), mcgraph()->Int32Constant(0), left);
return n.Phi(
MachineRepresentation::kWord32, neg,
z.Phi(MachineRepresentation::kWord32, mcgraph()->Int32Constant(0), div));
}
Node* WasmGraphBuilder::BuildI32AsmjsRemS(Node* left, Node* right) {
CommonOperatorBuilder* c = mcgraph()->common();
MachineOperatorBuilder* m = mcgraph()->machine();
Node* const zero = mcgraph()->Int32Constant(0);
Int32Matcher mr(right);
if (mr.HasResolvedValue()) {
if (mr.ResolvedValue() == 0 || mr.ResolvedValue() == -1) {
return zero;
}
return graph()->NewNode(m->Int32Mod(), left, right, control());
}
// General case for signed integer modulus, with an optimization for (unknown)
// power-of-two right-hand sides.
//
// if 0 < right then
// msk = right - 1
// if right & msk != 0 then
// left % right
// else
// if left < 0 then
// -(-left & msk)
// else
// left & msk
// else
// if right < -1 then
// left % right
// else
// zero
//
// Note: We do not use the Diamond helper class here, because it really hurts
// readability with nested diamonds.
Node* const minus_one = mcgraph()->Int32Constant(-1);
const Operator* const merge_op = c->Merge(2);
const Operator* const phi_op = c->Phi(MachineRepresentation::kWord32, 2);
Node* check0 = graph()->NewNode(m->Int32LessThan(), zero, right);
Node* branch0 =
graph()->NewNode(c->Branch(BranchHint::kTrue), check0, graph()->start());
Node* if_true0 = graph()->NewNode(c->IfTrue(), branch0);
Node* true0;
{
Node* msk = graph()->NewNode(m->Int32Add(), right, minus_one);
Node* check1 = graph()->NewNode(m->Word32And(), right, msk);
Node* branch1 = graph()->NewNode(c->Branch(), check1, if_true0);
Node* if_true1 = graph()->NewNode(c->IfTrue(), branch1);
Node* true1 = graph()->NewNode(m->Int32Mod(), left, right, if_true1);
Node* if_false1 = graph()->NewNode(c->IfFalse(), branch1);
Node* false1;
{
Node* check2 = graph()->NewNode(m->Int32LessThan(), left, zero);
Node* branch2 =
graph()->NewNode(c->Branch(BranchHint::kFalse), check2, if_false1);
Node* if_true2 = graph()->NewNode(c->IfTrue(), branch2);
Node* true2 = graph()->NewNode(
m->Int32Sub(), zero,
graph()->NewNode(m->Word32And(),
graph()->NewNode(m->Int32Sub(), zero, left), msk));
Node* if_false2 = graph()->NewNode(c->IfFalse(), branch2);
Node* false2 = graph()->NewNode(m->Word32And(), left, msk);
if_false1 = graph()->NewNode(merge_op, if_true2, if_false2);
false1 = graph()->NewNode(phi_op, true2, false2, if_false1);
}
if_true0 = graph()->NewNode(merge_op, if_true1, if_false1);
true0 = graph()->NewNode(phi_op, true1, false1, if_true0);
}
Node* if_false0 = graph()->NewNode(c->IfFalse(), branch0);
Node* false0;
{
Node* check1 = graph()->NewNode(m->Int32LessThan(), right, minus_one);
Node* branch1 =
graph()->NewNode(c->Branch(BranchHint::kTrue), check1, if_false0);
Node* if_true1 = graph()->NewNode(c->IfTrue(), branch1);
Node* true1 = graph()->NewNode(m->Int32Mod(), left, right, if_true1);
Node* if_false1 = graph()->NewNode(c->IfFalse(), branch1);
Node* false1 = zero;
if_false0 = graph()->NewNode(merge_op, if_true1, if_false1);
false0 = graph()->NewNode(phi_op, true1, false1, if_false0);
}
Node* merge0 = graph()->NewNode(merge_op, if_true0, if_false0);
return graph()->NewNode(phi_op, true0, false0, merge0);
}
Node* WasmGraphBuilder::BuildI32AsmjsDivU(Node* left, Node* right) {
MachineOperatorBuilder* m = mcgraph()->machine();
// asm.js semantics return 0 on divide or mod by zero.
if (m->Uint32DivIsSafe()) {
// The hardware instruction does the right thing (e.g. arm).
return graph()->NewNode(m->Uint32Div(), left, right, graph()->start());
}
// Explicit check for x / 0.
Diamond z(
graph(), mcgraph()->common(),
graph()->NewNode(m->Word32Equal(), right, mcgraph()->Int32Constant(0)),
BranchHint::kFalse);
return z.Phi(MachineRepresentation::kWord32, mcgraph()->Int32Constant(0),
graph()->NewNode(mcgraph()->machine()->Uint32Div(), left, right,
z.if_false));
}
Node* WasmGraphBuilder::BuildI32AsmjsRemU(Node* left, Node* right) {
MachineOperatorBuilder* m = mcgraph()->machine();
// asm.js semantics return 0 on divide or mod by zero.
// Explicit check for x % 0.
Diamond z(
graph(), mcgraph()->common(),
graph()->NewNode(m->Word32Equal(), right, mcgraph()->Int32Constant(0)),
BranchHint::kFalse);
Node* rem = graph()->NewNode(mcgraph()->machine()->Uint32Mod(), left, right,
z.if_false);
return z.Phi(MachineRepresentation::kWord32, mcgraph()->Int32Constant(0),
rem);
}
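// On 32-bit platforms, signed i64 division goes through a C helper (see
// BuildDiv64Call); on 64-bit platforms it traps on division by zero and on
// the unrepresentable case kMinInt64 / -1, like the i32 variant.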
Node* WasmGraphBuilder::BuildI64DivS(Node* left, Node* right,
wasm::WasmCodePosition position) {
if (mcgraph()->machine()->Is32()) {
return BuildDiv64Call(left, right, ExternalReference::wasm_int64_div(),
MachineType::Int64(), wasm::kTrapDivByZero, position);
}
ZeroCheck64(wasm::kTrapDivByZero, right, position);
Node* before = control();
Node* denom_is_m1;
Node* denom_is_not_m1;
BranchExpectFalse(graph()->NewNode(mcgraph()->machine()->Word64Equal(), right,
mcgraph()->Int64Constant(-1)),
&denom_is_m1, &denom_is_not_m1);
SetControl(denom_is_m1);
TrapIfEq64(wasm::kTrapDivUnrepresentable, left,
std::numeric_limits<int64_t>::min(), position);
if (control() != denom_is_m1) {
SetControl(graph()->NewNode(mcgraph()->common()->Merge(2), denom_is_not_m1,
control()));
} else {
SetControl(before);
}
return graph()->NewNode(mcgraph()->machine()->Int64Div(), left, right,
control());
}
Node* WasmGraphBuilder::BuildI64RemS(Node* left, Node* right,
wasm::WasmCodePosition position) {
if (mcgraph()->machine()->Is32()) {
return BuildDiv64Call(left, right, ExternalReference::wasm_int64_mod(),
MachineType::Int64(), wasm::kTrapRemByZero, position);
}
ZeroCheck64(wasm::kTrapRemByZero, right, position);
Diamond d(mcgraph()->graph(), mcgraph()->common(),
graph()->NewNode(mcgraph()->machine()->Word64Equal(), right,
mcgraph()->Int64Constant(-1)));
d.Chain(control());
Node* rem = graph()->NewNode(mcgraph()->machine()->Int64Mod(), left, right,
d.if_false);
return d.Phi(MachineRepresentation::kWord64, mcgraph()->Int64Constant(0),
rem);
}
Node* WasmGraphBuilder::BuildI64DivU(Node* left, Node* right,
wasm::WasmCodePosition position) {
if (mcgraph()->machine()->Is32()) {
return BuildDiv64Call(left, right, ExternalReference::wasm_uint64_div(),
MachineType::Int64(), wasm::kTrapDivByZero, position);
}
return graph()->NewNode(mcgraph()->machine()->Uint64Div(), left, right,
ZeroCheck64(wasm::kTrapDivByZero, right, position));
}
Node* WasmGraphBuilder::BuildI64RemU(Node* left, Node* right,
wasm::WasmCodePosition position) {
if (mcgraph()->machine()->Is32()) {
return BuildDiv64Call(left, right, ExternalReference::wasm_uint64_mod(),
MachineType::Int64(), wasm::kTrapRemByZero, position);
}
return graph()->NewNode(mcgraph()->machine()->Uint64Mod(), left, right,
ZeroCheck64(wasm::kTrapRemByZero, right, position));
}
Node* WasmGraphBuilder::GetBuiltinPointerTarget(int builtin_id) {
static_assert(std::is_same<Smi, BuiltinPtr>(), "BuiltinPtr must be Smi");
return graph()->NewNode(mcgraph()->common()->NumberConstant(builtin_id));
}
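// 64-bit division/modulus via a C helper: both operands are spilled into a
// stack slot, and the helper computes the result in place, returning 0 for
// division by zero and -1 for an unrepresentable result. We trap on both
// return values and otherwise load the result from the slot.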
Node* WasmGraphBuilder::BuildDiv64Call(Node* left, Node* right,
ExternalReference ref,
MachineType result_type,
wasm::TrapReason trap_zero,
wasm::WasmCodePosition position) {
Node* stack_slot =
StoreArgsInStackSlot({{MachineRepresentation::kWord64, left},
{MachineRepresentation::kWord64, right}});
MachineType sig_types[] = {MachineType::Int32(), MachineType::Pointer()};
MachineSignature sig(1, 1, sig_types);
Node* function = graph()->NewNode(mcgraph()->common()->ExternalConstant(ref));
Node* call = BuildCCall(&sig, function, stack_slot);
ZeroCheck32(trap_zero, call, position);
TrapIfEq32(wasm::kTrapDivUnrepresentable, call, -1, position);
return SetEffect(graph()->NewNode(mcgraph()->machine()->Load(result_type),
stack_slot, mcgraph()->Int32Constant(0),
effect(), control()));
}
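// Builds a call to a C function using the simplified C call descriptor; the
// signature may have at most one return value.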
template <typename... Args>
Node* WasmGraphBuilder::BuildCCall(MachineSignature* sig, Node* function,
Args... args) {
DCHECK_LE(sig->return_count(), 1);
DCHECK_EQ(sizeof...(args), sig->parameter_count());
Node* const call_args[] = {function, args..., effect(), control()};
auto call_descriptor =
Linkage::GetSimplifiedCDescriptor(mcgraph()->zone(), sig);
const Operator* op = mcgraph()->common()->Call(call_descriptor);
return SetEffect(graph()->NewNode(op, arraysize(call_args), call_args));
}
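// Assembles the inputs for a wasm call node in the expected order:
// {code, instance, parameters..., effect, control}. If no instance node is
// given, the builder's own instance node is used.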
Node* WasmGraphBuilder::BuildCallNode(const wasm::FunctionSig* sig,
Vector<Node*> args,
wasm::WasmCodePosition position,
Node* instance_node, const Operator* op) {
if (instance_node == nullptr) {
DCHECK_NOT_NULL(instance_node_);
instance_node = instance_node_.get();
}
needs_stack_check_ = true;
const size_t params = sig->parameter_count();
const size_t extra = 3; // instance_node, effect, and control.
const size_t count = 1 + params + extra;
// Allocate a buffer with enough space for the extra inputs.
base::SmallVector<Node*, 16 + extra> inputs(count);
DCHECK_EQ(1 + params, args.size());
// Make room for the instance_node parameter at index 1, just after code.
inputs[0] = args[0]; // code
inputs[1] = instance_node;
if (params > 0) memcpy(&inputs[2], &args[1], params * sizeof(Node*));
// Add effect and control inputs.
inputs[params + 2] = effect();
inputs[params + 3] = control();
Node* call = graph()->NewNode(op, static_cast<int>(count), inputs.begin());
// Return calls have no effect output. Other calls are the new effect node.
if (op->EffectOutputCount() > 0) SetEffect(call);
DCHECK(position == wasm<