/// \return a MachineInstrBuilder for the newly created instruction.
MachineInstrBuilder buildIntrinsic(Intrinsic::ID ID, ArrayRef<unsigned> Res,
bool HasSideEffects);
+ /// Build and insert a G_INTRINSIC or G_INTRINSIC_W_SIDE_EFFECTS
+ /// instruction (selected by \p HasSideEffects) whose defs are taken from
+ /// \p Res. Use operands are appended by the caller on the returned
+ /// MachineInstrBuilder.
+ MachineInstrBuilder buildIntrinsic(Intrinsic::ID ID, ArrayRef<DstOp> Res,
+ bool HasSideEffects);
/// Build and insert \p Res = G_FPTRUNC \p Op
///
return MIB;
}
+// Build either G_INTRINSIC (no side effects) or
+// G_INTRINSIC_W_SIDE_EFFECTS: one def per entry of \p Results, then the
+// intrinsic ID operand. Callers append the intrinsic's use operands on
+// the returned MIB (see the .addUse(...) calls in the unit test).
+MachineInstrBuilder MachineIRBuilder::buildIntrinsic(Intrinsic::ID ID,
+ ArrayRef<DstOp> Results,
+ bool HasSideEffects) {
+ auto MIB =
+ buildInstr(HasSideEffects ? TargetOpcode::G_INTRINSIC_W_SIDE_EFFECTS
+ : TargetOpcode::G_INTRINSIC);
+ // Defs must precede the intrinsic ID operand, which in turn precedes
+ // any uses added later by the caller.
+ for (DstOp Result : Results)
+ Result.addDefToMIB(*getMRI(), MIB);
+ MIB.addIntrinsicID(ID);
+ return MIB;
+}
+
MachineInstrBuilder MachineIRBuilder::buildTrunc(const DstOp &Res,
const SrcOp &Op) {
return buildInstr(TargetOpcode::G_TRUNC, Res, Op);
EXPECT_TRUE(CheckMachineFunction(*MF, CheckStr)) << *MF;
}
+
+// Exercises both buildIntrinsic overloads — the DstOp (LLT) form and the
+// raw-virtual-register ArrayRef form — and checks the printed MIR for the
+// two resulting G_INTRINSIC instructions.
+TEST_F(GISelMITest, BuildIntrinsic) {
+ if (!TM)
+ return;
+
+ LLT S64 = LLT::scalar(64);
+ SmallVector<unsigned, 4> Copies;
+ collectCopies(Copies, MF);
+
+ // Make sure DstOp version works. sqrt is just a placeholder intrinsic.
+ B.buildIntrinsic(Intrinsic::sqrt, {S64}, false)
+ .addUse(Copies[0]);
+
+ // Make sure register version works
+ SmallVector<unsigned, 1> Results;
+ Results.push_back(MRI->createGenericVirtualRegister(S64));
+ B.buildIntrinsic(Intrinsic::sqrt, Results, false)
+ .addUse(Copies[1]);
+
+ // FileCheck pattern: both overloads should emit identical-shaped
+ // G_INTRINSIC instructions, differing only in the source copy used.
+ auto CheckStr = R"(
+ ; CHECK: [[COPY0:%[0-9]+]]:_(s64) = COPY $x0
+ ; CHECK: [[COPY1:%[0-9]+]]:_(s64) = COPY $x1
+ ; CHECK: [[SQRT0:%[0-9]+]]:_(s64) = G_INTRINSIC intrinsic(@llvm.sqrt), [[COPY0]]:_
+ ; CHECK: [[SQRT1:%[0-9]+]]:_(s64) = G_INTRINSIC intrinsic(@llvm.sqrt), [[COPY1]]:_
+ )";
+
+ EXPECT_TRUE(CheckMachineFunction(*MF, CheckStr)) << *MF;
+}