/// - MMOIdx - MMO index
/// - Size - The size in bytes of the memory access
GIM_CheckMemorySizeEqualTo,
+
+ /// Check the address space of the memory access for the given machine memory
+ /// operand.
+ /// - InsnID - Instruction ID
+ /// - MMOIdx - MMO index
+ /// - NumAddrSpace - Number of valid address spaces
+ /// - AddrSpaceN - An allowed address space of the memory access
+ /// - AddrSpaceN+1 ...
+ GIM_CheckMemoryAddressSpace,
+
/// Check the size of the memory access for the given machine memory operand
/// against the size of an operand.
/// - InsnID - Instruction ID
return false;
break;
}
+ case GIM_CheckMemoryAddressSpace: {
+ int64_t InsnID = MatchTable[CurrentIdx++];
+ int64_t MMOIdx = MatchTable[CurrentIdx++];
+ // This accepts a list of possible address spaces.
+ const int NumAddrSpace = MatchTable[CurrentIdx++];
+
+ // Reject if the instruction has fewer memory operands than the requested
+ // MMO index; handleReject() repositions CurrentIdx on a recoverable fail.
+ if (State.MIs[InsnID]->getNumMemOperands() <= MMOIdx) {
+ if (handleReject() == RejectAndGiveUp)
+ return false;
+ break;
+ }
+
+ // Need to still jump to the end of the list of address spaces if we find
+ // a match earlier.
+ const uint64_t LastIdx = CurrentIdx + NumAddrSpace;
+
+ const MachineMemOperand *MMO
+ = *(State.MIs[InsnID]->memoperands_begin() + MMOIdx);
+ const unsigned MMOAddrSpace = MMO->getAddrSpace();
+
+ // Scan the encoded list for an entry equal to the MMO's address space.
+ bool Success = false;
+ for (int I = 0; I != NumAddrSpace; ++I) {
+ unsigned AddrSpace = MatchTable[CurrentIdx++];
+ DEBUG_WITH_TYPE(
+ TgtInstructionSelector::getName(),
+ dbgs() << "addrspace(" << MMOAddrSpace << ") vs "
+ << AddrSpace << '\n');
+
+ if (AddrSpace == MMOAddrSpace) {
+ Success = true;
+ break;
+ }
+ }
+
+ // Skip any entries the loop did not consume so CurrentIdx always lands on
+ // the next opcode in the match table.
+ CurrentIdx = LastIdx;
+ if (!Success && handleReject() == RejectAndGiveUp)
+ return false;
+ break;
+ }
case GIM_CheckMemorySizeEqualTo: {
int64_t InsnID = MatchTable[CurrentIdx++];
int64_t MMOIdx = MatchTable[CurrentIdx++];
// cast<StoreSDNode>(N)->isTruncatingStore();
bit IsTruncStore = ?;
+ // cast<MemSDNode>(N)->getAddressSpace() ==
+ // If this is empty, accept any address space.
+ list<int> AddressSpaces = ?;
+
// cast<AtomicSDNode>(N)->getOrdering() == AtomicOrdering::Monotonic
bit IsAtomicOrderingMonotonic = ?;
// cast<AtomicSDNode>(N)->getOrdering() == AtomicOrdering::Acquire
// cast<LoadSDNode>(N)->getMemoryVT().getScalarType() == MVT::<VT>;
// cast<StoreSDNode>(N)->getMemoryVT().getScalarType() == MVT::<VT>;
ValueType ScalarMemoryVT = ?;
+
+ // TODO: Add alignment
}
// PatFrag - A version of PatFrags matching only a single fragment.
--- /dev/null
+// RUN: llvm-tblgen -gen-dag-isel -I %p/../../include %s 2>&1 | FileCheck -check-prefix=SDAG %s
+// RUN: llvm-tblgen -gen-global-isel -optimize-match-table=false -I %p/../../include %s -o - < %s | FileCheck -check-prefix=GISEL %s
+
+// Check that a PatFrag with an AddressSpaces list emits an address space
+// predicate in the SelectionDAG predicate function and a
+// GIM_CheckMemoryAddressSpace entry in the GlobalISel match table, for both
+// a single-element and a multi-element address space list.
+
+include "llvm/Target/Target.td"
+
+def TestTargetInstrInfo : InstrInfo;
+
+
+def TestTarget : Target {
+ let InstructionSet = TestTargetInstrInfo;
+}
+
+def R0 : Register<"r0"> { let Namespace = "MyTarget"; }
+def GPR32 : RegisterClass<"MyTarget", [i32], 32, (add R0)>;
+
+
+// With one address space
+def pat_frag_a : PatFrag <(ops node:$ptr), (load node:$ptr), [{}]> {
+ let AddressSpaces = [ 999 ];
+ let IsLoad = 1; // FIXME: Can this be inferred?
+ let MemoryVT = i32;
+}
+
+// With multiple address spaces
+def pat_frag_b : PatFrag <(ops node:$ptr), (load node:$ptr), [{}]> {
+ let AddressSpaces = [ 123, 455 ];
+ let IsLoad = 1; // FIXME: Can this be inferred?
+ let MemoryVT = i32;
+}
+
+def inst_a : Instruction {
+ let OutOperandList = (outs GPR32:$dst);
+ let InOperandList = (ins GPR32:$src);
+}
+
+def inst_b : Instruction {
+ let OutOperandList = (outs GPR32:$dst);
+ let InOperandList = (ins GPR32:$src);
+}
+
+// SDAG: case 2: {
+// SDAG: // Predicate_pat_frag_a
+// SDAG-NEXT: SDNode *N = Node;
+// SDAG-NEXT: (void)N;
+// SDAG-NEXT: unsigned AddrSpace = cast<MemSDNode>(N)->getAddressSpace();
+
+// SDAG-NEXT: if (AddrSpace != 999)
+// SDAG-NEXT: return false;
+// SDAG-NEXT: if (cast<MemSDNode>(N)->getMemoryVT() != MVT::i32) return false;
+// SDAG-NEXT: return true;
+
+// GISEL: GIM_Try, /*On fail goto*//*Label 0*/ 47, // Rule ID 0 //
+// GISEL-NEXT: GIM_CheckNumOperands, /*MI*/0, /*Expected*/2,
+// GISEL-NEXT: GIM_CheckOpcode, /*MI*/0, TargetOpcode::G_LOAD,
+// GISEL-NEXT: GIM_CheckMemorySizeEqualToLLT, /*MI*/0, /*MMO*/0, /*OpIdx*/0,
+// GISEL-NEXT: GIM_CheckMemoryAddressSpace, /*MI*/0, /*MMO*/0, /*NumAddrSpace*/1, /*AddrSpace*/999,
+// GISEL-NEXT: GIM_CheckMemorySizeEqualTo, /*MI*/0, /*MMO*/0, /*Size*/4,
+// GISEL-NEXT: GIM_CheckAtomicOrdering, /*MI*/0, /*Order*/(int64_t)AtomicOrdering::NotAtomic,
+def : Pat <
+ (pat_frag_a GPR32:$src),
+ (inst_a GPR32:$src)
+>;
+
+// SDAG: case 3: {
+// SDAG-NEXT: // Predicate_pat_frag_b
+// SDAG-NEXT: SDNode *N = Node;
+// SDAG-NEXT: (void)N;
+// SDAG-NEXT: unsigned AddrSpace = cast<MemSDNode>(N)->getAddressSpace();
+// SDAG-NEXT: if (AddrSpace != 123 && AddrSpace != 455)
+// SDAG-NEXT: return false;
+// SDAG-NEXT: if (cast<MemSDNode>(N)->getMemoryVT() != MVT::i32) return false;
+// SDAG-NEXT: return true;
+
+
+// GISEL: GIM_Try, /*On fail goto*//*Label 1*/ 95, // Rule ID 1 //
+// GISEL-NEXT: GIM_CheckNumOperands, /*MI*/0, /*Expected*/2,
+// GISEL-NEXT: GIM_CheckOpcode, /*MI*/0, TargetOpcode::G_LOAD,
+// GISEL-NEXT: GIM_CheckMemorySizeEqualToLLT, /*MI*/0, /*MMO*/0, /*OpIdx*/0,
+// GISEL-NEXT: GIM_CheckMemoryAddressSpace, /*MI*/0, /*MMO*/0, /*NumAddrSpace*/2, /*AddrSpace*/123, /*AddrSpace*/455,
+// GISEL-NEXT: GIM_CheckMemorySizeEqualTo, /*MI*/0, /*MMO*/0, /*Size*/4,
+// GISEL-NEXT: GIM_CheckAtomicOrdering, /*MI*/0, /*Order*/(int64_t)AtomicOrdering::NotAtomic,
+def : Pat <
+ (pat_frag_b GPR32:$src),
+ (inst_b GPR32:$src)
+>;
}
if (isLoad() || isStore() || isAtomic()) {
- StringRef SDNodeName =
- isLoad() ? "LoadSDNode" : isStore() ? "StoreSDNode" : "AtomicSDNode";
+ if (ListInit *AddressSpaces = getAddressSpaces()) {
+ Code += "unsigned AddrSpace = cast<MemSDNode>(N)->getAddressSpace();\n"
+ " if (";
+
+ bool First = true;
+ for (Init *Val : AddressSpaces->getValues()) {
+ if (First)
+ First = false;
+ else
+ Code += " && ";
+
+ IntInit *IntVal = dyn_cast<IntInit>(Val);
+ if (!IntVal) {
+ PrintFatalError(getOrigPatFragRecord()->getRecord()->getLoc(),
+ "AddressSpaces element must be integer");
+ }
+
+ Code += "AddrSpace != " + utostr(IntVal->getValue());
+ }
+
+ Code += ")\nreturn false;\n";
+ }
Record *MemoryVT = getMemoryVT();
if (MemoryVT)
- Code += ("if (cast<" + SDNodeName + ">(N)->getMemoryVT() != MVT::" +
+ Code += ("if (cast<MemSDNode>(N)->getMemoryVT() != MVT::" +
MemoryVT->getName() + ") return false;\n")
.str();
}
return nullptr;
return R->getValueAsDef("MemoryVT");
}
+
+// Return the AddressSpaces list from the originating PatFrag record, or
+// null when the field was left unset (meaning any address space is allowed).
+ListInit *TreePredicateFn::getAddressSpaces() const {
+ Record *R = getOrigPatFragRecord()->getRecord();
+ if (R->isValueUnset("AddressSpaces"))
+ return nullptr;
+ return R->getValueAsListInit("AddressSpaces");
+}
+
Record *TreePredicateFn::getScalarMemoryVT() const {
Record *R = getOrigPatFragRecord()->getRecord();
if (R->isValueUnset("ScalarMemoryVT"))
/// ValueType record for the memory VT.
Record *getScalarMemoryVT() const;
+ ListInit *getAddressSpaces() const;
+
// If true, indicates that GlobalISel-based C++ code was supplied.
bool hasGISelPredicateCode() const;
std::string getGISelPredicateCode() const;
if (Record *VT = P.getScalarMemoryVT())
Explanation += (" ScalarVT(MemVT)=" + VT->getName()).str();
+ if (ListInit *AddrSpaces = P.getAddressSpaces()) {
+ raw_string_ostream OS(Explanation);
+ OS << " AddressSpaces=[";
+
+ StringRef AddrSpaceSeparator;
+ for (Init *Val : AddrSpaces->getValues()) {
+ IntInit *IntVal = dyn_cast<IntInit>(Val);
+ if (!IntVal)
+ continue;
+
+ OS << AddrSpaceSeparator << IntVal->getValue();
+ AddrSpaceSeparator = ", ";
+ }
+
+ OS << ']';
+ }
+
if (P.isAtomicOrderingMonotonic())
Explanation += " monotonic";
if (P.isAtomicOrderingAcquire())
continue;
}
+ if (Predicate.isLoad() || Predicate.isStore() || Predicate.isAtomic()) {
+ const ListInit *AddrSpaces = Predicate.getAddressSpaces();
+ if (AddrSpaces && !AddrSpaces->empty())
+ continue;
+ }
+
if (Predicate.isAtomic() && Predicate.getMemoryVT())
continue;
IPM_AtomicOrderingMMO,
IPM_MemoryLLTSize,
IPM_MemoryVsLLTSize,
+ IPM_MemoryAddressSpace,
IPM_GenericPredicate,
OPM_SameOperand,
OPM_ComplexPattern,
}
};
+/// Generates code to check that the address space of an MMO is one of a
+/// given list of allowed address spaces.
+class MemoryAddressSpacePredicateMatcher : public InstructionPredicateMatcher {
+protected:
+ unsigned MMOIdx;
+ SmallVector<unsigned, 4> AddrSpaces;
+
+public:
+ MemoryAddressSpacePredicateMatcher(unsigned InsnVarID, unsigned MMOIdx,
+ ArrayRef<unsigned> AddrSpaces)
+ : InstructionPredicateMatcher(IPM_MemoryAddressSpace, InsnVarID),
+ MMOIdx(MMOIdx), AddrSpaces(AddrSpaces.begin(), AddrSpaces.end()) {}
+
+ static bool classof(const PredicateMatcher *P) {
+ return P->getKind() == IPM_MemoryAddressSpace;
+ }
+ // Two matchers are identical when they test the same MMO against the same
+ // address space list.
+ bool isIdentical(const PredicateMatcher &B) const override {
+ if (!InstructionPredicateMatcher::isIdentical(B))
+ return false;
+ auto *Other = cast<MemoryAddressSpacePredicateMatcher>(&B);
+ return MMOIdx == Other->MMOIdx && AddrSpaces == Other->AddrSpaces;
+ }
+
+ void emitPredicateOpcodes(MatchTable &Table,
+ RuleMatcher &Rule) const override {
+ Table << MatchTable::Opcode("GIM_CheckMemoryAddressSpace")
+ << MatchTable::Comment("MI") << MatchTable::IntValue(InsnVarID)
+ << MatchTable::Comment("MMO") << MatchTable::IntValue(MMOIdx)
+ // Encode number of address spaces to expect.
+ << MatchTable::Comment("NumAddrSpace")
+ << MatchTable::IntValue(AddrSpaces.size());
+ for (unsigned AS : AddrSpaces)
+ Table << MatchTable::Comment("AddrSpace") << MatchTable::IntValue(AS);
+
+ Table << MatchTable::LineBreak;
+ }
+};
+
/// Generates code to check that the size of an MMO is less-than, equal-to, or
/// greater than a given LLT.
class MemoryVsLLTSizePredicateMatcher : public InstructionPredicateMatcher {
continue;
}
- // G_LOAD is used for both non-extending and any-extending loads.
+ // An address space check is needed in all contexts if there is one.
+ if (Predicate.isLoad() || Predicate.isStore() || Predicate.isAtomic()) {
+ if (const ListInit *AddrSpaces = Predicate.getAddressSpaces()) {
+ SmallVector<unsigned, 4> ParsedAddrSpaces;
+
+ for (Init *Val : AddrSpaces->getValues()) {
+ IntInit *IntVal = dyn_cast<IntInit>(Val);
+ if (!IntVal)
+ return failedImport("Address space is not an integer");
+ ParsedAddrSpaces.push_back(IntVal->getValue());
+ }
+
+ if (!ParsedAddrSpaces.empty()) {
+ InsnMatcher.addPredicate<MemoryAddressSpacePredicateMatcher>(
+ 0, ParsedAddrSpaces);
+ }
+ }
+ }
+
+ // G_LOAD is used for both non-extending and any-extending loads.
if (Predicate.isLoad() && Predicate.isNonExtLoad()) {
InsnMatcher.addPredicate<MemoryVsLLTSizePredicateMatcher>(
0, MemoryVsLLTSizePredicateMatcher::EqualTo, 0);