string OpName = opName;
}
-class VOP2_REV <string revOp, bit isOrig> {
+class Commutable_REV <string revOp, bit isOrig> {
string RevOp = revOp;
bit IsOrig = isOrig;
}
string revOp> {
def "" : VOP2_Pseudo <p.Outs32, p.Ins32, pattern, opName>,
- VOP2_REV<revOp#"_e32", !eq(revOp, opName)>;
+ Commutable_REV<revOp#"_e32", !eq(revOp, opName)>;
def _si : VOP2_Real_si <opName, op, p.Outs32, p.Ins32, p.Asm32>;
}
string revOp> {
def "" : VOP2_Pseudo <p.Outs32, p.Ins32, pattern, opName>,
- VOP2_REV<revOp#"_e32", !eq(revOp, opName)>;
+ Commutable_REV<revOp#"_e32", !eq(revOp, opName)>;
def _si : VOP2_Real_si <opName, op, p.Outs32, p.Ins32, p.Asm32>;
bit HasMods = 1> {
def "" : VOP3_Pseudo <outs, ins, pattern, opName, HasMods>,
- VOP2_REV<revOp#"_e64", !eq(revOp, opName)>;
+ Commutable_REV<revOp#"_e64", !eq(revOp, opName)>;
def _si : VOP3_Real_si <op.SI3, outs, ins, asm, opName, HasMods>,
VOP3DisableFields<1, 0, HasMods>;
bit HasMods = 1> {
def "" : VOP3_Pseudo <outs, ins, pattern, opName, HasMods>,
- VOP2_REV<revOp#"_e64", !eq(revOp, opName)>;
+ Commutable_REV<revOp#"_e64", !eq(revOp, opName)>;
def _si : VOP3_Real_si <op.SI3, outs, ins, asm, opName, HasMods>,
VOP3DisableFields<1, 0, HasMods>;
string revOp, list<SchedReadWrite> sched> {
def "" : VOP3_Pseudo <outs, ins, pattern, opName, HasMods>,
- VOP2_REV<revOp#"_e64", !eq(revOp, opName)> {
+ Commutable_REV<revOp#"_e64", !eq(revOp, opName)> {
let Defs = !if(defExec, [EXEC], []);
let SchedRW = sched;
}
string revOpName = "", string asm = opName#"_e32 "#op_asm,
string alias_asm = opName#" "#op_asm> {
def "" : VOPC_Pseudo <ins, pattern, opName>,
- VOP2_REV<revOpName#"_e32", !eq(revOpName, opName)> {
+ Commutable_REV<revOpName#"_e32", !eq(revOpName, opName)> {
let Defs = !if(DefExec, [VCC, EXEC], [VCC]);
let SchedRW = sched;
let isConvergent = DefExec;
// Maps an commuted opcode to its original version
def getCommuteOrig : InstrMapping {
- let FilterClass = "VOP2_REV";
+ let FilterClass = "Commutable_REV";
let RowFields = ["RevOp"];
let ColFields = ["IsOrig"];
let KeyCol = ["0"];
// Maps an original opcode to its commuted version
def getCommuteRev : InstrMapping {
- let FilterClass = "VOP2_REV";
+ let FilterClass = "Commutable_REV";
let RowFields = ["RevOp"];
let ColFields = ["IsOrig"];
let KeyCol = ["1"];
let ValueCols = [["0"]];
}
-def getCommuteCmpOrig : InstrMapping {
- let FilterClass = "VOP2_REV";
- let RowFields = ["RevOp"];
- let ColFields = ["IsOrig"];
- let KeyCol = ["0"];
- let ValueCols = [["1"]];
-}
-
-// Maps an original opcode to its commuted version
-def getCommuteCmpRev : InstrMapping {
- let FilterClass = "VOP2_REV";
- let RowFields = ["RevOp"];
- let ColFields = ["IsOrig"];
- let KeyCol = ["1"];
- let ValueCols = [["0"]];
-}
-
-
def getMCOpcodeGen : InstrMapping {
let FilterClass = "SIMCInstr";
let RowFields = ["PseudoInstr"];
[!cast<string>(SIEncodingFamily.VI)]];
}
+// Get equivalent SOPK instruction.
+def getSOPKOp : InstrMapping {
+ let FilterClass = "SOPKInstTable";
+ let RowFields = ["BaseCmpOp"];
+ let ColFields = ["IsSOPK"];
+ let KeyCol = ["0"];
+ let ValueCols = [["1"]];
+}
+
def getAddr64Inst : InstrMapping {
let FilterClass = "MUBUFAddr64Table";
let RowFields = ["OpName"];
let Inst{63-32} = imm;
}
+class SOPKInstTable <bit is_sopk, string cmpOp = ""> {
+ bit IsSOPK = is_sopk;
+ string BaseCmpOp = cmpOp;
+}
+
class SOPK_32 <string opName, list<dag> pattern=[]> : SOPK_Pseudo <
opName,
(outs SReg_32:$sdst),
"$sdst, $simm16",
pattern>;
-class SOPK_SCC <string opName, list<dag> pattern=[]> : SOPK_Pseudo <
+class SOPK_SCC <string opName, string base_op = ""> : SOPK_Pseudo <
opName,
(outs),
(ins SReg_32:$sdst, u16imm:$simm16),
- "$sdst, $simm16",
- pattern> {
+ "$sdst, $simm16", []>,
+ SOPKInstTable<1, base_op> {
let Defs = [SCC];
}
// [(set i1:$dst, (setcc i32:$src0, imm:$src1, SETEQ))]
// >;
-def S_CMPK_EQ_I32 : SOPK_SCC <"s_cmpk_eq_i32">;
-def S_CMPK_LG_I32 : SOPK_SCC <"s_cmpk_lg_i32">;
-def S_CMPK_GT_I32 : SOPK_SCC <"s_cmpk_gt_i32">;
-def S_CMPK_GE_I32 : SOPK_SCC <"s_cmpk_ge_i32">;
-def S_CMPK_LT_I32 : SOPK_SCC <"s_cmpk_lt_i32">;
-def S_CMPK_LE_I32 : SOPK_SCC <"s_cmpk_le_i32">;
-def S_CMPK_EQ_U32 : SOPK_SCC <"s_cmpk_eq_u32">;
-def S_CMPK_LG_U32 : SOPK_SCC <"s_cmpk_lg_u32">;
-def S_CMPK_GT_U32 : SOPK_SCC <"s_cmpk_gt_u32">;
-def S_CMPK_GE_U32 : SOPK_SCC <"s_cmpk_ge_u32">;
-def S_CMPK_LT_U32 : SOPK_SCC <"s_cmpk_lt_u32">;
-def S_CMPK_LE_U32 : SOPK_SCC <"s_cmpk_le_u32">;
+def S_CMPK_EQ_I32 : SOPK_SCC <"s_cmpk_eq_i32", "s_cmp_eq_i32">;
+def S_CMPK_LG_I32 : SOPK_SCC <"s_cmpk_lg_i32", "s_cmp_lg_i32">;
+def S_CMPK_GT_I32 : SOPK_SCC <"s_cmpk_gt_i32", "s_cmp_gt_i32">;
+def S_CMPK_GE_I32 : SOPK_SCC <"s_cmpk_ge_i32", "s_cmp_ge_i32">;
+def S_CMPK_LT_I32 : SOPK_SCC <"s_cmpk_lt_i32", "s_cmp_lt_i32">;
+def S_CMPK_LE_I32 : SOPK_SCC <"s_cmpk_le_i32", "s_cmp_le_i32">;
+
+let SOPKZext = 1 in {
+def S_CMPK_EQ_U32 : SOPK_SCC <"s_cmpk_eq_u32", "s_cmp_eq_u32">;
+def S_CMPK_LG_U32 : SOPK_SCC <"s_cmpk_lg_u32", "s_cmp_lg_u32">;
+def S_CMPK_GT_U32 : SOPK_SCC <"s_cmpk_gt_u32", "s_cmp_gt_u32">;
+def S_CMPK_GE_U32 : SOPK_SCC <"s_cmpk_ge_u32", "s_cmp_ge_u32">;
+def S_CMPK_LT_U32 : SOPK_SCC <"s_cmpk_lt_u32", "s_cmp_lt_u32">;
+def S_CMPK_LE_U32 : SOPK_SCC <"s_cmpk_le_u32", "s_cmp_le_u32">;
+} // End SOPKZext = 1
} // End isCompare = 1
let Defs = [SCC], isCommutable = 1, DisableEncoding = "$src0",
[(set SCC, (si_setcc_uniform vt:$src0, vt:$src1, cond))] > {
}
-class SOPC_CMP_32<bits<7> op, string opName, PatLeaf cond = COND_NULL>
- : SOPC_Helper<op, SSrc_b32, i32, opName, cond>;
+class SOPC_CMP_32<bits<7> op, string opName,
+ PatLeaf cond = COND_NULL, string revOp = opName>
+ : SOPC_Helper<op, SSrc_b32, i32, opName, cond>,
+ Commutable_REV<revOp, !eq(revOp, opName)>,
+ SOPKInstTable<0, opName> {
+ let isCompare = 1;
+ let isCommutable = 1;
+}
class SOPC_32<bits<7> op, string opName, list<dag> pattern = []>
: SOPC_Base<op, SSrc_b32, SSrc_b32, opName, pattern>;
class SOPC_64_32<bits<7> op, string opName, list<dag> pattern = []>
: SOPC_Base<op, SSrc_b64, SSrc_b32, opName, pattern>;
-
def S_CMP_EQ_I32 : SOPC_CMP_32 <0x00, "s_cmp_eq_i32", COND_EQ>;
def S_CMP_LG_I32 : SOPC_CMP_32 <0x01, "s_cmp_lg_i32", COND_NE>;
def S_CMP_GT_I32 : SOPC_CMP_32 <0x02, "s_cmp_gt_i32", COND_SGT>;
def S_CMP_GE_I32 : SOPC_CMP_32 <0x03, "s_cmp_ge_i32", COND_SGE>;
-def S_CMP_LT_I32 : SOPC_CMP_32 <0x04, "s_cmp_lt_i32", COND_SLT>;
-def S_CMP_LE_I32 : SOPC_CMP_32 <0x05, "s_cmp_le_i32", COND_SLE>;
+def S_CMP_LT_I32 : SOPC_CMP_32 <0x04, "s_cmp_lt_i32", COND_SLT, "s_cmp_gt_i32">;
+def S_CMP_LE_I32 : SOPC_CMP_32 <0x05, "s_cmp_le_i32", COND_SLE, "s_cmp_ge_i32">;
def S_CMP_EQ_U32 : SOPC_CMP_32 <0x06, "s_cmp_eq_u32", COND_EQ>;
-def S_CMP_LG_U32 : SOPC_CMP_32 <0x07, "s_cmp_lg_u32", COND_NE >;
+def S_CMP_LG_U32 : SOPC_CMP_32 <0x07, "s_cmp_lg_u32", COND_NE>;
def S_CMP_GT_U32 : SOPC_CMP_32 <0x08, "s_cmp_gt_u32", COND_UGT>;
def S_CMP_GE_U32 : SOPC_CMP_32 <0x09, "s_cmp_ge_u32", COND_UGE>;
-def S_CMP_LT_U32 : SOPC_CMP_32 <0x0a, "s_cmp_lt_u32", COND_ULT>;
-def S_CMP_LE_U32 : SOPC_CMP_32 <0x0b, "s_cmp_le_u32", COND_ULE>;
+def S_CMP_LT_U32 : SOPC_CMP_32 <0x0a, "s_cmp_lt_u32", COND_ULT, "s_cmp_gt_u32">;
+def S_CMP_LE_U32 : SOPC_CMP_32 <0x0b, "s_cmp_le_u32", COND_ULE, "s_cmp_ge_u32">;
+
def S_BITCMP0_B32 : SOPC_32 <0x0c, "s_bitcmp0_b32">;
def S_BITCMP1_B32 : SOPC_32 <0x0d, "s_bitcmp1_b32">;
def S_BITCMP0_B64 : SOPC_64_32 <0x0e, "s_bitcmp0_b64">;
--- /dev/null
+; RUN: llc -march=amdgcn -verify-machineinstrs < %s | FileCheck -check-prefix=GCN -check-prefix=SI %s
+; RUN: llc -march=amdgcn -mcpu=tonga -verify-machineinstrs < %s | FileCheck -check-prefix=GCN -check-prefix=VI %s
+
+; Since this intrinsic is exposed as a constant after isel, use it to
+; defeat the DAG's compare with constant canonicalizations.
+declare i32 @llvm.amdgcn.groupstaticsize() #1
+
+@lds = addrspace(3) global [512 x i32] undef, align 4
+
+; GCN-LABEL: {{^}}br_scc_eq_i32_inline_imm:
+; GCN: s_cmp_eq_i32 s{{[0-9]+}}, 4{{$}}
+define void @br_scc_eq_i32_inline_imm(i32 %cond, i32 addrspace(1)* %out) #0 {
+entry:
+ %cmp0 = icmp eq i32 %cond, 4
+ br i1 %cmp0, label %endif, label %if
+
+if:
+ call void asm sideeffect "", ""()
+ br label %endif
+
+endif:
+ store volatile i32 1, i32 addrspace(1)* %out
+ ret void
+}
+
+; GCN-LABEL: {{^}}br_scc_eq_i32_simm16_max:
+; GCN: s_cmpk_eq_i32 s{{[0-9]+}}, 0x7fff{{$}}
+define void @br_scc_eq_i32_simm16_max(i32 %cond, i32 addrspace(1)* %out) #0 {
+entry:
+ %cmp0 = icmp eq i32 %cond, 32767
+ br i1 %cmp0, label %endif, label %if
+
+if:
+ call void asm sideeffect "", ""()
+ br label %endif
+
+endif:
+ store volatile i32 1, i32 addrspace(1)* %out
+ ret void
+}
+
+; GCN-LABEL: {{^}}br_scc_eq_i32_simm16_max_p1:
+; GCN: s_cmpk_eq_u32 s{{[0-9]+}}, 0x8000{{$}}
+define void @br_scc_eq_i32_simm16_max_p1(i32 %cond, i32 addrspace(1)* %out) #0 {
+entry:
+ %cmp0 = icmp eq i32 %cond, 32768
+ br i1 %cmp0, label %endif, label %if
+
+if:
+ call void asm sideeffect "", ""()
+ br label %endif
+
+endif:
+ store volatile i32 1, i32 addrspace(1)* %out
+ ret void
+}
+
+; GCN-LABEL: {{^}}br_scc_ne_i32_simm16_max_p1:
+; GCN: s_cmpk_lg_u32 s{{[0-9]+}}, 0x8000{{$}}
+define void @br_scc_ne_i32_simm16_max_p1(i32 %cond, i32 addrspace(1)* %out) #0 {
+entry:
+ %cmp0 = icmp ne i32 %cond, 32768
+ br i1 %cmp0, label %endif, label %if
+
+if:
+ call void asm sideeffect "", ""()
+ br label %endif
+
+endif:
+ store volatile i32 1, i32 addrspace(1)* %out
+ ret void
+}
+
+; GCN-LABEL: {{^}}br_scc_eq_i32_simm16_min:
+; GCN: s_cmpk_eq_i32 s{{[0-9]+}}, 0x8000{{$}}
+define void @br_scc_eq_i32_simm16_min(i32 %cond, i32 addrspace(1)* %out) #0 {
+entry:
+ %cmp0 = icmp eq i32 %cond, -32768
+ br i1 %cmp0, label %endif, label %if
+
+if:
+ call void asm sideeffect "", ""()
+ br label %endif
+
+endif:
+ store volatile i32 1, i32 addrspace(1)* %out
+ ret void
+}
+
+; GCN-LABEL: {{^}}br_scc_eq_i32_simm16_min_m1:
+; GCN: s_cmp_eq_i32 s{{[0-9]+}}, 0xffff7fff{{$}}
+define void @br_scc_eq_i32_simm16_min_m1(i32 %cond, i32 addrspace(1)* %out) #0 {
+entry:
+ %cmp0 = icmp eq i32 %cond, -32769
+ br i1 %cmp0, label %endif, label %if
+
+if:
+ call void asm sideeffect "", ""()
+ br label %endif
+
+endif:
+ store volatile i32 1, i32 addrspace(1)* %out
+ ret void
+}
+
+; GCN-LABEL: {{^}}br_scc_eq_i32_uimm15_max:
+; GCN: s_cmpk_eq_u32 s{{[0-9]+}}, 0xffff{{$}}
+define void @br_scc_eq_i32_uimm15_max(i32 %cond, i32 addrspace(1)* %out) #0 {
+entry:
+ %cmp0 = icmp eq i32 %cond, 65535
+ br i1 %cmp0, label %endif, label %if
+
+if:
+ call void asm sideeffect "", ""()
+ br label %endif
+
+endif:
+ store volatile i32 1, i32 addrspace(1)* %out
+ ret void
+}
+
+; GCN-LABEL: {{^}}br_scc_eq_i32_uimm16_max:
+; GCN: s_cmpk_eq_u32 s{{[0-9]+}}, 0xffff{{$}}
+define void @br_scc_eq_i32_uimm16_max(i32 %cond, i32 addrspace(1)* %out) #0 {
+entry:
+ %cmp0 = icmp eq i32 %cond, 65535
+ br i1 %cmp0, label %endif, label %if
+
+if:
+ call void asm sideeffect "", ""()
+ br label %endif
+
+endif:
+ store volatile i32 1, i32 addrspace(1)* %out
+ ret void
+}
+
+; GCN-LABEL: {{^}}br_scc_eq_i32_uimm16_max_p1:
+; GCN: s_cmp_eq_i32 s{{[0-9]+}}, 0x10000{{$}}
+define void @br_scc_eq_i32_uimm16_max_p1(i32 %cond, i32 addrspace(1)* %out) #0 {
+entry:
+ %cmp0 = icmp eq i32 %cond, 65536
+ br i1 %cmp0, label %endif, label %if
+
+if:
+ call void asm sideeffect "", ""()
+ br label %endif
+
+endif:
+ store volatile i32 1, i32 addrspace(1)* %out
+ ret void
+}
+
+
+; GCN-LABEL: {{^}}br_scc_eq_i32:
+; GCN: s_cmpk_eq_i32 s{{[0-9]+}}, 0x41{{$}}
+define void @br_scc_eq_i32(i32 %cond, i32 addrspace(1)* %out) #0 {
+entry:
+ %cmp0 = icmp eq i32 %cond, 65
+ br i1 %cmp0, label %endif, label %if
+
+if:
+ call void asm sideeffect "", ""()
+ br label %endif
+
+endif:
+ store volatile i32 1, i32 addrspace(1)* %out
+ ret void
+}
+
+; GCN-LABEL: {{^}}br_scc_ne_i32:
+; GCN: s_cmpk_lg_i32 s{{[0-9]+}}, 0x41{{$}}
+define void @br_scc_ne_i32(i32 %cond, i32 addrspace(1)* %out) #0 {
+entry:
+ %cmp0 = icmp ne i32 %cond, 65
+ br i1 %cmp0, label %endif, label %if
+
+if:
+ call void asm sideeffect "", ""()
+ br label %endif
+
+endif:
+ store volatile i32 1, i32 addrspace(1)* %out
+ ret void
+}
+
+; GCN-LABEL: {{^}}br_scc_sgt_i32:
+; GCN: s_cmpk_gt_i32 s{{[0-9]+}}, 0x41{{$}}
+define void @br_scc_sgt_i32(i32 %cond, i32 addrspace(1)* %out) #0 {
+entry:
+ %cmp0 = icmp sgt i32 %cond, 65
+ br i1 %cmp0, label %endif, label %if
+
+if:
+ call void asm sideeffect "", ""()
+ br label %endif
+
+endif:
+ store volatile i32 1, i32 addrspace(1)* %out
+ ret void
+}
+
+; GCN-LABEL: {{^}}br_scc_sgt_i32_simm16_max:
+; GCN: s_cmpk_gt_i32 s{{[0-9]+}}, 0x7fff{{$}}
+define void @br_scc_sgt_i32_simm16_max(i32 %cond, i32 addrspace(1)* %out) #0 {
+entry:
+ %cmp0 = icmp sgt i32 %cond, 32767
+ br i1 %cmp0, label %endif, label %if
+
+if:
+ call void asm sideeffect "", ""()
+ br label %endif
+
+endif:
+ store volatile i32 1, i32 addrspace(1)* %out
+ ret void
+}
+
+; GCN-LABEL: {{^}}br_scc_sgt_i32_simm16_max_p1:
+; GCN: s_cmp_gt_i32 s{{[0-9]+}}, 0x8000{{$}}
+define void @br_scc_sgt_i32_simm16_max_p1(i32 %cond, i32 addrspace(1)* %out) #0 {
+entry:
+ %cmp0 = icmp sgt i32 %cond, 32768
+ br i1 %cmp0, label %endif, label %if
+
+if:
+ call void asm sideeffect "", ""()
+ br label %endif
+
+endif:
+ store volatile i32 1, i32 addrspace(1)* %out
+ ret void
+}
+
+; GCN-LABEL: {{^}}br_scc_sge_i32:
+; GCN: s_cmpk_ge_i32 s{{[0-9]+}}, 0x800{{$}}
+define void @br_scc_sge_i32(i32 %cond, i32 addrspace(1)* %out) #0 {
+entry:
+ %size = call i32 @llvm.amdgcn.groupstaticsize()
+ %cmp0 = icmp sge i32 %cond, %size
+ br i1 %cmp0, label %endif, label %if
+
+if:
+ call void asm sideeffect "; $0", "v"([512 x i32] addrspace(3)* @lds)
+ br label %endif
+
+endif:
+ store volatile i32 1, i32 addrspace(1)* %out
+ ret void
+}
+
+; GCN-LABEL: {{^}}br_scc_slt_i32:
+; GCN: s_cmpk_lt_i32 s{{[0-9]+}}, 0x41{{$}}
+define void @br_scc_slt_i32(i32 %cond, i32 addrspace(1)* %out) #0 {
+entry:
+ %cmp0 = icmp slt i32 %cond, 65
+ br i1 %cmp0, label %endif, label %if
+
+if:
+ call void asm sideeffect "", ""()
+ br label %endif
+
+endif:
+ store volatile i32 1, i32 addrspace(1)* %out
+ ret void
+}
+
+; GCN-LABEL: {{^}}br_scc_sle_i32:
+; GCN: s_cmpk_le_i32 s{{[0-9]+}}, 0x800{{$}}
+define void @br_scc_sle_i32(i32 %cond, i32 addrspace(1)* %out) #0 {
+entry:
+ %size = call i32 @llvm.amdgcn.groupstaticsize()
+ %cmp0 = icmp sle i32 %cond, %size
+ br i1 %cmp0, label %endif, label %if
+
+if:
+ call void asm sideeffect "; $0", "v"([512 x i32] addrspace(3)* @lds)
+ br label %endif
+
+endif:
+ store volatile i32 1, i32 addrspace(1)* %out
+ ret void
+}
+
+; GCN-LABEL: {{^}}br_scc_ugt_i32:
+; GCN: s_cmpk_gt_u32 s{{[0-9]+}}, 0x800{{$}}
+define void @br_scc_ugt_i32(i32 %cond, i32 addrspace(1)* %out) #0 {
+entry:
+ %size = call i32 @llvm.amdgcn.groupstaticsize()
+ %cmp0 = icmp ugt i32 %cond, %size
+ br i1 %cmp0, label %endif, label %if
+
+if:
+ call void asm sideeffect "; $0", "v"([512 x i32] addrspace(3)* @lds)
+ br label %endif
+
+endif:
+ store volatile i32 1, i32 addrspace(1)* %out
+ ret void
+}
+
+; GCN-LABEL: {{^}}br_scc_uge_i32:
+; GCN: s_cmpk_ge_u32 s{{[0-9]+}}, 0x800{{$}}
+define void @br_scc_uge_i32(i32 %cond, i32 addrspace(1)* %out) #0 {
+entry:
+ %size = call i32 @llvm.amdgcn.groupstaticsize()
+ %cmp0 = icmp uge i32 %cond, %size
+ br i1 %cmp0, label %endif, label %if
+
+if:
+ call void asm sideeffect "; $0", "v"([512 x i32] addrspace(3)* @lds)
+ br label %endif
+
+endif:
+ store volatile i32 1, i32 addrspace(1)* %out
+ ret void
+}
+
+; GCN-LABEL: {{^}}br_scc_ult_i32:
+; GCN: s_cmpk_lt_u32 s{{[0-9]+}}, 0x41{{$}}
+define void @br_scc_ult_i32(i32 %cond, i32 addrspace(1)* %out) #0 {
+entry:
+ %cmp0 = icmp ult i32 %cond, 65
+ br i1 %cmp0, label %endif, label %if
+
+if:
+ call void asm sideeffect "", ""()
+ br label %endif
+
+endif:
+ store volatile i32 1, i32 addrspace(1)* %out
+ ret void
+}
+
+; GCN-LABEL: {{^}}br_scc_ult_i32_min_simm16:
+; GCN: s_cmp_lt_u32 s{{[0-9]+}}, 0xffff8000
+define void @br_scc_ult_i32_min_simm16(i32 %cond, i32 addrspace(1)* %out) #0 {
+entry:
+ %cmp0 = icmp ult i32 %cond, -32768
+ br i1 %cmp0, label %endif, label %if
+
+if:
+ call void asm sideeffect "", ""()
+ br label %endif
+
+endif:
+ store volatile i32 1, i32 addrspace(1)* %out
+ ret void
+}
+
+; GCN-LABEL: {{^}}br_scc_ult_i32_min_simm16_m1:
+; GCN: s_cmp_lt_u32 s{{[0-9]+}}, 0xffff7fff{{$}}
+define void @br_scc_ult_i32_min_simm16_m1(i32 %cond, i32 addrspace(1)* %out) #0 {
+entry:
+ %cmp0 = icmp ult i32 %cond, -32769
+ br i1 %cmp0, label %endif, label %if
+
+if:
+ call void asm sideeffect "", ""()
+ br label %endif
+
+endif:
+ store volatile i32 1, i32 addrspace(1)* %out
+ ret void
+}
+
+; GCN-LABEL: {{^}}br_scc_ule_i32:
+; GCN: s_cmpk_le_u32 s{{[0-9]+}}, 0x800{{$}}
+define void @br_scc_ule_i32(i32 %cond, i32 addrspace(1)* %out) #0 {
+entry:
+ %size = call i32 @llvm.amdgcn.groupstaticsize()
+ %cmp0 = icmp ule i32 %cond, %size
+ br i1 %cmp0, label %endif, label %if
+
+if:
+ call void asm sideeffect "; $0", "v"([512 x i32] addrspace(3)* @lds)
+ br label %endif
+
+endif:
+ store volatile i32 1, i32 addrspace(1)* %out
+ ret void
+}
+
+; GCN-LABEL: {{^}}commute_br_scc_eq_i32:
+; GCN: s_cmpk_eq_i32 s{{[0-9]+}}, 0x800{{$}}
+define void @commute_br_scc_eq_i32(i32 %cond, i32 addrspace(1)* %out) #0 {
+entry:
+ %size = call i32 @llvm.amdgcn.groupstaticsize()
+ %cmp0 = icmp eq i32 %size, %cond
+ br i1 %cmp0, label %endif, label %if
+
+if:
+ call void asm sideeffect "; $0", "v"([512 x i32] addrspace(3)* @lds)
+ br label %endif
+
+endif:
+ store volatile i32 1, i32 addrspace(1)* %out
+ ret void
+}
+
+; GCN-LABEL: {{^}}commute_br_scc_ne_i32:
+; GCN: s_cmpk_lg_i32 s{{[0-9]+}}, 0x800{{$}}
+define void @commute_br_scc_ne_i32(i32 %cond, i32 addrspace(1)* %out) #0 {
+entry:
+ %size = call i32 @llvm.amdgcn.groupstaticsize()
+ %cmp0 = icmp ne i32 %size, %cond
+ br i1 %cmp0, label %endif, label %if
+
+if:
+ call void asm sideeffect "; $0", "v"([512 x i32] addrspace(3)* @lds)
+ br label %endif
+
+endif:
+ store volatile i32 1, i32 addrspace(1)* %out
+ ret void
+}
+
+; GCN-LABEL: {{^}}commute_br_scc_sgt_i32:
+; GCN: s_cmpk_lt_i32 s{{[0-9]+}}, 0x800{{$}}
+define void @commute_br_scc_sgt_i32(i32 %cond, i32 addrspace(1)* %out) #0 {
+entry:
+ %size = call i32 @llvm.amdgcn.groupstaticsize()
+ %cmp0 = icmp sgt i32 %size, %cond
+ br i1 %cmp0, label %endif, label %if
+
+if:
+ call void asm sideeffect "; $0", "v"([512 x i32] addrspace(3)* @lds)
+ br label %endif
+
+endif:
+ store volatile i32 1, i32 addrspace(1)* %out
+ ret void
+}
+
+; GCN-LABEL: {{^}}commute_br_scc_sge_i32:
+; GCN: s_cmpk_le_i32 s{{[0-9]+}}, 0x800{{$}}
+define void @commute_br_scc_sge_i32(i32 %cond, i32 addrspace(1)* %out) #0 {
+entry:
+ %size = call i32 @llvm.amdgcn.groupstaticsize()
+ %cmp0 = icmp sge i32 %size, %cond
+ br i1 %cmp0, label %endif, label %if
+
+if:
+ call void asm sideeffect "; $0", "v"([512 x i32] addrspace(3)* @lds)
+ br label %endif
+
+endif:
+ store volatile i32 1, i32 addrspace(1)* %out
+ ret void
+}
+
+; GCN-LABEL: {{^}}commute_br_scc_slt_i32:
+; GCN: s_cmpk_gt_i32 s{{[0-9]+}}, 0x800{{$}}
+define void @commute_br_scc_slt_i32(i32 %cond, i32 addrspace(1)* %out) #0 {
+entry:
+ %size = call i32 @llvm.amdgcn.groupstaticsize()
+ %cmp0 = icmp slt i32 %size, %cond
+ br i1 %cmp0, label %endif, label %if
+
+if:
+ call void asm sideeffect "; $0", "v"([512 x i32] addrspace(3)* @lds)
+ br label %endif
+
+endif:
+ store volatile i32 1, i32 addrspace(1)* %out
+ ret void
+}
+
+; GCN-LABEL: {{^}}commute_br_scc_sle_i32:
+; GCN: s_cmpk_ge_i32 s{{[0-9]+}}, 0x800{{$}}
+define void @commute_br_scc_sle_i32(i32 %cond, i32 addrspace(1)* %out) #0 {
+entry:
+ %size = call i32 @llvm.amdgcn.groupstaticsize()
+ %cmp0 = icmp sle i32 %size, %cond
+ br i1 %cmp0, label %endif, label %if
+
+if:
+ call void asm sideeffect "; $0", "v"([512 x i32] addrspace(3)* @lds)
+ br label %endif
+
+endif:
+ store volatile i32 1, i32 addrspace(1)* %out
+ ret void
+}
+
+; GCN-LABEL: {{^}}commute_br_scc_ugt_i32:
+; GCN: s_cmpk_lt_u32 s{{[0-9]+}}, 0x800{{$}}
+define void @commute_br_scc_ugt_i32(i32 %cond, i32 addrspace(1)* %out) #0 {
+entry:
+ %size = call i32 @llvm.amdgcn.groupstaticsize()
+ %cmp0 = icmp ugt i32 %size, %cond
+ br i1 %cmp0, label %endif, label %if
+
+if:
+ call void asm sideeffect "; $0", "v"([512 x i32] addrspace(3)* @lds)
+ br label %endif
+
+endif:
+ store volatile i32 1, i32 addrspace(1)* %out
+ ret void
+}
+
+; GCN-LABEL: {{^}}commute_br_scc_uge_i32:
+; GCN: s_cmpk_le_u32 s{{[0-9]+}}, 0x800{{$}}
+define void @commute_br_scc_uge_i32(i32 %cond, i32 addrspace(1)* %out) #0 {
+entry:
+ %size = call i32 @llvm.amdgcn.groupstaticsize()
+ %cmp0 = icmp uge i32 %size, %cond
+ br i1 %cmp0, label %endif, label %if
+
+if:
+ call void asm sideeffect "; $0", "v"([512 x i32] addrspace(3)* @lds)
+ br label %endif
+
+endif:
+ store volatile i32 1, i32 addrspace(1)* %out
+ ret void
+}
+
+; GCN-LABEL: {{^}}commute_br_scc_ult_i32:
+; GCN: s_cmpk_gt_u32 s{{[0-9]+}}, 0x800{{$}}
+define void @commute_br_scc_ult_i32(i32 %cond, i32 addrspace(1)* %out) #0 {
+entry:
+ %size = call i32 @llvm.amdgcn.groupstaticsize()
+ %cmp0 = icmp ult i32 %size, %cond
+ br i1 %cmp0, label %endif, label %if
+
+if:
+ call void asm sideeffect "; $0", "v"([512 x i32] addrspace(3)* @lds)
+ br label %endif
+
+endif:
+ store volatile i32 1, i32 addrspace(1)* %out
+ ret void
+}
+
+; GCN-LABEL: {{^}}commute_br_scc_ule_i32:
+; GCN: s_cmpk_ge_u32 s{{[0-9]+}}, 0x800{{$}}
+define void @commute_br_scc_ule_i32(i32 %cond, i32 addrspace(1)* %out) #0 {
+entry:
+ %size = call i32 @llvm.amdgcn.groupstaticsize()
+ %cmp0 = icmp ule i32 %size, %cond
+ br i1 %cmp0, label %endif, label %if
+
+if:
+ call void asm sideeffect "; $0", "v"([512 x i32] addrspace(3)* @lds)
+ br label %endif
+
+endif:
+ store volatile i32 1, i32 addrspace(1)* %out
+ ret void
+}
+
+; GCN-LABEL: {{^}}br_scc_ult_i32_non_u16:
+; GCN: s_cmp_lt_u32 s{{[0-9]+}}, 0xfffff7ff
+define void @br_scc_ult_i32_non_u16(i32 %cond, i32 addrspace(1)* %out) #0 {
+entry:
+ %size = call i32 @llvm.amdgcn.groupstaticsize()
+ %not.size = xor i32 %size, -1
+ %cmp0 = icmp ult i32 %cond, %not.size
+ br i1 %cmp0, label %endif, label %if
+
+if:
+ call void asm sideeffect "; $0", "v"([512 x i32] addrspace(3)* @lds)
+ br label %endif
+
+endif:
+ store volatile i32 1, i32 addrspace(1)* %out
+ ret void
+}
+
+attributes #0 = { nounwind }
+attributes #1 = { nounwind readnone }