// Skip any CMOVs in this group which don't load from memory.
if (!MI.mayLoad()) {
// Remember the false-side register input.
- FalseBBRegRewriteTable[MI.getOperand(0).getReg()] =
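+ // If this cmov's condition matches the group condition CC, operand 1 holds
+ // the false-side value; a cmov on the inverted condition carries it in
+ // operand 2.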
+ unsigned FalseReg =
MI.getOperand(X86::getCondFromCMovOpc(MI.getOpcode()) == CC ? 1 : 2)
.getReg();
+ // Walk back through any intermediate cmovs referenced.
+ for (;;) {
+ auto FRIt = FalseBBRegRewriteTable.find(FalseReg);
+ if (FRIt == FalseBBRegRewriteTable.end())
+ break;
+ FalseReg = FRIt->second;
+ }
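+ // Record the fully resolved false-side register for this cmov's def so that
+ // later cmovs in the group whose memory operands reference it can be
+ // rewritten to a value that is available in the false block.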
+ FalseBBRegRewriteTable[MI.getOperand(0).getReg()] = FalseReg;
continue;
}
}
; Test that we can convert a group of cmovs where only one has a memory
-; operand and where that memory operand's registers come from a prior cmov in the group.
+; operand and where that memory operand's registers come from a prior cmov in
+; the group.
define i32 @test_cmov_memoperand_in_group_reuse_for_addr(i32 %a, i32 %b, i32* %x, i32* %y) #0 {
; CHECK-LABEL: test_cmov_memoperand_in_group_reuse_for_addr:
entry:
  ret i32 %r
}
+; Test that we can convert a group of cmovs where only one has a memory
+; operand and where that memory operand's registers come from a prior cmov and
+; where that cmov gets *its* input from a prior cmov in the group.
+define i32 @test_cmov_memoperand_in_group_reuse_for_addr3(i32 %a, i32 %b, i32* %x, i32* %y, i32* %z) #0 {
+; CHECK-LABEL: test_cmov_memoperand_in_group_reuse_for_addr3:
+entry:
+ %cond = icmp ugt i32 %a, %b
+; CHECK: cmpl
+ %p = select i1 %cond, i32* %x, i32* %y
+ %p2 = select i1 %cond, i32* %z, i32* %p
+ %load = load i32, i32* %p2
+ %r = select i1 %cond, i32 %a, i32 %load
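+; The load's address is the result of the %p2 cmov, whose false-side input is
+; itself the result of the %p cmov, so the converter must walk that chain back
+; to %y when it rebuilds the load in the false block.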
+; CHECK-NOT: cmov
+; CHECK: ja [[FALSE_BB:.*]]
+; CHECK: movl (%r{{..}}), %[[R:.*]]
+; CHECK: [[FALSE_BB]]:
+; CHECK: movl %[[R]], %eax
+; CHECK: retq
+ ret i32 %r
+}
+
attributes #0 = {"target-cpu"="x86-64"}