def Xi8 : X86TypeInfo<i8, "b", GR8, loadi8, i8mem,
Imm8, i8imm, relocImm8_su, i8imm, invalid_node,
0, OpSizeFixed, 0>;
def Xi16 : X86TypeInfo<i16, "w", GR16, loadi16, i16mem,
- Imm16, i16imm, relocImm16_su, i16i8imm, i16immSExt8,
+ Imm16, i16imm, relocImm16_su, i16i8imm, i16immSExt8_su,
1, OpSize16, 0>;
def Xi32 : X86TypeInfo<i32, "l", GR32, loadi32, i32mem,
- Imm32, i32imm, relocImm32_su, i32i8imm, i32immSExt8,
+ Imm32, i32imm, relocImm32_su, i32i8imm, i32immSExt8_su,
1, OpSize32, 0>;
def Xi64 : X86TypeInfo<i64, "q", GR64, loadi64, i64mem,
- Imm32S, i64i32imm, i64relocImmSExt32_su, i64i8imm, i64immSExt8,
+ Imm32S, i64i32imm, i64relocImmSExt32_su, i64i8imm, i64immSExt8_su,
1, OpSizeFixed, 1>;
/// ITy - This instruction base class takes the type info for the instruction.
def : Pat<(X86sub_flag 0, GR64:$src), (NEG64r GR64:$src)>;
// sub reg, relocImm
-def : Pat<(X86sub_flag GR64:$src1, i64relocImmSExt8:$src2),
- (SUB64ri8 GR64:$src1, i64relocImmSExt8:$src2)>;
+def : Pat<(X86sub_flag GR64:$src1, i64relocImmSExt8_su:$src2),
+ (SUB64ri8 GR64:$src1, i64relocImmSExt8_su:$src2)>;
// mul reg, reg
def : Pat<(mul GR16:$src1, GR16:$src2),
          (IMUL16rr GR16:$src1, GR16:$src2)>;
return !shouldAvoidImmediateInstFormsForSize(N);
}]>;
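+// _su variants of the sign-extended 8-bit immediate leaves: like the existing
+// *_su leaves, they only match when shouldAvoidImmediateInstFormsForSize()
+// does not ask us to avoid the immediate form (e.g. so a constant with
+// several users can be kept in a register and reused when optimizing for
+// size).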
+def i16immSExt8_su : PatLeaf<(i16immSExt8), [{
+ return !shouldAvoidImmediateInstFormsForSize(N);
+}]>;
+def i32immSExt8_su : PatLeaf<(i32immSExt8), [{
+ return !shouldAvoidImmediateInstFormsForSize(N);
+}]>;
+def i64immSExt8_su : PatLeaf<(i64immSExt8), [{
+ return !shouldAvoidImmediateInstFormsForSize(N);
+}]>;
+
+def i64relocImmSExt8_su : PatLeaf<(i64relocImmSExt8), [{
+ return !shouldAvoidImmediateInstFormsForSize(N);
+}]>;
def i64relocImmSExt32_su : PatLeaf<(i64relocImmSExt32), [{
return !shouldAvoidImmediateInstFormsForSize(N);
}]>;
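The updated FileCheck expectations below show the effect on the first test. As a rough sketch of the kind of input involved (the global names, constants, and the if.then block are taken from the CHECK lines; the function name, the exact IR, and the optsize attribute are assumptions made for illustration):

@a = global i32 0
@b = global i32 0
@c = global i32 0
@e = global i32 0
@x = global i32 0

define void @foo() optsize {          ; size attribute assumed
entry:
  store i32 1234, i32* @a             ; 1234 already had multiple users and
  store i32 1234, i32* @b             ; was reused through %eax before
  store i32 12, i32* @c               ; 12 is stored here and compared below
  %v = load i32, i32* @e
  %cmp = icmp eq i32 %v, 12
  br i1 %cmp, label %if.then, label %if.end

if.then:
  store i32 1, i32* @x
  br label %if.end

if.end:
  ret void
}

Because the sign-extended 8-bit immediate compare patterns now go through i32immSExt8_su, the constant 12 is materialized into %eax once and both the store and the compare read the register, which is what the new CHECK lines below verify.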
; X86-NEXT: movl $1234, %eax # imm = 0x4D2
; X86-NEXT: movl %eax, a
; X86-NEXT: movl %eax, b
-; X86-NEXT: movl $12, c
-; X86-NEXT: cmpl $12, e
+; X86-NEXT: movl $12, %eax
+; X86-NEXT: movl %eax, c
+; X86-NEXT: cmpl %eax, e
; X86-NEXT: jne .LBB0_2
; X86-NEXT: # %bb.1: # %if.then
; X86-NEXT: movl $1, x
; X64-NEXT: movl $1234, %eax # imm = 0x4D2
; X64-NEXT: movl %eax, {{.*}}(%rip)
; X64-NEXT: movl %eax, {{.*}}(%rip)
-; X64-NEXT: movl $12, {{.*}}(%rip)
-; X64-NEXT: cmpl $12, {{.*}}(%rip)
+; X64-NEXT: movl $12, %eax
+; X64-NEXT: movl %eax, {{.*}}(%rip)
+; X64-NEXT: cmpl %eax, {{.*}}(%rip)
; X64-NEXT: jne .LBB0_2
; X64-NEXT: # %bb.1: # %if.then
; X64-NEXT: movl $1, {{.*}}(%rip)
; CHECK: # %bb.0:
; CHECK-NEXT: movq $-1, %rax
; CHECK-NEXT: movq %rax, (%rsi)
-; CHECK-NEXT: cmpq $-1, %rdi
+; CHECK-NEXT: cmpq %rax, %rdi
; CHECK-NEXT: sete %al
; CHECK-NEXT: retq
store i64 -1, i64* %b, align 8
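The second test is the same idea for i64: the store already goes through %rax (see the unchanged CHECK lines), and with i64immSExt8_su the compare stops using the sign-extended 8-bit immediate form (cmpq $-1, %rdi) and reuses that register instead. A minimal sketch of such a function, with the signature reconstructed from the assembly above (the function name and the size attribute are made up for illustration):

define i1 @cmp_store_allones(i64 %a, i64* %b) optsize {
entry:
  store i64 -1, i64* %b, align 8       ; -1 ends up in %rax for the store
  %cmp = icmp eq i64 %a, -1            ; previously: cmpq $-1, %rdi
  ret i1 %cmp                          ; now:        cmpq %rax, %rdi
}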