// With the help of a runtime that understands the .fault_maps section,
// faulting_load_op branches to throw_npe if executing movl (%r10), %esi incurs
// a page fault.
+// Store is also supported.
//
//===----------------------------------------------------------------------===//
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/AliasAnalysis.h"
+#include "llvm/CodeGen/FaultMaps.h"
#include "llvm/CodeGen/Passes.h"
#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/CodeGen/MachineMemOperand.h"
bool analyzeBlockForNullChecks(MachineBasicBlock &MBB,
SmallVectorImpl<NullCheck> &NullCheckList);
- MachineInstr *insertFaultingLoad(MachineInstr *LoadMI, MachineBasicBlock *MBB,
- MachineBasicBlock *HandlerMBB);
+ MachineInstr *insertFaultingInstr(MachineInstr *MI, MachineBasicBlock *MBB,
+ MachineBasicBlock *HandlerMBB);
void rewriteNullChecks(ArrayRef<NullCheck> NullCheckList);
enum SuitabilityResult { SR_Suitable, SR_Unsuitable, SR_Impossible };
/// \p MI cannot be used to null check and SR_Impossible if there is
/// no point in continuing the lookup since no further instruction will be
/// able to be used. \p PrevInsts is the set of instructions seen since
- /// the explicit null check on \p PointerReg.
+ /// the explicit null check on \p PointerReg. \p SeenLoad means that a load
+ /// instruction has been observed in the \p PrevInsts set.
SuitabilityResult isSuitableMemoryOp(MachineInstr &MI, unsigned PointerReg,
- ArrayRef<MachineInstr *> PrevInsts);
+ ArrayRef<MachineInstr *> PrevInsts,
+ bool &SeenLoad);
/// Return true if \p FaultingMI can be hoisted from after the
/// instructions in \p InstsSeenSoFar to before them. Set \p Dependence to a
/// non-null value if we also need to (and legally can) hoist a dependency.
- bool canHoistLoadInst(MachineInstr *FaultingMI, unsigned PointerReg,
- ArrayRef<MachineInstr *> InstsSeenSoFar,
- MachineBasicBlock *NullSucc, MachineInstr *&Dependence);
+ bool canHoistInst(MachineInstr *FaultingMI, unsigned PointerReg,
+ ArrayRef<MachineInstr *> InstsSeenSoFar,
+ MachineBasicBlock *NullSucc, MachineInstr *&Dependence);
public:
static char ID;
}
bool ImplicitNullChecks::canHandle(const MachineInstr *MI) {
- if (MI->isCall() || MI->mayStore() || MI->hasUnmodeledSideEffects())
+ if (MI->isCall() || MI->hasUnmodeledSideEffects())
return false;
auto IsRegMask = [](const MachineOperand &MO) { return MO.isRegMask(); };
(void)IsRegMask;
ImplicitNullChecks::SuitabilityResult
ImplicitNullChecks::isSuitableMemoryOp(MachineInstr &MI, unsigned PointerReg,
- ArrayRef<MachineInstr *> PrevInsts) {
+ ArrayRef<MachineInstr *> PrevInsts,
+ bool &SeenLoad) {
int64_t Offset;
unsigned BaseReg;
+ // First, if it is a store and we saw a load before, we bail out
+ // because we will not be able to re-order the load and the store
+ // without using alias analysis.
+ if (SeenLoad && MI.mayStore())
+ return SR_Impossible;
+
+ SeenLoad = SeenLoad || MI.mayLoad();
+
+ // Without alias analysis we cannot re-order a store with anything,
+ // so if this instruction is not a candidate we should stop.
+ SuitabilityResult Unsuitable = MI.mayStore() ? SR_Impossible : SR_Unsuitable;
+
if (!TII->getMemOpBaseRegImmOfs(MI, BaseReg, Offset, TRI) ||
BaseReg != PointerReg)
- return SR_Unsuitable;
+ return Unsuitable;
- // We want the load to be issued at a sane offset from PointerReg, so that
- // if PointerReg is null then the load reliably page faults.
- if (!(MI.mayLoad() && !MI.isPredicable() && Offset < PageSize))
- return SR_Unsuitable;
+ // We want the mem access to be issued at a sane offset from PointerReg,
+ // so that if PointerReg is null then the access reliably page faults.
+ if (!((MI.mayLoad() || MI.mayStore()) && !MI.isPredicable() &&
+ Offset < PageSize))
+ return Unsuitable;
- // Finally, we need to make sure that the load instruction actually is
- // loading from PointerReg, and there isn't some re-definition of PointerReg
- // between the compare and the load.
+ // Finally, we need to make sure that the access instruction actually is
+ // accessing memory via PointerReg, and there isn't some re-definition of
+ // PointerReg between the compare and the memory access.
// If PointerReg has been redefined before then there is no point in continuing
// the lookup since this condition will fail for any further instruction.
for (auto *PrevMI : PrevInsts)
return SR_Suitable;
}
-bool ImplicitNullChecks::canHoistLoadInst(
- MachineInstr *FaultingMI, unsigned PointerReg,
- ArrayRef<MachineInstr *> InstsSeenSoFar, MachineBasicBlock *NullSucc,
- MachineInstr *&Dependence) {
+bool ImplicitNullChecks::canHoistInst(MachineInstr *FaultingMI,
+ unsigned PointerReg,
+ ArrayRef<MachineInstr *> InstsSeenSoFar,
+ MachineBasicBlock *NullSucc,
+ MachineInstr *&Dependence) {
auto DepResult = computeDependence(FaultingMI, InstsSeenSoFar);
if (!DepResult.CanReorder)
return false;
const unsigned PointerReg = MBP.LHS.getReg();
SmallVector<MachineInstr *, 8> InstsSeenSoFar;
+ bool SeenLoad = false;
for (auto &MI : *NotNullSucc) {
if (!canHandle(&MI) || InstsSeenSoFar.size() >= MaxInstsToConsider)
return false;
MachineInstr *Dependence;
- SuitabilityResult SR = isSuitableMemoryOp(MI, PointerReg, InstsSeenSoFar);
+ SuitabilityResult SR =
+ isSuitableMemoryOp(MI, PointerReg, InstsSeenSoFar, SeenLoad);
if (SR == SR_Impossible)
return false;
- if (SR == SR_Suitable && canHoistLoadInst(&MI, PointerReg, InstsSeenSoFar,
- NullSucc, Dependence)) {
+ if (SR == SR_Suitable &&
+ canHoistInst(&MI, PointerReg, InstsSeenSoFar, NullSucc, Dependence)) {
NullCheckList.emplace_back(&MI, MBP.ConditionDef, &MBB, NotNullSucc,
NullSucc, Dependence);
return true;
return false;
}
-/// Wrap a machine load instruction, LoadMI, into a FAULTING_LOAD_OP machine
-/// instruction. The FAULTING_LOAD_OP instruction does the same load as LoadMI
-/// (defining the same register), and branches to HandlerMBB if the load
-/// faults. The FAULTING_LOAD_OP instruction is inserted at the end of MBB.
-MachineInstr *
-ImplicitNullChecks::insertFaultingLoad(MachineInstr *LoadMI,
- MachineBasicBlock *MBB,
- MachineBasicBlock *HandlerMBB) {
+/// Wrap a machine instruction, MI, into a FAULTING machine instruction.
+/// The FAULTING instruction does the same load/store as MI
+/// (defining the same register), and branches to HandlerMBB if the mem access
+/// faults. The FAULTING instruction is inserted at the end of MBB.
+MachineInstr *ImplicitNullChecks::insertFaultingInstr(
+ MachineInstr *MI, MachineBasicBlock *MBB, MachineBasicBlock *HandlerMBB) {
const unsigned NoRegister = 0; // Guaranteed to be the NoRegister value for
// all targets.
DebugLoc DL;
- unsigned NumDefs = LoadMI->getDesc().getNumDefs();
+ unsigned NumDefs = MI->getDesc().getNumDefs();
assert(NumDefs <= 1 && "other cases unhandled!");
unsigned DefReg = NoRegister;
if (NumDefs != 0) {
- DefReg = LoadMI->defs().begin()->getReg();
- assert(std::distance(LoadMI->defs().begin(), LoadMI->defs().end()) == 1 &&
+ DefReg = MI->defs().begin()->getReg();
+ assert(std::distance(MI->defs().begin(), MI->defs().end()) == 1 &&
"expected exactly one def!");
}
- auto MIB = BuildMI(MBB, DL, TII->get(TargetOpcode::FAULTING_LOAD_OP), DefReg)
+ FaultMaps::FaultKind FK;
+ if (MI->mayLoad())
+ FK =
+ MI->mayStore() ? FaultMaps::FaultingLoadStore : FaultMaps::FaultingLoad;
+ else
+ FK = FaultMaps::FaultingStore;
+
+ auto MIB = BuildMI(MBB, DL, TII->get(TargetOpcode::FAULTING_OP), DefReg)
+ .addImm(FK)
.addMBB(HandlerMBB)
- .addImm(LoadMI->getOpcode());
+ .addImm(MI->getOpcode());
- for (auto &MO : LoadMI->uses())
+ for (auto &MO : MI->uses())
MIB.add(MO);
- MIB.setMemRefs(LoadMI->memoperands_begin(), LoadMI->memoperands_end());
+ MIB.setMemRefs(MI->memoperands_begin(), MI->memoperands_end());
return MIB;
}
NC.getCheckBlock()->insert(NC.getCheckBlock()->end(), DepMI);
}
- // Insert a faulting load where the conditional branch was originally. We
- // check earlier ensures that this bit of code motion is legal. We do not
- // touch the successors list for any basic block since we haven't changed
- // control flow, we've just made it implicit.
- MachineInstr *FaultingLoad = insertFaultingLoad(
+ // Insert a faulting instruction where the conditional branch was
+ // originally. The checks we did earlier ensure that this bit of code
+ // motion is legal. We do not touch the successors list for any basic
+ // block since we haven't changed control flow, we've just made it
+ // implicit.
+ MachineInstr *FaultingInstr = insertFaultingInstr(
NC.getMemOperation(), NC.getCheckBlock(), NC.getNullSucc());
// Now the values defined by MemOperation, if any, are live-in of
// the block of MemOperation.
- // The original load operation may define implicit-defs alongside
- // the loaded value.
+ // The original operation may define implicit-defs alongside
+ // the value.
MachineBasicBlock *MBB = NC.getMemOperation()->getParent();
- for (const MachineOperand &MO : FaultingLoad->operands()) {
+ for (const MachineOperand &MO : FaultingInstr->operands()) {
if (!MO.isReg() || !MO.isDef())
continue;
unsigned Reg = MO.getReg();
ret i32 0
}
- attributes #0 = { "target-features"="+bmi,+bmi2" }
-
define i32 @imp_null_check_gep_load_with_use_dep(i32* %x, i32 %a) {
entry:
%c = icmp eq i32* %x, null
ret i32 undef
}
+ define void @inc_store(i32* %ptr, i32 %val) {
+ entry:
+ %ptr_is_null = icmp eq i32* %ptr, null
+ br i1 %ptr_is_null, label %is_null, label %not_null, !make.implicit !0
+
+ not_null:
+ ret void
+
+ is_null:
+ ret void
+ }
+
+ define void @inc_store_plus_offset(i32* %ptr, i32 %val) {
+ entry:
+ %ptr_is_null = icmp eq i32* %ptr, null
+ br i1 %ptr_is_null, label %is_null, label %not_null, !make.implicit !0
+
+ not_null:
+ ret void
+
+ is_null:
+ ret void
+ }
+
+ define void @inc_store_with_dep(i32* %ptr, i32 %val) {
+ entry:
+ %ptr_is_null = icmp eq i32* %ptr, null
+ br i1 %ptr_is_null, label %is_null, label %not_null, !make.implicit !0
+
+ not_null:
+ ret void
+
+ is_null:
+ ret void
+ }
+
+ define i32 @inc_store_with_dep_in_null(i32* %ptr, i32 %val) {
+ entry:
+ %ptr_is_null = icmp eq i32* %ptr, null
+ br i1 %ptr_is_null, label %is_null, label %not_null, !make.implicit !0
+
+ not_null:
+ ret i32 undef
+
+ is_null:
+ ret i32 undef
+ }
+
+ define void @inc_store_with_volatile(i32* %ptr, i32 %val) {
+ entry:
+ %ptr_is_null = icmp eq i32* %ptr, null
+ br i1 %ptr_is_null, label %is_null, label %not_null, !make.implicit !0
+
+ not_null:
+ ret void
+
+ is_null:
+ ret void
+ }
+
+ define void @inc_store_with_two_dep(i32* %ptr, i32 %val) {
+ entry:
+ %ptr_is_null = icmp eq i32* %ptr, null
+ br i1 %ptr_is_null, label %is_null, label %not_null, !make.implicit !0
+
+ not_null:
+ ret void
+
+ is_null:
+ ret void
+ }
+
+ define void @inc_store_with_redefined_base(i32* %ptr, i32 %val) {
+ entry:
+ %ptr_is_null = icmp eq i32* %ptr, null
+ br i1 %ptr_is_null, label %is_null, label %not_null, !make.implicit !0
+
+ not_null:
+ ret void
+
+ is_null:
+ ret void
+ }
+
+ define i32 @inc_store_with_reused_base(i32* %ptr, i32 %val) {
+ entry:
+ %ptr_is_null = icmp eq i32* %ptr, null
+ br i1 %ptr_is_null, label %is_null, label %not_null, !make.implicit !0
+
+ not_null:
+ ret i32 undef
+
+ is_null:
+ ret i32 undef
+ }
+
+ define i32 @inc_store_across_call(i32* %ptr) {
+ entry:
+ %ptr_is_null = icmp eq i32* %ptr, null
+ br i1 %ptr_is_null, label %is_null, label %not_null, !make.implicit !0
+
+ not_null:
+ call void @f()
+ ret i32 undef
+
+ is_null:
+ ret i32 undef
+ }
+
+ define i32 @inc_store_with_dep_in_dep(i32* %ptr, i32 %val) {
+ entry:
+ %ptr_is_null = icmp eq i32* %ptr, null
+ br i1 %ptr_is_null, label %is_null, label %not_null, !make.implicit !0
+
+ not_null:
+ ret i32 undef
+
+ is_null:
+ ret i32 undef
+ }
+
+ define i32 @inc_store_with_load_over_store(i32* %ptr, i32* %ptr2) {
+ entry:
+ %ptr_is_null = icmp eq i32* %ptr, null
+ br i1 %ptr_is_null, label %is_null, label %not_null, !make.implicit !0
+
+ not_null:
+ ret i32 undef
+
+ is_null:
+ ret i32 undef
+ }
+
+ define i32 @inc_store_with_store_over_load(i32* %ptr, i32* %ptr2) {
+ entry:
+ %ptr_is_null = icmp eq i32* %ptr, null
+ br i1 %ptr_is_null, label %is_null, label %not_null, !make.implicit !0
+
+ not_null:
+ ret i32 undef
+
+ is_null:
+ ret i32 undef
+ }
+
+ define void @inc_store_with_store_over_store(i32* %ptr, i32* %ptr2) {
+ entry:
+ %ptr_is_null = icmp eq i32* %ptr, null
+ br i1 %ptr_is_null, label %is_null, label %not_null, !make.implicit !0
+
+ not_null:
+ ret void
+
+ is_null:
+ ret void
+ }
+
+ define void @inc_store_with_load_and_store(i32* %ptr, i32* %ptr2) {
+ entry:
+ %ptr_is_null = icmp eq i32* %ptr, null
+ br i1 %ptr_is_null, label %is_null, label %not_null, !make.implicit !0
+
+ not_null:
+ ret void
+
+ is_null:
+ ret void
+ }
+
+ attributes #0 = { "target-features"="+bmi,+bmi2" }
+
!0 = !{}
...
---
- { reg: '%esi' }
# CHECK: bb.0.entry:
# CHECK: %eax = MOV32ri 2200000
-# CHECK-NEXT: %eax = FAULTING_LOAD_OP %bb.3.is_null, {{[0-9]+}}, killed %eax, killed %rdi, 1, _, 0, _, implicit-def dead %eflags :: (load 4 from %ir.x)
+# CHECK-NEXT: %eax = FAULTING_OP 1, %bb.3.is_null, {{[0-9]+}}, killed %eax, killed %rdi, 1, _, 0, _, implicit-def dead %eflags :: (load 4 from %ir.x)
# CHECK-NEXT: JMP_1 %bb.1.not_null
body: |
- { reg: '%rsi' }
# CHECK: bb.0.entry:
# CHECK: %rbx = MOV64rr %rdx
-# CHECK-NEXT: %rdi = FAULTING_LOAD_OP %bb.3.is_null, {{[0-9]+}}, killed %rbx, killed %rdi, 1, _, 0, _, implicit-def dead %eflags :: (load 4 from %ir.x)
+# CHECK-NEXT: %rdi = FAULTING_OP 1, %bb.3.is_null, {{[0-9]+}}, killed %rbx, killed %rdi, 1, _, 0, _, implicit-def dead %eflags :: (load 4 from %ir.x)
body: |
bb.0.entry:
'%r12b', '%r13b', '%r14b', '%r15b', '%r12d', '%r13d',
'%r14d', '%r15d', '%r12w', '%r13w', '%r14w', '%r15w' ]
# CHECK: body:
-# CHECK-NOT: FAULTING_LOAD_OP
+# CHECK-NOT: FAULTING_OP
# CHECK: bb.1.stay:
# CHECK: CALL64pcrel32
body: |
name: dependency_live_in_hazard
# CHECK-LABEL: name: dependency_live_in_hazard
# CHECK: bb.0.entry:
-# CHECK-NOT: FAULTING_LOAD_OP
+# CHECK-NOT: FAULTING_OP
# CHECK: bb.1.not_null:
# Make sure that the BEXTR32rm instruction below is not used to emit
...
---
name: use_alternate_load_op
-# CHECK-LABEL: use_alternate_load_op
+# CHECK-LABEL: name: use_alternate_load_op
# CHECK: bb.0.entry:
-# CHECK: %r10 = FAULTING_LOAD_OP %bb.2.is_null, {{[0-9]+}}, killed %rdi, 1, _, 0, _
+# CHECK: %r10 = FAULTING_OP 1, %bb.2.is_null, {{[0-9]+}}, killed %rdi, 1, _, 0, _
# CHECK-NEXT: JMP_1 %bb.1.not_null
# CHECK: bb.1.not_null
...
---
name: imp_null_check_gep_load_with_use_dep
+# CHECK-LABEL: name: imp_null_check_gep_load_with_use_dep
# CHECK: bb.0.entry:
-# CHECK: %eax = FAULTING_LOAD_OP %bb.2.is_null, {{[0-9]+}}, killed %rdi, 1, _, 0, _, implicit-def %rax :: (load 4 from %ir.x)
+# CHECK: %eax = FAULTING_OP 1, %bb.2.is_null, {{[0-9]+}}, killed %rdi, 1, _, 0, _, implicit-def %rax :: (load 4 from %ir.x)
# CHECK-NEXT: JMP_1 %bb.1.not_null
alignment: 4
tracksRegLiveness: true
...
---
name: imp_null_check_load_with_base_sep
+# CHECK-LABEL: name: imp_null_check_load_with_base_sep
# CHECK: bb.0.entry:
# CHECK: %rsi = ADD64rr %rsi, %rdi, implicit-def dead %eflags
-# CHECK-NEXT: %esi = FAULTING_LOAD_OP %bb.2.is_null, {{[0-9]+}}, killed %esi, %rdi, 1, _, 0, _, implicit-def dead %eflags
+# CHECK-NEXT: %esi = FAULTING_OP 1, %bb.2.is_null, {{[0-9]+}}, killed %esi, %rdi, 1, _, 0, _, implicit-def dead %eflags
# CHECK-NEXT: JMP_1 %bb.1.not_null
alignment: 4
tracksRegLiveness: true
RETQ %eax
...
+---
+name: inc_store
+# CHECK-LABEL: name: inc_store
+# CHECK: bb.0.entry:
+# CHECK: _ = FAULTING_OP 3, %bb.2.is_null, {{[0-9]+}}, killed %rdi, 1, _, 0, _, killed %rsi
+# CHECK-NEXT: JMP_1 %bb.1.not_null
+# CHECK: bb.1.not_null
+
+alignment: 4
+tracksRegLiveness: true
+liveins:
+ - { reg: '%rdi' }
+ - { reg: '%rsi' }
+body: |
+ bb.0.entry:
+ successors: %bb.2.is_null, %bb.1.not_null
+ liveins: %rdi, %rsi
+
+ TEST64rr %rdi, %rdi, implicit-def %eflags
+ JE_1 %bb.2.is_null, implicit killed %eflags
+
+ bb.1.not_null:
+ liveins: %rdi, %rsi
+
+ MOV64mr killed %rdi, 1, _, 0, _, killed %rsi
+ RETQ
+
+ bb.2.is_null:
+ RETQ
+
+...
+---
+name: inc_store_plus_offset
+# CHECK-LABEL: inc_store_plus_offset
+# CHECK: bb.0.entry:
+# CHECK: _ = FAULTING_OP 3, %bb.2.is_null, {{[0-9]+}}, killed %rdi, 1, _, 16, _, killed %rsi
+# CHECK-NEXT: JMP_1 %bb.1.not_null
+# CHECK: bb.1.not_null
+
+alignment: 4
+tracksRegLiveness: true
+liveins:
+ - { reg: '%rdi' }
+ - { reg: '%rsi' }
+body: |
+ bb.0.entry:
+ successors: %bb.2.is_null, %bb.1.not_null
+ liveins: %rdi, %rsi
+
+ TEST64rr %rdi, %rdi, implicit-def %eflags
+ JE_1 %bb.2.is_null, implicit killed %eflags
+
+ bb.1.not_null:
+ liveins: %rdi, %rsi
+
+ MOV64mr killed %rdi, 1, _, 16, _, killed %rsi
+ RETQ
+
+ bb.2.is_null:
+ RETQ
+
+...
+---
+name: inc_store_with_dep
+# CHECK-LABEL: inc_store_with_dep
+# CHECK: bb.0.entry:
+# CHECK: %esi = ADD32rr killed %esi, killed %esi, implicit-def dead %eflags
+# CHECK-NEXT: _ = FAULTING_OP 3, %bb.2.is_null, {{[0-9]+}}, killed %rdi, 1, _, 16, _, killed %esi
+# CHECK-NEXT: JMP_1 %bb.1.not_null
+# CHECK: bb.1.not_null
+
+alignment: 4
+tracksRegLiveness: true
+liveins:
+ - { reg: '%rdi' }
+ - { reg: '%rsi' }
+body: |
+ bb.0.entry:
+ successors: %bb.2.is_null, %bb.1.not_null
+ liveins: %rdi, %rsi
+
+ TEST64rr %rdi, %rdi, implicit-def %eflags
+ JE_1 %bb.2.is_null, implicit killed %eflags
+
+ bb.1.not_null:
+ liveins: %rdi, %rsi
+
+ %esi = ADD32rr killed %esi, killed %esi, implicit-def dead %eflags
+ MOV32mr killed %rdi, 1, _, 16, _, killed %esi
+ RETQ
+
+ bb.2.is_null:
+ RETQ
+
+...
+---
+name: inc_store_with_dep_in_null
+# CHECK-LABEL: inc_store_with_dep_in_null
+# CHECK: bb.0.entry:
+# CHECK: TEST64rr %rdi, %rdi, implicit-def %eflags
+# CHECK-NEXT: JE_1 %bb.2.is_null, implicit killed %eflags
+# CHECK: bb.1.not_null
+
+alignment: 4
+tracksRegLiveness: true
+liveins:
+ - { reg: '%rdi' }
+ - { reg: '%rsi' }
+body: |
+ bb.0.entry:
+ successors: %bb.2.is_null, %bb.1.not_null
+ liveins: %rdi, %rsi
+
+ TEST64rr %rdi, %rdi, implicit-def %eflags
+ JE_1 %bb.2.is_null, implicit killed %eflags
+
+ bb.1.not_null:
+ liveins: %rdi, %rsi
+
+ %esi = ADD32rr %esi, %esi, implicit-def dead %eflags
+ MOV32mr killed %rdi, 1, _, 0, _, %esi
+ %eax = MOV32rr killed %esi
+ RETQ %eax
+
+ bb.2.is_null:
+ liveins: %rsi
+
+ %eax = MOV32rr killed %esi
+ RETQ %eax
+
+...
+---
+name: inc_store_with_volatile
+# CHECK-LABEL: inc_store_with_volatile
+# CHECK: bb.0.entry:
+# CHECK: TEST64rr %rdi, %rdi, implicit-def %eflags
+# CHECK-NEXT: JE_1 %bb.2.is_null, implicit killed %eflags
+# CHECK: bb.1.not_null
+
+alignment: 4
+tracksRegLiveness: true
+liveins:
+ - { reg: '%rdi' }
+ - { reg: '%rsi' }
+body: |
+ bb.0.entry:
+ successors: %bb.2.is_null, %bb.1.not_null
+ liveins: %rdi, %rsi
+
+ TEST64rr %rdi, %rdi, implicit-def %eflags
+ JE_1 %bb.2.is_null, implicit killed %eflags
+
+ bb.1.not_null:
+ liveins: %rdi, %rsi
+
+ MOV32mr killed %rdi, 1, _, 0, _, killed %esi :: (volatile store 4 into %ir.ptr)
+ RETQ
+
+ bb.2.is_null:
+ RETQ
+
+...
+---
+name: inc_store_with_two_dep
+# CHECK-LABEL: inc_store_with_two_dep
+# CHECK: bb.0.entry:
+# CHECK: TEST64rr %rdi, %rdi, implicit-def %eflags
+# CHECK-NEXT: JE_1 %bb.2.is_null, implicit killed %eflags
+# CHECK: bb.1.not_null
+
+alignment: 4
+tracksRegLiveness: true
+liveins:
+ - { reg: '%rdi' }
+ - { reg: '%rsi' }
+body: |
+ bb.0.entry:
+ successors: %bb.2.is_null, %bb.1.not_null
+ liveins: %rdi, %rsi
+
+ TEST64rr %rdi, %rdi, implicit-def %eflags
+ JE_1 %bb.2.is_null, implicit killed %eflags
+
+ bb.1.not_null:
+ liveins: %rdi, %rsi
+
+ %esi = ADD32rr killed %esi, killed %esi, implicit-def dead %eflags
+ %esi = ADD32ri killed %esi, 15, implicit-def dead %eflags
+ MOV32mr killed %rdi, 1, _, 16, _, killed %esi
+ RETQ
+
+ bb.2.is_null:
+ RETQ
+
+...
+---
+name: inc_store_with_redefined_base
+# CHECK-LABEL: inc_store_with_redefined_base
+# CHECK: bb.0.entry:
+# CHECK: TEST64rr %rdi, %rdi, implicit-def %eflags
+# CHECK-NEXT: JE_1 %bb.2.is_null, implicit killed %eflags
+# CHECK: bb.1.not_null
+
+alignment: 4
+tracksRegLiveness: true
+liveins:
+ - { reg: '%rdi' }
+ - { reg: '%rsi' }
+body: |
+ bb.0.entry:
+ successors: %bb.2.is_null, %bb.1.not_null
+ liveins: %rdi, %rsi
+
+ TEST64rr %rdi, %rdi, implicit-def %eflags
+ JE_1 %bb.2.is_null, implicit killed %eflags
+
+ bb.1.not_null:
+ liveins: %rdi, %rsi
+
+ %rdi = ADD64rr killed %rdi, killed %rdi, implicit-def dead %eflags
+ MOV32mr killed %rdi, 1, _, 16, _, killed %esi
+ RETQ
+
+ bb.2.is_null:
+ RETQ
+
+...
+---
+name: inc_store_with_reused_base
+# CHECK-LABEL: inc_store_with_reused_base
+# CHECK: bb.0.entry:
+# CHECK: _ = FAULTING_OP 3, %bb.2.is_null, {{[0-9]+}}, killed %rdi, 1, _, 16, _, killed %esi
+# CHECK-NEXT: JMP_1 %bb.1.not_null
+# CHECK: bb.1.not_null
+
+alignment: 4
+tracksRegLiveness: true
+liveins:
+ - { reg: '%rdi' }
+ - { reg: '%rsi' }
+body: |
+ bb.0.entry:
+ successors: %bb.2.is_null, %bb.1.not_null
+ liveins: %rdi, %rsi
+
+ TEST64rr %rdi, %rdi, implicit-def %eflags
+ JE_1 %bb.2.is_null, implicit killed %eflags
+
+ bb.1.not_null:
+ liveins: %rdi, %rsi
+
+ %rax = MOV64rr %rdi
+ MOV32mr killed %rdi, 1, _, 16, _, killed %esi
+ RETQ %eax
+
+ bb.2.is_null:
+ %rax = XOR64rr undef %rax, undef %rax, implicit-def dead %eflags
+ RETQ %eax
+
+...
+---
+name: inc_store_across_call
+# CHECK-LABEL: inc_store_across_call
+# CHECK: bb.0.entry:
+# CHECK: TEST64rr %rbx, %rbx, implicit-def %eflags
+# CHECK-NEXT: JE_1 %bb.2.is_null, implicit killed %eflags
+# CHECK: bb.1.not_null
+
+alignment: 4
+tracksRegLiveness: true
+liveins:
+ - { reg: '%rdi' }
+calleeSavedRegisters: [ '%bh', '%bl', '%bp', '%bpl', '%bx', '%ebp', '%ebx',
+ '%rbp', '%rbx', '%r12', '%r13', '%r14', '%r15',
+ '%r12b', '%r13b', '%r14b', '%r15b', '%r12d', '%r13d',
+ '%r14d', '%r15d', '%r12w', '%r13w', '%r14w', '%r15w' ]
+body: |
+ bb.0.entry:
+ successors: %bb.2.is_null, %bb.1.not_null
+ liveins: %rdi, %rbx
+
+ frame-setup PUSH64r killed %rbx, implicit-def %rsp, implicit %rsp
+ CFI_INSTRUCTION def_cfa_offset 16
+ CFI_INSTRUCTION offset %rbx, -16
+ %rbx = MOV64rr killed %rdi
+ TEST64rr %rbx, %rbx, implicit-def %eflags
+ JE_1 %bb.2.is_null, implicit killed %eflags
+
+ bb.1.not_null:
+ liveins: %rbx
+
+ CALL64pcrel32 @f, csr_64, implicit %rsp, implicit-def %rsp
+ MOV32mi %rbx, 1, _, 0, _, 20
+ %rax = MOV64rr killed %rbx
+ %rbx = POP64r implicit-def %rsp, implicit %rsp
+ RETQ %eax
+
+ bb.2.is_null:
+ %eax = XOR32rr undef %eax, undef %eax, implicit-def dead %eflags
+ %rbx = POP64r implicit-def %rsp, implicit %rsp
+ RETQ %eax
+
+...
+---
+name: inc_store_with_dep_in_dep
+# CHECK-LABEL: inc_store_with_dep_in_dep
+# CHECK: bb.0.entry:
+# CHECK: TEST64rr %rdi, %rdi, implicit-def %eflags
+# CHECK-NEXT: JE_1 %bb.2.is_null, implicit killed %eflags
+# CHECK: bb.1.not_null
+
+alignment: 4
+tracksRegLiveness: true
+liveins:
+ - { reg: '%rdi' }
+ - { reg: '%rsi' }
+body: |
+ bb.0.entry:
+ successors: %bb.2.is_null, %bb.1.not_null
+ liveins: %rdi, %rsi
+
+ TEST64rr %rdi, %rdi, implicit-def %eflags
+ JE_1 %bb.2.is_null, implicit killed %eflags
+
+ bb.1.not_null:
+ liveins: %rdi, %rsi
+
+ %eax = MOV32rr %esi
+ %esi = ADD32ri killed %esi, 15, implicit-def dead %eflags
+ MOV32mr killed %rdi, 1, _, 0, _, killed %esi
+ RETQ %eax
+
+ bb.2.is_null:
+ %eax = XOR32rr undef %eax, undef %eax, implicit-def dead %eflags
+ RETQ %eax
+
+...
+---
+name: inc_store_with_load_over_store
+# CHECK-LABEL: inc_store_with_load_over_store
+# CHECK: bb.0.entry:
+# CHECK: TEST64rr %rdi, %rdi, implicit-def %eflags
+# CHECK-NEXT: JE_1 %bb.2.is_null, implicit killed %eflags
+# CHECK: bb.1.not_null
+
+alignment: 4
+tracksRegLiveness: true
+liveins:
+ - { reg: '%rdi' }
+ - { reg: '%rsi' }
+body: |
+ bb.0.entry:
+ successors: %bb.2.is_null, %bb.1.not_null
+ liveins: %rdi, %rsi
+
+ TEST64rr %rdi, %rdi, implicit-def %eflags
+ JE_1 %bb.2.is_null, implicit killed %eflags
+
+ bb.1.not_null:
+ liveins: %rdi, %rsi
+
+ MOV32mi killed %rsi, 1, _, 0, _, 2
+ %eax = MOV32rm killed %rdi, 1, _, 0, _
+ RETQ %eax
+
+ bb.2.is_null:
+ %eax = XOR32rr undef %eax, undef %eax, implicit-def dead %eflags
+ RETQ %eax
+
+...
+---
+name: inc_store_with_store_over_load
+# CHECK-LABEL: inc_store_with_store_over_load
+# CHECK: bb.0.entry:
+# CHECK: TEST64rr %rdi, %rdi, implicit-def %eflags
+# CHECK-NEXT: JE_1 %bb.2.is_null, implicit killed %eflags
+# CHECK: bb.1.not_null
+
+alignment: 4
+tracksRegLiveness: true
+liveins:
+ - { reg: '%rdi' }
+ - { reg: '%rsi' }
+body: |
+ bb.0.entry:
+ successors: %bb.2.is_null, %bb.1.not_null
+ liveins: %rdi, %rsi
+
+ TEST64rr %rdi, %rdi, implicit-def %eflags
+ JE_1 %bb.2.is_null, implicit killed %eflags
+
+ bb.1.not_null:
+ liveins: %rdi, %rsi
+
+ %eax = MOV32rm killed %rsi, 1, _, 0, _
+ MOV32mi killed %rdi, 1, _, 0, _, 2
+ RETQ %eax
+
+ bb.2.is_null:
+ %eax = XOR32rr undef %eax, undef %eax, implicit-def dead %eflags
+ RETQ %eax
+
+...
+---
+name: inc_store_with_store_over_store
+# CHECK-LABEL: inc_store_with_store_over_store
+# CHECK: bb.0.entry:
+# CHECK: TEST64rr %rdi, %rdi, implicit-def %eflags
+# CHECK-NEXT: JE_1 %bb.2.is_null, implicit killed %eflags
+# CHECK: bb.1.not_null
+
+alignment: 4
+tracksRegLiveness: true
+liveins:
+ - { reg: '%rdi' }
+ - { reg: '%rsi' }
+body: |
+ bb.0.entry:
+ successors: %bb.2.is_null, %bb.1.not_null
+ liveins: %rdi, %rsi
+
+ TEST64rr %rdi, %rdi, implicit-def %eflags
+ JE_1 %bb.2.is_null, implicit killed %eflags
+
+ bb.1.not_null:
+ liveins: %rdi, %rsi
+
+ MOV32mi killed %rsi, 1, _, 0, _, 3
+ MOV32mi killed %rdi, 1, _, 0, _, 2
+ RETQ
+
+ bb.2.is_null:
+ RETQ
+
+...
+---
+name: inc_store_with_load_and_store
+# CHECK-LABEL: inc_store_with_load_and_store
+# CHECK: bb.0.entry:
+# CHECK: _ = FAULTING_OP 2, %bb.2.is_null, {{[0-9]+}}, killed %rdi, 1, _, 0, _, killed %esi, implicit-def dead %eflags
+# CHECK-NEXT: JMP_1 %bb.1.not_null
+# CHECK: bb.1.not_null
+
+alignment: 4
+tracksRegLiveness: true
+liveins:
+ - { reg: '%rdi' }
+ - { reg: '%rsi' }
+body: |
+ bb.0.entry:
+ successors: %bb.2.is_null, %bb.1.not_null
+ liveins: %rdi, %rsi
+
+ TEST64rr %rdi, %rdi, implicit-def %eflags
+ JE_1 %bb.2.is_null, implicit killed %eflags
+
+ bb.1.not_null:
+ liveins: %rdi, %rsi
+
+ %esi = ADD32rr %esi, %esi, implicit-def dead %eflags
+ ADD32mr killed %rdi, 1, _, 0, _, killed %esi, implicit-def dead %eflags
+ RETQ
+
+ bb.2.is_null:
+ RETQ
+
+...