def int_hwasan_check_memaccess :
Intrinsic<[], [llvm_ptr_ty, llvm_ptr_ty, llvm_i32_ty], [IntrInaccessibleMemOnly, ImmArg<2>]>;
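+// Like int_hwasan_check_memaccess, but the outlined check also understands
+// short granules. Operands for both intrinsics: shadow base pointer, the
+// pointer being checked, and an immediate AccessInfo word (bits 0-3 hold the
+// log2 of the access size, bit 4 is set for writes).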
+def int_hwasan_check_memaccess_shortgranules :
+ Intrinsic<[], [llvm_ptr_ty, llvm_ptr_ty, llvm_i32_ty], [IntrInaccessibleMemOnly, ImmArg<2>]>;
// Xray intrinsics
//===----------------------------------------------------------------------===//
void LowerPATCHABLE_FUNCTION_EXIT(const MachineInstr &MI);
void LowerPATCHABLE_TAIL_CALL(const MachineInstr &MI);
- std::map<std::pair<unsigned, uint32_t>, MCSymbol *> HwasanMemaccessSymbols;
+ std::map<std::tuple<unsigned, bool, uint32_t>, MCSymbol *> HwasanMemaccessSymbols;
void LowerHWASAN_CHECK_MEMACCESS(const MachineInstr &MI);
void EmitHwasanMemaccessSymbols(Module &M);
void AArch64AsmPrinter::LowerHWASAN_CHECK_MEMACCESS(const MachineInstr &MI) {
Register Reg = MI.getOperand(0).getReg();
+ bool IsShort =
+ MI.getOpcode() == AArch64::HWASAN_CHECK_MEMACCESS_SHORTGRANULES;
uint32_t AccessInfo = MI.getOperand(1).getImm();
- MCSymbol *&Sym = HwasanMemaccessSymbols[{Reg, AccessInfo}];
+ MCSymbol *&Sym = HwasanMemaccessSymbols[{Reg, IsShort, AccessInfo}];
if (!Sym) {
// FIXME: Make this work on non-ELF.
if (!TM.getTargetTriple().isOSBinFormatELF())
  report_fatal_error("llvm.hwasan.check.memaccess only supported on ELF");
std::string SymName = "__hwasan_check_x" + utostr(Reg - AArch64::X0) + "_" +
utostr(AccessInfo);
+ if (IsShort)
+ SymName += "_short";
Sym = OutContext.getOrCreateSymbol(SymName);
}
std::unique_ptr<MCSubtargetInfo> STI(
TM.getTarget().createMCSubtargetInfo(TT.str(), "", ""));
- MCSymbol *HwasanTagMismatchSym =
+ MCSymbol *HwasanTagMismatchV1Sym =
OutContext.getOrCreateSymbol("__hwasan_tag_mismatch");
+ MCSymbol *HwasanTagMismatchV2Sym =
+ OutContext.getOrCreateSymbol("__hwasan_tag_mismatch_v2");
- const MCSymbolRefExpr *HwasanTagMismatchRef =
- MCSymbolRefExpr::create(HwasanTagMismatchSym, OutContext);
+ const MCSymbolRefExpr *HwasanTagMismatchV1Ref =
+ MCSymbolRefExpr::create(HwasanTagMismatchV1Sym, OutContext);
+ const MCSymbolRefExpr *HwasanTagMismatchV2Ref =
+ MCSymbolRefExpr::create(HwasanTagMismatchV2Sym, OutContext);
for (auto &P : HwasanMemaccessSymbols) {
- unsigned Reg = P.first.first;
- uint32_t AccessInfo = P.first.second;
+ unsigned Reg = std::get<0>(P.first);
+ bool IsShort = std::get<1>(P.first);
+ uint32_t AccessInfo = std::get<2>(P.first);
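+ // Checks that handle short granules inline report failures through the new
+ // __hwasan_tag_mismatch_v2 runtime entry point; plain checks keep calling
+ // __hwasan_tag_mismatch so they stay compatible with older runtimes.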
+ const MCSymbolRefExpr *HwasanTagMismatchRef =
+ IsShort ? HwasanTagMismatchV2Ref : HwasanTagMismatchV1Ref;
MCSymbol *Sym = P.second;
OutStreamer->SwitchSection(OutContext.getELFSection(
.addReg(Reg)
.addImm(AArch64_AM::getShifterImm(AArch64_AM::LSR, 56)),
*STI);
- MCSymbol *HandlePartialSym = OutContext.createTempSymbol();
+ MCSymbol *HandleMismatchOrPartialSym = OutContext.createTempSymbol();
OutStreamer->EmitInstruction(
MCInstBuilder(AArch64::Bcc)
.addImm(AArch64CC::NE)
- .addExpr(MCSymbolRefExpr::create(HandlePartialSym, OutContext)),
+ .addExpr(MCSymbolRefExpr::create(HandleMismatchOrPartialSym,
+ OutContext)),
*STI);
MCSymbol *ReturnSym = OutContext.createTempSymbol();
OutStreamer->EmitLabel(ReturnSym);
OutStreamer->EmitInstruction(
MCInstBuilder(AArch64::RET).addReg(AArch64::LR), *STI);
- OutStreamer->EmitLabel(HandlePartialSym);
+ OutStreamer->EmitLabel(HandleMismatchOrPartialSym);
- OutStreamer->EmitInstruction(MCInstBuilder(AArch64::SUBSWri)
- .addReg(AArch64::WZR)
- .addReg(AArch64::W16)
- .addImm(15)
- .addImm(0),
- *STI);
- MCSymbol *HandleMismatchSym = OutContext.createTempSymbol();
- OutStreamer->EmitInstruction(
- MCInstBuilder(AArch64::Bcc)
- .addImm(AArch64CC::HI)
- .addExpr(MCSymbolRefExpr::create(HandleMismatchSym, OutContext)),
- *STI);
-
- OutStreamer->EmitInstruction(
- MCInstBuilder(AArch64::ANDXri)
- .addReg(AArch64::X17)
- .addReg(Reg)
- .addImm(AArch64_AM::encodeLogicalImmediate(0xf, 64)),
- *STI);
- unsigned Size = 1 << (AccessInfo & 0xf);
- if (Size != 1)
- OutStreamer->EmitInstruction(MCInstBuilder(AArch64::ADDXri)
- .addReg(AArch64::X17)
- .addReg(AArch64::X17)
- .addImm(Size - 1)
+ if (IsShort) {
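+ // Shadow values 1..15 denote short granules: only that many bytes of the
+ // 16-byte granule are addressable and the real tag is stored in the
+ // granule's last byte. Shadow values above 15 are ordinary tag mismatches.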
+ OutStreamer->EmitInstruction(MCInstBuilder(AArch64::SUBSWri)
+ .addReg(AArch64::WZR)
+ .addReg(AArch64::W16)
+ .addImm(15)
+ .addImm(0),
+ *STI);
- OutStreamer->EmitInstruction(MCInstBuilder(AArch64::SUBSWrs)
- .addReg(AArch64::WZR)
- .addReg(AArch64::W16)
- .addReg(AArch64::W17)
- .addImm(0),
- *STI);
- OutStreamer->EmitInstruction(
- MCInstBuilder(AArch64::Bcc)
- .addImm(AArch64CC::LS)
- .addExpr(MCSymbolRefExpr::create(HandleMismatchSym, OutContext)),
- *STI);
-
- OutStreamer->EmitInstruction(
- MCInstBuilder(AArch64::ORRXri)
- .addReg(AArch64::X16)
- .addReg(Reg)
- .addImm(AArch64_AM::encodeLogicalImmediate(0xf, 64)),
- *STI);
- OutStreamer->EmitInstruction(MCInstBuilder(AArch64::LDRBBui)
- .addReg(AArch64::W16)
- .addReg(AArch64::X16)
- .addImm(0),
- *STI);
- OutStreamer->EmitInstruction(
- MCInstBuilder(AArch64::SUBSXrs)
- .addReg(AArch64::XZR)
- .addReg(AArch64::X16)
- .addReg(Reg)
- .addImm(AArch64_AM::getShifterImm(AArch64_AM::LSR, 56)),
- *STI);
- OutStreamer->EmitInstruction(
- MCInstBuilder(AArch64::Bcc)
- .addImm(AArch64CC::EQ)
- .addExpr(MCSymbolRefExpr::create(ReturnSym, OutContext)),
- *STI);
+ MCSymbol *HandleMismatchSym = OutContext.createTempSymbol();
+ OutStreamer->EmitInstruction(
+ MCInstBuilder(AArch64::Bcc)
+ .addImm(AArch64CC::HI)
+ .addExpr(MCSymbolRefExpr::create(HandleMismatchSym, OutContext)),
+ *STI);
+
+ OutStreamer->EmitInstruction(
+ MCInstBuilder(AArch64::ANDXri)
+ .addReg(AArch64::X17)
+ .addReg(Reg)
+ .addImm(AArch64_AM::encodeLogicalImmediate(0xf, 64)),
+ *STI);
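+ // x17 now holds the offset of the first accessed byte within its granule.
+ // The low four bits of AccessInfo encode the access size as a power of two,
+ // so after adding Size - 1, x17 is the offset of the last accessed byte,
+ // which is compared against the short granule size in w16.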
+ unsigned Size = 1 << (AccessInfo & 0xf);
+ if (Size != 1)
+ OutStreamer->EmitInstruction(MCInstBuilder(AArch64::ADDXri)
+ .addReg(AArch64::X17)
+ .addReg(AArch64::X17)
+ .addImm(Size - 1)
+ .addImm(0),
+ *STI);
+ OutStreamer->EmitInstruction(MCInstBuilder(AArch64::SUBSWrs)
+ .addReg(AArch64::WZR)
+ .addReg(AArch64::W16)
+ .addReg(AArch64::W17)
+ .addImm(0),
+ *STI);
+ OutStreamer->EmitInstruction(
+ MCInstBuilder(AArch64::Bcc)
+ .addImm(AArch64CC::LS)
+ .addExpr(MCSymbolRefExpr::create(HandleMismatchSym, OutContext)),
+ *STI);
+
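+ // The access fits within the short granule; load the real tag from the
+ // last byte of the granule and compare it against the pointer's tag.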
+ OutStreamer->EmitInstruction(
+ MCInstBuilder(AArch64::ORRXri)
+ .addReg(AArch64::X16)
+ .addReg(Reg)
+ .addImm(AArch64_AM::encodeLogicalImmediate(0xf, 64)),
+ *STI);
+ OutStreamer->EmitInstruction(MCInstBuilder(AArch64::LDRBBui)
+ .addReg(AArch64::W16)
+ .addReg(AArch64::X16)
+ .addImm(0),
+ *STI);
+ OutStreamer->EmitInstruction(
+ MCInstBuilder(AArch64::SUBSXrs)
+ .addReg(AArch64::XZR)
+ .addReg(AArch64::X16)
+ .addReg(Reg)
+ .addImm(AArch64_AM::getShifterImm(AArch64_AM::LSR, 56)),
+ *STI);
+ OutStreamer->EmitInstruction(
+ MCInstBuilder(AArch64::Bcc)
+ .addImm(AArch64CC::EQ)
+ .addExpr(MCSymbolRefExpr::create(ReturnSym, OutContext)),
+ *STI);
+
+ OutStreamer->EmitLabel(HandleMismatchSym);
+ }
- OutStreamer->EmitLabel(HandleMismatchSym);
OutStreamer->EmitInstruction(MCInstBuilder(AArch64::STPXpre)
.addReg(AArch64::SP)
.addReg(AArch64::X0)
MCInstBuilder(AArch64::ADRP)
.addReg(AArch64::X16)
.addExpr(AArch64MCExpr::create(
- HwasanTagMismatchRef,
- AArch64MCExpr::VariantKind::VK_GOT_PAGE, OutContext)),
+ HwasanTagMismatchRef, AArch64MCExpr::VariantKind::VK_GOT_PAGE,
+ OutContext)),
*STI);
OutStreamer->EmitInstruction(
MCInstBuilder(AArch64::LDRXui)
.addReg(AArch64::X16)
.addReg(AArch64::X16)
.addExpr(AArch64MCExpr::create(
- HwasanTagMismatchRef,
- AArch64MCExpr::VariantKind::VK_GOT_LO12, OutContext)),
+ HwasanTagMismatchRef, AArch64MCExpr::VariantKind::VK_GOT_LO12,
+ OutContext)),
*STI);
OutStreamer->EmitInstruction(
MCInstBuilder(AArch64::BR).addReg(AArch64::X16), *STI);
return;
case AArch64::HWASAN_CHECK_MEMACCESS:
+ case AArch64::HWASAN_CHECK_MEMACCESS_SHORTGRANULES:
LowerHWASAN_CHECK_MEMACCESS(*MI);
return;
(outs), (ins GPR64noip:$ptr, i32imm:$accessinfo),
[(int_hwasan_check_memaccess X9, GPR64noip:$ptr, (i32 timm:$accessinfo))]>,
Sched<[]>;
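+// Identical to HWASAN_CHECK_MEMACCESS except that it selects the short
+// granule aware intrinsic. Both patterns pin the shadow base argument to X9,
+// which is where the outlined check expects to find it.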
+def HWASAN_CHECK_MEMACCESS_SHORTGRANULES : Pseudo<
+ (outs), (ins GPR64noip:$ptr, i32imm:$accessinfo),
+ [(int_hwasan_check_memaccess_shortgranules X9, GPR64noip:$ptr, (i32 timm:$accessinfo))]>,
+ Sched<[]>;
}
// The cycle counter PMC register is PMCCNTR_EL0.
cl::desc("instrument landing pads"), cl::Hidden,
cl::init(false), cl::ZeroOrMore);
+static cl::opt<bool> ClUseShortGranules(
+ "hwasan-use-short-granules",
+ cl::desc("use short granules in allocas and outlined checks"), cl::Hidden,
+ cl::init(false), cl::ZeroOrMore);
+
static cl::opt<bool> ClInstrumentPersonalityFunctions(
"hwasan-instrument-personality-functions",
cl::desc("instrument personality functions"), cl::Hidden, cl::init(false),
bool CompileKernel;
bool Recover;
+ bool UseShortGranules;
bool InstrumentLandingPads;
Function *HwasanCtorFunction;
HwasanCtorFunction = nullptr;
// Older versions of Android do not have the required runtime support for
- // global or personality function instrumentation. On other platforms we
- // currently require using the latest version of the runtime.
+ // short granules, global or personality function instrumentation. On other
+ // platforms we currently require using the latest version of the runtime.
bool NewRuntime =
!TargetTriple.isAndroid() || !TargetTriple.isAndroidVersionLT(30);
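+ // Default to short granules whenever the runtime is new enough to support
+ // them; the command line flag overrides that choice when it is given.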
+ UseShortGranules =
+ ClUseShortGranules.getNumOccurrences() ? ClUseShortGranules : NewRuntime;
+
// If we don't have personality function support, fall back to landing pads.
InstrumentLandingPads = ClInstrumentLandingPads.getNumOccurrences()
? ClInstrumentLandingPads
TargetTriple.isOSBinFormatELF() && !Recover) {
Module *M = IRB.GetInsertBlock()->getParent()->getParent();
Ptr = IRB.CreateBitCast(Ptr, Int8PtrTy);
- IRB.CreateCall(
- Intrinsic::getDeclaration(M, Intrinsic::hwasan_check_memaccess),
- {shadowBase(), Ptr, ConstantInt::get(Int32Ty, AccessInfo)});
+ IRB.CreateCall(Intrinsic::getDeclaration(
+ M, UseShortGranules
+ ? Intrinsic::hwasan_check_memaccess_shortgranules
+ : Intrinsic::hwasan_check_memaccess),
+ {shadowBase(), Ptr, ConstantInt::get(Int32Ty, AccessInfo)});
return;
}
bool HWAddressSanitizer::tagAlloca(IRBuilder<> &IRB, AllocaInst *AI,
Value *Tag, size_t Size) {
size_t AlignedSize = alignTo(Size, Mapping.getObjectAlignment());
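+ // Without short granule support, round the size up to a whole number of
+ // granules so the entire region is tagged with the full tag instead of
+ // recording a short granule size in the last granule's shadow byte.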
+ if (!UseShortGranules)
+ Size = AlignedSize;
Value *JustTag = IRB.CreateTrunc(Tag, IRB.getInt8Ty());
if (ClInstrumentWithCalls) {
; CHECK-NEXT: .cfi_def_cfa_offset 16
; CHECK-NEXT: .cfi_offset w30, -16
; CHECK-NEXT: mov x9, x0
- ; CHECK-NEXT: bl __hwasan_check_x1_123
+ ; CHECK-NEXT: bl __hwasan_check_x1_1
; CHECK-NEXT: mov x0, x1
; CHECK-NEXT: ldr x30, [sp], #16
; CHECK-NEXT: ret
- call void @llvm.hwasan.check.memaccess(i8* %x0, i8* %x1, i32 123)
+ call void @llvm.hwasan.check.memaccess(i8* %x0, i8* %x1, i32 1)
ret i8* %x1
}
; CHECK-NEXT: .cfi_def_cfa_offset 16
; CHECK-NEXT: .cfi_offset w30, -16
; CHECK-NEXT: mov x9, x1
- ; CHECK-NEXT: bl __hwasan_check_x0_456
+ ; CHECK-NEXT: bl __hwasan_check_x0_2_short
; CHECK-NEXT: ldr x30, [sp], #16
; CHECK-NEXT: ret
- call void @llvm.hwasan.check.memaccess(i8* %x1, i8* %x0, i32 456)
+ call void @llvm.hwasan.check.memaccess.shortgranules(i8* %x1, i8* %x0, i32 2)
ret i8* %x0
}
declare void @llvm.hwasan.check.memaccess(i8*, i8*, i32)
+declare void @llvm.hwasan.check.memaccess.shortgranules(i8*, i8*, i32)
-; CHECK: .section .text.hot,"axG",@progbits,__hwasan_check_x0_456,comdat
-; CHECK-NEXT: .type __hwasan_check_x0_456,@function
-; CHECK-NEXT: .weak __hwasan_check_x0_456
-; CHECK-NEXT: .hidden __hwasan_check_x0_456
-; CHECK-NEXT: __hwasan_check_x0_456:
+; CHECK: .section .text.hot,"axG",@progbits,__hwasan_check_x0_2_short,comdat
+; CHECK-NEXT: .type __hwasan_check_x0_2_short,@function
+; CHECK-NEXT: .weak __hwasan_check_x0_2_short
+; CHECK-NEXT: .hidden __hwasan_check_x0_2_short
+; CHECK-NEXT: __hwasan_check_x0_2_short:
; CHECK-NEXT: ubfx x16, x0, #4, #52
; CHECK-NEXT: ldrb w16, [x9, x16]
; CHECK-NEXT: cmp x16, x0, lsr #56
; CHECK-NEXT: cmp w16, #15
; CHECK-NEXT: b.hi .Ltmp2
; CHECK-NEXT: and x17, x0, #0xf
-; CHECK-NEXT: add x17, x17, #255
+; CHECK-NEXT: add x17, x17, #3
; CHECK-NEXT: cmp w16, w17
; CHECK-NEXT: b.ls .Ltmp2
; CHECK-NEXT: orr x16, x0, #0xf
; CHECK-NEXT: .Ltmp2:
; CHECK-NEXT: stp x0, x1, [sp, #-256]!
; CHECK-NEXT: stp x29, x30, [sp, #232]
-; CHECK-NEXT: mov x1, #456
-; CHECK-NEXT: adrp x16, :got:__hwasan_tag_mismatch
-; CHECK-NEXT: ldr x16, [x16, :got_lo12:__hwasan_tag_mismatch]
+; CHECK-NEXT: mov x1, #2
+; CHECK-NEXT: adrp x16, :got:__hwasan_tag_mismatch_v2
+; CHECK-NEXT: ldr x16, [x16, :got_lo12:__hwasan_tag_mismatch_v2]
; CHECK-NEXT: br x16
-; CHECK: .section .text.hot,"axG",@progbits,__hwasan_check_x1_123,comdat
-; CHECK-NEXT: .type __hwasan_check_x1_123,@function
-; CHECK-NEXT: .weak __hwasan_check_x1_123
-; CHECK-NEXT: .hidden __hwasan_check_x1_123
-; CHECK-NEXT: __hwasan_check_x1_123:
+; CHECK: .section .text.hot,"axG",@progbits,__hwasan_check_x1_1,comdat
+; CHECK-NEXT: .type __hwasan_check_x1_1,@function
+; CHECK-NEXT: .weak __hwasan_check_x1_1
+; CHECK-NEXT: .hidden __hwasan_check_x1_1
+; CHECK-NEXT: __hwasan_check_x1_1:
; CHECK-NEXT: ubfx x16, x1, #4, #52
; CHECK-NEXT: ldrb w16, [x9, x16]
; CHECK-NEXT: cmp x16, x1, lsr #56
; CHECK-NEXT: .Ltmp4:
; CHECK-NEXT: ret
; CHECK-NEXT: .Ltmp3:
-; CHECK-NEXT: cmp w16, #15
-; CHECK-NEXT: b.hi .Ltmp5
-; CHECK-NEXT: and x17, x1, #0xf
-; CHECK-NEXT: add x17, x17, #2047
-; CHECK-NEXT: cmp w16, w17
-; CHECK-NEXT: b.ls .Ltmp5
-; CHECK-NEXT: orr x16, x1, #0xf
-; CHECK-NEXT: ldrb w16, [x16]
-; CHECK-NEXT: cmp x16, x1, lsr #56
-; CHECK-NEXT: b.eq .Ltmp4
-; CHECK-NEXT: .Ltmp5:
; CHECK-NEXT: stp x0, x1, [sp, #-256]!
; CHECK-NEXT: stp x29, x30, [sp, #232]
; CHECK-NEXT: mov x0, x1
-; CHECK-NEXT: mov x1, #123
+; CHECK-NEXT: mov x1, #1
; CHECK-NEXT: adrp x16, :got:__hwasan_tag_mismatch
; CHECK-NEXT: ldr x16, [x16, :got_lo12:__hwasan_tag_mismatch]
; CHECK-NEXT: br x16
--- /dev/null
+; Test that alloca instrumentation with old API levels does not use short granules.
+;
+; RUN: opt < %s -hwasan -S | FileCheck %s
+
+target datalayout = "e-m:e-i8:8:32-i16:16:32-i64:64-i128:128-n32:64-S128"
+target triple = "aarch64--linux-android"
+
+declare void @use32(i32*)
+
+define void @test_alloca() sanitize_hwaddress {
+; CHECK-LABEL: @test_alloca(
+; CHECK: %[[X_TAG:[^ ]*]] = trunc i64 {{.*}} to i8
+; CHECK: call void @llvm.memset.p0i8.i64(i8* align 1 {{.*}}, i8 %[[X_TAG]], i64 1, i1 false)
+ %x = alloca i32, align 4
+ call void @use32(i32* nonnull %x)
+ ret void
+}
; RUN: opt < %s -hwasan -hwasan-with-ifunc=1 -hwasan-uar-retag-to-zero=0 -S | FileCheck %s --check-prefixes=CHECK,DYNAMIC-SHADOW,UAR-TAGS
target datalayout = "e-m:e-i8:8:32-i16:16:32-i64:64-i128:128-n32:64-S128"
-target triple = "aarch64--linux-android"
+target triple = "aarch64--linux-android10000"
declare void @use32(i32*)
--- /dev/null
+; Test that the old outlined check is used with old API levels.
+
+; RUN: opt < %s -hwasan -S | FileCheck %s
+
+target datalayout = "e-m:e-i8:8:32-i16:16:32-i64:64-i128:128-n32:64-S128"
+target triple = "aarch64--linux-android"
+
+define i8 @test_load8(i8* %a) sanitize_hwaddress {
+; CHECK-LABEL: @test_load8(
+; CHECK: call void @llvm.hwasan.check.memaccess(i8* {{.*}}, i8* {{.*}}, i32 0)
+ %b = load i8, i8* %a, align 4
+ ret i8 %b
+}
; CHECK: @llvm.global_ctors = appending global [1 x { i32, void ()*, i8* }] [{ i32, void ()*, i8* } { i32 0, void ()* @hwasan.module_ctor, i8* bitcast (void ()* @hwasan.module_ctor to i8*) }]
target datalayout = "e-m:e-i8:8:32-i16:16:32-i64:64-i128:128-n32:64-S128"
-target triple = "aarch64--linux-android"
+target triple = "aarch64--linux-android10000"
define i8 @test_load8(i8* %a) sanitize_hwaddress {
; CHECK-LABEL: @test_load8(
; RECOVER: [[CONT]]:
-; ABORT-DYNAMIC-SHADOW: call void @llvm.hwasan.check.memaccess(i8* %.hwasan.shadow, i8* %a, i32 0)
-; ABORT-ZERO-BASED-SHADOW: call void @llvm.hwasan.check.memaccess(i8* null, i8* %a, i32 0)
+; ABORT-DYNAMIC-SHADOW: call void @llvm.hwasan.check.memaccess.shortgranules(i8* %.hwasan.shadow, i8* %a, i32 0)
+; ABORT-ZERO-BASED-SHADOW: call void @llvm.hwasan.check.memaccess.shortgranules(i8* null, i8* %a, i32 0)
; CHECK: %[[G:[^ ]*]] = load i8, i8* %a, align 4
; CHECK: ret i8 %[[G]]
; RECOVER: [[CONT]]:
; ABORT: %[[A:[^ ]*]] = bitcast i16* %a to i8*
-; ABORT-DYNAMIC-SHADOW: call void @llvm.hwasan.check.memaccess(i8* %.hwasan.shadow, i8* %[[A]], i32 1)
-; ABORT-ZERO-BASED-SHADOW: call void @llvm.hwasan.check.memaccess(i8* null, i8* %[[A]], i32 1)
+; ABORT-DYNAMIC-SHADOW: call void @llvm.hwasan.check.memaccess.shortgranules(i8* %.hwasan.shadow, i8* %[[A]], i32 1)
+; ABORT-ZERO-BASED-SHADOW: call void @llvm.hwasan.check.memaccess.shortgranules(i8* null, i8* %[[A]], i32 1)
; CHECK: %[[G:[^ ]*]] = load i16, i16* %a, align 4
; CHECK: ret i16 %[[G]]
; RECOVER: br label
; ABORT: %[[A:[^ ]*]] = bitcast i32* %a to i8*
-; ABORT-DYNAMIC-SHADOW: call void @llvm.hwasan.check.memaccess(i8* %.hwasan.shadow, i8* %[[A]], i32 2)
-; ABORT-ZERO-BASED-SHADOW: call void @llvm.hwasan.check.memaccess(i8* null, i8* %[[A]], i32 2)
+; ABORT-DYNAMIC-SHADOW: call void @llvm.hwasan.check.memaccess.shortgranules(i8* %.hwasan.shadow, i8* %[[A]], i32 2)
+; ABORT-ZERO-BASED-SHADOW: call void @llvm.hwasan.check.memaccess.shortgranules(i8* null, i8* %[[A]], i32 2)
; CHECK: %[[G:[^ ]*]] = load i32, i32* %a, align 4
; CHECK: ret i32 %[[G]]
; RECOVER: br label
; ABORT: %[[A:[^ ]*]] = bitcast i64* %a to i8*
-; ABORT-DYNAMIC-SHADOW: call void @llvm.hwasan.check.memaccess(i8* %.hwasan.shadow, i8* %[[A]], i32 3)
-; ABORT-ZERO-BASED-SHADOW: call void @llvm.hwasan.check.memaccess(i8* null, i8* %[[A]], i32 3)
+; ABORT-DYNAMIC-SHADOW: call void @llvm.hwasan.check.memaccess.shortgranules(i8* %.hwasan.shadow, i8* %[[A]], i32 3)
+; ABORT-ZERO-BASED-SHADOW: call void @llvm.hwasan.check.memaccess.shortgranules(i8* null, i8* %[[A]], i32 3)
; CHECK: %[[G:[^ ]*]] = load i64, i64* %a, align 8
; CHECK: ret i64 %[[G]]
; RECOVER: br label
; ABORT: %[[A:[^ ]*]] = bitcast i128* %a to i8*
-; ABORT-DYNAMIC-SHADOW: call void @llvm.hwasan.check.memaccess(i8* %.hwasan.shadow, i8* %[[A]], i32 4)
-; ABORT-ZERO-BASED-SHADOW: call void @llvm.hwasan.check.memaccess(i8* null, i8* %[[A]], i32 4)
+; ABORT-DYNAMIC-SHADOW: call void @llvm.hwasan.check.memaccess.shortgranules(i8* %.hwasan.shadow, i8* %[[A]], i32 4)
+; ABORT-ZERO-BASED-SHADOW: call void @llvm.hwasan.check.memaccess.shortgranules(i8* null, i8* %[[A]], i32 4)
; CHECK: %[[G:[^ ]*]] = load i128, i128* %a, align 16
; CHECK: ret i128 %[[G]]
; RECOVER: call void asm sideeffect "brk #2352", "{x0}"(i64 %[[A]])
; RECOVER: br label
-; ABORT-DYNAMIC-SHADOW: call void @llvm.hwasan.check.memaccess(i8* %.hwasan.shadow, i8* %a, i32 16)
-; ABORT-ZERO-BASED-SHADOW: call void @llvm.hwasan.check.memaccess(i8* null, i8* %a, i32 16)
+; ABORT-DYNAMIC-SHADOW: call void @llvm.hwasan.check.memaccess.shortgranules(i8* %.hwasan.shadow, i8* %a, i32 16)
+; ABORT-ZERO-BASED-SHADOW: call void @llvm.hwasan.check.memaccess.shortgranules(i8* null, i8* %a, i32 16)
; CHECK: store i8 %b, i8* %a, align 4
; CHECK: ret void
; RECOVER: br label
; ABORT: %[[A:[^ ]*]] = bitcast i16* %a to i8*
-; ABORT-DYNAMIC-SHADOW: call void @llvm.hwasan.check.memaccess(i8* %.hwasan.shadow, i8* %[[A]], i32 17)
-; ABORT-ZERO-BASED-SHADOW: call void @llvm.hwasan.check.memaccess(i8* null, i8* %[[A]], i32 17)
+; ABORT-DYNAMIC-SHADOW: call void @llvm.hwasan.check.memaccess.shortgranules(i8* %.hwasan.shadow, i8* %[[A]], i32 17)
+; ABORT-ZERO-BASED-SHADOW: call void @llvm.hwasan.check.memaccess.shortgranules(i8* null, i8* %[[A]], i32 17)
; CHECK: store i16 %b, i16* %a, align 4
; CHECK: ret void
; RECOVER: br label
; ABORT: %[[A:[^ ]*]] = bitcast i32* %a to i8*
-; ABORT-DYNAMIC-SHADOW: call void @llvm.hwasan.check.memaccess(i8* %.hwasan.shadow, i8* %[[A]], i32 18)
-; ABORT-ZERO-BASED-SHADOW: call void @llvm.hwasan.check.memaccess(i8* null, i8* %[[A]], i32 18)
+; ABORT-DYNAMIC-SHADOW: call void @llvm.hwasan.check.memaccess.shortgranules(i8* %.hwasan.shadow, i8* %[[A]], i32 18)
+; ABORT-ZERO-BASED-SHADOW: call void @llvm.hwasan.check.memaccess.shortgranules(i8* null, i8* %[[A]], i32 18)
; CHECK: store i32 %b, i32* %a, align 4
; CHECK: ret void
; RECOVER: br label
; ABORT: %[[A:[^ ]*]] = bitcast i64* %a to i8*
-; ABORT-DYNAMIC-SHADOW: call void @llvm.hwasan.check.memaccess(i8* %.hwasan.shadow, i8* %[[A]], i32 19)
-; ABORT-ZERO-BASED-SHADOW: call void @llvm.hwasan.check.memaccess(i8* null, i8* %[[A]], i32 19)
+; ABORT-DYNAMIC-SHADOW: call void @llvm.hwasan.check.memaccess.shortgranules(i8* %.hwasan.shadow, i8* %[[A]], i32 19)
+; ABORT-ZERO-BASED-SHADOW: call void @llvm.hwasan.check.memaccess.shortgranules(i8* null, i8* %[[A]], i32 19)
; CHECK: store i64 %b, i64* %a, align 8
; CHECK: ret void
; RECOVER: br label
; ABORT: %[[A:[^ ]*]] = bitcast i128* %a to i8*
-; ABORT-DYNAMIC-SHADOW: call void @llvm.hwasan.check.memaccess(i8* %.hwasan.shadow, i8* %[[A]], i32 20)
-; ABORT-ZERO-BASED-SHADOW: call void @llvm.hwasan.check.memaccess(i8* null, i8* %[[A]], i32 20)
+; ABORT-DYNAMIC-SHADOW: call void @llvm.hwasan.check.memaccess.shortgranules(i8* %.hwasan.shadow, i8* %[[A]], i32 20)
+; ABORT-ZERO-BASED-SHADOW: call void @llvm.hwasan.check.memaccess.shortgranules(i8* null, i8* %[[A]], i32 20)
; CHECK: store i128 %b, i128* %a, align 16
; CHECK: ret void