Skip to content

Commit 2f63cbc

Browse files
committedFeb 7, 2017
[ImplicitNullCheck] Extend Implicit Null Check scope by using stores
Summary: This change allows usage of store instruction for implicit null check. Memory Aliasing Analysis is not used and the change conservatively supposes that any store and load may access the same memory. As a result re-ordering of store-store, store-load and load-store is prohibited. Patch by Serguei Katkov! Reviewers: reames, sanjoy Reviewed By: sanjoy Subscribers: atrick, llvm-commits Differential Revision: https://reviews.llvm.org/D29400 llvm-svn: 294338
1 parent ef6d573 commit 2f63cbc

File tree

11 files changed

+809
-86
lines changed

11 files changed

+809
-86
lines changed
 

Diff for: ‎llvm/docs/FaultMaps.rst

+6-1
Original file line numberDiff line numberDiff line change
@@ -47,12 +47,17 @@ The format of this section is
4747
uint32 : NumFaultingPCs
4848
uint32 : Reserved (expected to be 0)
4949
FunctionFaultInfo[NumFaultingPCs] {
50-
uint32 : FaultKind = FaultMaps::FaultingLoad (only legal value currently)
50+
uint32 : FaultKind
5151
uint32 : FaultingPCOffset
5252
uint32 : HandlerPCOffset
5353
}
5454
}
5555
56+
FaultKind describes the reason for the expected fault.
57+
Currently three kinds of faults are supported:
58+
1. FaultingLoad - fault due to load from memory.
59+
2. FaultingLoadStore - fault due to an instruction that both loads and stores.
60+
3. FaultingStore - fault due to store to memory.
5661

5762
The ``ImplicitNullChecks`` pass
5863
===============================

Diff for: ‎llvm/include/llvm/CodeGen/FaultMaps.h

+6-1
Original file line numberDiff line numberDiff line change
@@ -26,7 +26,12 @@ class MCStreamer;
2626

2727
class FaultMaps {
2828
public:
29-
enum FaultKind { FaultingLoad = 1, FaultKindMax };
29+
enum FaultKind {
30+
FaultingLoad = 1,
31+
FaultingLoadStore,
32+
FaultingStore,
33+
FaultKindMax
34+
};
3035

3136
static const char *faultTypeToString(FaultKind);
3237

Diff for: ‎llvm/include/llvm/Target/Target.td

+2-1
Original file line numberDiff line numberDiff line change
@@ -951,11 +951,12 @@ def LOCAL_ESCAPE : Instruction {
951951
let hasSideEffects = 0;
952952
let hasCtrlDep = 1;
953953
}
954-
def FAULTING_LOAD_OP : Instruction {
954+
def FAULTING_OP : Instruction {
955955
let OutOperandList = (outs unknown:$dst);
956956
let InOperandList = (ins variable_ops);
957957
let usesCustomInserter = 1;
958958
let mayLoad = 1;
959+
let mayStore = 1;
959960
let isTerminator = 1;
960961
let isBranch = 1;
961962
}

Diff for: ‎llvm/include/llvm/Target/TargetOpcodes.def

+4-2
Original file line numberDiff line numberDiff line change
@@ -134,11 +134,13 @@ HANDLE_TARGET_OPCODE(STATEPOINT)
134134
/// frame index of the local stack allocation.
135135
HANDLE_TARGET_OPCODE(LOCAL_ESCAPE)
136136

137-
/// Loading instruction that may page fault, bundled with associated
137+
/// Wraps a machine instruction which can fault, bundled with associated
138+
/// information on how to handle such a fault.
139+
/// For example loading instruction that may page fault, bundled with associated
138140
/// information on how to handle such a page fault. It is intended to support
139141
/// "zero cost" null checks in managed languages by allowing LLVM to fold
140142
/// comparisons into existing memory operations.
141-
HANDLE_TARGET_OPCODE(FAULTING_LOAD_OP)
143+
HANDLE_TARGET_OPCODE(FAULTING_OP)
142144

143145
/// Wraps a machine instruction to add patchability constraints. An
144146
/// instruction wrapped in PATCHABLE_OP has to either have a minimum

Diff for: ‎llvm/lib/CodeGen/FaultMaps.cpp

+4
Original file line numberDiff line numberDiff line change
@@ -110,6 +110,10 @@ const char *FaultMaps::faultTypeToString(FaultMaps::FaultKind FT) {
110110

111111
case FaultMaps::FaultingLoad:
112112
return "FaultingLoad";
113+
case FaultMaps::FaultingLoadStore:
114+
return "FaultingLoadStore";
115+
case FaultMaps::FaultingStore:
116+
return "FaultingStore";
113117
}
114118
}
115119

Diff for: ‎llvm/lib/CodeGen/ImplicitNullChecks.cpp

+74-47
Original file line numberDiff line numberDiff line change
@@ -22,13 +22,15 @@
2222
// With the help of a runtime that understands the .fault_maps section,
2323
// faulting_load_op branches to throw_npe if executing movl (%r10), %esi incurs
2424
// a page fault.
25+
// Store is also supported.
2526
//
2627
//===----------------------------------------------------------------------===//
2728

2829
#include "llvm/ADT/DenseSet.h"
2930
#include "llvm/ADT/SmallVector.h"
3031
#include "llvm/ADT/Statistic.h"
3132
#include "llvm/Analysis/AliasAnalysis.h"
33+
#include "llvm/CodeGen/FaultMaps.h"
3234
#include "llvm/CodeGen/Passes.h"
3335
#include "llvm/CodeGen/MachineFunction.h"
3436
#include "llvm/CodeGen/MachineMemOperand.h"
@@ -154,8 +156,8 @@ class ImplicitNullChecks : public MachineFunctionPass {
154156

155157
bool analyzeBlockForNullChecks(MachineBasicBlock &MBB,
156158
SmallVectorImpl<NullCheck> &NullCheckList);
157-
MachineInstr *insertFaultingLoad(MachineInstr *LoadMI, MachineBasicBlock *MBB,
158-
MachineBasicBlock *HandlerMBB);
159+
MachineInstr *insertFaultingInstr(MachineInstr *MI, MachineBasicBlock *MBB,
160+
MachineBasicBlock *HandlerMBB);
159161
void rewriteNullChecks(ArrayRef<NullCheck> NullCheckList);
160162

161163
enum SuitabilityResult { SR_Suitable, SR_Unsuitable, SR_Impossible };
@@ -165,16 +167,18 @@ class ImplicitNullChecks : public MachineFunctionPass {
165167
/// \p MI cannot be used to null check and SR_Impossible if there is
166168
/// no sense to continue lookup due to any other instruction will not be able
167169
/// to be used. \p PrevInsts is the set of instruction seen since
168-
/// the explicit null check on \p PointerReg.
170+
/// the explicit null check on \p PointerReg. \p SeenLoad means that load
171+
/// instruction has been observed in \PrevInsts set.
169172
SuitabilityResult isSuitableMemoryOp(MachineInstr &MI, unsigned PointerReg,
170-
ArrayRef<MachineInstr *> PrevInsts);
173+
ArrayRef<MachineInstr *> PrevInsts,
174+
bool &SeenLoad);
171175

172176
/// Return true if \p FaultingMI can be hoisted from after the
173177
/// instructions in \p InstsSeenSoFar to before them. Set \p Dependence to a
174178
/// non-null value if we also need to (and legally can) hoist a dependency.
175-
bool canHoistLoadInst(MachineInstr *FaultingMI, unsigned PointerReg,
176-
ArrayRef<MachineInstr *> InstsSeenSoFar,
177-
MachineBasicBlock *NullSucc, MachineInstr *&Dependence);
179+
bool canHoistInst(MachineInstr *FaultingMI, unsigned PointerReg,
180+
ArrayRef<MachineInstr *> InstsSeenSoFar,
181+
MachineBasicBlock *NullSucc, MachineInstr *&Dependence);
178182

179183
public:
180184
static char ID;
@@ -198,7 +202,7 @@ class ImplicitNullChecks : public MachineFunctionPass {
198202
}
199203

200204
bool ImplicitNullChecks::canHandle(const MachineInstr *MI) {
201-
if (MI->isCall() || MI->mayStore() || MI->hasUnmodeledSideEffects())
205+
if (MI->isCall() || MI->hasUnmodeledSideEffects())
202206
return false;
203207
auto IsRegMask = [](const MachineOperand &MO) { return MO.isRegMask(); };
204208
(void)IsRegMask;
@@ -290,22 +294,36 @@ static bool AnyAliasLiveIn(const TargetRegisterInfo *TRI,
290294

291295
ImplicitNullChecks::SuitabilityResult
292296
ImplicitNullChecks::isSuitableMemoryOp(MachineInstr &MI, unsigned PointerReg,
293-
ArrayRef<MachineInstr *> PrevInsts) {
297+
ArrayRef<MachineInstr *> PrevInsts,
298+
bool &SeenLoad) {
294299
int64_t Offset;
295300
unsigned BaseReg;
296301

302+
// First, if it is a store and we saw a load before, we bail out
303+
// because we will not be able to re-order load-store without
304+
// using alias analysis.
305+
if (SeenLoad && MI.mayStore())
306+
return SR_Impossible;
307+
308+
SeenLoad = SeenLoad || MI.mayLoad();
309+
310+
// Without alias analysis we cannot re-order store with anything.
311+
// so if this instruction is not a candidate we should stop.
312+
SuitabilityResult Unsuitable = MI.mayStore() ? SR_Impossible : SR_Unsuitable;
313+
297314
if (!TII->getMemOpBaseRegImmOfs(MI, BaseReg, Offset, TRI) ||
298315
BaseReg != PointerReg)
299-
return SR_Unsuitable;
316+
return Unsuitable;
300317

301-
// We want the load to be issued at a sane offset from PointerReg, so that
302-
// if PointerReg is null then the load reliably page faults.
303-
if (!(MI.mayLoad() && !MI.isPredicable() && Offset < PageSize))
304-
return SR_Unsuitable;
318+
// We want the mem access to be issued at a sane offset from PointerReg,
319+
// so that if PointerReg is null then the access reliably page faults.
320+
if (!((MI.mayLoad() || MI.mayStore()) && !MI.isPredicable() &&
321+
Offset < PageSize))
322+
return Unsuitable;
305323

306-
// Finally, we need to make sure that the load instruction actually is
307-
// loading from PointerReg, and there isn't some re-definition of PointerReg
308-
// between the compare and the load.
324+
// Finally, we need to make sure that the access instruction actually is
325+
// accessing from PointerReg, and there isn't some re-definition of PointerReg
326+
// between the compare and the memory access.
309327
// If PointerReg has been redefined before then there is no sense to continue
310328
// lookup due to this condition will fail for any further instruction.
311329
for (auto *PrevMI : PrevInsts)
@@ -317,10 +335,11 @@ ImplicitNullChecks::isSuitableMemoryOp(MachineInstr &MI, unsigned PointerReg,
317335
return SR_Suitable;
318336
}
319337

320-
bool ImplicitNullChecks::canHoistLoadInst(
321-
MachineInstr *FaultingMI, unsigned PointerReg,
322-
ArrayRef<MachineInstr *> InstsSeenSoFar, MachineBasicBlock *NullSucc,
323-
MachineInstr *&Dependence) {
338+
bool ImplicitNullChecks::canHoistInst(MachineInstr *FaultingMI,
339+
unsigned PointerReg,
340+
ArrayRef<MachineInstr *> InstsSeenSoFar,
341+
MachineBasicBlock *NullSucc,
342+
MachineInstr *&Dependence) {
324343
auto DepResult = computeDependence(FaultingMI, InstsSeenSoFar);
325344
if (!DepResult.CanReorder)
326345
return false;
@@ -484,17 +503,19 @@ bool ImplicitNullChecks::analyzeBlockForNullChecks(
484503
const unsigned PointerReg = MBP.LHS.getReg();
485504

486505
SmallVector<MachineInstr *, 8> InstsSeenSoFar;
506+
bool SeenLoad = false;
487507

488508
for (auto &MI : *NotNullSucc) {
489509
if (!canHandle(&MI) || InstsSeenSoFar.size() >= MaxInstsToConsider)
490510
return false;
491511

492512
MachineInstr *Dependence;
493-
SuitabilityResult SR = isSuitableMemoryOp(MI, PointerReg, InstsSeenSoFar);
513+
SuitabilityResult SR =
514+
isSuitableMemoryOp(MI, PointerReg, InstsSeenSoFar, SeenLoad);
494515
if (SR == SR_Impossible)
495516
return false;
496-
if (SR == SR_Suitable && canHoistLoadInst(&MI, PointerReg, InstsSeenSoFar,
497-
NullSucc, Dependence)) {
517+
if (SR == SR_Suitable &&
518+
canHoistInst(&MI, PointerReg, InstsSeenSoFar, NullSucc, Dependence)) {
498519
NullCheckList.emplace_back(&MI, MBP.ConditionDef, &MBB, NotNullSucc,
499520
NullSucc, Dependence);
500521
return true;
@@ -506,36 +527,42 @@ bool ImplicitNullChecks::analyzeBlockForNullChecks(
506527
return false;
507528
}
508529

509-
/// Wrap a machine load instruction, LoadMI, into a FAULTING_LOAD_OP machine
510-
/// instruction. The FAULTING_LOAD_OP instruction does the same load as LoadMI
511-
/// (defining the same register), and branches to HandlerMBB if the load
512-
/// faults. The FAULTING_LOAD_OP instruction is inserted at the end of MBB.
513-
MachineInstr *
514-
ImplicitNullChecks::insertFaultingLoad(MachineInstr *LoadMI,
515-
MachineBasicBlock *MBB,
516-
MachineBasicBlock *HandlerMBB) {
530+
/// Wrap a machine instruction, MI, into a FAULTING machine instruction.
531+
/// The FAULTING instruction does the same load/store as MI
532+
/// (defining the same register), and branches to HandlerMBB if the mem access
533+
/// faults. The FAULTING instruction is inserted at the end of MBB.
534+
MachineInstr *ImplicitNullChecks::insertFaultingInstr(
535+
MachineInstr *MI, MachineBasicBlock *MBB, MachineBasicBlock *HandlerMBB) {
517536
const unsigned NoRegister = 0; // Guaranteed to be the NoRegister value for
518537
// all targets.
519538

520539
DebugLoc DL;
521-
unsigned NumDefs = LoadMI->getDesc().getNumDefs();
540+
unsigned NumDefs = MI->getDesc().getNumDefs();
522541
assert(NumDefs <= 1 && "other cases unhandled!");
523542

524543
unsigned DefReg = NoRegister;
525544
if (NumDefs != 0) {
526-
DefReg = LoadMI->defs().begin()->getReg();
527-
assert(std::distance(LoadMI->defs().begin(), LoadMI->defs().end()) == 1 &&
545+
DefReg = MI->defs().begin()->getReg();
546+
assert(std::distance(MI->defs().begin(), MI->defs().end()) == 1 &&
528547
"expected exactly one def!");
529548
}
530549

531-
auto MIB = BuildMI(MBB, DL, TII->get(TargetOpcode::FAULTING_LOAD_OP), DefReg)
550+
FaultMaps::FaultKind FK;
551+
if (MI->mayLoad())
552+
FK =
553+
MI->mayStore() ? FaultMaps::FaultingLoadStore : FaultMaps::FaultingLoad;
554+
else
555+
FK = FaultMaps::FaultingStore;
556+
557+
auto MIB = BuildMI(MBB, DL, TII->get(TargetOpcode::FAULTING_OP), DefReg)
558+
.addImm(FK)
532559
.addMBB(HandlerMBB)
533-
.addImm(LoadMI->getOpcode());
560+
.addImm(MI->getOpcode());
534561

535-
for (auto &MO : LoadMI->uses())
562+
for (auto &MO : MI->uses())
536563
MIB.add(MO);
537564

538-
MIB.setMemRefs(LoadMI->memoperands_begin(), LoadMI->memoperands_end());
565+
MIB.setMemRefs(MI->memoperands_begin(), MI->memoperands_end());
539566

540567
return MIB;
541568
}
@@ -556,18 +583,18 @@ void ImplicitNullChecks::rewriteNullChecks(
556583
NC.getCheckBlock()->insert(NC.getCheckBlock()->end(), DepMI);
557584
}
558585

559-
// Insert a faulting load where the conditional branch was originally. We
560-
// check earlier ensures that this bit of code motion is legal. We do not
561-
// touch the successors list for any basic block since we haven't changed
562-
// control flow, we've just made it implicit.
563-
MachineInstr *FaultingLoad = insertFaultingLoad(
586+
// Insert a faulting instruction where the conditional branch was
587+
// originally. We check earlier ensures that this bit of code motion
588+
// is legal. We do not touch the successors list for any basic block
589+
// since we haven't changed control flow, we've just made it implicit.
590+
MachineInstr *FaultingInstr = insertFaultingInstr(
564591
NC.getMemOperation(), NC.getCheckBlock(), NC.getNullSucc());
565592
// Now the values defined by MemOperation, if any, are live-in of
566593
// the block of MemOperation.
567-
// The original load operation may define implicit-defs alongside
568-
// the loaded value.
594+
// The original operation may define implicit-defs alongside
595+
// the value.
569596
MachineBasicBlock *MBB = NC.getMemOperation()->getParent();
570-
for (const MachineOperand &MO : FaultingLoad->operands()) {
597+
for (const MachineOperand &MO : FaultingInstr->operands()) {
571598
if (!MO.isReg() || !MO.isDef())
572599
continue;
573600
unsigned Reg = MO.getReg();

Diff for: ‎llvm/lib/Target/X86/X86AsmPrinter.h

+1-1
Original file line numberDiff line numberDiff line change
@@ -81,7 +81,7 @@ class LLVM_LIBRARY_VISIBILITY X86AsmPrinter : public AsmPrinter {
8181
void LowerSTACKMAP(const MachineInstr &MI);
8282
void LowerPATCHPOINT(const MachineInstr &MI, X86MCInstLower &MCIL);
8383
void LowerSTATEPOINT(const MachineInstr &MI, X86MCInstLower &MCIL);
84-
void LowerFAULTING_LOAD_OP(const MachineInstr &MI, X86MCInstLower &MCIL);
84+
void LowerFAULTING_OP(const MachineInstr &MI, X86MCInstLower &MCIL);
8585
void LowerPATCHABLE_OP(const MachineInstr &MI, X86MCInstLower &MCIL);
8686

8787
void LowerTlsAddr(X86MCInstLower &MCInstLowering, const MachineInstr &MI);

Diff for: ‎llvm/lib/Target/X86/X86MCInstLower.cpp

+23-19
Original file line numberDiff line numberDiff line change
@@ -894,30 +894,34 @@ void X86AsmPrinter::LowerSTATEPOINT(const MachineInstr &MI,
894894
SM.recordStatepoint(MI);
895895
}
896896

897-
void X86AsmPrinter::LowerFAULTING_LOAD_OP(const MachineInstr &MI,
898-
X86MCInstLower &MCIL) {
899-
// FAULTING_LOAD_OP <def>, <MBB handler>, <load opcode>, <load operands>
897+
void X86AsmPrinter::LowerFAULTING_OP(const MachineInstr &FaultingMI,
898+
X86MCInstLower &MCIL) {
899+
// FAULTING_OP <def>, <faulting type>, <MBB handler>,
900+
// <opcode>, <operands>
900901

901-
unsigned LoadDefRegister = MI.getOperand(0).getReg();
902-
MCSymbol *HandlerLabel = MI.getOperand(1).getMBB()->getSymbol();
903-
unsigned LoadOpcode = MI.getOperand(2).getImm();
904-
unsigned LoadOperandsBeginIdx = 3;
902+
unsigned DefRegister = FaultingMI.getOperand(0).getReg();
903+
FaultMaps::FaultKind FK =
904+
static_cast<FaultMaps::FaultKind>(FaultingMI.getOperand(1).getImm());
905+
MCSymbol *HandlerLabel = FaultingMI.getOperand(2).getMBB()->getSymbol();
906+
unsigned Opcode = FaultingMI.getOperand(3).getImm();
907+
unsigned OperandsBeginIdx = 4;
905908

906-
FM.recordFaultingOp(FaultMaps::FaultingLoad, HandlerLabel);
909+
assert(FK < FaultMaps::FaultKindMax && "Invalid Faulting Kind!");
910+
FM.recordFaultingOp(FK, HandlerLabel);
907911

908-
MCInst LoadMI;
909-
LoadMI.setOpcode(LoadOpcode);
912+
MCInst MI;
913+
MI.setOpcode(Opcode);
910914

911-
if (LoadDefRegister != X86::NoRegister)
912-
LoadMI.addOperand(MCOperand::createReg(LoadDefRegister));
915+
if (DefRegister != X86::NoRegister)
916+
MI.addOperand(MCOperand::createReg(DefRegister));
913917

914-
for (auto I = MI.operands_begin() + LoadOperandsBeginIdx,
915-
E = MI.operands_end();
918+
for (auto I = FaultingMI.operands_begin() + OperandsBeginIdx,
919+
E = FaultingMI.operands_end();
916920
I != E; ++I)
917-
if (auto MaybeOperand = MCIL.LowerMachineOperand(&MI, *I))
918-
LoadMI.addOperand(MaybeOperand.getValue());
921+
if (auto MaybeOperand = MCIL.LowerMachineOperand(&FaultingMI, *I))
922+
MI.addOperand(MaybeOperand.getValue());
919923

920-
OutStreamer->EmitInstruction(LoadMI, getSubtargetInfo());
924+
OutStreamer->EmitInstruction(MI, getSubtargetInfo());
921925
}
922926

923927
void X86AsmPrinter::LowerFENTRY_CALL(const MachineInstr &MI,
@@ -1388,8 +1392,8 @@ void X86AsmPrinter::EmitInstruction(const MachineInstr *MI) {
13881392
case TargetOpcode::STATEPOINT:
13891393
return LowerSTATEPOINT(*MI, MCInstLowering);
13901394

1391-
case TargetOpcode::FAULTING_LOAD_OP:
1392-
return LowerFAULTING_LOAD_OP(*MI, MCInstLowering);
1395+
case TargetOpcode::FAULTING_OP:
1396+
return LowerFAULTING_OP(*MI, MCInstLowering);
13931397

13941398
case TargetOpcode::FENTRY_CALL:
13951399
return LowerFENTRY_CALL(*MI, MCInstLowering);

Diff for: ‎llvm/test/CodeGen/X86/block-placement.mir

+2-2
Original file line numberDiff line numberDiff line change
@@ -46,7 +46,7 @@ liveins:
4646
- { reg: '%rdi' }
4747
- { reg: '%esi' }
4848

49-
# CHECK: %eax = FAULTING_LOAD_OP %bb.3.null, 1684, killed %rdi, 1, _, 0, _ :: (load 4 from %ir.ptr)
49+
# CHECK: %eax = FAULTING_OP 1, %bb.3.null, 1684, killed %rdi, 1, _, 0, _ :: (load 4 from %ir.ptr)
5050
# CHECK-NEXT: JMP_1 %bb.2.not_null
5151
# CHECK: bb.3.null:
5252
# CHECK: bb.4.right:
@@ -66,7 +66,7 @@ body: |
6666
successors: %bb.2.null(0x7ffff800), %bb.4.not_null(0x00000800)
6767
liveins: %rdi
6868
69-
%eax = FAULTING_LOAD_OP %bb.2.null, 1684, killed %rdi, 1, _, 0, _ :: (load 4 from %ir.ptr)
69+
%eax = FAULTING_OP 1, %bb.2.null, 1684, killed %rdi, 1, _, 0, _ :: (load 4 from %ir.ptr)
7070
JMP_1 %bb.4.not_null
7171
7272
bb.4.not_null:

Diff for: ‎llvm/test/CodeGen/X86/implicit-null-check.ll

+39-2
Original file line numberDiff line numberDiff line change
@@ -162,6 +162,26 @@ define i32 @imp_null_check_gep_load_with_use_dep(i32* %x, i32 %a) {
162162
ret i32 %z
163163
}
164164

165+
define void @imp_null_check_store(i32* %x) {
166+
; CHECK-LABEL: _imp_null_check_store:
167+
; CHECK: [[BB0_imp_null_check_store:L[^:]+]]:
168+
; CHECK: movl $1, (%rdi)
169+
; CHECK: retq
170+
; CHECK: [[BB1_imp_null_check_store:LBB6_[0-9]+]]:
171+
; CHECK: retq
172+
173+
entry:
174+
%c = icmp eq i32* %x, null
175+
br i1 %c, label %is_null, label %not_null, !make.implicit !0
176+
177+
is_null:
178+
ret void
179+
180+
not_null:
181+
store i32 1, i32* %x
182+
ret void
183+
}
184+
165185
!0 = !{}
166186

167187
; CHECK-LABEL: __LLVM_FaultMaps:
@@ -174,7 +194,7 @@ define i32 @imp_null_check_gep_load_with_use_dep(i32* %x, i32 %a) {
174194
; CHECK-NEXT: .short 0
175195

176196
; # functions:
177-
; CHECK-NEXT: .long 6
197+
; CHECK-NEXT: .long 7
178198

179199
; FunctionAddr:
180200
; CHECK-NEXT: .quad _imp_null_check_add_result
@@ -241,6 +261,19 @@ define i32 @imp_null_check_gep_load_with_use_dep(i32* %x, i32 %a) {
241261
; Fault[0].HandlerOffset:
242262
; CHECK-NEXT: .long [[BB1_imp_null_check_load]]-_imp_null_check_load
243263

264+
; FunctionAddr:
265+
; CHECK-NEXT: .quad _imp_null_check_store
266+
; NumFaultingPCs
267+
; CHECK-NEXT: .long 1
268+
; Reserved:
269+
; CHECK-NEXT: .long 0
270+
; Fault[0].Type:
271+
; CHECK-NEXT: .long 3
272+
; Fault[0].FaultOffset:
273+
; CHECK-NEXT: .long [[BB0_imp_null_check_store]]-_imp_null_check_store
274+
; Fault[0].HandlerOffset:
275+
; CHECK-NEXT: .long [[BB1_imp_null_check_store]]-_imp_null_check_store
276+
244277
; FunctionAddr:
245278
; CHECK-NEXT: .quad _imp_null_check_via_mem_comparision
246279
; NumFaultingPCs
@@ -256,7 +289,7 @@ define i32 @imp_null_check_gep_load_with_use_dep(i32* %x, i32 %a) {
256289

257290
; OBJDUMP: FaultMap table:
258291
; OBJDUMP-NEXT: Version: 0x1
259-
; OBJDUMP-NEXT: NumFunctions: 6
292+
; OBJDUMP-NEXT: NumFunctions: 7
260293
; OBJDUMP-NEXT: FunctionAddress: 0x000000, NumFaultingPCs: 1
261294
; OBJDUMP-NEXT: Fault kind: FaultingLoad, faulting PC offset: 0, handling PC offset: 5
262295
; OBJDUMP-NEXT: FunctionAddress: 0x000000, NumFaultingPCs: 1
@@ -267,3 +300,7 @@ define i32 @imp_null_check_gep_load_with_use_dep(i32* %x, i32 %a) {
267300
; OBJDUMP-NEXT: Fault kind: FaultingLoad, faulting PC offset: 0, handling PC offset: 7
268301
; OBJDUMP-NEXT: FunctionAddress: 0x000000, NumFaultingPCs: 1
269302
; OBJDUMP-NEXT: Fault kind: FaultingLoad, faulting PC offset: 0, handling PC offset: 3
303+
; OBJDUMP-NEXT: FunctionAddress: 0x000000, NumFaultingPCs: 1
304+
; OBJDUMP-NEXT: Fault kind: FaultingStore, faulting PC offset: 0, handling PC offset: 7
305+
; OBJDUMP-NEXT: FunctionAddress: 0x000000, NumFaultingPCs: 1
306+
; OBJDUMP-NEXT: Fault kind: FaultingLoad, faulting PC offset: 0, handling PC offset: 11

Diff for: ‎llvm/test/CodeGen/X86/implicit-null-checks.mir

+648-10
Large diffs are not rendered by default.

0 commit comments

Comments
 (0)
Please sign in to comment.