Index: llvm/lib/Bitcode/Reader/BitcodeReader.cpp
===================================================================
--- llvm/lib/Bitcode/Reader/BitcodeReader.cpp
+++ llvm/lib/Bitcode/Reader/BitcodeReader.cpp
@@ -5714,19 +5714,20 @@
     InstructionList.push_back(I);
     break;
   }
-  case bitc::FUNC_CODE_INST_LOAD: { // LOAD: [opty, op, align, vol]
+  case bitc::FUNC_CODE_INST_LOAD: {
+    // LOAD: [opty, op, align, vol, [hasPtrProv, [PtrProv]]]
     unsigned OpNum = 0;
     Value *Op;
     unsigned OpTypeID;
     if (getValueTypePair(Record, OpNum, NextValueNo, Op, OpTypeID, CurBB) ||
-        (OpNum + 2 != Record.size() && OpNum + 3 != Record.size()))
+        (OpNum + 2 > Record.size()))
       return error("Invalid record");
 
     if (!isa<PointerType>(Op->getType()))
       return error("Load operand is not a pointer type");
 
     Type *Ty = nullptr;
-    if (OpNum + 3 == Record.size()) {
+    if (OpNum + 3 <= Record.size()) {
       ResTypeID = Record[OpNum++];
       Ty = getTypeByID(ResTypeID);
     } else {
@@ -5747,24 +5748,41 @@
       return error("load of unsized type");
     if (!Align)
       Align = TheModule->getDataLayout().getABITypeAlign(Ty);
-    I = new LoadInst(Ty, Op, "", Record[OpNum + 1], *Align);
+    bool IsVolatile = Record[OpNum + 1];
+    OpNum += 2;
+    Value *PtrProvenance = nullptr;
+    if (OpNum < Record.size()) {
+      // Do we have a ptr_provenance ?
+      if (Record[OpNum++]) {
+        unsigned PtrProvenanceTypeID;
+        if (getValueTypePair(Record, OpNum, NextValueNo, PtrProvenance,
+                             PtrProvenanceTypeID, CurBB))
+          return error("Invalid ptr_provenance operand");
+      }
+    }
+    if (OpNum != Record.size())
+      return error("Invalid record");
+    I = new LoadInst(Ty, Op, "", IsVolatile, *Align);
+    if (PtrProvenance)
+      cast<LoadInst>(I)->setPtrProvenanceOperand(PtrProvenance);
     InstructionList.push_back(I);
     break;
   }
   case bitc::FUNC_CODE_INST_LOADATOMIC: {
-    // LOADATOMIC: [opty, op, align, vol, ordering, ssid]
+    // LOADATOMIC: [opty, op, align, vol, ordering, ssid,
+    //              [hasPtrProv,[PtrProv]]]
     unsigned OpNum = 0;
     Value *Op;
     unsigned OpTypeID;
     if (getValueTypePair(Record, OpNum, NextValueNo, Op, OpTypeID, CurBB) ||
-        (OpNum + 4 != Record.size() && OpNum + 5 != Record.size()))
+        (OpNum + 4 > Record.size()))
       return error("Invalid record");
 
     if (!isa<PointerType>(Op->getType()))
       return error("Load operand is not a pointer type");
 
     Type *Ty = nullptr;
-    if (OpNum + 5 == Record.size()) {
+    if (OpNum + 5 <= Record.size()) {
       ResTypeID = Record[OpNum++];
       Ty = getTypeByID(ResTypeID);
     } else {
@@ -5791,12 +5809,29 @@
         return Err;
     if (!Align)
       return error("Alignment missing from atomic load");
-    I = new LoadInst(Ty, Op, "", Record[OpNum + 1], *Align, Ordering, SSID);
+    bool IsVolatile = Record[OpNum + 1];
+    OpNum += 4;
+    Value *PtrProvenance = nullptr;
+    if (OpNum < Record.size()) {
+      // Do we have a ptr_provenance ?
+      if (Record[OpNum++]) {
+        unsigned PtrProvenanceTypeID;
+        if (getValueTypePair(Record, OpNum, NextValueNo, PtrProvenance,
+                             PtrProvenanceTypeID, CurBB))
+          return error("Invalid ptr_provenance operand");
+      }
+    }
+    if (OpNum != Record.size())
+      return error("Invalid record");
+    I = new LoadInst(Ty, Op, "", IsVolatile, *Align, Ordering, SSID);
+    if (PtrProvenance)
+      cast<LoadInst>(I)->setPtrProvenanceOperand(PtrProvenance);
     InstructionList.push_back(I);
     break;
   }
   case bitc::FUNC_CODE_INST_STORE:
-  case bitc::FUNC_CODE_INST_STORE_OLD: { // STORE2:[ptrty, ptr, val, align, vol]
+  case bitc::FUNC_CODE_INST_STORE_OLD: {
+    // STORE2:[ptrty, ptr, val, align, vol, [hasPtrProv, [PtrProv]]]
     unsigned OpNum = 0;
     Value *Val, *Ptr;
     unsigned PtrTypeID, ValTypeID;
@@ -5813,7 +5848,7 @@
         return error("Invalid record");
     }
 
-    if (OpNum + 2 != Record.size())
+    if (OpNum + 2 > Record.size())
       return error("Invalid record");
 
     if (Error Err = typeCheckLoadStoreInst(Val->getType(), Ptr->getType()))
@@ -5826,13 +5861,30 @@
       return error("store of unsized type");
    if (!Align)
       Align = TheModule->getDataLayout().getABITypeAlign(Val->getType());
-    I = new StoreInst(Val, Ptr, Record[OpNum + 1], *Align);
+    bool IsVolatile = Record[OpNum + 1];
+    OpNum += 2;
+    Value *PtrProvenance = nullptr;
+    if (OpNum < Record.size()) {
+      // Do we have a ptr_provenance ?
+      if (Record[OpNum++]) {
+        unsigned PtrProvenanceTypeID;
+        if (getValueTypePair(Record, OpNum, NextValueNo, PtrProvenance,
+                             PtrProvenanceTypeID, CurBB))
+          return error("Invalid ptr_provenance operand");
+      }
+    }
+    if (OpNum != Record.size())
+      return error("Invalid record");
+    I = new StoreInst(Val, Ptr, IsVolatile, *Align);
+    if (PtrProvenance)
+      cast<StoreInst>(I)->setPtrProvenanceOperand(PtrProvenance);
     InstructionList.push_back(I);
     break;
   }
   case bitc::FUNC_CODE_INST_STOREATOMIC:
   case bitc::FUNC_CODE_INST_STOREATOMIC_OLD: {
-    // STOREATOMIC: [ptrty, ptr, val, align, vol, ordering, ssid]
+    // STOREATOMIC: [ptrty, ptr, val, align, vol, ordering, ssid,
+    //               [hasPtrProv, [PtrProv]]]
     unsigned OpNum = 0;
     Value *Val, *Ptr;
     unsigned PtrTypeID, ValTypeID;
@@ -5849,7 +5901,7 @@
         return error("Invalid record");
     }
 
-    if (OpNum + 4 != Record.size())
+    if (OpNum + 4 > Record.size())
      return error("Invalid record");
 
    if (Error Err = typeCheckLoadStoreInst(Val->getType(), Ptr->getType()))
@@ -5868,7 +5920,23 @@
         return Err;
     if (!Align)
       return error("Alignment missing from atomic store");
-    I = new StoreInst(Val, Ptr, Record[OpNum + 1], *Align, Ordering, SSID);
+    bool IsVolatile = Record[OpNum + 1];
+    OpNum += 4;
+    Value *PtrProvenance = nullptr;
+    if (OpNum < Record.size()) {
+      // Do we have a ptr_provenance ?
+      if (Record[OpNum++]) {
+        unsigned PtrProvenanceTypeID;
+        if (getValueTypePair(Record, OpNum, NextValueNo, PtrProvenance,
+                             PtrProvenanceTypeID, CurBB))
+          return error("Invalid ptr_provenance operand");
+      }
+    }
+    if (OpNum != Record.size())
+      return error("Invalid record");
+    I = new StoreInst(Val, Ptr, IsVolatile, *Align, Ordering, SSID);
+    if (PtrProvenance)
+      cast<StoreInst>(I)->setPtrProvenanceOperand(PtrProvenance);
     InstructionList.push_back(I);
     break;
   }
Index: llvm/lib/Bitcode/Writer/BitcodeWriter.cpp
===================================================================
--- llvm/lib/Bitcode/Writer/BitcodeWriter.cpp
+++ llvm/lib/Bitcode/Writer/BitcodeWriter.cpp
@@ -2572,7 +2572,7 @@
     else if (isCStr7)
       AbbrevToUse = CString7Abbrev;
   } else if (const ConstantDataSequential *CDS =
-                  dyn_cast<ConstantDataSequential>(C)) {
+                 dyn_cast<ConstantDataSequential>(C)) {
     Code = bitc::CST_CODE_DATA;
     Type *EltTy = CDS->getElementType();
     if (isa<IntegerType>(EltTy)) {
@@ -3112,7 +3112,8 @@
       pushValueAndType(I.getOperand(0), InstID, Vals);
     } else {
       Code = bitc::FUNC_CODE_INST_LOAD;
-      if (!pushValueAndType(I.getOperand(0), InstID, Vals)) // ptr
+      if (!pushValueAndType(I.getOperand(0), InstID, Vals) &&
+          !cast<LoadInst>(I).hasPtrProvenanceOperand()) // ptr
         AbbrevToUse = FUNCTION_INST_LOAD_ABBREV;
     }
     Vals.push_back(VE.getTypeID(I.getType()));
@@ -3122,6 +3123,15 @@
       Vals.push_back(getEncodedOrdering(cast<LoadInst>(I).getOrdering()));
       Vals.push_back(getEncodedSyncScopeID(cast<LoadInst>(I).getSyncScopeID()));
     }
+    if (cast<LoadInst>(I).hasPtrProvenanceOperand()) {
+      Vals.push_back(true); // ptr_provenance present
+      pushValueAndType(cast<LoadInst>(I).getPtrProvenanceOperand(), InstID,
+                       Vals); // ptrty + ptr_provenance
+    } else {
+      // No ptr_provenance - do nothing for now, when in future more arguments
+      // are emitted, do not forget to emit a 'false':
+      // Vals.push_back(false);
+    }
     break;
   case Instruction::Store:
     if (cast<StoreInst>(I).isAtomic())
@@ -3137,6 +3147,15 @@
       Vals.push_back(
           getEncodedSyncScopeID(cast<StoreInst>(I).getSyncScopeID()));
     }
+    if (cast<StoreInst>(I).hasPtrProvenanceOperand()) {
+      Vals.push_back(true); // ptr_provenance present
+      pushValueAndType(cast<StoreInst>(I).getPtrProvenanceOperand(), InstID,
+                       Vals); // ptrty + ptr_provenance
+    } else {
+      // No ptr_provenance - do nothing for now, when in future more arguments
+      // are emitted, do not forget to emit a 'false':
+      // Vals.push_back(false);
+    }
     break;
   case Instruction::AtomicCmpXchg:
     Code = bitc::FUNC_CODE_INST_CMPXCHG;
Index: llvm/test/Bitcode/compatibility.ll
===================================================================
--- llvm/test/Bitcode/compatibility.ll
+++ llvm/test/Bitcode/compatibility.ll
@@ -844,12 +844,26 @@
   %ld.3 = load atomic volatile i32, i32* %word syncscope("singlethread") seq_cst, align 16
   ; CHECK: %ld.3 = load atomic volatile i32, i32* %word syncscope("singlethread") seq_cst, align 16
 
+  %ld.1p = load atomic i32, i32* %word monotonic, ptr_provenance i32* %word, align 4
+  ; CHECK: %ld.1p = load atomic i32, i32* %word monotonic, ptr_provenance i32* %word, align 4
+  %ld.2p = load atomic volatile i32, i32* %word acquire, ptr_provenance i32* %word, align 8
+  ; CHECK: %ld.2p = load atomic volatile i32, i32* %word acquire, ptr_provenance i32* %word, align 8
+  %ld.3p = load atomic volatile i32, i32* %word syncscope("singlethread") seq_cst, ptr_provenance i32* %word, align 16
+  ; CHECK: %ld.3p = load atomic volatile i32, i32* %word syncscope("singlethread") seq_cst, ptr_provenance i32* %word, align 16
+
   store atomic i32 23, i32* %word monotonic, align 4
   ; CHECK: store atomic i32 23, i32* %word monotonic, align 4
   store atomic volatile i32 24, i32* %word monotonic, align 4
   ; CHECK: store atomic volatile i32 24, i32* %word monotonic, align 4
   store atomic volatile i32 25, i32* %word syncscope("singlethread") monotonic, align 4
   ; CHECK: store atomic volatile i32 25, i32* %word syncscope("singlethread") monotonic, align 4
+
+  store atomic i32 26, i32* %word monotonic, ptr_provenance i32* %word, align 4
+  ; CHECK: store atomic i32 26, i32* %word monotonic, ptr_provenance i32* %word, align 4
+  store atomic volatile i32 27, i32* %word monotonic, ptr_provenance i32* %word, align 4
+  ; CHECK: store atomic volatile i32 27, i32* %word monotonic, ptr_provenance i32* %word, align 4
+  store atomic volatile i32 28, i32* %word syncscope("singlethread") monotonic, ptr_provenance i32* %word, align 4
+  ; CHECK: store atomic volatile i32 28, i32* %word syncscope("singlethread") monotonic, ptr_provenance i32* %word, align 4
 
   ret void
 }
@@ -1410,11 +1424,21 @@
   load volatile i32*, i32** %base, align 8, !invariant.load !7, !nontemporal !8, !nonnull !7, !dereferenceable !9, !dereferenceable_or_null !9
   ; CHECK: load volatile i32*, i32** %base, align 8, !invariant.load !7, !nontemporal !8, !nonnull !7, !dereferenceable !9, !dereferenceable_or_null !9
 
+  load i32*, i32** %base, ptr_provenance i32** %base, align 8, !invariant.load !7, !nontemporal !8, !nonnull !7, !dereferenceable !9, !dereferenceable_or_null !9
+  ; CHECK: load i32*, i32** %base, ptr_provenance i32** %base, align 8, !invariant.load !7, !nontemporal !8, !nonnull !7, !dereferenceable !9, !dereferenceable_or_null !9
+  load volatile i32*, i32** %base, ptr_provenance i32** %base, align 8, !invariant.load !7, !nontemporal !8, !nonnull !7, !dereferenceable !9, !dereferenceable_or_null !9
+  ; CHECK: load volatile i32*, i32** %base, ptr_provenance i32** %base, align 8, !invariant.load !7, !nontemporal !8, !nonnull !7, !dereferenceable !9, !dereferenceable_or_null !9
+
   store i32* null, i32** %base, align 4, !nontemporal !8
   ; CHECK: store i32* null, i32** %base, align 4, !nontemporal !8
   store volatile i32* null, i32** %base, align 4, !nontemporal !8
   ; CHECK: store volatile i32* null, i32** %base, align 4, !nontemporal !8
 
+  store i32* null, i32** %base, ptr_provenance i32** %base, align 4, !nontemporal !8
+  ; CHECK: store i32* null, i32** %base, ptr_provenance i32** %base, align 4, !nontemporal !8
+  store volatile i32* null, i32** %base, ptr_provenance i32** %base, align 4, !nontemporal !8
+  ; CHECK: store volatile i32* null, i32** %base, ptr_provenance i32** %base, align 4, !nontemporal !8
+
   ret void
 }
Index: llvm/test/Bitcode/loadstore_ptr_provenance.ll
===================================================================
--- llvm/test/Bitcode/loadstore_ptr_provenance.ll
+++ llvm/test/Bitcode/loadstore_ptr_provenance.ll
@@ -1,7 +1,6 @@
 ; RUN: opt --verify -S < %s | FileCheck %s
-; Activate when bitcode support is added:
-; R U N: llvm-as < %s | llvm-dis | llvm-as | llvm-dis | FileCheck %s
-; R U N: verify-uselistorder < %s
+; RUN: llvm-as < %s | llvm-dis | llvm-as | llvm-dis | FileCheck %s
+; RUN: verify-uselistorder < %s
 
 define i32 @f(i32* %p, i32* %q, i32* %word, i32** %base) {
 ; CHECK: define i32 @f(i32* %p, i32* %q, i32* %word, i32** %base) {