diff --git a/clang/test/OpenMP/interop_irbuilder.cpp b/clang/test/OpenMP/interop_irbuilder.cpp --- a/clang/test/OpenMP/interop_irbuilder.cpp +++ b/clang/test/OpenMP/interop_irbuilder.cpp @@ -69,15 +69,15 @@ // CHECK-NEXT: [[DEP_COUNTER_ADDR6:%.*]] = alloca i64, align 8 // CHECK-NEXT: store i32 4, ptr [[DEVICE_ID]], align 4 // CHECK-NEXT: [[OMP_GLOBAL_THREAD_NUM:%.*]] = call i32 @__kmpc_global_thread_num(ptr @[[GLOB1:[0-9]+]]) -// CHECK-NEXT: call void @__tgt_interop_init(ptr @[[GLOB1]], i32 [[OMP_GLOBAL_THREAD_NUM]], ptr [[INTEROP]], i64 1, i32 -1, i32 0, ptr null, i32 0) +// CHECK-NEXT: call void @__tgt_interop_init(ptr @[[GLOB1]], i32 [[OMP_GLOBAL_THREAD_NUM]], ptr [[INTEROP]], i32 1, i32 -1, i64 0, ptr null, i32 0) // CHECK-NEXT: [[OMP_GLOBAL_THREAD_NUM1:%.*]] = call i32 @__kmpc_global_thread_num(ptr @[[GLOB1]]) -// CHECK-NEXT: call void @__tgt_interop_init(ptr @[[GLOB1]], i32 [[OMP_GLOBAL_THREAD_NUM1]], ptr [[INTEROP]], i64 2, i32 -1, i32 0, ptr null, i32 0) +// CHECK-NEXT: call void @__tgt_interop_init(ptr @[[GLOB1]], i32 [[OMP_GLOBAL_THREAD_NUM1]], ptr [[INTEROP]], i32 2, i32 -1, i64 0, ptr null, i32 0) // CHECK-NEXT: [[TMP0:%.*]] = load i32, ptr [[DEVICE_ID]], align 4 // CHECK-NEXT: [[OMP_GLOBAL_THREAD_NUM2:%.*]] = call i32 @__kmpc_global_thread_num(ptr @[[GLOB1]]) -// CHECK-NEXT: call void @__tgt_interop_init(ptr @[[GLOB1]], i32 [[OMP_GLOBAL_THREAD_NUM2]], ptr [[INTEROP]], i64 1, i32 [[TMP0]], i32 0, ptr null, i32 0) +// CHECK-NEXT: call void @__tgt_interop_init(ptr @[[GLOB1]], i32 [[OMP_GLOBAL_THREAD_NUM2]], ptr [[INTEROP]], i32 1, i32 [[TMP0]], i64 0, ptr null, i32 0) // CHECK-NEXT: [[TMP1:%.*]] = load i32, ptr [[DEVICE_ID]], align 4 // CHECK-NEXT: [[OMP_GLOBAL_THREAD_NUM3:%.*]] = call i32 @__kmpc_global_thread_num(ptr @[[GLOB1]]) -// CHECK-NEXT: call void @__tgt_interop_init(ptr @[[GLOB1]], i32 [[OMP_GLOBAL_THREAD_NUM3]], ptr [[INTEROP]], i64 2, i32 [[TMP1]], i32 0, ptr null, i32 0) +// CHECK-NEXT: call void @__tgt_interop_init(ptr @[[GLOB1]], i32 [[OMP_GLOBAL_THREAD_NUM3]], ptr [[INTEROP]], i32 2, i32 [[TMP1]], i64 0, ptr null, i32 0) // CHECK-NEXT: [[TMP2:%.*]] = getelementptr inbounds [2 x %struct.kmp_depend_info], ptr [[DOTDEP_ARR_ADDR]], i64 0, i64 0 // CHECK-NEXT: [[TMP3:%.*]] = ptrtoint ptr [[D0]] to i64 // CHECK-NEXT: [[TMP4:%.*]] = getelementptr [[STRUCT_KMP_DEPEND_INFO:%.*]], ptr [[TMP2]], i64 0 @@ -136,18 +136,18 @@ // CHECK-NEXT: store i32 4, ptr [[DEVICE_ID]], align 4 // CHECK-NEXT: [[INTEROP:%.*]] = getelementptr inbounds [[STRUCT_S:%.*]], ptr [[THIS1]], i32 0, i32 0 // CHECK-NEXT: [[OMP_GLOBAL_THREAD_NUM:%.*]] = call i32 @__kmpc_global_thread_num(ptr @[[GLOB1]]) -// CHECK-NEXT: call void @__tgt_interop_init(ptr @[[GLOB1]], i32 [[OMP_GLOBAL_THREAD_NUM]], ptr [[INTEROP]], i64 1, i32 -1, i32 0, ptr null, i32 0) +// CHECK-NEXT: call void @__tgt_interop_init(ptr @[[GLOB1]], i32 [[OMP_GLOBAL_THREAD_NUM]], ptr [[INTEROP]], i32 1, i32 -1, i64 0, ptr null, i32 0) // CHECK-NEXT: [[INTEROP2:%.*]] = getelementptr inbounds [[STRUCT_S]], ptr [[THIS1]], i32 0, i32 0 // CHECK-NEXT: [[OMP_GLOBAL_THREAD_NUM3:%.*]] = call i32 @__kmpc_global_thread_num(ptr @[[GLOB1]]) -// CHECK-NEXT: call void @__tgt_interop_init(ptr @[[GLOB1]], i32 [[OMP_GLOBAL_THREAD_NUM3]], ptr [[INTEROP2]], i64 2, i32 -1, i32 0, ptr null, i32 0) +// CHECK-NEXT: call void @__tgt_interop_init(ptr @[[GLOB1]], i32 [[OMP_GLOBAL_THREAD_NUM3]], ptr [[INTEROP2]], i32 2, i32 -1, i64 0, ptr null, i32 0) // CHECK-NEXT: [[TMP0:%.*]] = load i32, ptr [[DEVICE_ID]], align 4 // CHECK-NEXT: [[INTEROP4:%.*]] = getelementptr 
inbounds [[STRUCT_S]], ptr [[THIS1]], i32 0, i32 0 // CHECK-NEXT: [[OMP_GLOBAL_THREAD_NUM5:%.*]] = call i32 @__kmpc_global_thread_num(ptr @[[GLOB1]]) -// CHECK-NEXT: call void @__tgt_interop_init(ptr @[[GLOB1]], i32 [[OMP_GLOBAL_THREAD_NUM5]], ptr [[INTEROP4]], i64 1, i32 [[TMP0]], i32 0, ptr null, i32 0) +// CHECK-NEXT: call void @__tgt_interop_init(ptr @[[GLOB1]], i32 [[OMP_GLOBAL_THREAD_NUM5]], ptr [[INTEROP4]], i32 1, i32 [[TMP0]], i64 0, ptr null, i32 0) // CHECK-NEXT: [[TMP1:%.*]] = load i32, ptr [[DEVICE_ID]], align 4 // CHECK-NEXT: [[INTEROP6:%.*]] = getelementptr inbounds [[STRUCT_S]], ptr [[THIS1]], i32 0, i32 0 // CHECK-NEXT: [[OMP_GLOBAL_THREAD_NUM7:%.*]] = call i32 @__kmpc_global_thread_num(ptr @[[GLOB1]]) -// CHECK-NEXT: call void @__tgt_interop_init(ptr @[[GLOB1]], i32 [[OMP_GLOBAL_THREAD_NUM7]], ptr [[INTEROP6]], i64 2, i32 [[TMP1]], i32 0, ptr null, i32 0) +// CHECK-NEXT: call void @__tgt_interop_init(ptr @[[GLOB1]], i32 [[OMP_GLOBAL_THREAD_NUM7]], ptr [[INTEROP6]], i32 2, i32 [[TMP1]], i64 0, ptr null, i32 0) // CHECK-NEXT: [[TMP2:%.*]] = getelementptr inbounds [2 x %struct.kmp_depend_info], ptr [[DOTDEP_ARR_ADDR]], i64 0, i64 0 // CHECK-NEXT: [[TMP3:%.*]] = ptrtoint ptr [[D0]] to i64 // CHECK-NEXT: [[TMP4:%.*]] = getelementptr [[STRUCT_KMP_DEPEND_INFO:%.*]], ptr [[TMP2]], i64 0 diff --git a/llvm/include/llvm/Analysis/TargetLibraryInfo.h b/llvm/include/llvm/Analysis/TargetLibraryInfo.h --- a/llvm/include/llvm/Analysis/TargetLibraryInfo.h +++ b/llvm/include/llvm/Analysis/TargetLibraryInfo.h @@ -11,6 +11,7 @@ #include "llvm/ADT/BitVector.h" #include "llvm/ADT/DenseMap.h" +#include "llvm/ADT/Triple.h" #include "llvm/IR/InstrTypes.h" #include "llvm/IR/PassManager.h" #include "llvm/Pass.h" @@ -390,28 +391,108 @@ return Impl->CustomNames.find(F)->second; } + static void initExtensionsForTriple(bool &ShouldExtI32Param, + bool &ShouldExtI32Return, + bool &ShouldSignExtI32Param, + bool &ShouldSignExtI32Return, + const Triple &T) { + ShouldExtI32Param = ShouldExtI32Return = false; + ShouldSignExtI32Param = ShouldSignExtI32Return = false; + + // PowerPC64, Sparc64, SystemZ need signext/zeroext on i32 parameters and + // returns corresponding to C-level ints and unsigned ints. + if (T.isPPC64() || T.getArch() == Triple::sparcv9 || + T.getArch() == Triple::systemz) { + ShouldExtI32Param = true; + ShouldExtI32Return = true; + } + // Mips and riscv64, on the other hand, needs signext on i32 parameters + // corresponding to both signed and unsigned ints. + if (T.isMIPS() || T.isRISCV64()) { + ShouldSignExtI32Param = true; + } + // riscv64 needs signext on i32 returns corresponding to both signed and + // unsigned ints. + if (T.isRISCV64()) { + ShouldSignExtI32Return = true; + } + } + /// Returns extension attribute kind to be used for i32 parameters /// corresponding to C-level int or unsigned int. May be zeroext, signext, /// or none. - Attribute::AttrKind getExtAttrForI32Param(bool Signed = true) const { - if (Impl->ShouldExtI32Param) +private: + static Attribute::AttrKind getExtAttrForI32Param(bool ShouldExtI32Param_, + bool ShouldSignExtI32Param_, + bool Signed = true) { + if (ShouldExtI32Param_) return Signed ? 
Attribute::SExt : Attribute::ZExt; - if (Impl->ShouldSignExtI32Param) + if (ShouldSignExtI32Param_) return Attribute::SExt; return Attribute::None; } +public: + static Attribute::AttrKind getExtAttrForI32Param(const Triple &T, + bool Signed = true) { + bool ShouldExtI32Param, ShouldExtI32Return; + bool ShouldSignExtI32Param, ShouldSignExtI32Return; + initExtensionsForTriple(ShouldExtI32Param, ShouldExtI32Return, + ShouldSignExtI32Param, ShouldSignExtI32Return, T); + return getExtAttrForI32Param(ShouldExtI32Param, ShouldSignExtI32Param, + Signed); + } + + Attribute::AttrKind getExtAttrForI32Param(bool Signed = true) const { + return getExtAttrForI32Param(Impl->ShouldExtI32Param, + Impl->ShouldSignExtI32Param, Signed); + } + /// Returns extension attribute kind to be used for i32 return values /// corresponding to C-level int or unsigned int. May be zeroext, signext, /// or none. - Attribute::AttrKind getExtAttrForI32Return(bool Signed = true) const { - if (Impl->ShouldExtI32Return) +private: + static Attribute::AttrKind getExtAttrForI32Return(bool ShouldExtI32Return_, + bool ShouldSignExtI32Return_, + bool Signed) { + if (ShouldExtI32Return_) return Signed ? Attribute::SExt : Attribute::ZExt; - if (Impl->ShouldSignExtI32Return) + if (ShouldSignExtI32Return_) return Attribute::SExt; return Attribute::None; } +public: + static Attribute::AttrKind getExtAttrForI32Return(const Triple &T, + bool Signed = true) { + bool ShouldExtI32Param, ShouldExtI32Return; + bool ShouldSignExtI32Param, ShouldSignExtI32Return; + initExtensionsForTriple(ShouldExtI32Param, ShouldExtI32Return, + ShouldSignExtI32Param, ShouldSignExtI32Return, T); + return getExtAttrForI32Return(ShouldExtI32Return, ShouldSignExtI32Return, + Signed); + } + + Attribute::AttrKind getExtAttrForI32Return(bool Signed = true) const { + return getExtAttrForI32Return(Impl->ShouldExtI32Return, + Impl->ShouldSignExtI32Return, Signed); + } + + // Helper to create an AttributeList for args (and ret val) which all have + // the same signedness. Attributes in AL may be passed in to include them + // as well in the returned AttributeList. + AttributeList getAttrList(LLVMContext *C, ArrayRef<unsigned> ArgNos, + bool Signed, bool Ret = false, + AttributeList AL = AttributeList()) const { + if (auto AK = getExtAttrForI32Param(Signed)) + for (auto ArgNo : ArgNos) + AL = AL.addParamAttribute(*C, ArgNo, AK); + if (Ret) + if (auto AK = getExtAttrForI32Return(Signed)) + AL = AL.addRetAttribute(*C, AK); + return AL; + } + /// \copydoc TargetLibraryInfoImpl::getWCharSize() unsigned getWCharSize(const Module &M) const { return Impl->getWCharSize(M); diff --git a/llvm/include/llvm/Frontend/OpenMP/OMPKinds.def b/llvm/include/llvm/Frontend/OpenMP/OMPKinds.def --- a/llvm/include/llvm/Frontend/OpenMP/OMPKinds.def +++ b/llvm/include/llvm/Frontend/OpenMP/OMPKinds.def @@ -398,8 +398,8 @@ VoidPtr) __OMP_RTL(__kmpc_free, false, Void, /* Int */ Int32, VoidPtr, VoidPtr) -__OMP_RTL(__tgt_interop_init, false, Void, IdentPtr, Int32, VoidPtrPtr, Int64, - Int32, Int32, VoidPtr, Int32) +__OMP_RTL(__tgt_interop_init, false, Void, IdentPtr, Int32, VoidPtrPtr, Int32, + Int32, Int64, VoidPtr, Int32) __OMP_RTL(__tgt_interop_destroy, false, Void, IdentPtr, Int32, VoidPtrPtr, Int32, Int32, VoidPtr, Int32) __OMP_RTL(__tgt_interop_use, false, Void, IdentPtr, Int32, VoidPtrPtr, Int32, @@ -597,6 +597,13 @@ ?
AttributeSet(EnumAttr(NoAlias)) : AttributeSet()) +__OMP_ATTRS_SET(ZExt, AttributeSet(EnumAttr(ZExt))) +__OMP_ATTRS_SET(SExt, AttributeSet(EnumAttr(SExt))) +__OMP_ATTRS_SET(SizeTyExt, + M.getDataLayout().getIntPtrType(Ctx)->getBitWidth() < 64 + ? AttributeSet(EnumAttr(ZExt)) + : AttributeSet()) + #if 0 __OMP_ATTRS_SET(ReturnAlignedPtrAttrs, OptimisticAttributes @@ -616,370 +623,416 @@ OMP_RTL_ATTRS(OMPRTL_##Name, FnAttrSet, RetAttrSet, ArgAttrSets) __OMP_RTL_ATTRS(__kmpc_barrier, BarrierAttrs, AttributeSet(), - ParamAttrs(ReadOnlyPtrAttrs)) + ParamAttrs(ReadOnlyPtrAttrs, SExt)) __OMP_RTL_ATTRS(__kmpc_barrier_simple_spmd, BarrierAttrs, AttributeSet(), - ParamAttrs(ReadOnlyPtrAttrs)) + ParamAttrs(ReadOnlyPtrAttrs, SExt)) __OMP_RTL_ATTRS(__kmpc_barrier_simple_generic, BarrierAttrs, AttributeSet(), - ParamAttrs(ReadOnlyPtrAttrs)) + ParamAttrs(ReadOnlyPtrAttrs, SExt)) __OMP_RTL_ATTRS(__kmpc_warp_active_thread_mask, BarrierAttrs, AttributeSet(), ParamAttrs()) __OMP_RTL_ATTRS(__kmpc_syncwarp, BarrierAttrs, AttributeSet(), ParamAttrs()) -__OMP_RTL_ATTRS(__kmpc_cancel, InaccessibleArgOnlyAttrs, AttributeSet(), - ParamAttrs(ReadOnlyPtrAttrs)) -__OMP_RTL_ATTRS(__kmpc_cancel_barrier, BarrierAttrs, AttributeSet(), - ParamAttrs(ReadOnlyPtrAttrs)) +__OMP_RTL_ATTRS(__kmpc_cancel, InaccessibleArgOnlyAttrs, SExt, + ParamAttrs(ReadOnlyPtrAttrs, SExt, SExt)) +__OMP_RTL_ATTRS(__kmpc_cancel_barrier, BarrierAttrs, SExt, + ParamAttrs(ReadOnlyPtrAttrs, SExt)) +__OMP_RTL_ATTRS(__kmpc_error, AttributeSet(), AttributeSet(), + ParamAttrs(AttributeSet(), SExt)) __OMP_RTL_ATTRS(__kmpc_flush, BarrierAttrs, AttributeSet(), ParamAttrs(ReadOnlyPtrAttrs)) -__OMP_RTL_ATTRS(__kmpc_global_thread_num, GetterAttrs, AttributeSet(), +__OMP_RTL_ATTRS(__kmpc_global_thread_num, GetterAttrs, SExt, ParamAttrs(ReadOnlyPtrAttrs)) +__OMP_RTL_ATTRS(__kmpc_get_hardware_thread_id_in_block, GetterAttrs, ZExt, + ParamAttrs()) __OMP_RTL_ATTRS(__kmpc_fork_call, ForkAttrs, AttributeSet(), - ParamAttrs(ReadOnlyPtrAttrs, AttributeSet(), ReadOnlyPtrAttrs)) -__OMP_RTL_ATTRS(__kmpc_omp_taskwait, BarrierAttrs, AttributeSet(), - ParamAttrs(ReadOnlyPtrAttrs)) -__OMP_RTL_ATTRS(__kmpc_omp_taskyield, InaccessibleArgOnlyAttrs, AttributeSet(), - ParamAttrs(ReadOnlyPtrAttrs)) + ParamAttrs(ReadOnlyPtrAttrs, SExt, ReadOnlyPtrAttrs)) +__OMP_RTL_ATTRS(__kmpc_fork_call_if, AttributeSet(), AttributeSet(), + ParamAttrs(ReadOnlyPtrAttrs, SExt, ReadOnlyPtrAttrs, SExt)) +__OMP_RTL_ATTRS(__kmpc_omp_taskwait, BarrierAttrs, SExt, + ParamAttrs(ReadOnlyPtrAttrs, SExt)) +__OMP_RTL_ATTRS(__kmpc_omp_taskyield, InaccessibleArgOnlyAttrs, SExt, + ParamAttrs(ReadOnlyPtrAttrs, SExt, SExt)) __OMP_RTL_ATTRS(__kmpc_push_num_threads, InaccessibleArgOnlyAttrs, - AttributeSet(), ParamAttrs(ReadOnlyPtrAttrs)) + AttributeSet(), ParamAttrs(ReadOnlyPtrAttrs, SExt, SExt)) __OMP_RTL_ATTRS(__kmpc_push_proc_bind, InaccessibleArgOnlyAttrs, AttributeSet(), - ParamAttrs(ReadOnlyPtrAttrs)) -__OMP_RTL_ATTRS(__kmpc_serialized_parallel, InaccessibleArgOnlyAttrs, - AttributeSet(), ParamAttrs(ReadOnlyPtrAttrs)) -__OMP_RTL_ATTRS(__kmpc_end_serialized_parallel, InaccessibleArgOnlyAttrs, - AttributeSet(), ParamAttrs(ReadOnlyPtrAttrs)) -__OMP_RTL_ATTRS(__kmpc_omp_reg_task_with_affinity, DefaultAttrs, AttributeSet(), - ParamAttrs(ReadOnlyPtrAttrs, AttributeSet(), ReadOnlyPtrAttrs, - AttributeSet(), ReadOnlyPtrAttrs)) - -__OMP_RTL_ATTRS(__kmpc_get_hardware_num_blocks, GetterAttrs, AttributeSet(), ParamAttrs()) -__OMP_RTL_ATTRS(__kmpc_get_hardware_num_threads_in_block, GetterAttrs, AttributeSet(), ParamAttrs()) 
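Illustration (not part of the patch): the SExt/ZExt markers used in these ParamAttrs and return slots are generic requests; OpenMPIRBuilder::addAttributes later maps them onto whatever the target ABI actually wants through the new static TargetLibraryInfo helpers. A minimal sketch of that per-triple query, with the triples picked purely as examples:

#include "llvm/ADT/Triple.h"
#include "llvm/Analysis/TargetLibraryInfo.h"
using namespace llvm;

// Resolve a "zero-extend this unsigned i32 parameter" request for two targets.
static void showPerTargetResolution() {
  // SystemZ extends i32 arguments, so the request becomes a real zeroext.
  Attribute::AttrKind OnSystemZ = TargetLibraryInfo::getExtAttrForI32Param(
      Triple("s390x-unknown-linux-gnu"), /*Signed=*/false); // Attribute::ZExt
  // x86-64 does not, so the request resolves to no attribute at all.
  Attribute::AttrKind OnX86_64 = TargetLibraryInfo::getExtAttrForI32Param(
      Triple("x86_64-unknown-linux-gnu"), /*Signed=*/false); // Attribute::None
  (void)OnSystemZ;
  (void)OnX86_64;
}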
-__OMP_RTL_ATTRS(__kmpc_get_warp_size, GetterAttrs, AttributeSet(), ParamAttrs()) - -__OMP_RTL_ATTRS(omp_get_thread_num, GetterAttrs, AttributeSet(), ParamAttrs()) -__OMP_RTL_ATTRS(omp_get_num_threads, GetterAttrs, AttributeSet(), ParamAttrs()) -__OMP_RTL_ATTRS(omp_get_max_threads, GetterAttrs, AttributeSet(), ParamAttrs()) -__OMP_RTL_ATTRS(omp_in_parallel, GetterAttrs, AttributeSet(), ParamAttrs()) -__OMP_RTL_ATTRS(omp_get_dynamic, GetterAttrs, AttributeSet(), ParamAttrs()) -__OMP_RTL_ATTRS(omp_get_cancellation, GetterAttrs, AttributeSet(), ParamAttrs()) -__OMP_RTL_ATTRS(omp_get_nested, GetterAttrs, AttributeSet(), ParamAttrs()) + ParamAttrs(ReadOnlyPtrAttrs, SExt, SExt)) +__OMP_RTL_ATTRS(__kmpc_omp_reg_task_with_affinity, DefaultAttrs, SExt, + ParamAttrs(ReadOnlyPtrAttrs, SExt, ReadOnlyPtrAttrs, + SExt, ReadOnlyPtrAttrs)) + +__OMP_RTL_ATTRS(__kmpc_get_hardware_num_blocks, GetterAttrs, ZExt, ParamAttrs()) +__OMP_RTL_ATTRS(__kmpc_get_hardware_num_threads_in_block, GetterAttrs, ZExt, ParamAttrs()) +__OMP_RTL_ATTRS(__kmpc_get_warp_size, GetterAttrs, ZExt, ParamAttrs()) + +__OMP_RTL_ATTRS(omp_get_thread_num, GetterAttrs, SExt, ParamAttrs()) +__OMP_RTL_ATTRS(omp_get_num_threads, GetterAttrs, SExt, ParamAttrs()) +__OMP_RTL_ATTRS(omp_get_max_threads, GetterAttrs, SExt, ParamAttrs()) +__OMP_RTL_ATTRS(omp_in_parallel, GetterAttrs, SExt, ParamAttrs()) +__OMP_RTL_ATTRS(omp_get_dynamic, GetterAttrs, SExt, ParamAttrs()) +__OMP_RTL_ATTRS(omp_get_cancellation, GetterAttrs, SExt, ParamAttrs()) +__OMP_RTL_ATTRS(omp_get_nested, GetterAttrs, SExt, ParamAttrs()) __OMP_RTL_ATTRS( omp_get_schedule, GetterArgWriteAttrs, AttributeSet(), ParamAttrs(AttributeSet(EnumAttr(NoCapture), EnumAttr(WriteOnly)), AttributeSet(EnumAttr(NoCapture), EnumAttr(WriteOnly)))) -__OMP_RTL_ATTRS(omp_get_thread_limit, GetterAttrs, AttributeSet(), ParamAttrs()) -__OMP_RTL_ATTRS(omp_get_supported_active_levels, GetterAttrs, AttributeSet(), - ParamAttrs()) -__OMP_RTL_ATTRS(omp_get_max_active_levels, GetterAttrs, AttributeSet(), - ParamAttrs()) -__OMP_RTL_ATTRS(omp_get_level, GetterAttrs, AttributeSet(), ParamAttrs()) -__OMP_RTL_ATTRS(omp_get_ancestor_thread_num, GetterAttrs, AttributeSet(), - ParamAttrs()) -__OMP_RTL_ATTRS(omp_get_team_size, GetterAttrs, AttributeSet(), ParamAttrs()) -__OMP_RTL_ATTRS(omp_get_active_level, GetterAttrs, AttributeSet(), ParamAttrs()) -__OMP_RTL_ATTRS(omp_in_final, GetterAttrs, AttributeSet(), ParamAttrs()) -__OMP_RTL_ATTRS(omp_get_proc_bind, GetterAttrs, AttributeSet(), ParamAttrs()) -__OMP_RTL_ATTRS(omp_get_num_places, GetterAttrs, AttributeSet(), ParamAttrs()) -__OMP_RTL_ATTRS(omp_get_num_procs, GetterAttrs, AttributeSet(), ParamAttrs()) +__OMP_RTL_ATTRS(omp_get_thread_limit, GetterAttrs, SExt, ParamAttrs()) +__OMP_RTL_ATTRS(omp_get_supported_active_levels, GetterAttrs, SExt, ParamAttrs()) +__OMP_RTL_ATTRS(omp_get_max_active_levels, GetterAttrs, SExt, ParamAttrs()) +__OMP_RTL_ATTRS(omp_get_level, GetterAttrs, SExt, ParamAttrs()) +__OMP_RTL_ATTRS(omp_get_ancestor_thread_num, GetterAttrs, SExt, ParamAttrs(SExt)) +__OMP_RTL_ATTRS(omp_get_team_size, GetterAttrs, SExt, ParamAttrs(SExt)) +__OMP_RTL_ATTRS(omp_get_active_level, GetterAttrs, SExt, ParamAttrs()) +__OMP_RTL_ATTRS(omp_in_final, GetterAttrs, SExt, ParamAttrs()) +__OMP_RTL_ATTRS(omp_get_proc_bind, GetterAttrs, SExt, ParamAttrs()) +__OMP_RTL_ATTRS(omp_get_num_places, GetterAttrs, SExt, ParamAttrs()) +__OMP_RTL_ATTRS(omp_get_num_procs, GetterAttrs, SExt, ParamAttrs()) __OMP_RTL_ATTRS(omp_get_place_proc_ids, GetterArgWriteAttrs, AttributeSet(), - 
ParamAttrs(AttributeSet(), AttributeSet(EnumAttr(NoCapture), - EnumAttr(WriteOnly)))) -__OMP_RTL_ATTRS(omp_get_place_num, GetterAttrs, AttributeSet(), ParamAttrs()) -__OMP_RTL_ATTRS(omp_get_partition_num_places, GetterAttrs, AttributeSet(), - ParamAttrs()) + ParamAttrs(SExt, AttributeSet(EnumAttr(NoCapture), + EnumAttr(WriteOnly)))) +__OMP_RTL_ATTRS(omp_get_place_num, GetterAttrs, SExt, ParamAttrs()) +__OMP_RTL_ATTRS(omp_get_partition_num_places, GetterAttrs, SExt, ParamAttrs()) __OMP_RTL_ATTRS(omp_get_partition_place_nums, GetterAttrs, AttributeSet(), ParamAttrs()) __OMP_RTL_ATTRS(omp_get_wtime, GetterArgWriteAttrs, AttributeSet(), ParamAttrs()) -__OMP_RTL_ATTRS(omp_set_num_threads, SetterAttrs, AttributeSet(), ParamAttrs()) -__OMP_RTL_ATTRS(omp_set_dynamic, SetterAttrs, AttributeSet(), ParamAttrs()) -__OMP_RTL_ATTRS(omp_set_nested, SetterAttrs, AttributeSet(), ParamAttrs()) -__OMP_RTL_ATTRS(omp_set_schedule, SetterAttrs, AttributeSet(), ParamAttrs()) +__OMP_RTL_ATTRS(omp_set_num_threads, SetterAttrs, AttributeSet(), + ParamAttrs(SExt)) +__OMP_RTL_ATTRS(omp_set_dynamic, SetterAttrs, AttributeSet(), ParamAttrs(SExt)) +__OMP_RTL_ATTRS(omp_set_nested, SetterAttrs, AttributeSet(), ParamAttrs(SExt)) +__OMP_RTL_ATTRS(omp_set_schedule, SetterAttrs, AttributeSet(), + ParamAttrs(SExt, SExt)) __OMP_RTL_ATTRS(omp_set_max_active_levels, SetterAttrs, AttributeSet(), - ParamAttrs()) + ParamAttrs(SExt)) -__OMP_RTL_ATTRS(__kmpc_master, InaccessibleArgOnlyAttrs, AttributeSet(), - ParamAttrs(ReadOnlyPtrAttrs)) +__OMP_RTL_ATTRS(__kmpc_master, InaccessibleArgOnlyAttrs, SExt, + ParamAttrs(ReadOnlyPtrAttrs, SExt)) __OMP_RTL_ATTRS(__kmpc_end_master, InaccessibleArgOnlyAttrs, AttributeSet(), - ParamAttrs(ReadOnlyPtrAttrs)) -__OMP_RTL_ATTRS(__kmpc_masked, InaccessibleArgOnlyAttrs, AttributeSet(), - ParamAttrs(ReadOnlyPtrAttrs)) + ParamAttrs(ReadOnlyPtrAttrs, SExt)) +__OMP_RTL_ATTRS(__kmpc_masked, InaccessibleArgOnlyAttrs, SExt, + ParamAttrs(ReadOnlyPtrAttrs, SExt, SExt)) __OMP_RTL_ATTRS(__kmpc_end_masked, InaccessibleArgOnlyAttrs, AttributeSet(), - ParamAttrs(ReadOnlyPtrAttrs)) + ParamAttrs(ReadOnlyPtrAttrs, SExt)) __OMP_RTL_ATTRS(__kmpc_critical, BarrierAttrs, AttributeSet(), - ParamAttrs(ReadOnlyPtrAttrs, AttributeSet(), AttributeSet())) + ParamAttrs(ReadOnlyPtrAttrs, SExt, AttributeSet())) __OMP_RTL_ATTRS(__kmpc_critical_with_hint, BarrierAttrs, AttributeSet(), - ParamAttrs(ReadOnlyPtrAttrs, AttributeSet(), AttributeSet(), - AttributeSet())) + ParamAttrs(ReadOnlyPtrAttrs, SExt, AttributeSet(), ZExt)) __OMP_RTL_ATTRS(__kmpc_end_critical, BarrierAttrs, AttributeSet(), - ParamAttrs(ReadOnlyPtrAttrs, AttributeSet(), AttributeSet())) + ParamAttrs(ReadOnlyPtrAttrs, SExt, AttributeSet())) __OMP_RTL_ATTRS(__kmpc_begin, DefaultAttrs, AttributeSet(), - ParamAttrs(ReadOnlyPtrAttrs)) + ParamAttrs(ReadOnlyPtrAttrs, SExt)) __OMP_RTL_ATTRS(__kmpc_end, DefaultAttrs, AttributeSet(), ParamAttrs(ReadOnlyPtrAttrs)) -__OMP_RTL_ATTRS(__kmpc_reduce, BarrierAttrs, AttributeSet(), - ParamAttrs(ReadOnlyPtrAttrs, AttributeSet(), AttributeSet(), - AttributeSet(), ReadOnlyPtrAttrs, AttributeSet())) -__OMP_RTL_ATTRS(__kmpc_reduce_nowait, BarrierAttrs, AttributeSet(), - ParamAttrs(ReadOnlyPtrAttrs, AttributeSet(), AttributeSet(), - AttributeSet(), ReadOnlyPtrAttrs, AttributeSet())) +__OMP_RTL_ATTRS(__kmpc_reduce, BarrierAttrs, SExt, + ParamAttrs(ReadOnlyPtrAttrs, SExt, SExt, SizeTyExt, + ReadOnlyPtrAttrs, AttributeSet())) +__OMP_RTL_ATTRS(__kmpc_reduce_nowait, BarrierAttrs, SExt, + ParamAttrs(ReadOnlyPtrAttrs, SExt, SExt, SizeTyExt, + 
ReadOnlyPtrAttrs, AttributeSet())) __OMP_RTL_ATTRS(__kmpc_end_reduce, BarrierAttrs, AttributeSet(), - ParamAttrs(ReadOnlyPtrAttrs, AttributeSet(), AttributeSet())) + ParamAttrs(ReadOnlyPtrAttrs, SExt, AttributeSet())) __OMP_RTL_ATTRS(__kmpc_end_reduce_nowait, BarrierAttrs, AttributeSet(), - ParamAttrs(ReadOnlyPtrAttrs, AttributeSet(), AttributeSet())) + ParamAttrs(ReadOnlyPtrAttrs, SExt, AttributeSet())) __OMP_RTL_ATTRS(__kmpc_ordered, BarrierAttrs, AttributeSet(), - ParamAttrs(ReadOnlyPtrAttrs)) + ParamAttrs(ReadOnlyPtrAttrs, SExt)) __OMP_RTL_ATTRS(__kmpc_end_ordered, BarrierAttrs, AttributeSet(), - ParamAttrs(ReadOnlyPtrAttrs)) + ParamAttrs(ReadOnlyPtrAttrs, SExt)) __OMP_RTL_ATTRS(__kmpc_for_static_init_4, GetterArgWriteAttrs, AttributeSet(), - ParamAttrs(ReadOnlyPtrAttrs, AttributeSet(), AttributeSet(), - ArgPtrAttrs, ArgPtrAttrs, ArgPtrAttrs, ArgPtrAttrs, - AttributeSet(), AttributeSet())) + ParamAttrs(ReadOnlyPtrAttrs, SExt, SExt, ArgPtrAttrs, + ArgPtrAttrs, ArgPtrAttrs, ArgPtrAttrs, SExt, SExt)) __OMP_RTL_ATTRS(__kmpc_for_static_init_4u, GetterArgWriteAttrs, AttributeSet(), - ParamAttrs(ReadOnlyPtrAttrs, AttributeSet(), AttributeSet(), - ArgPtrAttrs, ArgPtrAttrs, ArgPtrAttrs, ArgPtrAttrs, - AttributeSet(), AttributeSet())) + ParamAttrs(ReadOnlyPtrAttrs, SExt, SExt, ArgPtrAttrs, + ArgPtrAttrs, ArgPtrAttrs, ArgPtrAttrs, SExt, SExt)) __OMP_RTL_ATTRS(__kmpc_for_static_init_8, GetterArgWriteAttrs, AttributeSet(), - ParamAttrs(ReadOnlyPtrAttrs, AttributeSet(), AttributeSet(), - ArgPtrAttrs, ArgPtrAttrs, ArgPtrAttrs, ArgPtrAttrs, + ParamAttrs(ReadOnlyPtrAttrs, SExt, SExt, ArgPtrAttrs, + ArgPtrAttrs, ArgPtrAttrs, ArgPtrAttrs, AttributeSet(), AttributeSet())) __OMP_RTL_ATTRS(__kmpc_for_static_init_8u, GetterArgWriteAttrs, AttributeSet(), - ParamAttrs(ReadOnlyPtrAttrs, AttributeSet(), AttributeSet(), - ArgPtrAttrs, ArgPtrAttrs, ArgPtrAttrs, ArgPtrAttrs, + ParamAttrs(ReadOnlyPtrAttrs, SExt, SExt, ArgPtrAttrs, + ArgPtrAttrs, ArgPtrAttrs, ArgPtrAttrs, AttributeSet(), AttributeSet())) __OMP_RTL_ATTRS(__kmpc_for_static_fini, InaccessibleArgOnlyAttrs, - AttributeSet(), ParamAttrs(ReadOnlyPtrAttrs)) + AttributeSet(), ParamAttrs(ReadOnlyPtrAttrs, SExt)) __OMP_RTL_ATTRS(__kmpc_distribute_static_init_4, GetterArgWriteAttrs, AttributeSet(), - ParamAttrs(ReadOnlyPtrAttrs, AttributeSet(), AttributeSet(), - ArgPtrAttrs, ArgPtrAttrs, ArgPtrAttrs, ArgPtrAttrs, - AttributeSet(), AttributeSet())) + ParamAttrs(ReadOnlyPtrAttrs, SExt, SExt, ArgPtrAttrs, + ArgPtrAttrs, ArgPtrAttrs, ArgPtrAttrs, SExt, SExt)) __OMP_RTL_ATTRS(__kmpc_distribute_static_init_4u, GetterArgWriteAttrs, AttributeSet(), - ParamAttrs(ReadOnlyPtrAttrs, AttributeSet(), AttributeSet(), - ArgPtrAttrs, ArgPtrAttrs, ArgPtrAttrs, ArgPtrAttrs, - AttributeSet(), AttributeSet())) + ParamAttrs(ReadOnlyPtrAttrs, SExt, SExt, ArgPtrAttrs, + ArgPtrAttrs, ArgPtrAttrs, ArgPtrAttrs, SExt, SExt)) __OMP_RTL_ATTRS(__kmpc_distribute_static_init_8, GetterArgWriteAttrs, AttributeSet(), - ParamAttrs(ReadOnlyPtrAttrs, AttributeSet(), AttributeSet(), - ArgPtrAttrs, ArgPtrAttrs, ArgPtrAttrs, ArgPtrAttrs, + ParamAttrs(ReadOnlyPtrAttrs, SExt, SExt, ArgPtrAttrs, + ArgPtrAttrs, ArgPtrAttrs, ArgPtrAttrs, AttributeSet(), AttributeSet())) __OMP_RTL_ATTRS(__kmpc_distribute_static_init_8u, GetterArgWriteAttrs, AttributeSet(), - ParamAttrs(ReadOnlyPtrAttrs, AttributeSet(), AttributeSet(), - ArgPtrAttrs, ArgPtrAttrs, ArgPtrAttrs, ArgPtrAttrs, + ParamAttrs(ReadOnlyPtrAttrs, SExt, SExt, ArgPtrAttrs, + ArgPtrAttrs, ArgPtrAttrs, ArgPtrAttrs, AttributeSet(), AttributeSet())) 
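Aside (illustration, not from the patch): only the i32-wide arguments pick up SExt/ZExt in these entries; the 64-bit variants (_8/_8u) keep AttributeSet() for their bound/stride parameters because i64 values need no ABI extension on the affected targets. The ThreadSanitizer hunk further down applies the same rule when it selects argument indices with BitSize <= 32. A hypothetical helper (name made up) expressing that rule:

#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/IR/DerivedTypes.h"
using namespace llvm;

// Collect the indices of integer parameters narrow enough to need an
// extension attribute at all; i64 (and wider) parameters are skipped.
static SmallVector<unsigned, 4> extensibleArgIndices(ArrayRef<Type *> Params) {
  SmallVector<unsigned, 4> Idxs;
  for (unsigned I = 0, E = Params.size(); I != E; ++I)
    if (auto *ITy = dyn_cast<IntegerType>(Params[I]))
      if (ITy->getBitWidth() <= 32)
        Idxs.push_back(I);
  return Idxs;
}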
__OMP_RTL_ATTRS(__kmpc_distribute_static_fini, InaccessibleArgOnlyAttrs, - AttributeSet(), ParamAttrs(ReadOnlyPtrAttrs)) + AttributeSet(), ParamAttrs(ReadOnlyPtrAttrs, SExt)) __OMP_RTL_ATTRS(__kmpc_dist_dispatch_init_4, GetterArgWriteAttrs, AttributeSet(), - ParamAttrs(ReadOnlyPtrAttrs, AttributeSet(), AttributeSet(), - ArgPtrAttrs)) + ParamAttrs(ReadOnlyPtrAttrs, SExt, SExt, ArgPtrAttrs, SExt, + SExt, SExt, SExt)) __OMP_RTL_ATTRS(__kmpc_dist_dispatch_init_4u, GetterArgWriteAttrs, AttributeSet(), - ParamAttrs(ReadOnlyPtrAttrs, AttributeSet(), AttributeSet(), - ArgPtrAttrs)) + ParamAttrs(ReadOnlyPtrAttrs, SExt, SExt, ArgPtrAttrs, ZExt, + ZExt, SExt, SExt)) __OMP_RTL_ATTRS(__kmpc_dist_dispatch_init_8, GetterArgWriteAttrs, AttributeSet(), - ParamAttrs(ReadOnlyPtrAttrs, AttributeSet(), AttributeSet(), - ArgPtrAttrs)) + ParamAttrs(ReadOnlyPtrAttrs, SExt, SExt, ArgPtrAttrs)) __OMP_RTL_ATTRS(__kmpc_dist_dispatch_init_8u, GetterArgWriteAttrs, AttributeSet(), - ParamAttrs(ReadOnlyPtrAttrs, AttributeSet(), AttributeSet(), - ArgPtrAttrs)) + ParamAttrs(ReadOnlyPtrAttrs, SExt, SExt, ArgPtrAttrs)) __OMP_RTL_ATTRS(__kmpc_dispatch_init_4, GetterArgWriteAttrs, AttributeSet(), - ParamAttrs(ReadOnlyPtrAttrs)) + ParamAttrs(ReadOnlyPtrAttrs, SExt, SExt, SExt, SExt, SExt, SExt)) __OMP_RTL_ATTRS(__kmpc_dispatch_init_4u, GetterArgWriteAttrs, AttributeSet(), - ParamAttrs(ReadOnlyPtrAttrs)) + ParamAttrs(ReadOnlyPtrAttrs, SExt, SExt, ZExt, ZExt, SExt, SExt)) __OMP_RTL_ATTRS(__kmpc_dispatch_init_8, GetterArgWriteAttrs, AttributeSet(), - ParamAttrs(ReadOnlyPtrAttrs)) + ParamAttrs(ReadOnlyPtrAttrs, SExt, SExt)) __OMP_RTL_ATTRS(__kmpc_dispatch_init_8u, GetterArgWriteAttrs, AttributeSet(), - ParamAttrs(ReadOnlyPtrAttrs)) -__OMP_RTL_ATTRS(__kmpc_dispatch_next_4, GetterArgWriteAttrs, AttributeSet(), - ParamAttrs(ReadOnlyPtrAttrs, AttributeSet(), ArgPtrAttrs, - ArgPtrAttrs, ArgPtrAttrs, ArgPtrAttrs)) -__OMP_RTL_ATTRS(__kmpc_dispatch_next_4u, GetterArgWriteAttrs, AttributeSet(), - ParamAttrs(ReadOnlyPtrAttrs, AttributeSet(), ArgPtrAttrs, - ArgPtrAttrs, ArgPtrAttrs, ArgPtrAttrs)) -__OMP_RTL_ATTRS(__kmpc_dispatch_next_8, GetterArgWriteAttrs, AttributeSet(), - ParamAttrs(ReadOnlyPtrAttrs, AttributeSet(), ArgPtrAttrs, - ArgPtrAttrs, ArgPtrAttrs, ArgPtrAttrs)) -__OMP_RTL_ATTRS(__kmpc_dispatch_next_8u, GetterArgWriteAttrs, AttributeSet(), - ParamAttrs(ReadOnlyPtrAttrs, AttributeSet(), ArgPtrAttrs, - ArgPtrAttrs, ArgPtrAttrs, ArgPtrAttrs)) + ParamAttrs(ReadOnlyPtrAttrs, SExt, SExt)) +__OMP_RTL_ATTRS(__kmpc_dispatch_next_4, GetterArgWriteAttrs, SExt, + ParamAttrs(ReadOnlyPtrAttrs, SExt, ArgPtrAttrs, ArgPtrAttrs, + ArgPtrAttrs, ArgPtrAttrs)) +__OMP_RTL_ATTRS(__kmpc_dispatch_next_4u, GetterArgWriteAttrs, SExt, + ParamAttrs(ReadOnlyPtrAttrs, SExt, ArgPtrAttrs, ArgPtrAttrs, + ArgPtrAttrs, ArgPtrAttrs)) +__OMP_RTL_ATTRS(__kmpc_dispatch_next_8, GetterArgWriteAttrs, SExt, + ParamAttrs(ReadOnlyPtrAttrs, SExt, ArgPtrAttrs, ArgPtrAttrs, + ArgPtrAttrs, ArgPtrAttrs)) +__OMP_RTL_ATTRS(__kmpc_dispatch_next_8u, GetterArgWriteAttrs, SExt, + ParamAttrs(ReadOnlyPtrAttrs, SExt, ArgPtrAttrs, ArgPtrAttrs, + ArgPtrAttrs, ArgPtrAttrs)) __OMP_RTL_ATTRS(__kmpc_dispatch_fini_4, InaccessibleArgOnlyAttrs, - AttributeSet(), ParamAttrs(ReadOnlyPtrAttrs)) + AttributeSet(), ParamAttrs(ReadOnlyPtrAttrs, SExt)) __OMP_RTL_ATTRS(__kmpc_dispatch_fini_4u, InaccessibleArgOnlyAttrs, - AttributeSet(), ParamAttrs(ReadOnlyPtrAttrs)) + AttributeSet(), ParamAttrs(ReadOnlyPtrAttrs, SExt)) __OMP_RTL_ATTRS(__kmpc_dispatch_fini_8, InaccessibleArgOnlyAttrs, - AttributeSet(), 
ParamAttrs(ReadOnlyPtrAttrs)) + AttributeSet(), ParamAttrs(ReadOnlyPtrAttrs, SExt)) __OMP_RTL_ATTRS(__kmpc_dispatch_fini_8u, InaccessibleArgOnlyAttrs, - AttributeSet(), ParamAttrs(ReadOnlyPtrAttrs)) + AttributeSet(), ParamAttrs(ReadOnlyPtrAttrs, SExt)) __OMP_RTL_ATTRS(__kmpc_team_static_init_4, GetterArgWriteAttrs, AttributeSet(), - ParamAttrs(ReadOnlyPtrAttrs, AttributeSet(), ArgPtrAttrs, - ArgPtrAttrs, ArgPtrAttrs, ArgPtrAttrs)) + ParamAttrs(ReadOnlyPtrAttrs, SExt, ArgPtrAttrs, ArgPtrAttrs, + ArgPtrAttrs, ArgPtrAttrs, SExt, SExt)) __OMP_RTL_ATTRS(__kmpc_team_static_init_4u, GetterArgWriteAttrs, AttributeSet(), - ParamAttrs(ReadOnlyPtrAttrs, AttributeSet(), ArgPtrAttrs, - ArgPtrAttrs, ArgPtrAttrs, ArgPtrAttrs)) + ParamAttrs(ReadOnlyPtrAttrs, SExt, ArgPtrAttrs, ArgPtrAttrs, + ArgPtrAttrs, ArgPtrAttrs, SExt, SExt)) __OMP_RTL_ATTRS(__kmpc_team_static_init_8, GetterArgWriteAttrs, AttributeSet(), - ParamAttrs(ReadOnlyPtrAttrs, AttributeSet(), ArgPtrAttrs, - ArgPtrAttrs, ArgPtrAttrs, ArgPtrAttrs)) + ParamAttrs(ReadOnlyPtrAttrs, SExt, ArgPtrAttrs, ArgPtrAttrs, + ArgPtrAttrs, ArgPtrAttrs)) __OMP_RTL_ATTRS(__kmpc_team_static_init_8u, GetterArgWriteAttrs, AttributeSet(), - ParamAttrs(ReadOnlyPtrAttrs, AttributeSet(), ArgPtrAttrs, - ArgPtrAttrs, ArgPtrAttrs, ArgPtrAttrs)) + ParamAttrs(ReadOnlyPtrAttrs, SExt, ArgPtrAttrs, ArgPtrAttrs, + ArgPtrAttrs, ArgPtrAttrs)) __OMP_RTL_ATTRS(__kmpc_dist_for_static_init_4, GetterArgWriteAttrs, AttributeSet(), - ParamAttrs(ReadOnlyPtrAttrs, AttributeSet(), AttributeSet(), - ArgPtrAttrs, ArgPtrAttrs, ArgPtrAttrs, ArgPtrAttrs, - ArgPtrAttrs)) + ParamAttrs(ReadOnlyPtrAttrs, SExt, SExt, ArgPtrAttrs, + ArgPtrAttrs, ArgPtrAttrs, ArgPtrAttrs, + ArgPtrAttrs, SExt, SExt)) __OMP_RTL_ATTRS(__kmpc_dist_for_static_init_4u, GetterArgWriteAttrs, AttributeSet(), - ParamAttrs(ReadOnlyPtrAttrs, AttributeSet(), AttributeSet(), - ArgPtrAttrs, ArgPtrAttrs, ArgPtrAttrs, ArgPtrAttrs, - ArgPtrAttrs)) + ParamAttrs(ReadOnlyPtrAttrs, SExt, SExt, ArgPtrAttrs, + ArgPtrAttrs, ArgPtrAttrs, ArgPtrAttrs, + ArgPtrAttrs, SExt, SExt)) __OMP_RTL_ATTRS(__kmpc_dist_for_static_init_8, GetterArgWriteAttrs, AttributeSet(), - ParamAttrs(ReadOnlyPtrAttrs, AttributeSet(), AttributeSet(), - ArgPtrAttrs, ArgPtrAttrs, ArgPtrAttrs, ArgPtrAttrs, - ArgPtrAttrs)) + ParamAttrs(ReadOnlyPtrAttrs, SExt, SExt, ArgPtrAttrs, + ArgPtrAttrs, ArgPtrAttrs, ArgPtrAttrs, ArgPtrAttrs)) __OMP_RTL_ATTRS(__kmpc_dist_for_static_init_8u, GetterArgWriteAttrs, AttributeSet(), - ParamAttrs(ReadOnlyPtrAttrs, AttributeSet(), AttributeSet(), - ArgPtrAttrs, ArgPtrAttrs, ArgPtrAttrs, ArgPtrAttrs, - ArgPtrAttrs)) + ParamAttrs(ReadOnlyPtrAttrs, SExt, SExt, ArgPtrAttrs, + ArgPtrAttrs, ArgPtrAttrs, ArgPtrAttrs, ArgPtrAttrs)) -__OMP_RTL_ATTRS(__kmpc_single, BarrierAttrs, AttributeSet(), - ParamAttrs(ReadOnlyPtrAttrs)) +__OMP_RTL_ATTRS(__kmpc_single, BarrierAttrs, SExt, + ParamAttrs(ReadOnlyPtrAttrs, SExt)) __OMP_RTL_ATTRS(__kmpc_end_single, BarrierAttrs, AttributeSet(), - ParamAttrs(ReadOnlyPtrAttrs)) + ParamAttrs(ReadOnlyPtrAttrs, SExt)) __OMP_RTL_ATTRS(__kmpc_omp_task_alloc, DefaultAttrs, ReturnPtrAttrs, - ParamAttrs(ReadOnlyPtrAttrs, AttributeSet(), AttributeSet(), - AttributeSet(), AttributeSet(), ReadOnlyPtrAttrs)) -__OMP_RTL_ATTRS(__kmpc_omp_task, DefaultAttrs, AttributeSet(), - ParamAttrs(ReadOnlyPtrAttrs, AttributeSet(), AttributeSet())) + ParamAttrs(ReadOnlyPtrAttrs, SExt, SExt, SizeTyExt, SizeTyExt, + ReadOnlyPtrAttrs)) +__OMP_RTL_ATTRS(__kmpc_omp_task, DefaultAttrs, SExt, + ParamAttrs(ReadOnlyPtrAttrs, SExt, AttributeSet())) 
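Aside (illustration, not from the patch): SizeTyExt, used above for __kmpc_omp_task_alloc's size_t parameters, is defined earlier in this file to request an extension (ZExt at the .def level) only when the module's pointer-sized integer is narrower than 64 bits, so 32-bit targets still get the attribute while 64-bit targets do not. The same condition written as a stand-alone predicate:

#include "llvm/IR/DataLayout.h"
#include "llvm/IR/DerivedTypes.h"
#include "llvm/IR/Module.h"
using namespace llvm;

// size_t-sized arguments only need an extension request on sub-64-bit targets.
static bool sizeTNeedsExtension(const Module &M) {
  return M.getDataLayout().getIntPtrType(M.getContext())->getBitWidth() < 64;
}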
__OMP_RTL_ATTRS(__kmpc_end_taskgroup, BarrierAttrs, AttributeSet(), - ParamAttrs(ReadOnlyPtrAttrs)) + ParamAttrs(ReadOnlyPtrAttrs, SExt)) __OMP_RTL_ATTRS(__kmpc_taskgroup, BarrierAttrs, AttributeSet(), - ParamAttrs(ReadOnlyPtrAttrs)) + ParamAttrs(ReadOnlyPtrAttrs, SExt)) __OMP_RTL_ATTRS(__kmpc_omp_task_begin_if0, DefaultAttrs, AttributeSet(), - ParamAttrs(ReadOnlyPtrAttrs)) + ParamAttrs(ReadOnlyPtrAttrs, SExt)) __OMP_RTL_ATTRS(__kmpc_omp_task_complete_if0, DefaultAttrs, AttributeSet(), - ParamAttrs(ReadOnlyPtrAttrs)) -__OMP_RTL_ATTRS(__kmpc_omp_task_with_deps, DefaultAttrs, AttributeSet(), - ParamAttrs(ReadOnlyPtrAttrs, AttributeSet(), AttributeSet(), - AttributeSet(), ReadOnlyPtrAttrs, AttributeSet(), - ReadOnlyPtrAttrs)) + ParamAttrs(ReadOnlyPtrAttrs, SExt)) +__OMP_RTL_ATTRS(__kmpc_omp_task_with_deps, DefaultAttrs, SExt, + ParamAttrs(ReadOnlyPtrAttrs, SExt, AttributeSet(), SExt, + ReadOnlyPtrAttrs, SExt, ReadOnlyPtrAttrs)) __OMP_RTL_ATTRS(__kmpc_taskloop, DefaultAttrs, AttributeSet(), - ParamAttrs(ReadOnlyPtrAttrs, AttributeSet(), AttributeSet(), - AttributeSet(), ArgPtrAttrs, ArgPtrAttrs, - AttributeSet(), AttributeSet(), AttributeSet(), - AttributeSet(), AttributeSet())) + ParamAttrs(ReadOnlyPtrAttrs, SExt, AttributeSet(), SExt, + ArgPtrAttrs, ArgPtrAttrs, AttributeSet(), SExt, SExt)) __OMP_RTL_ATTRS(__kmpc_omp_target_task_alloc, DefaultAttrs, ReturnPtrAttrs, - ParamAttrs(ReadOnlyPtrAttrs, AttributeSet(), AttributeSet(), - AttributeSet(), AttributeSet(), ReadOnlyPtrAttrs, - AttributeSet())) + ParamAttrs(ReadOnlyPtrAttrs, SExt, SExt, SizeTyExt, SizeTyExt, + ReadOnlyPtrAttrs, AttributeSet())) __OMP_RTL_ATTRS(__kmpc_taskred_modifier_init, DefaultAttrs, ReturnPtrAttrs, - ParamAttrs(ReadOnlyPtrAttrs)) -__OMP_RTL_ATTRS(__kmpc_taskred_init, DefaultAttrs, AttributeSet(), ParamAttrs()) + ParamAttrs(ReadOnlyPtrAttrs, SExt, SExt, SExt)) +__OMP_RTL_ATTRS(__kmpc_taskred_init, DefaultAttrs, AttributeSet(), + ParamAttrs(SExt, SExt)) __OMP_RTL_ATTRS(__kmpc_task_reduction_modifier_fini, BarrierAttrs, - AttributeSet(), ParamAttrs(ReadOnlyPtrAttrs)) + AttributeSet(), ParamAttrs(ReadOnlyPtrAttrs, SExt, SExt)) __OMP_RTL_ATTRS(__kmpc_task_reduction_get_th_data, DefaultAttrs, ReturnPtrAttrs, - ParamAttrs()) + ParamAttrs(SExt)) __OMP_RTL_ATTRS(__kmpc_task_reduction_init, DefaultAttrs, ReturnPtrAttrs, - ParamAttrs()) + ParamAttrs(SExt, SExt)) __OMP_RTL_ATTRS(__kmpc_task_reduction_modifier_init, DefaultAttrs, - ReturnPtrAttrs, ParamAttrs()) + ReturnPtrAttrs, ParamAttrs(AttributeSet(), SExt, SExt, SExt)) __OMP_RTL_ATTRS(__kmpc_proxy_task_completed_ooo, DefaultAttrs, AttributeSet(), ParamAttrs()) __OMP_RTL_ATTRS(__kmpc_omp_wait_deps, BarrierAttrs, AttributeSet(), - ParamAttrs(ReadOnlyPtrAttrs, AttributeSet(), AttributeSet(), - ReadOnlyPtrAttrs)) + ParamAttrs(ReadOnlyPtrAttrs, SExt, SExt, ReadOnlyPtrAttrs, SExt)) __OMP_RTL_ATTRS(__kmpc_omp_taskwait_deps_51, BarrierAttrs, AttributeSet(), - ParamAttrs(ReadOnlyPtrAttrs, AttributeSet(), AttributeSet(), - ReadOnlyPtrAttrs)) -__OMP_RTL_ATTRS(__kmpc_cancellationpoint, DefaultAttrs, AttributeSet(), - ParamAttrs(ReadOnlyPtrAttrs)) + ParamAttrs(ReadOnlyPtrAttrs, SExt, SExt, ReadOnlyPtrAttrs)) +__OMP_RTL_ATTRS(__kmpc_cancellationpoint, DefaultAttrs, SExt, + ParamAttrs(ReadOnlyPtrAttrs, SExt, SExt)) __OMP_RTL_ATTRS(__kmpc_fork_teams, ForkAttrs, AttributeSet(), - ParamAttrs(ReadOnlyPtrAttrs, AttributeSet(), ReadOnlyPtrAttrs)) + ParamAttrs(ReadOnlyPtrAttrs, SExt, ReadOnlyPtrAttrs)) __OMP_RTL_ATTRS(__kmpc_push_num_teams, InaccessibleArgOnlyAttrs, AttributeSet(), - 
ParamAttrs(ReadOnlyPtrAttrs)) + ParamAttrs(ReadOnlyPtrAttrs, SExt, SExt, SExt)) __OMP_RTL_ATTRS(__kmpc_copyprivate, DefaultAttrs, AttributeSet(), - ParamAttrs(ReadOnlyPtrAttrs, AttributeSet(), AttributeSet(), - ReadOnlyPtrAttrs)) + ParamAttrs(ReadOnlyPtrAttrs, SExt, SizeTyExt, + ReadOnlyPtrAttrs, AttributeSet(), SExt)) __OMP_RTL_ATTRS(__kmpc_threadprivate_cached, DefaultAttrs, ReturnPtrAttrs, - ParamAttrs(ReadOnlyPtrAttrs)) + ParamAttrs(ReadOnlyPtrAttrs, SExt, AttributeSet(), SizeTyExt)) __OMP_RTL_ATTRS(__kmpc_threadprivate_register, DefaultAttrs, AttributeSet(), ParamAttrs(ReadOnlyPtrAttrs, AttributeSet(), ReadOnlyPtrAttrs, ReadOnlyPtrAttrs, ReadOnlyPtrAttrs)) __OMP_RTL_ATTRS(__kmpc_doacross_init, BarrierAttrs, AttributeSet(), - ParamAttrs(ReadOnlyPtrAttrs)) + ParamAttrs(ReadOnlyPtrAttrs, SExt, SExt)) __OMP_RTL_ATTRS(__kmpc_doacross_post, BarrierAttrs, AttributeSet(), - ParamAttrs(ReadOnlyPtrAttrs, AttributeSet(), ReadOnlyPtrAttrs)) + ParamAttrs(ReadOnlyPtrAttrs, SExt, ReadOnlyPtrAttrs)) __OMP_RTL_ATTRS(__kmpc_doacross_wait, BarrierAttrs, AttributeSet(), - ParamAttrs(ReadOnlyPtrAttrs, AttributeSet(), ReadOnlyPtrAttrs)) + ParamAttrs(ReadOnlyPtrAttrs, SExt, ReadOnlyPtrAttrs)) __OMP_RTL_ATTRS(__kmpc_doacross_fini, BarrierAttrs, AttributeSet(), - ParamAttrs(ReadOnlyPtrAttrs)) + ParamAttrs(ReadOnlyPtrAttrs, SExt)) -__OMP_RTL_ATTRS(__kmpc_alloc_shared, AttributeSet( - EnumAttr(NoUnwind), - EnumAttr(NoSync), - AllocSizeAttr(0, std::nullopt)), ReturnPtrAttrs, ParamAttrs()) +__OMP_RTL_ATTRS(__kmpc_alloc_shared, + AttributeSet(EnumAttr(NoUnwind), EnumAttr(NoSync), + AllocSizeAttr(0, std::nullopt)), + ReturnPtrAttrs, ParamAttrs(SizeTyExt)) __OMP_RTL_ATTRS(__kmpc_free_shared, DeviceAllocAttrs, AttributeSet(), - ParamAttrs(AttributeSet(EnumAttr(NoCapture), EnumAttr(AllocatedPointer)))) - -__OMP_RTL_ATTRS(__kmpc_alloc, DefaultAttrs, ReturnPtrAttrs, ParamAttrs()) + ParamAttrs(AttributeSet(EnumAttr(NoCapture), + EnumAttr(AllocatedPointer)), + SizeTyExt)) +__OMP_RTL_ATTRS(__kmpc_begin_sharing_variables, AttributeSet(), AttributeSet(), + ParamAttrs(AttributeSet(), SizeTyExt)) + +__OMP_RTL_ATTRS(__kmpc_alloc, DefaultAttrs, ReturnPtrAttrs, + ParamAttrs(SExt, SizeTyExt)) __OMP_RTL_ATTRS(__kmpc_aligned_alloc, DefaultAttrs, ReturnPtrAttrs, - ParamAttrs()) -__OMP_RTL_ATTRS(__kmpc_free, AllocAttrs, AttributeSet(), ParamAttrs()) + ParamAttrs(SExt, SizeTyExt, SizeTyExt)) +__OMP_RTL_ATTRS(__kmpc_free, AllocAttrs, AttributeSet(), + ParamAttrs(SExt)) + +__OMP_RTL_ATTRS(__tgt_interop_init, AttributeSet(), AttributeSet(), + ParamAttrs(AttributeSet(), SExt, AttributeSet(), SExt, + SExt, AttributeSet(), AttributeSet(), SExt)) +__OMP_RTL_ATTRS(__tgt_interop_destroy, AttributeSet(), AttributeSet(), + ParamAttrs(AttributeSet(), SExt, AttributeSet(), SExt, SExt, + AttributeSet(), SExt)) +__OMP_RTL_ATTRS(__tgt_interop_use, AttributeSet(), AttributeSet(), + ParamAttrs(AttributeSet(), SExt, AttributeSet(), SExt, SExt, + AttributeSet(), SExt)) __OMP_RTL_ATTRS(__kmpc_init_allocator, DefaultAttrs, ReturnPtrAttrs, - ParamAttrs()) + ParamAttrs(SExt, AttributeSet(), SExt)) __OMP_RTL_ATTRS(__kmpc_destroy_allocator, AllocAttrs, AttributeSet(), - ParamAttrs()) + ParamAttrs(SExt)) __OMP_RTL_ATTRS(__kmpc_push_target_tripcount_mapper, SetterAttrs, AttributeSet(), ParamAttrs()) -__OMP_RTL_ATTRS(__tgt_target_mapper, ForkAttrs, AttributeSet(), ParamAttrs()) -__OMP_RTL_ATTRS(__tgt_target_nowait_mapper, ForkAttrs, AttributeSet(), - ParamAttrs()) -__OMP_RTL_ATTRS(__tgt_target_teams_mapper, ForkAttrs, AttributeSet(), - ParamAttrs()) 
-__OMP_RTL_ATTRS(__tgt_target_teams_nowait_mapper, ForkAttrs, AttributeSet(), - ParamAttrs()) -__OMP_RTL_ATTRS(__tgt_target_kernel, ForkAttrs, AttributeSet(), - ParamAttrs()) -__OMP_RTL_ATTRS(__tgt_target_kernel_nowait, ForkAttrs, AttributeSet(), - ParamAttrs()) +__OMP_RTL_ATTRS(__tgt_target_mapper, ForkAttrs, SExt, + ParamAttrs(AttributeSet(),AttributeSet(),AttributeSet(), SExt)) +__OMP_RTL_ATTRS(__tgt_target_nowait_mapper, ForkAttrs, SExt, + ParamAttrs(AttributeSet(), AttributeSet(), AttributeSet(), SExt, + AttributeSet(), AttributeSet(), AttributeSet(), + AttributeSet(), AttributeSet(), AttributeSet(), + SExt, AttributeSet(), SExt)) +__OMP_RTL_ATTRS(__tgt_target_teams_mapper, ForkAttrs, SExt, + ParamAttrs(AttributeSet(), AttributeSet(), AttributeSet(), SExt, + AttributeSet(), AttributeSet(), AttributeSet(), + AttributeSet(), AttributeSet(), AttributeSet(), SExt, + SExt)) +__OMP_RTL_ATTRS(__tgt_target_teams_nowait_mapper, ForkAttrs, SExt, + ParamAttrs(AttributeSet(), AttributeSet(), AttributeSet(), SExt, + AttributeSet(), AttributeSet(), AttributeSet(), + AttributeSet(), AttributeSet(), AttributeSet(), + SExt, SExt, SExt, AttributeSet(), SExt)) +__OMP_RTL_ATTRS(__tgt_target_kernel, ForkAttrs, SExt, + ParamAttrs(AttributeSet(), AttributeSet(), SExt, SExt)) +__OMP_RTL_ATTRS(__tgt_target_kernel_nowait, ForkAttrs, SExt, + ParamAttrs(AttributeSet(), AttributeSet(), SExt, SExt, + AttributeSet(), AttributeSet(), SExt, AttributeSet(), + SExt)) __OMP_RTL_ATTRS(__tgt_register_requires, ForkAttrs, AttributeSet(), ParamAttrs()) __OMP_RTL_ATTRS(__tgt_target_data_begin_mapper, ForkAttrs, AttributeSet(), - ParamAttrs()) -__OMP_RTL_ATTRS(__tgt_target_data_begin_nowait_mapper, ForkAttrs, - AttributeSet(), ParamAttrs()) + ParamAttrs(AttributeSet(), AttributeSet(), SExt)) +__OMP_RTL_ATTRS(__tgt_target_data_begin_nowait_mapper, ForkAttrs, AttributeSet(), + ParamAttrs(AttributeSet(), AttributeSet(), SExt, AttributeSet(), + AttributeSet(), AttributeSet(), AttributeSet(), + AttributeSet(), AttributeSet())) +__OMP_RTL_ATTRS(__tgt_target_data_begin_mapper_issue, AttributeSet(), + AttributeSet(), + ParamAttrs(AttributeSet(), AttributeSet(), SExt)) __OMP_RTL_ATTRS(__tgt_target_data_end_mapper, ForkAttrs, AttributeSet(), - ParamAttrs()) -__OMP_RTL_ATTRS(__tgt_target_data_end_nowait_mapper, ForkAttrs, - AttributeSet(), ParamAttrs()) + ParamAttrs(AttributeSet(), AttributeSet(), SExt)) +__OMP_RTL_ATTRS(__tgt_target_data_end_nowait_mapper, ForkAttrs, AttributeSet(), + ParamAttrs(AttributeSet(), AttributeSet(), SExt, AttributeSet(), + AttributeSet(), AttributeSet(), AttributeSet(), + AttributeSet(), AttributeSet())) __OMP_RTL_ATTRS(__tgt_target_data_update_mapper, ForkAttrs, AttributeSet(), - ParamAttrs()) -__OMP_RTL_ATTRS(__tgt_target_data_update_nowait_mapper, ForkAttrs, - AttributeSet(), ParamAttrs()) + ParamAttrs(AttributeSet(), AttributeSet(), SExt)) +__OMP_RTL_ATTRS(__tgt_target_data_update_nowait_mapper, ForkAttrs, AttributeSet(), + ParamAttrs(AttributeSet(), AttributeSet(), SExt, AttributeSet(), + AttributeSet(), AttributeSet(), AttributeSet(), + AttributeSet(), AttributeSet())) __OMP_RTL_ATTRS(__tgt_mapper_num_components, ForkAttrs, AttributeSet(), ParamAttrs()) __OMP_RTL_ATTRS(__tgt_push_mapper_component, ForkAttrs, AttributeSet(), ParamAttrs()) __OMP_RTL_ATTRS(__kmpc_task_allow_completion_event, DefaultAttrs, - ReturnPtrAttrs, ParamAttrs(ReadOnlyPtrAttrs)) + ReturnPtrAttrs, ParamAttrs(ReadOnlyPtrAttrs, SExt)) +__OMP_RTL_ATTRS(__kmpc_target_init, AttributeSet(), SExt, + ParamAttrs(AttributeSet(), SExt, SExt)) 
+__OMP_RTL_ATTRS(__kmpc_target_deinit, AttributeSet(), AttributeSet(), + ParamAttrs(AttributeSet(), SExt)) __OMP_RTL_ATTRS(__kmpc_parallel_51, AlwaysInlineAttrs, AttributeSet(), - ParamAttrs()) + ParamAttrs(AttributeSet(), SExt, SExt, SExt, SExt, + AttributeSet(), AttributeSet(), AttributeSet(), + SizeTyExt)) +__OMP_RTL_ATTRS(__kmpc_serialized_parallel, InaccessibleArgOnlyAttrs, + AttributeSet(), ParamAttrs(ReadOnlyPtrAttrs, SExt)) +__OMP_RTL_ATTRS(__kmpc_end_serialized_parallel, InaccessibleArgOnlyAttrs, + AttributeSet(), ParamAttrs(ReadOnlyPtrAttrs, SExt)) +__OMP_RTL_ATTRS(__kmpc_shuffle_int32, AttributeSet(), SExt, + ParamAttrs(SExt, SExt, SExt)) +__OMP_RTL_ATTRS(__kmpc_nvptx_parallel_reduce_nowait_v2, AttributeSet(), SExt, + ParamAttrs(AttributeSet(), SExt, SExt, SizeTyExt)) +__OMP_RTL_ATTRS(__kmpc_nvptx_end_reduce_nowait, AttributeSet(), AttributeSet(), + ParamAttrs(SExt)) +__OMP_RTL_ATTRS(__kmpc_nvptx_teams_reduce_nowait_v2, AttributeSet(), SExt, + ParamAttrs(AttributeSet(), SExt, AttributeSet(), ZExt)) + +__OMP_RTL_ATTRS(__kmpc_shuffle_int64, AttributeSet(), AttributeSet(), + ParamAttrs(AttributeSet(), SExt, SExt)) + +__OMP_RTL_ATTRS(__kmpc_is_spmd_exec_mode, AttributeSet(), SExt, ParamAttrs()) #undef __OMP_RTL_ATTRS #undef OMP_RTL_ATTRS diff --git a/llvm/lib/Analysis/TargetLibraryInfo.cpp b/llvm/lib/Analysis/TargetLibraryInfo.cpp --- a/llvm/lib/Analysis/TargetLibraryInfo.cpp +++ b/llvm/lib/Analysis/TargetLibraryInfo.cpp @@ -168,25 +168,10 @@ TLI.setUnavailable(LibFunc_fputs_unlocked); TLI.setUnavailable(LibFunc_fgets_unlocked); - bool ShouldExtI32Param = false, ShouldExtI32Return = false, - ShouldSignExtI32Param = false, ShouldSignExtI32Return = false; - // PowerPC64, Sparc64, SystemZ need signext/zeroext on i32 parameters and - // returns corresponding to C-level ints and unsigned ints. - if (T.isPPC64() || T.getArch() == Triple::sparcv9 || - T.getArch() == Triple::systemz) { - ShouldExtI32Param = true; - ShouldExtI32Return = true; - } - // Mips and riscv64, on the other hand, needs signext on i32 parameters - // corresponding to both signed and unsigned ints. - if (T.isMIPS() || T.isRISCV64()) { - ShouldSignExtI32Param = true; - } - // riscv64 needs signext on i32 returns corresponding to both signed and - // unsigned ints. - if (T.isRISCV64()) { - ShouldSignExtI32Return = true; - } + bool ShouldExtI32Param, ShouldExtI32Return; + bool ShouldSignExtI32Param, ShouldSignExtI32Return; + TargetLibraryInfo::initExtensionsForTriple(ShouldExtI32Param, + ShouldExtI32Return, ShouldSignExtI32Param, ShouldSignExtI32Return, T); TLI.setShouldExtI32Param(ShouldExtI32Param); TLI.setShouldExtI32Return(ShouldExtI32Return); TLI.setShouldSignExtI32Param(ShouldSignExtI32Param); diff --git a/llvm/lib/Frontend/OpenMP/OMPIRBuilder.cpp b/llvm/lib/Frontend/OpenMP/OMPIRBuilder.cpp --- a/llvm/lib/Frontend/OpenMP/OMPIRBuilder.cpp +++ b/llvm/lib/Frontend/OpenMP/OMPIRBuilder.cpp @@ -330,6 +330,7 @@ void OpenMPIRBuilder::addAttributes(omp::RuntimeFunction FnID, Function &Fn) { LLVMContext &Ctx = Fn.getContext(); + Triple T(M.getTargetTriple()); // Get the function's current attributes. auto Attrs = Fn.getAttributes(); @@ -339,6 +340,25 @@ for (size_t ArgNo = 0; ArgNo < Fn.arg_size(); ++ArgNo) ArgAttrs.emplace_back(Attrs.getParamAttrs(ArgNo)); + // Add AS to FnAS while taking special care with integer extensions. 
+ auto addAttrSet = [&](AttributeSet &FnAS, const AttributeSet &AS, + bool Param = true) -> void { + bool HasSignExt = AS.hasAttribute(Attribute::SExt); + bool HasZeroExt = AS.hasAttribute(Attribute::ZExt); + if (HasSignExt || HasZeroExt) { + assert(AS.getNumAttributes() == 1 && + "Currently not handling extension attr combined with others."); + if (Param) { + if (auto AK = TargetLibraryInfo::getExtAttrForI32Param(T, HasSignExt)) + FnAS = FnAS.addAttribute(Ctx, AK); + } else + if (auto AK = TargetLibraryInfo::getExtAttrForI32Return(T, HasSignExt)) + FnAS = FnAS.addAttribute(Ctx, AK); + } else { + FnAS = FnAS.addAttributes(Ctx, AS); + } + }; + #define OMP_ATTRS_SET(VarName, AttrSet) AttributeSet VarName = AttrSet; #include "llvm/Frontend/OpenMP/OMPKinds.def" @@ -347,10 +367,9 @@ #define OMP_RTL_ATTRS(Enum, FnAttrSet, RetAttrSet, ArgAttrSets) \ case Enum: \ FnAttrs = FnAttrs.addAttributes(Ctx, FnAttrSet); \ - RetAttrs = RetAttrs.addAttributes(Ctx, RetAttrSet); \ + addAttrSet(RetAttrs, RetAttrSet, /*Param*/false); \ for (size_t ArgNo = 0; ArgNo < ArgAttrSets.size(); ++ArgNo) \ - ArgAttrs[ArgNo] = \ - ArgAttrs[ArgNo].addAttributes(Ctx, ArgAttrSets[ArgNo]); \ + addAttrSet(ArgAttrs[ArgNo], ArgAttrSets[ArgNo]); \ Fn.setAttributes(AttributeList::get(Ctx, FnAttrs, RetAttrs, ArgAttrs)); \ break; #include "llvm/Frontend/OpenMP/OMPKinds.def" @@ -3762,9 +3781,9 @@ Value *ThreadId = getOrCreateThreadID(Ident); if (Device == nullptr) Device = ConstantInt::get(Int32, -1); - Constant *InteropTypeVal = ConstantInt::get(Int64, (int)InteropType); + Constant *InteropTypeVal = ConstantInt::get(Int32, (int)InteropType); if (NumDependences == nullptr) { - NumDependences = ConstantInt::get(Int32, 0); + NumDependences = ConstantInt::get(Int64, 0); PointerType *PointerTypeVar = Type::getInt8PtrTy(M.getContext()); DependenceAddress = ConstantPointerNull::get(PointerTypeVar); } diff --git a/llvm/lib/Transforms/Instrumentation/AddressSanitizer.cpp b/llvm/lib/Transforms/Instrumentation/AddressSanitizer.cpp --- a/llvm/lib/Transforms/Instrumentation/AddressSanitizer.cpp +++ b/llvm/lib/Transforms/Instrumentation/AddressSanitizer.cpp @@ -712,7 +712,7 @@ private: friend struct FunctionStackPoisoner; - void initializeCallbacks(Module &M); + void initializeCallbacks(Module &M, const TargetLibraryInfo *TLI); bool LooksLikeCodeInBug11395(Instruction *I); bool GlobalIsLinkerInitialized(GlobalVariable *G); @@ -2484,7 +2484,7 @@ return true; } -void AddressSanitizer::initializeCallbacks(Module &M) { +void AddressSanitizer::initializeCallbacks(Module &M, const TargetLibraryInfo *TLI) { IRBuilder<> IRB(*C); // Create __asan_report* callbacks. // IsWrite, TypeSize and Exp are encoded in the function name. 
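Note (illustration, not part of the patch): the sanitizer and GCOV hunks below all switch from hand-built AttributeLists to the new TargetLibraryInfo::getAttrList helper. A minimal, hypothetical example of the pattern (the callback name is made up):

#include "llvm/Analysis/TargetLibraryInfo.h"
#include "llvm/IR/IRBuilder.h"
#include "llvm/IR/Module.h"
using namespace llvm;

// Declare a made-up runtime callback whose second parameter is an unsigned
// i32. getAttrList adds the extension attribute only where the target ABI
// needs one (zeroext on s390x, signext on MIPS, nothing on x86-64).
static FunctionCallee declareExampleCallback(Module &M,
                                             const TargetLibraryInfo &TLI) {
  LLVMContext &C = M.getContext();
  IRBuilder<> IRB(C);
  AttributeList AL = TLI.getAttrList(&C, {1}, /*Signed=*/false);
  return M.getOrInsertFunction("__example_report", AL, IRB.getVoidTy(),
                               IRB.getInt8PtrTy(), IRB.getInt32Ty());
}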
@@ -2496,18 +2496,24 @@ SmallVector Args2 = {IntptrTy, IntptrTy}; SmallVector Args1{1, IntptrTy}; + AttributeList AL2; + AttributeList AL1; if (Exp) { Type *ExpType = Type::getInt32Ty(*C); Args2.push_back(ExpType); Args1.push_back(ExpType); + if (auto AK = TLI->getExtAttrForI32Param(false)) { + AL2 = AL2.addParamAttribute(*C, 2, AK); + AL1 = AL1.addParamAttribute(*C, 1, AK); + } } AsanErrorCallbackSized[AccessIsWrite][Exp] = M.getOrInsertFunction( kAsanReportErrorTemplate + ExpStr + TypeStr + "_n" + EndingStr, - FunctionType::get(IRB.getVoidTy(), Args2, false)); + FunctionType::get(IRB.getVoidTy(), Args2, false), AL2); AsanMemoryAccessCallbackSized[AccessIsWrite][Exp] = M.getOrInsertFunction( ClMemoryAccessCallbackPrefix + ExpStr + TypeStr + "N" + EndingStr, - FunctionType::get(IRB.getVoidTy(), Args2, false)); + FunctionType::get(IRB.getVoidTy(), Args2, false), AL2); for (size_t AccessSizeIndex = 0; AccessSizeIndex < kNumberOfAccessSizes; AccessSizeIndex++) { @@ -2515,12 +2521,12 @@ AsanErrorCallback[AccessIsWrite][Exp][AccessSizeIndex] = M.getOrInsertFunction( kAsanReportErrorTemplate + ExpStr + Suffix + EndingStr, - FunctionType::get(IRB.getVoidTy(), Args1, false)); + FunctionType::get(IRB.getVoidTy(), Args1, false), AL1); AsanMemoryAccessCallback[AccessIsWrite][Exp][AccessSizeIndex] = M.getOrInsertFunction( ClMemoryAccessCallbackPrefix + ExpStr + Suffix + EndingStr, - FunctionType::get(IRB.getVoidTy(), Args1, false)); + FunctionType::get(IRB.getVoidTy(), Args1, false), AL1); } } } @@ -2536,6 +2542,7 @@ IRB.getInt8PtrTy(), IRB.getInt8PtrTy(), IRB.getInt8PtrTy(), IntptrTy); AsanMemset = M.getOrInsertFunction(MemIntrinCallbackPrefix + "memset", + TLI->getAttrList(C, {1}, /*Signed=*/false), IRB.getInt8PtrTy(), IRB.getInt8PtrTy(), IRB.getInt32Ty(), IntptrTy); @@ -2662,7 +2669,7 @@ LLVM_DEBUG(dbgs() << "ASAN instrumenting:\n" << F << "\n"); - initializeCallbacks(*F.getParent()); + initializeCallbacks(*F.getParent(), TLI); FunctionStateRAII CleanupObj(this); diff --git a/llvm/lib/Transforms/Instrumentation/GCOVProfiling.cpp b/llvm/lib/Transforms/Instrumentation/GCOVProfiling.cpp --- a/llvm/lib/Transforms/Instrumentation/GCOVProfiling.cpp +++ b/llvm/lib/Transforms/Instrumentation/GCOVProfiling.cpp @@ -624,10 +624,11 @@ } bool GCOVProfiler::AddFlushBeforeForkAndExec() { + const TargetLibraryInfo *TLI = nullptr; SmallVector Forks; SmallVector Execs; for (auto &F : M->functions()) { - auto *TLI = &GetTLI(F); + TLI = TLI == nullptr ? 
&GetTLI(F) : TLI; for (auto &I : instructions(F)) { if (CallInst *CI = dyn_cast(&I)) { if (Function *Callee = CI->getCalledFunction()) { @@ -656,7 +657,9 @@ // We've a fork so just reset the counters in the child process FunctionType *FTy = FunctionType::get(Builder.getInt32Ty(), {}, false); - FunctionCallee GCOVFork = M->getOrInsertFunction("__gcov_fork", FTy); + FunctionCallee GCOVFork = M->getOrInsertFunction( + "__gcov_fork", FTy, + TLI->getAttrList(Ctx, {}, /*Signed=*/true, /*Ret=*/true)); F->setCalledFunction(GCOVFork); // We split just after the fork to have a counter for the lines after @@ -1025,11 +1028,8 @@ Type::getInt32Ty(*Ctx), // uint32_t checksum }; FunctionType *FTy = FunctionType::get(Type::getVoidTy(*Ctx), Args, false); - AttributeList AL; - if (auto AK = TLI->getExtAttrForI32Param(false)) - AL = AL.addParamAttribute(*Ctx, 2, AK); - FunctionCallee Res = M->getOrInsertFunction("llvm_gcda_start_file", FTy, AL); - return Res; + return M->getOrInsertFunction("llvm_gcda_start_file", FTy, + TLI->getAttrList(Ctx, {1, 2}, /*Signed=*/false)); } FunctionCallee GCOVProfiler::getEmitFunctionFunc(const TargetLibraryInfo *TLI) { @@ -1039,13 +1039,8 @@ Type::getInt32Ty(*Ctx), // uint32_t cfg_checksum }; FunctionType *FTy = FunctionType::get(Type::getVoidTy(*Ctx), Args, false); - AttributeList AL; - if (auto AK = TLI->getExtAttrForI32Param(false)) { - AL = AL.addParamAttribute(*Ctx, 0, AK); - AL = AL.addParamAttribute(*Ctx, 1, AK); - AL = AL.addParamAttribute(*Ctx, 2, AK); - } - return M->getOrInsertFunction("llvm_gcda_emit_function", FTy); + return M->getOrInsertFunction("llvm_gcda_emit_function", FTy, + TLI->getAttrList(Ctx, {0, 1, 2}, /*Signed=*/false)); } FunctionCallee GCOVProfiler::getEmitArcsFunc(const TargetLibraryInfo *TLI) { @@ -1054,10 +1049,8 @@ Type::getInt64PtrTy(*Ctx), // uint64_t *counters }; FunctionType *FTy = FunctionType::get(Type::getVoidTy(*Ctx), Args, false); - AttributeList AL; - if (auto AK = TLI->getExtAttrForI32Param(false)) - AL = AL.addParamAttribute(*Ctx, 0, AK); - return M->getOrInsertFunction("llvm_gcda_emit_arcs", FTy, AL); + return M->getOrInsertFunction("llvm_gcda_emit_arcs", FTy, + TLI->getAttrList(Ctx, {0}, /*Signed=*/false)); } FunctionCallee GCOVProfiler::getSummaryInfoFunc() { diff --git a/llvm/lib/Transforms/Instrumentation/MemorySanitizer.cpp b/llvm/lib/Transforms/Instrumentation/MemorySanitizer.cpp --- a/llvm/lib/Transforms/Instrumentation/MemorySanitizer.cpp +++ b/llvm/lib/Transforms/Instrumentation/MemorySanitizer.cpp @@ -539,9 +539,9 @@ friend struct VarArgSystemZHelper; void initializeModule(Module &M); - void initializeCallbacks(Module &M); - void createKernelApi(Module &M); - void createUserspaceApi(Module &M); + void initializeCallbacks(Module &M, const TargetLibraryInfo &TLI); + void createKernelApi(Module &M, const TargetLibraryInfo &TLI); + void createUserspaceApi(Module &M, const TargetLibraryInfo &TLI); /// True if we're compiling the Linux kernel. bool CompileKernel; @@ -730,7 +730,7 @@ } /// Create KMSAN API callbacks. -void MemorySanitizer::createKernelApi(Module &M) { +void MemorySanitizer::createKernelApi(Module &M, const TargetLibraryInfo &TLI) { IRBuilder<> IRB(*C); // These will be initialized in insertKmsanPrologue(). 
@@ -742,8 +742,10 @@ VAArgOriginTLS = nullptr; VAArgOverflowSizeTLS = nullptr; - WarningFn = M.getOrInsertFunction("__msan_warning", IRB.getVoidTy(), - IRB.getInt32Ty()); + WarningFn = M.getOrInsertFunction("__msan_warning", + TLI.getAttrList(C, {0}, /*Signed=*/false), + IRB.getVoidTy(), IRB.getInt32Ty()); + // Requests the per-task context state (kmsan_context_state*) from the // runtime library. MsanContextStateTy = StructType::get( @@ -794,7 +796,7 @@ } /// Insert declarations for userspace-specific functions and globals. -void MemorySanitizer::createUserspaceApi(Module &M) { +void MemorySanitizer::createUserspaceApi(Module &M, const TargetLibraryInfo &TLI) { IRBuilder<> IRB(*C); // Create the callback. @@ -803,8 +805,9 @@ if (TrackOrigins) { StringRef WarningFnName = Recover ? "__msan_warning_with_origin" : "__msan_warning_with_origin_noreturn"; - WarningFn = - M.getOrInsertFunction(WarningFnName, IRB.getVoidTy(), IRB.getInt32Ty()); + WarningFn = M.getOrInsertFunction(WarningFnName, + TLI.getAttrList(C, {0}, /*Signed=*/false), + IRB.getVoidTy(), IRB.getInt32Ty()); } else { StringRef WarningFnName = Recover ? "__msan_warning" : "__msan_warning_noreturn"; @@ -841,23 +844,13 @@ AccessSizeIndex++) { unsigned AccessSize = 1 << AccessSizeIndex; std::string FunctionName = "__msan_maybe_warning_" + itostr(AccessSize); - SmallVector, 2> MaybeWarningFnAttrs; - MaybeWarningFnAttrs.push_back(std::make_pair( - AttributeList::FirstArgIndex, Attribute::get(*C, Attribute::ZExt))); - MaybeWarningFnAttrs.push_back(std::make_pair( - AttributeList::FirstArgIndex + 1, Attribute::get(*C, Attribute::ZExt))); MaybeWarningFn[AccessSizeIndex] = M.getOrInsertFunction( - FunctionName, AttributeList::get(*C, MaybeWarningFnAttrs), + FunctionName, TLI.getAttrList(C, {0, 1}, /*Signed=*/false), IRB.getVoidTy(), IRB.getIntNTy(AccessSize * 8), IRB.getInt32Ty()); FunctionName = "__msan_maybe_store_origin_" + itostr(AccessSize); - SmallVector, 2> MaybeStoreOriginFnAttrs; - MaybeStoreOriginFnAttrs.push_back(std::make_pair( - AttributeList::FirstArgIndex, Attribute::get(*C, Attribute::ZExt))); - MaybeStoreOriginFnAttrs.push_back(std::make_pair( - AttributeList::FirstArgIndex + 2, Attribute::get(*C, Attribute::ZExt))); MaybeStoreOriginFn[AccessSizeIndex] = M.getOrInsertFunction( - FunctionName, AttributeList::get(*C, MaybeStoreOriginFnAttrs), + FunctionName, TLI.getAttrList(C, {0, 2}, /*Signed=*/false), IRB.getVoidTy(), IRB.getIntNTy(AccessSize * 8), IRB.getInt8PtrTy(), IRB.getInt32Ty()); } @@ -873,7 +866,7 @@ } /// Insert extern declaration of runtime-provided functions and globals. -void MemorySanitizer::initializeCallbacks(Module &M) { +void MemorySanitizer::initializeCallbacks(Module &M, const TargetLibraryInfo &TLI) { // Only do this once. if (CallbacksInitialized) return; @@ -881,29 +874,31 @@ IRBuilder<> IRB(*C); // Initialize callbacks that are common for kernel and userspace // instrumentation. 
- MsanChainOriginFn = M.getOrInsertFunction("__msan_chain_origin", - IRB.getInt32Ty(), IRB.getInt32Ty()); - MsanSetOriginFn = - M.getOrInsertFunction("__msan_set_origin", IRB.getVoidTy(), - IRB.getInt8PtrTy(), IntptrTy, IRB.getInt32Ty()); + MsanChainOriginFn = M.getOrInsertFunction( + "__msan_chain_origin", + TLI.getAttrList(C, {0}, /*Signed=*/false, /*Ret=*/true), IRB.getInt32Ty(), + IRB.getInt32Ty()); + MsanSetOriginFn = M.getOrInsertFunction( + "__msan_set_origin", TLI.getAttrList(C, {2}, /*Signed=*/false), + IRB.getVoidTy(), IRB.getInt8PtrTy(), IntptrTy, IRB.getInt32Ty()); MemmoveFn = M.getOrInsertFunction("__msan_memmove", IRB.getInt8PtrTy(), IRB.getInt8PtrTy(), IRB.getInt8PtrTy(), IntptrTy); MemcpyFn = M.getOrInsertFunction("__msan_memcpy", IRB.getInt8PtrTy(), IRB.getInt8PtrTy(), IRB.getInt8PtrTy(), IntptrTy); - MemsetFn = - M.getOrInsertFunction("__msan_memset", IRB.getInt8PtrTy(), - IRB.getInt8PtrTy(), IRB.getInt32Ty(), IntptrTy); + MemsetFn = M.getOrInsertFunction( + "__msan_memset", TLI.getAttrList(C, {1}, /*Signed=*/true), + IRB.getInt8PtrTy(), IRB.getInt8PtrTy(), IRB.getInt32Ty(), IntptrTy); MsanInstrumentAsmStoreFn = M.getOrInsertFunction("__msan_instrument_asm_store", IRB.getVoidTy(), PointerType::get(IRB.getInt8Ty(), 0), IntptrTy); if (CompileKernel) { - createKernelApi(M); + createKernelApi(M, TLI); } else { - createUserspaceApi(M); + createUserspaceApi(M, TLI); } CallbacksInitialized = true; } @@ -1132,7 +1127,7 @@ // It's easier to remove unreachable blocks than deal with missing shadow. removeUnreachableBlocks(F); - MS.initializeCallbacks(*F.getParent()); + MS.initializeCallbacks(*F.getParent(), TLI); FnPrologueEnd = IRBuilder<>(F.getEntryBlock().getFirstNonPHI()) .CreateIntrinsic(Intrinsic::donothing, {}, {}); diff --git a/llvm/lib/Transforms/Instrumentation/ThreadSanitizer.cpp b/llvm/lib/Transforms/Instrumentation/ThreadSanitizer.cpp --- a/llvm/lib/Transforms/Instrumentation/ThreadSanitizer.cpp +++ b/llvm/lib/Transforms/Instrumentation/ThreadSanitizer.cpp @@ -132,7 +132,7 @@ unsigned Flags = 0; }; - void initialize(Module &M); + void initialize(Module &M, const TargetLibraryInfo &TLI); bool instrumentLoadOrStore(const InstructionInfo &II, const DataLayout &DL); bool instrumentAtomic(Instruction *I, const DataLayout &DL); bool instrumentMemIntrinsic(Instruction *I); @@ -195,13 +195,14 @@ insertModuleCtor(M); return PreservedAnalyses::none(); } -void ThreadSanitizer::initialize(Module &M) { +void ThreadSanitizer::initialize(Module &M, const TargetLibraryInfo &TLI) { const DataLayout &DL = M.getDataLayout(); - IntptrTy = DL.getIntPtrType(M.getContext()); + LLVMContext &Ctx = M.getContext(); + IntptrTy = DL.getIntPtrType(Ctx); - IRBuilder<> IRB(M.getContext()); + IRBuilder<> IRB(Ctx); AttributeList Attr; - Attr = Attr.addFnAttribute(M.getContext(), Attribute::NoUnwind); + Attr = Attr.addFnAttribute(Ctx, Attribute::NoUnwind); // Initialize the callbacks. 
TsanFuncEntry = M.getOrInsertFunction("__tsan_func_entry", Attr, IRB.getVoidTy(), IRB.getInt8PtrTy()); @@ -260,24 +261,24 @@ TsanUnalignedCompoundRW[i] = M.getOrInsertFunction( UnalignedCompoundRWName, Attr, IRB.getVoidTy(), IRB.getInt8PtrTy()); - Type *Ty = Type::getIntNTy(M.getContext(), BitSize); + Type *Ty = Type::getIntNTy(Ctx, BitSize); Type *PtrTy = Ty->getPointerTo(); SmallString<32> AtomicLoadName("__tsan_atomic" + BitSizeStr + "_load"); - { - AttributeList AL = Attr; - AL = AL.addParamAttribute(M.getContext(), 1, Attribute::ZExt); - TsanAtomicLoad[i] = - M.getOrInsertFunction(AtomicLoadName, AL, Ty, PtrTy, OrdTy); - } - + TsanAtomicLoad[i] = + M.getOrInsertFunction(AtomicLoadName, + TLI.getAttrList(&Ctx, {1}, /*Signed=*/true, + /*Ret=*/BitSize <= 32, Attr), + Ty, PtrTy, OrdTy); + + // Args of type Ty need extension only when BitSize is 32 or less. + using Idxs = std::vector<unsigned>; + Idxs Idxs2Or12 ((BitSize <= 32) ? Idxs({1, 2}) : Idxs({2})); + Idxs Idxs34Or1234((BitSize <= 32) ? Idxs({1, 2, 3, 4}) : Idxs({3, 4})); SmallString<32> AtomicStoreName("__tsan_atomic" + BitSizeStr + "_store"); - { - AttributeList AL = Attr; - AL = AL.addParamAttribute(M.getContext(), 1, Attribute::ZExt); - AL = AL.addParamAttribute(M.getContext(), 2, Attribute::ZExt); - TsanAtomicStore[i] = M.getOrInsertFunction( - AtomicStoreName, AL, IRB.getVoidTy(), PtrTy, Ty, OrdTy); - } + TsanAtomicStore[i] = M.getOrInsertFunction( + AtomicStoreName, + TLI.getAttrList(&Ctx, Idxs2Or12, /*Signed=*/true, /*Ret=*/false, Attr), + IRB.getVoidTy(), PtrTy, Ty, OrdTy); for (unsigned Op = AtomicRMWInst::FIRST_BINOP; Op <= AtomicRMWInst::LAST_BINOP; ++Op) { @@ -300,44 +301,35 @@ else continue; SmallString<32> RMWName("__tsan_atomic" + itostr(BitSize) + NamePart); - { - AttributeList AL = Attr; - AL = AL.addParamAttribute(M.getContext(), 1, Attribute::ZExt); - AL = AL.addParamAttribute(M.getContext(), 2, Attribute::ZExt); - TsanAtomicRMW[Op][i] = - M.getOrInsertFunction(RMWName, AL, Ty, PtrTy, Ty, OrdTy); - } + TsanAtomicRMW[Op][i] = M.getOrInsertFunction( + RMWName, + TLI.getAttrList(&Ctx, Idxs2Or12, /*Signed=*/true, + /*Ret=*/BitSize <= 32, Attr), + Ty, PtrTy, Ty, OrdTy); } SmallString<32> AtomicCASName("__tsan_atomic" + BitSizeStr + "_compare_exchange_val"); - { - AttributeList AL = Attr; - AL = AL.addParamAttribute(M.getContext(), 1, Attribute::ZExt); - AL = AL.addParamAttribute(M.getContext(), 2, Attribute::ZExt); - AL = AL.addParamAttribute(M.getContext(), 3, Attribute::ZExt); - AL = AL.addParamAttribute(M.getContext(), 4, Attribute::ZExt); - TsanAtomicCAS[i] = M.getOrInsertFunction(AtomicCASName, AL, Ty, PtrTy, Ty, - Ty, OrdTy, OrdTy); - } + TsanAtomicCAS[i] = M.getOrInsertFunction( + AtomicCASName, + TLI.getAttrList(&Ctx, Idxs34Or1234, /*Signed=*/true, + /*Ret=*/BitSize <= 32, Attr), + Ty, PtrTy, Ty, Ty, OrdTy, OrdTy); } TsanVptrUpdate = M.getOrInsertFunction("__tsan_vptr_update", Attr, IRB.getVoidTy(), IRB.getInt8PtrTy(), IRB.getInt8PtrTy()); TsanVptrLoad = M.getOrInsertFunction("__tsan_vptr_read", Attr, IRB.getVoidTy(), IRB.getInt8PtrTy()); - { - AttributeList AL = Attr; - AL = AL.addParamAttribute(M.getContext(), 0, Attribute::ZExt); - TsanAtomicThreadFence = M.getOrInsertFunction("__tsan_atomic_thread_fence", - AL, IRB.getVoidTy(), OrdTy); - } - { - AttributeList AL = Attr; - AL = AL.addParamAttribute(M.getContext(), 0, Attribute::ZExt); - TsanAtomicSignalFence = M.getOrInsertFunction("__tsan_atomic_signal_fence", - AL, IRB.getVoidTy(), OrdTy); - } + TsanAtomicThreadFence = M.getOrInsertFunction( +
"__tsan_atomic_thread_fence", + TLI.getAttrList(&Ctx, {0}, /*Signed=*/true, /*Ret=*/false, Attr), + IRB.getVoidTy(), OrdTy); + + TsanAtomicSignalFence = M.getOrInsertFunction( + "__tsan_atomic_signal_fence", + TLI.getAttrList(&Ctx, {0}, /*Signed=*/true, /*Ret=*/false, Attr), + IRB.getVoidTy(), OrdTy); MemmoveFn = M.getOrInsertFunction("__tsan_memmove", Attr, IRB.getInt8PtrTy(), @@ -345,9 +337,10 @@ MemcpyFn = M.getOrInsertFunction("__tsan_memcpy", Attr, IRB.getInt8PtrTy(), IRB.getInt8PtrTy(), IRB.getInt8PtrTy(), IntptrTy); - MemsetFn = - M.getOrInsertFunction("__tsan_memset", Attr, IRB.getInt8PtrTy(), - IRB.getInt8PtrTy(), IRB.getInt32Ty(), IntptrTy); + MemsetFn = M.getOrInsertFunction( + "__tsan_memset", + TLI.getAttrList(&Ctx, {1}, /*Signed=*/true, /*Ret=*/false, Attr), + IRB.getInt8PtrTy(), IRB.getInt8PtrTy(), IRB.getInt32Ty(), IntptrTy); } static bool isVtableAccess(Instruction *I) { @@ -516,7 +509,7 @@ if (F.hasFnAttribute(Attribute::DisableSanitizerInstrumentation)) return false; - initialize(*F.getParent()); + initialize(*F.getParent(), TLI); SmallVector AllLoadsAndStores; SmallVector LocalLoadsAndStores; SmallVector AtomicAccesses; diff --git a/llvm/test/Instrumentation/AddressSanitizer/experiment.ll b/llvm/test/Instrumentation/AddressSanitizer/experiment.ll --- a/llvm/test/Instrumentation/AddressSanitizer/experiment.ll +++ b/llvm/test/Instrumentation/AddressSanitizer/experiment.ll @@ -1,6 +1,10 @@ ; Test optimization experiments. ; -asan-force-experiment flag turns all memory accesses into experiments. ; RUN: opt < %s -passes=asan -asan-force-experiment=42 -S | FileCheck %s +; RUN: opt < %s -passes=asan -asan-force-experiment=42 -S -mtriple=s390x-unknown-linux | FileCheck %s --check-prefix=EXT +; RUN: opt < %s -passes=asan -asan-force-experiment=42 -S -mtriple=mips-linux-gnu | FileCheck %s --check-prefix=MIPS_EXT +; REQUIRES: x86-registered-target, systemz-registered-target, mips-registered-target + target datalayout = "e-p:64:64:64-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64-f32:32:32-f64:64:64-v64:64:64-v128:128:128-a0:0:64-s0:64:64-f80:128:128-n8:16:32:64" target triple = "x86_64-unknown-linux-gnu" @@ -111,3 +115,19 @@ ; CHECK: __asan_report_exp_store_n{{.*}} i32 42 ; CHECK: ret void } + +; CHECK: declare void @__asan_report_exp_load_n(i64, i64, i32) +; EXT: declare void @__asan_report_exp_load_n(i64, i64, i32 zeroext) +; MIPS_EXT: declare void @__asan_report_exp_load_n(i64, i64, i32 signext) + +; CHECK: declare void @__asan_exp_loadN(i64, i64, i32) +; EXT: declare void @__asan_exp_loadN(i64, i64, i32 zeroext) +; MIPS_EXT: declare void @__asan_exp_loadN(i64, i64, i32 signext) + +; CHECK: declare void @__asan_report_exp_load1(i64, i32) +; EXT: declare void @__asan_report_exp_load1(i64, i32 zeroext) +; MIPS_EXT: declare void @__asan_report_exp_load1(i64, i32 signext) + +; CHECK: declare void @__asan_exp_load1(i64, i32) +; EXT: declare void @__asan_exp_load1(i64, i32 zeroext) +; MIPS_EXT: declare void @__asan_exp_load1(i64, i32 signext) diff --git a/llvm/test/Instrumentation/AddressSanitizer/mem-intrinsics.ll b/llvm/test/Instrumentation/AddressSanitizer/mem-intrinsics.ll --- a/llvm/test/Instrumentation/AddressSanitizer/mem-intrinsics.ll +++ b/llvm/test/Instrumentation/AddressSanitizer/mem-intrinsics.ll @@ -3,6 +3,9 @@ ; RUN: opt < %s -passes=asan -S | FileCheck --check-prefixes=CHECK,CHECK-PREFIX %s ; RUN: opt < %s -passes=asan -asan-kernel -S | FileCheck --check-prefixes=CHECK,CHECK-NOPREFIX %s ; RUN: opt < %s -passes=asan -asan-kernel -asan-kernel-mem-intrinsic-prefix -S | 
FileCheck --check-prefixes=CHECK,CHECK-PREFIX %s +; RUN: opt < %s -passes=asan -S -mtriple=s390x-unknown-linux | FileCheck --check-prefix=EXT %s +; RUN: opt < %s -passes=asan -S -mtriple=mips-linux-gnu | FileCheck --check-prefix=MIPS_EXT %s +; REQUIRES: x86-registered-target, systemz-registered-target, mips-registered-target target datalayout = "e-p:64:64:64-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64-f32:32:32-f64:64:64-v64:64:64-v128:128:128-a0:0:64-s0:64:64-f80:128:128-n8:16:32:64" target triple = "x86_64-unknown-linux-gnu" @@ -73,3 +76,7 @@ tail call void @llvm.memcpy.element.unordered.atomic.p0.p0.i64(ptr align 1 %a, ptr align 1 %b, i64 100, i32 1) ret void } + +; CHECK-PREFIX: declare ptr @__asan_memset(ptr, i32, i64) +; EXT: declare ptr @__asan_memset(ptr, i32 zeroext, i64) +; MIPS_EXT: declare ptr @__asan_memset(ptr, i32 signext, i64) diff --git a/llvm/test/Instrumentation/MemorySanitizer/Mips/vararg-mips64.ll b/llvm/test/Instrumentation/MemorySanitizer/Mips/vararg-mips64.ll --- a/llvm/test/Instrumentation/MemorySanitizer/Mips/vararg-mips64.ll +++ b/llvm/test/Instrumentation/MemorySanitizer/Mips/vararg-mips64.ll @@ -78,3 +78,12 @@ ; CHECK: i64 add (i64 ptrtoint (ptr @__msan_va_arg_tls to i64), i64 792) ; CHECK-NOT: i64 add (i64 ptrtoint (ptr @__msan_va_arg_tls to i64), i64 800) declare i64 @sum(i64 %n, ...) + +; CHECK: declare void @__msan_maybe_warning_1(i8 signext, i32 signext) +; CHECK: declare void @__msan_maybe_store_origin_1(i8 signext, ptr, i32 signext) +; CHECK: declare void @__msan_maybe_warning_2(i16 signext, i32 signext) +; CHECK: declare void @__msan_maybe_store_origin_2(i16 signext, ptr, i32 signext) +; CHECK: declare void @__msan_maybe_warning_4(i32 signext, i32 signext) +; CHECK: declare void @__msan_maybe_store_origin_4(i32 signext, ptr, i32 signext) +; CHECK: declare void @__msan_maybe_warning_8(i64 signext, i32 signext) +; CHECK: declare void @__msan_maybe_store_origin_8(i64 signext, ptr, i32 signext) diff --git a/llvm/test/Instrumentation/MemorySanitizer/SystemZ/vararg.ll b/llvm/test/Instrumentation/MemorySanitizer/SystemZ/vararg.ll --- a/llvm/test/Instrumentation/MemorySanitizer/SystemZ/vararg.ll +++ b/llvm/test/Instrumentation/MemorySanitizer/SystemZ/vararg.ll @@ -188,3 +188,12 @@ ; CHECK: store {{.*}} @__msan_va_arg_tls {{.*}} 24 ; CHECK: store {{.*}} @__msan_va_arg_tls {{.*}} 32 ; CHECK: store {{.*}} 0, {{.*}} @__msan_va_arg_overflow_size_tls + +; CHECK: declare void @__msan_maybe_warning_1(i8 zeroext, i32 zeroext) +; CHECK: declare void @__msan_maybe_store_origin_1(i8 zeroext, ptr, i32 zeroext) +; CHECK: declare void @__msan_maybe_warning_2(i16 zeroext, i32 zeroext) +; CHECK: declare void @__msan_maybe_store_origin_2(i16 zeroext, ptr, i32 zeroext) +; CHECK: declare void @__msan_maybe_warning_4(i32 zeroext, i32 zeroext) +; CHECK: declare void @__msan_maybe_store_origin_4(i32 zeroext, ptr, i32 zeroext) +; CHECK: declare void @__msan_maybe_warning_8(i64 zeroext, i32 zeroext) +; CHECK: declare void @__msan_maybe_store_origin_8(i64 zeroext, ptr, i32 zeroext) diff --git a/llvm/test/Instrumentation/MemorySanitizer/atomics.ll b/llvm/test/Instrumentation/MemorySanitizer/atomics.ll --- a/llvm/test/Instrumentation/MemorySanitizer/atomics.ll +++ b/llvm/test/Instrumentation/MemorySanitizer/atomics.ll @@ -1,6 +1,8 @@ ; RUN: opt < %s -msan-check-access-address=0 -S -passes=msan 2>&1 | FileCheck %s --check-prefixes=CHECK,NOORIGINS --implicit-check-not="call void @__msan_warning" ; RUN: opt < %s -msan-check-access-address=0 -msan-track-origins=1 -S -passes=msan 2>&1 | 
FileCheck %s --check-prefixes=CHECK,ORIGINS --implicit-check-not="call void @__msan_warning" ; RUN: opt < %s -msan-check-access-address=0 -msan-track-origins=2 -S -passes=msan 2>&1 | FileCheck %s --check-prefixes=CHECK,ORIGINS --implicit-check-not="call void @__msan_warning" +; RUN: opt < %s -msan-check-access-address=0 -msan-track-origins=1 -S -passes=msan -mtriple=s390x-unknown-linux 2>&1 | FileCheck %s --check-prefix=EXT +; REQUIRES: x86-registered-target, systemz-registered-target target datalayout = "e-p:64:64:64-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64-f32:32:32-f64:64:64-v64:64:64-v128:128:128-a0:0:64-s0:64:64-f80:128:128-n8:16:32:64-S128" target triple = "x86_64-unknown-linux-gnu" @@ -207,3 +209,13 @@ ; CHECK: store i32 0, ptr {{.*}}, align 16 ; CHECK: store atomic i32 %x, ptr %p release, align 16 ; CHECK: ret void + + +; ORIGINS: declare i32 @__msan_chain_origin(i32) +; EXT: declare zeroext i32 @__msan_chain_origin(i32 zeroext) +; ORIGINS: declare void @__msan_set_origin(ptr, i64, i32) +; EXT: declare void @__msan_set_origin(ptr, i64, i32 zeroext) +; ORIGINS: declare ptr @__msan_memset(ptr, i32, i64) +; EXT: declare ptr @__msan_memset(ptr, i32 signext, i64) +; ORIGINS: declare void @__msan_warning_with_origin_noreturn(i32) +; EXT: declare void @__msan_warning_with_origin_noreturn(i32 zeroext) diff --git a/llvm/test/Instrumentation/MemorySanitizer/msan_basic.ll b/llvm/test/Instrumentation/MemorySanitizer/msan_basic.ll --- a/llvm/test/Instrumentation/MemorySanitizer/msan_basic.ll +++ b/llvm/test/Instrumentation/MemorySanitizer/msan_basic.ll @@ -1032,13 +1032,13 @@ ; CHECK-LABEL: define internal void @msan.module_ctor() #[[#ATTR:]] { ; CHECK: call void @__msan_init() -; CHECK-CALLS: declare void @__msan_maybe_warning_1(i8 zeroext, i32 zeroext) -; CHECK-CALLS: declare void @__msan_maybe_store_origin_1(i8 zeroext, ptr, i32 zeroext) -; CHECK-CALLS: declare void @__msan_maybe_warning_2(i16 zeroext, i32 zeroext) -; CHECK-CALLS: declare void @__msan_maybe_store_origin_2(i16 zeroext, ptr, i32 zeroext) -; CHECK-CALLS: declare void @__msan_maybe_warning_4(i32 zeroext, i32 zeroext) -; CHECK-CALLS: declare void @__msan_maybe_store_origin_4(i32 zeroext, ptr, i32 zeroext) -; CHECK-CALLS: declare void @__msan_maybe_warning_8(i64 zeroext, i32 zeroext) -; CHECK-CALLS: declare void @__msan_maybe_store_origin_8(i64 zeroext, ptr, i32 zeroext) +; CHECK-CALLS: declare void @__msan_maybe_warning_1(i8, i32) +; CHECK-CALLS: declare void @__msan_maybe_store_origin_1(i8, ptr, i32) +; CHECK-CALLS: declare void @__msan_maybe_warning_2(i16, i32) +; CHECK-CALLS: declare void @__msan_maybe_store_origin_2(i16, ptr, i32) +; CHECK-CALLS: declare void @__msan_maybe_warning_4(i32, i32) +; CHECK-CALLS: declare void @__msan_maybe_store_origin_4(i32, ptr, i32) +; CHECK-CALLS: declare void @__msan_maybe_warning_8(i64, i32) +; CHECK-CALLS: declare void @__msan_maybe_store_origin_8(i64, ptr, i32) ; CHECK: attributes #[[#ATTR]] = { nounwind } diff --git a/llvm/test/Instrumentation/ThreadSanitizer/atomic.ll b/llvm/test/Instrumentation/ThreadSanitizer/atomic.ll --- a/llvm/test/Instrumentation/ThreadSanitizer/atomic.ll +++ b/llvm/test/Instrumentation/ThreadSanitizer/atomic.ll @@ -1,4 +1,7 @@ ; RUN: opt < %s -passes=tsan -S | FileCheck %s +; RUN: opt < %s -passes=tsan -S -mtriple=s390x-unknown-linux | FileCheck --check-prefix=EXT %s +; RUN: opt < %s -passes=tsan -S -mtriple=mips-linux-gnu | FileCheck --check-prefix=MIPS_EXT %s +; REQUIRES: x86-registered-target, systemz-registered-target, mips-registered-target ; Check that 
atomic memory operations are converted to calls into ThreadSanitizer runtime. target datalayout = "e-p:64:64:64-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64-f32:32:32-f64:64:64-v64:64:64-v128:128:128-a0:0:64-s0:64:64-f80:128:128-n8:16:32:64-S128" @@ -2096,6 +2099,42 @@ ; CHECK-LABEL: atomic_thread_fence_seq_cst ; CHECK: call void @__tsan_atomic_thread_fence(i32 5), !dbg +; CHECK: declare void @__tsan_atomic32_store(ptr, i32, i32) +; EXT: declare void @__tsan_atomic32_store(ptr, i32 signext, i32 signext) +; MIPS_EXT: declare void @__tsan_atomic32_store(ptr, i32 signext, i32 signext) + +; CHECK: declare i32 @__tsan_atomic32_compare_exchange_val(ptr, i32, i32, i32, i32) +; EXT: declare signext i32 @__tsan_atomic32_compare_exchange_val(ptr, i32 signext, i32 signext, i32 signext, i32 signext) +; MIPS_EXT: declare i32 @__tsan_atomic32_compare_exchange_val(ptr, i32 signext, i32 signext, i32 signext, i32 signext) + +; CHECK: declare i64 @__tsan_atomic64_load(ptr, i32) +; EXT: declare i64 @__tsan_atomic64_load(ptr, i32 signext) +; MIPS_EXT: declare i64 @__tsan_atomic64_load(ptr, i32 signext) + +; CHECK: declare void @__tsan_atomic64_store(ptr, i64, i32) +; EXT: declare void @__tsan_atomic64_store(ptr, i64, i32 signext) +; MIPS_EXT: declare void @__tsan_atomic64_store(ptr, i64, i32 signext) + +; CHECK: declare i64 @__tsan_atomic64_fetch_add(ptr, i64, i32) +; EXT: declare i64 @__tsan_atomic64_fetch_add(ptr, i64, i32 signext) +; MIPS_EXT: declare i64 @__tsan_atomic64_fetch_add(ptr, i64, i32 signext) + +; CHECK: declare i64 @__tsan_atomic64_compare_exchange_val(ptr, i64, i64, i32, i32) +; EXT: declare i64 @__tsan_atomic64_compare_exchange_val(ptr, i64, i64, i32 signext, i32 signext) +; MIPS_EXT: declare i64 @__tsan_atomic64_compare_exchange_val(ptr, i64, i64, i32 signext, i32 signext) + +; CHECK: declare void @__tsan_atomic_thread_fence(i32) +; EXT: declare void @__tsan_atomic_thread_fence(i32 signext) +; MIPS_EXT: declare void @__tsan_atomic_thread_fence(i32 signext) + +; CHECK: declare void @__tsan_atomic_signal_fence(i32) +; EXT: declare void @__tsan_atomic_signal_fence(i32 signext) +; MIPS_EXT: declare void @__tsan_atomic_signal_fence(i32 signext) + +; CHECK: declare ptr @__tsan_memset(ptr, i32, i64) +; EXT: declare ptr @__tsan_memset(ptr, i32 signext, i64) +; MIPS_EXT: declare ptr @__tsan_memset(ptr, i32 signext, i64) + !llvm.module.flags = !{!0, !1, !2} !llvm.dbg.cu = !{!8} !0 = !{i32 2, !"Dwarf Version", i32 4} diff --git a/llvm/test/Transforms/GCOVProfiling/function-numbering.ll b/llvm/test/Transforms/GCOVProfiling/function-numbering.ll --- a/llvm/test/Transforms/GCOVProfiling/function-numbering.ll +++ b/llvm/test/Transforms/GCOVProfiling/function-numbering.ll @@ -8,6 +8,9 @@ ; RUN: opt -passes=insert-gcov-profiling -S < %t/2 | FileCheck --check-prefix GCDA %s ; RUN: llvm-cov gcov -n -dump %t/function-numbering.gcno 2>&1 | FileCheck --check-prefix GCNO %s +; RUN: opt -passes=insert-gcov-profiling -S < %t/2 -mtriple=s390x-unknown-linux | FileCheck --check-prefix EXT %s +; RUN: opt -passes=insert-gcov-profiling -S < %t/2 -mtriple=mips-linux-gnu | FileCheck --check-prefix MIPS_EXT %s +; REQUIRES: x86-registered-target, systemz-registered-target, mips-registered-target target datalayout = "e-m:o-i64:64-f80:128-n8:16:32:64-S128" target triple = "x86_64-apple-macosx10.10.0" @@ -77,6 +80,18 @@ ; GCNO-NOT: == bar ({{[0-9]+}}) @ ; GCNO: == baz (1) @ +; GCDA: declare void @llvm_gcda_start_file(ptr, i32, i32) +; EXT: declare void @llvm_gcda_start_file(ptr, i32 zeroext, i32 zeroext) +; MIPS_EXT: declare 
void @llvm_gcda_start_file(ptr, i32 signext, i32 signext) + +; GCDA: declare void @llvm_gcda_emit_function(i32, i32, i32) +; EXT: declare void @llvm_gcda_emit_function(i32 zeroext, i32 zeroext, i32 zeroext) +; MIPS_EXT: declare void @llvm_gcda_emit_function(i32 signext, i32 signext, i32 signext) + +; GCDA: declare void @llvm_gcda_emit_arcs(i32, ptr) +; EXT: declare void @llvm_gcda_emit_arcs(i32 zeroext, ptr) +; MIPS_EXT: declare void @llvm_gcda_emit_arcs(i32 signext, ptr) + define void @foo() !dbg !4 { ret void, !dbg !12 } diff --git a/llvm/test/Transforms/OpenMP/add_attributes.ll b/llvm/test/Transforms/OpenMP/add_attributes.ll --- a/llvm/test/Transforms/OpenMP/add_attributes.ll +++ b/llvm/test/Transforms/OpenMP/add_attributes.ll @@ -1,10 +1,15 @@ -; RUN: opt < %s -S -passes=openmp-opt-cgscc | FileCheck %s -; RUN: opt < %s -S -passes=openmp-opt-cgscc -openmp-ir-builder-optimistic-attributes | FileCheck %s --check-prefix=OPTIMISTIC -target datalayout = "e-m:e-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128" +; RUN: opt < %s -S -passes=openmp-opt-cgscc -mtriple=x86_64-unknown-unknown | FileCheck %s +; RUN: opt < %s -S -passes=openmp-opt-cgscc -openmp-ir-builder-optimistic-attributes -mtriple=x86_64-unknown-unknown | FileCheck %s --check-prefix=OPTIMISTIC +; RUN: opt < %s -S -passes=openmp-opt-cgscc -mtriple=s390x-unknown-linux | FileCheck %s --check-prefix=EXT +; RUN: opt < %s -S -passes=openmp-opt-cgscc -mtriple=mips-linux-gnu | FileCheck %s --check-prefix=MIPS_EXT +; RUN: opt < %s -S -passes=openmp-opt-cgscc -mtriple=riscv64 | FileCheck %s --check-prefix=RISCV_EXT +; REQUIRES: x86-registered-target, systemz-registered-target, mips-registered-target, riscv-registered-target %struct.omp_lock_t = type { ptr } %struct.omp_nest_lock_t = type { ptr } %struct.ident_t = type { i32, i32, i32, i32, ptr } +%struct.__tgt_async_info = type { ptr } +%struct.__tgt_kernel_arguments = type { i32, i32, ptr, ptr, ptr, ptr, ptr, ptr, i64 } define void @call_all(i32 %schedule, ptr %lock, i32 %lock_hint, ptr %nest_lock, i32 %i, ptr %s, i64 %st, ptr %vp, double %d, i32 %proc_bind, i64 %allocator_handle, ptr %cp, i64 %event_handle, i32 %pause_resource) { entry: @@ -669,6 +674,87 @@ attributes #0 = { noinline cold } +declare ptr @__kmpc_aligned_alloc(i32, i64, i64, ptr); + +declare ptr @__kmpc_alloc_shared(i64); + +declare void @__kmpc_barrier_simple_generic(ptr, i32); + +declare void @__kmpc_begin_sharing_variables(ptr, i64); + +declare void @__kmpc_distribute_static_fini(ptr, i32); + +declare void @__kmpc_distribute_static_init_4(ptr, i32, i32, ptr, ptr, ptr, ptr, i32, i32); + +declare void @__kmpc_distribute_static_init_4u(ptr, i32, i32, ptr, ptr, ptr, ptr, i32, i32); + +declare void @__kmpc_distribute_static_init_8(ptr, i32, i32, ptr, ptr, ptr, ptr, i64, i64); + +declare void @__kmpc_distribute_static_init_8u(ptr, i32, i32, ptr, ptr, ptr, ptr, i64, i64); + +declare void @__kmpc_end_masked(ptr, i32); + +declare void @__kmpc_end_sharing_variables(); + +declare void @__kmpc_error(ptr, i32, ptr); + +declare void @__kmpc_fork_call_if(ptr, i32, ptr, i32, ptr); + +declare void @__kmpc_free_shared(ptr, i64); + +declare i32 @__kmpc_get_hardware_num_blocks(); + +declare i32 @__kmpc_get_hardware_num_threads_in_block(); + +declare i32 @__kmpc_get_hardware_thread_id_in_block(); + +declare void @__kmpc_get_shared_variables(ptr); + +declare i32 @__kmpc_get_warp_size(); + +declare i8 @__kmpc_is_spmd_exec_mode(); + +declare void @__kmpc_kernel_end_parallel(); + +declare i1 @__kmpc_kernel_parallel(ptr); + 
+declare void @__kmpc_kernel_prepare_parallel(ptr); + +declare i32 @__kmpc_masked(ptr, i32, i32); + +declare void @__kmpc_nvptx_end_reduce_nowait(i32); + +declare i32 @__kmpc_nvptx_parallel_reduce_nowait_v2(ptr, i32, i32, i64, ptr, ptr, ptr); + +declare i32 @__kmpc_nvptx_teams_reduce_nowait_v2(ptr, i32, ptr, i32, ptr, ptr, ptr, ptr, ptr, ptr, ptr); + +declare i32 @__kmpc_omp_reg_task_with_affinity(ptr, i32, ptr, i32, ptr); + +declare void @__kmpc_parallel_51(ptr, i32, i32, i32, i32, ptr, ptr, ptr, i64); + +declare i32 @__kmpc_shuffle_int32(i32, i16, i16); + +declare i64 @__kmpc_shuffle_int64(i64, i16, i16); + +declare void @__kmpc_target_deinit(ptr, i8); + +declare i32 @__kmpc_target_init(ptr, i8, i1); + +declare void @__tgt_interop_destroy(ptr, i32, ptr, i32, i32, ptr, i32); + +declare void @__tgt_interop_init(ptr, i32, ptr, i32, i32, i64, ptr, i32); + +declare void @__tgt_interop_use(ptr, i32, ptr, i32, i32, ptr, i32); + +declare void @__tgt_target_data_begin_mapper_issue(ptr, i64, i32, ptr, ptr, ptr, ptr, ptr, ptr, ptr); + +declare void @__tgt_target_data_begin_mapper_wait(i64, ptr); + +declare i32 @__tgt_target_kernel(ptr, i64, i32, i32, ptr, ptr); + +declare i32 @__tgt_target_kernel_nowait(ptr, i64, i32, i32, ptr, ptr, i32, ptr, i32, ptr); + + ; CHECK: ; Function Attrs: nounwind ; CHECK-NEXT: declare dso_local void @omp_set_num_threads(i32) @@ -1209,6 +1295,126 @@ ; CHECK: ; Function Attrs: cold convergent noinline nounwind ; CHECK-NEXT: declare void @__kmpc_barrier_simple_spmd(ptr nocapture nofree readonly, i32) +; CHECK: ; Function Attrs: nounwind +; CHECK-NEXT: declare ptr @__kmpc_aligned_alloc(i32, i64, i64, ptr) + +; CHECK: ; Function Attrs: nosync nounwind allocsize(0) +; CHECK-NEXT: declare ptr @__kmpc_alloc_shared(i64) + +; CHECK: ; Function Attrs: convergent nounwind +; CHECK: declare void @__kmpc_barrier_simple_generic(ptr, i32) + +; CHECK-NOT: Function Attrs +; CHECK: declare void @__kmpc_begin_sharing_variables(ptr, i64) + +; CHECK: ; Function Attrs: nounwind +; CHECK: declare void @__kmpc_distribute_static_fini(ptr, i32) + +; CHECK: ; Function Attrs: nounwind +; CHECK: declare void @__kmpc_distribute_static_init_4(ptr, i32, i32, ptr, ptr, ptr, ptr, i32, i32) + +; CHECK: ; Function Attrs: nounwind +; CHECK: declare void @__kmpc_distribute_static_init_4u(ptr, i32, i32, ptr, ptr, ptr, ptr, i32, i32) + +; CHECK: ; Function Attrs: nounwind +; CHECK: declare void @__kmpc_distribute_static_init_8(ptr, i32, i32, ptr, ptr, ptr, ptr, i64, i64) + +; CHECK: ; Function Attrs: nounwind +; CHECK: declare void @__kmpc_distribute_static_init_8u(ptr, i32, i32, ptr, ptr, ptr, ptr, i64, i64) + +; CHECK: ; Function Attrs: nounwind +; CHECK: declare void @__kmpc_end_masked(ptr, i32) + +; CHECK-NOT: Function Attrs +; CHECK: declare void @__kmpc_end_sharing_variables() + +; CHECK-NOT: Function Attrs +; CHECK: declare void @__kmpc_error(ptr, i32, ptr) + +; CHECK-NOT: Function Attrs +; CHECK: declare void @__kmpc_fork_call_if(ptr, i32, ptr, i32, ptr) + +; CHECK: ; Function Attrs: nosync nounwind +; CHECK-NEXT: declare void @__kmpc_free_shared(ptr allocptr nocapture, i64) + +; CHECK: ; Function Attrs: nounwind +; CHECK-NEXT: declare i32 @__kmpc_get_hardware_num_blocks() + +; CHECK: ; Function Attrs: nounwind +; CHECK-NEXT: declare i32 @__kmpc_get_hardware_num_threads_in_block() + +; CHECK: ; Function Attrs: nounwind +; CHECK-NEXT: declare i32 @__kmpc_get_hardware_thread_id_in_block() + +; CHECK-NOT: Function Attrs +; CHECK: declare void @__kmpc_get_shared_variables(ptr) + +; CHECK: ; Function 
Attrs: nounwind +; CHECK-NEXT: declare i32 @__kmpc_get_warp_size() + +; CHECK-NOT: Function Attrs +; CHECK: declare i8 @__kmpc_is_spmd_exec_mode() + +; CHECK-NOT: Function Attrs +; CHECK: declare void @__kmpc_kernel_end_parallel() + +; CHECK-NOT: Function Attrs +; CHECK: declare i1 @__kmpc_kernel_parallel(ptr) + +; CHECK-NOT: Function Attrs +; CHECK: declare void @__kmpc_kernel_prepare_parallel(ptr) + +; CHECK: ; Function Attrs: nounwind +; CHECK: declare i32 @__kmpc_masked(ptr, i32, i32) + +; CHECK-NOT: Function Attrs +; CHECK: declare void @__kmpc_nvptx_end_reduce_nowait(i32) + +; CHECK-NOT: Function Attrs +; CHECK: declare i32 @__kmpc_nvptx_parallel_reduce_nowait_v2(ptr, i32, i32, i64, ptr, ptr, ptr) + +; CHECK-NOT: Function Attrs +; CHECK: declare i32 @__kmpc_nvptx_teams_reduce_nowait_v2(ptr, i32, ptr, i32, ptr, ptr, ptr, ptr, ptr, ptr, ptr) + +; CHECK: ; Function Attrs: nounwind +; CHECK: declare i32 @__kmpc_omp_reg_task_with_affinity(ptr, i32, ptr, i32, ptr) + +; CHECK: ; Function Attrs: alwaysinline +; CHECK: declare void @__kmpc_parallel_51(ptr, i32, i32, i32, i32, ptr, ptr, ptr, i64) + +; CHECK-NOT: Function Attrs +; CHECK: declare i32 @__kmpc_shuffle_int32(i32, i16, i16) + +; CHECK-NOT: Function Attrs +; CHECK: declare i64 @__kmpc_shuffle_int64(i64, i16, i16) + +; CHECK-NOT: Function Attrs +; CHECK: declare void @__kmpc_target_deinit(ptr, i8) + +; CHECK-NOT: Function Attrs +; CHECK: declare i32 @__kmpc_target_init(ptr, i8, i1) + +; CHECK-NOT: Function Attrs +; CHECK: declare void @__tgt_interop_destroy(ptr, i32, ptr, i32, i32, ptr, i32) + +; CHECK-NOT: Function Attrs +; CHECK: declare void @__tgt_interop_init(ptr, i32, ptr, i32, i32, i64, ptr, i32) + +; CHECK-NOT: Function Attrs +; CHECK: declare void @__tgt_interop_use(ptr, i32, ptr, i32, i32, ptr, i32) + +; CHECK-NOT: Function Attrs +; CHECK: declare void @__tgt_target_data_begin_mapper_issue(ptr, i64, i32, ptr, ptr, ptr, ptr, ptr, ptr, ptr) + +; CHECK-NOT: Function Attrs +; CHECK: declare void @__tgt_target_data_begin_mapper_wait(i64, ptr) + +; CHECK: ; Function Attrs: nounwind +; CHECK: declare i32 @__tgt_target_kernel(ptr, i64, i32, i32, ptr, ptr) + +; CHECK: ; Function Attrs: nounwind +; CHECK: declare i32 @__tgt_target_kernel_nowait(ptr, i64, i32, i32, ptr, ptr, i32, ptr, i32, ptr) + ; OPTIMISTIC: ; Function Attrs: nofree nosync nounwind willreturn memory(inaccessiblemem: write) ; OPTIMISTIC-NEXT: declare dso_local void @omp_set_num_threads(i32) @@ -1737,6 +1943,811 @@ ; OPTIMISTIC: ; Function Attrs: cold convergent noinline nounwind ; OPTIMISTIC-NEXT: declare void @__kmpc_barrier_simple_spmd(ptr nocapture nofree readonly, i32) +; OPTIMISTIC: ; Function Attrs: nofree nosync nounwind willreturn +; OPTIMISTIC-NEXT: declare noalias ptr @__kmpc_aligned_alloc(i32, i64, i64, ptr) + +; OPTIMISTIC: ; Function Attrs: nosync nounwind allocsize(0) +; OPTIMISTIC-NEXT: declare noalias ptr @__kmpc_alloc_shared(i64) + +; OPTIMISTIC: ; Function Attrs: convergent nounwind +; OPTIMISTIC: declare void @__kmpc_barrier_simple_generic(ptr nocapture nofree readonly, i32) + +; OPTIMISTIC-NOT: Function Attrs +; OPTIMISTIC: declare void @__kmpc_begin_sharing_variables(ptr, i64) + +; OPTIMISTIC: ; Function Attrs: nofree nosync nounwind willreturn memory(argmem: readwrite, inaccessiblemem: readwrite) +; OPTIMISTIC: declare void @__kmpc_distribute_static_fini(ptr nocapture nofree readonly, i32) + +; OPTIMISTIC: ; Function Attrs: nofree nosync nounwind willreturn memory(argmem: readwrite, inaccessiblemem: readwrite) +; OPTIMISTIC: declare void 
@__kmpc_distribute_static_init_4(ptr nocapture nofree readonly, i32, i32, ptr nocapture nofree, ptr nocapture nofree, ptr nocapture nofree, ptr nocapture nofree, i32, i32) + +; OPTIMISTIC: ; Function Attrs: nofree nosync nounwind willreturn memory(argmem: readwrite, inaccessiblemem: readwrite) +; OPTIMISTIC: declare void @__kmpc_distribute_static_init_4u(ptr nocapture nofree readonly, i32, i32, ptr nocapture nofree, ptr nocapture nofree, ptr nocapture nofree, ptr nocapture nofree, i32, i32) + +; OPTIMISTIC: ; Function Attrs: nofree nosync nounwind willreturn memory(argmem: readwrite, inaccessiblemem: readwrite) +; OPTIMISTIC: declare void @__kmpc_distribute_static_init_8(ptr nocapture nofree readonly, i32, i32, ptr nocapture nofree, ptr nocapture nofree, ptr nocapture nofree, ptr nocapture nofree, i64, i64) + +; OPTIMISTIC: ; Function Attrs: nofree nosync nounwind willreturn memory(argmem: readwrite, inaccessiblemem: readwrite) +; OPTIMISTIC: declare void @__kmpc_distribute_static_init_8u(ptr nocapture nofree readonly, i32, i32, ptr nocapture nofree, ptr nocapture nofree, ptr nocapture nofree, ptr nocapture nofree, i64, i64) + +; OPTIMISTIC: ; Function Attrs: nofree nosync nounwind willreturn memory(argmem: readwrite, inaccessiblemem: readwrite) +; OPTIMISTIC: declare void @__kmpc_end_masked(ptr nocapture nofree readonly, i32) + +; OPTIMISTIC-NOT: Function Attrs +; OPTIMISTIC: declare void @__kmpc_end_sharing_variables() + +; OPTIMISTIC-NOT: Function Attrs +; OPTIMISTIC: declare void @__kmpc_error(ptr, i32, ptr) + +; OPTIMISTIC-NOT: Function Attrs +; OPTIMISTIC: declare void @__kmpc_fork_call_if(ptr nocapture nofree readonly, i32, ptr nocapture nofree readonly, i32, ptr) + +; OPTIMISTIC: ; Function Attrs: nosync nounwind +; OPTIMISTIC-NEXT: declare void @__kmpc_free_shared(ptr allocptr nocapture, i64) + +; OPTIMISTIC: ; Function Attrs: nofree nosync nounwind willreturn memory(inaccessiblemem: read) +; OPTIMISTIC-NEXT: declare i32 @__kmpc_get_hardware_num_blocks() + +; OPTIMISTIC: ; Function Attrs: nofree nosync nounwind willreturn memory(inaccessiblemem: read) +; OPTIMISTIC-NEXT: declare i32 @__kmpc_get_hardware_num_threads_in_block() + +; OPTIMISTIC: ; Function Attrs: nofree nosync nounwind willreturn memory(inaccessiblemem: read) +; OPTIMISTIC-NEXT: declare i32 @__kmpc_get_hardware_thread_id_in_block() + +; OPTIMISTIC-NOT: Function Attrs +; OPTIMISTIC: declare void @__kmpc_get_shared_variables(ptr) + +; OPTIMISTIC: ; Function Attrs: nofree nosync nounwind willreturn memory(inaccessiblemem: read) +; OPTIMISTIC-NEXT: declare i32 @__kmpc_get_warp_size() + +; OPTIMISTIC-NOT: Function Attrs +; OPTIMISTIC: declare i8 @__kmpc_is_spmd_exec_mode() + +; OPTIMISTIC-NOT: Function Attrs +; OPTIMISTIC: declare void @__kmpc_kernel_end_parallel() + +; OPTIMISTIC-NOT: Function Attrs +; OPTIMISTIC: declare i1 @__kmpc_kernel_parallel(ptr) + +; OPTIMISTIC-NOT: Function Attrs +; OPTIMISTIC: declare void @__kmpc_kernel_prepare_parallel(ptr) + +; OPTIMISTIC: ; Function Attrs: nofree nosync nounwind willreturn memory(argmem: readwrite, inaccessiblemem: readwrite) +; OPTIMISTIC: declare i32 @__kmpc_masked(ptr nocapture nofree readonly, i32, i32) + +; OPTIMISTIC-NOT: Function Attrs +; OPTIMISTIC: declare void @__kmpc_nvptx_end_reduce_nowait(i32) + +; OPTIMISTIC-NOT: Function Attrs +; OPTIMISTIC: declare i32 @__kmpc_nvptx_parallel_reduce_nowait_v2(ptr, i32, i32, i64, ptr, ptr, ptr) + +; OPTIMISTIC-NOT: Function Attrs +; OPTIMISTIC: declare i32 @__kmpc_nvptx_teams_reduce_nowait_v2(ptr, i32, ptr, i32, ptr, ptr, ptr, 
ptr, ptr, ptr, ptr) + +; OPTIMISTIC: nofree nosync nounwind willreturn +; OPTIMISTIC: declare i32 @__kmpc_omp_reg_task_with_affinity(ptr nocapture nofree readonly, i32, ptr nocapture nofree readonly, i32, ptr nocapture nofree readonly) + +; OPTIMISTIC: alwaysinline +; OPTIMISTIC: declare void @__kmpc_parallel_51(ptr, i32, i32, i32, i32, ptr, ptr, ptr, i64) + +; OPTIMISTIC-NOT: Function Attrs +; OPTIMISTIC: declare i32 @__kmpc_shuffle_int32(i32, i16, i16) + +; OPTIMISTIC-NOT: Function Attrs +; OPTIMISTIC: declare i64 @__kmpc_shuffle_int64(i64, i16, i16) + +; OPTIMISTIC-NOT: Function Attrs +; OPTIMISTIC: declare void @__kmpc_target_deinit(ptr, i8) + +; OPTIMISTIC-NOT: Function Attrs +; OPTIMISTIC: declare i32 @__kmpc_target_init(ptr, i8, i1) + +; OPTIMISTIC-NOT: Function Attrs +; OPTIMISTIC: declare void @__tgt_interop_destroy(ptr, i32, ptr, i32, i32, ptr, i32) + +; OPTIMISTIC-NOT: Function Attrs +; OPTIMISTIC: declare void @__tgt_interop_init(ptr, i32, ptr, i32, i32, i64, ptr, i32) + +; OPTIMISTIC-NOT: Function Attrs +; OPTIMISTIC: declare void @__tgt_interop_use(ptr, i32, ptr, i32, i32, ptr, i32) + +; OPTIMISTIC-NOT: Function Attrs +; OPTIMISTIC: declare void @__tgt_target_data_begin_mapper_issue(ptr, i64, i32, ptr, ptr, ptr, ptr, ptr, ptr, ptr) + +; OPTIMISTIC-NOT: Function Attrs +; OPTIMISTIC: declare void @__tgt_target_data_begin_mapper_wait(i64, ptr) + +; OPTIMISTIC: ; Function Attrs: nounwind +; OPTIMISTIC: declare i32 @__tgt_target_kernel(ptr, i64, i32, i32, ptr, ptr) + +; OPTIMISTIC: ; Function Attrs: nounwind +; OPTIMISTIC: declare i32 @__tgt_target_kernel_nowait(ptr, i64, i32, i32, ptr, ptr, i32, ptr, i32, ptr) + +;;; Check extensions of integer params / retvals <= i32. Only functions in this file which are handled in OMPIRBuilder will get these attributes. 
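; (Editor's illustration, not a CHECK line: under the EXT run the expected
; shape is, e.g.,
;   declare dso_local void @omp_set_num_threads(i32 signext)
;   declare signext i32 @__kmpc_global_thread_num(ptr)
; i.e. i32-or-narrower integer arguments and i32 returns of the runtime
; functions known to OMPIRBuilder gain an extension attribute, while i64 and
; ptr arguments are left untouched; the MIPS_EXT and RISCV_EXT runs check the
; corresponding per-target variants.)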
+; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare dso_local void @omp_set_num_threads(i32 signext) + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare dso_local void @omp_set_dynamic(i32 signext) + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare dso_local void @omp_set_nested(i32 signext) + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare dso_local void @omp_set_max_active_levels(i32 signext) + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare dso_local void @omp_set_schedule(i32 signext, i32 signext) + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare dso_local signext i32 @omp_get_num_threads() + +; EXT-NOT: Function Attrs +; EXT: declare dso_local void @use_int(i32) + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare dso_local signext i32 @omp_get_dynamic() + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare dso_local signext i32 @omp_get_nested() + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare dso_local signext i32 @omp_get_max_threads() + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare dso_local signext i32 @omp_get_thread_num() + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare dso_local signext i32 @omp_get_num_procs() + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare dso_local signext i32 @omp_in_parallel() + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare dso_local signext i32 @omp_in_final() + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare dso_local signext i32 @omp_get_active_level() + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare dso_local signext i32 @omp_get_level() + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare dso_local signext i32 @omp_get_ancestor_thread_num(i32 signext) + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare dso_local signext i32 @omp_get_team_size(i32 signext) + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare dso_local signext i32 @omp_get_thread_limit() + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare dso_local signext i32 @omp_get_max_active_levels() + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare dso_local void @omp_get_schedule(ptr nocapture writeonly, ptr nocapture writeonly) + +; EXT-NOT: Function Attrs +; EXT: declare dso_local i32 @omp_get_max_task_priority() + +; EXT-NOT: Function Attrs +; EXT: declare dso_local void @omp_init_lock(ptr) + +; EXT-NOT: Function Attrs +; EXT: declare dso_local void @omp_set_lock(ptr) + +; EXT-NOT: Function Attrs +; EXT: declare dso_local void @omp_unset_lock(ptr) + +; EXT-NOT: Function Attrs +; EXT: declare dso_local void @omp_destroy_lock(ptr) + +; EXT-NOT: Function Attrs +; EXT: declare dso_local i32 @omp_test_lock(ptr) + +; EXT-NOT: Function Attrs +; EXT: declare dso_local void @omp_init_nest_lock(ptr) + +; EXT-NOT: Function Attrs +; EXT: declare dso_local void @omp_set_nest_lock(ptr) + +; EXT-NOT: Function Attrs +; EXT: declare dso_local void @omp_unset_nest_lock(ptr) + +; EXT-NOT: Function Attrs +; EXT: declare dso_local void @omp_destroy_nest_lock(ptr) + +; EXT-NOT: Function Attrs +; EXT: declare dso_local i32 @omp_test_nest_lock(ptr) + +; EXT-NOT: Function Attrs +; EXT: declare dso_local void @omp_init_lock_with_hint(ptr, i32) + +; EXT-NOT: Function Attrs +; EXT: declare dso_local void @omp_init_nest_lock_with_hint(ptr, i32) + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare dso_local double @omp_get_wtime() + +; EXT-NOT: Function Attrs +; EXT: declare dso_local void @use_double(double) + +; EXT-NOT: Function Attrs +; EXT: declare dso_local double 
@omp_get_wtick() + +; EXT-NOT: Function Attrs +; EXT: declare dso_local i32 @omp_get_default_device() + +; EXT-NOT: Function Attrs +; EXT: declare dso_local void @omp_set_default_device(i32) + +; EXT-NOT: Function Attrs +; EXT: declare dso_local i32 @omp_is_initial_device() + +; EXT-NOT: Function Attrs +; EXT: declare dso_local i32 @omp_get_num_devices() + +; EXT-NOT: Function Attrs +; EXT: declare dso_local i32 @omp_get_num_teams() + +; EXT-NOT: Function Attrs +; EXT: declare dso_local i32 @omp_get_team_num() + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare dso_local signext i32 @omp_get_cancellation() + +; EXT-NOT: Function Attrs +; EXT: declare dso_local i32 @omp_get_initial_device() + +; EXT-NOT: Function Attrs +; EXT: declare dso_local ptr @omp_target_alloc(i64, i32) + +; EXT-NOT: Function Attrs +; EXT: declare dso_local void @use_voidptr(ptr) + +; EXT-NOT: Function Attrs +; EXT: declare dso_local void @omp_target_free(ptr, i32) + +; EXT-NOT: Function Attrs +; EXT: declare dso_local i32 @omp_target_is_present(ptr, i32) + +; EXT-NOT: Function Attrs +; EXT: declare dso_local i32 @omp_target_memcpy(ptr, ptr, i64, i64, i64, i32, i32) + +; EXT-NOT: Function Attrs +; EXT: declare dso_local i32 @omp_target_associate_ptr(ptr, ptr, i64, i64, i32) + +; EXT-NOT: Function Attrs +; EXT: declare dso_local i32 @omp_target_disassociate_ptr(ptr, i32) + +; EXT-NOT: Function Attrs +; EXT: declare dso_local i32 @omp_get_device_num() + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare dso_local signext i32 @omp_get_proc_bind() + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare dso_local signext i32 @omp_get_num_places() + +; EXT-NOT: Function Attrs +; EXT: declare dso_local i32 @omp_get_place_num_procs(i32) + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare dso_local void @omp_get_place_proc_ids(i32 signext, ptr nocapture writeonly) + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare dso_local signext i32 @omp_get_place_num() + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare dso_local signext i32 @omp_get_partition_num_places() + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare dso_local void @omp_get_partition_place_nums(ptr) + +; EXT-NOT: Function Attrs +; EXT: declare dso_local i32 @omp_control_tool(i32, i32, ptr) + +; EXT-NOT: Function Attrs +; EXT: declare dso_local void @omp_destroy_allocator(i64) + +; EXT-NOT: Function Attrs +; EXT: declare dso_local void @omp_set_default_allocator(i64) + +; EXT-NOT: Function Attrs +; EXT: declare dso_local i64 @omp_get_default_allocator() + +; EXT-NOT: Function Attrs +; EXT: declare dso_local ptr @omp_alloc(i64, i64) + +; EXT-NOT: Function Attrs +; EXT: declare dso_local void @omp_free(ptr, i64) + +; EXT-NOT: Function Attrs +; EXT: declare dso_local void @ompc_set_affinity_format(ptr) + +; EXT-NOT: Function Attrs +; EXT: declare dso_local i64 @ompc_get_affinity_format(ptr, i64) + +; EXT-NOT: Function Attrs +; EXT: declare dso_local void @use_sizet(i64) + +; EXT-NOT: Function Attrs +; EXT: declare dso_local void @ompc_display_affinity(ptr) + +; EXT-NOT: Function Attrs +; EXT: declare dso_local i64 @ompc_capture_affinity(ptr, i64, ptr) + +; EXT-NOT: Function Attrs +; EXT: declare dso_local void @omp_fulfill_event(i64) + +; EXT-NOT: Function Attrs +; EXT: declare dso_local i32 @omp_pause_resource(i32, i32) + +; EXT-NOT: Function Attrs +; EXT: declare dso_local i32 @omp_pause_resource_all(i32) + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare dso_local signext i32 @omp_get_supported_active_levels() + +; EXT: ; 
Function Attrs: convergent nounwind +; EXT-NEXT: declare void @__kmpc_barrier(ptr, i32 signext) + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare signext i32 @__kmpc_cancel(ptr, i32 signext, i32 signext) + +; EXT: ; Function Attrs: convergent nounwind +; EXT-NEXT: declare signext i32 @__kmpc_cancel_barrier(ptr, i32 signext) + +; EXT: ; Function Attrs: convergent nounwind +; EXT-NEXT: declare void @__kmpc_flush(ptr) + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare signext i32 @__kmpc_global_thread_num(ptr) + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare void @__kmpc_fork_call(ptr, i32 signext, ptr, ...) + +; EXT: ; Function Attrs: convergent nounwind +; EXT-NEXT: declare signext i32 @__kmpc_omp_taskwait(ptr, i32 signext) + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare signext i32 @__kmpc_omp_taskyield(ptr, i32 signext, i32 signext) + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare void @__kmpc_push_num_threads(ptr, i32 signext, i32 signext) + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare void @__kmpc_push_proc_bind(ptr, i32 signext, i32 signext) + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare void @__kmpc_serialized_parallel(ptr, i32 signext) + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare void @__kmpc_end_serialized_parallel(ptr, i32 signext) + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare signext i32 @__kmpc_master(ptr, i32 signext) + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare void @__kmpc_end_master(ptr, i32 signext) + +; EXT: ; Function Attrs: convergent nounwind +; EXT-NEXT: declare void @__kmpc_critical(ptr, i32 signext, ptr) + +; EXT: ; Function Attrs: convergent nounwind +; EXT-NEXT: declare void @__kmpc_critical_with_hint(ptr, i32 signext, ptr, i32 zeroext) + +; EXT: ; Function Attrs: convergent nounwind +; EXT-NEXT: declare void @__kmpc_end_critical(ptr, i32 signext, ptr) + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare void @__kmpc_begin(ptr, i32 signext) + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare void @__kmpc_end(ptr) + +; EXT: ; Function Attrs: convergent nounwind +; EXT-NEXT: declare signext i32 @__kmpc_reduce(ptr, i32 signext, i32 signext, i64, ptr, ptr, ptr) + +; EXT: ; Function Attrs: convergent nounwind +; EXT-NEXT: declare signext i32 @__kmpc_reduce_nowait(ptr, i32 signext, i32 signext, i64, ptr, ptr, ptr) + +; EXT: ; Function Attrs: convergent nounwind +; EXT-NEXT: declare void @__kmpc_end_reduce(ptr, i32 signext, ptr) + +; EXT: ; Function Attrs: convergent nounwind +; EXT-NEXT: declare void @__kmpc_end_reduce_nowait(ptr, i32 signext, ptr) + +; EXT: ; Function Attrs: convergent nounwind +; EXT-NEXT: declare void @__kmpc_ordered(ptr, i32 signext) + +; EXT: ; Function Attrs: convergent nounwind +; EXT-NEXT: declare void @__kmpc_end_ordered(ptr, i32 signext) + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare void @__kmpc_for_static_init_4(ptr, i32 signext, i32 signext, ptr, ptr, ptr, ptr, i32 signext, i32 signext) + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare void @__kmpc_for_static_init_4u(ptr, i32 signext, i32 signext, ptr, ptr, ptr, ptr, i32 signext, i32 signext) + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare void @__kmpc_for_static_init_8(ptr, i32 signext, i32 signext, ptr, ptr, ptr, ptr, i64, i64) + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare void @__kmpc_for_static_init_8u(ptr, i32 signext, i32 signext, ptr, ptr, ptr, ptr, i64, i64) + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare void 
@__kmpc_for_static_fini(ptr, i32 signext) + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare void @__kmpc_team_static_init_4(ptr, i32 signext, ptr, ptr, ptr, ptr, i32 signext, i32 signext) + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare void @__kmpc_team_static_init_4u(ptr, i32 signext, ptr, ptr, ptr, ptr, i32 signext, i32 signext) + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare void @__kmpc_team_static_init_8(ptr, i32 signext, ptr, ptr, ptr, ptr, i64, i64) + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare void @__kmpc_team_static_init_8u(ptr, i32 signext, ptr, ptr, ptr, ptr, i64, i64) + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare void @__kmpc_dist_for_static_init_4(ptr, i32 signext, i32 signext, ptr, ptr, ptr, ptr, ptr, i32 signext, i32 signext) + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare void @__kmpc_dist_for_static_init_4u(ptr, i32 signext, i32 signext, ptr, ptr, ptr, ptr, ptr, i32 signext, i32 signext) + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare void @__kmpc_dist_for_static_init_8(ptr, i32 signext, i32 signext, ptr, ptr, ptr, ptr, ptr, i64, i64) + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare void @__kmpc_dist_for_static_init_8u(ptr, i32 signext, i32 signext, ptr, ptr, ptr, ptr, ptr, i64, i64) + +; EXT: ; Function Attrs: convergent nounwind +; EXT-NEXT: declare signext i32 @__kmpc_single(ptr, i32 signext) + +; EXT: ; Function Attrs: convergent nounwind +; EXT-NEXT: declare void @__kmpc_end_single(ptr, i32 signext) + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare ptr @__kmpc_omp_task_alloc(ptr, i32 signext, i32 signext, i64, i64, ptr) + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare signext i32 @__kmpc_omp_task(ptr, i32 signext, ptr) + +; EXT: ; Function Attrs: convergent nounwind +; EXT-NEXT: declare void @__kmpc_end_taskgroup(ptr, i32 signext) + +; EXT: ; Function Attrs: convergent nounwind +; EXT-NEXT: declare void @__kmpc_taskgroup(ptr, i32 signext) + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare void @__kmpc_dist_dispatch_init_4(ptr, i32 signext, i32 signext, ptr, i32 signext, i32 signext, i32 signext, i32 signext) + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare void @__kmpc_dist_dispatch_init_4u(ptr, i32 signext, i32 signext, ptr, i32 zeroext, i32 zeroext, i32 signext, i32 signext) + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare void @__kmpc_dist_dispatch_init_8(ptr, i32 signext, i32 signext, ptr, i64, i64, i64, i64) + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare void @__kmpc_dist_dispatch_init_8u(ptr, i32 signext, i32 signext, ptr, i64, i64, i64, i64) + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare void @__kmpc_dispatch_init_4(ptr, i32 signext, i32 signext, i32 signext, i32 signext, i32 signext, i32 signext) + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare void @__kmpc_dispatch_init_4u(ptr, i32 signext, i32 signext, i32 zeroext, i32 zeroext, i32 signext, i32 signext) + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare void @__kmpc_dispatch_init_8(ptr, i32 signext, i32 signext, i64, i64, i64, i64) + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare void @__kmpc_dispatch_init_8u(ptr, i32 signext, i32 signext, i64, i64, i64, i64) + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare signext i32 @__kmpc_dispatch_next_4(ptr, i32 signext, ptr, ptr, ptr, ptr) + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare signext i32 @__kmpc_dispatch_next_4u(ptr, i32 signext, ptr, ptr, ptr, ptr) + +; EXT: ; Function 
Attrs: nounwind +; EXT-NEXT: declare signext i32 @__kmpc_dispatch_next_8(ptr, i32 signext, ptr, ptr, ptr, ptr) + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare signext i32 @__kmpc_dispatch_next_8u(ptr, i32 signext, ptr, ptr, ptr, ptr) + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare void @__kmpc_dispatch_fini_4(ptr, i32 signext) + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare void @__kmpc_dispatch_fini_4u(ptr, i32 signext) + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare void @__kmpc_dispatch_fini_8(ptr, i32 signext) + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare void @__kmpc_dispatch_fini_8u(ptr, i32 signext) + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare void @__kmpc_omp_task_begin_if0(ptr, i32 signext, ptr) + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare void @__kmpc_omp_task_complete_if0(ptr, i32 signext, ptr) + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare signext i32 @__kmpc_omp_task_with_deps(ptr, i32 signext, ptr, i32 signext, ptr, i32 signext, ptr) + +; EXT: ; Function Attrs: convergent nounwind +; EXT-NEXT: declare void @__kmpc_omp_wait_deps(ptr, i32 signext, i32 signext, ptr, i32 signext, ptr) + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare signext i32 @__kmpc_cancellationpoint(ptr, i32 signext, i32 signext) + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare void @__kmpc_push_num_teams(ptr, i32 signext, i32 signext, i32 signext) + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare void @__kmpc_fork_teams(ptr, i32 signext, ptr, ...) + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare void @__kmpc_taskloop(ptr, i32 signext, ptr, i32 signext, ptr, ptr, i64, i32 signext, i32 signext, i64, ptr) + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare ptr @__kmpc_omp_target_task_alloc(ptr, i32 signext, i32 signext, i64, i64, ptr, i64) + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare ptr @__kmpc_taskred_modifier_init(ptr, i32 signext, i32 signext, i32 signext, ptr) + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare ptr @__kmpc_taskred_init(i32 signext, i32 signext, ptr) + +; EXT: ; Function Attrs: convergent nounwind +; EXT-NEXT: declare void @__kmpc_task_reduction_modifier_fini(ptr, i32 signext, i32 signext) + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare void @__kmpc_copyprivate(ptr, i32 signext, i64, ptr, ptr, i32 signext) + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare ptr @__kmpc_threadprivate_cached(ptr, i32 signext, ptr, i64, ptr) + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare void @__kmpc_threadprivate_register(ptr, ptr, ptr, ptr, ptr) + +; EXT: ; Function Attrs: convergent nounwind +; EXT-NEXT: declare void @__kmpc_doacross_init(ptr, i32 signext, i32 signext, ptr) + +; EXT: ; Function Attrs: convergent nounwind +; EXT-NEXT: declare void @__kmpc_doacross_wait(ptr, i32 signext, ptr) + +; EXT: ; Function Attrs: convergent nounwind +; EXT-NEXT: declare void @__kmpc_doacross_post(ptr, i32 signext, ptr) + +; EXT: ; Function Attrs: convergent nounwind +; EXT-NEXT: declare void @__kmpc_doacross_fini(ptr, i32 signext) + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare ptr @__kmpc_alloc(i32 signext, i64, ptr) + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare void @__kmpc_free(i32 signext, ptr, ptr) + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare ptr @__kmpc_init_allocator(i32 signext, ptr, i32 signext, ptr) + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare void @__kmpc_destroy_allocator(i32 signext, 
ptr) + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare void @__kmpc_push_target_tripcount_mapper(ptr, i64, i64) + +; EXT: ; Function Attrs: convergent nounwind +; EXT-NEXT: declare i64 @__kmpc_warp_active_thread_mask() + +; EXT: ; Function Attrs: convergent nounwind +; EXT-NEXT: declare void @__kmpc_syncwarp(i64) + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare signext i32 @__tgt_target_mapper(ptr, i64, ptr, i32 signext, ptr, ptr, ptr, ptr, ptr, ptr) + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare signext i32 @__tgt_target_nowait_mapper(ptr, i64, ptr, i32 signext, ptr, ptr, ptr, ptr, ptr, ptr, i32 signext, ptr, i32 signext, ptr) + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare signext i32 @__tgt_target_teams_mapper(ptr, i64, ptr, i32 signext, ptr, ptr, ptr, ptr, ptr, ptr, i32 signext, i32 signext) + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare signext i32 @__tgt_target_teams_nowait_mapper(ptr, i64, ptr, i32 signext, ptr, ptr, ptr, ptr, ptr, ptr, i32 signext, i32 signext, i32 signext, ptr, i32 signext, ptr) + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare void @__tgt_register_requires(i64) + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare void @__tgt_target_data_begin_mapper(ptr, i64, i32 signext, ptr, ptr, ptr, ptr, ptr, ptr) + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare void @__tgt_target_data_begin_nowait_mapper(ptr, i64, i32 signext, ptr, ptr, ptr, ptr, ptr, ptr) + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare void @__tgt_target_data_end_mapper(ptr, i64, i32 signext, ptr, ptr, ptr, ptr, ptr, ptr) + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare void @__tgt_target_data_end_nowait_mapper(ptr, i64, i32 signext, ptr, ptr, ptr, ptr, ptr, ptr) + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare void @__tgt_target_data_update_mapper(ptr, i64, i32 signext, ptr, ptr, ptr, ptr, ptr, ptr) + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare void @__tgt_target_data_update_nowait_mapper(ptr, i64, i32 signext, ptr, ptr, ptr, ptr, ptr, ptr) + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare i64 @__tgt_mapper_num_components(ptr) + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare void @__tgt_push_mapper_component(ptr, ptr, ptr, i64, i64, ptr) + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare ptr @__kmpc_task_allow_completion_event(ptr, i32 signext, ptr) + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare ptr @__kmpc_task_reduction_get_th_data(i32 signext, ptr, ptr) + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare ptr @__kmpc_task_reduction_init(i32 signext, i32 signext, ptr) + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare ptr @__kmpc_task_reduction_modifier_init(ptr, i32 signext, i32 signext, i32 signext, ptr) + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare void @__kmpc_proxy_task_completed_ooo(ptr) + +; EXT: ; Function Attrs: cold convergent noinline nounwind +; EXT-NEXT: declare void @__kmpc_barrier_simple_spmd(ptr nocapture nofree readonly, i32 signext) + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare ptr @__kmpc_aligned_alloc(i32 signext, i64, i64, ptr) + +; EXT: ; Function Attrs: nosync nounwind allocsize(0) +; EXT-NEXT: declare ptr @__kmpc_alloc_shared(i64) + +; EXT: ; Function Attrs: convergent nounwind +; EXT: declare void @__kmpc_barrier_simple_generic(ptr, i32 signext) + +; EXT-NOT: Function Attrs +; EXT: declare void @__kmpc_begin_sharing_variables(ptr, i64) + +; EXT: ; Function Attrs: nounwind +; EXT: declare void 
@__kmpc_distribute_static_fini(ptr, i32 signext) + +; EXT: ; Function Attrs: nounwind +; EXT: declare void @__kmpc_distribute_static_init_4(ptr, i32 signext, i32 signext, ptr, ptr, ptr, ptr, i32 signext, i32 signext) + +; EXT: ; Function Attrs: nounwind +; EXT: declare void @__kmpc_distribute_static_init_4u(ptr, i32 signext, i32 signext, ptr, ptr, ptr, ptr, i32 signext, i32 signext) + +; EXT: ; Function Attrs: nounwind +; EXT: declare void @__kmpc_distribute_static_init_8(ptr, i32 signext, i32 signext, ptr, ptr, ptr, ptr, i64, i64) + +; EXT: ; Function Attrs: nounwind +; EXT: declare void @__kmpc_distribute_static_init_8u(ptr, i32 signext, i32 signext, ptr, ptr, ptr, ptr, i64, i64) + +; EXT: ; Function Attrs: nounwind +; EXT: declare void @__kmpc_end_masked(ptr, i32 signext) + +; EXT-NOT: Function Attrs +; EXT: declare void @__kmpc_end_sharing_variables() + +; EXT-NOT: Function Attrs +; EXT: declare void @__kmpc_error(ptr, i32 signext, ptr) + +; EXT-NOT: Function Attrs +; EXT: declare void @__kmpc_fork_call_if(ptr, i32 signext, ptr, i32 signext, ptr) + +; EXT: ; Function Attrs: nosync nounwind +; EXT-NEXT: declare void @__kmpc_free_shared(ptr allocptr nocapture, i64) + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare zeroext i32 @__kmpc_get_hardware_num_blocks() + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare zeroext i32 @__kmpc_get_hardware_num_threads_in_block() + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare zeroext i32 @__kmpc_get_hardware_thread_id_in_block() + +; EXT-NOT: Function Attrs +; EXT: declare void @__kmpc_get_shared_variables(ptr) + +; EXT: ; Function Attrs: nounwind +; EXT-NEXT: declare zeroext i32 @__kmpc_get_warp_size() + +; EXT-NOT: Function Attrs +; EXT: declare signext i8 @__kmpc_is_spmd_exec_mode() + +; EXT-NOT: Function Attrs +; EXT: declare void @__kmpc_kernel_end_parallel() + +; EXT-NOT: Function Attrs +; EXT: declare i1 @__kmpc_kernel_parallel(ptr) + +; EXT-NOT: Function Attrs +; EXT: declare void @__kmpc_kernel_prepare_parallel(ptr) + +; EXT: ; Function Attrs: nounwind +; EXT: declare signext i32 @__kmpc_masked(ptr, i32 signext, i32 signext) + +; EXT-NOT: Function Attrs +; EXT: declare void @__kmpc_nvptx_end_reduce_nowait(i32 signext) + +; EXT-NOT: Function Attrs +; EXT: declare signext i32 @__kmpc_nvptx_parallel_reduce_nowait_v2(ptr, i32 signext, i32 signext, i64, ptr, ptr, ptr) + +; EXT-NOT: Function Attrs +; EXT: declare signext i32 @__kmpc_nvptx_teams_reduce_nowait_v2(ptr, i32 signext, ptr, i32 zeroext, ptr, ptr, ptr, ptr, ptr, ptr, ptr) + +; EXT: ; Function Attrs: nounwind +; EXT: declare signext i32 @__kmpc_omp_reg_task_with_affinity(ptr, i32 signext, ptr, i32 signext, ptr) + +; EXT: ; Function Attrs: alwaysinline +; EXT: declare void @__kmpc_parallel_51(ptr, i32 signext, i32 signext, i32 signext, i32 signext, ptr, ptr, ptr, i64) + +; EXT-NOT: Function Attrs +; EXT: declare signext i32 @__kmpc_shuffle_int32(i32 signext, i16 signext, i16 signext) + +; EXT-NOT: Function Attrs +; EXT: declare i64 @__kmpc_shuffle_int64(i64, i16 signext, i16 signext) + +; EXT-NOT: Function Attrs +; EXT: declare void @__kmpc_target_deinit(ptr, i8 signext) + +; EXT-NOT: Function Attrs +; EXT: declare signext i32 @__kmpc_target_init(ptr, i8 signext, i1 signext) + +; EXT-NOT: Function Attrs +; EXT: declare void @__tgt_interop_destroy(ptr, i32 signext, ptr, i32 signext, i32 signext, ptr, i32 signext) + +; EXT-NOT: Function Attrs +; EXT: declare void @__tgt_interop_init(ptr, i32 signext, ptr, i32 signext, i32 signext, i64, ptr, i32 signext) + +; EXT-NOT: 
Function Attrs +; EXT: declare void @__tgt_interop_use(ptr, i32 signext, ptr, i32 signext, i32 signext, ptr, i32 signext) + +; EXT-NOT: Function Attrs +; EXT: declare void @__tgt_target_data_begin_mapper_issue(ptr, i64, i32 signext, ptr, ptr, ptr, ptr, ptr, ptr, ptr) + +; EXT-NOT: Function Attrs +; EXT: declare void @__tgt_target_data_begin_mapper_wait(i64, ptr) + +; EXT: ; Function Attrs: nounwind +; EXT: declare signext i32 @__tgt_target_kernel(ptr, i64, i32 signext, i32 signext, ptr, ptr) + +; EXT: ; Function Attrs: nounwind +; EXT: declare signext i32 @__tgt_target_kernel_nowait(ptr, i64, i32 signext, i32 signext, ptr, ptr, i32 signext, ptr, i32 signext, ptr) + +; MIPS_EXT: ; Function Attrs: nounwind +; MIPS_EXT: declare dso_local void @omp_set_num_threads(i32 signext) + +; MIPS_EXT: ; Function Attrs: nounwind +; MIPS_EXT: declare dso_local i32 @omp_get_num_threads() + +; MIPS_EXT: ; Function Attrs: convergent nounwind +; MIPS_EXT: declare void @__kmpc_critical_with_hint(ptr, i32 signext, ptr, i32 signext) + +; MIPS_EXT: ; Function Attrs: nounwind +; MIPS_EXT: declare i32 @__kmpc_get_hardware_num_blocks() + +; RISCV_EXT: ; Function Attrs: nounwind +; RISCV_EXT: declare signext i32 @__kmpc_get_hardware_num_blocks() + +; RISCV_EXT: ; Function Attrs: nounwind +; RISCV_EXT: declare signext i32 @__kmpc_get_hardware_num_threads_in_block() + +; RISCV_EXT: ; Function Attrs: nounwind +; RISCV_EXT: declare signext i32 @__kmpc_get_hardware_thread_id_in_block() + +; RISCV_EXT: ; Function Attrs: nounwind +; RISCV_EXT: declare signext i32 @__kmpc_get_warp_size() + !llvm.module.flags = !{!0} !0 = !{i32 7, !"openmp", i32 50}
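; (Editor's note, illustrative usage only: each extension prefix above can be
; exercised on its own by running the matching RUN line by hand, e.g.
;   opt < add_attributes.ll -S -passes=openmp-opt-cgscc \
;       -mtriple=s390x-unknown-linux | FileCheck add_attributes.ll --check-prefix=EXT
; which requires an opt build with the SystemZ target registered, as the
; REQUIRES line at the top of this test states.)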