diff --git a/llvm/include/llvm/Frontend/OpenMP/OMPKinds.def b/llvm/include/llvm/Frontend/OpenMP/OMPKinds.def
--- a/llvm/include/llvm/Frontend/OpenMP/OMPKinds.def
+++ b/llvm/include/llvm/Frontend/OpenMP/OMPKinds.def
@@ -230,8 +230,23 @@
 __OMP_TYPE(Void)
 __OMP_TYPE(Int8)
 __OMP_TYPE(Int32)
+__OMP_TYPE(Int64)
 __OMP_TYPE(Int8Ptr)
 __OMP_TYPE(Int32Ptr)
+__OMP_TYPE(Int64Ptr)
+
+#define __OMP_PTR_TYPE(NAME, BASE) OMP_TYPE(NAME, BASE->getPointerTo())
+
+__OMP_PTR_TYPE(VoidPtr, Int8)
+__OMP_PTR_TYPE(VoidPtrPtr, VoidPtr)
+__OMP_PTR_TYPE(VoidPtrPtrPtr, VoidPtrPtr)
+
+// TODO: Replace this with the real size_t type
+#define __OMP_SIZE_TYPE(NAME) OMP_TYPE(NAME, Type::getInt64Ty(Ctx))
+__OMP_SIZE_TYPE(SizeTy)
+#undef __OMP_SIZE_TYPE
+
+#undef __OMP_PTR_TYPE
 
 #undef __OMP_TYPE
 #undef OMP_TYPE
@@ -280,6 +295,12 @@
   OMP_FUNCTION_TYPE(VarName, IsVarArg, ReturnType, __VA_ARGS__)
 
 __OMP_FUNCTION_TYPE(ParallelTask, true, Void, Int32Ptr, Int32Ptr)
+__OMP_FUNCTION_TYPE(ReduceFunction, false, Void, VoidPtr, VoidPtr)
+__OMP_FUNCTION_TYPE(CopyFunction, false, Void, VoidPtr, VoidPtr)
+__OMP_FUNCTION_TYPE(KmpcCtor, false, VoidPtr, VoidPtr)
+__OMP_FUNCTION_TYPE(KmpcDtor, false, Void, VoidPtr)
+__OMP_FUNCTION_TYPE(KmpcCopyCtor, false, VoidPtr, VoidPtr, VoidPtr)
+__OMP_FUNCTION_TYPE(TaskRoutineEntry, false, Int32, Int32, VoidPtr)
 
 #undef __OMP_FUNCTION_TYPE
 #undef OMP_FUNCTION_TYPE
@@ -349,6 +370,125 @@
 __OMP_RTL(__kmpc_end_critical, false, Void, IdentPtr, Int32,
           KmpCriticalNamePtrTy)
 
+__OMP_RTL(__kmpc_begin, false, Void, IdentPtr, Int32)
+__OMP_RTL(__kmpc_end, false, Void, IdentPtr)
+
+__OMP_RTL(__kmpc_reduce, false, Int32, IdentPtr, Int32, Int32, SizeTy, VoidPtr,
+          ReduceFunctionPtr, KmpCriticalNamePtrTy)
+__OMP_RTL(__kmpc_reduce_nowait, false, Int32, IdentPtr, Int32, Int32, SizeTy, VoidPtr,
+          ReduceFunctionPtr, KmpCriticalNamePtrTy)
+__OMP_RTL(__kmpc_end_reduce, false, Void, IdentPtr, Int32, KmpCriticalNamePtrTy)
+__OMP_RTL(__kmpc_end_reduce_nowait, false, Void, IdentPtr, Int32, KmpCriticalNamePtrTy)
+
+__OMP_RTL(__kmpc_ordered, false, Void, IdentPtr, Int32)
+__OMP_RTL(__kmpc_end_ordered, false, Void, IdentPtr, Int32)
+
+__OMP_RTL(__kmpc_for_static_init_4, false, Void, IdentPtr, Int32, Int32, Int32Ptr,
+          Int32Ptr, Int32Ptr, Int32Ptr, Int32, Int32)
+__OMP_RTL(__kmpc_for_static_init_4u, false, Void, IdentPtr, Int32, Int32, Int32Ptr,
+          Int32Ptr, Int32Ptr, Int32Ptr, Int32, Int32)
+__OMP_RTL(__kmpc_for_static_init_8, false, Void, IdentPtr, Int32, Int32, Int32Ptr,
+          Int64Ptr, Int64Ptr, Int64Ptr, Int64, Int64)
+__OMP_RTL(__kmpc_for_static_init_8u, false, Void, IdentPtr, Int32, Int32, Int32Ptr,
+          Int64Ptr, Int64Ptr, Int64Ptr, Int64, Int64)
+__OMP_RTL(__kmpc_for_static_fini, false, Void, IdentPtr, Int32)
+__OMP_RTL(__kmpc_dist_dispatch_init_4, false, Void, IdentPtr, Int32, Int32,
+          Int32Ptr, Int32, Int32, Int32, Int32)
+__OMP_RTL(__kmpc_dist_dispatch_init_4u, false, Void, IdentPtr, Int32, Int32,
+          Int32Ptr, Int32, Int32, Int32, Int32)
+__OMP_RTL(__kmpc_dist_dispatch_init_8, false, Void, IdentPtr, Int32, Int32,
+          Int32Ptr, Int64, Int64, Int64, Int64)
+__OMP_RTL(__kmpc_dist_dispatch_init_8u, false, Void, IdentPtr, Int32, Int32,
+          Int32Ptr, Int64, Int64, Int64, Int64)
+__OMP_RTL(__kmpc_dispatch_init_4, false, Void, IdentPtr, Int32, Int32, Int32,
+          Int32, Int32, Int32)
+__OMP_RTL(__kmpc_dispatch_init_4u, false, Void, IdentPtr, Int32, Int32, Int32,
+          Int32, Int32, Int32)
+__OMP_RTL(__kmpc_dispatch_init_8, false, Void, IdentPtr, Int32, Int32, Int64,
+          Int64, Int64, Int64)
+__OMP_RTL(__kmpc_dispatch_init_8u, false, Void, IdentPtr, Int32, Int32, Int64,
+          Int64, Int64, Int64)
+__OMP_RTL(__kmpc_dispatch_next_4, false, Int32, IdentPtr, Int32, Int32Ptr,
+          Int32Ptr, Int32Ptr, Int32Ptr)
+__OMP_RTL(__kmpc_dispatch_next_4u, false, Int32, IdentPtr, Int32, Int32Ptr,
+          Int32Ptr, Int32Ptr, Int32Ptr)
+__OMP_RTL(__kmpc_dispatch_next_8, false, Int32, IdentPtr, Int32, Int32Ptr,
+          Int64Ptr, Int64Ptr, Int64Ptr)
+__OMP_RTL(__kmpc_dispatch_next_8u, false, Int32, IdentPtr, Int32, Int32Ptr,
+          Int64Ptr, Int64Ptr, Int64Ptr)
+__OMP_RTL(__kmpc_dispatch_fini_4, false, Void, IdentPtr, Int32)
+__OMP_RTL(__kmpc_dispatch_fini_4u, false, Void, IdentPtr, Int32)
+__OMP_RTL(__kmpc_dispatch_fini_8, false, Void, IdentPtr, Int32)
+__OMP_RTL(__kmpc_dispatch_fini_8u, false, Void, IdentPtr, Int32)
+__OMP_RTL(__kmpc_team_static_init_4, false, Void, IdentPtr, Int32, Int32Ptr,
+          Int32Ptr, Int32Ptr, Int32Ptr, Int32, Int32)
+__OMP_RTL(__kmpc_team_static_init_4u, false, Void, IdentPtr, Int32, Int32Ptr,
+          Int32Ptr, Int32Ptr, Int32Ptr, Int32, Int32)
+__OMP_RTL(__kmpc_team_static_init_8, false, Void, IdentPtr, Int32, Int32Ptr,
+          Int64Ptr, Int64Ptr, Int64Ptr, Int64, Int64)
+__OMP_RTL(__kmpc_team_static_init_8u, false, Void, IdentPtr, Int32, Int32Ptr,
+          Int64Ptr, Int64Ptr, Int64Ptr, Int64, Int64)
+__OMP_RTL(__kmpc_dist_for_static_init_4, false, Void, IdentPtr, Int32, Int32,
+          Int32Ptr, Int32Ptr, Int32Ptr, Int32Ptr, Int32Ptr, Int32, Int32)
+__OMP_RTL(__kmpc_dist_for_static_init_4u, false, Void, IdentPtr, Int32, Int32,
+          Int32Ptr, Int32Ptr, Int32Ptr, Int32Ptr, Int32Ptr, Int32, Int32)
+__OMP_RTL(__kmpc_dist_for_static_init_8, false, Void, IdentPtr, Int32, Int32,
+          Int32Ptr, Int64Ptr, Int64Ptr, Int64Ptr, Int64Ptr, Int64, Int64)
+__OMP_RTL(__kmpc_dist_for_static_init_8u, false, Void, IdentPtr, Int32, Int32,
+          Int32Ptr, Int64Ptr, Int64Ptr, Int64Ptr, Int64Ptr, Int64, Int64)
+
+__OMP_RTL(__kmpc_single, false, Int32, IdentPtr, Int32)
+__OMP_RTL(__kmpc_end_single, false, Void, IdentPtr, Int32)
+
+__OMP_RTL(__kmpc_omp_task_alloc, false, VoidPtr, IdentPtr, Int32, Int32, SizeTy,
+          SizeTy, TaskRoutineEntryPtr)
+__OMP_RTL(__kmpc_omp_task, false, Int32, IdentPtr, Int32, VoidPtr)
+__OMP_RTL(__kmpc_end_taskgroup, false, Void, IdentPtr, Int32)
+__OMP_RTL(__kmpc_taskgroup, false, Void, IdentPtr, Int32)
+__OMP_RTL(__kmpc_omp_task_begin_if0, false, Void, IdentPtr, Int32, VoidPtr)
+__OMP_RTL(__kmpc_omp_task_complete_if0, false, Void, IdentPtr, Int32, VoidPtr)
+__OMP_RTL(__kmpc_omp_task_with_deps, false, Int32, IdentPtr, Int32, VoidPtr,
+          Int32, VoidPtr, Int32, VoidPtr)
+__OMP_RTL(__kmpc_taskloop, false, Void, IdentPtr, Int32, VoidPtr, Int32,
+          Int64Ptr, Int64Ptr, Int64, Int32, Int32, Int64, VoidPtr)
+__OMP_RTL(__kmpc_task_allow_completion_event, false, VoidPtr, IdentPtr, Int32,
+          VoidPtr)
+__OMP_RTL(__kmpc_omp_target_task_alloc, false, VoidPtr, IdentPtr, Int32, Int32,
+          SizeTy, SizeTy, VoidPtr, Int64)
+__OMP_RTL(__kmpc_taskred_modifier_init, false, VoidPtr, VoidPtr, Int32, Int32,
+          Int32, VoidPtr)
+__OMP_RTL(__kmpc_taskred_init, false, VoidPtr, Int32, Int32, VoidPtr)
+__OMP_RTL(__kmpc_task_reduction_modifier_fini, false, Void, VoidPtr, Int32,
+          Int32)
+__OMP_RTL(__kmpc_task_reduction_get_th_data, false, VoidPtr, Int32, VoidPtr,
+          VoidPtr)
+__OMP_RTL(__kmpc_task_reduction_init, false, VoidPtr, Int32, Int32, VoidPtr)
+__OMP_RTL(__kmpc_task_reduction_modifier_init, false, VoidPtr, VoidPtr, Int32,
+          Int32, Int32, VoidPtr)
+__OMP_RTL(__kmpc_proxy_task_completed_ooo, false, Void, VoidPtr)
+
+__OMP_RTL(__kmpc_omp_wait_deps, false, Void, IdentPtr, Int32, Int32, VoidPtr,
+          Int32, VoidPtr)
+__OMP_RTL(__kmpc_cancellationpoint, false, Int32, IdentPtr, Int32, Int32)
+
+__OMP_RTL(__kmpc_fork_teams, true, Void, IdentPtr, Int32, ParallelTaskPtr)
+__OMP_RTL(__kmpc_push_num_teams, false, Void, IdentPtr, Int32, Int32, Int32)
+
+__OMP_RTL(__kmpc_copyprivate, false, Void, IdentPtr, Int32, SizeTy, VoidPtr,
+          CopyFunctionPtr, Int32)
+__OMP_RTL(__kmpc_threadprivate_cached, false, VoidPtr, IdentPtr, Int32, VoidPtr,
+          SizeTy, VoidPtrPtrPtr)
+__OMP_RTL(__kmpc_threadprivate_register, false, Void, IdentPtr, VoidPtr,
+          KmpcCtorPtr, KmpcCopyCtorPtr, KmpcDtorPtr)
+
+__OMP_RTL(__kmpc_doacross_init, false, Void, IdentPtr, Int32, Int32, VoidPtr)
+__OMP_RTL(__kmpc_doacross_post, false, Void, IdentPtr, Int32, Int64Ptr)
+__OMP_RTL(__kmpc_doacross_wait, false, Void, IdentPtr, Int32, Int64Ptr)
+__OMP_RTL(__kmpc_doacross_fini, false, Void, IdentPtr, Int32)
+
+__OMP_RTL(__kmpc_alloc, false, VoidPtr, Int32, SizeTy, VoidPtr)
+__OMP_RTL(__kmpc_free, false, Void, Int32, VoidPtr, VoidPtr)
+
 __OMP_RTL(__last, false, Void, )
 
 #undef __OMP_RTL
@@ -469,6 +609,164 @@
                 AttributeSet(EnumAttr(InaccessibleMemOrArgMemOnly)),
                 AttributeSet(), {})
 
+__OMP_RTL_ATTRS(__kmpc_begin, AttributeSet(EnumAttr(NoUnwind)),
+                AttributeSet(), {})
+__OMP_RTL_ATTRS(__kmpc_end, AttributeSet(EnumAttr(NoUnwind)),
+                AttributeSet(), {})
+
+__OMP_RTL_ATTRS(__kmpc_reduce, AttributeSet(EnumAttr(NoUnwind)),
+                AttributeSet(), {})
+__OMP_RTL_ATTRS(__kmpc_reduce_nowait, AttributeSet(EnumAttr(NoUnwind)),
+                AttributeSet(), {})
+__OMP_RTL_ATTRS(__kmpc_end_reduce, AttributeSet(EnumAttr(NoUnwind)),
+                AttributeSet(), {})
+__OMP_RTL_ATTRS(__kmpc_end_reduce_nowait, AttributeSet(EnumAttr(NoUnwind)),
+                AttributeSet(), {})
+
+__OMP_RTL_ATTRS(__kmpc_ordered, AttributeSet(EnumAttr(NoUnwind)),
+                AttributeSet(), {})
+__OMP_RTL_ATTRS(__kmpc_end_ordered, AttributeSet(EnumAttr(NoUnwind)),
+                AttributeSet(), {})
+
+__OMP_RTL_ATTRS(__kmpc_for_static_init_4, AttributeSet(EnumAttr(NoUnwind)),
+                AttributeSet(), {})
+__OMP_RTL_ATTRS(__kmpc_for_static_init_4u, AttributeSet(EnumAttr(NoUnwind)),
+                AttributeSet(), {})
+__OMP_RTL_ATTRS(__kmpc_for_static_init_8, AttributeSet(EnumAttr(NoUnwind)),
+                AttributeSet(), {})
+__OMP_RTL_ATTRS(__kmpc_for_static_init_8u, AttributeSet(EnumAttr(NoUnwind)),
+                AttributeSet(), {})
+__OMP_RTL_ATTRS(__kmpc_for_static_fini, AttributeSet(EnumAttr(NoUnwind)),
+                AttributeSet(), {})
+__OMP_RTL_ATTRS(__kmpc_dist_dispatch_init_4, AttributeSet(EnumAttr(NoUnwind)),
+                AttributeSet(), {})
+__OMP_RTL_ATTRS(__kmpc_dist_dispatch_init_4u, AttributeSet(EnumAttr(NoUnwind)),
+                AttributeSet(), {})
+__OMP_RTL_ATTRS(__kmpc_dist_dispatch_init_8, AttributeSet(EnumAttr(NoUnwind)),
+                AttributeSet(), {})
+__OMP_RTL_ATTRS(__kmpc_dist_dispatch_init_8u, AttributeSet(EnumAttr(NoUnwind)),
+                AttributeSet(), {})
+__OMP_RTL_ATTRS(__kmpc_dispatch_init_4, AttributeSet(EnumAttr(NoUnwind)),
+                AttributeSet(), {})
+__OMP_RTL_ATTRS(__kmpc_dispatch_init_4u, AttributeSet(EnumAttr(NoUnwind)),
+                AttributeSet(), {})
+__OMP_RTL_ATTRS(__kmpc_dispatch_init_8, AttributeSet(EnumAttr(NoUnwind)),
+                AttributeSet(), {})
+__OMP_RTL_ATTRS(__kmpc_dispatch_init_8u, AttributeSet(EnumAttr(NoUnwind)),
+                AttributeSet(), {})
+__OMP_RTL_ATTRS(__kmpc_dispatch_next_4, AttributeSet(EnumAttr(NoUnwind)),
+                AttributeSet(), {})
+__OMP_RTL_ATTRS(__kmpc_dispatch_next_4u, AttributeSet(EnumAttr(NoUnwind)),
+                AttributeSet(), {})
+__OMP_RTL_ATTRS(__kmpc_dispatch_next_8, AttributeSet(EnumAttr(NoUnwind)),
+                AttributeSet(), {})
+__OMP_RTL_ATTRS(__kmpc_dispatch_next_8u, AttributeSet(EnumAttr(NoUnwind)),
+                AttributeSet(), {})
+__OMP_RTL_ATTRS(__kmpc_dispatch_fini_4, AttributeSet(EnumAttr(NoUnwind)),
+                AttributeSet(), {})
+__OMP_RTL_ATTRS(__kmpc_dispatch_fini_4u, AttributeSet(EnumAttr(NoUnwind)),
+                AttributeSet(), {})
+__OMP_RTL_ATTRS(__kmpc_dispatch_fini_8, AttributeSet(EnumAttr(NoUnwind)),
+                AttributeSet(), {})
+__OMP_RTL_ATTRS(__kmpc_dispatch_fini_8u, AttributeSet(EnumAttr(NoUnwind)),
+                AttributeSet(), {})
+__OMP_RTL_ATTRS(__kmpc_team_static_init_4, AttributeSet(EnumAttr(NoUnwind)),
+                AttributeSet(), {})
+__OMP_RTL_ATTRS(__kmpc_team_static_init_4u, AttributeSet(EnumAttr(NoUnwind)),
+                AttributeSet(), {})
+__OMP_RTL_ATTRS(__kmpc_team_static_init_8, AttributeSet(EnumAttr(NoUnwind)),
+                AttributeSet(), {})
+__OMP_RTL_ATTRS(__kmpc_team_static_init_8u, AttributeSet(EnumAttr(NoUnwind)),
+                AttributeSet(), {})
+__OMP_RTL_ATTRS(__kmpc_dist_for_static_init_4, AttributeSet(EnumAttr(NoUnwind)),
+                AttributeSet(), {})
+__OMP_RTL_ATTRS(__kmpc_dist_for_static_init_4u, AttributeSet(EnumAttr(NoUnwind)),
+                AttributeSet(), {})
+__OMP_RTL_ATTRS(__kmpc_dist_for_static_init_8, AttributeSet(EnumAttr(NoUnwind)),
+                AttributeSet(), {})
+__OMP_RTL_ATTRS(__kmpc_dist_for_static_init_8u, AttributeSet(EnumAttr(NoUnwind)),
+                AttributeSet(), {})
+
+__OMP_RTL_ATTRS(__kmpc_single, AttributeSet(EnumAttr(NoUnwind)),
+                AttributeSet(), {})
+__OMP_RTL_ATTRS(__kmpc_end_single, AttributeSet(EnumAttr(NoUnwind)),
+                AttributeSet(), {})
+
+__OMP_RTL_ATTRS(__kmpc_omp_task_alloc, AttributeSet(EnumAttr(NoUnwind)),
+                AttributeSet(), {})
+__OMP_RTL_ATTRS(__kmpc_omp_task, AttributeSet(EnumAttr(NoUnwind)),
+                AttributeSet(), {})
+__OMP_RTL_ATTRS(__kmpc_end_taskgroup, AttributeSet(EnumAttr(NoUnwind)),
+                AttributeSet(), {})
+__OMP_RTL_ATTRS(__kmpc_taskgroup, AttributeSet(EnumAttr(NoUnwind)),
+                AttributeSet(), {})
+__OMP_RTL_ATTRS(__kmpc_omp_task_begin_if0, AttributeSet(EnumAttr(NoUnwind)),
+                AttributeSet(), {})
+__OMP_RTL_ATTRS(__kmpc_omp_task_complete_if0, AttributeSet(EnumAttr(NoUnwind)),
+                AttributeSet(), {})
+__OMP_RTL_ATTRS(__kmpc_omp_task_with_deps, AttributeSet(EnumAttr(NoUnwind)),
+                AttributeSet(), {})
+__OMP_RTL_ATTRS(__kmpc_taskloop, AttributeSet(EnumAttr(NoUnwind)),
+                AttributeSet(), {})
+__OMP_RTL_ATTRS(__kmpc_task_allow_completion_event,
+                AttributeSet(EnumAttr(NoUnwind)),
+                AttributeSet(), {})
+__OMP_RTL_ATTRS(__kmpc_omp_target_task_alloc,
+                AttributeSet(EnumAttr(NoUnwind)),
+                AttributeSet(), {})
+__OMP_RTL_ATTRS(__kmpc_taskred_modifier_init,
+                AttributeSet(EnumAttr(NoUnwind)),
+                AttributeSet(), {})
+__OMP_RTL_ATTRS(__kmpc_taskred_init,
+                AttributeSet(EnumAttr(NoUnwind)),
+                AttributeSet(), {})
+__OMP_RTL_ATTRS(__kmpc_task_reduction_modifier_fini,
+                AttributeSet(EnumAttr(NoUnwind)),
+                AttributeSet(), {})
+__OMP_RTL_ATTRS(__kmpc_task_reduction_get_th_data,
+                AttributeSet(EnumAttr(NoUnwind)),
+                AttributeSet(), {})
+__OMP_RTL_ATTRS(__kmpc_task_reduction_init,
+                AttributeSet(EnumAttr(NoUnwind)),
+                AttributeSet(), {})
+__OMP_RTL_ATTRS(__kmpc_task_reduction_modifier_init,
+                AttributeSet(EnumAttr(NoUnwind)),
+                AttributeSet(), {})
+__OMP_RTL_ATTRS(__kmpc_proxy_task_completed_ooo,
+                AttributeSet(EnumAttr(NoUnwind)),
+                AttributeSet(), {})
+
+__OMP_RTL_ATTRS(__kmpc_omp_wait_deps, AttributeSet(EnumAttr(NoUnwind)),
+                AttributeSet(), {})
+__OMP_RTL_ATTRS(__kmpc_cancellationpoint, AttributeSet(EnumAttr(NoUnwind)),
+                AttributeSet(), {})
+
+__OMP_RTL_ATTRS(__kmpc_fork_teams, AttributeSet(EnumAttr(NoUnwind)),
+                AttributeSet(), {})
+__OMP_RTL_ATTRS(__kmpc_push_num_teams, AttributeSet(EnumAttr(NoUnwind)),
+                AttributeSet(), {})
+
+__OMP_RTL_ATTRS(__kmpc_copyprivate, AttributeSet(EnumAttr(NoUnwind)),
+                AttributeSet(), {})
+__OMP_RTL_ATTRS(__kmpc_threadprivate_cached, AttributeSet(EnumAttr(NoUnwind)),
+                AttributeSet(), {})
+__OMP_RTL_ATTRS(__kmpc_threadprivate_register, AttributeSet(EnumAttr(NoUnwind)),
+                AttributeSet(), {})
+
+__OMP_RTL_ATTRS(__kmpc_doacross_init, AttributeSet(EnumAttr(NoUnwind)),
+                AttributeSet(), {})
+__OMP_RTL_ATTRS(__kmpc_doacross_post, AttributeSet(EnumAttr(NoUnwind)),
+                AttributeSet(), {})
+__OMP_RTL_ATTRS(__kmpc_doacross_wait, AttributeSet(EnumAttr(NoUnwind)),
+                AttributeSet(), {})
+__OMP_RTL_ATTRS(__kmpc_doacross_fini, AttributeSet(EnumAttr(NoUnwind)),
+                AttributeSet(), {})
+
+__OMP_RTL_ATTRS(__kmpc_alloc, AttributeSet(EnumAttr(NoUnwind)),
+                AttributeSet(), {})
+__OMP_RTL_ATTRS(__kmpc_free, AttributeSet(EnumAttr(NoUnwind)),
+                AttributeSet(), {})
+
 #undef __OMP_RTL_ATTRS
 #undef OMP_RTL_ATTRS
 #undef AttributeSet
diff --git a/llvm/test/Transforms/OpenMP/add_attributes.ll b/llvm/test/Transforms/OpenMP/add_attributes.ll
--- a/llvm/test/Transforms/OpenMP/add_attributes.ll
+++ b/llvm/test/Transforms/OpenMP/add_attributes.ll
@@ -495,6 +495,144 @@
 
 declare void @__kmpc_end_critical(%struct.ident_t*, i32, [8 x i32]*)
 
+declare void @__kmpc_begin(%struct.ident_t*, i32)
+
+declare void @__kmpc_end(%struct.ident_t*)
+
+declare i32 @__kmpc_reduce(%struct.ident_t*, i32, i32, i64, i8*, void (i8*, i8*)*, [8 x i32]*)
+
+declare i32 @__kmpc_reduce_nowait(%struct.ident_t*, i32, i32, i64, i8*, void (i8*, i8*)*, [8 x i32]*)
+
+declare void @__kmpc_end_reduce(%struct.ident_t*, i32, [8 x i32]*)
+
+declare void @__kmpc_end_reduce_nowait(%struct.ident_t*, i32, [8 x i32]*)
+
+declare void @__kmpc_ordered(%struct.ident_t*, i32)
+
+declare void @__kmpc_end_ordered(%struct.ident_t*, i32)
+
+declare void @__kmpc_for_static_init_4(%struct.ident_t*, i32, i32, i32*, i32*, i32*, i32*, i32, i32)
+
+declare void @__kmpc_for_static_init_4u(%struct.ident_t*, i32, i32, i32*, i32*, i32*, i32*, i32, i32)
+
+declare void @__kmpc_for_static_init_8(%struct.ident_t*, i32, i32, i32*, i64*, i64*, i64*, i64, i64)
+
+declare void @__kmpc_for_static_init_8u(%struct.ident_t*, i32, i32, i32*, i64*, i64*, i64*, i64, i64)
+
+declare void @__kmpc_for_static_fini(%struct.ident_t*, i32)
+
+declare void @__kmpc_team_static_init_4(%struct.ident_t*, i32, i32*, i32*, i32*, i32*, i32, i32)
+
+declare void @__kmpc_team_static_init_4u(%struct.ident_t*, i32, i32*, i32*, i32*, i32*, i32, i32)
+
+declare void @__kmpc_team_static_init_8(%struct.ident_t*, i32, i32*, i64*, i64*, i64*, i64, i64)
+
+declare void @__kmpc_team_static_init_8u(%struct.ident_t*, i32, i32*, i64*, i64*, i64*, i64, i64)
+
+declare void @__kmpc_dist_for_static_init_4(%struct.ident_t*, i32, i32, i32*, i32*, i32*, i32*, i32*, i32, i32)
+
+declare void @__kmpc_dist_for_static_init_4u(%struct.ident_t*, i32, i32, i32*, i32*, i32*, i32*, i32*, i32, i32)
+
+declare void @__kmpc_dist_for_static_init_8(%struct.ident_t*, i32, i32, i32*, i64*, i64*, i64*, i64*, i64, i64)
+
+declare void @__kmpc_dist_for_static_init_8u(%struct.ident_t*, i32, i32, i32*, i64*, i64*, i64*, i64*, i64, i64)
+
+declare i32 @__kmpc_single(%struct.ident_t*, i32)
+
+declare void @__kmpc_end_single(%struct.ident_t*, i32)
+
+declare i8* @__kmpc_omp_task_alloc(%struct.ident_t*, i32, i32, i64, i64, i32 (i32, i8*)*)
+
+declare i32 @__kmpc_omp_task(%struct.ident_t*, i32, i8*)
+
+declare void @__kmpc_end_taskgroup(%struct.ident_t*, i32)
+
+declare void @__kmpc_taskgroup(%struct.ident_t*, i32)
+
+declare void @__kmpc_dist_dispatch_init_4(%struct.ident_t*, i32, i32, i32*, i32, i32, i32, i32)
+
+declare void @__kmpc_dist_dispatch_init_4u(%struct.ident_t*, i32, i32, i32*, i32, i32, i32, i32)
+
+declare void @__kmpc_dist_dispatch_init_8(%struct.ident_t*, i32, i32, i32*, i64, i64, i64, i64)
+
+declare void @__kmpc_dist_dispatch_init_8u(%struct.ident_t*, i32, i32, i32*, i64, i64, i64, i64)
+
+declare void @__kmpc_dispatch_init_4(%struct.ident_t*, i32, i32, i32, i32, i32, i32)
+
+declare void @__kmpc_dispatch_init_4u(%struct.ident_t*, i32, i32, i32, i32, i32, i32)
+
+declare void @__kmpc_dispatch_init_8(%struct.ident_t*, i32, i32, i64, i64, i64, i64)
+
+declare void @__kmpc_dispatch_init_8u(%struct.ident_t*, i32, i32, i64, i64, i64, i64)
+
+declare i32 @__kmpc_dispatch_next_4(%struct.ident_t*, i32, i32*, i32*, i32*, i32*)
+
+declare i32 @__kmpc_dispatch_next_4u(%struct.ident_t*, i32, i32*, i32*, i32*, i32*)
+
+declare i32 @__kmpc_dispatch_next_8(%struct.ident_t*, i32, i32*, i64*, i64*, i64*)
+
+declare i32 @__kmpc_dispatch_next_8u(%struct.ident_t*, i32, i32*, i64*, i64*, i64*)
+
+declare void @__kmpc_dispatch_fini_4(%struct.ident_t*, i32)
+
+declare void @__kmpc_dispatch_fini_4u(%struct.ident_t*, i32)
+
+declare void @__kmpc_dispatch_fini_8(%struct.ident_t*, i32)
+
+declare void @__kmpc_dispatch_fini_8u(%struct.ident_t*, i32)
+
+declare void @__kmpc_omp_task_begin_if0(%struct.ident_t*, i32, i8*)
+
+declare void @__kmpc_omp_task_complete_if0(%struct.ident_t*, i32, i8*)
+
+declare i32 @__kmpc_omp_task_with_deps(%struct.ident_t*, i32, i8*, i32, i8*, i32, i8*)
+
+declare void @__kmpc_omp_wait_deps(%struct.ident_t*, i32, i32, i8*, i32, i8*)
+
+declare i32 @__kmpc_cancellationpoint(%struct.ident_t*, i32, i32)
+
+declare void @__kmpc_push_num_teams(%struct.ident_t*, i32, i32, i32)
+
+declare void @__kmpc_fork_teams(%struct.ident_t*, i32, void (i32*, i32*, ...)*, ...)
+
+declare void @__kmpc_taskloop(%struct.ident_t*, i32, i8*, i32, i64*, i64*, i64, i32, i32, i64, i8*)
+
+declare i8* @__kmpc_omp_target_task_alloc(%struct.ident_t*, i32, i32, i64, i64, i8*, i64)
+
+declare i8* @__kmpc_taskred_modifier_init(i8*, i32, i32, i32, i8*)
+
+declare i8* @__kmpc_taskred_init(i32, i32, i8*)
+
+declare void @__kmpc_task_reduction_modifier_fini(i8*, i32, i32)
+
+declare void @__kmpc_copyprivate(%struct.ident_t*, i32, i64, i8*, void (i8*, i8*)*, i32)
+
+declare i8* @__kmpc_threadprivate_cached(%struct.ident_t*, i32, i8*, i64, i8***)
+
+declare void @__kmpc_threadprivate_register(%struct.ident_t*, i8*, i8* (i8*)*, i8* (i8*, i8*)*, void (i8*)*)
+
+declare void @__kmpc_doacross_init(%struct.ident_t*, i32, i32, i8*)
+
+declare void @__kmpc_doacross_wait(%struct.ident_t*, i32, i64*)
+
+declare void @__kmpc_doacross_post(%struct.ident_t*, i32, i64*)
+
+declare void @__kmpc_doacross_fini(%struct.ident_t*, i32)
+
+declare i8* @__kmpc_alloc(i32, i64, i8*)
+
+declare void @__kmpc_free(i32, i8*, i8*)
+
+declare i8* @__kmpc_task_allow_completion_event(%struct.ident_t*, i32, i8*)
+
+declare i8* @__kmpc_task_reduction_get_th_data(i32, i8*, i8*)
+
+declare i8* @__kmpc_task_reduction_init(i32, i32, i8*)
+
+declare i8* @__kmpc_task_reduction_modifier_init(i8*, i32, i32, i32, i8*)
+
+declare void @__kmpc_proxy_task_completed_ooo(i8*)
+
 ; CHECK: ; Function Attrs: nounwind
 ; CHECK-NEXT: declare dso_local void @omp_set_num_threads(i32)
 
@@ -771,6 +909,210 @@
 ; CHECK: Function Attrs: inaccessiblemem_or_argmemonly
 ; CHECK-NEXT: declare void @__kmpc_end_critical(%struct.ident_t*, i32, [8 x i32]*)
 
+; CHECK: Function Attrs: nounwind
+; CHECK-NEXT: declare void @__kmpc_begin(%struct.ident_t*, i32)
+
+; CHECK: Function Attrs: nounwind
+; CHECK-NEXT: declare void @__kmpc_end(%struct.ident_t*)
+
+; CHECK: Function Attrs: nounwind
+; CHECK-NEXT: declare i32 @__kmpc_reduce(%struct.ident_t*, i32, i32, i64, i8*, void (i8*, i8*)*, [8 x i32]*)
+
+; CHECK: Function Attrs: nounwind
+; CHECK-NEXT: declare i32 @__kmpc_reduce_nowait(%struct.ident_t*, i32, i32, i64, i8*, void (i8*, i8*)*, [8 x i32]*)
+
+; CHECK: Function Attrs: nounwind
+; CHECK-NEXT: declare void @__kmpc_end_reduce(%struct.ident_t*, i32, [8 x i32]*)
+
+; CHECK: Function Attrs: nounwind
+; CHECK-NEXT: declare void @__kmpc_end_reduce_nowait(%struct.ident_t*, i32, [8 x i32]*)
+
+; CHECK: Function Attrs: nounwind
+; CHECK-NEXT: declare void @__kmpc_ordered(%struct.ident_t*, i32)
+
+; CHECK: Function Attrs: nounwind
+; CHECK-NEXT: declare void @__kmpc_end_ordered(%struct.ident_t*, i32)
+
+; CHECK: Function Attrs: nounwind
+; CHECK-NEXT: declare void @__kmpc_for_static_init_4(%struct.ident_t*, i32, i32, i32*, i32*, i32*, i32*, i32, i32)
+
+; CHECK: Function Attrs: nounwind
+; CHECK-NEXT: declare void @__kmpc_for_static_init_4u(%struct.ident_t*, i32, i32, i32*, i32*, i32*, i32*, i32, i32)
+
+; CHECK: Function Attrs: nounwind
+; CHECK-NEXT: declare void @__kmpc_for_static_init_8(%struct.ident_t*, i32, i32, i32*, i64*, i64*, i64*, i64, i64)
+
+; CHECK: Function Attrs: nounwind
+; CHECK-NEXT: declare void @__kmpc_for_static_init_8u(%struct.ident_t*, i32, i32, i32*, i64*, i64*, i64*, i64, i64)
+
+; CHECK: Function Attrs: nounwind
+; CHECK-NEXT: declare void @__kmpc_for_static_fini(%struct.ident_t*, i32)
+
+; CHECK: Function Attrs: nounwind
+; CHECK-NEXT: declare void @__kmpc_team_static_init_4(%struct.ident_t*, i32, i32*, i32*, i32*, i32*, i32, i32)
+
+; CHECK: Function Attrs: nounwind
+; CHECK-NEXT: declare void @__kmpc_team_static_init_4u(%struct.ident_t*, i32, i32*, i32*, i32*, i32*, i32, i32)
+
+; CHECK: Function Attrs: nounwind
+; CHECK-NEXT: declare void @__kmpc_team_static_init_8(%struct.ident_t*, i32, i32*, i64*, i64*, i64*, i64, i64)
+
+; CHECK: Function Attrs: nounwind
+; CHECK-NEXT: declare void @__kmpc_team_static_init_8u(%struct.ident_t*, i32, i32*, i64*, i64*, i64*, i64, i64)
+
+; CHECK: Function Attrs: nounwind
+; CHECK-NEXT: declare void @__kmpc_dist_for_static_init_4(%struct.ident_t*, i32, i32, i32*, i32*, i32*, i32*, i32*, i32, i32)
+
+; CHECK: Function Attrs: nounwind
+; CHECK-NEXT: declare void @__kmpc_dist_for_static_init_4u(%struct.ident_t*, i32, i32, i32*, i32*, i32*, i32*, i32*, i32, i32)
+
+; CHECK: Function Attrs: nounwind
+; CHECK-NEXT: declare void @__kmpc_dist_for_static_init_8(%struct.ident_t*, i32, i32, i32*, i64*, i64*, i64*, i64*, i64, i64)
+
+; CHECK: Function Attrs: nounwind
+; CHECK-NEXT: declare void @__kmpc_dist_for_static_init_8u(%struct.ident_t*, i32, i32, i32*, i64*, i64*, i64*, i64*, i64, i64)
+
+; CHECK: Function Attrs: nounwind
+; CHECK-NEXT: declare i32 @__kmpc_single(%struct.ident_t*, i32)
+
+; CHECK: Function Attrs: nounwind
+; CHECK-NEXT: declare void @__kmpc_end_single(%struct.ident_t*, i32)
+
+; CHECK: Function Attrs: nounwind
+; CHECK-NEXT: declare i8* @__kmpc_omp_task_alloc(%struct.ident_t*, i32, i32, i64, i64, i32 (i32, i8*)*)
+
+; CHECK: Function Attrs: nounwind
+; CHECK-NEXT: declare i32 @__kmpc_omp_task(%struct.ident_t*, i32, i8*)
+
+; CHECK: Function Attrs: nounwind
+; CHECK-NEXT: declare void @__kmpc_end_taskgroup(%struct.ident_t*, i32)
+
+; CHECK: Function Attrs: nounwind
+; CHECK-NEXT: declare void @__kmpc_taskgroup(%struct.ident_t*, i32)
+
+; CHECK: Function Attrs: nounwind
+; CHECK-NEXT: declare void @__kmpc_dist_dispatch_init_4(%struct.ident_t*, i32, i32, i32*, i32, i32, i32, i32)
+
+; CHECK: Function Attrs: nounwind
+; CHECK-NEXT: declare void @__kmpc_dist_dispatch_init_4u(%struct.ident_t*, i32, i32, i32*, i32, i32, i32, i32)
+
+; CHECK: Function Attrs: nounwind
+; CHECK-NEXT: declare void @__kmpc_dist_dispatch_init_8(%struct.ident_t*, i32, i32, i32*, i64, i64, i64, i64)
+
+; CHECK: Function Attrs: nounwind
+; CHECK-NEXT: declare void @__kmpc_dist_dispatch_init_8u(%struct.ident_t*, i32, i32, i32*, i64, i64, i64, i64)
+
+; CHECK: Function Attrs: nounwind
+; CHECK-NEXT: declare void @__kmpc_dispatch_init_4(%struct.ident_t*, i32, i32, i32, i32, i32, i32)
+
+; CHECK: Function Attrs: nounwind
+; CHECK-NEXT: declare void @__kmpc_dispatch_init_4u(%struct.ident_t*, i32, i32, i32, i32, i32, i32)
+
+; CHECK: Function Attrs: nounwind
+; CHECK-NEXT: declare void @__kmpc_dispatch_init_8(%struct.ident_t*, i32, i32, i64, i64, i64, i64)
+
+; CHECK: Function Attrs: nounwind
+; CHECK-NEXT: declare void @__kmpc_dispatch_init_8u(%struct.ident_t*, i32, i32, i64, i64, i64, i64)
+
+; CHECK: Function Attrs: nounwind
+; CHECK-NEXT: declare i32 @__kmpc_dispatch_next_4(%struct.ident_t*, i32, i32*, i32*, i32*, i32*)
+
+; CHECK: Function Attrs: nounwind
+; CHECK-NEXT: declare i32 @__kmpc_dispatch_next_4u(%struct.ident_t*, i32, i32*, i32*, i32*, i32*)
+
+; CHECK: Function Attrs: nounwind
+; CHECK-NEXT: declare i32 @__kmpc_dispatch_next_8(%struct.ident_t*, i32, i32*, i64*, i64*, i64*)
+
+; CHECK: Function Attrs: nounwind
+; CHECK-NEXT: declare i32 @__kmpc_dispatch_next_8u(%struct.ident_t*, i32, i32*, i64*, i64*, i64*)
+
+; CHECK: Function Attrs: nounwind
+; CHECK-NEXT: declare void @__kmpc_dispatch_fini_4(%struct.ident_t*, i32)
+
+; CHECK: Function Attrs: nounwind
+; CHECK-NEXT: declare void @__kmpc_dispatch_fini_4u(%struct.ident_t*, i32)
+
+; CHECK: Function Attrs: nounwind
+; CHECK-NEXT: declare void @__kmpc_dispatch_fini_8(%struct.ident_t*, i32)
+
+; CHECK: Function Attrs: nounwind
+; CHECK-NEXT: declare void @__kmpc_dispatch_fini_8u(%struct.ident_t*, i32)
+
+; CHECK: Function Attrs: nounwind
+; CHECK-NEXT: declare void @__kmpc_omp_task_begin_if0(%struct.ident_t*, i32, i8*)
+
+; CHECK: Function Attrs: nounwind
+; CHECK-NEXT: declare void @__kmpc_omp_task_complete_if0(%struct.ident_t*, i32, i8*)
+
+; CHECK: Function Attrs: nounwind
+; CHECK-NEXT: declare i32 @__kmpc_omp_task_with_deps(%struct.ident_t*, i32, i8*, i32, i8*, i32, i8*)
+
+; CHECK: Function Attrs: nounwind
+; CHECK-NEXT: declare void @__kmpc_omp_wait_deps(%struct.ident_t*, i32, i32, i8*, i32, i8*)
+
+; CHECK: Function Attrs: nounwind
+; CHECK-NEXT: declare i32 @__kmpc_cancellationpoint(%struct.ident_t*, i32, i32)
+
+; CHECK: Function Attrs: nounwind
+; CHECK-NEXT: declare void @__kmpc_push_num_teams(%struct.ident_t*, i32, i32, i32)
+
+; CHECK: Function Attrs: nounwind
+; CHECK-NEXT: declare void @__kmpc_fork_teams(%struct.ident_t*, i32, void (i32*, i32*, ...)*, ...)
+
+; CHECK: Function Attrs: nounwind
+; CHECK-NEXT: declare void @__kmpc_taskloop(%struct.ident_t*, i32, i8*, i32, i64*, i64*, i64, i32, i32, i64, i8*)
+
+; CHECK: Function Attrs: nounwind
+; CHECK-NEXT: declare i8* @__kmpc_omp_target_task_alloc(%struct.ident_t*, i32, i32, i64, i64, i8*, i64)
+
+; CHECK: Function Attrs: nounwind
+; CHECK-NEXT: declare i8* @__kmpc_taskred_modifier_init(i8*, i32, i32, i32, i8*)
+
+; CHECK: Function Attrs: nounwind
+; CHECK-NEXT: declare i8* @__kmpc_taskred_init(i32, i32, i8*)
+
+; CHECK: Function Attrs: nounwind
+; CHECK-NEXT: declare void @__kmpc_task_reduction_modifier_fini(i8*, i32, i32)
+
+; CHECK: Function Attrs: nounwind
+; CHECK-NEXT: declare void @__kmpc_copyprivate(%struct.ident_t*, i32, i64, i8*, void (i8*, i8*)*, i32)
+
+; CHECK: Function Attrs: nounwind
+; CHECK-NEXT: declare i8* @__kmpc_threadprivate_cached(%struct.ident_t*, i32, i8*, i64, i8***)
+
+; CHECK: Function Attrs: nounwind
+; CHECK-NEXT: declare void @__kmpc_threadprivate_register(%struct.ident_t*, i8*, i8* (i8*)*, i8* (i8*, i8*)*, void (i8*)*)
+
+; CHECK: Function Attrs: nounwind
+; CHECK-NEXT: declare void @__kmpc_doacross_init(%struct.ident_t*, i32, i32, i8*)
+
+; CHECK: Function Attrs: nounwind
+; CHECK-NEXT: declare void @__kmpc_doacross_wait(%struct.ident_t*, i32, i64*)
+
+; CHECK: Function Attrs: nounwind
+; CHECK-NEXT: declare void @__kmpc_doacross_post(%struct.ident_t*, i32, i64*)
+
+; CHECK: Function Attrs: nounwind
+; CHECK-NEXT: declare void @__kmpc_doacross_fini(%struct.ident_t*, i32)
+
+; CHECK: Function Attrs: nounwind
+; CHECK-NEXT: declare i8* @__kmpc_alloc(i32, i64, i8*)
+
+; CHECK: Function Attrs: nounwind
+; CHECK-NEXT: declare void @__kmpc_free(i32, i8*, i8*)
+
+; CHECK: Function Attrs: nounwind
+; CHECK-NEXT: declare i8* @__kmpc_task_allow_completion_event(%struct.ident_t*, i32, i8*)
+
+; CHECK: Function Attrs: nounwind
+; CHECK-NEXT: declare i8* @__kmpc_task_reduction_get_th_data(i32, i8*, i8*)
+
+; CHECK: Function Attrs: nounwind
+; CHECK-NEXT: declare i8* @__kmpc_task_reduction_init(i32, i32, i8*)
+
+; CHECK: Function Attrs: nounwind
+; CHECK-NEXT: declare i8* @__kmpc_task_reduction_modifier_init(i8*, i32, i32, i32, i8*)
+
 ; OPTIMISTIC: ; Function Attrs: inaccessiblememonly nofree nosync nounwind writeonly
 ; OPTIMISTIC-NEXT: declare dso_local void @omp_set_num_threads(i32)