diff --git a/llvm/lib/CodeGen/BranchRelaxation.cpp b/llvm/lib/CodeGen/BranchRelaxation.cpp
--- a/llvm/lib/CodeGen/BranchRelaxation.cpp
+++ b/llvm/lib/CodeGen/BranchRelaxation.cpp
@@ -482,11 +482,13 @@
       // restore blocks are just duplicated for each far branch.
       assert(!DestBB->isEntryBlock());
       MachineBasicBlock *PrevBB = &*std::prev(DestBB->getIterator());
-      if (auto *FT = PrevBB->getFallThrough()) {
-        assert(FT == DestBB);
-        TII->insertUnconditionalBranch(*PrevBB, FT, DebugLoc());
-        // Recalculate the block size.
-        BlockInfo[PrevBB->getNumber()].Size = computeBlockSize(*PrevBB);
+      if (PrevBB->terminators().empty()) {
+        if (auto *FT = PrevBB->getFallThrough()) {
+          assert(FT == DestBB);
+          TII->insertUnconditionalBranch(*PrevBB, FT, DebugLoc());
+          // Recalculate the block size.
+          BlockInfo[PrevBB->getNumber()].Size = computeBlockSize(*PrevBB);
+        }
       }
       // Now, RestoreBB could be placed directly before DestBB.
       MF->splice(DestBB->getIterator(), RestoreBB->getIterator());
diff --git a/llvm/test/CodeGen/AMDGPU/branch-relax-no-terminators.ll b/llvm/test/CodeGen/AMDGPU/branch-relax-no-terminators.ll
new file
--- /dev/null
+++ b/llvm/test/CodeGen/AMDGPU/branch-relax-no-terminators.ll
@@ -0,0 +1,722 @@
+; RUN: llc -mtriple=amdgcn-amd-amdhsa -mcpu=gfx90a -verify-machineinstrs -amdgpu-s-branch-bits=5 %s -o - | FileCheck %s
+
+; CHECK-LABEL: branch_no_terminators
+define void @branch_no_terminators(i32 addrspace(1)* %arg) #0 {
+; CHECK: %bb.0:
+; CHECK: s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0)
+; CHECK: s_or_saveexec_b64 s[4:5], -1
+; CHECK: buffer_store_dword v0, off, s[0:3], s32 ; 4-byte Folded Spill
+; CHECK: buffer_store_dword v1, off, s[0:3], s32 offset:4 ; 4-byte Folded Spill
+; CHECK: s_mov_b64 exec, s[4:5]
+; CHECK: v_writelane_b32 v0, s33, 0
+; CHECK: v_writelane_b32 v0, s34, 1
+; CHECK: v_writelane_b32 v0, s35, 2
+; CHECK: v_writelane_b32 v0, s36, 3
+; CHECK: v_writelane_b32 v0, s37, 4
+; CHECK: v_writelane_b32 v0, s38, 5
+; CHECK: v_writelane_b32 v0, s39, 6
+; CHECK: v_writelane_b32 v0, s40, 7
+; CHECK: v_writelane_b32 v0, s41, 8
+; CHECK: v_writelane_b32 v0, s42, 9
+; CHECK: v_writelane_b32 v0, s43, 10
+; CHECK: v_writelane_b32 v0, s44, 11
+; CHECK: v_writelane_b32 v0, s45, 12
+; CHECK: v_writelane_b32 v0, s46, 13
+; CHECK: v_writelane_b32 v0, s47, 14
+; CHECK: v_writelane_b32 v0, s48, 15
+; CHECK: v_writelane_b32 v0, s49, 16
+; CHECK: v_writelane_b32 v0, s50, 17
+; CHECK: v_writelane_b32 v0, s51, 18
+; CHECK: v_writelane_b32 v0, s52, 19
+; CHECK: v_writelane_b32 v0, s53, 20
+; CHECK: v_writelane_b32 v0, s54, 21
+; CHECK: v_writelane_b32 v0, s55, 22
+; CHECK: v_writelane_b32 v0, s56, 23
+; CHECK: v_writelane_b32 v0, s57, 24
+; CHECK: v_writelane_b32 v0, s58, 25
+; CHECK: v_writelane_b32 v0, s59, 26
+; CHECK: v_writelane_b32 v0, s60, 27
+; CHECK: v_writelane_b32 v0, s61, 28
+; CHECK: v_writelane_b32 v0, s62, 29
+; CHECK: v_writelane_b32 v0, s63, 30
+; CHECK: v_writelane_b32 v0, s64, 31
+; CHECK: v_writelane_b32 v0, s65, 32
+; CHECK: v_writelane_b32 v0, s66, 33
+; CHECK: v_writelane_b32 v0, s67, 34
+; CHECK: v_writelane_b32 v0, s68, 35
+; CHECK: v_writelane_b32 v0, s69, 36
+; CHECK: v_writelane_b32 v0, s70, 37
+; CHECK: v_writelane_b32 v0, s71, 38
+; CHECK: v_writelane_b32 v0, s72, 39
+; CHECK: v_writelane_b32 v0, s73, 40
+; CHECK: v_writelane_b32 v0, s74, 41
+; CHECK: v_writelane_b32 v0, s75, 42
+; CHECK: v_writelane_b32 v0, s76, 43
+; CHECK: v_writelane_b32 v0, s77, 44
+; CHECK: v_writelane_b32 v0, s78, 45
+; CHECK: v_writelane_b32 v0, s79, 46
+; CHECK: v_writelane_b32 v0, s80, 47
+; CHECK: v_writelane_b32 v0, s81, 48
+; CHECK: v_writelane_b32 v0, s82, 49
+; CHECK: v_writelane_b32 v0, s83, 50
+; CHECK: v_writelane_b32 v0, s84, 51
+; CHECK: v_writelane_b32 v0, s85, 52
+; CHECK: v_writelane_b32 v0, s86, 53
+; CHECK: v_writelane_b32 v0, s87, 54
+; CHECK: v_writelane_b32 v0, s88, 55
+; CHECK: v_writelane_b32 v0, s89, 56
+; CHECK: v_writelane_b32 v0, s90, 57
+; CHECK: v_writelane_b32 v0, s91, 58
+; CHECK: v_writelane_b32 v0, s92, 59
+; CHECK: v_writelane_b32 v0, s93, 60
+; CHECK: v_writelane_b32 v0, s94, 61
+; CHECK: v_writelane_b32 v0, s95, 62
+; CHECK: v_writelane_b32 v0, s96, 63
+; CHECK: v_writelane_b32 v1, s97, 0
+; CHECK: v_writelane_b32 v1, s98, 1
+; CHECK: v_writelane_b32 v1, s99, 2
+; CHECK: v_writelane_b32 v1, s100, 3
+; CHECK: v_writelane_b32 v1, s101, 4
+; CHECK: v_writelane_b32 v1, s30, 5
+; CHECK: v_writelane_b32 v1, s31, 6
+; CHECK: s_mov_b32 s81, s12
+; CHECK: s_cmp_eq_u32 s81, 0
+; CHECK: s_mov_b32 s0, 0
+; CHECK: s_mov_b32 s1, 0
+; CHECK: s_mov_b32 s2, 0
+; CHECK: s_mov_b32 s3, 0
+; CHECK: s_mov_b32 s4, 0
+; CHECK: s_mov_b32 s5, 0
+; CHECK: s_mov_b32 s6, 0
+; CHECK: s_mov_b32 s7, 0
+; CHECK: s_mov_b32 s8, 0
+; CHECK: s_mov_b32 s9, 0
+; CHECK: s_mov_b32 s10, 0
+; CHECK: s_mov_b32 s11, 0
+; CHECK: s_mov_b32 s12, 0
+; CHECK: s_mov_b32 s13, 0
+; CHECK: s_mov_b32 s14, 0
+; CHECK: s_mov_b32 s15, 0
+; CHECK: s_mov_b32 s16, 0
+; CHECK: s_mov_b32 s17, 0
+; CHECK: s_mov_b32 s18, 0
+; CHECK: s_mov_b32 s19, 0
+; CHECK: s_mov_b32 s20, 0
+; CHECK: s_mov_b32 s21, 0
+; CHECK: s_mov_b32 s22, 0
+; CHECK: s_mov_b32 s23, 0
+; CHECK: s_mov_b32 s24, 0
+; CHECK: s_mov_b32 s25, 0
+; CHECK: s_mov_b32 s26, 0
+; CHECK: s_mov_b32 s27, 0
+; CHECK: s_mov_b32 s28, 0
+; CHECK: s_mov_b32 s29, 0
+; CHECK: s_mov_b32 s30, 0
+; CHECK: s_mov_b32 s31, 0
+; CHECK: s_mov_b32 s32, 0
+; CHECK: s_mov_b32 s33, 0
+; CHECK: s_mov_b32 s34, 0
+; CHECK: s_mov_b32 s35, 0
+; CHECK: s_mov_b32 s36, 0
+; CHECK: s_mov_b32 s37, 0
+; CHECK: s_mov_b32 s38, 0
+; CHECK: s_mov_b32 s39, 0
+; CHECK: s_mov_b32 s40, 0
+; CHECK: s_mov_b32 s41, 0
+; CHECK: s_mov_b32 s42, 0
+; CHECK: s_mov_b32 s43, 0
+; CHECK: s_mov_b32 s44, 0
+; CHECK: s_mov_b32 s45, 0
+; CHECK: s_mov_b32 s46, 0
+; CHECK: s_mov_b32 s47, 0
+; CHECK: s_mov_b32 s48, 0
+; CHECK: s_mov_b32 s49, 0
+; CHECK: s_mov_b32 s50, 0
+; CHECK: s_mov_b32 s51, 0
+; CHECK: s_mov_b32 s52, 0
+; CHECK: s_mov_b32 s53, 0
+; CHECK: s_mov_b32 s54, 0
+; CHECK: s_mov_b32 s55, 0
+; CHECK: s_mov_b32 s56, 0
+; CHECK: s_mov_b32 s57, 0
+; CHECK: s_mov_b32 s58, 0
+; CHECK: s_mov_b32 s59, 0
+; CHECK: s_mov_b32 s60, 0
+; CHECK: s_mov_b32 s61, 0
+; CHECK: s_mov_b32 s62, 0
+; CHECK: s_mov_b32 s63, 0
+; CHECK: s_mov_b32 s64, 0
+; CHECK: s_mov_b32 s65, 0
+; CHECK: s_mov_b32 s66, 0
+; CHECK: s_mov_b32 s67, 0
+; CHECK: s_mov_b32 s68, 0
+; CHECK: s_mov_b32 s69, 0
+; CHECK: s_mov_b32 s70, 0
+; CHECK: s_mov_b32 s71, 0
+; CHECK: s_mov_b32 s72, 0
+; CHECK: s_mov_b32 s73, 0
+; CHECK: s_mov_b32 s74, 0
+; CHECK: s_mov_b32 s75, 0
+; CHECK: s_mov_b32 s76, 0
+; CHECK: s_mov_b32 s77, 0
+; CHECK: s_mov_b32 s78, 0
+; CHECK: s_mov_b32 s79, 0
+; CHECK: s_mov_b32 s80, 0
+; CHECK: s_mov_b32 s81, 0
+; CHECK: s_mov_b32 s82, 0
+; CHECK: s_mov_b32 s83, 0
+; CHECK: s_mov_b32 s84, 0
+; CHECK: s_mov_b32 s85, 0
+; CHECK: s_mov_b32 s86, 0
+; CHECK: s_mov_b32 s87, 0
+; CHECK: s_mov_b32 s88, 0
+; CHECK: s_mov_b32 s89, 0
+; CHECK: s_mov_b32 s90, 0
+; CHECK: s_mov_b32 s91, 0
+; CHECK: s_mov_b32 s92, 0
+; CHECK: s_mov_b32 s93, 0
+; CHECK: s_mov_b32 s94, 0
+; CHECK: s_mov_b32 s95, 0
+; CHECK: s_mov_b32 s96, 0
+; CHECK: s_mov_b32 s97, 0
+; CHECK: s_mov_b32 s98, 0
+; CHECK: s_mov_b32 s99, 0
+; CHECK: s_mov_b32 s100, 0
+; CHECK: s_mov_b32 s101, 0
+; CHECK: s_mov_b32 vcc_lo, 0
+; CHECK: s_mov_b32 vcc_hi, 0
+; CHECK: s_cbranch_scc0 .LBB0_1
+entry:
+  %cnd = tail call i32 @llvm.amdgcn.workgroup.id.x() #0
+  %sgpr0 = tail call i32 asm sideeffect "s_mov_b32 s0, 0", "={s0}"() #0
+  %sgpr1 = tail call i32 asm sideeffect "s_mov_b32 s1, 0", "={s1}"() #0
+  %sgpr2 = tail call i32 asm sideeffect "s_mov_b32 s2, 0", "={s2}"() #0
+  %sgpr3 = tail call i32 asm sideeffect "s_mov_b32 s3, 0", "={s3}"() #0
+  %sgpr4 = tail call i32 asm sideeffect "s_mov_b32 s4, 0", "={s4}"() #0
+  %sgpr5 = tail call i32 asm sideeffect "s_mov_b32 s5, 0", "={s5}"() #0
+  %sgpr6 = tail call i32 asm sideeffect "s_mov_b32 s6, 0", "={s6}"() #0
+  %sgpr7 = tail call i32 asm sideeffect "s_mov_b32 s7, 0", "={s7}"() #0
+  %sgpr8 = tail call i32 asm sideeffect "s_mov_b32 s8, 0", "={s8}"() #0
+  %sgpr9 = tail call i32 asm sideeffect "s_mov_b32 s9, 0", "={s9}"() #0
+  %sgpr10 = tail call i32 asm sideeffect "s_mov_b32 s10, 0", "={s10}"() #0
+  %sgpr11 = tail call i32 asm sideeffect "s_mov_b32 s11, 0", "={s11}"() #0
+  %sgpr12 = tail call i32 asm sideeffect "s_mov_b32 s12, 0", "={s12}"() #0
+  %sgpr13 = tail call i32 asm sideeffect "s_mov_b32 s13, 0", "={s13}"() #0
+  %sgpr14 = tail call i32 asm sideeffect "s_mov_b32 s14, 0", "={s14}"() #0
+  %sgpr15 = tail call i32 asm sideeffect "s_mov_b32 s15, 0", "={s15}"() #0
+  %sgpr16 = tail call i32 asm sideeffect "s_mov_b32 s16, 0", "={s16}"() #0
+  %sgpr17 = tail call i32 asm sideeffect "s_mov_b32 s17, 0", "={s17}"() #0
+  %sgpr18 = tail call i32 asm sideeffect "s_mov_b32 s18, 0", "={s18}"() #0
+  %sgpr19 = tail call i32 asm sideeffect "s_mov_b32 s19, 0", "={s19}"() #0
+  %sgpr20 = tail call i32 asm sideeffect "s_mov_b32 s20, 0", "={s20}"() #0
+  %sgpr21 = tail call i32 asm sideeffect "s_mov_b32 s21, 0", "={s21}"() #0
+  %sgpr22 = tail call i32 asm sideeffect "s_mov_b32 s22, 0", "={s22}"() #0
+  %sgpr23 = tail call i32 asm sideeffect "s_mov_b32 s23, 0", "={s23}"() #0
+  %sgpr24 = tail call i32 asm sideeffect "s_mov_b32 s24, 0", "={s24}"() #0
+  %sgpr25 = tail call i32 asm sideeffect "s_mov_b32 s25, 0", "={s25}"() #0
+  %sgpr26 = tail call i32 asm sideeffect "s_mov_b32 s26, 0", "={s26}"() #0
+  %sgpr27 = tail call i32 asm sideeffect "s_mov_b32 s27, 0", "={s27}"() #0
+  %sgpr28 = tail call i32 asm sideeffect "s_mov_b32 s28, 0", "={s28}"() #0
+  %sgpr29 = tail call i32 asm sideeffect "s_mov_b32 s29, 0", "={s29}"() #0
+  %sgpr30 = tail call i32 asm sideeffect "s_mov_b32 s30, 0", "={s30}"() #0
+  %sgpr31 = tail call i32 asm sideeffect "s_mov_b32 s31, 0", "={s31}"() #0
+  %sgpr32 = tail call i32 asm sideeffect "s_mov_b32 s32, 0", "={s32}"() #0
+  %sgpr33 = tail call i32 asm sideeffect "s_mov_b32 s33, 0", "={s33}"() #0
+  %sgpr34 = tail call i32 asm sideeffect "s_mov_b32 s34, 0", "={s34}"() #0
+  %sgpr35 = tail call i32 asm sideeffect "s_mov_b32 s35, 0", "={s35}"() #0
+  %sgpr36 = tail call i32 asm sideeffect "s_mov_b32 s36, 0", "={s36}"() #0
+  %sgpr37 = tail call i32 asm sideeffect "s_mov_b32 s37, 0", "={s37}"() #0
+  %sgpr38 = tail call i32 asm sideeffect "s_mov_b32 s38, 0", "={s38}"() #0
+  %sgpr39 = tail call i32 asm sideeffect "s_mov_b32 s39, 0", "={s39}"() #0
+  %sgpr40 = tail call i32 asm sideeffect "s_mov_b32 s40, 0", "={s40}"() #0
+  %sgpr41 = tail call i32 asm sideeffect "s_mov_b32 s41, 0", "={s41}"() #0
+  %sgpr42 = tail call i32 asm sideeffect "s_mov_b32 s42, 0", "={s42}"() #0
+  %sgpr43 = tail call i32 asm sideeffect "s_mov_b32 s43, 0", "={s43}"() #0
+  %sgpr44 = tail call i32 asm sideeffect "s_mov_b32 s44, 0", "={s44}"() #0
+  %sgpr45 = tail call i32 asm sideeffect "s_mov_b32 s45, 0", "={s45}"() #0
+  %sgpr46 = tail call i32 asm sideeffect "s_mov_b32 s46, 0", "={s46}"() #0
+  %sgpr47 = tail call i32 asm sideeffect "s_mov_b32 s47, 0", "={s47}"() #0
+  %sgpr48 = tail call i32 asm sideeffect "s_mov_b32 s48, 0", "={s48}"() #0
+  %sgpr49 = tail call i32 asm sideeffect "s_mov_b32 s49, 0", "={s49}"() #0
+  %sgpr50 = tail call i32 asm sideeffect "s_mov_b32 s50, 0", "={s50}"() #0
+  %sgpr51 = tail call i32 asm sideeffect "s_mov_b32 s51, 0", "={s51}"() #0
+  %sgpr52 = tail call i32 asm sideeffect "s_mov_b32 s52, 0", "={s52}"() #0
+  %sgpr53 = tail call i32 asm sideeffect "s_mov_b32 s53, 0", "={s53}"() #0
+  %sgpr54 = tail call i32 asm sideeffect "s_mov_b32 s54, 0", "={s54}"() #0
+  %sgpr55 = tail call i32 asm sideeffect "s_mov_b32 s55, 0", "={s55}"() #0
+  %sgpr56 = tail call i32 asm sideeffect "s_mov_b32 s56, 0", "={s56}"() #0
+  %sgpr57 = tail call i32 asm sideeffect "s_mov_b32 s57, 0", "={s57}"() #0
+  %sgpr58 = tail call i32 asm sideeffect "s_mov_b32 s58, 0", "={s58}"() #0
+  %sgpr59 = tail call i32 asm sideeffect "s_mov_b32 s59, 0", "={s59}"() #0
+  %sgpr60 = tail call i32 asm sideeffect "s_mov_b32 s60, 0", "={s60}"() #0
+  %sgpr61 = tail call i32 asm sideeffect "s_mov_b32 s61, 0", "={s61}"() #0
+  %sgpr62 = tail call i32 asm sideeffect "s_mov_b32 s62, 0", "={s62}"() #0
+  %sgpr63 = tail call i32 asm sideeffect "s_mov_b32 s63, 0", "={s63}"() #0
+  %sgpr64 = tail call i32 asm sideeffect "s_mov_b32 s64, 0", "={s64}"() #0
+  %sgpr65 = tail call i32 asm sideeffect "s_mov_b32 s65, 0", "={s65}"() #0
+  %sgpr66 = tail call i32 asm sideeffect "s_mov_b32 s66, 0", "={s66}"() #0
+  %sgpr67 = tail call i32 asm sideeffect "s_mov_b32 s67, 0", "={s67}"() #0
+  %sgpr68 = tail call i32 asm sideeffect "s_mov_b32 s68, 0", "={s68}"() #0
+  %sgpr69 = tail call i32 asm sideeffect "s_mov_b32 s69, 0", "={s69}"() #0
+  %sgpr70 = tail call i32 asm sideeffect "s_mov_b32 s70, 0", "={s70}"() #0
+  %sgpr71 = tail call i32 asm sideeffect "s_mov_b32 s71, 0", "={s71}"() #0
+  %sgpr72 = tail call i32 asm sideeffect "s_mov_b32 s72, 0", "={s72}"() #0
+  %sgpr73 = tail call i32 asm sideeffect "s_mov_b32 s73, 0", "={s73}"() #0
+  %sgpr74 = tail call i32 asm sideeffect "s_mov_b32 s74, 0", "={s74}"() #0
+  %sgpr75 = tail call i32 asm sideeffect "s_mov_b32 s75, 0", "={s75}"() #0
+  %sgpr76 = tail call i32 asm sideeffect "s_mov_b32 s76, 0", "={s76}"() #0
+  %sgpr77 = tail call i32 asm sideeffect "s_mov_b32 s77, 0", "={s77}"() #0
+  %sgpr78 = tail call i32 asm sideeffect "s_mov_b32 s78, 0", "={s78}"() #0
+  %sgpr79 = tail call i32 asm sideeffect "s_mov_b32 s79, 0", "={s79}"() #0
+  %sgpr80 = tail call i32 asm sideeffect "s_mov_b32 s80, 0", "={s80}"() #0
+  %sgpr81 = tail call i32 asm sideeffect "s_mov_b32 s81, 0", "={s81}"() #0
+  %sgpr82 = tail call i32 asm sideeffect "s_mov_b32 s82, 0", "={s82}"() #0
+  %sgpr83 = tail call i32 asm sideeffect "s_mov_b32 s83, 0", "={s83}"() #0
+  %sgpr84 = tail call i32 asm sideeffect "s_mov_b32 s84, 0", "={s84}"() #0
+  %sgpr85 = tail call i32 asm sideeffect "s_mov_b32 s85, 0", "={s85}"() #0
+  %sgpr86 = tail call i32 asm sideeffect "s_mov_b32 s86, 0", "={s86}"() #0
+  %sgpr87 = tail call i32 asm sideeffect "s_mov_b32 s87, 0", "={s87}"() #0
+  %sgpr88 = tail call i32 asm sideeffect "s_mov_b32 s88, 0", "={s88}"() #0
+  %sgpr89 = tail call i32 asm sideeffect "s_mov_b32 s89, 0", "={s89}"() #0
+  %sgpr90 = tail call i32 asm sideeffect "s_mov_b32 s90, 0", "={s90}"() #0
+  %sgpr91 = tail call i32 asm sideeffect "s_mov_b32 s91, 0", "={s91}"() #0
+  %sgpr92 = tail call i32 asm sideeffect "s_mov_b32 s92, 0", "={s92}"() #0
+  %sgpr93 = tail call i32 asm sideeffect "s_mov_b32 s93, 0", "={s93}"() #0
+  %sgpr94 = tail call i32 asm sideeffect "s_mov_b32 s94, 0", "={s94}"() #0
+  %sgpr95 = tail call i32 asm sideeffect "s_mov_b32 s95, 0", "={s95}"() #0
+  %sgpr96 = tail call i32 asm sideeffect "s_mov_b32 s96, 0", "={s96}"() #0
+  %sgpr97 = tail call i32 asm sideeffect "s_mov_b32 s97, 0", "={s97}"() #0
+  %sgpr98 = tail call i32 asm sideeffect "s_mov_b32 s98, 0", "={s98}"() #0
+  %sgpr99 = tail call i32 asm sideeffect "s_mov_b32 s99, 0", "={s99}"() #0
+  %sgpr100 = tail call i32 asm sideeffect "s_mov_b32 s100, 0", "={s100}"() #0
+  %sgpr101 = tail call i32 asm sideeffect "s_mov_b32 s101, 0", "={s101}"() #0
+  %vcc_lo = tail call i32 asm sideeffect "s_mov_b32 $0, 0", "={vcc_lo}"() #0
+  %vcc_hi = tail call i32 asm sideeffect "s_mov_b32 $0, 0", "={vcc_hi}"() #0
+  %cmp = icmp eq i32 %cnd, 0
+  br i1 %cmp, label %bb3, label %bb2 ; +8 dword branch
+; CHECK-LABEL: .LBB0_5: ; %entry
+; CHECK: s_not_b64 exec, exec
+; CHECK: buffer_store_dword v2, off, s[0:3], s32 offset:8
+; CHECK: v_writelane_b32 v2, s0, 0
+; CHECK: v_writelane_b32 v2, s1, 1
+; CHECK: s_getpc_b64 s[0:1]
+
+; CHECK-LABEL: .Lpost_getpc0:
+; CHECK: s_add_u32 s0, s0, (.LBB0_6-.Lpost_getpc0)&4294967295
+; CHECK: s_addc_u32 s1, s1, (.LBB0_6-.Lpost_getpc0)>>32
+; CHECK: s_setpc_b64 s[0:1]
+bb2: ; 68 bytes
+  ; 64 byte asm
+  %res = call i32 asm sideeffect
+   "v_nop_e64
+    v_nop_e64
+    v_nop_e64
+    v_nop_e64
+    v_nop_e64
+    v_nop_e64
+    v_nop_e64
+    v_nop_e64","=s"() #0
+; br label %bb3
+  %cmp1 = icmp eq i32 %res, 0
+  br i1 %cmp1, label %bb3, label %bb2
+
+; CHECK-LABEL: .LBB0_3:
+; CHECK: s_mov_b32 s4, s5
+; CHECK: s_mov_b32 s5, s6
+; CHECK: s_mov_b32 s6, s7
+; CHECK: s_mov_b32 s7, s8
+; CHECK: s_mov_b32 s8, s9
+; CHECK: s_mov_b32 s9, s10
+; CHECK: s_mov_b32 s10, s11
+; CHECK: s_mov_b32 s11, s12
+; CHECK: s_mov_b32 s12, s13
+; CHECK: s_mov_b32 s13, s14
+; CHECK: s_mov_b32 s14, s15
+; CHECK: s_mov_b32 s15, s16
+; CHECK: s_mov_b32 s16, s17
+; CHECK: s_mov_b32 s17, s18
+; CHECK: s_mov_b32 s18, s19
+; CHECK: s_mov_b32 s19, s20
+; CHECK: s_mov_b32 s20, s21
+; CHECK: s_mov_b32 s21, s22
+; CHECK: s_mov_b32 s22, s23
+; CHECK: s_mov_b32 s23, s24
+; CHECK: s_mov_b32 s24, s25
+; CHECK: s_mov_b32 s25, s26
+; CHECK: s_mov_b32 s26, s27
+; CHECK: s_mov_b32 s27, s28
+; CHECK: s_mov_b32 s28, s29
+; CHECK: s_mov_b32 s29, s30
+; CHECK: s_mov_b32 s30, s31
+; CHECK: s_mov_b32 s31, s34
+; CHECK: s_mov_b32 s34, s35
+; CHECK: s_mov_b32 s35, s36
+; CHECK: s_mov_b32 s36, s37
+; CHECK: s_mov_b32 s37, s38
+; CHECK: s_mov_b32 s38, s39
+; CHECK: s_mov_b32 s39, s40
+; CHECK: s_mov_b32 s40, s41
+; CHECK: s_mov_b32 s41, s42
+; CHECK: s_mov_b32 s42, s43
+; CHECK: s_mov_b32 s43, s44
+; CHECK: s_mov_b32 s44, s45
+; CHECK: s_mov_b32 s45, s46
+; CHECK: s_mov_b32 s46, s47
+; CHECK: s_mov_b32 s47, s48
+; CHECK: s_mov_b32 s48, s49
+; CHECK: s_mov_b32 s49, s50
+; CHECK: s_mov_b32 s50, s51
+; CHECK: s_mov_b32 s51, s52
+; CHECK: s_mov_b32 s52, s53
+; CHECK: s_mov_b32 s53, s54
+; CHECK: s_mov_b32 s54, s55
+; CHECK: s_mov_b32 s55, s56
+; CHECK: s_mov_b32 s56, s57
+; CHECK: s_mov_b32 s57, s58
+; CHECK: s_mov_b32 s58, s59
+; CHECK: s_mov_b32 s59, s60
+; CHECK: s_mov_b32 s60, s61
+; CHECK: s_mov_b32 s61, s62
+; CHECK: s_mov_b32 s62, s63
+; CHECK: s_mov_b32 s63, s64
+; CHECK: s_mov_b32 s64, s65
+; CHECK: s_mov_b32 s65, s66
+; CHECK: s_mov_b32 s66, s67
+; CHECK: s_mov_b32 s67, s68
+; CHECK: s_mov_b32 s68, s69
+; CHECK: s_mov_b32 s69, s70
+; CHECK: s_mov_b32 s70, s71
+; CHECK: s_mov_b32 s71, s72
+; CHECK: s_mov_b32 s72, s73
+; CHECK: s_mov_b32 s73, s74
+; CHECK: s_mov_b32 s74, s75
+; CHECK: s_mov_b32 s75, s76
+; CHECK: s_mov_b32 s76, s77
+; CHECK: s_mov_b32 s77, s78
+; CHECK: s_mov_b32 s78, s79
+; CHECK: s_mov_b32 s79, s80
+; CHECK: s_mov_b32 s80, vcc_hi
+; CHECK: s_mov_b32 vcc_hi, vcc_lo
+; CHECK: s_mov_b32 vcc_lo, s101
+; CHECK: s_mov_b32 s101, s100
+; CHECK: s_mov_b32 s100, s99
+; CHECK: s_mov_b32 s99, s98
+; CHECK: s_mov_b32 s98, s97
+; CHECK: s_mov_b32 s97, s96
+; CHECK: s_mov_b32 s96, s95
+; CHECK: s_mov_b32 s95, s94
+; CHECK: s_mov_b32 s94, s93
+; CHECK: s_mov_b32 s93, s92
+; CHECK: s_mov_b32 s92, s91
+; CHECK: s_mov_b32 s91, s90
+; CHECK: s_mov_b32 s90, s89
+; CHECK: s_mov_b32 s89, s88
+; CHECK: s_mov_b32 s88, s87
+; CHECK: s_mov_b32 s87, s86
+; CHECK: s_mov_b32 s86, s85
+; CHECK: s_mov_b32 s85, s84
+; CHECK: s_mov_b32 s84, s83
+; CHECK: s_mov_b32 s83, s82
+; CHECK: s_mov_b32 s82, s81
+; CHECK: v_readlane_b32 s81, v1, 7
+; CHECK: s_branch .LBB0_4
+
+; CHECK-LABEL: .LBB0_6: ; %bb3
+; CHECK: v_readlane_b32 s0, v2, 0
+; CHECK: v_readlane_b32 s1, v2, 1
+; CHECK: buffer_load_dword v2, off, s[0:3], s32 offset:8
+; CHECK: s_not_b64 exec, exec
+
+; CHECK-LABEL: .LBB0_4: ; %bb3
+; CHECK: ; reg use s0
+; CHECK: ; reg use s1
+; CHECK: ; reg use s2
+; CHECK: ; reg use s3
+; CHECK: ; reg use s4
+; CHECK: ; reg use s5
+; CHECK: ; reg use s6
+; CHECK: ; reg use s7
+; CHECK: ; reg use s8
+; CHECK: ; reg use s9
+; CHECK: ; reg use s10
+; CHECK: ; reg use s11
+; CHECK: ; reg use s12
+; CHECK: ; reg use s13
+; CHECK: ; reg use s14
+; CHECK: ; reg use s15
+; CHECK: ; reg use s16
+; CHECK: ; reg use s17
+; CHECK: ; reg use s18
+; CHECK: ; reg use s19
+; CHECK: ; reg use s20
+; CHECK: ; reg use s21
+; CHECK: ; reg use s22
+; CHECK: ; reg use s23
+; CHECK: ; reg use s24
+; CHECK: ; reg use s25
+; CHECK: ; reg use s26
+; CHECK: ; reg use s27
+; CHECK: ; reg use s28
+; CHECK: ; reg use s29
+; CHECK: ; reg use s30
+; CHECK: ; reg use s31
+; CHECK: v_readlane_b32 s30, v1, 5
+; CHECK: ; reg use s32
+; CHECK: ; reg use s33
+; CHECK: ; reg use s34
+; CHECK: ; reg use s35
+; CHECK: ; reg use s36
+; CHECK: ; reg use s37
+; CHECK: ; reg use s38
+; CHECK: ; reg use s39
+; CHECK: ; reg use s40
+; CHECK: ; reg use s41
+; CHECK: ; reg use s42
+; CHECK: ; reg use s43
+; CHECK: ; reg use s44
+; CHECK: ; reg use s45
+; CHECK: ; reg use s46
+; CHECK: ; reg use s47
+; CHECK: ; reg use s48
+; CHECK: ; reg use s49
+; CHECK: ; reg use s50
+; CHECK: ; reg use s51
+; CHECK: ; reg use s52
+; CHECK: ; reg use s53
+; CHECK: ; reg use s54
+; CHECK: ; reg use s55
+; CHECK: ; reg use s56
+; CHECK: ; reg use s57
+; CHECK: ; reg use s58
+; CHECK: ; reg use s59
+; CHECK: ; reg use s60
+; CHECK: ; reg use s61
+; CHECK: ; reg use s62
+; CHECK: ; reg use s63
+; CHECK: ; reg use s64
+; CHECK: ; reg use s65
+; CHECK: ; reg use s66
+; CHECK: ; reg use s67
+; CHECK: ; reg use s68
+; CHECK: ; reg use s69
+; CHECK: ; reg use s70
+; CHECK: ; reg use s71
+; CHECK: ; reg use s72
+; CHECK: ; reg use s73
+; CHECK: ; reg use s74
+; CHECK: ; reg use s75
+; CHECK: ; reg use s76
+; CHECK: ; reg use s77
+; CHECK: ; reg use s78
+; CHECK: ; reg use s79
+; CHECK: ; reg use s80
+; CHECK: ; reg use s81
+; CHECK: ; reg use s82
+; CHECK: ; reg use s83
+; CHECK: ; reg use s84
+; CHECK: ; reg use s85
+; CHECK: ; reg use s86
+; CHECK: ; reg use s87
+; CHECK: ; reg use s88
+; CHECK: ; reg use s89
+; CHECK: ; reg use s90
+; CHECK: ; reg use s91
+; CHECK: ; reg use s92
+; CHECK: ; reg use s93
+; CHECK: ; reg use s94
+; CHECK: ; reg use s95
+; CHECK: ; reg use s96
+; CHECK: ; reg use s97
+; CHECK: ; reg use s98
+; CHECK: ; reg use s99
+; CHECK: ; reg use s100
+; CHECK: ; reg use s101
+; CHECK: ; reg use vcc_lo
+; CHECK: ; reg use vcc_hi
+; CHECK: v_readlane_b32 s31, v1, 6
+; CHECK: v_readlane_b32 s101, v1, 4
+; CHECK: v_readlane_b32 s100, v1, 3
+; CHECK: v_readlane_b32 s99, v1, 2
+; CHECK: v_readlane_b32 s98, v1, 1
+; CHECK: v_readlane_b32 s97, v1, 0
+; CHECK: v_readlane_b32 s96, v0, 63
+; CHECK: v_readlane_b32 s95, v0, 62
+; CHECK: v_readlane_b32 s94, v0, 61
+; CHECK: v_readlane_b32 s93, v0, 60
+; CHECK: v_readlane_b32 s92, v0, 59
+; CHECK: v_readlane_b32 s91, v0, 58
+; CHECK: v_readlane_b32 s90, v0, 57
+; CHECK: v_readlane_b32 s89, v0, 56
+; CHECK: v_readlane_b32 s88, v0, 55
+; CHECK: v_readlane_b32 s87, v0, 54
+; CHECK: v_readlane_b32 s86, v0, 53
+; CHECK: v_readlane_b32 s85, v0, 52
+; CHECK: v_readlane_b32 s84, v0, 51
+; CHECK: v_readlane_b32 s83, v0, 50
+; CHECK: v_readlane_b32 s82, v0, 49
+; CHECK: v_readlane_b32 s81, v0, 48
+; CHECK: v_readlane_b32 s80, v0, 47
+; CHECK: v_readlane_b32 s79, v0, 46
+; CHECK: v_readlane_b32 s78, v0, 45
+; CHECK: v_readlane_b32 s77, v0, 44
+; CHECK: v_readlane_b32 s76, v0, 43
+; CHECK: v_readlane_b32 s75, v0, 42
+; CHECK: v_readlane_b32 s74, v0, 41
+; CHECK: v_readlane_b32 s73, v0, 40
+; CHECK: v_readlane_b32 s72, v0, 39
+; CHECK: v_readlane_b32 s71, v0, 38
+; CHECK: v_readlane_b32 s70, v0, 37
+; CHECK: v_readlane_b32 s69, v0, 36
+; CHECK: v_readlane_b32 s68, v0, 35
+; CHECK: v_readlane_b32 s67, v0, 34
+; CHECK: v_readlane_b32 s66, v0, 33
+; CHECK: v_readlane_b32 s65, v0, 32
+; CHECK: v_readlane_b32 s64, v0, 31
+; CHECK: v_readlane_b32 s63, v0, 30
+; CHECK: v_readlane_b32 s62, v0, 29
+; CHECK: v_readlane_b32 s61, v0, 28
+; CHECK: v_readlane_b32 s60, v0, 27
+; CHECK: v_readlane_b32 s59, v0, 26
+; CHECK: v_readlane_b32 s58, v0, 25
+; CHECK: v_readlane_b32 s57, v0, 24
+; CHECK: v_readlane_b32 s56, v0, 23
+; CHECK: v_readlane_b32 s55, v0, 22
+; CHECK: v_readlane_b32 s54, v0, 21
+; CHECK: v_readlane_b32 s53, v0, 20
+; CHECK: v_readlane_b32 s52, v0, 19
+; CHECK: v_readlane_b32 s51, v0, 18
+; CHECK: v_readlane_b32 s50, v0, 17
+; CHECK: v_readlane_b32 s49, v0, 16
+; CHECK: v_readlane_b32 s48, v0, 15
+; CHECK: v_readlane_b32 s47, v0, 14
+; CHECK: v_readlane_b32 s46, v0, 13
+; CHECK: v_readlane_b32 s45, v0, 12
+; CHECK: v_readlane_b32 s44, v0, 11
+; CHECK: v_readlane_b32 s43, v0, 10
+; CHECK: v_readlane_b32 s42, v0, 9
+; CHECK: v_readlane_b32 s41, v0, 8
+; CHECK: v_readlane_b32 s40, v0, 7
+; CHECK: v_readlane_b32 s39, v0, 6
+; CHECK: v_readlane_b32 s38, v0, 5
+; CHECK: v_readlane_b32 s37, v0, 4
+; CHECK: v_readlane_b32 s36, v0, 3
+; CHECK: v_readlane_b32 s35, v0, 2
+; CHECK: v_readlane_b32 s34, v0, 1
+; CHECK: v_readlane_b32 s33, v0, 0
+; CHECK: s_or_saveexec_b64 s[4:5], -1
+; CHECK: buffer_load_dword v0, off, s[0:3], s32 ; 4-byte Folded Reload
+; CHECK: buffer_load_dword v1, off, s[0:3], s32 offset:4 ; 4-byte Folded Reload
+; CHECK: s_mov_b64 exec, s[4:5]
+; CHECK: s_waitcnt vmcnt(0)
+; CHECK: s_setpc_b64 s[30:31]
+bb3:
+  tail call void asm sideeffect "; reg use $0", "{s0}"(i32 %sgpr0) #0
+  tail call void asm sideeffect "; reg use $0", "{s1}"(i32 %sgpr1) #0
+  tail call void asm sideeffect "; reg use $0", "{s2}"(i32 %sgpr2) #0
+  tail call void asm sideeffect "; reg use $0", "{s3}"(i32 %sgpr3) #0
+  tail call void asm sideeffect "; reg use $0", "{s4}"(i32 %sgpr4) #0
+  tail call void asm sideeffect "; reg use $0", "{s5}"(i32 %sgpr5) #0
+  tail call void asm sideeffect "; reg use $0", "{s6}"(i32 %sgpr6) #0
+  tail call void asm sideeffect "; reg use $0", "{s7}"(i32 %sgpr7) #0
+  tail call void asm sideeffect "; reg use $0", "{s8}"(i32 %sgpr8) #0
+  tail call void asm sideeffect "; reg use $0", "{s9}"(i32 %sgpr9) #0
+  tail call void asm sideeffect "; reg use $0", "{s10}"(i32 %sgpr10) #0
+  tail call void asm sideeffect "; reg use $0", "{s11}"(i32 %sgpr11) #0
+  tail call void asm sideeffect "; reg use $0", "{s12}"(i32 %sgpr12) #0
+  tail call void asm sideeffect "; reg use $0", "{s13}"(i32 %sgpr13) #0
+  tail call void asm sideeffect "; reg use $0", "{s14}"(i32 %sgpr14) #0
+  tail call void asm sideeffect "; reg use $0", "{s15}"(i32 %sgpr15) #0
+  tail call void asm sideeffect "; reg use $0", "{s16}"(i32 %sgpr16) #0
+  tail call void asm sideeffect "; reg use $0", "{s17}"(i32 %sgpr17) #0
+  tail call void asm sideeffect "; reg use $0", "{s18}"(i32 %sgpr18) #0
+  tail call void asm sideeffect "; reg use $0", "{s19}"(i32 %sgpr19) #0
+  tail call void asm sideeffect "; reg use $0", "{s20}"(i32 %sgpr20) #0
+  tail call void asm sideeffect "; reg use $0", "{s21}"(i32 %sgpr21) #0
+  tail call void asm sideeffect "; reg use $0", "{s22}"(i32 %sgpr22) #0
+  tail call void asm sideeffect "; reg use $0", "{s23}"(i32 %sgpr23) #0
+  tail call void asm sideeffect "; reg use $0", "{s24}"(i32 %sgpr24) #0
+  tail call void asm sideeffect "; reg use $0", "{s25}"(i32 %sgpr25) #0
+  tail call void asm sideeffect "; reg use $0", "{s26}"(i32 %sgpr26) #0
+  tail call void asm sideeffect "; reg use $0", "{s27}"(i32 %sgpr27) #0
+  tail call void asm sideeffect "; reg use $0", "{s28}"(i32 %sgpr28) #0
+  tail call void asm sideeffect "; reg use $0", "{s29}"(i32 %sgpr29) #0
+  tail call void asm sideeffect "; reg use $0", "{s30}"(i32 %sgpr30) #0
+  tail call void asm sideeffect "; reg use $0", "{s31}"(i32 %sgpr31) #0
+  tail call void asm sideeffect "; reg use $0", "{s32}"(i32 %sgpr32) #0
+  tail call void asm sideeffect "; reg use $0", "{s33}"(i32 %sgpr33) #0
+  tail call void asm sideeffect "; reg use $0", "{s34}"(i32 %sgpr34) #0
+  tail call void asm sideeffect "; reg use $0", "{s35}"(i32 %sgpr35) #0
+  tail call void asm sideeffect "; reg use $0", "{s36}"(i32 %sgpr36) #0
+  tail call void asm sideeffect "; reg use $0", "{s37}"(i32 %sgpr37) #0
+  tail call void asm sideeffect "; reg use $0", "{s38}"(i32 %sgpr38) #0
+  tail call void asm sideeffect "; reg use $0", "{s39}"(i32 %sgpr39) #0
+  tail call void asm sideeffect "; reg use $0", "{s40}"(i32 %sgpr40) #0
+  tail call void asm sideeffect "; reg use $0", "{s41}"(i32 %sgpr41) #0
+  tail call void asm sideeffect "; reg use $0", "{s42}"(i32 %sgpr42) #0
+  tail call void asm sideeffect "; reg use $0", "{s43}"(i32 %sgpr43) #0
+  tail call void asm sideeffect "; reg use $0", "{s44}"(i32 %sgpr44) #0
+  tail call void asm sideeffect "; reg use $0", "{s45}"(i32 %sgpr45) #0
+  tail call void asm sideeffect "; reg use $0", "{s46}"(i32 %sgpr46) #0
+  tail call void asm sideeffect "; reg use $0", "{s47}"(i32 %sgpr47) #0
+  tail call void asm sideeffect "; reg use $0", "{s48}"(i32 %sgpr48) #0
+  tail call void asm sideeffect "; reg use $0", "{s49}"(i32 %sgpr49) #0
+  tail call void asm sideeffect "; reg use $0", "{s50}"(i32 %sgpr50) #0
+  tail call void asm sideeffect "; reg use $0", "{s51}"(i32 %sgpr51) #0
+  tail call void asm sideeffect "; reg use $0", "{s52}"(i32 %sgpr52) #0
+  tail call void asm sideeffect "; reg use $0", "{s53}"(i32 %sgpr53) #0
+  tail call void asm sideeffect "; reg use $0", "{s54}"(i32 %sgpr54) #0
+  tail call void asm sideeffect "; reg use $0", "{s55}"(i32 %sgpr55) #0
+  tail call void asm sideeffect "; reg use $0", "{s56}"(i32 %sgpr56) #0
+  tail call void asm sideeffect "; reg use $0", "{s57}"(i32 %sgpr57) #0
+  tail call void asm sideeffect "; reg use $0", "{s58}"(i32 %sgpr58) #0
+  tail call void asm sideeffect "; reg use $0", "{s59}"(i32 %sgpr59) #0
+  tail call void asm sideeffect "; reg use $0", "{s60}"(i32 %sgpr60) #0
+  tail call void asm sideeffect "; reg use $0", "{s61}"(i32 %sgpr61) #0
+  tail call void asm sideeffect "; reg use $0", "{s62}"(i32 %sgpr62) #0
+  tail call void asm sideeffect "; reg use $0", "{s63}"(i32 %sgpr63) #0
+  tail call void asm sideeffect "; reg use $0", "{s64}"(i32 %sgpr64) #0
+  tail call void asm sideeffect "; reg use $0", "{s65}"(i32 %sgpr65) #0
+  tail call void asm sideeffect "; reg use $0", "{s66}"(i32 %sgpr66) #0
+  tail call void asm sideeffect "; reg use $0", "{s67}"(i32 %sgpr67) #0
+  tail call void asm sideeffect "; reg use $0", "{s68}"(i32 %sgpr68) #0
+  tail call void asm sideeffect "; reg use $0", "{s69}"(i32 %sgpr69) #0
+  tail call void asm sideeffect "; reg use $0", "{s70}"(i32 %sgpr70) #0
+  tail call void asm sideeffect "; reg use $0", "{s71}"(i32 %sgpr71) #0
+  tail call void asm sideeffect "; reg use $0", "{s72}"(i32 %sgpr72) #0
+  tail call void asm sideeffect "; reg use $0", "{s73}"(i32 %sgpr73) #0
+  tail call void asm sideeffect "; reg use $0", "{s74}"(i32 %sgpr74) #0
+  tail call void asm sideeffect "; reg use $0", "{s75}"(i32 %sgpr75) #0
+  tail call void asm sideeffect "; reg use $0", "{s76}"(i32 %sgpr76) #0
+  tail call void asm sideeffect "; reg use $0", "{s77}"(i32 %sgpr77) #0
+  tail call void asm sideeffect "; reg use $0", "{s78}"(i32 %sgpr78) #0
+  tail call void asm sideeffect "; reg use $0", "{s79}"(i32 %sgpr79) #0
+  tail call void asm sideeffect "; reg use $0", "{s80}"(i32 %sgpr80) #0
+  tail call void asm sideeffect "; reg use $0", "{s81}"(i32 %sgpr81) #0
+  tail call void asm sideeffect "; reg use $0", "{s82}"(i32 %sgpr82) #0
+  tail call void asm sideeffect "; reg use $0", "{s83}"(i32 %sgpr83) #0
+  tail call void asm sideeffect "; reg use $0", "{s84}"(i32 %sgpr84) #0
+  tail call void asm sideeffect "; reg use $0", "{s85}"(i32 %sgpr85) #0
+  tail call void asm sideeffect "; reg use $0", "{s86}"(i32 %sgpr86) #0
+  tail call void asm sideeffect "; reg use $0", "{s87}"(i32 %sgpr87) #0
+  tail call void asm sideeffect "; reg use $0", "{s88}"(i32 %sgpr88) #0
+  tail call void asm sideeffect "; reg use $0", "{s89}"(i32 %sgpr89) #0
+  tail call void asm sideeffect "; reg use $0", "{s90}"(i32 %sgpr90) #0
+  tail call void asm sideeffect "; reg use $0", "{s91}"(i32 %sgpr91) #0
+  tail call void asm sideeffect "; reg use $0", "{s92}"(i32 %sgpr92) #0
+  tail call void asm sideeffect "; reg use $0", "{s93}"(i32 %sgpr93) #0
+  tail call void asm sideeffect "; reg use $0", "{s94}"(i32 %sgpr94) #0
+  tail call void asm sideeffect "; reg use $0", "{s95}"(i32 %sgpr95) #0
+  tail call void asm sideeffect "; reg use $0", "{s96}"(i32 %sgpr96) #0
+  tail call void asm sideeffect "; reg use $0", "{s97}"(i32 %sgpr97) #0
+  tail call void asm sideeffect "; reg use $0", "{s98}"(i32 %sgpr98) #0
+  tail call void asm sideeffect "; reg use $0", "{s99}"(i32 %sgpr99) #0
+  tail call void asm sideeffect "; reg use $0", "{s100}"(i32 %sgpr100) #0
+  tail call void asm sideeffect "; reg use $0", "{s101}"(i32 %sgpr101) #0
+  tail call void asm sideeffect "; reg use $0", "{vcc_lo}"(i32 %vcc_lo) #0
+  tail call void asm sideeffect "; reg use $0", "{vcc_hi}"(i32 %vcc_hi) #0
+  ret void
+}
+
+declare i32 @llvm.amdgcn.workgroup.id.x() #0
+
+attributes #0 = { nounwind }