diff --git a/llvm/test/CodeGen/LoongArch/ir-instruction/add.ll b/llvm/test/CodeGen/LoongArch/ir-instruction/add.ll
--- a/llvm/test/CodeGen/LoongArch/ir-instruction/add.ll
+++ b/llvm/test/CodeGen/LoongArch/ir-instruction/add.ll
@@ -703,3 +703,224 @@
   %add = add i32 %x, -2147485696
   ret i32 %add
 }
+
+define signext i32 @add_i32_4080(i32 %x) {
+; LA32-LABEL: add_i32_4080:
+; LA32:       # %bb.0:
+; LA32-NEXT:    ori $a1, $zero, 4080
+; LA32-NEXT:    add.w $a0, $a0, $a1
+; LA32-NEXT:    ret
+;
+; LA64-LABEL: add_i32_4080:
+; LA64:       # %bb.0:
+; LA64-NEXT:    ori $a1, $zero, 4080
+; LA64-NEXT:    add.w $a0, $a0, $a1
+; LA64-NEXT:    ret
+  %add = add i32 %x, 4080
+  ret i32 %add
+}
+
+define signext i32 @add_i32_minus_4080(i32 %x) {
+; LA32-LABEL: add_i32_minus_4080:
+; LA32:       # %bb.0:
+; LA32-NEXT:    lu12i.w $a1, -1
+; LA32-NEXT:    ori $a1, $a1, 16
+; LA32-NEXT:    add.w $a0, $a0, $a1
+; LA32-NEXT:    ret
+;
+; LA64-LABEL: add_i32_minus_4080:
+; LA64:       # %bb.0:
+; LA64-NEXT:    lu12i.w $a1, -1
+; LA64-NEXT:    ori $a1, $a1, 16
+; LA64-NEXT:    add.w $a0, $a0, $a1
+; LA64-NEXT:    ret
+  %add = add i32 %x, -4080
+  ret i32 %add
+}
+
+define signext i32 @add_i32_2048(i32 %x) {
+; LA32-LABEL: add_i32_2048:
+; LA32:       # %bb.0:
+; LA32-NEXT:    ori $a1, $zero, 2048
+; LA32-NEXT:    add.w $a0, $a0, $a1
+; LA32-NEXT:    ret
+;
+; LA64-LABEL: add_i32_2048:
+; LA64:       # %bb.0:
+; LA64-NEXT:    ori $a1, $zero, 2048
+; LA64-NEXT:    add.w $a0, $a0, $a1
+; LA64-NEXT:    ret
+  %add = add i32 %x, 2048
+  ret i32 %add
+}
+
+define signext i32 @add_i32_4094(i32 %x) {
+; LA32-LABEL: add_i32_4094:
+; LA32:       # %bb.0:
+; LA32-NEXT:    ori $a1, $zero, 4094
+; LA32-NEXT:    add.w $a0, $a0, $a1
+; LA32-NEXT:    ret
+;
+; LA64-LABEL: add_i32_4094:
+; LA64:       # %bb.0:
+; LA64-NEXT:    ori $a1, $zero, 4094
+; LA64-NEXT:    add.w $a0, $a0, $a1
+; LA64-NEXT:    ret
+  %add = add i32 %x, 4094
+  ret i32 %add
+}
+
+define signext i32 @add_i32_minus_2049(i32 %x) {
+; LA32-LABEL: add_i32_minus_2049:
+; LA32:       # %bb.0:
+; LA32-NEXT:    lu12i.w $a1, -1
+; LA32-NEXT:    ori $a1, $a1, 2047
+; LA32-NEXT:    add.w $a0, $a0, $a1
+; LA32-NEXT:    ret
+;
+; LA64-LABEL: add_i32_minus_2049:
+; LA64:       # %bb.0:
+; LA64-NEXT:    lu12i.w $a1, -1
+; LA64-NEXT:    ori $a1, $a1, 2047
+; LA64-NEXT:    add.w $a0, $a0, $a1
+; LA64-NEXT:    ret
+  %add = add i32 %x, -2049
+  ret i32 %add
+}
+
+define signext i32 @add_i32_minus_4096(i32 %x) {
+; LA32-LABEL: add_i32_minus_4096:
+; LA32:       # %bb.0:
+; LA32-NEXT:    lu12i.w $a1, -1
+; LA32-NEXT:    add.w $a0, $a0, $a1
+; LA32-NEXT:    ret
+;
+; LA64-LABEL: add_i32_minus_4096:
+; LA64:       # %bb.0:
+; LA64-NEXT:    lu12i.w $a1, -1
+; LA64-NEXT:    add.w $a0, $a0, $a1
+; LA64-NEXT:    ret
+  %add = add i32 %x, -4096
+  ret i32 %add
+}
+
+define i64 @add_i64_4080(i64 %x) {
+; LA32-LABEL: add_i64_4080:
+; LA32:       # %bb.0:
+; LA32-NEXT:    ori $a2, $zero, 4080
+; LA32-NEXT:    add.w $a2, $a0, $a2
+; LA32-NEXT:    sltu $a0, $a2, $a0
+; LA32-NEXT:    add.w $a1, $a1, $a0
+; LA32-NEXT:    move $a0, $a2
+; LA32-NEXT:    ret
+;
+; LA64-LABEL: add_i64_4080:
+; LA64:       # %bb.0:
+; LA64-NEXT:    ori $a1, $zero, 4080
+; LA64-NEXT:    add.d $a0, $a0, $a1
+; LA64-NEXT:    ret
+  %add = add i64 %x, 4080
+  ret i64 %add
+}
+
+define i64 @add_i64_minus_4080(i64 %x) {
+; LA32-LABEL: add_i64_minus_4080:
+; LA32:       # %bb.0:
+; LA32-NEXT:    lu12i.w $a2, -1
+; LA32-NEXT:    ori $a2, $a2, 16
+; LA32-NEXT:    add.w $a2, $a0, $a2
+; LA32-NEXT:    sltu $a0, $a2, $a0
+; LA32-NEXT:    add.w $a0, $a1, $a0
+; LA32-NEXT:    addi.w $a1, $a0, -1
+; LA32-NEXT:    move $a0, $a2
+; LA32-NEXT:    ret
+;
+; LA64-LABEL: add_i64_minus_4080:
+; LA64:       # %bb.0:
+; LA64-NEXT:    lu12i.w $a1, -1
+; LA64-NEXT:    ori $a1, $a1, 16
+; LA64-NEXT:    add.d $a0, $a0, $a1
+; LA64-NEXT:    ret
+  %add = add i64 %x, -4080
+  ret i64 %add
+}
+
+define i64 @add_i64_2048(i64 %x) {
+; LA32-LABEL: add_i64_2048:
+; LA32:       # %bb.0:
+; LA32-NEXT:    ori $a2, $zero, 2048
+; LA32-NEXT:    add.w $a2, $a0, $a2
+; LA32-NEXT:    sltu $a0, $a2, $a0
+; LA32-NEXT:    add.w $a1, $a1, $a0
+; LA32-NEXT:    move $a0, $a2
+; LA32-NEXT:    ret
+;
+; LA64-LABEL: add_i64_2048:
+; LA64:       # %bb.0:
+; LA64-NEXT:    ori $a1, $zero, 2048
+; LA64-NEXT:    add.d $a0, $a0, $a1
+; LA64-NEXT:    ret
+  %add = add i64 %x, 2048
+  ret i64 %add
+}
+
+define i64 @add_i64_4094(i64 %x) {
+; LA32-LABEL: add_i64_4094:
+; LA32:       # %bb.0:
+; LA32-NEXT:    ori $a2, $zero, 4094
+; LA32-NEXT:    add.w $a2, $a0, $a2
+; LA32-NEXT:    sltu $a0, $a2, $a0
+; LA32-NEXT:    add.w $a1, $a1, $a0
+; LA32-NEXT:    move $a0, $a2
+; LA32-NEXT:    ret
+;
+; LA64-LABEL: add_i64_4094:
+; LA64:       # %bb.0:
+; LA64-NEXT:    ori $a1, $zero, 4094
+; LA64-NEXT:    add.d $a0, $a0, $a1
+; LA64-NEXT:    ret
+  %add = add i64 %x, 4094
+  ret i64 %add
+}
+
+define i64 @add_i64_minus_2049(i64 %x) {
+; LA32-LABEL: add_i64_minus_2049:
+; LA32:       # %bb.0:
+; LA32-NEXT:    lu12i.w $a2, -1
+; LA32-NEXT:    ori $a2, $a2, 2047
+; LA32-NEXT:    add.w $a2, $a0, $a2
+; LA32-NEXT:    sltu $a0, $a2, $a0
+; LA32-NEXT:    add.w $a0, $a1, $a0
+; LA32-NEXT:    addi.w $a1, $a0, -1
+; LA32-NEXT:    move $a0, $a2
+; LA32-NEXT:    ret
+;
+; LA64-LABEL: add_i64_minus_2049:
+; LA64:       # %bb.0:
+; LA64-NEXT:    lu12i.w $a1, -1
+; LA64-NEXT:    ori $a1, $a1, 2047
+; LA64-NEXT:    add.d $a0, $a0, $a1
+; LA64-NEXT:    ret
+  %add = add i64 %x, -2049
+  ret i64 %add
+}
+
+define i64 @add_i64_minus_4096(i64 %x) {
+; LA32-LABEL: add_i64_minus_4096:
+; LA32:       # %bb.0:
+; LA32-NEXT:    lu12i.w $a2, -1
+; LA32-NEXT:    add.w $a2, $a0, $a2
+; LA32-NEXT:    sltu $a0, $a2, $a0
+; LA32-NEXT:    add.w $a0, $a1, $a0
+; LA32-NEXT:    addi.w $a1, $a0, -1
+; LA32-NEXT:    move $a0, $a2
+; LA32-NEXT:    ret
+;
+; LA64-LABEL: add_i64_minus_4096:
+; LA64:       # %bb.0:
+; LA64-NEXT:    lu12i.w $a1, -1
+; LA64-NEXT:    add.d $a0, $a0, $a1
+; LA64-NEXT:    ret
+  %add = add i64 %x, -4096
+  ret i64 %add
+}