diff --git a/llvm/test/CodeGen/LoongArch/ir-instruction/and.ll b/llvm/test/CodeGen/LoongArch/ir-instruction/and.ll
--- a/llvm/test/CodeGen/LoongArch/ir-instruction/and.ll
+++ b/llvm/test/CodeGen/LoongArch/ir-instruction/and.ll
@@ -265,3 +265,114 @@
   %r = and i64 4096, %b
   ret i64 %r
 }
+
+define signext i32 @and_i32_0xfff0(i32 %a) {
+; LA32-LABEL: and_i32_0xfff0:
+; LA32:       # %bb.0:
+; LA32-NEXT:    lu12i.w $a1, 15
+; LA32-NEXT:    ori $a1, $a1, 4080
+; LA32-NEXT:    and $a0, $a0, $a1
+; LA32-NEXT:    ret
+;
+; LA64-LABEL: and_i32_0xfff0:
+; LA64:       # %bb.0:
+; LA64-NEXT:    lu12i.w $a1, 15
+; LA64-NEXT:    ori $a1, $a1, 4080
+; LA64-NEXT:    and $a0, $a0, $a1
+; LA64-NEXT:    ret
+  %b = and i32 %a, 65520
+  ret i32 %b
+}
+
+define signext i32 @and_i32_0xff000(i32 %a) {
+; LA32-LABEL: and_i32_0xff000:
+; LA32:       # %bb.0:
+; LA32-NEXT:    lu12i.w $a1, 255
+; LA32-NEXT:    and $a0, $a0, $a1
+; LA32-NEXT:    ret
+;
+; LA64-LABEL: and_i32_0xff000:
+; LA64:       # %bb.0:
+; LA64-NEXT:    lu12i.w $a1, 255
+; LA64-NEXT:    and $a0, $a0, $a1
+; LA64-NEXT:    ret
+  %b = and i32 %a, 1044480
+  ret i32 %b
+}
+
+define i64 @and_i64_0xfff0(i64 %a) {
+; LA32-LABEL: and_i64_0xfff0:
+; LA32:       # %bb.0:
+; LA32-NEXT:    lu12i.w $a1, 15
+; LA32-NEXT:    ori $a1, $a1, 4080
+; LA32-NEXT:    and $a0, $a0, $a1
+; LA32-NEXT:    move $a1, $zero
+; LA32-NEXT:    ret
+;
+; LA64-LABEL: and_i64_0xfff0:
+; LA64:       # %bb.0:
+; LA64-NEXT:    lu12i.w $a1, 15
+; LA64-NEXT:    ori $a1, $a1, 4080
+; LA64-NEXT:    and $a0, $a0, $a1
+; LA64-NEXT:    ret
+  %b = and i64 %a, 65520
+  ret i64 %b
+}
+
+define i64 @and_i64_0xff000(i64 %a) {
+; LA32-LABEL: and_i64_0xff000:
+; LA32:       # %bb.0:
+; LA32-NEXT:    lu12i.w $a1, 255
+; LA32-NEXT:    and $a0, $a0, $a1
+; LA32-NEXT:    move $a1, $zero
+; LA32-NEXT:    ret
+;
+; LA64-LABEL: and_i64_0xff000:
+; LA64:       # %bb.0:
+; LA64-NEXT:    lu12i.w $a1, 255
+; LA64-NEXT:    and $a0, $a0, $a1
+; LA64-NEXT:    ret
+  %b = and i64 %a, 1044480
+  ret i64 %b
+}
+
+define i64 @and_i64_0xfff0_twice(i64 %a, i64 %b) {
+; LA32-LABEL: and_i64_0xfff0_twice:
+; LA32:       # %bb.0:
+; LA32-NEXT:    lu12i.w $a1, 15
+; LA32-NEXT:    ori $a1, $a1, 4080
+; LA32-NEXT:    and $a2, $a2, $a1
+; LA32-NEXT:    and $a0, $a0, $a1
+; LA32-NEXT:    mul.w $a0, $a0, $a2
+; LA32-NEXT:    move $a1, $zero
+; LA32-NEXT:    ret
+;
+; LA64-LABEL: and_i64_0xfff0_twice:
+; LA64:       # %bb.0:
+; LA64-NEXT:    lu12i.w $a2, 15
+; LA64-NEXT:    ori $a2, $a2, 4080
+; LA64-NEXT:    and $a1, $a1, $a2
+; LA64-NEXT:    and $a0, $a0, $a2
+; LA64-NEXT:    mul.d $a0, $a0, $a1
+; LA64-NEXT:    ret
+  %c = and i64 %a, 65520
+  %d = and i64 %b, 65520
+  %e = mul i64 %c, %d
+  ret i64 %e
+}
+
+define i64 @and_i64_minus_31(i64 %a) {
+; LA32-LABEL: and_i64_minus_31:
+; LA32:       # %bb.0:
+; LA32-NEXT:    addi.w $a2, $zero, -31
+; LA32-NEXT:    and $a0, $a0, $a2
+; LA32-NEXT:    ret
+;
+; LA64-LABEL: and_i64_minus_31:
+; LA64:       # %bb.0:
+; LA64-NEXT:    addi.w $a1, $zero, -31
+; LA64-NEXT:    and $a0, $a0, $a1
+; LA64-NEXT:    ret
+  %b = and i64 %a, -31
+  ret i64 %b
+}