
[SVE,ACLE] svbic implementation

Message ID CAELXzTNKADooOYNFFL8xWj7p5Wz1hcE0WWR=BDHp=XhFpZ3oDg@mail.gmail.com
State New
Series: [SVE,ACLE] svbic implementation

Commit Message

Kugan Vivekanandarajah March 19, 2019, 11:20 p.m. UTC
I have committed the attached patch, which implements svbic, to the
aarch64/sve-acle-branch.
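
For reference, svbic computes op1 & ~op2 for each active element, with the
usual _m, _z and _x predication variants plus _n forms that take a scalar
second operand.  A minimal usage sketch (illustrative only, not part of the
patch; it assumes the ACLE header arm_sve.h and an SVE-enabled compiler):

#include <arm_sve.h>

/* Element-wise a & ~b.  With "don't care" (_x) predication this should
   map onto the unpredicated BIC pattern added below.  */
svint32_t
clear_bits (svbool_t pg, svint32_t a, svint32_t b)
{
  return svbic_s32_x (pg, a, b);
}

/* _n form: clear bit 0 of every element.  */
svint32_t
clear_bit0 (svbool_t pg, svint32_t a)
{
  return svbic_n_s32_x (pg, a, 1);
}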

Thanks,
Kugan

Patch

From 182bd15334874844bef5e317f55a6497f77e12ff Mon Sep 17 00:00:00 2001
From: Kugan Vivekanandarajah <kugan.vivekanandarajah@linaro.org>
Date: Thu, 24 Jan 2019 20:57:19 +1100
Subject: [PATCH 1/3] svbic

Change-Id: I819490ec63ee38b9cdc7c5e342436b7afdee2973
---
 gcc/config/aarch64/aarch64-sve-builtins.c          |  30 ++
 gcc/config/aarch64/aarch64-sve-builtins.def        |   1 +
 gcc/config/aarch64/aarch64-sve.md                  |  54 ++-
 .../gcc.target/aarch64/sve-acle/asm/bic_s16.c      | 398 +++++++++++++++++++++
 .../gcc.target/aarch64/sve-acle/asm/bic_s32.c      | 394 ++++++++++++++++++++
 .../gcc.target/aarch64/sve-acle/asm/bic_s64.c      | 394 ++++++++++++++++++++
 .../gcc.target/aarch64/sve-acle/asm/bic_s8.c       | 317 ++++++++++++++++
 .../gcc.target/aarch64/sve-acle/asm/bic_u16.c      | 398 +++++++++++++++++++++
 .../gcc.target/aarch64/sve-acle/asm/bic_u32.c      | 394 ++++++++++++++++++++
 .../gcc.target/aarch64/sve-acle/asm/bic_u64.c      | 394 ++++++++++++++++++++
 .../gcc.target/aarch64/sve-acle/asm/bic_u8.c       | 317 ++++++++++++++++
 11 files changed, 3087 insertions(+), 4 deletions(-)
 create mode 100644 gcc/testsuite/gcc.target/aarch64/sve-acle/asm/bic_s16.c
 create mode 100644 gcc/testsuite/gcc.target/aarch64/sve-acle/asm/bic_s32.c
 create mode 100644 gcc/testsuite/gcc.target/aarch64/sve-acle/asm/bic_s64.c
 create mode 100644 gcc/testsuite/gcc.target/aarch64/sve-acle/asm/bic_s8.c
 create mode 100644 gcc/testsuite/gcc.target/aarch64/sve-acle/asm/bic_u16.c
 create mode 100644 gcc/testsuite/gcc.target/aarch64/sve-acle/asm/bic_u32.c
 create mode 100644 gcc/testsuite/gcc.target/aarch64/sve-acle/asm/bic_u64.c
 create mode 100644 gcc/testsuite/gcc.target/aarch64/sve-acle/asm/bic_u8.c
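
Note on the expander (commentary placed below the diffstat, so it is not
part of the applied patch): when the second operand is an integer constant,
expand_bic complements it and reuses the svand expansion, since x & ~imm is
just an AND with the inverted constant.  That is why the bic_1_*_x tests
below expect instructions such as "and z0.h, z0.h, #0xfffe".  A scalar
sketch of the identity, purely for illustration:

#include <assert.h>
#include <stdint.h>

int
main (void)
{
  uint16_t x = 0xabcd;
  /* svbic (x, 1) == x & ~1 == x & 0xfffe for 16-bit elements.  */
  assert ((uint16_t) (x & ~(uint16_t) 1) == (uint16_t) (x & 0xfffe));
  return 0;
}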

diff --git a/gcc/config/aarch64/aarch64-sve-builtins.c b/gcc/config/aarch64/aarch64-sve-builtins.c
index 0e3db66..106f21e 100644
--- a/gcc/config/aarch64/aarch64-sve-builtins.c
+++ b/gcc/config/aarch64/aarch64-sve-builtins.c
@@ -166,6 +166,7 @@  enum function {
   FUNC_svadd,
   FUNC_svand,
   FUNC_svasrd,
+  FUNC_svbic,
   FUNC_svdiv,
   FUNC_svdivr,
   FUNC_svdot,
@@ -474,6 +475,7 @@  private:
   rtx expand_add (unsigned int);
   rtx expand_and ();
   rtx expand_asrd ();
+  rtx expand_bic ();
   rtx expand_div (bool);
   rtx expand_dot ();
   rtx expand_dup ();
@@ -1214,6 +1216,7 @@  arm_sve_h_builder::get_attributes (const function_instance &instance)
     case FUNC_svadd:
     case FUNC_svand:
     case FUNC_svasrd:
+    case FUNC_svbic:
     case FUNC_svdiv:
     case FUNC_svdivr:
     case FUNC_svdot:
@@ -1887,6 +1890,7 @@  gimple_folder::fold ()
     case FUNC_svadd:
     case FUNC_svand:
     case FUNC_svasrd:
+    case FUNC_svbic:
     case FUNC_svdiv:
     case FUNC_svdivr:
     case FUNC_svdot:
@@ -1990,6 +1994,9 @@  function_expander::expand ()
     case FUNC_svdot:
       return expand_dot ();
 
+    case FUNC_svbic:
+      return expand_bic ();
+
     case FUNC_svdup:
       return expand_dup ();
 
@@ -2133,6 +2140,29 @@  function_expander::expand_dot ()
     return expand_via_unpred_direct_optab (sdot_prod_optab);
 }
 
+/* Expand a call to svbic.  */
+rtx
+function_expander::expand_bic ()
+{
+  if (CONST_INT_P (m_args[2]))
+    {
+      machine_mode mode = GET_MODE_INNER (get_mode (0));
+      m_args[2] = simplify_unary_operation (NOT, mode, m_args[2], mode);
+      return expand_and ();
+    }
+
+  if (m_fi.pred == PRED_x)
+    {
+      insn_code icode = code_for_aarch64_bic (get_mode (0));
+      return expand_via_unpred_insn (icode);
+    }
+  else
+    {
+      insn_code icode = code_for_cond_bic (get_mode (0));
+      return expand_via_pred_insn (icode);
+    }
+}
+
 /* Expand a call to svdup.  */
 rtx
 function_expander::expand_dup ()
diff --git a/gcc/config/aarch64/aarch64-sve-builtins.def b/gcc/config/aarch64/aarch64-sve-builtins.def
index 8322c4b..4af06ac 100644
--- a/gcc/config/aarch64/aarch64-sve-builtins.def
+++ b/gcc/config/aarch64/aarch64-sve-builtins.def
@@ -65,6 +65,7 @@  DEF_SVE_FUNCTION (svabs, unary, all_signed_and_float, mxz)
 DEF_SVE_FUNCTION (svadd, binary_opt_n, all_data, mxz)
 DEF_SVE_FUNCTION (svand, binary_opt_n, all_integer, mxz)
 DEF_SVE_FUNCTION (svasrd, shift_right_imm, all_signed, mxz)
+DEF_SVE_FUNCTION (svbic, binary_opt_n, all_integer, mxz)
 DEF_SVE_FUNCTION (svdiv, binary_opt_n, all_sdi_and_float, mxz)
 DEF_SVE_FUNCTION (svdivr, binary_opt_n, all_sdi_and_float, mxz)
 DEF_SVE_FUNCTION (svdot, ternary_qq_opt_n, sdi, none)
diff --git a/gcc/config/aarch64/aarch64-sve.md b/gcc/config/aarch64/aarch64-sve.md
index d480289..5e629de 100644
--- a/gcc/config/aarch64/aarch64-sve.md
+++ b/gcc/config/aarch64/aarch64-sve.md
@@ -1360,6 +1360,52 @@ 
   [(set_attr "movprfx" "*,yes,*")]
 )
 
+;; Predicated BIC with select.
+(define_expand "@cond_bic<mode>"
+  [(set (match_operand:SVE_I 0 "register_operand")
+	(unspec:SVE_I
+	  [(match_operand:<VPRED> 1 "register_operand")
+	   (and:SVE_I
+	     (not:SVE_I (match_operand:SVE_I 3 "register_operand"))
+	     (match_operand:SVE_I 2 "aarch64_sve_logical_operand"))
+	   (match_operand:SVE_I 4 "aarch64_simd_reg_or_zero")]
+	  UNSPEC_SEL))]
+  "TARGET_SVE"
+)
+
+;; Predicated BIC with select 2nd operand.
+(define_insn "*cond_bic<mode>_2"
+  [(set (match_operand:SVE_I 0 "register_operand" "=w, ?&w")
+	(unspec:SVE_I
+	  [(match_operand:<VPRED> 1 "register_operand" "Upl, Upl")
+	   (and:SVE_I
+	     (not:SVE_I (match_operand:SVE_I 3 "register_operand" "w, w"))
+	     (match_operand:SVE_I 2 "aarch64_sve_logical_operand" "0, w"))
+	   (match_dup 2)]
+	  UNSPEC_SEL))]
+  "TARGET_SVE"
+  "@
+   bic\t%0.<Vetype>, %1/m, %0.<Vetype>, %3.<Vetype>
+   movprfx\t%0, %2\;bic\t%0.<Vetype>, %1/m, %0.<Vetype>, %3.<Vetype>"
+  [(set_attr "movprfx" "*,yes")]
+)
+
+;; Predicated BIC with select matching zero.
+(define_insn "*cond_bic<mode>_z"
+  [(set (match_operand:SVE_I 0 "register_operand" "=&w, &w")
+	(unspec:SVE_I
+	  [(match_operand:<VPRED> 1 "register_operand" "Upl, Upl")
+	   (and:SVE_I
+	     (not:SVE_I (match_operand:SVE_I 3 "register_operand" "w, w"))
+	     (match_operand:SVE_I 2 "aarch64_sve_logical_operand" "0, w"))
+	   (match_operand:SVE_I 4 "aarch64_simd_imm_zero")]
+	  UNSPEC_SEL))]
+  "TARGET_SVE"
+  "@
+   movprfx\t%0.<Vetype>, %1/z, %0.<Vetype>\;bic\t%0.<Vetype>, %1/m, %0.<Vetype>, %3.<Vetype>
+   movprfx\t%0.<Vetype>, %1/z, %2.<Vetype>\;bic\t%0.<Vetype>, %1/m, %0.<Vetype>, %3.<Vetype>"
+  [(set_attr "movprfx" "yes")]
+)
 ;; Vector AND, ORR and XOR on floating-point modes.  We avoid subregs
 ;; by providing this, but we need to use UNSPECs since rtx logical ops
 ;; aren't defined for floating-point modes.
@@ -1374,13 +1420,13 @@ 
 
 ;; REG_EQUAL notes on "not<mode>3" should ensure that we can generate
 ;; this pattern even though the NOT instruction itself is predicated.
-(define_insn "bic<mode>3"
+(define_insn "@aarch64_bic<mode>"
   [(set (match_operand:SVE_I 0 "register_operand" "=w")
 	(and:SVE_I
-	  (not:SVE_I (match_operand:SVE_I 1 "register_operand" "w"))
-	  (match_operand:SVE_I 2 "register_operand" "w")))]
+	  (not:SVE_I (match_operand:SVE_I 2 "register_operand" "w"))
+	  (match_operand:SVE_I 1 "register_operand" "w")))]
   "TARGET_SVE"
-  "bic\t%0.d, %2.d, %1.d"
+  "bic\t%0.d, %1.d, %2.d"
 )
 
 ;; Predicate AND.  We can reuse one of the inputs as the GP.
diff --git a/gcc/testsuite/gcc.target/aarch64/sve-acle/asm/bic_s16.c b/gcc/testsuite/gcc.target/aarch64/sve-acle/asm/bic_s16.c
new file mode 100644
index 0000000..03463f2
--- /dev/null
+++ b/gcc/testsuite/gcc.target/aarch64/sve-acle/asm/bic_s16.c
@@ -0,0 +1,398 @@ 
+/* { dg-do compile } */
+/* { dg-final { check-function-bodies "**" "" "-DCHECK_ASM" } } */
+
+#include "test_sve_acle.h"
+
+/*
+** bic_s16_m_tied1:
+**	bic	z0\.h, p0/m, z0\.h, z1\.h
+**	ret
+*/
+TEST_UNIFORM_Z (bic_s16_m_tied1, svint16_t,
+		z0 = svbic_s16_m (p0, z0, z1),
+		z0 = svbic_m (p0, z0, z1))
+
+/* Bad RA choice: no preferred output sequence.  */
+TEST_UNIFORM_Z (bic_s16_m_tied2, svint16_t,
+		z1 = svbic_s16_m (p0, z0, z1),
+		z1 = svbic_m (p0, z0, z1))
+
+/*
+** bic_s16_m_untied:
+**	movprfx	z0, z1
+**	bic	z0\.h, p0/m, z0\.h, z2\.h
+**	ret
+*/
+TEST_UNIFORM_Z (bic_s16_m_untied, svint16_t,
+		z0 = svbic_s16_m (p0, z1, z2),
+		z0 = svbic_m (p0, z1, z2))
+
+/*
+** bic_w0_s16_m_tied1:
+**	mov	(z[0-9]+\.h), w0
+**	bic	z0\.h, p0/m, z0\.h, \1
+**	ret
+*/
+TEST_UNIFORM_ZS (bic_w0_s16_m_tied1, svint16_t, int16_t,
+		 z0 = svbic_n_s16_m (p0, z0, x0),
+		 z0 = svbic_m (p0, z0, x0))
+
+/*
+** bic_w0_s16_m_untied:
+**	mov	(z[0-9]+\.h), w0
+**	movprfx	z0, z1
+**	bic	z0\.h, p0/m, z0\.h, \1
+**	ret
+*/
+TEST_UNIFORM_ZS (bic_w0_s16_m_untied, svint16_t, int16_t,
+		 z0 = svbic_n_s16_m (p0, z1, x0),
+		 z0 = svbic_m (p0, z1, x0))
+
+/*
+** bic_h0_s16_m_tied1:
+**	mov	(z[0-9]+\.h), h0
+**	bic	z1\.h, p0/m, z1\.h, \1
+**	ret
+*/
+TEST_UNIFORM_ZS (bic_h0_s16_m_tied1, svint16_t, int16_t,
+		 z1 = svbic_n_s16_m (p0, z1, d0),
+		 z1 = svbic_m (p0, z1, d0))
+
+/*
+** bic_h0_s16_m_untied:
+**	mov	(z[0-9]+\.h), h0
+**	movprfx	z1, z2
+**	bic	z1\.h, p0/m, z1\.h, \1
+**	ret
+*/
+TEST_UNIFORM_ZS (bic_h0_s16_m_untied, svint16_t, int16_t,
+		 z1 = svbic_n_s16_m (p0, z2, d0),
+		 z1 = svbic_m (p0, z2, d0))
+
+/*
+** bic_1_s16_m_tied1:
+**	mov	(z[0-9]+\.h), #-2
+**	and	z0\.h, p0/m, z0\.h, \1
+**	ret
+*/
+TEST_UNIFORM_Z (bic_1_s16_m_tied1, svint16_t,
+		z0 = svbic_n_s16_m (p0, z0, 1),
+		z0 = svbic_m (p0, z0, 1))
+
+/*
+** bic_1_s16_m_untied:
+**	mov	(z[0-9]+\.h), #-2
+**	movprfx	z0, z1
+**	and	z0\.h, p0/m, z0\.h, \1
+**	ret
+*/
+TEST_UNIFORM_Z (bic_1_s16_m_untied, svint16_t,
+		z0 = svbic_n_s16_m (p0, z1, 1),
+		z0 = svbic_m (p0, z1, 1))
+
+/*
+** bic_m2_s16_m:
+**	mov	(z[0-9]+\.h), #1
+**	and	z0\.h, p0/m, z0\.h, \1
+**	ret
+*/
+TEST_UNIFORM_Z (bic_m2_s16_m, svint16_t,
+		z0 = svbic_n_s16_m (p0, z0, -2),
+		z0 = svbic_m (p0, z0, -2))
+
+/*
+** bic_s16_z_tied1:
+**	movprfx	z0\.h, p0/z, z0\.h
+**	bic	z0\.h, p0/m, z0\.h, z1\.h
+**	ret
+*/
+TEST_UNIFORM_Z (bic_s16_z_tied1, svint16_t,
+		z0 = svbic_s16_z (p0, z0, z1),
+		z0 = svbic_z (p0, z0, z1))
+
+/*
+** bic_s16_z_tied2:
+**	mov	(z[0-9]+)\.d, z1\.d
+**	movprfx	z1\.h, p0/z, z0\.h
+**	bic	z1\.h, p0/m, z1\.h, \1\.h
+**	ret
+*/
+TEST_UNIFORM_Z (bic_s16_z_tied2, svint16_t,
+		z1 = svbic_s16_z (p0, z0, z1),
+		z1 = svbic_z (p0, z0, z1))
+
+/*
+** bic_s16_z_untied:
+**	movprfx	z0\.h, p0/z, z1\.h
+**	bic	z0\.h, p0/m, z0\.h, z2\.h
+**	ret
+*/
+TEST_UNIFORM_Z (bic_s16_z_untied, svint16_t,
+		z0 = svbic_s16_z (p0, z1, z2),
+		z0 = svbic_z (p0, z1, z2))
+
+/*
+** bic_w0_s16_z_tied1:
+**	mov	(z[0-9]+\.h), w0
+**	movprfx	z0\.h, p0/z, z0\.h
+**	bic	z0\.h, p0/m, z0\.h, \1
+**	ret
+*/
+TEST_UNIFORM_ZS (bic_w0_s16_z_tied1, svint16_t, int16_t,
+		 z0 = svbic_n_s16_z (p0, z0, x0),
+		 z0 = svbic_z (p0, z0, x0))
+
+/*
+** bic_w0_s16_z_untied:
+**	mov	(z[0-9]+\.h), w0
+**	movprfx	z0\.h, p0/z, z1\.h
+**	bic	z0\.h, p0/m, z0\.h, \1
+**	ret
+*/
+TEST_UNIFORM_ZS (bic_w0_s16_z_untied, svint16_t, int16_t,
+		 z0 = svbic_n_s16_z (p0, z1, x0),
+		 z0 = svbic_z (p0, z1, x0))
+
+/*
+** bic_h0_s16_z_tied1:
+**	mov	(z[0-9]+\.h), h0
+**	movprfx	z1\.h, p0/z, z1\.h
+**	bic	z1\.h, p0/m, z1\.h, \1
+**	ret
+*/
+TEST_UNIFORM_ZS (bic_h0_s16_z_tied1, svint16_t, int16_t,
+		 z1 = svbic_n_s16_z (p0, z1, d0),
+		 z1 = svbic_z (p0, z1, d0))
+
+/*
+** bic_h0_s16_z_untied:
+**	mov	(z[0-9]+\.h), h0
+**	movprfx	z1\.h, p0/z, z2\.h
+**	bic	z1\.h, p0/m, z1\.h, \1
+**	ret
+*/
+TEST_UNIFORM_ZS (bic_h0_s16_z_untied, svint16_t, int16_t,
+		 z1 = svbic_n_s16_z (p0, z2, d0),
+		 z1 = svbic_z (p0, z2, d0))
+
+/*
+** bic_s16_x_tied1:
+**	bic	z0\.d, z0\.d, z1\.d
+**	ret
+*/
+TEST_UNIFORM_Z (bic_s16_x_tied1, svint16_t,
+		z0 = svbic_s16_x (p0, z0, z1),
+		z0 = svbic_x (p0, z0, z1))
+
+/*
+** bic_s16_x_tied2:
+**	bic	z1\.d, z0\.d, z1\.d
+**	ret
+*/
+TEST_UNIFORM_Z (bic_s16_x_tied2, svint16_t,
+		z1 = svbic_s16_x (p0, z0, z1),
+		z1 = svbic_x (p0, z0, z1))
+
+/*
+** bic_s16_x_untied:
+**	bic	z2\.d, z0\.d, z1\.d
+**	ret
+*/
+TEST_UNIFORM_Z (bic_s16_x_untied, svint16_t,
+		z2 = svbic_s16_x (p0, z0, z1),
+		z2 = svbic_x (p0, z0, z1))
+
+/*
+** bic_w0_s16_x_tied1:
+**	mov	(z[0-9]+)\.h, w0
+**	bic	z0\.d, z0\.d, \1\.d
+**	ret
+*/
+TEST_UNIFORM_ZS (bic_w0_s16_x_tied1, svint16_t, int16_t,
+		 z0 = svbic_n_s16_x (p0, z0, x0),
+		 z0 = svbic_x (p0, z0, x0))
+
+/*
+** bic_w0_s16_x_untied:
+**	mov	z1\.h, w0
+**	bic	z1\.d, z0\.d, z1\.d
+**	ret
+*/
+TEST_UNIFORM_ZS (bic_w0_s16_x_untied, svint16_t, int16_t,
+		 z1 = svbic_n_s16_x (p0, z0, x0),
+		 z1 = svbic_x (p0, z0, x0))
+
+/*
+** bic_h0_s16_x_tied1:
+**	mov	(z[0-9]+)\.h, h0
+**	bic	z1\.d, z1\.d, \1\.d
+**	ret
+*/
+TEST_UNIFORM_ZS (bic_h0_s16_x_tied1, svint16_t, int16_t,
+		 z1 = svbic_n_s16_x (p0, z1, d0),
+		 z1 = svbic_x (p0, z1, d0))
+
+/*
+** bic_h0_s16_x_untied:
+**	mov	(z[0-9]+)\.h, h0
+**	bic	z2\.d, z1\.d, \1\.d
+**	ret
+*/
+TEST_UNIFORM_ZS (bic_h0_s16_x_untied, svint16_t, int16_t,
+		 z2 = svbic_n_s16_x (p0, z1, d0),
+		 z2 = svbic_x (p0, z1, d0))
+
+/*
+** bic_1_s16_x_tied1:
+**	and	z0\.h, z0\.h, #0xfffe
+**	ret
+*/
+TEST_UNIFORM_Z (bic_1_s16_x_tied1, svint16_t,
+		z0 = svbic_n_s16_x (p0, z0, 1),
+		z0 = svbic_x (p0, z0, 1))
+
+/*
+** bic_1_s16_x_untied:
+**	movprfx	z0, z1
+**	and	z0\.h, z0\.h, #0xfffe
+**	ret
+*/
+TEST_UNIFORM_Z (bic_1_s16_x_untied, svint16_t,
+		z0 = svbic_n_s16_x (p0, z1, 1),
+		z0 = svbic_x (p0, z1, 1))
+
+/*
+** bic_127_s16_x:
+**	and	z0\.h, z0\.h, #0xff80
+**	ret
+*/
+TEST_UNIFORM_Z (bic_127_s16_x, svint16_t,
+		z0 = svbic_n_s16_x (p0, z0, 127),
+		z0 = svbic_x (p0, z0, 127))
+
+/*
+** bic_128_s16_x:
+**	and	z0\.h, z0\.h, #0xff7f
+**	ret
+*/
+TEST_UNIFORM_Z (bic_128_s16_x, svint16_t,
+		z0 = svbic_n_s16_x (p0, z0, 128),
+		z0 = svbic_x (p0, z0, 128))
+
+/*
+** bic_255_s16_x:
+**	and	z0\.h, z0\.h, #0xff00
+**	ret
+*/
+TEST_UNIFORM_Z (bic_255_s16_x, svint16_t,
+		z0 = svbic_n_s16_x (p0, z0, 255),
+		z0 = svbic_x (p0, z0, 255))
+
+/*
+** bic_256_s16_x:
+**	and	z0\.h, z0\.h, #0xfeff
+**	ret
+*/
+TEST_UNIFORM_Z (bic_256_s16_x, svint16_t,
+		z0 = svbic_n_s16_x (p0, z0, 256),
+		z0 = svbic_x (p0, z0, 256))
+
+/*
+** bic_257_s16_x:
+**	and	z0\.h, z0\.h, #0xfefe
+**	ret
+*/
+TEST_UNIFORM_Z (bic_257_s16_x, svint16_t,
+		z0 = svbic_n_s16_x (p0, z0, 257),
+		z0 = svbic_x (p0, z0, 257))
+
+/*
+** bic_512_s16_x:
+**	and	z0\.h, z0\.h, #0xfdff
+**	ret
+*/
+TEST_UNIFORM_Z (bic_512_s16_x, svint16_t,
+		z0 = svbic_n_s16_x (p0, z0, 512),
+		z0 = svbic_x (p0, z0, 512))
+
+/*
+** bic_65280_s16_x:
+**	and	z0\.h, z0\.h, #0xff
+**	ret
+*/
+TEST_UNIFORM_Z (bic_65280_s16_x, svint16_t,
+		z0 = svbic_n_s16_x (p0, z0, 0xff00),
+		z0 = svbic_x (p0, z0, 0xff00))
+
+/*
+** bic_m127_s16_x:
+**	and	z0\.h, z0\.h, #0x7e
+**	ret
+*/
+TEST_UNIFORM_Z (bic_m127_s16_x, svint16_t,
+		z0 = svbic_n_s16_x (p0, z0, -127),
+		z0 = svbic_x (p0, z0, -127))
+
+/*
+** bic_m128_s16_x:
+**	and	z0\.h, z0\.h, #0x7f
+**	ret
+*/
+TEST_UNIFORM_Z (bic_m128_s16_x, svint16_t,
+		z0 = svbic_n_s16_x (p0, z0, -128),
+		z0 = svbic_x (p0, z0, -128))
+
+/*
+** bic_m255_s16_x:
+**	and	z0\.h, z0\.h, #0xfe
+**	ret
+*/
+TEST_UNIFORM_Z (bic_m255_s16_x, svint16_t,
+		z0 = svbic_n_s16_x (p0, z0, -255),
+		z0 = svbic_x (p0, z0, -255))
+
+/*
+** bic_m256_s16_x:
+**	and	z0\.h, z0\.h, #0xff
+**	ret
+*/
+TEST_UNIFORM_Z (bic_m256_s16_x, svint16_t,
+		z0 = svbic_n_s16_x (p0, z0, -256),
+		z0 = svbic_x (p0, z0, -256))
+
+/*
+** bic_m257_s16_x:
+**	and	z0\.h, z0\.h, #0x100
+**	ret
+*/
+TEST_UNIFORM_Z (bic_m257_s16_x, svint16_t,
+		z0 = svbic_n_s16_x (p0, z0, -257),
+		z0 = svbic_x (p0, z0, -257))
+
+/*
+** bic_m512_s16_x:
+**	and	z0\.h, z0\.h, #0x1ff
+**	ret
+*/
+TEST_UNIFORM_Z (bic_m512_s16_x, svint16_t,
+		z0 = svbic_n_s16_x (p0, z0, -512),
+		z0 = svbic_x (p0, z0, -512))
+
+/*
+** bic_m32768_s16_x:
+**	and	z0\.h, z0\.h, #0x7fff
+**	ret
+*/
+TEST_UNIFORM_Z (bic_m32768_s16_x, svint16_t,
+		z0 = svbic_n_s16_x (p0, z0, -0x8000),
+		z0 = svbic_x (p0, z0, -0x8000))
+
+/*
+** bic_5_s16_x:
+**	mov	(z[0-9]+)\.h, #-6
+**	and	z0\.d, z0\.d, \1\.d
+**	ret
+*/
+TEST_UNIFORM_Z (bic_5_s16_x, svint16_t,
+		z0 = svbic_n_s16_x (p0, z0, 5),
+		z0 = svbic_x (p0, z0, 5))
diff --git a/gcc/testsuite/gcc.target/aarch64/sve-acle/asm/bic_s32.c b/gcc/testsuite/gcc.target/aarch64/sve-acle/asm/bic_s32.c
new file mode 100644
index 0000000..045b971
--- /dev/null
+++ b/gcc/testsuite/gcc.target/aarch64/sve-acle/asm/bic_s32.c
@@ -0,0 +1,394 @@ 
+/* { dg-do compile } */
+/* { dg-final { check-function-bodies "**" "" "-DCHECK_ASM" } } */
+
+#include "test_sve_acle.h"
+
+/*
+** bic_s32_m_tied1:
+**	bic	z0\.s, p0/m, z0\.s, z1\.s
+**	ret
+*/
+TEST_UNIFORM_Z (bic_s32_m_tied1, svint32_t,
+		z0 = svbic_s32_m (p0, z0, z1),
+		z0 = svbic_m (p0, z0, z1))
+
+/* Bad RA choice: no preferred output sequence.  */
+TEST_UNIFORM_Z (bic_s32_m_tied2, svint32_t,
+		z1 = svbic_s32_m (p0, z0, z1),
+		z1 = svbic_m (p0, z0, z1))
+
+/*
+** bic_s32_m_untied:
+**	movprfx	z0, z1
+**	bic	z0\.s, p0/m, z0\.s, z2\.s
+**	ret
+*/
+TEST_UNIFORM_Z (bic_s32_m_untied, svint32_t,
+		z0 = svbic_s32_m (p0, z1, z2),
+		z0 = svbic_m (p0, z1, z2))
+
+/*
+** bic_w0_s32_m_tied1:
+**	mov	(z[0-9]+\.s), w0
+**	bic	z0\.s, p0/m, z0\.s, \1
+**	ret
+*/
+TEST_UNIFORM_ZS (bic_w0_s32_m_tied1, svint32_t, int32_t,
+		 z0 = svbic_n_s32_m (p0, z0, x0),
+		 z0 = svbic_m (p0, z0, x0))
+
+/*
+** bic_w0_s32_m_untied:
+**	mov	(z[0-9]+\.s), w0
+**	movprfx	z0, z1
+**	bic	z0\.s, p0/m, z0\.s, \1
+**	ret
+*/
+TEST_UNIFORM_ZS (bic_w0_s32_m_untied, svint32_t, int32_t,
+		 z0 = svbic_n_s32_m (p0, z1, x0),
+		 z0 = svbic_m (p0, z1, x0))
+
+/*
+** bic_s0_s32_m_tied1:
+**	mov	(z[0-9]+\.s), s0
+**	bic	z1\.s, p0/m, z1\.s, \1
+**	ret
+*/
+TEST_UNIFORM_ZS (bic_s0_s32_m_tied1, svint32_t, int32_t,
+		 z1 = svbic_n_s32_m (p0, z1, d0),
+		 z1 = svbic_m (p0, z1, d0))
+
+/*
+** bic_s0_s32_m_untied:
+**	mov	(z[0-9]+\.s), s0
+**	movprfx	z1, z2
+**	bic	z1\.s, p0/m, z1\.s, \1
+**	ret
+*/
+TEST_UNIFORM_ZS (bic_s0_s32_m_untied, svint32_t, int32_t,
+		 z1 = svbic_n_s32_m (p0, z2, d0),
+		 z1 = svbic_m (p0, z2, d0))
+
+/*
+** bic_1_s32_m_tied1:
+**	mov	(z[0-9]+\.s), #-2
+**	and	z0\.s, p0/m, z0\.s, \1
+**	ret
+*/
+TEST_UNIFORM_Z (bic_1_s32_m_tied1, svint32_t,
+		z0 = svbic_n_s32_m (p0, z0, 1),
+		z0 = svbic_m (p0, z0, 1))
+
+/*
+** bic_1_s32_m_untied:
+**	mov	(z[0-9]+\.s), #-2
+**	movprfx	z0, z1
+**	and	z0\.s, p0/m, z0\.s, \1
+**	ret
+*/
+TEST_UNIFORM_Z (bic_1_s32_m_untied, svint32_t,
+		z0 = svbic_n_s32_m (p0, z1, 1),
+		z0 = svbic_m (p0, z1, 1))
+
+/*
+** bic_m2_s32_m:
+**	mov	(z[0-9]+\.s), #1
+**	and	z0\.s, p0/m, z0\.s, \1
+**	ret
+*/
+TEST_UNIFORM_Z (bic_m2_s32_m, svint32_t,
+		z0 = svbic_n_s32_m (p0, z0, -2),
+		z0 = svbic_m (p0, z0, -2))
+
+/*
+** bic_s32_z_tied1:
+**	movprfx	z0\.s, p0/z, z0\.s
+**	bic	z0\.s, p0/m, z0\.s, z1\.s
+**	ret
+*/
+TEST_UNIFORM_Z (bic_s32_z_tied1, svint32_t,
+		z0 = svbic_s32_z (p0, z0, z1),
+		z0 = svbic_z (p0, z0, z1))
+
+/*
+** bic_s32_z_tied2:
+**	mov	(z[0-9]+)\.d, z1\.d
+**	movprfx	z1\.s, p0/z, z0\.s
+**	bic	z1\.s, p0/m, z1\.s, \1\.s
+**	ret
+*/
+TEST_UNIFORM_Z (bic_s32_z_tied2, svint32_t,
+		z1 = svbic_s32_z (p0, z0, z1),
+		z1 = svbic_z (p0, z0, z1))
+
+/*
+** bic_s32_z_untied:
+**	movprfx	z0\.s, p0/z, z1\.s
+**	bic	z0\.s, p0/m, z0\.s, z2\.s
+**	ret
+*/
+TEST_UNIFORM_Z (bic_s32_z_untied, svint32_t,
+		z0 = svbic_s32_z (p0, z1, z2),
+		z0 = svbic_z (p0, z1, z2))
+
+/*
+** bic_w0_s32_z_tied1:
+**	mov	(z[0-9]+\.s), w0
+**	movprfx	z0\.s, p0/z, z0\.s
+**	bic	z0\.s, p0/m, z0\.s, \1
+**	ret
+*/
+TEST_UNIFORM_ZS (bic_w0_s32_z_tied1, svint32_t, int32_t,
+		 z0 = svbic_n_s32_z (p0, z0, x0),
+		 z0 = svbic_z (p0, z0, x0))
+
+/*
+** bic_w0_s32_z_untied:
+**	mov	(z[0-9]+\.s), w0
+**	movprfx	z0\.s, p0/z, z1\.s
+**	bic	z0\.s, p0/m, z0\.s, \1
+**	ret
+*/
+TEST_UNIFORM_ZS (bic_w0_s32_z_untied, svint32_t, int32_t,
+		 z0 = svbic_n_s32_z (p0, z1, x0),
+		 z0 = svbic_z (p0, z1, x0))
+
+/*
+** bic_s0_s32_z_tied1:
+**	mov	(z[0-9]+\.s), s0
+**	movprfx	z1\.s, p0/z, z1\.s
+**	bic	z1\.s, p0/m, z1\.s, \1
+**	ret
+*/
+TEST_UNIFORM_ZS (bic_s0_s32_z_tied1, svint32_t, int32_t,
+		 z1 = svbic_n_s32_z (p0, z1, d0),
+		 z1 = svbic_z (p0, z1, d0))
+
+/*
+** bic_s0_s32_z_untied:
+**	mov	(z[0-9]+\.s), s0
+**	movprfx	z1\.s, p0/z, z2\.s
+**	bic	z1\.s, p0/m, z1\.s, \1
+**	ret
+*/
+TEST_UNIFORM_ZS (bic_s0_s32_z_untied, svint32_t, int32_t,
+		 z1 = svbic_n_s32_z (p0, z2, d0),
+		 z1 = svbic_z (p0, z2, d0))
+
+/*
+** bic_s32_x_tied1:
+**	bic	z0\.d, z0\.d, z1\.d
+**	ret
+*/
+TEST_UNIFORM_Z (bic_s32_x_tied1, svint32_t,
+		z0 = svbic_s32_x (p0, z0, z1),
+		z0 = svbic_x (p0, z0, z1))
+
+/*
+** bic_s32_x_tied2:
+**	bic	z1\.d, z0\.d, z1\.d
+**	ret
+*/
+TEST_UNIFORM_Z (bic_s32_x_tied2, svint32_t,
+		z1 = svbic_s32_x (p0, z0, z1),
+		z1 = svbic_x (p0, z0, z1))
+
+/*
+** bic_s32_x_untied:
+**	bic	z2\.d, z0\.d, z1\.d
+**	ret
+*/
+TEST_UNIFORM_Z (bic_s32_x_untied, svint32_t,
+		z2 = svbic_s32_x (p0, z0, z1),
+		z2 = svbic_x (p0, z0, z1))
+
+/*
+** bic_w0_s32_x_tied1:
+**	mov	(z[0-9]+)\.s, w0
+**	bic	z0\.d, z0\.d, \1\.d
+**	ret
+*/
+TEST_UNIFORM_ZS (bic_w0_s32_x_tied1, svint32_t, int32_t,
+		 z0 = svbic_n_s32_x (p0, z0, x0),
+		 z0 = svbic_x (p0, z0, x0))
+
+/*
+** bic_w0_s32_x_untied:
+**	mov	z1\.s, w0
+**	bic	z1\.d, z0\.d, z1\.d
+**	ret
+*/
+TEST_UNIFORM_ZS (bic_w0_s32_x_untied, svint32_t, int32_t,
+		 z1 = svbic_n_s32_x (p0, z0, x0),
+		 z1 = svbic_x (p0, z0, x0))
+
+/*
+** bic_s0_s32_x_tied1:
+**	mov	(z[0-9]+)\.s, s0
+**	bic	z1\.d, z1\.d, \1\.d
+**	ret
+*/
+TEST_UNIFORM_ZS (bic_s0_s32_x_tied1, svint32_t, int32_t,
+		 z1 = svbic_n_s32_x (p0, z1, d0),
+		 z1 = svbic_x (p0, z1, d0))
+
+/*
+** bic_s0_s32_x_untied:
+**	mov	(z[0-9]+)\.s, s0
+**	bic	z2\.d, z1\.d, \1\.d
+**	ret
+*/
+TEST_UNIFORM_ZS (bic_s0_s32_x_untied, svint32_t, int32_t,
+		 z2 = svbic_n_s32_x (p0, z1, d0),
+		 z2 = svbic_x (p0, z1, d0))
+
+/*
+** bic_1_s32_x_tied1:
+**	and	z0\.s, z0\.s, #0xfffffffe
+**	ret
+*/
+TEST_UNIFORM_Z (bic_1_s32_x_tied1, svint32_t,
+		z0 = svbic_n_s32_x (p0, z0, 1),
+		z0 = svbic_x (p0, z0, 1))
+
+/*
+** bic_1_s32_x_untied:
+**	movprfx	z0, z1
+**	and	z0\.s, z0\.s, #0xfffffffe
+**	ret
+*/
+TEST_UNIFORM_Z (bic_1_s32_x_untied, svint32_t,
+		z0 = svbic_n_s32_x (p0, z1, 1),
+		z0 = svbic_x (p0, z1, 1))
+
+/*
+** bic_127_s32_x:
+**	and	z0\.s, z0\.s, #0xffffff80
+**	ret
+*/
+TEST_UNIFORM_Z (bic_127_s32_x, svint32_t,
+		z0 = svbic_n_s32_x (p0, z0, 127),
+		z0 = svbic_x (p0, z0, 127))
+
+/*
+** bic_128_s32_x:
+**	and	z0\.s, z0\.s, #0xffffff7f
+**	ret
+*/
+TEST_UNIFORM_Z (bic_128_s32_x, svint32_t,
+		z0 = svbic_n_s32_x (p0, z0, 128),
+		z0 = svbic_x (p0, z0, 128))
+
+/*
+** bic_255_s32_x:
+**	and	z0\.s, z0\.s, #0xffffff00
+**	ret
+*/
+TEST_UNIFORM_Z (bic_255_s32_x, svint32_t,
+		z0 = svbic_n_s32_x (p0, z0, 255),
+		z0 = svbic_x (p0, z0, 255))
+
+/*
+** bic_256_s32_x:
+**	and	z0\.s, z0\.s, #0xfffffeff
+**	ret
+*/
+TEST_UNIFORM_Z (bic_256_s32_x, svint32_t,
+		z0 = svbic_n_s32_x (p0, z0, 256),
+		z0 = svbic_x (p0, z0, 256))
+
+/* TODO: Bad code needs fixing.  */
+TEST_UNIFORM_Z (bic_257_s32_x, svint32_t,
+		z0 = svbic_n_s32_x (p0, z0, 257),
+		z0 = svbic_x (p0, z0, 257))
+
+/*
+** bic_512_s32_x:
+**	and	z0\.s, z0\.s, #0xfffffdff
+**	ret
+*/
+TEST_UNIFORM_Z (bic_512_s32_x, svint32_t,
+		z0 = svbic_n_s32_x (p0, z0, 512),
+		z0 = svbic_x (p0, z0, 512))
+
+/*
+** bic_65280_s32_x:
+**	and	z0\.s, z0\.s, #0xffff00ff
+**	ret
+*/
+TEST_UNIFORM_Z (bic_65280_s32_x, svint32_t,
+		z0 = svbic_n_s32_x (p0, z0, 0xff00),
+		z0 = svbic_x (p0, z0, 0xff00))
+
+/*
+** bic_m127_s32_x:
+**	and	z0\.s, z0\.s, #0x7e
+**	ret
+*/
+TEST_UNIFORM_Z (bic_m127_s32_x, svint32_t,
+		z0 = svbic_n_s32_x (p0, z0, -127),
+		z0 = svbic_x (p0, z0, -127))
+
+/*
+** bic_m128_s32_x:
+**	and	z0\.s, z0\.s, #0x7f
+**	ret
+*/
+TEST_UNIFORM_Z (bic_m128_s32_x, svint32_t,
+		z0 = svbic_n_s32_x (p0, z0, -128),
+		z0 = svbic_x (p0, z0, -128))
+
+/*
+** bic_m255_s32_x:
+**	and	z0\.s, z0\.s, #0xfe
+**	ret
+*/
+TEST_UNIFORM_Z (bic_m255_s32_x, svint32_t,
+		z0 = svbic_n_s32_x (p0, z0, -255),
+		z0 = svbic_x (p0, z0, -255))
+
+/*
+** bic_m256_s32_x:
+**	and	z0\.s, z0\.s, #0xff
+**	ret
+*/
+TEST_UNIFORM_Z (bic_m256_s32_x, svint32_t,
+		z0 = svbic_n_s32_x (p0, z0, -256),
+		z0 = svbic_x (p0, z0, -256))
+
+/*
+** bic_m257_s32_x:
+**	and	z0\.s, z0\.s, #0x100
+**	ret
+*/
+TEST_UNIFORM_Z (bic_m257_s32_x, svint32_t,
+		z0 = svbic_n_s32_x (p0, z0, -257),
+		z0 = svbic_x (p0, z0, -257))
+
+/*
+** bic_m512_s32_x:
+**	and	z0\.s, z0\.s, #0x1ff
+**	ret
+*/
+TEST_UNIFORM_Z (bic_m512_s32_x, svint32_t,
+		z0 = svbic_n_s32_x (p0, z0, -512),
+		z0 = svbic_x (p0, z0, -512))
+
+/*
+** bic_m32768_s32_x:
+**	and	z0\.s, z0\.s, #0x7fff
+**	ret
+*/
+TEST_UNIFORM_Z (bic_m32768_s32_x, svint32_t,
+		z0 = svbic_n_s32_x (p0, z0, -0x8000),
+		z0 = svbic_x (p0, z0, -0x8000))
+
+/*
+** bic_5_s32_x:
+**	mov	(z[0-9]+)\.s, #-6
+**	and	z0\.d, z0\.d, \1\.d
+**	ret
+*/
+TEST_UNIFORM_Z (bic_5_s32_x, svint32_t,
+		z0 = svbic_n_s32_x (p0, z0, 5),
+		z0 = svbic_x (p0, z0, 5))
diff --git a/gcc/testsuite/gcc.target/aarch64/sve-acle/asm/bic_s64.c b/gcc/testsuite/gcc.target/aarch64/sve-acle/asm/bic_s64.c
new file mode 100644
index 0000000..df555b2
--- /dev/null
+++ b/gcc/testsuite/gcc.target/aarch64/sve-acle/asm/bic_s64.c
@@ -0,0 +1,394 @@ 
+/* { dg-do compile } */
+/* { dg-final { check-function-bodies "**" "" "-DCHECK_ASM" } } */
+
+#include "test_sve_acle.h"
+
+/*
+** bic_s64_m_tied1:
+**	bic	z0\.d, p0/m, z0\.d, z1\.d
+**	ret
+*/
+TEST_UNIFORM_Z (bic_s64_m_tied1, svint64_t,
+		z0 = svbic_s64_m (p0, z0, z1),
+		z0 = svbic_m (p0, z0, z1))
+
+/* Bad RA choice: no preferred output sequence.  */
+TEST_UNIFORM_Z (bic_s64_m_tied2, svint64_t,
+		z1 = svbic_s64_m (p0, z0, z1),
+		z1 = svbic_m (p0, z0, z1))
+
+/*
+** bic_s64_m_untied:
+**	movprfx	z0, z1
+**	bic	z0\.d, p0/m, z0\.d, z2\.d
+**	ret
+*/
+TEST_UNIFORM_Z (bic_s64_m_untied, svint64_t,
+		z0 = svbic_s64_m (p0, z1, z2),
+		z0 = svbic_m (p0, z1, z2))
+
+/*
+** bic_x0_s64_m_tied1:
+**	mov	(z[0-9]+\.d), x0
+**	bic	z0\.d, p0/m, z0\.d, \1
+**	ret
+*/
+TEST_UNIFORM_ZS (bic_x0_s64_m_tied1, svint64_t, int64_t,
+		 z0 = svbic_n_s64_m (p0, z0, x0),
+		 z0 = svbic_m (p0, z0, x0))
+
+/*
+** bic_x0_s64_m_untied:
+**	mov	(z[0-9]+\.d), x0
+**	movprfx	z0, z1
+**	bic	z0\.d, p0/m, z0\.d, \1
+**	ret
+*/
+TEST_UNIFORM_ZS (bic_x0_s64_m_untied, svint64_t, int64_t,
+		 z0 = svbic_n_s64_m (p0, z1, x0),
+		 z0 = svbic_m (p0, z1, x0))
+
+/*
+** bic_d0_s64_m_tied1:
+**	mov	(z[0-9]+\.d), d0
+**	bic	z1\.d, p0/m, z1\.d, \1
+**	ret
+*/
+TEST_UNIFORM_ZS (bic_d0_s64_m_tied1, svint64_t, int64_t,
+		 z1 = svbic_n_s64_m (p0, z1, d0),
+		 z1 = svbic_m (p0, z1, d0))
+
+/*
+** bic_d0_s64_m_untied:
+**	mov	(z[0-9]+\.d), d0
+**	movprfx	z1, z2
+**	bic	z1\.d, p0/m, z1\.d, \1
+**	ret
+*/
+TEST_UNIFORM_ZS (bic_d0_s64_m_untied, svint64_t, int64_t,
+		 z1 = svbic_n_s64_m (p0, z2, d0),
+		 z1 = svbic_m (p0, z2, d0))
+
+/*
+** bic_1_s64_m_tied1:
+**	mov	(z[0-9]+\.d), #-2
+**	and	z0\.d, p0/m, z0\.d, \1
+**	ret
+*/
+TEST_UNIFORM_Z (bic_1_s64_m_tied1, svint64_t,
+		z0 = svbic_n_s64_m (p0, z0, 1),
+		z0 = svbic_m (p0, z0, 1))
+
+/*
+** bic_1_s64_m_untied:
+**	mov	(z[0-9]+\.d), #-2
+**	movprfx	z0, z1
+**	and	z0\.d, p0/m, z0\.d, \1
+**	ret
+*/
+TEST_UNIFORM_Z (bic_1_s64_m_untied, svint64_t,
+		z0 = svbic_n_s64_m (p0, z1, 1),
+		z0 = svbic_m (p0, z1, 1))
+
+/*
+** bic_m2_s64_m:
+**	mov	(z[0-9]+\.d), #1
+**	and	z0\.d, p0/m, z0\.d, \1
+**	ret
+*/
+TEST_UNIFORM_Z (bic_m2_s64_m, svint64_t,
+		z0 = svbic_n_s64_m (p0, z0, -2),
+		z0 = svbic_m (p0, z0, -2))
+
+/*
+** bic_s64_z_tied1:
+**	movprfx	z0\.d, p0/z, z0\.d
+**	bic	z0\.d, p0/m, z0\.d, z1\.d
+**	ret
+*/
+TEST_UNIFORM_Z (bic_s64_z_tied1, svint64_t,
+		z0 = svbic_s64_z (p0, z0, z1),
+		z0 = svbic_z (p0, z0, z1))
+
+/*
+** bic_s64_z_tied2:
+**	mov	(z[0-9]+\.d), z1\.d
+**	movprfx	z1\.d, p0/z, z0\.d
+**	bic	z1\.d, p0/m, z1\.d, \1
+**	ret
+*/
+TEST_UNIFORM_Z (bic_s64_z_tied2, svint64_t,
+		z1 = svbic_s64_z (p0, z0, z1),
+		z1 = svbic_z (p0, z0, z1))
+
+/*
+** bic_s64_z_untied:
+**	movprfx	z0\.d, p0/z, z1\.d
+**	bic	z0\.d, p0/m, z0\.d, z2\.d
+**	ret
+*/
+TEST_UNIFORM_Z (bic_s64_z_untied, svint64_t,
+		z0 = svbic_s64_z (p0, z1, z2),
+		z0 = svbic_z (p0, z1, z2))
+
+/*
+** bic_x0_s64_z_tied1:
+**	mov	(z[0-9]+\.d), x0
+**	movprfx	z0\.d, p0/z, z0\.d
+**	bic	z0\.d, p0/m, z0\.d, \1
+**	ret
+*/
+TEST_UNIFORM_ZS (bic_x0_s64_z_tied1, svint64_t, int64_t,
+		 z0 = svbic_n_s64_z (p0, z0, x0),
+		 z0 = svbic_z (p0, z0, x0))
+
+/*
+** bic_x0_s64_z_untied:
+**	mov	(z[0-9]+\.d), x0
+**	movprfx	z0\.d, p0/z, z1\.d
+**	bic	z0\.d, p0/m, z0\.d, \1
+**	ret
+*/
+TEST_UNIFORM_ZS (bic_x0_s64_z_untied, svint64_t, int64_t,
+		 z0 = svbic_n_s64_z (p0, z1, x0),
+		 z0 = svbic_z (p0, z1, x0))
+
+/*
+** bic_d0_s64_z_tied1:
+**	mov	(z[0-9]+\.d), d0
+**	movprfx	z1\.d, p0/z, z1\.d
+**	bic	z1\.d, p0/m, z1\.d, \1
+**	ret
+*/
+TEST_UNIFORM_ZS (bic_d0_s64_z_tied1, svint64_t, int64_t,
+		 z1 = svbic_n_s64_z (p0, z1, d0),
+		 z1 = svbic_z (p0, z1, d0))
+
+/*
+** bic_d0_s64_z_untied:
+**	mov	(z[0-9]+\.d), d0
+**	movprfx	z1\.d, p0/z, z2\.d
+**	bic	z1\.d, p0/m, z1\.d, \1
+**	ret
+*/
+TEST_UNIFORM_ZS (bic_d0_s64_z_untied, svint64_t, int64_t,
+		 z1 = svbic_n_s64_z (p0, z2, d0),
+		 z1 = svbic_z (p0, z2, d0))
+
+/*
+** bic_s64_x_tied1:
+**	bic	z0\.d, z0\.d, z1\.d
+**	ret
+*/
+TEST_UNIFORM_Z (bic_s64_x_tied1, svint64_t,
+		z0 = svbic_s64_x (p0, z0, z1),
+		z0 = svbic_x (p0, z0, z1))
+
+/*
+** bic_s64_x_tied2:
+**	bic	z1\.d, z0\.d, z1\.d
+**	ret
+*/
+TEST_UNIFORM_Z (bic_s64_x_tied2, svint64_t,
+		z1 = svbic_s64_x (p0, z0, z1),
+		z1 = svbic_x (p0, z0, z1))
+
+/*
+** bic_s64_x_untied:
+**	bic	z2\.d, z0\.d, z1\.d
+**	ret
+*/
+TEST_UNIFORM_Z (bic_s64_x_untied, svint64_t,
+		z2 = svbic_s64_x (p0, z0, z1),
+		z2 = svbic_x (p0, z0, z1))
+
+/*
+** bic_x0_s64_x_tied1:
+**	mov	(z[0-9]+)\.d, x0
+**	bic	z0\.d, z0\.d, \1\.d
+**	ret
+*/
+TEST_UNIFORM_ZS (bic_x0_s64_x_tied1, svint64_t, int64_t,
+		 z0 = svbic_n_s64_x (p0, z0, x0),
+		 z0 = svbic_x (p0, z0, x0))
+
+/*
+** bic_x0_s64_x_untied:
+**	mov	z1\.d, x0
+**	bic	z1\.d, z0\.d, z1\.d
+**	ret
+*/
+TEST_UNIFORM_ZS (bic_x0_s64_x_untied, svint64_t, int64_t,
+		 z1 = svbic_n_s64_x (p0, z0, x0),
+		 z1 = svbic_x (p0, z0, x0))
+
+/*
+** bic_d0_s64_x_tied1:
+**	mov	(z[0-9]+)\.d, d0
+**	bic	z1\.d, z1\.d, \1\.d
+**	ret
+*/
+TEST_UNIFORM_ZS (bic_d0_s64_x_tied1, svint64_t, int64_t,
+		 z1 = svbic_n_s64_x (p0, z1, d0),
+		 z1 = svbic_x (p0, z1, d0))
+
+/*
+** bic_d0_s64_x_untied:
+**	mov	(z[0-9]+)\.d, d0
+**	bic	z2\.d, z1\.d, \1\.d
+**	ret
+*/
+TEST_UNIFORM_ZS (bic_d0_s64_x_untied, svint64_t, int64_t,
+		 z2 = svbic_n_s64_x (p0, z1, d0),
+		 z2 = svbic_x (p0, z1, d0))
+
+/*
+** bic_1_s64_x_tied1:
+**	and	z0\.d, z0\.d, #0xfffffffffffffffe
+**	ret
+*/
+TEST_UNIFORM_Z (bic_1_s64_x_tied1, svint64_t,
+		z0 = svbic_n_s64_x (p0, z0, 1),
+		z0 = svbic_x (p0, z0, 1))
+
+/*
+** bic_1_s64_x_untied:
+**	movprfx	z0, z1
+**	and	z0\.d, z0\.d, #0xfffffffffffffffe
+**	ret
+*/
+TEST_UNIFORM_Z (bic_1_s64_x_untied, svint64_t,
+		z0 = svbic_n_s64_x (p0, z1, 1),
+		z0 = svbic_x (p0, z1, 1))
+
+/*
+** bic_127_s64_x:
+**	and	z0\.d, z0\.d, #0xffffffffffffff80
+**	ret
+*/
+TEST_UNIFORM_Z (bic_127_s64_x, svint64_t,
+		z0 = svbic_n_s64_x (p0, z0, 127),
+		z0 = svbic_x (p0, z0, 127))
+
+/*
+** bic_128_s64_x:
+**	and	z0\.d, z0\.d, #0xffffffffffffff7f
+**	ret
+*/
+TEST_UNIFORM_Z (bic_128_s64_x, svint64_t,
+		z0 = svbic_n_s64_x (p0, z0, 128),
+		z0 = svbic_x (p0, z0, 128))
+
+/*
+** bic_255_s64_x:
+**	and	z0\.d, z0\.d, #0xffffffffffffff00
+**	ret
+*/
+TEST_UNIFORM_Z (bic_255_s64_x, svint64_t,
+		z0 = svbic_n_s64_x (p0, z0, 255),
+		z0 = svbic_x (p0, z0, 255))
+
+/*
+** bic_256_s64_x:
+**	and	z0\.d, z0\.d, #0xfffffffffffffeff
+**	ret
+*/
+TEST_UNIFORM_Z (bic_256_s64_x, svint64_t,
+		z0 = svbic_n_s64_x (p0, z0, 256),
+		z0 = svbic_x (p0, z0, 256))
+
+/* TODO: Bad code needs fixing.  */
+TEST_UNIFORM_Z (bic_257_s64_x, svint64_t,
+		z0 = svbic_n_s64_x (p0, z0, 257),
+		z0 = svbic_x (p0, z0, 257))
+
+/*
+** bic_512_s64_x:
+**	and	z0\.d, z0\.d, #0xfffffffffffffdff
+**	ret
+*/
+TEST_UNIFORM_Z (bic_512_s64_x, svint64_t,
+		z0 = svbic_n_s64_x (p0, z0, 512),
+		z0 = svbic_x (p0, z0, 512))
+
+/*
+** bic_65280_s64_x:
+**	and	z0\.d, z0\.d, #0xffffffffffff00ff
+**	ret
+*/
+TEST_UNIFORM_Z (bic_65280_s64_x, svint64_t,
+		z0 = svbic_n_s64_x (p0, z0, 0xff00),
+		z0 = svbic_x (p0, z0, 0xff00))
+
+/*
+** bic_m127_s64_x:
+**	and	z0\.d, z0\.d, #0x7e
+**	ret
+*/
+TEST_UNIFORM_Z (bic_m127_s64_x, svint64_t,
+		z0 = svbic_n_s64_x (p0, z0, -127),
+		z0 = svbic_x (p0, z0, -127))
+
+/*
+** bic_m128_s64_x:
+**	and	z0\.d, z0\.d, #0x7f
+**	ret
+*/
+TEST_UNIFORM_Z (bic_m128_s64_x, svint64_t,
+		z0 = svbic_n_s64_x (p0, z0, -128),
+		z0 = svbic_x (p0, z0, -128))
+
+/*
+** bic_m255_s64_x:
+**	and	z0\.d, z0\.d, #0xfe
+**	ret
+*/
+TEST_UNIFORM_Z (bic_m255_s64_x, svint64_t,
+		z0 = svbic_n_s64_x (p0, z0, -255),
+		z0 = svbic_x (p0, z0, -255))
+
+/*
+** bic_m256_s64_x:
+**	and	z0\.d, z0\.d, #0xff
+**	ret
+*/
+TEST_UNIFORM_Z (bic_m256_s64_x, svint64_t,
+		z0 = svbic_n_s64_x (p0, z0, -256),
+		z0 = svbic_x (p0, z0, -256))
+
+/*
+** bic_m257_s64_x:
+**	and	z0\.d, z0\.d, #0x100
+**	ret
+*/
+TEST_UNIFORM_Z (bic_m257_s64_x, svint64_t,
+		z0 = svbic_n_s64_x (p0, z0, -257),
+		z0 = svbic_x (p0, z0, -257))
+
+/*
+** bic_m512_s64_x:
+**	and	z0\.d, z0\.d, #0x1ff
+**	ret
+*/
+TEST_UNIFORM_Z (bic_m512_s64_x, svint64_t,
+		z0 = svbic_n_s64_x (p0, z0, -512),
+		z0 = svbic_x (p0, z0, -512))
+
+/*
+** bic_m32768_s64_x:
+**	and	z0\.d, z0\.d, #0x7fff
+**	ret
+*/
+TEST_UNIFORM_Z (bic_m32768_s64_x, svint64_t,
+		z0 = svbic_n_s64_x (p0, z0, -0x8000),
+		z0 = svbic_x (p0, z0, -0x8000))
+
+/*
+** bic_5_s64_x:
+**	mov	(z[0-9]+)\.d, #-6
+**	and	z0\.d, z0\.d, \1\.d
+**	ret
+*/
+TEST_UNIFORM_Z (bic_5_s64_x, svint64_t,
+		z0 = svbic_n_s64_x (p0, z0, 5),
+		z0 = svbic_x (p0, z0, 5))
diff --git a/gcc/testsuite/gcc.target/aarch64/sve-acle/asm/bic_s8.c b/gcc/testsuite/gcc.target/aarch64/sve-acle/asm/bic_s8.c
new file mode 100644
index 0000000..185dba7
--- /dev/null
+++ b/gcc/testsuite/gcc.target/aarch64/sve-acle/asm/bic_s8.c
@@ -0,0 +1,317 @@ 
+/* { dg-do compile } */
+/* { dg-final { check-function-bodies "**" "" "-DCHECK_ASM" } } */
+
+#include "test_sve_acle.h"
+
+/*
+** bic_s8_m_tied1:
+**	bic	z0\.b, p0/m, z0\.b, z1\.b
+**	ret
+*/
+TEST_UNIFORM_Z (bic_s8_m_tied1, svint8_t,
+		z0 = svbic_s8_m (p0, z0, z1),
+		z0 = svbic_m (p0, z0, z1))
+
+/* Bad RA choice: no preferred output sequence.  */
+TEST_UNIFORM_Z (bic_s8_m_tied2, svint8_t,
+		z1 = svbic_s8_m (p0, z0, z1),
+		z1 = svbic_m (p0, z0, z1))
+
+/*
+** bic_s8_m_untied:
+**	movprfx	z0, z1
+**	bic	z0\.b, p0/m, z0\.b, z2\.b
+**	ret
+*/
+TEST_UNIFORM_Z (bic_s8_m_untied, svint8_t,
+		z0 = svbic_s8_m (p0, z1, z2),
+		z0 = svbic_m (p0, z1, z2))
+
+/*
+** bic_w0_s8_m_tied1:
+**	mov	(z[0-9]+\.b), w0
+**	bic	z0\.b, p0/m, z0\.b, \1
+**	ret
+*/
+TEST_UNIFORM_ZS (bic_w0_s8_m_tied1, svint8_t, int8_t,
+		 z0 = svbic_n_s8_m (p0, z0, x0),
+		 z0 = svbic_m (p0, z0, x0))
+
+/*
+** bic_w0_s8_m_untied:
+**	mov	(z[0-9]+\.b), w0
+**	movprfx	z0, z1
+**	bic	z0\.b, p0/m, z0\.b, \1
+**	ret
+*/
+TEST_UNIFORM_ZS (bic_w0_s8_m_untied, svint8_t, int8_t,
+		 z0 = svbic_n_s8_m (p0, z1, x0),
+		 z0 = svbic_m (p0, z1, x0))
+
+/*
+** bic_b0_s8_m_tied1:
+**	mov	(z[0-9]+\.b), b0
+**	bic	z1\.b, p0/m, z1\.b, \1
+**	ret
+*/
+TEST_UNIFORM_ZS (bic_b0_s8_m_tied1, svint8_t, int8_t,
+		 z1 = svbic_n_s8_m (p0, z1, d0),
+		 z1 = svbic_m (p0, z1, d0))
+
+/*
+** bic_b0_s8_m_untied:
+**	mov	(z[0-9]+\.b), b0
+**	movprfx	z1, z2
+**	bic	z1\.b, p0/m, z1\.b, \1
+**	ret
+*/
+TEST_UNIFORM_ZS (bic_b0_s8_m_untied, svint8_t, int8_t,
+		 z1 = svbic_n_s8_m (p0, z2, d0),
+		 z1 = svbic_m (p0, z2, d0))
+
+/*
+** bic_1_s8_m_tied1:
+**	mov	(z[0-9]+\.b), #-2
+**	and	z0\.b, p0/m, z0\.b, \1
+**	ret
+*/
+TEST_UNIFORM_Z (bic_1_s8_m_tied1, svint8_t,
+		z0 = svbic_n_s8_m (p0, z0, 1),
+		z0 = svbic_m (p0, z0, 1))
+
+/*
+** bic_1_s8_m_untied:
+**	mov	(z[0-9]+\.b), #-2
+**	movprfx	z0, z1
+**	and	z0\.b, p0/m, z0\.b, \1
+**	ret
+*/
+TEST_UNIFORM_Z (bic_1_s8_m_untied, svint8_t,
+		z0 = svbic_n_s8_m (p0, z1, 1),
+		z0 = svbic_m (p0, z1, 1))
+
+/*
+** bic_m2_s8_m:
+**	mov	(z[0-9]+\.b), #1
+**	and	z0\.b, p0/m, z0\.b, \1
+**	ret
+*/
+TEST_UNIFORM_Z (bic_m2_s8_m, svint8_t,
+		z0 = svbic_n_s8_m (p0, z0, -2),
+		z0 = svbic_m (p0, z0, -2))
+
+/*
+** bic_s8_z_tied1:
+**	movprfx	z0\.b, p0/z, z0\.b
+**	bic	z0\.b, p0/m, z0\.b, z1\.b
+**	ret
+*/
+TEST_UNIFORM_Z (bic_s8_z_tied1, svint8_t,
+		z0 = svbic_s8_z (p0, z0, z1),
+		z0 = svbic_z (p0, z0, z1))
+
+/*
+** bic_s8_z_tied2:
+**	mov	(z[0-9]+)\.d, z1\.d
+**	movprfx	z1\.b, p0/z, z0\.b
+**	bic	z1\.b, p0/m, z1\.b, \1\.b
+**	ret
+*/
+TEST_UNIFORM_Z (bic_s8_z_tied2, svint8_t,
+		z1 = svbic_s8_z (p0, z0, z1),
+		z1 = svbic_z (p0, z0, z1))
+
+/*
+** bic_s8_z_untied:
+**	movprfx	z0\.b, p0/z, z1\.b
+**	bic	z0\.b, p0/m, z0\.b, z2\.b
+**	ret
+*/
+TEST_UNIFORM_Z (bic_s8_z_untied, svint8_t,
+		z0 = svbic_s8_z (p0, z1, z2),
+		z0 = svbic_z (p0, z1, z2))
+
+/*
+** bic_w0_s8_z_tied1:
+**	mov	(z[0-9]+\.b), w0
+**	movprfx	z0\.b, p0/z, z0\.b
+**	bic	z0\.b, p0/m, z0\.b, \1
+**	ret
+*/
+TEST_UNIFORM_ZS (bic_w0_s8_z_tied1, svint8_t, int8_t,
+		 z0 = svbic_n_s8_z (p0, z0, x0),
+		 z0 = svbic_z (p0, z0, x0))
+
+/*
+** bic_w0_s8_z_untied:
+**	mov	(z[0-9]+\.b), w0
+**	movprfx	z0\.b, p0/z, z1\.b
+**	bic	z0\.b, p0/m, z0\.b, \1
+**	ret
+*/
+TEST_UNIFORM_ZS (bic_w0_s8_z_untied, svint8_t, int8_t,
+		 z0 = svbic_n_s8_z (p0, z1, x0),
+		 z0 = svbic_z (p0, z1, x0))
+
+/*
+** bic_b0_s8_z_tied1:
+**	mov	(z[0-9]+\.b), b0
+**	movprfx	z1\.b, p0/z, z1\.b
+**	bic	z1\.b, p0/m, z1\.b, \1
+**	ret
+*/
+TEST_UNIFORM_ZS (bic_b0_s8_z_tied1, svint8_t, int8_t,
+		 z1 = svbic_n_s8_z (p0, z1, d0),
+		 z1 = svbic_z (p0, z1, d0))
+
+/*
+** bic_b0_s8_z_untied:
+**	mov	(z[0-9]+\.b), b0
+**	movprfx	z1\.b, p0/z, z2\.b
+**	bic	z1\.b, p0/m, z1\.b, \1
+**	ret
+*/
+TEST_UNIFORM_ZS (bic_b0_s8_z_untied, svint8_t, int8_t,
+		 z1 = svbic_n_s8_z (p0, z2, d0),
+		 z1 = svbic_z (p0, z2, d0))
+
+/*
+** bic_s8_x_tied1:
+**	bic	z0\.d, z0\.d, z1\.d
+**	ret
+*/
+TEST_UNIFORM_Z (bic_s8_x_tied1, svint8_t,
+		z0 = svbic_s8_x (p0, z0, z1),
+		z0 = svbic_x (p0, z0, z1))
+
+/*
+** bic_s8_x_tied2:
+**	bic	z1\.d, z0\.d, z1\.d
+**	ret
+*/
+TEST_UNIFORM_Z (bic_s8_x_tied2, svint8_t,
+		z1 = svbic_s8_x (p0, z0, z1),
+		z1 = svbic_x (p0, z0, z1))
+
+/*
+** bic_s8_x_untied:
+**	bic	z2\.d, z0\.d, z1\.d
+**	ret
+*/
+TEST_UNIFORM_Z (bic_s8_x_untied, svint8_t,
+		z2 = svbic_s8_x (p0, z0, z1),
+		z2 = svbic_x (p0, z0, z1))
+
+/*
+** bic_w0_s8_x_tied1:
+**	mov	(z[0-9]+)\.b, w0
+**	bic	z0\.d, z0\.d, \1\.d
+**	ret
+*/
+TEST_UNIFORM_ZS (bic_w0_s8_x_tied1, svint8_t, int8_t,
+		 z0 = svbic_n_s8_x (p0, z0, x0),
+		 z0 = svbic_x (p0, z0, x0))
+
+/*
+** bic_w0_s8_x_untied:
+**	mov	z1\.b, w0
+**	bic	z1\.d, z0\.d, z1\.d
+**	ret
+*/
+TEST_UNIFORM_ZS (bic_w0_s8_x_untied, svint8_t, int8_t,
+		 z1 = svbic_n_s8_x (p0, z0, x0),
+		 z1 = svbic_x (p0, z0, x0))
+
+/*
+** bic_b0_s8_x_tied1:
+**	mov	(z[0-9]+)\.b, b0
+**	bic	z1\.d, z1\.d, \1\.d
+**	ret
+*/
+TEST_UNIFORM_ZS (bic_b0_s8_x_tied1, svint8_t, int8_t,
+		 z1 = svbic_n_s8_x (p0, z1, d0),
+		 z1 = svbic_x (p0, z1, d0))
+
+/*
+** bic_b0_s8_x_untied:
+**	mov	(z[0-9]+)\.b, b0
+**	bic	z2\.d, z1\.d, \1\.d
+**	ret
+*/
+TEST_UNIFORM_ZS (bic_b0_s8_x_untied, svint8_t, int8_t,
+		 z2 = svbic_n_s8_x (p0, z1, d0),
+		 z2 = svbic_x (p0, z1, d0))
+
+/*
+** bic_1_s8_x_tied1:
+**	and	z0\.b, z0\.b, #0xfe
+**	ret
+*/
+TEST_UNIFORM_Z (bic_1_s8_x_tied1, svint8_t,
+		z0 = svbic_n_s8_x (p0, z0, 1),
+		z0 = svbic_x (p0, z0, 1))
+
+/*
+** bic_1_s8_x_untied:
+**	movprfx	z0, z1
+**	and	z0\.b, z0\.b, #0xfe
+**	ret
+*/
+TEST_UNIFORM_Z (bic_1_s8_x_untied, svint8_t,
+		z0 = svbic_n_s8_x (p0, z1, 1),
+		z0 = svbic_x (p0, z1, 1))
+
+/*
+** bic_127_s8_x:
+**	and	z0\.b, z0\.b, #0x80
+**	ret
+*/
+TEST_UNIFORM_Z (bic_127_s8_x, svint8_t,
+		z0 = svbic_n_s8_x (p0, z0, 127),
+		z0 = svbic_x (p0, z0, 127))
+
+/*
+** bic_128_s8_x:
+**	and	z0\.b, z0\.b, #0x7f
+**	ret
+*/
+TEST_UNIFORM_Z (bic_128_s8_x, svint8_t,
+		z0 = svbic_n_s8_x (p0, z0, 128),
+		z0 = svbic_x (p0, z0, 128))
+
+/*
+** bic_255_s8_x:
+**	mov	z0\.b, #0
+**	ret
+*/
+TEST_UNIFORM_Z (bic_255_s8_x, svint8_t,
+		z0 = svbic_n_s8_x (p0, z0, 255),
+		z0 = svbic_x (p0, z0, 255))
+
+/*
+** bic_m127_s8_x:
+**	and	z0\.b, z0\.b, #0x7e
+**	ret
+*/
+TEST_UNIFORM_Z (bic_m127_s8_x, svint8_t,
+		z0 = svbic_n_s8_x (p0, z0, -127),
+		z0 = svbic_x (p0, z0, -127))
+
+/*
+** bic_m128_s8_x:
+**	and	z0\.b, z0\.b, #0x7f
+**	ret
+*/
+TEST_UNIFORM_Z (bic_m128_s8_x, svint8_t,
+		z0 = svbic_n_s8_x (p0, z0, -128),
+		z0 = svbic_x (p0, z0, -128))
+
+/*
+** bic_5_s8_x:
+**	mov	(z[0-9]+)\.b, #-6
+**	and	z0\.d, z0\.d, \1\.d
+**	ret
+*/
+TEST_UNIFORM_Z (bic_5_s8_x, svint8_t,
+		z0 = svbic_n_s8_x (p0, z0, 5),
+		z0 = svbic_x (p0, z0, 5))
diff --git a/gcc/testsuite/gcc.target/aarch64/sve-acle/asm/bic_u16.c b/gcc/testsuite/gcc.target/aarch64/sve-acle/asm/bic_u16.c
new file mode 100644
index 0000000..4587da8
--- /dev/null
+++ b/gcc/testsuite/gcc.target/aarch64/sve-acle/asm/bic_u16.c
@@ -0,0 +1,398 @@ 
+/* { dg-do compile } */
+/* { dg-final { check-function-bodies "**" "" "-DCHECK_ASM" } } */
+
+#include "test_sve_acle.h"
+
+/*
+** bic_u16_m_tied1:
+**	bic	z0\.h, p0/m, z0\.h, z1\.h
+**	ret
+*/
+TEST_UNIFORM_Z (bic_u16_m_tied1, svuint16_t,
+		z0 = svbic_u16_m (p0, z0, z1),
+		z0 = svbic_m (p0, z0, z1))
+
+/* Bad RA choice: no preferred output sequence.  */
+TEST_UNIFORM_Z (bic_u16_m_tied2, svuint16_t,
+		z1 = svbic_u16_m (p0, z0, z1),
+		z1 = svbic_m (p0, z0, z1))
+
+/*
+** bic_u16_m_untied:
+**	movprfx	z0, z1
+**	bic	z0\.h, p0/m, z0\.h, z2\.h
+**	ret
+*/
+TEST_UNIFORM_Z (bic_u16_m_untied, svuint16_t,
+		z0 = svbic_u16_m (p0, z1, z2),
+		z0 = svbic_m (p0, z1, z2))
+
+/*
+** bic_w0_u16_m_tied1:
+**	mov	(z[0-9]+\.h), w0
+**	bic	z0\.h, p0/m, z0\.h, \1
+**	ret
+*/
+TEST_UNIFORM_ZS (bic_w0_u16_m_tied1, svuint16_t, uint16_t,
+		 z0 = svbic_n_u16_m (p0, z0, x0),
+		 z0 = svbic_m (p0, z0, x0))
+
+/*
+** bic_w0_u16_m_untied:
+**	mov	(z[0-9]+\.h), w0
+**	movprfx	z0, z1
+**	bic	z0\.h, p0/m, z0\.h, \1
+**	ret
+*/
+TEST_UNIFORM_ZS (bic_w0_u16_m_untied, svuint16_t, uint16_t,
+		 z0 = svbic_n_u16_m (p0, z1, x0),
+		 z0 = svbic_m (p0, z1, x0))
+
+/*
+** bic_h0_u16_m_tied1:
+**	mov	(z[0-9]+\.h), h0
+**	bic	z1\.h, p0/m, z1\.h, \1
+**	ret
+*/
+TEST_UNIFORM_ZS (bic_h0_u16_m_tied1, svuint16_t, uint16_t,
+		 z1 = svbic_n_u16_m (p0, z1, d0),
+		 z1 = svbic_m (p0, z1, d0))
+
+/*
+** bic_h0_u16_m_untied:
+**	mov	(z[0-9]+\.h), h0
+**	movprfx	z1, z2
+**	bic	z1\.h, p0/m, z1\.h, \1
+**	ret
+*/
+TEST_UNIFORM_ZS (bic_h0_u16_m_untied, svuint16_t, uint16_t,
+		 z1 = svbic_n_u16_m (p0, z2, d0),
+		 z1 = svbic_m (p0, z2, d0))
+
+/*
+** bic_1_u16_m_tied1:
+**	mov	(z[0-9]+\.h), #-2
+**	and	z0\.h, p0/m, z0\.h, \1
+**	ret
+*/
+TEST_UNIFORM_Z (bic_1_u16_m_tied1, svuint16_t,
+		z0 = svbic_n_u16_m (p0, z0, 1),
+		z0 = svbic_m (p0, z0, 1))
+
+/*
+** bic_1_u16_m_untied:
+**	mov	(z[0-9]+\.h), #-2
+**	movprfx	z0, z1
+**	and	z0\.h, p0/m, z0\.h, \1
+**	ret
+*/
+TEST_UNIFORM_Z (bic_1_u16_m_untied, svuint16_t,
+		z0 = svbic_n_u16_m (p0, z1, 1),
+		z0 = svbic_m (p0, z1, 1))
+
+/*
+** bic_m2_u16_m:
+**	mov	(z[0-9]+\.h), #1
+**	and	z0\.h, p0/m, z0\.h, \1
+**	ret
+*/
+TEST_UNIFORM_Z (bic_m2_u16_m, svuint16_t,
+		z0 = svbic_n_u16_m (p0, z0, -2),
+		z0 = svbic_m (p0, z0, -2))
+
+/*
+** bic_u16_z_tied1:
+**	movprfx	z0\.h, p0/z, z0\.h
+**	bic	z0\.h, p0/m, z0\.h, z1\.h
+**	ret
+*/
+TEST_UNIFORM_Z (bic_u16_z_tied1, svuint16_t,
+		z0 = svbic_u16_z (p0, z0, z1),
+		z0 = svbic_z (p0, z0, z1))
+
+/*
+** bic_u16_z_tied2:
+**	mov	(z[0-9]+)\.d, z1\.d
+**	movprfx	z1\.h, p0/z, z0\.h
+**	bic	z1\.h, p0/m, z1\.h, \1\.h
+**	ret
+*/
+TEST_UNIFORM_Z (bic_u16_z_tied2, svuint16_t,
+		z1 = svbic_u16_z (p0, z0, z1),
+		z1 = svbic_z (p0, z0, z1))
+
+/*
+** bic_u16_z_untied:
+**	movprfx	z0\.h, p0/z, z1\.h
+**	bic	z0\.h, p0/m, z0\.h, z2\.h
+**	ret
+*/
+TEST_UNIFORM_Z (bic_u16_z_untied, svuint16_t,
+		z0 = svbic_u16_z (p0, z1, z2),
+		z0 = svbic_z (p0, z1, z2))
+
+/*
+** bic_w0_u16_z_tied1:
+**	mov	(z[0-9]+\.h), w0
+**	movprfx	z0\.h, p0/z, z0\.h
+**	bic	z0\.h, p0/m, z0\.h, \1
+**	ret
+*/
+TEST_UNIFORM_ZS (bic_w0_u16_z_tied1, svuint16_t, uint16_t,
+		 z0 = svbic_n_u16_z (p0, z0, x0),
+		 z0 = svbic_z (p0, z0, x0))
+
+/*
+** bic_w0_u16_z_untied:
+**	mov	(z[0-9]+\.h), w0
+**	movprfx	z0\.h, p0/z, z1\.h
+**	bic	z0\.h, p0/m, z0\.h, \1
+**	ret
+*/
+TEST_UNIFORM_ZS (bic_w0_u16_z_untied, svuint16_t, uint16_t,
+		 z0 = svbic_n_u16_z (p0, z1, x0),
+		 z0 = svbic_z (p0, z1, x0))
+
+/*
+** bic_h0_u16_z_tied1:
+**	mov	(z[0-9]+\.h), h0
+**	movprfx	z1\.h, p0/z, z1\.h
+**	bic	z1\.h, p0/m, z1\.h, \1
+**	ret
+*/
+TEST_UNIFORM_ZS (bic_h0_u16_z_tied1, svuint16_t, uint16_t,
+		 z1 = svbic_n_u16_z (p0, z1, d0),
+		 z1 = svbic_z (p0, z1, d0))
+
+/*
+** bic_h0_u16_z_untied:
+**	mov	(z[0-9]+\.h), h0
+**	movprfx	z1\.h, p0/z, z2\.h
+**	bic	z1\.h, p0/m, z1\.h, \1
+**	ret
+*/
+TEST_UNIFORM_ZS (bic_h0_u16_z_untied, svuint16_t, uint16_t,
+		 z1 = svbic_n_u16_z (p0, z2, d0),
+		 z1 = svbic_z (p0, z2, d0))
+
+/*
+** bic_u16_x_tied1:
+**	bic	z0\.d, z0\.d, z1\.d
+**	ret
+*/
+TEST_UNIFORM_Z (bic_u16_x_tied1, svuint16_t,
+		z0 = svbic_u16_x (p0, z0, z1),
+		z0 = svbic_x (p0, z0, z1))
+
+/*
+** bic_u16_x_tied2:
+**	bic	z1\.d, z0\.d, z1\.d
+**	ret
+*/
+TEST_UNIFORM_Z (bic_u16_x_tied2, svuint16_t,
+		z1 = svbic_u16_x (p0, z0, z1),
+		z1 = svbic_x (p0, z0, z1))
+
+/*
+** bic_u16_x_untied:
+**	bic	z2\.d, z0\.d, z1\.d
+**	ret
+*/
+TEST_UNIFORM_Z (bic_u16_x_untied, svuint16_t,
+		z2 = svbic_u16_x (p0, z0, z1),
+		z2 = svbic_x (p0, z0, z1))
+
+/*
+** bic_w0_u16_x_tied1:
+**	mov	(z[0-9]+)\.h, w0
+**	bic	z0\.d, z0\.d, \1\.d
+**	ret
+*/
+TEST_UNIFORM_ZS (bic_w0_u16_x_tied1, svuint16_t, uint16_t,
+		 z0 = svbic_n_u16_x (p0, z0, x0),
+		 z0 = svbic_x (p0, z0, x0))
+
+/*
+** bic_w0_u16_x_untied:
+**	mov	z1\.h, w0
+**	bic	z1\.d, z0\.d, z1\.d
+**	ret
+*/
+TEST_UNIFORM_ZS (bic_w0_u16_x_untied, svuint16_t, uint16_t,
+		 z1 = svbic_n_u16_x (p0, z0, x0),
+		 z1 = svbic_x (p0, z0, x0))
+
+/*
+** bic_h0_u16_x_tied1:
+**	mov	(z[0-9]+)\.h, h0
+**	bic	z1\.d, z1\.d, \1\.d
+**	ret
+*/
+TEST_UNIFORM_ZS (bic_h0_u16_x_tied1, svuint16_t, uint16_t,
+		 z1 = svbic_n_u16_x (p0, z1, d0),
+		 z1 = svbic_x (p0, z1, d0))
+
+/*
+** bic_h0_u16_x_untied:
+**	mov	(z[0-9]+)\.h, h0
+**	bic	z2\.d, z1\.d, \1\.d
+**	ret
+*/
+TEST_UNIFORM_ZS (bic_h0_u16_x_untied, svuint16_t, uint16_t,
+		 z2 = svbic_n_u16_x (p0, z1, d0),
+		 z2 = svbic_x (p0, z1, d0))
+
+/*
+** bic_1_u16_x_tied1:
+**	and	z0\.h, z0\.h, #0xfffe
+**	ret
+*/
+TEST_UNIFORM_Z (bic_1_u16_x_tied1, svuint16_t,
+		z0 = svbic_n_u16_x (p0, z0, 1),
+		z0 = svbic_x (p0, z0, 1))
+
+/*
+** bic_1_u16_x_untied:
+**	movprfx	z0, z1
+**	and	z0\.h, z0\.h, #0xfffe
+**	ret
+*/
+TEST_UNIFORM_Z (bic_1_u16_x_untied, svuint16_t,
+		z0 = svbic_n_u16_x (p0, z1, 1),
+		z0 = svbic_x (p0, z1, 1))
+
+/*
+** bic_127_u16_x:
+**	and	z0\.h, z0\.h, #0xff80
+**	ret
+*/
+TEST_UNIFORM_Z (bic_127_u16_x, svuint16_t,
+		z0 = svbic_n_u16_x (p0, z0, 127),
+		z0 = svbic_x (p0, z0, 127))
+
+/*
+** bic_128_u16_x:
+**	and	z0\.h, z0\.h, #0xff7f
+**	ret
+*/
+TEST_UNIFORM_Z (bic_128_u16_x, svuint16_t,
+		z0 = svbic_n_u16_x (p0, z0, 128),
+		z0 = svbic_x (p0, z0, 128))
+
+/*
+** bic_255_u16_x:
+**	and	z0\.h, z0\.h, #0xff00
+**	ret
+*/
+TEST_UNIFORM_Z (bic_255_u16_x, svuint16_t,
+		z0 = svbic_n_u16_x (p0, z0, 255),
+		z0 = svbic_x (p0, z0, 255))
+
+/*
+** bic_256_u16_x:
+**	and	z0\.h, z0\.h, #0xfeff
+**	ret
+*/
+TEST_UNIFORM_Z (bic_256_u16_x, svuint16_t,
+		z0 = svbic_n_u16_x (p0, z0, 256),
+		z0 = svbic_x (p0, z0, 256))
+
+/*
+** bic_257_u16_x:
+**	and	z0\.h, z0\.h, #0xfefe
+**	ret
+*/
+TEST_UNIFORM_Z (bic_257_u16_x, svuint16_t,
+		z0 = svbic_n_u16_x (p0, z0, 257),
+		z0 = svbic_x (p0, z0, 257))
+
+/*
+** bic_512_u16_x:
+**	and	z0\.h, z0\.h, #0xfdff
+**	ret
+*/
+TEST_UNIFORM_Z (bic_512_u16_x, svuint16_t,
+		z0 = svbic_n_u16_x (p0, z0, 512),
+		z0 = svbic_x (p0, z0, 512))
+
+/*
+** bic_65280_u16_x:
+**	and	z0\.h, z0\.h, #0xff
+**	ret
+*/
+TEST_UNIFORM_Z (bic_65280_u16_x, svuint16_t,
+		z0 = svbic_n_u16_x (p0, z0, 0xff00),
+		z0 = svbic_x (p0, z0, 0xff00))
+
+/*
+** bic_m127_u16_x:
+**	and	z0\.h, z0\.h, #0x7e
+**	ret
+*/
+TEST_UNIFORM_Z (bic_m127_u16_x, svuint16_t,
+		z0 = svbic_n_u16_x (p0, z0, -127),
+		z0 = svbic_x (p0, z0, -127))
+
+/*
+** bic_m128_u16_x:
+**	and	z0\.h, z0\.h, #0x7f
+**	ret
+*/
+TEST_UNIFORM_Z (bic_m128_u16_x, svuint16_t,
+		z0 = svbic_n_u16_x (p0, z0, -128),
+		z0 = svbic_x (p0, z0, -128))
+
+/*
+** bic_m255_u16_x:
+**	and	z0\.h, z0\.h, #0xfe
+**	ret
+*/
+TEST_UNIFORM_Z (bic_m255_u16_x, svuint16_t,
+		z0 = svbic_n_u16_x (p0, z0, -255),
+		z0 = svbic_x (p0, z0, -255))
+
+/*
+** bic_m256_u16_x:
+**	and	z0\.h, z0\.h, #0xff
+**	ret
+*/
+TEST_UNIFORM_Z (bic_m256_u16_x, svuint16_t,
+		z0 = svbic_n_u16_x (p0, z0, -256),
+		z0 = svbic_x (p0, z0, -256))
+
+/*
+** bic_m257_u16_x:
+**	and	z0\.h, z0\.h, #0x100
+**	ret
+*/
+TEST_UNIFORM_Z (bic_m257_u16_x, svuint16_t,
+		z0 = svbic_n_u16_x (p0, z0, -257),
+		z0 = svbic_x (p0, z0, -257))
+
+/*
+** bic_m512_u16_x:
+**	and	z0\.h, z0\.h, #0x1ff
+**	ret
+*/
+TEST_UNIFORM_Z (bic_m512_u16_x, svuint16_t,
+		z0 = svbic_n_u16_x (p0, z0, -512),
+		z0 = svbic_x (p0, z0, -512))
+
+/*
+** bic_m32768_u16_x:
+**	and	z0\.h, z0\.h, #0x7fff
+**	ret
+*/
+TEST_UNIFORM_Z (bic_m32768_u16_x, svuint16_t,
+		z0 = svbic_n_u16_x (p0, z0, -0x8000),
+		z0 = svbic_x (p0, z0, -0x8000))
+
+/*
+** bic_5_u16_x:
+**	mov	(z[0-9]+)\.h, #-6
+**	and	z0\.d, z0\.d, \1\.d
+**	ret
+*/
+TEST_UNIFORM_Z (bic_5_u16_x, svuint16_t,
+		z0 = svbic_n_u16_x (p0, z0, 5),
+		z0 = svbic_x (p0, z0, 5))
diff --git a/gcc/testsuite/gcc.target/aarch64/sve-acle/asm/bic_u32.c b/gcc/testsuite/gcc.target/aarch64/sve-acle/asm/bic_u32.c
new file mode 100644
index 0000000..4e8159d
--- /dev/null
+++ b/gcc/testsuite/gcc.target/aarch64/sve-acle/asm/bic_u32.c
@@ -0,0 +1,394 @@ 
+/* { dg-do compile } */
+/* { dg-final { check-function-bodies "**" "" "-DCHECK_ASM" } } */
+
+#include "test_sve_acle.h"
+
+/*
+** bic_u32_m_tied1:
+**	bic	z0\.s, p0/m, z0\.s, z1\.s
+**	ret
+*/
+TEST_UNIFORM_Z (bic_u32_m_tied1, svuint32_t,
+		z0 = svbic_u32_m (p0, z0, z1),
+		z0 = svbic_m (p0, z0, z1))
+
+/* Bad RA choice: no preferred output sequence.  */
+TEST_UNIFORM_Z (bic_u32_m_tied2, svuint32_t,
+		z1 = svbic_u32_m (p0, z0, z1),
+		z1 = svbic_m (p0, z0, z1))
+
+/*
+** bic_u32_m_untied:
+**	movprfx	z0, z1
+**	bic	z0\.s, p0/m, z0\.s, z2\.s
+**	ret
+*/
+TEST_UNIFORM_Z (bic_u32_m_untied, svuint32_t,
+		z0 = svbic_u32_m (p0, z1, z2),
+		z0 = svbic_m (p0, z1, z2))
+
+/*
+** bic_w0_u32_m_tied1:
+**	mov	(z[0-9]+\.s), w0
+**	bic	z0\.s, p0/m, z0\.s, \1
+**	ret
+*/
+TEST_UNIFORM_ZS (bic_w0_u32_m_tied1, svuint32_t, uint32_t,
+		 z0 = svbic_n_u32_m (p0, z0, x0),
+		 z0 = svbic_m (p0, z0, x0))
+
+/*
+** bic_w0_u32_m_untied:
+**	mov	(z[0-9]+\.s), w0
+**	movprfx	z0, z1
+**	bic	z0\.s, p0/m, z0\.s, \1
+**	ret
+*/
+TEST_UNIFORM_ZS (bic_w0_u32_m_untied, svuint32_t, uint32_t,
+		 z0 = svbic_n_u32_m (p0, z1, x0),
+		 z0 = svbic_m (p0, z1, x0))
+
+/*
+** bic_s0_u32_m_tied1:
+**	mov	(z[0-9]+\.s), s0
+**	bic	z1\.s, p0/m, z1\.s, \1
+**	ret
+*/
+TEST_UNIFORM_ZS (bic_s0_u32_m_tied1, svuint32_t, uint32_t,
+		 z1 = svbic_n_u32_m (p0, z1, d0),
+		 z1 = svbic_m (p0, z1, d0))
+
+/*
+** bic_s0_u32_m_untied:
+**	mov	(z[0-9]+\.s), s0
+**	movprfx	z1, z2
+**	bic	z1\.s, p0/m, z1\.s, \1
+**	ret
+*/
+TEST_UNIFORM_ZS (bic_s0_u32_m_untied, svuint32_t, uint32_t,
+		 z1 = svbic_n_u32_m (p0, z2, d0),
+		 z1 = svbic_m (p0, z2, d0))
+
+/*
+** bic_1_u32_m_tied1:
+**	mov	(z[0-9]+\.s), #-2
+**	and	z0\.s, p0/m, z0\.s, \1
+**	ret
+*/
+TEST_UNIFORM_Z (bic_1_u32_m_tied1, svuint32_t,
+		z0 = svbic_n_u32_m (p0, z0, 1),
+		z0 = svbic_m (p0, z0, 1))
+
+/*
+** bic_1_u32_m_untied:
+**	mov	(z[0-9]+\.s), #-2
+**	movprfx	z0, z1
+**	and	z0\.s, p0/m, z0\.s, \1
+**	ret
+*/
+TEST_UNIFORM_Z (bic_1_u32_m_untied, svuint32_t,
+		z0 = svbic_n_u32_m (p0, z1, 1),
+		z0 = svbic_m (p0, z1, 1))
+
+/*
+** bic_m2_u32_m:
+**	mov	(z[0-9]+\.s), #1
+**	and	z0\.s, p0/m, z0\.s, \1
+**	ret
+*/
+TEST_UNIFORM_Z (bic_m2_u32_m, svuint32_t,
+		z0 = svbic_n_u32_m (p0, z0, -2),
+		z0 = svbic_m (p0, z0, -2))
+
+/*
+** bic_u32_z_tied1:
+**	movprfx	z0\.s, p0/z, z0\.s
+**	bic	z0\.s, p0/m, z0\.s, z1\.s
+**	ret
+*/
+TEST_UNIFORM_Z (bic_u32_z_tied1, svuint32_t,
+		z0 = svbic_u32_z (p0, z0, z1),
+		z0 = svbic_z (p0, z0, z1))
+
+/*
+** bic_u32_z_tied2:
+**	mov	(z[0-9]+)\.d, z1\.d
+**	movprfx	z1\.s, p0/z, z0\.s
+**	bic	z1\.s, p0/m, z1\.s, \1\.s
+**	ret
+*/
+TEST_UNIFORM_Z (bic_u32_z_tied2, svuint32_t,
+		z1 = svbic_u32_z (p0, z0, z1),
+		z1 = svbic_z (p0, z0, z1))
+
+/*
+** bic_u32_z_untied:
+**	movprfx	z0\.s, p0/z, z1\.s
+**	bic	z0\.s, p0/m, z0\.s, z2\.s
+**	ret
+*/
+TEST_UNIFORM_Z (bic_u32_z_untied, svuint32_t,
+		z0 = svbic_u32_z (p0, z1, z2),
+		z0 = svbic_z (p0, z1, z2))
+
+/*
+** bic_w0_u32_z_tied1:
+**	mov	(z[0-9]+\.s), w0
+**	movprfx	z0\.s, p0/z, z0\.s
+**	bic	z0\.s, p0/m, z0\.s, \1
+**	ret
+*/
+TEST_UNIFORM_ZS (bic_w0_u32_z_tied1, svuint32_t, uint32_t,
+		 z0 = svbic_n_u32_z (p0, z0, x0),
+		 z0 = svbic_z (p0, z0, x0))
+
+/*
+** bic_w0_u32_z_untied:
+**	mov	(z[0-9]+\.s), w0
+**	movprfx	z0\.s, p0/z, z1\.s
+**	bic	z0\.s, p0/m, z0\.s, \1
+**	ret
+*/
+TEST_UNIFORM_ZS (bic_w0_u32_z_untied, svuint32_t, uint32_t,
+		 z0 = svbic_n_u32_z (p0, z1, x0),
+		 z0 = svbic_z (p0, z1, x0))
+
+/*
+** bic_s0_u32_z_tied1:
+**	mov	(z[0-9]+\.s), s0
+**	movprfx	z1\.s, p0/z, z1\.s
+**	bic	z1\.s, p0/m, z1\.s, \1
+**	ret
+*/
+TEST_UNIFORM_ZS (bic_s0_u32_z_tied1, svuint32_t, uint32_t,
+		 z1 = svbic_n_u32_z (p0, z1, d0),
+		 z1 = svbic_z (p0, z1, d0))
+
+/*
+** bic_s0_u32_z_untied:
+**	mov	(z[0-9]+\.s), s0
+**	movprfx	z1\.s, p0/z, z2\.s
+**	bic	z1\.s, p0/m, z1\.s, \1
+**	ret
+*/
+TEST_UNIFORM_ZS (bic_s0_u32_z_untied, svuint32_t, uint32_t,
+		 z1 = svbic_n_u32_z (p0, z2, d0),
+		 z1 = svbic_z (p0, z2, d0))
+
+/*
+** bic_u32_x_tied1:
+**	bic	z0\.d, z0\.d, z1\.d
+**	ret
+*/
+TEST_UNIFORM_Z (bic_u32_x_tied1, svuint32_t,
+		z0 = svbic_u32_x (p0, z0, z1),
+		z0 = svbic_x (p0, z0, z1))
+
+/*
+** bic_u32_x_tied2:
+**	bic	z1\.d, z0\.d, z1\.d
+**	ret
+*/
+TEST_UNIFORM_Z (bic_u32_x_tied2, svuint32_t,
+		z1 = svbic_u32_x (p0, z0, z1),
+		z1 = svbic_x (p0, z0, z1))
+
+/*
+** bic_u32_x_untied:
+**	bic	z2\.d, z0\.d, z1\.d
+**	ret
+*/
+TEST_UNIFORM_Z (bic_u32_x_untied, svuint32_t,
+		z2 = svbic_u32_x (p0, z0, z1),
+		z2 = svbic_x (p0, z0, z1))
+
+/*
+** bic_w0_u32_x_tied1:
+**	mov	(z[0-9]+)\.s, w0
+**	bic	z0\.d, z0\.d, \1\.d
+**	ret
+*/
+TEST_UNIFORM_ZS (bic_w0_u32_x_tied1, svuint32_t, uint32_t,
+		 z0 = svbic_n_u32_x (p0, z0, x0),
+		 z0 = svbic_x (p0, z0, x0))
+
+/*
+** bic_w0_u32_x_untied:
+**	mov	z1\.s, w0
+**	bic	z1\.d, z0\.d, z1\.d
+**	ret
+*/
+TEST_UNIFORM_ZS (bic_w0_u32_x_untied, svuint32_t, uint32_t,
+		 z1 = svbic_n_u32_x (p0, z0, x0),
+		 z1 = svbic_x (p0, z0, x0))
+
+/*
+** bic_s0_u32_x_tied1:
+**	mov	(z[0-9]+)\.s, s0
+**	bic	z1\.d, z1\.d, \1\.d
+**	ret
+*/
+TEST_UNIFORM_ZS (bic_s0_u32_x_tied1, svuint32_t, uint32_t,
+		 z1 = svbic_n_u32_x (p0, z1, d0),
+		 z1 = svbic_x (p0, z1, d0))
+
+/*
+** bic_s0_u32_x_untied:
+**	mov	(z[0-9]+)\.s, s0
+**	bic	z2\.d, z1\.d, \1\.d
+**	ret
+*/
+TEST_UNIFORM_ZS (bic_s0_u32_x_untied, svuint32_t, uint32_t,
+		 z2 = svbic_n_u32_x (p0, z1, d0),
+		 z2 = svbic_x (p0, z1, d0))
+
+/*
+** bic_1_u32_x_tied1:
+**	and	z0\.s, z0\.s, #0xfffffffe
+**	ret
+*/
+TEST_UNIFORM_Z (bic_1_u32_x_tied1, svuint32_t,
+		z0 = svbic_n_u32_x (p0, z0, 1),
+		z0 = svbic_x (p0, z0, 1))
+
+/*
+** bic_1_u32_x_untied:
+**	movprfx	z0, z1
+**	and	z0\.s, z0\.s, #0xfffffffe
+**	ret
+*/
+TEST_UNIFORM_Z (bic_1_u32_x_untied, svuint32_t,
+		z0 = svbic_n_u32_x (p0, z1, 1),
+		z0 = svbic_x (p0, z1, 1))
+
+/*
+** bic_127_u32_x:
+**	and	z0\.s, z0\.s, #0xffffff80
+**	ret
+*/
+TEST_UNIFORM_Z (bic_127_u32_x, svuint32_t,
+		z0 = svbic_n_u32_x (p0, z0, 127),
+		z0 = svbic_x (p0, z0, 127))
+
+/*
+** bic_128_u32_x:
+**	and	z0\.s, z0\.s, #0xffffff7f
+**	ret
+*/
+TEST_UNIFORM_Z (bic_128_u32_x, svuint32_t,
+		z0 = svbic_n_u32_x (p0, z0, 128),
+		z0 = svbic_x (p0, z0, 128))
+
+/*
+** bic_255_u32_x:
+**	and	z0\.s, z0\.s, #0xffffff00
+**	ret
+*/
+TEST_UNIFORM_Z (bic_255_u32_x, svuint32_t,
+		z0 = svbic_n_u32_x (p0, z0, 255),
+		z0 = svbic_x (p0, z0, 255))
+
+/*
+** bic_256_u32_x:
+**	and	z0\.s, z0\.s, #0xfffffeff
+**	ret
+*/
+TEST_UNIFORM_Z (bic_256_u32_x, svuint32_t,
+		z0 = svbic_n_u32_x (p0, z0, 256),
+		z0 = svbic_x (p0, z0, 256))
+
+/* TODO: Bad code needs fixing.  */
+TEST_UNIFORM_Z (bic_257_u32_x, svuint32_t,
+		z0 = svbic_n_u32_x (p0, z0, 257),
+		z0 = svbic_x (p0, z0, 257))
+
+/*
+** bic_512_u32_x:
+**	and	z0\.s, z0\.s, #0xfffffdff
+**	ret
+*/
+TEST_UNIFORM_Z (bic_512_u32_x, svuint32_t,
+		z0 = svbic_n_u32_x (p0, z0, 512),
+		z0 = svbic_x (p0, z0, 512))
+
+/*
+** bic_65280_u32_x:
+**	and	z0\.s, z0\.s, #0xffff00ff
+**	ret
+*/
+TEST_UNIFORM_Z (bic_65280_u32_x, svuint32_t,
+		z0 = svbic_n_u32_x (p0, z0, 0xff00),
+		z0 = svbic_x (p0, z0, 0xff00))
+
+/*
+** bic_m127_u32_x:
+**	and	z0\.s, z0\.s, #0x7e
+**	ret
+*/
+TEST_UNIFORM_Z (bic_m127_u32_x, svuint32_t,
+		z0 = svbic_n_u32_x (p0, z0, -127),
+		z0 = svbic_x (p0, z0, -127))
+
+/*
+** bic_m128_u32_x:
+**	and	z0\.s, z0\.s, #0x7f
+**	ret
+*/
+TEST_UNIFORM_Z (bic_m128_u32_x, svuint32_t,
+		z0 = svbic_n_u32_x (p0, z0, -128),
+		z0 = svbic_x (p0, z0, -128))
+
+/*
+** bic_m255_u32_x:
+**	and	z0\.s, z0\.s, #0xfe
+**	ret
+*/
+TEST_UNIFORM_Z (bic_m255_u32_x, svuint32_t,
+		z0 = svbic_n_u32_x (p0, z0, -255),
+		z0 = svbic_x (p0, z0, -255))
+
+/*
+** bic_m256_u32_x:
+**	and	z0\.s, z0\.s, #0xff
+**	ret
+*/
+TEST_UNIFORM_Z (bic_m256_u32_x, svuint32_t,
+		z0 = svbic_n_u32_x (p0, z0, -256),
+		z0 = svbic_x (p0, z0, -256))
+
+/*
+** bic_m257_u32_x:
+**	and	z0\.s, z0\.s, #0x100
+**	ret
+*/
+TEST_UNIFORM_Z (bic_m257_u32_x, svuint32_t,
+		z0 = svbic_n_u32_x (p0, z0, -257),
+		z0 = svbic_x (p0, z0, -257))
+
+/*
+** bic_m512_u32_x:
+**	and	z0\.s, z0\.s, #0x1ff
+**	ret
+*/
+TEST_UNIFORM_Z (bic_m512_u32_x, svuint32_t,
+		z0 = svbic_n_u32_x (p0, z0, -512),
+		z0 = svbic_x (p0, z0, -512))
+
+/*
+** bic_m32768_u32_x:
+**	and	z0\.s, z0\.s, #0x7fff
+**	ret
+*/
+TEST_UNIFORM_Z (bic_m32768_u32_x, svuint32_t,
+		z0 = svbic_n_u32_x (p0, z0, -0x8000),
+		z0 = svbic_x (p0, z0, -0x8000))
+
+/*
+** bic_5_u32_x:
+**	mov	(z[0-9]+)\.s, #-6
+**	and	z0\.d, z0\.d, \1\.d
+**	ret
+*/
+TEST_UNIFORM_Z (bic_5_u32_x, svuint32_t,
+		z0 = svbic_n_u32_x (p0, z0, 5),
+		z0 = svbic_x (p0, z0, 5))
diff --git a/gcc/testsuite/gcc.target/aarch64/sve-acle/asm/bic_u64.c b/gcc/testsuite/gcc.target/aarch64/sve-acle/asm/bic_u64.c
new file mode 100644
index 0000000..785c1ec
--- /dev/null
+++ b/gcc/testsuite/gcc.target/aarch64/sve-acle/asm/bic_u64.c
@@ -0,0 +1,394 @@
+/* { dg-do compile } */
+/* { dg-final { check-function-bodies "**" "" "-DCHECK_ASM" } } */
+
+#include "test_sve_acle.h"
+
+/*
+** bic_u64_m_tied1:
+**	bic	z0\.d, p0/m, z0\.d, z1\.d
+**	ret
+*/
+TEST_UNIFORM_Z (bic_u64_m_tied1, svuint64_t,
+		z0 = svbic_u64_m (p0, z0, z1),
+		z0 = svbic_m (p0, z0, z1))
+
+/* Bad RA choice: no preferred output sequence.  */
+TEST_UNIFORM_Z (bic_u64_m_tied2, svuint64_t,
+		z1 = svbic_u64_m (p0, z0, z1),
+		z1 = svbic_m (p0, z0, z1))
+
+/*
+** bic_u64_m_untied:
+**	movprfx	z0, z1
+**	bic	z0\.d, p0/m, z0\.d, z2\.d
+**	ret
+*/
+TEST_UNIFORM_Z (bic_u64_m_untied, svuint64_t,
+		z0 = svbic_u64_m (p0, z1, z2),
+		z0 = svbic_m (p0, z1, z2))
+
+/*
+** bic_x0_u64_m_tied1:
+**	mov	(z[0-9]+\.d), x0
+**	bic	z0\.d, p0/m, z0\.d, \1
+**	ret
+*/
+TEST_UNIFORM_ZS (bic_x0_u64_m_tied1, svuint64_t, uint64_t,
+		 z0 = svbic_n_u64_m (p0, z0, x0),
+		 z0 = svbic_m (p0, z0, x0))
+
+/*
+** bic_x0_u64_m_untied:
+**	mov	(z[0-9]+\.d), x0
+**	movprfx	z0, z1
+**	bic	z0\.d, p0/m, z0\.d, \1
+**	ret
+*/
+TEST_UNIFORM_ZS (bic_x0_u64_m_untied, svuint64_t, uint64_t,
+		 z0 = svbic_n_u64_m (p0, z1, x0),
+		 z0 = svbic_m (p0, z1, x0))
+
+/*
+** bic_d0_u64_m_tied1:
+**	mov	(z[0-9]+\.d), d0
+**	bic	z1\.d, p0/m, z1\.d, \1
+**	ret
+*/
+TEST_UNIFORM_ZS (bic_d0_u64_m_tied1, svuint64_t, uint64_t,
+		 z1 = svbic_n_u64_m (p0, z1, d0),
+		 z1 = svbic_m (p0, z1, d0))
+
+/*
+** bic_d0_u64_m_untied:
+**	mov	(z[0-9]+\.d), d0
+**	movprfx	z1, z2
+**	bic	z1\.d, p0/m, z1\.d, \1
+**	ret
+*/
+TEST_UNIFORM_ZS (bic_d0_u64_m_untied, svuint64_t, uint64_t,
+		 z1 = svbic_n_u64_m (p0, z2, d0),
+		 z1 = svbic_m (p0, z2, d0))
+
+/*
+** bic_1_u64_m_tied1:
+**	mov	(z[0-9]+\.d), #-2
+**	and	z0\.d, p0/m, z0\.d, \1
+**	ret
+*/
+TEST_UNIFORM_Z (bic_1_u64_m_tied1, svuint64_t,
+		z0 = svbic_n_u64_m (p0, z0, 1),
+		z0 = svbic_m (p0, z0, 1))
+
+/*
+** bic_1_u64_m_untied:
+**	mov	(z[0-9]+\.d), #-2
+**	movprfx	z0, z1
+**	and	z0\.d, p0/m, z0\.d, \1
+**	ret
+*/
+TEST_UNIFORM_Z (bic_1_u64_m_untied, svuint64_t,
+		z0 = svbic_n_u64_m (p0, z1, 1),
+		z0 = svbic_m (p0, z1, 1))
+
+/*
+** bic_m2_u64_m:
+**	mov	(z[0-9]+\.d), #1
+**	and	z0\.d, p0/m, z0\.d, \1
+**	ret
+*/
+TEST_UNIFORM_Z (bic_m2_u64_m, svuint64_t,
+		z0 = svbic_n_u64_m (p0, z0, -2),
+		z0 = svbic_m (p0, z0, -2))
+
+/*
+** bic_u64_z_tied1:
+**	movprfx	z0\.d, p0/z, z0\.d
+**	bic	z0\.d, p0/m, z0\.d, z1\.d
+**	ret
+*/
+TEST_UNIFORM_Z (bic_u64_z_tied1, svuint64_t,
+		z0 = svbic_u64_z (p0, z0, z1),
+		z0 = svbic_z (p0, z0, z1))
+
+/*
+** bic_u64_z_tied2:
+**	mov	(z[0-9]+\.d), z1\.d
+**	movprfx	z1\.d, p0/z, z0\.d
+**	bic	z1\.d, p0/m, z1\.d, \1
+**	ret
+*/
+TEST_UNIFORM_Z (bic_u64_z_tied2, svuint64_t,
+		z1 = svbic_u64_z (p0, z0, z1),
+		z1 = svbic_z (p0, z0, z1))
+
+/*
+** bic_u64_z_untied:
+**	movprfx	z0\.d, p0/z, z1\.d
+**	bic	z0\.d, p0/m, z0\.d, z2\.d
+**	ret
+*/
+TEST_UNIFORM_Z (bic_u64_z_untied, svuint64_t,
+		z0 = svbic_u64_z (p0, z1, z2),
+		z0 = svbic_z (p0, z1, z2))
+
+/*
+** bic_x0_u64_z_tied1:
+**	mov	(z[0-9]+\.d), x0
+**	movprfx	z0\.d, p0/z, z0\.d
+**	bic	z0\.d, p0/m, z0\.d, \1
+**	ret
+*/
+TEST_UNIFORM_ZS (bic_x0_u64_z_tied1, svuint64_t, uint64_t,
+		 z0 = svbic_n_u64_z (p0, z0, x0),
+		 z0 = svbic_z (p0, z0, x0))
+
+/*
+** bic_x0_u64_z_untied:
+**	mov	(z[0-9]+\.d), x0
+**	movprfx	z0\.d, p0/z, z1\.d
+**	bic	z0\.d, p0/m, z0\.d, \1
+**	ret
+*/
+TEST_UNIFORM_ZS (bic_x0_u64_z_untied, svuint64_t, uint64_t,
+		 z0 = svbic_n_u64_z (p0, z1, x0),
+		 z0 = svbic_z (p0, z1, x0))
+
+/*
+** bic_d0_u64_z_tied1:
+**	mov	(z[0-9]+\.d), d0
+**	movprfx	z1\.d, p0/z, z1\.d
+**	bic	z1\.d, p0/m, z1\.d, \1
+**	ret
+*/
+TEST_UNIFORM_ZS (bic_d0_u64_z_tied1, svuint64_t, uint64_t,
+		 z1 = svbic_n_u64_z (p0, z1, d0),
+		 z1 = svbic_z (p0, z1, d0))
+
+/*
+** bic_d0_u64_z_untied:
+**	mov	(z[0-9]+\.d), d0
+**	movprfx	z1\.d, p0/z, z2\.d
+**	bic	z1\.d, p0/m, z1\.d, \1
+**	ret
+*/
+TEST_UNIFORM_ZS (bic_d0_u64_z_untied, svuint64_t, uint64_t,
+		 z1 = svbic_n_u64_z (p0, z2, d0),
+		 z1 = svbic_z (p0, z2, d0))
+
+/*
+** bic_u64_x_tied1:
+**	bic	z0\.d, z0\.d, z1\.d
+**	ret
+*/
+TEST_UNIFORM_Z (bic_u64_x_tied1, svuint64_t,
+		z0 = svbic_u64_x (p0, z0, z1),
+		z0 = svbic_x (p0, z0, z1))
+
+/*
+** bic_u64_x_tied2:
+**	bic	z1\.d, z0\.d, z1\.d
+**	ret
+*/
+TEST_UNIFORM_Z (bic_u64_x_tied2, svuint64_t,
+		z1 = svbic_u64_x (p0, z0, z1),
+		z1 = svbic_x (p0, z0, z1))
+
+/*
+** bic_u64_x_untied:
+**	bic	z2\.d, z0\.d, z1\.d
+**	ret
+*/
+TEST_UNIFORM_Z (bic_u64_x_untied, svuint64_t,
+		z2 = svbic_u64_x (p0, z0, z1),
+		z2 = svbic_x (p0, z0, z1))
+
+/*
+** bic_x0_u64_x_tied1:
+**	mov	(z[0-9]+)\.d, x0
+**	bic	z0\.d, z0\.d, \1\.d
+**	ret
+*/
+TEST_UNIFORM_ZS (bic_x0_u64_x_tied1, svuint64_t, uint64_t,
+		 z0 = svbic_n_u64_x (p0, z0, x0),
+		 z0 = svbic_x (p0, z0, x0))
+
+/*
+** bic_x0_u64_x_untied:
+**	mov	z1\.d, x0
+**	bic	z1\.d, z0\.d, z1\.d
+**	ret
+*/
+TEST_UNIFORM_ZS (bic_x0_u64_x_untied, svuint64_t, uint64_t,
+		 z1 = svbic_n_u64_x (p0, z0, x0),
+		 z1 = svbic_x (p0, z0, x0))
+
+/*
+** bic_d0_u64_x_tied1:
+**	mov	(z[0-9]+)\.d, d0
+**	bic	z1\.d, z1\.d, \1\.d
+**	ret
+*/
+TEST_UNIFORM_ZS (bic_d0_u64_x_tied1, svuint64_t, uint64_t,
+		 z1 = svbic_n_u64_x (p0, z1, d0),
+		 z1 = svbic_x (p0, z1, d0))
+
+/*
+** bic_d0_u64_x_untied:
+**	mov	(z[0-9]+)\.d, d0
+**	bic	z2\.d, z1\.d, \1\.d
+**	ret
+*/
+TEST_UNIFORM_ZS (bic_d0_u64_x_untied, svuint64_t, uint64_t,
+		 z2 = svbic_n_u64_x (p0, z1, d0),
+		 z2 = svbic_x (p0, z1, d0))
+
+/*
+** bic_1_u64_x_tied1:
+**	and	z0\.d, z0\.d, #0xfffffffffffffffe
+**	ret
+*/
+TEST_UNIFORM_Z (bic_1_u64_x_tied1, svuint64_t,
+		z0 = svbic_n_u64_x (p0, z0, 1),
+		z0 = svbic_x (p0, z0, 1))
+
+/*
+** bic_1_u64_x_untied:
+**	movprfx	z0, z1
+**	and	z0\.d, z0\.d, #0xfffffffffffffffe
+**	ret
+*/
+TEST_UNIFORM_Z (bic_1_u64_x_untied, svuint64_t,
+		z0 = svbic_n_u64_x (p0, z1, 1),
+		z0 = svbic_x (p0, z1, 1))
+
+/*
+** bic_127_u64_x:
+**	and	z0\.d, z0\.d, #0xffffffffffffff80
+**	ret
+*/
+TEST_UNIFORM_Z (bic_127_u64_x, svuint64_t,
+		z0 = svbic_n_u64_x (p0, z0, 127),
+		z0 = svbic_x (p0, z0, 127))
+
+/*
+** bic_128_u64_x:
+**	and	z0\.d, z0\.d, #0xffffffffffffff7f
+**	ret
+*/
+TEST_UNIFORM_Z (bic_128_u64_x, svuint64_t,
+		z0 = svbic_n_u64_x (p0, z0, 128),
+		z0 = svbic_x (p0, z0, 128))
+
+/*
+** bic_255_u64_x:
+**	and	z0\.d, z0\.d, #0xffffffffffffff00
+**	ret
+*/
+TEST_UNIFORM_Z (bic_255_u64_x, svuint64_t,
+		z0 = svbic_n_u64_x (p0, z0, 255),
+		z0 = svbic_x (p0, z0, 255))
+
+/*
+** bic_256_u64_x:
+**	and	z0\.d, z0\.d, #0xfffffffffffffeff
+**	ret
+*/
+TEST_UNIFORM_Z (bic_256_u64_x, svuint64_t,
+		z0 = svbic_n_u64_x (p0, z0, 256),
+		z0 = svbic_x (p0, z0, 256))
+
+/* TODO: Bad code needs fixing.  */
+TEST_UNIFORM_Z (bic_257_u64_x, svuint64_t,
+		z0 = svbic_n_u64_x (p0, z0, 257),
+		z0 = svbic_x (p0, z0, 257))
+
+/*
+** bic_512_u64_x:
+**	and	z0\.d, z0\.d, #0xfffffffffffffdff
+**	ret
+*/
+TEST_UNIFORM_Z (bic_512_u64_x, svuint64_t,
+		z0 = svbic_n_u64_x (p0, z0, 512),
+		z0 = svbic_x (p0, z0, 512))
+
+/*
+** bic_65280_u64_x:
+**	and	z0\.d, z0\.d, #0xffffffffffff00ff
+**	ret
+*/
+TEST_UNIFORM_Z (bic_65280_u64_x, svuint64_t,
+		z0 = svbic_n_u64_x (p0, z0, 0xff00),
+		z0 = svbic_x (p0, z0, 0xff00))
+
+/*
+** bic_m127_u64_x:
+**	and	z0\.d, z0\.d, #0x7e
+**	ret
+*/
+TEST_UNIFORM_Z (bic_m127_u64_x, svuint64_t,
+		z0 = svbic_n_u64_x (p0, z0, -127),
+		z0 = svbic_x (p0, z0, -127))
+
+/*
+** bic_m128_u64_x:
+**	and	z0\.d, z0\.d, #0x7f
+**	ret
+*/
+TEST_UNIFORM_Z (bic_m128_u64_x, svuint64_t,
+		z0 = svbic_n_u64_x (p0, z0, -128),
+		z0 = svbic_x (p0, z0, -128))
+
+/*
+** bic_m255_u64_x:
+**	and	z0\.d, z0\.d, #0xfe
+**	ret
+*/
+TEST_UNIFORM_Z (bic_m255_u64_x, svuint64_t,
+		z0 = svbic_n_u64_x (p0, z0, -255),
+		z0 = svbic_x (p0, z0, -255))
+
+/*
+** bic_m256_u64_x:
+**	and	z0\.d, z0\.d, #0xff
+**	ret
+*/
+TEST_UNIFORM_Z (bic_m256_u64_x, svuint64_t,
+		z0 = svbic_n_u64_x (p0, z0, -256),
+		z0 = svbic_x (p0, z0, -256))
+
+/*
+** bic_m257_u64_x:
+**	and	z0\.d, z0\.d, #0x100
+**	ret
+*/
+TEST_UNIFORM_Z (bic_m257_u64_x, svuint64_t,
+		z0 = svbic_n_u64_x (p0, z0, -257),
+		z0 = svbic_x (p0, z0, -257))
+
+/*
+** bic_m512_u64_x:
+**	and	z0\.d, z0\.d, #0x1ff
+**	ret
+*/
+TEST_UNIFORM_Z (bic_m512_u64_x, svuint64_t,
+		z0 = svbic_n_u64_x (p0, z0, -512),
+		z0 = svbic_x (p0, z0, -512))
+
+/*
+** bic_m32768_u64_x:
+**	and	z0\.d, z0\.d, #0x7fff
+**	ret
+*/
+TEST_UNIFORM_Z (bic_m32768_u64_x, svuint64_t,
+		z0 = svbic_n_u64_x (p0, z0, -0x8000),
+		z0 = svbic_x (p0, z0, -0x8000))
+
+/*
+** bic_5_u64_x:
+**	mov	(z[0-9]+)\.d, #-6
+**	and	z0\.d, z0\.d, \1\.d
+**	ret
+*/
+TEST_UNIFORM_Z (bic_5_u64_x, svuint64_t,
+		z0 = svbic_n_u64_x (p0, z0, 5),
+		z0 = svbic_x (p0, z0, 5))
diff --git a/gcc/testsuite/gcc.target/aarch64/sve-acle/asm/bic_u8.c b/gcc/testsuite/gcc.target/aarch64/sve-acle/asm/bic_u8.c
new file mode 100644
index 0000000..dc7af1b
--- /dev/null
+++ b/gcc/testsuite/gcc.target/aarch64/sve-acle/asm/bic_u8.c
@@ -0,0 +1,317 @@
+/* { dg-do compile } */
+/* { dg-final { check-function-bodies "**" "" "-DCHECK_ASM" } } */
+
+#include "test_sve_acle.h"
+
+/*
+** bic_u8_m_tied1:
+**	bic	z0\.b, p0/m, z0\.b, z1\.b
+**	ret
+*/
+TEST_UNIFORM_Z (bic_u8_m_tied1, svuint8_t,
+		z0 = svbic_u8_m (p0, z0, z1),
+		z0 = svbic_m (p0, z0, z1))
+
+/* Bad RA choice: no preferred output sequence.  */
+TEST_UNIFORM_Z (bic_u8_m_tied2, svuint8_t,
+		z1 = svbic_u8_m (p0, z0, z1),
+		z1 = svbic_m (p0, z0, z1))
+
+/*
+** bic_u8_m_untied:
+**	movprfx	z0, z1
+**	bic	z0\.b, p0/m, z0\.b, z2\.b
+**	ret
+*/
+TEST_UNIFORM_Z (bic_u8_m_untied, svuint8_t,
+		z0 = svbic_u8_m (p0, z1, z2),
+		z0 = svbic_m (p0, z1, z2))
+
+/*
+** bic_w0_u8_m_tied1:
+**	mov	(z[0-9]+\.b), w0
+**	bic	z0\.b, p0/m, z0\.b, \1
+**	ret
+*/
+TEST_UNIFORM_ZS (bic_w0_u8_m_tied1, svuint8_t, uint8_t,
+		 z0 = svbic_n_u8_m (p0, z0, x0),
+		 z0 = svbic_m (p0, z0, x0))
+
+/*
+** bic_w0_u8_m_untied:
+**	mov	(z[0-9]+\.b), w0
+**	movprfx	z0, z1
+**	bic	z0\.b, p0/m, z0\.b, \1
+**	ret
+*/
+TEST_UNIFORM_ZS (bic_w0_u8_m_untied, svuint8_t, uint8_t,
+		 z0 = svbic_n_u8_m (p0, z1, x0),
+		 z0 = svbic_m (p0, z1, x0))
+
+/*
+** bic_b0_u8_m_tied1:
+**	mov	(z[0-9]+\.b), b0
+**	bic	z1\.b, p0/m, z1\.b, \1
+**	ret
+*/
+TEST_UNIFORM_ZS (bic_b0_u8_m_tied1, svuint8_t, uint8_t,
+		 z1 = svbic_n_u8_m (p0, z1, d0),
+		 z1 = svbic_m (p0, z1, d0))
+
+/*
+** bic_b0_u8_m_untied:
+**	mov	(z[0-9]+\.b), b0
+**	movprfx	z1, z2
+**	bic	z1\.b, p0/m, z1\.b, \1
+**	ret
+*/
+TEST_UNIFORM_ZS (bic_b0_u8_m_untied, svuint8_t, uint8_t,
+		 z1 = svbic_n_u8_m (p0, z2, d0),
+		 z1 = svbic_m (p0, z2, d0))
+
+/*
+** bic_1_u8_m_tied1:
+**	mov	(z[0-9]+\.b), #-2
+**	and	z0\.b, p0/m, z0\.b, \1
+**	ret
+*/
+TEST_UNIFORM_Z (bic_1_u8_m_tied1, svuint8_t,
+		z0 = svbic_n_u8_m (p0, z0, 1),
+		z0 = svbic_m (p0, z0, 1))
+
+/*
+** bic_1_u8_m_untied:
+**	mov	(z[0-9]+\.b), #-2
+**	movprfx	z0, z1
+**	and	z0\.b, p0/m, z0\.b, \1
+**	ret
+*/
+TEST_UNIFORM_Z (bic_1_u8_m_untied, svuint8_t,
+		z0 = svbic_n_u8_m (p0, z1, 1),
+		z0 = svbic_m (p0, z1, 1))
+
+/*
+** bic_m2_u8_m:
+**	mov	(z[0-9]+\.b), #1
+**	and	z0\.b, p0/m, z0\.b, \1
+**	ret
+*/
+TEST_UNIFORM_Z (bic_m2_u8_m, svuint8_t,
+		z0 = svbic_n_u8_m (p0, z0, -2),
+		z0 = svbic_m (p0, z0, -2))
+
+/*
+** bic_u8_z_tied1:
+**	movprfx	z0\.b, p0/z, z0\.b
+**	bic	z0\.b, p0/m, z0\.b, z1\.b
+**	ret
+*/
+TEST_UNIFORM_Z (bic_u8_z_tied1, svuint8_t,
+		z0 = svbic_u8_z (p0, z0, z1),
+		z0 = svbic_z (p0, z0, z1))
+
+/*
+** bic_u8_z_tied2:
+**	mov	(z[0-9]+)\.d, z1\.d
+**	movprfx	z1\.b, p0/z, z0\.b
+**	bic	z1\.b, p0/m, z1\.b, \1\.b
+**	ret
+*/
+TEST_UNIFORM_Z (bic_u8_z_tied2, svuint8_t,
+		z1 = svbic_u8_z (p0, z0, z1),
+		z1 = svbic_z (p0, z0, z1))
+
+/*
+** bic_u8_z_untied:
+**	movprfx	z0\.b, p0/z, z1\.b
+**	bic	z0\.b, p0/m, z0\.b, z2\.b
+**	ret
+*/
+TEST_UNIFORM_Z (bic_u8_z_untied, svuint8_t,
+		z0 = svbic_u8_z (p0, z1, z2),
+		z0 = svbic_z (p0, z1, z2))
+
+/*
+** bic_w0_u8_z_tied1:
+**	mov	(z[0-9]+\.b), w0
+**	movprfx	z0\.b, p0/z, z0\.b
+**	bic	z0\.b, p0/m, z0\.b, \1
+**	ret
+*/
+TEST_UNIFORM_ZS (bic_w0_u8_z_tied1, svuint8_t, uint8_t,
+		 z0 = svbic_n_u8_z (p0, z0, x0),
+		 z0 = svbic_z (p0, z0, x0))
+
+/*
+** bic_w0_u8_z_untied:
+**	mov	(z[0-9]+\.b), w0
+**	movprfx	z0\.b, p0/z, z1\.b
+**	bic	z0\.b, p0/m, z0\.b, \1
+**	ret
+*/
+TEST_UNIFORM_ZS (bic_w0_u8_z_untied, svuint8_t, uint8_t,
+		 z0 = svbic_n_u8_z (p0, z1, x0),
+		 z0 = svbic_z (p0, z1, x0))
+
+/*
+** bic_b0_u8_z_tied1:
+**	mov	(z[0-9]+\.b), b0
+**	movprfx	z1\.b, p0/z, z1\.b
+**	bic	z1\.b, p0/m, z1\.b, \1
+**	ret
+*/
+TEST_UNIFORM_ZS (bic_b0_u8_z_tied1, svuint8_t, uint8_t,
+		 z1 = svbic_n_u8_z (p0, z1, d0),
+		 z1 = svbic_z (p0, z1, d0))
+
+/*
+** bic_b0_u8_z_untied:
+**	mov	(z[0-9]+\.b), b0
+**	movprfx	z1\.b, p0/z, z2\.b
+**	bic	z1\.b, p0/m, z1\.b, \1
+**	ret
+*/
+TEST_UNIFORM_ZS (bic_b0_u8_z_untied, svuint8_t, uint8_t,
+		 z1 = svbic_n_u8_z (p0, z2, d0),
+		 z1 = svbic_z (p0, z2, d0))
+
+/*
+** bic_u8_x_tied1:
+**	bic	z0\.d, z0\.d, z1\.d
+**	ret
+*/
+TEST_UNIFORM_Z (bic_u8_x_tied1, svuint8_t,
+		z0 = svbic_u8_x (p0, z0, z1),
+		z0 = svbic_x (p0, z0, z1))
+
+/*
+** bic_u8_x_tied2:
+**	bic	z1\.d, z0\.d, z1\.d
+**	ret
+*/
+TEST_UNIFORM_Z (bic_u8_x_tied2, svuint8_t,
+		z1 = svbic_u8_x (p0, z0, z1),
+		z1 = svbic_x (p0, z0, z1))
+
+/*
+** bic_u8_x_untied:
+**	bic	z2\.d, z0\.d, z1\.d
+**	ret
+*/
+TEST_UNIFORM_Z (bic_u8_x_untied, svuint8_t,
+		z2 = svbic_u8_x (p0, z0, z1),
+		z2 = svbic_x (p0, z0, z1))
+
+/*
+** bic_w0_u8_x_tied1:
+**	mov	(z[0-9]+)\.b, w0
+**	bic	z0\.d, z0\.d, \1\.d
+**	ret
+*/
+TEST_UNIFORM_ZS (bic_w0_u8_x_tied1, svuint8_t, uint8_t,
+		 z0 = svbic_n_u8_x (p0, z0, x0),
+		 z0 = svbic_x (p0, z0, x0))
+
+/*
+** bic_w0_u8_x_untied:
+**	mov	z1\.b, w0
+**	bic	z1\.d, z0\.d, z1\.d
+**	ret
+*/
+TEST_UNIFORM_ZS (bic_w0_u8_x_untied, svuint8_t, uint8_t,
+		 z1 = svbic_n_u8_x (p0, z0, x0),
+		 z1 = svbic_x (p0, z0, x0))
+
+/*
+** bic_b0_u8_x_tied1:
+**	mov	(z[0-9]+)\.b, b0
+**	bic	z1\.d, z1\.d, \1\.d
+**	ret
+*/
+TEST_UNIFORM_ZS (bic_b0_u8_x_tied1, svuint8_t, uint8_t,
+		 z1 = svbic_n_u8_x (p0, z1, d0),
+		 z1 = svbic_x (p0, z1, d0))
+
+/*
+** bic_b0_u8_x_untied:
+**	mov	(z[0-9]+)\.b, b0
+**	bic	z2\.d, z1\.d, \1\.d
+**	ret
+*/
+TEST_UNIFORM_ZS (bic_b0_u8_x_untied, svuint8_t, uint8_t,
+		 z2 = svbic_n_u8_x (p0, z1, d0),
+		 z2 = svbic_x (p0, z1, d0))
+
+/*
+** bic_1_u8_x_tied1:
+**	and	z0\.b, z0\.b, #0xfe
+**	ret
+*/
+TEST_UNIFORM_Z (bic_1_u8_x_tied1, svuint8_t,
+		z0 = svbic_n_u8_x (p0, z0, 1),
+		z0 = svbic_x (p0, z0, 1))
+
+/*
+** bic_1_u8_x_untied:
+**	movprfx	z0, z1
+**	and	z0\.b, z0\.b, #0xfe
+**	ret
+*/
+TEST_UNIFORM_Z (bic_1_u8_x_untied, svuint8_t,
+		z0 = svbic_n_u8_x (p0, z1, 1),
+		z0 = svbic_x (p0, z1, 1))
+
+/*
+** bic_127_u8_x:
+**	and	z0\.b, z0\.b, #0x80
+**	ret
+*/
+TEST_UNIFORM_Z (bic_127_u8_x, svuint8_t,
+		z0 = svbic_n_u8_x (p0, z0, 127),
+		z0 = svbic_x (p0, z0, 127))
+
+/*
+** bic_128_u8_x:
+**	and	z0\.b, z0\.b, #0x7f
+**	ret
+*/
+TEST_UNIFORM_Z (bic_128_u8_x, svuint8_t,
+		z0 = svbic_n_u8_x (p0, z0, 128),
+		z0 = svbic_x (p0, z0, 128))
+
+/*
+** bic_255_u8_x:
+**	mov	z0\.b, #0
+**	ret
+*/
+TEST_UNIFORM_Z (bic_255_u8_x, svuint8_t,
+		z0 = svbic_n_u8_x (p0, z0, 255),
+		z0 = svbic_x (p0, z0, 255))
+
+/*
+** bic_m127_u8_x:
+**	and	z0\.b, z0\.b, #0x7e
+**	ret
+*/
+TEST_UNIFORM_Z (bic_m127_u8_x, svuint8_t,
+		z0 = svbic_n_u8_x (p0, z0, -127),
+		z0 = svbic_x (p0, z0, -127))
+
+/*
+** bic_m128_u8_x:
+**	and	z0\.b, z0\.b, #0x7f
+**	ret
+*/
+TEST_UNIFORM_Z (bic_m128_u8_x, svuint8_t,
+		z0 = svbic_n_u8_x (p0, z0, -128),
+		z0 = svbic_x (p0, z0, -128))
+
+/*
+** bic_5_u8_x:
+**	mov	(z[0-9]+)\.b, #-6
+**	and	z0\.d, z0\.d, \1\.d
+**	ret
+*/
+TEST_UNIFORM_Z (bic_5_u8_x, svuint8_t,
+		z0 = svbic_n_u8_x (p0, z0, 5),
+		z0 = svbic_x (p0, z0, 5))
-- 
2.7.4