AVX-512: added intrinsics for VPAND, VPOR and VPXOR

by Asaf Badouh (asaf.badouh@intel.com)
llvm-svn: 233525
commit 98de9d6360 (parent 5d28b900ac)
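Each new intrinsic takes two source vectors, a pass-through vector, and a lane mask (i16 for the 16 x i32 forms, i8 for the 8 x i64 forms). A minimal sketch of calling one of them from IR, based on the declarations and tests added in this patch; the function name @example_masked_and is illustrative only:

; Sketch only: operand order is (src1, src2, pass-through, lane mask).
declare <16 x i32> @llvm.x86.avx512.mask.pand.d.512(<16 x i32>, <16 x i32>, <16 x i32>, i16)

define <16 x i32> @example_masked_and(<16 x i32> %a, <16 x i32> %b, <16 x i32> %passThru, i16 %mask) {
  ; Lanes whose mask bit is set receive %a AND %b; the remaining lanes keep %passThru.
  %res = call <16 x i32> @llvm.x86.avx512.mask.pand.d.512(<16 x i32> %a, <16 x i32> %b, <16 x i32> %passThru, i16 %mask)
  ret <16 x i32> %res
}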
@@ -3093,7 +3093,27 @@ let TargetPrefix = "x86" in {  // All intrinsics start with "llvm.x86.".
               Intrinsic<[llvm_v8i64_ty], [llvm_v8i32_ty],
                         [IntrNoMem]>;
 }
+//Bitwise Ops
+let TargetPrefix = "x86" in {  // All intrinsics start with "llvm.x86.".
+  def int_x86_avx512_mask_pand_d_512 : GCCBuiltin<"__builtin_ia32_pandd512_mask">,
+          Intrinsic<[llvm_v16i32_ty], [llvm_v16i32_ty, llvm_v16i32_ty,
+                     llvm_v16i32_ty, llvm_i16_ty], [IntrNoMem]>;
+  def int_x86_avx512_mask_pand_q_512 : GCCBuiltin<"__builtin_ia32_pandq512_mask">,
+          Intrinsic<[llvm_v8i64_ty], [llvm_v8i64_ty, llvm_v8i64_ty,
+                     llvm_v8i64_ty, llvm_i8_ty], [IntrNoMem]>;
+  def int_x86_avx512_mask_por_d_512 : GCCBuiltin<"__builtin_ia32_pord512_mask">,
+          Intrinsic<[llvm_v16i32_ty], [llvm_v16i32_ty, llvm_v16i32_ty,
+                     llvm_v16i32_ty, llvm_i16_ty], [IntrNoMem]>;
+  def int_x86_avx512_mask_por_q_512 : GCCBuiltin<"__builtin_ia32_porq512_mask">,
+          Intrinsic<[llvm_v8i64_ty], [llvm_v8i64_ty, llvm_v8i64_ty,
+                     llvm_v8i64_ty, llvm_i8_ty], [IntrNoMem]>;
+  def int_x86_avx512_mask_pxor_d_512 : GCCBuiltin<"__builtin_ia32_pxord512_mask">,
+          Intrinsic<[llvm_v16i32_ty], [llvm_v16i32_ty, llvm_v16i32_ty,
+                     llvm_v16i32_ty, llvm_i16_ty], [IntrNoMem]>;
+  def int_x86_avx512_mask_pxor_q_512 : GCCBuiltin<"__builtin_ia32_pxorq512_mask">,
+          Intrinsic<[llvm_v8i64_ty], [llvm_v8i64_ty, llvm_v8i64_ty,
+                     llvm_v8i64_ty, llvm_i8_ty], [IntrNoMem]>;
+}
 // Arithmetic ops
 let TargetPrefix = "x86" in {  // All intrinsics start with "llvm.x86.".
@@ -3813,14 +3833,6 @@ let TargetPrefix = "x86" in {
   def int_x86_avx512_mask_cmp_pd_512 : GCCBuiltin<"__builtin_ia32_cmppd512_mask">,
             Intrinsic<[llvm_i8_ty], [llvm_v8f64_ty, llvm_v8f64_ty, llvm_i8_ty,
                                      llvm_i8_ty, llvm_i32_ty], [IntrNoMem]>;
-  def int_x86_avx512_mask_pand_d_512 : GCCBuiltin<"__builtin_ia32_pandd512_mask">,
-            Intrinsic<[llvm_v16i32_ty], [llvm_v16i32_ty, llvm_v16i32_ty,
-                                         llvm_v16i32_ty, llvm_i16_ty],
-                      [IntrNoMem]>;
-  def int_x86_avx512_mask_pand_q_512 : GCCBuiltin<"__builtin_ia32_pandq512_mask">,
-            Intrinsic<[llvm_v8i64_ty], [llvm_v8i64_ty, llvm_v8i64_ty,
-                                        llvm_v8i64_ty, llvm_i8_ty],
-                      [IntrNoMem]>;
   def int_x86_avx512_movntdqa : GCCBuiltin<"__builtin_ia32_movntdqa512">,
             Intrinsic<[llvm_v8i64_ty], [llvm_ptr_ty], [IntrReadMem]>;
 }
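The second hunk removes the older pand definitions from the arithmetic block; they now live in the new bitwise block above. For each TableGen def, the IR-level intrinsic name follows the usual int_-to-@llvm. renaming, for example (these declarations also appear verbatim in the test changes further down):

declare <8 x i64>  @llvm.x86.avx512.mask.pand.q.512(<8 x i64>, <8 x i64>, <8 x i64>, i8)
declare <16 x i32> @llvm.x86.avx512.mask.por.d.512(<16 x i32>, <16 x i32>, <16 x i32>, i16)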
@@ -334,6 +334,8 @@ static const IntrinsicData  IntrinsicsWithoutChain[] = {
                      X86ISD::FMUL_RND),
   X86_INTRINSIC_DATA(avx512_mask_mul_ps_512, INTR_TYPE_2OP_MASK, ISD::FMUL,
                      X86ISD::FMUL_RND),
+  X86_INTRINSIC_DATA(avx512_mask_pand_d_512, INTR_TYPE_2OP_MASK, ISD::AND, 0),
+  X86_INTRINSIC_DATA(avx512_mask_pand_q_512, INTR_TYPE_2OP_MASK, ISD::AND, 0),
   X86_INTRINSIC_DATA(avx512_mask_pcmpeq_b_128,  CMP_MASK,  X86ISD::PCMPEQM, 0),
   X86_INTRINSIC_DATA(avx512_mask_pcmpeq_b_256,  CMP_MASK,  X86ISD::PCMPEQM, 0),
   X86_INTRINSIC_DATA(avx512_mask_pcmpeq_b_512,  CMP_MASK,  X86ISD::PCMPEQM, 0),
@@ -358,6 +360,8 @@ static const IntrinsicData  IntrinsicsWithoutChain[] = {
   X86_INTRINSIC_DATA(avx512_mask_pcmpgt_w_128,  CMP_MASK,  X86ISD::PCMPGTM, 0),
   X86_INTRINSIC_DATA(avx512_mask_pcmpgt_w_256,  CMP_MASK,  X86ISD::PCMPGTM, 0),
   X86_INTRINSIC_DATA(avx512_mask_pcmpgt_w_512,  CMP_MASK,  X86ISD::PCMPGTM, 0),
+  X86_INTRINSIC_DATA(avx512_mask_por_d_512, INTR_TYPE_2OP_MASK, ISD::OR, 0),
+  X86_INTRINSIC_DATA(avx512_mask_por_q_512, INTR_TYPE_2OP_MASK, ISD::OR, 0),
   X86_INTRINSIC_DATA(avx512_mask_psll_d,        INTR_TYPE_2OP_MASK, X86ISD::VSHL, 0),
   X86_INTRINSIC_DATA(avx512_mask_psll_q,        INTR_TYPE_2OP_MASK, X86ISD::VSHL, 0),
   X86_INTRINSIC_DATA(avx512_mask_pslli_d,       VSHIFT_MASK, X86ISD::VSHLI, 0),
@@ -376,6 +380,8 @@ static const IntrinsicData  IntrinsicsWithoutChain[] = {
   X86_INTRINSIC_DATA(avx512_mask_psrli_q,       VSHIFT_MASK, X86ISD::VSRLI, 0),
   X86_INTRINSIC_DATA(avx512_mask_psrlv_d,       INTR_TYPE_2OP_MASK, ISD::SRL, 0),
   X86_INTRINSIC_DATA(avx512_mask_psrlv_q,       INTR_TYPE_2OP_MASK, ISD::SRL, 0),
+  X86_INTRINSIC_DATA(avx512_mask_pxor_d_512, INTR_TYPE_2OP_MASK, ISD::XOR, 0),
+  X86_INTRINSIC_DATA(avx512_mask_pxor_q_512, INTR_TYPE_2OP_MASK, ISD::XOR, 0),
   X86_INTRINSIC_DATA(avx512_mask_rndscale_sd,   INTR_TYPE_SCALAR_MASK_RM,
                      X86ISD::RNDSCALE, 0),
   X86_INTRINSIC_DATA(avx512_mask_rndscale_ss,   INTR_TYPE_SCALAR_MASK_RM,
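Each entry above routes the intrinsic through the generic INTR_TYPE_2OP_MASK handling with a plain ISD node (AND, OR, XOR). In IR terms, the masked behavior this implies can be sketched as follows; this is an illustration of the intended semantics, not code from the patch:

define <16 x i32> @masked_and_semantics(<16 x i32> %a, <16 x i32> %b, <16 x i32> %passThru, i16 %mask) {
  %op  = and <16 x i32> %a, %b                       ; the underlying ISD::AND
  %m   = bitcast i16 %mask to <16 x i1>              ; one mask bit per lane
  %res = select <16 x i1> %m, <16 x i32> %op, <16 x i32> %passThru
  ret <16 x i32> %res
}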
@@ -1606,3 +1606,100 @@ define <8 x double> @test_vmulpd_mask_rz(<8 x double> %a0, <8 x double> %a1, i8
                     <8 x double> zeroinitializer, i8 %mask, i32 3)
   ret <8 x double> %res
 }
+
+define <16 x i32> @test_xor_epi32(<16 x i32> %a, <16 x i32> %b) {
+  ;CHECK-LABEL: test_xor_epi32
+  ;CHECK: vpxord {{.*}}encoding: [0x62,0xf1,0x7d,0x48,0xef,0xc1]
+  %res = call <16 x i32> @llvm.x86.avx512.mask.pxor.d.512(<16 x i32> %a,<16 x i32> %b, <16 x i32>zeroinitializer, i16 -1)
+  ret < 16 x i32> %res
+}
+
+define <16 x i32> @test_mask_xor_epi32(<16 x i32> %a,<16 x i32> %b, <16 x i32> %passThru, i16 %mask) {
+  ;CHECK-LABEL: test_mask_xor_epi32
+  ;CHECK: vpxord %zmm1, %zmm0, %zmm2 {%k1} ## encoding: [0x62,0xf1,0x7d,0x49,0xef,0xd1]
+  %res = call <16 x i32> @llvm.x86.avx512.mask.pxor.d.512(<16 x i32> %a, <16 x i32> %b, <16 x i32> %passThru, i16 %mask)
+  ret < 16 x i32> %res
+}
+
+declare <16 x i32> @llvm.x86.avx512.mask.pxor.d.512(<16 x i32>, <16 x i32>, <16 x i32>, i16)
+
+define <16 x i32> @test_or_epi32(<16 x i32> %a, <16 x i32> %b) {
+  ;CHECK-LABEL: test_or_epi32
+  ;CHECK: vpord {{.*}}encoding: [0x62,0xf1,0x7d,0x48,0xeb,0xc1]
+  %res = call <16 x i32> @llvm.x86.avx512.mask.por.d.512(<16 x i32> %a,<16 x i32> %b, <16 x i32>zeroinitializer, i16 -1)
+  ret < 16 x i32> %res
+}
+
+define <16 x i32> @test_mask_or_epi32(<16 x i32> %a,<16 x i32> %b, <16 x i32> %passThru, i16 %mask) {
+  ;CHECK-LABEL: test_mask_or_epi32
+  ;CHECK: vpord %zmm1, %zmm0, %zmm2 {%k1} ## encoding: [0x62,0xf1,0x7d,0x49,0xeb,0xd1]
+  %res = call <16 x i32> @llvm.x86.avx512.mask.por.d.512(<16 x i32> %a, <16 x i32> %b, <16 x i32> %passThru, i16 %mask)
+  ret < 16 x i32> %res
+}
+
+declare <16 x i32> @llvm.x86.avx512.mask.por.d.512(<16 x i32>, <16 x i32>, <16 x i32>, i16)
+
+define <16 x i32> @test_and_epi32(<16 x i32> %a, <16 x i32> %b) {
+  ;CHECK-LABEL: test_and_epi32
+  ;CHECK: vpandd {{.*}}encoding: [0x62,0xf1,0x7d,0x48,0xdb,0xc1]
+  %res = call <16 x i32> @llvm.x86.avx512.mask.pand.d.512(<16 x i32> %a,<16 x i32> %b, <16 x i32>zeroinitializer, i16 -1)
+  ret < 16 x i32> %res
+}
+
+define <16 x i32> @test_mask_and_epi32(<16 x i32> %a,<16 x i32> %b, <16 x i32> %passThru, i16 %mask) {
+  ;CHECK-LABEL: test_mask_and_epi32
+  ;CHECK: vpandd %zmm1, %zmm0, %zmm2 {%k1} ## encoding: [0x62,0xf1,0x7d,0x49,0xdb,0xd1]
+  %res = call <16 x i32> @llvm.x86.avx512.mask.pand.d.512(<16 x i32> %a, <16 x i32> %b, <16 x i32> %passThru, i16 %mask)
+  ret < 16 x i32> %res
+}
+
+declare <16 x i32> @llvm.x86.avx512.mask.pand.d.512(<16 x i32>, <16 x i32>, <16 x i32>, i16)
+
+define <8 x i64> @test_xor_epi64(<8 x i64> %a, <8 x i64> %b) {
+  ;CHECK-LABEL: test_xor_epi64
+  ;CHECK: vpxorq {{.*}}encoding: [0x62,0xf1,0xfd,0x48,0xef,0xc1]
+  %res = call <8 x i64> @llvm.x86.avx512.mask.pxor.q.512(<8 x i64> %a,<8 x i64> %b, <8 x i64>zeroinitializer, i8 -1)
+  ret < 8 x i64> %res
+}
+
+define <8 x i64> @test_mask_xor_epi64(<8 x i64> %a,<8 x i64> %b, <8 x i64> %passThru, i8 %mask) {
+  ;CHECK-LABEL: test_mask_xor_epi64
+  ;CHECK: vpxorq %zmm1, %zmm0, %zmm2 {%k1} ## encoding: [0x62,0xf1,0xfd,0x49,0xef,0xd1]
+  %res = call <8 x i64> @llvm.x86.avx512.mask.pxor.q.512(<8 x i64> %a, <8 x i64> %b, <8 x i64> %passThru, i8 %mask)
+  ret < 8 x i64> %res
+}
+
+declare <8 x i64> @llvm.x86.avx512.mask.pxor.q.512(<8 x i64>, <8 x i64>, <8 x i64>, i8)
+
+define <8 x i64> @test_or_epi64(<8 x i64> %a, <8 x i64> %b) {
+  ;CHECK-LABEL: test_or_epi64
+  ;CHECK: vporq {{.*}}encoding: [0x62,0xf1,0xfd,0x48,0xeb,0xc1]
+  %res = call <8 x i64> @llvm.x86.avx512.mask.por.q.512(<8 x i64> %a,<8 x i64> %b, <8 x i64>zeroinitializer, i8 -1)
+  ret < 8 x i64> %res
+}
+
+define <8 x i64> @test_mask_or_epi64(<8 x i64> %a,<8 x i64> %b, <8 x i64> %passThru, i8 %mask) {
+  ;CHECK-LABEL: test_mask_or_epi64
+  ;CHECK: vporq %zmm1, %zmm0, %zmm2 {%k1} ## encoding: [0x62,0xf1,0xfd,0x49,0xeb,0xd1]
+  %res = call <8 x i64> @llvm.x86.avx512.mask.por.q.512(<8 x i64> %a, <8 x i64> %b, <8 x i64> %passThru, i8 %mask)
+  ret < 8 x i64> %res
+}
+
+declare <8 x i64> @llvm.x86.avx512.mask.por.q.512(<8 x i64>, <8 x i64>, <8 x i64>, i8)
+
+define <8 x i64> @test_and_epi64(<8 x i64> %a, <8 x i64> %b) {
+  ;CHECK-LABEL: test_and_epi64
+  ;CHECK: vpandq {{.*}}encoding: [0x62,0xf1,0xfd,0x48,0xdb,0xc1]
+  %res = call <8 x i64> @llvm.x86.avx512.mask.pand.q.512(<8 x i64> %a,<8 x i64> %b, <8 x i64>zeroinitializer, i8 -1)
+  ret < 8 x i64> %res
+}
+
+define <8 x i64> @test_mask_and_epi64(<8 x i64> %a,<8 x i64> %b, <8 x i64> %passThru, i8 %mask) {
+  ;CHECK-LABEL: test_mask_and_epi64
+  ;CHECK: vpandq %zmm1, %zmm0, %zmm2 {%k1} ## encoding: [0x62,0xf1,0xfd,0x49,0xdb,0xd1]
+  %res = call <8 x i64> @llvm.x86.avx512.mask.pand.q.512(<8 x i64> %a, <8 x i64> %b, <8 x i64> %passThru, i8 %mask)
+  ret < 8 x i64> %res
+}
+
+declare <8 x i64> @llvm.x86.avx512.mask.pand.q.512(<8 x i64>, <8 x i64>, <8 x i64>, i8)
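The test hunk shows only the appended functions; the RUN line that drives the CHECK and encoding checks sits at the top of the file and is outside this excerpt. It would be of roughly this shape, where the triple and -mcpu value are assumptions rather than part of the patch:

; RUN: llc < %s -mtriple=x86_64-apple-darwin -mcpu=knl -show-mc-encoding | FileCheck %s   ; assumed, not taken from the patch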