; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+sse2 | FileCheck %s --check-prefix=SSE --check-prefix=SSE2
; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+ssse3 | FileCheck %s --check-prefix=SSE --check-prefix=SSSE3
; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+sse4.1 | FileCheck %s --check-prefix=SSE --check-prefix=SSE41
; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+avx | FileCheck %s --check-prefix=AVX --check-prefix=AVX1
; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+avx2 | FileCheck %s --check-prefixes=AVX,AVX2,AVX2-SLOW
; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+avx2,+fast-variable-shuffle | FileCheck %s --check-prefixes=AVX,AVX2,AVX2-FAST
; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+avx512f | FileCheck %s --check-prefix=AVX512 --check-prefix=AVX512F
; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+avx512vl,+fast-variable-shuffle | FileCheck %s --check-prefixes=AVX512,AVX512VL
; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+avx512bw,+fast-variable-shuffle | FileCheck %s --check-prefixes=AVX512,AVX512BW
; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+avx512bw,+avx512vl,+fast-variable-shuffle | FileCheck %s --check-prefixes=AVX512,AVX512BWVL

;
; Unsigned saturation truncation to vXi32
;
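; Each test below expresses "truncate with unsigned saturation" as plain IR: an
; unsigned compare against the destination type's maximum, a select between the
; source and the clamp constant, and a trunc. A minimal sketch of the pattern,
; kept in a comment so the autogenerated checks stay in sync (the function name
; is illustrative only):
;
;   define <4 x i32> @usat_sketch(<4 x i64> %x) {
;     %cmp = icmp ult <4 x i64> %x, <i64 4294967295, i64 4294967295, i64 4294967295, i64 4294967295>
;     %sel = select <4 x i1> %cmp, <4 x i64> %x, <4 x i64> <i64 4294967295, i64 4294967295, i64 4294967295, i64 4294967295>
;     %res = trunc <4 x i64> %sel to <4 x i32>
;     ret <4 x i32> %res
;   }
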
define <4 x i32> @trunc_usat_v4i64_v4i32(<4 x i64> %a0) {
; SSE2-LABEL: trunc_usat_v4i64_v4i32:
 | 
						|
; SSE2:       # %bb.0:
 | 
						|
; SSE2-NEXT:    movdqa {{.*#+}} xmm2 = [9223372039002259456,9223372039002259456]
 | 
						|
; SSE2-NEXT:    movdqa %xmm0, %xmm3
 | 
						|
; SSE2-NEXT:    pxor %xmm2, %xmm3
 | 
						|
; SSE2-NEXT:    movdqa {{.*#+}} xmm4 = [9223372039002259455,9223372039002259455]
 | 
						|
; SSE2-NEXT:    movdqa %xmm4, %xmm5
 | 
						|
; SSE2-NEXT:    pcmpgtd %xmm3, %xmm5
 | 
						|
; SSE2-NEXT:    pshufd {{.*#+}} xmm6 = xmm5[0,0,2,2]
 | 
						|
; SSE2-NEXT:    pcmpeqd %xmm4, %xmm3
 | 
						|
; SSE2-NEXT:    pshufd {{.*#+}} xmm3 = xmm3[1,1,3,3]
 | 
						|
; SSE2-NEXT:    pand %xmm6, %xmm3
 | 
						|
; SSE2-NEXT:    pshufd {{.*#+}} xmm5 = xmm5[1,1,3,3]
 | 
						|
; SSE2-NEXT:    por %xmm3, %xmm5
 | 
						|
; SSE2-NEXT:    pxor %xmm1, %xmm2
 | 
						|
; SSE2-NEXT:    movdqa %xmm4, %xmm3
 | 
						|
; SSE2-NEXT:    pcmpgtd %xmm2, %xmm3
 | 
						|
; SSE2-NEXT:    pshufd {{.*#+}} xmm6 = xmm3[0,0,2,2]
 | 
						|
; SSE2-NEXT:    pcmpeqd %xmm4, %xmm2
 | 
						|
; SSE2-NEXT:    pshufd {{.*#+}} xmm2 = xmm2[1,1,3,3]
 | 
						|
; SSE2-NEXT:    pand %xmm6, %xmm2
 | 
						|
; SSE2-NEXT:    pshufd {{.*#+}} xmm3 = xmm3[1,1,3,3]
 | 
						|
; SSE2-NEXT:    por %xmm2, %xmm3
 | 
						|
; SSE2-NEXT:    pand %xmm3, %xmm1
 | 
						|
; SSE2-NEXT:    pandn {{.*}}(%rip), %xmm3
 | 
						|
; SSE2-NEXT:    por %xmm1, %xmm3
 | 
						|
; SSE2-NEXT:    pand %xmm5, %xmm0
 | 
						|
; SSE2-NEXT:    pandn {{.*}}(%rip), %xmm5
 | 
						|
; SSE2-NEXT:    por %xmm5, %xmm0
 | 
						|
; SSE2-NEXT:    shufps {{.*#+}} xmm0 = xmm0[0,2],xmm3[0,2]
 | 
						|
; SSE2-NEXT:    retq
 | 
						|
;
 | 
						|
; SSSE3-LABEL: trunc_usat_v4i64_v4i32:
 | 
						|
; SSSE3:       # %bb.0:
 | 
						|
; SSSE3-NEXT:    movdqa {{.*#+}} xmm2 = [9223372039002259456,9223372039002259456]
 | 
						|
; SSSE3-NEXT:    movdqa %xmm0, %xmm3
 | 
						|
; SSSE3-NEXT:    pxor %xmm2, %xmm3
 | 
						|
; SSSE3-NEXT:    movdqa {{.*#+}} xmm4 = [9223372039002259455,9223372039002259455]
 | 
						|
; SSSE3-NEXT:    movdqa %xmm4, %xmm5
 | 
						|
; SSSE3-NEXT:    pcmpgtd %xmm3, %xmm5
 | 
						|
; SSSE3-NEXT:    pshufd {{.*#+}} xmm6 = xmm5[0,0,2,2]
 | 
						|
; SSSE3-NEXT:    pcmpeqd %xmm4, %xmm3
 | 
						|
; SSSE3-NEXT:    pshufd {{.*#+}} xmm3 = xmm3[1,1,3,3]
 | 
						|
; SSSE3-NEXT:    pand %xmm6, %xmm3
 | 
						|
; SSSE3-NEXT:    pshufd {{.*#+}} xmm5 = xmm5[1,1,3,3]
 | 
						|
; SSSE3-NEXT:    por %xmm3, %xmm5
 | 
						|
; SSSE3-NEXT:    pxor %xmm1, %xmm2
 | 
						|
; SSSE3-NEXT:    movdqa %xmm4, %xmm3
 | 
						|
; SSSE3-NEXT:    pcmpgtd %xmm2, %xmm3
 | 
						|
; SSSE3-NEXT:    pshufd {{.*#+}} xmm6 = xmm3[0,0,2,2]
 | 
						|
; SSSE3-NEXT:    pcmpeqd %xmm4, %xmm2
 | 
						|
; SSSE3-NEXT:    pshufd {{.*#+}} xmm2 = xmm2[1,1,3,3]
 | 
						|
; SSSE3-NEXT:    pand %xmm6, %xmm2
 | 
						|
; SSSE3-NEXT:    pshufd {{.*#+}} xmm3 = xmm3[1,1,3,3]
 | 
						|
; SSSE3-NEXT:    por %xmm2, %xmm3
 | 
						|
; SSSE3-NEXT:    pand %xmm3, %xmm1
 | 
						|
; SSSE3-NEXT:    pandn {{.*}}(%rip), %xmm3
 | 
						|
; SSSE3-NEXT:    por %xmm1, %xmm3
 | 
						|
; SSSE3-NEXT:    pand %xmm5, %xmm0
 | 
						|
; SSSE3-NEXT:    pandn {{.*}}(%rip), %xmm5
 | 
						|
; SSSE3-NEXT:    por %xmm5, %xmm0
 | 
						|
; SSSE3-NEXT:    shufps {{.*#+}} xmm0 = xmm0[0,2],xmm3[0,2]
 | 
						|
; SSSE3-NEXT:    retq
 | 
						|
;
 | 
						|
; SSE41-LABEL: trunc_usat_v4i64_v4i32:
 | 
						|
; SSE41:       # %bb.0:
 | 
						|
; SSE41-NEXT:    movdqa %xmm0, %xmm2
 | 
						|
; SSE41-NEXT:    movdqa {{.*#+}} xmm0 = [9223372039002259456,9223372039002259456]
 | 
						|
; SSE41-NEXT:    movdqa %xmm2, %xmm3
 | 
						|
; SSE41-NEXT:    pxor %xmm0, %xmm3
 | 
						|
; SSE41-NEXT:    movdqa {{.*#+}} xmm4 = [9223372039002259455,9223372039002259455]
 | 
						|
; SSE41-NEXT:    movdqa %xmm4, %xmm5
 | 
						|
; SSE41-NEXT:    pcmpgtd %xmm3, %xmm5
 | 
						|
; SSE41-NEXT:    pshufd {{.*#+}} xmm6 = xmm5[0,0,2,2]
 | 
						|
; SSE41-NEXT:    pcmpeqd %xmm4, %xmm3
 | 
						|
; SSE41-NEXT:    pshufd {{.*#+}} xmm3 = xmm3[1,1,3,3]
 | 
						|
; SSE41-NEXT:    pand %xmm6, %xmm3
 | 
						|
; SSE41-NEXT:    por %xmm5, %xmm3
 | 
						|
; SSE41-NEXT:    pxor %xmm1, %xmm0
 | 
						|
; SSE41-NEXT:    movdqa %xmm4, %xmm5
 | 
						|
; SSE41-NEXT:    pcmpgtd %xmm0, %xmm5
 | 
						|
; SSE41-NEXT:    pshufd {{.*#+}} xmm6 = xmm5[0,0,2,2]
 | 
						|
; SSE41-NEXT:    pcmpeqd %xmm4, %xmm0
 | 
						|
; SSE41-NEXT:    pshufd {{.*#+}} xmm0 = xmm0[1,1,3,3]
 | 
						|
; SSE41-NEXT:    pand %xmm6, %xmm0
 | 
						|
; SSE41-NEXT:    por %xmm5, %xmm0
 | 
						|
; SSE41-NEXT:    movapd {{.*#+}} xmm4 = [4294967295,4294967295]
 | 
						|
; SSE41-NEXT:    movapd {{.*#+}} xmm5 = [4294967295,429496729]
 | 
						|
; SSE41-NEXT:    blendvpd %xmm0, %xmm1, %xmm5
 | 
						|
; SSE41-NEXT:    movdqa %xmm3, %xmm0
 | 
						|
; SSE41-NEXT:    blendvpd %xmm0, %xmm2, %xmm4
 | 
						|
; SSE41-NEXT:    shufps {{.*#+}} xmm4 = xmm4[0,2],xmm5[0,2]
 | 
						|
; SSE41-NEXT:    movaps %xmm4, %xmm0
 | 
						|
; SSE41-NEXT:    retq
 | 
						|
;
 | 
						|
; AVX1-LABEL: trunc_usat_v4i64_v4i32:
 | 
						|
; AVX1:       # %bb.0:
 | 
						|
; AVX1-NEXT:    vextractf128 $1, %ymm0, %xmm1
 | 
						|
; AVX1-NEXT:    vmovdqa {{.*#+}} xmm2 = [9223372036854775808,9223372036854775808]
 | 
						|
; AVX1-NEXT:    vpxor %xmm2, %xmm1, %xmm1
 | 
						|
; AVX1-NEXT:    vmovdqa {{.*#+}} xmm3 = [9223372041149743103,9223372041149743103]
 | 
						|
; AVX1-NEXT:    vpcmpgtq %xmm1, %xmm3, %xmm1
 | 
						|
; AVX1-NEXT:    vpxor %xmm2, %xmm0, %xmm2
 | 
						|
; AVX1-NEXT:    vpcmpgtq %xmm2, %xmm3, %xmm2
 | 
						|
; AVX1-NEXT:    vinsertf128 $1, %xmm1, %ymm2, %ymm1
 | 
						|
; AVX1-NEXT:    vmovapd {{.*#+}} ymm2 = [4294967295,4294967295,4294967295,429496729]
 | 
						|
; AVX1-NEXT:    vblendvpd %ymm1, %ymm0, %ymm2, %ymm0
 | 
						|
; AVX1-NEXT:    vextractf128 $1, %ymm0, %xmm1
 | 
						|
; AVX1-NEXT:    vshufps {{.*#+}} xmm0 = xmm0[0,2],xmm1[0,2]
 | 
						|
; AVX1-NEXT:    vzeroupper
 | 
						|
; AVX1-NEXT:    retq
 | 
						|
;
 | 
						|
; AVX2-SLOW-LABEL: trunc_usat_v4i64_v4i32:
 | 
						|
; AVX2-SLOW:       # %bb.0:
 | 
						|
; AVX2-SLOW-NEXT:    vpbroadcastq {{.*#+}} ymm1 = [9223372036854775808,9223372036854775808,9223372036854775808,9223372036854775808]
 | 
						|
; AVX2-SLOW-NEXT:    vpxor %ymm1, %ymm0, %ymm1
 | 
						|
; AVX2-SLOW-NEXT:    vpbroadcastq {{.*#+}} ymm2 = [9223372041149743103,9223372041149743103,9223372041149743103,9223372041149743103]
 | 
						|
; AVX2-SLOW-NEXT:    vpcmpgtq %ymm1, %ymm2, %ymm1
 | 
						|
; AVX2-SLOW-NEXT:    vmovapd {{.*#+}} ymm2 = [4294967295,4294967295,4294967295,429496729]
 | 
						|
; AVX2-SLOW-NEXT:    vblendvpd %ymm1, %ymm0, %ymm2, %ymm0
 | 
						|
; AVX2-SLOW-NEXT:    vextractf128 $1, %ymm0, %xmm1
 | 
						|
; AVX2-SLOW-NEXT:    vshufps {{.*#+}} xmm0 = xmm0[0,2],xmm1[0,2]
 | 
						|
; AVX2-SLOW-NEXT:    vzeroupper
 | 
						|
; AVX2-SLOW-NEXT:    retq
 | 
						|
;
 | 
						|
; AVX2-FAST-LABEL: trunc_usat_v4i64_v4i32:
 | 
						|
; AVX2-FAST:       # %bb.0:
 | 
						|
; AVX2-FAST-NEXT:    vpbroadcastq {{.*#+}} ymm1 = [9223372036854775808,9223372036854775808,9223372036854775808,9223372036854775808]
 | 
						|
; AVX2-FAST-NEXT:    vpxor %ymm1, %ymm0, %ymm1
 | 
						|
; AVX2-FAST-NEXT:    vpbroadcastq {{.*#+}} ymm2 = [9223372041149743103,9223372041149743103,9223372041149743103,9223372041149743103]
 | 
						|
; AVX2-FAST-NEXT:    vpcmpgtq %ymm1, %ymm2, %ymm1
 | 
						|
; AVX2-FAST-NEXT:    vmovapd {{.*#+}} ymm2 = [4294967295,4294967295,4294967295,429496729]
 | 
						|
; AVX2-FAST-NEXT:    vblendvpd %ymm1, %ymm0, %ymm2, %ymm0
 | 
						|
; AVX2-FAST-NEXT:    vmovapd {{.*#+}} ymm1 = [0,2,4,6,4,6,6,7]
 | 
						|
; AVX2-FAST-NEXT:    vpermps %ymm0, %ymm1, %ymm0
 | 
						|
; AVX2-FAST-NEXT:    # kill: def $xmm0 killed $xmm0 killed $ymm0
 | 
						|
; AVX2-FAST-NEXT:    vzeroupper
 | 
						|
; AVX2-FAST-NEXT:    retq
 | 
						|
;
 | 
						|
; AVX512F-LABEL: trunc_usat_v4i64_v4i32:
 | 
						|
; AVX512F:       # %bb.0:
 | 
						|
; AVX512F-NEXT:    # kill: def $ymm0 killed $ymm0 def $zmm0
 | 
						|
; AVX512F-NEXT:    vpbroadcastq {{.*#+}} ymm1 = [4294967295,4294967295,4294967295,4294967295]
 | 
						|
; AVX512F-NEXT:    vpcmpltuq %zmm1, %zmm0, %k1
 | 
						|
; AVX512F-NEXT:    vmovdqa {{.*#+}} ymm1 = [4294967295,4294967295,4294967295,429496729]
 | 
						|
; AVX512F-NEXT:    vmovdqa64 %zmm0, %zmm1 {%k1}
 | 
						|
; AVX512F-NEXT:    vpmovqd %zmm1, %ymm0
 | 
						|
; AVX512F-NEXT:    # kill: def $xmm0 killed $xmm0 killed $ymm0
 | 
						|
; AVX512F-NEXT:    vzeroupper
 | 
						|
; AVX512F-NEXT:    retq
 | 
						|
;
 | 
						|
; AVX512VL-LABEL: trunc_usat_v4i64_v4i32:
 | 
						|
; AVX512VL:       # %bb.0:
 | 
						|
; AVX512VL-NEXT:    vpcmpltuq {{.*}}(%rip){1to4}, %ymm0, %k1
 | 
						|
; AVX512VL-NEXT:    vmovdqa {{.*#+}} ymm1 = [4294967295,4294967295,4294967295,429496729]
 | 
						|
; AVX512VL-NEXT:    vmovdqa64 %ymm0, %ymm1 {%k1}
 | 
						|
; AVX512VL-NEXT:    vpmovqd %ymm1, %xmm0
 | 
						|
; AVX512VL-NEXT:    vzeroupper
 | 
						|
; AVX512VL-NEXT:    retq
 | 
						|
;
 | 
						|
; AVX512BW-LABEL: trunc_usat_v4i64_v4i32:
 | 
						|
; AVX512BW:       # %bb.0:
 | 
						|
; AVX512BW-NEXT:    # kill: def $ymm0 killed $ymm0 def $zmm0
 | 
						|
; AVX512BW-NEXT:    vpbroadcastq {{.*#+}} ymm1 = [4294967295,4294967295,4294967295,4294967295]
 | 
						|
; AVX512BW-NEXT:    vpcmpltuq %zmm1, %zmm0, %k1
 | 
						|
; AVX512BW-NEXT:    vmovdqa {{.*#+}} ymm1 = [4294967295,4294967295,4294967295,429496729]
 | 
						|
; AVX512BW-NEXT:    vmovdqa64 %zmm0, %zmm1 {%k1}
 | 
						|
; AVX512BW-NEXT:    vpmovqd %zmm1, %ymm0
 | 
						|
; AVX512BW-NEXT:    # kill: def $xmm0 killed $xmm0 killed $ymm0
 | 
						|
; AVX512BW-NEXT:    vzeroupper
 | 
						|
; AVX512BW-NEXT:    retq
 | 
						|
;
 | 
						|
; AVX512BWVL-LABEL: trunc_usat_v4i64_v4i32:
 | 
						|
; AVX512BWVL:       # %bb.0:
 | 
						|
; AVX512BWVL-NEXT:    vpcmpltuq {{.*}}(%rip){1to4}, %ymm0, %k1
 | 
						|
; AVX512BWVL-NEXT:    vmovdqa {{.*#+}} ymm1 = [4294967295,4294967295,4294967295,429496729]
 | 
						|
; AVX512BWVL-NEXT:    vmovdqa64 %ymm0, %ymm1 {%k1}
 | 
						|
; AVX512BWVL-NEXT:    vpmovqd %ymm1, %xmm0
 | 
						|
; AVX512BWVL-NEXT:    vzeroupper
 | 
						|
; AVX512BWVL-NEXT:    retq
  %1 = icmp ult <4 x i64> %a0, <i64 4294967295, i64 4294967295, i64 4294967295, i64 4294967295>
  %2 = select <4 x i1> %1, <4 x i64> %a0, <4 x i64> <i64 4294967295, i64 4294967295, i64 4294967295, i64 429496729>
  %3 = trunc <4 x i64> %2 to <4 x i32>
  ret <4 x i32> %3
}

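; Note on the pre-AVX512 lowerings above: x86 has no unsigned vector compare,
; so the clamp is done with a signed compare after flipping the sign bits,
; using the identity (a u< b) == ((a ^ 0x8000000000000000) s< (b ^ 0x8000000000000000)).
; That is why AVX1/AVX2 xor with 9223372036854775808 (0x8000000000000000) and
; compare against 9223372041149743103 (0x8000000000000000 + 0xFFFFFFFF), while
; SSE2/SSSE3, which lack a 64-bit pcmpgtq, rebuild the 64-bit compare from
; pcmpgtd/pcmpeqd on 32-bit halves and therefore bias each half instead
; (9223372039002259456 = 0x8000000080000000, bound 9223372039002259455).
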
define <8 x i32> @trunc_usat_v8i64_v8i32(<8 x i64> %a0) {
; SSE2-LABEL: trunc_usat_v8i64_v8i32:
 | 
						|
; SSE2:       # %bb.0:
 | 
						|
; SSE2-NEXT:    movdqa {{.*#+}} xmm8 = [4294967295,4294967295]
 | 
						|
; SSE2-NEXT:    movdqa {{.*#+}} xmm5 = [9223372039002259456,9223372039002259456]
 | 
						|
; SSE2-NEXT:    movdqa %xmm3, %xmm7
 | 
						|
; SSE2-NEXT:    pxor %xmm5, %xmm7
 | 
						|
; SSE2-NEXT:    movdqa {{.*#+}} xmm9 = [9223372039002259455,9223372039002259455]
 | 
						|
; SSE2-NEXT:    movdqa %xmm9, %xmm6
 | 
						|
; SSE2-NEXT:    pcmpgtd %xmm7, %xmm6
 | 
						|
; SSE2-NEXT:    pshufd {{.*#+}} xmm4 = xmm6[0,0,2,2]
 | 
						|
; SSE2-NEXT:    pcmpeqd %xmm9, %xmm7
 | 
						|
; SSE2-NEXT:    pshufd {{.*#+}} xmm7 = xmm7[1,1,3,3]
 | 
						|
; SSE2-NEXT:    pand %xmm4, %xmm7
 | 
						|
; SSE2-NEXT:    pshufd {{.*#+}} xmm4 = xmm6[1,1,3,3]
 | 
						|
; SSE2-NEXT:    por %xmm7, %xmm4
 | 
						|
; SSE2-NEXT:    pand %xmm4, %xmm3
 | 
						|
; SSE2-NEXT:    pandn %xmm8, %xmm4
 | 
						|
; SSE2-NEXT:    por %xmm3, %xmm4
 | 
						|
; SSE2-NEXT:    movdqa %xmm2, %xmm3
 | 
						|
; SSE2-NEXT:    pxor %xmm5, %xmm3
 | 
						|
; SSE2-NEXT:    movdqa %xmm9, %xmm6
 | 
						|
; SSE2-NEXT:    pcmpgtd %xmm3, %xmm6
 | 
						|
; SSE2-NEXT:    pshufd {{.*#+}} xmm10 = xmm6[0,0,2,2]
 | 
						|
; SSE2-NEXT:    pcmpeqd %xmm9, %xmm3
 | 
						|
; SSE2-NEXT:    pshufd {{.*#+}} xmm7 = xmm3[1,1,3,3]
 | 
						|
; SSE2-NEXT:    pand %xmm10, %xmm7
 | 
						|
; SSE2-NEXT:    pshufd {{.*#+}} xmm3 = xmm6[1,1,3,3]
 | 
						|
; SSE2-NEXT:    por %xmm7, %xmm3
 | 
						|
; SSE2-NEXT:    pand %xmm3, %xmm2
 | 
						|
; SSE2-NEXT:    pandn %xmm8, %xmm3
 | 
						|
; SSE2-NEXT:    por %xmm2, %xmm3
 | 
						|
; SSE2-NEXT:    shufps {{.*#+}} xmm3 = xmm3[0,2],xmm4[0,2]
 | 
						|
; SSE2-NEXT:    movdqa %xmm1, %xmm2
 | 
						|
; SSE2-NEXT:    pxor %xmm5, %xmm2
 | 
						|
; SSE2-NEXT:    movdqa %xmm9, %xmm4
 | 
						|
; SSE2-NEXT:    pcmpgtd %xmm2, %xmm4
 | 
						|
; SSE2-NEXT:    pshufd {{.*#+}} xmm6 = xmm4[0,0,2,2]
 | 
						|
; SSE2-NEXT:    pcmpeqd %xmm9, %xmm2
 | 
						|
; SSE2-NEXT:    pshufd {{.*#+}} xmm2 = xmm2[1,1,3,3]
 | 
						|
; SSE2-NEXT:    pand %xmm6, %xmm2
 | 
						|
; SSE2-NEXT:    pshufd {{.*#+}} xmm4 = xmm4[1,1,3,3]
 | 
						|
; SSE2-NEXT:    por %xmm2, %xmm4
 | 
						|
; SSE2-NEXT:    pand %xmm4, %xmm1
 | 
						|
; SSE2-NEXT:    pandn %xmm8, %xmm4
 | 
						|
; SSE2-NEXT:    por %xmm1, %xmm4
 | 
						|
; SSE2-NEXT:    pxor %xmm0, %xmm5
 | 
						|
; SSE2-NEXT:    movdqa %xmm9, %xmm1
 | 
						|
; SSE2-NEXT:    pcmpgtd %xmm5, %xmm1
 | 
						|
; SSE2-NEXT:    pshufd {{.*#+}} xmm2 = xmm1[0,0,2,2]
 | 
						|
; SSE2-NEXT:    pcmpeqd %xmm9, %xmm5
 | 
						|
; SSE2-NEXT:    pshufd {{.*#+}} xmm5 = xmm5[1,1,3,3]
 | 
						|
; SSE2-NEXT:    pand %xmm2, %xmm5
 | 
						|
; SSE2-NEXT:    pshufd {{.*#+}} xmm1 = xmm1[1,1,3,3]
 | 
						|
; SSE2-NEXT:    por %xmm5, %xmm1
 | 
						|
; SSE2-NEXT:    pand %xmm1, %xmm0
 | 
						|
; SSE2-NEXT:    pandn %xmm8, %xmm1
 | 
						|
; SSE2-NEXT:    por %xmm1, %xmm0
 | 
						|
; SSE2-NEXT:    shufps {{.*#+}} xmm0 = xmm0[0,2],xmm4[0,2]
 | 
						|
; SSE2-NEXT:    movaps %xmm3, %xmm1
 | 
						|
; SSE2-NEXT:    retq
 | 
						|
;
 | 
						|
; SSSE3-LABEL: trunc_usat_v8i64_v8i32:
 | 
						|
; SSSE3:       # %bb.0:
 | 
						|
; SSSE3-NEXT:    movdqa {{.*#+}} xmm8 = [4294967295,4294967295]
 | 
						|
; SSSE3-NEXT:    movdqa {{.*#+}} xmm5 = [9223372039002259456,9223372039002259456]
 | 
						|
; SSSE3-NEXT:    movdqa %xmm3, %xmm7
 | 
						|
; SSSE3-NEXT:    pxor %xmm5, %xmm7
 | 
						|
; SSSE3-NEXT:    movdqa {{.*#+}} xmm9 = [9223372039002259455,9223372039002259455]
 | 
						|
; SSSE3-NEXT:    movdqa %xmm9, %xmm6
 | 
						|
; SSSE3-NEXT:    pcmpgtd %xmm7, %xmm6
 | 
						|
; SSSE3-NEXT:    pshufd {{.*#+}} xmm4 = xmm6[0,0,2,2]
 | 
						|
; SSSE3-NEXT:    pcmpeqd %xmm9, %xmm7
 | 
						|
; SSSE3-NEXT:    pshufd {{.*#+}} xmm7 = xmm7[1,1,3,3]
 | 
						|
; SSSE3-NEXT:    pand %xmm4, %xmm7
 | 
						|
; SSSE3-NEXT:    pshufd {{.*#+}} xmm4 = xmm6[1,1,3,3]
 | 
						|
; SSSE3-NEXT:    por %xmm7, %xmm4
 | 
						|
; SSSE3-NEXT:    pand %xmm4, %xmm3
 | 
						|
; SSSE3-NEXT:    pandn %xmm8, %xmm4
 | 
						|
; SSSE3-NEXT:    por %xmm3, %xmm4
 | 
						|
; SSSE3-NEXT:    movdqa %xmm2, %xmm3
 | 
						|
; SSSE3-NEXT:    pxor %xmm5, %xmm3
 | 
						|
; SSSE3-NEXT:    movdqa %xmm9, %xmm6
 | 
						|
; SSSE3-NEXT:    pcmpgtd %xmm3, %xmm6
 | 
						|
; SSSE3-NEXT:    pshufd {{.*#+}} xmm10 = xmm6[0,0,2,2]
 | 
						|
; SSSE3-NEXT:    pcmpeqd %xmm9, %xmm3
 | 
						|
; SSSE3-NEXT:    pshufd {{.*#+}} xmm7 = xmm3[1,1,3,3]
 | 
						|
; SSSE3-NEXT:    pand %xmm10, %xmm7
 | 
						|
; SSSE3-NEXT:    pshufd {{.*#+}} xmm3 = xmm6[1,1,3,3]
 | 
						|
; SSSE3-NEXT:    por %xmm7, %xmm3
 | 
						|
; SSSE3-NEXT:    pand %xmm3, %xmm2
 | 
						|
; SSSE3-NEXT:    pandn %xmm8, %xmm3
 | 
						|
; SSSE3-NEXT:    por %xmm2, %xmm3
 | 
						|
; SSSE3-NEXT:    shufps {{.*#+}} xmm3 = xmm3[0,2],xmm4[0,2]
 | 
						|
; SSSE3-NEXT:    movdqa %xmm1, %xmm2
 | 
						|
; SSSE3-NEXT:    pxor %xmm5, %xmm2
 | 
						|
; SSSE3-NEXT:    movdqa %xmm9, %xmm4
 | 
						|
; SSSE3-NEXT:    pcmpgtd %xmm2, %xmm4
 | 
						|
; SSSE3-NEXT:    pshufd {{.*#+}} xmm6 = xmm4[0,0,2,2]
 | 
						|
; SSSE3-NEXT:    pcmpeqd %xmm9, %xmm2
 | 
						|
; SSSE3-NEXT:    pshufd {{.*#+}} xmm2 = xmm2[1,1,3,3]
 | 
						|
; SSSE3-NEXT:    pand %xmm6, %xmm2
 | 
						|
; SSSE3-NEXT:    pshufd {{.*#+}} xmm4 = xmm4[1,1,3,3]
 | 
						|
; SSSE3-NEXT:    por %xmm2, %xmm4
 | 
						|
; SSSE3-NEXT:    pand %xmm4, %xmm1
 | 
						|
; SSSE3-NEXT:    pandn %xmm8, %xmm4
 | 
						|
; SSSE3-NEXT:    por %xmm1, %xmm4
 | 
						|
; SSSE3-NEXT:    pxor %xmm0, %xmm5
 | 
						|
; SSSE3-NEXT:    movdqa %xmm9, %xmm1
 | 
						|
; SSSE3-NEXT:    pcmpgtd %xmm5, %xmm1
 | 
						|
; SSSE3-NEXT:    pshufd {{.*#+}} xmm2 = xmm1[0,0,2,2]
 | 
						|
; SSSE3-NEXT:    pcmpeqd %xmm9, %xmm5
 | 
						|
; SSSE3-NEXT:    pshufd {{.*#+}} xmm5 = xmm5[1,1,3,3]
 | 
						|
; SSSE3-NEXT:    pand %xmm2, %xmm5
 | 
						|
; SSSE3-NEXT:    pshufd {{.*#+}} xmm1 = xmm1[1,1,3,3]
 | 
						|
; SSSE3-NEXT:    por %xmm5, %xmm1
 | 
						|
; SSSE3-NEXT:    pand %xmm1, %xmm0
 | 
						|
; SSSE3-NEXT:    pandn %xmm8, %xmm1
 | 
						|
; SSSE3-NEXT:    por %xmm1, %xmm0
 | 
						|
; SSSE3-NEXT:    shufps {{.*#+}} xmm0 = xmm0[0,2],xmm4[0,2]
 | 
						|
; SSSE3-NEXT:    movaps %xmm3, %xmm1
 | 
						|
; SSSE3-NEXT:    retq
 | 
						|
;
 | 
						|
; SSE41-LABEL: trunc_usat_v8i64_v8i32:
 | 
						|
; SSE41:       # %bb.0:
 | 
						|
; SSE41-NEXT:    movdqa %xmm0, %xmm8
 | 
						|
; SSE41-NEXT:    movapd {{.*#+}} xmm6 = [4294967295,4294967295]
 | 
						|
; SSE41-NEXT:    movdqa {{.*#+}} xmm7 = [9223372039002259456,9223372039002259456]
 | 
						|
; SSE41-NEXT:    movdqa %xmm3, %xmm0
 | 
						|
; SSE41-NEXT:    pxor %xmm7, %xmm0
 | 
						|
; SSE41-NEXT:    movdqa {{.*#+}} xmm9 = [9223372039002259455,9223372039002259455]
 | 
						|
; SSE41-NEXT:    movdqa %xmm9, %xmm4
 | 
						|
; SSE41-NEXT:    pcmpgtd %xmm0, %xmm4
 | 
						|
; SSE41-NEXT:    pshufd {{.*#+}} xmm5 = xmm4[0,0,2,2]
 | 
						|
; SSE41-NEXT:    pcmpeqd %xmm9, %xmm0
 | 
						|
; SSE41-NEXT:    pshufd {{.*#+}} xmm0 = xmm0[1,1,3,3]
 | 
						|
; SSE41-NEXT:    pand %xmm5, %xmm0
 | 
						|
; SSE41-NEXT:    por %xmm4, %xmm0
 | 
						|
; SSE41-NEXT:    movapd %xmm6, %xmm5
 | 
						|
; SSE41-NEXT:    blendvpd %xmm0, %xmm3, %xmm5
 | 
						|
; SSE41-NEXT:    movdqa %xmm2, %xmm0
 | 
						|
; SSE41-NEXT:    pxor %xmm7, %xmm0
 | 
						|
; SSE41-NEXT:    movdqa %xmm9, %xmm3
 | 
						|
; SSE41-NEXT:    pcmpgtd %xmm0, %xmm3
 | 
						|
; SSE41-NEXT:    pshufd {{.*#+}} xmm4 = xmm3[0,0,2,2]
 | 
						|
; SSE41-NEXT:    pcmpeqd %xmm9, %xmm0
 | 
						|
; SSE41-NEXT:    pshufd {{.*#+}} xmm0 = xmm0[1,1,3,3]
 | 
						|
; SSE41-NEXT:    pand %xmm4, %xmm0
 | 
						|
; SSE41-NEXT:    por %xmm3, %xmm0
 | 
						|
; SSE41-NEXT:    movapd %xmm6, %xmm4
 | 
						|
; SSE41-NEXT:    blendvpd %xmm0, %xmm2, %xmm4
 | 
						|
; SSE41-NEXT:    shufps {{.*#+}} xmm4 = xmm4[0,2],xmm5[0,2]
 | 
						|
; SSE41-NEXT:    movdqa %xmm1, %xmm0
 | 
						|
; SSE41-NEXT:    pxor %xmm7, %xmm0
 | 
						|
; SSE41-NEXT:    movdqa %xmm9, %xmm2
 | 
						|
; SSE41-NEXT:    pcmpgtd %xmm0, %xmm2
 | 
						|
; SSE41-NEXT:    pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
 | 
						|
; SSE41-NEXT:    pcmpeqd %xmm9, %xmm0
 | 
						|
; SSE41-NEXT:    pshufd {{.*#+}} xmm0 = xmm0[1,1,3,3]
 | 
						|
; SSE41-NEXT:    pand %xmm3, %xmm0
 | 
						|
; SSE41-NEXT:    por %xmm2, %xmm0
 | 
						|
; SSE41-NEXT:    movapd %xmm6, %xmm2
 | 
						|
; SSE41-NEXT:    blendvpd %xmm0, %xmm1, %xmm2
 | 
						|
; SSE41-NEXT:    pxor %xmm8, %xmm7
 | 
						|
; SSE41-NEXT:    movdqa %xmm9, %xmm1
 | 
						|
; SSE41-NEXT:    pcmpgtd %xmm7, %xmm1
 | 
						|
; SSE41-NEXT:    pshufd {{.*#+}} xmm3 = xmm1[0,0,2,2]
 | 
						|
; SSE41-NEXT:    pcmpeqd %xmm9, %xmm7
 | 
						|
; SSE41-NEXT:    pshufd {{.*#+}} xmm0 = xmm7[1,1,3,3]
 | 
						|
; SSE41-NEXT:    pand %xmm3, %xmm0
 | 
						|
; SSE41-NEXT:    por %xmm1, %xmm0
 | 
						|
; SSE41-NEXT:    blendvpd %xmm0, %xmm8, %xmm6
 | 
						|
; SSE41-NEXT:    shufps {{.*#+}} xmm6 = xmm6[0,2],xmm2[0,2]
 | 
						|
; SSE41-NEXT:    movaps %xmm6, %xmm0
 | 
						|
; SSE41-NEXT:    movaps %xmm4, %xmm1
 | 
						|
; SSE41-NEXT:    retq
 | 
						|
;
 | 
						|
; AVX1-LABEL: trunc_usat_v8i64_v8i32:
 | 
						|
; AVX1:       # %bb.0:
 | 
						|
; AVX1-NEXT:    vmovapd {{.*#+}} ymm2 = [4294967295,4294967295,4294967295,4294967295]
 | 
						|
; AVX1-NEXT:    vextractf128 $1, %ymm0, %xmm3
 | 
						|
; AVX1-NEXT:    vmovdqa {{.*#+}} xmm4 = [9223372036854775808,9223372036854775808]
 | 
						|
; AVX1-NEXT:    vpxor %xmm4, %xmm3, %xmm3
 | 
						|
; AVX1-NEXT:    vmovdqa {{.*#+}} xmm5 = [9223372041149743103,9223372041149743103]
 | 
						|
; AVX1-NEXT:    vpcmpgtq %xmm3, %xmm5, %xmm3
 | 
						|
; AVX1-NEXT:    vpxor %xmm4, %xmm0, %xmm6
 | 
						|
; AVX1-NEXT:    vpcmpgtq %xmm6, %xmm5, %xmm6
 | 
						|
; AVX1-NEXT:    vinsertf128 $1, %xmm3, %ymm6, %ymm3
 | 
						|
; AVX1-NEXT:    vblendvpd %ymm3, %ymm0, %ymm2, %ymm0
 | 
						|
; AVX1-NEXT:    vextractf128 $1, %ymm1, %xmm3
 | 
						|
; AVX1-NEXT:    vpxor %xmm4, %xmm3, %xmm3
 | 
						|
; AVX1-NEXT:    vpcmpgtq %xmm3, %xmm5, %xmm3
 | 
						|
; AVX1-NEXT:    vpxor %xmm4, %xmm1, %xmm4
 | 
						|
; AVX1-NEXT:    vpcmpgtq %xmm4, %xmm5, %xmm4
 | 
						|
; AVX1-NEXT:    vinsertf128 $1, %xmm3, %ymm4, %ymm3
 | 
						|
; AVX1-NEXT:    vblendvpd %ymm3, %ymm1, %ymm2, %ymm1
 | 
						|
; AVX1-NEXT:    vextractf128 $1, %ymm1, %xmm2
 | 
						|
; AVX1-NEXT:    vshufps {{.*#+}} xmm1 = xmm1[0,2],xmm2[0,2]
 | 
						|
; AVX1-NEXT:    vextractf128 $1, %ymm0, %xmm2
 | 
						|
; AVX1-NEXT:    vshufps {{.*#+}} xmm0 = xmm0[0,2],xmm2[0,2]
 | 
						|
; AVX1-NEXT:    vinsertf128 $1, %xmm1, %ymm0, %ymm0
 | 
						|
; AVX1-NEXT:    retq
 | 
						|
;
 | 
						|
; AVX2-SLOW-LABEL: trunc_usat_v8i64_v8i32:
 | 
						|
; AVX2-SLOW:       # %bb.0:
 | 
						|
; AVX2-SLOW-NEXT:    vbroadcastsd {{.*#+}} ymm2 = [4294967295,4294967295,4294967295,4294967295]
 | 
						|
; AVX2-SLOW-NEXT:    vpbroadcastq {{.*#+}} ymm3 = [9223372036854775808,9223372036854775808,9223372036854775808,9223372036854775808]
 | 
						|
; AVX2-SLOW-NEXT:    vpxor %ymm3, %ymm0, %ymm4
 | 
						|
; AVX2-SLOW-NEXT:    vpbroadcastq {{.*#+}} ymm5 = [9223372041149743103,9223372041149743103,9223372041149743103,9223372041149743103]
 | 
						|
; AVX2-SLOW-NEXT:    vpcmpgtq %ymm4, %ymm5, %ymm4
 | 
						|
; AVX2-SLOW-NEXT:    vblendvpd %ymm4, %ymm0, %ymm2, %ymm0
 | 
						|
; AVX2-SLOW-NEXT:    vpxor %ymm3, %ymm1, %ymm3
 | 
						|
; AVX2-SLOW-NEXT:    vpcmpgtq %ymm3, %ymm5, %ymm3
 | 
						|
; AVX2-SLOW-NEXT:    vblendvpd %ymm3, %ymm1, %ymm2, %ymm1
 | 
						|
; AVX2-SLOW-NEXT:    vextractf128 $1, %ymm1, %xmm2
 | 
						|
; AVX2-SLOW-NEXT:    vshufps {{.*#+}} xmm1 = xmm1[0,2],xmm2[0,2]
 | 
						|
; AVX2-SLOW-NEXT:    vextractf128 $1, %ymm0, %xmm2
 | 
						|
; AVX2-SLOW-NEXT:    vshufps {{.*#+}} xmm0 = xmm0[0,2],xmm2[0,2]
 | 
						|
; AVX2-SLOW-NEXT:    vinsertf128 $1, %xmm1, %ymm0, %ymm0
 | 
						|
; AVX2-SLOW-NEXT:    retq
 | 
						|
;
 | 
						|
; AVX2-FAST-LABEL: trunc_usat_v8i64_v8i32:
 | 
						|
; AVX2-FAST:       # %bb.0:
 | 
						|
; AVX2-FAST-NEXT:    vbroadcastsd {{.*#+}} ymm2 = [4294967295,4294967295,4294967295,4294967295]
 | 
						|
; AVX2-FAST-NEXT:    vpbroadcastq {{.*#+}} ymm3 = [9223372036854775808,9223372036854775808,9223372036854775808,9223372036854775808]
 | 
						|
; AVX2-FAST-NEXT:    vpxor %ymm3, %ymm1, %ymm4
 | 
						|
; AVX2-FAST-NEXT:    vpbroadcastq {{.*#+}} ymm5 = [9223372041149743103,9223372041149743103,9223372041149743103,9223372041149743103]
 | 
						|
; AVX2-FAST-NEXT:    vpcmpgtq %ymm4, %ymm5, %ymm4
 | 
						|
; AVX2-FAST-NEXT:    vblendvpd %ymm4, %ymm1, %ymm2, %ymm1
 | 
						|
; AVX2-FAST-NEXT:    vpxor %ymm3, %ymm0, %ymm3
 | 
						|
; AVX2-FAST-NEXT:    vpcmpgtq %ymm3, %ymm5, %ymm3
 | 
						|
; AVX2-FAST-NEXT:    vblendvpd %ymm3, %ymm0, %ymm2, %ymm0
 | 
						|
; AVX2-FAST-NEXT:    vmovapd {{.*#+}} ymm2 = [0,2,4,6,4,6,6,7]
 | 
						|
; AVX2-FAST-NEXT:    vpermps %ymm0, %ymm2, %ymm0
 | 
						|
; AVX2-FAST-NEXT:    vpermps %ymm1, %ymm2, %ymm1
 | 
						|
; AVX2-FAST-NEXT:    vinsertf128 $1, %xmm1, %ymm0, %ymm0
 | 
						|
; AVX2-FAST-NEXT:    retq
 | 
						|
;
; AVX512-LABEL: trunc_usat_v8i64_v8i32:
; AVX512:       # %bb.0:
; AVX512-NEXT:    vpmovusqd %zmm0, %ymm0
; AVX512-NEXT:    retq
  %1 = icmp ult <8 x i64> %a0, <i64 4294967295, i64 4294967295, i64 4294967295, i64 4294967295, i64 4294967295, i64 4294967295, i64 4294967295, i64 4294967295>
  %2 = select <8 x i1> %1, <8 x i64> %a0, <8 x i64> <i64 4294967295, i64 4294967295, i64 4294967295, i64 4294967295, i64 4294967295, i64 4294967295, i64 4294967295, i64 4294967295>
  %3 = trunc <8 x i64> %2 to <8 x i32>
  ret <8 x i32> %3
}

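; When the clamp constant is a uniform splat of the destination maximum, as in
; trunc_usat_v8i64_v8i32 above, AVX512 matches the whole icmp/select/trunc
; pattern to a single saturating truncate:
;
;   vpmovusqd %zmm0, %ymm0    ; v8i64 -> v8i32 with unsigned saturation
;
; The non-uniform clamp in trunc_usat_v4i64_v4i32 (last lane 429496729) is
; presumably what keeps that test on the generic compare/blend path even with
; AVX512.
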
;
; Unsigned saturation truncation to vXi16
;

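; For the i64 -> i16 case the clamp to [0,65535] still needs the compare/blend
; sequence, after which SSE4.1/AVX narrow in two packusdw steps; AVX512 folds
; the whole pattern into a single vpmovusqw, as the checks below show.
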
define <8 x i16> @trunc_usat_v8i64_v8i16(<8 x i64> %a0) {
; SSE2-LABEL: trunc_usat_v8i64_v8i16:
 | 
						|
; SSE2:       # %bb.0:
 | 
						|
; SSE2-NEXT:    movdqa {{.*#+}} xmm8 = [65535,65535]
 | 
						|
; SSE2-NEXT:    movdqa {{.*#+}} xmm6 = [9223372039002259456,9223372039002259456]
 | 
						|
; SSE2-NEXT:    movdqa %xmm2, %xmm5
 | 
						|
; SSE2-NEXT:    pxor %xmm6, %xmm5
 | 
						|
; SSE2-NEXT:    movdqa {{.*#+}} xmm9 = [9223372039002324991,9223372039002324991]
 | 
						|
; SSE2-NEXT:    movdqa %xmm9, %xmm7
 | 
						|
; SSE2-NEXT:    pcmpgtd %xmm5, %xmm7
 | 
						|
; SSE2-NEXT:    pshufd {{.*#+}} xmm10 = xmm7[0,0,2,2]
 | 
						|
; SSE2-NEXT:    pcmpeqd %xmm9, %xmm5
 | 
						|
; SSE2-NEXT:    pshufd {{.*#+}} xmm4 = xmm5[1,1,3,3]
 | 
						|
; SSE2-NEXT:    pand %xmm10, %xmm4
 | 
						|
; SSE2-NEXT:    pshufd {{.*#+}} xmm5 = xmm7[1,1,3,3]
 | 
						|
; SSE2-NEXT:    por %xmm4, %xmm5
 | 
						|
; SSE2-NEXT:    pand %xmm5, %xmm2
 | 
						|
; SSE2-NEXT:    pandn %xmm8, %xmm5
 | 
						|
; SSE2-NEXT:    por %xmm2, %xmm5
 | 
						|
; SSE2-NEXT:    movdqa %xmm3, %xmm2
 | 
						|
; SSE2-NEXT:    pxor %xmm6, %xmm2
 | 
						|
; SSE2-NEXT:    movdqa %xmm9, %xmm4
 | 
						|
; SSE2-NEXT:    pcmpgtd %xmm2, %xmm4
 | 
						|
; SSE2-NEXT:    pshufd {{.*#+}} xmm10 = xmm4[0,0,2,2]
 | 
						|
; SSE2-NEXT:    pcmpeqd %xmm9, %xmm2
 | 
						|
; SSE2-NEXT:    pshufd {{.*#+}} xmm7 = xmm2[1,1,3,3]
 | 
						|
; SSE2-NEXT:    pand %xmm10, %xmm7
 | 
						|
; SSE2-NEXT:    pshufd {{.*#+}} xmm2 = xmm4[1,1,3,3]
 | 
						|
; SSE2-NEXT:    por %xmm7, %xmm2
 | 
						|
; SSE2-NEXT:    pand %xmm2, %xmm3
 | 
						|
; SSE2-NEXT:    pandn %xmm8, %xmm2
 | 
						|
; SSE2-NEXT:    por %xmm3, %xmm2
 | 
						|
; SSE2-NEXT:    movdqa %xmm0, %xmm3
 | 
						|
; SSE2-NEXT:    pxor %xmm6, %xmm3
 | 
						|
; SSE2-NEXT:    movdqa %xmm9, %xmm4
 | 
						|
; SSE2-NEXT:    pcmpgtd %xmm3, %xmm4
 | 
						|
; SSE2-NEXT:    pshufd {{.*#+}} xmm7 = xmm4[0,0,2,2]
 | 
						|
; SSE2-NEXT:    pcmpeqd %xmm9, %xmm3
 | 
						|
; SSE2-NEXT:    pshufd {{.*#+}} xmm3 = xmm3[1,1,3,3]
 | 
						|
; SSE2-NEXT:    pand %xmm7, %xmm3
 | 
						|
; SSE2-NEXT:    pshufd {{.*#+}} xmm4 = xmm4[1,1,3,3]
 | 
						|
; SSE2-NEXT:    por %xmm3, %xmm4
 | 
						|
; SSE2-NEXT:    pand %xmm4, %xmm0
 | 
						|
; SSE2-NEXT:    pandn %xmm8, %xmm4
 | 
						|
; SSE2-NEXT:    por %xmm0, %xmm4
 | 
						|
; SSE2-NEXT:    pxor %xmm1, %xmm6
 | 
						|
; SSE2-NEXT:    movdqa %xmm9, %xmm0
 | 
						|
; SSE2-NEXT:    pcmpgtd %xmm6, %xmm0
 | 
						|
; SSE2-NEXT:    pshufd {{.*#+}} xmm3 = xmm0[0,0,2,2]
 | 
						|
; SSE2-NEXT:    pcmpeqd %xmm9, %xmm6
 | 
						|
; SSE2-NEXT:    pshufd {{.*#+}} xmm6 = xmm6[1,1,3,3]
 | 
						|
; SSE2-NEXT:    pand %xmm3, %xmm6
 | 
						|
; SSE2-NEXT:    pshufd {{.*#+}} xmm0 = xmm0[1,1,3,3]
 | 
						|
; SSE2-NEXT:    por %xmm6, %xmm0
 | 
						|
; SSE2-NEXT:    pand %xmm0, %xmm1
 | 
						|
; SSE2-NEXT:    pandn %xmm8, %xmm0
 | 
						|
; SSE2-NEXT:    por %xmm1, %xmm0
 | 
						|
; SSE2-NEXT:    pshufd {{.*#+}} xmm0 = xmm0[0,2,2,3]
 | 
						|
; SSE2-NEXT:    pshuflw {{.*#+}} xmm0 = xmm0[0,2,2,3,4,5,6,7]
 | 
						|
; SSE2-NEXT:    pshufd {{.*#+}} xmm1 = xmm4[0,2,2,3]
 | 
						|
; SSE2-NEXT:    pshuflw {{.*#+}} xmm1 = xmm1[0,2,2,3,4,5,6,7]
 | 
						|
; SSE2-NEXT:    punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
 | 
						|
; SSE2-NEXT:    pshufd {{.*#+}} xmm0 = xmm2[0,2,2,3]
 | 
						|
; SSE2-NEXT:    pshuflw {{.*#+}} xmm2 = xmm0[0,1,0,2,4,5,6,7]
 | 
						|
; SSE2-NEXT:    pshufd {{.*#+}} xmm0 = xmm5[0,2,2,3]
 | 
						|
; SSE2-NEXT:    pshuflw {{.*#+}} xmm0 = xmm0[0,1,0,2,4,5,6,7]
 | 
						|
; SSE2-NEXT:    punpckldq {{.*#+}} xmm0 = xmm0[0],xmm2[0],xmm0[1],xmm2[1]
 | 
						|
; SSE2-NEXT:    movsd {{.*#+}} xmm0 = xmm1[0],xmm0[1]
 | 
						|
; SSE2-NEXT:    retq
 | 
						|
;
 | 
						|
; SSSE3-LABEL: trunc_usat_v8i64_v8i16:
 | 
						|
; SSSE3:       # %bb.0:
 | 
						|
; SSSE3-NEXT:    movdqa {{.*#+}} xmm8 = [65535,65535]
 | 
						|
; SSSE3-NEXT:    movdqa {{.*#+}} xmm6 = [9223372039002259456,9223372039002259456]
 | 
						|
; SSSE3-NEXT:    movdqa %xmm2, %xmm5
 | 
						|
; SSSE3-NEXT:    pxor %xmm6, %xmm5
 | 
						|
; SSSE3-NEXT:    movdqa {{.*#+}} xmm9 = [9223372039002324991,9223372039002324991]
 | 
						|
; SSSE3-NEXT:    movdqa %xmm9, %xmm7
 | 
						|
; SSSE3-NEXT:    pcmpgtd %xmm5, %xmm7
 | 
						|
; SSSE3-NEXT:    pshufd {{.*#+}} xmm10 = xmm7[0,0,2,2]
 | 
						|
; SSSE3-NEXT:    pcmpeqd %xmm9, %xmm5
 | 
						|
; SSSE3-NEXT:    pshufd {{.*#+}} xmm4 = xmm5[1,1,3,3]
 | 
						|
; SSSE3-NEXT:    pand %xmm10, %xmm4
 | 
						|
; SSSE3-NEXT:    pshufd {{.*#+}} xmm5 = xmm7[1,1,3,3]
 | 
						|
; SSSE3-NEXT:    por %xmm4, %xmm5
 | 
						|
; SSSE3-NEXT:    pand %xmm5, %xmm2
 | 
						|
; SSSE3-NEXT:    pandn %xmm8, %xmm5
 | 
						|
; SSSE3-NEXT:    por %xmm2, %xmm5
 | 
						|
; SSSE3-NEXT:    movdqa %xmm3, %xmm2
 | 
						|
; SSSE3-NEXT:    pxor %xmm6, %xmm2
 | 
						|
; SSSE3-NEXT:    movdqa %xmm9, %xmm4
 | 
						|
; SSSE3-NEXT:    pcmpgtd %xmm2, %xmm4
 | 
						|
; SSSE3-NEXT:    pshufd {{.*#+}} xmm10 = xmm4[0,0,2,2]
 | 
						|
; SSSE3-NEXT:    pcmpeqd %xmm9, %xmm2
 | 
						|
; SSSE3-NEXT:    pshufd {{.*#+}} xmm7 = xmm2[1,1,3,3]
 | 
						|
; SSSE3-NEXT:    pand %xmm10, %xmm7
 | 
						|
; SSSE3-NEXT:    pshufd {{.*#+}} xmm2 = xmm4[1,1,3,3]
 | 
						|
; SSSE3-NEXT:    por %xmm7, %xmm2
 | 
						|
; SSSE3-NEXT:    pand %xmm2, %xmm3
 | 
						|
; SSSE3-NEXT:    pandn %xmm8, %xmm2
 | 
						|
; SSSE3-NEXT:    por %xmm3, %xmm2
 | 
						|
; SSSE3-NEXT:    movdqa %xmm0, %xmm3
 | 
						|
; SSSE3-NEXT:    pxor %xmm6, %xmm3
 | 
						|
; SSSE3-NEXT:    movdqa %xmm9, %xmm4
 | 
						|
; SSSE3-NEXT:    pcmpgtd %xmm3, %xmm4
 | 
						|
; SSSE3-NEXT:    pshufd {{.*#+}} xmm7 = xmm4[0,0,2,2]
 | 
						|
; SSSE3-NEXT:    pcmpeqd %xmm9, %xmm3
 | 
						|
; SSSE3-NEXT:    pshufd {{.*#+}} xmm3 = xmm3[1,1,3,3]
 | 
						|
; SSSE3-NEXT:    pand %xmm7, %xmm3
 | 
						|
; SSSE3-NEXT:    pshufd {{.*#+}} xmm4 = xmm4[1,1,3,3]
 | 
						|
; SSSE3-NEXT:    por %xmm3, %xmm4
 | 
						|
; SSSE3-NEXT:    pand %xmm4, %xmm0
 | 
						|
; SSSE3-NEXT:    pandn %xmm8, %xmm4
 | 
						|
; SSSE3-NEXT:    por %xmm0, %xmm4
 | 
						|
; SSSE3-NEXT:    pxor %xmm1, %xmm6
 | 
						|
; SSSE3-NEXT:    movdqa %xmm9, %xmm0
 | 
						|
; SSSE3-NEXT:    pcmpgtd %xmm6, %xmm0
 | 
						|
; SSSE3-NEXT:    pshufd {{.*#+}} xmm3 = xmm0[0,0,2,2]
 | 
						|
; SSSE3-NEXT:    pcmpeqd %xmm9, %xmm6
 | 
						|
; SSSE3-NEXT:    pshufd {{.*#+}} xmm6 = xmm6[1,1,3,3]
 | 
						|
; SSSE3-NEXT:    pand %xmm3, %xmm6
 | 
						|
; SSSE3-NEXT:    pshufd {{.*#+}} xmm0 = xmm0[1,1,3,3]
 | 
						|
; SSSE3-NEXT:    por %xmm6, %xmm0
 | 
						|
; SSSE3-NEXT:    pand %xmm0, %xmm1
 | 
						|
; SSSE3-NEXT:    pandn %xmm8, %xmm0
 | 
						|
; SSSE3-NEXT:    por %xmm1, %xmm0
 | 
						|
; SSSE3-NEXT:    pshufd {{.*#+}} xmm0 = xmm0[0,2,2,3]
 | 
						|
; SSSE3-NEXT:    pshuflw {{.*#+}} xmm0 = xmm0[0,2,2,3,4,5,6,7]
 | 
						|
; SSSE3-NEXT:    pshufd {{.*#+}} xmm1 = xmm4[0,2,2,3]
 | 
						|
; SSSE3-NEXT:    pshuflw {{.*#+}} xmm1 = xmm1[0,2,2,3,4,5,6,7]
 | 
						|
; SSSE3-NEXT:    punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
 | 
						|
; SSSE3-NEXT:    pshufd {{.*#+}} xmm0 = xmm2[0,2,2,3]
 | 
						|
; SSSE3-NEXT:    pshuflw {{.*#+}} xmm2 = xmm0[0,1,0,2,4,5,6,7]
 | 
						|
; SSSE3-NEXT:    pshufd {{.*#+}} xmm0 = xmm5[0,2,2,3]
 | 
						|
; SSSE3-NEXT:    pshuflw {{.*#+}} xmm0 = xmm0[0,1,0,2,4,5,6,7]
 | 
						|
; SSSE3-NEXT:    punpckldq {{.*#+}} xmm0 = xmm0[0],xmm2[0],xmm0[1],xmm2[1]
 | 
						|
; SSSE3-NEXT:    movsd {{.*#+}} xmm0 = xmm1[0],xmm0[1]
 | 
						|
; SSSE3-NEXT:    retq
 | 
						|
;
 | 
						|
; SSE41-LABEL: trunc_usat_v8i64_v8i16:
 | 
						|
; SSE41:       # %bb.0:
 | 
						|
; SSE41-NEXT:    movdqa %xmm0, %xmm8
 | 
						|
; SSE41-NEXT:    movapd {{.*#+}} xmm5 = [65535,65535]
 | 
						|
; SSE41-NEXT:    movdqa {{.*#+}} xmm6 = [9223372039002259456,9223372039002259456]
 | 
						|
; SSE41-NEXT:    movdqa %xmm1, %xmm0
 | 
						|
; SSE41-NEXT:    pxor %xmm6, %xmm0
 | 
						|
; SSE41-NEXT:    movdqa {{.*#+}} xmm9 = [9223372039002324991,9223372039002324991]
 | 
						|
; SSE41-NEXT:    movdqa %xmm9, %xmm7
 | 
						|
; SSE41-NEXT:    pcmpgtd %xmm0, %xmm7
 | 
						|
; SSE41-NEXT:    pshufd {{.*#+}} xmm4 = xmm7[0,0,2,2]
 | 
						|
; SSE41-NEXT:    pcmpeqd %xmm9, %xmm0
 | 
						|
; SSE41-NEXT:    pshufd {{.*#+}} xmm0 = xmm0[1,1,3,3]
 | 
						|
; SSE41-NEXT:    pand %xmm4, %xmm0
 | 
						|
; SSE41-NEXT:    por %xmm7, %xmm0
 | 
						|
; SSE41-NEXT:    movapd %xmm5, %xmm4
 | 
						|
; SSE41-NEXT:    blendvpd %xmm0, %xmm1, %xmm4
 | 
						|
; SSE41-NEXT:    movdqa %xmm8, %xmm0
 | 
						|
; SSE41-NEXT:    pxor %xmm6, %xmm0
 | 
						|
; SSE41-NEXT:    movdqa %xmm9, %xmm1
 | 
						|
; SSE41-NEXT:    pcmpgtd %xmm0, %xmm1
 | 
						|
; SSE41-NEXT:    pshufd {{.*#+}} xmm7 = xmm1[0,0,2,2]
 | 
						|
; SSE41-NEXT:    pcmpeqd %xmm9, %xmm0
 | 
						|
; SSE41-NEXT:    pshufd {{.*#+}} xmm0 = xmm0[1,1,3,3]
 | 
						|
; SSE41-NEXT:    pand %xmm7, %xmm0
 | 
						|
; SSE41-NEXT:    por %xmm1, %xmm0
 | 
						|
; SSE41-NEXT:    movapd %xmm5, %xmm1
 | 
						|
; SSE41-NEXT:    blendvpd %xmm0, %xmm8, %xmm1
 | 
						|
; SSE41-NEXT:    packusdw %xmm4, %xmm1
 | 
						|
; SSE41-NEXT:    movdqa %xmm3, %xmm0
 | 
						|
; SSE41-NEXT:    pxor %xmm6, %xmm0
 | 
						|
; SSE41-NEXT:    movdqa %xmm9, %xmm4
 | 
						|
; SSE41-NEXT:    pcmpgtd %xmm0, %xmm4
 | 
						|
; SSE41-NEXT:    pshufd {{.*#+}} xmm7 = xmm4[0,0,2,2]
 | 
						|
; SSE41-NEXT:    pcmpeqd %xmm9, %xmm0
 | 
						|
; SSE41-NEXT:    pshufd {{.*#+}} xmm0 = xmm0[1,1,3,3]
 | 
						|
; SSE41-NEXT:    pand %xmm7, %xmm0
 | 
						|
; SSE41-NEXT:    por %xmm4, %xmm0
 | 
						|
; SSE41-NEXT:    movapd %xmm5, %xmm4
 | 
						|
; SSE41-NEXT:    blendvpd %xmm0, %xmm3, %xmm4
 | 
						|
; SSE41-NEXT:    pxor %xmm2, %xmm6
 | 
						|
; SSE41-NEXT:    movdqa %xmm9, %xmm3
 | 
						|
; SSE41-NEXT:    pcmpgtd %xmm6, %xmm3
 | 
						|
; SSE41-NEXT:    pshufd {{.*#+}} xmm7 = xmm3[0,0,2,2]
 | 
						|
; SSE41-NEXT:    pcmpeqd %xmm9, %xmm6
 | 
						|
; SSE41-NEXT:    pshufd {{.*#+}} xmm0 = xmm6[1,1,3,3]
 | 
						|
; SSE41-NEXT:    pand %xmm7, %xmm0
 | 
						|
; SSE41-NEXT:    por %xmm3, %xmm0
 | 
						|
; SSE41-NEXT:    blendvpd %xmm0, %xmm2, %xmm5
 | 
						|
; SSE41-NEXT:    packusdw %xmm4, %xmm5
 | 
						|
; SSE41-NEXT:    packusdw %xmm5, %xmm1
 | 
						|
; SSE41-NEXT:    movdqa %xmm1, %xmm0
 | 
						|
; SSE41-NEXT:    retq
 | 
						|
;
 | 
						|
; AVX1-LABEL: trunc_usat_v8i64_v8i16:
 | 
						|
; AVX1:       # %bb.0:
 | 
						|
; AVX1-NEXT:    vmovapd {{.*#+}} ymm2 = [65535,65535,65535,65535]
 | 
						|
; AVX1-NEXT:    vextractf128 $1, %ymm0, %xmm3
 | 
						|
; AVX1-NEXT:    vmovdqa {{.*#+}} xmm4 = [9223372036854775808,9223372036854775808]
 | 
						|
; AVX1-NEXT:    vpxor %xmm4, %xmm3, %xmm3
 | 
						|
; AVX1-NEXT:    vmovdqa {{.*#+}} xmm5 = [9223372036854841343,9223372036854841343]
 | 
						|
; AVX1-NEXT:    vpcmpgtq %xmm3, %xmm5, %xmm3
 | 
						|
; AVX1-NEXT:    vpxor %xmm4, %xmm0, %xmm6
 | 
						|
; AVX1-NEXT:    vpcmpgtq %xmm6, %xmm5, %xmm6
 | 
						|
; AVX1-NEXT:    vinsertf128 $1, %xmm3, %ymm6, %ymm3
 | 
						|
; AVX1-NEXT:    vblendvpd %ymm3, %ymm0, %ymm2, %ymm0
 | 
						|
; AVX1-NEXT:    vextractf128 $1, %ymm1, %xmm3
 | 
						|
; AVX1-NEXT:    vpxor %xmm4, %xmm3, %xmm3
 | 
						|
; AVX1-NEXT:    vpcmpgtq %xmm3, %xmm5, %xmm3
 | 
						|
; AVX1-NEXT:    vpxor %xmm4, %xmm1, %xmm4
 | 
						|
; AVX1-NEXT:    vpcmpgtq %xmm4, %xmm5, %xmm4
 | 
						|
; AVX1-NEXT:    vinsertf128 $1, %xmm3, %ymm4, %ymm3
 | 
						|
; AVX1-NEXT:    vblendvpd %ymm3, %ymm1, %ymm2, %ymm1
 | 
						|
; AVX1-NEXT:    vextractf128 $1, %ymm1, %xmm2
 | 
						|
; AVX1-NEXT:    vpackusdw %xmm2, %xmm1, %xmm1
 | 
						|
; AVX1-NEXT:    vextractf128 $1, %ymm0, %xmm2
 | 
						|
; AVX1-NEXT:    vpackusdw %xmm2, %xmm0, %xmm0
 | 
						|
; AVX1-NEXT:    vpackusdw %xmm1, %xmm0, %xmm0
 | 
						|
; AVX1-NEXT:    vzeroupper
 | 
						|
; AVX1-NEXT:    retq
 | 
						|
;
 | 
						|
; AVX2-LABEL: trunc_usat_v8i64_v8i16:
 | 
						|
; AVX2:       # %bb.0:
 | 
						|
; AVX2-NEXT:    vbroadcastsd {{.*#+}} ymm2 = [65535,65535,65535,65535]
 | 
						|
; AVX2-NEXT:    vpbroadcastq {{.*#+}} ymm3 = [9223372036854775808,9223372036854775808,9223372036854775808,9223372036854775808]
 | 
						|
; AVX2-NEXT:    vpxor %ymm3, %ymm1, %ymm4
 | 
						|
; AVX2-NEXT:    vpbroadcastq {{.*#+}} ymm5 = [9223372036854841343,9223372036854841343,9223372036854841343,9223372036854841343]
 | 
						|
; AVX2-NEXT:    vpcmpgtq %ymm4, %ymm5, %ymm4
 | 
						|
; AVX2-NEXT:    vblendvpd %ymm4, %ymm1, %ymm2, %ymm1
 | 
						|
; AVX2-NEXT:    vpxor %ymm3, %ymm0, %ymm3
 | 
						|
; AVX2-NEXT:    vpcmpgtq %ymm3, %ymm5, %ymm3
 | 
						|
; AVX2-NEXT:    vblendvpd %ymm3, %ymm0, %ymm2, %ymm0
 | 
						|
; AVX2-NEXT:    vpackusdw %ymm1, %ymm0, %ymm0
 | 
						|
; AVX2-NEXT:    vpermq {{.*#+}} ymm0 = ymm0[0,2,1,3]
 | 
						|
; AVX2-NEXT:    vextracti128 $1, %ymm0, %xmm1
 | 
						|
; AVX2-NEXT:    vpackusdw %xmm1, %xmm0, %xmm0
 | 
						|
; AVX2-NEXT:    vzeroupper
 | 
						|
; AVX2-NEXT:    retq
 | 
						|
;
; AVX512-LABEL: trunc_usat_v8i64_v8i16:
; AVX512:       # %bb.0:
; AVX512-NEXT:    vpmovusqw %zmm0, %xmm0
; AVX512-NEXT:    vzeroupper
; AVX512-NEXT:    retq
  %1 = icmp ult <8 x i64> %a0, <i64 65535, i64 65535, i64 65535, i64 65535, i64 65535, i64 65535, i64 65535, i64 65535>
  %2 = select <8 x i1> %1, <8 x i64> %a0, <8 x i64> <i64 65535, i64 65535, i64 65535, i64 65535, i64 65535, i64 65535, i64 65535, i64 65535>
  %3 = trunc <8 x i64> %2 to <8 x i16>
  ret <8 x i16> %3
}

define <8 x i16> @trunc_usat_v8i32_v8i16(<8 x i32> %a0) {
; SSE2-LABEL: trunc_usat_v8i32_v8i16:
 | 
						|
; SSE2:       # %bb.0:
 | 
						|
; SSE2-NEXT:    movdqa {{.*#+}} xmm2 = [2147483648,2147483648,2147483648,2147483648]
 | 
						|
; SSE2-NEXT:    movdqa %xmm0, %xmm3
 | 
						|
; SSE2-NEXT:    pxor %xmm2, %xmm3
 | 
						|
; SSE2-NEXT:    movdqa {{.*#+}} xmm4 = [2147549183,2147549183,2147549183,2147549183]
 | 
						|
; SSE2-NEXT:    movdqa %xmm4, %xmm5
 | 
						|
; SSE2-NEXT:    pcmpgtd %xmm3, %xmm5
 | 
						|
; SSE2-NEXT:    pcmpeqd %xmm3, %xmm3
 | 
						|
; SSE2-NEXT:    pand %xmm5, %xmm0
 | 
						|
; SSE2-NEXT:    pxor %xmm3, %xmm5
 | 
						|
; SSE2-NEXT:    por %xmm5, %xmm0
 | 
						|
; SSE2-NEXT:    pxor %xmm1, %xmm2
 | 
						|
; SSE2-NEXT:    pcmpgtd %xmm2, %xmm4
 | 
						|
; SSE2-NEXT:    pxor %xmm4, %xmm3
 | 
						|
; SSE2-NEXT:    pand %xmm1, %xmm4
 | 
						|
; SSE2-NEXT:    por %xmm3, %xmm4
 | 
						|
; SSE2-NEXT:    pslld $16, %xmm4
 | 
						|
; SSE2-NEXT:    psrad $16, %xmm4
 | 
						|
; SSE2-NEXT:    pslld $16, %xmm0
 | 
						|
; SSE2-NEXT:    psrad $16, %xmm0
 | 
						|
; SSE2-NEXT:    packssdw %xmm4, %xmm0
 | 
						|
; SSE2-NEXT:    retq
 | 
						|
;
 | 
						|
; SSSE3-LABEL: trunc_usat_v8i32_v8i16:
 | 
						|
; SSSE3:       # %bb.0:
 | 
						|
; SSSE3-NEXT:    movdqa {{.*#+}} xmm2 = [65535,65535,65535,65535]
 | 
						|
; SSSE3-NEXT:    movdqa {{.*#+}} xmm3 = [2147483648,2147483648,2147483648,2147483648]
 | 
						|
; SSSE3-NEXT:    movdqa %xmm0, %xmm4
 | 
						|
; SSSE3-NEXT:    pxor %xmm3, %xmm4
 | 
						|
; SSSE3-NEXT:    movdqa {{.*#+}} xmm5 = [2147549183,2147549183,2147549183,2147549183]
 | 
						|
; SSSE3-NEXT:    movdqa %xmm5, %xmm6
 | 
						|
; SSSE3-NEXT:    pcmpgtd %xmm4, %xmm6
 | 
						|
; SSSE3-NEXT:    pand %xmm6, %xmm0
 | 
						|
; SSSE3-NEXT:    pandn %xmm2, %xmm6
 | 
						|
; SSSE3-NEXT:    por %xmm6, %xmm0
 | 
						|
; SSSE3-NEXT:    pxor %xmm1, %xmm3
 | 
						|
; SSSE3-NEXT:    pcmpgtd %xmm3, %xmm5
 | 
						|
; SSSE3-NEXT:    pand %xmm5, %xmm1
 | 
						|
; SSSE3-NEXT:    pandn %xmm2, %xmm5
 | 
						|
; SSSE3-NEXT:    por %xmm1, %xmm5
 | 
						|
; SSSE3-NEXT:    movdqa {{.*#+}} xmm1 = [0,1,4,5,8,9,12,13,8,9,12,13,12,13,14,15]
 | 
						|
; SSSE3-NEXT:    pshufb %xmm1, %xmm5
 | 
						|
; SSSE3-NEXT:    pshufb %xmm1, %xmm0
 | 
						|
; SSSE3-NEXT:    punpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm5[0]
 | 
						|
; SSSE3-NEXT:    retq
 | 
						|
;
 | 
						|
; SSE41-LABEL: trunc_usat_v8i32_v8i16:
 | 
						|
; SSE41:       # %bb.0:
 | 
						|
; SSE41-NEXT:    movdqa {{.*#+}} xmm2 = [65535,65535,65535,65535]
 | 
						|
; SSE41-NEXT:    pminud %xmm2, %xmm1
 | 
						|
; SSE41-NEXT:    pminud %xmm2, %xmm0
 | 
						|
; SSE41-NEXT:    packusdw %xmm1, %xmm0
 | 
						|
; SSE41-NEXT:    retq
 | 
						|
;
 | 
						|
; AVX1-LABEL: trunc_usat_v8i32_v8i16:
 | 
						|
; AVX1:       # %bb.0:
 | 
						|
; AVX1-NEXT:    vextractf128 $1, %ymm0, %xmm1
 | 
						|
; AVX1-NEXT:    vmovdqa {{.*#+}} xmm2 = [65535,65535,65535,65535]
 | 
						|
; AVX1-NEXT:    vpminud %xmm2, %xmm1, %xmm1
 | 
						|
; AVX1-NEXT:    vpminud %xmm2, %xmm0, %xmm0
 | 
						|
; AVX1-NEXT:    vpackusdw %xmm1, %xmm0, %xmm0
 | 
						|
; AVX1-NEXT:    vzeroupper
 | 
						|
; AVX1-NEXT:    retq
 | 
						|
;
 | 
						|
; AVX2-LABEL: trunc_usat_v8i32_v8i16:
 | 
						|
; AVX2:       # %bb.0:
 | 
						|
; AVX2-NEXT:    vpbroadcastd {{.*#+}} ymm1 = [65535,65535,65535,65535,65535,65535,65535,65535]
 | 
						|
; AVX2-NEXT:    vpminud %ymm1, %ymm0, %ymm0
 | 
						|
; AVX2-NEXT:    vextracti128 $1, %ymm0, %xmm1
 | 
						|
; AVX2-NEXT:    vpackusdw %xmm1, %xmm0, %xmm0
 | 
						|
; AVX2-NEXT:    vzeroupper
 | 
						|
; AVX2-NEXT:    retq
 | 
						|
;
 | 
						|
; AVX512F-LABEL: trunc_usat_v8i32_v8i16:
 | 
						|
; AVX512F:       # %bb.0:
 | 
						|
; AVX512F-NEXT:    vpbroadcastd {{.*#+}} ymm1 = [65535,65535,65535,65535,65535,65535,65535,65535]
 | 
						|
; AVX512F-NEXT:    vpminud %ymm1, %ymm0, %ymm0
 | 
						|
; AVX512F-NEXT:    vpmovdw %zmm0, %ymm0
 | 
						|
; AVX512F-NEXT:    # kill: def $xmm0 killed $xmm0 killed $ymm0
 | 
						|
; AVX512F-NEXT:    vzeroupper
 | 
						|
; AVX512F-NEXT:    retq
 | 
						|
;
 | 
						|
; AVX512VL-LABEL: trunc_usat_v8i32_v8i16:
 | 
						|
; AVX512VL:       # %bb.0:
 | 
						|
; AVX512VL-NEXT:    vpmovusdw %ymm0, %xmm0
 | 
						|
; AVX512VL-NEXT:    vzeroupper
 | 
						|
; AVX512VL-NEXT:    retq
 | 
						|
;
 | 
						|
; AVX512BW-LABEL: trunc_usat_v8i32_v8i16:
 | 
						|
; AVX512BW:       # %bb.0:
 | 
						|
; AVX512BW-NEXT:    vpbroadcastd {{.*#+}} ymm1 = [65535,65535,65535,65535,65535,65535,65535,65535]
 | 
						|
; AVX512BW-NEXT:    vpminud %ymm1, %ymm0, %ymm0
 | 
						|
; AVX512BW-NEXT:    vpmovdw %zmm0, %ymm0
 | 
						|
; AVX512BW-NEXT:    # kill: def $xmm0 killed $xmm0 killed $ymm0
 | 
						|
; AVX512BW-NEXT:    vzeroupper
 | 
						|
; AVX512BW-NEXT:    retq
 | 
						|
;
 | 
						|
; AVX512BWVL-LABEL: trunc_usat_v8i32_v8i16:
 | 
						|
; AVX512BWVL:       # %bb.0:
 | 
						|
; AVX512BWVL-NEXT:    vpmovusdw %ymm0, %xmm0
 | 
						|
; AVX512BWVL-NEXT:    vzeroupper
 | 
						|
; AVX512BWVL-NEXT:    retq
  %1 = icmp ult <8 x i32> %a0, <i32 65535, i32 65535, i32 65535, i32 65535, i32 65535, i32 65535, i32 65535, i32 65535>
  %2 = select <8 x i1> %1, <8 x i32> %a0, <8 x i32> <i32 65535, i32 65535, i32 65535, i32 65535, i32 65535, i32 65535, i32 65535, i32 65535>
  %3 = trunc <8 x i32> %2 to <8 x i16>
  ret <8 x i16> %3
}

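; For i32 -> i16 sources the unsigned clamp maps directly onto pminud followed
; by packusdw on SSE4.1/AVX/AVX2 (see the checks above), so no compare/blend
; sequence is needed; SSE2/SSSE3 still have to materialize the compare mask by
; hand.
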
define <16 x i16> @trunc_usat_v16i32_v16i16(<16 x i32> %a0) {
; SSE2-LABEL: trunc_usat_v16i32_v16i16:
 | 
						|
; SSE2:       # %bb.0:
 | 
						|
; SSE2-NEXT:    movdqa %xmm1, %xmm8
 | 
						|
; SSE2-NEXT:    movdqa {{.*#+}} xmm6 = [2147483648,2147483648,2147483648,2147483648]
 | 
						|
; SSE2-NEXT:    movdqa %xmm2, %xmm7
 | 
						|
; SSE2-NEXT:    pxor %xmm6, %xmm7
 | 
						|
; SSE2-NEXT:    movdqa {{.*#+}} xmm5 = [2147549183,2147549183,2147549183,2147549183]
 | 
						|
; SSE2-NEXT:    movdqa %xmm5, %xmm1
 | 
						|
; SSE2-NEXT:    pcmpgtd %xmm7, %xmm1
 | 
						|
; SSE2-NEXT:    pcmpeqd %xmm7, %xmm7
 | 
						|
; SSE2-NEXT:    pand %xmm1, %xmm2
 | 
						|
; SSE2-NEXT:    pxor %xmm7, %xmm1
 | 
						|
; SSE2-NEXT:    por %xmm2, %xmm1
 | 
						|
; SSE2-NEXT:    movdqa %xmm3, %xmm4
 | 
						|
; SSE2-NEXT:    pxor %xmm6, %xmm4
 | 
						|
; SSE2-NEXT:    movdqa %xmm5, %xmm2
 | 
						|
; SSE2-NEXT:    pcmpgtd %xmm4, %xmm2
 | 
						|
; SSE2-NEXT:    pand %xmm2, %xmm3
 | 
						|
; SSE2-NEXT:    pxor %xmm7, %xmm2
 | 
						|
; SSE2-NEXT:    por %xmm3, %xmm2
 | 
						|
; SSE2-NEXT:    movdqa %xmm0, %xmm3
 | 
						|
; SSE2-NEXT:    pxor %xmm6, %xmm3
 | 
						|
; SSE2-NEXT:    movdqa %xmm5, %xmm4
 | 
						|
; SSE2-NEXT:    pcmpgtd %xmm3, %xmm4
 | 
						|
; SSE2-NEXT:    pand %xmm4, %xmm0
 | 
						|
; SSE2-NEXT:    pxor %xmm7, %xmm4
 | 
						|
; SSE2-NEXT:    por %xmm4, %xmm0
 | 
						|
; SSE2-NEXT:    pxor %xmm8, %xmm6
 | 
						|
; SSE2-NEXT:    pcmpgtd %xmm6, %xmm5
 | 
						|
; SSE2-NEXT:    pxor %xmm5, %xmm7
 | 
						|
; SSE2-NEXT:    pand %xmm8, %xmm5
 | 
						|
; SSE2-NEXT:    por %xmm7, %xmm5
 | 
						|
; SSE2-NEXT:    pslld $16, %xmm5
 | 
						|
; SSE2-NEXT:    psrad $16, %xmm5
 | 
						|
; SSE2-NEXT:    pslld $16, %xmm0
 | 
						|
; SSE2-NEXT:    psrad $16, %xmm0
 | 
						|
; SSE2-NEXT:    packssdw %xmm5, %xmm0
 | 
						|
; SSE2-NEXT:    pslld $16, %xmm2
 | 
						|
; SSE2-NEXT:    psrad $16, %xmm2
 | 
						|
; SSE2-NEXT:    pslld $16, %xmm1
 | 
						|
; SSE2-NEXT:    psrad $16, %xmm1
 | 
						|
; SSE2-NEXT:    packssdw %xmm2, %xmm1
 | 
						|
; SSE2-NEXT:    retq
 | 
						|
;
 | 
						|
; SSSE3-LABEL: trunc_usat_v16i32_v16i16:
 | 
						|
; SSSE3:       # %bb.0:
 | 
						|
; SSSE3-NEXT:    movdqa %xmm1, %xmm8
 | 
						|
; SSSE3-NEXT:    movdqa {{.*#+}} xmm6 = [2147483648,2147483648,2147483648,2147483648]
 | 
						|
; SSSE3-NEXT:    movdqa %xmm2, %xmm7
 | 
						|
; SSSE3-NEXT:    pxor %xmm6, %xmm7
 | 
						|
; SSSE3-NEXT:    movdqa {{.*#+}} xmm5 = [2147549183,2147549183,2147549183,2147549183]
 | 
						|
; SSSE3-NEXT:    movdqa %xmm5, %xmm1
 | 
						|
; SSSE3-NEXT:    pcmpgtd %xmm7, %xmm1
 | 
						|
; SSSE3-NEXT:    pcmpeqd %xmm7, %xmm7
 | 
						|
; SSSE3-NEXT:    pand %xmm1, %xmm2
 | 
						|
; SSSE3-NEXT:    pxor %xmm7, %xmm1
 | 
						|
; SSSE3-NEXT:    por %xmm2, %xmm1
 | 
						|
; SSSE3-NEXT:    movdqa %xmm3, %xmm4
 | 
						|
; SSSE3-NEXT:    pxor %xmm6, %xmm4
 | 
						|
; SSSE3-NEXT:    movdqa %xmm5, %xmm2
 | 
						|
; SSSE3-NEXT:    pcmpgtd %xmm4, %xmm2
 | 
						|
; SSSE3-NEXT:    pand %xmm2, %xmm3
 | 
						|
; SSSE3-NEXT:    pxor %xmm7, %xmm2
 | 
						|
; SSSE3-NEXT:    por %xmm3, %xmm2
 | 
						|
; SSSE3-NEXT:    movdqa %xmm0, %xmm3
 | 
						|
; SSSE3-NEXT:    pxor %xmm6, %xmm3
 | 
						|
; SSSE3-NEXT:    movdqa %xmm5, %xmm4
 | 
						|
; SSSE3-NEXT:    pcmpgtd %xmm3, %xmm4
 | 
						|
; SSSE3-NEXT:    pand %xmm4, %xmm0
 | 
						|
; SSSE3-NEXT:    pxor %xmm7, %xmm4
 | 
						|
; SSSE3-NEXT:    por %xmm4, %xmm0
 | 
						|
; SSSE3-NEXT:    pxor %xmm8, %xmm6
 | 
						|
; SSSE3-NEXT:    pcmpgtd %xmm6, %xmm5
 | 
						|
; SSSE3-NEXT:    pxor %xmm5, %xmm7
 | 
						|
; SSSE3-NEXT:    pand %xmm8, %xmm5
 | 
						|
; SSSE3-NEXT:    por %xmm7, %xmm5
 | 
						|
; SSSE3-NEXT:    pslld $16, %xmm5
 | 
						|
; SSSE3-NEXT:    psrad $16, %xmm5
 | 
						|
; SSSE3-NEXT:    pslld $16, %xmm0
 | 
						|
; SSSE3-NEXT:    psrad $16, %xmm0
 | 
						|
; SSSE3-NEXT:    packssdw %xmm5, %xmm0
 | 
						|
; SSSE3-NEXT:    pslld $16, %xmm2
 | 
						|
; SSSE3-NEXT:    psrad $16, %xmm2
 | 
						|
; SSSE3-NEXT:    pslld $16, %xmm1
 | 
						|
; SSSE3-NEXT:    psrad $16, %xmm1
 | 
						|
; SSSE3-NEXT:    packssdw %xmm2, %xmm1
 | 
						|
; SSSE3-NEXT:    retq
 | 
						|
;
 | 
						|
; SSE41-LABEL: trunc_usat_v16i32_v16i16:
 | 
						|
; SSE41:       # %bb.0:
 | 
						|
; SSE41-NEXT:    movdqa {{.*#+}} xmm4 = [65535,65535,65535,65535]
 | 
						|
; SSE41-NEXT:    pminud %xmm4, %xmm3
 | 
						|
; SSE41-NEXT:    pminud %xmm4, %xmm2
 | 
						|
; SSE41-NEXT:    packusdw %xmm3, %xmm2
 | 
						|
; SSE41-NEXT:    pminud %xmm4, %xmm1
 | 
						|
; SSE41-NEXT:    pminud %xmm4, %xmm0
 | 
						|
; SSE41-NEXT:    packusdw %xmm1, %xmm0
 | 
						|
; SSE41-NEXT:    movdqa %xmm2, %xmm1
 | 
						|
; SSE41-NEXT:    retq
 | 
						|
;
 | 
						|
; AVX1-LABEL: trunc_usat_v16i32_v16i16:
 | 
						|
; AVX1:       # %bb.0:
 | 
						|
; AVX1-NEXT:    vextractf128 $1, %ymm0, %xmm2
 | 
						|
; AVX1-NEXT:    vmovdqa {{.*#+}} xmm3 = [65535,65535,65535,65535]
 | 
						|
; AVX1-NEXT:    vpminud %xmm3, %xmm2, %xmm2
 | 
						|
; AVX1-NEXT:    vpminud %xmm3, %xmm0, %xmm0
 | 
						|
; AVX1-NEXT:    vpackusdw %xmm2, %xmm0, %xmm0
 | 
						|
; AVX1-NEXT:    vextractf128 $1, %ymm1, %xmm2
 | 
						|
; AVX1-NEXT:    vpminud %xmm3, %xmm2, %xmm2
 | 
						|
; AVX1-NEXT:    vpminud %xmm3, %xmm1, %xmm1
 | 
						|
; AVX1-NEXT:    vpackusdw %xmm2, %xmm1, %xmm1
 | 
						|
; AVX1-NEXT:    vinsertf128 $1, %xmm1, %ymm0, %ymm0
 | 
						|
; AVX1-NEXT:    retq
 | 
						|
;
 | 
						|
; AVX2-LABEL: trunc_usat_v16i32_v16i16:
 | 
						|
; AVX2:       # %bb.0:
 | 
						|
; AVX2-NEXT:    vpbroadcastd {{.*#+}} ymm2 = [65535,65535,65535,65535,65535,65535,65535,65535]
 | 
						|
; AVX2-NEXT:    vpminud %ymm2, %ymm1, %ymm1
 | 
						|
; AVX2-NEXT:    vpminud %ymm2, %ymm0, %ymm0
 | 
						|
; AVX2-NEXT:    vpackusdw %ymm1, %ymm0, %ymm0
 | 
						|
; AVX2-NEXT:    vpermq {{.*#+}} ymm0 = ymm0[0,2,1,3]
 | 
						|
; AVX2-NEXT:    retq
 | 
						|
;
; AVX512-LABEL: trunc_usat_v16i32_v16i16:
; AVX512:       # %bb.0:
; AVX512-NEXT:    vpmovusdw %zmm0, %ymm0
; AVX512-NEXT:    retq
  %1 = icmp ult <16 x i32> %a0, <i32 65535, i32 65535, i32 65535, i32 65535, i32 65535, i32 65535, i32 65535, i32 65535, i32 65535, i32 65535, i32 65535, i32 65535, i32 65535, i32 65535, i32 65535, i32 65535>
  %2 = select <16 x i1> %1, <16 x i32> %a0, <16 x i32> <i32 65535, i32 65535, i32 65535, i32 65535, i32 65535, i32 65535, i32 65535, i32 65535, i32 65535, i32 65535, i32 65535, i32 65535, i32 65535, i32 65535, i32 65535, i32 65535>
  %3 = trunc <16 x i32> %2 to <16 x i16>
  ret <16 x i16> %3
}

;
; Unsigned saturation truncation to v16i8
;

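; Here the clamp constant is [0,255]; the clamped lanes are packed down with
; packuswb on SSE, and the compare mask is built with the same sign-flip trick
; as in the sections above.
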
define <8 x i8> @trunc_usat_v8i64_v8i8(<8 x i64> %a0) {
; SSE2-LABEL: trunc_usat_v8i64_v8i8:
 | 
						|
; SSE2:       # %bb.0:
 | 
						|
; SSE2-NEXT:    movdqa {{.*#+}} xmm8 = [255,255]
 | 
						|
; SSE2-NEXT:    movdqa {{.*#+}} xmm5 = [9223372039002259456,9223372039002259456]
 | 
						|
; SSE2-NEXT:    movdqa %xmm1, %xmm7
 | 
						|
; SSE2-NEXT:    pxor %xmm5, %xmm7
 | 
						|
; SSE2-NEXT:    movdqa {{.*#+}} xmm9 = [9223372039002259711,9223372039002259711]
 | 
						|
; SSE2-NEXT:    movdqa %xmm9, %xmm6
 | 
						|
; SSE2-NEXT:    pcmpgtd %xmm7, %xmm6
 | 
						|
; SSE2-NEXT:    pshufd {{.*#+}} xmm4 = xmm6[0,0,2,2]
 | 
						|
; SSE2-NEXT:    pcmpeqd %xmm9, %xmm7
 | 
						|
; SSE2-NEXT:    pshufd {{.*#+}} xmm7 = xmm7[1,1,3,3]
 | 
						|
; SSE2-NEXT:    pand %xmm4, %xmm7
 | 
						|
; SSE2-NEXT:    pshufd {{.*#+}} xmm4 = xmm6[1,1,3,3]
 | 
						|
; SSE2-NEXT:    por %xmm7, %xmm4
 | 
						|
; SSE2-NEXT:    pand %xmm4, %xmm1
 | 
						|
; SSE2-NEXT:    pandn %xmm8, %xmm4
 | 
						|
; SSE2-NEXT:    por %xmm1, %xmm4
 | 
						|
; SSE2-NEXT:    movdqa %xmm0, %xmm1
 | 
						|
; SSE2-NEXT:    pxor %xmm5, %xmm1
 | 
						|
; SSE2-NEXT:    movdqa %xmm9, %xmm6
 | 
						|
; SSE2-NEXT:    pcmpgtd %xmm1, %xmm6
 | 
						|
; SSE2-NEXT:    pshufd {{.*#+}} xmm7 = xmm6[0,0,2,2]
 | 
						|
; SSE2-NEXT:    pcmpeqd %xmm9, %xmm1
 | 
						|
; SSE2-NEXT:    pshufd {{.*#+}} xmm1 = xmm1[1,1,3,3]
 | 
						|
; SSE2-NEXT:    pand %xmm7, %xmm1
 | 
						|
; SSE2-NEXT:    pshufd {{.*#+}} xmm6 = xmm6[1,1,3,3]
 | 
						|
; SSE2-NEXT:    por %xmm1, %xmm6
 | 
						|
; SSE2-NEXT:    pand %xmm6, %xmm0
 | 
						|
; SSE2-NEXT:    pandn %xmm8, %xmm6
 | 
						|
; SSE2-NEXT:    por %xmm6, %xmm0
 | 
						|
; SSE2-NEXT:    packuswb %xmm4, %xmm0
 | 
						|
; SSE2-NEXT:    movdqa %xmm3, %xmm1
 | 
						|
; SSE2-NEXT:    pxor %xmm5, %xmm1
 | 
						|
; SSE2-NEXT:    movdqa %xmm9, %xmm4
 | 
						|
; SSE2-NEXT:    pcmpgtd %xmm1, %xmm4
 | 
						|
; SSE2-NEXT:    pshufd {{.*#+}} xmm6 = xmm4[0,0,2,2]
 | 
						|
; SSE2-NEXT:    pcmpeqd %xmm9, %xmm1
 | 
						|
; SSE2-NEXT:    pshufd {{.*#+}} xmm1 = xmm1[1,1,3,3]
 | 
						|
; SSE2-NEXT:    pand %xmm6, %xmm1
 | 
						|
; SSE2-NEXT:    pshufd {{.*#+}} xmm4 = xmm4[1,1,3,3]
 | 
						|
; SSE2-NEXT:    por %xmm1, %xmm4
 | 
						|
; SSE2-NEXT:    pand %xmm4, %xmm3
 | 
						|
; SSE2-NEXT:    pandn %xmm8, %xmm4
 | 
						|
; SSE2-NEXT:    por %xmm3, %xmm4
 | 
						|
; SSE2-NEXT:    pxor %xmm2, %xmm5
 | 
						|
; SSE2-NEXT:    movdqa %xmm9, %xmm1
 | 
						|
; SSE2-NEXT:    pcmpgtd %xmm5, %xmm1
 | 
						|
; SSE2-NEXT:    pshufd {{.*#+}} xmm3 = xmm1[0,0,2,2]
 | 
						|
; SSE2-NEXT:    pcmpeqd %xmm9, %xmm5
 | 
						|
; SSE2-NEXT:    pshufd {{.*#+}} xmm5 = xmm5[1,1,3,3]
 | 
						|
; SSE2-NEXT:    pand %xmm3, %xmm5
 | 
						|
; SSE2-NEXT:    pshufd {{.*#+}} xmm1 = xmm1[1,1,3,3]
 | 
						|
; SSE2-NEXT:    por %xmm5, %xmm1
 | 
						|
; SSE2-NEXT:    pand %xmm1, %xmm2
 | 
						|
; SSE2-NEXT:    pandn %xmm8, %xmm1
 | 
						|
; SSE2-NEXT:    por %xmm2, %xmm1
 | 
						|
; SSE2-NEXT:    packuswb %xmm4, %xmm1
 | 
						|
; SSE2-NEXT:    packuswb %xmm1, %xmm0
 | 
						|
; SSE2-NEXT:    retq
 | 
						|
;
 | 
						|
; SSSE3-LABEL: trunc_usat_v8i64_v8i8:
 | 
						|
; SSSE3:       # %bb.0:
 | 
						|
; SSSE3-NEXT:    movdqa {{.*#+}} xmm8 = [255,255]
 | 
						|
; SSSE3-NEXT:    movdqa {{.*#+}} xmm5 = [9223372039002259456,9223372039002259456]
 | 
						|
; SSSE3-NEXT:    movdqa %xmm1, %xmm7
 | 
						|
; SSSE3-NEXT:    pxor %xmm5, %xmm7
 | 
						|
; SSSE3-NEXT:    movdqa {{.*#+}} xmm9 = [9223372039002259711,9223372039002259711]
 | 
						|
; SSSE3-NEXT:    movdqa %xmm9, %xmm6
 | 
						|
; SSSE3-NEXT:    pcmpgtd %xmm7, %xmm6
 | 
						|
; SSSE3-NEXT:    pshufd {{.*#+}} xmm4 = xmm6[0,0,2,2]
 | 
						|
; SSSE3-NEXT:    pcmpeqd %xmm9, %xmm7
 | 
						|
; SSSE3-NEXT:    pshufd {{.*#+}} xmm7 = xmm7[1,1,3,3]
 | 
						|
; SSSE3-NEXT:    pand %xmm4, %xmm7
 | 
						|
; SSSE3-NEXT:    pshufd {{.*#+}} xmm4 = xmm6[1,1,3,3]
 | 
						|
; SSSE3-NEXT:    por %xmm7, %xmm4
 | 
						|
; SSSE3-NEXT:    pand %xmm4, %xmm1
 | 
						|
; SSSE3-NEXT:    pandn %xmm8, %xmm4
 | 
						|
; SSSE3-NEXT:    por %xmm1, %xmm4
 | 
						|
; SSSE3-NEXT:    movdqa %xmm0, %xmm1
 | 
						|
; SSSE3-NEXT:    pxor %xmm5, %xmm1
 | 
						|
; SSSE3-NEXT:    movdqa %xmm9, %xmm6
 | 
						|
; SSSE3-NEXT:    pcmpgtd %xmm1, %xmm6
 | 
						|
; SSSE3-NEXT:    pshufd {{.*#+}} xmm7 = xmm6[0,0,2,2]
 | 
						|
; SSSE3-NEXT:    pcmpeqd %xmm9, %xmm1
 | 
						|
; SSSE3-NEXT:    pshufd {{.*#+}} xmm1 = xmm1[1,1,3,3]
 | 
						|
; SSSE3-NEXT:    pand %xmm7, %xmm1
 | 
						|
; SSSE3-NEXT:    pshufd {{.*#+}} xmm6 = xmm6[1,1,3,3]
 | 
						|
; SSSE3-NEXT:    por %xmm1, %xmm6
 | 
						|
; SSSE3-NEXT:    pand %xmm6, %xmm0
 | 
						|
; SSSE3-NEXT:    pandn %xmm8, %xmm6
 | 
						|
; SSSE3-NEXT:    por %xmm6, %xmm0
 | 
						|
; SSSE3-NEXT:    packuswb %xmm4, %xmm0
 | 
						|
; SSSE3-NEXT:    movdqa %xmm3, %xmm1
 | 
						|
; SSSE3-NEXT:    pxor %xmm5, %xmm1
 | 
						|
; SSSE3-NEXT:    movdqa %xmm9, %xmm4
 | 
						|
; SSSE3-NEXT:    pcmpgtd %xmm1, %xmm4
 | 
						|
; SSSE3-NEXT:    pshufd {{.*#+}} xmm6 = xmm4[0,0,2,2]
 | 
						|
; SSSE3-NEXT:    pcmpeqd %xmm9, %xmm1
 | 
						|
; SSSE3-NEXT:    pshufd {{.*#+}} xmm1 = xmm1[1,1,3,3]
 | 
						|
; SSSE3-NEXT:    pand %xmm6, %xmm1
 | 
						|
; SSSE3-NEXT:    pshufd {{.*#+}} xmm4 = xmm4[1,1,3,3]
 | 
						|
; SSSE3-NEXT:    por %xmm1, %xmm4
 | 
						|
; SSSE3-NEXT:    pand %xmm4, %xmm3
 | 
						|
; SSSE3-NEXT:    pandn %xmm8, %xmm4
 | 
						|
; SSSE3-NEXT:    por %xmm3, %xmm4
 | 
						|
; SSSE3-NEXT:    pxor %xmm2, %xmm5
 | 
						|
; SSSE3-NEXT:    movdqa %xmm9, %xmm1
 | 
						|
; SSSE3-NEXT:    pcmpgtd %xmm5, %xmm1
 | 
						|
; SSSE3-NEXT:    pshufd {{.*#+}} xmm3 = xmm1[0,0,2,2]
 | 
						|
; SSSE3-NEXT:    pcmpeqd %xmm9, %xmm5
 | 
						|
; SSSE3-NEXT:    pshufd {{.*#+}} xmm5 = xmm5[1,1,3,3]
 | 
						|
; SSSE3-NEXT:    pand %xmm3, %xmm5
 | 
						|
; SSSE3-NEXT:    pshufd {{.*#+}} xmm1 = xmm1[1,1,3,3]
 | 
						|
; SSSE3-NEXT:    por %xmm5, %xmm1
 | 
						|
; SSSE3-NEXT:    pand %xmm1, %xmm2
 | 
						|
; SSSE3-NEXT:    pandn %xmm8, %xmm1
 | 
						|
; SSSE3-NEXT:    por %xmm2, %xmm1
 | 
						|
; SSSE3-NEXT:    packuswb %xmm4, %xmm1
 | 
						|
; SSSE3-NEXT:    packuswb %xmm1, %xmm0
 | 
						|
; SSSE3-NEXT:    retq
 | 
						|
;
 | 
						|
; SSE41-LABEL: trunc_usat_v8i64_v8i8:
 | 
						|
; SSE41:       # %bb.0:
 | 
						|
; SSE41-NEXT:    movdqa %xmm0, %xmm8
 | 
						|
; SSE41-NEXT:    movapd {{.*#+}} xmm5 = [255,255]
 | 
						|
; SSE41-NEXT:    movdqa {{.*#+}} xmm6 = [9223372039002259456,9223372039002259456]
 | 
						|
; SSE41-NEXT:    movdqa %xmm1, %xmm0
 | 
						|
; SSE41-NEXT:    pxor %xmm6, %xmm0
 | 
						|
; SSE41-NEXT:    movdqa {{.*#+}} xmm9 = [9223372039002259711,9223372039002259711]
 | 
						|
; SSE41-NEXT:    movdqa %xmm9, %xmm7
 | 
						|
; SSE41-NEXT:    pcmpgtd %xmm0, %xmm7
 | 
						|
; SSE41-NEXT:    pshufd {{.*#+}} xmm4 = xmm7[0,0,2,2]
 | 
						|
; SSE41-NEXT:    pcmpeqd %xmm9, %xmm0
 | 
						|
; SSE41-NEXT:    pshufd {{.*#+}} xmm0 = xmm0[1,1,3,3]
 | 
						|
; SSE41-NEXT:    pand %xmm4, %xmm0
 | 
						|
; SSE41-NEXT:    por %xmm7, %xmm0
 | 
						|
; SSE41-NEXT:    movapd %xmm5, %xmm4
 | 
						|
; SSE41-NEXT:    blendvpd %xmm0, %xmm1, %xmm4
 | 
						|
; SSE41-NEXT:    movdqa %xmm8, %xmm0
 | 
						|
; SSE41-NEXT:    pxor %xmm6, %xmm0
 | 
						|
; SSE41-NEXT:    movdqa %xmm9, %xmm1
 | 
						|
; SSE41-NEXT:    pcmpgtd %xmm0, %xmm1
 | 
						|
; SSE41-NEXT:    pshufd {{.*#+}} xmm7 = xmm1[0,0,2,2]
 | 
						|
; SSE41-NEXT:    pcmpeqd %xmm9, %xmm0
 | 
						|
; SSE41-NEXT:    pshufd {{.*#+}} xmm0 = xmm0[1,1,3,3]
 | 
						|
; SSE41-NEXT:    pand %xmm7, %xmm0
 | 
						|
; SSE41-NEXT:    por %xmm1, %xmm0
 | 
						|
; SSE41-NEXT:    movapd %xmm5, %xmm1
 | 
						|
; SSE41-NEXT:    blendvpd %xmm0, %xmm8, %xmm1
 | 
						|
; SSE41-NEXT:    packusdw %xmm4, %xmm1
 | 
						|
; SSE41-NEXT:    movdqa %xmm3, %xmm0
 | 
						|
; SSE41-NEXT:    pxor %xmm6, %xmm0
 | 
						|
; SSE41-NEXT:    movdqa %xmm9, %xmm4
 | 
						|
; SSE41-NEXT:    pcmpgtd %xmm0, %xmm4
 | 
						|
; SSE41-NEXT:    pshufd {{.*#+}} xmm7 = xmm4[0,0,2,2]
 | 
						|
; SSE41-NEXT:    pcmpeqd %xmm9, %xmm0
 | 
						|
; SSE41-NEXT:    pshufd {{.*#+}} xmm0 = xmm0[1,1,3,3]
 | 
						|
; SSE41-NEXT:    pand %xmm7, %xmm0
 | 
						|
; SSE41-NEXT:    por %xmm4, %xmm0
 | 
						|
; SSE41-NEXT:    movapd %xmm5, %xmm4
 | 
						|
; SSE41-NEXT:    blendvpd %xmm0, %xmm3, %xmm4
 | 
						|
; SSE41-NEXT:    pxor %xmm2, %xmm6
 | 
						|
; SSE41-NEXT:    movdqa %xmm9, %xmm3
 | 
						|
; SSE41-NEXT:    pcmpgtd %xmm6, %xmm3
 | 
						|
; SSE41-NEXT:    pshufd {{.*#+}} xmm7 = xmm3[0,0,2,2]
 | 
						|
; SSE41-NEXT:    pcmpeqd %xmm9, %xmm6
 | 
						|
; SSE41-NEXT:    pshufd {{.*#+}} xmm0 = xmm6[1,1,3,3]
 | 
						|
; SSE41-NEXT:    pand %xmm7, %xmm0
 | 
						|
; SSE41-NEXT:    por %xmm3, %xmm0
 | 
						|
; SSE41-NEXT:    blendvpd %xmm0, %xmm2, %xmm5
 | 
						|
; SSE41-NEXT:    packusdw %xmm4, %xmm5
 | 
						|
; SSE41-NEXT:    packusdw %xmm5, %xmm1
 | 
						|
; SSE41-NEXT:    movdqa %xmm1, %xmm0
 | 
						|
; SSE41-NEXT:    retq
 | 
						|
;
 | 
						|
; AVX1-LABEL: trunc_usat_v8i64_v8i8:
 | 
						|
; AVX1:       # %bb.0:
 | 
						|
; AVX1-NEXT:    vmovapd {{.*#+}} ymm2 = [255,255,255,255]
 | 
						|
; AVX1-NEXT:    vextractf128 $1, %ymm0, %xmm3
 | 
						|
; AVX1-NEXT:    vmovdqa {{.*#+}} xmm4 = [9223372036854775808,9223372036854775808]
 | 
						|
; AVX1-NEXT:    vpxor %xmm4, %xmm3, %xmm3
 | 
						|
; AVX1-NEXT:    vmovdqa {{.*#+}} xmm5 = [9223372036854776063,9223372036854776063]
 | 
						|
; AVX1-NEXT:    vpcmpgtq %xmm3, %xmm5, %xmm3
 | 
						|
; AVX1-NEXT:    vpxor %xmm4, %xmm0, %xmm6
 | 
						|
; AVX1-NEXT:    vpcmpgtq %xmm6, %xmm5, %xmm6
 | 
						|
; AVX1-NEXT:    vinsertf128 $1, %xmm3, %ymm6, %ymm3
 | 
						|
; AVX1-NEXT:    vblendvpd %ymm3, %ymm0, %ymm2, %ymm0
 | 
						|
; AVX1-NEXT:    vextractf128 $1, %ymm1, %xmm3
 | 
						|
; AVX1-NEXT:    vpxor %xmm4, %xmm3, %xmm3
 | 
						|
; AVX1-NEXT:    vpcmpgtq %xmm3, %xmm5, %xmm3
 | 
						|
; AVX1-NEXT:    vpxor %xmm4, %xmm1, %xmm4
 | 
						|
; AVX1-NEXT:    vpcmpgtq %xmm4, %xmm5, %xmm4
 | 
						|
; AVX1-NEXT:    vinsertf128 $1, %xmm3, %ymm4, %ymm3
 | 
						|
; AVX1-NEXT:    vblendvpd %ymm3, %ymm1, %ymm2, %ymm1
 | 
						|
; AVX1-NEXT:    vextractf128 $1, %ymm1, %xmm2
 | 
						|
; AVX1-NEXT:    vpackusdw %xmm2, %xmm1, %xmm1
 | 
						|
; AVX1-NEXT:    vextractf128 $1, %ymm0, %xmm2
 | 
						|
; AVX1-NEXT:    vpackusdw %xmm2, %xmm0, %xmm0
 | 
						|
; AVX1-NEXT:    vpackusdw %xmm1, %xmm0, %xmm0
 | 
						|
; AVX1-NEXT:    vzeroupper
 | 
						|
; AVX1-NEXT:    retq
 | 
						|
;
 | 
						|
; AVX2-LABEL: trunc_usat_v8i64_v8i8:
 | 
						|
; AVX2:       # %bb.0:
 | 
						|
; AVX2-NEXT:    vbroadcastsd {{.*#+}} ymm2 = [255,255,255,255]
 | 
						|
; AVX2-NEXT:    vpbroadcastq {{.*#+}} ymm3 = [9223372036854775808,9223372036854775808,9223372036854775808,9223372036854775808]
 | 
						|
; AVX2-NEXT:    vpxor %ymm3, %ymm1, %ymm4
 | 
						|
; AVX2-NEXT:    vpbroadcastq {{.*#+}} ymm5 = [9223372036854776063,9223372036854776063,9223372036854776063,9223372036854776063]
 | 
						|
; AVX2-NEXT:    vpcmpgtq %ymm4, %ymm5, %ymm4
 | 
						|
; AVX2-NEXT:    vblendvpd %ymm4, %ymm1, %ymm2, %ymm1
 | 
						|
; AVX2-NEXT:    vpxor %ymm3, %ymm0, %ymm3
 | 
						|
; AVX2-NEXT:    vpcmpgtq %ymm3, %ymm5, %ymm3
 | 
						|
; AVX2-NEXT:    vblendvpd %ymm3, %ymm0, %ymm2, %ymm0
 | 
						|
; AVX2-NEXT:    vpackusdw %ymm1, %ymm0, %ymm0
 | 
						|
; AVX2-NEXT:    vpermq {{.*#+}} ymm0 = ymm0[0,2,1,3]
 | 
						|
; AVX2-NEXT:    vextracti128 $1, %ymm0, %xmm1
 | 
						|
; AVX2-NEXT:    vpackusdw %xmm1, %xmm0, %xmm0
 | 
						|
; AVX2-NEXT:    vzeroupper
 | 
						|
; AVX2-NEXT:    retq
 | 
						|
;
 | 
						|
; AVX512-LABEL: trunc_usat_v8i64_v8i8:
 | 
						|
; AVX512:       # %bb.0:
 | 
						|
; AVX512-NEXT:    vpminuq {{.*}}(%rip){1to8}, %zmm0, %zmm0
 | 
						|
; AVX512-NEXT:    vpmovqw %zmm0, %xmm0
 | 
						|
; AVX512-NEXT:    vzeroupper
 | 
						|
; AVX512-NEXT:    retq
 | 
						|
  %1 = icmp ult <8 x i64> %a0, <i64 255, i64 255, i64 255, i64 255, i64 255, i64 255, i64 255, i64 255>
 | 
						|
  %2 = select <8 x i1> %1, <8 x i64> %a0, <8 x i64> <i64 255, i64 255, i64 255, i64 255, i64 255, i64 255, i64 255, i64 255>
 | 
						|
  %3 = trunc <8 x i64> %2 to <8 x i8>
 | 
						|
  ret <8 x i8> %3
 | 
						|
}
 | 
						|
 | 
						|
define void @trunc_usat_v8i64_v8i8_store(<8 x i64> %a0, <8 x i8> *%p1) {
 | 
						|
; SSE2-LABEL: trunc_usat_v8i64_v8i8_store:
 | 
						|
; SSE2:       # %bb.0:
 | 
						|
; SSE2-NEXT:    movdqa {{.*#+}} xmm8 = [255,255]
 | 
						|
; SSE2-NEXT:    movdqa {{.*#+}} xmm6 = [9223372039002259456,9223372039002259456]
 | 
						|
; SSE2-NEXT:    movdqa %xmm0, %xmm5
 | 
						|
; SSE2-NEXT:    pxor %xmm6, %xmm5
 | 
						|
; SSE2-NEXT:    movdqa {{.*#+}} xmm9 = [9223372039002259711,9223372039002259711]
 | 
						|
; SSE2-NEXT:    movdqa %xmm9, %xmm7
 | 
						|
; SSE2-NEXT:    pcmpgtd %xmm5, %xmm7
 | 
						|
; SSE2-NEXT:    pshufd {{.*#+}} xmm10 = xmm7[0,0,2,2]
 | 
						|
; SSE2-NEXT:    pcmpeqd %xmm9, %xmm5
 | 
						|
; SSE2-NEXT:    pshufd {{.*#+}} xmm4 = xmm5[1,1,3,3]
 | 
						|
; SSE2-NEXT:    pand %xmm10, %xmm4
 | 
						|
; SSE2-NEXT:    pshufd {{.*#+}} xmm5 = xmm7[1,1,3,3]
 | 
						|
; SSE2-NEXT:    por %xmm4, %xmm5
 | 
						|
; SSE2-NEXT:    pand %xmm5, %xmm0
 | 
						|
; SSE2-NEXT:    pandn %xmm8, %xmm5
 | 
						|
; SSE2-NEXT:    por %xmm0, %xmm5
 | 
						|
; SSE2-NEXT:    movdqa %xmm1, %xmm0
 | 
						|
; SSE2-NEXT:    pxor %xmm6, %xmm0
 | 
						|
; SSE2-NEXT:    movdqa %xmm9, %xmm4
 | 
						|
; SSE2-NEXT:    pcmpgtd %xmm0, %xmm4
 | 
						|
; SSE2-NEXT:    pshufd {{.*#+}} xmm10 = xmm4[0,0,2,2]
 | 
						|
; SSE2-NEXT:    pcmpeqd %xmm9, %xmm0
 | 
						|
; SSE2-NEXT:    pshufd {{.*#+}} xmm7 = xmm0[1,1,3,3]
 | 
						|
; SSE2-NEXT:    pand %xmm10, %xmm7
 | 
						|
; SSE2-NEXT:    pshufd {{.*#+}} xmm0 = xmm4[1,1,3,3]
 | 
						|
; SSE2-NEXT:    por %xmm7, %xmm0
 | 
						|
; SSE2-NEXT:    pand %xmm0, %xmm1
 | 
						|
; SSE2-NEXT:    pandn %xmm8, %xmm0
 | 
						|
; SSE2-NEXT:    por %xmm1, %xmm0
 | 
						|
; SSE2-NEXT:    movdqa %xmm2, %xmm1
 | 
						|
; SSE2-NEXT:    pxor %xmm6, %xmm1
 | 
						|
; SSE2-NEXT:    movdqa %xmm9, %xmm4
 | 
						|
; SSE2-NEXT:    pcmpgtd %xmm1, %xmm4
 | 
						|
; SSE2-NEXT:    pshufd {{.*#+}} xmm10 = xmm4[0,0,2,2]
 | 
						|
; SSE2-NEXT:    pcmpeqd %xmm9, %xmm1
 | 
						|
; SSE2-NEXT:    pshufd {{.*#+}} xmm7 = xmm1[1,1,3,3]
 | 
						|
; SSE2-NEXT:    pand %xmm10, %xmm7
 | 
						|
; SSE2-NEXT:    pshufd {{.*#+}} xmm1 = xmm4[1,1,3,3]
 | 
						|
; SSE2-NEXT:    por %xmm7, %xmm1
 | 
						|
; SSE2-NEXT:    pand %xmm1, %xmm2
 | 
						|
; SSE2-NEXT:    pandn %xmm8, %xmm1
 | 
						|
; SSE2-NEXT:    por %xmm2, %xmm1
 | 
						|
; SSE2-NEXT:    pxor %xmm3, %xmm6
 | 
						|
; SSE2-NEXT:    movdqa %xmm9, %xmm2
 | 
						|
; SSE2-NEXT:    pcmpgtd %xmm6, %xmm2
 | 
						|
; SSE2-NEXT:    pshufd {{.*#+}} xmm4 = xmm2[0,0,2,2]
 | 
						|
; SSE2-NEXT:    pcmpeqd %xmm9, %xmm6
 | 
						|
; SSE2-NEXT:    pshufd {{.*#+}} xmm6 = xmm6[1,1,3,3]
 | 
						|
; SSE2-NEXT:    pand %xmm4, %xmm6
 | 
						|
; SSE2-NEXT:    pshufd {{.*#+}} xmm2 = xmm2[1,1,3,3]
 | 
						|
; SSE2-NEXT:    por %xmm6, %xmm2
 | 
						|
; SSE2-NEXT:    pand %xmm2, %xmm3
 | 
						|
; SSE2-NEXT:    pandn %xmm8, %xmm2
 | 
						|
; SSE2-NEXT:    por %xmm3, %xmm2
 | 
						|
; SSE2-NEXT:    pand %xmm8, %xmm2
 | 
						|
; SSE2-NEXT:    pand %xmm8, %xmm1
 | 
						|
; SSE2-NEXT:    packuswb %xmm2, %xmm1
 | 
						|
; SSE2-NEXT:    pand %xmm8, %xmm0
 | 
						|
; SSE2-NEXT:    pand %xmm8, %xmm5
 | 
						|
; SSE2-NEXT:    packuswb %xmm0, %xmm5
 | 
						|
; SSE2-NEXT:    packuswb %xmm1, %xmm5
 | 
						|
; SSE2-NEXT:    packuswb %xmm5, %xmm5
 | 
						|
; SSE2-NEXT:    movq %xmm5, (%rdi)
 | 
						|
; SSE2-NEXT:    retq
 | 
						|
;
 | 
						|
; SSSE3-LABEL: trunc_usat_v8i64_v8i8_store:
 | 
						|
; SSSE3:       # %bb.0:
 | 
						|
; SSSE3-NEXT:    movdqa {{.*#+}} xmm8 = [255,255]
 | 
						|
; SSSE3-NEXT:    movdqa {{.*#+}} xmm6 = [9223372039002259456,9223372039002259456]
 | 
						|
; SSSE3-NEXT:    movdqa %xmm0, %xmm5
 | 
						|
; SSSE3-NEXT:    pxor %xmm6, %xmm5
 | 
						|
; SSSE3-NEXT:    movdqa {{.*#+}} xmm9 = [9223372039002259711,9223372039002259711]
 | 
						|
; SSSE3-NEXT:    movdqa %xmm9, %xmm7
 | 
						|
; SSSE3-NEXT:    pcmpgtd %xmm5, %xmm7
 | 
						|
; SSSE3-NEXT:    pshufd {{.*#+}} xmm10 = xmm7[0,0,2,2]
 | 
						|
; SSSE3-NEXT:    pcmpeqd %xmm9, %xmm5
 | 
						|
; SSSE3-NEXT:    pshufd {{.*#+}} xmm4 = xmm5[1,1,3,3]
 | 
						|
; SSSE3-NEXT:    pand %xmm10, %xmm4
 | 
						|
; SSSE3-NEXT:    pshufd {{.*#+}} xmm5 = xmm7[1,1,3,3]
 | 
						|
; SSSE3-NEXT:    por %xmm4, %xmm5
 | 
						|
; SSSE3-NEXT:    pand %xmm5, %xmm0
 | 
						|
; SSSE3-NEXT:    pandn %xmm8, %xmm5
 | 
						|
; SSSE3-NEXT:    por %xmm0, %xmm5
 | 
						|
; SSSE3-NEXT:    movdqa %xmm1, %xmm0
 | 
						|
; SSSE3-NEXT:    pxor %xmm6, %xmm0
 | 
						|
; SSSE3-NEXT:    movdqa %xmm9, %xmm4
 | 
						|
; SSSE3-NEXT:    pcmpgtd %xmm0, %xmm4
 | 
						|
; SSSE3-NEXT:    pshufd {{.*#+}} xmm10 = xmm4[0,0,2,2]
 | 
						|
; SSSE3-NEXT:    pcmpeqd %xmm9, %xmm0
 | 
						|
; SSSE3-NEXT:    pshufd {{.*#+}} xmm7 = xmm0[1,1,3,3]
 | 
						|
; SSSE3-NEXT:    pand %xmm10, %xmm7
 | 
						|
; SSSE3-NEXT:    pshufd {{.*#+}} xmm0 = xmm4[1,1,3,3]
 | 
						|
; SSSE3-NEXT:    por %xmm7, %xmm0
 | 
						|
; SSSE3-NEXT:    pand %xmm0, %xmm1
 | 
						|
; SSSE3-NEXT:    pandn %xmm8, %xmm0
 | 
						|
; SSSE3-NEXT:    por %xmm1, %xmm0
 | 
						|
; SSSE3-NEXT:    movdqa %xmm2, %xmm1
 | 
						|
; SSSE3-NEXT:    pxor %xmm6, %xmm1
 | 
						|
; SSSE3-NEXT:    movdqa %xmm9, %xmm4
 | 
						|
; SSSE3-NEXT:    pcmpgtd %xmm1, %xmm4
 | 
						|
; SSSE3-NEXT:    pshufd {{.*#+}} xmm10 = xmm4[0,0,2,2]
 | 
						|
; SSSE3-NEXT:    pcmpeqd %xmm9, %xmm1
 | 
						|
; SSSE3-NEXT:    pshufd {{.*#+}} xmm7 = xmm1[1,1,3,3]
 | 
						|
; SSSE3-NEXT:    pand %xmm10, %xmm7
 | 
						|
; SSSE3-NEXT:    pshufd {{.*#+}} xmm1 = xmm4[1,1,3,3]
 | 
						|
; SSSE3-NEXT:    por %xmm7, %xmm1
 | 
						|
; SSSE3-NEXT:    pand %xmm1, %xmm2
 | 
						|
; SSSE3-NEXT:    pandn %xmm8, %xmm1
 | 
						|
; SSSE3-NEXT:    por %xmm2, %xmm1
 | 
						|
; SSSE3-NEXT:    pxor %xmm3, %xmm6
 | 
						|
; SSSE3-NEXT:    movdqa %xmm9, %xmm2
 | 
						|
; SSSE3-NEXT:    pcmpgtd %xmm6, %xmm2
 | 
						|
; SSSE3-NEXT:    pshufd {{.*#+}} xmm4 = xmm2[0,0,2,2]
 | 
						|
; SSSE3-NEXT:    pcmpeqd %xmm9, %xmm6
 | 
						|
; SSSE3-NEXT:    pshufd {{.*#+}} xmm6 = xmm6[1,1,3,3]
 | 
						|
; SSSE3-NEXT:    pand %xmm4, %xmm6
 | 
						|
; SSSE3-NEXT:    pshufd {{.*#+}} xmm2 = xmm2[1,1,3,3]
 | 
						|
; SSSE3-NEXT:    por %xmm6, %xmm2
 | 
						|
; SSSE3-NEXT:    pand %xmm2, %xmm3
 | 
						|
; SSSE3-NEXT:    pandn %xmm8, %xmm2
 | 
						|
; SSSE3-NEXT:    por %xmm3, %xmm2
 | 
						|
; SSSE3-NEXT:    pand %xmm8, %xmm2
 | 
						|
; SSSE3-NEXT:    pand %xmm8, %xmm1
 | 
						|
; SSSE3-NEXT:    packuswb %xmm2, %xmm1
 | 
						|
; SSSE3-NEXT:    pand %xmm8, %xmm0
 | 
						|
; SSSE3-NEXT:    pand %xmm8, %xmm5
 | 
						|
; SSSE3-NEXT:    packuswb %xmm0, %xmm5
 | 
						|
; SSSE3-NEXT:    packuswb %xmm1, %xmm5
 | 
						|
; SSSE3-NEXT:    packuswb %xmm5, %xmm5
 | 
						|
; SSSE3-NEXT:    movq %xmm5, (%rdi)
 | 
						|
; SSSE3-NEXT:    retq
 | 
						|
;
 | 
						|
; SSE41-LABEL: trunc_usat_v8i64_v8i8_store:
 | 
						|
; SSE41:       # %bb.0:
 | 
						|
; SSE41-NEXT:    movdqa %xmm0, %xmm4
 | 
						|
; SSE41-NEXT:    movapd {{.*#+}} xmm8 = [255,255]
 | 
						|
; SSE41-NEXT:    movdqa {{.*#+}} xmm7 = [9223372039002259456,9223372039002259456]
 | 
						|
; SSE41-NEXT:    pxor %xmm7, %xmm0
 | 
						|
; SSE41-NEXT:    movdqa {{.*#+}} xmm9 = [9223372039002259711,9223372039002259711]
 | 
						|
; SSE41-NEXT:    movdqa %xmm9, %xmm6
 | 
						|
; SSE41-NEXT:    pcmpgtd %xmm0, %xmm6
 | 
						|
; SSE41-NEXT:    pshufd {{.*#+}} xmm5 = xmm6[0,0,2,2]
 | 
						|
; SSE41-NEXT:    pcmpeqd %xmm9, %xmm0
 | 
						|
; SSE41-NEXT:    pshufd {{.*#+}} xmm0 = xmm0[1,1,3,3]
 | 
						|
; SSE41-NEXT:    pand %xmm5, %xmm0
 | 
						|
; SSE41-NEXT:    por %xmm6, %xmm0
 | 
						|
; SSE41-NEXT:    movapd %xmm8, %xmm6
 | 
						|
; SSE41-NEXT:    blendvpd %xmm0, %xmm4, %xmm6
 | 
						|
; SSE41-NEXT:    movdqa %xmm1, %xmm0
 | 
						|
; SSE41-NEXT:    pxor %xmm7, %xmm0
 | 
						|
; SSE41-NEXT:    movdqa %xmm9, %xmm4
 | 
						|
; SSE41-NEXT:    pcmpgtd %xmm0, %xmm4
 | 
						|
; SSE41-NEXT:    pshufd {{.*#+}} xmm5 = xmm4[0,0,2,2]
 | 
						|
; SSE41-NEXT:    pcmpeqd %xmm9, %xmm0
 | 
						|
; SSE41-NEXT:    pshufd {{.*#+}} xmm0 = xmm0[1,1,3,3]
 | 
						|
; SSE41-NEXT:    pand %xmm5, %xmm0
 | 
						|
; SSE41-NEXT:    por %xmm4, %xmm0
 | 
						|
; SSE41-NEXT:    movapd %xmm8, %xmm4
 | 
						|
; SSE41-NEXT:    blendvpd %xmm0, %xmm1, %xmm4
 | 
						|
; SSE41-NEXT:    movdqa %xmm2, %xmm0
 | 
						|
; SSE41-NEXT:    pxor %xmm7, %xmm0
 | 
						|
; SSE41-NEXT:    movdqa %xmm9, %xmm1
 | 
						|
; SSE41-NEXT:    pcmpgtd %xmm0, %xmm1
 | 
						|
; SSE41-NEXT:    pshufd {{.*#+}} xmm5 = xmm1[0,0,2,2]
 | 
						|
; SSE41-NEXT:    pcmpeqd %xmm9, %xmm0
 | 
						|
; SSE41-NEXT:    pshufd {{.*#+}} xmm0 = xmm0[1,1,3,3]
 | 
						|
; SSE41-NEXT:    pand %xmm5, %xmm0
 | 
						|
; SSE41-NEXT:    por %xmm1, %xmm0
 | 
						|
; SSE41-NEXT:    movapd %xmm8, %xmm1
 | 
						|
; SSE41-NEXT:    blendvpd %xmm0, %xmm2, %xmm1
 | 
						|
; SSE41-NEXT:    pxor %xmm3, %xmm7
 | 
						|
; SSE41-NEXT:    movdqa %xmm9, %xmm2
 | 
						|
; SSE41-NEXT:    pcmpgtd %xmm7, %xmm2
 | 
						|
; SSE41-NEXT:    pshufd {{.*#+}} xmm5 = xmm2[0,0,2,2]
 | 
						|
; SSE41-NEXT:    pcmpeqd %xmm9, %xmm7
 | 
						|
; SSE41-NEXT:    pshufd {{.*#+}} xmm0 = xmm7[1,1,3,3]
 | 
						|
; SSE41-NEXT:    pand %xmm5, %xmm0
 | 
						|
; SSE41-NEXT:    por %xmm2, %xmm0
 | 
						|
; SSE41-NEXT:    movapd %xmm8, %xmm2
 | 
						|
; SSE41-NEXT:    blendvpd %xmm0, %xmm3, %xmm2
 | 
						|
; SSE41-NEXT:    andpd %xmm8, %xmm2
 | 
						|
; SSE41-NEXT:    andpd %xmm8, %xmm1
 | 
						|
; SSE41-NEXT:    packusdw %xmm2, %xmm1
 | 
						|
; SSE41-NEXT:    andpd %xmm8, %xmm4
 | 
						|
; SSE41-NEXT:    andpd %xmm8, %xmm6
 | 
						|
; SSE41-NEXT:    packusdw %xmm4, %xmm6
 | 
						|
; SSE41-NEXT:    packusdw %xmm1, %xmm6
 | 
						|
; SSE41-NEXT:    packuswb %xmm6, %xmm6
 | 
						|
; SSE41-NEXT:    movq %xmm6, (%rdi)
 | 
						|
; SSE41-NEXT:    retq
 | 
						|
;
 | 
						|
; AVX1-LABEL: trunc_usat_v8i64_v8i8_store:
 | 
						|
; AVX1:       # %bb.0:
 | 
						|
; AVX1-NEXT:    vmovapd {{.*#+}} ymm2 = [255,255,255,255]
 | 
						|
; AVX1-NEXT:    vextractf128 $1, %ymm0, %xmm3
 | 
						|
; AVX1-NEXT:    vmovdqa {{.*#+}} xmm4 = [9223372036854775808,9223372036854775808]
 | 
						|
; AVX1-NEXT:    vpxor %xmm4, %xmm3, %xmm3
 | 
						|
; AVX1-NEXT:    vmovdqa {{.*#+}} xmm5 = [9223372036854776063,9223372036854776063]
 | 
						|
; AVX1-NEXT:    vpcmpgtq %xmm3, %xmm5, %xmm3
 | 
						|
; AVX1-NEXT:    vpxor %xmm4, %xmm0, %xmm6
 | 
						|
; AVX1-NEXT:    vpcmpgtq %xmm6, %xmm5, %xmm6
 | 
						|
; AVX1-NEXT:    vinsertf128 $1, %xmm3, %ymm6, %ymm3
 | 
						|
; AVX1-NEXT:    vblendvpd %ymm3, %ymm0, %ymm2, %ymm0
 | 
						|
; AVX1-NEXT:    vextractf128 $1, %ymm1, %xmm3
 | 
						|
; AVX1-NEXT:    vpxor %xmm4, %xmm3, %xmm3
 | 
						|
; AVX1-NEXT:    vpcmpgtq %xmm3, %xmm5, %xmm3
 | 
						|
; AVX1-NEXT:    vpxor %xmm4, %xmm1, %xmm4
 | 
						|
; AVX1-NEXT:    vpcmpgtq %xmm4, %xmm5, %xmm4
 | 
						|
; AVX1-NEXT:    vinsertf128 $1, %xmm3, %ymm4, %ymm3
 | 
						|
; AVX1-NEXT:    vblendvpd %ymm3, %ymm1, %ymm2, %ymm1
 | 
						|
; AVX1-NEXT:    vextractf128 $1, %ymm1, %xmm2
 | 
						|
; AVX1-NEXT:    vmovddup {{.*#+}} xmm3 = [1.2598673968951787E-321,1.2598673968951787E-321]
 | 
						|
; AVX1-NEXT:    # xmm3 = mem[0,0]
 | 
						|
; AVX1-NEXT:    vandpd %xmm3, %xmm2, %xmm2
 | 
						|
; AVX1-NEXT:    vandpd %xmm3, %xmm1, %xmm1
 | 
						|
; AVX1-NEXT:    vpackusdw %xmm2, %xmm1, %xmm1
 | 
						|
; AVX1-NEXT:    vextractf128 $1, %ymm0, %xmm2
 | 
						|
; AVX1-NEXT:    vandpd %xmm3, %xmm2, %xmm2
 | 
						|
; AVX1-NEXT:    vandpd %xmm3, %xmm0, %xmm0
 | 
						|
; AVX1-NEXT:    vpackusdw %xmm2, %xmm0, %xmm0
 | 
						|
; AVX1-NEXT:    vpackusdw %xmm1, %xmm0, %xmm0
 | 
						|
; AVX1-NEXT:    vpackuswb %xmm0, %xmm0, %xmm0
 | 
						|
; AVX1-NEXT:    vmovq %xmm0, (%rdi)
 | 
						|
; AVX1-NEXT:    vzeroupper
 | 
						|
; AVX1-NEXT:    retq
 | 
						|
;
 | 
						|
; AVX2-LABEL: trunc_usat_v8i64_v8i8_store:
 | 
						|
; AVX2:       # %bb.0:
 | 
						|
; AVX2-NEXT:    vbroadcastsd {{.*#+}} ymm2 = [255,255,255,255]
 | 
						|
; AVX2-NEXT:    vpbroadcastq {{.*#+}} ymm3 = [9223372036854775808,9223372036854775808,9223372036854775808,9223372036854775808]
 | 
						|
; AVX2-NEXT:    vpxor %ymm3, %ymm0, %ymm4
 | 
						|
; AVX2-NEXT:    vpbroadcastq {{.*#+}} ymm5 = [9223372036854776063,9223372036854776063,9223372036854776063,9223372036854776063]
 | 
						|
; AVX2-NEXT:    vpcmpgtq %ymm4, %ymm5, %ymm4
 | 
						|
; AVX2-NEXT:    vblendvpd %ymm4, %ymm0, %ymm2, %ymm0
 | 
						|
; AVX2-NEXT:    vpxor %ymm3, %ymm1, %ymm3
 | 
						|
; AVX2-NEXT:    vpcmpgtq %ymm3, %ymm5, %ymm3
 | 
						|
; AVX2-NEXT:    vblendvpd %ymm3, %ymm1, %ymm2, %ymm1
 | 
						|
; AVX2-NEXT:    vextractf128 $1, %ymm1, %xmm2
 | 
						|
; AVX2-NEXT:    vpackusdw %xmm2, %xmm1, %xmm1
 | 
						|
; AVX2-NEXT:    vmovdqa {{.*#+}} xmm2 = [0,1,4,5,8,9,12,13,8,9,12,13,12,13,14,15]
 | 
						|
; AVX2-NEXT:    vpshufb %xmm2, %xmm1, %xmm1
 | 
						|
; AVX2-NEXT:    vextractf128 $1, %ymm0, %xmm3
 | 
						|
; AVX2-NEXT:    vpackusdw %xmm3, %xmm0, %xmm0
 | 
						|
; AVX2-NEXT:    vpshufb %xmm2, %xmm0, %xmm0
 | 
						|
; AVX2-NEXT:    vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm1[0]
 | 
						|
; AVX2-NEXT:    vpshufb {{.*#+}} xmm0 = xmm0[0,2,4,6,8,10,12,14,u,u,u,u,u,u,u,u]
 | 
						|
; AVX2-NEXT:    vmovq %xmm0, (%rdi)
 | 
						|
; AVX2-NEXT:    vzeroupper
 | 
						|
; AVX2-NEXT:    retq
 | 
						|
;
 | 
						|
; AVX512-LABEL: trunc_usat_v8i64_v8i8_store:
 | 
						|
; AVX512:       # %bb.0:
 | 
						|
; AVX512-NEXT:    vpmovusqb %zmm0, (%rdi)
 | 
						|
; AVX512-NEXT:    vzeroupper
 | 
						|
; AVX512-NEXT:    retq
 | 
						|
  %1 = icmp ult <8 x i64> %a0, <i64 255, i64 255, i64 255, i64 255, i64 255, i64 255, i64 255, i64 255>
 | 
						|
  %2 = select <8 x i1> %1, <8 x i64> %a0, <8 x i64> <i64 255, i64 255, i64 255, i64 255, i64 255, i64 255, i64 255, i64 255>
 | 
						|
  %3 = trunc <8 x i64> %2 to <8 x i8>
 | 
						|
  store <8 x i8> %3, <8 x i8> *%p1
 | 
						|
  ret void
 | 
						|
}
 | 
						|
 | 
						|
define <16 x i8> @trunc_usat_v16i64_v16i8(<16 x i64> %a0) {
 | 
						|
; SSE2-LABEL: trunc_usat_v16i64_v16i8:
 | 
						|
; SSE2:       # %bb.0:
 | 
						|
; SSE2-NEXT:    movdqa {{.*#+}} xmm8 = [255,255]
 | 
						|
; SSE2-NEXT:    movdqa {{.*#+}} xmm9 = [9223372039002259456,9223372039002259456]
 | 
						|
; SSE2-NEXT:    movdqa %xmm1, %xmm11
 | 
						|
; SSE2-NEXT:    pxor %xmm9, %xmm11
 | 
						|
; SSE2-NEXT:    movdqa {{.*#+}} xmm10 = [9223372039002259711,9223372039002259711]
 | 
						|
; SSE2-NEXT:    movdqa %xmm10, %xmm12
 | 
						|
; SSE2-NEXT:    pcmpgtd %xmm11, %xmm12
 | 
						|
; SSE2-NEXT:    pshufd {{.*#+}} xmm13 = xmm12[0,0,2,2]
 | 
						|
; SSE2-NEXT:    pcmpeqd %xmm10, %xmm11
 | 
						|
; SSE2-NEXT:    pshufd {{.*#+}} xmm11 = xmm11[1,1,3,3]
 | 
						|
; SSE2-NEXT:    pand %xmm13, %xmm11
 | 
						|
; SSE2-NEXT:    pshufd {{.*#+}} xmm12 = xmm12[1,1,3,3]
 | 
						|
; SSE2-NEXT:    por %xmm11, %xmm12
 | 
						|
; SSE2-NEXT:    pand %xmm12, %xmm1
 | 
						|
; SSE2-NEXT:    pandn %xmm8, %xmm12
 | 
						|
; SSE2-NEXT:    por %xmm1, %xmm12
 | 
						|
; SSE2-NEXT:    movdqa %xmm0, %xmm1
 | 
						|
; SSE2-NEXT:    pxor %xmm9, %xmm1
 | 
						|
; SSE2-NEXT:    movdqa %xmm10, %xmm11
 | 
						|
; SSE2-NEXT:    pcmpgtd %xmm1, %xmm11
 | 
						|
; SSE2-NEXT:    pshufd {{.*#+}} xmm13 = xmm11[0,0,2,2]
 | 
						|
; SSE2-NEXT:    pcmpeqd %xmm10, %xmm1
 | 
						|
; SSE2-NEXT:    pshufd {{.*#+}} xmm14 = xmm1[1,1,3,3]
 | 
						|
; SSE2-NEXT:    pand %xmm13, %xmm14
 | 
						|
; SSE2-NEXT:    pshufd {{.*#+}} xmm1 = xmm11[1,1,3,3]
 | 
						|
; SSE2-NEXT:    por %xmm14, %xmm1
 | 
						|
; SSE2-NEXT:    pand %xmm1, %xmm0
 | 
						|
; SSE2-NEXT:    pandn %xmm8, %xmm1
 | 
						|
; SSE2-NEXT:    por %xmm1, %xmm0
 | 
						|
; SSE2-NEXT:    packuswb %xmm12, %xmm0
 | 
						|
; SSE2-NEXT:    movdqa %xmm3, %xmm1
 | 
						|
; SSE2-NEXT:    pxor %xmm9, %xmm1
 | 
						|
; SSE2-NEXT:    movdqa %xmm10, %xmm11
 | 
						|
; SSE2-NEXT:    pcmpgtd %xmm1, %xmm11
 | 
						|
; SSE2-NEXT:    pshufd {{.*#+}} xmm12 = xmm11[0,0,2,2]
 | 
						|
; SSE2-NEXT:    pcmpeqd %xmm10, %xmm1
 | 
						|
; SSE2-NEXT:    pshufd {{.*#+}} xmm13 = xmm1[1,1,3,3]
 | 
						|
; SSE2-NEXT:    pand %xmm12, %xmm13
 | 
						|
; SSE2-NEXT:    pshufd {{.*#+}} xmm1 = xmm11[1,1,3,3]
 | 
						|
; SSE2-NEXT:    por %xmm13, %xmm1
 | 
						|
; SSE2-NEXT:    pand %xmm1, %xmm3
 | 
						|
; SSE2-NEXT:    pandn %xmm8, %xmm1
 | 
						|
; SSE2-NEXT:    por %xmm3, %xmm1
 | 
						|
; SSE2-NEXT:    movdqa %xmm2, %xmm3
 | 
						|
; SSE2-NEXT:    pxor %xmm9, %xmm3
 | 
						|
; SSE2-NEXT:    movdqa %xmm10, %xmm11
 | 
						|
; SSE2-NEXT:    pcmpgtd %xmm3, %xmm11
 | 
						|
; SSE2-NEXT:    pshufd {{.*#+}} xmm12 = xmm11[0,0,2,2]
 | 
						|
; SSE2-NEXT:    pcmpeqd %xmm10, %xmm3
 | 
						|
; SSE2-NEXT:    pshufd {{.*#+}} xmm13 = xmm3[1,1,3,3]
 | 
						|
; SSE2-NEXT:    pand %xmm12, %xmm13
 | 
						|
; SSE2-NEXT:    pshufd {{.*#+}} xmm3 = xmm11[1,1,3,3]
 | 
						|
; SSE2-NEXT:    por %xmm13, %xmm3
 | 
						|
; SSE2-NEXT:    pand %xmm3, %xmm2
 | 
						|
; SSE2-NEXT:    pandn %xmm8, %xmm3
 | 
						|
; SSE2-NEXT:    por %xmm2, %xmm3
 | 
						|
; SSE2-NEXT:    packuswb %xmm1, %xmm3
 | 
						|
; SSE2-NEXT:    packuswb %xmm3, %xmm0
 | 
						|
; SSE2-NEXT:    movdqa %xmm5, %xmm1
 | 
						|
; SSE2-NEXT:    pxor %xmm9, %xmm1
 | 
						|
; SSE2-NEXT:    movdqa %xmm10, %xmm2
 | 
						|
; SSE2-NEXT:    pcmpgtd %xmm1, %xmm2
 | 
						|
; SSE2-NEXT:    pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
 | 
						|
; SSE2-NEXT:    pcmpeqd %xmm10, %xmm1
 | 
						|
; SSE2-NEXT:    pshufd {{.*#+}} xmm1 = xmm1[1,1,3,3]
 | 
						|
; SSE2-NEXT:    pand %xmm3, %xmm1
 | 
						|
; SSE2-NEXT:    pshufd {{.*#+}} xmm2 = xmm2[1,1,3,3]
 | 
						|
; SSE2-NEXT:    por %xmm1, %xmm2
 | 
						|
; SSE2-NEXT:    pand %xmm2, %xmm5
 | 
						|
; SSE2-NEXT:    pandn %xmm8, %xmm2
 | 
						|
; SSE2-NEXT:    por %xmm5, %xmm2
 | 
						|
; SSE2-NEXT:    movdqa %xmm4, %xmm1
 | 
						|
; SSE2-NEXT:    pxor %xmm9, %xmm1
 | 
						|
; SSE2-NEXT:    movdqa %xmm10, %xmm3
 | 
						|
; SSE2-NEXT:    pcmpgtd %xmm1, %xmm3
 | 
						|
; SSE2-NEXT:    pshufd {{.*#+}} xmm11 = xmm3[0,0,2,2]
 | 
						|
; SSE2-NEXT:    pcmpeqd %xmm10, %xmm1
 | 
						|
; SSE2-NEXT:    pshufd {{.*#+}} xmm5 = xmm1[1,1,3,3]
 | 
						|
; SSE2-NEXT:    pand %xmm11, %xmm5
 | 
						|
; SSE2-NEXT:    pshufd {{.*#+}} xmm1 = xmm3[1,1,3,3]
 | 
						|
; SSE2-NEXT:    por %xmm5, %xmm1
 | 
						|
; SSE2-NEXT:    pand %xmm1, %xmm4
 | 
						|
; SSE2-NEXT:    pandn %xmm8, %xmm1
 | 
						|
; SSE2-NEXT:    por %xmm4, %xmm1
 | 
						|
; SSE2-NEXT:    packuswb %xmm2, %xmm1
 | 
						|
; SSE2-NEXT:    movdqa %xmm7, %xmm2
 | 
						|
; SSE2-NEXT:    pxor %xmm9, %xmm2
 | 
						|
; SSE2-NEXT:    movdqa %xmm10, %xmm3
 | 
						|
; SSE2-NEXT:    pcmpgtd %xmm2, %xmm3
 | 
						|
; SSE2-NEXT:    pshufd {{.*#+}} xmm4 = xmm3[0,0,2,2]
 | 
						|
; SSE2-NEXT:    pcmpeqd %xmm10, %xmm2
 | 
						|
; SSE2-NEXT:    pshufd {{.*#+}} xmm2 = xmm2[1,1,3,3]
 | 
						|
; SSE2-NEXT:    pand %xmm4, %xmm2
 | 
						|
; SSE2-NEXT:    pshufd {{.*#+}} xmm3 = xmm3[1,1,3,3]
 | 
						|
; SSE2-NEXT:    por %xmm2, %xmm3
 | 
						|
; SSE2-NEXT:    pand %xmm3, %xmm7
 | 
						|
; SSE2-NEXT:    pandn %xmm8, %xmm3
 | 
						|
; SSE2-NEXT:    por %xmm7, %xmm3
 | 
						|
; SSE2-NEXT:    pxor %xmm6, %xmm9
 | 
						|
; SSE2-NEXT:    movdqa %xmm10, %xmm2
 | 
						|
; SSE2-NEXT:    pcmpgtd %xmm9, %xmm2
 | 
						|
; SSE2-NEXT:    pshufd {{.*#+}} xmm4 = xmm2[0,0,2,2]
 | 
						|
; SSE2-NEXT:    pcmpeqd %xmm10, %xmm9
 | 
						|
; SSE2-NEXT:    pshufd {{.*#+}} xmm5 = xmm9[1,1,3,3]
 | 
						|
; SSE2-NEXT:    pand %xmm4, %xmm5
 | 
						|
; SSE2-NEXT:    pshufd {{.*#+}} xmm2 = xmm2[1,1,3,3]
 | 
						|
; SSE2-NEXT:    por %xmm5, %xmm2
 | 
						|
; SSE2-NEXT:    pand %xmm2, %xmm6
 | 
						|
; SSE2-NEXT:    pandn %xmm8, %xmm2
 | 
						|
; SSE2-NEXT:    por %xmm6, %xmm2
 | 
						|
; SSE2-NEXT:    packuswb %xmm3, %xmm2
 | 
						|
; SSE2-NEXT:    packuswb %xmm2, %xmm1
 | 
						|
; SSE2-NEXT:    packuswb %xmm1, %xmm0
 | 
						|
; SSE2-NEXT:    retq
 | 
						|
;
 | 
						|
; SSSE3-LABEL: trunc_usat_v16i64_v16i8:
 | 
						|
; SSSE3:       # %bb.0:
 | 
						|
; SSSE3-NEXT:    movdqa {{.*#+}} xmm8 = [255,255]
 | 
						|
; SSSE3-NEXT:    movdqa {{.*#+}} xmm9 = [9223372039002259456,9223372039002259456]
 | 
						|
; SSSE3-NEXT:    movdqa %xmm1, %xmm11
 | 
						|
; SSSE3-NEXT:    pxor %xmm9, %xmm11
 | 
						|
; SSSE3-NEXT:    movdqa {{.*#+}} xmm10 = [9223372039002259711,9223372039002259711]
 | 
						|
; SSSE3-NEXT:    movdqa %xmm10, %xmm12
 | 
						|
; SSSE3-NEXT:    pcmpgtd %xmm11, %xmm12
 | 
						|
; SSSE3-NEXT:    pshufd {{.*#+}} xmm13 = xmm12[0,0,2,2]
 | 
						|
; SSSE3-NEXT:    pcmpeqd %xmm10, %xmm11
 | 
						|
; SSSE3-NEXT:    pshufd {{.*#+}} xmm11 = xmm11[1,1,3,3]
 | 
						|
; SSSE3-NEXT:    pand %xmm13, %xmm11
 | 
						|
; SSSE3-NEXT:    pshufd {{.*#+}} xmm12 = xmm12[1,1,3,3]
 | 
						|
; SSSE3-NEXT:    por %xmm11, %xmm12
 | 
						|
; SSSE3-NEXT:    pand %xmm12, %xmm1
 | 
						|
; SSSE3-NEXT:    pandn %xmm8, %xmm12
 | 
						|
; SSSE3-NEXT:    por %xmm1, %xmm12
 | 
						|
; SSSE3-NEXT:    movdqa %xmm0, %xmm1
 | 
						|
; SSSE3-NEXT:    pxor %xmm9, %xmm1
 | 
						|
; SSSE3-NEXT:    movdqa %xmm10, %xmm11
 | 
						|
; SSSE3-NEXT:    pcmpgtd %xmm1, %xmm11
 | 
						|
; SSSE3-NEXT:    pshufd {{.*#+}} xmm13 = xmm11[0,0,2,2]
 | 
						|
; SSSE3-NEXT:    pcmpeqd %xmm10, %xmm1
 | 
						|
; SSSE3-NEXT:    pshufd {{.*#+}} xmm14 = xmm1[1,1,3,3]
 | 
						|
; SSSE3-NEXT:    pand %xmm13, %xmm14
 | 
						|
; SSSE3-NEXT:    pshufd {{.*#+}} xmm1 = xmm11[1,1,3,3]
 | 
						|
; SSSE3-NEXT:    por %xmm14, %xmm1
 | 
						|
; SSSE3-NEXT:    pand %xmm1, %xmm0
 | 
						|
; SSSE3-NEXT:    pandn %xmm8, %xmm1
 | 
						|
; SSSE3-NEXT:    por %xmm1, %xmm0
 | 
						|
; SSSE3-NEXT:    packuswb %xmm12, %xmm0
 | 
						|
; SSSE3-NEXT:    movdqa %xmm3, %xmm1
 | 
						|
; SSSE3-NEXT:    pxor %xmm9, %xmm1
 | 
						|
; SSSE3-NEXT:    movdqa %xmm10, %xmm11
 | 
						|
; SSSE3-NEXT:    pcmpgtd %xmm1, %xmm11
 | 
						|
; SSSE3-NEXT:    pshufd {{.*#+}} xmm12 = xmm11[0,0,2,2]
 | 
						|
; SSSE3-NEXT:    pcmpeqd %xmm10, %xmm1
 | 
						|
; SSSE3-NEXT:    pshufd {{.*#+}} xmm13 = xmm1[1,1,3,3]
 | 
						|
; SSSE3-NEXT:    pand %xmm12, %xmm13
 | 
						|
; SSSE3-NEXT:    pshufd {{.*#+}} xmm1 = xmm11[1,1,3,3]
 | 
						|
; SSSE3-NEXT:    por %xmm13, %xmm1
 | 
						|
; SSSE3-NEXT:    pand %xmm1, %xmm3
 | 
						|
; SSSE3-NEXT:    pandn %xmm8, %xmm1
 | 
						|
; SSSE3-NEXT:    por %xmm3, %xmm1
 | 
						|
; SSSE3-NEXT:    movdqa %xmm2, %xmm3
 | 
						|
; SSSE3-NEXT:    pxor %xmm9, %xmm3
 | 
						|
; SSSE3-NEXT:    movdqa %xmm10, %xmm11
 | 
						|
; SSSE3-NEXT:    pcmpgtd %xmm3, %xmm11
 | 
						|
; SSSE3-NEXT:    pshufd {{.*#+}} xmm12 = xmm11[0,0,2,2]
 | 
						|
; SSSE3-NEXT:    pcmpeqd %xmm10, %xmm3
 | 
						|
; SSSE3-NEXT:    pshufd {{.*#+}} xmm13 = xmm3[1,1,3,3]
 | 
						|
; SSSE3-NEXT:    pand %xmm12, %xmm13
 | 
						|
; SSSE3-NEXT:    pshufd {{.*#+}} xmm3 = xmm11[1,1,3,3]
 | 
						|
; SSSE3-NEXT:    por %xmm13, %xmm3
 | 
						|
; SSSE3-NEXT:    pand %xmm3, %xmm2
 | 
						|
; SSSE3-NEXT:    pandn %xmm8, %xmm3
 | 
						|
; SSSE3-NEXT:    por %xmm2, %xmm3
 | 
						|
; SSSE3-NEXT:    packuswb %xmm1, %xmm3
 | 
						|
; SSSE3-NEXT:    packuswb %xmm3, %xmm0
 | 
						|
; SSSE3-NEXT:    movdqa %xmm5, %xmm1
 | 
						|
; SSSE3-NEXT:    pxor %xmm9, %xmm1
 | 
						|
; SSSE3-NEXT:    movdqa %xmm10, %xmm2
 | 
						|
; SSSE3-NEXT:    pcmpgtd %xmm1, %xmm2
 | 
						|
; SSSE3-NEXT:    pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
 | 
						|
; SSSE3-NEXT:    pcmpeqd %xmm10, %xmm1
 | 
						|
; SSSE3-NEXT:    pshufd {{.*#+}} xmm1 = xmm1[1,1,3,3]
 | 
						|
; SSSE3-NEXT:    pand %xmm3, %xmm1
 | 
						|
; SSSE3-NEXT:    pshufd {{.*#+}} xmm2 = xmm2[1,1,3,3]
 | 
						|
; SSSE3-NEXT:    por %xmm1, %xmm2
 | 
						|
; SSSE3-NEXT:    pand %xmm2, %xmm5
 | 
						|
; SSSE3-NEXT:    pandn %xmm8, %xmm2
 | 
						|
; SSSE3-NEXT:    por %xmm5, %xmm2
 | 
						|
; SSSE3-NEXT:    movdqa %xmm4, %xmm1
 | 
						|
; SSSE3-NEXT:    pxor %xmm9, %xmm1
 | 
						|
; SSSE3-NEXT:    movdqa %xmm10, %xmm3
 | 
						|
; SSSE3-NEXT:    pcmpgtd %xmm1, %xmm3
 | 
						|
; SSSE3-NEXT:    pshufd {{.*#+}} xmm11 = xmm3[0,0,2,2]
 | 
						|
; SSSE3-NEXT:    pcmpeqd %xmm10, %xmm1
 | 
						|
; SSSE3-NEXT:    pshufd {{.*#+}} xmm5 = xmm1[1,1,3,3]
 | 
						|
; SSSE3-NEXT:    pand %xmm11, %xmm5
 | 
						|
; SSSE3-NEXT:    pshufd {{.*#+}} xmm1 = xmm3[1,1,3,3]
 | 
						|
; SSSE3-NEXT:    por %xmm5, %xmm1
 | 
						|
; SSSE3-NEXT:    pand %xmm1, %xmm4
 | 
						|
; SSSE3-NEXT:    pandn %xmm8, %xmm1
 | 
						|
; SSSE3-NEXT:    por %xmm4, %xmm1
 | 
						|
; SSSE3-NEXT:    packuswb %xmm2, %xmm1
 | 
						|
; SSSE3-NEXT:    movdqa %xmm7, %xmm2
 | 
						|
; SSSE3-NEXT:    pxor %xmm9, %xmm2
 | 
						|
; SSSE3-NEXT:    movdqa %xmm10, %xmm3
 | 
						|
; SSSE3-NEXT:    pcmpgtd %xmm2, %xmm3
 | 
						|
; SSSE3-NEXT:    pshufd {{.*#+}} xmm4 = xmm3[0,0,2,2]
 | 
						|
; SSSE3-NEXT:    pcmpeqd %xmm10, %xmm2
 | 
						|
; SSSE3-NEXT:    pshufd {{.*#+}} xmm2 = xmm2[1,1,3,3]
 | 
						|
; SSSE3-NEXT:    pand %xmm4, %xmm2
 | 
						|
; SSSE3-NEXT:    pshufd {{.*#+}} xmm3 = xmm3[1,1,3,3]
 | 
						|
; SSSE3-NEXT:    por %xmm2, %xmm3
 | 
						|
; SSSE3-NEXT:    pand %xmm3, %xmm7
 | 
						|
; SSSE3-NEXT:    pandn %xmm8, %xmm3
 | 
						|
; SSSE3-NEXT:    por %xmm7, %xmm3
 | 
						|
; SSSE3-NEXT:    pxor %xmm6, %xmm9
 | 
						|
; SSSE3-NEXT:    movdqa %xmm10, %xmm2
 | 
						|
; SSSE3-NEXT:    pcmpgtd %xmm9, %xmm2
 | 
						|
; SSSE3-NEXT:    pshufd {{.*#+}} xmm4 = xmm2[0,0,2,2]
 | 
						|
; SSSE3-NEXT:    pcmpeqd %xmm10, %xmm9
 | 
						|
; SSSE3-NEXT:    pshufd {{.*#+}} xmm5 = xmm9[1,1,3,3]
 | 
						|
; SSSE3-NEXT:    pand %xmm4, %xmm5
 | 
						|
; SSSE3-NEXT:    pshufd {{.*#+}} xmm2 = xmm2[1,1,3,3]
 | 
						|
; SSSE3-NEXT:    por %xmm5, %xmm2
 | 
						|
; SSSE3-NEXT:    pand %xmm2, %xmm6
 | 
						|
; SSSE3-NEXT:    pandn %xmm8, %xmm2
 | 
						|
; SSSE3-NEXT:    por %xmm6, %xmm2
 | 
						|
; SSSE3-NEXT:    packuswb %xmm3, %xmm2
 | 
						|
; SSSE3-NEXT:    packuswb %xmm2, %xmm1
 | 
						|
; SSSE3-NEXT:    packuswb %xmm1, %xmm0
 | 
						|
; SSSE3-NEXT:    retq
 | 
						|
;
 | 
						|
; SSE41-LABEL: trunc_usat_v16i64_v16i8:
 | 
						|
; SSE41:       # %bb.0:
 | 
						|
; SSE41-NEXT:    movdqa %xmm0, %xmm8
 | 
						|
; SSE41-NEXT:    movapd {{.*#+}} xmm9 = [255,255]
 | 
						|
; SSE41-NEXT:    movdqa {{.*#+}} xmm10 = [9223372039002259456,9223372039002259456]
 | 
						|
; SSE41-NEXT:    movdqa %xmm1, %xmm0
 | 
						|
; SSE41-NEXT:    pxor %xmm10, %xmm0
 | 
						|
; SSE41-NEXT:    movdqa {{.*#+}} xmm11 = [9223372039002259711,9223372039002259711]
 | 
						|
; SSE41-NEXT:    movdqa %xmm11, %xmm12
 | 
						|
; SSE41-NEXT:    pcmpgtd %xmm0, %xmm12
 | 
						|
; SSE41-NEXT:    pshufd {{.*#+}} xmm13 = xmm12[0,0,2,2]
 | 
						|
; SSE41-NEXT:    pcmpeqd %xmm11, %xmm0
 | 
						|
; SSE41-NEXT:    pshufd {{.*#+}} xmm0 = xmm0[1,1,3,3]
 | 
						|
; SSE41-NEXT:    pand %xmm13, %xmm0
 | 
						|
; SSE41-NEXT:    por %xmm12, %xmm0
 | 
						|
; SSE41-NEXT:    movapd %xmm9, %xmm12
 | 
						|
; SSE41-NEXT:    blendvpd %xmm0, %xmm1, %xmm12
 | 
						|
; SSE41-NEXT:    movdqa %xmm8, %xmm0
 | 
						|
; SSE41-NEXT:    pxor %xmm10, %xmm0
 | 
						|
; SSE41-NEXT:    movdqa %xmm11, %xmm1
 | 
						|
; SSE41-NEXT:    pcmpgtd %xmm0, %xmm1
 | 
						|
; SSE41-NEXT:    pshufd {{.*#+}} xmm13 = xmm1[0,0,2,2]
 | 
						|
; SSE41-NEXT:    pcmpeqd %xmm11, %xmm0
 | 
						|
; SSE41-NEXT:    pshufd {{.*#+}} xmm0 = xmm0[1,1,3,3]
 | 
						|
; SSE41-NEXT:    pand %xmm13, %xmm0
 | 
						|
; SSE41-NEXT:    por %xmm1, %xmm0
 | 
						|
; SSE41-NEXT:    movapd %xmm9, %xmm13
 | 
						|
; SSE41-NEXT:    blendvpd %xmm0, %xmm8, %xmm13
 | 
						|
; SSE41-NEXT:    packusdw %xmm12, %xmm13
 | 
						|
; SSE41-NEXT:    movdqa %xmm3, %xmm0
 | 
						|
; SSE41-NEXT:    pxor %xmm10, %xmm0
 | 
						|
; SSE41-NEXT:    movdqa %xmm11, %xmm1
 | 
						|
; SSE41-NEXT:    pcmpgtd %xmm0, %xmm1
 | 
						|
; SSE41-NEXT:    pshufd {{.*#+}} xmm8 = xmm1[0,0,2,2]
 | 
						|
; SSE41-NEXT:    pcmpeqd %xmm11, %xmm0
 | 
						|
; SSE41-NEXT:    pshufd {{.*#+}} xmm0 = xmm0[1,1,3,3]
 | 
						|
; SSE41-NEXT:    pand %xmm8, %xmm0
 | 
						|
; SSE41-NEXT:    por %xmm1, %xmm0
 | 
						|
; SSE41-NEXT:    movapd %xmm9, %xmm1
 | 
						|
; SSE41-NEXT:    blendvpd %xmm0, %xmm3, %xmm1
 | 
						|
; SSE41-NEXT:    movdqa %xmm2, %xmm0
 | 
						|
; SSE41-NEXT:    pxor %xmm10, %xmm0
 | 
						|
; SSE41-NEXT:    movdqa %xmm11, %xmm3
 | 
						|
; SSE41-NEXT:    pcmpgtd %xmm0, %xmm3
 | 
						|
; SSE41-NEXT:    pshufd {{.*#+}} xmm8 = xmm3[0,0,2,2]
 | 
						|
; SSE41-NEXT:    pcmpeqd %xmm11, %xmm0
 | 
						|
; SSE41-NEXT:    pshufd {{.*#+}} xmm0 = xmm0[1,1,3,3]
 | 
						|
; SSE41-NEXT:    pand %xmm8, %xmm0
 | 
						|
; SSE41-NEXT:    por %xmm3, %xmm0
 | 
						|
; SSE41-NEXT:    movapd %xmm9, %xmm3
 | 
						|
; SSE41-NEXT:    blendvpd %xmm0, %xmm2, %xmm3
 | 
						|
; SSE41-NEXT:    packusdw %xmm1, %xmm3
 | 
						|
; SSE41-NEXT:    packusdw %xmm3, %xmm13
 | 
						|
; SSE41-NEXT:    movdqa %xmm5, %xmm0
 | 
						|
; SSE41-NEXT:    pxor %xmm10, %xmm0
 | 
						|
; SSE41-NEXT:    movdqa %xmm11, %xmm1
 | 
						|
; SSE41-NEXT:    pcmpgtd %xmm0, %xmm1
 | 
						|
; SSE41-NEXT:    pshufd {{.*#+}} xmm2 = xmm1[0,0,2,2]
 | 
						|
; SSE41-NEXT:    pcmpeqd %xmm11, %xmm0
 | 
						|
; SSE41-NEXT:    pshufd {{.*#+}} xmm0 = xmm0[1,1,3,3]
 | 
						|
; SSE41-NEXT:    pand %xmm2, %xmm0
 | 
						|
; SSE41-NEXT:    por %xmm1, %xmm0
 | 
						|
; SSE41-NEXT:    movapd %xmm9, %xmm1
 | 
						|
; SSE41-NEXT:    blendvpd %xmm0, %xmm5, %xmm1
 | 
						|
; SSE41-NEXT:    movdqa %xmm4, %xmm0
 | 
						|
; SSE41-NEXT:    pxor %xmm10, %xmm0
 | 
						|
; SSE41-NEXT:    movdqa %xmm11, %xmm2
 | 
						|
; SSE41-NEXT:    pcmpgtd %xmm0, %xmm2
 | 
						|
; SSE41-NEXT:    pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
 | 
						|
; SSE41-NEXT:    pcmpeqd %xmm11, %xmm0
 | 
						|
; SSE41-NEXT:    pshufd {{.*#+}} xmm0 = xmm0[1,1,3,3]
 | 
						|
; SSE41-NEXT:    pand %xmm3, %xmm0
 | 
						|
; SSE41-NEXT:    por %xmm2, %xmm0
 | 
						|
; SSE41-NEXT:    movapd %xmm9, %xmm2
 | 
						|
; SSE41-NEXT:    blendvpd %xmm0, %xmm4, %xmm2
 | 
						|
; SSE41-NEXT:    packusdw %xmm1, %xmm2
 | 
						|
; SSE41-NEXT:    movdqa %xmm7, %xmm0
 | 
						|
; SSE41-NEXT:    pxor %xmm10, %xmm0
 | 
						|
; SSE41-NEXT:    movdqa %xmm11, %xmm1
 | 
						|
; SSE41-NEXT:    pcmpgtd %xmm0, %xmm1
 | 
						|
; SSE41-NEXT:    pshufd {{.*#+}} xmm3 = xmm1[0,0,2,2]
 | 
						|
; SSE41-NEXT:    pcmpeqd %xmm11, %xmm0
 | 
						|
; SSE41-NEXT:    pshufd {{.*#+}} xmm0 = xmm0[1,1,3,3]
 | 
						|
; SSE41-NEXT:    pand %xmm3, %xmm0
 | 
						|
; SSE41-NEXT:    por %xmm1, %xmm0
 | 
						|
; SSE41-NEXT:    movapd %xmm9, %xmm1
 | 
						|
; SSE41-NEXT:    blendvpd %xmm0, %xmm7, %xmm1
 | 
						|
; SSE41-NEXT:    pxor %xmm6, %xmm10
 | 
						|
; SSE41-NEXT:    movdqa %xmm11, %xmm3
 | 
						|
; SSE41-NEXT:    pcmpgtd %xmm10, %xmm3
 | 
						|
; SSE41-NEXT:    pshufd {{.*#+}} xmm4 = xmm3[0,0,2,2]
 | 
						|
; SSE41-NEXT:    pcmpeqd %xmm11, %xmm10
 | 
						|
; SSE41-NEXT:    pshufd {{.*#+}} xmm0 = xmm10[1,1,3,3]
 | 
						|
; SSE41-NEXT:    pand %xmm4, %xmm0
 | 
						|
; SSE41-NEXT:    por %xmm3, %xmm0
 | 
						|
; SSE41-NEXT:    blendvpd %xmm0, %xmm6, %xmm9
 | 
						|
; SSE41-NEXT:    packusdw %xmm1, %xmm9
 | 
						|
; SSE41-NEXT:    packusdw %xmm9, %xmm2
 | 
						|
; SSE41-NEXT:    packuswb %xmm2, %xmm13
 | 
						|
; SSE41-NEXT:    movdqa %xmm13, %xmm0
 | 
						|
; SSE41-NEXT:    retq
 | 
						|
;
 | 
						|
; AVX1-LABEL: trunc_usat_v16i64_v16i8:
 | 
						|
; AVX1:       # %bb.0:
 | 
						|
; AVX1-NEXT:    vmovapd {{.*#+}} ymm8 = [255,255,255,255]
 | 
						|
; AVX1-NEXT:    vextractf128 $1, %ymm0, %xmm5
 | 
						|
; AVX1-NEXT:    vmovdqa {{.*#+}} xmm6 = [9223372036854775808,9223372036854775808]
 | 
						|
; AVX1-NEXT:    vpxor %xmm6, %xmm5, %xmm5
 | 
						|
; AVX1-NEXT:    vmovdqa {{.*#+}} xmm7 = [9223372036854776063,9223372036854776063]
 | 
						|
; AVX1-NEXT:    vpcmpgtq %xmm5, %xmm7, %xmm5
 | 
						|
; AVX1-NEXT:    vpxor %xmm6, %xmm0, %xmm4
 | 
						|
; AVX1-NEXT:    vpcmpgtq %xmm4, %xmm7, %xmm4
 | 
						|
; AVX1-NEXT:    vinsertf128 $1, %xmm5, %ymm4, %ymm4
 | 
						|
; AVX1-NEXT:    vblendvpd %ymm4, %ymm0, %ymm8, %ymm0
 | 
						|
; AVX1-NEXT:    vextractf128 $1, %ymm1, %xmm4
 | 
						|
; AVX1-NEXT:    vpxor %xmm6, %xmm4, %xmm4
 | 
						|
; AVX1-NEXT:    vpcmpgtq %xmm4, %xmm7, %xmm4
 | 
						|
; AVX1-NEXT:    vpxor %xmm6, %xmm1, %xmm5
 | 
						|
; AVX1-NEXT:    vpcmpgtq %xmm5, %xmm7, %xmm5
 | 
						|
; AVX1-NEXT:    vinsertf128 $1, %xmm4, %ymm5, %ymm4
 | 
						|
; AVX1-NEXT:    vblendvpd %ymm4, %ymm1, %ymm8, %ymm1
 | 
						|
; AVX1-NEXT:    vextractf128 $1, %ymm2, %xmm4
 | 
						|
; AVX1-NEXT:    vpxor %xmm6, %xmm4, %xmm4
 | 
						|
; AVX1-NEXT:    vpcmpgtq %xmm4, %xmm7, %xmm4
 | 
						|
; AVX1-NEXT:    vpxor %xmm6, %xmm2, %xmm5
 | 
						|
; AVX1-NEXT:    vpcmpgtq %xmm5, %xmm7, %xmm5
 | 
						|
; AVX1-NEXT:    vinsertf128 $1, %xmm4, %ymm5, %ymm4
 | 
						|
; AVX1-NEXT:    vblendvpd %ymm4, %ymm2, %ymm8, %ymm2
 | 
						|
; AVX1-NEXT:    vextractf128 $1, %ymm3, %xmm4
 | 
						|
; AVX1-NEXT:    vpxor %xmm6, %xmm4, %xmm4
 | 
						|
; AVX1-NEXT:    vpcmpgtq %xmm4, %xmm7, %xmm4
 | 
						|
; AVX1-NEXT:    vpxor %xmm6, %xmm3, %xmm5
 | 
						|
; AVX1-NEXT:    vpcmpgtq %xmm5, %xmm7, %xmm5
 | 
						|
; AVX1-NEXT:    vinsertf128 $1, %xmm4, %ymm5, %ymm4
 | 
						|
; AVX1-NEXT:    vblendvpd %ymm4, %ymm3, %ymm8, %ymm3
 | 
						|
; AVX1-NEXT:    vextractf128 $1, %ymm3, %xmm4
 | 
						|
; AVX1-NEXT:    vpackusdw %xmm4, %xmm3, %xmm3
 | 
						|
; AVX1-NEXT:    vextractf128 $1, %ymm2, %xmm4
 | 
						|
; AVX1-NEXT:    vpackusdw %xmm4, %xmm2, %xmm2
 | 
						|
; AVX1-NEXT:    vpackusdw %xmm3, %xmm2, %xmm2
 | 
						|
; AVX1-NEXT:    vextractf128 $1, %ymm1, %xmm3
 | 
						|
; AVX1-NEXT:    vpackusdw %xmm3, %xmm1, %xmm1
 | 
						|
; AVX1-NEXT:    vextractf128 $1, %ymm0, %xmm3
 | 
						|
; AVX1-NEXT:    vpackusdw %xmm3, %xmm0, %xmm0
 | 
						|
; AVX1-NEXT:    vpackusdw %xmm1, %xmm0, %xmm0
 | 
						|
; AVX1-NEXT:    vpackuswb %xmm2, %xmm0, %xmm0
 | 
						|
; AVX1-NEXT:    vzeroupper
 | 
						|
; AVX1-NEXT:    retq
 | 
						|
;
 | 
						|
; AVX2-LABEL: trunc_usat_v16i64_v16i8:
 | 
						|
; AVX2:       # %bb.0:
 | 
						|
; AVX2-NEXT:    vbroadcastsd {{.*#+}} ymm4 = [255,255,255,255]
 | 
						|
; AVX2-NEXT:    vpbroadcastq {{.*#+}} ymm5 = [9223372036854775808,9223372036854775808,9223372036854775808,9223372036854775808]
 | 
						|
; AVX2-NEXT:    vpxor %ymm5, %ymm1, %ymm6
 | 
						|
; AVX2-NEXT:    vpbroadcastq {{.*#+}} ymm7 = [9223372036854776063,9223372036854776063,9223372036854776063,9223372036854776063]
 | 
						|
; AVX2-NEXT:    vpcmpgtq %ymm6, %ymm7, %ymm6
 | 
						|
; AVX2-NEXT:    vblendvpd %ymm6, %ymm1, %ymm4, %ymm1
 | 
						|
; AVX2-NEXT:    vpxor %ymm5, %ymm0, %ymm6
 | 
						|
; AVX2-NEXT:    vpcmpgtq %ymm6, %ymm7, %ymm6
 | 
						|
; AVX2-NEXT:    vblendvpd %ymm6, %ymm0, %ymm4, %ymm0
 | 
						|
; AVX2-NEXT:    vpackusdw %ymm1, %ymm0, %ymm0
 | 
						|
; AVX2-NEXT:    vpxor %ymm5, %ymm3, %ymm1
 | 
						|
; AVX2-NEXT:    vpcmpgtq %ymm1, %ymm7, %ymm1
 | 
						|
; AVX2-NEXT:    vblendvpd %ymm1, %ymm3, %ymm4, %ymm1
 | 
						|
; AVX2-NEXT:    vpxor %ymm5, %ymm2, %ymm3
 | 
						|
; AVX2-NEXT:    vpcmpgtq %ymm3, %ymm7, %ymm3
 | 
						|
; AVX2-NEXT:    vblendvpd %ymm3, %ymm2, %ymm4, %ymm2
 | 
						|
; AVX2-NEXT:    vpackusdw %ymm1, %ymm2, %ymm1
 | 
						|
; AVX2-NEXT:    vpermq {{.*#+}} ymm1 = ymm1[0,2,1,3]
 | 
						|
; AVX2-NEXT:    vpermq {{.*#+}} ymm0 = ymm0[0,2,1,3]
 | 
						|
; AVX2-NEXT:    vpackusdw %ymm1, %ymm0, %ymm0
 | 
						|
; AVX2-NEXT:    vpermq {{.*#+}} ymm0 = ymm0[0,2,1,3]
 | 
						|
; AVX2-NEXT:    vextracti128 $1, %ymm0, %xmm1
 | 
						|
; AVX2-NEXT:    vpackuswb %xmm1, %xmm0, %xmm0
 | 
						|
; AVX2-NEXT:    vzeroupper
 | 
						|
; AVX2-NEXT:    retq
 | 
						|
;
 | 
						|
; AVX512-LABEL: trunc_usat_v16i64_v16i8:
 | 
						|
; AVX512:       # %bb.0:
 | 
						|
; AVX512-NEXT:    vpbroadcastq {{.*#+}} zmm2 = [255,255,255,255,255,255,255,255]
 | 
						|
; AVX512-NEXT:    vpminuq %zmm2, %zmm1, %zmm1
 | 
						|
; AVX512-NEXT:    vpminuq %zmm2, %zmm0, %zmm0
 | 
						|
; AVX512-NEXT:    vpmovqd %zmm0, %ymm0
 | 
						|
; AVX512-NEXT:    vpmovqd %zmm1, %ymm1
 | 
						|
; AVX512-NEXT:    vinserti64x4 $1, %ymm1, %zmm0, %zmm0
 | 
						|
; AVX512-NEXT:    vpmovdb %zmm0, %xmm0
 | 
						|
; AVX512-NEXT:    vzeroupper
 | 
						|
; AVX512-NEXT:    retq
 | 
						|
  %1 = icmp ult <16 x i64> %a0, <i64 255, i64 255, i64 255, i64 255, i64 255, i64 255, i64 255, i64 255, i64 255, i64 255, i64 255, i64 255, i64 255, i64 255, i64 255, i64 255>
 | 
						|
  %2 = select <16 x i1> %1, <16 x i64> %a0, <16 x i64> <i64 255, i64 255, i64 255, i64 255, i64 255, i64 255, i64 255, i64 255, i64 255, i64 255, i64 255, i64 255, i64 255, i64 255, i64 255, i64 255>
 | 
						|
  %3 = trunc <16 x i64> %2 to <16 x i8>
 | 
						|
  ret <16 x i8> %3
 | 
						|
}
 | 
						|
 | 
						|
define <8 x i8> @trunc_usat_v8i32_v8i8(<8 x i32> %a0) {
 | 
						|
; SSE2-LABEL: trunc_usat_v8i32_v8i8:
 | 
						|
; SSE2:       # %bb.0:
 | 
						|
; SSE2-NEXT:    movdqa {{.*#+}} xmm2 = [255,255,255,255]
 | 
						|
; SSE2-NEXT:    movdqa {{.*#+}} xmm3 = [2147483648,2147483648,2147483648,2147483648]
 | 
						|
; SSE2-NEXT:    movdqa %xmm1, %xmm4
 | 
						|
; SSE2-NEXT:    pxor %xmm3, %xmm4
 | 
						|
; SSE2-NEXT:    movdqa {{.*#+}} xmm5 = [2147483903,2147483903,2147483903,2147483903]
 | 
						|
; SSE2-NEXT:    movdqa %xmm5, %xmm6
 | 
						|
; SSE2-NEXT:    pcmpgtd %xmm4, %xmm6
 | 
						|
; SSE2-NEXT:    pand %xmm6, %xmm1
 | 
						|
; SSE2-NEXT:    pandn %xmm2, %xmm6
 | 
						|
; SSE2-NEXT:    por %xmm1, %xmm6
 | 
						|
; SSE2-NEXT:    pxor %xmm0, %xmm3
 | 
						|
; SSE2-NEXT:    pcmpgtd %xmm3, %xmm5
 | 
						|
; SSE2-NEXT:    pand %xmm5, %xmm0
 | 
						|
; SSE2-NEXT:    pandn %xmm2, %xmm5
 | 
						|
; SSE2-NEXT:    por %xmm5, %xmm0
 | 
						|
; SSE2-NEXT:    packuswb %xmm6, %xmm0
 | 
						|
; SSE2-NEXT:    retq
 | 
						|
;
 | 
						|
; SSSE3-LABEL: trunc_usat_v8i32_v8i8:
 | 
						|
; SSSE3:       # %bb.0:
 | 
						|
; SSSE3-NEXT:    movdqa {{.*#+}} xmm2 = [255,255,255,255]
 | 
						|
; SSSE3-NEXT:    movdqa {{.*#+}} xmm3 = [2147483648,2147483648,2147483648,2147483648]
 | 
						|
; SSSE3-NEXT:    movdqa %xmm1, %xmm4
 | 
						|
; SSSE3-NEXT:    pxor %xmm3, %xmm4
 | 
						|
; SSSE3-NEXT:    movdqa {{.*#+}} xmm5 = [2147483903,2147483903,2147483903,2147483903]
 | 
						|
; SSSE3-NEXT:    movdqa %xmm5, %xmm6
 | 
						|
; SSSE3-NEXT:    pcmpgtd %xmm4, %xmm6
 | 
						|
; SSSE3-NEXT:    pand %xmm6, %xmm1
 | 
						|
; SSSE3-NEXT:    pandn %xmm2, %xmm6
 | 
						|
; SSSE3-NEXT:    por %xmm1, %xmm6
 | 
						|
; SSSE3-NEXT:    pxor %xmm0, %xmm3
 | 
						|
; SSSE3-NEXT:    pcmpgtd %xmm3, %xmm5
 | 
						|
; SSSE3-NEXT:    pand %xmm5, %xmm0
 | 
						|
; SSSE3-NEXT:    pandn %xmm2, %xmm5
 | 
						|
; SSSE3-NEXT:    por %xmm5, %xmm0
 | 
						|
; SSSE3-NEXT:    packuswb %xmm6, %xmm0
 | 
						|
; SSSE3-NEXT:    retq
 | 
						|
;
 | 
						|
; SSE41-LABEL: trunc_usat_v8i32_v8i8:
 | 
						|
; SSE41:       # %bb.0:
 | 
						|
; SSE41-NEXT:    movdqa {{.*#+}} xmm2 = [255,255,255,255]
 | 
						|
; SSE41-NEXT:    pminud %xmm2, %xmm1
 | 
						|
; SSE41-NEXT:    pminud %xmm2, %xmm0
 | 
						|
; SSE41-NEXT:    packusdw %xmm1, %xmm0
 | 
						|
; SSE41-NEXT:    retq
 | 
						|
;
 | 
						|
; AVX1-LABEL: trunc_usat_v8i32_v8i8:
 | 
						|
; AVX1:       # %bb.0:
 | 
						|
; AVX1-NEXT:    vextractf128 $1, %ymm0, %xmm1
 | 
						|
; AVX1-NEXT:    vmovdqa {{.*#+}} xmm2 = [255,255,255,255]
 | 
						|
; AVX1-NEXT:    vpminud %xmm2, %xmm1, %xmm1
 | 
						|
; AVX1-NEXT:    vpminud %xmm2, %xmm0, %xmm0
 | 
						|
; AVX1-NEXT:    vpackusdw %xmm1, %xmm0, %xmm0
 | 
						|
; AVX1-NEXT:    vzeroupper
 | 
						|
; AVX1-NEXT:    retq
 | 
						|
;
 | 
						|
; AVX2-LABEL: trunc_usat_v8i32_v8i8:
 | 
						|
; AVX2:       # %bb.0:
 | 
						|
; AVX2-NEXT:    vpbroadcastd {{.*#+}} ymm1 = [255,255,255,255,255,255,255,255]
 | 
						|
; AVX2-NEXT:    vpminud %ymm1, %ymm0, %ymm0
 | 
						|
; AVX2-NEXT:    vextracti128 $1, %ymm0, %xmm1
 | 
						|
; AVX2-NEXT:    vpackusdw %xmm1, %xmm0, %xmm0
 | 
						|
; AVX2-NEXT:    vzeroupper
 | 
						|
; AVX2-NEXT:    retq
 | 
						|
;
 | 
						|
; AVX512F-LABEL: trunc_usat_v8i32_v8i8:
 | 
						|
; AVX512F:       # %bb.0:
 | 
						|
; AVX512F-NEXT:    vpbroadcastd {{.*#+}} ymm1 = [255,255,255,255,255,255,255,255]
 | 
						|
; AVX512F-NEXT:    vpminud %ymm1, %ymm0, %ymm0
 | 
						|
; AVX512F-NEXT:    vpmovdw %zmm0, %ymm0
 | 
						|
; AVX512F-NEXT:    # kill: def $xmm0 killed $xmm0 killed $ymm0
 | 
						|
; AVX512F-NEXT:    vzeroupper
 | 
						|
; AVX512F-NEXT:    retq
 | 
						|
;
 | 
						|
; AVX512VL-LABEL: trunc_usat_v8i32_v8i8:
 | 
						|
; AVX512VL:       # %bb.0:
 | 
						|
; AVX512VL-NEXT:    vpminud {{.*}}(%rip){1to8}, %ymm0, %ymm0
 | 
						|
; AVX512VL-NEXT:    vpmovdw %ymm0, %xmm0
 | 
						|
; AVX512VL-NEXT:    vzeroupper
 | 
						|
; AVX512VL-NEXT:    retq
 | 
						|
;
 | 
						|
; AVX512BW-LABEL: trunc_usat_v8i32_v8i8:
 | 
						|
; AVX512BW:       # %bb.0:
 | 
						|
; AVX512BW-NEXT:    vpbroadcastd {{.*#+}} ymm1 = [255,255,255,255,255,255,255,255]
 | 
						|
; AVX512BW-NEXT:    vpminud %ymm1, %ymm0, %ymm0
 | 
						|
; AVX512BW-NEXT:    vpmovdw %zmm0, %ymm0
 | 
						|
; AVX512BW-NEXT:    # kill: def $xmm0 killed $xmm0 killed $ymm0
 | 
						|
; AVX512BW-NEXT:    vzeroupper
 | 
						|
; AVX512BW-NEXT:    retq
 | 
						|
;
 | 
						|
; AVX512BWVL-LABEL: trunc_usat_v8i32_v8i8:
 | 
						|
; AVX512BWVL:       # %bb.0:
 | 
						|
; AVX512BWVL-NEXT:    vpminud {{.*}}(%rip){1to8}, %ymm0, %ymm0
 | 
						|
; AVX512BWVL-NEXT:    vpmovdw %ymm0, %xmm0
 | 
						|
; AVX512BWVL-NEXT:    vzeroupper
 | 
						|
; AVX512BWVL-NEXT:    retq
 | 
						|
  %1 = icmp ult <8 x i32> %a0, <i32 255, i32 255, i32 255, i32 255, i32 255, i32 255, i32 255, i32 255>
 | 
						|
  %2 = select <8 x i1> %1, <8 x i32> %a0, <8 x i32> <i32 255, i32 255, i32 255, i32 255, i32 255, i32 255, i32 255, i32 255>
 | 
						|
  %3 = trunc <8 x i32> %2 to <8 x i8>
 | 
						|
  ret <8 x i8> %3
 | 
						|
}
 | 
						|
 | 
						|
define void @trunc_usat_v8i32_v8i8_store(<8 x i32> %a0, <8 x i8> *%p1) {
 | 
						|
; SSE2-LABEL: trunc_usat_v8i32_v8i8_store:
 | 
						|
; SSE2:       # %bb.0:
 | 
						|
; SSE2-NEXT:    movdqa {{.*#+}} xmm2 = [255,255,255,255]
 | 
						|
; SSE2-NEXT:    movdqa {{.*#+}} xmm3 = [2147483648,2147483648,2147483648,2147483648]
 | 
						|
; SSE2-NEXT:    movdqa %xmm0, %xmm4
 | 
						|
; SSE2-NEXT:    pxor %xmm3, %xmm4
 | 
						|
; SSE2-NEXT:    movdqa {{.*#+}} xmm5 = [2147483903,2147483903,2147483903,2147483903]
 | 
						|
; SSE2-NEXT:    movdqa %xmm5, %xmm6
 | 
						|
; SSE2-NEXT:    pcmpgtd %xmm4, %xmm6
 | 
						|
; SSE2-NEXT:    pand %xmm6, %xmm0
 | 
						|
; SSE2-NEXT:    pandn %xmm2, %xmm6
 | 
						|
; SSE2-NEXT:    por %xmm0, %xmm6
 | 
						|
; SSE2-NEXT:    pxor %xmm1, %xmm3
 | 
						|
; SSE2-NEXT:    pcmpgtd %xmm3, %xmm5
 | 
						|
; SSE2-NEXT:    pand %xmm5, %xmm1
 | 
						|
; SSE2-NEXT:    pandn %xmm2, %xmm5
 | 
						|
; SSE2-NEXT:    por %xmm1, %xmm5
 | 
						|
; SSE2-NEXT:    pand %xmm2, %xmm5
 | 
						|
; SSE2-NEXT:    pand %xmm2, %xmm6
 | 
						|
; SSE2-NEXT:    packuswb %xmm5, %xmm6
 | 
						|
; SSE2-NEXT:    packuswb %xmm6, %xmm6
 | 
						|
; SSE2-NEXT:    movq %xmm6, (%rdi)
 | 
						|
; SSE2-NEXT:    retq
 | 
						|
;
 | 
						|
; SSSE3-LABEL: trunc_usat_v8i32_v8i8_store:
 | 
						|
; SSSE3:       # %bb.0:
 | 
						|
; SSSE3-NEXT:    movdqa {{.*#+}} xmm2 = [255,255,255,255]
 | 
						|
; SSSE3-NEXT:    movdqa {{.*#+}} xmm3 = [2147483648,2147483648,2147483648,2147483648]
 | 
						|
; SSSE3-NEXT:    movdqa %xmm0, %xmm4
 | 
						|
; SSSE3-NEXT:    pxor %xmm3, %xmm4
 | 
						|
; SSSE3-NEXT:    movdqa {{.*#+}} xmm5 = [2147483903,2147483903,2147483903,2147483903]
 | 
						|
; SSSE3-NEXT:    movdqa %xmm5, %xmm6
 | 
						|
; SSSE3-NEXT:    pcmpgtd %xmm4, %xmm6
 | 
						|
; SSSE3-NEXT:    pand %xmm6, %xmm0
; SSSE3-NEXT:    pandn %xmm2, %xmm6
; SSSE3-NEXT:    por %xmm0, %xmm6
; SSSE3-NEXT:    pxor %xmm1, %xmm3
; SSSE3-NEXT:    pcmpgtd %xmm3, %xmm5
; SSSE3-NEXT:    pand %xmm5, %xmm1
; SSSE3-NEXT:    pandn %xmm2, %xmm5
; SSSE3-NEXT:    por %xmm1, %xmm5
; SSSE3-NEXT:    movdqa {{.*#+}} xmm0 = [0,1,4,5,8,9,12,13,8,9,12,13,12,13,14,15]
; SSSE3-NEXT:    pshufb %xmm0, %xmm5
; SSSE3-NEXT:    pshufb %xmm0, %xmm6
; SSSE3-NEXT:    punpcklqdq {{.*#+}} xmm6 = xmm6[0],xmm5[0]
; SSSE3-NEXT:    pshufb {{.*#+}} xmm6 = xmm6[0,2,4,6,8,10,12,14,u,u,u,u,u,u,u,u]
; SSSE3-NEXT:    movq %xmm6, (%rdi)
; SSSE3-NEXT:    retq
;
; SSE41-LABEL: trunc_usat_v8i32_v8i8_store:
; SSE41:       # %bb.0:
; SSE41-NEXT:    movdqa {{.*#+}} xmm2 = [255,255,255,255]
; SSE41-NEXT:    pminud %xmm2, %xmm1
; SSE41-NEXT:    pminud %xmm2, %xmm0
; SSE41-NEXT:    packusdw %xmm1, %xmm0
; SSE41-NEXT:    packuswb %xmm0, %xmm0
; SSE41-NEXT:    movq %xmm0, (%rdi)
; SSE41-NEXT:    retq
;
; AVX1-LABEL: trunc_usat_v8i32_v8i8_store:
; AVX1:       # %bb.0:
; AVX1-NEXT:    vextractf128 $1, %ymm0, %xmm1
; AVX1-NEXT:    vmovdqa {{.*#+}} xmm2 = [255,255,255,255]
; AVX1-NEXT:    vpminud %xmm2, %xmm1, %xmm1
; AVX1-NEXT:    vpminud %xmm2, %xmm0, %xmm0
; AVX1-NEXT:    vpackusdw %xmm1, %xmm0, %xmm0
; AVX1-NEXT:    vpackuswb %xmm0, %xmm0, %xmm0
; AVX1-NEXT:    vmovq %xmm0, (%rdi)
; AVX1-NEXT:    vzeroupper
; AVX1-NEXT:    retq
;
; AVX2-LABEL: trunc_usat_v8i32_v8i8_store:
; AVX2:       # %bb.0:
; AVX2-NEXT:    vpbroadcastd {{.*#+}} ymm1 = [255,255,255,255,255,255,255,255]
; AVX2-NEXT:    vpminud %ymm1, %ymm0, %ymm0
; AVX2-NEXT:    vextracti128 $1, %ymm0, %xmm1
; AVX2-NEXT:    vpackusdw %xmm1, %xmm0, %xmm0
; AVX2-NEXT:    vpackuswb %xmm0, %xmm0, %xmm0
; AVX2-NEXT:    vmovq %xmm0, (%rdi)
; AVX2-NEXT:    vzeroupper
; AVX2-NEXT:    retq
;
; AVX512F-LABEL: trunc_usat_v8i32_v8i8_store:
; AVX512F:       # %bb.0:
; AVX512F-NEXT:    vpbroadcastd {{.*#+}} ymm1 = [255,255,255,255,255,255,255,255]
; AVX512F-NEXT:    vpminud %ymm1, %ymm0, %ymm0
; AVX512F-NEXT:    vpmovdw %zmm0, %ymm0
; AVX512F-NEXT:    vpackuswb %xmm0, %xmm0, %xmm0
; AVX512F-NEXT:    vmovq %xmm0, (%rdi)
; AVX512F-NEXT:    vzeroupper
; AVX512F-NEXT:    retq
;
; AVX512VL-LABEL: trunc_usat_v8i32_v8i8_store:
; AVX512VL:       # %bb.0:
; AVX512VL-NEXT:    vpmovusdb %ymm0, (%rdi)
; AVX512VL-NEXT:    vzeroupper
; AVX512VL-NEXT:    retq
;
; AVX512BW-LABEL: trunc_usat_v8i32_v8i8_store:
; AVX512BW:       # %bb.0:
; AVX512BW-NEXT:    vpbroadcastd {{.*#+}} ymm1 = [255,255,255,255,255,255,255,255]
; AVX512BW-NEXT:    vpminud %ymm1, %ymm0, %ymm0
; AVX512BW-NEXT:    vpmovdw %zmm0, %ymm0
; AVX512BW-NEXT:    vpackuswb %xmm0, %xmm0, %xmm0
; AVX512BW-NEXT:    vmovq %xmm0, (%rdi)
; AVX512BW-NEXT:    vzeroupper
; AVX512BW-NEXT:    retq
;
; AVX512BWVL-LABEL: trunc_usat_v8i32_v8i8_store:
; AVX512BWVL:       # %bb.0:
; AVX512BWVL-NEXT:    vpmovusdb %ymm0, (%rdi)
; AVX512BWVL-NEXT:    vzeroupper
; AVX512BWVL-NEXT:    retq
  %1 = icmp ult <8 x i32> %a0, <i32 255, i32 255, i32 255, i32 255, i32 255, i32 255, i32 255, i32 255>
  %2 = select <8 x i1> %1, <8 x i32> %a0, <8 x i32> <i32 255, i32 255, i32 255, i32 255, i32 255, i32 255, i32 255, i32 255>
  %3 = trunc <8 x i32> %2 to <8 x i8>
  store <8 x i8> %3, <8 x i8> *%p1
  ret void
}

define <16 x i8> @trunc_usat_v16i32_v16i8(<16 x i32> %a0) {
; SSE2-LABEL: trunc_usat_v16i32_v16i8:
; SSE2:       # %bb.0:
; SSE2-NEXT:    movdqa {{.*#+}} xmm8 = [255,255,255,255]
; SSE2-NEXT:    movdqa {{.*#+}} xmm6 = [2147483648,2147483648,2147483648,2147483648]
; SSE2-NEXT:    movdqa %xmm1, %xmm7
; SSE2-NEXT:    pxor %xmm6, %xmm7
; SSE2-NEXT:    movdqa {{.*#+}} xmm5 = [2147483903,2147483903,2147483903,2147483903]
; SSE2-NEXT:    movdqa %xmm5, %xmm4
; SSE2-NEXT:    pcmpgtd %xmm7, %xmm4
; SSE2-NEXT:    pand %xmm4, %xmm1
; SSE2-NEXT:    pandn %xmm8, %xmm4
; SSE2-NEXT:    por %xmm1, %xmm4
; SSE2-NEXT:    movdqa %xmm0, %xmm1
; SSE2-NEXT:    pxor %xmm6, %xmm1
; SSE2-NEXT:    movdqa %xmm5, %xmm7
; SSE2-NEXT:    pcmpgtd %xmm1, %xmm7
; SSE2-NEXT:    pand %xmm7, %xmm0
; SSE2-NEXT:    pandn %xmm8, %xmm7
; SSE2-NEXT:    por %xmm7, %xmm0
; SSE2-NEXT:    packuswb %xmm4, %xmm0
; SSE2-NEXT:    movdqa %xmm3, %xmm1
; SSE2-NEXT:    pxor %xmm6, %xmm1
; SSE2-NEXT:    movdqa %xmm5, %xmm4
; SSE2-NEXT:    pcmpgtd %xmm1, %xmm4
; SSE2-NEXT:    pand %xmm4, %xmm3
; SSE2-NEXT:    pandn %xmm8, %xmm4
; SSE2-NEXT:    por %xmm3, %xmm4
; SSE2-NEXT:    pxor %xmm2, %xmm6
; SSE2-NEXT:    pcmpgtd %xmm6, %xmm5
; SSE2-NEXT:    pand %xmm5, %xmm2
; SSE2-NEXT:    pandn %xmm8, %xmm5
; SSE2-NEXT:    por %xmm2, %xmm5
; SSE2-NEXT:    packuswb %xmm4, %xmm5
; SSE2-NEXT:    packuswb %xmm5, %xmm0
; SSE2-NEXT:    retq
;
; SSSE3-LABEL: trunc_usat_v16i32_v16i8:
; SSSE3:       # %bb.0:
; SSSE3-NEXT:    movdqa {{.*#+}} xmm8 = [255,255,255,255]
; SSSE3-NEXT:    movdqa {{.*#+}} xmm6 = [2147483648,2147483648,2147483648,2147483648]
; SSSE3-NEXT:    movdqa %xmm1, %xmm7
; SSSE3-NEXT:    pxor %xmm6, %xmm7
; SSSE3-NEXT:    movdqa {{.*#+}} xmm5 = [2147483903,2147483903,2147483903,2147483903]
; SSSE3-NEXT:    movdqa %xmm5, %xmm4
; SSSE3-NEXT:    pcmpgtd %xmm7, %xmm4
; SSSE3-NEXT:    pand %xmm4, %xmm1
; SSSE3-NEXT:    pandn %xmm8, %xmm4
; SSSE3-NEXT:    por %xmm1, %xmm4
; SSSE3-NEXT:    movdqa %xmm0, %xmm1
; SSSE3-NEXT:    pxor %xmm6, %xmm1
; SSSE3-NEXT:    movdqa %xmm5, %xmm7
; SSSE3-NEXT:    pcmpgtd %xmm1, %xmm7
; SSSE3-NEXT:    pand %xmm7, %xmm0
; SSSE3-NEXT:    pandn %xmm8, %xmm7
; SSSE3-NEXT:    por %xmm7, %xmm0
; SSSE3-NEXT:    packuswb %xmm4, %xmm0
; SSSE3-NEXT:    movdqa %xmm3, %xmm1
; SSSE3-NEXT:    pxor %xmm6, %xmm1
; SSSE3-NEXT:    movdqa %xmm5, %xmm4
; SSSE3-NEXT:    pcmpgtd %xmm1, %xmm4
; SSSE3-NEXT:    pand %xmm4, %xmm3
; SSSE3-NEXT:    pandn %xmm8, %xmm4
; SSSE3-NEXT:    por %xmm3, %xmm4
; SSSE3-NEXT:    pxor %xmm2, %xmm6
; SSSE3-NEXT:    pcmpgtd %xmm6, %xmm5
; SSSE3-NEXT:    pand %xmm5, %xmm2
; SSSE3-NEXT:    pandn %xmm8, %xmm5
; SSSE3-NEXT:    por %xmm2, %xmm5
; SSSE3-NEXT:    packuswb %xmm4, %xmm5
; SSSE3-NEXT:    packuswb %xmm5, %xmm0
; SSSE3-NEXT:    retq
;
; SSE41-LABEL: trunc_usat_v16i32_v16i8:
; SSE41:       # %bb.0:
; SSE41-NEXT:    movdqa {{.*#+}} xmm4 = [255,255,255,255]
; SSE41-NEXT:    pminud %xmm4, %xmm1
; SSE41-NEXT:    pminud %xmm4, %xmm0
; SSE41-NEXT:    packusdw %xmm1, %xmm0
; SSE41-NEXT:    pminud %xmm4, %xmm3
; SSE41-NEXT:    pminud %xmm4, %xmm2
; SSE41-NEXT:    packusdw %xmm3, %xmm2
; SSE41-NEXT:    packuswb %xmm2, %xmm0
; SSE41-NEXT:    retq
;
; AVX1-LABEL: trunc_usat_v16i32_v16i8:
; AVX1:       # %bb.0:
; AVX1-NEXT:    vextractf128 $1, %ymm0, %xmm2
; AVX1-NEXT:    vmovdqa {{.*#+}} xmm3 = [255,255,255,255]
; AVX1-NEXT:    vpminud %xmm3, %xmm2, %xmm2
; AVX1-NEXT:    vpminud %xmm3, %xmm0, %xmm0
; AVX1-NEXT:    vpackusdw %xmm2, %xmm0, %xmm0
; AVX1-NEXT:    vextractf128 $1, %ymm1, %xmm2
; AVX1-NEXT:    vpminud %xmm3, %xmm2, %xmm2
; AVX1-NEXT:    vpminud %xmm3, %xmm1, %xmm1
; AVX1-NEXT:    vpackusdw %xmm2, %xmm1, %xmm1
; AVX1-NEXT:    vpackuswb %xmm1, %xmm0, %xmm0
; AVX1-NEXT:    vzeroupper
; AVX1-NEXT:    retq
;
; AVX2-LABEL: trunc_usat_v16i32_v16i8:
; AVX2:       # %bb.0:
; AVX2-NEXT:    vpbroadcastd {{.*#+}} ymm2 = [255,255,255,255,255,255,255,255]
; AVX2-NEXT:    vpminud %ymm2, %ymm1, %ymm1
; AVX2-NEXT:    vpminud %ymm2, %ymm0, %ymm0
; AVX2-NEXT:    vpackusdw %ymm1, %ymm0, %ymm0
; AVX2-NEXT:    vpermq {{.*#+}} ymm0 = ymm0[0,2,1,3]
; AVX2-NEXT:    vextracti128 $1, %ymm0, %xmm1
; AVX2-NEXT:    vpackuswb %xmm1, %xmm0, %xmm0
; AVX2-NEXT:    vzeroupper
; AVX2-NEXT:    retq
;
; AVX512-LABEL: trunc_usat_v16i32_v16i8:
; AVX512:       # %bb.0:
; AVX512-NEXT:    vpmovusdb %zmm0, %xmm0
; AVX512-NEXT:    vzeroupper
; AVX512-NEXT:    retq
  %1 = icmp ult <16 x i32> %a0, <i32 255, i32 255, i32 255, i32 255, i32 255, i32 255, i32 255, i32 255, i32 255, i32 255, i32 255, i32 255, i32 255, i32 255, i32 255, i32 255>
  %2 = select <16 x i1> %1, <16 x i32> %a0, <16 x i32> <i32 255, i32 255, i32 255, i32 255, i32 255, i32 255, i32 255, i32 255, i32 255, i32 255, i32 255, i32 255, i32 255, i32 255, i32 255, i32 255>
  %3 = trunc <16 x i32> %2 to <16 x i8>
  ret <16 x i8> %3
}

define <16 x i8> @trunc_usat_v16i16_v16i8(<16 x i16> %a0) {
; SSE2-LABEL: trunc_usat_v16i16_v16i8:
; SSE2:       # %bb.0:
; SSE2-NEXT:    movdqa {{.*#+}} xmm2 = [32768,32768,32768,32768,32768,32768,32768,32768]
; SSE2-NEXT:    pxor %xmm2, %xmm1
; SSE2-NEXT:    movdqa {{.*#+}} xmm3 = [33023,33023,33023,33023,33023,33023,33023,33023]
; SSE2-NEXT:    pminsw %xmm3, %xmm1
; SSE2-NEXT:    pxor %xmm2, %xmm1
; SSE2-NEXT:    pxor %xmm2, %xmm0
; SSE2-NEXT:    pminsw %xmm3, %xmm0
; SSE2-NEXT:    pxor %xmm2, %xmm0
; SSE2-NEXT:    packuswb %xmm1, %xmm0
; SSE2-NEXT:    retq
;
; SSSE3-LABEL: trunc_usat_v16i16_v16i8:
; SSSE3:       # %bb.0:
; SSSE3-NEXT:    movdqa {{.*#+}} xmm2 = [32768,32768,32768,32768,32768,32768,32768,32768]
; SSSE3-NEXT:    pxor %xmm2, %xmm1
; SSSE3-NEXT:    movdqa {{.*#+}} xmm3 = [33023,33023,33023,33023,33023,33023,33023,33023]
; SSSE3-NEXT:    pminsw %xmm3, %xmm1
; SSSE3-NEXT:    pxor %xmm2, %xmm1
; SSSE3-NEXT:    pxor %xmm2, %xmm0
; SSSE3-NEXT:    pminsw %xmm3, %xmm0
; SSSE3-NEXT:    pxor %xmm2, %xmm0
; SSSE3-NEXT:    packuswb %xmm1, %xmm0
; SSSE3-NEXT:    retq
;
; SSE41-LABEL: trunc_usat_v16i16_v16i8:
; SSE41:       # %bb.0:
; SSE41-NEXT:    movdqa {{.*#+}} xmm2 = [255,255,255,255,255,255,255,255]
; SSE41-NEXT:    pminuw %xmm2, %xmm1
; SSE41-NEXT:    pminuw %xmm2, %xmm0
; SSE41-NEXT:    packuswb %xmm1, %xmm0
; SSE41-NEXT:    retq
;
; AVX1-LABEL: trunc_usat_v16i16_v16i8:
; AVX1:       # %bb.0:
; AVX1-NEXT:    vextractf128 $1, %ymm0, %xmm1
; AVX1-NEXT:    vmovdqa {{.*#+}} xmm2 = [255,255,255,255,255,255,255,255]
; AVX1-NEXT:    vpminuw %xmm2, %xmm1, %xmm1
; AVX1-NEXT:    vpminuw %xmm2, %xmm0, %xmm0
; AVX1-NEXT:    vpackuswb %xmm1, %xmm0, %xmm0
; AVX1-NEXT:    vzeroupper
; AVX1-NEXT:    retq
;
; AVX2-LABEL: trunc_usat_v16i16_v16i8:
; AVX2:       # %bb.0:
; AVX2-NEXT:    vpminuw {{.*}}(%rip), %ymm0, %ymm0
; AVX2-NEXT:    vextracti128 $1, %ymm0, %xmm1
; AVX2-NEXT:    vpackuswb %xmm1, %xmm0, %xmm0
; AVX2-NEXT:    vzeroupper
; AVX2-NEXT:    retq
;
; AVX512F-LABEL: trunc_usat_v16i16_v16i8:
; AVX512F:       # %bb.0:
; AVX512F-NEXT:    vpminuw {{.*}}(%rip), %ymm0, %ymm0
; AVX512F-NEXT:    vpmovzxwd {{.*#+}} zmm0 = ymm0[0],zero,ymm0[1],zero,ymm0[2],zero,ymm0[3],zero,ymm0[4],zero,ymm0[5],zero,ymm0[6],zero,ymm0[7],zero,ymm0[8],zero,ymm0[9],zero,ymm0[10],zero,ymm0[11],zero,ymm0[12],zero,ymm0[13],zero,ymm0[14],zero,ymm0[15],zero
; AVX512F-NEXT:    vpmovdb %zmm0, %xmm0
; AVX512F-NEXT:    vzeroupper
; AVX512F-NEXT:    retq
;
; AVX512VL-LABEL: trunc_usat_v16i16_v16i8:
; AVX512VL:       # %bb.0:
; AVX512VL-NEXT:    vpminuw {{.*}}(%rip), %ymm0, %ymm0
; AVX512VL-NEXT:    vpmovzxwd {{.*#+}} zmm0 = ymm0[0],zero,ymm0[1],zero,ymm0[2],zero,ymm0[3],zero,ymm0[4],zero,ymm0[5],zero,ymm0[6],zero,ymm0[7],zero,ymm0[8],zero,ymm0[9],zero,ymm0[10],zero,ymm0[11],zero,ymm0[12],zero,ymm0[13],zero,ymm0[14],zero,ymm0[15],zero
; AVX512VL-NEXT:    vpmovdb %zmm0, %xmm0
; AVX512VL-NEXT:    vzeroupper
; AVX512VL-NEXT:    retq
;
; AVX512BW-LABEL: trunc_usat_v16i16_v16i8:
; AVX512BW:       # %bb.0:
; AVX512BW-NEXT:    vpminuw {{.*}}(%rip), %ymm0, %ymm0
; AVX512BW-NEXT:    vpmovwb %zmm0, %ymm0
; AVX512BW-NEXT:    # kill: def $xmm0 killed $xmm0 killed $ymm0
; AVX512BW-NEXT:    vzeroupper
; AVX512BW-NEXT:    retq
;
; AVX512BWVL-LABEL: trunc_usat_v16i16_v16i8:
; AVX512BWVL:       # %bb.0:
; AVX512BWVL-NEXT:    vpmovuswb %ymm0, %xmm0
; AVX512BWVL-NEXT:    vzeroupper
; AVX512BWVL-NEXT:    retq
  %1 = icmp ult <16 x i16> %a0, <i16 255, i16 255, i16 255, i16 255, i16 255, i16 255, i16 255, i16 255, i16 255, i16 255, i16 255, i16 255, i16 255, i16 255, i16 255, i16 255>
  %2 = select <16 x i1> %1, <16 x i16> %a0, <16 x i16> <i16 255, i16 255, i16 255, i16 255, i16 255, i16 255, i16 255, i16 255, i16 255, i16 255, i16 255, i16 255, i16 255, i16 255, i16 255, i16 255>
  %3 = trunc <16 x i16> %2 to <16 x i8>
  ret <16 x i8> %3
}

define <32 x i8> @trunc_usat_v32i16_v32i8(<32 x i16> %a0) {
; SSE2-LABEL: trunc_usat_v32i16_v32i8:
; SSE2:       # %bb.0:
; SSE2-NEXT:    movdqa {{.*#+}} xmm4 = [32768,32768,32768,32768,32768,32768,32768,32768]
; SSE2-NEXT:    pxor %xmm4, %xmm3
; SSE2-NEXT:    movdqa {{.*#+}} xmm5 = [33023,33023,33023,33023,33023,33023,33023,33023]
; SSE2-NEXT:    pminsw %xmm5, %xmm3
; SSE2-NEXT:    pxor %xmm4, %xmm3
; SSE2-NEXT:    pxor %xmm4, %xmm2
; SSE2-NEXT:    pminsw %xmm5, %xmm2
; SSE2-NEXT:    pxor %xmm4, %xmm2
; SSE2-NEXT:    packuswb %xmm3, %xmm2
; SSE2-NEXT:    pxor %xmm4, %xmm1
; SSE2-NEXT:    pminsw %xmm5, %xmm1
; SSE2-NEXT:    pxor %xmm4, %xmm1
; SSE2-NEXT:    pxor %xmm4, %xmm0
; SSE2-NEXT:    pminsw %xmm5, %xmm0
; SSE2-NEXT:    pxor %xmm4, %xmm0
; SSE2-NEXT:    packuswb %xmm1, %xmm0
; SSE2-NEXT:    movdqa %xmm2, %xmm1
; SSE2-NEXT:    retq
;
; SSSE3-LABEL: trunc_usat_v32i16_v32i8:
; SSSE3:       # %bb.0:
; SSSE3-NEXT:    movdqa {{.*#+}} xmm4 = [32768,32768,32768,32768,32768,32768,32768,32768]
; SSSE3-NEXT:    pxor %xmm4, %xmm3
; SSSE3-NEXT:    movdqa {{.*#+}} xmm5 = [33023,33023,33023,33023,33023,33023,33023,33023]
; SSSE3-NEXT:    pminsw %xmm5, %xmm3
; SSSE3-NEXT:    pxor %xmm4, %xmm3
; SSSE3-NEXT:    pxor %xmm4, %xmm2
; SSSE3-NEXT:    pminsw %xmm5, %xmm2
; SSSE3-NEXT:    pxor %xmm4, %xmm2
; SSSE3-NEXT:    packuswb %xmm3, %xmm2
; SSSE3-NEXT:    pxor %xmm4, %xmm1
; SSSE3-NEXT:    pminsw %xmm5, %xmm1
; SSSE3-NEXT:    pxor %xmm4, %xmm1
; SSSE3-NEXT:    pxor %xmm4, %xmm0
; SSSE3-NEXT:    pminsw %xmm5, %xmm0
; SSSE3-NEXT:    pxor %xmm4, %xmm0
; SSSE3-NEXT:    packuswb %xmm1, %xmm0
; SSSE3-NEXT:    movdqa %xmm2, %xmm1
; SSSE3-NEXT:    retq
;
; SSE41-LABEL: trunc_usat_v32i16_v32i8:
; SSE41:       # %bb.0:
; SSE41-NEXT:    movdqa {{.*#+}} xmm4 = [255,255,255,255,255,255,255,255]
; SSE41-NEXT:    pminuw %xmm4, %xmm3
; SSE41-NEXT:    pminuw %xmm4, %xmm2
; SSE41-NEXT:    packuswb %xmm3, %xmm2
; SSE41-NEXT:    pminuw %xmm4, %xmm1
; SSE41-NEXT:    pminuw %xmm4, %xmm0
; SSE41-NEXT:    packuswb %xmm1, %xmm0
; SSE41-NEXT:    movdqa %xmm2, %xmm1
; SSE41-NEXT:    retq
;
; AVX1-LABEL: trunc_usat_v32i16_v32i8:
; AVX1:       # %bb.0:
; AVX1-NEXT:    vextractf128 $1, %ymm0, %xmm2
; AVX1-NEXT:    vmovdqa {{.*#+}} xmm3 = [255,255,255,255,255,255,255,255]
; AVX1-NEXT:    vpminuw %xmm3, %xmm2, %xmm2
; AVX1-NEXT:    vpminuw %xmm3, %xmm0, %xmm0
; AVX1-NEXT:    vpackuswb %xmm2, %xmm0, %xmm0
; AVX1-NEXT:    vextractf128 $1, %ymm1, %xmm2
; AVX1-NEXT:    vpminuw %xmm3, %xmm2, %xmm2
; AVX1-NEXT:    vpminuw %xmm3, %xmm1, %xmm1
; AVX1-NEXT:    vpackuswb %xmm2, %xmm1, %xmm1
; AVX1-NEXT:    vinsertf128 $1, %xmm1, %ymm0, %ymm0
; AVX1-NEXT:    retq
;
; AVX2-LABEL: trunc_usat_v32i16_v32i8:
; AVX2:       # %bb.0:
; AVX2-NEXT:    vmovdqa {{.*#+}} ymm2 = [255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255]
; AVX2-NEXT:    vpminuw %ymm2, %ymm1, %ymm1
; AVX2-NEXT:    vpminuw %ymm2, %ymm0, %ymm0
; AVX2-NEXT:    vpackuswb %ymm1, %ymm0, %ymm0
; AVX2-NEXT:    vpermq {{.*#+}} ymm0 = ymm0[0,2,1,3]
; AVX2-NEXT:    retq
;
; AVX512F-LABEL: trunc_usat_v32i16_v32i8:
; AVX512F:       # %bb.0:
; AVX512F-NEXT:    vmovdqa {{.*#+}} ymm2 = [255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255]
; AVX512F-NEXT:    vpminuw %ymm2, %ymm1, %ymm1
; AVX512F-NEXT:    vpminuw %ymm2, %ymm0, %ymm0
; AVX512F-NEXT:    vpmovzxwd {{.*#+}} zmm0 = ymm0[0],zero,ymm0[1],zero,ymm0[2],zero,ymm0[3],zero,ymm0[4],zero,ymm0[5],zero,ymm0[6],zero,ymm0[7],zero,ymm0[8],zero,ymm0[9],zero,ymm0[10],zero,ymm0[11],zero,ymm0[12],zero,ymm0[13],zero,ymm0[14],zero,ymm0[15],zero
; AVX512F-NEXT:    vpmovdb %zmm0, %xmm0
; AVX512F-NEXT:    vpmovzxwd {{.*#+}} zmm1 = ymm1[0],zero,ymm1[1],zero,ymm1[2],zero,ymm1[3],zero,ymm1[4],zero,ymm1[5],zero,ymm1[6],zero,ymm1[7],zero,ymm1[8],zero,ymm1[9],zero,ymm1[10],zero,ymm1[11],zero,ymm1[12],zero,ymm1[13],zero,ymm1[14],zero,ymm1[15],zero
; AVX512F-NEXT:    vpmovdb %zmm1, %xmm1
; AVX512F-NEXT:    vinserti128 $1, %xmm1, %ymm0, %ymm0
; AVX512F-NEXT:    retq
;
; AVX512VL-LABEL: trunc_usat_v32i16_v32i8:
; AVX512VL:       # %bb.0:
; AVX512VL-NEXT:    vmovdqa {{.*#+}} ymm2 = [255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255]
; AVX512VL-NEXT:    vpminuw %ymm2, %ymm1, %ymm1
; AVX512VL-NEXT:    vpminuw %ymm2, %ymm0, %ymm0
; AVX512VL-NEXT:    vpmovzxwd {{.*#+}} zmm0 = ymm0[0],zero,ymm0[1],zero,ymm0[2],zero,ymm0[3],zero,ymm0[4],zero,ymm0[5],zero,ymm0[6],zero,ymm0[7],zero,ymm0[8],zero,ymm0[9],zero,ymm0[10],zero,ymm0[11],zero,ymm0[12],zero,ymm0[13],zero,ymm0[14],zero,ymm0[15],zero
; AVX512VL-NEXT:    vpmovdb %zmm0, %xmm0
; AVX512VL-NEXT:    vpmovzxwd {{.*#+}} zmm1 = ymm1[0],zero,ymm1[1],zero,ymm1[2],zero,ymm1[3],zero,ymm1[4],zero,ymm1[5],zero,ymm1[6],zero,ymm1[7],zero,ymm1[8],zero,ymm1[9],zero,ymm1[10],zero,ymm1[11],zero,ymm1[12],zero,ymm1[13],zero,ymm1[14],zero,ymm1[15],zero
; AVX512VL-NEXT:    vpmovdb %zmm1, %xmm1
; AVX512VL-NEXT:    vinserti128 $1, %xmm1, %ymm0, %ymm0
; AVX512VL-NEXT:    retq
;
; AVX512BW-LABEL: trunc_usat_v32i16_v32i8:
; AVX512BW:       # %bb.0:
; AVX512BW-NEXT:    vpmovuswb %zmm0, %ymm0
; AVX512BW-NEXT:    retq
;
; AVX512BWVL-LABEL: trunc_usat_v32i16_v32i8:
; AVX512BWVL:       # %bb.0:
; AVX512BWVL-NEXT:    vpmovuswb %zmm0, %ymm0
; AVX512BWVL-NEXT:    retq
  %1 = icmp ult <32 x i16> %a0, <i16 255, i16 255, i16 255, i16 255, i16 255, i16 255, i16 255, i16 255, i16 255, i16 255, i16 255, i16 255, i16 255, i16 255, i16 255, i16 255, i16 255, i16 255, i16 255, i16 255, i16 255, i16 255, i16 255, i16 255, i16 255, i16 255, i16 255, i16 255, i16 255, i16 255, i16 255, i16 255>
  %2 = select <32 x i1> %1, <32 x i16> %a0, <32 x i16> <i16 255, i16 255, i16 255, i16 255, i16 255, i16 255, i16 255, i16 255, i16 255, i16 255, i16 255, i16 255, i16 255, i16 255, i16 255, i16 255, i16 255, i16 255, i16 255, i16 255, i16 255, i16 255, i16 255, i16 255, i16 255, i16 255, i16 255, i16 255, i16 255, i16 255, i16 255, i16 255>
  %3 = trunc <32 x i16> %2 to <32 x i8>
  ret <32 x i8> %3
}