; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+avx512bf16 | FileCheck %s --check-prefix=X64
; RUN: llc < %s -mtriple=i686-unknown-unknown -mattr=+avx512bf16 | FileCheck %s --check-prefix=X86

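; Test that loads and stores of <8 x bfloat> and <16 x bfloat> lower to plain
; vector moves under AVX512BF16: vmovups for unaligned accesses, vmovaps for
; sufficiently aligned ones.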
define dso_local void @funbf16(ptr readonly %src, ptr writeonly %dst) {
; X64-LABEL: funbf16:
; X64:       # %bb.0: # %entry
; X64-NEXT:    vmovups (%rdi), %xmm0
; X64-NEXT:    vmovups %xmm0, (%rsi)
; X64-NEXT:    vmovaps (%rdi), %xmm0
; X64-NEXT:    vmovaps %xmm0, (%rsi)
; X64-NEXT:    vmovups (%rdi), %ymm0
; X64-NEXT:    vmovups %ymm0, (%rsi)
; X64-NEXT:    vmovaps (%rdi), %ymm0
; X64-NEXT:    vmovaps %ymm0, (%rsi)
; X64-NEXT:    vzeroupper
; X64-NEXT:    retq
;
; X86-LABEL: funbf16:
; X86:       # %bb.0: # %entry
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    vmovups (%ecx), %xmm0
; X86-NEXT:    vmovups %xmm0, (%eax)
; X86-NEXT:    vmovaps (%ecx), %xmm0
; X86-NEXT:    vmovaps %xmm0, (%eax)
; X86-NEXT:    vmovups (%ecx), %ymm0
; X86-NEXT:    vmovups %ymm0, (%eax)
; X86-NEXT:    vmovaps (%ecx), %ymm0
; X86-NEXT:    vmovaps %ymm0, (%eax)
; X86-NEXT:    vzeroupper
; X86-NEXT:    retl
entry:
  ; Unaligned <8 x bfloat> load/store: expect vmovups (xmm).
  %0 = load <8 x bfloat>, ptr %src, align 1
  store <8 x bfloat> %0, ptr %dst, align 1
  ; Aligned <8 x bfloat> load/store: expect vmovaps (xmm).
  %1 = load <8 x bfloat>, ptr %src, align 32
  store <8 x bfloat> %1, ptr %dst, align 32
  ; Unaligned <16 x bfloat> load/store: expect vmovups (ymm).
  %2 = load <16 x bfloat>, ptr %src, align 1
  store <16 x bfloat> %2, ptr %dst, align 1
  ; Aligned <16 x bfloat> load/store: expect vmovaps (ymm).
  %3 = load <16 x bfloat>, ptr %src, align 32
  store <16 x bfloat> %3, ptr %dst, align 32
  ret void
}