# NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
# RUN: llc -mtriple=riscv32 -mattr=+v -run-pass=legalizer %s -o - | FileCheck --check-prefix=HasF64 %s
# RUN: llc -mtriple=riscv32 -mattr=+Zve64x -run-pass=legalizer %s -o - | FileCheck --check-prefix=NoF64 %s
---
# Legalize G_SPLAT_VECTOR of an undef s64 into <vscale x 1 x s64> on riscv32,
# where s64 is not a legal scalar type.
# HasF64 (-mattr=+v): the two s32 G_IMPLICIT_DEF halves are merged back into
# an s64 via G_MERGE_VALUES and the G_SPLAT_VECTOR is kept (presumably because
# the s64 can live in an FP64 register — confirm against the legalizer rules).
# NoF64 (-mattr=+Zve64x): the splat is lowered to G_SPLAT_VECTOR_SPLIT_I64_VL,
# which takes the two s32 halves separately, plus a G_VMSET_VL all-ones mask.
name: splatvector_nxv1i64
legalized: false
tracksRegLiveness: true
body: |
bb.1:
; HasF64-LABEL: name: splatvector_nxv1i64
; HasF64: [[DEF:%[0-9]+]]:_(s32) = G_IMPLICIT_DEF
; HasF64-NEXT: [[DEF1:%[0-9]+]]:_(s32) = G_IMPLICIT_DEF
; HasF64-NEXT: [[MV:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[DEF]](s32), [[DEF1]](s32)
; HasF64-NEXT: [[SPLAT_VECTOR:%[0-9]+]]:_(<vscale x 1 x s64>) = G_SPLAT_VECTOR [[MV]](s64)
; HasF64-NEXT: $v8 = COPY [[SPLAT_VECTOR]](<vscale x 1 x s64>)
; HasF64-NEXT: PseudoRET implicit $v8
;
; NoF64-LABEL: name: splatvector_nxv1i64
; NoF64: [[DEF:%[0-9]+]]:_(s32) = G_IMPLICIT_DEF
; NoF64-NEXT: [[DEF1:%[0-9]+]]:_(s32) = G_IMPLICIT_DEF
; NoF64-NEXT: [[VMSET_VL:%[0-9]+]]:_(<vscale x 1 x s1>) = G_VMSET_VL $x0
; NoF64-NEXT: [[DEF2:%[0-9]+]]:_(<vscale x 1 x s64>) = G_IMPLICIT_DEF
; NoF64-NEXT: [[SPLAT_VECTOR_SPLIT_I64_VL:%[0-9]+]]:_(<vscale x 1 x s64>) = G_SPLAT_VECTOR_SPLIT_I64_VL [[DEF2]], [[DEF]](s32), [[DEF1]], $x0
; NoF64-NEXT: $v8 = COPY [[SPLAT_VECTOR_SPLIT_I64_VL]](<vscale x 1 x s64>)
; NoF64-NEXT: PseudoRET implicit $v8
%0:_(s64) = G_IMPLICIT_DEF
%1:_(<vscale x 1 x s64>) = G_SPLAT_VECTOR %0(s64)
$v8 = COPY %1(<vscale x 1 x s64>)
PseudoRET implicit $v8
...
---
# Same as splatvector_nxv1i64 but at LMUL=2 (<vscale x 2 x s64>, returned in
# the $v8m2 register group): G_SPLAT_VECTOR of an undef s64 is kept (after
# G_MERGE_VALUES of the s32 halves) under HasF64, and lowered to
# G_SPLAT_VECTOR_SPLIT_I64_VL under NoF64.
name: splatvector_nxv2i64
legalized: false
tracksRegLiveness: true
body: |
bb.1:
; HasF64-LABEL: name: splatvector_nxv2i64
; HasF64: [[DEF:%[0-9]+]]:_(s32) = G_IMPLICIT_DEF
; HasF64-NEXT: [[DEF1:%[0-9]+]]:_(s32) = G_IMPLICIT_DEF
; HasF64-NEXT: [[MV:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[DEF]](s32), [[DEF1]](s32)
; HasF64-NEXT: [[SPLAT_VECTOR:%[0-9]+]]:_(<vscale x 2 x s64>) = G_SPLAT_VECTOR [[MV]](s64)
; HasF64-NEXT: $v8m2 = COPY [[SPLAT_VECTOR]](<vscale x 2 x s64>)
; HasF64-NEXT: PseudoRET implicit $v8m2
;
; NoF64-LABEL: name: splatvector_nxv2i64
; NoF64: [[DEF:%[0-9]+]]:_(s32) = G_IMPLICIT_DEF
; NoF64-NEXT: [[DEF1:%[0-9]+]]:_(s32) = G_IMPLICIT_DEF
; NoF64-NEXT: [[VMSET_VL:%[0-9]+]]:_(<vscale x 2 x s1>) = G_VMSET_VL $x0
; NoF64-NEXT: [[DEF2:%[0-9]+]]:_(<vscale x 2 x s64>) = G_IMPLICIT_DEF
; NoF64-NEXT: [[SPLAT_VECTOR_SPLIT_I64_VL:%[0-9]+]]:_(<vscale x 2 x s64>) = G_SPLAT_VECTOR_SPLIT_I64_VL [[DEF2]], [[DEF]](s32), [[DEF1]], $x0
; NoF64-NEXT: $v8m2 = COPY [[SPLAT_VECTOR_SPLIT_I64_VL]](<vscale x 2 x s64>)
; NoF64-NEXT: PseudoRET implicit $v8m2
%0:_(s64) = G_IMPLICIT_DEF
%1:_(<vscale x 2 x s64>) = G_SPLAT_VECTOR %0(s64)
$v8m2 = COPY %1(<vscale x 2 x s64>)
PseudoRET implicit $v8m2
...
---
# Same as splatvector_nxv1i64 but at LMUL=4 (<vscale x 4 x s64>, returned in
# the $v8m4 register group): G_SPLAT_VECTOR of an undef s64 is kept (after
# G_MERGE_VALUES of the s32 halves) under HasF64, and lowered to
# G_SPLAT_VECTOR_SPLIT_I64_VL under NoF64.
name: splatvector_nxv4i64
legalized: false
tracksRegLiveness: true
body: |
bb.1:
; HasF64-LABEL: name: splatvector_nxv4i64
; HasF64: [[DEF:%[0-9]+]]:_(s32) = G_IMPLICIT_DEF
; HasF64-NEXT: [[DEF1:%[0-9]+]]:_(s32) = G_IMPLICIT_DEF
; HasF64-NEXT: [[MV:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[DEF]](s32), [[DEF1]](s32)
; HasF64-NEXT: [[SPLAT_VECTOR:%[0-9]+]]:_(<vscale x 4 x s64>) = G_SPLAT_VECTOR [[MV]](s64)
; HasF64-NEXT: $v8m4 = COPY [[SPLAT_VECTOR]](<vscale x 4 x s64>)
; HasF64-NEXT: PseudoRET implicit $v8m4
;
; NoF64-LABEL: name: splatvector_nxv4i64
; NoF64: [[DEF:%[0-9]+]]:_(s32) = G_IMPLICIT_DEF
; NoF64-NEXT: [[DEF1:%[0-9]+]]:_(s32) = G_IMPLICIT_DEF
; NoF64-NEXT: [[VMSET_VL:%[0-9]+]]:_(<vscale x 4 x s1>) = G_VMSET_VL $x0
; NoF64-NEXT: [[DEF2:%[0-9]+]]:_(<vscale x 4 x s64>) = G_IMPLICIT_DEF
; NoF64-NEXT: [[SPLAT_VECTOR_SPLIT_I64_VL:%[0-9]+]]:_(<vscale x 4 x s64>) = G_SPLAT_VECTOR_SPLIT_I64_VL [[DEF2]], [[DEF]](s32), [[DEF1]], $x0
; NoF64-NEXT: $v8m4 = COPY [[SPLAT_VECTOR_SPLIT_I64_VL]](<vscale x 4 x s64>)
; NoF64-NEXT: PseudoRET implicit $v8m4
%0:_(s64) = G_IMPLICIT_DEF
%1:_(<vscale x 4 x s64>) = G_SPLAT_VECTOR %0(s64)
$v8m4 = COPY %1(<vscale x 4 x s64>)
PseudoRET implicit $v8m4
...
---
# Same as splatvector_nxv1i64 but at LMUL=8 (<vscale x 8 x s64>, returned in
# the $v8m8 register group): G_SPLAT_VECTOR of an undef s64 is kept (after
# G_MERGE_VALUES of the s32 halves) under HasF64, and lowered to
# G_SPLAT_VECTOR_SPLIT_I64_VL under NoF64.
name: splatvector_nxv8i64
legalized: false
tracksRegLiveness: true
body: |
bb.1:
; HasF64-LABEL: name: splatvector_nxv8i64
; HasF64: [[DEF:%[0-9]+]]:_(s32) = G_IMPLICIT_DEF
; HasF64-NEXT: [[DEF1:%[0-9]+]]:_(s32) = G_IMPLICIT_DEF
; HasF64-NEXT: [[MV:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[DEF]](s32), [[DEF1]](s32)
; HasF64-NEXT: [[SPLAT_VECTOR:%[0-9]+]]:_(<vscale x 8 x s64>) = G_SPLAT_VECTOR [[MV]](s64)
; HasF64-NEXT: $v8m8 = COPY [[SPLAT_VECTOR]](<vscale x 8 x s64>)
; HasF64-NEXT: PseudoRET implicit $v8m8
;
; NoF64-LABEL: name: splatvector_nxv8i64
; NoF64: [[DEF:%[0-9]+]]:_(s32) = G_IMPLICIT_DEF
; NoF64-NEXT: [[DEF1:%[0-9]+]]:_(s32) = G_IMPLICIT_DEF
; NoF64-NEXT: [[VMSET_VL:%[0-9]+]]:_(<vscale x 8 x s1>) = G_VMSET_VL $x0
; NoF64-NEXT: [[DEF2:%[0-9]+]]:_(<vscale x 8 x s64>) = G_IMPLICIT_DEF
; NoF64-NEXT: [[SPLAT_VECTOR_SPLIT_I64_VL:%[0-9]+]]:_(<vscale x 8 x s64>) = G_SPLAT_VECTOR_SPLIT_I64_VL [[DEF2]], [[DEF]](s32), [[DEF1]], $x0
; NoF64-NEXT: $v8m8 = COPY [[SPLAT_VECTOR_SPLIT_I64_VL]](<vscale x 8 x s64>)
; NoF64-NEXT: PseudoRET implicit $v8m8
%0:_(s64) = G_IMPLICIT_DEF
%1:_(<vscale x 8 x s64>) = G_SPLAT_VECTOR %0(s64)
$v8m8 = COPY %1(<vscale x 8 x s64>)
PseudoRET implicit $v8m8
...