; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc --mtriple=loongarch32 -mattr=+d --verify-machineinstrs < %s | FileCheck %s --check-prefix=LA32
; RUN: llc --mtriple=loongarch64 -mattr=+d --verify-machineinstrs < %s | FileCheck %s --check-prefix=LA64
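
; An addrspacecast from addrspace(1) to the default address space needs no
; conversion code on LoongArch: the store should go straight through the
; incoming pointer in $a0.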
define void @cast0(ptr addrspace(1) %ptr) {
; LA32-LABEL: cast0:
; LA32:       # %bb.0:
; LA32-NEXT:    st.w $zero, $a0, 0
; LA32-NEXT:    ret
;
; LA64-LABEL: cast0:
; LA64:       # %bb.0:
; LA64-NEXT:    st.w $zero, $a0, 0
; LA64-NEXT:    ret
  %ptr0 = addrspacecast ptr addrspace(1) %ptr to ptr addrspace(0)
  store i32 0, ptr %ptr0
  ret void
}
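
; Likewise, a cast to addrspace(10) has no dedicated lowering on LoongArch, so
; the pointer is expected to reach the call to @foo unchanged; only the usual
; call frame (spill/reload of $ra) should be emitted.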
define void @cast1(ptr %ptr) {
; LA32-LABEL: cast1:
; LA32:       # %bb.0:
; LA32-NEXT:    addi.w $sp, $sp, -16
; LA32-NEXT:    .cfi_def_cfa_offset 16
; LA32-NEXT:    st.w $ra, $sp, 12 # 4-byte Folded Spill
; LA32-NEXT:    .cfi_offset 1, -4
; LA32-NEXT:    bl %plt(foo)
; LA32-NEXT:    ld.w $ra, $sp, 12 # 4-byte Folded Reload
; LA32-NEXT:    addi.w $sp, $sp, 16
; LA32-NEXT:    ret
;
; LA64-LABEL: cast1:
; LA64:       # %bb.0:
; LA64-NEXT:    addi.d $sp, $sp, -16
; LA64-NEXT:    .cfi_def_cfa_offset 16
; LA64-NEXT:    st.d $ra, $sp, 8 # 8-byte Folded Spill
; LA64-NEXT:    .cfi_offset 1, -8
; LA64-NEXT:    bl %plt(foo)
; LA64-NEXT:    ld.d $ra, $sp, 8 # 8-byte Folded Reload
; LA64-NEXT:    addi.d $sp, $sp, 16
; LA64-NEXT:    ret
  %castptr = addrspacecast ptr %ptr to ptr addrspace(10)
  call void @foo(ptr addrspace(10) %castptr)
  ret void
}

declare void @foo(ptr addrspace(10))