; NOTE: Assertions have been autogenerated by utils/update_test_checks.py UTC_ARGS: --version 3
; RUN: opt -safe-stack -S -mtriple=i386-pc-linux-gnu < %s -o - | FileCheck %s --check-prefix=I386
; RUN: opt -safe-stack -S -mtriple=x86_64-pc-linux-gnu < %s -o - | FileCheck %s --check-prefix=X86-64
; RUN: opt -passes=safe-stack -S -mtriple=i386-pc-linux-gnu < %s -o - | FileCheck %s --check-prefix=I386
; RUN: opt -passes=safe-stack -S -mtriple=x86_64-pc-linux-gnu < %s -o - | FileCheck %s --check-prefix=X86-64
%struct.__jmp_buf_tag = type { [8 x i64], i32, %struct.__sigset_t }
%struct.__sigset_t = type { [16 x i64] }
@.str = private unnamed_addr constant [4 x i8] c"%s\0A\00", align 1
@buf = internal global [1 x %struct.__jmp_buf_tag] zeroinitializer, align 16
; setjmp/longjmp test with a dynamically sized array.
; Requires protector: the dynamic alloca lives on the unsafe stack, so the
; unsafe stack pointer must be saved and restored around the returns_twice call.
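;
; A rough C equivalent of @foo, for illustration only; the C source (including the
; int element type and the _setjmp spelling) is an assumption reconstructed from the
; IR below, and the test operates directly on that IR:
;
;   #include <setjmp.h>
;
;   static jmp_buf buf;
;   void funcall(int *);
;
;   int foo(int size) {
;     int a[size];     /* variable-length array -> dynamic unsafe-stack allocation */
;     _setjmp(buf);    /* returns_twice call: unsafe stack pointer reloaded after it */
;     funcall(a);
;     return 0;
;   }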
define i32 @foo(i32 %size) nounwind uwtable safestack {
; I386-LABEL: define i32 @foo(
; I386-SAME: i32 [[SIZE:%.*]]) #[[ATTR0:[0-9]+]] {
; I386-NEXT: entry:
; I386-NEXT: [[UNSAFE_STACK_PTR:%.*]] = load ptr, ptr @__safestack_unsafe_stack_ptr, align 4
; I386-NEXT: [[UNSAFE_STACK_DYNAMIC_PTR:%.*]] = alloca ptr, align 4
; I386-NEXT: store ptr [[UNSAFE_STACK_PTR]], ptr [[UNSAFE_STACK_DYNAMIC_PTR]], align 4
; I386-NEXT: [[TMP0:%.*]] = mul i32 [[SIZE]], 4
; I386-NEXT: [[TMP1:%.*]] = load ptr, ptr @__safestack_unsafe_stack_ptr, align 4
; I386-NEXT: [[TMP2:%.*]] = ptrtoint ptr [[TMP1]] to i32
; I386-NEXT: [[TMP3:%.*]] = sub i32 [[TMP2]], [[TMP0]]
; I386-NEXT: [[TMP4:%.*]] = and i32 [[TMP3]], -16
; I386-NEXT: [[A:%.*]] = inttoptr i32 [[TMP4]] to ptr
; I386-NEXT: store ptr [[A]], ptr @__safestack_unsafe_stack_ptr, align 4
; I386-NEXT: store ptr [[A]], ptr [[UNSAFE_STACK_DYNAMIC_PTR]], align 4
; I386-NEXT: [[CALL:%.*]] = call i32 @_setjmp(ptr @buf) #[[ATTR1:[0-9]+]]
; I386-NEXT: [[TMP5:%.*]] = load ptr, ptr [[UNSAFE_STACK_DYNAMIC_PTR]], align 4
; I386-NEXT: store ptr [[TMP5]], ptr @__safestack_unsafe_stack_ptr, align 4
; I386-NEXT: call void @funcall(ptr [[A]])
; I386-NEXT: store ptr [[UNSAFE_STACK_PTR]], ptr @__safestack_unsafe_stack_ptr, align 4
; I386-NEXT: ret i32 0
;
; X86-64-LABEL: define i32 @foo(
; X86-64-SAME: i32 [[SIZE:%.*]]) #[[ATTR0:[0-9]+]] {
; X86-64-NEXT: entry:
; X86-64-NEXT: [[UNSAFE_STACK_PTR:%.*]] = load ptr, ptr @__safestack_unsafe_stack_ptr, align 8
; X86-64-NEXT: [[UNSAFE_STACK_DYNAMIC_PTR:%.*]] = alloca ptr, align 8
; X86-64-NEXT: store ptr [[UNSAFE_STACK_PTR]], ptr [[UNSAFE_STACK_DYNAMIC_PTR]], align 8
; X86-64-NEXT: [[TMP0:%.*]] = zext i32 [[SIZE]] to i64
; X86-64-NEXT: [[TMP1:%.*]] = mul i64 [[TMP0]], 4
; X86-64-NEXT: [[TMP2:%.*]] = load ptr, ptr @__safestack_unsafe_stack_ptr, align 8
; X86-64-NEXT: [[TMP3:%.*]] = ptrtoint ptr [[TMP2]] to i64
; X86-64-NEXT: [[TMP4:%.*]] = sub i64 [[TMP3]], [[TMP1]]
; X86-64-NEXT: [[TMP5:%.*]] = and i64 [[TMP4]], -16
; X86-64-NEXT: [[A:%.*]] = inttoptr i64 [[TMP5]] to ptr
; X86-64-NEXT: store ptr [[A]], ptr @__safestack_unsafe_stack_ptr, align 8
; X86-64-NEXT: store ptr [[A]], ptr [[UNSAFE_STACK_DYNAMIC_PTR]], align 8
; X86-64-NEXT: [[CALL:%.*]] = call i32 @_setjmp(ptr @buf) #[[ATTR1:[0-9]+]]
; X86-64-NEXT: [[TMP6:%.*]] = load ptr, ptr [[UNSAFE_STACK_DYNAMIC_PTR]], align 8
; X86-64-NEXT: store ptr [[TMP6]], ptr @__safestack_unsafe_stack_ptr, align 8
; X86-64-NEXT: call void @funcall(ptr [[A]])
; X86-64-NEXT: store ptr [[UNSAFE_STACK_PTR]], ptr @__safestack_unsafe_stack_ptr, align 8
; X86-64-NEXT: ret i32 0
;
entry:
  %a = alloca i32, i32 %size                          ; dynamically sized alloca, moved to the unsafe stack
  %call = call i32 @_setjmp(ptr @buf) returns_twice   ; returns_twice: stack pointer must be restored after this call
call void @funcall(ptr %a)
ret i32 0
}
declare i32 @_setjmp(ptr)
declare void @funcall(ptr)