; NOTE: from llvm-mirror (https://github.com/RPCS3/llvm-mirror.git),
; test/CodeGen/AArch64/arm64_32-gep-sink.ll.
; Added by "AArch64: support arm64_32, an ILP32 slice for watchOS"
; (llvm-svn 371722), the main CodeGen patch supporting the arm64_32 watchOS
; ABI in LLVM. FastISel is mostly disabled for now since it would generate
; incorrect code for ILP32.

; RUN: opt -codegenprepare -mtriple=arm64_32-apple-ios %s -S -o - | FileCheck %s
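
; CodeGenPrepare "sinks" an address computation into the block that uses it,
; so that SelectionDAG (which selects one basic block at a time) can fold the
; offset into the load's addressing mode. The sunk address is rebuilt as a
; byte-wise GEP on an i8* bitcast of the base, which is the shape the CHECK
; lines in each function expect. On arm64_32 pointers are 32 bits wide while
; the offset here is i64, hence a dedicated test for the ILP32 slice.
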
define void @test_simple_sink(i1* %base, i64 %offset) {
; CHECK-LABEL: @test_simple_sink
; CHECK: next:
; CHECK: [[BASE8:%.*]] = bitcast i1* %base to i8*
; CHECK: [[ADDR8:%.*]] = getelementptr i8, i8* [[BASE8]], i64 %offset
; CHECK: [[ADDR:%.*]] = bitcast i8* [[ADDR8]] to i1*
; CHECK: load volatile i1, i1* [[ADDR]]
  %addr = getelementptr i1, i1* %base, i64 %offset
  %tst = load i1, i1* %addr
  br i1 %tst, label %next, label %end

next:
  load volatile i1, i1* %addr
  ret void

end:
  ret void
}
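
; As in @test_simple_sink, but the original GEP is inbounds; the inbounds flag
; is expected to be preserved on the sunk byte-wise GEP.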
define void @test_inbounds_sink(i1* %base, i64 %offset) {
; CHECK-LABEL: @test_inbounds_sink
; CHECK: next:
; CHECK: [[BASE8:%.*]] = bitcast i1* %base to i8*
; CHECK: [[ADDR8:%.*]] = getelementptr inbounds i8, i8* [[BASE8]], i64 %offset
; CHECK: [[ADDR:%.*]] = bitcast i8* [[ADDR8]] to i1*
; CHECK: load volatile i1, i1* [[ADDR]]
  %addr = getelementptr inbounds i1, i1* %base, i64 %offset
  %tst = load i1, i1* %addr
  br i1 %tst, label %next, label %end

next:
  load volatile i1, i1* %addr
  ret void

end:
  ret void
}

; No address derived via an add can be guaranteed inbounds: even though the
; add below carries nuw/nsw, the pointer is reconstructed with inttoptr, so
; the sunk GEP must not be marked inbounds.
define void @test_add_sink(i1* %base, i64 %offset) {
; CHECK-LABEL: @test_add_sink
; CHECK: next:
; CHECK: [[BASE8:%.*]] = bitcast i1* %base to i8*
; CHECK: [[ADDR8:%.*]] = getelementptr i8, i8* [[BASE8]], i64 %offset
; CHECK: [[ADDR:%.*]] = bitcast i8* [[ADDR8]] to i1*
; CHECK: load volatile i1, i1* [[ADDR]]
  %base64 = ptrtoint i1* %base to i64
  %addr64 = add nsw nuw i64 %base64, %offset
  %addr = inttoptr i64 %addr64 to i1*
  %tst = load i1, i1* %addr
  br i1 %tst, label %next, label %end

next:
  load volatile i1, i1* %addr
  ret void

end:
  ret void
}
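
; A typical way to run just this file from an LLVM build directory (assuming
; a lit-enabled build; exact paths depend on your checkout layout):
;   ./bin/llvm-lit -v ../llvm/test/CodeGen/AArch64/arm64_32-gep-sink.ll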