; trunc-load.ll
; NOTE: Assertions have been autogenerated by utils/update_test_checks.py
; RUN: opt < %s -instcombine -S -data-layout="e-n16:32:64" | FileCheck %s --check-prefixes=CHECK,LE
; RUN: opt < %s -instcombine -S -data-layout="E-n16:32:64" | FileCheck %s --check-prefixes=CHECK,BE
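
; Note on the data layouts above: they differ only in endianness ("e" = little,
; "E" = big), and "n16:32:64" lists the target's native integer widths. i8 is
; deliberately absent from that list; see @truncload_type below.
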
; Don't narrow if it would lose information about the dereferenceable range of the pointer.
define i32 @truncload_no_deref(i64* %ptr) {
; CHECK-LABEL: @truncload_no_deref(
; CHECK-NEXT: [[X:%.*]] = load i64, i64* [[PTR:%.*]], align 4
; CHECK-NEXT: [[R:%.*]] = trunc i64 [[X]] to i32
; CHECK-NEXT: ret i32 [[R]]
;
%x = load i64, i64* %ptr
%r = trunc i64 %x to i32
ret i32 %r
}

define i32 @truncload_small_deref(i64* dereferenceable(7) %ptr) {
; CHECK-LABEL: @truncload_small_deref(
; CHECK-NEXT: [[X:%.*]] = load i64, i64* [[PTR:%.*]], align 4
; CHECK-NEXT: [[R:%.*]] = trunc i64 [[X]] to i32
; CHECK-NEXT: ret i32 [[R]]
;
%x = load i64, i64* %ptr
%r = trunc i64 %x to i32
ret i32 %r
}
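
; The i64 load itself guarantees 8 dereferenceable bytes at that point; with no
; attribute, or with only dereferenceable(7) on the argument, a narrowed i32
; load could not carry that fact forward, so the two tests above are expected
; to stay as written (illustrative note, not part of the original checks).
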
; On little-endian, we can narrow the load without an offset.
define i32 @truncload_deref(i64* dereferenceable(8) %ptr) {
; CHECK-LABEL: @truncload_deref(
; CHECK-NEXT: [[X:%.*]] = load i64, i64* [[PTR:%.*]], align 4
; CHECK-NEXT: [[R:%.*]] = trunc i64 [[X]] to i32
; CHECK-NEXT: ret i32 [[R]]
;
%x = load i64, i64* %ptr
%r = trunc i64 %x to i32
ret i32 %r
}
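
; A sketch of the narrowed form this test anticipates (illustrative only, not
; checked above): reuse the low 32 bits in place on little-endian, or step past
; the high half on big-endian, e.g.
;   %p = bitcast i64* %ptr to i32*
;   ; LE: %r = load i32, i32* %p, align 4
;   ; BE: %hi = getelementptr i32, i32* %p, i64 1
;   ;     %r  = load i32, i32* %hi, align 4
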
; Preserve alignment.
define i16 @truncload_align(i32* dereferenceable(14) %ptr) {
; CHECK-LABEL: @truncload_align(
; CHECK-NEXT: [[X:%.*]] = load i32, i32* [[PTR:%.*]], align 16
; CHECK-NEXT: [[R:%.*]] = trunc i32 [[X]] to i16
; CHECK-NEXT: ret i16 [[R]]
;
%x = load i32, i32* %ptr, align 16
%r = trunc i32 %x to i16
ret i16 %r
}
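
; If the load above were narrowed in place, an i16 load at offset 0 could keep
; the original align 16; a big-endian variant reading the low half at offset 2
; would only be guaranteed align 2 (illustrative note, not checked above).
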
; Negative test - extra use means we would not eliminate the original load.
declare void @use(i64)
define i32 @truncload_extra_use(i64* dereferenceable(100) %ptr) {
; CHECK-LABEL: @truncload_extra_use(
; CHECK-NEXT: [[X:%.*]] = load i64, i64* [[PTR:%.*]], align 2
; CHECK-NEXT: call void @use(i64 [[X]])
; CHECK-NEXT: [[R:%.*]] = trunc i64 [[X]] to i32
; CHECK-NEXT: ret i32 [[R]]
;
%x = load i64, i64* %ptr, align 2
call void @use(i64 %x)
%r = trunc i64 %x to i32
ret i32 %r
}

; Negative test - don't create a load if the type is not allowed by the data-layout.
define i8 @truncload_type(i64* dereferenceable(9) %ptr) {
; CHECK-LABEL: @truncload_type(
; CHECK-NEXT: [[X:%.*]] = load i64, i64* [[PTR:%.*]], align 2
; CHECK-NEXT: [[R:%.*]] = trunc i64 [[X]] to i8
; CHECK-NEXT: ret i8 [[R]]
;
%x = load i64, i64* %ptr, align 2
%r = trunc i64 %x to i8
ret i8 %r
}
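
; i8 is not among the native widths in "n16:32:64", so a narrowed i8 load would
; introduce a non-native type; that is the condition this test guards against.
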
; Negative test - don't transform volatiles.
define i32 @truncload_volatile(i64* dereferenceable(8) %ptr) {
; CHECK-LABEL: @truncload_volatile(
; CHECK-NEXT: [[X:%.*]] = load volatile i64, i64* [[PTR:%.*]], align 8
; CHECK-NEXT: [[R:%.*]] = trunc i64 [[X]] to i32
; CHECK-NEXT: ret i32 [[R]]
;
%x = load volatile i64, i64* %ptr, align 8
%r = trunc i64 %x to i32
ret i32 %r
}

; Preserve address space.
define i32 @truncload_address_space(i64 addrspace(1)* dereferenceable(8) %ptr) {
; CHECK-LABEL: @truncload_address_space(
; CHECK-NEXT: [[X:%.*]] = load i64, i64 addrspace(1)* [[PTR:%.*]], align 4
; CHECK-NEXT: [[R:%.*]] = trunc i64 [[X]] to i32
; CHECK-NEXT: ret i32 [[R]]
;
%x = load i64, i64 addrspace(1)* %ptr, align 4
%r = trunc i64 %x to i32
ret i32 %r
}