| # NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
# RUN: llc -mtriple=aarch64-unknown-unknown -run-pass=instruction-select -verify-machineinstrs %s -o - | FileCheck %s
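# Test that G_INTRINSIC_W_SIDE_EFFECTS intrinsic(@llvm.aarch64.stlxr) selects
# the STLXR{B,H,W,X} store-release exclusive instructions for each width.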
--- |
  define i32 @test_store_release_i64(i32 %a, i64* %addr) {
    ret i32 %a
  }

  define i32 @test_store_release_i32(i32 %a, i64* %addr) {
    ret i32 %a
  }

  define void @test_store_release_i8(i32, i8 %val, i8* %addr) { ret void }
  define void @test_store_release_i16(i32, i16 %val, i16* %addr) { ret void }
...
---
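# A 64-bit value can be stored directly: expect STLXRX on the value register.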
name: test_store_release_i64
alignment: 4
legalized: true
regBankSelected: true
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $w0, $x1, $x2

    ; CHECK-LABEL: name: test_store_release_i64
    ; CHECK: liveins: $w0, $x1, $x2
    ; CHECK: [[COPY:%[0-9]+]]:gpr64 = COPY $x1
    ; CHECK: [[COPY1:%[0-9]+]]:gpr64sp = COPY $x2
    ; CHECK: early-clobber %2:gpr32 = STLXRX [[COPY]], [[COPY1]] :: (volatile store 8 into %ir.addr)
    ; CHECK: $w0 = COPY %2
    ; CHECK: RET_ReallyLR implicit $w0
    %1:gpr(s64) = COPY $x1
    %2:gpr(p0) = COPY $x2
    %3:gpr(s32) = G_INTRINSIC_W_SIDE_EFFECTS intrinsic(@llvm.aarch64.stlxr), %1(s64), %2(p0) :: (volatile store 8 into %ir.addr)
    $w0 = COPY %3(s32)
    RET_ReallyLR implicit $w0
...
---
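# A 32-bit value arrives zero-extended to 64 bits; selection should look
# through the G_ZEXT and pass the original 32-bit value to STLXRW.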
name: test_store_release_i32
alignment: 4
legalized: true
regBankSelected: true
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $w0, $w1, $x2

    ; CHECK-LABEL: name: test_store_release_i32
    ; CHECK: liveins: $w0, $w1, $x2
    ; CHECK: [[COPY:%[0-9]+]]:gpr32 = COPY $w1
    ; CHECK: [[COPY1:%[0-9]+]]:gpr64sp = COPY $x2
    ; CHECK: early-clobber %3:gpr32 = STLXRW [[COPY]], [[COPY1]] :: (volatile store 4 into %ir.addr)
    ; CHECK: $w0 = COPY %3
    ; CHECK: RET_ReallyLR implicit $w0
    %1:gpr(s32) = COPY $w1
    %2:gpr(p0) = COPY $x2
    %3:gpr(s64) = G_ZEXT %1(s32)
    %4:gpr(s32) = G_INTRINSIC_W_SIDE_EFFECTS intrinsic(@llvm.aarch64.stlxr), %3(s64), %2(p0) :: (volatile store 4 into %ir.addr)
    $w0 = COPY %4(s32)
    RET_ReallyLR implicit $w0
...
---
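# Legalization widened the 8-bit value to 64 bits and masked it with 255.
# Selection should fold the mask away and feed the low 32 bits to STLXRB.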
name: test_store_release_i8
alignment: 4
legalized: true
regBankSelected: true
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $w0, $w1, $x2

    ; CHECK-LABEL: name: test_store_release_i8
    ; CHECK: liveins: $w0, $w1, $x2
    ; CHECK: [[COPY:%[0-9]+]]:gpr32 = COPY $w1
    ; CHECK: [[COPY1:%[0-9]+]]:gpr64sp = COPY $x2
    ; CHECK: [[DEF:%[0-9]+]]:gpr64all = IMPLICIT_DEF
    ; CHECK: [[INSERT_SUBREG:%[0-9]+]]:gpr64 = INSERT_SUBREG [[DEF]], [[COPY]], %subreg.sub_32
    ; CHECK: [[COPY2:%[0-9]+]]:gpr32 = COPY [[INSERT_SUBREG]].sub_32
    ; CHECK: early-clobber %5:gpr32 = STLXRB [[COPY2]], [[COPY1]] :: (volatile store 1 into %ir.addr)
    ; CHECK: $w0 = COPY %5
    ; CHECK: RET_ReallyLR implicit $w0
    %3:gpr(s32) = COPY $w1
    %2:gpr(p0) = COPY $x2
    %6:gpr(s64) = G_CONSTANT i64 255
    %7:gpr(s64) = G_ANYEXT %3(s32)
    %4:gpr(s64) = G_AND %7, %6
    %5:gpr(s32) = G_INTRINSIC_W_SIDE_EFFECTS intrinsic(@llvm.aarch64.stlxr), %4(s64), %2(p0) :: (volatile store 1 into %ir.addr)
    $w0 = COPY %5(s32)
    RET_ReallyLR implicit $w0
...
---
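# Same as the i8 case, but masked with 65535 and selected to STLXRH.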
name: test_store_release_i16
alignment: 4
legalized: true
regBankSelected: true
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $w0, $w1, $x2

    ; CHECK-LABEL: name: test_store_release_i16
    ; CHECK: liveins: $w0, $w1, $x2
    ; CHECK: [[COPY:%[0-9]+]]:gpr32 = COPY $w1
    ; CHECK: [[COPY1:%[0-9]+]]:gpr64sp = COPY $x2
    ; CHECK: [[DEF:%[0-9]+]]:gpr64all = IMPLICIT_DEF
    ; CHECK: [[INSERT_SUBREG:%[0-9]+]]:gpr64 = INSERT_SUBREG [[DEF]], [[COPY]], %subreg.sub_32
    ; CHECK: [[COPY2:%[0-9]+]]:gpr32 = COPY [[INSERT_SUBREG]].sub_32
    ; CHECK: early-clobber %5:gpr32 = STLXRH [[COPY2]], [[COPY1]] :: (volatile store 2 into %ir.addr)
    ; CHECK: $w0 = COPY %5
    ; CHECK: RET_ReallyLR implicit $w0
    %3:gpr(s32) = COPY $w1
    %2:gpr(p0) = COPY $x2
    %6:gpr(s64) = G_CONSTANT i64 65535
    %7:gpr(s64) = G_ANYEXT %3(s32)
    %4:gpr(s64) = G_AND %7, %6
    %5:gpr(s32) = G_INTRINSIC_W_SIDE_EFFECTS intrinsic(@llvm.aarch64.stlxr), %4(s64), %2(p0) :: (volatile store 2 into %ir.addr)
    $w0 = COPY %5(s32)
    RET_ReallyLR implicit $w0
...