@@ -58,6 +58,10 @@ use stdarch_test::assert_instr;
 #[cfg_attr(not(target_arch = "arm"), target_feature(enable = "aes"))]
 #[cfg_attr(target_arch = "arm", target_feature(enable = "crypto,v8"))]
 #[cfg_attr(test, assert_instr(aese))]
+#[cfg_attr(
+    not(target_arch = "arm"),
+    stable(feature = "neon_intrinsics", since = "CURRENT_RUSTC_VERSION")
+)]
 pub unsafe fn vaeseq_u8(data: uint8x16_t, key: uint8x16_t) -> uint8x16_t {
     vaeseq_u8_(data, key)
 }
@@ -69,6 +73,10 @@ pub unsafe fn vaeseq_u8(data: uint8x16_t, key: uint8x16_t) -> uint8x16_t {
 #[cfg_attr(not(target_arch = "arm"), target_feature(enable = "aes"))]
 #[cfg_attr(target_arch = "arm", target_feature(enable = "crypto,v8"))]
 #[cfg_attr(test, assert_instr(aesd))]
+#[cfg_attr(
+    not(target_arch = "arm"),
+    stable(feature = "neon_intrinsics", since = "CURRENT_RUSTC_VERSION")
+)]
 pub unsafe fn vaesdq_u8(data: uint8x16_t, key: uint8x16_t) -> uint8x16_t {
     vaesdq_u8_(data, key)
 }
@@ -80,6 +88,10 @@ pub unsafe fn vaesdq_u8(data: uint8x16_t, key: uint8x16_t) -> uint8x16_t {
 #[cfg_attr(not(target_arch = "arm"), target_feature(enable = "aes"))]
 #[cfg_attr(target_arch = "arm", target_feature(enable = "crypto,v8"))]
 #[cfg_attr(test, assert_instr(aesmc))]
+#[cfg_attr(
+    not(target_arch = "arm"),
+    stable(feature = "neon_intrinsics", since = "CURRENT_RUSTC_VERSION")
+)]
 pub unsafe fn vaesmcq_u8(data: uint8x16_t) -> uint8x16_t {
     vaesmcq_u8_(data)
 }
@@ -91,6 +103,10 @@ pub unsafe fn vaesmcq_u8(data: uint8x16_t) -> uint8x16_t {
 #[cfg_attr(not(target_arch = "arm"), target_feature(enable = "aes"))]
 #[cfg_attr(target_arch = "arm", target_feature(enable = "crypto,v8"))]
 #[cfg_attr(test, assert_instr(aesimc))]
+#[cfg_attr(
+    not(target_arch = "arm"),
+    stable(feature = "neon_intrinsics", since = "CURRENT_RUSTC_VERSION")
+)]
 pub unsafe fn vaesimcq_u8(data: uint8x16_t) -> uint8x16_t {
     vaesimcq_u8_(data)
 }
@@ -102,6 +118,10 @@ pub unsafe fn vaesimcq_u8(data: uint8x16_t) -> uint8x16_t {
 #[cfg_attr(not(target_arch = "arm"), target_feature(enable = "sha2"))]
 #[cfg_attr(target_arch = "arm", target_feature(enable = "crypto,v8"))]
 #[cfg_attr(test, assert_instr(sha1h))]
+#[cfg_attr(
+    not(target_arch = "arm"),
+    stable(feature = "neon_intrinsics", since = "CURRENT_RUSTC_VERSION")
+)]
 pub unsafe fn vsha1h_u32(hash_e: u32) -> u32 {
     vsha1h_u32_(hash_e)
 }
@@ -113,6 +133,10 @@ pub unsafe fn vsha1h_u32(hash_e: u32) -> u32 {
 #[cfg_attr(not(target_arch = "arm"), target_feature(enable = "sha2"))]
 #[cfg_attr(target_arch = "arm", target_feature(enable = "crypto,v8"))]
 #[cfg_attr(test, assert_instr(sha1c))]
+#[cfg_attr(
+    not(target_arch = "arm"),
+    stable(feature = "neon_intrinsics", since = "CURRENT_RUSTC_VERSION")
+)]
 pub unsafe fn vsha1cq_u32(hash_abcd: uint32x4_t, hash_e: u32, wk: uint32x4_t) -> uint32x4_t {
     vsha1cq_u32_(hash_abcd, hash_e, wk)
 }
@@ -124,6 +148,10 @@ pub unsafe fn vsha1cq_u32(hash_abcd: uint32x4_t, hash_e: u32, wk: uint32x4_t) ->
 #[cfg_attr(not(target_arch = "arm"), target_feature(enable = "sha2"))]
 #[cfg_attr(target_arch = "arm", target_feature(enable = "crypto,v8"))]
 #[cfg_attr(test, assert_instr(sha1m))]
+#[cfg_attr(
+    not(target_arch = "arm"),
+    stable(feature = "neon_intrinsics", since = "CURRENT_RUSTC_VERSION")
+)]
 pub unsafe fn vsha1mq_u32(hash_abcd: uint32x4_t, hash_e: u32, wk: uint32x4_t) -> uint32x4_t {
     vsha1mq_u32_(hash_abcd, hash_e, wk)
 }
@@ -135,6 +163,10 @@ pub unsafe fn vsha1mq_u32(hash_abcd: uint32x4_t, hash_e: u32, wk: uint32x4_t) ->
 #[cfg_attr(not(target_arch = "arm"), target_feature(enable = "sha2"))]
 #[cfg_attr(target_arch = "arm", target_feature(enable = "crypto,v8"))]
 #[cfg_attr(test, assert_instr(sha1p))]
+#[cfg_attr(
+    not(target_arch = "arm"),
+    stable(feature = "neon_intrinsics", since = "CURRENT_RUSTC_VERSION")
+)]
 pub unsafe fn vsha1pq_u32(hash_abcd: uint32x4_t, hash_e: u32, wk: uint32x4_t) -> uint32x4_t {
     vsha1pq_u32_(hash_abcd, hash_e, wk)
 }
@@ -146,6 +178,10 @@ pub unsafe fn vsha1pq_u32(hash_abcd: uint32x4_t, hash_e: u32, wk: uint32x4_t) ->
 #[cfg_attr(not(target_arch = "arm"), target_feature(enable = "sha2"))]
 #[cfg_attr(target_arch = "arm", target_feature(enable = "crypto,v8"))]
 #[cfg_attr(test, assert_instr(sha1su0))]
+#[cfg_attr(
+    not(target_arch = "arm"),
+    stable(feature = "neon_intrinsics", since = "CURRENT_RUSTC_VERSION")
+)]
 pub unsafe fn vsha1su0q_u32(w0_3: uint32x4_t, w4_7: uint32x4_t, w8_11: uint32x4_t) -> uint32x4_t {
     vsha1su0q_u32_(w0_3, w4_7, w8_11)
 }
@@ -157,6 +193,10 @@ pub unsafe fn vsha1su0q_u32(w0_3: uint32x4_t, w4_7: uint32x4_t, w8_11: uint32x4_
 #[cfg_attr(not(target_arch = "arm"), target_feature(enable = "sha2"))]
 #[cfg_attr(target_arch = "arm", target_feature(enable = "crypto,v8"))]
 #[cfg_attr(test, assert_instr(sha1su1))]
+#[cfg_attr(
+    not(target_arch = "arm"),
+    stable(feature = "neon_intrinsics", since = "CURRENT_RUSTC_VERSION")
+)]
 pub unsafe fn vsha1su1q_u32(tw0_3: uint32x4_t, w12_15: uint32x4_t) -> uint32x4_t {
     vsha1su1q_u32_(tw0_3, w12_15)
 }
@@ -168,6 +208,10 @@ pub unsafe fn vsha1su1q_u32(tw0_3: uint32x4_t, w12_15: uint32x4_t) -> uint32x4_t
 #[cfg_attr(not(target_arch = "arm"), target_feature(enable = "sha2"))]
 #[cfg_attr(target_arch = "arm", target_feature(enable = "crypto,v8"))]
 #[cfg_attr(test, assert_instr(sha256h))]
+#[cfg_attr(
+    not(target_arch = "arm"),
+    stable(feature = "neon_intrinsics", since = "CURRENT_RUSTC_VERSION")
+)]
 pub unsafe fn vsha256hq_u32(
     hash_abcd: uint32x4_t,
     hash_efgh: uint32x4_t,
@@ -183,6 +227,10 @@ pub unsafe fn vsha256hq_u32(
 #[cfg_attr(not(target_arch = "arm"), target_feature(enable = "sha2"))]
 #[cfg_attr(target_arch = "arm", target_feature(enable = "crypto,v8"))]
 #[cfg_attr(test, assert_instr(sha256h2))]
+#[cfg_attr(
+    not(target_arch = "arm"),
+    stable(feature = "neon_intrinsics", since = "CURRENT_RUSTC_VERSION")
+)]
 pub unsafe fn vsha256h2q_u32(
     hash_efgh: uint32x4_t,
     hash_abcd: uint32x4_t,
@@ -198,6 +246,10 @@ pub unsafe fn vsha256h2q_u32(
 #[cfg_attr(not(target_arch = "arm"), target_feature(enable = "sha2"))]
 #[cfg_attr(target_arch = "arm", target_feature(enable = "crypto,v8"))]
 #[cfg_attr(test, assert_instr(sha256su0))]
+#[cfg_attr(
+    not(target_arch = "arm"),
+    stable(feature = "neon_intrinsics", since = "CURRENT_RUSTC_VERSION")
+)]
 pub unsafe fn vsha256su0q_u32(w0_3: uint32x4_t, w4_7: uint32x4_t) -> uint32x4_t {
     vsha256su0q_u32_(w0_3, w4_7)
 }
@@ -209,6 +261,10 @@ pub unsafe fn vsha256su0q_u32(w0_3: uint32x4_t, w4_7: uint32x4_t) -> uint32x4_t
 #[cfg_attr(not(target_arch = "arm"), target_feature(enable = "sha2"))]
 #[cfg_attr(target_arch = "arm", target_feature(enable = "crypto,v8"))]
 #[cfg_attr(test, assert_instr(sha256su1))]
+#[cfg_attr(
+    not(target_arch = "arm"),
+    stable(feature = "neon_intrinsics", since = "CURRENT_RUSTC_VERSION")
+)]
 pub unsafe fn vsha256su1q_u32(
     tw0_3: uint32x4_t,
     w8_11: uint32x4_t,
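A minimal usage sketch for one of the intrinsics stabilized here, assuming an AArch64 target with std available for runtime feature detection; the all-zero block and round key are illustrative values, not taken from the diff:

#[cfg(target_arch = "aarch64")]
fn aes_round_demo() {
    use core::arch::aarch64::{vaeseq_u8, vdupq_n_u8, vld1q_u8, vst1q_u8};

    // The intrinsic still requires the `aes` target feature at runtime,
    // so guard the unsafe call with feature detection.
    if std::arch::is_aarch64_feature_detected!("aes") {
        let block = [0u8; 16]; // illustrative 16-byte state
        let mut out = [0u8; 16];
        unsafe {
            let data = vld1q_u8(block.as_ptr());
            let key = vdupq_n_u8(0); // illustrative all-zero round key
            // AESE performs AddRoundKey, then SubBytes and ShiftRows.
            let state = vaeseq_u8(data, key);
            vst1q_u8(out.as_mut_ptr(), state);
        }
        assert_eq!(out[0], 0x63); // SubBytes maps 0x00 to 0x63
    }
}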