#ifndef __ARM_ACLE
#error "ACLE intrinsics support not enabled."
#endif

#include <stdint.h>

#if defined(__cplusplus)
extern "C" {
#endif
/* 7.3 Memory barriers */
#if !__has_builtin(__dmb)
#define __dmb(i) __builtin_arm_dmb(i)
#endif
#if !__has_builtin(__dsb)
#define __dsb(i) __builtin_arm_dsb(i)
#endif
#if !__has_builtin(__isb)
#define __isb(i) __builtin_arm_isb(i)
#endif
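/*
 * Usage sketch (illustrative, not part of the original header): publishing
 * data with a full-system data memory barrier between the payload store and
 * the flag store. 0xF is the SY option (full system, reads and writes); the
 * globals are hypothetical.
 *
 *   extern volatile int __data, __ready;
 *   __data = 42;
 *   __dmb(0xF);    // order the payload store before the flag store
 *   __ready = 1;
 */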
/* 7.4 Hints */

#if !__has_builtin(__wfi)
static __inline__ void __attribute__((__always_inline__, __nodebug__)) __wfi(void) {
  __builtin_arm_wfi();
}
#endif

#if !__has_builtin(__wfe)
static __inline__ void __attribute__((__always_inline__, __nodebug__)) __wfe(void) {
  __builtin_arm_wfe();
}
#endif

#if !__has_builtin(__sev)
static __inline__ void __attribute__((__always_inline__, __nodebug__)) __sev(void) {
  __builtin_arm_sev();
}
#endif

#if !__has_builtin(__sevl)
static __inline__ void __attribute__((__always_inline__, __nodebug__)) __sevl(void) {
  __builtin_arm_sevl();
}
#endif

#if !__has_builtin(__yield)
static __inline__ void __attribute__((__always_inline__, __nodebug__)) __yield(void) {
  __builtin_arm_yield();
}
#endif
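/*
 * Usage sketch (assumed pattern, not part of the header): a low-power wait
 * loop. The waiter sleeps in WFE until another core updates the flag and
 * issues SEV; the flag variable is hypothetical.
 *
 *   extern volatile int __flag;
 *   while (!__flag)
 *     __wfe();     // suspend until an event (or spurious wakeup) arrives
 *
 *   // signalling side:
 *   __flag = 1;
 *   __dsb(0xF);    // make the store visible before signalling
 *   __sev();       // wake cores waiting in WFE
 */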
#if defined(__ARM_32BIT_STATE) && __ARM_32BIT_STATE
#define __dbg(t) __builtin_arm_dbg(t)
#endif
#if defined(__ARM_64BIT_STATE) && __ARM_64BIT_STATE
#define _CHKFEAT_GCS 1
static __inline__ uint64_t __attribute__((__always_inline__, __nodebug__))
__chkfeat(uint64_t __features) {
  return __builtin_arm_chkfeat(__features) ^ __features;
}
#endif
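/*
 * Usage sketch (illustrative): __chkfeat() reports which of the queried
 * features are enabled; a set bit in the result means the corresponding
 * feature is on.
 *
 *   if (__chkfeat(_CHKFEAT_GCS)) {
 *     // Guarded Control Stacks are enabled for this thread
 *   }
 */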
/* 7.5 Swap */
static __inline__ uint32_t __attribute__((__always_inline__, __nodebug__))
__swp(uint32_t __x, volatile uint32_t *__p) {
  uint32_t v;
  do
    v = __builtin_arm_ldrex(__p);
  while (__builtin_arm_strex(__x, __p));
  return v;
}
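/*
 * Usage sketch (illustrative only): a crude spinlock built on the atomic
 * swap, shown to clarify the semantics. Real code should prefer C11/C++11
 * atomics; the lock word below is hypothetical (0 = free).
 *
 *   extern volatile uint32_t __lock;
 *   while (__swp(1, &__lock) != 0)
 *     ;                          // spin until we swapped a 1 into a free lock
 *   // ... critical section ...
 *   __swp(0, &__lock);           // release
 */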
/* 7.6 Memory prefetch intrinsics */
/* 7.6.1 Data prefetch */
#define __pld(addr) __pldx(0, 0, 0, addr)

#if defined(__ARM_32BIT_STATE) && __ARM_32BIT_STATE
#define __pldx(access_kind, cache_level, retention_policy, addr) \
  __builtin_arm_prefetch(addr, access_kind, 1)
#else
#define __pldx(access_kind, cache_level, retention_policy, addr) \
  __builtin_arm_prefetch(addr, access_kind, cache_level, retention_policy, 1)
#endif

/* 7.6.2 Instruction prefetch */
#define __pli(addr) __plix(0, 0, addr)

#if defined(__ARM_32BIT_STATE) && __ARM_32BIT_STATE
#define __plix(cache_level, retention_policy, addr) \
  __builtin_arm_prefetch(addr, 0, 0)
#else
#define __plix(cache_level, retention_policy, addr) \
  __builtin_arm_prefetch(addr, 0, cache_level, retention_policy, 0)
#endif
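/*
 * Usage sketch (illustrative; argument encoding as described by ACLE,
 * stated here from memory): access_kind 0 = read / 1 = write, cache_level
 * 0 = L1, retention_policy 0 = temporal ("keep").
 *
 *   __pldx(1, 0, 0, &buf[i + 8]);   // warm a line we will store to soon
 *   __pld(&buf[i + 16]);            // plain read prefetch
 */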
/* NOP */
#if !defined(_MSC_VER) || (!defined(__aarch64__) && !defined(__arm64ec__))
static __inline__ void __attribute__((__always_inline__, __nodebug__)) __nop(void) {
  __builtin_arm_nop();
}
#endif
/* 8 DATA-PROCESSING INTRINSICS */
/* 8.2 Miscellaneous data-processing intrinsics */
/* ROR */
static __inline__ uint32_t __attribute__((__always_inline__, __nodebug__))
__ror(uint32_t __x, uint32_t __y) {
  __y %= 32;
  if (__y == 0)
    return __x;
  return (__x >> __y) | (__x << (32 - __y));
}

static __inline__ uint64_t __attribute__((__always_inline__, __nodebug__))
__rorll(uint64_t __x, uint32_t __y) {
  __y %= 64;
  if (__y == 0)
    return __x;
  return (__x >> __y) | (__x << (64 - __y));
}

static __inline__ unsigned long __attribute__((__always_inline__, __nodebug__))
__rorl(unsigned long __x, uint32_t __y) {
#if __SIZEOF_LONG__ == 4
  return __ror(__x, __y);
#else
  return __rorll(__x, __y);
#endif
}
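/*
 * Worked example (illustrative): __ror rotates right, so
 *   __ror(0x12345678u, 8) == 0x78123456u
 * Reducing __y modulo the width and special-casing zero keeps both shift
 * amounts in range (a shift by 32 on a uint32_t is undefined behaviour).
 */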
/* CLZ */
static __inline__ unsigned int __attribute__((__always_inline__, __nodebug__))
__clz(uint32_t __t) {
  return __builtin_arm_clz(__t);
}

static __inline__ unsigned int __attribute__((__always_inline__, __nodebug__))
__clzl(unsigned long __t) {
#if __SIZEOF_LONG__ == 4
  return __builtin_arm_clz(__t);
#else
  return __builtin_arm_clz64(__t);
#endif
}

static __inline__ unsigned int __attribute__((__always_inline__, __nodebug__))
__clzll(uint64_t __t) {
  return __builtin_arm_clz64(__t);
}
/* CLS */
static __inline__ unsigned int __attribute__((__always_inline__, __nodebug__))
__cls(uint32_t __t) {
  return __builtin_arm_cls(__t);
}

static __inline__ unsigned int __attribute__((__always_inline__, __nodebug__))
__clsl(unsigned long __t) {
#if __SIZEOF_LONG__ == 4
  return __builtin_arm_cls(__t);
#else
  return __builtin_arm_cls64(__t);
#endif
}

static __inline__ unsigned int __attribute__((__always_inline__, __nodebug__))
__clsll(uint64_t __t) {
  return __builtin_arm_cls64(__t);
}
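/*
 * Worked examples (illustrative):
 *   __clz(1u)          == 31   // 31 leading zeros before the low set bit
 *   __clz(0x80000000u) == 0
 *   __cls(0u)          == 31   // CLS counts bits matching the sign bit
 */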
/* REV */
static __inline__ uint32_t __attribute__((__always_inline__, __nodebug__))
__rev(uint32_t __t) {
  return __builtin_bswap32(__t);
}

static __inline__ unsigned long __attribute__((__always_inline__, __nodebug__))
__revl(unsigned long __t) {
#if __SIZEOF_LONG__ == 4
  return __builtin_bswap32(__t);
#else
  return __builtin_bswap64(__t);
#endif
}

static __inline__ uint64_t __attribute__((__always_inline__, __nodebug__))
__revll(uint64_t __t) {
  return __builtin_bswap64(__t);
}

/* REV16 */
static __inline__ uint32_t __attribute__((__always_inline__, __nodebug__))
__rev16(uint32_t __t) {
  return __ror(__rev(__t), 16);
}

static __inline__ uint64_t __attribute__((__always_inline__, __nodebug__))
__rev16ll(uint64_t __t) {
  return (((uint64_t)__rev16(__t >> 32)) << 32) | (uint64_t)__rev16((uint32_t)__t);
}
static __inline__ unsigned long __attribute__((__always_inline__, __nodebug__))
__rev16l(unsigned long __t) {
#if __SIZEOF_LONG__ == 4
  return __rev16(__t);
#else
  return __rev16ll(__t);
#endif
}
/* REVSH */
static __inline__ int16_t __attribute__((__always_inline__, __nodebug__))
__revsh(int16_t __t) {
  return (int16_t)__builtin_bswap16((uint16_t)__t);
}
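/*
 * Worked examples (illustrative): __rev reverses all four bytes, __rev16
 * reverses the bytes within each 16-bit halfword.
 *   __rev(0x11223344u)   == 0x44332211u
 *   __rev16(0x11223344u) == 0x22114433u
 */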
/* RBIT */
static __inline__ uint32_t __attribute__((__always_inline__, __nodebug__))
__rbit(uint32_t __t) {
  return __builtin_arm_rbit(__t);
}

static __inline__ uint64_t __attribute__((__always_inline__, __nodebug__))
__rbitll(uint64_t __t) {
#if defined(__ARM_32BIT_STATE) && __ARM_32BIT_STATE
  return (((uint64_t)__builtin_arm_rbit(__t)) << 32) |
         __builtin_arm_rbit(__t >> 32);
#else
  return __builtin_arm_rbit64(__t);
#endif
}

static __inline__ unsigned long __attribute__((__always_inline__, __nodebug__))
__rbitl(unsigned long __t) {
#if __SIZEOF_LONG__ == 4
  return __rbit(__t);
#else
  return __rbitll(__t);
#endif
}
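/*
 * Worked example (illustrative): __rbit mirrors the bit order, so
 *   __rbit(0x1u) == 0x80000000u
 * On 32-bit state there is no 64-bit RBIT instruction, hence the two-half
 * construction in __rbitll above.
 */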
/* 8.3 16-bit multiplications */
#if defined(__ARM_32BIT_STATE) && __ARM_32BIT_STATE
static __inline__ int32_t __attribute__((__always_inline__, __nodebug__))
__smulbb(int32_t __a, int32_t __b) {
  return __builtin_arm_smulbb(__a, __b);
}

static __inline__ int32_t __attribute__((__always_inline__, __nodebug__))
__smulbt(int32_t __a, int32_t __b) {
  return __builtin_arm_smulbt(__a, __b);
}

static __inline__ int32_t __attribute__((__always_inline__, __nodebug__))
__smultb(int32_t __a, int32_t __b) {
  return __builtin_arm_smultb(__a, __b);
}

static __inline__ int32_t __attribute__((__always_inline__, __nodebug__))
__smultt(int32_t __a, int32_t __b) {
  return __builtin_arm_smultt(__a, __b);
}

static __inline__ int32_t __attribute__((__always_inline__, __nodebug__))
__smulwb(int32_t __a, int32_t __b) {
  return __builtin_arm_smulwb(__a, __b);
}

static __inline__ int32_t __attribute__((__always_inline__, __nodebug__))
__smulwt(int32_t __a, int32_t __b) {
  return __builtin_arm_smulwt(__a, __b);
}
#endif
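/*
 * Naming note with a worked example (illustrative): the suffix letters pick
 * the bottom (b) or top (t) halfword of each operand, so __smulbt multiplies
 * the bottom 16 bits of __a by the top 16 bits of __b:
 *   __smulbt(0x00000003, 0x00040000) == 12
 */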
/* 8.4 Saturating intrinsics */
/* 8.4.1 Width-specified saturation intrinsics */
#if defined(__ARM_FEATURE_SAT) && __ARM_FEATURE_SAT
#define __ssat(x, y) __builtin_arm_ssat(x, y)
#define __usat(x, y) __builtin_arm_usat(x, y)
#endif
/* 8.4.2 Saturating addition and subtraction intrinsics */
#if defined(__ARM_32BIT_STATE) && __ARM_32BIT_STATE
static __inline__ int32_t __attribute__((__always_inline__, __nodebug__))
__qadd(int32_t __t, int32_t __v) {
  return __builtin_arm_qadd(__t, __v);
}

static __inline__ int32_t __attribute__((__always_inline__, __nodebug__))
__qsub(int32_t __t, int32_t __v) {
  return __builtin_arm_qsub(__t, __v);
}

static __inline__ int32_t __attribute__((__always_inline__, __nodebug__))
__qdbl(int32_t __t) {
  return __builtin_arm_qadd(__t, __t);
}
#endif
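/*
 * Worked example (illustrative): unlike plain int32_t addition, which
 * overflows, the Q-family saturates to the representable range:
 *   __qadd(INT32_MAX, 1) == INT32_MAX
 *   __qsub(INT32_MIN, 1) == INT32_MIN
 */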
/* 8.4.3 Accumulating multiplications */
#if defined(__ARM_32BIT_STATE) && __ARM_32BIT_STATE
static __inline__ int32_t __attribute__((__always_inline__, __nodebug__))
__smlabb(int32_t __a, int32_t __b, int32_t __c) {
  return __builtin_arm_smlabb(__a, __b, __c);
}

static __inline__ int32_t __attribute__((__always_inline__, __nodebug__))
__smlabt(int32_t __a, int32_t __b, int32_t __c) {
  return __builtin_arm_smlabt(__a, __b, __c);
}

static __inline__ int32_t __attribute__((__always_inline__, __nodebug__))
__smlatb(int32_t __a, int32_t __b, int32_t __c) {
  return __builtin_arm_smlatb(__a, __b, __c);
}

static __inline__ int32_t __attribute__((__always_inline__, __nodebug__))
__smlatt(int32_t __a, int32_t __b, int32_t __c) {
  return __builtin_arm_smlatt(__a, __b, __c);
}

static __inline__ int32_t __attribute__((__always_inline__, __nodebug__))
__smlawb(int32_t __a, int32_t __b, int32_t __c) {
  return __builtin_arm_smlawb(__a, __b, __c);
}

static __inline__ int32_t __attribute__((__always_inline__, __nodebug__))
__smlawt(int32_t __a, int32_t __b, int32_t __c) {
  return __builtin_arm_smlawt(__a, __b, __c);
}
#endif
/* 8.5.4 Parallel 16-bit saturation */
#if defined(__ARM_FEATURE_SIMD32) && __ARM_FEATURE_SIMD32
#define __ssat16(x, y) __builtin_arm_ssat16(x, y)
#define __usat16(x, y) __builtin_arm_usat16(x, y)
#endif
/* 8.5.5 Packing and unpacking */
#if defined(__ARM_FEATURE_SIMD32) && __ARM_FEATURE_SIMD32
typedef int32_t int8x4_t;
typedef int32_t int16x2_t;
typedef uint32_t uint8x4_t;
typedef uint32_t uint16x2_t;

static __inline__ int16x2_t __attribute__((__always_inline__, __nodebug__))
__sxtab16(int16x2_t __a, int8x4_t __b) {
  return __builtin_arm_sxtab16(__a, __b);
}

static __inline__ int16x2_t __attribute__((__always_inline__, __nodebug__))
__sxtb16(int8x4_t __a) {
  return __builtin_arm_sxtb16(__a);
}

static __inline__ int16x2_t __attribute__((__always_inline__, __nodebug__))
__uxtab16(int16x2_t __a, int8x4_t __b) {
  return __builtin_arm_uxtab16(__a, __b);
}

static __inline__ int16x2_t __attribute__((__always_inline__, __nodebug__))
__uxtb16(int8x4_t __a) {
  return __builtin_arm_uxtb16(__a);
}
#endif
/* 8.5.6 Parallel selection */
#if defined(__ARM_FEATURE_SIMD32) && __ARM_FEATURE_SIMD32
static __inline__ uint8x4_t __attribute__((__always_inline__, __nodebug__))
__sel(uint8x4_t __a, uint8x4_t __b) {
  return __builtin_arm_sel(__a, __b);
}
#endif
/* 8.5.7 Parallel 8-bit addition and subtraction */
#if defined(__ARM_FEATURE_SIMD32) && __ARM_FEATURE_SIMD32
static __inline__ int8x4_t __attribute__((__always_inline__, __nodebug__))
__qadd8(int8x4_t __a, int8x4_t __b) {
  return __builtin_arm_qadd8(__a, __b);
}

static __inline__ int8x4_t __attribute__((__always_inline__, __nodebug__))
__qsub8(int8x4_t __a, int8x4_t __b) {
  return __builtin_arm_qsub8(__a, __b);
}

static __inline__ int8x4_t __attribute__((__always_inline__, __nodebug__))
__sadd8(int8x4_t __a, int8x4_t __b) {
  return __builtin_arm_sadd8(__a, __b);
}

static __inline__ int8x4_t __attribute__((__always_inline__, __nodebug__))
__shadd8(int8x4_t __a, int8x4_t __b) {
  return __builtin_arm_shadd8(__a, __b);
}

static __inline__ int8x4_t __attribute__((__always_inline__, __nodebug__))
__shsub8(int8x4_t __a, int8x4_t __b) {
  return __builtin_arm_shsub8(__a, __b);
}

static __inline__ int8x4_t __attribute__((__always_inline__, __nodebug__))
__ssub8(int8x4_t __a, int8x4_t __b) {
  return __builtin_arm_ssub8(__a, __b);
}

static __inline__ uint8x4_t __attribute__((__always_inline__, __nodebug__))
__uadd8(uint8x4_t __a, uint8x4_t __b) {
  return __builtin_arm_uadd8(__a, __b);
}

static __inline__ uint8x4_t __attribute__((__always_inline__, __nodebug__))
__uhadd8(uint8x4_t __a, uint8x4_t __b) {
  return __builtin_arm_uhadd8(__a, __b);
}

static __inline__ uint8x4_t __attribute__((__always_inline__, __nodebug__))
__uhsub8(uint8x4_t __a, uint8x4_t __b) {
  return __builtin_arm_uhsub8(__a, __b);
}

static __inline__ uint8x4_t __attribute__((__always_inline__, __nodebug__))
__uqadd8(uint8x4_t __a, uint8x4_t __b) {
  return __builtin_arm_uqadd8(__a, __b);
}

static __inline__ uint8x4_t __attribute__((__always_inline__, __nodebug__))
__uqsub8(uint8x4_t __a, uint8x4_t __b) {
  return __builtin_arm_uqsub8(__a, __b);
}

static __inline__ uint8x4_t __attribute__((__always_inline__, __nodebug__))
__usub8(uint8x4_t __a, uint8x4_t __b) {
  return __builtin_arm_usub8(__a, __b);
}
#endif
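/*
 * Worked example (illustrative): the 8-bit parallel forms operate on four
 * byte lanes packed into one 32-bit word. With __uqadd8 each lane saturates
 * independently at 0xFF:
 *   __uqadd8(0xFF01FF01u, 0x01010101u) == 0xFF02FF02u
 */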
/* 8.5.8 Sum of 8-bit absolute differences */
#if defined(__ARM_FEATURE_SIMD32) && __ARM_FEATURE_SIMD32
static __inline__ uint32_t __attribute__((__always_inline__, __nodebug__))
__usad8(uint8x4_t __a, uint8x4_t __b) {
  return __builtin_arm_usad8(__a, __b);
}

static __inline__ uint32_t __attribute__((__always_inline__, __nodebug__))
__usada8(uint8x4_t __a, uint8x4_t __b, uint32_t __c) {
  return __builtin_arm_usada8(__a, __b, __c);
}
#endif
/* 8.5.9 Parallel 16-bit addition and subtraction */
#if defined(__ARM_FEATURE_SIMD32) && __ARM_FEATURE_SIMD32
static __inline__ int16x2_t __attribute__((__always_inline__, __nodebug__))
__qadd16(int16x2_t __a, int16x2_t __b) {
  return __builtin_arm_qadd16(__a, __b);
}

static __inline__ int16x2_t __attribute__((__always_inline__, __nodebug__))
__qasx(int16x2_t __a, int16x2_t __b) {
  return __builtin_arm_qasx(__a, __b);
}

static __inline__ int16x2_t __attribute__((__always_inline__, __nodebug__))
__qsax(int16x2_t __a, int16x2_t __b) {
  return __builtin_arm_qsax(__a, __b);
}

static __inline__ int16x2_t __attribute__((__always_inline__, __nodebug__))
__qsub16(int16x2_t __a, int16x2_t __b) {
  return __builtin_arm_qsub16(__a, __b);
}

static __inline__ int16x2_t __attribute__((__always_inline__, __nodebug__))
__sadd16(int16x2_t __a, int16x2_t __b) {
  return __builtin_arm_sadd16(__a, __b);
}

static __inline__ int16x2_t __attribute__((__always_inline__, __nodebug__))
__sasx(int16x2_t __a, int16x2_t __b) {
  return __builtin_arm_sasx(__a, __b);
}

static __inline__ int16x2_t __attribute__((__always_inline__, __nodebug__))
__shadd16(int16x2_t __a, int16x2_t __b) {
  return __builtin_arm_shadd16(__a, __b);
}

static __inline__ int16x2_t __attribute__((__always_inline__, __nodebug__))
__shasx(int16x2_t __a, int16x2_t __b) {
  return __builtin_arm_shasx(__a, __b);
}

static __inline__ int16x2_t __attribute__((__always_inline__, __nodebug__))
__shsax(int16x2_t __a, int16x2_t __b) {
  return __builtin_arm_shsax(__a, __b);
}

static __inline__ int16x2_t __attribute__((__always_inline__, __nodebug__))
__shsub16(int16x2_t __a, int16x2_t __b) {
  return __builtin_arm_shsub16(__a, __b);
}

static __inline__ int16x2_t __attribute__((__always_inline__, __nodebug__))
__ssax(int16x2_t __a, int16x2_t __b) {
  return __builtin_arm_ssax(__a, __b);
}

static __inline__ int16x2_t __attribute__((__always_inline__, __nodebug__))
__ssub16(int16x2_t __a, int16x2_t __b) {
  return __builtin_arm_ssub16(__a, __b);
}

static __inline__ uint16x2_t __attribute__((__always_inline__, __nodebug__))
__uadd16(uint16x2_t __a, uint16x2_t __b) {
  return __builtin_arm_uadd16(__a, __b);
}

static __inline__ uint16x2_t __attribute__((__always_inline__, __nodebug__))
__uasx(uint16x2_t __a, uint16x2_t __b) {
  return __builtin_arm_uasx(__a, __b);
}

static __inline__ uint16x2_t __attribute__((__always_inline__, __nodebug__))
__uhadd16(uint16x2_t __a, uint16x2_t __b) {
  return __builtin_arm_uhadd16(__a, __b);
}

static __inline__ uint16x2_t __attribute__((__always_inline__, __nodebug__))
__uhasx(uint16x2_t __a, uint16x2_t __b) {
  return __builtin_arm_uhasx(__a, __b);
}

static __inline__ uint16x2_t __attribute__((__always_inline__, __nodebug__))
__uhsax(uint16x2_t __a, uint16x2_t __b) {
  return __builtin_arm_uhsax(__a, __b);
}

static __inline__ uint16x2_t __attribute__((__always_inline__, __nodebug__))
__uhsub16(uint16x2_t __a, uint16x2_t __b) {
  return __builtin_arm_uhsub16(__a, __b);
}

static __inline__ uint16x2_t __attribute__((__always_inline__, __nodebug__))
__uqadd16(uint16x2_t __a, uint16x2_t __b) {
  return __builtin_arm_uqadd16(__a, __b);
}

static __inline__ uint16x2_t __attribute__((__always_inline__, __nodebug__))
__uqasx(uint16x2_t __a, uint16x2_t __b) {
  return __builtin_arm_uqasx(__a, __b);
}

static __inline__ uint16x2_t __attribute__((__always_inline__, __nodebug__))
__uqsax(uint16x2_t __a, uint16x2_t __b) {
  return __builtin_arm_uqsax(__a, __b);
}

static __inline__ uint16x2_t __attribute__((__always_inline__, __nodebug__))
__uqsub16(uint16x2_t __a, uint16x2_t __b) {
  return __builtin_arm_uqsub16(__a, __b);
}

static __inline__ uint16x2_t __attribute__((__always_inline__, __nodebug__))
__usax(uint16x2_t __a, uint16x2_t __b) {
  return __builtin_arm_usax(__a, __b);
}

static __inline__ uint16x2_t __attribute__((__always_inline__, __nodebug__))
__usub16(uint16x2_t __a, uint16x2_t __b) {
  return __builtin_arm_usub16(__a, __b);
}
#endif
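/*
 * Naming note (illustrative): the 16-bit parallel forms operate on both
 * halfword lanes at once, and the "asx"/"sax" variants exchange the
 * halfwords of the second operand first. For example, __sasx(a, b) computes
 * lo(a) - hi(b) in the low lane and hi(a) + lo(b) in the high lane.
 */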
/* 8.5.10 Parallel 16-bit multiplication */
#if defined(__ARM_FEATURE_SIMD32) && __ARM_FEATURE_SIMD32
static __inline__ int32_t __attribute__((__always_inline__, __nodebug__))
__smlad(int16x2_t __a, int16x2_t __b, int32_t __c) {
  return __builtin_arm_smlad(__a, __b, __c);
}

static __inline__ int32_t __attribute__((__always_inline__, __nodebug__))
__smladx(int16x2_t __a, int16x2_t __b, int32_t __c) {
  return __builtin_arm_smladx(__a, __b, __c);
}

static __inline__ int64_t __attribute__((__always_inline__, __nodebug__))
__smlald(int16x2_t __a, int16x2_t __b, int64_t __c) {
  return __builtin_arm_smlald(__a, __b, __c);
}

static __inline__ int64_t __attribute__((__always_inline__, __nodebug__))
__smlaldx(int16x2_t __a, int16x2_t __b, int64_t __c) {
  return __builtin_arm_smlaldx(__a, __b, __c);
}

static __inline__ int32_t __attribute__((__always_inline__, __nodebug__))
__smlsd(int16x2_t __a, int16x2_t __b, int32_t __c) {
  return __builtin_arm_smlsd(__a, __b, __c);
}

static __inline__ int32_t __attribute__((__always_inline__, __nodebug__))
__smlsdx(int16x2_t __a, int16x2_t __b, int32_t __c) {
  return __builtin_arm_smlsdx(__a, __b, __c);
}

static __inline__ int64_t __attribute__((__always_inline__, __nodebug__))
__smlsld(int16x2_t __a, int16x2_t __b, int64_t __c) {
  return __builtin_arm_smlsld(__a, __b, __c);
}

static __inline__ int64_t __attribute__((__always_inline__, __nodebug__))
__smlsldx(int16x2_t __a, int16x2_t __b, int64_t __c) {
  return __builtin_arm_smlsldx(__a, __b, __c);
}

static __inline__ int32_t __attribute__((__always_inline__, __nodebug__))
__smuad(int16x2_t __a, int16x2_t __b) {
  return __builtin_arm_smuad(__a, __b);
}

static __inline__ int32_t __attribute__((__always_inline__, __nodebug__))
__smuadx(int16x2_t __a, int16x2_t __b) {
  return __builtin_arm_smuadx(__a, __b);
}

static __inline__ int32_t __attribute__((__always_inline__, __nodebug__))
__smusd(int16x2_t __a, int16x2_t __b) {
  return __builtin_arm_smusd(__a, __b);
}

static __inline__ int32_t __attribute__((__always_inline__, __nodebug__))
__smusdx(int16x2_t __a, int16x2_t __b) {
  return __builtin_arm_smusdx(__a, __b);
}
#endif
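/*
 * Worked example (illustrative): __smuad is a packed dot product. With
 * lanes a = {a0, a1} and b = {b0, b1} packed into int16x2_t words,
 *   __smuad(a, b) == a0*b0 + a1*b1
 * and __smlad(a, b, c) additionally accumulates into c, which is the inner
 * step of many fixed-point FIR/convolution kernels.
 */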
/* 8.6 Floating-point data-processing intrinsics */
#if (defined(__ARM_FEATURE_DIRECTED_ROUNDING) &&  \
     (__ARM_FEATURE_DIRECTED_ROUNDING)) &&        \
    (defined(__ARM_64BIT_STATE) && __ARM_64BIT_STATE)
static __inline__ double __attribute__((__always_inline__, __nodebug__))
__rintn(double __a) {
  return __builtin_roundeven(__a);
}

static __inline__ float __attribute__((__always_inline__, __nodebug__))
__rintnf(float __a) {
  return __builtin_roundevenf(__a);
}
#endif
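/*
 * Worked example (illustrative): __rintn rounds to nearest with
 * ties-to-even, so __rintn(2.5) == 2.0 while __rintn(3.5) == 4.0.
 */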
/* 8.8 CRC32 intrinsics */
#if (defined(__ARM_FEATURE_CRC32) && __ARM_FEATURE_CRC32) || \
    (defined(__ARM_64BIT_STATE) && __ARM_64BIT_STATE)
static __inline__ uint32_t __attribute__((__always_inline__, __nodebug__, target("crc")))
__crc32b(uint32_t __a, uint8_t __b) {
  return __builtin_arm_crc32b(__a, __b);
}

static __inline__ uint32_t __attribute__((__always_inline__, __nodebug__, target("crc")))
__crc32h(uint32_t __a, uint16_t __b) {
  return __builtin_arm_crc32h(__a, __b);
}

static __inline__ uint32_t __attribute__((__always_inline__, __nodebug__, target("crc")))
__crc32w(uint32_t __a, uint32_t __b) {
  return __builtin_arm_crc32w(__a, __b);
}

static __inline__ uint32_t __attribute__((__always_inline__, __nodebug__, target("crc")))
__crc32d(uint32_t __a, uint64_t __b) {
  return __builtin_arm_crc32d(__a, __b);
}

static __inline__ uint32_t __attribute__((__always_inline__, __nodebug__, target("crc")))
__crc32cb(uint32_t __a, uint8_t __b) {
  return __builtin_arm_crc32cb(__a, __b);
}

static __inline__ uint32_t __attribute__((__always_inline__, __nodebug__, target("crc")))
__crc32ch(uint32_t __a, uint16_t __b) {
  return __builtin_arm_crc32ch(__a, __b);
}

static __inline__ uint32_t __attribute__((__always_inline__, __nodebug__, target("crc")))
__crc32cw(uint32_t __a, uint32_t __b) {
  return __builtin_arm_crc32cw(__a, __b);
}

static __inline__ uint32_t __attribute__((__always_inline__, __nodebug__, target("crc")))
__crc32cd(uint32_t __a, uint64_t __b) {
  return __builtin_arm_crc32cd(__a, __b);
}
#endif
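/*
 * Usage sketch (illustrative, hypothetical helper): folding a byte buffer
 * into a running CRC32C checksum one byte at a time. The wider forms
 * (__crc32cw/__crc32cd) process 4 or 8 bytes per step and are faster.
 *
 *   static uint32_t crc32c_bytes(uint32_t crc, const uint8_t *p, size_t n) {
 *     while (n--)
 *       crc = __crc32cb(crc, *p++);
 *     return crc;
 *   }
 */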
/* 8.9 JavaScript double-to-int32 conversion (Armv8.3-A JCVT) */
#if defined(__ARM_64BIT_STATE) && __ARM_64BIT_STATE
static __inline__ int32_t __attribute__((__always_inline__, __nodebug__, target("v8.3a")))
__jcvt(double __a) {
  return __builtin_arm_jcvt(__a);
}
#endif
/* Armv8.5-A FP rounding intrinsics */
#if defined(__ARM_64BIT_STATE) && __ARM_64BIT_STATE
static __inline__ float __attribute__((__always_inline__, __nodebug__, target("v8.5a")))
__rint32zf(float __a) {
  return __builtin_arm_rint32zf(__a);
}

static __inline__ double __attribute__((__always_inline__, __nodebug__, target("v8.5a")))
__rint32z(double __a) {
  return __builtin_arm_rint32z(__a);
}

static __inline__ float __attribute__((__always_inline__, __nodebug__, target("v8.5a")))
__rint64zf(float __a) {
  return __builtin_arm_rint64zf(__a);
}

static __inline__ double __attribute__((__always_inline__, __nodebug__, target("v8.5a")))
__rint64z(double __a) {
  return __builtin_arm_rint64z(__a);
}

static __inline__ float __attribute__((__always_inline__, __nodebug__, target("v8.5a")))
__rint32xf(float __a) {
  return __builtin_arm_rint32xf(__a);
}

static __inline__ double __attribute__((__always_inline__, __nodebug__, target("v8.5a")))
__rint32x(double __a) {
  return __builtin_arm_rint32x(__a);
}

static __inline__ float __attribute__((__always_inline__, __nodebug__, target("v8.5a")))
__rint64xf(float __a) {
  return __builtin_arm_rint64xf(__a);
}

static __inline__ double __attribute__((__always_inline__, __nodebug__, target("v8.5a")))
__rint64x(double __a) {
  return __builtin_arm_rint64x(__a);
}
#endif
/* Armv8.7-A load/store 64-byte intrinsics */
#if defined(__ARM_64BIT_STATE) && __ARM_64BIT_STATE
typedef struct {
  uint64_t val[8];
} data512_t;

static __inline__ data512_t __attribute__((__always_inline__, __nodebug__, target("ls64")))
__arm_ld64b(const void *__addr) {
  data512_t __value;
  __builtin_arm_ld64b(__addr, __value.val);
  return __value;
}

static __inline__ void __attribute__((__always_inline__, __nodebug__, target("ls64")))
__arm_st64b(void *__addr, data512_t __value) {
  __builtin_arm_st64b(__addr, __value.val);
}

static __inline__ uint64_t __attribute__((__always_inline__, __nodebug__, target("ls64")))
__arm_st64bv(void *__addr, data512_t __value) {
  return __builtin_arm_st64bv(__addr, __value.val);
}

static __inline__ uint64_t __attribute__((__always_inline__, __nodebug__, target("ls64")))
__arm_st64bv0(void *__addr, data512_t __value) {
  return __builtin_arm_st64bv0(__addr, __value.val);
}
#endif
/* 10.1 Special register intrinsics */
#define __arm_rsr(sysreg) __builtin_arm_rsr(sysreg)
#define __arm_rsr64(sysreg) __builtin_arm_rsr64(sysreg)
#define __arm_rsr128(sysreg) __builtin_arm_rsr128(sysreg)
#define __arm_rsrp(sysreg) __builtin_arm_rsrp(sysreg)
#define __arm_rsrf(sysreg) __builtin_bit_cast(float, __arm_rsr(sysreg))
#define __arm_rsrf64(sysreg) __builtin_bit_cast(double, __arm_rsr64(sysreg))
#define __arm_wsr(sysreg, v) __builtin_arm_wsr(sysreg, v)
#define __arm_wsr64(sysreg, v) __builtin_arm_wsr64(sysreg, v)
#define __arm_wsr128(sysreg, v) __builtin_arm_wsr128(sysreg, v)
#define __arm_wsrp(sysreg, v) __builtin_arm_wsrp(sysreg, v)
#define __arm_wsrf(sysreg, v) __arm_wsr(sysreg, __builtin_bit_cast(uint32_t, v))
#define __arm_wsrf64(sysreg, v) __arm_wsr64(sysreg, __builtin_bit_cast(uint64_t, v))
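/*
 * Usage sketch (illustrative, AArch64): the system register is named by a
 * string literal, e.g. reading the virtual counter:
 *
 *   uint64_t ticks = __arm_rsr64("cntvct_el0");
 */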
/* Memory Tagging Extensions (MTE) Intrinsics */
#if defined(__ARM_64BIT_STATE) && __ARM_64BIT_STATE
#define __arm_mte_create_random_tag(__ptr, __mask) __builtin_arm_irg(__ptr, __mask)
#define __arm_mte_increment_tag(__ptr, __tag_offset) __builtin_arm_addg(__ptr, __tag_offset)
#define __arm_mte_exclude_tag(__ptr, __excluded) __builtin_arm_gmi(__ptr, __excluded)
#define __arm_mte_get_tag(__ptr) __builtin_arm_ldg(__ptr)
#define __arm_mte_set_tag(__ptr) __builtin_arm_stg(__ptr)
#define __arm_mte_ptrdiff(__ptra, __ptrb) __builtin_arm_subp(__ptra, __ptrb)

/* Memory Operations Intrinsics */
#define __arm_mops_memset_tag(__tagged_address, __value, __size) \
  __builtin_arm_mops_memset_tag(__tagged_address, __value, __size)
#endif
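/*
 * Usage sketch (illustrative; assumes MTE is enabled for the allocation and
 * that p points at a tag-granule-aligned region): tagging one granule and
 * returning the tagged pointer. Subsequent accesses must use the tagged
 * pointer, not the original one.
 *
 *   void *tagged = __arm_mte_create_random_tag(p, 0);  // 0 = exclude no tags
 *   __arm_mte_set_tag(tagged);                         // store tag to memory
 */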
/* Coprocessor Intrinsics */
#if defined(__ARM_FEATURE_COPROC)

#if (__ARM_FEATURE_COPROC & 0x1)

#if (__ARM_ARCH < 8)
#define __arm_cdp(coproc, opc1, CRd, CRn, CRm, opc2) \
  __builtin_arm_cdp(coproc, opc1, CRd, CRn, CRm, opc2)
#endif /* __ARM_ARCH < 8 */

#define __arm_ldc(coproc, CRd, p) __builtin_arm_ldc(coproc, CRd, p)
#define __arm_stc(coproc, CRd, p) __builtin_arm_stc(coproc, CRd, p)

#define __arm_mcr(coproc, opc1, value, CRn, CRm, opc2) \
  __builtin_arm_mcr(coproc, opc1, value, CRn, CRm, opc2)
#define __arm_mrc(coproc, opc1, CRn, CRm, opc2) \
  __builtin_arm_mrc(coproc, opc1, CRn, CRm, opc2)

#if (__ARM_ARCH != 4) && (__ARM_ARCH < 8)
#define __arm_ldcl(coproc, CRd, p) __builtin_arm_ldcl(coproc, CRd, p)
#define __arm_stcl(coproc, CRd, p) __builtin_arm_stcl(coproc, CRd, p)
#endif /* (__ARM_ARCH != 4) && (__ARM_ARCH < 8) */

#if (__ARM_ARCH_8M_MAIN__) || (__ARM_ARCH_8_1M_MAIN__)
#define __arm_cdp(coproc, opc1, CRd, CRn, CRm, opc2) \
  __builtin_arm_cdp(coproc, opc1, CRd, CRn, CRm, opc2)
#define __arm_ldcl(coproc, CRd, p) __builtin_arm_ldcl(coproc, CRd, p)
#define __arm_stcl(coproc, CRd, p) __builtin_arm_stcl(coproc, CRd, p)
#endif /* __ARM_ARCH_8M_MAIN__ || __ARM_ARCH_8_1M_MAIN__ */

#endif /* __ARM_FEATURE_COPROC & 0x1 */

#if (__ARM_FEATURE_COPROC & 0x2)
#define __arm_cdp2(coproc, opc1, CRd, CRn, CRm, opc2) \
  __builtin_arm_cdp2(coproc, opc1, CRd, CRn, CRm, opc2)
#define __arm_ldc2(coproc, CRd, p) __builtin_arm_ldc2(coproc, CRd, p)
#define __arm_stc2(coproc, CRd, p) __builtin_arm_stc2(coproc, CRd, p)
#define __arm_ldc2l(coproc, CRd, p) __builtin_arm_ldc2l(coproc, CRd, p)
#define __arm_stc2l(coproc, CRd, p) __builtin_arm_stc2l(coproc, CRd, p)
#define __arm_mcr2(coproc, opc1, value, CRn, CRm, opc2) \
  __builtin_arm_mcr2(coproc, opc1, value, CRn, CRm, opc2)
#define __arm_mrc2(coproc, opc1, CRn, CRm, opc2) \
  __builtin_arm_mrc2(coproc, opc1, CRn, CRm, opc2)
#endif /* __ARM_FEATURE_COPROC & 0x2 */

#if (__ARM_FEATURE_COPROC & 0x4)
#define __arm_mcrr(coproc, opc1, value, CRm) \
  __builtin_arm_mcrr(coproc, opc1, value, CRm)
#define __arm_mrrc(coproc, opc1, CRm) __builtin_arm_mrrc(coproc, opc1, CRm)
#endif /* __ARM_FEATURE_COPROC & 0x4 */

#if (__ARM_FEATURE_COPROC & 0x8)
#define __arm_mcrr2(coproc, opc1, value, CRm) \
  __builtin_arm_mcrr2(coproc, opc1, value, CRm)
#define __arm_mrrc2(coproc, opc1, CRm) __builtin_arm_mrrc2(coproc, opc1, CRm)
#endif /* __ARM_FEATURE_COPROC & 0x8 */

#endif /* __ARM_FEATURE_COPROC */
/* Transactional Memory Extension (TME) Intrinsics */
#if defined(__ARM_FEATURE_TME) && __ARM_FEATURE_TME

#define _TMFAILURE_REASON 0x00007fffu
#define _TMFAILURE_RTRY 0x00008000u
#define _TMFAILURE_CNCL 0x00010000u
#define _TMFAILURE_MEM 0x00020000u
#define _TMFAILURE_IMP 0x00040000u
#define _TMFAILURE_ERR 0x00080000u
#define _TMFAILURE_SIZE 0x00100000u
#define _TMFAILURE_NEST 0x00200000u
#define _TMFAILURE_DBG 0x00400000u
#define _TMFAILURE_INT 0x00800000u
#define _TMFAILURE_TRIVIAL 0x01000000u

#define __tstart() __builtin_arm_tstart()
#define __tcommit() __builtin_arm_tcommit()
#define __tcancel(__arg) __builtin_arm_tcancel(__arg)
#define __ttest() __builtin_arm_ttest()

#endif /* __ARM_FEATURE_TME */
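/*
 * Usage sketch (illustrative): the canonical TME retry loop. __tstart()
 * returns 0 when the transaction starts; otherwise it returns a failure
 * status that can be tested against the _TMFAILURE_* masks.
 *
 *   uint64_t status;
 *   while ((status = __tstart()) != 0) {
 *     if (!(status & _TMFAILURE_RTRY))
 *       break;                    // permanent failure: take a fallback path
 *   }
 *   if (status == 0) {
 *     // ... transactional work ...
 *     __tcommit();
 *   }
 */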
/* Armv8.5-A Random number generation intrinsics */
#if defined(__ARM_64BIT_STATE) && __ARM_64BIT_STATE
static __inline__ int __attribute__((__always_inline__, __nodebug__, target("rand")))
__rndr(uint64_t *__p) {
  return __builtin_arm_rndr(__p);
}

static __inline__ int __attribute__((__always_inline__, __nodebug__, target("rand")))
__rndrrs(uint64_t *__p) {
  return __builtin_arm_rndrrs(__p);
}
#endif
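/*
 * Usage sketch (illustrative): both intrinsics return 0 on success and
 * nonzero when no random data was available, so callers typically retry.
 *
 *   uint64_t r;
 *   while (__rndr(&r) != 0)
 *     ;          // retry until the generator delivers a value
 */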
/* 11.2 Guarded Control Stack intrinsics */
#if defined(__ARM_64BIT_STATE) && __ARM_64BIT_STATE
static __inline__ void *__attribute__((__always_inline__, __nodebug__))
__gcspr() {
  return (void *)__builtin_arm_rsr64("gcspr_el0");
}

static __inline__ uint64_t __attribute__((__always_inline__, __nodebug__, target("gcs")))
__gcspopm() {
  return __builtin_arm_gcspopm(0);
}

static __inline__ const void *__attribute__((__always_inline__, __nodebug__, target("gcs")))
__gcsss(const void *__stack) {
  return __builtin_arm_gcsss(__stack);
}
#endif
#if defined(__cplusplus)
} /* extern "C" */
#endif