10#ifndef __RISCV_BITMANIP_H
11#define __RISCV_BITMANIP_H
15#if defined(__cplusplus)
19#if defined(__riscv_zbb)
/* orc.b (Zbb): byte-wise OR-combine — each byte of the result is 0xFF when
   the corresponding byte of __x is nonzero, 0x00 otherwise. */
static __inline__ uint32_t __attribute__((__always_inline__, __nodebug__))
__riscv_orc_b_32(uint32_t __x) {
  return __builtin_riscv_orc_b_32(__x);
}
/* clz (Zbb): count leading zero bits of __x.
   NOTE(review): presumably returns 32 for __x == 0, per the RISC-V clz
   instruction semantics — unlike plain __builtin_clz, which is undefined
   at 0. Confirm against the builtin's documentation. */
static __inline__ unsigned __attribute__((__always_inline__, __nodebug__))
__riscv_clz_32(uint32_t __x) {
  return __builtin_riscv_clz_32(__x);
}
/* ctz (Zbb): count trailing zero bits of __x.
   NOTE(review): presumably returns 32 for __x == 0, per the RISC-V ctz
   instruction semantics — confirm against the builtin's documentation. */
static __inline__ unsigned __attribute__((__always_inline__, __nodebug__))
__riscv_ctz_32(uint32_t __x) {
  return __builtin_riscv_ctz_32(__x);
}
/* cpop (Zbb): population count — number of set bits in __x.
   Lowered via the generic popcount builtin, which targets the cpop
   instruction when Zbb is available. */
static __inline__ unsigned __attribute__((__always_inline__, __nodebug__))
__riscv_cpop_32(uint32_t __x) {
  return __builtin_popcount(__x);
}
/* orc.b (Zbb, RV64): byte-wise OR-combine — each byte of the result is 0xFF
   when the corresponding byte of __x is nonzero, 0x00 otherwise. */
static __inline__ uint64_t __attribute__((__always_inline__, __nodebug__))
__riscv_orc_b_64(uint64_t __x) {
  return __builtin_riscv_orc_b_64(__x);
}
/* clz (Zbb, RV64): count leading zero bits of __x.
   NOTE(review): presumably returns 64 for __x == 0, per the RISC-V clz
   instruction semantics — confirm against the builtin's documentation. */
static __inline__ unsigned __attribute__((__always_inline__, __nodebug__))
__riscv_clz_64(uint64_t __x) {
  return __builtin_riscv_clz_64(__x);
}
/* ctz (Zbb, RV64): count trailing zero bits of __x.
   NOTE(review): presumably returns 64 for __x == 0, per the RISC-V ctz
   instruction semantics — confirm against the builtin's documentation. */
static __inline__ unsigned __attribute__((__always_inline__, __nodebug__))
__riscv_ctz_64(uint64_t __x) {
  return __builtin_riscv_ctz_64(__x);
}
/* cpop (Zbb, RV64): population count — number of set bits in __x.
   Lowered via the generic popcount builtin, which targets the cpop
   instruction when Zbb is available. */
static __inline__ unsigned __attribute__((__always_inline__, __nodebug__))
__riscv_cpop_64(uint64_t __x) {
  return __builtin_popcountll(__x);
}
63#if defined(__riscv_zbb) || defined(__riscv_zbkb)
/* rev8 (Zbb/Zbkb): reverse the byte order of __x (32-bit byte swap). */
static __inline__ uint32_t __attribute__((__always_inline__, __nodebug__))
__riscv_rev8_32(uint32_t __x) {
  return __builtin_bswap32(__x);
}
/* rol (Zbb/Zbkb): rotate __x left by __y bit positions.
   NOTE(review): the rotate builtin presumably uses only the low 5 bits of
   __y, matching the instruction — confirm for out-of-range shift amounts. */
static __inline__ uint32_t __attribute__((__always_inline__, __nodebug__))
__riscv_rol_32(uint32_t __x, uint32_t __y) {
  return __builtin_rotateleft32(__x, __y);
}
/* ror (Zbb/Zbkb): rotate __x right by __y bit positions.
   NOTE(review): the rotate builtin presumably uses only the low 5 bits of
   __y, matching the instruction — confirm for out-of-range shift amounts. */
static __inline__ uint32_t __attribute__((__always_inline__, __nodebug__))
__riscv_ror_32(uint32_t __x, uint32_t __y) {
  return __builtin_rotateright32(__x, __y);
}
/* rev8 (Zbb/Zbkb, RV64): reverse the byte order of __x (64-bit byte swap). */
static __inline__ uint64_t __attribute__((__always_inline__, __nodebug__))
__riscv_rev8_64(uint64_t __x) {
  return __builtin_bswap64(__x);
}
/* rol (Zbb/Zbkb, RV64): rotate __x left by __y bit positions. */
static __inline__ uint64_t __attribute__((__always_inline__, __nodebug__))
__riscv_rol_64(uint64_t __x, uint32_t __y) {
  return __builtin_rotateleft64(__x, __y);
}
/* ror (Zbb/Zbkb, RV64): rotate __x right by __y bit positions. */
static __inline__ uint64_t __attribute__((__always_inline__, __nodebug__))
__riscv_ror_64(uint64_t __x, uint32_t __y) {
  return __builtin_rotateright64(__x, __y);
}
97#if defined(__riscv_zbkb)
/* brev8 (Zbkb): reverse the bit order within each byte of __x. */
static __inline__ uint32_t __attribute__((__always_inline__, __nodebug__))
__riscv_brev8_32(uint32_t __x) {
  return __builtin_riscv_brev8_32(__x);
}
103#if __riscv_xlen == 64
/* brev8 (Zbkb, RV64): reverse the bit order within each byte of __x. */
static __inline__ uint64_t __attribute__((__always_inline__, __nodebug__))
__riscv_brev8_64(uint64_t __x) {
  return __builtin_riscv_brev8_64(__x);
}
110#if __riscv_xlen == 32
/* unzip (Zbkb, RV32 only): de-interleave — gather the even-indexed bits of
   __x into the low half of the result and the odd-indexed bits into the
   high half. */
static __inline__ uint32_t __attribute__((__always_inline__, __nodebug__))
__riscv_unzip_32(uint32_t __x) {
  return __builtin_riscv_unzip_32(__x);
}
/* zip (Zbkb, RV32 only): interleave — scatter the low half of __x into the
   even-indexed result bits and the high half into the odd-indexed bits
   (inverse of unzip). */
static __inline__ uint32_t __attribute__((__always_inline__, __nodebug__))
__riscv_zip_32(uint32_t __x) {
  return __builtin_riscv_zip_32(__x);
}
123#if defined(__riscv_zbc)
124#if __riscv_xlen == 32
/* clmulr (Zbc, RV32): carry-less multiply, reversed — bits 31..0 of the
   bit-reversed 63-bit carry-less product of __x and __y. */
static __inline__ uint32_t __attribute__((__always_inline__, __nodebug__))
__riscv_clmulr_32(uint32_t __x, uint32_t __y) {
  return __builtin_riscv_clmulr_32(__x, __y);
}
131#if __riscv_xlen == 64
/* clmulr (Zbc, RV64): carry-less multiply, reversed — bits 63..0 of the
   bit-reversed 127-bit carry-less product of __x and __y. */
static __inline__ uint64_t __attribute__((__always_inline__, __nodebug__))
__riscv_clmulr_64(uint64_t __x, uint64_t __y) {
  return __builtin_riscv_clmulr_64(__x, __y);
}
139#if defined(__riscv_zbkc) || defined(__riscv_zbc)
/* clmul (Zbc/Zbkc): low 32 bits of the carry-less product of __x and __y. */
static __inline__ uint32_t __attribute__((__always_inline__, __nodebug__))
__riscv_clmul_32(uint32_t __x, uint32_t __y) {
  return __builtin_riscv_clmul_32(__x, __y);
}
145#if __riscv_xlen == 32
/* clmulh (Zbc/Zbkc, RV32): high 32 bits of the carry-less product of
   __x and __y. */
static __inline__ uint32_t __attribute__((__always_inline__, __nodebug__))
__riscv_clmulh_32(uint32_t __x, uint32_t __y) {
  return __builtin_riscv_clmulh_32(__x, __y);
}
152#if __riscv_xlen == 64
/* clmul (Zbc/Zbkc, RV64): low 64 bits of the carry-less product of
   __x and __y. */
static __inline__ uint64_t __attribute__((__always_inline__, __nodebug__))
__riscv_clmul_64(uint64_t __x, uint64_t __y) {
  return __builtin_riscv_clmul_64(__x, __y);
}
/* clmulh (Zbc/Zbkc, RV64): high 64 bits of the carry-less product of
   __x and __y. */
static __inline__ uint64_t __attribute__((__always_inline__, __nodebug__))
__riscv_clmulh_64(uint64_t __x, uint64_t __y) {
  return __builtin_riscv_clmulh_64(__x, __y);
}
165#if defined(__riscv_zbkx)
166#if __riscv_xlen == 32
/* xperm4 (Zbkx, RV32): nibble-wise lookup — each 4-bit field of __y selects
   a nibble of __x for the corresponding result field (out-of-range
   selectors yield 0). */
static __inline__ uint32_t __attribute__((__always_inline__, __nodebug__))
__riscv_xperm4_32(uint32_t __x, uint32_t __y) {
  return __builtin_riscv_xperm4_32(__x, __y);
}
/* xperm8 (Zbkx, RV32): byte-wise lookup — each byte of __y selects a byte
   of __x for the corresponding result byte (out-of-range selectors
   yield 0). */
static __inline__ uint32_t __attribute__((__always_inline__, __nodebug__))
__riscv_xperm8_32(uint32_t __x, uint32_t __y) {
  return __builtin_riscv_xperm8_32(__x, __y);
}
178#if __riscv_xlen == 64
/* xperm4 (Zbkx, RV64): nibble-wise lookup — each 4-bit field of __y selects
   a nibble of __x for the corresponding result field (out-of-range
   selectors yield 0). */
static __inline__ uint64_t __attribute__((__always_inline__, __nodebug__))
__riscv_xperm4_64(uint64_t __x, uint64_t __y) {
  return __builtin_riscv_xperm4_64(__x, __y);
}
/* xperm8 (Zbkx, RV64): byte-wise lookup — each byte of __y selects a byte
   of __x for the corresponding result byte (out-of-range selectors
   yield 0). */
static __inline__ uint64_t __attribute__((__always_inline__, __nodebug__))
__riscv_xperm8_64(uint64_t __x, uint64_t __y) {
  return __builtin_riscv_xperm8_64(__x, __y);
}
191#if defined(__cplusplus)
_Float16 __2f16 __attribute__((ext_vector_type(2)))
Zeroes the upper 128 bits (bits 255:128) of all YMM registers.
static __inline__ uint32_t uint32_t __y