#ifndef __RISCV_CRYPTO_H
#define __RISCV_CRYPTO_H

#include <stdint.h>

#if defined(__cplusplus)
extern "C" {
#endif
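
// Intrinsics for the RISC-V scalar cryptography (Zk*) extensions. Each block
// below is exposed only when the compiler predefines the matching extension
// test macro (e.g. __riscv_zknd), which happens when the target is built
// with that extension enabled.
//
// Minimal usage sketch (illustrative only; assumes an RV64 target with Zknd
// enabled):
//
//   uint64_t lo = ..., hi = ...;           // 128-bit AES state, two halves
//   uint64_t t = __riscv_aes64ds(lo, hi);  // one final-round decrypt step

// Zknd: NIST AES decryption. The 32-bit forms take a constant byte-select
// immediate `bs`, so they are provided as macros over the builtins.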
#if defined(__riscv_zknd)
#if __riscv_xlen == 32
#define __riscv_aes32dsi(x, y, bs) __builtin_riscv_aes32dsi(x, y, bs)
#define __riscv_aes32dsmi(x, y, bs) __builtin_riscv_aes32dsmi(x, y, bs)
#endif

#if __riscv_xlen == 64
// AES final-round decryption (InvShiftRows and InvSubBytes).
static __inline__ uint64_t __attribute__((__always_inline__, __nodebug__))
__riscv_aes64ds(uint64_t __x, uint64_t __y) {
  return __builtin_riscv_aes64ds(__x, __y);
}

// AES middle-round decryption (additionally applies InvMixColumns).
static __inline__ uint64_t __attribute__((__always_inline__, __nodebug__))
__riscv_aes64dsm(uint64_t __x, uint64_t __y) {
  return __builtin_riscv_aes64dsm(__x, __y);
}

// InvMixColumns, for converting encryption round keys to decryption keys.
static __inline__ uint64_t __attribute__((__always_inline__, __nodebug__))
__riscv_aes64im(uint64_t __x) {
  return __builtin_riscv_aes64im(__x);
}
#endif
#endif // defined(__riscv_zknd)
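
// Zkne: NIST AES encryption, mirroring the decryption block above.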
#if defined(__riscv_zkne)
#if __riscv_xlen == 32
#define __riscv_aes32esi(x, y, bs) __builtin_riscv_aes32esi(x, y, bs)
#define __riscv_aes32esmi(x, y, bs) __builtin_riscv_aes32esmi(x, y, bs)
#endif

#if __riscv_xlen == 64
// AES final-round encryption (ShiftRows and SubBytes).
static __inline__ uint64_t __attribute__((__always_inline__, __nodebug__))
__riscv_aes64es(uint64_t __x, uint64_t __y) {
  return __builtin_riscv_aes64es(__x, __y);
}

// AES middle-round encryption (additionally applies MixColumns).
static __inline__ uint64_t __attribute__((__always_inline__, __nodebug__))
__riscv_aes64esm(uint64_t __x, uint64_t __y) {
  return __builtin_riscv_aes64esm(__x, __y);
}
#endif
#endif // defined(__riscv_zkne)
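
// AES key-schedule helpers, available with either Zknd or Zkne. The round
// number `rnum` of __riscv_aes64ks1i must be a constant expression, hence
// the macro form.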
#if defined(__riscv_zknd) || defined(__riscv_zkne)
#if __riscv_xlen == 64
#define __riscv_aes64ks1i(x, rnum) __builtin_riscv_aes64ks1i(x, rnum)

static __inline__ uint64_t __attribute__((__always_inline__, __nodebug__))
__riscv_aes64ks2(uint64_t __x, uint64_t __y) {
  return __builtin_riscv_aes64ks2(__x, __y);
}
#endif
#endif // defined(__riscv_zknd) || defined(__riscv_zkne)
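
// Zknh: NIST hash function acceleration (SHA-256 and SHA-512 sigma/sum
// operations). The SHA-256 intrinsics are available on both RV32 and RV64.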
#if defined(__riscv_zknh)
static __inline__ uint32_t __attribute__((__always_inline__, __nodebug__))
__riscv_sha256sig0(uint32_t __x) {
  return __builtin_riscv_sha256sig0(__x);
}

static __inline__ uint32_t __attribute__((__always_inline__, __nodebug__))
__riscv_sha256sig1(uint32_t __x) {
  return __builtin_riscv_sha256sig1(__x);
}

static __inline__ uint32_t __attribute__((__always_inline__, __nodebug__))
__riscv_sha256sum0(uint32_t __x) {
  return __builtin_riscv_sha256sum0(__x);
}

static __inline__ uint32_t __attribute__((__always_inline__, __nodebug__))
__riscv_sha256sum1(uint32_t __x) {
  return __builtin_riscv_sha256sum1(__x);
}
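
// On RV32, the 64-bit SHA-512 operations work on the two 32-bit halves of
// each operand: the Sigma functions via the *h/*l pairs, the Sum functions
// via the *r forms.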
#if __riscv_xlen == 32
static __inline__ uint32_t __attribute__((__always_inline__, __nodebug__))
__riscv_sha512sig0h(uint32_t __x, uint32_t __y) {
  return __builtin_riscv_sha512sig0h(__x, __y);
}

static __inline__ uint32_t __attribute__((__always_inline__, __nodebug__))
__riscv_sha512sig0l(uint32_t __x, uint32_t __y) {
  return __builtin_riscv_sha512sig0l(__x, __y);
}

static __inline__ uint32_t __attribute__((__always_inline__, __nodebug__))
__riscv_sha512sig1h(uint32_t __x, uint32_t __y) {
  return __builtin_riscv_sha512sig1h(__x, __y);
}

static __inline__ uint32_t __attribute__((__always_inline__, __nodebug__))
__riscv_sha512sig1l(uint32_t __x, uint32_t __y) {
  return __builtin_riscv_sha512sig1l(__x, __y);
}

static __inline__ uint32_t __attribute__((__always_inline__, __nodebug__))
__riscv_sha512sum0r(uint32_t __x, uint32_t __y) {
  return __builtin_riscv_sha512sum0r(__x, __y);
}

static __inline__ uint32_t __attribute__((__always_inline__, __nodebug__))
__riscv_sha512sum1r(uint32_t __x, uint32_t __y) {
  return __builtin_riscv_sha512sum1r(__x, __y);
}
#endif
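
// On RV64, the SHA-512 sigma and sum functions operate on full 64-bit values.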
#if __riscv_xlen == 64
static __inline__ uint64_t __attribute__((__always_inline__, __nodebug__))
__riscv_sha512sig0(uint64_t __x) {
  return __builtin_riscv_sha512sig0(__x);
}

static __inline__ uint64_t __attribute__((__always_inline__, __nodebug__))
__riscv_sha512sig1(uint64_t __x) {
  return __builtin_riscv_sha512sig1(__x);
}

static __inline__ uint64_t __attribute__((__always_inline__, __nodebug__))
__riscv_sha512sum0(uint64_t __x) {
  return __builtin_riscv_sha512sum0(__x);
}

static __inline__ uint64_t __attribute__((__always_inline__, __nodebug__))
__riscv_sha512sum1(uint64_t __x) {
  return __builtin_riscv_sha512sum1(__x);
}
#endif
#endif // defined(__riscv_zknh)
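
// Zksh: SM3 (ShangMi hash) P0/P1 permutation functions.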
#if defined(__riscv_zksh)
static __inline__ uint32_t __attribute__((__always_inline__, __nodebug__))
__riscv_sm3p0(uint32_t __x) {
  return __builtin_riscv_sm3p0(__x);
}

static __inline__ uint32_t __attribute__((__always_inline__, __nodebug__))
__riscv_sm3p1(uint32_t __x) {
  return __builtin_riscv_sm3p1(__x);
}
#endif // defined(__riscv_zksh)
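
// Zksed: SM4 (ShangMi block cipher) accelerated round (sm4ed) and key
// schedule (sm4ks) steps; `bs` selects a byte and must be a constant, hence
// the macro form.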
#if defined(__riscv_zksed)
#define __riscv_sm4ed(x, y, bs) __builtin_riscv_sm4ed(x, y, bs)
#define __riscv_sm4ks(x, y, bs) __builtin_riscv_sm4ks(x, y, bs)
#endif // defined(__riscv_zksed)

#if defined(__cplusplus)
} // extern "C"
#endif

#endif // __RISCV_CRYPTO_H