#include <linux/hardirq.h>
#include <linux/types.h>
#include <linux/module.h>
#include <linux/err.h>
#include <crypto/algapi.h>
#include <crypto/aes.h>
#include <crypto/ctr.h>
#include <crypto/b128ops.h>
#include <crypto/gcm.h>
#include <crypto/xts.h>
#include <asm/cpu_device_id.h>
#include <asm/simd.h>
#include <crypto/scatterwalk.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>
#include <linux/jump_label.h>
#include <linux/workqueue.h>
#include <linux/spinlock.h>
#include <linux/static_call.h>
#define AESNI_ALIGN …
#define AESNI_ALIGN_ATTR …
#define AES_BLOCK_MASK …
#define AESNI_ALIGN_EXTRA …
#define CRYPTO_AES_CTX_SIZE …
#define XTS_AES_CTX_SIZE …
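/*
 * XTS needs two independent AES keys: one to encrypt the tweak and one for
 * the data itself.  The context below is expected to hold both expanded key
 * schedules, aligned to AESNI_ALIGN so the assembly routines can rely on
 * aligned accesses.
 */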
struct aesni_xts_ctx { … };
static inline void *aes_align_addr(void *addr)
{ … }
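/*
 * The asmlinkage functions declared below are implemented in assembly
 * (presumably aesni-intel_asm.S and the related AVX files under
 * arch/x86/crypto/).  The C glue in this file only prepares the arguments
 * and brackets the calls with kernel_fpu_begin()/kernel_fpu_end().
 */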
asmlinkage void aesni_set_key(struct crypto_aes_ctx *ctx, const u8 *in_key,
unsigned int key_len);
asmlinkage void aesni_enc(const void *ctx, u8 *out, const u8 *in);
asmlinkage void aesni_dec(const void *ctx, u8 *out, const u8 *in);
asmlinkage void aesni_ecb_enc(struct crypto_aes_ctx *ctx, u8 *out,
const u8 *in, unsigned int len);
asmlinkage void aesni_ecb_dec(struct crypto_aes_ctx *ctx, u8 *out,
const u8 *in, unsigned int len);
asmlinkage void aesni_cbc_enc(struct crypto_aes_ctx *ctx, u8 *out,
const u8 *in, unsigned int len, u8 *iv);
asmlinkage void aesni_cbc_dec(struct crypto_aes_ctx *ctx, u8 *out,
const u8 *in, unsigned int len, u8 *iv);
asmlinkage void aesni_cts_cbc_enc(struct crypto_aes_ctx *ctx, u8 *out,
const u8 *in, unsigned int len, u8 *iv);
asmlinkage void aesni_cts_cbc_dec(struct crypto_aes_ctx *ctx, u8 *out,
const u8 *in, unsigned int len, u8 *iv);
asmlinkage void aesni_xts_enc(const struct crypto_aes_ctx *ctx, u8 *out,
const u8 *in, unsigned int len, u8 *iv);
asmlinkage void aesni_xts_dec(const struct crypto_aes_ctx *ctx, u8 *out,
const u8 *in, unsigned int len, u8 *iv);
#ifdef CONFIG_X86_64
asmlinkage void aesni_ctr_enc(struct crypto_aes_ctx *ctx, u8 *out,
const u8 *in, unsigned int len, u8 *iv);
DEFINE_STATIC_CALL(…);
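/*
 * The CTR transform goes through a static call so that, when the CPU
 * supports AVX, module init can presumably repoint it at the "by8" AVX
 * routines declared below without paying for an indirect branch per call.
 */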
asmlinkage void aes_ctr_enc_128_avx_by8(const u8 *in, u8 *iv,
void *keys, u8 *out, unsigned int num_bytes);
asmlinkage void aes_ctr_enc_192_avx_by8(const u8 *in, u8 *iv,
void *keys, u8 *out, unsigned int num_bytes);
asmlinkage void aes_ctr_enc_256_avx_by8(const u8 *in, u8 *iv,
void *keys, u8 *out, unsigned int num_bytes);
asmlinkage void aes_xctr_enc_128_avx_by8(const u8 *in, const u8 *iv,
const void *keys, u8 *out, unsigned int num_bytes,
unsigned int byte_ctr);
asmlinkage void aes_xctr_enc_192_avx_by8(const u8 *in, const u8 *iv,
const void *keys, u8 *out, unsigned int num_bytes,
unsigned int byte_ctr);
asmlinkage void aes_xctr_enc_256_avx_by8(const u8 *in, const u8 *iv,
const void *keys, u8 *out, unsigned int num_bytes,
unsigned int byte_ctr);
#endif
static inline struct crypto_aes_ctx *aes_ctx(void *raw_ctx)
{ … }
static inline struct aesni_xts_ctx *aes_xts_ctx(struct crypto_skcipher *tfm)
{ … }
static int aes_set_key_common(struct crypto_aes_ctx *ctx,
const u8 *in_key, unsigned int key_len)
{ … }
static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
unsigned int key_len)
{ … }
static void aesni_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{ … }
static void aesni_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{ … }
static int aesni_skcipher_setkey(struct crypto_skcipher *tfm, const u8 *key,
unsigned int len)
{ … }
static int ecb_encrypt(struct skcipher_request *req)
{ … }
static int ecb_decrypt(struct skcipher_request *req)
{ … }
static int cbc_encrypt(struct skcipher_request *req)
{ … }
static int cbc_decrypt(struct skcipher_request *req)
{ … }
static int cts_cbc_encrypt(struct skcipher_request *req)
{ … }
static int cts_cbc_decrypt(struct skcipher_request *req)
{ … }
#ifdef CONFIG_X86_64
static void aesni_ctr_enc_avx_tfm(struct crypto_aes_ctx *ctx, u8 *out,
const u8 *in, unsigned int len, u8 *iv)
{ … }
static int ctr_crypt(struct skcipher_request *req)
{ … }
static void aesni_xctr_enc_avx_tfm(struct crypto_aes_ctx *ctx, u8 *out,
const u8 *in, unsigned int len, u8 *iv,
unsigned int byte_ctr)
{ … }
static int xctr_crypt(struct skcipher_request *req)
{ … }
#endif
static int xts_setkey_aesni(struct crypto_skcipher *tfm, const u8 *key,
unsigned int keylen)
{ … }
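/*
 * The two function-pointer types below let xts_crypt() be shared between the
 * plain AES-NI implementation in this file and the AVX/VAES variants
 * instantiated by DEFINE_XTS_ALG(); only the IV-encryption and bulk
 * en/decryption callbacks differ per variant.
 */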
typedef void (*xts_encrypt_iv_func)(const struct crypto_aes_ctx *tweak_key,
				    u8 iv[AES_BLOCK_SIZE]);
typedef void (*xts_crypt_func)(const struct crypto_aes_ctx *key,
			       const u8 *src, u8 *dst, unsigned int len,
			       u8 tweak[AES_BLOCK_SIZE]);
static noinline int
xts_crypt_slowpath(struct skcipher_request *req, xts_crypt_func crypt_func)
{ … }
static __always_inline int
xts_crypt(struct skcipher_request *req, xts_encrypt_iv_func encrypt_iv,
xts_crypt_func crypt_func)
{ … }
static void aesni_xts_encrypt_iv(const struct crypto_aes_ctx *tweak_key,
u8 iv[AES_BLOCK_SIZE])
{ … }
static void aesni_xts_encrypt(const struct crypto_aes_ctx *key,
const u8 *src, u8 *dst, unsigned int len,
u8 tweak[AES_BLOCK_SIZE])
{ … }
static void aesni_xts_decrypt(const struct crypto_aes_ctx *key,
const u8 *src, u8 *dst, unsigned int len,
u8 tweak[AES_BLOCK_SIZE])
{ … }
static int xts_encrypt_aesni(struct skcipher_request *req)
{ … }
static int xts_decrypt_aesni(struct skcipher_request *req)
{ … }
static struct crypto_alg aesni_cipher_alg = …;
static struct skcipher_alg aesni_skciphers[] = …;
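/*
 * For reference, kernel users normally reach these implementations through
 * the generic skcipher API rather than by driver name.  A minimal sketch
 * (illustrative only; error and completion handling elided):
 *
 *	struct crypto_skcipher *tfm = crypto_alloc_skcipher("cbc(aes)", 0, 0);
 *	struct skcipher_request *req = skcipher_request_alloc(tfm, GFP_KERNEL);
 *
 *	crypto_skcipher_setkey(tfm, key, AES_KEYSIZE_128);
 *	skcipher_request_set_crypt(req, src_sg, dst_sg, nbytes, iv);
 *	crypto_skcipher_encrypt(req);
 *
 * The AES-NI algorithms registered by this module then win on priority over
 * the generic C implementations.
 */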
static
struct simd_skcipher_alg *aesni_simd_skciphers[ARRAY_SIZE(aesni_skciphers)];
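/*
 * The simd wrappers registered for the algorithms above follow the usual
 * crypto/simd.c pattern: when the FPU is not usable in the calling context,
 * the request is expected to be deferred to cryptd rather than executing the
 * AES-NI code directly.
 */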
#ifdef CONFIG_X86_64
static struct skcipher_alg aesni_xctr = …;
static struct simd_skcipher_alg *aesni_simd_xctr;
asmlinkage void aes_xts_encrypt_iv(const struct crypto_aes_ctx *tweak_key,
u8 iv[AES_BLOCK_SIZE]);
#define DEFINE_XTS_ALG(suffix, driver_name, priority) …
DEFINE_XTS_ALG(…);
#if defined(CONFIG_AS_VAES) && defined(CONFIG_AS_VPCLMULQDQ)
DEFINE_XTS_ALG(…);
DEFINE_XTS_ALG(…);
DEFINE_XTS_ALG(…);
#endif
struct aes_gcm_key { … };
struct aes_gcm_key_aesni { … };
#define AES_GCM_KEY_AESNI(key) …
#define AES_GCM_KEY_AESNI_SIZE …
struct aes_gcm_key_avx10 { … };
#define AES_GCM_KEY_AVX10(key) …
#define AES_GCM_KEY_AVX10_SIZE …
#define FLAG_RFC4106 …
#define FLAG_ENC …
#define FLAG_AVX …
#if defined(CONFIG_AS_VAES) && defined(CONFIG_AS_VPCLMULQDQ)
#define FLAG_AVX10_256 …
#define FLAG_AVX10_512 …
#else
#define FLAG_AVX10_256 …
#define FLAG_AVX10_512 …
#endif
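/*
 * The FLAG_* values are passed as compile-time constants through
 * __always_inline helpers such as aes_gcm_update() and gcm_crypt(), so the
 * flag tests should fold away and each registered GCM variant gets its own
 * specialized copy of the common code.
 */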
static inline struct aes_gcm_key *
aes_gcm_key_get(struct crypto_aead *tfm, int flags)
{ … }
asmlinkage void
aes_gcm_precompute_aesni(struct aes_gcm_key_aesni *key);
asmlinkage void
aes_gcm_precompute_aesni_avx(struct aes_gcm_key_aesni *key);
asmlinkage void
aes_gcm_precompute_vaes_avx10_256(struct aes_gcm_key_avx10 *key);
asmlinkage void
aes_gcm_precompute_vaes_avx10_512(struct aes_gcm_key_avx10 *key);
static void aes_gcm_precompute(struct aes_gcm_key *key, int flags)
{ … }
asmlinkage void
aes_gcm_aad_update_aesni(const struct aes_gcm_key_aesni *key,
u8 ghash_acc[16], const u8 *aad, int aadlen);
asmlinkage void
aes_gcm_aad_update_aesni_avx(const struct aes_gcm_key_aesni *key,
u8 ghash_acc[16], const u8 *aad, int aadlen);
asmlinkage void
aes_gcm_aad_update_vaes_avx10(const struct aes_gcm_key_avx10 *key,
u8 ghash_acc[16], const u8 *aad, int aadlen);
static void aes_gcm_aad_update(const struct aes_gcm_key *key, u8 ghash_acc[16],
const u8 *aad, int aadlen, int flags)
{ … }
asmlinkage void
aes_gcm_enc_update_aesni(const struct aes_gcm_key_aesni *key,
const u32 le_ctr[4], u8 ghash_acc[16],
const u8 *src, u8 *dst, int datalen);
asmlinkage void
aes_gcm_enc_update_aesni_avx(const struct aes_gcm_key_aesni *key,
const u32 le_ctr[4], u8 ghash_acc[16],
const u8 *src, u8 *dst, int datalen);
asmlinkage void
aes_gcm_enc_update_vaes_avx10_256(const struct aes_gcm_key_avx10 *key,
const u32 le_ctr[4], u8 ghash_acc[16],
const u8 *src, u8 *dst, int datalen);
asmlinkage void
aes_gcm_enc_update_vaes_avx10_512(const struct aes_gcm_key_avx10 *key,
const u32 le_ctr[4], u8 ghash_acc[16],
const u8 *src, u8 *dst, int datalen);
asmlinkage void
aes_gcm_dec_update_aesni(const struct aes_gcm_key_aesni *key,
const u32 le_ctr[4], u8 ghash_acc[16],
const u8 *src, u8 *dst, int datalen);
asmlinkage void
aes_gcm_dec_update_aesni_avx(const struct aes_gcm_key_aesni *key,
const u32 le_ctr[4], u8 ghash_acc[16],
const u8 *src, u8 *dst, int datalen);
asmlinkage void
aes_gcm_dec_update_vaes_avx10_256(const struct aes_gcm_key_avx10 *key,
const u32 le_ctr[4], u8 ghash_acc[16],
const u8 *src, u8 *dst, int datalen);
asmlinkage void
aes_gcm_dec_update_vaes_avx10_512(const struct aes_gcm_key_avx10 *key,
const u32 le_ctr[4], u8 ghash_acc[16],
const u8 *src, u8 *dst, int datalen);
static __always_inline void
aes_gcm_update(const struct aes_gcm_key *key,
const u32 le_ctr[4], u8 ghash_acc[16],
const u8 *src, u8 *dst, int datalen, int flags)
{ … }
asmlinkage void
aes_gcm_enc_final_aesni(const struct aes_gcm_key_aesni *key,
const u32 le_ctr[4], u8 ghash_acc[16],
u64 total_aadlen, u64 total_datalen);
asmlinkage void
aes_gcm_enc_final_aesni_avx(const struct aes_gcm_key_aesni *key,
const u32 le_ctr[4], u8 ghash_acc[16],
u64 total_aadlen, u64 total_datalen);
asmlinkage void
aes_gcm_enc_final_vaes_avx10(const struct aes_gcm_key_avx10 *key,
const u32 le_ctr[4], u8 ghash_acc[16],
u64 total_aadlen, u64 total_datalen);
static __always_inline void
aes_gcm_enc_final(const struct aes_gcm_key *key,
const u32 le_ctr[4], u8 ghash_acc[16],
u64 total_aadlen, u64 total_datalen, int flags)
{ … }
asmlinkage bool __must_check
aes_gcm_dec_final_aesni(const struct aes_gcm_key_aesni *key,
const u32 le_ctr[4], const u8 ghash_acc[16],
u64 total_aadlen, u64 total_datalen,
const u8 tag[16], int taglen);
asmlinkage bool __must_check
aes_gcm_dec_final_aesni_avx(const struct aes_gcm_key_aesni *key,
const u32 le_ctr[4], const u8 ghash_acc[16],
u64 total_aadlen, u64 total_datalen,
const u8 tag[16], int taglen);
asmlinkage bool __must_check
aes_gcm_dec_final_vaes_avx10(const struct aes_gcm_key_avx10 *key,
const u32 le_ctr[4], const u8 ghash_acc[16],
u64 total_aadlen, u64 total_datalen,
const u8 tag[16], int taglen);
static __always_inline bool __must_check
aes_gcm_dec_final(const struct aes_gcm_key *key, const u32 le_ctr[4],
u8 ghash_acc[16], u64 total_aadlen, u64 total_datalen,
u8 tag[16], int taglen, int flags)
{ … }
static int common_rfc4106_set_authsize(struct crypto_aead *aead,
unsigned int authsize)
{ … }
static int generic_gcmaes_set_authsize(struct crypto_aead *tfm,
unsigned int authsize)
{ … }
static int gcm_setkey(struct crypto_aead *tfm, const u8 *raw_key,
unsigned int keylen, int flags)
{ … }
static void gcm_process_assoc(const struct aes_gcm_key *key, u8 ghash_acc[16],
struct scatterlist *sg_src, unsigned int assoclen,
int flags)
{ … }
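/*
 * gcm_crypt() below is the single top-level en/decryption path shared by all
 * GCM variants; each DEFINE_GCM_ALGS() entry point calls it with a different
 * constant flags value (FLAG_ENC, FLAG_RFC4106, FLAG_AVX, ...).
 */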
static __always_inline int
gcm_crypt(struct aead_request *req, int flags)
{ … }
#define DEFINE_GCM_ALGS(suffix, flags, generic_driver_name, rfc_driver_name, \
		ctxsize, priority) …
DEFINE_GCM_ALGS(…);
DEFINE_GCM_ALGS(…);
#if defined(CONFIG_AS_VAES) && defined(CONFIG_AS_VPCLMULQDQ)
DEFINE_GCM_ALGS(…);
DEFINE_GCM_ALGS(…);
#endif
static const struct x86_cpu_id zmm_exclusion_list[] = …;
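/*
 * CPUs on the exclusion list above presumably downclock when 512-bit ZMM
 * registers are used, so register_avx_algs() is expected to deprioritize the
 * AVX10/512 algorithms on those parts rather than register them at full
 * priority.
 */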
static int __init register_avx_algs(void)
{ … }
static void unregister_avx_algs(void)
{ … }
#else
static struct aead_alg aes_gcm_algs_aesni[0];
static struct simd_aead_alg *aes_gcm_simdalgs_aesni[0];
static int __init register_avx_algs(void)
{
return 0;
}
static void unregister_avx_algs(void)
{
}
#endif
static const struct x86_cpu_id aesni_cpu_id[] = …;
MODULE_DEVICE_TABLE(x86cpu, aesni_cpu_id);
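/*
 * The CPU-ID table lets userspace autoload this module on any processor that
 * advertises the AES-NI feature bit.
 */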
static int __init aesni_init(void)
{ … }
static void __exit aesni_exit(void)
{ … }
late_initcall(aesni_init);
module_exit(aesni_exit);
MODULE_DESCRIPTION(…);
MODULE_LICENSE(…);
MODULE_ALIAS_CRYPTO(…);