/* linux/crypto/api.c */

// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Scatterlist Cryptographic API.
 *
 * Copyright (c) 2002 James Morris <[email protected]>
 * Copyright (c) 2002 David S. Miller ([email protected])
 * Copyright (c) 2005 Herbert Xu <[email protected]>
 *
 * Portions derived from Cryptoapi, by Alexander Kjeldaas <[email protected]>
 * and Nettle, by Niels Möller.
 */

#include <linux/err.h>
#include <linux/errno.h>
#include <linux/jump_label.h>
#include <linux/kernel.h>
#include <linux/kmod.h>
#include <linux/module.h>
#include <linux/param.h>
#include <linux/sched/signal.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <linux/completion.h>
#include "internal.h"

/* Global registry of all algorithms, protected by crypto_alg_sem. */
LIST_HEAD(crypto_alg_list);
EXPORT_SYMBOL_GPL(crypto_alg_list);
DECLARE_RWSEM(crypto_alg_sem);
EXPORT_SYMBOL_GPL(crypto_alg_sem);

/* Notifier chain used to ask crypto managers to instantiate algorithms. */
BLOCKING_NOTIFIER_HEAD(crypto_chain);
EXPORT_SYMBOL_GPL(crypto_chain);

#if IS_BUILTIN(CONFIG_CRYPTO_ALGAPI) && \
    !IS_ENABLED(CONFIG_CRYPTO_MANAGER_DISABLE_TESTS)
DEFINE_STATIC_KEY_FALSE(__crypto_boot_test_finished);
#endif

static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg);

struct crypto_alg *crypto_mod_get(struct crypto_alg *alg)
{}
EXPORT_SYMBOL_GPL();

void crypto_mod_put(struct crypto_alg *alg)
{}
EXPORT_SYMBOL_GPL();

/*
 * Look up an algorithm by name under crypto_alg_sem (caller must hold it).
 * An exact driver-name match wins immediately; otherwise the highest
 * priority algorithm whose generic name matches is returned with a
 * reference held, or NULL if nothing usable is found.
 */
static struct crypto_alg *__crypto_alg_lookup(const char *name, u32 type,
					      u32 mask)
{
	struct crypto_alg *q, *alg = NULL;
	int best = -2;

	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		int exact, fuzzy;

		if (crypto_is_moribund(q))
			continue;

		if ((q->cra_flags ^ type) & mask)
			continue;

		exact = !strcmp(q->cra_driver_name, name);
		fuzzy = !strcmp(q->cra_name, name);
		if (!exact && !(fuzzy && q->cra_priority > best))
			continue;

		/* Skip the candidate if its module is being unloaded. */
		if (unlikely(!crypto_mod_get(q)))
			continue;

		best = q->cra_priority;
		if (alg)
			crypto_mod_put(alg);
		alg = q;

		if (exact)
			break;
	}

	return alg;
}

static void crypto_larval_destroy(struct crypto_alg *alg)
{}

/*
 * Allocate a larval: a temporary stand-in registered while the real
 * algorithm is being loaded or constructed.  Returns ERR_PTR(-ENOMEM)
 * on allocation failure.
 */
struct crypto_larval *crypto_larval_alloc(const char *name, u32 type, u32 mask)
{
	struct crypto_larval *larval;

	larval = kzalloc(sizeof(*larval), GFP_KERNEL);
	if (!larval)
		return ERR_PTR(-ENOMEM);

	larval->mask = mask;
	larval->alg.cra_flags = CRYPTO_ALG_LARVAL | type;
	larval->alg.cra_priority = -1;
	larval->alg.cra_destroy = crypto_larval_destroy;

	strscpy(larval->alg.cra_name, name, CRYPTO_MAX_ALG_NAME);
	init_completion(&larval->completion);

	return larval;
}
EXPORT_SYMBOL_GPL(crypto_larval_alloc);

/*
 * Register a larval for @name unless a matching algorithm (or larval)
 * already exists; in that case the existing entry is used and, if it is
 * itself a larval, waited upon.  Returns the algorithm/larval with a
 * reference held, or an ERR_PTR.
 */
static struct crypto_alg *crypto_larval_add(const char *name, u32 type,
					    u32 mask)
{
	struct crypto_alg *alg;
	struct crypto_larval *larval;

	larval = crypto_larval_alloc(name, type, mask);
	if (IS_ERR(larval))
		return ERR_CAST(larval);

	/* One ref for the list, one for the caller. */
	refcount_set(&larval->alg.cra_refcnt, 2);

	down_write(&crypto_alg_sem);
	alg = __crypto_alg_lookup(name, type, mask);
	if (!alg) {
		alg = &larval->alg;
		list_add(&alg->cra_list, &crypto_alg_list);
	}
	up_write(&crypto_alg_sem);

	if (alg != &larval->alg) {
		/* Lost the race: someone else registered first. */
		kfree(larval);
		if (crypto_is_larval(alg))
			alg = crypto_larval_wait(alg);
	}

	return alg;
}

void crypto_larval_kill(struct crypto_alg *alg)
{}
EXPORT_SYMBOL_GPL();

void crypto_wait_for_test(struct crypto_larval *larval)
{}
EXPORT_SYMBOL_GPL();

static void crypto_start_test(struct crypto_larval *larval)
{}

static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg)
{}

/*
 * Look up an algorithm, by default restricting the search to tested,
 * non-FIPS-internal implementations.  A second pass without the TESTED
 * bit distinguishes "never existed" (NULL) from "failed its self-test"
 * (-ELIBBAD); FIPS-internal mismatches yield -ENOENT.
 */
static struct crypto_alg *crypto_alg_lookup(const char *name, u32 type,
					    u32 mask)
{
	const u32 fips = CRYPTO_ALG_FIPS_INTERNAL;
	struct crypto_alg *alg;
	u32 test = 0;

	if (!((type | mask) & CRYPTO_ALG_TESTED))
		test |= CRYPTO_ALG_TESTED;

	down_read(&crypto_alg_sem);
	alg = __crypto_alg_lookup(name, (type | test) & ~fips,
				  (mask | test) & ~fips);
	if (alg) {
		if (((type | mask) ^ fips) & fips)
			mask |= fips;
		mask &= fips;

		if (!crypto_is_larval(alg) &&
		    ((type ^ alg->cra_flags) & mask)) {
			/* Algorithm is disallowed in FIPS mode. */
			crypto_mod_put(alg);
			alg = ERR_PTR(-ENOENT);
		}
	} else if (test) {
		alg = __crypto_alg_lookup(name, type, mask);
		if (alg && !crypto_is_larval(alg)) {
			/* Test failed */
			crypto_mod_put(alg);
			alg = ERR_PTR(-ELIBBAD);
		}
	}
	up_read(&crypto_alg_sem);

	return alg;
}

/*
 * Look up @name, trying module autoloading ("crypto-<name>" aliases) if
 * it is not yet registered, and finally registering a larval so a crypto
 * manager can be asked to construct it.  Waits on any larval found.
 */
static struct crypto_alg *crypto_larval_lookup(const char *name, u32 type,
					       u32 mask)
{
	struct crypto_alg *alg;

	if (!name)
		return ERR_PTR(-ENOENT);

	type &= ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD);
	mask &= ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD);

	alg = crypto_alg_lookup(name, type, mask);
	if (!alg && !(mask & CRYPTO_NOLOAD)) {
		request_module("crypto-%s", name);

		if (!((type ^ CRYPTO_ALG_NEED_FALLBACK) & mask &
		      CRYPTO_ALG_NEED_FALLBACK))
			request_module("crypto-%s-all", name);

		alg = crypto_alg_lookup(name, type, mask);
	}

	if (!IS_ERR_OR_NULL(alg) && crypto_is_larval(alg))
		alg = crypto_larval_wait(alg);
	else if (!alg)
		alg = crypto_larval_add(name, type, mask);

	return alg;
}

int crypto_probing_notify(unsigned long val, void *v)
{}
EXPORT_SYMBOL_GPL();

/*
 * Top-level algorithm lookup: resolves @name, asking loaded crypto
 * managers to instantiate it on the fly if necessary.  Returns the
 * algorithm with a reference held, or an ERR_PTR.
 */
struct crypto_alg *crypto_alg_mod_lookup(const char *name, u32 type, u32 mask)
{
	struct crypto_alg *alg;
	struct crypto_alg *larval;
	int ok;

	/*
	 * If the internal flag is set for a cipher, require a caller to
	 * invoke the cipher with the internal flag to use that cipher.
	 * Also, if a caller wants to allocate a cipher that may or may
	 * not be an internal cipher, use type | CRYPTO_ALG_INTERNAL and
	 * !(mask & CRYPTO_ALG_INTERNAL).
	 */
	if (!((type | mask) & CRYPTO_ALG_INTERNAL))
		mask |= CRYPTO_ALG_INTERNAL;

	larval = crypto_larval_lookup(name, type, mask);
	if (IS_ERR(larval) || !crypto_is_larval(larval))
		return larval;

	ok = crypto_probing_notify(CRYPTO_MSG_ALG_REQUEST, larval);

	if (ok == NOTIFY_STOP)
		alg = crypto_larval_wait(larval);
	else {
		crypto_mod_put(larval);
		alg = ERR_PTR(-ENOENT);
	}
	crypto_larval_kill(larval);
	return alg;
}
EXPORT_SYMBOL_GPL(crypto_alg_mod_lookup);

static void crypto_exit_ops(struct crypto_tfm *tfm)
{}

/*
 * Compute the per-transform context size: alignment padding plus the
 * frontend's context size, or the legacy per-type size for the old
 * cipher/compress types that have no crypto_type object.
 */
static unsigned int crypto_ctxsize(struct crypto_alg *alg, u32 type, u32 mask)
{
	const struct crypto_type *type_obj = alg->cra_type;
	unsigned int len;

	len = alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1);
	if (type_obj)
		return len + type_obj->ctxsize(alg, type, mask);

	switch (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) {
	default:
		BUG();

	case CRYPTO_ALG_TYPE_CIPHER:
		len += crypto_cipher_ctxsize(alg);
		break;

	case CRYPTO_ALG_TYPE_COMPRESS:
		len += crypto_compress_ctxsize(alg);
		break;
	}

	return len;
}

void crypto_shoot_alg(struct crypto_alg *alg)
{}
EXPORT_SYMBOL_GPL();

/*
 * Allocate and initialize a bare transform for @alg with the given gfp
 * flags.  On cra_init failure the algorithm is shot if it asked to be
 * retried (-EAGAIN).  Returns the tfm or an ERR_PTR.
 */
struct crypto_tfm *__crypto_alloc_tfmgfp(struct crypto_alg *alg, u32 type,
					 u32 mask, gfp_t gfp)
{
	struct crypto_tfm *tfm;
	unsigned int tfm_size;
	int err = -ENOMEM;

	tfm_size = sizeof(*tfm) + crypto_ctxsize(alg, type, mask);
	tfm = kzalloc(tfm_size, gfp);
	if (tfm == NULL)
		goto out_err;

	tfm->__crt_alg = alg;
	refcount_set(&tfm->refcnt, 1);

	if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
		goto cra_init_failed;

	goto out;

cra_init_failed:
	crypto_exit_ops(tfm);
	if (err == -EAGAIN)
		crypto_shoot_alg(alg);
	kfree(tfm);
out_err:
	tfm = ERR_PTR(err);
out:
	return tfm;
}
EXPORT_SYMBOL_GPL(__crypto_alloc_tfmgfp);

/* GFP_KERNEL convenience wrapper around __crypto_alloc_tfmgfp(). */
struct crypto_tfm *__crypto_alloc_tfm(struct crypto_alg *alg, u32 type,
				      u32 mask)
{
	return __crypto_alloc_tfmgfp(alg, type, mask, GFP_KERNEL);
}
EXPORT_SYMBOL_GPL(__crypto_alloc_tfm);

/*
 *	crypto_alloc_base - Locate algorithm and allocate transform
 *	@alg_name: Name of algorithm
 *	@type: Type of algorithm
 *	@mask: Mask for type comparison
 *
 *	This function should not be used by new algorithm types.
 *	Please use crypto_alloc_tfm instead.
 *
 *	crypto_alloc_base() will first attempt to locate an already loaded
 *	algorithm.  If that fails and the kernel supports dynamically loadable
 *	modules, it will then attempt to load a module of the same name or
 *	alias.  If that fails it will send a query to any loaded crypto manager
 *	to construct an algorithm on the fly.  A refcount is grabbed on the
 *	algorithm which is then associated with the new transform.
 *
 *	The returned transform is of a non-determinate type.  Most people
 *	should use one of the more specific allocation functions such as
 *	crypto_alloc_skcipher().
 *
 *	In case of error the return value is an error pointer.
 */
struct crypto_tfm *crypto_alloc_base(const char *alg_name, u32 type, u32 mask)
{
	struct crypto_tfm *tfm;
	int err;

	/* Retry on -EAGAIN: the algorithm may be rebuilt by the manager. */
	for (;;) {
		struct crypto_alg *alg;

		alg = crypto_alg_mod_lookup(alg_name, type, mask);
		if (IS_ERR(alg)) {
			err = PTR_ERR(alg);
			goto err;
		}

		tfm = __crypto_alloc_tfm(alg, type, mask);
		if (!IS_ERR(tfm))
			return tfm;

		crypto_mod_put(alg);
		err = PTR_ERR(tfm);

err:
		if (err != -EAGAIN)
			break;
		if (fatal_signal_pending(current)) {
			err = -EINTR;
			break;
		}
	}

	return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_base);

/*
 * Allocate the combined frontend + tfm + context memory block on @node.
 * Returns the start of the block (the frontend object); the embedded
 * crypto_tfm lives at offset frontend->tfmsize.
 */
static void *crypto_alloc_tfmmem(struct crypto_alg *alg,
				 const struct crypto_type *frontend, int node,
				 gfp_t gfp)
{
	struct crypto_tfm *tfm;
	unsigned int tfmsize;
	unsigned int total;
	char *mem;

	tfmsize = frontend->tfmsize;
	total = tfmsize + sizeof(*tfm) + frontend->extsize(alg);

	mem = kzalloc_node(total, gfp, node);
	if (mem == NULL)
		return ERR_PTR(-ENOMEM);

	tfm = (struct crypto_tfm *)(mem + tfmsize);
	tfm->__crt_alg = alg;
	tfm->node = node;
	refcount_set(&tfm->refcnt, 1);

	return mem;
}

void *crypto_create_tfm_node(struct crypto_alg *alg,
			     const struct crypto_type *frontend,
			     int node)
{}
EXPORT_SYMBOL_GPL();

void *crypto_clone_tfm(const struct crypto_type *frontend,
		       struct crypto_tfm *otfm)
{}
EXPORT_SYMBOL_GPL();

/*
 * Apply the frontend's type/mask adjustments, then perform a full
 * algorithm lookup.
 */
struct crypto_alg *crypto_find_alg(const char *alg_name,
				   const struct crypto_type *frontend,
				   u32 type, u32 mask)
{
	if (frontend) {
		type &= frontend->maskclear;
		mask &= frontend->maskclear;
		type |= frontend->type;
		mask |= frontend->maskset;
	}

	return crypto_alg_mod_lookup(alg_name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_find_alg);

/*
 *	crypto_alloc_tfm_node - Locate algorithm and allocate transform
 *	@alg_name: Name of algorithm
 *	@frontend: Frontend algorithm type
 *	@type: Type of algorithm
 *	@mask: Mask for type comparison
 *	@node: NUMA node in which users desire to put requests, if node is
 *		NUMA_NO_NODE, it means users have no special requirement.
 *
 *	crypto_alloc_tfm() will first attempt to locate an already loaded
 *	algorithm.  If that fails and the kernel supports dynamically loadable
 *	modules, it will then attempt to load a module of the same name or
 *	alias.  If that fails it will send a query to any loaded crypto manager
 *	to construct an algorithm on the fly.  A refcount is grabbed on the
 *	algorithm which is then associated with the new transform.
 *
 *	The returned transform is of a non-determinate type.  Most people
 *	should use one of the more specific allocation functions such as
 *	crypto_alloc_skcipher().
 *
 *	In case of error the return value is an error pointer.
 */

void *crypto_alloc_tfm_node(const char *alg_name,
		       const struct crypto_type *frontend, u32 type, u32 mask,
		       int node)
{
	void *tfm;
	int err;

	/* Retry on -EAGAIN: the algorithm may be rebuilt by the manager. */
	for (;;) {
		struct crypto_alg *alg;

		alg = crypto_find_alg(alg_name, frontend, type, mask);
		if (IS_ERR(alg)) {
			err = PTR_ERR(alg);
			goto err;
		}

		tfm = crypto_create_tfm_node(alg, frontend, node);
		if (!IS_ERR(tfm))
			return tfm;

		crypto_mod_put(alg);
		err = PTR_ERR(tfm);

err:
		if (err != -EAGAIN)
			break;
		if (fatal_signal_pending(current)) {
			err = -EINTR;
			break;
		}
	}

	return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_tfm_node);

/*
 *	crypto_destroy_tfm - Free crypto transform
 *	@mem: Start of tfm slab
 *	@tfm: Transform to free
 *
 *	This function frees up the transform and any associated resources,
 *	then drops the refcount on the associated algorithm.
 */
void crypto_destroy_tfm(void *mem, struct crypto_tfm *tfm)
{}
EXPORT_SYMBOL_GPL();

/*
 * Return 1 if an algorithm matching @name/@type/@mask can be obtained
 * (possibly after module loading / on-the-fly construction), else 0.
 */
int crypto_has_alg(const char *name, u32 type, u32 mask)
{
	int ret = 0;
	struct crypto_alg *alg = crypto_alg_mod_lookup(name, type, mask);

	if (!IS_ERR(alg)) {
		crypto_mod_put(alg);
		ret = 1;
	}

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_has_alg);

void crypto_req_done(void *data, int err)
{}
EXPORT_SYMBOL_GPL();

MODULE_DESCRIPTION();
MODULE_LICENSE();