/*
 * Software async crypto daemon
 *
 * Added AEAD support to cryptd.
 *    Authors: Tadeusz Struk (tadeusz.struk@intel.com)
 *             Adrian Hoban <adrian.hoban@intel.com>
 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
 *             Aidan O'Mahony (aidan.o.mahony@intel.com)
 *    Copyright (c) 2010, Intel Corporation.
 */

#ifndef _CRYPTO_CRYPT_H
#define _CRYPTO_CRYPT_H

#include <linux/kernel.h>
#include <crypto/aead.h>
#include <crypto/hash.h>
#include <crypto/skcipher.h>

struct cryptd_ablkcipher {
	struct crypto_ablkcipher base;
};

static inline struct cryptd_ablkcipher *__cryptd_ablkcipher_cast(
	struct crypto_ablkcipher *tfm)
{
	return (struct cryptd_ablkcipher *)tfm;
}

/* alg_name should be algorithm to be cryptd-ed */
struct cryptd_ablkcipher *cryptd_alloc_ablkcipher(const char *alg_name,
						  u32 type, u32 mask);
struct crypto_blkcipher *cryptd_ablkcipher_child(struct cryptd_ablkcipher *tfm);
bool cryptd_ablkcipher_queued(struct cryptd_ablkcipher *tfm);
void cryptd_free_ablkcipher(struct cryptd_ablkcipher *tfm);
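
/*
 * Illustrative sketch, not from an in-tree user: wrap a cipher with cryptd
 * so that requests are handed off to the cryptd workqueue.  The algorithm
 * name and the zero type/mask values are assumptions made for the example.
 *
 *	struct cryptd_ablkcipher *ctfm;
 *
 *	ctfm = cryptd_alloc_ablkcipher("aes", 0, 0);
 *	if (IS_ERR(ctfm))
 *		return PTR_ERR(ctfm);
 *	(requests issued against &ctfm->base complete asynchronously)
 *	cryptd_free_ablkcipher(ctfm);
 */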

struct cryptd_skcipher {
	struct crypto_skcipher base;
};

struct cryptd_skcipher *cryptd_alloc_skcipher(const char *alg_name,
					      u32 type, u32 mask);
struct crypto_skcipher *cryptd_skcipher_child(struct cryptd_skcipher *tfm);
/* Must be called without moving CPUs. */
bool cryptd_skcipher_queued(struct cryptd_skcipher *tfm);
void cryptd_free_skcipher(struct cryptd_skcipher *tfm);
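
/*
 * Illustrative sketch with assumed algorithm name and flags (not from an
 * in-tree user): allocate the cryptd-wrapped skcipher, key it through the
 * wrapper, and use cryptd_skcipher_child() when the underlying synchronous
 * transform is needed directly.
 *
 *	struct cryptd_skcipher *ctfm;
 *
 *	ctfm = cryptd_alloc_skcipher("cbc(aes)", 0, 0);
 *	if (IS_ERR(ctfm))
 *		return PTR_ERR(ctfm);
 *	crypto_skcipher_setkey(&ctfm->base, key, keylen);
 *	...
 *	cryptd_free_skcipher(ctfm);
 */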

struct cryptd_ahash {
	struct crypto_ahash base;
};

static inline struct cryptd_ahash *__cryptd_ahash_cast(
	struct crypto_ahash *tfm)
{
	return (struct cryptd_ahash *)tfm;
}

/* alg_name should be algorithm to be cryptd-ed */
struct cryptd_ahash *cryptd_alloc_ahash(const char *alg_name,
					u32 type, u32 mask);
struct crypto_shash *cryptd_ahash_child(struct cryptd_ahash *tfm);
struct shash_desc *cryptd_shash_desc(struct ahash_request *req);
/* Must be called without moving CPUs. */
bool cryptd_ahash_queued(struct cryptd_ahash *tfm);
void cryptd_free_ahash(struct cryptd_ahash *tfm);
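
/*
 * Illustrative sketch with an assumed algorithm name (not from an in-tree
 * user): wrap a hash so that updates run from the cryptd workqueue;
 * cryptd_ahash_child() returns the underlying shash transform.
 *
 *	struct cryptd_ahash *chash;
 *
 *	chash = cryptd_alloc_ahash("sha256", 0, 0);
 *	if (IS_ERR(chash))
 *		return PTR_ERR(chash);
 *	...
 *	cryptd_free_ahash(chash);
 */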

struct cryptd_aead {
	struct crypto_aead base;
};

static inline struct cryptd_aead *__cryptd_aead_cast(
	struct crypto_aead *tfm)
{
	return (struct cryptd_aead *)tfm;
}

struct cryptd_aead *cryptd_alloc_aead(const char *alg_name,
				      u32 type, u32 mask);

struct crypto_aead *cryptd_aead_child(struct cryptd_aead *tfm);
/* Must be called without moving CPUs. */
bool cryptd_aead_queued(struct cryptd_aead *tfm);

void cryptd_free_aead(struct cryptd_aead *tfm);
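
/*
 * Illustrative sketch with assumed algorithm name and flags (not from an
 * in-tree user): allocate a cryptd-backed AEAD; cryptd_aead_queued() lets
 * a caller that normally bypasses cryptd keep routing requests through it
 * while earlier requests still sit on the cryptd queue, preserving order.
 *
 *	struct cryptd_aead *caead;
 *
 *	caead = cryptd_alloc_aead("gcm(aes)", 0, 0);
 *	if (IS_ERR(caead))
 *		return PTR_ERR(caead);
 *	...
 *	cryptd_free_aead(caead);
 */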

#endif