
Release 4.12 include/crypto/cryptd.h

/*
 * Software async crypto daemon
 *
 * Added AEAD support to cryptd.
 *    Authors: Tadeusz Struk (tadeusz.struk@intel.com)
 *             Adrian Hoban <adrian.hoban@intel.com>
 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
 *             Aidan O'Mahony (aidan.o.mahony@intel.com)
 *    Copyright (c) 2010, Intel Corporation.
 */

#ifndef _CRYPTO_CRYPT_H
#define _CRYPTO_CRYPT_H

#include <linux/kernel.h>
#include <crypto/aead.h>
#include <crypto/hash.h>
#include <crypto/skcipher.h>


struct cryptd_ablkcipher {
	struct crypto_ablkcipher base;
};


static inline struct cryptd_ablkcipher *__cryptd_ablkcipher_cast(
	struct crypto_ablkcipher *tfm)
{
	return (struct cryptd_ablkcipher *)tfm;
}

Contributors

Person        Tokens  Prop     Commits  CommitProp
Huang Ying        22  100.00%        1     100.00%
Total             22  100.00%        1     100.00%

/* alg_name should be algorithm to be cryptd-ed */
struct cryptd_ablkcipher *cryptd_alloc_ablkcipher(const char *alg_name,
						  u32 type, u32 mask);
struct crypto_blkcipher *cryptd_ablkcipher_child(struct cryptd_ablkcipher *tfm);
bool cryptd_ablkcipher_queued(struct cryptd_ablkcipher *tfm);
void cryptd_free_ablkcipher(struct cryptd_ablkcipher *tfm);

struct cryptd_skcipher {
	struct crypto_skcipher base;
};

struct cryptd_skcipher *cryptd_alloc_skcipher(const char *alg_name,
					      u32 type, u32 mask);
struct crypto_skcipher *cryptd_skcipher_child(struct cryptd_skcipher *tfm);
/* Must be called without moving CPUs. */
bool cryptd_skcipher_queued(struct cryptd_skcipher *tfm);
void cryptd_free_skcipher(struct cryptd_skcipher *tfm);

struct cryptd_ahash {
	struct crypto_ahash base;
};
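
A typical caller wraps an existing algorithm in the daemon at init time and drives the synchronous child transform whenever nothing is queued (per the comment on cryptd_skcipher_queued(), that check must not race with CPU migration). A minimal sketch, assuming a hypothetical driver; the algorithm name "ecb(aes)", the zero type/mask, and the example_* helper names are placeholders, not taken from this header:

#include <linux/err.h>
#include <crypto/cryptd.h>
#include <crypto/skcipher.h>

/* Hypothetical setup: ask cryptd to wrap "ecb(aes)". */
static int example_skcipher_init(struct cryptd_skcipher **out)
{
	struct cryptd_skcipher *ctfm = cryptd_alloc_skcipher("ecb(aes)", 0, 0);

	if (IS_ERR(ctfm))
		return PTR_ERR(ctfm);
	*out = ctfm;
	return 0;
}

/* Hypothetical fast path: bypass the daemon when its queue is empty,
 * otherwise submit through the async wrapper itself. */
static struct crypto_skcipher *example_skcipher_pick(struct cryptd_skcipher *ctfm)
{
	if (!cryptd_skcipher_queued(ctfm))
		return cryptd_skcipher_child(ctfm);
	return &ctfm->base;
}

/* Hypothetical teardown. */
static void example_skcipher_exit(struct cryptd_skcipher *ctfm)
{
	cryptd_free_skcipher(ctfm);
}
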
static inline struct cryptd_ahash *__cryptd_ahash_cast(
	struct crypto_ahash *tfm)
{
	return (struct cryptd_ahash *)tfm;
}

Contributors

Person        Tokens  Prop     Commits  CommitProp
Huang Ying        22  100.00%        1     100.00%
Total             22  100.00%        1     100.00%

/* alg_name should be algorithm to be cryptd-ed */
struct cryptd_ahash *cryptd_alloc_ahash(const char *alg_name,
					u32 type, u32 mask);
struct crypto_shash *cryptd_ahash_child(struct cryptd_ahash *tfm);
struct shash_desc *cryptd_shash_desc(struct ahash_request *req);
/* Must be called without moving CPUs. */
bool cryptd_ahash_queued(struct cryptd_ahash *tfm);
void cryptd_free_ahash(struct cryptd_ahash *tfm);

struct cryptd_aead {
	struct crypto_aead base;
};
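
For hashes, the child transform is a synchronous shash, and cryptd_shash_desc() exposes the shash_desc embedded in an ahash_request that targets the cryptd-wrapped transform, so callers can finish work inline when possible. A minimal sketch; the helper name and the assumption that 'cryptd_req' is aimed at the wrapped transform are illustrative:

#include <crypto/cryptd.h>
#include <crypto/hash.h>

/* Hypothetical synchronous path: drive the shash child directly through
 * the descriptor embedded in the request aimed at the cryptd transform. */
static int example_ahash_fast_init(struct cryptd_ahash *ctfm,
				   struct ahash_request *cryptd_req)
{
	struct shash_desc *desc = cryptd_shash_desc(cryptd_req);

	desc->tfm = cryptd_ahash_child(ctfm);
	desc->flags = cryptd_req->base.flags;	/* propagate request flags (4.12 API) */
	return crypto_shash_init(desc);
}
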
static inline struct cryptd_aead *__cryptd_aead_cast(
	struct crypto_aead *tfm)
{
	return (struct cryptd_aead *)tfm;
}

Contributors

Person        Tokens  Prop     Commits  CommitProp
Adrian Hoban      22  100.00%        1     100.00%
Total             22  100.00%        1     100.00%

struct cryptd_aead *cryptd_alloc_aead(const char *alg_name,
				      u32 type, u32 mask);
struct crypto_aead *cryptd_aead_child(struct cryptd_aead *tfm);
/* Must be called without moving CPUs. */
bool cryptd_aead_queued(struct cryptd_aead *tfm);
void cryptd_free_aead(struct cryptd_aead *tfm);

#endif
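
The AEAD interface follows the same shape; glue code such as the x86 AES-NI RFC4106 driver allocates the cryptd wrapper at init time and consults cryptd_aead_queued() to decide whether the child can be used directly. A minimal sketch with placeholder names; "gcm(aes)" and the zero type/mask are assumptions:

#include <linux/err.h>
#include <crypto/cryptd.h>
#include <crypto/aead.h>

/* Hypothetical setup and fast-path selection for a cryptd-wrapped AEAD. */
static struct cryptd_aead *example_aead_init(void)
{
	return cryptd_alloc_aead("gcm(aes)", 0, 0);	/* may return ERR_PTR() */
}

static struct crypto_aead *example_aead_pick(struct cryptd_aead *ctfm)
{
	if (!cryptd_aead_queued(ctfm))
		return cryptd_aead_child(ctfm);
	return &ctfm->base;
}

static void example_aead_exit(struct cryptd_aead *ctfm)
{
	cryptd_free_aead(ctfm);
}
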

Overall Contributors

Person        Tokens  Prop     Commits  CommitProp
Huang Ying       163   50.46%        3      42.86%
Herbert Xu        91   28.17%        3      42.86%
Adrian Hoban      69   21.36%        1      14.29%
Total            323  100.00%        7     100.00%