/*
 * Software async crypto daemon
 *
 * Added AEAD support to cryptd.
 *    Authors: Tadeusz Struk (tadeusz.struk@intel.com)
 *             Adrian Hoban
 *             Gabriele Paoloni
 *             Aidan O'Mahony (aidan.o.mahony@intel.com)
 *    Copyright (c) 2010, Intel Corporation.
 */

#ifndef _CRYPTO_CRYPT_H
#define _CRYPTO_CRYPT_H

#include <linux/kernel.h>
#include <crypto/aead.h>
#include <crypto/hash.h>
#include <crypto/skcipher.h>

struct cryptd_ablkcipher {
	struct crypto_ablkcipher base;
};

static inline struct cryptd_ablkcipher *__cryptd_ablkcipher_cast(
	struct crypto_ablkcipher *tfm)
{
	return (struct cryptd_ablkcipher *)tfm;
}

/* alg_name should be algorithm to be cryptd-ed */
struct cryptd_ablkcipher *cryptd_alloc_ablkcipher(const char *alg_name,
						  u32 type, u32 mask);
struct crypto_blkcipher *cryptd_ablkcipher_child(struct cryptd_ablkcipher *tfm);
bool cryptd_ablkcipher_queued(struct cryptd_ablkcipher *tfm);
void cryptd_free_ablkcipher(struct cryptd_ablkcipher *tfm);

struct cryptd_skcipher {
	struct crypto_skcipher base;
};

struct cryptd_skcipher *cryptd_alloc_skcipher(const char *alg_name,
					      u32 type, u32 mask);
struct crypto_skcipher *cryptd_skcipher_child(struct cryptd_skcipher *tfm);
/* Must be called without moving CPUs. */
bool cryptd_skcipher_queued(struct cryptd_skcipher *tfm);
void cryptd_free_skcipher(struct cryptd_skcipher *tfm);

struct cryptd_ahash {
	struct crypto_ahash base;
};

static inline struct cryptd_ahash *__cryptd_ahash_cast(
	struct crypto_ahash *tfm)
{
	return (struct cryptd_ahash *)tfm;
}

/* alg_name should be algorithm to be cryptd-ed */
struct cryptd_ahash *cryptd_alloc_ahash(const char *alg_name,
					u32 type, u32 mask);
struct crypto_shash *cryptd_ahash_child(struct cryptd_ahash *tfm);
struct shash_desc *cryptd_shash_desc(struct ahash_request *req);
/* Must be called without moving CPUs. */
bool cryptd_ahash_queued(struct cryptd_ahash *tfm);
void cryptd_free_ahash(struct cryptd_ahash *tfm);

struct cryptd_aead {
	struct crypto_aead base;
};

static inline struct cryptd_aead *__cryptd_aead_cast(
	struct crypto_aead *tfm)
{
	return (struct cryptd_aead *)tfm;
}

struct cryptd_aead *cryptd_alloc_aead(const char *alg_name,
				      u32 type, u32 mask);

struct crypto_aead *cryptd_aead_child(struct cryptd_aead *tfm);
/* Must be called without moving CPUs. */
bool cryptd_aead_queued(struct cryptd_aead *tfm);

void cryptd_free_aead(struct cryptd_aead *tfm);

#endif
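/*
 * A minimal caller-side sketch of the ahash interface declared above,
 * loosely modelled on how in-kernel drivers wrap a synchronous hash
 * behind cryptd. The algorithm name "sha256-generic", the zero
 * type/mask flags, and the function name example_use_cryptd_ahash are
 * illustrative assumptions, not anything defined by this header.
 */

#include <linux/err.h>
#include <crypto/cryptd.h>

static int example_use_cryptd_ahash(void)
{
	struct cryptd_ahash *cryptd_tfm;
	struct crypto_shash *child;

	/* Allocate "cryptd(sha256-generic)"; returns ERR_PTR() on failure. */
	cryptd_tfm = cryptd_alloc_ahash("sha256-generic", 0, 0);
	if (IS_ERR(cryptd_tfm))
		return PTR_ERR(cryptd_tfm);

	/* The child transform is the synchronous shash wrapped by cryptd. */
	child = cryptd_ahash_child(cryptd_tfm);

	/*
	 * cryptd_ahash_queued() reports whether requests are still pending
	 * on the daemon's queue; per the comment in the header, call it
	 * without migrating between CPUs.
	 */
	if (!cryptd_ahash_queued(cryptd_tfm))
		pr_info("cryptd queue empty, child shash %s\n",
			crypto_tfm_alg_name(crypto_shash_tfm(child)));

	cryptd_free_ahash(cryptd_tfm);
	return 0;
}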