author    | Srikant Patnaik | 2015-01-11 12:28:04 +0530
committer | Srikant Patnaik | 2015-01-11 12:28:04 +0530
commit    | 871480933a1c28f8a9fed4c4d34d06c439a7a422 (patch)
tree      | 8718f573808810c2a1e8cb8fb6ac469093ca2784 /crypto
parent    | 9d40ac5867b9aefe0722bc1f110b965ff294d30d (diff)
Moved, renamed, and deleted files
The original directory structure was scattered and unorganized.
These changes rearrange the tree to follow the standard kernel source layout.
Diffstat (limited to 'crypto')
95 files changed, 58031 insertions, 0 deletions
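Before the raw diff, a brief orientation: the Kconfig entries below select algorithms and modes (for example CRYPTO_AES and CRYPTO_CBC), the Makefile wires them into the build, and the C files (ablkcipher.c, aead.c, and the rest) implement the in-kernel crypto API those options expose. The sketch below is not part of this commit; it only illustrates how a kernel module built against this tree might drive the asynchronous block-cipher interface that crypto/ablkcipher.c provides. The function name example_encrypt, the hard-coded 128-bit key length, and the all-zero IV are illustrative assumptions.

/* Illustrative sketch only, not part of this commit.
 * Uses the ablkcipher interface added in crypto/ablkcipher.c with the
 * cbc(aes) template provided by CRYPTO_CBC and CRYPTO_AES above.
 * Assumes len is a multiple of the AES block size (16 bytes) and that
 * key points to a 128-bit key; error handling is abbreviated.
 */
#include <linux/crypto.h>
#include <linux/scatterlist.h>
#include <linux/err.h>

static int example_encrypt(u8 *buf, unsigned int len, const u8 *key)
{
	struct crypto_ablkcipher *tfm;
	struct ablkcipher_request *req;
	struct scatterlist sg;
	u8 iv[16] = { 0 };	/* illustrative IV; real callers must use a proper one */
	int err;

	tfm = crypto_alloc_ablkcipher("cbc(aes)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_ablkcipher_setkey(tfm, key, 16);
	if (err)
		goto out_free_tfm;

	req = ablkcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		err = -ENOMEM;
		goto out_free_tfm;
	}

	/* A real asynchronous user would pass a completion callback here. */
	ablkcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_SLEEP |
					CRYPTO_TFM_REQ_MAY_BACKLOG, NULL, NULL);

	/* Encrypt buf in place. */
	sg_init_one(&sg, buf, len);
	ablkcipher_request_set_crypt(req, &sg, &sg, len, iv);

	err = crypto_ablkcipher_encrypt(req);

	ablkcipher_request_free(req);
out_free_tfm:
	crypto_free_ablkcipher(tfm);
	return err;
}

Asynchronous providers (such as the cryptd or AES-NI drivers selectable above) would return -EINPROGRESS from crypto_ablkcipher_encrypt and signal completion through the request callback; the generic software cbc(aes) path completes inline, which keeps this sketch simple.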
diff --git a/crypto/Kconfig b/crypto/Kconfig new file mode 100644 index 00000000..8e84225c --- /dev/null +++ b/crypto/Kconfig @@ -0,0 +1,982 @@ +# +# Generic algorithms support +# +config XOR_BLOCKS + tristate + +# +# async_tx api: hardware offloaded memory transfer/transform support +# +source "crypto/async_tx/Kconfig" + +# +# Cryptographic API Configuration +# +menuconfig CRYPTO + tristate "Cryptographic API" + help + This option provides the core Cryptographic API. + +if CRYPTO + +comment "Crypto core or helper" + +config CRYPTO_FIPS + bool "FIPS 200 compliance" + depends on CRYPTO_ANSI_CPRNG && !CRYPTO_MANAGER_DISABLE_TESTS + help + This options enables the fips boot option which is + required if you want to system to operate in a FIPS 200 + certification. You should say no unless you know what + this is. + +config CRYPTO_ALGAPI + tristate + select CRYPTO_ALGAPI2 + help + This option provides the API for cryptographic algorithms. + +config CRYPTO_ALGAPI2 + tristate + +config CRYPTO_AEAD + tristate + select CRYPTO_AEAD2 + select CRYPTO_ALGAPI + +config CRYPTO_AEAD2 + tristate + select CRYPTO_ALGAPI2 + +config CRYPTO_BLKCIPHER + tristate + select CRYPTO_BLKCIPHER2 + select CRYPTO_ALGAPI + +config CRYPTO_BLKCIPHER2 + tristate + select CRYPTO_ALGAPI2 + select CRYPTO_RNG2 + select CRYPTO_WORKQUEUE + +config CRYPTO_HASH + tristate + select CRYPTO_HASH2 + select CRYPTO_ALGAPI + +config CRYPTO_HASH2 + tristate + select CRYPTO_ALGAPI2 + +config CRYPTO_RNG + tristate + select CRYPTO_RNG2 + select CRYPTO_ALGAPI + +config CRYPTO_RNG2 + tristate + select CRYPTO_ALGAPI2 + +config CRYPTO_PCOMP + tristate + select CRYPTO_PCOMP2 + select CRYPTO_ALGAPI + +config CRYPTO_PCOMP2 + tristate + select CRYPTO_ALGAPI2 + +config CRYPTO_MANAGER + tristate "Cryptographic algorithm manager" + select CRYPTO_MANAGER2 + help + Create default cryptographic template instantiations such as + cbc(aes). + +config CRYPTO_MANAGER2 + def_tristate CRYPTO_MANAGER || (CRYPTO_MANAGER!=n && CRYPTO_ALGAPI=y) + select CRYPTO_AEAD2 + select CRYPTO_HASH2 + select CRYPTO_BLKCIPHER2 + select CRYPTO_PCOMP2 + +config CRYPTO_USER + tristate "Userspace cryptographic algorithm configuration" + depends on NET + select CRYPTO_MANAGER + help + Userspace configuration for cryptographic instantiations such as + cbc(aes). + +config CRYPTO_MANAGER_DISABLE_TESTS + bool "Disable run-time self tests" + default y + depends on CRYPTO_MANAGER2 + help + Disable run-time self tests that normally take place at + algorithm registration. + +config CRYPTO_GF128MUL + tristate "GF(2^128) multiplication functions" + help + Efficient table driven implementation of multiplications in the + field GF(2^128). This is needed by some cypher modes. This + option will be selected automatically if you select such a + cipher mode. Only select this option by hand if you expect to load + an external module that requires these functions. + +config CRYPTO_NULL + tristate "Null algorithms" + select CRYPTO_ALGAPI + select CRYPTO_BLKCIPHER + select CRYPTO_HASH + help + These are 'Null' algorithms, used by IPsec, which do nothing. + +config CRYPTO_PCRYPT + tristate "Parallel crypto engine (EXPERIMENTAL)" + depends on SMP && EXPERIMENTAL + select PADATA + select CRYPTO_MANAGER + select CRYPTO_AEAD + help + This converts an arbitrary crypto algorithm into a parallel + algorithm that executes in kernel threads. 
+ +config CRYPTO_WORKQUEUE + tristate + +config CRYPTO_CRYPTD + tristate "Software async crypto daemon" + select CRYPTO_BLKCIPHER + select CRYPTO_HASH + select CRYPTO_MANAGER + select CRYPTO_WORKQUEUE + help + This is a generic software asynchronous crypto daemon that + converts an arbitrary synchronous software crypto algorithm + into an asynchronous algorithm that executes in a kernel thread. + +config CRYPTO_AUTHENC + tristate "Authenc support" + select CRYPTO_AEAD + select CRYPTO_BLKCIPHER + select CRYPTO_MANAGER + select CRYPTO_HASH + help + Authenc: Combined mode wrapper for IPsec. + This is required for IPSec. + +config CRYPTO_TEST + tristate "Testing module" + depends on m + select CRYPTO_MANAGER + help + Quick & dirty crypto test module. + +comment "Authenticated Encryption with Associated Data" + +config CRYPTO_CCM + tristate "CCM support" + select CRYPTO_CTR + select CRYPTO_AEAD + help + Support for Counter with CBC MAC. Required for IPsec. + +config CRYPTO_GCM + tristate "GCM/GMAC support" + select CRYPTO_CTR + select CRYPTO_AEAD + select CRYPTO_GHASH + help + Support for Galois/Counter Mode (GCM) and Galois Message + Authentication Code (GMAC). Required for IPSec. + +config CRYPTO_SEQIV + tristate "Sequence Number IV Generator" + select CRYPTO_AEAD + select CRYPTO_BLKCIPHER + select CRYPTO_RNG + help + This IV generator generates an IV based on a sequence number by + xoring it with a salt. This algorithm is mainly useful for CTR + +comment "Block modes" + +config CRYPTO_CBC + tristate "CBC support" + select CRYPTO_BLKCIPHER + select CRYPTO_MANAGER + help + CBC: Cipher Block Chaining mode + This block cipher algorithm is required for IPSec. + +config CRYPTO_CTR + tristate "CTR support" + select CRYPTO_BLKCIPHER + select CRYPTO_SEQIV + select CRYPTO_MANAGER + help + CTR: Counter mode + This block cipher algorithm is required for IPSec. + +config CRYPTO_CTS + tristate "CTS support" + select CRYPTO_BLKCIPHER + help + CTS: Cipher Text Stealing + This is the Cipher Text Stealing mode as described by + Section 8 of rfc2040 and referenced by rfc3962. + (rfc3962 includes errata information in its Appendix A) + This mode is required for Kerberos gss mechanism support + for AES encryption. + +config CRYPTO_ECB + tristate "ECB support" + select CRYPTO_BLKCIPHER + select CRYPTO_MANAGER + help + ECB: Electronic CodeBook mode + This is the simplest block cipher algorithm. It simply encrypts + the input block by block. + +config CRYPTO_LRW + tristate "LRW support" + select CRYPTO_BLKCIPHER + select CRYPTO_MANAGER + select CRYPTO_GF128MUL + help + LRW: Liskov Rivest Wagner, a tweakable, non malleable, non movable + narrow block cipher mode for dm-crypt. Use it with cipher + specification string aes-lrw-benbi, the key must be 256, 320 or 384. + The first 128, 192 or 256 bits in the key are used for AES and the + rest is used to tie each cipher block to its logical position. + +config CRYPTO_PCBC + tristate "PCBC support" + select CRYPTO_BLKCIPHER + select CRYPTO_MANAGER + help + PCBC: Propagating Cipher Block Chaining mode + This block cipher algorithm is required for RxRPC. + +config CRYPTO_XTS + tristate "XTS support" + select CRYPTO_BLKCIPHER + select CRYPTO_MANAGER + select CRYPTO_GF128MUL + help + XTS: IEEE1619/D16 narrow block cipher use with aes-xts-plain, + key size 256, 384 or 512 bits. This implementation currently + can't handle a sectorsize which is not a multiple of 16 bytes. 
+ +comment "Hash modes" + +config CRYPTO_HMAC + tristate "HMAC support" + select CRYPTO_HASH + select CRYPTO_MANAGER + help + HMAC: Keyed-Hashing for Message Authentication (RFC2104). + This is required for IPSec. + +config CRYPTO_XCBC + tristate "XCBC support" + depends on EXPERIMENTAL + select CRYPTO_HASH + select CRYPTO_MANAGER + help + XCBC: Keyed-Hashing with encryption algorithm + http://www.ietf.org/rfc/rfc3566.txt + http://csrc.nist.gov/encryption/modes/proposedmodes/ + xcbc-mac/xcbc-mac-spec.pdf + +config CRYPTO_VMAC + tristate "VMAC support" + depends on EXPERIMENTAL + select CRYPTO_HASH + select CRYPTO_MANAGER + help + VMAC is a message authentication algorithm designed for + very high speed on 64-bit architectures. + + See also: + <http://fastcrypto.org/vmac> + +comment "Digest" + +config CRYPTO_CRC32C + tristate "CRC32c CRC algorithm" + select CRYPTO_HASH + select CRC32 + help + Castagnoli, et al Cyclic Redundancy-Check Algorithm. Used + by iSCSI for header and data digests and by others. + See Castagnoli93. Module will be crc32c. + +config CRYPTO_CRC32C_INTEL + tristate "CRC32c INTEL hardware acceleration" + depends on X86 + select CRYPTO_HASH + help + In Intel processor with SSE4.2 supported, the processor will + support CRC32C implementation using hardware accelerated CRC32 + instruction. This option will create 'crc32c-intel' module, + which will enable any routine to use the CRC32 instruction to + gain performance compared with software implementation. + Module will be crc32c-intel. + +config CRYPTO_GHASH + tristate "GHASH digest algorithm" + select CRYPTO_GF128MUL + help + GHASH is message digest algorithm for GCM (Galois/Counter Mode). + +config CRYPTO_MD4 + tristate "MD4 digest algorithm" + select CRYPTO_HASH + help + MD4 message digest algorithm (RFC1320). + +config CRYPTO_MD5 + tristate "MD5 digest algorithm" + select CRYPTO_HASH + help + MD5 message digest algorithm (RFC1321). + +config CRYPTO_MICHAEL_MIC + tristate "Michael MIC keyed digest algorithm" + select CRYPTO_HASH + help + Michael MIC is used for message integrity protection in TKIP + (IEEE 802.11i). This algorithm is required for TKIP, but it + should not be used for other purposes because of the weakness + of the algorithm. + +config CRYPTO_RMD128 + tristate "RIPEMD-128 digest algorithm" + select CRYPTO_HASH + help + RIPEMD-128 (ISO/IEC 10118-3:2004). + + RIPEMD-128 is a 128-bit cryptographic hash function. It should only + be used as a secure replacement for RIPEMD. For other use cases, + RIPEMD-160 should be used. + + Developed by Hans Dobbertin, Antoon Bosselaers and Bart Preneel. + See <http://homes.esat.kuleuven.be/~bosselae/ripemd160.html> + +config CRYPTO_RMD160 + tristate "RIPEMD-160 digest algorithm" + select CRYPTO_HASH + help + RIPEMD-160 (ISO/IEC 10118-3:2004). + + RIPEMD-160 is a 160-bit cryptographic hash function. It is intended + to be used as a secure replacement for the 128-bit hash functions + MD4, MD5 and it's predecessor RIPEMD + (not to be confused with RIPEMD-128). + + It's speed is comparable to SHA1 and there are no known attacks + against RIPEMD-160. + + Developed by Hans Dobbertin, Antoon Bosselaers and Bart Preneel. + See <http://homes.esat.kuleuven.be/~bosselae/ripemd160.html> + +config CRYPTO_RMD256 + tristate "RIPEMD-256 digest algorithm" + select CRYPTO_HASH + help + RIPEMD-256 is an optional extension of RIPEMD-128 with a + 256 bit hash. It is intended for applications that require + longer hash-results, without needing a larger security level + (than RIPEMD-128). 
+ + Developed by Hans Dobbertin, Antoon Bosselaers and Bart Preneel. + See <http://homes.esat.kuleuven.be/~bosselae/ripemd160.html> + +config CRYPTO_RMD320 + tristate "RIPEMD-320 digest algorithm" + select CRYPTO_HASH + help + RIPEMD-320 is an optional extension of RIPEMD-160 with a + 320 bit hash. It is intended for applications that require + longer hash-results, without needing a larger security level + (than RIPEMD-160). + + Developed by Hans Dobbertin, Antoon Bosselaers and Bart Preneel. + See <http://homes.esat.kuleuven.be/~bosselae/ripemd160.html> + +config CRYPTO_SHA1 + tristate "SHA1 digest algorithm" + select CRYPTO_HASH + help + SHA-1 secure hash standard (FIPS 180-1/DFIPS 180-2). + +config CRYPTO_SHA1_SSSE3 + tristate "SHA1 digest algorithm (SSSE3/AVX)" + depends on X86 && 64BIT + select CRYPTO_SHA1 + select CRYPTO_HASH + help + SHA-1 secure hash standard (FIPS 180-1/DFIPS 180-2) implemented + using Supplemental SSE3 (SSSE3) instructions or Advanced Vector + Extensions (AVX), when available. + +config CRYPTO_SHA256 + tristate "SHA224 and SHA256 digest algorithm" + select CRYPTO_HASH + help + SHA256 secure hash standard (DFIPS 180-2). + + This version of SHA implements a 256 bit hash with 128 bits of + security against collision attacks. + + This code also includes SHA-224, a 224 bit hash with 112 bits + of security against collision attacks. + +config CRYPTO_SHA512 + tristate "SHA384 and SHA512 digest algorithms" + select CRYPTO_HASH + help + SHA512 secure hash standard (DFIPS 180-2). + + This version of SHA implements a 512 bit hash with 256 bits of + security against collision attacks. + + This code also includes SHA-384, a 384 bit hash with 192 bits + of security against collision attacks. + +config CRYPTO_TGR192 + tristate "Tiger digest algorithms" + select CRYPTO_HASH + help + Tiger hash algorithm 192, 160 and 128-bit hashes + + Tiger is a hash function optimized for 64-bit processors while + still having decent performance on 32-bit processors. + Tiger was developed by Ross Anderson and Eli Biham. + + See also: + <http://www.cs.technion.ac.il/~biham/Reports/Tiger/>. + +config CRYPTO_WP512 + tristate "Whirlpool digest algorithms" + select CRYPTO_HASH + help + Whirlpool hash algorithm 512, 384 and 256-bit hashes + + Whirlpool-512 is part of the NESSIE cryptographic primitives. + Whirlpool will be part of the ISO/IEC 10118-3:2003(E) standard + + See also: + <http://www.larc.usp.br/~pbarreto/WhirlpoolPage.html> + +config CRYPTO_GHASH_CLMUL_NI_INTEL + tristate "GHASH digest algorithm (CLMUL-NI accelerated)" + depends on X86 && 64BIT + select CRYPTO_CRYPTD + help + GHASH is message digest algorithm for GCM (Galois/Counter Mode). + The implementation is accelerated by CLMUL-NI of Intel. + +comment "Ciphers" + +config CRYPTO_AES + tristate "AES cipher algorithms" + select CRYPTO_ALGAPI + help + AES cipher algorithms (FIPS-197). AES uses the Rijndael + algorithm. + + Rijndael appears to be consistently a very good performer in + both hardware and software across a wide range of computing + environments regardless of its use in feedback or non-feedback + modes. Its key setup time is excellent, and its key agility is + good. Rijndael's very low memory requirements make it very well + suited for restricted-space environments, in which it also + demonstrates excellent performance. Rijndael's operations are + among the easiest to defend against power and timing attacks. 
+ + The AES specifies three key sizes: 128, 192 and 256 bits + + See <http://csrc.nist.gov/CryptoToolkit/aes/> for more information. + +config CRYPTO_AES_586 + tristate "AES cipher algorithms (i586)" + depends on (X86 || UML_X86) && !64BIT + select CRYPTO_ALGAPI + select CRYPTO_AES + help + AES cipher algorithms (FIPS-197). AES uses the Rijndael + algorithm. + + Rijndael appears to be consistently a very good performer in + both hardware and software across a wide range of computing + environments regardless of its use in feedback or non-feedback + modes. Its key setup time is excellent, and its key agility is + good. Rijndael's very low memory requirements make it very well + suited for restricted-space environments, in which it also + demonstrates excellent performance. Rijndael's operations are + among the easiest to defend against power and timing attacks. + + The AES specifies three key sizes: 128, 192 and 256 bits + + See <http://csrc.nist.gov/encryption/aes/> for more information. + +config CRYPTO_AES_X86_64 + tristate "AES cipher algorithms (x86_64)" + depends on (X86 || UML_X86) && 64BIT + select CRYPTO_ALGAPI + select CRYPTO_AES + help + AES cipher algorithms (FIPS-197). AES uses the Rijndael + algorithm. + + Rijndael appears to be consistently a very good performer in + both hardware and software across a wide range of computing + environments regardless of its use in feedback or non-feedback + modes. Its key setup time is excellent, and its key agility is + good. Rijndael's very low memory requirements make it very well + suited for restricted-space environments, in which it also + demonstrates excellent performance. Rijndael's operations are + among the easiest to defend against power and timing attacks. + + The AES specifies three key sizes: 128, 192 and 256 bits + + See <http://csrc.nist.gov/encryption/aes/> for more information. + +config CRYPTO_AES_NI_INTEL + tristate "AES cipher algorithms (AES-NI)" + depends on X86 + select CRYPTO_AES_X86_64 if 64BIT + select CRYPTO_AES_586 if !64BIT + select CRYPTO_CRYPTD + select CRYPTO_ALGAPI + help + Use Intel AES-NI instructions for AES algorithm. + + AES cipher algorithms (FIPS-197). AES uses the Rijndael + algorithm. + + Rijndael appears to be consistently a very good performer in + both hardware and software across a wide range of computing + environments regardless of its use in feedback or non-feedback + modes. Its key setup time is excellent, and its key agility is + good. Rijndael's very low memory requirements make it very well + suited for restricted-space environments, in which it also + demonstrates excellent performance. Rijndael's operations are + among the easiest to defend against power and timing attacks. + + The AES specifies three key sizes: 128, 192 and 256 bits + + See <http://csrc.nist.gov/encryption/aes/> for more information. + + In addition to AES cipher algorithm support, the acceleration + for some popular block cipher mode is supported too, including + ECB, CBC, LRW, PCBC, XTS. The 64 bit version has additional + acceleration for CTR. + +config CRYPTO_ANUBIS + tristate "Anubis cipher algorithm" + select CRYPTO_ALGAPI + help + Anubis cipher algorithm. + + Anubis is a variable key length cipher which can use keys from + 128 bits to 320 bits in length. It was evaluated as a entrant + in the NESSIE competition. 
+ + See also: + <https://www.cosic.esat.kuleuven.be/nessie/reports/> + <http://www.larc.usp.br/~pbarreto/AnubisPage.html> + +config CRYPTO_ARC4 + tristate "ARC4 cipher algorithm" + select CRYPTO_ALGAPI + help + ARC4 cipher algorithm. + + ARC4 is a stream cipher using keys ranging from 8 bits to 2048 + bits in length. This algorithm is required for driver-based + WEP, but it should not be for other purposes because of the + weakness of the algorithm. + +config CRYPTO_BLOWFISH + tristate "Blowfish cipher algorithm" + select CRYPTO_ALGAPI + select CRYPTO_BLOWFISH_COMMON + help + Blowfish cipher algorithm, by Bruce Schneier. + + This is a variable key length cipher which can use keys from 32 + bits to 448 bits in length. It's fast, simple and specifically + designed for use on "large microprocessors". + + See also: + <http://www.schneier.com/blowfish.html> + +config CRYPTO_BLOWFISH_COMMON + tristate + help + Common parts of the Blowfish cipher algorithm shared by the + generic c and the assembler implementations. + + See also: + <http://www.schneier.com/blowfish.html> + +config CRYPTO_BLOWFISH_X86_64 + tristate "Blowfish cipher algorithm (x86_64)" + depends on X86 && 64BIT + select CRYPTO_ALGAPI + select CRYPTO_BLOWFISH_COMMON + help + Blowfish cipher algorithm (x86_64), by Bruce Schneier. + + This is a variable key length cipher which can use keys from 32 + bits to 448 bits in length. It's fast, simple and specifically + designed for use on "large microprocessors". + + See also: + <http://www.schneier.com/blowfish.html> + +config CRYPTO_CAMELLIA + tristate "Camellia cipher algorithms" + depends on CRYPTO + select CRYPTO_ALGAPI + help + Camellia cipher algorithms module. + + Camellia is a symmetric key block cipher developed jointly + at NTT and Mitsubishi Electric Corporation. + + The Camellia specifies three key sizes: 128, 192 and 256 bits. + + See also: + <https://info.isl.ntt.co.jp/crypt/eng/camellia/index_s.html> + +config CRYPTO_CAMELLIA_X86_64 + tristate "Camellia cipher algorithm (x86_64)" + depends on X86 && 64BIT + depends on CRYPTO + select CRYPTO_ALGAPI + select CRYPTO_LRW + select CRYPTO_XTS + help + Camellia cipher algorithm module (x86_64). + + Camellia is a symmetric key block cipher developed jointly + at NTT and Mitsubishi Electric Corporation. + + The Camellia specifies three key sizes: 128, 192 and 256 bits. + + See also: + <https://info.isl.ntt.co.jp/crypt/eng/camellia/index_s.html> + +config CRYPTO_CAST5 + tristate "CAST5 (CAST-128) cipher algorithm" + select CRYPTO_ALGAPI + help + The CAST5 encryption algorithm (synonymous with CAST-128) is + described in RFC2144. + +config CRYPTO_CAST6 + tristate "CAST6 (CAST-256) cipher algorithm" + select CRYPTO_ALGAPI + help + The CAST6 encryption algorithm (synonymous with CAST-256) is + described in RFC2612. + +config CRYPTO_DES + tristate "DES and Triple DES EDE cipher algorithms" + select CRYPTO_ALGAPI + help + DES cipher algorithm (FIPS 46-2), and Triple DES EDE (FIPS 46-3). + +config CRYPTO_FCRYPT + tristate "FCrypt cipher algorithm" + select CRYPTO_ALGAPI + select CRYPTO_BLKCIPHER + help + FCrypt algorithm used by RxRPC. + +config CRYPTO_KHAZAD + tristate "Khazad cipher algorithm" + select CRYPTO_ALGAPI + help + Khazad cipher algorithm. + + Khazad was a finalist in the initial NESSIE competition. It is + an algorithm optimized for 64-bit processors with good performance + on 32-bit processors. Khazad uses an 128 bit key size. 
+ + See also: + <http://www.larc.usp.br/~pbarreto/KhazadPage.html> + +config CRYPTO_SALSA20 + tristate "Salsa20 stream cipher algorithm (EXPERIMENTAL)" + depends on EXPERIMENTAL + select CRYPTO_BLKCIPHER + help + Salsa20 stream cipher algorithm. + + Salsa20 is a stream cipher submitted to eSTREAM, the ECRYPT + Stream Cipher Project. See <http://www.ecrypt.eu.org/stream/> + + The Salsa20 stream cipher algorithm is designed by Daniel J. + Bernstein <djb@cr.yp.to>. See <http://cr.yp.to/snuffle.html> + +config CRYPTO_SALSA20_586 + tristate "Salsa20 stream cipher algorithm (i586) (EXPERIMENTAL)" + depends on (X86 || UML_X86) && !64BIT + depends on EXPERIMENTAL + select CRYPTO_BLKCIPHER + help + Salsa20 stream cipher algorithm. + + Salsa20 is a stream cipher submitted to eSTREAM, the ECRYPT + Stream Cipher Project. See <http://www.ecrypt.eu.org/stream/> + + The Salsa20 stream cipher algorithm is designed by Daniel J. + Bernstein <djb@cr.yp.to>. See <http://cr.yp.to/snuffle.html> + +config CRYPTO_SALSA20_X86_64 + tristate "Salsa20 stream cipher algorithm (x86_64) (EXPERIMENTAL)" + depends on (X86 || UML_X86) && 64BIT + depends on EXPERIMENTAL + select CRYPTO_BLKCIPHER + help + Salsa20 stream cipher algorithm. + + Salsa20 is a stream cipher submitted to eSTREAM, the ECRYPT + Stream Cipher Project. See <http://www.ecrypt.eu.org/stream/> + + The Salsa20 stream cipher algorithm is designed by Daniel J. + Bernstein <djb@cr.yp.to>. See <http://cr.yp.to/snuffle.html> + +config CRYPTO_SEED + tristate "SEED cipher algorithm" + select CRYPTO_ALGAPI + help + SEED cipher algorithm (RFC4269). + + SEED is a 128-bit symmetric key block cipher that has been + developed by KISA (Korea Information Security Agency) as a + national standard encryption algorithm of the Republic of Korea. + It is a 16 round block cipher with the key size of 128 bit. + + See also: + <http://www.kisa.or.kr/kisa/seed/jsp/seed_eng.jsp> + +config CRYPTO_SERPENT + tristate "Serpent cipher algorithm" + select CRYPTO_ALGAPI + help + Serpent cipher algorithm, by Anderson, Biham & Knudsen. + + Keys are allowed to be from 0 to 256 bits in length, in steps + of 8 bits. Also includes the 'Tnepres' algorithm, a reversed + variant of Serpent for compatibility with old kerneli.org code. + + See also: + <http://www.cl.cam.ac.uk/~rja14/serpent.html> + +config CRYPTO_SERPENT_SSE2_X86_64 + tristate "Serpent cipher algorithm (x86_64/SSE2)" + depends on X86 && 64BIT + select CRYPTO_ALGAPI + select CRYPTO_CRYPTD + select CRYPTO_SERPENT + select CRYPTO_LRW + select CRYPTO_XTS + help + Serpent cipher algorithm, by Anderson, Biham & Knudsen. + + Keys are allowed to be from 0 to 256 bits in length, in steps + of 8 bits. + + This module provides Serpent cipher algorithm that processes eigth + blocks parallel using SSE2 instruction set. + + See also: + <http://www.cl.cam.ac.uk/~rja14/serpent.html> + +config CRYPTO_SERPENT_SSE2_586 + tristate "Serpent cipher algorithm (i586/SSE2)" + depends on X86 && !64BIT + select CRYPTO_ALGAPI + select CRYPTO_CRYPTD + select CRYPTO_SERPENT + select CRYPTO_LRW + select CRYPTO_XTS + help + Serpent cipher algorithm, by Anderson, Biham & Knudsen. + + Keys are allowed to be from 0 to 256 bits in length, in steps + of 8 bits. + + This module provides Serpent cipher algorithm that processes four + blocks parallel using SSE2 instruction set. + + See also: + <http://www.cl.cam.ac.uk/~rja14/serpent.html> + +config CRYPTO_TEA + tristate "TEA, XTEA and XETA cipher algorithms" + select CRYPTO_ALGAPI + help + TEA cipher algorithm. 
+ + Tiny Encryption Algorithm is a simple cipher that uses + many rounds for security. It is very fast and uses + little memory. + + Xtendend Tiny Encryption Algorithm is a modification to + the TEA algorithm to address a potential key weakness + in the TEA algorithm. + + Xtendend Encryption Tiny Algorithm is a mis-implementation + of the XTEA algorithm for compatibility purposes. + +config CRYPTO_TWOFISH + tristate "Twofish cipher algorithm" + select CRYPTO_ALGAPI + select CRYPTO_TWOFISH_COMMON + help + Twofish cipher algorithm. + + Twofish was submitted as an AES (Advanced Encryption Standard) + candidate cipher by researchers at CounterPane Systems. It is a + 16 round block cipher supporting key sizes of 128, 192, and 256 + bits. + + See also: + <http://www.schneier.com/twofish.html> + +config CRYPTO_TWOFISH_COMMON + tristate + help + Common parts of the Twofish cipher algorithm shared by the + generic c and the assembler implementations. + +config CRYPTO_TWOFISH_586 + tristate "Twofish cipher algorithms (i586)" + depends on (X86 || UML_X86) && !64BIT + select CRYPTO_ALGAPI + select CRYPTO_TWOFISH_COMMON + help + Twofish cipher algorithm. + + Twofish was submitted as an AES (Advanced Encryption Standard) + candidate cipher by researchers at CounterPane Systems. It is a + 16 round block cipher supporting key sizes of 128, 192, and 256 + bits. + + See also: + <http://www.schneier.com/twofish.html> + +config CRYPTO_TWOFISH_X86_64 + tristate "Twofish cipher algorithm (x86_64)" + depends on (X86 || UML_X86) && 64BIT + select CRYPTO_ALGAPI + select CRYPTO_TWOFISH_COMMON + help + Twofish cipher algorithm (x86_64). + + Twofish was submitted as an AES (Advanced Encryption Standard) + candidate cipher by researchers at CounterPane Systems. It is a + 16 round block cipher supporting key sizes of 128, 192, and 256 + bits. + + See also: + <http://www.schneier.com/twofish.html> + +config CRYPTO_TWOFISH_X86_64_3WAY + tristate "Twofish cipher algorithm (x86_64, 3-way parallel)" + depends on X86 && 64BIT + select CRYPTO_ALGAPI + select CRYPTO_TWOFISH_COMMON + select CRYPTO_TWOFISH_X86_64 + select CRYPTO_LRW + select CRYPTO_XTS + help + Twofish cipher algorithm (x86_64, 3-way parallel). + + Twofish was submitted as an AES (Advanced Encryption Standard) + candidate cipher by researchers at CounterPane Systems. It is a + 16 round block cipher supporting key sizes of 128, 192, and 256 + bits. + + This module provides Twofish cipher algorithm that processes three + blocks parallel, utilizing resources of out-of-order CPUs better. + + See also: + <http://www.schneier.com/twofish.html> + +comment "Compression" + +config CRYPTO_DEFLATE + tristate "Deflate compression algorithm" + select CRYPTO_ALGAPI + select ZLIB_INFLATE + select ZLIB_DEFLATE + help + This is the Deflate algorithm (RFC1951), specified for use in + IPSec with the IPCOMP protocol (RFC3173, RFC2394). + + You will most probably want this if using IPSec. + +config CRYPTO_ZLIB + tristate "Zlib compression algorithm" + select CRYPTO_PCOMP + select ZLIB_INFLATE + select ZLIB_DEFLATE + select NLATTR + help + This is the zlib algorithm. + +config CRYPTO_LZO + tristate "LZO compression algorithm" + select CRYPTO_ALGAPI + select LZO_COMPRESS + select LZO_DECOMPRESS + help + This is the LZO algorithm. 
+ +comment "Random Number Generation" + +config CRYPTO_ANSI_CPRNG + tristate "Pseudo Random Number Generation for Cryptographic modules" + default m + select CRYPTO_AES + select CRYPTO_RNG + help + This option enables the generic pseudo random number generator + for cryptographic modules. Uses the Algorithm specified in + ANSI X9.31 A.2.4. Note that this option must be enabled if + CRYPTO_FIPS is selected + +config CRYPTO_USER_API + tristate + +config CRYPTO_USER_API_HASH + tristate "User-space interface for hash algorithms" + depends on NET + select CRYPTO_HASH + select CRYPTO_USER_API + help + This option enables the user-spaces interface for hash + algorithms. + +config CRYPTO_USER_API_SKCIPHER + tristate "User-space interface for symmetric key cipher algorithms" + depends on NET + select CRYPTO_BLKCIPHER + select CRYPTO_USER_API + help + This option enables the user-spaces interface for symmetric + key cipher algorithms. + +source "drivers/crypto/Kconfig" + +endif # if CRYPTO diff --git a/crypto/Makefile b/crypto/Makefile new file mode 100644 index 00000000..30f33d67 --- /dev/null +++ b/crypto/Makefile @@ -0,0 +1,98 @@ +# +# Cryptographic API +# + +obj-$(CONFIG_CRYPTO) += crypto.o +crypto-y := api.o cipher.o compress.o + +obj-$(CONFIG_CRYPTO_WORKQUEUE) += crypto_wq.o + +obj-$(CONFIG_CRYPTO_FIPS) += fips.o + +crypto_algapi-$(CONFIG_PROC_FS) += proc.o +crypto_algapi-y := algapi.o scatterwalk.o $(crypto_algapi-y) +obj-$(CONFIG_CRYPTO_ALGAPI2) += crypto_algapi.o + +obj-$(CONFIG_CRYPTO_AEAD2) += aead.o + +crypto_blkcipher-y := ablkcipher.o +crypto_blkcipher-y += blkcipher.o +obj-$(CONFIG_CRYPTO_BLKCIPHER2) += crypto_blkcipher.o +obj-$(CONFIG_CRYPTO_BLKCIPHER2) += chainiv.o +obj-$(CONFIG_CRYPTO_BLKCIPHER2) += eseqiv.o +obj-$(CONFIG_CRYPTO_SEQIV) += seqiv.o + +crypto_hash-y += ahash.o +crypto_hash-y += shash.o +obj-$(CONFIG_CRYPTO_HASH2) += crypto_hash.o + +obj-$(CONFIG_CRYPTO_PCOMP2) += pcompress.o + +cryptomgr-y := algboss.o testmgr.o + +obj-$(CONFIG_CRYPTO_MANAGER2) += cryptomgr.o +obj-$(CONFIG_CRYPTO_USER) += crypto_user.o +obj-$(CONFIG_CRYPTO_HMAC) += hmac.o +obj-$(CONFIG_CRYPTO_VMAC) += vmac.o +obj-$(CONFIG_CRYPTO_XCBC) += xcbc.o +obj-$(CONFIG_CRYPTO_NULL) += crypto_null.o +obj-$(CONFIG_CRYPTO_MD4) += md4.o +obj-$(CONFIG_CRYPTO_MD5) += md5.o +obj-$(CONFIG_CRYPTO_RMD128) += rmd128.o +obj-$(CONFIG_CRYPTO_RMD160) += rmd160.o +obj-$(CONFIG_CRYPTO_RMD256) += rmd256.o +obj-$(CONFIG_CRYPTO_RMD320) += rmd320.o +obj-$(CONFIG_CRYPTO_SHA1) += sha1_generic.o +obj-$(CONFIG_CRYPTO_SHA256) += sha256_generic.o +obj-$(CONFIG_CRYPTO_SHA512) += sha512_generic.o +obj-$(CONFIG_CRYPTO_WP512) += wp512.o +obj-$(CONFIG_CRYPTO_TGR192) += tgr192.o +obj-$(CONFIG_CRYPTO_GF128MUL) += gf128mul.o +obj-$(CONFIG_CRYPTO_ECB) += ecb.o +obj-$(CONFIG_CRYPTO_CBC) += cbc.o +obj-$(CONFIG_CRYPTO_PCBC) += pcbc.o +obj-$(CONFIG_CRYPTO_CTS) += cts.o +obj-$(CONFIG_CRYPTO_LRW) += lrw.o +obj-$(CONFIG_CRYPTO_XTS) += xts.o +obj-$(CONFIG_CRYPTO_CTR) += ctr.o +obj-$(CONFIG_CRYPTO_GCM) += gcm.o +obj-$(CONFIG_CRYPTO_CCM) += ccm.o +obj-$(CONFIG_CRYPTO_PCRYPT) += pcrypt.o +obj-$(CONFIG_CRYPTO_CRYPTD) += cryptd.o +obj-$(CONFIG_CRYPTO_DES) += des_generic.o +obj-$(CONFIG_CRYPTO_FCRYPT) += fcrypt.o +obj-$(CONFIG_CRYPTO_BLOWFISH) += blowfish_generic.o +obj-$(CONFIG_CRYPTO_BLOWFISH_COMMON) += blowfish_common.o +obj-$(CONFIG_CRYPTO_TWOFISH) += twofish_generic.o +obj-$(CONFIG_CRYPTO_TWOFISH_COMMON) += twofish_common.o +obj-$(CONFIG_CRYPTO_SERPENT) += serpent_generic.o +obj-$(CONFIG_CRYPTO_AES) += aes_generic.o +obj-$(CONFIG_CRYPTO_CAMELLIA) += 
camellia_generic.o +obj-$(CONFIG_CRYPTO_CAST5) += cast5.o +obj-$(CONFIG_CRYPTO_CAST6) += cast6.o +obj-$(CONFIG_CRYPTO_ARC4) += arc4.o +obj-$(CONFIG_CRYPTO_TEA) += tea.o +obj-$(CONFIG_CRYPTO_KHAZAD) += khazad.o +obj-$(CONFIG_CRYPTO_ANUBIS) += anubis.o +obj-$(CONFIG_CRYPTO_SEED) += seed.o +obj-$(CONFIG_CRYPTO_SALSA20) += salsa20_generic.o +obj-$(CONFIG_CRYPTO_DEFLATE) += deflate.o +obj-$(CONFIG_CRYPTO_ZLIB) += zlib.o +obj-$(CONFIG_CRYPTO_MICHAEL_MIC) += michael_mic.o +obj-$(CONFIG_CRYPTO_CRC32C) += crc32c.o +obj-$(CONFIG_CRYPTO_AUTHENC) += authenc.o authencesn.o +obj-$(CONFIG_CRYPTO_LZO) += lzo.o +obj-$(CONFIG_CRYPTO_RNG2) += rng.o +obj-$(CONFIG_CRYPTO_RNG2) += krng.o +obj-$(CONFIG_CRYPTO_ANSI_CPRNG) += ansi_cprng.o +obj-$(CONFIG_CRYPTO_TEST) += tcrypt.o +obj-$(CONFIG_CRYPTO_GHASH) += ghash-generic.o +obj-$(CONFIG_CRYPTO_USER_API) += af_alg.o +obj-$(CONFIG_CRYPTO_USER_API_HASH) += algif_hash.o +obj-$(CONFIG_CRYPTO_USER_API_SKCIPHER) += algif_skcipher.o + +# +# generic algorithms and the async_tx api +# +obj-$(CONFIG_XOR_BLOCKS) += xor.o +obj-$(CONFIG_ASYNC_CORE) += async_tx/ diff --git a/crypto/ablkcipher.c b/crypto/ablkcipher.c new file mode 100644 index 00000000..8d3a056e --- /dev/null +++ b/crypto/ablkcipher.c @@ -0,0 +1,725 @@ +/* + * Asynchronous block chaining cipher operations. + * + * This is the asynchronous version of blkcipher.c indicating completion + * via a callback. + * + * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au> + * + * This program is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License as published by the Free + * Software Foundation; either version 2 of the License, or (at your option) + * any later version. + * + */ + +#include <crypto/internal/skcipher.h> +#include <linux/cpumask.h> +#include <linux/err.h> +#include <linux/init.h> +#include <linux/kernel.h> +#include <linux/module.h> +#include <linux/rtnetlink.h> +#include <linux/sched.h> +#include <linux/slab.h> +#include <linux/seq_file.h> +#include <linux/cryptouser.h> +#include <net/netlink.h> + +#include <crypto/scatterwalk.h> + +#include "internal.h" + +static const char *skcipher_default_geniv __read_mostly; + +struct ablkcipher_buffer { + struct list_head entry; + struct scatter_walk dst; + unsigned int len; + void *data; +}; + +enum { + ABLKCIPHER_WALK_SLOW = 1 << 0, +}; + +static inline void ablkcipher_buffer_write(struct ablkcipher_buffer *p) +{ + scatterwalk_copychunks(p->data, &p->dst, p->len, 1); +} + +void __ablkcipher_walk_complete(struct ablkcipher_walk *walk) +{ + struct ablkcipher_buffer *p, *tmp; + + list_for_each_entry_safe(p, tmp, &walk->buffers, entry) { + ablkcipher_buffer_write(p); + list_del(&p->entry); + kfree(p); + } +} +EXPORT_SYMBOL_GPL(__ablkcipher_walk_complete); + +static inline void ablkcipher_queue_write(struct ablkcipher_walk *walk, + struct ablkcipher_buffer *p) +{ + p->dst = walk->out; + list_add_tail(&p->entry, &walk->buffers); +} + +/* Get a spot of the specified length that does not straddle a page. + * The caller needs to ensure that there is enough space for this operation. 
+ */ +static inline u8 *ablkcipher_get_spot(u8 *start, unsigned int len) +{ + u8 *end_page = (u8 *)(((unsigned long)(start + len - 1)) & PAGE_MASK); + return max(start, end_page); +} + +static inline unsigned int ablkcipher_done_slow(struct ablkcipher_walk *walk, + unsigned int bsize) +{ + unsigned int n = bsize; + + for (;;) { + unsigned int len_this_page = scatterwalk_pagelen(&walk->out); + + if (len_this_page > n) + len_this_page = n; + scatterwalk_advance(&walk->out, n); + if (n == len_this_page) + break; + n -= len_this_page; + scatterwalk_start(&walk->out, scatterwalk_sg_next(walk->out.sg)); + } + + return bsize; +} + +static inline unsigned int ablkcipher_done_fast(struct ablkcipher_walk *walk, + unsigned int n) +{ + scatterwalk_advance(&walk->in, n); + scatterwalk_advance(&walk->out, n); + + return n; +} + +static int ablkcipher_walk_next(struct ablkcipher_request *req, + struct ablkcipher_walk *walk); + +int ablkcipher_walk_done(struct ablkcipher_request *req, + struct ablkcipher_walk *walk, int err) +{ + struct crypto_tfm *tfm = req->base.tfm; + unsigned int nbytes = 0; + + if (likely(err >= 0)) { + unsigned int n = walk->nbytes - err; + + if (likely(!(walk->flags & ABLKCIPHER_WALK_SLOW))) + n = ablkcipher_done_fast(walk, n); + else if (WARN_ON(err)) { + err = -EINVAL; + goto err; + } else + n = ablkcipher_done_slow(walk, n); + + nbytes = walk->total - n; + err = 0; + } + + scatterwalk_done(&walk->in, 0, nbytes); + scatterwalk_done(&walk->out, 1, nbytes); + +err: + walk->total = nbytes; + walk->nbytes = nbytes; + + if (nbytes) { + crypto_yield(req->base.flags); + return ablkcipher_walk_next(req, walk); + } + + if (walk->iv != req->info) + memcpy(req->info, walk->iv, tfm->crt_ablkcipher.ivsize); + kfree(walk->iv_buffer); + + return err; +} +EXPORT_SYMBOL_GPL(ablkcipher_walk_done); + +static inline int ablkcipher_next_slow(struct ablkcipher_request *req, + struct ablkcipher_walk *walk, + unsigned int bsize, + unsigned int alignmask, + void **src_p, void **dst_p) +{ + unsigned aligned_bsize = ALIGN(bsize, alignmask + 1); + struct ablkcipher_buffer *p; + void *src, *dst, *base; + unsigned int n; + + n = ALIGN(sizeof(struct ablkcipher_buffer), alignmask + 1); + n += (aligned_bsize * 3 - (alignmask + 1) + + (alignmask & ~(crypto_tfm_ctx_alignment() - 1))); + + p = kmalloc(n, GFP_ATOMIC); + if (!p) + return ablkcipher_walk_done(req, walk, -ENOMEM); + + base = p + 1; + + dst = (u8 *)ALIGN((unsigned long)base, alignmask + 1); + src = dst = ablkcipher_get_spot(dst, bsize); + + p->len = bsize; + p->data = dst; + + scatterwalk_copychunks(src, &walk->in, bsize, 0); + + ablkcipher_queue_write(walk, p); + + walk->nbytes = bsize; + walk->flags |= ABLKCIPHER_WALK_SLOW; + + *src_p = src; + *dst_p = dst; + + return 0; +} + +static inline int ablkcipher_copy_iv(struct ablkcipher_walk *walk, + struct crypto_tfm *tfm, + unsigned int alignmask) +{ + unsigned bs = walk->blocksize; + unsigned int ivsize = tfm->crt_ablkcipher.ivsize; + unsigned aligned_bs = ALIGN(bs, alignmask + 1); + unsigned int size = aligned_bs * 2 + ivsize + max(aligned_bs, ivsize) - + (alignmask + 1); + u8 *iv; + + size += alignmask & ~(crypto_tfm_ctx_alignment() - 1); + walk->iv_buffer = kmalloc(size, GFP_ATOMIC); + if (!walk->iv_buffer) + return -ENOMEM; + + iv = (u8 *)ALIGN((unsigned long)walk->iv_buffer, alignmask + 1); + iv = ablkcipher_get_spot(iv, bs) + aligned_bs; + iv = ablkcipher_get_spot(iv, bs) + aligned_bs; + iv = ablkcipher_get_spot(iv, ivsize); + + walk->iv = memcpy(iv, walk->iv, ivsize); + return 0; +} + +static 
inline int ablkcipher_next_fast(struct ablkcipher_request *req, + struct ablkcipher_walk *walk) +{ + walk->src.page = scatterwalk_page(&walk->in); + walk->src.offset = offset_in_page(walk->in.offset); + walk->dst.page = scatterwalk_page(&walk->out); + walk->dst.offset = offset_in_page(walk->out.offset); + + return 0; +} + +static int ablkcipher_walk_next(struct ablkcipher_request *req, + struct ablkcipher_walk *walk) +{ + struct crypto_tfm *tfm = req->base.tfm; + unsigned int alignmask, bsize, n; + void *src, *dst; + int err; + + alignmask = crypto_tfm_alg_alignmask(tfm); + n = walk->total; + if (unlikely(n < crypto_tfm_alg_blocksize(tfm))) { + req->base.flags |= CRYPTO_TFM_RES_BAD_BLOCK_LEN; + return ablkcipher_walk_done(req, walk, -EINVAL); + } + + walk->flags &= ~ABLKCIPHER_WALK_SLOW; + src = dst = NULL; + + bsize = min(walk->blocksize, n); + n = scatterwalk_clamp(&walk->in, n); + n = scatterwalk_clamp(&walk->out, n); + + if (n < bsize || + !scatterwalk_aligned(&walk->in, alignmask) || + !scatterwalk_aligned(&walk->out, alignmask)) { + err = ablkcipher_next_slow(req, walk, bsize, alignmask, + &src, &dst); + goto set_phys_lowmem; + } + + walk->nbytes = n; + + return ablkcipher_next_fast(req, walk); + +set_phys_lowmem: + if (err >= 0) { + walk->src.page = virt_to_page(src); + walk->dst.page = virt_to_page(dst); + walk->src.offset = ((unsigned long)src & (PAGE_SIZE - 1)); + walk->dst.offset = ((unsigned long)dst & (PAGE_SIZE - 1)); + } + + return err; +} + +static int ablkcipher_walk_first(struct ablkcipher_request *req, + struct ablkcipher_walk *walk) +{ + struct crypto_tfm *tfm = req->base.tfm; + unsigned int alignmask; + + alignmask = crypto_tfm_alg_alignmask(tfm); + if (WARN_ON_ONCE(in_irq())) + return -EDEADLK; + + walk->nbytes = walk->total; + if (unlikely(!walk->total)) + return 0; + + walk->iv_buffer = NULL; + walk->iv = req->info; + if (unlikely(((unsigned long)walk->iv & alignmask))) { + int err = ablkcipher_copy_iv(walk, tfm, alignmask); + if (err) + return err; + } + + scatterwalk_start(&walk->in, walk->in.sg); + scatterwalk_start(&walk->out, walk->out.sg); + + return ablkcipher_walk_next(req, walk); +} + +int ablkcipher_walk_phys(struct ablkcipher_request *req, + struct ablkcipher_walk *walk) +{ + walk->blocksize = crypto_tfm_alg_blocksize(req->base.tfm); + return ablkcipher_walk_first(req, walk); +} +EXPORT_SYMBOL_GPL(ablkcipher_walk_phys); + +static int setkey_unaligned(struct crypto_ablkcipher *tfm, const u8 *key, + unsigned int keylen) +{ + struct ablkcipher_alg *cipher = crypto_ablkcipher_alg(tfm); + unsigned long alignmask = crypto_ablkcipher_alignmask(tfm); + int ret; + u8 *buffer, *alignbuffer; + unsigned long absize; + + absize = keylen + alignmask; + buffer = kmalloc(absize, GFP_ATOMIC); + if (!buffer) + return -ENOMEM; + + alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1); + memcpy(alignbuffer, key, keylen); + ret = cipher->setkey(tfm, alignbuffer, keylen); + memset(alignbuffer, 0, keylen); + kfree(buffer); + return ret; +} + +static int setkey(struct crypto_ablkcipher *tfm, const u8 *key, + unsigned int keylen) +{ + struct ablkcipher_alg *cipher = crypto_ablkcipher_alg(tfm); + unsigned long alignmask = crypto_ablkcipher_alignmask(tfm); + + if (keylen < cipher->min_keysize || keylen > cipher->max_keysize) { + crypto_ablkcipher_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN); + return -EINVAL; + } + + if ((unsigned long)key & alignmask) + return setkey_unaligned(tfm, key, keylen); + + return cipher->setkey(tfm, key, keylen); +} + +static unsigned int 
crypto_ablkcipher_ctxsize(struct crypto_alg *alg, u32 type, + u32 mask) +{ + return alg->cra_ctxsize; +} + +int skcipher_null_givencrypt(struct skcipher_givcrypt_request *req) +{ + return crypto_ablkcipher_encrypt(&req->creq); +} + +int skcipher_null_givdecrypt(struct skcipher_givcrypt_request *req) +{ + return crypto_ablkcipher_decrypt(&req->creq); +} + +static int crypto_init_ablkcipher_ops(struct crypto_tfm *tfm, u32 type, + u32 mask) +{ + struct ablkcipher_alg *alg = &tfm->__crt_alg->cra_ablkcipher; + struct ablkcipher_tfm *crt = &tfm->crt_ablkcipher; + + if (alg->ivsize > PAGE_SIZE / 8) + return -EINVAL; + + crt->setkey = setkey; + crt->encrypt = alg->encrypt; + crt->decrypt = alg->decrypt; + if (!alg->ivsize) { + crt->givencrypt = skcipher_null_givencrypt; + crt->givdecrypt = skcipher_null_givdecrypt; + } + crt->base = __crypto_ablkcipher_cast(tfm); + crt->ivsize = alg->ivsize; + + return 0; +} + +#ifdef CONFIG_NET +static int crypto_ablkcipher_report(struct sk_buff *skb, struct crypto_alg *alg) +{ + struct crypto_report_blkcipher rblkcipher; + + snprintf(rblkcipher.type, CRYPTO_MAX_ALG_NAME, "%s", "ablkcipher"); + snprintf(rblkcipher.geniv, CRYPTO_MAX_ALG_NAME, "%s", + alg->cra_ablkcipher.geniv ?: "<default>"); + + rblkcipher.blocksize = alg->cra_blocksize; + rblkcipher.min_keysize = alg->cra_ablkcipher.min_keysize; + rblkcipher.max_keysize = alg->cra_ablkcipher.max_keysize; + rblkcipher.ivsize = alg->cra_ablkcipher.ivsize; + + NLA_PUT(skb, CRYPTOCFGA_REPORT_BLKCIPHER, + sizeof(struct crypto_report_blkcipher), &rblkcipher); + + return 0; + +nla_put_failure: + return -EMSGSIZE; +} +#else +static int crypto_ablkcipher_report(struct sk_buff *skb, struct crypto_alg *alg) +{ + return -ENOSYS; +} +#endif + +static void crypto_ablkcipher_show(struct seq_file *m, struct crypto_alg *alg) + __attribute__ ((unused)); +static void crypto_ablkcipher_show(struct seq_file *m, struct crypto_alg *alg) +{ + struct ablkcipher_alg *ablkcipher = &alg->cra_ablkcipher; + + seq_printf(m, "type : ablkcipher\n"); + seq_printf(m, "async : %s\n", alg->cra_flags & CRYPTO_ALG_ASYNC ? + "yes" : "no"); + seq_printf(m, "blocksize : %u\n", alg->cra_blocksize); + seq_printf(m, "min keysize : %u\n", ablkcipher->min_keysize); + seq_printf(m, "max keysize : %u\n", ablkcipher->max_keysize); + seq_printf(m, "ivsize : %u\n", ablkcipher->ivsize); + seq_printf(m, "geniv : %s\n", ablkcipher->geniv ?: "<default>"); +} + +const struct crypto_type crypto_ablkcipher_type = { + .ctxsize = crypto_ablkcipher_ctxsize, + .init = crypto_init_ablkcipher_ops, +#ifdef CONFIG_PROC_FS + .show = crypto_ablkcipher_show, +#endif + .report = crypto_ablkcipher_report, +}; +EXPORT_SYMBOL_GPL(crypto_ablkcipher_type); + +static int no_givdecrypt(struct skcipher_givcrypt_request *req) +{ + return -ENOSYS; +} + +static int crypto_init_givcipher_ops(struct crypto_tfm *tfm, u32 type, + u32 mask) +{ + struct ablkcipher_alg *alg = &tfm->__crt_alg->cra_ablkcipher; + struct ablkcipher_tfm *crt = &tfm->crt_ablkcipher; + + if (alg->ivsize > PAGE_SIZE / 8) + return -EINVAL; + + crt->setkey = tfm->__crt_alg->cra_flags & CRYPTO_ALG_GENIV ? 
+ alg->setkey : setkey; + crt->encrypt = alg->encrypt; + crt->decrypt = alg->decrypt; + crt->givencrypt = alg->givencrypt; + crt->givdecrypt = alg->givdecrypt ?: no_givdecrypt; + crt->base = __crypto_ablkcipher_cast(tfm); + crt->ivsize = alg->ivsize; + + return 0; +} + +#ifdef CONFIG_NET +static int crypto_givcipher_report(struct sk_buff *skb, struct crypto_alg *alg) +{ + struct crypto_report_blkcipher rblkcipher; + + snprintf(rblkcipher.type, CRYPTO_MAX_ALG_NAME, "%s", "givcipher"); + snprintf(rblkcipher.geniv, CRYPTO_MAX_ALG_NAME, "%s", + alg->cra_ablkcipher.geniv ?: "<built-in>"); + + rblkcipher.blocksize = alg->cra_blocksize; + rblkcipher.min_keysize = alg->cra_ablkcipher.min_keysize; + rblkcipher.max_keysize = alg->cra_ablkcipher.max_keysize; + rblkcipher.ivsize = alg->cra_ablkcipher.ivsize; + + NLA_PUT(skb, CRYPTOCFGA_REPORT_BLKCIPHER, + sizeof(struct crypto_report_blkcipher), &rblkcipher); + + return 0; + +nla_put_failure: + return -EMSGSIZE; +} +#else +static int crypto_givcipher_report(struct sk_buff *skb, struct crypto_alg *alg) +{ + return -ENOSYS; +} +#endif + +static void crypto_givcipher_show(struct seq_file *m, struct crypto_alg *alg) + __attribute__ ((unused)); +static void crypto_givcipher_show(struct seq_file *m, struct crypto_alg *alg) +{ + struct ablkcipher_alg *ablkcipher = &alg->cra_ablkcipher; + + seq_printf(m, "type : givcipher\n"); + seq_printf(m, "async : %s\n", alg->cra_flags & CRYPTO_ALG_ASYNC ? + "yes" : "no"); + seq_printf(m, "blocksize : %u\n", alg->cra_blocksize); + seq_printf(m, "min keysize : %u\n", ablkcipher->min_keysize); + seq_printf(m, "max keysize : %u\n", ablkcipher->max_keysize); + seq_printf(m, "ivsize : %u\n", ablkcipher->ivsize); + seq_printf(m, "geniv : %s\n", ablkcipher->geniv ?: "<built-in>"); +} + +const struct crypto_type crypto_givcipher_type = { + .ctxsize = crypto_ablkcipher_ctxsize, + .init = crypto_init_givcipher_ops, +#ifdef CONFIG_PROC_FS + .show = crypto_givcipher_show, +#endif + .report = crypto_givcipher_report, +}; +EXPORT_SYMBOL_GPL(crypto_givcipher_type); + +const char *crypto_default_geniv(const struct crypto_alg *alg) +{ + if (((alg->cra_flags & CRYPTO_ALG_TYPE_MASK) == + CRYPTO_ALG_TYPE_BLKCIPHER ? alg->cra_blkcipher.ivsize : + alg->cra_ablkcipher.ivsize) != + alg->cra_blocksize) + return "chainiv"; + + return alg->cra_flags & CRYPTO_ALG_ASYNC ? + "eseqiv" : skcipher_default_geniv; +} + +static int crypto_givcipher_default(struct crypto_alg *alg, u32 type, u32 mask) +{ + struct rtattr *tb[3]; + struct { + struct rtattr attr; + struct crypto_attr_type data; + } ptype; + struct { + struct rtattr attr; + struct crypto_attr_alg data; + } palg; + struct crypto_template *tmpl; + struct crypto_instance *inst; + struct crypto_alg *larval; + const char *geniv; + int err; + + larval = crypto_larval_lookup(alg->cra_driver_name, + (type & ~CRYPTO_ALG_TYPE_MASK) | + CRYPTO_ALG_TYPE_GIVCIPHER, + mask | CRYPTO_ALG_TYPE_MASK); + err = PTR_ERR(larval); + if (IS_ERR(larval)) + goto out; + + err = -EAGAIN; + if (!crypto_is_larval(larval)) + goto drop_larval; + + ptype.attr.rta_len = sizeof(ptype); + ptype.attr.rta_type = CRYPTOA_TYPE; + ptype.data.type = type | CRYPTO_ALG_GENIV; + /* GENIV tells the template that we're making a default geniv. */ + ptype.data.mask = mask | CRYPTO_ALG_GENIV; + tb[0] = &ptype.attr; + + palg.attr.rta_len = sizeof(palg); + palg.attr.rta_type = CRYPTOA_ALG; + /* Must use the exact name to locate ourselves. 
*/ + memcpy(palg.data.name, alg->cra_driver_name, CRYPTO_MAX_ALG_NAME); + tb[1] = &palg.attr; + + tb[2] = NULL; + + if ((alg->cra_flags & CRYPTO_ALG_TYPE_MASK) == + CRYPTO_ALG_TYPE_BLKCIPHER) + geniv = alg->cra_blkcipher.geniv; + else + geniv = alg->cra_ablkcipher.geniv; + + if (!geniv) + geniv = crypto_default_geniv(alg); + + tmpl = crypto_lookup_template(geniv); + err = -ENOENT; + if (!tmpl) + goto kill_larval; + + inst = tmpl->alloc(tb); + err = PTR_ERR(inst); + if (IS_ERR(inst)) + goto put_tmpl; + + if ((err = crypto_register_instance(tmpl, inst))) { + tmpl->free(inst); + goto put_tmpl; + } + + /* Redo the lookup to use the instance we just registered. */ + err = -EAGAIN; + +put_tmpl: + crypto_tmpl_put(tmpl); +kill_larval: + crypto_larval_kill(larval); +drop_larval: + crypto_mod_put(larval); +out: + crypto_mod_put(alg); + return err; +} + +struct crypto_alg *crypto_lookup_skcipher(const char *name, u32 type, u32 mask) +{ + struct crypto_alg *alg; + + alg = crypto_alg_mod_lookup(name, type, mask); + if (IS_ERR(alg)) + return alg; + + if ((alg->cra_flags & CRYPTO_ALG_TYPE_MASK) == + CRYPTO_ALG_TYPE_GIVCIPHER) + return alg; + + if (!((alg->cra_flags & CRYPTO_ALG_TYPE_MASK) == + CRYPTO_ALG_TYPE_BLKCIPHER ? alg->cra_blkcipher.ivsize : + alg->cra_ablkcipher.ivsize)) + return alg; + + crypto_mod_put(alg); + alg = crypto_alg_mod_lookup(name, type | CRYPTO_ALG_TESTED, + mask & ~CRYPTO_ALG_TESTED); + if (IS_ERR(alg)) + return alg; + + if ((alg->cra_flags & CRYPTO_ALG_TYPE_MASK) == + CRYPTO_ALG_TYPE_GIVCIPHER) { + if ((alg->cra_flags ^ type ^ ~mask) & CRYPTO_ALG_TESTED) { + crypto_mod_put(alg); + alg = ERR_PTR(-ENOENT); + } + return alg; + } + + BUG_ON(!((alg->cra_flags & CRYPTO_ALG_TYPE_MASK) == + CRYPTO_ALG_TYPE_BLKCIPHER ? alg->cra_blkcipher.ivsize : + alg->cra_ablkcipher.ivsize)); + + return ERR_PTR(crypto_givcipher_default(alg, type, mask)); +} +EXPORT_SYMBOL_GPL(crypto_lookup_skcipher); + +int crypto_grab_skcipher(struct crypto_skcipher_spawn *spawn, const char *name, + u32 type, u32 mask) +{ + struct crypto_alg *alg; + int err; + + type = crypto_skcipher_type(type); + mask = crypto_skcipher_mask(mask); + + alg = crypto_lookup_skcipher(name, type, mask); + if (IS_ERR(alg)) + return PTR_ERR(alg); + + err = crypto_init_spawn(&spawn->base, alg, spawn->base.inst, mask); + crypto_mod_put(alg); + return err; +} +EXPORT_SYMBOL_GPL(crypto_grab_skcipher); + +struct crypto_ablkcipher *crypto_alloc_ablkcipher(const char *alg_name, + u32 type, u32 mask) +{ + struct crypto_tfm *tfm; + int err; + + type = crypto_skcipher_type(type); + mask = crypto_skcipher_mask(mask); + + for (;;) { + struct crypto_alg *alg; + + alg = crypto_lookup_skcipher(alg_name, type, mask); + if (IS_ERR(alg)) { + err = PTR_ERR(alg); + goto err; + } + + tfm = __crypto_alloc_tfm(alg, type, mask); + if (!IS_ERR(tfm)) + return __crypto_ablkcipher_cast(tfm); + + crypto_mod_put(alg); + err = PTR_ERR(tfm); + +err: + if (err != -EAGAIN) + break; + if (signal_pending(current)) { + err = -EINTR; + break; + } + } + + return ERR_PTR(err); +} +EXPORT_SYMBOL_GPL(crypto_alloc_ablkcipher); + +static int __init skcipher_module_init(void) +{ + skcipher_default_geniv = num_possible_cpus() > 1 ? 
+ "eseqiv" : "chainiv"; + return 0; +} + +static void skcipher_module_exit(void) +{ +} + +module_init(skcipher_module_init); +module_exit(skcipher_module_exit); diff --git a/crypto/aead.c b/crypto/aead.c new file mode 100644 index 00000000..e4cb3515 --- /dev/null +++ b/crypto/aead.c @@ -0,0 +1,568 @@ +/* + * AEAD: Authenticated Encryption with Associated Data + * + * This file provides API support for AEAD algorithms. + * + * Copyright (c) 2007 Herbert Xu <herbert@gondor.apana.org.au> + * + * This program is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License as published by the Free + * Software Foundation; either version 2 of the License, or (at your option) + * any later version. + * + */ + +#include <crypto/internal/aead.h> +#include <linux/err.h> +#include <linux/init.h> +#include <linux/kernel.h> +#include <linux/module.h> +#include <linux/rtnetlink.h> +#include <linux/sched.h> +#include <linux/slab.h> +#include <linux/seq_file.h> +#include <linux/cryptouser.h> +#include <net/netlink.h> + +#include "internal.h" + +static int setkey_unaligned(struct crypto_aead *tfm, const u8 *key, + unsigned int keylen) +{ + struct aead_alg *aead = crypto_aead_alg(tfm); + unsigned long alignmask = crypto_aead_alignmask(tfm); + int ret; + u8 *buffer, *alignbuffer; + unsigned long absize; + + absize = keylen + alignmask; + buffer = kmalloc(absize, GFP_ATOMIC); + if (!buffer) + return -ENOMEM; + + alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1); + memcpy(alignbuffer, key, keylen); + ret = aead->setkey(tfm, alignbuffer, keylen); + memset(alignbuffer, 0, keylen); + kfree(buffer); + return ret; +} + +static int setkey(struct crypto_aead *tfm, const u8 *key, unsigned int keylen) +{ + struct aead_alg *aead = crypto_aead_alg(tfm); + unsigned long alignmask = crypto_aead_alignmask(tfm); + + if ((unsigned long)key & alignmask) + return setkey_unaligned(tfm, key, keylen); + + return aead->setkey(tfm, key, keylen); +} + +int crypto_aead_setauthsize(struct crypto_aead *tfm, unsigned int authsize) +{ + struct aead_tfm *crt = crypto_aead_crt(tfm); + int err; + + if (authsize > crypto_aead_alg(tfm)->maxauthsize) + return -EINVAL; + + if (crypto_aead_alg(tfm)->setauthsize) { + err = crypto_aead_alg(tfm)->setauthsize(crt->base, authsize); + if (err) + return err; + } + + crypto_aead_crt(crt->base)->authsize = authsize; + crt->authsize = authsize; + return 0; +} +EXPORT_SYMBOL_GPL(crypto_aead_setauthsize); + +static unsigned int crypto_aead_ctxsize(struct crypto_alg *alg, u32 type, + u32 mask) +{ + return alg->cra_ctxsize; +} + +static int no_givcrypt(struct aead_givcrypt_request *req) +{ + return -ENOSYS; +} + +static int crypto_init_aead_ops(struct crypto_tfm *tfm, u32 type, u32 mask) +{ + struct aead_alg *alg = &tfm->__crt_alg->cra_aead; + struct aead_tfm *crt = &tfm->crt_aead; + + if (max(alg->maxauthsize, alg->ivsize) > PAGE_SIZE / 8) + return -EINVAL; + + crt->setkey = tfm->__crt_alg->cra_flags & CRYPTO_ALG_GENIV ? 
+ alg->setkey : setkey; + crt->encrypt = alg->encrypt; + crt->decrypt = alg->decrypt; + crt->givencrypt = alg->givencrypt ?: no_givcrypt; + crt->givdecrypt = alg->givdecrypt ?: no_givcrypt; + crt->base = __crypto_aead_cast(tfm); + crt->ivsize = alg->ivsize; + crt->authsize = alg->maxauthsize; + + return 0; +} + +#ifdef CONFIG_NET +static int crypto_aead_report(struct sk_buff *skb, struct crypto_alg *alg) +{ + struct crypto_report_aead raead; + struct aead_alg *aead = &alg->cra_aead; + + snprintf(raead.type, CRYPTO_MAX_ALG_NAME, "%s", "aead"); + snprintf(raead.geniv, CRYPTO_MAX_ALG_NAME, "%s", + aead->geniv ?: "<built-in>"); + + raead.blocksize = alg->cra_blocksize; + raead.maxauthsize = aead->maxauthsize; + raead.ivsize = aead->ivsize; + + NLA_PUT(skb, CRYPTOCFGA_REPORT_AEAD, + sizeof(struct crypto_report_aead), &raead); + + return 0; + +nla_put_failure: + return -EMSGSIZE; +} +#else +static int crypto_aead_report(struct sk_buff *skb, struct crypto_alg *alg) +{ + return -ENOSYS; +} +#endif + +static void crypto_aead_show(struct seq_file *m, struct crypto_alg *alg) + __attribute__ ((unused)); +static void crypto_aead_show(struct seq_file *m, struct crypto_alg *alg) +{ + struct aead_alg *aead = &alg->cra_aead; + + seq_printf(m, "type : aead\n"); + seq_printf(m, "async : %s\n", alg->cra_flags & CRYPTO_ALG_ASYNC ? + "yes" : "no"); + seq_printf(m, "blocksize : %u\n", alg->cra_blocksize); + seq_printf(m, "ivsize : %u\n", aead->ivsize); + seq_printf(m, "maxauthsize : %u\n", aead->maxauthsize); + seq_printf(m, "geniv : %s\n", aead->geniv ?: "<built-in>"); +} + +const struct crypto_type crypto_aead_type = { + .ctxsize = crypto_aead_ctxsize, + .init = crypto_init_aead_ops, +#ifdef CONFIG_PROC_FS + .show = crypto_aead_show, +#endif + .report = crypto_aead_report, +}; +EXPORT_SYMBOL_GPL(crypto_aead_type); + +static int aead_null_givencrypt(struct aead_givcrypt_request *req) +{ + return crypto_aead_encrypt(&req->areq); +} + +static int aead_null_givdecrypt(struct aead_givcrypt_request *req) +{ + return crypto_aead_decrypt(&req->areq); +} + +static int crypto_init_nivaead_ops(struct crypto_tfm *tfm, u32 type, u32 mask) +{ + struct aead_alg *alg = &tfm->__crt_alg->cra_aead; + struct aead_tfm *crt = &tfm->crt_aead; + + if (max(alg->maxauthsize, alg->ivsize) > PAGE_SIZE / 8) + return -EINVAL; + + crt->setkey = setkey; + crt->encrypt = alg->encrypt; + crt->decrypt = alg->decrypt; + if (!alg->ivsize) { + crt->givencrypt = aead_null_givencrypt; + crt->givdecrypt = aead_null_givdecrypt; + } + crt->base = __crypto_aead_cast(tfm); + crt->ivsize = alg->ivsize; + crt->authsize = alg->maxauthsize; + + return 0; +} + +#ifdef CONFIG_NET +static int crypto_nivaead_report(struct sk_buff *skb, struct crypto_alg *alg) +{ + struct crypto_report_aead raead; + struct aead_alg *aead = &alg->cra_aead; + + snprintf(raead.type, CRYPTO_MAX_ALG_NAME, "%s", "nivaead"); + snprintf(raead.geniv, CRYPTO_MAX_ALG_NAME, "%s", aead->geniv); + + raead.blocksize = alg->cra_blocksize; + raead.maxauthsize = aead->maxauthsize; + raead.ivsize = aead->ivsize; + + NLA_PUT(skb, CRYPTOCFGA_REPORT_AEAD, + sizeof(struct crypto_report_aead), &raead); + + return 0; + +nla_put_failure: + return -EMSGSIZE; +} +#else +static int crypto_nivaead_report(struct sk_buff *skb, struct crypto_alg *alg) +{ + return -ENOSYS; +} +#endif + + +static void crypto_nivaead_show(struct seq_file *m, struct crypto_alg *alg) + __attribute__ ((unused)); +static void crypto_nivaead_show(struct seq_file *m, struct crypto_alg *alg) +{ + struct aead_alg *aead = &alg->cra_aead; + 
+ seq_printf(m, "type : nivaead\n"); + seq_printf(m, "async : %s\n", alg->cra_flags & CRYPTO_ALG_ASYNC ? + "yes" : "no"); + seq_printf(m, "blocksize : %u\n", alg->cra_blocksize); + seq_printf(m, "ivsize : %u\n", aead->ivsize); + seq_printf(m, "maxauthsize : %u\n", aead->maxauthsize); + seq_printf(m, "geniv : %s\n", aead->geniv); +} + +const struct crypto_type crypto_nivaead_type = { + .ctxsize = crypto_aead_ctxsize, + .init = crypto_init_nivaead_ops, +#ifdef CONFIG_PROC_FS + .show = crypto_nivaead_show, +#endif + .report = crypto_nivaead_report, +}; +EXPORT_SYMBOL_GPL(crypto_nivaead_type); + +static int crypto_grab_nivaead(struct crypto_aead_spawn *spawn, + const char *name, u32 type, u32 mask) +{ + struct crypto_alg *alg; + int err; + + type &= ~(CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_GENIV); + type |= CRYPTO_ALG_TYPE_AEAD; + mask |= CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_GENIV; + + alg = crypto_alg_mod_lookup(name, type, mask); + if (IS_ERR(alg)) + return PTR_ERR(alg); + + err = crypto_init_spawn(&spawn->base, alg, spawn->base.inst, mask); + crypto_mod_put(alg); + return err; +} + +struct crypto_instance *aead_geniv_alloc(struct crypto_template *tmpl, + struct rtattr **tb, u32 type, + u32 mask) +{ + const char *name; + struct crypto_aead_spawn *spawn; + struct crypto_attr_type *algt; + struct crypto_instance *inst; + struct crypto_alg *alg; + int err; + + algt = crypto_get_attr_type(tb); + err = PTR_ERR(algt); + if (IS_ERR(algt)) + return ERR_PTR(err); + + if ((algt->type ^ (CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_GENIV)) & + algt->mask) + return ERR_PTR(-EINVAL); + + name = crypto_attr_alg_name(tb[1]); + err = PTR_ERR(name); + if (IS_ERR(name)) + return ERR_PTR(err); + + inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL); + if (!inst) + return ERR_PTR(-ENOMEM); + + spawn = crypto_instance_ctx(inst); + + /* Ignore async algorithms if necessary. */ + mask |= crypto_requires_sync(algt->type, algt->mask); + + crypto_set_aead_spawn(spawn, inst); + err = crypto_grab_nivaead(spawn, name, type, mask); + if (err) + goto err_free_inst; + + alg = crypto_aead_spawn_alg(spawn); + + err = -EINVAL; + if (!alg->cra_aead.ivsize) + goto err_drop_alg; + + /* + * This is only true if we're constructing an algorithm with its + * default IV generator. For the default generator we elide the + * template name and double-check the IV generator. 
+ */ + if (algt->mask & CRYPTO_ALG_GENIV) { + if (strcmp(tmpl->name, alg->cra_aead.geniv)) + goto err_drop_alg; + + memcpy(inst->alg.cra_name, alg->cra_name, CRYPTO_MAX_ALG_NAME); + memcpy(inst->alg.cra_driver_name, alg->cra_driver_name, + CRYPTO_MAX_ALG_NAME); + } else { + err = -ENAMETOOLONG; + if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME, + "%s(%s)", tmpl->name, alg->cra_name) >= + CRYPTO_MAX_ALG_NAME) + goto err_drop_alg; + if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME, + "%s(%s)", tmpl->name, alg->cra_driver_name) >= + CRYPTO_MAX_ALG_NAME) + goto err_drop_alg; + } + + inst->alg.cra_flags = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_GENIV; + inst->alg.cra_flags |= alg->cra_flags & CRYPTO_ALG_ASYNC; + inst->alg.cra_priority = alg->cra_priority; + inst->alg.cra_blocksize = alg->cra_blocksize; + inst->alg.cra_alignmask = alg->cra_alignmask; + inst->alg.cra_type = &crypto_aead_type; + + inst->alg.cra_aead.ivsize = alg->cra_aead.ivsize; + inst->alg.cra_aead.maxauthsize = alg->cra_aead.maxauthsize; + inst->alg.cra_aead.geniv = alg->cra_aead.geniv; + + inst->alg.cra_aead.setkey = alg->cra_aead.setkey; + inst->alg.cra_aead.setauthsize = alg->cra_aead.setauthsize; + inst->alg.cra_aead.encrypt = alg->cra_aead.encrypt; + inst->alg.cra_aead.decrypt = alg->cra_aead.decrypt; + +out: + return inst; + +err_drop_alg: + crypto_drop_aead(spawn); +err_free_inst: + kfree(inst); + inst = ERR_PTR(err); + goto out; +} +EXPORT_SYMBOL_GPL(aead_geniv_alloc); + +void aead_geniv_free(struct crypto_instance *inst) +{ + crypto_drop_aead(crypto_instance_ctx(inst)); + kfree(inst); +} +EXPORT_SYMBOL_GPL(aead_geniv_free); + +int aead_geniv_init(struct crypto_tfm *tfm) +{ + struct crypto_instance *inst = (void *)tfm->__crt_alg; + struct crypto_aead *aead; + + aead = crypto_spawn_aead(crypto_instance_ctx(inst)); + if (IS_ERR(aead)) + return PTR_ERR(aead); + + tfm->crt_aead.base = aead; + tfm->crt_aead.reqsize += crypto_aead_reqsize(aead); + + return 0; +} +EXPORT_SYMBOL_GPL(aead_geniv_init); + +void aead_geniv_exit(struct crypto_tfm *tfm) +{ + crypto_free_aead(tfm->crt_aead.base); +} +EXPORT_SYMBOL_GPL(aead_geniv_exit); + +static int crypto_nivaead_default(struct crypto_alg *alg, u32 type, u32 mask) +{ + struct rtattr *tb[3]; + struct { + struct rtattr attr; + struct crypto_attr_type data; + } ptype; + struct { + struct rtattr attr; + struct crypto_attr_alg data; + } palg; + struct crypto_template *tmpl; + struct crypto_instance *inst; + struct crypto_alg *larval; + const char *geniv; + int err; + + larval = crypto_larval_lookup(alg->cra_driver_name, + CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_GENIV, + CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_GENIV); + err = PTR_ERR(larval); + if (IS_ERR(larval)) + goto out; + + err = -EAGAIN; + if (!crypto_is_larval(larval)) + goto drop_larval; + + ptype.attr.rta_len = sizeof(ptype); + ptype.attr.rta_type = CRYPTOA_TYPE; + ptype.data.type = type | CRYPTO_ALG_GENIV; + /* GENIV tells the template that we're making a default geniv. */ + ptype.data.mask = mask | CRYPTO_ALG_GENIV; + tb[0] = &ptype.attr; + + palg.attr.rta_len = sizeof(palg); + palg.attr.rta_type = CRYPTOA_ALG; + /* Must use the exact name to locate ourselves. 
*/ + memcpy(palg.data.name, alg->cra_driver_name, CRYPTO_MAX_ALG_NAME); + tb[1] = &palg.attr; + + tb[2] = NULL; + + geniv = alg->cra_aead.geniv; + + tmpl = crypto_lookup_template(geniv); + err = -ENOENT; + if (!tmpl) + goto kill_larval; + + inst = tmpl->alloc(tb); + err = PTR_ERR(inst); + if (IS_ERR(inst)) + goto put_tmpl; + + if ((err = crypto_register_instance(tmpl, inst))) { + tmpl->free(inst); + goto put_tmpl; + } + + /* Redo the lookup to use the instance we just registered. */ + err = -EAGAIN; + +put_tmpl: + crypto_tmpl_put(tmpl); +kill_larval: + crypto_larval_kill(larval); +drop_larval: + crypto_mod_put(larval); +out: + crypto_mod_put(alg); + return err; +} + +struct crypto_alg *crypto_lookup_aead(const char *name, u32 type, u32 mask) +{ + struct crypto_alg *alg; + + alg = crypto_alg_mod_lookup(name, type, mask); + if (IS_ERR(alg)) + return alg; + + if (alg->cra_type == &crypto_aead_type) + return alg; + + if (!alg->cra_aead.ivsize) + return alg; + + crypto_mod_put(alg); + alg = crypto_alg_mod_lookup(name, type | CRYPTO_ALG_TESTED, + mask & ~CRYPTO_ALG_TESTED); + if (IS_ERR(alg)) + return alg; + + if (alg->cra_type == &crypto_aead_type) { + if ((alg->cra_flags ^ type ^ ~mask) & CRYPTO_ALG_TESTED) { + crypto_mod_put(alg); + alg = ERR_PTR(-ENOENT); + } + return alg; + } + + BUG_ON(!alg->cra_aead.ivsize); + + return ERR_PTR(crypto_nivaead_default(alg, type, mask)); +} +EXPORT_SYMBOL_GPL(crypto_lookup_aead); + +int crypto_grab_aead(struct crypto_aead_spawn *spawn, const char *name, + u32 type, u32 mask) +{ + struct crypto_alg *alg; + int err; + + type &= ~(CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_GENIV); + type |= CRYPTO_ALG_TYPE_AEAD; + mask &= ~(CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_GENIV); + mask |= CRYPTO_ALG_TYPE_MASK; + + alg = crypto_lookup_aead(name, type, mask); + if (IS_ERR(alg)) + return PTR_ERR(alg); + + err = crypto_init_spawn(&spawn->base, alg, spawn->base.inst, mask); + crypto_mod_put(alg); + return err; +} +EXPORT_SYMBOL_GPL(crypto_grab_aead); + +struct crypto_aead *crypto_alloc_aead(const char *alg_name, u32 type, u32 mask) +{ + struct crypto_tfm *tfm; + int err; + + type &= ~(CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_GENIV); + type |= CRYPTO_ALG_TYPE_AEAD; + mask &= ~(CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_GENIV); + mask |= CRYPTO_ALG_TYPE_MASK; + + for (;;) { + struct crypto_alg *alg; + + alg = crypto_lookup_aead(alg_name, type, mask); + if (IS_ERR(alg)) { + err = PTR_ERR(alg); + goto err; + } + + tfm = __crypto_alloc_tfm(alg, type, mask); + if (!IS_ERR(tfm)) + return __crypto_aead_cast(tfm); + + crypto_mod_put(alg); + err = PTR_ERR(tfm); + +err: + if (err != -EAGAIN) + break; + if (signal_pending(current)) { + err = -EINTR; + break; + } + } + + return ERR_PTR(err); +} +EXPORT_SYMBOL_GPL(crypto_alloc_aead); + +MODULE_LICENSE("GPL"); +MODULE_DESCRIPTION("Authenticated Encryption with Associated Data (AEAD)"); diff --git a/crypto/aes_generic.c b/crypto/aes_generic.c new file mode 100644 index 00000000..a68c73da --- /dev/null +++ b/crypto/aes_generic.c @@ -0,0 +1,1478 @@ +/* + * Cryptographic API. + * + * AES Cipher Algorithm. + * + * Based on Brian Gladman's code. + * + * Linux developers: + * Alexander Kjeldaas <astor@fast.no> + * Herbert Valerio Riedel <hvr@hvrlab.org> + * Kyle McMartin <kyle@debian.org> + * Adam J. Richter <adam@yggdrasil.com> (conversion to 2.5 API). 
+ * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 2 of the License, or + * (at your option) any later version. + * + * --------------------------------------------------------------------------- + * Copyright (c) 2002, Dr Brian Gladman <brg@gladman.me.uk>, Worcester, UK. + * All rights reserved. + * + * LICENSE TERMS + * + * The free distribution and use of this software in both source and binary + * form is allowed (with or without changes) provided that: + * + * 1. distributions of this source code include the above copyright + * notice, this list of conditions and the following disclaimer; + * + * 2. distributions in binary form include the above copyright + * notice, this list of conditions and the following disclaimer + * in the documentation and/or other associated materials; + * + * 3. the copyright holder's name is not used to endorse products + * built using this software without specific written permission. + * + * ALTERNATIVELY, provided that this notice is retained in full, this product + * may be distributed under the terms of the GNU General Public License (GPL), + * in which case the provisions of the GPL apply INSTEAD OF those given above. + * + * DISCLAIMER + * + * This software is provided 'as is' with no explicit or implied warranties + * in respect of its properties, including, but not limited to, correctness + * and/or fitness for purpose. + * --------------------------------------------------------------------------- + */ + +#include <crypto/aes.h> +#include <linux/module.h> +#include <linux/init.h> +#include <linux/types.h> +#include <linux/errno.h> +#include <linux/crypto.h> +#include <asm/byteorder.h> + +static inline u8 byte(const u32 x, const unsigned n) +{ + return x >> (n << 3); +} + +static const u32 rco_tab[10] = { 1, 2, 4, 8, 16, 32, 64, 128, 27, 54 }; + +const u32 crypto_ft_tab[4][256] = { + { + 0xa56363c6, 0x847c7cf8, 0x997777ee, 0x8d7b7bf6, + 0x0df2f2ff, 0xbd6b6bd6, 0xb16f6fde, 0x54c5c591, + 0x50303060, 0x03010102, 0xa96767ce, 0x7d2b2b56, + 0x19fefee7, 0x62d7d7b5, 0xe6abab4d, 0x9a7676ec, + 0x45caca8f, 0x9d82821f, 0x40c9c989, 0x877d7dfa, + 0x15fafaef, 0xeb5959b2, 0xc947478e, 0x0bf0f0fb, + 0xecadad41, 0x67d4d4b3, 0xfda2a25f, 0xeaafaf45, + 0xbf9c9c23, 0xf7a4a453, 0x967272e4, 0x5bc0c09b, + 0xc2b7b775, 0x1cfdfde1, 0xae93933d, 0x6a26264c, + 0x5a36366c, 0x413f3f7e, 0x02f7f7f5, 0x4fcccc83, + 0x5c343468, 0xf4a5a551, 0x34e5e5d1, 0x08f1f1f9, + 0x937171e2, 0x73d8d8ab, 0x53313162, 0x3f15152a, + 0x0c040408, 0x52c7c795, 0x65232346, 0x5ec3c39d, + 0x28181830, 0xa1969637, 0x0f05050a, 0xb59a9a2f, + 0x0907070e, 0x36121224, 0x9b80801b, 0x3de2e2df, + 0x26ebebcd, 0x6927274e, 0xcdb2b27f, 0x9f7575ea, + 0x1b090912, 0x9e83831d, 0x742c2c58, 0x2e1a1a34, + 0x2d1b1b36, 0xb26e6edc, 0xee5a5ab4, 0xfba0a05b, + 0xf65252a4, 0x4d3b3b76, 0x61d6d6b7, 0xceb3b37d, + 0x7b292952, 0x3ee3e3dd, 0x712f2f5e, 0x97848413, + 0xf55353a6, 0x68d1d1b9, 0x00000000, 0x2cededc1, + 0x60202040, 0x1ffcfce3, 0xc8b1b179, 0xed5b5bb6, + 0xbe6a6ad4, 0x46cbcb8d, 0xd9bebe67, 0x4b393972, + 0xde4a4a94, 0xd44c4c98, 0xe85858b0, 0x4acfcf85, + 0x6bd0d0bb, 0x2aefefc5, 0xe5aaaa4f, 0x16fbfbed, + 0xc5434386, 0xd74d4d9a, 0x55333366, 0x94858511, + 0xcf45458a, 0x10f9f9e9, 0x06020204, 0x817f7ffe, + 0xf05050a0, 0x443c3c78, 0xba9f9f25, 0xe3a8a84b, + 0xf35151a2, 0xfea3a35d, 0xc0404080, 0x8a8f8f05, + 0xad92923f, 0xbc9d9d21, 0x48383870, 0x04f5f5f1, + 0xdfbcbc63, 0xc1b6b677, 0x75dadaaf, 
0x63212142, + 0x30101020, 0x1affffe5, 0x0ef3f3fd, 0x6dd2d2bf, + 0x4ccdcd81, 0x140c0c18, 0x35131326, 0x2fececc3, + 0xe15f5fbe, 0xa2979735, 0xcc444488, 0x3917172e, + 0x57c4c493, 0xf2a7a755, 0x827e7efc, 0x473d3d7a, + 0xac6464c8, 0xe75d5dba, 0x2b191932, 0x957373e6, + 0xa06060c0, 0x98818119, 0xd14f4f9e, 0x7fdcdca3, + 0x66222244, 0x7e2a2a54, 0xab90903b, 0x8388880b, + 0xca46468c, 0x29eeeec7, 0xd3b8b86b, 0x3c141428, + 0x79dedea7, 0xe25e5ebc, 0x1d0b0b16, 0x76dbdbad, + 0x3be0e0db, 0x56323264, 0x4e3a3a74, 0x1e0a0a14, + 0xdb494992, 0x0a06060c, 0x6c242448, 0xe45c5cb8, + 0x5dc2c29f, 0x6ed3d3bd, 0xefacac43, 0xa66262c4, + 0xa8919139, 0xa4959531, 0x37e4e4d3, 0x8b7979f2, + 0x32e7e7d5, 0x43c8c88b, 0x5937376e, 0xb76d6dda, + 0x8c8d8d01, 0x64d5d5b1, 0xd24e4e9c, 0xe0a9a949, + 0xb46c6cd8, 0xfa5656ac, 0x07f4f4f3, 0x25eaeacf, + 0xaf6565ca, 0x8e7a7af4, 0xe9aeae47, 0x18080810, + 0xd5baba6f, 0x887878f0, 0x6f25254a, 0x722e2e5c, + 0x241c1c38, 0xf1a6a657, 0xc7b4b473, 0x51c6c697, + 0x23e8e8cb, 0x7cdddda1, 0x9c7474e8, 0x211f1f3e, + 0xdd4b4b96, 0xdcbdbd61, 0x868b8b0d, 0x858a8a0f, + 0x907070e0, 0x423e3e7c, 0xc4b5b571, 0xaa6666cc, + 0xd8484890, 0x05030306, 0x01f6f6f7, 0x120e0e1c, + 0xa36161c2, 0x5f35356a, 0xf95757ae, 0xd0b9b969, + 0x91868617, 0x58c1c199, 0x271d1d3a, 0xb99e9e27, + 0x38e1e1d9, 0x13f8f8eb, 0xb398982b, 0x33111122, + 0xbb6969d2, 0x70d9d9a9, 0x898e8e07, 0xa7949433, + 0xb69b9b2d, 0x221e1e3c, 0x92878715, 0x20e9e9c9, + 0x49cece87, 0xff5555aa, 0x78282850, 0x7adfdfa5, + 0x8f8c8c03, 0xf8a1a159, 0x80898909, 0x170d0d1a, + 0xdabfbf65, 0x31e6e6d7, 0xc6424284, 0xb86868d0, + 0xc3414182, 0xb0999929, 0x772d2d5a, 0x110f0f1e, + 0xcbb0b07b, 0xfc5454a8, 0xd6bbbb6d, 0x3a16162c, + }, { + 0x6363c6a5, 0x7c7cf884, 0x7777ee99, 0x7b7bf68d, + 0xf2f2ff0d, 0x6b6bd6bd, 0x6f6fdeb1, 0xc5c59154, + 0x30306050, 0x01010203, 0x6767cea9, 0x2b2b567d, + 0xfefee719, 0xd7d7b562, 0xabab4de6, 0x7676ec9a, + 0xcaca8f45, 0x82821f9d, 0xc9c98940, 0x7d7dfa87, + 0xfafaef15, 0x5959b2eb, 0x47478ec9, 0xf0f0fb0b, + 0xadad41ec, 0xd4d4b367, 0xa2a25ffd, 0xafaf45ea, + 0x9c9c23bf, 0xa4a453f7, 0x7272e496, 0xc0c09b5b, + 0xb7b775c2, 0xfdfde11c, 0x93933dae, 0x26264c6a, + 0x36366c5a, 0x3f3f7e41, 0xf7f7f502, 0xcccc834f, + 0x3434685c, 0xa5a551f4, 0xe5e5d134, 0xf1f1f908, + 0x7171e293, 0xd8d8ab73, 0x31316253, 0x15152a3f, + 0x0404080c, 0xc7c79552, 0x23234665, 0xc3c39d5e, + 0x18183028, 0x969637a1, 0x05050a0f, 0x9a9a2fb5, + 0x07070e09, 0x12122436, 0x80801b9b, 0xe2e2df3d, + 0xebebcd26, 0x27274e69, 0xb2b27fcd, 0x7575ea9f, + 0x0909121b, 0x83831d9e, 0x2c2c5874, 0x1a1a342e, + 0x1b1b362d, 0x6e6edcb2, 0x5a5ab4ee, 0xa0a05bfb, + 0x5252a4f6, 0x3b3b764d, 0xd6d6b761, 0xb3b37dce, + 0x2929527b, 0xe3e3dd3e, 0x2f2f5e71, 0x84841397, + 0x5353a6f5, 0xd1d1b968, 0x00000000, 0xededc12c, + 0x20204060, 0xfcfce31f, 0xb1b179c8, 0x5b5bb6ed, + 0x6a6ad4be, 0xcbcb8d46, 0xbebe67d9, 0x3939724b, + 0x4a4a94de, 0x4c4c98d4, 0x5858b0e8, 0xcfcf854a, + 0xd0d0bb6b, 0xefefc52a, 0xaaaa4fe5, 0xfbfbed16, + 0x434386c5, 0x4d4d9ad7, 0x33336655, 0x85851194, + 0x45458acf, 0xf9f9e910, 0x02020406, 0x7f7ffe81, + 0x5050a0f0, 0x3c3c7844, 0x9f9f25ba, 0xa8a84be3, + 0x5151a2f3, 0xa3a35dfe, 0x404080c0, 0x8f8f058a, + 0x92923fad, 0x9d9d21bc, 0x38387048, 0xf5f5f104, + 0xbcbc63df, 0xb6b677c1, 0xdadaaf75, 0x21214263, + 0x10102030, 0xffffe51a, 0xf3f3fd0e, 0xd2d2bf6d, + 0xcdcd814c, 0x0c0c1814, 0x13132635, 0xececc32f, + 0x5f5fbee1, 0x979735a2, 0x444488cc, 0x17172e39, + 0xc4c49357, 0xa7a755f2, 0x7e7efc82, 0x3d3d7a47, + 0x6464c8ac, 0x5d5dbae7, 0x1919322b, 0x7373e695, + 0x6060c0a0, 0x81811998, 0x4f4f9ed1, 0xdcdca37f, + 0x22224466, 0x2a2a547e, 
0x90903bab, 0x88880b83, + 0x46468cca, 0xeeeec729, 0xb8b86bd3, 0x1414283c, + 0xdedea779, 0x5e5ebce2, 0x0b0b161d, 0xdbdbad76, + 0xe0e0db3b, 0x32326456, 0x3a3a744e, 0x0a0a141e, + 0x494992db, 0x06060c0a, 0x2424486c, 0x5c5cb8e4, + 0xc2c29f5d, 0xd3d3bd6e, 0xacac43ef, 0x6262c4a6, + 0x919139a8, 0x959531a4, 0xe4e4d337, 0x7979f28b, + 0xe7e7d532, 0xc8c88b43, 0x37376e59, 0x6d6ddab7, + 0x8d8d018c, 0xd5d5b164, 0x4e4e9cd2, 0xa9a949e0, + 0x6c6cd8b4, 0x5656acfa, 0xf4f4f307, 0xeaeacf25, + 0x6565caaf, 0x7a7af48e, 0xaeae47e9, 0x08081018, + 0xbaba6fd5, 0x7878f088, 0x25254a6f, 0x2e2e5c72, + 0x1c1c3824, 0xa6a657f1, 0xb4b473c7, 0xc6c69751, + 0xe8e8cb23, 0xdddda17c, 0x7474e89c, 0x1f1f3e21, + 0x4b4b96dd, 0xbdbd61dc, 0x8b8b0d86, 0x8a8a0f85, + 0x7070e090, 0x3e3e7c42, 0xb5b571c4, 0x6666ccaa, + 0x484890d8, 0x03030605, 0xf6f6f701, 0x0e0e1c12, + 0x6161c2a3, 0x35356a5f, 0x5757aef9, 0xb9b969d0, + 0x86861791, 0xc1c19958, 0x1d1d3a27, 0x9e9e27b9, + 0xe1e1d938, 0xf8f8eb13, 0x98982bb3, 0x11112233, + 0x6969d2bb, 0xd9d9a970, 0x8e8e0789, 0x949433a7, + 0x9b9b2db6, 0x1e1e3c22, 0x87871592, 0xe9e9c920, + 0xcece8749, 0x5555aaff, 0x28285078, 0xdfdfa57a, + 0x8c8c038f, 0xa1a159f8, 0x89890980, 0x0d0d1a17, + 0xbfbf65da, 0xe6e6d731, 0x424284c6, 0x6868d0b8, + 0x414182c3, 0x999929b0, 0x2d2d5a77, 0x0f0f1e11, + 0xb0b07bcb, 0x5454a8fc, 0xbbbb6dd6, 0x16162c3a, + }, { + 0x63c6a563, 0x7cf8847c, 0x77ee9977, 0x7bf68d7b, + 0xf2ff0df2, 0x6bd6bd6b, 0x6fdeb16f, 0xc59154c5, + 0x30605030, 0x01020301, 0x67cea967, 0x2b567d2b, + 0xfee719fe, 0xd7b562d7, 0xab4de6ab, 0x76ec9a76, + 0xca8f45ca, 0x821f9d82, 0xc98940c9, 0x7dfa877d, + 0xfaef15fa, 0x59b2eb59, 0x478ec947, 0xf0fb0bf0, + 0xad41ecad, 0xd4b367d4, 0xa25ffda2, 0xaf45eaaf, + 0x9c23bf9c, 0xa453f7a4, 0x72e49672, 0xc09b5bc0, + 0xb775c2b7, 0xfde11cfd, 0x933dae93, 0x264c6a26, + 0x366c5a36, 0x3f7e413f, 0xf7f502f7, 0xcc834fcc, + 0x34685c34, 0xa551f4a5, 0xe5d134e5, 0xf1f908f1, + 0x71e29371, 0xd8ab73d8, 0x31625331, 0x152a3f15, + 0x04080c04, 0xc79552c7, 0x23466523, 0xc39d5ec3, + 0x18302818, 0x9637a196, 0x050a0f05, 0x9a2fb59a, + 0x070e0907, 0x12243612, 0x801b9b80, 0xe2df3de2, + 0xebcd26eb, 0x274e6927, 0xb27fcdb2, 0x75ea9f75, + 0x09121b09, 0x831d9e83, 0x2c58742c, 0x1a342e1a, + 0x1b362d1b, 0x6edcb26e, 0x5ab4ee5a, 0xa05bfba0, + 0x52a4f652, 0x3b764d3b, 0xd6b761d6, 0xb37dceb3, + 0x29527b29, 0xe3dd3ee3, 0x2f5e712f, 0x84139784, + 0x53a6f553, 0xd1b968d1, 0x00000000, 0xedc12ced, + 0x20406020, 0xfce31ffc, 0xb179c8b1, 0x5bb6ed5b, + 0x6ad4be6a, 0xcb8d46cb, 0xbe67d9be, 0x39724b39, + 0x4a94de4a, 0x4c98d44c, 0x58b0e858, 0xcf854acf, + 0xd0bb6bd0, 0xefc52aef, 0xaa4fe5aa, 0xfbed16fb, + 0x4386c543, 0x4d9ad74d, 0x33665533, 0x85119485, + 0x458acf45, 0xf9e910f9, 0x02040602, 0x7ffe817f, + 0x50a0f050, 0x3c78443c, 0x9f25ba9f, 0xa84be3a8, + 0x51a2f351, 0xa35dfea3, 0x4080c040, 0x8f058a8f, + 0x923fad92, 0x9d21bc9d, 0x38704838, 0xf5f104f5, + 0xbc63dfbc, 0xb677c1b6, 0xdaaf75da, 0x21426321, + 0x10203010, 0xffe51aff, 0xf3fd0ef3, 0xd2bf6dd2, + 0xcd814ccd, 0x0c18140c, 0x13263513, 0xecc32fec, + 0x5fbee15f, 0x9735a297, 0x4488cc44, 0x172e3917, + 0xc49357c4, 0xa755f2a7, 0x7efc827e, 0x3d7a473d, + 0x64c8ac64, 0x5dbae75d, 0x19322b19, 0x73e69573, + 0x60c0a060, 0x81199881, 0x4f9ed14f, 0xdca37fdc, + 0x22446622, 0x2a547e2a, 0x903bab90, 0x880b8388, + 0x468cca46, 0xeec729ee, 0xb86bd3b8, 0x14283c14, + 0xdea779de, 0x5ebce25e, 0x0b161d0b, 0xdbad76db, + 0xe0db3be0, 0x32645632, 0x3a744e3a, 0x0a141e0a, + 0x4992db49, 0x060c0a06, 0x24486c24, 0x5cb8e45c, + 0xc29f5dc2, 0xd3bd6ed3, 0xac43efac, 0x62c4a662, + 0x9139a891, 0x9531a495, 0xe4d337e4, 0x79f28b79, + 0xe7d532e7, 
0xc88b43c8, 0x376e5937, 0x6ddab76d, + 0x8d018c8d, 0xd5b164d5, 0x4e9cd24e, 0xa949e0a9, + 0x6cd8b46c, 0x56acfa56, 0xf4f307f4, 0xeacf25ea, + 0x65caaf65, 0x7af48e7a, 0xae47e9ae, 0x08101808, + 0xba6fd5ba, 0x78f08878, 0x254a6f25, 0x2e5c722e, + 0x1c38241c, 0xa657f1a6, 0xb473c7b4, 0xc69751c6, + 0xe8cb23e8, 0xdda17cdd, 0x74e89c74, 0x1f3e211f, + 0x4b96dd4b, 0xbd61dcbd, 0x8b0d868b, 0x8a0f858a, + 0x70e09070, 0x3e7c423e, 0xb571c4b5, 0x66ccaa66, + 0x4890d848, 0x03060503, 0xf6f701f6, 0x0e1c120e, + 0x61c2a361, 0x356a5f35, 0x57aef957, 0xb969d0b9, + 0x86179186, 0xc19958c1, 0x1d3a271d, 0x9e27b99e, + 0xe1d938e1, 0xf8eb13f8, 0x982bb398, 0x11223311, + 0x69d2bb69, 0xd9a970d9, 0x8e07898e, 0x9433a794, + 0x9b2db69b, 0x1e3c221e, 0x87159287, 0xe9c920e9, + 0xce8749ce, 0x55aaff55, 0x28507828, 0xdfa57adf, + 0x8c038f8c, 0xa159f8a1, 0x89098089, 0x0d1a170d, + 0xbf65dabf, 0xe6d731e6, 0x4284c642, 0x68d0b868, + 0x4182c341, 0x9929b099, 0x2d5a772d, 0x0f1e110f, + 0xb07bcbb0, 0x54a8fc54, 0xbb6dd6bb, 0x162c3a16, + }, { + 0xc6a56363, 0xf8847c7c, 0xee997777, 0xf68d7b7b, + 0xff0df2f2, 0xd6bd6b6b, 0xdeb16f6f, 0x9154c5c5, + 0x60503030, 0x02030101, 0xcea96767, 0x567d2b2b, + 0xe719fefe, 0xb562d7d7, 0x4de6abab, 0xec9a7676, + 0x8f45caca, 0x1f9d8282, 0x8940c9c9, 0xfa877d7d, + 0xef15fafa, 0xb2eb5959, 0x8ec94747, 0xfb0bf0f0, + 0x41ecadad, 0xb367d4d4, 0x5ffda2a2, 0x45eaafaf, + 0x23bf9c9c, 0x53f7a4a4, 0xe4967272, 0x9b5bc0c0, + 0x75c2b7b7, 0xe11cfdfd, 0x3dae9393, 0x4c6a2626, + 0x6c5a3636, 0x7e413f3f, 0xf502f7f7, 0x834fcccc, + 0x685c3434, 0x51f4a5a5, 0xd134e5e5, 0xf908f1f1, + 0xe2937171, 0xab73d8d8, 0x62533131, 0x2a3f1515, + 0x080c0404, 0x9552c7c7, 0x46652323, 0x9d5ec3c3, + 0x30281818, 0x37a19696, 0x0a0f0505, 0x2fb59a9a, + 0x0e090707, 0x24361212, 0x1b9b8080, 0xdf3de2e2, + 0xcd26ebeb, 0x4e692727, 0x7fcdb2b2, 0xea9f7575, + 0x121b0909, 0x1d9e8383, 0x58742c2c, 0x342e1a1a, + 0x362d1b1b, 0xdcb26e6e, 0xb4ee5a5a, 0x5bfba0a0, + 0xa4f65252, 0x764d3b3b, 0xb761d6d6, 0x7dceb3b3, + 0x527b2929, 0xdd3ee3e3, 0x5e712f2f, 0x13978484, + 0xa6f55353, 0xb968d1d1, 0x00000000, 0xc12ceded, + 0x40602020, 0xe31ffcfc, 0x79c8b1b1, 0xb6ed5b5b, + 0xd4be6a6a, 0x8d46cbcb, 0x67d9bebe, 0x724b3939, + 0x94de4a4a, 0x98d44c4c, 0xb0e85858, 0x854acfcf, + 0xbb6bd0d0, 0xc52aefef, 0x4fe5aaaa, 0xed16fbfb, + 0x86c54343, 0x9ad74d4d, 0x66553333, 0x11948585, + 0x8acf4545, 0xe910f9f9, 0x04060202, 0xfe817f7f, + 0xa0f05050, 0x78443c3c, 0x25ba9f9f, 0x4be3a8a8, + 0xa2f35151, 0x5dfea3a3, 0x80c04040, 0x058a8f8f, + 0x3fad9292, 0x21bc9d9d, 0x70483838, 0xf104f5f5, + 0x63dfbcbc, 0x77c1b6b6, 0xaf75dada, 0x42632121, + 0x20301010, 0xe51affff, 0xfd0ef3f3, 0xbf6dd2d2, + 0x814ccdcd, 0x18140c0c, 0x26351313, 0xc32fecec, + 0xbee15f5f, 0x35a29797, 0x88cc4444, 0x2e391717, + 0x9357c4c4, 0x55f2a7a7, 0xfc827e7e, 0x7a473d3d, + 0xc8ac6464, 0xbae75d5d, 0x322b1919, 0xe6957373, + 0xc0a06060, 0x19988181, 0x9ed14f4f, 0xa37fdcdc, + 0x44662222, 0x547e2a2a, 0x3bab9090, 0x0b838888, + 0x8cca4646, 0xc729eeee, 0x6bd3b8b8, 0x283c1414, + 0xa779dede, 0xbce25e5e, 0x161d0b0b, 0xad76dbdb, + 0xdb3be0e0, 0x64563232, 0x744e3a3a, 0x141e0a0a, + 0x92db4949, 0x0c0a0606, 0x486c2424, 0xb8e45c5c, + 0x9f5dc2c2, 0xbd6ed3d3, 0x43efacac, 0xc4a66262, + 0x39a89191, 0x31a49595, 0xd337e4e4, 0xf28b7979, + 0xd532e7e7, 0x8b43c8c8, 0x6e593737, 0xdab76d6d, + 0x018c8d8d, 0xb164d5d5, 0x9cd24e4e, 0x49e0a9a9, + 0xd8b46c6c, 0xacfa5656, 0xf307f4f4, 0xcf25eaea, + 0xcaaf6565, 0xf48e7a7a, 0x47e9aeae, 0x10180808, + 0x6fd5baba, 0xf0887878, 0x4a6f2525, 0x5c722e2e, + 0x38241c1c, 0x57f1a6a6, 0x73c7b4b4, 0x9751c6c6, + 0xcb23e8e8, 0xa17cdddd, 0xe89c7474, 0x3e211f1f, + 
0x96dd4b4b, 0x61dcbdbd, 0x0d868b8b, 0x0f858a8a, + 0xe0907070, 0x7c423e3e, 0x71c4b5b5, 0xccaa6666, + 0x90d84848, 0x06050303, 0xf701f6f6, 0x1c120e0e, + 0xc2a36161, 0x6a5f3535, 0xaef95757, 0x69d0b9b9, + 0x17918686, 0x9958c1c1, 0x3a271d1d, 0x27b99e9e, + 0xd938e1e1, 0xeb13f8f8, 0x2bb39898, 0x22331111, + 0xd2bb6969, 0xa970d9d9, 0x07898e8e, 0x33a79494, + 0x2db69b9b, 0x3c221e1e, 0x15928787, 0xc920e9e9, + 0x8749cece, 0xaaff5555, 0x50782828, 0xa57adfdf, + 0x038f8c8c, 0x59f8a1a1, 0x09808989, 0x1a170d0d, + 0x65dabfbf, 0xd731e6e6, 0x84c64242, 0xd0b86868, + 0x82c34141, 0x29b09999, 0x5a772d2d, 0x1e110f0f, + 0x7bcbb0b0, 0xa8fc5454, 0x6dd6bbbb, 0x2c3a1616, + } +}; + +const u32 crypto_fl_tab[4][256] = { + { + 0x00000063, 0x0000007c, 0x00000077, 0x0000007b, + 0x000000f2, 0x0000006b, 0x0000006f, 0x000000c5, + 0x00000030, 0x00000001, 0x00000067, 0x0000002b, + 0x000000fe, 0x000000d7, 0x000000ab, 0x00000076, + 0x000000ca, 0x00000082, 0x000000c9, 0x0000007d, + 0x000000fa, 0x00000059, 0x00000047, 0x000000f0, + 0x000000ad, 0x000000d4, 0x000000a2, 0x000000af, + 0x0000009c, 0x000000a4, 0x00000072, 0x000000c0, + 0x000000b7, 0x000000fd, 0x00000093, 0x00000026, + 0x00000036, 0x0000003f, 0x000000f7, 0x000000cc, + 0x00000034, 0x000000a5, 0x000000e5, 0x000000f1, + 0x00000071, 0x000000d8, 0x00000031, 0x00000015, + 0x00000004, 0x000000c7, 0x00000023, 0x000000c3, + 0x00000018, 0x00000096, 0x00000005, 0x0000009a, + 0x00000007, 0x00000012, 0x00000080, 0x000000e2, + 0x000000eb, 0x00000027, 0x000000b2, 0x00000075, + 0x00000009, 0x00000083, 0x0000002c, 0x0000001a, + 0x0000001b, 0x0000006e, 0x0000005a, 0x000000a0, + 0x00000052, 0x0000003b, 0x000000d6, 0x000000b3, + 0x00000029, 0x000000e3, 0x0000002f, 0x00000084, + 0x00000053, 0x000000d1, 0x00000000, 0x000000ed, + 0x00000020, 0x000000fc, 0x000000b1, 0x0000005b, + 0x0000006a, 0x000000cb, 0x000000be, 0x00000039, + 0x0000004a, 0x0000004c, 0x00000058, 0x000000cf, + 0x000000d0, 0x000000ef, 0x000000aa, 0x000000fb, + 0x00000043, 0x0000004d, 0x00000033, 0x00000085, + 0x00000045, 0x000000f9, 0x00000002, 0x0000007f, + 0x00000050, 0x0000003c, 0x0000009f, 0x000000a8, + 0x00000051, 0x000000a3, 0x00000040, 0x0000008f, + 0x00000092, 0x0000009d, 0x00000038, 0x000000f5, + 0x000000bc, 0x000000b6, 0x000000da, 0x00000021, + 0x00000010, 0x000000ff, 0x000000f3, 0x000000d2, + 0x000000cd, 0x0000000c, 0x00000013, 0x000000ec, + 0x0000005f, 0x00000097, 0x00000044, 0x00000017, + 0x000000c4, 0x000000a7, 0x0000007e, 0x0000003d, + 0x00000064, 0x0000005d, 0x00000019, 0x00000073, + 0x00000060, 0x00000081, 0x0000004f, 0x000000dc, + 0x00000022, 0x0000002a, 0x00000090, 0x00000088, + 0x00000046, 0x000000ee, 0x000000b8, 0x00000014, + 0x000000de, 0x0000005e, 0x0000000b, 0x000000db, + 0x000000e0, 0x00000032, 0x0000003a, 0x0000000a, + 0x00000049, 0x00000006, 0x00000024, 0x0000005c, + 0x000000c2, 0x000000d3, 0x000000ac, 0x00000062, + 0x00000091, 0x00000095, 0x000000e4, 0x00000079, + 0x000000e7, 0x000000c8, 0x00000037, 0x0000006d, + 0x0000008d, 0x000000d5, 0x0000004e, 0x000000a9, + 0x0000006c, 0x00000056, 0x000000f4, 0x000000ea, + 0x00000065, 0x0000007a, 0x000000ae, 0x00000008, + 0x000000ba, 0x00000078, 0x00000025, 0x0000002e, + 0x0000001c, 0x000000a6, 0x000000b4, 0x000000c6, + 0x000000e8, 0x000000dd, 0x00000074, 0x0000001f, + 0x0000004b, 0x000000bd, 0x0000008b, 0x0000008a, + 0x00000070, 0x0000003e, 0x000000b5, 0x00000066, + 0x00000048, 0x00000003, 0x000000f6, 0x0000000e, + 0x00000061, 0x00000035, 0x00000057, 0x000000b9, + 0x00000086, 0x000000c1, 0x0000001d, 0x0000009e, + 0x000000e1, 0x000000f8, 0x00000098, 0x00000011, + 
0x00000069, 0x000000d9, 0x0000008e, 0x00000094, + 0x0000009b, 0x0000001e, 0x00000087, 0x000000e9, + 0x000000ce, 0x00000055, 0x00000028, 0x000000df, + 0x0000008c, 0x000000a1, 0x00000089, 0x0000000d, + 0x000000bf, 0x000000e6, 0x00000042, 0x00000068, + 0x00000041, 0x00000099, 0x0000002d, 0x0000000f, + 0x000000b0, 0x00000054, 0x000000bb, 0x00000016, + }, { + 0x00006300, 0x00007c00, 0x00007700, 0x00007b00, + 0x0000f200, 0x00006b00, 0x00006f00, 0x0000c500, + 0x00003000, 0x00000100, 0x00006700, 0x00002b00, + 0x0000fe00, 0x0000d700, 0x0000ab00, 0x00007600, + 0x0000ca00, 0x00008200, 0x0000c900, 0x00007d00, + 0x0000fa00, 0x00005900, 0x00004700, 0x0000f000, + 0x0000ad00, 0x0000d400, 0x0000a200, 0x0000af00, + 0x00009c00, 0x0000a400, 0x00007200, 0x0000c000, + 0x0000b700, 0x0000fd00, 0x00009300, 0x00002600, + 0x00003600, 0x00003f00, 0x0000f700, 0x0000cc00, + 0x00003400, 0x0000a500, 0x0000e500, 0x0000f100, + 0x00007100, 0x0000d800, 0x00003100, 0x00001500, + 0x00000400, 0x0000c700, 0x00002300, 0x0000c300, + 0x00001800, 0x00009600, 0x00000500, 0x00009a00, + 0x00000700, 0x00001200, 0x00008000, 0x0000e200, + 0x0000eb00, 0x00002700, 0x0000b200, 0x00007500, + 0x00000900, 0x00008300, 0x00002c00, 0x00001a00, + 0x00001b00, 0x00006e00, 0x00005a00, 0x0000a000, + 0x00005200, 0x00003b00, 0x0000d600, 0x0000b300, + 0x00002900, 0x0000e300, 0x00002f00, 0x00008400, + 0x00005300, 0x0000d100, 0x00000000, 0x0000ed00, + 0x00002000, 0x0000fc00, 0x0000b100, 0x00005b00, + 0x00006a00, 0x0000cb00, 0x0000be00, 0x00003900, + 0x00004a00, 0x00004c00, 0x00005800, 0x0000cf00, + 0x0000d000, 0x0000ef00, 0x0000aa00, 0x0000fb00, + 0x00004300, 0x00004d00, 0x00003300, 0x00008500, + 0x00004500, 0x0000f900, 0x00000200, 0x00007f00, + 0x00005000, 0x00003c00, 0x00009f00, 0x0000a800, + 0x00005100, 0x0000a300, 0x00004000, 0x00008f00, + 0x00009200, 0x00009d00, 0x00003800, 0x0000f500, + 0x0000bc00, 0x0000b600, 0x0000da00, 0x00002100, + 0x00001000, 0x0000ff00, 0x0000f300, 0x0000d200, + 0x0000cd00, 0x00000c00, 0x00001300, 0x0000ec00, + 0x00005f00, 0x00009700, 0x00004400, 0x00001700, + 0x0000c400, 0x0000a700, 0x00007e00, 0x00003d00, + 0x00006400, 0x00005d00, 0x00001900, 0x00007300, + 0x00006000, 0x00008100, 0x00004f00, 0x0000dc00, + 0x00002200, 0x00002a00, 0x00009000, 0x00008800, + 0x00004600, 0x0000ee00, 0x0000b800, 0x00001400, + 0x0000de00, 0x00005e00, 0x00000b00, 0x0000db00, + 0x0000e000, 0x00003200, 0x00003a00, 0x00000a00, + 0x00004900, 0x00000600, 0x00002400, 0x00005c00, + 0x0000c200, 0x0000d300, 0x0000ac00, 0x00006200, + 0x00009100, 0x00009500, 0x0000e400, 0x00007900, + 0x0000e700, 0x0000c800, 0x00003700, 0x00006d00, + 0x00008d00, 0x0000d500, 0x00004e00, 0x0000a900, + 0x00006c00, 0x00005600, 0x0000f400, 0x0000ea00, + 0x00006500, 0x00007a00, 0x0000ae00, 0x00000800, + 0x0000ba00, 0x00007800, 0x00002500, 0x00002e00, + 0x00001c00, 0x0000a600, 0x0000b400, 0x0000c600, + 0x0000e800, 0x0000dd00, 0x00007400, 0x00001f00, + 0x00004b00, 0x0000bd00, 0x00008b00, 0x00008a00, + 0x00007000, 0x00003e00, 0x0000b500, 0x00006600, + 0x00004800, 0x00000300, 0x0000f600, 0x00000e00, + 0x00006100, 0x00003500, 0x00005700, 0x0000b900, + 0x00008600, 0x0000c100, 0x00001d00, 0x00009e00, + 0x0000e100, 0x0000f800, 0x00009800, 0x00001100, + 0x00006900, 0x0000d900, 0x00008e00, 0x00009400, + 0x00009b00, 0x00001e00, 0x00008700, 0x0000e900, + 0x0000ce00, 0x00005500, 0x00002800, 0x0000df00, + 0x00008c00, 0x0000a100, 0x00008900, 0x00000d00, + 0x0000bf00, 0x0000e600, 0x00004200, 0x00006800, + 0x00004100, 0x00009900, 0x00002d00, 0x00000f00, + 0x0000b000, 0x00005400, 0x0000bb00, 0x00001600, 
+ }, { + 0x00630000, 0x007c0000, 0x00770000, 0x007b0000, + 0x00f20000, 0x006b0000, 0x006f0000, 0x00c50000, + 0x00300000, 0x00010000, 0x00670000, 0x002b0000, + 0x00fe0000, 0x00d70000, 0x00ab0000, 0x00760000, + 0x00ca0000, 0x00820000, 0x00c90000, 0x007d0000, + 0x00fa0000, 0x00590000, 0x00470000, 0x00f00000, + 0x00ad0000, 0x00d40000, 0x00a20000, 0x00af0000, + 0x009c0000, 0x00a40000, 0x00720000, 0x00c00000, + 0x00b70000, 0x00fd0000, 0x00930000, 0x00260000, + 0x00360000, 0x003f0000, 0x00f70000, 0x00cc0000, + 0x00340000, 0x00a50000, 0x00e50000, 0x00f10000, + 0x00710000, 0x00d80000, 0x00310000, 0x00150000, + 0x00040000, 0x00c70000, 0x00230000, 0x00c30000, + 0x00180000, 0x00960000, 0x00050000, 0x009a0000, + 0x00070000, 0x00120000, 0x00800000, 0x00e20000, + 0x00eb0000, 0x00270000, 0x00b20000, 0x00750000, + 0x00090000, 0x00830000, 0x002c0000, 0x001a0000, + 0x001b0000, 0x006e0000, 0x005a0000, 0x00a00000, + 0x00520000, 0x003b0000, 0x00d60000, 0x00b30000, + 0x00290000, 0x00e30000, 0x002f0000, 0x00840000, + 0x00530000, 0x00d10000, 0x00000000, 0x00ed0000, + 0x00200000, 0x00fc0000, 0x00b10000, 0x005b0000, + 0x006a0000, 0x00cb0000, 0x00be0000, 0x00390000, + 0x004a0000, 0x004c0000, 0x00580000, 0x00cf0000, + 0x00d00000, 0x00ef0000, 0x00aa0000, 0x00fb0000, + 0x00430000, 0x004d0000, 0x00330000, 0x00850000, + 0x00450000, 0x00f90000, 0x00020000, 0x007f0000, + 0x00500000, 0x003c0000, 0x009f0000, 0x00a80000, + 0x00510000, 0x00a30000, 0x00400000, 0x008f0000, + 0x00920000, 0x009d0000, 0x00380000, 0x00f50000, + 0x00bc0000, 0x00b60000, 0x00da0000, 0x00210000, + 0x00100000, 0x00ff0000, 0x00f30000, 0x00d20000, + 0x00cd0000, 0x000c0000, 0x00130000, 0x00ec0000, + 0x005f0000, 0x00970000, 0x00440000, 0x00170000, + 0x00c40000, 0x00a70000, 0x007e0000, 0x003d0000, + 0x00640000, 0x005d0000, 0x00190000, 0x00730000, + 0x00600000, 0x00810000, 0x004f0000, 0x00dc0000, + 0x00220000, 0x002a0000, 0x00900000, 0x00880000, + 0x00460000, 0x00ee0000, 0x00b80000, 0x00140000, + 0x00de0000, 0x005e0000, 0x000b0000, 0x00db0000, + 0x00e00000, 0x00320000, 0x003a0000, 0x000a0000, + 0x00490000, 0x00060000, 0x00240000, 0x005c0000, + 0x00c20000, 0x00d30000, 0x00ac0000, 0x00620000, + 0x00910000, 0x00950000, 0x00e40000, 0x00790000, + 0x00e70000, 0x00c80000, 0x00370000, 0x006d0000, + 0x008d0000, 0x00d50000, 0x004e0000, 0x00a90000, + 0x006c0000, 0x00560000, 0x00f40000, 0x00ea0000, + 0x00650000, 0x007a0000, 0x00ae0000, 0x00080000, + 0x00ba0000, 0x00780000, 0x00250000, 0x002e0000, + 0x001c0000, 0x00a60000, 0x00b40000, 0x00c60000, + 0x00e80000, 0x00dd0000, 0x00740000, 0x001f0000, + 0x004b0000, 0x00bd0000, 0x008b0000, 0x008a0000, + 0x00700000, 0x003e0000, 0x00b50000, 0x00660000, + 0x00480000, 0x00030000, 0x00f60000, 0x000e0000, + 0x00610000, 0x00350000, 0x00570000, 0x00b90000, + 0x00860000, 0x00c10000, 0x001d0000, 0x009e0000, + 0x00e10000, 0x00f80000, 0x00980000, 0x00110000, + 0x00690000, 0x00d90000, 0x008e0000, 0x00940000, + 0x009b0000, 0x001e0000, 0x00870000, 0x00e90000, + 0x00ce0000, 0x00550000, 0x00280000, 0x00df0000, + 0x008c0000, 0x00a10000, 0x00890000, 0x000d0000, + 0x00bf0000, 0x00e60000, 0x00420000, 0x00680000, + 0x00410000, 0x00990000, 0x002d0000, 0x000f0000, + 0x00b00000, 0x00540000, 0x00bb0000, 0x00160000, + }, { + 0x63000000, 0x7c000000, 0x77000000, 0x7b000000, + 0xf2000000, 0x6b000000, 0x6f000000, 0xc5000000, + 0x30000000, 0x01000000, 0x67000000, 0x2b000000, + 0xfe000000, 0xd7000000, 0xab000000, 0x76000000, + 0xca000000, 0x82000000, 0xc9000000, 0x7d000000, + 0xfa000000, 0x59000000, 0x47000000, 0xf0000000, + 0xad000000, 0xd4000000, 0xa2000000, 
0xaf000000, + 0x9c000000, 0xa4000000, 0x72000000, 0xc0000000, + 0xb7000000, 0xfd000000, 0x93000000, 0x26000000, + 0x36000000, 0x3f000000, 0xf7000000, 0xcc000000, + 0x34000000, 0xa5000000, 0xe5000000, 0xf1000000, + 0x71000000, 0xd8000000, 0x31000000, 0x15000000, + 0x04000000, 0xc7000000, 0x23000000, 0xc3000000, + 0x18000000, 0x96000000, 0x05000000, 0x9a000000, + 0x07000000, 0x12000000, 0x80000000, 0xe2000000, + 0xeb000000, 0x27000000, 0xb2000000, 0x75000000, + 0x09000000, 0x83000000, 0x2c000000, 0x1a000000, + 0x1b000000, 0x6e000000, 0x5a000000, 0xa0000000, + 0x52000000, 0x3b000000, 0xd6000000, 0xb3000000, + 0x29000000, 0xe3000000, 0x2f000000, 0x84000000, + 0x53000000, 0xd1000000, 0x00000000, 0xed000000, + 0x20000000, 0xfc000000, 0xb1000000, 0x5b000000, + 0x6a000000, 0xcb000000, 0xbe000000, 0x39000000, + 0x4a000000, 0x4c000000, 0x58000000, 0xcf000000, + 0xd0000000, 0xef000000, 0xaa000000, 0xfb000000, + 0x43000000, 0x4d000000, 0x33000000, 0x85000000, + 0x45000000, 0xf9000000, 0x02000000, 0x7f000000, + 0x50000000, 0x3c000000, 0x9f000000, 0xa8000000, + 0x51000000, 0xa3000000, 0x40000000, 0x8f000000, + 0x92000000, 0x9d000000, 0x38000000, 0xf5000000, + 0xbc000000, 0xb6000000, 0xda000000, 0x21000000, + 0x10000000, 0xff000000, 0xf3000000, 0xd2000000, + 0xcd000000, 0x0c000000, 0x13000000, 0xec000000, + 0x5f000000, 0x97000000, 0x44000000, 0x17000000, + 0xc4000000, 0xa7000000, 0x7e000000, 0x3d000000, + 0x64000000, 0x5d000000, 0x19000000, 0x73000000, + 0x60000000, 0x81000000, 0x4f000000, 0xdc000000, + 0x22000000, 0x2a000000, 0x90000000, 0x88000000, + 0x46000000, 0xee000000, 0xb8000000, 0x14000000, + 0xde000000, 0x5e000000, 0x0b000000, 0xdb000000, + 0xe0000000, 0x32000000, 0x3a000000, 0x0a000000, + 0x49000000, 0x06000000, 0x24000000, 0x5c000000, + 0xc2000000, 0xd3000000, 0xac000000, 0x62000000, + 0x91000000, 0x95000000, 0xe4000000, 0x79000000, + 0xe7000000, 0xc8000000, 0x37000000, 0x6d000000, + 0x8d000000, 0xd5000000, 0x4e000000, 0xa9000000, + 0x6c000000, 0x56000000, 0xf4000000, 0xea000000, + 0x65000000, 0x7a000000, 0xae000000, 0x08000000, + 0xba000000, 0x78000000, 0x25000000, 0x2e000000, + 0x1c000000, 0xa6000000, 0xb4000000, 0xc6000000, + 0xe8000000, 0xdd000000, 0x74000000, 0x1f000000, + 0x4b000000, 0xbd000000, 0x8b000000, 0x8a000000, + 0x70000000, 0x3e000000, 0xb5000000, 0x66000000, + 0x48000000, 0x03000000, 0xf6000000, 0x0e000000, + 0x61000000, 0x35000000, 0x57000000, 0xb9000000, + 0x86000000, 0xc1000000, 0x1d000000, 0x9e000000, + 0xe1000000, 0xf8000000, 0x98000000, 0x11000000, + 0x69000000, 0xd9000000, 0x8e000000, 0x94000000, + 0x9b000000, 0x1e000000, 0x87000000, 0xe9000000, + 0xce000000, 0x55000000, 0x28000000, 0xdf000000, + 0x8c000000, 0xa1000000, 0x89000000, 0x0d000000, + 0xbf000000, 0xe6000000, 0x42000000, 0x68000000, + 0x41000000, 0x99000000, 0x2d000000, 0x0f000000, + 0xb0000000, 0x54000000, 0xbb000000, 0x16000000, + } +}; + +const u32 crypto_it_tab[4][256] = { + { + 0x50a7f451, 0x5365417e, 0xc3a4171a, 0x965e273a, + 0xcb6bab3b, 0xf1459d1f, 0xab58faac, 0x9303e34b, + 0x55fa3020, 0xf66d76ad, 0x9176cc88, 0x254c02f5, + 0xfcd7e54f, 0xd7cb2ac5, 0x80443526, 0x8fa362b5, + 0x495ab1de, 0x671bba25, 0x980eea45, 0xe1c0fe5d, + 0x02752fc3, 0x12f04c81, 0xa397468d, 0xc6f9d36b, + 0xe75f8f03, 0x959c9215, 0xeb7a6dbf, 0xda595295, + 0x2d83bed4, 0xd3217458, 0x2969e049, 0x44c8c98e, + 0x6a89c275, 0x78798ef4, 0x6b3e5899, 0xdd71b927, + 0xb64fe1be, 0x17ad88f0, 0x66ac20c9, 0xb43ace7d, + 0x184adf63, 0x82311ae5, 0x60335197, 0x457f5362, + 0xe07764b1, 0x84ae6bbb, 0x1ca081fe, 0x942b08f9, + 0x58684870, 0x19fd458f, 0x876cde94, 
0xb7f87b52, + 0x23d373ab, 0xe2024b72, 0x578f1fe3, 0x2aab5566, + 0x0728ebb2, 0x03c2b52f, 0x9a7bc586, 0xa50837d3, + 0xf2872830, 0xb2a5bf23, 0xba6a0302, 0x5c8216ed, + 0x2b1ccf8a, 0x92b479a7, 0xf0f207f3, 0xa1e2694e, + 0xcdf4da65, 0xd5be0506, 0x1f6234d1, 0x8afea6c4, + 0x9d532e34, 0xa055f3a2, 0x32e18a05, 0x75ebf6a4, + 0x39ec830b, 0xaaef6040, 0x069f715e, 0x51106ebd, + 0xf98a213e, 0x3d06dd96, 0xae053edd, 0x46bde64d, + 0xb58d5491, 0x055dc471, 0x6fd40604, 0xff155060, + 0x24fb9819, 0x97e9bdd6, 0xcc434089, 0x779ed967, + 0xbd42e8b0, 0x888b8907, 0x385b19e7, 0xdbeec879, + 0x470a7ca1, 0xe90f427c, 0xc91e84f8, 0x00000000, + 0x83868009, 0x48ed2b32, 0xac70111e, 0x4e725a6c, + 0xfbff0efd, 0x5638850f, 0x1ed5ae3d, 0x27392d36, + 0x64d90f0a, 0x21a65c68, 0xd1545b9b, 0x3a2e3624, + 0xb1670a0c, 0x0fe75793, 0xd296eeb4, 0x9e919b1b, + 0x4fc5c080, 0xa220dc61, 0x694b775a, 0x161a121c, + 0x0aba93e2, 0xe52aa0c0, 0x43e0223c, 0x1d171b12, + 0x0b0d090e, 0xadc78bf2, 0xb9a8b62d, 0xc8a91e14, + 0x8519f157, 0x4c0775af, 0xbbdd99ee, 0xfd607fa3, + 0x9f2601f7, 0xbcf5725c, 0xc53b6644, 0x347efb5b, + 0x7629438b, 0xdcc623cb, 0x68fcedb6, 0x63f1e4b8, + 0xcadc31d7, 0x10856342, 0x40229713, 0x2011c684, + 0x7d244a85, 0xf83dbbd2, 0x1132f9ae, 0x6da129c7, + 0x4b2f9e1d, 0xf330b2dc, 0xec52860d, 0xd0e3c177, + 0x6c16b32b, 0x99b970a9, 0xfa489411, 0x2264e947, + 0xc48cfca8, 0x1a3ff0a0, 0xd82c7d56, 0xef903322, + 0xc74e4987, 0xc1d138d9, 0xfea2ca8c, 0x360bd498, + 0xcf81f5a6, 0x28de7aa5, 0x268eb7da, 0xa4bfad3f, + 0xe49d3a2c, 0x0d927850, 0x9bcc5f6a, 0x62467e54, + 0xc2138df6, 0xe8b8d890, 0x5ef7392e, 0xf5afc382, + 0xbe805d9f, 0x7c93d069, 0xa92dd56f, 0xb31225cf, + 0x3b99acc8, 0xa77d1810, 0x6e639ce8, 0x7bbb3bdb, + 0x097826cd, 0xf418596e, 0x01b79aec, 0xa89a4f83, + 0x656e95e6, 0x7ee6ffaa, 0x08cfbc21, 0xe6e815ef, + 0xd99be7ba, 0xce366f4a, 0xd4099fea, 0xd67cb029, + 0xafb2a431, 0x31233f2a, 0x3094a5c6, 0xc066a235, + 0x37bc4e74, 0xa6ca82fc, 0xb0d090e0, 0x15d8a733, + 0x4a9804f1, 0xf7daec41, 0x0e50cd7f, 0x2ff69117, + 0x8dd64d76, 0x4db0ef43, 0x544daacc, 0xdf0496e4, + 0xe3b5d19e, 0x1b886a4c, 0xb81f2cc1, 0x7f516546, + 0x04ea5e9d, 0x5d358c01, 0x737487fa, 0x2e410bfb, + 0x5a1d67b3, 0x52d2db92, 0x335610e9, 0x1347d66d, + 0x8c61d79a, 0x7a0ca137, 0x8e14f859, 0x893c13eb, + 0xee27a9ce, 0x35c961b7, 0xede51ce1, 0x3cb1477a, + 0x59dfd29c, 0x3f73f255, 0x79ce1418, 0xbf37c773, + 0xeacdf753, 0x5baafd5f, 0x146f3ddf, 0x86db4478, + 0x81f3afca, 0x3ec468b9, 0x2c342438, 0x5f40a3c2, + 0x72c31d16, 0x0c25e2bc, 0x8b493c28, 0x41950dff, + 0x7101a839, 0xdeb30c08, 0x9ce4b4d8, 0x90c15664, + 0x6184cb7b, 0x70b632d5, 0x745c6c48, 0x4257b8d0, + }, { + 0xa7f45150, 0x65417e53, 0xa4171ac3, 0x5e273a96, + 0x6bab3bcb, 0x459d1ff1, 0x58faacab, 0x03e34b93, + 0xfa302055, 0x6d76adf6, 0x76cc8891, 0x4c02f525, + 0xd7e54ffc, 0xcb2ac5d7, 0x44352680, 0xa362b58f, + 0x5ab1de49, 0x1bba2567, 0x0eea4598, 0xc0fe5de1, + 0x752fc302, 0xf04c8112, 0x97468da3, 0xf9d36bc6, + 0x5f8f03e7, 0x9c921595, 0x7a6dbfeb, 0x595295da, + 0x83bed42d, 0x217458d3, 0x69e04929, 0xc8c98e44, + 0x89c2756a, 0x798ef478, 0x3e58996b, 0x71b927dd, + 0x4fe1beb6, 0xad88f017, 0xac20c966, 0x3ace7db4, + 0x4adf6318, 0x311ae582, 0x33519760, 0x7f536245, + 0x7764b1e0, 0xae6bbb84, 0xa081fe1c, 0x2b08f994, + 0x68487058, 0xfd458f19, 0x6cde9487, 0xf87b52b7, + 0xd373ab23, 0x024b72e2, 0x8f1fe357, 0xab55662a, + 0x28ebb207, 0xc2b52f03, 0x7bc5869a, 0x0837d3a5, + 0x872830f2, 0xa5bf23b2, 0x6a0302ba, 0x8216ed5c, + 0x1ccf8a2b, 0xb479a792, 0xf207f3f0, 0xe2694ea1, + 0xf4da65cd, 0xbe0506d5, 0x6234d11f, 0xfea6c48a, + 0x532e349d, 0x55f3a2a0, 0xe18a0532, 0xebf6a475, + 0xec830b39, 0xef6040aa, 
0x9f715e06, 0x106ebd51, + 0x8a213ef9, 0x06dd963d, 0x053eddae, 0xbde64d46, + 0x8d5491b5, 0x5dc47105, 0xd406046f, 0x155060ff, + 0xfb981924, 0xe9bdd697, 0x434089cc, 0x9ed96777, + 0x42e8b0bd, 0x8b890788, 0x5b19e738, 0xeec879db, + 0x0a7ca147, 0x0f427ce9, 0x1e84f8c9, 0x00000000, + 0x86800983, 0xed2b3248, 0x70111eac, 0x725a6c4e, + 0xff0efdfb, 0x38850f56, 0xd5ae3d1e, 0x392d3627, + 0xd90f0a64, 0xa65c6821, 0x545b9bd1, 0x2e36243a, + 0x670a0cb1, 0xe757930f, 0x96eeb4d2, 0x919b1b9e, + 0xc5c0804f, 0x20dc61a2, 0x4b775a69, 0x1a121c16, + 0xba93e20a, 0x2aa0c0e5, 0xe0223c43, 0x171b121d, + 0x0d090e0b, 0xc78bf2ad, 0xa8b62db9, 0xa91e14c8, + 0x19f15785, 0x0775af4c, 0xdd99eebb, 0x607fa3fd, + 0x2601f79f, 0xf5725cbc, 0x3b6644c5, 0x7efb5b34, + 0x29438b76, 0xc623cbdc, 0xfcedb668, 0xf1e4b863, + 0xdc31d7ca, 0x85634210, 0x22971340, 0x11c68420, + 0x244a857d, 0x3dbbd2f8, 0x32f9ae11, 0xa129c76d, + 0x2f9e1d4b, 0x30b2dcf3, 0x52860dec, 0xe3c177d0, + 0x16b32b6c, 0xb970a999, 0x489411fa, 0x64e94722, + 0x8cfca8c4, 0x3ff0a01a, 0x2c7d56d8, 0x903322ef, + 0x4e4987c7, 0xd138d9c1, 0xa2ca8cfe, 0x0bd49836, + 0x81f5a6cf, 0xde7aa528, 0x8eb7da26, 0xbfad3fa4, + 0x9d3a2ce4, 0x9278500d, 0xcc5f6a9b, 0x467e5462, + 0x138df6c2, 0xb8d890e8, 0xf7392e5e, 0xafc382f5, + 0x805d9fbe, 0x93d0697c, 0x2dd56fa9, 0x1225cfb3, + 0x99acc83b, 0x7d1810a7, 0x639ce86e, 0xbb3bdb7b, + 0x7826cd09, 0x18596ef4, 0xb79aec01, 0x9a4f83a8, + 0x6e95e665, 0xe6ffaa7e, 0xcfbc2108, 0xe815efe6, + 0x9be7bad9, 0x366f4ace, 0x099fead4, 0x7cb029d6, + 0xb2a431af, 0x233f2a31, 0x94a5c630, 0x66a235c0, + 0xbc4e7437, 0xca82fca6, 0xd090e0b0, 0xd8a73315, + 0x9804f14a, 0xdaec41f7, 0x50cd7f0e, 0xf691172f, + 0xd64d768d, 0xb0ef434d, 0x4daacc54, 0x0496e4df, + 0xb5d19ee3, 0x886a4c1b, 0x1f2cc1b8, 0x5165467f, + 0xea5e9d04, 0x358c015d, 0x7487fa73, 0x410bfb2e, + 0x1d67b35a, 0xd2db9252, 0x5610e933, 0x47d66d13, + 0x61d79a8c, 0x0ca1377a, 0x14f8598e, 0x3c13eb89, + 0x27a9ceee, 0xc961b735, 0xe51ce1ed, 0xb1477a3c, + 0xdfd29c59, 0x73f2553f, 0xce141879, 0x37c773bf, + 0xcdf753ea, 0xaafd5f5b, 0x6f3ddf14, 0xdb447886, + 0xf3afca81, 0xc468b93e, 0x3424382c, 0x40a3c25f, + 0xc31d1672, 0x25e2bc0c, 0x493c288b, 0x950dff41, + 0x01a83971, 0xb30c08de, 0xe4b4d89c, 0xc1566490, + 0x84cb7b61, 0xb632d570, 0x5c6c4874, 0x57b8d042, + }, { + 0xf45150a7, 0x417e5365, 0x171ac3a4, 0x273a965e, + 0xab3bcb6b, 0x9d1ff145, 0xfaacab58, 0xe34b9303, + 0x302055fa, 0x76adf66d, 0xcc889176, 0x02f5254c, + 0xe54ffcd7, 0x2ac5d7cb, 0x35268044, 0x62b58fa3, + 0xb1de495a, 0xba25671b, 0xea45980e, 0xfe5de1c0, + 0x2fc30275, 0x4c8112f0, 0x468da397, 0xd36bc6f9, + 0x8f03e75f, 0x9215959c, 0x6dbfeb7a, 0x5295da59, + 0xbed42d83, 0x7458d321, 0xe0492969, 0xc98e44c8, + 0xc2756a89, 0x8ef47879, 0x58996b3e, 0xb927dd71, + 0xe1beb64f, 0x88f017ad, 0x20c966ac, 0xce7db43a, + 0xdf63184a, 0x1ae58231, 0x51976033, 0x5362457f, + 0x64b1e077, 0x6bbb84ae, 0x81fe1ca0, 0x08f9942b, + 0x48705868, 0x458f19fd, 0xde94876c, 0x7b52b7f8, + 0x73ab23d3, 0x4b72e202, 0x1fe3578f, 0x55662aab, + 0xebb20728, 0xb52f03c2, 0xc5869a7b, 0x37d3a508, + 0x2830f287, 0xbf23b2a5, 0x0302ba6a, 0x16ed5c82, + 0xcf8a2b1c, 0x79a792b4, 0x07f3f0f2, 0x694ea1e2, + 0xda65cdf4, 0x0506d5be, 0x34d11f62, 0xa6c48afe, + 0x2e349d53, 0xf3a2a055, 0x8a0532e1, 0xf6a475eb, + 0x830b39ec, 0x6040aaef, 0x715e069f, 0x6ebd5110, + 0x213ef98a, 0xdd963d06, 0x3eddae05, 0xe64d46bd, + 0x5491b58d, 0xc471055d, 0x06046fd4, 0x5060ff15, + 0x981924fb, 0xbdd697e9, 0x4089cc43, 0xd967779e, + 0xe8b0bd42, 0x8907888b, 0x19e7385b, 0xc879dbee, + 0x7ca1470a, 0x427ce90f, 0x84f8c91e, 0x00000000, + 0x80098386, 0x2b3248ed, 0x111eac70, 0x5a6c4e72, + 0x0efdfbff, 
0x850f5638, 0xae3d1ed5, 0x2d362739, + 0x0f0a64d9, 0x5c6821a6, 0x5b9bd154, 0x36243a2e, + 0x0a0cb167, 0x57930fe7, 0xeeb4d296, 0x9b1b9e91, + 0xc0804fc5, 0xdc61a220, 0x775a694b, 0x121c161a, + 0x93e20aba, 0xa0c0e52a, 0x223c43e0, 0x1b121d17, + 0x090e0b0d, 0x8bf2adc7, 0xb62db9a8, 0x1e14c8a9, + 0xf1578519, 0x75af4c07, 0x99eebbdd, 0x7fa3fd60, + 0x01f79f26, 0x725cbcf5, 0x6644c53b, 0xfb5b347e, + 0x438b7629, 0x23cbdcc6, 0xedb668fc, 0xe4b863f1, + 0x31d7cadc, 0x63421085, 0x97134022, 0xc6842011, + 0x4a857d24, 0xbbd2f83d, 0xf9ae1132, 0x29c76da1, + 0x9e1d4b2f, 0xb2dcf330, 0x860dec52, 0xc177d0e3, + 0xb32b6c16, 0x70a999b9, 0x9411fa48, 0xe9472264, + 0xfca8c48c, 0xf0a01a3f, 0x7d56d82c, 0x3322ef90, + 0x4987c74e, 0x38d9c1d1, 0xca8cfea2, 0xd498360b, + 0xf5a6cf81, 0x7aa528de, 0xb7da268e, 0xad3fa4bf, + 0x3a2ce49d, 0x78500d92, 0x5f6a9bcc, 0x7e546246, + 0x8df6c213, 0xd890e8b8, 0x392e5ef7, 0xc382f5af, + 0x5d9fbe80, 0xd0697c93, 0xd56fa92d, 0x25cfb312, + 0xacc83b99, 0x1810a77d, 0x9ce86e63, 0x3bdb7bbb, + 0x26cd0978, 0x596ef418, 0x9aec01b7, 0x4f83a89a, + 0x95e6656e, 0xffaa7ee6, 0xbc2108cf, 0x15efe6e8, + 0xe7bad99b, 0x6f4ace36, 0x9fead409, 0xb029d67c, + 0xa431afb2, 0x3f2a3123, 0xa5c63094, 0xa235c066, + 0x4e7437bc, 0x82fca6ca, 0x90e0b0d0, 0xa73315d8, + 0x04f14a98, 0xec41f7da, 0xcd7f0e50, 0x91172ff6, + 0x4d768dd6, 0xef434db0, 0xaacc544d, 0x96e4df04, + 0xd19ee3b5, 0x6a4c1b88, 0x2cc1b81f, 0x65467f51, + 0x5e9d04ea, 0x8c015d35, 0x87fa7374, 0x0bfb2e41, + 0x67b35a1d, 0xdb9252d2, 0x10e93356, 0xd66d1347, + 0xd79a8c61, 0xa1377a0c, 0xf8598e14, 0x13eb893c, + 0xa9ceee27, 0x61b735c9, 0x1ce1ede5, 0x477a3cb1, + 0xd29c59df, 0xf2553f73, 0x141879ce, 0xc773bf37, + 0xf753eacd, 0xfd5f5baa, 0x3ddf146f, 0x447886db, + 0xafca81f3, 0x68b93ec4, 0x24382c34, 0xa3c25f40, + 0x1d1672c3, 0xe2bc0c25, 0x3c288b49, 0x0dff4195, + 0xa8397101, 0x0c08deb3, 0xb4d89ce4, 0x566490c1, + 0xcb7b6184, 0x32d570b6, 0x6c48745c, 0xb8d04257, + }, { + 0x5150a7f4, 0x7e536541, 0x1ac3a417, 0x3a965e27, + 0x3bcb6bab, 0x1ff1459d, 0xacab58fa, 0x4b9303e3, + 0x2055fa30, 0xadf66d76, 0x889176cc, 0xf5254c02, + 0x4ffcd7e5, 0xc5d7cb2a, 0x26804435, 0xb58fa362, + 0xde495ab1, 0x25671bba, 0x45980eea, 0x5de1c0fe, + 0xc302752f, 0x8112f04c, 0x8da39746, 0x6bc6f9d3, + 0x03e75f8f, 0x15959c92, 0xbfeb7a6d, 0x95da5952, + 0xd42d83be, 0x58d32174, 0x492969e0, 0x8e44c8c9, + 0x756a89c2, 0xf478798e, 0x996b3e58, 0x27dd71b9, + 0xbeb64fe1, 0xf017ad88, 0xc966ac20, 0x7db43ace, + 0x63184adf, 0xe582311a, 0x97603351, 0x62457f53, + 0xb1e07764, 0xbb84ae6b, 0xfe1ca081, 0xf9942b08, + 0x70586848, 0x8f19fd45, 0x94876cde, 0x52b7f87b, + 0xab23d373, 0x72e2024b, 0xe3578f1f, 0x662aab55, + 0xb20728eb, 0x2f03c2b5, 0x869a7bc5, 0xd3a50837, + 0x30f28728, 0x23b2a5bf, 0x02ba6a03, 0xed5c8216, + 0x8a2b1ccf, 0xa792b479, 0xf3f0f207, 0x4ea1e269, + 0x65cdf4da, 0x06d5be05, 0xd11f6234, 0xc48afea6, + 0x349d532e, 0xa2a055f3, 0x0532e18a, 0xa475ebf6, + 0x0b39ec83, 0x40aaef60, 0x5e069f71, 0xbd51106e, + 0x3ef98a21, 0x963d06dd, 0xddae053e, 0x4d46bde6, + 0x91b58d54, 0x71055dc4, 0x046fd406, 0x60ff1550, + 0x1924fb98, 0xd697e9bd, 0x89cc4340, 0x67779ed9, + 0xb0bd42e8, 0x07888b89, 0xe7385b19, 0x79dbeec8, + 0xa1470a7c, 0x7ce90f42, 0xf8c91e84, 0x00000000, + 0x09838680, 0x3248ed2b, 0x1eac7011, 0x6c4e725a, + 0xfdfbff0e, 0x0f563885, 0x3d1ed5ae, 0x3627392d, + 0x0a64d90f, 0x6821a65c, 0x9bd1545b, 0x243a2e36, + 0x0cb1670a, 0x930fe757, 0xb4d296ee, 0x1b9e919b, + 0x804fc5c0, 0x61a220dc, 0x5a694b77, 0x1c161a12, + 0xe20aba93, 0xc0e52aa0, 0x3c43e022, 0x121d171b, + 0x0e0b0d09, 0xf2adc78b, 0x2db9a8b6, 0x14c8a91e, + 0x578519f1, 0xaf4c0775, 0xeebbdd99, 0xa3fd607f, + 
0xf79f2601, 0x5cbcf572, 0x44c53b66, 0x5b347efb, + 0x8b762943, 0xcbdcc623, 0xb668fced, 0xb863f1e4, + 0xd7cadc31, 0x42108563, 0x13402297, 0x842011c6, + 0x857d244a, 0xd2f83dbb, 0xae1132f9, 0xc76da129, + 0x1d4b2f9e, 0xdcf330b2, 0x0dec5286, 0x77d0e3c1, + 0x2b6c16b3, 0xa999b970, 0x11fa4894, 0x472264e9, + 0xa8c48cfc, 0xa01a3ff0, 0x56d82c7d, 0x22ef9033, + 0x87c74e49, 0xd9c1d138, 0x8cfea2ca, 0x98360bd4, + 0xa6cf81f5, 0xa528de7a, 0xda268eb7, 0x3fa4bfad, + 0x2ce49d3a, 0x500d9278, 0x6a9bcc5f, 0x5462467e, + 0xf6c2138d, 0x90e8b8d8, 0x2e5ef739, 0x82f5afc3, + 0x9fbe805d, 0x697c93d0, 0x6fa92dd5, 0xcfb31225, + 0xc83b99ac, 0x10a77d18, 0xe86e639c, 0xdb7bbb3b, + 0xcd097826, 0x6ef41859, 0xec01b79a, 0x83a89a4f, + 0xe6656e95, 0xaa7ee6ff, 0x2108cfbc, 0xefe6e815, + 0xbad99be7, 0x4ace366f, 0xead4099f, 0x29d67cb0, + 0x31afb2a4, 0x2a31233f, 0xc63094a5, 0x35c066a2, + 0x7437bc4e, 0xfca6ca82, 0xe0b0d090, 0x3315d8a7, + 0xf14a9804, 0x41f7daec, 0x7f0e50cd, 0x172ff691, + 0x768dd64d, 0x434db0ef, 0xcc544daa, 0xe4df0496, + 0x9ee3b5d1, 0x4c1b886a, 0xc1b81f2c, 0x467f5165, + 0x9d04ea5e, 0x015d358c, 0xfa737487, 0xfb2e410b, + 0xb35a1d67, 0x9252d2db, 0xe9335610, 0x6d1347d6, + 0x9a8c61d7, 0x377a0ca1, 0x598e14f8, 0xeb893c13, + 0xceee27a9, 0xb735c961, 0xe1ede51c, 0x7a3cb147, + 0x9c59dfd2, 0x553f73f2, 0x1879ce14, 0x73bf37c7, + 0x53eacdf7, 0x5f5baafd, 0xdf146f3d, 0x7886db44, + 0xca81f3af, 0xb93ec468, 0x382c3424, 0xc25f40a3, + 0x1672c31d, 0xbc0c25e2, 0x288b493c, 0xff41950d, + 0x397101a8, 0x08deb30c, 0xd89ce4b4, 0x6490c156, + 0x7b6184cb, 0xd570b632, 0x48745c6c, 0xd04257b8, + } +}; + +const u32 crypto_il_tab[4][256] = { + { + 0x00000052, 0x00000009, 0x0000006a, 0x000000d5, + 0x00000030, 0x00000036, 0x000000a5, 0x00000038, + 0x000000bf, 0x00000040, 0x000000a3, 0x0000009e, + 0x00000081, 0x000000f3, 0x000000d7, 0x000000fb, + 0x0000007c, 0x000000e3, 0x00000039, 0x00000082, + 0x0000009b, 0x0000002f, 0x000000ff, 0x00000087, + 0x00000034, 0x0000008e, 0x00000043, 0x00000044, + 0x000000c4, 0x000000de, 0x000000e9, 0x000000cb, + 0x00000054, 0x0000007b, 0x00000094, 0x00000032, + 0x000000a6, 0x000000c2, 0x00000023, 0x0000003d, + 0x000000ee, 0x0000004c, 0x00000095, 0x0000000b, + 0x00000042, 0x000000fa, 0x000000c3, 0x0000004e, + 0x00000008, 0x0000002e, 0x000000a1, 0x00000066, + 0x00000028, 0x000000d9, 0x00000024, 0x000000b2, + 0x00000076, 0x0000005b, 0x000000a2, 0x00000049, + 0x0000006d, 0x0000008b, 0x000000d1, 0x00000025, + 0x00000072, 0x000000f8, 0x000000f6, 0x00000064, + 0x00000086, 0x00000068, 0x00000098, 0x00000016, + 0x000000d4, 0x000000a4, 0x0000005c, 0x000000cc, + 0x0000005d, 0x00000065, 0x000000b6, 0x00000092, + 0x0000006c, 0x00000070, 0x00000048, 0x00000050, + 0x000000fd, 0x000000ed, 0x000000b9, 0x000000da, + 0x0000005e, 0x00000015, 0x00000046, 0x00000057, + 0x000000a7, 0x0000008d, 0x0000009d, 0x00000084, + 0x00000090, 0x000000d8, 0x000000ab, 0x00000000, + 0x0000008c, 0x000000bc, 0x000000d3, 0x0000000a, + 0x000000f7, 0x000000e4, 0x00000058, 0x00000005, + 0x000000b8, 0x000000b3, 0x00000045, 0x00000006, + 0x000000d0, 0x0000002c, 0x0000001e, 0x0000008f, + 0x000000ca, 0x0000003f, 0x0000000f, 0x00000002, + 0x000000c1, 0x000000af, 0x000000bd, 0x00000003, + 0x00000001, 0x00000013, 0x0000008a, 0x0000006b, + 0x0000003a, 0x00000091, 0x00000011, 0x00000041, + 0x0000004f, 0x00000067, 0x000000dc, 0x000000ea, + 0x00000097, 0x000000f2, 0x000000cf, 0x000000ce, + 0x000000f0, 0x000000b4, 0x000000e6, 0x00000073, + 0x00000096, 0x000000ac, 0x00000074, 0x00000022, + 0x000000e7, 0x000000ad, 0x00000035, 0x00000085, + 0x000000e2, 0x000000f9, 0x00000037, 0x000000e8, + 
0x0000001c, 0x00000075, 0x000000df, 0x0000006e, + 0x00000047, 0x000000f1, 0x0000001a, 0x00000071, + 0x0000001d, 0x00000029, 0x000000c5, 0x00000089, + 0x0000006f, 0x000000b7, 0x00000062, 0x0000000e, + 0x000000aa, 0x00000018, 0x000000be, 0x0000001b, + 0x000000fc, 0x00000056, 0x0000003e, 0x0000004b, + 0x000000c6, 0x000000d2, 0x00000079, 0x00000020, + 0x0000009a, 0x000000db, 0x000000c0, 0x000000fe, + 0x00000078, 0x000000cd, 0x0000005a, 0x000000f4, + 0x0000001f, 0x000000dd, 0x000000a8, 0x00000033, + 0x00000088, 0x00000007, 0x000000c7, 0x00000031, + 0x000000b1, 0x00000012, 0x00000010, 0x00000059, + 0x00000027, 0x00000080, 0x000000ec, 0x0000005f, + 0x00000060, 0x00000051, 0x0000007f, 0x000000a9, + 0x00000019, 0x000000b5, 0x0000004a, 0x0000000d, + 0x0000002d, 0x000000e5, 0x0000007a, 0x0000009f, + 0x00000093, 0x000000c9, 0x0000009c, 0x000000ef, + 0x000000a0, 0x000000e0, 0x0000003b, 0x0000004d, + 0x000000ae, 0x0000002a, 0x000000f5, 0x000000b0, + 0x000000c8, 0x000000eb, 0x000000bb, 0x0000003c, + 0x00000083, 0x00000053, 0x00000099, 0x00000061, + 0x00000017, 0x0000002b, 0x00000004, 0x0000007e, + 0x000000ba, 0x00000077, 0x000000d6, 0x00000026, + 0x000000e1, 0x00000069, 0x00000014, 0x00000063, + 0x00000055, 0x00000021, 0x0000000c, 0x0000007d, + }, { + 0x00005200, 0x00000900, 0x00006a00, 0x0000d500, + 0x00003000, 0x00003600, 0x0000a500, 0x00003800, + 0x0000bf00, 0x00004000, 0x0000a300, 0x00009e00, + 0x00008100, 0x0000f300, 0x0000d700, 0x0000fb00, + 0x00007c00, 0x0000e300, 0x00003900, 0x00008200, + 0x00009b00, 0x00002f00, 0x0000ff00, 0x00008700, + 0x00003400, 0x00008e00, 0x00004300, 0x00004400, + 0x0000c400, 0x0000de00, 0x0000e900, 0x0000cb00, + 0x00005400, 0x00007b00, 0x00009400, 0x00003200, + 0x0000a600, 0x0000c200, 0x00002300, 0x00003d00, + 0x0000ee00, 0x00004c00, 0x00009500, 0x00000b00, + 0x00004200, 0x0000fa00, 0x0000c300, 0x00004e00, + 0x00000800, 0x00002e00, 0x0000a100, 0x00006600, + 0x00002800, 0x0000d900, 0x00002400, 0x0000b200, + 0x00007600, 0x00005b00, 0x0000a200, 0x00004900, + 0x00006d00, 0x00008b00, 0x0000d100, 0x00002500, + 0x00007200, 0x0000f800, 0x0000f600, 0x00006400, + 0x00008600, 0x00006800, 0x00009800, 0x00001600, + 0x0000d400, 0x0000a400, 0x00005c00, 0x0000cc00, + 0x00005d00, 0x00006500, 0x0000b600, 0x00009200, + 0x00006c00, 0x00007000, 0x00004800, 0x00005000, + 0x0000fd00, 0x0000ed00, 0x0000b900, 0x0000da00, + 0x00005e00, 0x00001500, 0x00004600, 0x00005700, + 0x0000a700, 0x00008d00, 0x00009d00, 0x00008400, + 0x00009000, 0x0000d800, 0x0000ab00, 0x00000000, + 0x00008c00, 0x0000bc00, 0x0000d300, 0x00000a00, + 0x0000f700, 0x0000e400, 0x00005800, 0x00000500, + 0x0000b800, 0x0000b300, 0x00004500, 0x00000600, + 0x0000d000, 0x00002c00, 0x00001e00, 0x00008f00, + 0x0000ca00, 0x00003f00, 0x00000f00, 0x00000200, + 0x0000c100, 0x0000af00, 0x0000bd00, 0x00000300, + 0x00000100, 0x00001300, 0x00008a00, 0x00006b00, + 0x00003a00, 0x00009100, 0x00001100, 0x00004100, + 0x00004f00, 0x00006700, 0x0000dc00, 0x0000ea00, + 0x00009700, 0x0000f200, 0x0000cf00, 0x0000ce00, + 0x0000f000, 0x0000b400, 0x0000e600, 0x00007300, + 0x00009600, 0x0000ac00, 0x00007400, 0x00002200, + 0x0000e700, 0x0000ad00, 0x00003500, 0x00008500, + 0x0000e200, 0x0000f900, 0x00003700, 0x0000e800, + 0x00001c00, 0x00007500, 0x0000df00, 0x00006e00, + 0x00004700, 0x0000f100, 0x00001a00, 0x00007100, + 0x00001d00, 0x00002900, 0x0000c500, 0x00008900, + 0x00006f00, 0x0000b700, 0x00006200, 0x00000e00, + 0x0000aa00, 0x00001800, 0x0000be00, 0x00001b00, + 0x0000fc00, 0x00005600, 0x00003e00, 0x00004b00, + 0x0000c600, 0x0000d200, 0x00007900, 0x00002000, 
+ 0x00009a00, 0x0000db00, 0x0000c000, 0x0000fe00, + 0x00007800, 0x0000cd00, 0x00005a00, 0x0000f400, + 0x00001f00, 0x0000dd00, 0x0000a800, 0x00003300, + 0x00008800, 0x00000700, 0x0000c700, 0x00003100, + 0x0000b100, 0x00001200, 0x00001000, 0x00005900, + 0x00002700, 0x00008000, 0x0000ec00, 0x00005f00, + 0x00006000, 0x00005100, 0x00007f00, 0x0000a900, + 0x00001900, 0x0000b500, 0x00004a00, 0x00000d00, + 0x00002d00, 0x0000e500, 0x00007a00, 0x00009f00, + 0x00009300, 0x0000c900, 0x00009c00, 0x0000ef00, + 0x0000a000, 0x0000e000, 0x00003b00, 0x00004d00, + 0x0000ae00, 0x00002a00, 0x0000f500, 0x0000b000, + 0x0000c800, 0x0000eb00, 0x0000bb00, 0x00003c00, + 0x00008300, 0x00005300, 0x00009900, 0x00006100, + 0x00001700, 0x00002b00, 0x00000400, 0x00007e00, + 0x0000ba00, 0x00007700, 0x0000d600, 0x00002600, + 0x0000e100, 0x00006900, 0x00001400, 0x00006300, + 0x00005500, 0x00002100, 0x00000c00, 0x00007d00, + }, { + 0x00520000, 0x00090000, 0x006a0000, 0x00d50000, + 0x00300000, 0x00360000, 0x00a50000, 0x00380000, + 0x00bf0000, 0x00400000, 0x00a30000, 0x009e0000, + 0x00810000, 0x00f30000, 0x00d70000, 0x00fb0000, + 0x007c0000, 0x00e30000, 0x00390000, 0x00820000, + 0x009b0000, 0x002f0000, 0x00ff0000, 0x00870000, + 0x00340000, 0x008e0000, 0x00430000, 0x00440000, + 0x00c40000, 0x00de0000, 0x00e90000, 0x00cb0000, + 0x00540000, 0x007b0000, 0x00940000, 0x00320000, + 0x00a60000, 0x00c20000, 0x00230000, 0x003d0000, + 0x00ee0000, 0x004c0000, 0x00950000, 0x000b0000, + 0x00420000, 0x00fa0000, 0x00c30000, 0x004e0000, + 0x00080000, 0x002e0000, 0x00a10000, 0x00660000, + 0x00280000, 0x00d90000, 0x00240000, 0x00b20000, + 0x00760000, 0x005b0000, 0x00a20000, 0x00490000, + 0x006d0000, 0x008b0000, 0x00d10000, 0x00250000, + 0x00720000, 0x00f80000, 0x00f60000, 0x00640000, + 0x00860000, 0x00680000, 0x00980000, 0x00160000, + 0x00d40000, 0x00a40000, 0x005c0000, 0x00cc0000, + 0x005d0000, 0x00650000, 0x00b60000, 0x00920000, + 0x006c0000, 0x00700000, 0x00480000, 0x00500000, + 0x00fd0000, 0x00ed0000, 0x00b90000, 0x00da0000, + 0x005e0000, 0x00150000, 0x00460000, 0x00570000, + 0x00a70000, 0x008d0000, 0x009d0000, 0x00840000, + 0x00900000, 0x00d80000, 0x00ab0000, 0x00000000, + 0x008c0000, 0x00bc0000, 0x00d30000, 0x000a0000, + 0x00f70000, 0x00e40000, 0x00580000, 0x00050000, + 0x00b80000, 0x00b30000, 0x00450000, 0x00060000, + 0x00d00000, 0x002c0000, 0x001e0000, 0x008f0000, + 0x00ca0000, 0x003f0000, 0x000f0000, 0x00020000, + 0x00c10000, 0x00af0000, 0x00bd0000, 0x00030000, + 0x00010000, 0x00130000, 0x008a0000, 0x006b0000, + 0x003a0000, 0x00910000, 0x00110000, 0x00410000, + 0x004f0000, 0x00670000, 0x00dc0000, 0x00ea0000, + 0x00970000, 0x00f20000, 0x00cf0000, 0x00ce0000, + 0x00f00000, 0x00b40000, 0x00e60000, 0x00730000, + 0x00960000, 0x00ac0000, 0x00740000, 0x00220000, + 0x00e70000, 0x00ad0000, 0x00350000, 0x00850000, + 0x00e20000, 0x00f90000, 0x00370000, 0x00e80000, + 0x001c0000, 0x00750000, 0x00df0000, 0x006e0000, + 0x00470000, 0x00f10000, 0x001a0000, 0x00710000, + 0x001d0000, 0x00290000, 0x00c50000, 0x00890000, + 0x006f0000, 0x00b70000, 0x00620000, 0x000e0000, + 0x00aa0000, 0x00180000, 0x00be0000, 0x001b0000, + 0x00fc0000, 0x00560000, 0x003e0000, 0x004b0000, + 0x00c60000, 0x00d20000, 0x00790000, 0x00200000, + 0x009a0000, 0x00db0000, 0x00c00000, 0x00fe0000, + 0x00780000, 0x00cd0000, 0x005a0000, 0x00f40000, + 0x001f0000, 0x00dd0000, 0x00a80000, 0x00330000, + 0x00880000, 0x00070000, 0x00c70000, 0x00310000, + 0x00b10000, 0x00120000, 0x00100000, 0x00590000, + 0x00270000, 0x00800000, 0x00ec0000, 0x005f0000, + 0x00600000, 0x00510000, 0x007f0000, 
0x00a90000, + 0x00190000, 0x00b50000, 0x004a0000, 0x000d0000, + 0x002d0000, 0x00e50000, 0x007a0000, 0x009f0000, + 0x00930000, 0x00c90000, 0x009c0000, 0x00ef0000, + 0x00a00000, 0x00e00000, 0x003b0000, 0x004d0000, + 0x00ae0000, 0x002a0000, 0x00f50000, 0x00b00000, + 0x00c80000, 0x00eb0000, 0x00bb0000, 0x003c0000, + 0x00830000, 0x00530000, 0x00990000, 0x00610000, + 0x00170000, 0x002b0000, 0x00040000, 0x007e0000, + 0x00ba0000, 0x00770000, 0x00d60000, 0x00260000, + 0x00e10000, 0x00690000, 0x00140000, 0x00630000, + 0x00550000, 0x00210000, 0x000c0000, 0x007d0000, + }, { + 0x52000000, 0x09000000, 0x6a000000, 0xd5000000, + 0x30000000, 0x36000000, 0xa5000000, 0x38000000, + 0xbf000000, 0x40000000, 0xa3000000, 0x9e000000, + 0x81000000, 0xf3000000, 0xd7000000, 0xfb000000, + 0x7c000000, 0xe3000000, 0x39000000, 0x82000000, + 0x9b000000, 0x2f000000, 0xff000000, 0x87000000, + 0x34000000, 0x8e000000, 0x43000000, 0x44000000, + 0xc4000000, 0xde000000, 0xe9000000, 0xcb000000, + 0x54000000, 0x7b000000, 0x94000000, 0x32000000, + 0xa6000000, 0xc2000000, 0x23000000, 0x3d000000, + 0xee000000, 0x4c000000, 0x95000000, 0x0b000000, + 0x42000000, 0xfa000000, 0xc3000000, 0x4e000000, + 0x08000000, 0x2e000000, 0xa1000000, 0x66000000, + 0x28000000, 0xd9000000, 0x24000000, 0xb2000000, + 0x76000000, 0x5b000000, 0xa2000000, 0x49000000, + 0x6d000000, 0x8b000000, 0xd1000000, 0x25000000, + 0x72000000, 0xf8000000, 0xf6000000, 0x64000000, + 0x86000000, 0x68000000, 0x98000000, 0x16000000, + 0xd4000000, 0xa4000000, 0x5c000000, 0xcc000000, + 0x5d000000, 0x65000000, 0xb6000000, 0x92000000, + 0x6c000000, 0x70000000, 0x48000000, 0x50000000, + 0xfd000000, 0xed000000, 0xb9000000, 0xda000000, + 0x5e000000, 0x15000000, 0x46000000, 0x57000000, + 0xa7000000, 0x8d000000, 0x9d000000, 0x84000000, + 0x90000000, 0xd8000000, 0xab000000, 0x00000000, + 0x8c000000, 0xbc000000, 0xd3000000, 0x0a000000, + 0xf7000000, 0xe4000000, 0x58000000, 0x05000000, + 0xb8000000, 0xb3000000, 0x45000000, 0x06000000, + 0xd0000000, 0x2c000000, 0x1e000000, 0x8f000000, + 0xca000000, 0x3f000000, 0x0f000000, 0x02000000, + 0xc1000000, 0xaf000000, 0xbd000000, 0x03000000, + 0x01000000, 0x13000000, 0x8a000000, 0x6b000000, + 0x3a000000, 0x91000000, 0x11000000, 0x41000000, + 0x4f000000, 0x67000000, 0xdc000000, 0xea000000, + 0x97000000, 0xf2000000, 0xcf000000, 0xce000000, + 0xf0000000, 0xb4000000, 0xe6000000, 0x73000000, + 0x96000000, 0xac000000, 0x74000000, 0x22000000, + 0xe7000000, 0xad000000, 0x35000000, 0x85000000, + 0xe2000000, 0xf9000000, 0x37000000, 0xe8000000, + 0x1c000000, 0x75000000, 0xdf000000, 0x6e000000, + 0x47000000, 0xf1000000, 0x1a000000, 0x71000000, + 0x1d000000, 0x29000000, 0xc5000000, 0x89000000, + 0x6f000000, 0xb7000000, 0x62000000, 0x0e000000, + 0xaa000000, 0x18000000, 0xbe000000, 0x1b000000, + 0xfc000000, 0x56000000, 0x3e000000, 0x4b000000, + 0xc6000000, 0xd2000000, 0x79000000, 0x20000000, + 0x9a000000, 0xdb000000, 0xc0000000, 0xfe000000, + 0x78000000, 0xcd000000, 0x5a000000, 0xf4000000, + 0x1f000000, 0xdd000000, 0xa8000000, 0x33000000, + 0x88000000, 0x07000000, 0xc7000000, 0x31000000, + 0xb1000000, 0x12000000, 0x10000000, 0x59000000, + 0x27000000, 0x80000000, 0xec000000, 0x5f000000, + 0x60000000, 0x51000000, 0x7f000000, 0xa9000000, + 0x19000000, 0xb5000000, 0x4a000000, 0x0d000000, + 0x2d000000, 0xe5000000, 0x7a000000, 0x9f000000, + 0x93000000, 0xc9000000, 0x9c000000, 0xef000000, + 0xa0000000, 0xe0000000, 0x3b000000, 0x4d000000, + 0xae000000, 0x2a000000, 0xf5000000, 0xb0000000, + 0xc8000000, 0xeb000000, 0xbb000000, 0x3c000000, + 0x83000000, 0x53000000, 
0x99000000, 0x61000000, + 0x17000000, 0x2b000000, 0x04000000, 0x7e000000, + 0xba000000, 0x77000000, 0xd6000000, 0x26000000, + 0xe1000000, 0x69000000, 0x14000000, 0x63000000, + 0x55000000, 0x21000000, 0x0c000000, 0x7d000000, + } +}; + +EXPORT_SYMBOL_GPL(crypto_ft_tab); +EXPORT_SYMBOL_GPL(crypto_fl_tab); +EXPORT_SYMBOL_GPL(crypto_it_tab); +EXPORT_SYMBOL_GPL(crypto_il_tab); + +/* initialise the key schedule from the user supplied key */ + +#define star_x(x) (((x) & 0x7f7f7f7f) << 1) ^ ((((x) & 0x80808080) >> 7) * 0x1b) + +#define imix_col(y, x) do { \ + u = star_x(x); \ + v = star_x(u); \ + w = star_x(v); \ + t = w ^ (x); \ + (y) = u ^ v ^ w; \ + (y) ^= ror32(u ^ t, 8) ^ \ + ror32(v ^ t, 16) ^ \ + ror32(t, 24); \ +} while (0) + +#define ls_box(x) \ + crypto_fl_tab[0][byte(x, 0)] ^ \ + crypto_fl_tab[1][byte(x, 1)] ^ \ + crypto_fl_tab[2][byte(x, 2)] ^ \ + crypto_fl_tab[3][byte(x, 3)] + +#define loop4(i) do { \ + t = ror32(t, 8); \ + t = ls_box(t) ^ rco_tab[i]; \ + t ^= ctx->key_enc[4 * i]; \ + ctx->key_enc[4 * i + 4] = t; \ + t ^= ctx->key_enc[4 * i + 1]; \ + ctx->key_enc[4 * i + 5] = t; \ + t ^= ctx->key_enc[4 * i + 2]; \ + ctx->key_enc[4 * i + 6] = t; \ + t ^= ctx->key_enc[4 * i + 3]; \ + ctx->key_enc[4 * i + 7] = t; \ +} while (0) + +#define loop6(i) do { \ + t = ror32(t, 8); \ + t = ls_box(t) ^ rco_tab[i]; \ + t ^= ctx->key_enc[6 * i]; \ + ctx->key_enc[6 * i + 6] = t; \ + t ^= ctx->key_enc[6 * i + 1]; \ + ctx->key_enc[6 * i + 7] = t; \ + t ^= ctx->key_enc[6 * i + 2]; \ + ctx->key_enc[6 * i + 8] = t; \ + t ^= ctx->key_enc[6 * i + 3]; \ + ctx->key_enc[6 * i + 9] = t; \ + t ^= ctx->key_enc[6 * i + 4]; \ + ctx->key_enc[6 * i + 10] = t; \ + t ^= ctx->key_enc[6 * i + 5]; \ + ctx->key_enc[6 * i + 11] = t; \ +} while (0) + +#define loop8tophalf(i) do { \ + t = ror32(t, 8); \ + t = ls_box(t) ^ rco_tab[i]; \ + t ^= ctx->key_enc[8 * i]; \ + ctx->key_enc[8 * i + 8] = t; \ + t ^= ctx->key_enc[8 * i + 1]; \ + ctx->key_enc[8 * i + 9] = t; \ + t ^= ctx->key_enc[8 * i + 2]; \ + ctx->key_enc[8 * i + 10] = t; \ + t ^= ctx->key_enc[8 * i + 3]; \ + ctx->key_enc[8 * i + 11] = t; \ +} while (0) + +#define loop8(i) do { \ + loop8tophalf(i); \ + t = ctx->key_enc[8 * i + 4] ^ ls_box(t); \ + ctx->key_enc[8 * i + 12] = t; \ + t ^= ctx->key_enc[8 * i + 5]; \ + ctx->key_enc[8 * i + 13] = t; \ + t ^= ctx->key_enc[8 * i + 6]; \ + ctx->key_enc[8 * i + 14] = t; \ + t ^= ctx->key_enc[8 * i + 7]; \ + ctx->key_enc[8 * i + 15] = t; \ +} while (0) + +/** + * crypto_aes_expand_key - Expands the AES key as described in FIPS-197 + * @ctx: The location where the computed key will be stored. + * @in_key: The supplied key. + * @key_len: The length of the supplied key. + * + * Returns 0 on success. The function fails only if an invalid key size (or + * pointer) is supplied. + * The expanded key size is 240 bytes (max of 14 rounds with a unique 16 bytes + * key schedule plus a 16 bytes key which is used before the first round). + * The decryption key is prepared for the "Equivalent Inverse Cipher" as + * described in FIPS-197. The first slot (16 bytes) of each key (enc or dec) is + * for the initial combination, the second slot for the first round and so on. 
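+ *
+ * Example (editor's illustrative sketch, not part of the original patch;
+ * the variable names are placeholders): expanding a 128-bit key into a
+ * context held on the stack, using the function documented here.
+ *
+ *	struct crypto_aes_ctx ctx;
+ *	u8 user_key[AES_KEYSIZE_128];	<- filled in by the caller
+ *	int err;
+ *
+ *	err = crypto_aes_expand_key(&ctx, user_key, sizeof(user_key));
+ *	if (err)
+ *		pr_err("AES key expansion rejected the key length\n");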
+ */ +int crypto_aes_expand_key(struct crypto_aes_ctx *ctx, const u8 *in_key, + unsigned int key_len) +{ + const __le32 *key = (const __le32 *)in_key; + u32 i, t, u, v, w, j; + + if (key_len != AES_KEYSIZE_128 && key_len != AES_KEYSIZE_192 && + key_len != AES_KEYSIZE_256) + return -EINVAL; + + ctx->key_length = key_len; + + ctx->key_dec[key_len + 24] = ctx->key_enc[0] = le32_to_cpu(key[0]); + ctx->key_dec[key_len + 25] = ctx->key_enc[1] = le32_to_cpu(key[1]); + ctx->key_dec[key_len + 26] = ctx->key_enc[2] = le32_to_cpu(key[2]); + ctx->key_dec[key_len + 27] = ctx->key_enc[3] = le32_to_cpu(key[3]); + + switch (key_len) { + case AES_KEYSIZE_128: + t = ctx->key_enc[3]; + for (i = 0; i < 10; ++i) + loop4(i); + break; + + case AES_KEYSIZE_192: + ctx->key_enc[4] = le32_to_cpu(key[4]); + t = ctx->key_enc[5] = le32_to_cpu(key[5]); + for (i = 0; i < 8; ++i) + loop6(i); + break; + + case AES_KEYSIZE_256: + ctx->key_enc[4] = le32_to_cpu(key[4]); + ctx->key_enc[5] = le32_to_cpu(key[5]); + ctx->key_enc[6] = le32_to_cpu(key[6]); + t = ctx->key_enc[7] = le32_to_cpu(key[7]); + for (i = 0; i < 6; ++i) + loop8(i); + loop8tophalf(i); + break; + } + + ctx->key_dec[0] = ctx->key_enc[key_len + 24]; + ctx->key_dec[1] = ctx->key_enc[key_len + 25]; + ctx->key_dec[2] = ctx->key_enc[key_len + 26]; + ctx->key_dec[3] = ctx->key_enc[key_len + 27]; + + for (i = 4; i < key_len + 24; ++i) { + j = key_len + 24 - (i & ~3) + (i & 3); + imix_col(ctx->key_dec[j], ctx->key_enc[i]); + } + return 0; +} +EXPORT_SYMBOL_GPL(crypto_aes_expand_key); + +/** + * crypto_aes_set_key - Set the AES key. + * @tfm: The %crypto_tfm that is used in the context. + * @in_key: The input key. + * @key_len: The size of the key. + * + * Returns 0 on success, on failure the %CRYPTO_TFM_RES_BAD_KEY_LEN flag in tfm + * is set. The function uses crypto_aes_expand_key() to expand the key. + * &crypto_aes_ctx _must_ be the private data embedded in @tfm which is + * retrieved with crypto_tfm_ctx(). 
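+ *
+ * Example (editor's illustrative sketch, not part of the original patch):
+ * callers normally reach this routine through the generic cipher API, since
+ * it is wired up as .cia_setkey of aes_alg below; tfm and key are
+ * placeholder names.
+ *
+ *	struct crypto_cipher *tfm = crypto_alloc_cipher("aes", 0, 0);
+ *	u8 key[AES_KEYSIZE_256];	<- filled in by the caller
+ *
+ *	if (!IS_ERR(tfm) && crypto_cipher_setkey(tfm, key, sizeof(key)))
+ *		pr_err("aes: setkey failed\n");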
+ */ +int crypto_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key, + unsigned int key_len) +{ + struct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm); + u32 *flags = &tfm->crt_flags; + int ret; + + ret = crypto_aes_expand_key(ctx, in_key, key_len); + if (!ret) + return 0; + + *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN; + return -EINVAL; +} +EXPORT_SYMBOL_GPL(crypto_aes_set_key); + +/* encrypt a block of text */ + +#define f_rn(bo, bi, n, k) do { \ + bo[n] = crypto_ft_tab[0][byte(bi[n], 0)] ^ \ + crypto_ft_tab[1][byte(bi[(n + 1) & 3], 1)] ^ \ + crypto_ft_tab[2][byte(bi[(n + 2) & 3], 2)] ^ \ + crypto_ft_tab[3][byte(bi[(n + 3) & 3], 3)] ^ *(k + n); \ +} while (0) + +#define f_nround(bo, bi, k) do {\ + f_rn(bo, bi, 0, k); \ + f_rn(bo, bi, 1, k); \ + f_rn(bo, bi, 2, k); \ + f_rn(bo, bi, 3, k); \ + k += 4; \ +} while (0) + +#define f_rl(bo, bi, n, k) do { \ + bo[n] = crypto_fl_tab[0][byte(bi[n], 0)] ^ \ + crypto_fl_tab[1][byte(bi[(n + 1) & 3], 1)] ^ \ + crypto_fl_tab[2][byte(bi[(n + 2) & 3], 2)] ^ \ + crypto_fl_tab[3][byte(bi[(n + 3) & 3], 3)] ^ *(k + n); \ +} while (0) + +#define f_lround(bo, bi, k) do {\ + f_rl(bo, bi, 0, k); \ + f_rl(bo, bi, 1, k); \ + f_rl(bo, bi, 2, k); \ + f_rl(bo, bi, 3, k); \ +} while (0) + +static void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in) +{ + const struct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm); + const __le32 *src = (const __le32 *)in; + __le32 *dst = (__le32 *)out; + u32 b0[4], b1[4]; + const u32 *kp = ctx->key_enc + 4; + const int key_len = ctx->key_length; + + b0[0] = le32_to_cpu(src[0]) ^ ctx->key_enc[0]; + b0[1] = le32_to_cpu(src[1]) ^ ctx->key_enc[1]; + b0[2] = le32_to_cpu(src[2]) ^ ctx->key_enc[2]; + b0[3] = le32_to_cpu(src[3]) ^ ctx->key_enc[3]; + + if (key_len > 24) { + f_nround(b1, b0, kp); + f_nround(b0, b1, kp); + } + + if (key_len > 16) { + f_nround(b1, b0, kp); + f_nround(b0, b1, kp); + } + + f_nround(b1, b0, kp); + f_nround(b0, b1, kp); + f_nround(b1, b0, kp); + f_nround(b0, b1, kp); + f_nround(b1, b0, kp); + f_nround(b0, b1, kp); + f_nround(b1, b0, kp); + f_nround(b0, b1, kp); + f_nround(b1, b0, kp); + f_lround(b0, b1, kp); + + dst[0] = cpu_to_le32(b0[0]); + dst[1] = cpu_to_le32(b0[1]); + dst[2] = cpu_to_le32(b0[2]); + dst[3] = cpu_to_le32(b0[3]); +} + +/* decrypt a block of text */ + +#define i_rn(bo, bi, n, k) do { \ + bo[n] = crypto_it_tab[0][byte(bi[n], 0)] ^ \ + crypto_it_tab[1][byte(bi[(n + 3) & 3], 1)] ^ \ + crypto_it_tab[2][byte(bi[(n + 2) & 3], 2)] ^ \ + crypto_it_tab[3][byte(bi[(n + 1) & 3], 3)] ^ *(k + n); \ +} while (0) + +#define i_nround(bo, bi, k) do {\ + i_rn(bo, bi, 0, k); \ + i_rn(bo, bi, 1, k); \ + i_rn(bo, bi, 2, k); \ + i_rn(bo, bi, 3, k); \ + k += 4; \ +} while (0) + +#define i_rl(bo, bi, n, k) do { \ + bo[n] = crypto_il_tab[0][byte(bi[n], 0)] ^ \ + crypto_il_tab[1][byte(bi[(n + 3) & 3], 1)] ^ \ + crypto_il_tab[2][byte(bi[(n + 2) & 3], 2)] ^ \ + crypto_il_tab[3][byte(bi[(n + 1) & 3], 3)] ^ *(k + n); \ +} while (0) + +#define i_lround(bo, bi, k) do {\ + i_rl(bo, bi, 0, k); \ + i_rl(bo, bi, 1, k); \ + i_rl(bo, bi, 2, k); \ + i_rl(bo, bi, 3, k); \ +} while (0) + +static void aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in) +{ + const struct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm); + const __le32 *src = (const __le32 *)in; + __le32 *dst = (__le32 *)out; + u32 b0[4], b1[4]; + const int key_len = ctx->key_length; + const u32 *kp = ctx->key_dec + 4; + + b0[0] = le32_to_cpu(src[0]) ^ ctx->key_dec[0]; + b0[1] = le32_to_cpu(src[1]) ^ ctx->key_dec[1]; + b0[2] = le32_to_cpu(src[2]) ^ ctx->key_dec[2]; + b0[3] = 
le32_to_cpu(src[3]) ^ ctx->key_dec[3]; + + if (key_len > 24) { + i_nround(b1, b0, kp); + i_nround(b0, b1, kp); + } + + if (key_len > 16) { + i_nround(b1, b0, kp); + i_nround(b0, b1, kp); + } + + i_nround(b1, b0, kp); + i_nround(b0, b1, kp); + i_nround(b1, b0, kp); + i_nround(b0, b1, kp); + i_nround(b1, b0, kp); + i_nround(b0, b1, kp); + i_nround(b1, b0, kp); + i_nround(b0, b1, kp); + i_nround(b1, b0, kp); + i_lround(b0, b1, kp); + + dst[0] = cpu_to_le32(b0[0]); + dst[1] = cpu_to_le32(b0[1]); + dst[2] = cpu_to_le32(b0[2]); + dst[3] = cpu_to_le32(b0[3]); +} + +static struct crypto_alg aes_alg = { + .cra_name = "aes", + .cra_driver_name = "aes-generic", + .cra_priority = 100, + .cra_flags = CRYPTO_ALG_TYPE_CIPHER, + .cra_blocksize = AES_BLOCK_SIZE, + .cra_ctxsize = sizeof(struct crypto_aes_ctx), + .cra_alignmask = 3, + .cra_module = THIS_MODULE, + .cra_list = LIST_HEAD_INIT(aes_alg.cra_list), + .cra_u = { + .cipher = { + .cia_min_keysize = AES_MIN_KEY_SIZE, + .cia_max_keysize = AES_MAX_KEY_SIZE, + .cia_setkey = crypto_aes_set_key, + .cia_encrypt = aes_encrypt, + .cia_decrypt = aes_decrypt + } + } +}; + +static int __init aes_init(void) +{ + return crypto_register_alg(&aes_alg); +} + +static void __exit aes_fini(void) +{ + crypto_unregister_alg(&aes_alg); +} + +module_init(aes_init); +module_exit(aes_fini); + +MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm"); +MODULE_LICENSE("Dual BSD/GPL"); +MODULE_ALIAS("aes"); diff --git a/crypto/af_alg.c b/crypto/af_alg.c new file mode 100644 index 00000000..ac33d5f3 --- /dev/null +++ b/crypto/af_alg.c @@ -0,0 +1,483 @@ +/* + * af_alg: User-space algorithm interface + * + * This file provides the user-space API for algorithms. + * + * Copyright (c) 2010 Herbert Xu <herbert@gondor.apana.org.au> + * + * This program is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License as published by the Free + * Software Foundation; either version 2 of the License, or (at your option) + * any later version. 
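+ *
+ * Example (editor's illustrative sketch, not part of the original patch):
+ * from user space the API is driven through an AF_ALG socket; the "hash"
+ * type below assumes the algif_hash front end (a separate module) is
+ * available.
+ *
+ *	struct sockaddr_alg sa = {
+ *		.salg_family = AF_ALG,
+ *		.salg_type   = "hash",
+ *		.salg_name   = "sha1",
+ *	};
+ *	int tfmfd = socket(AF_ALG, SOCK_SEQPACKET, 0);
+ *
+ *	bind(tfmfd, (struct sockaddr *)&sa, sizeof(sa));
+ *	int opfd = accept(tfmfd, NULL, 0);
+ *	(write the data to opfd, then read back the digest)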
+ * + */ + +#include <linux/atomic.h> +#include <crypto/if_alg.h> +#include <linux/crypto.h> +#include <linux/init.h> +#include <linux/kernel.h> +#include <linux/list.h> +#include <linux/module.h> +#include <linux/net.h> +#include <linux/rwsem.h> + +struct alg_type_list { + const struct af_alg_type *type; + struct list_head list; +}; + +static atomic_long_t alg_memory_allocated; + +static struct proto alg_proto = { + .name = "ALG", + .owner = THIS_MODULE, + .memory_allocated = &alg_memory_allocated, + .obj_size = sizeof(struct alg_sock), +}; + +static LIST_HEAD(alg_types); +static DECLARE_RWSEM(alg_types_sem); + +static const struct af_alg_type *alg_get_type(const char *name) +{ + const struct af_alg_type *type = ERR_PTR(-ENOENT); + struct alg_type_list *node; + + down_read(&alg_types_sem); + list_for_each_entry(node, &alg_types, list) { + if (strcmp(node->type->name, name)) + continue; + + if (try_module_get(node->type->owner)) + type = node->type; + break; + } + up_read(&alg_types_sem); + + return type; +} + +int af_alg_register_type(const struct af_alg_type *type) +{ + struct alg_type_list *node; + int err = -EEXIST; + + down_write(&alg_types_sem); + list_for_each_entry(node, &alg_types, list) { + if (!strcmp(node->type->name, type->name)) + goto unlock; + } + + node = kmalloc(sizeof(*node), GFP_KERNEL); + err = -ENOMEM; + if (!node) + goto unlock; + + type->ops->owner = THIS_MODULE; + node->type = type; + list_add(&node->list, &alg_types); + err = 0; + +unlock: + up_write(&alg_types_sem); + + return err; +} +EXPORT_SYMBOL_GPL(af_alg_register_type); + +int af_alg_unregister_type(const struct af_alg_type *type) +{ + struct alg_type_list *node; + int err = -ENOENT; + + down_write(&alg_types_sem); + list_for_each_entry(node, &alg_types, list) { + if (strcmp(node->type->name, type->name)) + continue; + + list_del(&node->list); + kfree(node); + err = 0; + break; + } + up_write(&alg_types_sem); + + return err; +} +EXPORT_SYMBOL_GPL(af_alg_unregister_type); + +static void alg_do_release(const struct af_alg_type *type, void *private) +{ + if (!type) + return; + + type->release(private); + module_put(type->owner); +} + +int af_alg_release(struct socket *sock) +{ + if (sock->sk) + sock_put(sock->sk); + return 0; +} +EXPORT_SYMBOL_GPL(af_alg_release); + +static int alg_bind(struct socket *sock, struct sockaddr *uaddr, int addr_len) +{ + struct sock *sk = sock->sk; + struct alg_sock *ask = alg_sk(sk); + struct sockaddr_alg *sa = (void *)uaddr; + const struct af_alg_type *type; + void *private; + + if (sock->state == SS_CONNECTED) + return -EINVAL; + + if (addr_len != sizeof(*sa)) + return -EINVAL; + + sa->salg_type[sizeof(sa->salg_type) - 1] = 0; + sa->salg_name[sizeof(sa->salg_name) - 1] = 0; + + type = alg_get_type(sa->salg_type); + if (IS_ERR(type) && PTR_ERR(type) == -ENOENT) { + request_module("algif-%s", sa->salg_type); + type = alg_get_type(sa->salg_type); + } + + if (IS_ERR(type)) + return PTR_ERR(type); + + private = type->bind(sa->salg_name, sa->salg_feat, sa->salg_mask); + if (IS_ERR(private)) { + module_put(type->owner); + return PTR_ERR(private); + } + + lock_sock(sk); + + swap(ask->type, type); + swap(ask->private, private); + + release_sock(sk); + + alg_do_release(type, private); + + return 0; +} + +static int alg_setkey(struct sock *sk, char __user *ukey, + unsigned int keylen) +{ + struct alg_sock *ask = alg_sk(sk); + const struct af_alg_type *type = ask->type; + u8 *key; + int err; + + key = sock_kmalloc(sk, keylen, GFP_KERNEL); + if (!key) + return -ENOMEM; + + err = -EFAULT; + 
if (copy_from_user(key, ukey, keylen)) + goto out; + + err = type->setkey(ask->private, key, keylen); + +out: + sock_kfree_s(sk, key, keylen); + + return err; +} + +static int alg_setsockopt(struct socket *sock, int level, int optname, + char __user *optval, unsigned int optlen) +{ + struct sock *sk = sock->sk; + struct alg_sock *ask = alg_sk(sk); + const struct af_alg_type *type; + int err = -ENOPROTOOPT; + + lock_sock(sk); + type = ask->type; + + if (level != SOL_ALG || !type) + goto unlock; + + switch (optname) { + case ALG_SET_KEY: + if (sock->state == SS_CONNECTED) + goto unlock; + if (!type->setkey) + goto unlock; + + err = alg_setkey(sk, optval, optlen); + } + +unlock: + release_sock(sk); + + return err; +} + +int af_alg_accept(struct sock *sk, struct socket *newsock) +{ + struct alg_sock *ask = alg_sk(sk); + const struct af_alg_type *type; + struct sock *sk2; + int err; + + lock_sock(sk); + type = ask->type; + + err = -EINVAL; + if (!type) + goto unlock; + + sk2 = sk_alloc(sock_net(sk), PF_ALG, GFP_KERNEL, &alg_proto); + err = -ENOMEM; + if (!sk2) + goto unlock; + + sock_init_data(newsock, sk2); + sock_graft(sk2, newsock); + + err = type->accept(ask->private, sk2); + if (err) { + sk_free(sk2); + goto unlock; + } + + sk2->sk_family = PF_ALG; + + sock_hold(sk); + alg_sk(sk2)->parent = sk; + alg_sk(sk2)->type = type; + + newsock->ops = type->ops; + newsock->state = SS_CONNECTED; + + err = 0; + +unlock: + release_sock(sk); + + return err; +} +EXPORT_SYMBOL_GPL(af_alg_accept); + +static int alg_accept(struct socket *sock, struct socket *newsock, int flags) +{ + return af_alg_accept(sock->sk, newsock); +} + +static const struct proto_ops alg_proto_ops = { + .family = PF_ALG, + .owner = THIS_MODULE, + + .connect = sock_no_connect, + .socketpair = sock_no_socketpair, + .getname = sock_no_getname, + .ioctl = sock_no_ioctl, + .listen = sock_no_listen, + .shutdown = sock_no_shutdown, + .getsockopt = sock_no_getsockopt, + .mmap = sock_no_mmap, + .sendpage = sock_no_sendpage, + .sendmsg = sock_no_sendmsg, + .recvmsg = sock_no_recvmsg, + .poll = sock_no_poll, + + .bind = alg_bind, + .release = af_alg_release, + .setsockopt = alg_setsockopt, + .accept = alg_accept, +}; + +static void alg_sock_destruct(struct sock *sk) +{ + struct alg_sock *ask = alg_sk(sk); + + alg_do_release(ask->type, ask->private); +} + +static int alg_create(struct net *net, struct socket *sock, int protocol, + int kern) +{ + struct sock *sk; + int err; + + if (sock->type != SOCK_SEQPACKET) + return -ESOCKTNOSUPPORT; + if (protocol != 0) + return -EPROTONOSUPPORT; + + err = -ENOMEM; + sk = sk_alloc(net, PF_ALG, GFP_KERNEL, &alg_proto); + if (!sk) + goto out; + + sock->ops = &alg_proto_ops; + sock_init_data(sock, sk); + + sk->sk_family = PF_ALG; + sk->sk_destruct = alg_sock_destruct; + + return 0; +out: + return err; +} + +static const struct net_proto_family alg_family = { + .family = PF_ALG, + .create = alg_create, + .owner = THIS_MODULE, +}; + +int af_alg_make_sg(struct af_alg_sgl *sgl, void __user *addr, int len, + int write) +{ + unsigned long from = (unsigned long)addr; + unsigned long npages; + unsigned off; + int err; + int i; + + err = -EFAULT; + if (!access_ok(write ? 
VERIFY_READ : VERIFY_WRITE, addr, len)) + goto out; + + off = from & ~PAGE_MASK; + npages = (off + len + PAGE_SIZE - 1) >> PAGE_SHIFT; + if (npages > ALG_MAX_PAGES) + npages = ALG_MAX_PAGES; + + err = get_user_pages_fast(from, npages, write, sgl->pages); + if (err < 0) + goto out; + + npages = err; + err = -EINVAL; + if (WARN_ON(npages == 0)) + goto out; + + err = 0; + + sg_init_table(sgl->sg, npages); + + for (i = 0; i < npages; i++) { + int plen = min_t(int, len, PAGE_SIZE - off); + + sg_set_page(sgl->sg + i, sgl->pages[i], plen, off); + + off = 0; + len -= plen; + err += plen; + } + +out: + return err; +} +EXPORT_SYMBOL_GPL(af_alg_make_sg); + +void af_alg_free_sg(struct af_alg_sgl *sgl) +{ + int i; + + i = 0; + do { + put_page(sgl->pages[i]); + } while (!sg_is_last(sgl->sg + (i++))); +} +EXPORT_SYMBOL_GPL(af_alg_free_sg); + +int af_alg_cmsg_send(struct msghdr *msg, struct af_alg_control *con) +{ + struct cmsghdr *cmsg; + + for (cmsg = CMSG_FIRSTHDR(msg); cmsg; cmsg = CMSG_NXTHDR(msg, cmsg)) { + if (!CMSG_OK(msg, cmsg)) + return -EINVAL; + if (cmsg->cmsg_level != SOL_ALG) + continue; + + switch(cmsg->cmsg_type) { + case ALG_SET_IV: + if (cmsg->cmsg_len < CMSG_LEN(sizeof(*con->iv))) + return -EINVAL; + con->iv = (void *)CMSG_DATA(cmsg); + if (cmsg->cmsg_len < CMSG_LEN(con->iv->ivlen + + sizeof(*con->iv))) + return -EINVAL; + break; + + case ALG_SET_OP: + if (cmsg->cmsg_len < CMSG_LEN(sizeof(u32))) + return -EINVAL; + con->op = *(u32 *)CMSG_DATA(cmsg); + break; + + default: + return -EINVAL; + } + } + + return 0; +} +EXPORT_SYMBOL_GPL(af_alg_cmsg_send); + +int af_alg_wait_for_completion(int err, struct af_alg_completion *completion) +{ + switch (err) { + case -EINPROGRESS: + case -EBUSY: + wait_for_completion(&completion->completion); + INIT_COMPLETION(completion->completion); + err = completion->err; + break; + }; + + return err; +} +EXPORT_SYMBOL_GPL(af_alg_wait_for_completion); + +void af_alg_complete(struct crypto_async_request *req, int err) +{ + struct af_alg_completion *completion = req->data; + + completion->err = err; + complete(&completion->completion); +} +EXPORT_SYMBOL_GPL(af_alg_complete); + +static int __init af_alg_init(void) +{ + int err = proto_register(&alg_proto, 0); + + if (err) + goto out; + + err = sock_register(&alg_family); + if (err != 0) + goto out_unregister_proto; + +out: + return err; + +out_unregister_proto: + proto_unregister(&alg_proto); + goto out; +} + +static void __exit af_alg_exit(void) +{ + sock_unregister(PF_ALG); + proto_unregister(&alg_proto); +} + +module_init(af_alg_init); +module_exit(af_alg_exit); +MODULE_LICENSE("GPL"); +MODULE_ALIAS_NETPROTO(AF_ALG); diff --git a/crypto/ahash.c b/crypto/ahash.c new file mode 100644 index 00000000..33bc9b62 --- /dev/null +++ b/crypto/ahash.c @@ -0,0 +1,533 @@ +/* + * Asynchronous Cryptographic Hash operations. + * + * This is the asynchronous version of hash.c with notification of + * completion via a callback. + * + * Copyright (c) 2008 Loc Ho <lho@amcc.com> + * + * This program is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License as published by the Free + * Software Foundation; either version 2 of the License, or (at your option) + * any later version. 
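+ *
+ * Example (editor's illustrative sketch, not part of the original patch):
+ * a typical asynchronous digest using the request helpers from
+ * <crypto/hash.h>; my_done, my_ctx, sg, digest and nbytes are placeholders.
+ *
+ *	struct crypto_ahash *tfm = crypto_alloc_ahash("sha1", 0, 0);
+ *	struct ahash_request *req = ahash_request_alloc(tfm, GFP_KERNEL);
+ *	int err;
+ *
+ *	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
+ *				   my_done, my_ctx);
+ *	ahash_request_set_crypt(req, sg, digest, nbytes);
+ *	err = crypto_ahash_digest(req);
+ *	(-EINPROGRESS or -EBUSY means the result will arrive via my_done)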
+ * + */ + +#include <crypto/internal/hash.h> +#include <crypto/scatterwalk.h> +#include <linux/err.h> +#include <linux/kernel.h> +#include <linux/module.h> +#include <linux/sched.h> +#include <linux/slab.h> +#include <linux/seq_file.h> +#include <linux/cryptouser.h> +#include <net/netlink.h> + +#include "internal.h" + +struct ahash_request_priv { + crypto_completion_t complete; + void *data; + u8 *result; + void *ubuf[] CRYPTO_MINALIGN_ATTR; +}; + +static inline struct ahash_alg *crypto_ahash_alg(struct crypto_ahash *hash) +{ + return container_of(crypto_hash_alg_common(hash), struct ahash_alg, + halg); +} + +static int hash_walk_next(struct crypto_hash_walk *walk) +{ + unsigned int alignmask = walk->alignmask; + unsigned int offset = walk->offset; + unsigned int nbytes = min(walk->entrylen, + ((unsigned int)(PAGE_SIZE)) - offset); + + walk->data = kmap_atomic(walk->pg); + walk->data += offset; + + if (offset & alignmask) { + unsigned int unaligned = alignmask + 1 - (offset & alignmask); + if (nbytes > unaligned) + nbytes = unaligned; + } + + walk->entrylen -= nbytes; + return nbytes; +} + +static int hash_walk_new_entry(struct crypto_hash_walk *walk) +{ + struct scatterlist *sg; + + sg = walk->sg; + walk->pg = sg_page(sg); + walk->offset = sg->offset; + walk->entrylen = sg->length; + + if (walk->entrylen > walk->total) + walk->entrylen = walk->total; + walk->total -= walk->entrylen; + + return hash_walk_next(walk); +} + +int crypto_hash_walk_done(struct crypto_hash_walk *walk, int err) +{ + unsigned int alignmask = walk->alignmask; + unsigned int nbytes = walk->entrylen; + + walk->data -= walk->offset; + + if (nbytes && walk->offset & alignmask && !err) { + walk->offset = ALIGN(walk->offset, alignmask + 1); + walk->data += walk->offset; + + nbytes = min(nbytes, + ((unsigned int)(PAGE_SIZE)) - walk->offset); + walk->entrylen -= nbytes; + + return nbytes; + } + + kunmap_atomic(walk->data); + crypto_yield(walk->flags); + + if (err) + return err; + + if (nbytes) { + walk->offset = 0; + walk->pg++; + return hash_walk_next(walk); + } + + if (!walk->total) + return 0; + + walk->sg = scatterwalk_sg_next(walk->sg); + + return hash_walk_new_entry(walk); +} +EXPORT_SYMBOL_GPL(crypto_hash_walk_done); + +int crypto_hash_walk_first(struct ahash_request *req, + struct crypto_hash_walk *walk) +{ + walk->total = req->nbytes; + + if (!walk->total) + return 0; + + walk->alignmask = crypto_ahash_alignmask(crypto_ahash_reqtfm(req)); + walk->sg = req->src; + walk->flags = req->base.flags; + + return hash_walk_new_entry(walk); +} +EXPORT_SYMBOL_GPL(crypto_hash_walk_first); + +int crypto_hash_walk_first_compat(struct hash_desc *hdesc, + struct crypto_hash_walk *walk, + struct scatterlist *sg, unsigned int len) +{ + walk->total = len; + + if (!walk->total) + return 0; + + walk->alignmask = crypto_hash_alignmask(hdesc->tfm); + walk->sg = sg; + walk->flags = hdesc->flags; + + return hash_walk_new_entry(walk); +} + +static int ahash_setkey_unaligned(struct crypto_ahash *tfm, const u8 *key, + unsigned int keylen) +{ + unsigned long alignmask = crypto_ahash_alignmask(tfm); + int ret; + u8 *buffer, *alignbuffer; + unsigned long absize; + + absize = keylen + alignmask; + buffer = kmalloc(absize, GFP_KERNEL); + if (!buffer) + return -ENOMEM; + + alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1); + memcpy(alignbuffer, key, keylen); + ret = tfm->setkey(tfm, alignbuffer, keylen); + kzfree(buffer); + return ret; +} + +int crypto_ahash_setkey(struct crypto_ahash *tfm, const u8 *key, + unsigned int keylen) +{ + 
unsigned long alignmask = crypto_ahash_alignmask(tfm); + + if ((unsigned long)key & alignmask) + return ahash_setkey_unaligned(tfm, key, keylen); + + return tfm->setkey(tfm, key, keylen); +} +EXPORT_SYMBOL_GPL(crypto_ahash_setkey); + +static int ahash_nosetkey(struct crypto_ahash *tfm, const u8 *key, + unsigned int keylen) +{ + return -ENOSYS; +} + +static inline unsigned int ahash_align_buffer_size(unsigned len, + unsigned long mask) +{ + return len + (mask & ~(crypto_tfm_ctx_alignment() - 1)); +} + +static void ahash_op_unaligned_finish(struct ahash_request *req, int err) +{ + struct ahash_request_priv *priv = req->priv; + + if (err == -EINPROGRESS) + return; + + if (!err) + memcpy(priv->result, req->result, + crypto_ahash_digestsize(crypto_ahash_reqtfm(req))); + + kzfree(priv); +} + +static void ahash_op_unaligned_done(struct crypto_async_request *req, int err) +{ + struct ahash_request *areq = req->data; + struct ahash_request_priv *priv = areq->priv; + crypto_completion_t complete = priv->complete; + void *data = priv->data; + + ahash_op_unaligned_finish(areq, err); + + complete(data, err); +} + +static int ahash_op_unaligned(struct ahash_request *req, + int (*op)(struct ahash_request *)) +{ + struct crypto_ahash *tfm = crypto_ahash_reqtfm(req); + unsigned long alignmask = crypto_ahash_alignmask(tfm); + unsigned int ds = crypto_ahash_digestsize(tfm); + struct ahash_request_priv *priv; + int err; + + priv = kmalloc(sizeof(*priv) + ahash_align_buffer_size(ds, alignmask), + (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ? + GFP_KERNEL : GFP_ATOMIC); + if (!priv) + return -ENOMEM; + + priv->result = req->result; + priv->complete = req->base.complete; + priv->data = req->base.data; + + req->result = PTR_ALIGN((u8 *)priv->ubuf, alignmask + 1); + req->base.complete = ahash_op_unaligned_done; + req->base.data = req; + req->priv = priv; + + err = op(req); + ahash_op_unaligned_finish(req, err); + + return err; +} + +static int crypto_ahash_op(struct ahash_request *req, + int (*op)(struct ahash_request *)) +{ + struct crypto_ahash *tfm = crypto_ahash_reqtfm(req); + unsigned long alignmask = crypto_ahash_alignmask(tfm); + + if ((unsigned long)req->result & alignmask) + return ahash_op_unaligned(req, op); + + return op(req); +} + +int crypto_ahash_final(struct ahash_request *req) +{ + return crypto_ahash_op(req, crypto_ahash_reqtfm(req)->final); +} +EXPORT_SYMBOL_GPL(crypto_ahash_final); + +int crypto_ahash_finup(struct ahash_request *req) +{ + return crypto_ahash_op(req, crypto_ahash_reqtfm(req)->finup); +} +EXPORT_SYMBOL_GPL(crypto_ahash_finup); + +int crypto_ahash_digest(struct ahash_request *req) +{ + return crypto_ahash_op(req, crypto_ahash_reqtfm(req)->digest); +} +EXPORT_SYMBOL_GPL(crypto_ahash_digest); + +static void ahash_def_finup_finish2(struct ahash_request *req, int err) +{ + struct ahash_request_priv *priv = req->priv; + + if (err == -EINPROGRESS) + return; + + if (!err) + memcpy(priv->result, req->result, + crypto_ahash_digestsize(crypto_ahash_reqtfm(req))); + + kzfree(priv); +} + +static void ahash_def_finup_done2(struct crypto_async_request *req, int err) +{ + struct ahash_request *areq = req->data; + struct ahash_request_priv *priv = areq->priv; + crypto_completion_t complete = priv->complete; + void *data = priv->data; + + ahash_def_finup_finish2(areq, err); + + complete(data, err); +} + +static int ahash_def_finup_finish1(struct ahash_request *req, int err) +{ + if (err) + goto out; + + req->base.complete = ahash_def_finup_done2; + req->base.flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP; + 
err = crypto_ahash_reqtfm(req)->final(req); + +out: + ahash_def_finup_finish2(req, err); + return err; +} + +static void ahash_def_finup_done1(struct crypto_async_request *req, int err) +{ + struct ahash_request *areq = req->data; + struct ahash_request_priv *priv = areq->priv; + crypto_completion_t complete = priv->complete; + void *data = priv->data; + + err = ahash_def_finup_finish1(areq, err); + + complete(data, err); +} + +static int ahash_def_finup(struct ahash_request *req) +{ + struct crypto_ahash *tfm = crypto_ahash_reqtfm(req); + unsigned long alignmask = crypto_ahash_alignmask(tfm); + unsigned int ds = crypto_ahash_digestsize(tfm); + struct ahash_request_priv *priv; + + priv = kmalloc(sizeof(*priv) + ahash_align_buffer_size(ds, alignmask), + (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ? + GFP_KERNEL : GFP_ATOMIC); + if (!priv) + return -ENOMEM; + + priv->result = req->result; + priv->complete = req->base.complete; + priv->data = req->base.data; + + req->result = PTR_ALIGN((u8 *)priv->ubuf, alignmask + 1); + req->base.complete = ahash_def_finup_done1; + req->base.data = req; + req->priv = priv; + + return ahash_def_finup_finish1(req, tfm->update(req)); +} + +static int ahash_no_export(struct ahash_request *req, void *out) +{ + return -ENOSYS; +} + +static int ahash_no_import(struct ahash_request *req, const void *in) +{ + return -ENOSYS; +} + +static int crypto_ahash_init_tfm(struct crypto_tfm *tfm) +{ + struct crypto_ahash *hash = __crypto_ahash_cast(tfm); + struct ahash_alg *alg = crypto_ahash_alg(hash); + + hash->setkey = ahash_nosetkey; + hash->export = ahash_no_export; + hash->import = ahash_no_import; + + if (tfm->__crt_alg->cra_type != &crypto_ahash_type) + return crypto_init_shash_ops_async(tfm); + + hash->init = alg->init; + hash->update = alg->update; + hash->final = alg->final; + hash->finup = alg->finup ?: ahash_def_finup; + hash->digest = alg->digest; + + if (alg->setkey) + hash->setkey = alg->setkey; + if (alg->export) + hash->export = alg->export; + if (alg->import) + hash->import = alg->import; + + return 0; +} + +static unsigned int crypto_ahash_extsize(struct crypto_alg *alg) +{ + if (alg->cra_type == &crypto_ahash_type) + return alg->cra_ctxsize; + + return sizeof(struct crypto_shash *); +} + +#ifdef CONFIG_NET +static int crypto_ahash_report(struct sk_buff *skb, struct crypto_alg *alg) +{ + struct crypto_report_hash rhash; + + snprintf(rhash.type, CRYPTO_MAX_ALG_NAME, "%s", "ahash"); + + rhash.blocksize = alg->cra_blocksize; + rhash.digestsize = __crypto_hash_alg_common(alg)->digestsize; + + NLA_PUT(skb, CRYPTOCFGA_REPORT_HASH, + sizeof(struct crypto_report_hash), &rhash); + + return 0; + +nla_put_failure: + return -EMSGSIZE; +} +#else +static int crypto_ahash_report(struct sk_buff *skb, struct crypto_alg *alg) +{ + return -ENOSYS; +} +#endif + +static void crypto_ahash_show(struct seq_file *m, struct crypto_alg *alg) + __attribute__ ((unused)); +static void crypto_ahash_show(struct seq_file *m, struct crypto_alg *alg) +{ + seq_printf(m, "type : ahash\n"); + seq_printf(m, "async : %s\n", alg->cra_flags & CRYPTO_ALG_ASYNC ? 
+ "yes" : "no"); + seq_printf(m, "blocksize : %u\n", alg->cra_blocksize); + seq_printf(m, "digestsize : %u\n", + __crypto_hash_alg_common(alg)->digestsize); +} + +const struct crypto_type crypto_ahash_type = { + .extsize = crypto_ahash_extsize, + .init_tfm = crypto_ahash_init_tfm, +#ifdef CONFIG_PROC_FS + .show = crypto_ahash_show, +#endif + .report = crypto_ahash_report, + .maskclear = ~CRYPTO_ALG_TYPE_MASK, + .maskset = CRYPTO_ALG_TYPE_AHASH_MASK, + .type = CRYPTO_ALG_TYPE_AHASH, + .tfmsize = offsetof(struct crypto_ahash, base), +}; +EXPORT_SYMBOL_GPL(crypto_ahash_type); + +struct crypto_ahash *crypto_alloc_ahash(const char *alg_name, u32 type, + u32 mask) +{ + return crypto_alloc_tfm(alg_name, &crypto_ahash_type, type, mask); +} +EXPORT_SYMBOL_GPL(crypto_alloc_ahash); + +static int ahash_prepare_alg(struct ahash_alg *alg) +{ + struct crypto_alg *base = &alg->halg.base; + + if (alg->halg.digestsize > PAGE_SIZE / 8 || + alg->halg.statesize > PAGE_SIZE / 8) + return -EINVAL; + + base->cra_type = &crypto_ahash_type; + base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK; + base->cra_flags |= CRYPTO_ALG_TYPE_AHASH; + + return 0; +} + +int crypto_register_ahash(struct ahash_alg *alg) +{ + struct crypto_alg *base = &alg->halg.base; + int err; + + err = ahash_prepare_alg(alg); + if (err) + return err; + + return crypto_register_alg(base); +} +EXPORT_SYMBOL_GPL(crypto_register_ahash); + +int crypto_unregister_ahash(struct ahash_alg *alg) +{ + return crypto_unregister_alg(&alg->halg.base); +} +EXPORT_SYMBOL_GPL(crypto_unregister_ahash); + +int ahash_register_instance(struct crypto_template *tmpl, + struct ahash_instance *inst) +{ + int err; + + err = ahash_prepare_alg(&inst->alg); + if (err) + return err; + + return crypto_register_instance(tmpl, ahash_crypto_instance(inst)); +} +EXPORT_SYMBOL_GPL(ahash_register_instance); + +void ahash_free_instance(struct crypto_instance *inst) +{ + crypto_drop_spawn(crypto_instance_ctx(inst)); + kfree(ahash_instance(inst)); +} +EXPORT_SYMBOL_GPL(ahash_free_instance); + +int crypto_init_ahash_spawn(struct crypto_ahash_spawn *spawn, + struct hash_alg_common *alg, + struct crypto_instance *inst) +{ + return crypto_init_spawn2(&spawn->base, &alg->base, inst, + &crypto_ahash_type); +} +EXPORT_SYMBOL_GPL(crypto_init_ahash_spawn); + +struct hash_alg_common *ahash_attr_alg(struct rtattr *rta, u32 type, u32 mask) +{ + struct crypto_alg *alg; + + alg = crypto_attr_alg2(rta, &crypto_ahash_type, type, mask); + return IS_ERR(alg) ? ERR_CAST(alg) : __crypto_hash_alg_common(alg); +} +EXPORT_SYMBOL_GPL(ahash_attr_alg); + +MODULE_LICENSE("GPL"); +MODULE_DESCRIPTION("Asynchronous cryptographic hash type"); diff --git a/crypto/algapi.c b/crypto/algapi.c new file mode 100644 index 00000000..056571b8 --- /dev/null +++ b/crypto/algapi.c @@ -0,0 +1,989 @@ +/* + * Cryptographic API for algorithms (i.e., low-level API). + * + * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au> + * + * This program is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License as published by the Free + * Software Foundation; either version 2 of the License, or (at your option) + * any later version. 
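+ *
+ * Example (editor's illustrative sketch, not part of the original patch):
+ * a typical user of this API registers a static struct crypto_alg at module
+ * init and drops it on exit, as aes-generic does earlier in this patch;
+ * my_alg is a placeholder.
+ *
+ *	static int __init my_module_init(void)
+ *	{
+ *		return crypto_register_alg(&my_alg);
+ *	}
+ *
+ *	static void __exit my_module_exit(void)
+ *	{
+ *		crypto_unregister_alg(&my_alg);
+ *	}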
+ * + */ + +#include <linux/err.h> +#include <linux/errno.h> +#include <linux/init.h> +#include <linux/kernel.h> +#include <linux/list.h> +#include <linux/module.h> +#include <linux/rtnetlink.h> +#include <linux/slab.h> +#include <linux/string.h> + +#include "internal.h" + +static LIST_HEAD(crypto_template_list); + +void crypto_larval_error(const char *name, u32 type, u32 mask) +{ + struct crypto_alg *alg; + + alg = crypto_alg_lookup(name, type, mask); + + if (alg) { + if (crypto_is_larval(alg)) { + struct crypto_larval *larval = (void *)alg; + complete_all(&larval->completion); + } + crypto_mod_put(alg); + } +} +EXPORT_SYMBOL_GPL(crypto_larval_error); + +static inline int crypto_set_driver_name(struct crypto_alg *alg) +{ + static const char suffix[] = "-generic"; + char *driver_name = alg->cra_driver_name; + int len; + + if (*driver_name) + return 0; + + len = strlcpy(driver_name, alg->cra_name, CRYPTO_MAX_ALG_NAME); + if (len + sizeof(suffix) > CRYPTO_MAX_ALG_NAME) + return -ENAMETOOLONG; + + memcpy(driver_name + len, suffix, sizeof(suffix)); + return 0; +} + +static int crypto_check_alg(struct crypto_alg *alg) +{ + if (alg->cra_alignmask & (alg->cra_alignmask + 1)) + return -EINVAL; + + if (alg->cra_blocksize > PAGE_SIZE / 8) + return -EINVAL; + + if (alg->cra_priority < 0) + return -EINVAL; + + return crypto_set_driver_name(alg); +} + +static void crypto_destroy_instance(struct crypto_alg *alg) +{ + struct crypto_instance *inst = (void *)alg; + struct crypto_template *tmpl = inst->tmpl; + + tmpl->free(inst); + crypto_tmpl_put(tmpl); +} + +static struct list_head *crypto_more_spawns(struct crypto_alg *alg, + struct list_head *stack, + struct list_head *top, + struct list_head *secondary_spawns) +{ + struct crypto_spawn *spawn, *n; + + if (list_empty(stack)) + return NULL; + + spawn = list_first_entry(stack, struct crypto_spawn, list); + n = list_entry(spawn->list.next, struct crypto_spawn, list); + + if (spawn->alg && &n->list != stack && !n->alg) + n->alg = (n->list.next == stack) ? alg : + &list_entry(n->list.next, struct crypto_spawn, + list)->inst->alg; + + list_move(&spawn->list, secondary_spawns); + + return &n->list == stack ? 
top : &n->inst->alg.cra_users; +} + +static void crypto_remove_spawn(struct crypto_spawn *spawn, + struct list_head *list) +{ + struct crypto_instance *inst = spawn->inst; + struct crypto_template *tmpl = inst->tmpl; + + if (crypto_is_dead(&inst->alg)) + return; + + inst->alg.cra_flags |= CRYPTO_ALG_DEAD; + if (hlist_unhashed(&inst->list)) + return; + + if (!tmpl || !crypto_tmpl_get(tmpl)) + return; + + crypto_notify(CRYPTO_MSG_ALG_UNREGISTER, &inst->alg); + list_move(&inst->alg.cra_list, list); + hlist_del(&inst->list); + inst->alg.cra_destroy = crypto_destroy_instance; + + BUG_ON(!list_empty(&inst->alg.cra_users)); +} + +void crypto_remove_spawns(struct crypto_alg *alg, struct list_head *list, + struct crypto_alg *nalg) +{ + u32 new_type = (nalg ?: alg)->cra_flags; + struct crypto_spawn *spawn, *n; + LIST_HEAD(secondary_spawns); + struct list_head *spawns; + LIST_HEAD(stack); + LIST_HEAD(top); + + spawns = &alg->cra_users; + list_for_each_entry_safe(spawn, n, spawns, list) { + if ((spawn->alg->cra_flags ^ new_type) & spawn->mask) + continue; + + list_move(&spawn->list, &top); + } + + spawns = ⊤ + do { + while (!list_empty(spawns)) { + struct crypto_instance *inst; + + spawn = list_first_entry(spawns, struct crypto_spawn, + list); + inst = spawn->inst; + + BUG_ON(&inst->alg == alg); + + list_move(&spawn->list, &stack); + + if (&inst->alg == nalg) + break; + + spawn->alg = NULL; + spawns = &inst->alg.cra_users; + } + } while ((spawns = crypto_more_spawns(alg, &stack, &top, + &secondary_spawns))); + + list_for_each_entry_safe(spawn, n, &secondary_spawns, list) { + if (spawn->alg) + list_move(&spawn->list, &spawn->alg->cra_users); + else + crypto_remove_spawn(spawn, list); + } +} +EXPORT_SYMBOL_GPL(crypto_remove_spawns); + +static struct crypto_larval *__crypto_register_alg(struct crypto_alg *alg) +{ + struct crypto_alg *q; + struct crypto_larval *larval; + int ret = -EAGAIN; + + if (crypto_is_dead(alg)) + goto err; + + INIT_LIST_HEAD(&alg->cra_users); + + /* No cheating! 
*/ + alg->cra_flags &= ~CRYPTO_ALG_TESTED; + + ret = -EEXIST; + + atomic_set(&alg->cra_refcnt, 1); + list_for_each_entry(q, &crypto_alg_list, cra_list) { + if (q == alg) + goto err; + + if (crypto_is_moribund(q)) + continue; + + if (crypto_is_larval(q)) { + if (!strcmp(alg->cra_driver_name, q->cra_driver_name)) + goto err; + continue; + } + + if (!strcmp(q->cra_driver_name, alg->cra_name) || + !strcmp(q->cra_name, alg->cra_driver_name)) + goto err; + } + + larval = crypto_larval_alloc(alg->cra_name, + alg->cra_flags | CRYPTO_ALG_TESTED, 0); + if (IS_ERR(larval)) + goto out; + + ret = -ENOENT; + larval->adult = crypto_mod_get(alg); + if (!larval->adult) + goto free_larval; + + atomic_set(&larval->alg.cra_refcnt, 1); + memcpy(larval->alg.cra_driver_name, alg->cra_driver_name, + CRYPTO_MAX_ALG_NAME); + larval->alg.cra_priority = alg->cra_priority; + + list_add(&alg->cra_list, &crypto_alg_list); + list_add(&larval->alg.cra_list, &crypto_alg_list); + +out: + return larval; + +free_larval: + kfree(larval); +err: + larval = ERR_PTR(ret); + goto out; +} + +void crypto_alg_tested(const char *name, int err) +{ + struct crypto_larval *test; + struct crypto_alg *alg; + struct crypto_alg *q; + LIST_HEAD(list); + + down_write(&crypto_alg_sem); + list_for_each_entry(q, &crypto_alg_list, cra_list) { + if (crypto_is_moribund(q) || !crypto_is_larval(q)) + continue; + + test = (struct crypto_larval *)q; + + if (!strcmp(q->cra_driver_name, name)) + goto found; + } + + printk(KERN_ERR "alg: Unexpected test result for %s: %d\n", name, err); + goto unlock; + +found: + q->cra_flags |= CRYPTO_ALG_DEAD; + alg = test->adult; + if (err || list_empty(&alg->cra_list)) + goto complete; + + alg->cra_flags |= CRYPTO_ALG_TESTED; + + list_for_each_entry(q, &crypto_alg_list, cra_list) { + if (q == alg) + continue; + + if (crypto_is_moribund(q)) + continue; + + if (crypto_is_larval(q)) { + struct crypto_larval *larval = (void *)q; + + /* + * Check to see if either our generic name or + * specific name can satisfy the name requested + * by the larval entry q. 
+ */ + if (strcmp(alg->cra_name, q->cra_name) && + strcmp(alg->cra_driver_name, q->cra_name)) + continue; + + if (larval->adult) + continue; + if ((q->cra_flags ^ alg->cra_flags) & larval->mask) + continue; + if (!crypto_mod_get(alg)) + continue; + + larval->adult = alg; + complete_all(&larval->completion); + continue; + } + + if (strcmp(alg->cra_name, q->cra_name)) + continue; + + if (strcmp(alg->cra_driver_name, q->cra_driver_name) && + q->cra_priority > alg->cra_priority) + continue; + + crypto_remove_spawns(q, &list, alg); + } + +complete: + complete_all(&test->completion); + +unlock: + up_write(&crypto_alg_sem); + + crypto_remove_final(&list); +} +EXPORT_SYMBOL_GPL(crypto_alg_tested); + +void crypto_remove_final(struct list_head *list) +{ + struct crypto_alg *alg; + struct crypto_alg *n; + + list_for_each_entry_safe(alg, n, list, cra_list) { + list_del_init(&alg->cra_list); + crypto_alg_put(alg); + } +} +EXPORT_SYMBOL_GPL(crypto_remove_final); + +static void crypto_wait_for_test(struct crypto_larval *larval) +{ + int err; + + err = crypto_probing_notify(CRYPTO_MSG_ALG_REGISTER, larval->adult); + if (err != NOTIFY_STOP) { + if (WARN_ON(err != NOTIFY_DONE)) + goto out; + crypto_alg_tested(larval->alg.cra_driver_name, 0); + } + + err = wait_for_completion_interruptible(&larval->completion); + WARN_ON(err); + +out: + crypto_larval_kill(&larval->alg); +} + +int crypto_register_alg(struct crypto_alg *alg) +{ + struct crypto_larval *larval; + int err; + + err = crypto_check_alg(alg); + if (err) + return err; + + down_write(&crypto_alg_sem); + larval = __crypto_register_alg(alg); + up_write(&crypto_alg_sem); + + if (IS_ERR(larval)) + return PTR_ERR(larval); + + crypto_wait_for_test(larval); + return 0; +} +EXPORT_SYMBOL_GPL(crypto_register_alg); + +static int crypto_remove_alg(struct crypto_alg *alg, struct list_head *list) +{ + if (unlikely(list_empty(&alg->cra_list))) + return -ENOENT; + + alg->cra_flags |= CRYPTO_ALG_DEAD; + + crypto_notify(CRYPTO_MSG_ALG_UNREGISTER, alg); + list_del_init(&alg->cra_list); + crypto_remove_spawns(alg, list, NULL); + + return 0; +} + +int crypto_unregister_alg(struct crypto_alg *alg) +{ + int ret; + LIST_HEAD(list); + + down_write(&crypto_alg_sem); + ret = crypto_remove_alg(alg, &list); + up_write(&crypto_alg_sem); + + if (ret) + return ret; + + BUG_ON(atomic_read(&alg->cra_refcnt) != 1); + if (alg->cra_destroy) + alg->cra_destroy(alg); + + crypto_remove_final(&list); + return 0; +} +EXPORT_SYMBOL_GPL(crypto_unregister_alg); + +int crypto_register_algs(struct crypto_alg *algs, int count) +{ + int i, ret; + + for (i = 0; i < count; i++) { + ret = crypto_register_alg(&algs[i]); + if (ret) + goto err; + } + + return 0; + +err: + for (--i; i >= 0; --i) + crypto_unregister_alg(&algs[i]); + + return ret; +} +EXPORT_SYMBOL_GPL(crypto_register_algs); + +int crypto_unregister_algs(struct crypto_alg *algs, int count) +{ + int i, ret; + + for (i = 0; i < count; i++) { + ret = crypto_unregister_alg(&algs[i]); + if (ret) + pr_err("Failed to unregister %s %s: %d\n", + algs[i].cra_driver_name, algs[i].cra_name, ret); + } + + return 0; +} +EXPORT_SYMBOL_GPL(crypto_unregister_algs); + +int crypto_register_template(struct crypto_template *tmpl) +{ + struct crypto_template *q; + int err = -EEXIST; + + down_write(&crypto_alg_sem); + + list_for_each_entry(q, &crypto_template_list, list) { + if (q == tmpl) + goto out; + } + + list_add(&tmpl->list, &crypto_template_list); + crypto_notify(CRYPTO_MSG_TMPL_REGISTER, tmpl); + err = 0; +out: + up_write(&crypto_alg_sem); + return err; +} 
+EXPORT_SYMBOL_GPL(crypto_register_template); + +void crypto_unregister_template(struct crypto_template *tmpl) +{ + struct crypto_instance *inst; + struct hlist_node *p, *n; + struct hlist_head *list; + LIST_HEAD(users); + + down_write(&crypto_alg_sem); + + BUG_ON(list_empty(&tmpl->list)); + list_del_init(&tmpl->list); + + list = &tmpl->instances; + hlist_for_each_entry(inst, p, list, list) { + int err = crypto_remove_alg(&inst->alg, &users); + BUG_ON(err); + } + + crypto_notify(CRYPTO_MSG_TMPL_UNREGISTER, tmpl); + + up_write(&crypto_alg_sem); + + hlist_for_each_entry_safe(inst, p, n, list, list) { + BUG_ON(atomic_read(&inst->alg.cra_refcnt) != 1); + tmpl->free(inst); + } + crypto_remove_final(&users); +} +EXPORT_SYMBOL_GPL(crypto_unregister_template); + +static struct crypto_template *__crypto_lookup_template(const char *name) +{ + struct crypto_template *q, *tmpl = NULL; + + down_read(&crypto_alg_sem); + list_for_each_entry(q, &crypto_template_list, list) { + if (strcmp(q->name, name)) + continue; + if (unlikely(!crypto_tmpl_get(q))) + continue; + + tmpl = q; + break; + } + up_read(&crypto_alg_sem); + + return tmpl; +} + +struct crypto_template *crypto_lookup_template(const char *name) +{ + return try_then_request_module(__crypto_lookup_template(name), name); +} +EXPORT_SYMBOL_GPL(crypto_lookup_template); + +int crypto_register_instance(struct crypto_template *tmpl, + struct crypto_instance *inst) +{ + struct crypto_larval *larval; + int err; + + err = crypto_check_alg(&inst->alg); + if (err) + goto err; + + inst->alg.cra_module = tmpl->module; + inst->alg.cra_flags |= CRYPTO_ALG_INSTANCE; + + down_write(&crypto_alg_sem); + + larval = __crypto_register_alg(&inst->alg); + if (IS_ERR(larval)) + goto unlock; + + hlist_add_head(&inst->list, &tmpl->instances); + inst->tmpl = tmpl; + +unlock: + up_write(&crypto_alg_sem); + + err = PTR_ERR(larval); + if (IS_ERR(larval)) + goto err; + + crypto_wait_for_test(larval); + err = 0; + +err: + return err; +} +EXPORT_SYMBOL_GPL(crypto_register_instance); + +int crypto_unregister_instance(struct crypto_alg *alg) +{ + int err; + struct crypto_instance *inst = (void *)alg; + struct crypto_template *tmpl = inst->tmpl; + LIST_HEAD(users); + + if (!(alg->cra_flags & CRYPTO_ALG_INSTANCE)) + return -EINVAL; + + BUG_ON(atomic_read(&alg->cra_refcnt) != 1); + + down_write(&crypto_alg_sem); + + hlist_del_init(&inst->list); + err = crypto_remove_alg(alg, &users); + + up_write(&crypto_alg_sem); + + if (err) + return err; + + tmpl->free(inst); + crypto_remove_final(&users); + + return 0; +} +EXPORT_SYMBOL_GPL(crypto_unregister_instance); + +int crypto_init_spawn(struct crypto_spawn *spawn, struct crypto_alg *alg, + struct crypto_instance *inst, u32 mask) +{ + int err = -EAGAIN; + + spawn->inst = inst; + spawn->mask = mask; + + down_write(&crypto_alg_sem); + if (!crypto_is_moribund(alg)) { + list_add(&spawn->list, &alg->cra_users); + spawn->alg = alg; + err = 0; + } + up_write(&crypto_alg_sem); + + return err; +} +EXPORT_SYMBOL_GPL(crypto_init_spawn); + +int crypto_init_spawn2(struct crypto_spawn *spawn, struct crypto_alg *alg, + struct crypto_instance *inst, + const struct crypto_type *frontend) +{ + int err = -EINVAL; + + if ((alg->cra_flags ^ frontend->type) & frontend->maskset) + goto out; + + spawn->frontend = frontend; + err = crypto_init_spawn(spawn, alg, inst, frontend->maskset); + +out: + return err; +} +EXPORT_SYMBOL_GPL(crypto_init_spawn2); + +void crypto_drop_spawn(struct crypto_spawn *spawn) +{ + if (!spawn->alg) + return; + + down_write(&crypto_alg_sem); 
+ list_del(&spawn->list); + up_write(&crypto_alg_sem); +} +EXPORT_SYMBOL_GPL(crypto_drop_spawn); + +static struct crypto_alg *crypto_spawn_alg(struct crypto_spawn *spawn) +{ + struct crypto_alg *alg; + struct crypto_alg *alg2; + + down_read(&crypto_alg_sem); + alg = spawn->alg; + alg2 = alg; + if (alg2) + alg2 = crypto_mod_get(alg2); + up_read(&crypto_alg_sem); + + if (!alg2) { + if (alg) + crypto_shoot_alg(alg); + return ERR_PTR(-EAGAIN); + } + + return alg; +} + +struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type, + u32 mask) +{ + struct crypto_alg *alg; + struct crypto_tfm *tfm; + + alg = crypto_spawn_alg(spawn); + if (IS_ERR(alg)) + return ERR_CAST(alg); + + tfm = ERR_PTR(-EINVAL); + if (unlikely((alg->cra_flags ^ type) & mask)) + goto out_put_alg; + + tfm = __crypto_alloc_tfm(alg, type, mask); + if (IS_ERR(tfm)) + goto out_put_alg; + + return tfm; + +out_put_alg: + crypto_mod_put(alg); + return tfm; +} +EXPORT_SYMBOL_GPL(crypto_spawn_tfm); + +void *crypto_spawn_tfm2(struct crypto_spawn *spawn) +{ + struct crypto_alg *alg; + struct crypto_tfm *tfm; + + alg = crypto_spawn_alg(spawn); + if (IS_ERR(alg)) + return ERR_CAST(alg); + + tfm = crypto_create_tfm(alg, spawn->frontend); + if (IS_ERR(tfm)) + goto out_put_alg; + + return tfm; + +out_put_alg: + crypto_mod_put(alg); + return tfm; +} +EXPORT_SYMBOL_GPL(crypto_spawn_tfm2); + +int crypto_register_notifier(struct notifier_block *nb) +{ + return blocking_notifier_chain_register(&crypto_chain, nb); +} +EXPORT_SYMBOL_GPL(crypto_register_notifier); + +int crypto_unregister_notifier(struct notifier_block *nb) +{ + return blocking_notifier_chain_unregister(&crypto_chain, nb); +} +EXPORT_SYMBOL_GPL(crypto_unregister_notifier); + +struct crypto_attr_type *crypto_get_attr_type(struct rtattr **tb) +{ + struct rtattr *rta = tb[0]; + struct crypto_attr_type *algt; + + if (!rta) + return ERR_PTR(-ENOENT); + if (RTA_PAYLOAD(rta) < sizeof(*algt)) + return ERR_PTR(-EINVAL); + if (rta->rta_type != CRYPTOA_TYPE) + return ERR_PTR(-EINVAL); + + algt = RTA_DATA(rta); + + return algt; +} +EXPORT_SYMBOL_GPL(crypto_get_attr_type); + +int crypto_check_attr_type(struct rtattr **tb, u32 type) +{ + struct crypto_attr_type *algt; + + algt = crypto_get_attr_type(tb); + if (IS_ERR(algt)) + return PTR_ERR(algt); + + if ((algt->type ^ type) & algt->mask) + return -EINVAL; + + return 0; +} +EXPORT_SYMBOL_GPL(crypto_check_attr_type); + +const char *crypto_attr_alg_name(struct rtattr *rta) +{ + struct crypto_attr_alg *alga; + + if (!rta) + return ERR_PTR(-ENOENT); + if (RTA_PAYLOAD(rta) < sizeof(*alga)) + return ERR_PTR(-EINVAL); + if (rta->rta_type != CRYPTOA_ALG) + return ERR_PTR(-EINVAL); + + alga = RTA_DATA(rta); + alga->name[CRYPTO_MAX_ALG_NAME - 1] = 0; + + return alga->name; +} +EXPORT_SYMBOL_GPL(crypto_attr_alg_name); + +struct crypto_alg *crypto_attr_alg2(struct rtattr *rta, + const struct crypto_type *frontend, + u32 type, u32 mask) +{ + const char *name; + int err; + + name = crypto_attr_alg_name(rta); + err = PTR_ERR(name); + if (IS_ERR(name)) + return ERR_PTR(err); + + return crypto_find_alg(name, frontend, type, mask); +} +EXPORT_SYMBOL_GPL(crypto_attr_alg2); + +int crypto_attr_u32(struct rtattr *rta, u32 *num) +{ + struct crypto_attr_u32 *nu32; + + if (!rta) + return -ENOENT; + if (RTA_PAYLOAD(rta) < sizeof(*nu32)) + return -EINVAL; + if (rta->rta_type != CRYPTOA_U32) + return -EINVAL; + + nu32 = RTA_DATA(rta); + *num = nu32->num; + + return 0; +} +EXPORT_SYMBOL_GPL(crypto_attr_u32); + +void *crypto_alloc_instance2(const char *name, 
struct crypto_alg *alg, + unsigned int head) +{ + struct crypto_instance *inst; + char *p; + int err; + + p = kzalloc(head + sizeof(*inst) + sizeof(struct crypto_spawn), + GFP_KERNEL); + if (!p) + return ERR_PTR(-ENOMEM); + + inst = (void *)(p + head); + + err = -ENAMETOOLONG; + if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME, "%s(%s)", name, + alg->cra_name) >= CRYPTO_MAX_ALG_NAME) + goto err_free_inst; + + if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s(%s)", + name, alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME) + goto err_free_inst; + + return p; + +err_free_inst: + kfree(p); + return ERR_PTR(err); +} +EXPORT_SYMBOL_GPL(crypto_alloc_instance2); + +struct crypto_instance *crypto_alloc_instance(const char *name, + struct crypto_alg *alg) +{ + struct crypto_instance *inst; + struct crypto_spawn *spawn; + int err; + + inst = crypto_alloc_instance2(name, alg, 0); + if (IS_ERR(inst)) + goto out; + + spawn = crypto_instance_ctx(inst); + err = crypto_init_spawn(spawn, alg, inst, + CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC); + + if (err) + goto err_free_inst; + + return inst; + +err_free_inst: + kfree(inst); + inst = ERR_PTR(err); + +out: + return inst; +} +EXPORT_SYMBOL_GPL(crypto_alloc_instance); + +void crypto_init_queue(struct crypto_queue *queue, unsigned int max_qlen) +{ + INIT_LIST_HEAD(&queue->list); + queue->backlog = &queue->list; + queue->qlen = 0; + queue->max_qlen = max_qlen; +} +EXPORT_SYMBOL_GPL(crypto_init_queue); + +int crypto_enqueue_request(struct crypto_queue *queue, + struct crypto_async_request *request) +{ + int err = -EINPROGRESS; + + if (unlikely(queue->qlen >= queue->max_qlen)) { + err = -EBUSY; + if (!(request->flags & CRYPTO_TFM_REQ_MAY_BACKLOG)) + goto out; + if (queue->backlog == &queue->list) + queue->backlog = &request->list; + } + + queue->qlen++; + list_add_tail(&request->list, &queue->list); + +out: + return err; +} +EXPORT_SYMBOL_GPL(crypto_enqueue_request); + +void *__crypto_dequeue_request(struct crypto_queue *queue, unsigned int offset) +{ + struct list_head *request; + + if (unlikely(!queue->qlen)) + return NULL; + + queue->qlen--; + + if (queue->backlog != &queue->list) + queue->backlog = queue->backlog->next; + + request = queue->list.next; + list_del(request); + + return (char *)list_entry(request, struct crypto_async_request, list) - + offset; +} +EXPORT_SYMBOL_GPL(__crypto_dequeue_request); + +struct crypto_async_request *crypto_dequeue_request(struct crypto_queue *queue) +{ + return __crypto_dequeue_request(queue, 0); +} +EXPORT_SYMBOL_GPL(crypto_dequeue_request); + +int crypto_tfm_in_queue(struct crypto_queue *queue, struct crypto_tfm *tfm) +{ + struct crypto_async_request *req; + + list_for_each_entry(req, &queue->list, list) { + if (req->tfm == tfm) + return 1; + } + + return 0; +} +EXPORT_SYMBOL_GPL(crypto_tfm_in_queue); + +static inline void crypto_inc_byte(u8 *a, unsigned int size) +{ + u8 *b = (a + size); + u8 c; + + for (; size; size--) { + c = *--b + 1; + *b = c; + if (c) + break; + } +} + +void crypto_inc(u8 *a, unsigned int size) +{ + __be32 *b = (__be32 *)(a + size); + u32 c; + + for (; size >= 4; size -= 4) { + c = be32_to_cpu(*--b) + 1; + *b = cpu_to_be32(c); + if (c) + return; + } + + crypto_inc_byte(a, size); +} +EXPORT_SYMBOL_GPL(crypto_inc); + +static inline void crypto_xor_byte(u8 *a, const u8 *b, unsigned int size) +{ + for (; size; size--) + *a++ ^= *b++; +} + +void crypto_xor(u8 *dst, const u8 *src, unsigned int size) +{ + u32 *a = (u32 *)dst; + u32 *b = (u32 *)src; + + for (; size >= 4; size -= 4) + *a++ ^= 
*b++; + + crypto_xor_byte((u8 *)a, (u8 *)b, size); +} +EXPORT_SYMBOL_GPL(crypto_xor); + +static int __init crypto_algapi_init(void) +{ + crypto_init_proc(); + return 0; +} + +static void __exit crypto_algapi_exit(void) +{ + crypto_exit_proc(); +} + +module_init(crypto_algapi_init); +module_exit(crypto_algapi_exit); + +MODULE_LICENSE("GPL"); +MODULE_DESCRIPTION("Cryptographic algorithms API"); diff --git a/crypto/algboss.c b/crypto/algboss.c new file mode 100644 index 00000000..791d1949 --- /dev/null +++ b/crypto/algboss.c @@ -0,0 +1,306 @@ +/* + * Create default crypto algorithm instances. + * + * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au> + * + * This program is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License as published by the Free + * Software Foundation; either version 2 of the License, or (at your option) + * any later version. + * + */ + +#include <crypto/internal/aead.h> +#include <linux/ctype.h> +#include <linux/err.h> +#include <linux/init.h> +#include <linux/kthread.h> +#include <linux/module.h> +#include <linux/notifier.h> +#include <linux/rtnetlink.h> +#include <linux/sched.h> +#include <linux/slab.h> +#include <linux/string.h> + +#include "internal.h" + +struct cryptomgr_param { + struct rtattr *tb[CRYPTO_MAX_ATTRS + 2]; + + struct { + struct rtattr attr; + struct crypto_attr_type data; + } type; + + union { + struct rtattr attr; + struct { + struct rtattr attr; + struct crypto_attr_alg data; + } alg; + struct { + struct rtattr attr; + struct crypto_attr_u32 data; + } nu32; + } attrs[CRYPTO_MAX_ATTRS]; + + char larval[CRYPTO_MAX_ALG_NAME]; + char template[CRYPTO_MAX_ALG_NAME]; + + u32 otype; + u32 omask; +}; + +struct crypto_test_param { + char driver[CRYPTO_MAX_ALG_NAME]; + char alg[CRYPTO_MAX_ALG_NAME]; + u32 type; +}; + +static int cryptomgr_probe(void *data) +{ + struct cryptomgr_param *param = data; + struct crypto_template *tmpl; + struct crypto_instance *inst; + int err; + + tmpl = crypto_lookup_template(param->template); + if (!tmpl) + goto err; + + do { + if (tmpl->create) { + err = tmpl->create(tmpl, param->tb); + continue; + } + + inst = tmpl->alloc(param->tb); + if (IS_ERR(inst)) + err = PTR_ERR(inst); + else if ((err = crypto_register_instance(tmpl, inst))) + tmpl->free(inst); + } while (err == -EAGAIN && !signal_pending(current)); + + crypto_tmpl_put(tmpl); + + if (err) + goto err; + +out: + kfree(param); + module_put_and_exit(0); + +err: + crypto_larval_error(param->larval, param->otype, param->omask); + goto out; +} + +static int cryptomgr_schedule_probe(struct crypto_larval *larval) +{ + struct task_struct *thread; + struct cryptomgr_param *param; + const char *name = larval->alg.cra_name; + const char *p; + unsigned int len; + int i; + + if (!try_module_get(THIS_MODULE)) + goto err; + + param = kzalloc(sizeof(*param), GFP_KERNEL); + if (!param) + goto err_put_module; + + for (p = name; isalnum(*p) || *p == '-' || *p == '_'; p++) + ; + + len = p - name; + if (!len || *p != '(') + goto err_free_param; + + memcpy(param->template, name, len); + + i = 0; + for (;;) { + int notnum = 0; + + name = ++p; + len = 0; + + for (; isalnum(*p) || *p == '-' || *p == '_'; p++) + notnum |= !isdigit(*p); + + if (*p == '(') { + int recursion = 0; + + for (;;) { + if (!*++p) + goto err_free_param; + if (*p == '(') + recursion++; + else if (*p == ')' && !recursion--) + break; + } + + notnum = 1; + p++; + } + + len = p - name; + if (!len) + goto err_free_param; + + if (notnum) { + 
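+ /*
+ * A non-numeric argument is passed down as a CRYPTOA_ALG attribute:
+ * e.g. for "cbc(aes)" the template name is "cbc" and the single
+ * argument "aes" ends up here, while a purely numeric argument takes
+ * the CRYPTOA_U32 branch below instead.
+ */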
param->attrs[i].alg.attr.rta_len = + sizeof(param->attrs[i].alg); + param->attrs[i].alg.attr.rta_type = CRYPTOA_ALG; + memcpy(param->attrs[i].alg.data.name, name, len); + } else { + param->attrs[i].nu32.attr.rta_len = + sizeof(param->attrs[i].nu32); + param->attrs[i].nu32.attr.rta_type = CRYPTOA_U32; + param->attrs[i].nu32.data.num = + simple_strtol(name, NULL, 0); + } + + param->tb[i + 1] = &param->attrs[i].attr; + i++; + + if (i >= CRYPTO_MAX_ATTRS) + goto err_free_param; + + if (*p == ')') + break; + + if (*p != ',') + goto err_free_param; + } + + if (!i) + goto err_free_param; + + param->tb[i + 1] = NULL; + + param->type.attr.rta_len = sizeof(param->type); + param->type.attr.rta_type = CRYPTOA_TYPE; + param->type.data.type = larval->alg.cra_flags & ~CRYPTO_ALG_TESTED; + param->type.data.mask = larval->mask & ~CRYPTO_ALG_TESTED; + param->tb[0] = &param->type.attr; + + param->otype = larval->alg.cra_flags; + param->omask = larval->mask; + + memcpy(param->larval, larval->alg.cra_name, CRYPTO_MAX_ALG_NAME); + + thread = kthread_run(cryptomgr_probe, param, "cryptomgr_probe"); + if (IS_ERR(thread)) + goto err_free_param; + + return NOTIFY_STOP; + +err_free_param: + kfree(param); +err_put_module: + module_put(THIS_MODULE); +err: + return NOTIFY_OK; +} + +static int cryptomgr_test(void *data) +{ + struct crypto_test_param *param = data; + u32 type = param->type; + int err = 0; + +#ifdef CONFIG_CRYPTO_MANAGER_DISABLE_TESTS + goto skiptest; +#endif + + if (type & CRYPTO_ALG_TESTED) + goto skiptest; + + err = alg_test(param->driver, param->alg, type, CRYPTO_ALG_TESTED); + +skiptest: + crypto_alg_tested(param->driver, err); + + kfree(param); + module_put_and_exit(0); +} + +static int cryptomgr_schedule_test(struct crypto_alg *alg) +{ + struct task_struct *thread; + struct crypto_test_param *param; + u32 type; + + if (!try_module_get(THIS_MODULE)) + goto err; + + param = kzalloc(sizeof(*param), GFP_KERNEL); + if (!param) + goto err_put_module; + + memcpy(param->driver, alg->cra_driver_name, sizeof(param->driver)); + memcpy(param->alg, alg->cra_name, sizeof(param->alg)); + type = alg->cra_flags; + + /* This piece of crap needs to disappear into per-type test hooks. */ + if ((!((type ^ CRYPTO_ALG_TYPE_BLKCIPHER) & + CRYPTO_ALG_TYPE_BLKCIPHER_MASK) && !(type & CRYPTO_ALG_GENIV) && + ((alg->cra_flags & CRYPTO_ALG_TYPE_MASK) == + CRYPTO_ALG_TYPE_BLKCIPHER ?
alg->cra_blkcipher.ivsize : + alg->cra_ablkcipher.ivsize)) || + (!((type ^ CRYPTO_ALG_TYPE_AEAD) & CRYPTO_ALG_TYPE_MASK) && + alg->cra_type == &crypto_nivaead_type && alg->cra_aead.ivsize)) + type |= CRYPTO_ALG_TESTED; + + param->type = type; + + thread = kthread_run(cryptomgr_test, param, "cryptomgr_test"); + if (IS_ERR(thread)) + goto err_free_param; + + return NOTIFY_STOP; + +err_free_param: + kfree(param); +err_put_module: + module_put(THIS_MODULE); +err: + return NOTIFY_OK; +} + +static int cryptomgr_notify(struct notifier_block *this, unsigned long msg, + void *data) +{ + switch (msg) { + case CRYPTO_MSG_ALG_REQUEST: + return cryptomgr_schedule_probe(data); + case CRYPTO_MSG_ALG_REGISTER: + return cryptomgr_schedule_test(data); + } + + return NOTIFY_DONE; +} + +static struct notifier_block cryptomgr_notifier = { + .notifier_call = cryptomgr_notify, +}; + +static int __init cryptomgr_init(void) +{ + return crypto_register_notifier(&cryptomgr_notifier); +} + +static void __exit cryptomgr_exit(void) +{ + int err = crypto_unregister_notifier(&cryptomgr_notifier); + BUG_ON(err); +} + +subsys_initcall(cryptomgr_init); +module_exit(cryptomgr_exit); + +MODULE_LICENSE("GPL"); +MODULE_DESCRIPTION("Crypto Algorithm Manager"); diff --git a/crypto/algif_hash.c b/crypto/algif_hash.c new file mode 100644 index 00000000..ef5356cd --- /dev/null +++ b/crypto/algif_hash.c @@ -0,0 +1,321 @@ +/* + * algif_hash: User-space interface for hash algorithms + * + * This file provides the user-space API for hash algorithms. + * + * Copyright (c) 2010 Herbert Xu <herbert@gondor.apana.org.au> + * + * This program is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License as published by the Free + * Software Foundation; either version 2 of the License, or (at your option) + * any later version. + * + */ + +#include <crypto/hash.h> +#include <crypto/if_alg.h> +#include <linux/init.h> +#include <linux/kernel.h> +#include <linux/mm.h> +#include <linux/module.h> +#include <linux/net.h> +#include <net/sock.h> + +struct hash_ctx { + struct af_alg_sgl sgl; + + u8 *result; + + struct af_alg_completion completion; + + unsigned int len; + bool more; + + struct ahash_request req; +}; + +static int hash_sendmsg(struct kiocb *unused, struct socket *sock, + struct msghdr *msg, size_t ignored) +{ + int limit = ALG_MAX_PAGES * PAGE_SIZE; + struct sock *sk = sock->sk; + struct alg_sock *ask = alg_sk(sk); + struct hash_ctx *ctx = ask->private; + unsigned long iovlen; + struct iovec *iov; + long copied = 0; + int err; + + if (limit > sk->sk_sndbuf) + limit = sk->sk_sndbuf; + + lock_sock(sk); + if (!ctx->more) { + err = crypto_ahash_init(&ctx->req); + if (err) + goto unlock; + } + + ctx->more = 0; + + for (iov = msg->msg_iov, iovlen = msg->msg_iovlen; iovlen > 0; + iovlen--, iov++) { + unsigned long seglen = iov->iov_len; + char __user *from = iov->iov_base; + + while (seglen) { + int len = min_t(unsigned long, seglen, limit); + int newlen; + + newlen = af_alg_make_sg(&ctx->sgl, from, len, 0); + if (newlen < 0) { + err = copied ? 
0 : newlen; + goto unlock; + } + + ahash_request_set_crypt(&ctx->req, ctx->sgl.sg, NULL, + newlen); + + err = af_alg_wait_for_completion( + crypto_ahash_update(&ctx->req), + &ctx->completion); + + af_alg_free_sg(&ctx->sgl); + + if (err) + goto unlock; + + seglen -= newlen; + from += newlen; + copied += newlen; + } + } + + err = 0; + + ctx->more = msg->msg_flags & MSG_MORE; + if (!ctx->more) { + ahash_request_set_crypt(&ctx->req, NULL, ctx->result, 0); + err = af_alg_wait_for_completion(crypto_ahash_final(&ctx->req), + &ctx->completion); + } + +unlock: + release_sock(sk); + + return err ?: copied; +} + +static ssize_t hash_sendpage(struct socket *sock, struct page *page, + int offset, size_t size, int flags) +{ + struct sock *sk = sock->sk; + struct alg_sock *ask = alg_sk(sk); + struct hash_ctx *ctx = ask->private; + int err; + + lock_sock(sk); + sg_init_table(ctx->sgl.sg, 1); + sg_set_page(ctx->sgl.sg, page, size, offset); + + ahash_request_set_crypt(&ctx->req, ctx->sgl.sg, ctx->result, size); + + if (!(flags & MSG_MORE)) { + if (ctx->more) + err = crypto_ahash_finup(&ctx->req); + else + err = crypto_ahash_digest(&ctx->req); + } else { + if (!ctx->more) { + err = crypto_ahash_init(&ctx->req); + if (err) + goto unlock; + } + + err = crypto_ahash_update(&ctx->req); + } + + err = af_alg_wait_for_completion(err, &ctx->completion); + if (err) + goto unlock; + + ctx->more = flags & MSG_MORE; + +unlock: + release_sock(sk); + + return err ?: size; +} + +static int hash_recvmsg(struct kiocb *unused, struct socket *sock, + struct msghdr *msg, size_t len, int flags) +{ + struct sock *sk = sock->sk; + struct alg_sock *ask = alg_sk(sk); + struct hash_ctx *ctx = ask->private; + unsigned ds = crypto_ahash_digestsize(crypto_ahash_reqtfm(&ctx->req)); + int err; + + if (len > ds) + len = ds; + else if (len < ds) + msg->msg_flags |= MSG_TRUNC; + + lock_sock(sk); + if (ctx->more) { + ctx->more = 0; + ahash_request_set_crypt(&ctx->req, NULL, ctx->result, 0); + err = af_alg_wait_for_completion(crypto_ahash_final(&ctx->req), + &ctx->completion); + if (err) + goto unlock; + } + + err = memcpy_toiovec(msg->msg_iov, ctx->result, len); + +unlock: + release_sock(sk); + + return err ?: len; +} + +static int hash_accept(struct socket *sock, struct socket *newsock, int flags) +{ + struct sock *sk = sock->sk; + struct alg_sock *ask = alg_sk(sk); + struct hash_ctx *ctx = ask->private; + struct ahash_request *req = &ctx->req; + char state[crypto_ahash_statesize(crypto_ahash_reqtfm(req))]; + struct sock *sk2; + struct alg_sock *ask2; + struct hash_ctx *ctx2; + int err; + + err = crypto_ahash_export(req, state); + if (err) + return err; + + err = af_alg_accept(ask->parent, newsock); + if (err) + return err; + + sk2 = newsock->sk; + ask2 = alg_sk(sk2); + ctx2 = ask2->private; + ctx2->more = 1; + + err = crypto_ahash_import(&ctx2->req, state); + if (err) { + sock_orphan(sk2); + sock_put(sk2); + } + + return err; +} + +static struct proto_ops algif_hash_ops = { + .family = PF_ALG, + + .connect = sock_no_connect, + .socketpair = sock_no_socketpair, + .getname = sock_no_getname, + .ioctl = sock_no_ioctl, + .listen = sock_no_listen, + .shutdown = sock_no_shutdown, + .getsockopt = sock_no_getsockopt, + .mmap = sock_no_mmap, + .bind = sock_no_bind, + .setsockopt = sock_no_setsockopt, + .poll = sock_no_poll, + + .release = af_alg_release, + .sendmsg = hash_sendmsg, + .sendpage = hash_sendpage, + .recvmsg = hash_recvmsg, + .accept = hash_accept, +}; + +static void *hash_bind(const char *name, u32 type, u32 mask) +{ + return 
crypto_alloc_ahash(name, type, mask); +} + +static void hash_release(void *private) +{ + crypto_free_ahash(private); +} + +static int hash_setkey(void *private, const u8 *key, unsigned int keylen) +{ + return crypto_ahash_setkey(private, key, keylen); +} + +static void hash_sock_destruct(struct sock *sk) +{ + struct alg_sock *ask = alg_sk(sk); + struct hash_ctx *ctx = ask->private; + + sock_kfree_s(sk, ctx->result, + crypto_ahash_digestsize(crypto_ahash_reqtfm(&ctx->req))); + sock_kfree_s(sk, ctx, ctx->len); + af_alg_release_parent(sk); +} + +static int hash_accept_parent(void *private, struct sock *sk) +{ + struct hash_ctx *ctx; + struct alg_sock *ask = alg_sk(sk); + unsigned len = sizeof(*ctx) + crypto_ahash_reqsize(private); + unsigned ds = crypto_ahash_digestsize(private); + + ctx = sock_kmalloc(sk, len, GFP_KERNEL); + if (!ctx) + return -ENOMEM; + + ctx->result = sock_kmalloc(sk, ds, GFP_KERNEL); + if (!ctx->result) { + sock_kfree_s(sk, ctx, len); + return -ENOMEM; + } + + memset(ctx->result, 0, ds); + + ctx->len = len; + ctx->more = 0; + af_alg_init_completion(&ctx->completion); + + ask->private = ctx; + + ahash_request_set_tfm(&ctx->req, private); + ahash_request_set_callback(&ctx->req, CRYPTO_TFM_REQ_MAY_BACKLOG, + af_alg_complete, &ctx->completion); + + sk->sk_destruct = hash_sock_destruct; + + return 0; +} + +static const struct af_alg_type algif_type_hash = { + .bind = hash_bind, + .release = hash_release, + .setkey = hash_setkey, + .accept = hash_accept_parent, + .ops = &algif_hash_ops, + .name = "hash", + .owner = THIS_MODULE +}; + +static int __init algif_hash_init(void) +{ + return af_alg_register_type(&algif_type_hash); +} + +static void __exit algif_hash_exit(void) +{ + int err = af_alg_unregister_type(&algif_type_hash); + BUG_ON(err); +} + +module_init(algif_hash_init); +module_exit(algif_hash_exit); +MODULE_LICENSE("GPL"); diff --git a/crypto/algif_skcipher.c b/crypto/algif_skcipher.c new file mode 100644 index 00000000..6a6dfc06 --- /dev/null +++ b/crypto/algif_skcipher.c @@ -0,0 +1,632 @@ +/* + * algif_skcipher: User-space interface for skcipher algorithms + * + * This file provides the user-space API for symmetric key ciphers. + * + * Copyright (c) 2010 Herbert Xu <herbert@gondor.apana.org.au> + * + * This program is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License as published by the Free + * Software Foundation; either version 2 of the License, or (at your option) + * any later version. 
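+ *
+ * In short: the caller queues plaintext or ciphertext with sendmsg()
+ * or sendpage(), selects the direction and supplies the IV through
+ * ALG_SET_OP/ALG_SET_IV ancillary data, and reads the transformed
+ * result back with recvmsg().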
+ * + */ + +#include <crypto/scatterwalk.h> +#include <crypto/skcipher.h> +#include <crypto/if_alg.h> +#include <linux/init.h> +#include <linux/list.h> +#include <linux/kernel.h> +#include <linux/mm.h> +#include <linux/module.h> +#include <linux/net.h> +#include <net/sock.h> + +struct skcipher_sg_list { + struct list_head list; + + int cur; + + struct scatterlist sg[0]; +}; + +struct skcipher_ctx { + struct list_head tsgl; + struct af_alg_sgl rsgl; + + void *iv; + + struct af_alg_completion completion; + + unsigned used; + + unsigned int len; + bool more; + bool merge; + bool enc; + + struct ablkcipher_request req; +}; + +#define MAX_SGL_ENTS ((PAGE_SIZE - sizeof(struct skcipher_sg_list)) / \ + sizeof(struct scatterlist) - 1) + +static inline int skcipher_sndbuf(struct sock *sk) +{ + struct alg_sock *ask = alg_sk(sk); + struct skcipher_ctx *ctx = ask->private; + + return max_t(int, max_t(int, sk->sk_sndbuf & PAGE_MASK, PAGE_SIZE) - + ctx->used, 0); +} + +static inline bool skcipher_writable(struct sock *sk) +{ + return PAGE_SIZE <= skcipher_sndbuf(sk); +} + +static int skcipher_alloc_sgl(struct sock *sk) +{ + struct alg_sock *ask = alg_sk(sk); + struct skcipher_ctx *ctx = ask->private; + struct skcipher_sg_list *sgl; + struct scatterlist *sg = NULL; + + sgl = list_entry(ctx->tsgl.prev, struct skcipher_sg_list, list); + if (!list_empty(&ctx->tsgl)) + sg = sgl->sg; + + if (!sg || sgl->cur >= MAX_SGL_ENTS) { + sgl = sock_kmalloc(sk, sizeof(*sgl) + + sizeof(sgl->sg[0]) * (MAX_SGL_ENTS + 1), + GFP_KERNEL); + if (!sgl) + return -ENOMEM; + + sg_init_table(sgl->sg, MAX_SGL_ENTS + 1); + sgl->cur = 0; + + if (sg) + scatterwalk_sg_chain(sg, MAX_SGL_ENTS + 1, sgl->sg); + + list_add_tail(&sgl->list, &ctx->tsgl); + } + + return 0; +} + +static void skcipher_pull_sgl(struct sock *sk, int used) +{ + struct alg_sock *ask = alg_sk(sk); + struct skcipher_ctx *ctx = ask->private; + struct skcipher_sg_list *sgl; + struct scatterlist *sg; + int i; + + while (!list_empty(&ctx->tsgl)) { + sgl = list_first_entry(&ctx->tsgl, struct skcipher_sg_list, + list); + sg = sgl->sg; + + for (i = 0; i < sgl->cur; i++) { + int plen = min_t(int, used, sg[i].length); + + if (!sg_page(sg + i)) + continue; + + sg[i].length -= plen; + sg[i].offset += plen; + + used -= plen; + ctx->used -= plen; + + if (sg[i].length) + return; + + put_page(sg_page(sg + i)); + sg_assign_page(sg + i, NULL); + } + + list_del(&sgl->list); + sock_kfree_s(sk, sgl, + sizeof(*sgl) + sizeof(sgl->sg[0]) * + (MAX_SGL_ENTS + 1)); + } + + if (!ctx->used) + ctx->merge = 0; +} + +static void skcipher_free_sgl(struct sock *sk) +{ + struct alg_sock *ask = alg_sk(sk); + struct skcipher_ctx *ctx = ask->private; + + skcipher_pull_sgl(sk, ctx->used); +} + +static int skcipher_wait_for_wmem(struct sock *sk, unsigned flags) +{ + long timeout; + DEFINE_WAIT(wait); + int err = -ERESTARTSYS; + + if (flags & MSG_DONTWAIT) + return -EAGAIN; + + set_bit(SOCK_ASYNC_NOSPACE, &sk->sk_socket->flags); + + for (;;) { + if (signal_pending(current)) + break; + prepare_to_wait(sk_sleep(sk), &wait, TASK_INTERRUPTIBLE); + timeout = MAX_SCHEDULE_TIMEOUT; + if (sk_wait_event(sk, &timeout, skcipher_writable(sk))) { + err = 0; + break; + } + } + finish_wait(sk_sleep(sk), &wait); + + return err; +} + +static void skcipher_wmem_wakeup(struct sock *sk) +{ + struct socket_wq *wq; + + if (!skcipher_writable(sk)) + return; + + rcu_read_lock(); + wq = rcu_dereference(sk->sk_wq); + if (wq_has_sleeper(wq)) + wake_up_interruptible_sync_poll(&wq->wait, POLLIN | + POLLRDNORM | + POLLRDBAND); + 
sk_wake_async(sk, SOCK_WAKE_WAITD, POLL_IN); + rcu_read_unlock(); +} + +static int skcipher_wait_for_data(struct sock *sk, unsigned flags) +{ + struct alg_sock *ask = alg_sk(sk); + struct skcipher_ctx *ctx = ask->private; + long timeout; + DEFINE_WAIT(wait); + int err = -ERESTARTSYS; + + if (flags & MSG_DONTWAIT) { + return -EAGAIN; + } + + set_bit(SOCK_ASYNC_WAITDATA, &sk->sk_socket->flags); + + for (;;) { + if (signal_pending(current)) + break; + prepare_to_wait(sk_sleep(sk), &wait, TASK_INTERRUPTIBLE); + timeout = MAX_SCHEDULE_TIMEOUT; + if (sk_wait_event(sk, &timeout, ctx->used)) { + err = 0; + break; + } + } + finish_wait(sk_sleep(sk), &wait); + + clear_bit(SOCK_ASYNC_WAITDATA, &sk->sk_socket->flags); + + return err; +} + +static void skcipher_data_wakeup(struct sock *sk) +{ + struct alg_sock *ask = alg_sk(sk); + struct skcipher_ctx *ctx = ask->private; + struct socket_wq *wq; + + if (!ctx->used) + return; + + rcu_read_lock(); + wq = rcu_dereference(sk->sk_wq); + if (wq_has_sleeper(wq)) + wake_up_interruptible_sync_poll(&wq->wait, POLLOUT | + POLLRDNORM | + POLLRDBAND); + sk_wake_async(sk, SOCK_WAKE_SPACE, POLL_OUT); + rcu_read_unlock(); +} + +static int skcipher_sendmsg(struct kiocb *unused, struct socket *sock, + struct msghdr *msg, size_t size) +{ + struct sock *sk = sock->sk; + struct alg_sock *ask = alg_sk(sk); + struct skcipher_ctx *ctx = ask->private; + struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(&ctx->req); + unsigned ivsize = crypto_ablkcipher_ivsize(tfm); + struct skcipher_sg_list *sgl; + struct af_alg_control con = {}; + long copied = 0; + bool enc = 0; + int err; + int i; + + if (msg->msg_controllen) { + err = af_alg_cmsg_send(msg, &con); + if (err) + return err; + + switch (con.op) { + case ALG_OP_ENCRYPT: + enc = 1; + break; + case ALG_OP_DECRYPT: + enc = 0; + break; + default: + return -EINVAL; + } + + if (con.iv && con.iv->ivlen != ivsize) + return -EINVAL; + } + + err = -EINVAL; + + lock_sock(sk); + if (!ctx->more && ctx->used) + goto unlock; + + if (!ctx->used) { + ctx->enc = enc; + if (con.iv) + memcpy(ctx->iv, con.iv->iv, ivsize); + } + + while (size) { + struct scatterlist *sg; + unsigned long len = size; + int plen; + + if (ctx->merge) { + sgl = list_entry(ctx->tsgl.prev, + struct skcipher_sg_list, list); + sg = sgl->sg + sgl->cur - 1; + len = min_t(unsigned long, len, + PAGE_SIZE - sg->offset - sg->length); + + err = memcpy_fromiovec(page_address(sg_page(sg)) + + sg->offset + sg->length, + msg->msg_iov, len); + if (err) + goto unlock; + + sg->length += len; + ctx->merge = (sg->offset + sg->length) & + (PAGE_SIZE - 1); + + ctx->used += len; + copied += len; + size -= len; + continue; + } + + if (!skcipher_writable(sk)) { + err = skcipher_wait_for_wmem(sk, msg->msg_flags); + if (err) + goto unlock; + } + + len = min_t(unsigned long, len, skcipher_sndbuf(sk)); + + err = skcipher_alloc_sgl(sk); + if (err) + goto unlock; + + sgl = list_entry(ctx->tsgl.prev, struct skcipher_sg_list, list); + sg = sgl->sg; + do { + i = sgl->cur; + plen = min_t(int, len, PAGE_SIZE); + + sg_assign_page(sg + i, alloc_page(GFP_KERNEL)); + err = -ENOMEM; + if (!sg_page(sg + i)) + goto unlock; + + err = memcpy_fromiovec(page_address(sg_page(sg + i)), + msg->msg_iov, plen); + if (err) { + __free_page(sg_page(sg + i)); + sg_assign_page(sg + i, NULL); + goto unlock; + } + + sg[i].length = plen; + len -= plen; + ctx->used += plen; + copied += plen; + size -= plen; + sgl->cur++; + } while (len && sgl->cur < MAX_SGL_ENTS); + + ctx->merge = plen & (PAGE_SIZE - 1); + } + + err = 0; + + 
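+ /*
+ * MSG_MORE indicates that further data will be queued before the
+ * cipher operation is performed; the actual encryption or decryption
+ * only takes place when recvmsg() is called.
+ */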
ctx->more = msg->msg_flags & MSG_MORE; + if (!ctx->more && !list_empty(&ctx->tsgl)) + sgl = list_entry(ctx->tsgl.prev, struct skcipher_sg_list, list); + +unlock: + skcipher_data_wakeup(sk); + release_sock(sk); + + return copied ?: err; +} + +static ssize_t skcipher_sendpage(struct socket *sock, struct page *page, + int offset, size_t size, int flags) +{ + struct sock *sk = sock->sk; + struct alg_sock *ask = alg_sk(sk); + struct skcipher_ctx *ctx = ask->private; + struct skcipher_sg_list *sgl; + int err = -EINVAL; + + lock_sock(sk); + if (!ctx->more && ctx->used) + goto unlock; + + if (!size) + goto done; + + if (!skcipher_writable(sk)) { + err = skcipher_wait_for_wmem(sk, flags); + if (err) + goto unlock; + } + + err = skcipher_alloc_sgl(sk); + if (err) + goto unlock; + + ctx->merge = 0; + sgl = list_entry(ctx->tsgl.prev, struct skcipher_sg_list, list); + + get_page(page); + sg_set_page(sgl->sg + sgl->cur, page, size, offset); + sgl->cur++; + ctx->used += size; + +done: + ctx->more = flags & MSG_MORE; + if (!ctx->more && !list_empty(&ctx->tsgl)) + sgl = list_entry(ctx->tsgl.prev, struct skcipher_sg_list, list); + +unlock: + skcipher_data_wakeup(sk); + release_sock(sk); + + return err ?: size; +} + +static int skcipher_recvmsg(struct kiocb *unused, struct socket *sock, + struct msghdr *msg, size_t ignored, int flags) +{ + struct sock *sk = sock->sk; + struct alg_sock *ask = alg_sk(sk); + struct skcipher_ctx *ctx = ask->private; + unsigned bs = crypto_ablkcipher_blocksize(crypto_ablkcipher_reqtfm( + &ctx->req)); + struct skcipher_sg_list *sgl; + struct scatterlist *sg; + unsigned long iovlen; + struct iovec *iov; + int err = -EAGAIN; + int used; + long copied = 0; + + lock_sock(sk); + for (iov = msg->msg_iov, iovlen = msg->msg_iovlen; iovlen > 0; + iovlen--, iov++) { + unsigned long seglen = iov->iov_len; + char __user *from = iov->iov_base; + + while (seglen) { + sgl = list_first_entry(&ctx->tsgl, + struct skcipher_sg_list, list); + sg = sgl->sg; + + while (!sg->length) + sg++; + + used = ctx->used; + if (!used) { + err = skcipher_wait_for_data(sk, flags); + if (err) + goto unlock; + } + + used = min_t(unsigned long, used, seglen); + + used = af_alg_make_sg(&ctx->rsgl, from, used, 1); + err = used; + if (err < 0) + goto unlock; + + if (ctx->more || used < ctx->used) + used -= used % bs; + + err = -EINVAL; + if (!used) + goto free; + + ablkcipher_request_set_crypt(&ctx->req, sg, + ctx->rsgl.sg, used, + ctx->iv); + + err = af_alg_wait_for_completion( + ctx->enc ? 
+ crypto_ablkcipher_encrypt(&ctx->req) : + crypto_ablkcipher_decrypt(&ctx->req), + &ctx->completion); + +free: + af_alg_free_sg(&ctx->rsgl); + + if (err) + goto unlock; + + copied += used; + from += used; + seglen -= used; + skcipher_pull_sgl(sk, used); + } + } + + err = 0; + +unlock: + skcipher_wmem_wakeup(sk); + release_sock(sk); + + return copied ?: err; +} + + +static unsigned int skcipher_poll(struct file *file, struct socket *sock, + poll_table *wait) +{ + struct sock *sk = sock->sk; + struct alg_sock *ask = alg_sk(sk); + struct skcipher_ctx *ctx = ask->private; + unsigned int mask; + + sock_poll_wait(file, sk_sleep(sk), wait); + mask = 0; + + if (ctx->used) + mask |= POLLIN | POLLRDNORM; + + if (skcipher_writable(sk)) + mask |= POLLOUT | POLLWRNORM | POLLWRBAND; + + return mask; +} + +static struct proto_ops algif_skcipher_ops = { + .family = PF_ALG, + + .connect = sock_no_connect, + .socketpair = sock_no_socketpair, + .getname = sock_no_getname, + .ioctl = sock_no_ioctl, + .listen = sock_no_listen, + .shutdown = sock_no_shutdown, + .getsockopt = sock_no_getsockopt, + .mmap = sock_no_mmap, + .bind = sock_no_bind, + .accept = sock_no_accept, + .setsockopt = sock_no_setsockopt, + + .release = af_alg_release, + .sendmsg = skcipher_sendmsg, + .sendpage = skcipher_sendpage, + .recvmsg = skcipher_recvmsg, + .poll = skcipher_poll, +}; + +static void *skcipher_bind(const char *name, u32 type, u32 mask) +{ + return crypto_alloc_ablkcipher(name, type, mask); +} + +static void skcipher_release(void *private) +{ + crypto_free_ablkcipher(private); +} + +static int skcipher_setkey(void *private, const u8 *key, unsigned int keylen) +{ + return crypto_ablkcipher_setkey(private, key, keylen); +} + +static void skcipher_sock_destruct(struct sock *sk) +{ + struct alg_sock *ask = alg_sk(sk); + struct skcipher_ctx *ctx = ask->private; + struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(&ctx->req); + + skcipher_free_sgl(sk); + sock_kfree_s(sk, ctx->iv, crypto_ablkcipher_ivsize(tfm)); + sock_kfree_s(sk, ctx, ctx->len); + af_alg_release_parent(sk); +} + +static int skcipher_accept_parent(void *private, struct sock *sk) +{ + struct skcipher_ctx *ctx; + struct alg_sock *ask = alg_sk(sk); + unsigned int len = sizeof(*ctx) + crypto_ablkcipher_reqsize(private); + + ctx = sock_kmalloc(sk, len, GFP_KERNEL); + if (!ctx) + return -ENOMEM; + + ctx->iv = sock_kmalloc(sk, crypto_ablkcipher_ivsize(private), + GFP_KERNEL); + if (!ctx->iv) { + sock_kfree_s(sk, ctx, len); + return -ENOMEM; + } + + memset(ctx->iv, 0, crypto_ablkcipher_ivsize(private)); + + INIT_LIST_HEAD(&ctx->tsgl); + ctx->len = len; + ctx->used = 0; + ctx->more = 0; + ctx->merge = 0; + ctx->enc = 0; + af_alg_init_completion(&ctx->completion); + + ask->private = ctx; + + ablkcipher_request_set_tfm(&ctx->req, private); + ablkcipher_request_set_callback(&ctx->req, CRYPTO_TFM_REQ_MAY_BACKLOG, + af_alg_complete, &ctx->completion); + + sk->sk_destruct = skcipher_sock_destruct; + + return 0; +} + +static const struct af_alg_type algif_type_skcipher = { + .bind = skcipher_bind, + .release = skcipher_release, + .setkey = skcipher_setkey, + .accept = skcipher_accept_parent, + .ops = &algif_skcipher_ops, + .name = "skcipher", + .owner = THIS_MODULE +}; + +static int __init algif_skcipher_init(void) +{ + return af_alg_register_type(&algif_type_skcipher); +} + +static void __exit algif_skcipher_exit(void) +{ + int err = af_alg_unregister_type(&algif_type_skcipher); + BUG_ON(err); +} + +module_init(algif_skcipher_init); +module_exit(algif_skcipher_exit); 
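+
+/*
+ * Minimal userspace sketch of driving this interface (illustrative
+ * only, error handling omitted):
+ *
+ *	struct sockaddr_alg sa = {
+ *		.salg_family = AF_ALG,
+ *		.salg_type   = "skcipher",
+ *		.salg_name   = "cbc(aes)",
+ *	};
+ *	int tfmfd = socket(AF_ALG, SOCK_SEQPACKET, 0);
+ *	bind(tfmfd, (struct sockaddr *)&sa, sizeof(sa));
+ *	setsockopt(tfmfd, SOL_ALG, ALG_SET_KEY, key, 16);
+ *	int opfd = accept(tfmfd, NULL, 0);
+ *
+ * followed by sendmsg() on opfd carrying ALG_SET_OP/ALG_SET_IV control
+ * data plus the plaintext, and a read() of the ciphertext.
+ */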
+MODULE_LICENSE("GPL"); diff --git a/crypto/ansi_cprng.c b/crypto/ansi_cprng.c new file mode 100644 index 00000000..6ddd99e6 --- /dev/null +++ b/crypto/ansi_cprng.c @@ -0,0 +1,496 @@ +/* + * PRNG: Pseudo Random Number Generator + * Based on NIST Recommended PRNG From ANSI X9.31 Appendix A.2.4 using + * AES 128 cipher + * + * (C) Neil Horman <nhorman@tuxdriver.com> + * + * This program is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License as published by the + * Free Software Foundation; either version 2 of the License, or (at your + * any later version. + * + * + */ + +#include <crypto/internal/rng.h> +#include <linux/err.h> +#include <linux/init.h> +#include <linux/module.h> +#include <linux/moduleparam.h> +#include <linux/string.h> + +#include "internal.h" + +#define DEFAULT_PRNG_KEY "0123456789abcdef" +#define DEFAULT_PRNG_KSZ 16 +#define DEFAULT_BLK_SZ 16 +#define DEFAULT_V_SEED "zaybxcwdveuftgsh" + +/* + * Flags for the prng_context flags field + */ + +#define PRNG_FIXED_SIZE 0x1 +#define PRNG_NEED_RESET 0x2 + +/* + * Note: DT is our counter value + * I is our intermediate value + * V is our seed vector + * See http://csrc.nist.gov/groups/STM/cavp/documents/rng/931rngext.pdf + * for implementation details + */ + + +struct prng_context { + spinlock_t prng_lock; + unsigned char rand_data[DEFAULT_BLK_SZ]; + unsigned char last_rand_data[DEFAULT_BLK_SZ]; + unsigned char DT[DEFAULT_BLK_SZ]; + unsigned char I[DEFAULT_BLK_SZ]; + unsigned char V[DEFAULT_BLK_SZ]; + u32 rand_data_valid; + struct crypto_cipher *tfm; + u32 flags; +}; + +static int dbg; + +static void hexdump(char *note, unsigned char *buf, unsigned int len) +{ + if (dbg) { + printk(KERN_CRIT "%s", note); + print_hex_dump(KERN_CONT, "", DUMP_PREFIX_OFFSET, + 16, 1, + buf, len, false); + } +} + +#define dbgprint(format, args...) 
do {\ +if (dbg)\ + printk(format, ##args);\ +} while (0) + +static void xor_vectors(unsigned char *in1, unsigned char *in2, + unsigned char *out, unsigned int size) +{ + int i; + + for (i = 0; i < size; i++) + out[i] = in1[i] ^ in2[i]; + +} +/* + * Returns DEFAULT_BLK_SZ bytes of random data per call + * returns 0 if generation succeeded, <0 if something went wrong + */ +static int _get_more_prng_bytes(struct prng_context *ctx, int cont_test) +{ + int i; + unsigned char tmp[DEFAULT_BLK_SZ]; + unsigned char *output = NULL; + + + dbgprint(KERN_CRIT "Calling _get_more_prng_bytes for context %p\n", + ctx); + + hexdump("Input DT: ", ctx->DT, DEFAULT_BLK_SZ); + hexdump("Input I: ", ctx->I, DEFAULT_BLK_SZ); + hexdump("Input V: ", ctx->V, DEFAULT_BLK_SZ); + + /* + * This algorithm is a 3 stage state machine + */ + for (i = 0; i < 3; i++) { + + switch (i) { + case 0: + /* + * Start by encrypting the counter value + * This gives us an intermediate value I + */ + memcpy(tmp, ctx->DT, DEFAULT_BLK_SZ); + output = ctx->I; + hexdump("tmp stage 0: ", tmp, DEFAULT_BLK_SZ); + break; + case 1: + + /* + * Next xor I with our secret vector V + * encrypt that result to obtain our + * pseudo random data which we output + */ + xor_vectors(ctx->I, ctx->V, tmp, DEFAULT_BLK_SZ); + hexdump("tmp stage 1: ", tmp, DEFAULT_BLK_SZ); + output = ctx->rand_data; + break; + case 2: + /* + * First check that we didn't produce the same + * random data that we did last time around through this + */ + if (!memcmp(ctx->rand_data, ctx->last_rand_data, + DEFAULT_BLK_SZ)) { + if (cont_test) { + panic("cprng %p Failed repetition check!\n", + ctx); + } + + printk(KERN_ERR + "ctx %p Failed repetition check!\n", + ctx); + + ctx->flags |= PRNG_NEED_RESET; + return -EINVAL; + } + memcpy(ctx->last_rand_data, ctx->rand_data, + DEFAULT_BLK_SZ); + + /* + * Lastly xor the random data with I + * and encrypt that to obtain a new secret vector V + */ + xor_vectors(ctx->rand_data, ctx->I, tmp, + DEFAULT_BLK_SZ); + output = ctx->V; + hexdump("tmp stage 2: ", tmp, DEFAULT_BLK_SZ); + break; + } + + + /* do the encryption */ + crypto_cipher_encrypt_one(ctx->tfm, output, tmp); + + } + + /* + * Now update our DT value + */ + for (i = DEFAULT_BLK_SZ - 1; i >= 0; i--) { + ctx->DT[i] += 1; + if (ctx->DT[i] != 0) + break; + } + + dbgprint("Returning new block for context %p\n", ctx); + ctx->rand_data_valid = 0; + + hexdump("Output DT: ", ctx->DT, DEFAULT_BLK_SZ); + hexdump("Output I: ", ctx->I, DEFAULT_BLK_SZ); + hexdump("Output V: ", ctx->V, DEFAULT_BLK_SZ); + hexdump("New Random Data: ", ctx->rand_data, DEFAULT_BLK_SZ); + + return 0; +} + +/* Our exported functions */ +static int get_prng_bytes(char *buf, size_t nbytes, struct prng_context *ctx, + int do_cont_test) +{ + unsigned char *ptr = buf; + unsigned int byte_count = (unsigned int)nbytes; + int err; + + + spin_lock_bh(&ctx->prng_lock); + + err = -EINVAL; + if (ctx->flags & PRNG_NEED_RESET) + goto done; + + /* + * If the FIXED_SIZE flag is on, only return whole blocks of + * pseudo random data + */ + err = -EINVAL; + if (ctx->flags & PRNG_FIXED_SIZE) { + if (nbytes < DEFAULT_BLK_SZ) + goto done; + byte_count = DEFAULT_BLK_SZ; + } + + err = byte_count; + + dbgprint(KERN_CRIT "getting %d random bytes for context %p\n", + byte_count, ctx); + + +remainder: + if (ctx->rand_data_valid == DEFAULT_BLK_SZ) { + if (_get_more_prng_bytes(ctx, do_cont_test) < 0) { + memset(buf, 0, nbytes); + err = -EINVAL; + goto done; + } + } + + /* + * Copy any data less than an entire block + */ + if (byte_count < 
DEFAULT_BLK_SZ) { +empty_rbuf: + for (; ctx->rand_data_valid < DEFAULT_BLK_SZ; + ctx->rand_data_valid++) { + *ptr = ctx->rand_data[ctx->rand_data_valid]; + ptr++; + byte_count--; + if (byte_count == 0) + goto done; + } + } + + /* + * Now copy whole blocks + */ + for (; byte_count >= DEFAULT_BLK_SZ; byte_count -= DEFAULT_BLK_SZ) { + if (ctx->rand_data_valid == DEFAULT_BLK_SZ) { + if (_get_more_prng_bytes(ctx, do_cont_test) < 0) { + memset(buf, 0, nbytes); + err = -EINVAL; + goto done; + } + } + if (ctx->rand_data_valid > 0) + goto empty_rbuf; + memcpy(ptr, ctx->rand_data, DEFAULT_BLK_SZ); + ctx->rand_data_valid += DEFAULT_BLK_SZ; + ptr += DEFAULT_BLK_SZ; + } + + /* + * Now go back and get any remaining partial block + */ + if (byte_count) + goto remainder; + +done: + spin_unlock_bh(&ctx->prng_lock); + dbgprint(KERN_CRIT "returning %d from get_prng_bytes in context %p\n", + err, ctx); + return err; +} + +static void free_prng_context(struct prng_context *ctx) +{ + crypto_free_cipher(ctx->tfm); +} + +static int reset_prng_context(struct prng_context *ctx, + unsigned char *key, size_t klen, + unsigned char *V, unsigned char *DT) +{ + int ret; + unsigned char *prng_key; + + spin_lock_bh(&ctx->prng_lock); + ctx->flags |= PRNG_NEED_RESET; + + prng_key = (key != NULL) ? key : (unsigned char *)DEFAULT_PRNG_KEY; + + if (!key) + klen = DEFAULT_PRNG_KSZ; + + if (V) + memcpy(ctx->V, V, DEFAULT_BLK_SZ); + else + memcpy(ctx->V, DEFAULT_V_SEED, DEFAULT_BLK_SZ); + + if (DT) + memcpy(ctx->DT, DT, DEFAULT_BLK_SZ); + else + memset(ctx->DT, 0, DEFAULT_BLK_SZ); + + memset(ctx->rand_data, 0, DEFAULT_BLK_SZ); + memset(ctx->last_rand_data, 0, DEFAULT_BLK_SZ); + + ctx->rand_data_valid = DEFAULT_BLK_SZ; + + ret = crypto_cipher_setkey(ctx->tfm, prng_key, klen); + if (ret) { + dbgprint(KERN_CRIT "PRNG: setkey() failed flags=%x\n", + crypto_cipher_get_flags(ctx->tfm)); + goto out; + } + + ret = 0; + ctx->flags &= ~PRNG_NEED_RESET; +out: + spin_unlock_bh(&ctx->prng_lock); + return ret; +} + +static int cprng_init(struct crypto_tfm *tfm) +{ + struct prng_context *ctx = crypto_tfm_ctx(tfm); + + spin_lock_init(&ctx->prng_lock); + ctx->tfm = crypto_alloc_cipher("aes", 0, 0); + if (IS_ERR(ctx->tfm)) { + dbgprint(KERN_CRIT "Failed to alloc tfm for context %p\n", + ctx); + return PTR_ERR(ctx->tfm); + } + + if (reset_prng_context(ctx, NULL, DEFAULT_PRNG_KSZ, NULL, NULL) < 0) + return -EINVAL; + + /* + * after allocation, we should always force the user to reset + * so they don't inadvertently use the insecure default values + * without specifying them intentially + */ + ctx->flags |= PRNG_NEED_RESET; + return 0; +} + +static void cprng_exit(struct crypto_tfm *tfm) +{ + free_prng_context(crypto_tfm_ctx(tfm)); +} + +static int cprng_get_random(struct crypto_rng *tfm, u8 *rdata, + unsigned int dlen) +{ + struct prng_context *prng = crypto_rng_ctx(tfm); + + return get_prng_bytes(rdata, dlen, prng, 0); +} + +/* + * This is the cprng_registered reset method the seed value is + * interpreted as the tuple { V KEY DT} + * V and KEY are required during reset, and DT is optional, detected + * as being present by testing the length of the seed + */ +static int cprng_reset(struct crypto_rng *tfm, u8 *seed, unsigned int slen) +{ + struct prng_context *prng = crypto_rng_ctx(tfm); + u8 *key = seed + DEFAULT_BLK_SZ; + u8 *dt = NULL; + + if (slen < DEFAULT_PRNG_KSZ + DEFAULT_BLK_SZ) + return -EINVAL; + + if (slen >= (2 * DEFAULT_BLK_SZ + DEFAULT_PRNG_KSZ)) + dt = key + DEFAULT_PRNG_KSZ; + + reset_prng_context(prng, key, DEFAULT_PRNG_KSZ, seed, 
dt); + + if (prng->flags & PRNG_NEED_RESET) + return -EINVAL; + return 0; +} + +static struct crypto_alg rng_alg = { + .cra_name = "stdrng", + .cra_driver_name = "ansi_cprng", + .cra_priority = 100, + .cra_flags = CRYPTO_ALG_TYPE_RNG, + .cra_ctxsize = sizeof(struct prng_context), + .cra_type = &crypto_rng_type, + .cra_module = THIS_MODULE, + .cra_list = LIST_HEAD_INIT(rng_alg.cra_list), + .cra_init = cprng_init, + .cra_exit = cprng_exit, + .cra_u = { + .rng = { + .rng_make_random = cprng_get_random, + .rng_reset = cprng_reset, + .seedsize = DEFAULT_PRNG_KSZ + 2*DEFAULT_BLK_SZ, + } + } +}; + +#ifdef CONFIG_CRYPTO_FIPS +static int fips_cprng_get_random(struct crypto_rng *tfm, u8 *rdata, + unsigned int dlen) +{ + struct prng_context *prng = crypto_rng_ctx(tfm); + + return get_prng_bytes(rdata, dlen, prng, 1); +} + +static int fips_cprng_reset(struct crypto_rng *tfm, u8 *seed, unsigned int slen) +{ + u8 rdata[DEFAULT_BLK_SZ]; + u8 *key = seed + DEFAULT_BLK_SZ; + int rc; + + struct prng_context *prng = crypto_rng_ctx(tfm); + + if (slen < DEFAULT_PRNG_KSZ + DEFAULT_BLK_SZ) + return -EINVAL; + + /* fips strictly requires seed != key */ + if (!memcmp(seed, key, DEFAULT_PRNG_KSZ)) + return -EINVAL; + + rc = cprng_reset(tfm, seed, slen); + + if (!rc) + goto out; + + /* this primes our continuity test */ + rc = get_prng_bytes(rdata, DEFAULT_BLK_SZ, prng, 0); + prng->rand_data_valid = DEFAULT_BLK_SZ; + +out: + return rc; +} + +static struct crypto_alg fips_rng_alg = { + .cra_name = "fips(ansi_cprng)", + .cra_driver_name = "fips_ansi_cprng", + .cra_priority = 300, + .cra_flags = CRYPTO_ALG_TYPE_RNG, + .cra_ctxsize = sizeof(struct prng_context), + .cra_type = &crypto_rng_type, + .cra_module = THIS_MODULE, + .cra_list = LIST_HEAD_INIT(rng_alg.cra_list), + .cra_init = cprng_init, + .cra_exit = cprng_exit, + .cra_u = { + .rng = { + .rng_make_random = fips_cprng_get_random, + .rng_reset = fips_cprng_reset, + .seedsize = DEFAULT_PRNG_KSZ + 2*DEFAULT_BLK_SZ, + } + } +}; +#endif + +/* Module initalization */ +static int __init prng_mod_init(void) +{ + int rc = 0; + + rc = crypto_register_alg(&rng_alg); +#ifdef CONFIG_CRYPTO_FIPS + if (rc) + goto out; + + rc = crypto_register_alg(&fips_rng_alg); + +out: +#endif + return rc; +} + +static void __exit prng_mod_fini(void) +{ + crypto_unregister_alg(&rng_alg); +#ifdef CONFIG_CRYPTO_FIPS + crypto_unregister_alg(&fips_rng_alg); +#endif + return; +} + +MODULE_LICENSE("GPL"); +MODULE_DESCRIPTION("Software Pseudo Random Number Generator"); +MODULE_AUTHOR("Neil Horman <nhorman@tuxdriver.com>"); +module_param(dbg, int, 0); +MODULE_PARM_DESC(dbg, "Boolean to enable debugging (0/1 == off/on)"); +module_init(prng_mod_init); +module_exit(prng_mod_fini); +MODULE_ALIAS("stdrng"); diff --git a/crypto/anubis.c b/crypto/anubis.c new file mode 100644 index 00000000..77530d57 --- /dev/null +++ b/crypto/anubis.c @@ -0,0 +1,707 @@ +/* + * Cryptographic API. + * + * Anubis Algorithm + * + * The Anubis algorithm was developed by Paulo S. L. M. Barreto and + * Vincent Rijmen. + * + * See + * + * P.S.L.M. Barreto, V. Rijmen, + * ``The Anubis block cipher,'' + * NESSIE submission, 2000. + * + * This software implements the "tweaked" version of Anubis. + * Only the S-box and (consequently) the rounds constants have been + * changed. + * + * The original authors have disclaimed all copyright interest in this + * code and thus put it in the public domain. The subsequent authors + * have put this under the GNU General Public License. 
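+ *
+ * Anubis is a 128-bit block cipher taking keys of 128 to 320 bits in
+ * 32-bit steps; with an N-word (32-bit) key the cipher performs
+ * 8 + N rounds, which is what the key schedule below sets up.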
+ * + * By Aaron Grothe ajgrothe@yahoo.com, October 28, 2004 + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 2 of the License, or + * (at your option) any later version. + * + */ + +#include <linux/init.h> +#include <linux/module.h> +#include <linux/mm.h> +#include <asm/byteorder.h> +#include <linux/crypto.h> +#include <linux/types.h> + +#define ANUBIS_MIN_KEY_SIZE 16 +#define ANUBIS_MAX_KEY_SIZE 40 +#define ANUBIS_BLOCK_SIZE 16 +#define ANUBIS_MAX_N 10 +#define ANUBIS_MAX_ROUNDS (8 + ANUBIS_MAX_N) + +struct anubis_ctx { + int key_len; // in bits + int R; + u32 E[ANUBIS_MAX_ROUNDS + 1][4]; + u32 D[ANUBIS_MAX_ROUNDS + 1][4]; +}; + +static const u32 T0[256] = { + 0xba69d2bbU, 0x54a84de5U, 0x2f5ebce2U, 0x74e8cd25U, + 0x53a651f7U, 0xd3bb6bd0U, 0xd2b96fd6U, 0x4d9a29b3U, + 0x50a05dfdU, 0xac458acfU, 0x8d070e09U, 0xbf63c6a5U, + 0x70e0dd3dU, 0x52a455f1U, 0x9a29527bU, 0x4c982db5U, + 0xeac98f46U, 0xd5b773c4U, 0x97336655U, 0xd1bf63dcU, + 0x3366ccaaU, 0x51a259fbU, 0x5bb671c7U, 0xa651a2f3U, + 0xdea15ffeU, 0x48903dadU, 0xa84d9ad7U, 0x992f5e71U, + 0xdbab4be0U, 0x3264c8acU, 0xb773e695U, 0xfce5d732U, + 0xe3dbab70U, 0x9e214263U, 0x913f7e41U, 0x9b2b567dU, + 0xe2d9af76U, 0xbb6bd6bdU, 0x4182199bU, 0x6edca579U, + 0xa557aef9U, 0xcb8b0b80U, 0x6bd6b167U, 0x95376e59U, + 0xa15fbee1U, 0xf3fbeb10U, 0xb17ffe81U, 0x0204080cU, + 0xcc851792U, 0xc49537a2U, 0x1d3a744eU, 0x14285078U, + 0xc39b2bb0U, 0x63c69157U, 0xdaa94fe6U, 0x5dba69d3U, + 0x5fbe61dfU, 0xdca557f2U, 0x7dfae913U, 0xcd871394U, + 0x7ffee11fU, 0x5ab475c1U, 0x6cd8ad75U, 0x5cb86dd5U, + 0xf7f3fb08U, 0x264c98d4U, 0xffe3db38U, 0xedc79354U, + 0xe8cd874aU, 0x9d274e69U, 0x6fdea17fU, 0x8e010203U, + 0x19326456U, 0xa05dbae7U, 0xf0fde71aU, 0x890f1e11U, + 0x0f1e3c22U, 0x070e1c12U, 0xaf4386c5U, 0xfbebcb20U, + 0x08102030U, 0x152a547eU, 0x0d1a342eU, 0x04081018U, + 0x01020406U, 0x64c88d45U, 0xdfa35bf8U, 0x76ecc529U, + 0x79f2f90bU, 0xdda753f4U, 0x3d7af48eU, 0x162c5874U, + 0x3f7efc82U, 0x376edcb2U, 0x6ddaa973U, 0x3870e090U, + 0xb96fdeb1U, 0x73e6d137U, 0xe9cf834cU, 0x356ad4beU, + 0x55aa49e3U, 0x71e2d93bU, 0x7bf6f107U, 0x8c050a0fU, + 0x72e4d531U, 0x880d1a17U, 0xf6f1ff0eU, 0x2a54a8fcU, + 0x3e7cf884U, 0x5ebc65d9U, 0x274e9cd2U, 0x468c0589U, + 0x0c183028U, 0x65ca8943U, 0x68d0bd6dU, 0x61c2995bU, + 0x03060c0aU, 0xc19f23bcU, 0x57ae41efU, 0xd6b17fceU, + 0xd9af43ecU, 0x58b07dcdU, 0xd8ad47eaU, 0x66cc8549U, + 0xd7b37bc8U, 0x3a74e89cU, 0xc88d078aU, 0x3c78f088U, + 0xfae9cf26U, 0x96316253U, 0xa753a6f5U, 0x982d5a77U, + 0xecc59752U, 0xb86ddab7U, 0xc7933ba8U, 0xae4182c3U, + 0x69d2b96bU, 0x4b9631a7U, 0xab4b96ddU, 0xa94f9ed1U, + 0x67ce814fU, 0x0a14283cU, 0x478e018fU, 0xf2f9ef16U, + 0xb577ee99U, 0x224488ccU, 0xe5d7b364U, 0xeec19f5eU, + 0xbe61c2a3U, 0x2b56acfaU, 0x811f3e21U, 0x1224486cU, + 0x831b362dU, 0x1b366c5aU, 0x0e1c3824U, 0x23468ccaU, + 0xf5f7f304U, 0x458a0983U, 0x214284c6U, 0xce811f9eU, + 0x499239abU, 0x2c58b0e8U, 0xf9efc32cU, 0xe6d1bf6eU, + 0xb671e293U, 0x2850a0f0U, 0x172e5c72U, 0x8219322bU, + 0x1a34685cU, 0x8b0b161dU, 0xfee1df3eU, 0x8a09121bU, + 0x09122436U, 0xc98f038cU, 0x87132635U, 0x4e9c25b9U, + 0xe1dfa37cU, 0x2e5cb8e4U, 0xe4d5b762U, 0xe0dda77aU, + 0xebcb8b40U, 0x903d7a47U, 0xa455aaffU, 0x1e3c7844U, + 0x85172e39U, 0x60c09d5dU, 0x00000000U, 0x254a94deU, + 0xf4f5f702U, 0xf1ffe31cU, 0x94356a5fU, 0x0b162c3aU, + 0xe7d3bb68U, 0x75eac923U, 0xefc39b58U, 0x3468d0b8U, + 0x3162c4a6U, 0xd4b577c2U, 0xd0bd67daU, 0x86112233U, + 0x7efce519U, 0xad478ec9U, 
0xfde7d334U, 0x2952a4f6U, + 0x3060c0a0U, 0x3b76ec9aU, 0x9f234665U, 0xf8edc72aU, + 0xc6913faeU, 0x13264c6aU, 0x060c1814U, 0x050a141eU, + 0xc59733a4U, 0x11224466U, 0x77eec12fU, 0x7cf8ed15U, + 0x7af4f501U, 0x78f0fd0dU, 0x366cd8b4U, 0x1c387048U, + 0x3972e496U, 0x59b279cbU, 0x18306050U, 0x56ac45e9U, + 0xb37bf68dU, 0xb07dfa87U, 0x244890d8U, 0x204080c0U, + 0xb279f28bU, 0x9239724bU, 0xa35bb6edU, 0xc09d27baU, + 0x44880d85U, 0x62c49551U, 0x10204060U, 0xb475ea9fU, + 0x84152a3fU, 0x43861197U, 0x933b764dU, 0xc2992fb6U, + 0x4a9435a1U, 0xbd67cea9U, 0x8f030605U, 0x2d5ab4eeU, + 0xbc65caafU, 0x9c254a6fU, 0x6ad4b561U, 0x40801d9dU, + 0xcf831b98U, 0xa259b2ebU, 0x801d3a27U, 0x4f9e21bfU, + 0x1f3e7c42U, 0xca890f86U, 0xaa4992dbU, 0x42841591U, +}; + +static const u32 T1[256] = { + 0x69babbd2U, 0xa854e54dU, 0x5e2fe2bcU, 0xe87425cdU, + 0xa653f751U, 0xbbd3d06bU, 0xb9d2d66fU, 0x9a4db329U, + 0xa050fd5dU, 0x45accf8aU, 0x078d090eU, 0x63bfa5c6U, + 0xe0703dddU, 0xa452f155U, 0x299a7b52U, 0x984cb52dU, + 0xc9ea468fU, 0xb7d5c473U, 0x33975566U, 0xbfd1dc63U, + 0x6633aaccU, 0xa251fb59U, 0xb65bc771U, 0x51a6f3a2U, + 0xa1defe5fU, 0x9048ad3dU, 0x4da8d79aU, 0x2f99715eU, + 0xabdbe04bU, 0x6432acc8U, 0x73b795e6U, 0xe5fc32d7U, + 0xdbe370abU, 0x219e6342U, 0x3f91417eU, 0x2b9b7d56U, + 0xd9e276afU, 0x6bbbbdd6U, 0x82419b19U, 0xdc6e79a5U, + 0x57a5f9aeU, 0x8bcb800bU, 0xd66b67b1U, 0x3795596eU, + 0x5fa1e1beU, 0xfbf310ebU, 0x7fb181feU, 0x04020c08U, + 0x85cc9217U, 0x95c4a237U, 0x3a1d4e74U, 0x28147850U, + 0x9bc3b02bU, 0xc6635791U, 0xa9dae64fU, 0xba5dd369U, + 0xbe5fdf61U, 0xa5dcf257U, 0xfa7d13e9U, 0x87cd9413U, + 0xfe7f1fe1U, 0xb45ac175U, 0xd86c75adU, 0xb85cd56dU, + 0xf3f708fbU, 0x4c26d498U, 0xe3ff38dbU, 0xc7ed5493U, + 0xcde84a87U, 0x279d694eU, 0xde6f7fa1U, 0x018e0302U, + 0x32195664U, 0x5da0e7baU, 0xfdf01ae7U, 0x0f89111eU, + 0x1e0f223cU, 0x0e07121cU, 0x43afc586U, 0xebfb20cbU, + 0x10083020U, 0x2a157e54U, 0x1a0d2e34U, 0x08041810U, + 0x02010604U, 0xc864458dU, 0xa3dff85bU, 0xec7629c5U, + 0xf2790bf9U, 0xa7ddf453U, 0x7a3d8ef4U, 0x2c167458U, + 0x7e3f82fcU, 0x6e37b2dcU, 0xda6d73a9U, 0x703890e0U, + 0x6fb9b1deU, 0xe67337d1U, 0xcfe94c83U, 0x6a35bed4U, + 0xaa55e349U, 0xe2713bd9U, 0xf67b07f1U, 0x058c0f0aU, + 0xe47231d5U, 0x0d88171aU, 0xf1f60effU, 0x542afca8U, + 0x7c3e84f8U, 0xbc5ed965U, 0x4e27d29cU, 0x8c468905U, + 0x180c2830U, 0xca654389U, 0xd0686dbdU, 0xc2615b99U, + 0x06030a0cU, 0x9fc1bc23U, 0xae57ef41U, 0xb1d6ce7fU, + 0xafd9ec43U, 0xb058cd7dU, 0xadd8ea47U, 0xcc664985U, + 0xb3d7c87bU, 0x743a9ce8U, 0x8dc88a07U, 0x783c88f0U, + 0xe9fa26cfU, 0x31965362U, 0x53a7f5a6U, 0x2d98775aU, + 0xc5ec5297U, 0x6db8b7daU, 0x93c7a83bU, 0x41aec382U, + 0xd2696bb9U, 0x964ba731U, 0x4babdd96U, 0x4fa9d19eU, + 0xce674f81U, 0x140a3c28U, 0x8e478f01U, 0xf9f216efU, + 0x77b599eeU, 0x4422cc88U, 0xd7e564b3U, 0xc1ee5e9fU, + 0x61bea3c2U, 0x562bfaacU, 0x1f81213eU, 0x24126c48U, + 0x1b832d36U, 0x361b5a6cU, 0x1c0e2438U, 0x4623ca8cU, + 0xf7f504f3U, 0x8a458309U, 0x4221c684U, 0x81ce9e1fU, + 0x9249ab39U, 0x582ce8b0U, 0xeff92cc3U, 0xd1e66ebfU, + 0x71b693e2U, 0x5028f0a0U, 0x2e17725cU, 0x19822b32U, + 0x341a5c68U, 0x0b8b1d16U, 0xe1fe3edfU, 0x098a1b12U, + 0x12093624U, 0x8fc98c03U, 0x13873526U, 0x9c4eb925U, + 0xdfe17ca3U, 0x5c2ee4b8U, 0xd5e462b7U, 0xdde07aa7U, + 0xcbeb408bU, 0x3d90477aU, 0x55a4ffaaU, 0x3c1e4478U, + 0x1785392eU, 0xc0605d9dU, 0x00000000U, 0x4a25de94U, + 0xf5f402f7U, 0xfff11ce3U, 0x35945f6aU, 0x160b3a2cU, + 0xd3e768bbU, 0xea7523c9U, 0xc3ef589bU, 0x6834b8d0U, + 0x6231a6c4U, 0xb5d4c277U, 0xbdd0da67U, 0x11863322U, + 0xfc7e19e5U, 0x47adc98eU, 0xe7fd34d3U, 0x5229f6a4U, + 0x6030a0c0U, 0x763b9aecU, 
0x239f6546U, 0xedf82ac7U, + 0x91c6ae3fU, 0x26136a4cU, 0x0c061418U, 0x0a051e14U, + 0x97c5a433U, 0x22116644U, 0xee772fc1U, 0xf87c15edU, + 0xf47a01f5U, 0xf0780dfdU, 0x6c36b4d8U, 0x381c4870U, + 0x723996e4U, 0xb259cb79U, 0x30185060U, 0xac56e945U, + 0x7bb38df6U, 0x7db087faU, 0x4824d890U, 0x4020c080U, + 0x79b28bf2U, 0x39924b72U, 0x5ba3edb6U, 0x9dc0ba27U, + 0x8844850dU, 0xc4625195U, 0x20106040U, 0x75b49feaU, + 0x15843f2aU, 0x86439711U, 0x3b934d76U, 0x99c2b62fU, + 0x944aa135U, 0x67bda9ceU, 0x038f0506U, 0x5a2deeb4U, + 0x65bcafcaU, 0x259c6f4aU, 0xd46a61b5U, 0x80409d1dU, + 0x83cf981bU, 0x59a2ebb2U, 0x1d80273aU, 0x9e4fbf21U, + 0x3e1f427cU, 0x89ca860fU, 0x49aadb92U, 0x84429115U, +}; + +static const u32 T2[256] = { + 0xd2bbba69U, 0x4de554a8U, 0xbce22f5eU, 0xcd2574e8U, + 0x51f753a6U, 0x6bd0d3bbU, 0x6fd6d2b9U, 0x29b34d9aU, + 0x5dfd50a0U, 0x8acfac45U, 0x0e098d07U, 0xc6a5bf63U, + 0xdd3d70e0U, 0x55f152a4U, 0x527b9a29U, 0x2db54c98U, + 0x8f46eac9U, 0x73c4d5b7U, 0x66559733U, 0x63dcd1bfU, + 0xccaa3366U, 0x59fb51a2U, 0x71c75bb6U, 0xa2f3a651U, + 0x5ffedea1U, 0x3dad4890U, 0x9ad7a84dU, 0x5e71992fU, + 0x4be0dbabU, 0xc8ac3264U, 0xe695b773U, 0xd732fce5U, + 0xab70e3dbU, 0x42639e21U, 0x7e41913fU, 0x567d9b2bU, + 0xaf76e2d9U, 0xd6bdbb6bU, 0x199b4182U, 0xa5796edcU, + 0xaef9a557U, 0x0b80cb8bU, 0xb1676bd6U, 0x6e599537U, + 0xbee1a15fU, 0xeb10f3fbU, 0xfe81b17fU, 0x080c0204U, + 0x1792cc85U, 0x37a2c495U, 0x744e1d3aU, 0x50781428U, + 0x2bb0c39bU, 0x915763c6U, 0x4fe6daa9U, 0x69d35dbaU, + 0x61df5fbeU, 0x57f2dca5U, 0xe9137dfaU, 0x1394cd87U, + 0xe11f7ffeU, 0x75c15ab4U, 0xad756cd8U, 0x6dd55cb8U, + 0xfb08f7f3U, 0x98d4264cU, 0xdb38ffe3U, 0x9354edc7U, + 0x874ae8cdU, 0x4e699d27U, 0xa17f6fdeU, 0x02038e01U, + 0x64561932U, 0xbae7a05dU, 0xe71af0fdU, 0x1e11890fU, + 0x3c220f1eU, 0x1c12070eU, 0x86c5af43U, 0xcb20fbebU, + 0x20300810U, 0x547e152aU, 0x342e0d1aU, 0x10180408U, + 0x04060102U, 0x8d4564c8U, 0x5bf8dfa3U, 0xc52976ecU, + 0xf90b79f2U, 0x53f4dda7U, 0xf48e3d7aU, 0x5874162cU, + 0xfc823f7eU, 0xdcb2376eU, 0xa9736ddaU, 0xe0903870U, + 0xdeb1b96fU, 0xd13773e6U, 0x834ce9cfU, 0xd4be356aU, + 0x49e355aaU, 0xd93b71e2U, 0xf1077bf6U, 0x0a0f8c05U, + 0xd53172e4U, 0x1a17880dU, 0xff0ef6f1U, 0xa8fc2a54U, + 0xf8843e7cU, 0x65d95ebcU, 0x9cd2274eU, 0x0589468cU, + 0x30280c18U, 0x894365caU, 0xbd6d68d0U, 0x995b61c2U, + 0x0c0a0306U, 0x23bcc19fU, 0x41ef57aeU, 0x7fced6b1U, + 0x43ecd9afU, 0x7dcd58b0U, 0x47ead8adU, 0x854966ccU, + 0x7bc8d7b3U, 0xe89c3a74U, 0x078ac88dU, 0xf0883c78U, + 0xcf26fae9U, 0x62539631U, 0xa6f5a753U, 0x5a77982dU, + 0x9752ecc5U, 0xdab7b86dU, 0x3ba8c793U, 0x82c3ae41U, + 0xb96b69d2U, 0x31a74b96U, 0x96ddab4bU, 0x9ed1a94fU, + 0x814f67ceU, 0x283c0a14U, 0x018f478eU, 0xef16f2f9U, + 0xee99b577U, 0x88cc2244U, 0xb364e5d7U, 0x9f5eeec1U, + 0xc2a3be61U, 0xacfa2b56U, 0x3e21811fU, 0x486c1224U, + 0x362d831bU, 0x6c5a1b36U, 0x38240e1cU, 0x8cca2346U, + 0xf304f5f7U, 0x0983458aU, 0x84c62142U, 0x1f9ece81U, + 0x39ab4992U, 0xb0e82c58U, 0xc32cf9efU, 0xbf6ee6d1U, + 0xe293b671U, 0xa0f02850U, 0x5c72172eU, 0x322b8219U, + 0x685c1a34U, 0x161d8b0bU, 0xdf3efee1U, 0x121b8a09U, + 0x24360912U, 0x038cc98fU, 0x26358713U, 0x25b94e9cU, + 0xa37ce1dfU, 0xb8e42e5cU, 0xb762e4d5U, 0xa77ae0ddU, + 0x8b40ebcbU, 0x7a47903dU, 0xaaffa455U, 0x78441e3cU, + 0x2e398517U, 0x9d5d60c0U, 0x00000000U, 0x94de254aU, + 0xf702f4f5U, 0xe31cf1ffU, 0x6a5f9435U, 0x2c3a0b16U, + 0xbb68e7d3U, 0xc92375eaU, 0x9b58efc3U, 0xd0b83468U, + 0xc4a63162U, 0x77c2d4b5U, 0x67dad0bdU, 0x22338611U, + 0xe5197efcU, 0x8ec9ad47U, 0xd334fde7U, 0xa4f62952U, + 0xc0a03060U, 0xec9a3b76U, 0x46659f23U, 0xc72af8edU, + 0x3faec691U, 0x4c6a1326U, 
0x1814060cU, 0x141e050aU, + 0x33a4c597U, 0x44661122U, 0xc12f77eeU, 0xed157cf8U, + 0xf5017af4U, 0xfd0d78f0U, 0xd8b4366cU, 0x70481c38U, + 0xe4963972U, 0x79cb59b2U, 0x60501830U, 0x45e956acU, + 0xf68db37bU, 0xfa87b07dU, 0x90d82448U, 0x80c02040U, + 0xf28bb279U, 0x724b9239U, 0xb6eda35bU, 0x27bac09dU, + 0x0d854488U, 0x955162c4U, 0x40601020U, 0xea9fb475U, + 0x2a3f8415U, 0x11974386U, 0x764d933bU, 0x2fb6c299U, + 0x35a14a94U, 0xcea9bd67U, 0x06058f03U, 0xb4ee2d5aU, + 0xcaafbc65U, 0x4a6f9c25U, 0xb5616ad4U, 0x1d9d4080U, + 0x1b98cf83U, 0xb2eba259U, 0x3a27801dU, 0x21bf4f9eU, + 0x7c421f3eU, 0x0f86ca89U, 0x92dbaa49U, 0x15914284U, +}; + +static const u32 T3[256] = { + 0xbbd269baU, 0xe54da854U, 0xe2bc5e2fU, 0x25cde874U, + 0xf751a653U, 0xd06bbbd3U, 0xd66fb9d2U, 0xb3299a4dU, + 0xfd5da050U, 0xcf8a45acU, 0x090e078dU, 0xa5c663bfU, + 0x3ddde070U, 0xf155a452U, 0x7b52299aU, 0xb52d984cU, + 0x468fc9eaU, 0xc473b7d5U, 0x55663397U, 0xdc63bfd1U, + 0xaacc6633U, 0xfb59a251U, 0xc771b65bU, 0xf3a251a6U, + 0xfe5fa1deU, 0xad3d9048U, 0xd79a4da8U, 0x715e2f99U, + 0xe04babdbU, 0xacc86432U, 0x95e673b7U, 0x32d7e5fcU, + 0x70abdbe3U, 0x6342219eU, 0x417e3f91U, 0x7d562b9bU, + 0x76afd9e2U, 0xbdd66bbbU, 0x9b198241U, 0x79a5dc6eU, + 0xf9ae57a5U, 0x800b8bcbU, 0x67b1d66bU, 0x596e3795U, + 0xe1be5fa1U, 0x10ebfbf3U, 0x81fe7fb1U, 0x0c080402U, + 0x921785ccU, 0xa23795c4U, 0x4e743a1dU, 0x78502814U, + 0xb02b9bc3U, 0x5791c663U, 0xe64fa9daU, 0xd369ba5dU, + 0xdf61be5fU, 0xf257a5dcU, 0x13e9fa7dU, 0x941387cdU, + 0x1fe1fe7fU, 0xc175b45aU, 0x75add86cU, 0xd56db85cU, + 0x08fbf3f7U, 0xd4984c26U, 0x38dbe3ffU, 0x5493c7edU, + 0x4a87cde8U, 0x694e279dU, 0x7fa1de6fU, 0x0302018eU, + 0x56643219U, 0xe7ba5da0U, 0x1ae7fdf0U, 0x111e0f89U, + 0x223c1e0fU, 0x121c0e07U, 0xc58643afU, 0x20cbebfbU, + 0x30201008U, 0x7e542a15U, 0x2e341a0dU, 0x18100804U, + 0x06040201U, 0x458dc864U, 0xf85ba3dfU, 0x29c5ec76U, + 0x0bf9f279U, 0xf453a7ddU, 0x8ef47a3dU, 0x74582c16U, + 0x82fc7e3fU, 0xb2dc6e37U, 0x73a9da6dU, 0x90e07038U, + 0xb1de6fb9U, 0x37d1e673U, 0x4c83cfe9U, 0xbed46a35U, + 0xe349aa55U, 0x3bd9e271U, 0x07f1f67bU, 0x0f0a058cU, + 0x31d5e472U, 0x171a0d88U, 0x0efff1f6U, 0xfca8542aU, + 0x84f87c3eU, 0xd965bc5eU, 0xd29c4e27U, 0x89058c46U, + 0x2830180cU, 0x4389ca65U, 0x6dbdd068U, 0x5b99c261U, + 0x0a0c0603U, 0xbc239fc1U, 0xef41ae57U, 0xce7fb1d6U, + 0xec43afd9U, 0xcd7db058U, 0xea47add8U, 0x4985cc66U, + 0xc87bb3d7U, 0x9ce8743aU, 0x8a078dc8U, 0x88f0783cU, + 0x26cfe9faU, 0x53623196U, 0xf5a653a7U, 0x775a2d98U, + 0x5297c5ecU, 0xb7da6db8U, 0xa83b93c7U, 0xc38241aeU, + 0x6bb9d269U, 0xa731964bU, 0xdd964babU, 0xd19e4fa9U, + 0x4f81ce67U, 0x3c28140aU, 0x8f018e47U, 0x16eff9f2U, + 0x99ee77b5U, 0xcc884422U, 0x64b3d7e5U, 0x5e9fc1eeU, + 0xa3c261beU, 0xfaac562bU, 0x213e1f81U, 0x6c482412U, + 0x2d361b83U, 0x5a6c361bU, 0x24381c0eU, 0xca8c4623U, + 0x04f3f7f5U, 0x83098a45U, 0xc6844221U, 0x9e1f81ceU, + 0xab399249U, 0xe8b0582cU, 0x2cc3eff9U, 0x6ebfd1e6U, + 0x93e271b6U, 0xf0a05028U, 0x725c2e17U, 0x2b321982U, + 0x5c68341aU, 0x1d160b8bU, 0x3edfe1feU, 0x1b12098aU, + 0x36241209U, 0x8c038fc9U, 0x35261387U, 0xb9259c4eU, + 0x7ca3dfe1U, 0xe4b85c2eU, 0x62b7d5e4U, 0x7aa7dde0U, + 0x408bcbebU, 0x477a3d90U, 0xffaa55a4U, 0x44783c1eU, + 0x392e1785U, 0x5d9dc060U, 0x00000000U, 0xde944a25U, + 0x02f7f5f4U, 0x1ce3fff1U, 0x5f6a3594U, 0x3a2c160bU, + 0x68bbd3e7U, 0x23c9ea75U, 0x589bc3efU, 0xb8d06834U, + 0xa6c46231U, 0xc277b5d4U, 0xda67bdd0U, 0x33221186U, + 0x19e5fc7eU, 0xc98e47adU, 0x34d3e7fdU, 0xf6a45229U, + 0xa0c06030U, 0x9aec763bU, 0x6546239fU, 0x2ac7edf8U, + 0xae3f91c6U, 0x6a4c2613U, 0x14180c06U, 0x1e140a05U, + 0xa43397c5U, 0x66442211U, 
0x2fc1ee77U, 0x15edf87cU, + 0x01f5f47aU, 0x0dfdf078U, 0xb4d86c36U, 0x4870381cU, + 0x96e47239U, 0xcb79b259U, 0x50603018U, 0xe945ac56U, + 0x8df67bb3U, 0x87fa7db0U, 0xd8904824U, 0xc0804020U, + 0x8bf279b2U, 0x4b723992U, 0xedb65ba3U, 0xba279dc0U, + 0x850d8844U, 0x5195c462U, 0x60402010U, 0x9fea75b4U, + 0x3f2a1584U, 0x97118643U, 0x4d763b93U, 0xb62f99c2U, + 0xa135944aU, 0xa9ce67bdU, 0x0506038fU, 0xeeb45a2dU, + 0xafca65bcU, 0x6f4a259cU, 0x61b5d46aU, 0x9d1d8040U, + 0x981b83cfU, 0xebb259a2U, 0x273a1d80U, 0xbf219e4fU, + 0x427c3e1fU, 0x860f89caU, 0xdb9249aaU, 0x91158442U, +}; + +static const u32 T4[256] = { + 0xbabababaU, 0x54545454U, 0x2f2f2f2fU, 0x74747474U, + 0x53535353U, 0xd3d3d3d3U, 0xd2d2d2d2U, 0x4d4d4d4dU, + 0x50505050U, 0xacacacacU, 0x8d8d8d8dU, 0xbfbfbfbfU, + 0x70707070U, 0x52525252U, 0x9a9a9a9aU, 0x4c4c4c4cU, + 0xeaeaeaeaU, 0xd5d5d5d5U, 0x97979797U, 0xd1d1d1d1U, + 0x33333333U, 0x51515151U, 0x5b5b5b5bU, 0xa6a6a6a6U, + 0xdedededeU, 0x48484848U, 0xa8a8a8a8U, 0x99999999U, + 0xdbdbdbdbU, 0x32323232U, 0xb7b7b7b7U, 0xfcfcfcfcU, + 0xe3e3e3e3U, 0x9e9e9e9eU, 0x91919191U, 0x9b9b9b9bU, + 0xe2e2e2e2U, 0xbbbbbbbbU, 0x41414141U, 0x6e6e6e6eU, + 0xa5a5a5a5U, 0xcbcbcbcbU, 0x6b6b6b6bU, 0x95959595U, + 0xa1a1a1a1U, 0xf3f3f3f3U, 0xb1b1b1b1U, 0x02020202U, + 0xccccccccU, 0xc4c4c4c4U, 0x1d1d1d1dU, 0x14141414U, + 0xc3c3c3c3U, 0x63636363U, 0xdadadadaU, 0x5d5d5d5dU, + 0x5f5f5f5fU, 0xdcdcdcdcU, 0x7d7d7d7dU, 0xcdcdcdcdU, + 0x7f7f7f7fU, 0x5a5a5a5aU, 0x6c6c6c6cU, 0x5c5c5c5cU, + 0xf7f7f7f7U, 0x26262626U, 0xffffffffU, 0xededededU, + 0xe8e8e8e8U, 0x9d9d9d9dU, 0x6f6f6f6fU, 0x8e8e8e8eU, + 0x19191919U, 0xa0a0a0a0U, 0xf0f0f0f0U, 0x89898989U, + 0x0f0f0f0fU, 0x07070707U, 0xafafafafU, 0xfbfbfbfbU, + 0x08080808U, 0x15151515U, 0x0d0d0d0dU, 0x04040404U, + 0x01010101U, 0x64646464U, 0xdfdfdfdfU, 0x76767676U, + 0x79797979U, 0xddddddddU, 0x3d3d3d3dU, 0x16161616U, + 0x3f3f3f3fU, 0x37373737U, 0x6d6d6d6dU, 0x38383838U, + 0xb9b9b9b9U, 0x73737373U, 0xe9e9e9e9U, 0x35353535U, + 0x55555555U, 0x71717171U, 0x7b7b7b7bU, 0x8c8c8c8cU, + 0x72727272U, 0x88888888U, 0xf6f6f6f6U, 0x2a2a2a2aU, + 0x3e3e3e3eU, 0x5e5e5e5eU, 0x27272727U, 0x46464646U, + 0x0c0c0c0cU, 0x65656565U, 0x68686868U, 0x61616161U, + 0x03030303U, 0xc1c1c1c1U, 0x57575757U, 0xd6d6d6d6U, + 0xd9d9d9d9U, 0x58585858U, 0xd8d8d8d8U, 0x66666666U, + 0xd7d7d7d7U, 0x3a3a3a3aU, 0xc8c8c8c8U, 0x3c3c3c3cU, + 0xfafafafaU, 0x96969696U, 0xa7a7a7a7U, 0x98989898U, + 0xececececU, 0xb8b8b8b8U, 0xc7c7c7c7U, 0xaeaeaeaeU, + 0x69696969U, 0x4b4b4b4bU, 0xababababU, 0xa9a9a9a9U, + 0x67676767U, 0x0a0a0a0aU, 0x47474747U, 0xf2f2f2f2U, + 0xb5b5b5b5U, 0x22222222U, 0xe5e5e5e5U, 0xeeeeeeeeU, + 0xbebebebeU, 0x2b2b2b2bU, 0x81818181U, 0x12121212U, + 0x83838383U, 0x1b1b1b1bU, 0x0e0e0e0eU, 0x23232323U, + 0xf5f5f5f5U, 0x45454545U, 0x21212121U, 0xcecececeU, + 0x49494949U, 0x2c2c2c2cU, 0xf9f9f9f9U, 0xe6e6e6e6U, + 0xb6b6b6b6U, 0x28282828U, 0x17171717U, 0x82828282U, + 0x1a1a1a1aU, 0x8b8b8b8bU, 0xfefefefeU, 0x8a8a8a8aU, + 0x09090909U, 0xc9c9c9c9U, 0x87878787U, 0x4e4e4e4eU, + 0xe1e1e1e1U, 0x2e2e2e2eU, 0xe4e4e4e4U, 0xe0e0e0e0U, + 0xebebebebU, 0x90909090U, 0xa4a4a4a4U, 0x1e1e1e1eU, + 0x85858585U, 0x60606060U, 0x00000000U, 0x25252525U, + 0xf4f4f4f4U, 0xf1f1f1f1U, 0x94949494U, 0x0b0b0b0bU, + 0xe7e7e7e7U, 0x75757575U, 0xefefefefU, 0x34343434U, + 0x31313131U, 0xd4d4d4d4U, 0xd0d0d0d0U, 0x86868686U, + 0x7e7e7e7eU, 0xadadadadU, 0xfdfdfdfdU, 0x29292929U, + 0x30303030U, 0x3b3b3b3bU, 0x9f9f9f9fU, 0xf8f8f8f8U, + 0xc6c6c6c6U, 0x13131313U, 0x06060606U, 0x05050505U, + 0xc5c5c5c5U, 0x11111111U, 0x77777777U, 0x7c7c7c7cU, + 0x7a7a7a7aU, 0x78787878U, 
0x36363636U, 0x1c1c1c1cU, + 0x39393939U, 0x59595959U, 0x18181818U, 0x56565656U, + 0xb3b3b3b3U, 0xb0b0b0b0U, 0x24242424U, 0x20202020U, + 0xb2b2b2b2U, 0x92929292U, 0xa3a3a3a3U, 0xc0c0c0c0U, + 0x44444444U, 0x62626262U, 0x10101010U, 0xb4b4b4b4U, + 0x84848484U, 0x43434343U, 0x93939393U, 0xc2c2c2c2U, + 0x4a4a4a4aU, 0xbdbdbdbdU, 0x8f8f8f8fU, 0x2d2d2d2dU, + 0xbcbcbcbcU, 0x9c9c9c9cU, 0x6a6a6a6aU, 0x40404040U, + 0xcfcfcfcfU, 0xa2a2a2a2U, 0x80808080U, 0x4f4f4f4fU, + 0x1f1f1f1fU, 0xcacacacaU, 0xaaaaaaaaU, 0x42424242U, +}; + +static const u32 T5[256] = { + 0x00000000U, 0x01020608U, 0x02040c10U, 0x03060a18U, + 0x04081820U, 0x050a1e28U, 0x060c1430U, 0x070e1238U, + 0x08103040U, 0x09123648U, 0x0a143c50U, 0x0b163a58U, + 0x0c182860U, 0x0d1a2e68U, 0x0e1c2470U, 0x0f1e2278U, + 0x10206080U, 0x11226688U, 0x12246c90U, 0x13266a98U, + 0x142878a0U, 0x152a7ea8U, 0x162c74b0U, 0x172e72b8U, + 0x183050c0U, 0x193256c8U, 0x1a345cd0U, 0x1b365ad8U, + 0x1c3848e0U, 0x1d3a4ee8U, 0x1e3c44f0U, 0x1f3e42f8U, + 0x2040c01dU, 0x2142c615U, 0x2244cc0dU, 0x2346ca05U, + 0x2448d83dU, 0x254ade35U, 0x264cd42dU, 0x274ed225U, + 0x2850f05dU, 0x2952f655U, 0x2a54fc4dU, 0x2b56fa45U, + 0x2c58e87dU, 0x2d5aee75U, 0x2e5ce46dU, 0x2f5ee265U, + 0x3060a09dU, 0x3162a695U, 0x3264ac8dU, 0x3366aa85U, + 0x3468b8bdU, 0x356abeb5U, 0x366cb4adU, 0x376eb2a5U, + 0x387090ddU, 0x397296d5U, 0x3a749ccdU, 0x3b769ac5U, + 0x3c7888fdU, 0x3d7a8ef5U, 0x3e7c84edU, 0x3f7e82e5U, + 0x40809d3aU, 0x41829b32U, 0x4284912aU, 0x43869722U, + 0x4488851aU, 0x458a8312U, 0x468c890aU, 0x478e8f02U, + 0x4890ad7aU, 0x4992ab72U, 0x4a94a16aU, 0x4b96a762U, + 0x4c98b55aU, 0x4d9ab352U, 0x4e9cb94aU, 0x4f9ebf42U, + 0x50a0fdbaU, 0x51a2fbb2U, 0x52a4f1aaU, 0x53a6f7a2U, + 0x54a8e59aU, 0x55aae392U, 0x56ace98aU, 0x57aeef82U, + 0x58b0cdfaU, 0x59b2cbf2U, 0x5ab4c1eaU, 0x5bb6c7e2U, + 0x5cb8d5daU, 0x5dbad3d2U, 0x5ebcd9caU, 0x5fbedfc2U, + 0x60c05d27U, 0x61c25b2fU, 0x62c45137U, 0x63c6573fU, + 0x64c84507U, 0x65ca430fU, 0x66cc4917U, 0x67ce4f1fU, + 0x68d06d67U, 0x69d26b6fU, 0x6ad46177U, 0x6bd6677fU, + 0x6cd87547U, 0x6dda734fU, 0x6edc7957U, 0x6fde7f5fU, + 0x70e03da7U, 0x71e23bafU, 0x72e431b7U, 0x73e637bfU, + 0x74e82587U, 0x75ea238fU, 0x76ec2997U, 0x77ee2f9fU, + 0x78f00de7U, 0x79f20befU, 0x7af401f7U, 0x7bf607ffU, + 0x7cf815c7U, 0x7dfa13cfU, 0x7efc19d7U, 0x7ffe1fdfU, + 0x801d2774U, 0x811f217cU, 0x82192b64U, 0x831b2d6cU, + 0x84153f54U, 0x8517395cU, 0x86113344U, 0x8713354cU, + 0x880d1734U, 0x890f113cU, 0x8a091b24U, 0x8b0b1d2cU, + 0x8c050f14U, 0x8d07091cU, 0x8e010304U, 0x8f03050cU, + 0x903d47f4U, 0x913f41fcU, 0x92394be4U, 0x933b4decU, + 0x94355fd4U, 0x953759dcU, 0x963153c4U, 0x973355ccU, + 0x982d77b4U, 0x992f71bcU, 0x9a297ba4U, 0x9b2b7dacU, + 0x9c256f94U, 0x9d27699cU, 0x9e216384U, 0x9f23658cU, + 0xa05de769U, 0xa15fe161U, 0xa259eb79U, 0xa35bed71U, + 0xa455ff49U, 0xa557f941U, 0xa651f359U, 0xa753f551U, + 0xa84dd729U, 0xa94fd121U, 0xaa49db39U, 0xab4bdd31U, + 0xac45cf09U, 0xad47c901U, 0xae41c319U, 0xaf43c511U, + 0xb07d87e9U, 0xb17f81e1U, 0xb2798bf9U, 0xb37b8df1U, + 0xb4759fc9U, 0xb57799c1U, 0xb67193d9U, 0xb77395d1U, + 0xb86db7a9U, 0xb96fb1a1U, 0xba69bbb9U, 0xbb6bbdb1U, + 0xbc65af89U, 0xbd67a981U, 0xbe61a399U, 0xbf63a591U, + 0xc09dba4eU, 0xc19fbc46U, 0xc299b65eU, 0xc39bb056U, + 0xc495a26eU, 0xc597a466U, 0xc691ae7eU, 0xc793a876U, + 0xc88d8a0eU, 0xc98f8c06U, 0xca89861eU, 0xcb8b8016U, + 0xcc85922eU, 0xcd879426U, 0xce819e3eU, 0xcf839836U, + 0xd0bddaceU, 0xd1bfdcc6U, 0xd2b9d6deU, 0xd3bbd0d6U, + 0xd4b5c2eeU, 0xd5b7c4e6U, 0xd6b1cefeU, 0xd7b3c8f6U, + 0xd8adea8eU, 0xd9afec86U, 0xdaa9e69eU, 0xdbabe096U, + 0xdca5f2aeU, 0xdda7f4a6U, 
0xdea1febeU, 0xdfa3f8b6U, + 0xe0dd7a53U, 0xe1df7c5bU, 0xe2d97643U, 0xe3db704bU, + 0xe4d56273U, 0xe5d7647bU, 0xe6d16e63U, 0xe7d3686bU, + 0xe8cd4a13U, 0xe9cf4c1bU, 0xeac94603U, 0xebcb400bU, + 0xecc55233U, 0xedc7543bU, 0xeec15e23U, 0xefc3582bU, + 0xf0fd1ad3U, 0xf1ff1cdbU, 0xf2f916c3U, 0xf3fb10cbU, + 0xf4f502f3U, 0xf5f704fbU, 0xf6f10ee3U, 0xf7f308ebU, + 0xf8ed2a93U, 0xf9ef2c9bU, 0xfae92683U, 0xfbeb208bU, + 0xfce532b3U, 0xfde734bbU, 0xfee13ea3U, 0xffe338abU, +}; + +static const u32 rc[] = { + 0xba542f74U, 0x53d3d24dU, 0x50ac8dbfU, 0x70529a4cU, + 0xead597d1U, 0x33515ba6U, 0xde48a899U, 0xdb32b7fcU, + 0xe39e919bU, 0xe2bb416eU, 0xa5cb6b95U, 0xa1f3b102U, + 0xccc41d14U, 0xc363da5dU, 0x5fdc7dcdU, 0x7f5a6c5cU, + 0xf726ffedU, 0xe89d6f8eU, 0x19a0f089U, +}; + +static int anubis_setkey(struct crypto_tfm *tfm, const u8 *in_key, + unsigned int key_len) +{ + struct anubis_ctx *ctx = crypto_tfm_ctx(tfm); + const __be32 *key = (const __be32 *)in_key; + u32 *flags = &tfm->crt_flags; + int N, R, i, r; + u32 kappa[ANUBIS_MAX_N]; + u32 inter[ANUBIS_MAX_N]; + + switch (key_len) { + case 16: case 20: case 24: case 28: + case 32: case 36: case 40: + break; + default: + *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN; + return -EINVAL; + } + + ctx->key_len = key_len * 8; + N = ctx->key_len >> 5; + ctx->R = R = 8 + N; + + /* * map cipher key to initial key state (mu): */ + for (i = 0; i < N; i++) + kappa[i] = be32_to_cpu(key[i]); + + /* + * generate R + 1 round keys: + */ + for (r = 0; r <= R; r++) { + u32 K0, K1, K2, K3; + /* + * generate r-th round key K^r: + */ + K0 = T4[(kappa[N - 1] >> 24) ]; + K1 = T4[(kappa[N - 1] >> 16) & 0xff]; + K2 = T4[(kappa[N - 1] >> 8) & 0xff]; + K3 = T4[(kappa[N - 1] ) & 0xff]; + for (i = N - 2; i >= 0; i--) { + K0 = T4[(kappa[i] >> 24) ] ^ + (T5[(K0 >> 24) ] & 0xff000000U) ^ + (T5[(K0 >> 16) & 0xff] & 0x00ff0000U) ^ + (T5[(K0 >> 8) & 0xff] & 0x0000ff00U) ^ + (T5[(K0 ) & 0xff] & 0x000000ffU); + K1 = T4[(kappa[i] >> 16) & 0xff] ^ + (T5[(K1 >> 24) ] & 0xff000000U) ^ + (T5[(K1 >> 16) & 0xff] & 0x00ff0000U) ^ + (T5[(K1 >> 8) & 0xff] & 0x0000ff00U) ^ + (T5[(K1 ) & 0xff] & 0x000000ffU); + K2 = T4[(kappa[i] >> 8) & 0xff] ^ + (T5[(K2 >> 24) ] & 0xff000000U) ^ + (T5[(K2 >> 16) & 0xff] & 0x00ff0000U) ^ + (T5[(K2 >> 8) & 0xff] & 0x0000ff00U) ^ + (T5[(K2 ) & 0xff] & 0x000000ffU); + K3 = T4[(kappa[i] ) & 0xff] ^ + (T5[(K3 >> 24) ] & 0xff000000U) ^ + (T5[(K3 >> 16) & 0xff] & 0x00ff0000U) ^ + (T5[(K3 >> 8) & 0xff] & 0x0000ff00U) ^ + (T5[(K3 ) & 0xff] & 0x000000ffU); + } + + ctx->E[r][0] = K0; + ctx->E[r][1] = K1; + ctx->E[r][2] = K2; + ctx->E[r][3] = K3; + + /* + * compute kappa^{r+1} from kappa^r: + */ + if (r == R) + break; + for (i = 0; i < N; i++) { + int j = i; + inter[i] = T0[(kappa[j--] >> 24) ]; + if (j < 0) + j = N - 1; + inter[i] ^= T1[(kappa[j--] >> 16) & 0xff]; + if (j < 0) + j = N - 1; + inter[i] ^= T2[(kappa[j--] >> 8) & 0xff]; + if (j < 0) + j = N - 1; + inter[i] ^= T3[(kappa[j ] ) & 0xff]; + } + kappa[0] = inter[0] ^ rc[r]; + for (i = 1; i < N; i++) + kappa[i] = inter[i]; + } + + /* + * generate inverse key schedule: K'^0 = K^R, K'^R = + * K^0, K'^r = theta(K^{R-r}): + */ + for (i = 0; i < 4; i++) { + ctx->D[0][i] = ctx->E[R][i]; + ctx->D[R][i] = ctx->E[0][i]; + } + for (r = 1; r < R; r++) { + for (i = 0; i < 4; i++) { + u32 v = ctx->E[R - r][i]; + ctx->D[r][i] = + T0[T4[(v >> 24) ] & 0xff] ^ + T1[T4[(v >> 16) & 0xff] & 0xff] ^ + T2[T4[(v >> 8) & 0xff] & 0xff] ^ + T3[T4[(v ) & 0xff] & 0xff]; + } + } + + return 0; +} + +static void anubis_crypt(u32 roundKey[ANUBIS_MAX_ROUNDS + 1][4], + u8 
*ciphertext, const u8 *plaintext, const int R) +{ + const __be32 *src = (const __be32 *)plaintext; + __be32 *dst = (__be32 *)ciphertext; + int i, r; + u32 state[4]; + u32 inter[4]; + + /* + * map plaintext block to cipher state (mu) + * and add initial round key (sigma[K^0]): + */ + for (i = 0; i < 4; i++) + state[i] = be32_to_cpu(src[i]) ^ roundKey[0][i]; + + /* + * R - 1 full rounds: + */ + + for (r = 1; r < R; r++) { + inter[0] = + T0[(state[0] >> 24) ] ^ + T1[(state[1] >> 24) ] ^ + T2[(state[2] >> 24) ] ^ + T3[(state[3] >> 24) ] ^ + roundKey[r][0]; + inter[1] = + T0[(state[0] >> 16) & 0xff] ^ + T1[(state[1] >> 16) & 0xff] ^ + T2[(state[2] >> 16) & 0xff] ^ + T3[(state[3] >> 16) & 0xff] ^ + roundKey[r][1]; + inter[2] = + T0[(state[0] >> 8) & 0xff] ^ + T1[(state[1] >> 8) & 0xff] ^ + T2[(state[2] >> 8) & 0xff] ^ + T3[(state[3] >> 8) & 0xff] ^ + roundKey[r][2]; + inter[3] = + T0[(state[0] ) & 0xff] ^ + T1[(state[1] ) & 0xff] ^ + T2[(state[2] ) & 0xff] ^ + T3[(state[3] ) & 0xff] ^ + roundKey[r][3]; + state[0] = inter[0]; + state[1] = inter[1]; + state[2] = inter[2]; + state[3] = inter[3]; + } + + /* + * last round: + */ + + inter[0] = + (T0[(state[0] >> 24) ] & 0xff000000U) ^ + (T1[(state[1] >> 24) ] & 0x00ff0000U) ^ + (T2[(state[2] >> 24) ] & 0x0000ff00U) ^ + (T3[(state[3] >> 24) ] & 0x000000ffU) ^ + roundKey[R][0]; + inter[1] = + (T0[(state[0] >> 16) & 0xff] & 0xff000000U) ^ + (T1[(state[1] >> 16) & 0xff] & 0x00ff0000U) ^ + (T2[(state[2] >> 16) & 0xff] & 0x0000ff00U) ^ + (T3[(state[3] >> 16) & 0xff] & 0x000000ffU) ^ + roundKey[R][1]; + inter[2] = + (T0[(state[0] >> 8) & 0xff] & 0xff000000U) ^ + (T1[(state[1] >> 8) & 0xff] & 0x00ff0000U) ^ + (T2[(state[2] >> 8) & 0xff] & 0x0000ff00U) ^ + (T3[(state[3] >> 8) & 0xff] & 0x000000ffU) ^ + roundKey[R][2]; + inter[3] = + (T0[(state[0] ) & 0xff] & 0xff000000U) ^ + (T1[(state[1] ) & 0xff] & 0x00ff0000U) ^ + (T2[(state[2] ) & 0xff] & 0x0000ff00U) ^ + (T3[(state[3] ) & 0xff] & 0x000000ffU) ^ + roundKey[R][3]; + + /* + * map cipher state to ciphertext block (mu^{-1}): + */ + + for (i = 0; i < 4; i++) + dst[i] = cpu_to_be32(inter[i]); +} + +static void anubis_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) +{ + struct anubis_ctx *ctx = crypto_tfm_ctx(tfm); + anubis_crypt(ctx->E, dst, src, ctx->R); +} + +static void anubis_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) +{ + struct anubis_ctx *ctx = crypto_tfm_ctx(tfm); + anubis_crypt(ctx->D, dst, src, ctx->R); +} + +static struct crypto_alg anubis_alg = { + .cra_name = "anubis", + .cra_flags = CRYPTO_ALG_TYPE_CIPHER, + .cra_blocksize = ANUBIS_BLOCK_SIZE, + .cra_ctxsize = sizeof (struct anubis_ctx), + .cra_alignmask = 3, + .cra_module = THIS_MODULE, + .cra_list = LIST_HEAD_INIT(anubis_alg.cra_list), + .cra_u = { .cipher = { + .cia_min_keysize = ANUBIS_MIN_KEY_SIZE, + .cia_max_keysize = ANUBIS_MAX_KEY_SIZE, + .cia_setkey = anubis_setkey, + .cia_encrypt = anubis_encrypt, + .cia_decrypt = anubis_decrypt } } +}; + +static int __init anubis_mod_init(void) +{ + int ret = 0; + + ret = crypto_register_alg(&anubis_alg); + return ret; +} + +static void __exit anubis_mod_fini(void) +{ + crypto_unregister_alg(&anubis_alg); +} + +module_init(anubis_mod_init); +module_exit(anubis_mod_fini); + +MODULE_LICENSE("GPL"); +MODULE_DESCRIPTION("Anubis Cryptographic Algorithm"); diff --git a/crypto/api.c b/crypto/api.c new file mode 100644 index 00000000..033a7147 --- /dev/null +++ b/crypto/api.c @@ -0,0 +1,606 @@ +/* + * Scatterlist Cryptographic API. 
+ * + * Copyright (c) 2002 James Morris <jmorris@intercode.com.au> + * Copyright (c) 2002 David S. Miller (davem@redhat.com) + * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au> + * + * Portions derived from Cryptoapi, by Alexander Kjeldaas <astor@fast.no> + * and Nettle, by Niels Möller. + * + * This program is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License as published by the Free + * Software Foundation; either version 2 of the License, or (at your option) + * any later version. + * + */ + +#include <linux/err.h> +#include <linux/errno.h> +#include <linux/kernel.h> +#include <linux/kmod.h> +#include <linux/module.h> +#include <linux/param.h> +#include <linux/sched.h> +#include <linux/slab.h> +#include <linux/string.h> +#include "internal.h" + +LIST_HEAD(crypto_alg_list); +EXPORT_SYMBOL_GPL(crypto_alg_list); +DECLARE_RWSEM(crypto_alg_sem); +EXPORT_SYMBOL_GPL(crypto_alg_sem); + +BLOCKING_NOTIFIER_HEAD(crypto_chain); +EXPORT_SYMBOL_GPL(crypto_chain); + +static inline struct crypto_alg *crypto_alg_get(struct crypto_alg *alg) +{ + atomic_inc(&alg->cra_refcnt); + return alg; +} + +struct crypto_alg *crypto_mod_get(struct crypto_alg *alg) +{ + return try_module_get(alg->cra_module) ? crypto_alg_get(alg) : NULL; +} +EXPORT_SYMBOL_GPL(crypto_mod_get); + +void crypto_mod_put(struct crypto_alg *alg) +{ + struct module *module = alg->cra_module; + + crypto_alg_put(alg); + module_put(module); +} +EXPORT_SYMBOL_GPL(crypto_mod_put); + +static inline int crypto_is_test_larval(struct crypto_larval *larval) +{ + return larval->alg.cra_driver_name[0]; +} + +static struct crypto_alg *__crypto_alg_lookup(const char *name, u32 type, + u32 mask) +{ + struct crypto_alg *q, *alg = NULL; + int best = -2; + + list_for_each_entry(q, &crypto_alg_list, cra_list) { + int exact, fuzzy; + + if (crypto_is_moribund(q)) + continue; + + if ((q->cra_flags ^ type) & mask) + continue; + + if (crypto_is_larval(q) && + !crypto_is_test_larval((struct crypto_larval *)q) && + ((struct crypto_larval *)q)->mask != mask) + continue; + + exact = !strcmp(q->cra_driver_name, name); + fuzzy = !strcmp(q->cra_name, name); + if (!exact && !(fuzzy && q->cra_priority > best)) + continue; + + if (unlikely(!crypto_mod_get(q))) + continue; + + best = q->cra_priority; + if (alg) + crypto_mod_put(alg); + alg = q; + + if (exact) + break; + } + + return alg; +} + +static void crypto_larval_destroy(struct crypto_alg *alg) +{ + struct crypto_larval *larval = (void *)alg; + + BUG_ON(!crypto_is_larval(alg)); + if (larval->adult) + crypto_mod_put(larval->adult); + kfree(larval); +} + +struct crypto_larval *crypto_larval_alloc(const char *name, u32 type, u32 mask) +{ + struct crypto_larval *larval; + + larval = kzalloc(sizeof(*larval), GFP_KERNEL); + if (!larval) + return ERR_PTR(-ENOMEM); + + larval->mask = mask; + larval->alg.cra_flags = CRYPTO_ALG_LARVAL | type; + larval->alg.cra_priority = -1; + larval->alg.cra_destroy = crypto_larval_destroy; + + strlcpy(larval->alg.cra_name, name, CRYPTO_MAX_ALG_NAME); + init_completion(&larval->completion); + + return larval; +} +EXPORT_SYMBOL_GPL(crypto_larval_alloc); + +static struct crypto_alg *crypto_larval_add(const char *name, u32 type, + u32 mask) +{ + struct crypto_alg *alg; + struct crypto_larval *larval; + + larval = crypto_larval_alloc(name, type, mask); + if (IS_ERR(larval)) + return ERR_CAST(larval); + + atomic_set(&larval->alg.cra_refcnt, 2); + + down_write(&crypto_alg_sem); + alg = __crypto_alg_lookup(name, type, mask); + if 
(!alg) { + alg = &larval->alg; + list_add(&alg->cra_list, &crypto_alg_list); + } + up_write(&crypto_alg_sem); + + if (alg != &larval->alg) + kfree(larval); + + return alg; +} + +void crypto_larval_kill(struct crypto_alg *alg) +{ + struct crypto_larval *larval = (void *)alg; + + down_write(&crypto_alg_sem); + list_del(&alg->cra_list); + up_write(&crypto_alg_sem); + complete_all(&larval->completion); + crypto_alg_put(alg); +} +EXPORT_SYMBOL_GPL(crypto_larval_kill); + +static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg) +{ + struct crypto_larval *larval = (void *)alg; + long timeout; + + timeout = wait_for_completion_interruptible_timeout( + &larval->completion, 60 * HZ); + + alg = larval->adult; + if (timeout < 0) + alg = ERR_PTR(-EINTR); + else if (!timeout) + alg = ERR_PTR(-ETIMEDOUT); + else if (!alg) + alg = ERR_PTR(-ENOENT); + else if (crypto_is_test_larval(larval) && + !(alg->cra_flags & CRYPTO_ALG_TESTED)) + alg = ERR_PTR(-EAGAIN); + else if (!crypto_mod_get(alg)) + alg = ERR_PTR(-EAGAIN); + crypto_mod_put(&larval->alg); + + return alg; +} + +struct crypto_alg *crypto_alg_lookup(const char *name, u32 type, u32 mask) +{ + struct crypto_alg *alg; + + down_read(&crypto_alg_sem); + alg = __crypto_alg_lookup(name, type, mask); + up_read(&crypto_alg_sem); + + return alg; +} +EXPORT_SYMBOL_GPL(crypto_alg_lookup); + +struct crypto_alg *crypto_larval_lookup(const char *name, u32 type, u32 mask) +{ + struct crypto_alg *alg; + + if (!name) + return ERR_PTR(-ENOENT); + + mask &= ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD); + type &= mask; + + alg = crypto_alg_lookup(name, type, mask); + if (!alg) { + request_module("%s", name); + + if (!((type ^ CRYPTO_ALG_NEED_FALLBACK) & mask & + CRYPTO_ALG_NEED_FALLBACK)) + request_module("%s-all", name); + + alg = crypto_alg_lookup(name, type, mask); + } + + if (alg) + return crypto_is_larval(alg) ? 
crypto_larval_wait(alg) : alg; + + return crypto_larval_add(name, type, mask); +} +EXPORT_SYMBOL_GPL(crypto_larval_lookup); + +int crypto_probing_notify(unsigned long val, void *v) +{ + int ok; + + ok = blocking_notifier_call_chain(&crypto_chain, val, v); + if (ok == NOTIFY_DONE) { + request_module("cryptomgr"); + ok = blocking_notifier_call_chain(&crypto_chain, val, v); + } + + return ok; +} +EXPORT_SYMBOL_GPL(crypto_probing_notify); + +struct crypto_alg *crypto_alg_mod_lookup(const char *name, u32 type, u32 mask) +{ + struct crypto_alg *alg; + struct crypto_alg *larval; + int ok; + + if (!((type | mask) & CRYPTO_ALG_TESTED)) { + type |= CRYPTO_ALG_TESTED; + mask |= CRYPTO_ALG_TESTED; + } + + larval = crypto_larval_lookup(name, type, mask); + if (IS_ERR(larval) || !crypto_is_larval(larval)) + return larval; + + ok = crypto_probing_notify(CRYPTO_MSG_ALG_REQUEST, larval); + + if (ok == NOTIFY_STOP) + alg = crypto_larval_wait(larval); + else { + crypto_mod_put(larval); + alg = ERR_PTR(-ENOENT); + } + crypto_larval_kill(larval); + return alg; +} +EXPORT_SYMBOL_GPL(crypto_alg_mod_lookup); + +static int crypto_init_ops(struct crypto_tfm *tfm, u32 type, u32 mask) +{ + const struct crypto_type *type_obj = tfm->__crt_alg->cra_type; + + if (type_obj) + return type_obj->init(tfm, type, mask); + + switch (crypto_tfm_alg_type(tfm)) { + case CRYPTO_ALG_TYPE_CIPHER: + return crypto_init_cipher_ops(tfm); + + case CRYPTO_ALG_TYPE_COMPRESS: + return crypto_init_compress_ops(tfm); + + default: + break; + } + + BUG(); + return -EINVAL; +} + +static void crypto_exit_ops(struct crypto_tfm *tfm) +{ + const struct crypto_type *type = tfm->__crt_alg->cra_type; + + if (type) { + if (tfm->exit) + tfm->exit(tfm); + return; + } + + switch (crypto_tfm_alg_type(tfm)) { + case CRYPTO_ALG_TYPE_CIPHER: + crypto_exit_cipher_ops(tfm); + break; + + case CRYPTO_ALG_TYPE_COMPRESS: + crypto_exit_compress_ops(tfm); + break; + + default: + BUG(); + } +} + +static unsigned int crypto_ctxsize(struct crypto_alg *alg, u32 type, u32 mask) +{ + const struct crypto_type *type_obj = alg->cra_type; + unsigned int len; + + len = alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1); + if (type_obj) + return len + type_obj->ctxsize(alg, type, mask); + + switch (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) { + default: + BUG(); + + case CRYPTO_ALG_TYPE_CIPHER: + len += crypto_cipher_ctxsize(alg); + break; + + case CRYPTO_ALG_TYPE_COMPRESS: + len += crypto_compress_ctxsize(alg); + break; + } + + return len; +} + +void crypto_shoot_alg(struct crypto_alg *alg) +{ + down_write(&crypto_alg_sem); + alg->cra_flags |= CRYPTO_ALG_DYING; + up_write(&crypto_alg_sem); +} +EXPORT_SYMBOL_GPL(crypto_shoot_alg); + +struct crypto_tfm *__crypto_alloc_tfm(struct crypto_alg *alg, u32 type, + u32 mask) +{ + struct crypto_tfm *tfm = NULL; + unsigned int tfm_size; + int err = -ENOMEM; + + tfm_size = sizeof(*tfm) + crypto_ctxsize(alg, type, mask); + tfm = kzalloc(tfm_size, GFP_KERNEL); + if (tfm == NULL) + goto out_err; + + tfm->__crt_alg = alg; + + err = crypto_init_ops(tfm, type, mask); + if (err) + goto out_free_tfm; + + if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm))) + goto cra_init_failed; + + goto out; + +cra_init_failed: + crypto_exit_ops(tfm); +out_free_tfm: + if (err == -EAGAIN) + crypto_shoot_alg(alg); + kfree(tfm); +out_err: + tfm = ERR_PTR(err); +out: + return tfm; +} +EXPORT_SYMBOL_GPL(__crypto_alloc_tfm); + +/* + * crypto_alloc_base - Locate algorithm and allocate transform + * @alg_name: Name of algorithm + * @type: Type of algorithm + * 
@mask: Mask for type comparison + * + * This function should not be used by new algorithm types. + * Please use crypto_alloc_tfm instead. + * + * crypto_alloc_base() will first attempt to locate an already loaded + * algorithm. If that fails and the kernel supports dynamically loadable + * modules, it will then attempt to load a module of the same name or + * alias. If that fails it will send a query to any loaded crypto manager + * to construct an algorithm on the fly. A refcount is grabbed on the + * algorithm which is then associated with the new transform. + * + * The returned transform is of a non-determinate type. Most people + * should use one of the more specific allocation functions such as + * crypto_alloc_blkcipher. + * + * In case of error the return value is an error pointer. + */ +struct crypto_tfm *crypto_alloc_base(const char *alg_name, u32 type, u32 mask) +{ + struct crypto_tfm *tfm; + int err; + + for (;;) { + struct crypto_alg *alg; + + alg = crypto_alg_mod_lookup(alg_name, type, mask); + if (IS_ERR(alg)) { + err = PTR_ERR(alg); + goto err; + } + + tfm = __crypto_alloc_tfm(alg, type, mask); + if (!IS_ERR(tfm)) + return tfm; + + crypto_mod_put(alg); + err = PTR_ERR(tfm); + +err: + if (err != -EAGAIN) + break; + if (signal_pending(current)) { + err = -EINTR; + break; + } + } + + return ERR_PTR(err); +} +EXPORT_SYMBOL_GPL(crypto_alloc_base); + +void *crypto_create_tfm(struct crypto_alg *alg, + const struct crypto_type *frontend) +{ + char *mem; + struct crypto_tfm *tfm = NULL; + unsigned int tfmsize; + unsigned int total; + int err = -ENOMEM; + + tfmsize = frontend->tfmsize; + total = tfmsize + sizeof(*tfm) + frontend->extsize(alg); + + mem = kzalloc(total, GFP_KERNEL); + if (mem == NULL) + goto out_err; + + tfm = (struct crypto_tfm *)(mem + tfmsize); + tfm->__crt_alg = alg; + + err = frontend->init_tfm(tfm); + if (err) + goto out_free_tfm; + + if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm))) + goto cra_init_failed; + + goto out; + +cra_init_failed: + crypto_exit_ops(tfm); +out_free_tfm: + if (err == -EAGAIN) + crypto_shoot_alg(alg); + kfree(mem); +out_err: + mem = ERR_PTR(err); +out: + return mem; +} +EXPORT_SYMBOL_GPL(crypto_create_tfm); + +struct crypto_alg *crypto_find_alg(const char *alg_name, + const struct crypto_type *frontend, + u32 type, u32 mask) +{ + struct crypto_alg *(*lookup)(const char *name, u32 type, u32 mask) = + crypto_alg_mod_lookup; + + if (frontend) { + type &= frontend->maskclear; + mask &= frontend->maskclear; + type |= frontend->type; + mask |= frontend->maskset; + + if (frontend->lookup) + lookup = frontend->lookup; + } + + return lookup(alg_name, type, mask); +} +EXPORT_SYMBOL_GPL(crypto_find_alg); + +/* + * crypto_alloc_tfm - Locate algorithm and allocate transform + * @alg_name: Name of algorithm + * @frontend: Frontend algorithm type + * @type: Type of algorithm + * @mask: Mask for type comparison + * + * crypto_alloc_tfm() will first attempt to locate an already loaded + * algorithm. If that fails and the kernel supports dynamically loadable + * modules, it will then attempt to load a module of the same name or + * alias. If that fails it will send a query to any loaded crypto manager + * to construct an algorithm on the fly. A refcount is grabbed on the + * algorithm which is then associated with the new transform. + * + * The returned transform is of a non-determinate type. Most people + * should use one of the more specific allocation functions such as + * crypto_alloc_blkcipher.
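 + * + * A minimal usage sketch, assuming the blkcipher front end and the usual + * error-pointer conventions (names other than the crypto API calls are + * placeholders): + * + *	struct crypto_blkcipher *tfm; + * + *	tfm = crypto_alloc_blkcipher("cbc(aes)", 0, 0); + *	if (IS_ERR(tfm)) + *		return PTR_ERR(tfm); + *	... + *	crypto_free_blkcipher(tfm);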
+ * + * In case of error the return value is an error pointer. + */ +void *crypto_alloc_tfm(const char *alg_name, + const struct crypto_type *frontend, u32 type, u32 mask) +{ + void *tfm; + int err; + + for (;;) { + struct crypto_alg *alg; + + alg = crypto_find_alg(alg_name, frontend, type, mask); + if (IS_ERR(alg)) { + err = PTR_ERR(alg); + goto err; + } + + tfm = crypto_create_tfm(alg, frontend); + if (!IS_ERR(tfm)) + return tfm; + + crypto_mod_put(alg); + err = PTR_ERR(tfm); + +err: + if (err != -EAGAIN) + break; + if (signal_pending(current)) { + err = -EINTR; + break; + } + } + + return ERR_PTR(err); +} +EXPORT_SYMBOL_GPL(crypto_alloc_tfm); + +/* + * crypto_destroy_tfm - Free crypto transform + * @mem: Start of tfm slab + * @tfm: Transform to free + * + * This function frees up the transform and any associated resources, + * then drops the refcount on the associated algorithm. + */ +void crypto_destroy_tfm(void *mem, struct crypto_tfm *tfm) +{ + struct crypto_alg *alg; + + if (unlikely(!mem)) + return; + + alg = tfm->__crt_alg; + + if (!tfm->exit && alg->cra_exit) + alg->cra_exit(tfm); + crypto_exit_ops(tfm); + crypto_mod_put(alg); + kzfree(mem); +} +EXPORT_SYMBOL_GPL(crypto_destroy_tfm); + +int crypto_has_alg(const char *name, u32 type, u32 mask) +{ + int ret = 0; + struct crypto_alg *alg = crypto_alg_mod_lookup(name, type, mask); + + if (!IS_ERR(alg)) { + crypto_mod_put(alg); + ret = 1; + } + + return ret; +} +EXPORT_SYMBOL_GPL(crypto_has_alg); + +MODULE_DESCRIPTION("Cryptographic core API"); +MODULE_LICENSE("GPL"); diff --git a/crypto/arc4.c b/crypto/arc4.c new file mode 100644 index 00000000..0d12a96d --- /dev/null +++ b/crypto/arc4.c @@ -0,0 +1,103 @@ +/* + * Cryptographic API + * + * ARC4 Cipher Algorithm + * + * Jon Oberheide <jon@oberheide.org> + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 2 of the License, or + * (at your option) any later version. 
+ * + */ +#include <linux/module.h> +#include <linux/init.h> +#include <linux/crypto.h> + +#define ARC4_MIN_KEY_SIZE 1 +#define ARC4_MAX_KEY_SIZE 256 +#define ARC4_BLOCK_SIZE 1 + +struct arc4_ctx { + u8 S[256]; + u8 x, y; +}; + +static int arc4_set_key(struct crypto_tfm *tfm, const u8 *in_key, + unsigned int key_len) +{ + struct arc4_ctx *ctx = crypto_tfm_ctx(tfm); + int i, j = 0, k = 0; + + ctx->x = 1; + ctx->y = 0; + + for (i = 0; i < 256; i++) + ctx->S[i] = i; + + for (i = 0; i < 256; i++) { + u8 a = ctx->S[i]; + j = (j + in_key[k] + a) & 0xff; + ctx->S[i] = ctx->S[j]; + ctx->S[j] = a; + if (++k >= key_len) + k = 0; + } + + return 0; +} + +static void arc4_crypt(struct crypto_tfm *tfm, u8 *out, const u8 *in) +{ + struct arc4_ctx *ctx = crypto_tfm_ctx(tfm); + + u8 *const S = ctx->S; + u8 x = ctx->x; + u8 y = ctx->y; + u8 a, b; + + a = S[x]; + y = (y + a) & 0xff; + b = S[y]; + S[x] = b; + S[y] = a; + x = (x + 1) & 0xff; + *out++ = *in ^ S[(a + b) & 0xff]; + + ctx->x = x; + ctx->y = y; +} + +static struct crypto_alg arc4_alg = { + .cra_name = "arc4", + .cra_flags = CRYPTO_ALG_TYPE_CIPHER, + .cra_blocksize = ARC4_BLOCK_SIZE, + .cra_ctxsize = sizeof(struct arc4_ctx), + .cra_module = THIS_MODULE, + .cra_list = LIST_HEAD_INIT(arc4_alg.cra_list), + .cra_u = { .cipher = { + .cia_min_keysize = ARC4_MIN_KEY_SIZE, + .cia_max_keysize = ARC4_MAX_KEY_SIZE, + .cia_setkey = arc4_set_key, + .cia_encrypt = arc4_crypt, + .cia_decrypt = arc4_crypt } } +}; + +static int __init arc4_init(void) +{ + return crypto_register_alg(&arc4_alg); +} + + +static void __exit arc4_exit(void) +{ + crypto_unregister_alg(&arc4_alg); +} + +module_init(arc4_init); +module_exit(arc4_exit); + +MODULE_LICENSE("GPL"); +MODULE_DESCRIPTION("ARC4 Cipher Algorithm"); +MODULE_AUTHOR("Jon Oberheide <jon@oberheide.org>"); diff --git a/crypto/async_tx/Kconfig b/crypto/async_tx/Kconfig new file mode 100644 index 00000000..1b11abbb --- /dev/null +++ b/crypto/async_tx/Kconfig @@ -0,0 +1,31 @@ +config ASYNC_CORE + tristate + +config ASYNC_MEMCPY + tristate + select ASYNC_CORE + +config ASYNC_XOR + tristate + select ASYNC_CORE + select XOR_BLOCKS + +config ASYNC_MEMSET + tristate + select ASYNC_CORE + +config ASYNC_PQ + tristate + select ASYNC_CORE + +config ASYNC_RAID6_RECOV + tristate + select ASYNC_CORE + select ASYNC_PQ + select ASYNC_XOR + +config ASYNC_TX_DISABLE_PQ_VAL_DMA + bool + +config ASYNC_TX_DISABLE_XOR_VAL_DMA + bool diff --git a/crypto/async_tx/Makefile b/crypto/async_tx/Makefile new file mode 100644 index 00000000..d1e0e6f7 --- /dev/null +++ b/crypto/async_tx/Makefile @@ -0,0 +1,7 @@ +obj-$(CONFIG_ASYNC_CORE) += async_tx.o +obj-$(CONFIG_ASYNC_MEMCPY) += async_memcpy.o +obj-$(CONFIG_ASYNC_MEMSET) += async_memset.o +obj-$(CONFIG_ASYNC_XOR) += async_xor.o +obj-$(CONFIG_ASYNC_PQ) += async_pq.o +obj-$(CONFIG_ASYNC_RAID6_RECOV) += async_raid6_recov.o +obj-$(CONFIG_ASYNC_RAID6_TEST) += raid6test.o diff --git a/crypto/async_tx/async_memcpy.c b/crypto/async_tx/async_memcpy.c new file mode 100644 index 00000000..361b5e82 --- /dev/null +++ b/crypto/async_tx/async_memcpy.c @@ -0,0 +1,99 @@ +/* + * copy offload engine support + * + * Copyright © 2006, Intel Corporation. + * + * Dan Williams <dan.j.williams@intel.com> + * + * with architecture considerations by: + * Neil Brown <neilb@suse.de> + * Jeff Garzik <jeff@garzik.org> + * + * This program is free software; you can redistribute it and/or modify it + * under the terms and conditions of the GNU General Public License, + * version 2, as published by the Free Software Foundation. 
+ * + * This program is distributed in the hope it will be useful, but WITHOUT + * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or + * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for + * more details. + * + * You should have received a copy of the GNU General Public License along with + * this program; if not, write to the Free Software Foundation, Inc., + * 51 Franklin St - Fifth Floor, Boston, MA 02110-1301 USA. + * + */ +#include <linux/kernel.h> +#include <linux/highmem.h> +#include <linux/module.h> +#include <linux/mm.h> +#include <linux/dma-mapping.h> +#include <linux/async_tx.h> + +/** + * async_memcpy - attempt to copy memory with a dma engine. + * @dest: destination page + * @src: src page + * @dest_offset: offset into 'dest' to start transaction + * @src_offset: offset into 'src' to start transaction + * @len: length in bytes + * @submit: submission / completion modifiers + * + * honored flags: ASYNC_TX_ACK + */ +struct dma_async_tx_descriptor * +async_memcpy(struct page *dest, struct page *src, unsigned int dest_offset, + unsigned int src_offset, size_t len, + struct async_submit_ctl *submit) +{ + struct dma_chan *chan = async_tx_find_channel(submit, DMA_MEMCPY, + &dest, 1, &src, 1, len); + struct dma_device *device = chan ? chan->device : NULL; + struct dma_async_tx_descriptor *tx = NULL; + + if (device && is_dma_copy_aligned(device, src_offset, dest_offset, len)) { + dma_addr_t dma_dest, dma_src; + unsigned long dma_prep_flags = 0; + + if (submit->cb_fn) + dma_prep_flags |= DMA_PREP_INTERRUPT; + if (submit->flags & ASYNC_TX_FENCE) + dma_prep_flags |= DMA_PREP_FENCE; + dma_dest = dma_map_page(device->dev, dest, dest_offset, len, + DMA_FROM_DEVICE); + + dma_src = dma_map_page(device->dev, src, src_offset, len, + DMA_TO_DEVICE); + + tx = device->device_prep_dma_memcpy(chan, dma_dest, dma_src, + len, dma_prep_flags); + } + + if (tx) { + pr_debug("%s: (async) len: %zu\n", __func__, len); + async_tx_submit(chan, tx, submit); + } else { + void *dest_buf, *src_buf; + pr_debug("%s: (sync) len: %zu\n", __func__, len); + + /* wait for any prerequisite operations */ + async_tx_quiesce(&submit->depend_tx); + + dest_buf = kmap_atomic(dest) + dest_offset; + src_buf = kmap_atomic(src) + src_offset; + + memcpy(dest_buf, src_buf, len); + + kunmap_atomic(src_buf); + kunmap_atomic(dest_buf); + + async_tx_sync_epilog(submit); + } + + return tx; +} +EXPORT_SYMBOL_GPL(async_memcpy); + +MODULE_AUTHOR("Intel Corporation"); +MODULE_DESCRIPTION("asynchronous memcpy api"); +MODULE_LICENSE("GPL"); diff --git a/crypto/async_tx/async_memset.c b/crypto/async_tx/async_memset.c new file mode 100644 index 00000000..58e4a875 --- /dev/null +++ b/crypto/async_tx/async_memset.c @@ -0,0 +1,88 @@ +/* + * memory fill offload engine support + * + * Copyright © 2006, Intel Corporation. + * + * Dan Williams <dan.j.williams@intel.com> + * + * with architecture considerations by: + * Neil Brown <neilb@suse.de> + * Jeff Garzik <jeff@garzik.org> + * + * This program is free software; you can redistribute it and/or modify it + * under the terms and conditions of the GNU General Public License, + * version 2, as published by the Free Software Foundation. + * + * This program is distributed in the hope it will be useful, but WITHOUT + * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or + * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for + * more details. 
+ * + * You should have received a copy of the GNU General Public License along with + * this program; if not, write to the Free Software Foundation, Inc., + * 51 Franklin St - Fifth Floor, Boston, MA 02110-1301 USA. + * + */ +#include <linux/kernel.h> +#include <linux/interrupt.h> +#include <linux/mm.h> +#include <linux/dma-mapping.h> +#include <linux/async_tx.h> + +/** + * async_memset - attempt to fill memory with a dma engine. + * @dest: destination page + * @val: fill value + * @offset: offset in pages to start transaction + * @len: length in bytes + * + * honored flags: ASYNC_TX_ACK + */ +struct dma_async_tx_descriptor * +async_memset(struct page *dest, int val, unsigned int offset, size_t len, + struct async_submit_ctl *submit) +{ + struct dma_chan *chan = async_tx_find_channel(submit, DMA_MEMSET, + &dest, 1, NULL, 0, len); + struct dma_device *device = chan ? chan->device : NULL; + struct dma_async_tx_descriptor *tx = NULL; + + if (device && is_dma_fill_aligned(device, offset, 0, len)) { + dma_addr_t dma_dest; + unsigned long dma_prep_flags = 0; + + if (submit->cb_fn) + dma_prep_flags |= DMA_PREP_INTERRUPT; + if (submit->flags & ASYNC_TX_FENCE) + dma_prep_flags |= DMA_PREP_FENCE; + dma_dest = dma_map_page(device->dev, dest, offset, len, + DMA_FROM_DEVICE); + + tx = device->device_prep_dma_memset(chan, dma_dest, val, len, + dma_prep_flags); + } + + if (tx) { + pr_debug("%s: (async) len: %zu\n", __func__, len); + async_tx_submit(chan, tx, submit); + } else { /* run the memset synchronously */ + void *dest_buf; + pr_debug("%s: (sync) len: %zu\n", __func__, len); + + dest_buf = page_address(dest) + offset; + + /* wait for any prerequisite operations */ + async_tx_quiesce(&submit->depend_tx); + + memset(dest_buf, val, len); + + async_tx_sync_epilog(submit); + } + + return tx; +} +EXPORT_SYMBOL_GPL(async_memset); + +MODULE_AUTHOR("Intel Corporation"); +MODULE_DESCRIPTION("asynchronous memset api"); +MODULE_LICENSE("GPL"); diff --git a/crypto/async_tx/async_pq.c b/crypto/async_tx/async_pq.c new file mode 100644 index 00000000..91d5d385 --- /dev/null +++ b/crypto/async_tx/async_pq.c @@ -0,0 +1,417 @@ +/* + * Copyright(c) 2007 Yuri Tikhonov <yur@emcraft.com> + * Copyright(c) 2009 Intel Corporation + * + * This program is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License as published by the Free + * Software Foundation; either version 2 of the License, or (at your option) + * any later version. + * + * This program is distributed in the hope that it will be useful, but WITHOUT + * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or + * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for + * more details. + * + * You should have received a copy of the GNU General Public License along with + * this program; if not, write to the Free Software Foundation, Inc., 59 + * Temple Place - Suite 330, Boston, MA 02111-1307, USA. + * + * The full GNU General Public License is included in this distribution in the + * file called COPYING. 
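 + * + * A minimal usage sketch of the syndrome-generation entry point below, + * assuming caller-provided 'blocks', 'disks', 'stripe_len', 'cb', 'ctx' and + * 'scribble' values: + * + *	struct async_submit_ctl submit; + *	struct dma_async_tx_descriptor *tx; + * + *	init_async_submit(&submit, ASYNC_TX_ACK, NULL, cb, ctx, scribble); + *	tx = async_gen_syndrome(blocks, 0, disks, stripe_len, &submit);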
+ */ +#include <linux/kernel.h> +#include <linux/interrupt.h> +#include <linux/module.h> +#include <linux/dma-mapping.h> +#include <linux/raid/pq.h> +#include <linux/async_tx.h> +#include <linux/gfp.h> + +/** + * pq_scribble_page - space to hold throwaway P or Q buffer for + * synchronous gen_syndrome + */ +static struct page *pq_scribble_page; + +/* the struct page *blocks[] parameter passed to async_gen_syndrome() + * and async_syndrome_val() contains the 'P' destination address at + * blocks[disks-2] and the 'Q' destination address at blocks[disks-1] + * + * note: these are macros as they are used as lvalues + */ +#define P(b, d) (b[d-2]) +#define Q(b, d) (b[d-1]) + +/** + * do_async_gen_syndrome - asynchronously calculate P and/or Q + */ +static __async_inline struct dma_async_tx_descriptor * +do_async_gen_syndrome(struct dma_chan *chan, struct page **blocks, + const unsigned char *scfs, unsigned int offset, int disks, + size_t len, dma_addr_t *dma_src, + struct async_submit_ctl *submit) +{ + struct dma_async_tx_descriptor *tx = NULL; + struct dma_device *dma = chan->device; + enum dma_ctrl_flags dma_flags = 0; + enum async_tx_flags flags_orig = submit->flags; + dma_async_tx_callback cb_fn_orig = submit->cb_fn; + dma_async_tx_callback cb_param_orig = submit->cb_param; + int src_cnt = disks - 2; + unsigned char coefs[src_cnt]; + unsigned short pq_src_cnt; + dma_addr_t dma_dest[2]; + int src_off = 0; + int idx; + int i; + + /* DMAs use destinations as sources, so use BIDIRECTIONAL mapping */ + if (P(blocks, disks)) + dma_dest[0] = dma_map_page(dma->dev, P(blocks, disks), offset, + len, DMA_BIDIRECTIONAL); + else + dma_flags |= DMA_PREP_PQ_DISABLE_P; + if (Q(blocks, disks)) + dma_dest[1] = dma_map_page(dma->dev, Q(blocks, disks), offset, + len, DMA_BIDIRECTIONAL); + else + dma_flags |= DMA_PREP_PQ_DISABLE_Q; + + /* convert source addresses being careful to collapse 'empty' + * sources and update the coefficients accordingly + */ + for (i = 0, idx = 0; i < src_cnt; i++) { + if (blocks[i] == NULL) + continue; + dma_src[idx] = dma_map_page(dma->dev, blocks[i], offset, len, + DMA_TO_DEVICE); + coefs[idx] = scfs[i]; + idx++; + } + src_cnt = idx; + + while (src_cnt > 0) { + submit->flags = flags_orig; + pq_src_cnt = min(src_cnt, dma_maxpq(dma, dma_flags)); + /* if we are submitting additional pqs, leave the chain open, + * clear the callback parameters, and leave the destination + * buffers mapped + */ + if (src_cnt > pq_src_cnt) { + submit->flags &= ~ASYNC_TX_ACK; + submit->flags |= ASYNC_TX_FENCE; + dma_flags |= DMA_COMPL_SKIP_DEST_UNMAP; + submit->cb_fn = NULL; + submit->cb_param = NULL; + } else { + dma_flags &= ~DMA_COMPL_SKIP_DEST_UNMAP; + submit->cb_fn = cb_fn_orig; + submit->cb_param = cb_param_orig; + if (cb_fn_orig) + dma_flags |= DMA_PREP_INTERRUPT; + } + if (submit->flags & ASYNC_TX_FENCE) + dma_flags |= DMA_PREP_FENCE; + + /* Since we have clobbered the src_list we are committed + * to doing this asynchronously. 
Drivers force forward + * progress in case they can not provide a descriptor + */ + for (;;) { + tx = dma->device_prep_dma_pq(chan, dma_dest, + &dma_src[src_off], + pq_src_cnt, + &coefs[src_off], len, + dma_flags); + if (likely(tx)) + break; + async_tx_quiesce(&submit->depend_tx); + dma_async_issue_pending(chan); + } + + async_tx_submit(chan, tx, submit); + submit->depend_tx = tx; + + /* drop completed sources */ + src_cnt -= pq_src_cnt; + src_off += pq_src_cnt; + + dma_flags |= DMA_PREP_CONTINUE; + } + + return tx; +} + +/** + * do_sync_gen_syndrome - synchronously calculate a raid6 syndrome + */ +static void +do_sync_gen_syndrome(struct page **blocks, unsigned int offset, int disks, + size_t len, struct async_submit_ctl *submit) +{ + void **srcs; + int i; + + if (submit->scribble) + srcs = submit->scribble; + else + srcs = (void **) blocks; + + for (i = 0; i < disks; i++) { + if (blocks[i] == NULL) { + BUG_ON(i > disks - 3); /* P or Q can't be zero */ + srcs[i] = (void*)raid6_empty_zero_page; + } else + srcs[i] = page_address(blocks[i]) + offset; + } + raid6_call.gen_syndrome(disks, len, srcs); + async_tx_sync_epilog(submit); +} + +/** + * async_gen_syndrome - asynchronously calculate a raid6 syndrome + * @blocks: source blocks from idx 0..disks-3, P @ disks-2 and Q @ disks-1 + * @offset: common offset into each block (src and dest) to start transaction + * @disks: number of blocks (including missing P or Q, see below) + * @len: length of operation in bytes + * @submit: submission/completion modifiers + * + * General note: This routine assumes a field of GF(2^8) with a + * primitive polynomial of 0x11d and a generator of {02}. + * + * 'disks' note: callers can optionally omit either P or Q (but not + * both) from the calculation by setting blocks[disks-2] or + * blocks[disks-1] to NULL. When P or Q is omitted 'len' must be <= + * PAGE_SIZE as a temporary buffer of this size is used in the + * synchronous path. 'disks' always accounts for both destination + * buffers. If any source buffers (blocks[i] where i < disks - 2) are + * set to NULL those buffers will be replaced with the raid6_zero_page + * in the synchronous path and omitted in the hardware-asynchronous + * path. + * + * 'blocks' note: if submit->scribble is NULL then the contents of + * 'blocks' may be overwritten to perform address conversions + * (dma_map_page() or page_address()). + */ +struct dma_async_tx_descriptor * +async_gen_syndrome(struct page **blocks, unsigned int offset, int disks, + size_t len, struct async_submit_ctl *submit) +{ + int src_cnt = disks - 2; + struct dma_chan *chan = async_tx_find_channel(submit, DMA_PQ, + &P(blocks, disks), 2, + blocks, src_cnt, len); + struct dma_device *device = chan ? 
chan->device : NULL; + dma_addr_t *dma_src = NULL; + + BUG_ON(disks > 255 || !(P(blocks, disks) || Q(blocks, disks))); + + if (submit->scribble) + dma_src = submit->scribble; + else if (sizeof(dma_addr_t) <= sizeof(struct page *)) + dma_src = (dma_addr_t *) blocks; + + if (dma_src && device && + (src_cnt <= dma_maxpq(device, 0) || + dma_maxpq(device, DMA_PREP_CONTINUE) > 0) && + is_dma_pq_aligned(device, offset, 0, len)) { + /* run the p+q asynchronously */ + pr_debug("%s: (async) disks: %d len: %zu\n", + __func__, disks, len); + return do_async_gen_syndrome(chan, blocks, raid6_gfexp, offset, + disks, len, dma_src, submit); + } + + /* run the pq synchronously */ + pr_debug("%s: (sync) disks: %d len: %zu\n", __func__, disks, len); + + /* wait for any prerequisite operations */ + async_tx_quiesce(&submit->depend_tx); + + if (!P(blocks, disks)) { + P(blocks, disks) = pq_scribble_page; + BUG_ON(len + offset > PAGE_SIZE); + } + if (!Q(blocks, disks)) { + Q(blocks, disks) = pq_scribble_page; + BUG_ON(len + offset > PAGE_SIZE); + } + do_sync_gen_syndrome(blocks, offset, disks, len, submit); + + return NULL; +} +EXPORT_SYMBOL_GPL(async_gen_syndrome); + +static inline struct dma_chan * +pq_val_chan(struct async_submit_ctl *submit, struct page **blocks, int disks, size_t len) +{ + #ifdef CONFIG_ASYNC_TX_DISABLE_PQ_VAL_DMA + return NULL; + #endif + return async_tx_find_channel(submit, DMA_PQ_VAL, NULL, 0, blocks, + disks, len); +} + +/** + * async_syndrome_val - asynchronously validate a raid6 syndrome + * @blocks: source blocks from idx 0..disks-3, P @ disks-2 and Q @ disks-1 + * @offset: common offset into each block (src and dest) to start transaction + * @disks: number of blocks (including missing P or Q, see below) + * @len: length of operation in bytes + * @pqres: on val failure SUM_CHECK_P_RESULT and/or SUM_CHECK_Q_RESULT are set + * @spare: temporary result buffer for the synchronous case + * @submit: submission / completion modifiers + * + * The same notes from async_gen_syndrome apply to the 'blocks', + * and 'disks' parameters of this routine. The synchronous path + * requires a temporary result buffer and submit->scribble to be + * specified. + */ +struct dma_async_tx_descriptor * +async_syndrome_val(struct page **blocks, unsigned int offset, int disks, + size_t len, enum sum_check_flags *pqres, struct page *spare, + struct async_submit_ctl *submit) +{ + struct dma_chan *chan = pq_val_chan(submit, blocks, disks, len); + struct dma_device *device = chan ? chan->device : NULL; + struct dma_async_tx_descriptor *tx; + unsigned char coefs[disks-2]; + enum dma_ctrl_flags dma_flags = submit->cb_fn ? 
DMA_PREP_INTERRUPT : 0; + dma_addr_t *dma_src = NULL; + int src_cnt = 0; + + BUG_ON(disks < 4); + + if (submit->scribble) + dma_src = submit->scribble; + else if (sizeof(dma_addr_t) <= sizeof(struct page *)) + dma_src = (dma_addr_t *) blocks; + + if (dma_src && device && disks <= dma_maxpq(device, 0) && + is_dma_pq_aligned(device, offset, 0, len)) { + struct device *dev = device->dev; + dma_addr_t *pq = &dma_src[disks-2]; + int i; + + pr_debug("%s: (async) disks: %d len: %zu\n", + __func__, disks, len); + if (!P(blocks, disks)) + dma_flags |= DMA_PREP_PQ_DISABLE_P; + else + pq[0] = dma_map_page(dev, P(blocks, disks), + offset, len, + DMA_TO_DEVICE); + if (!Q(blocks, disks)) + dma_flags |= DMA_PREP_PQ_DISABLE_Q; + else + pq[1] = dma_map_page(dev, Q(blocks, disks), + offset, len, + DMA_TO_DEVICE); + + if (submit->flags & ASYNC_TX_FENCE) + dma_flags |= DMA_PREP_FENCE; + for (i = 0; i < disks-2; i++) + if (likely(blocks[i])) { + dma_src[src_cnt] = dma_map_page(dev, blocks[i], + offset, len, + DMA_TO_DEVICE); + coefs[src_cnt] = raid6_gfexp[i]; + src_cnt++; + } + + for (;;) { + tx = device->device_prep_dma_pq_val(chan, pq, dma_src, + src_cnt, + coefs, + len, pqres, + dma_flags); + if (likely(tx)) + break; + async_tx_quiesce(&submit->depend_tx); + dma_async_issue_pending(chan); + } + async_tx_submit(chan, tx, submit); + + return tx; + } else { + struct page *p_src = P(blocks, disks); + struct page *q_src = Q(blocks, disks); + enum async_tx_flags flags_orig = submit->flags; + dma_async_tx_callback cb_fn_orig = submit->cb_fn; + void *scribble = submit->scribble; + void *cb_param_orig = submit->cb_param; + void *p, *q, *s; + + pr_debug("%s: (sync) disks: %d len: %zu\n", + __func__, disks, len); + + /* caller must provide a temporary result buffer and + * allow the input parameters to be preserved + */ + BUG_ON(!spare || !scribble); + + /* wait for any prerequisite operations */ + async_tx_quiesce(&submit->depend_tx); + + /* recompute p and/or q into the temporary buffer and then + * check to see the result matches the current value + */ + tx = NULL; + *pqres = 0; + if (p_src) { + init_async_submit(submit, ASYNC_TX_XOR_ZERO_DST, NULL, + NULL, NULL, scribble); + tx = async_xor(spare, blocks, offset, disks-2, len, submit); + async_tx_quiesce(&tx); + p = page_address(p_src) + offset; + s = page_address(spare) + offset; + *pqres |= !!memcmp(p, s, len) << SUM_CHECK_P; + } + + if (q_src) { + P(blocks, disks) = NULL; + Q(blocks, disks) = spare; + init_async_submit(submit, 0, NULL, NULL, NULL, scribble); + tx = async_gen_syndrome(blocks, offset, disks, len, submit); + async_tx_quiesce(&tx); + q = page_address(q_src) + offset; + s = page_address(spare) + offset; + *pqres |= !!memcmp(q, s, len) << SUM_CHECK_Q; + } + + /* restore P, Q and submit */ + P(blocks, disks) = p_src; + Q(blocks, disks) = q_src; + + submit->cb_fn = cb_fn_orig; + submit->cb_param = cb_param_orig; + submit->flags = flags_orig; + async_tx_sync_epilog(submit); + + return NULL; + } +} +EXPORT_SYMBOL_GPL(async_syndrome_val); + +static int __init async_pq_init(void) +{ + pq_scribble_page = alloc_page(GFP_KERNEL); + + if (pq_scribble_page) + return 0; + + pr_err("%s: failed to allocate required spare page\n", __func__); + + return -ENOMEM; +} + +static void __exit async_pq_exit(void) +{ + put_page(pq_scribble_page); +} + +module_init(async_pq_init); +module_exit(async_pq_exit); + +MODULE_DESCRIPTION("asynchronous raid6 syndrome generation/validation"); +MODULE_LICENSE("GPL"); diff --git a/crypto/async_tx/async_raid6_recov.c 
b/crypto/async_tx/async_raid6_recov.c new file mode 100644 index 00000000..a9f08a6a --- /dev/null +++ b/crypto/async_tx/async_raid6_recov.c @@ -0,0 +1,506 @@ +/* + * Asynchronous RAID-6 recovery calculations ASYNC_TX API. + * Copyright(c) 2009 Intel Corporation + * + * based on raid6recov.c: + * Copyright 2002 H. Peter Anvin + * + * This program is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License as published by the Free + * Software Foundation; either version 2 of the License, or (at your option) + * any later version. + * + * This program is distributed in the hope that it will be useful, but WITHOUT + * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or + * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for + * more details. + * + * You should have received a copy of the GNU General Public License along with + * this program; if not, write to the Free Software Foundation, Inc., 51 + * Franklin St - Fifth Floor, Boston, MA 02110-1301 USA. + * + */ +#include <linux/kernel.h> +#include <linux/interrupt.h> +#include <linux/module.h> +#include <linux/dma-mapping.h> +#include <linux/raid/pq.h> +#include <linux/async_tx.h> + +static struct dma_async_tx_descriptor * +async_sum_product(struct page *dest, struct page **srcs, unsigned char *coef, + size_t len, struct async_submit_ctl *submit) +{ + struct dma_chan *chan = async_tx_find_channel(submit, DMA_PQ, + &dest, 1, srcs, 2, len); + struct dma_device *dma = chan ? chan->device : NULL; + const u8 *amul, *bmul; + u8 ax, bx; + u8 *a, *b, *c; + + if (dma) { + dma_addr_t dma_dest[2]; + dma_addr_t dma_src[2]; + struct device *dev = dma->dev; + struct dma_async_tx_descriptor *tx; + enum dma_ctrl_flags dma_flags = DMA_PREP_PQ_DISABLE_P; + + if (submit->flags & ASYNC_TX_FENCE) + dma_flags |= DMA_PREP_FENCE; + dma_dest[1] = dma_map_page(dev, dest, 0, len, DMA_BIDIRECTIONAL); + dma_src[0] = dma_map_page(dev, srcs[0], 0, len, DMA_TO_DEVICE); + dma_src[1] = dma_map_page(dev, srcs[1], 0, len, DMA_TO_DEVICE); + tx = dma->device_prep_dma_pq(chan, dma_dest, dma_src, 2, coef, + len, dma_flags); + if (tx) { + async_tx_submit(chan, tx, submit); + return tx; + } + + /* could not get a descriptor, unmap and fall through to + * the synchronous path + */ + dma_unmap_page(dev, dma_dest[1], len, DMA_BIDIRECTIONAL); + dma_unmap_page(dev, dma_src[0], len, DMA_TO_DEVICE); + dma_unmap_page(dev, dma_src[1], len, DMA_TO_DEVICE); + } + + /* run the operation synchronously */ + async_tx_quiesce(&submit->depend_tx); + amul = raid6_gfmul[coef[0]]; + bmul = raid6_gfmul[coef[1]]; + a = page_address(srcs[0]); + b = page_address(srcs[1]); + c = page_address(dest); + + while (len--) { + ax = amul[*a++]; + bx = bmul[*b++]; + *c++ = ax ^ bx; + } + + return NULL; +} + +static struct dma_async_tx_descriptor * +async_mult(struct page *dest, struct page *src, u8 coef, size_t len, + struct async_submit_ctl *submit) +{ + struct dma_chan *chan = async_tx_find_channel(submit, DMA_PQ, + &dest, 1, &src, 1, len); + struct dma_device *dma = chan ? 
chan->device : NULL; + const u8 *qmul; /* Q multiplier table */ + u8 *d, *s; + + if (dma) { + dma_addr_t dma_dest[2]; + dma_addr_t dma_src[1]; + struct device *dev = dma->dev; + struct dma_async_tx_descriptor *tx; + enum dma_ctrl_flags dma_flags = DMA_PREP_PQ_DISABLE_P; + + if (submit->flags & ASYNC_TX_FENCE) + dma_flags |= DMA_PREP_FENCE; + dma_dest[1] = dma_map_page(dev, dest, 0, len, DMA_BIDIRECTIONAL); + dma_src[0] = dma_map_page(dev, src, 0, len, DMA_TO_DEVICE); + tx = dma->device_prep_dma_pq(chan, dma_dest, dma_src, 1, &coef, + len, dma_flags); + if (tx) { + async_tx_submit(chan, tx, submit); + return tx; + } + + /* could not get a descriptor, unmap and fall through to + * the synchronous path + */ + dma_unmap_page(dev, dma_dest[1], len, DMA_BIDIRECTIONAL); + dma_unmap_page(dev, dma_src[0], len, DMA_TO_DEVICE); + } + + /* no channel available, or failed to allocate a descriptor, so + * perform the operation synchronously + */ + async_tx_quiesce(&submit->depend_tx); + qmul = raid6_gfmul[coef]; + d = page_address(dest); + s = page_address(src); + + while (len--) + *d++ = qmul[*s++]; + + return NULL; +} + +static struct dma_async_tx_descriptor * +__2data_recov_4(int disks, size_t bytes, int faila, int failb, + struct page **blocks, struct async_submit_ctl *submit) +{ + struct dma_async_tx_descriptor *tx = NULL; + struct page *p, *q, *a, *b; + struct page *srcs[2]; + unsigned char coef[2]; + enum async_tx_flags flags = submit->flags; + dma_async_tx_callback cb_fn = submit->cb_fn; + void *cb_param = submit->cb_param; + void *scribble = submit->scribble; + + p = blocks[disks-2]; + q = blocks[disks-1]; + + a = blocks[faila]; + b = blocks[failb]; + + /* in the 4 disk case P + Pxy == P and Q + Qxy == Q */ + /* Dx = A*(P+Pxy) + B*(Q+Qxy) */ + srcs[0] = p; + srcs[1] = q; + coef[0] = raid6_gfexi[failb-faila]; + coef[1] = raid6_gfinv[raid6_gfexp[faila]^raid6_gfexp[failb]]; + init_async_submit(submit, ASYNC_TX_FENCE, tx, NULL, NULL, scribble); + tx = async_sum_product(b, srcs, coef, bytes, submit); + + /* Dy = P+Pxy+Dx */ + srcs[0] = p; + srcs[1] = b; + init_async_submit(submit, flags | ASYNC_TX_XOR_ZERO_DST, tx, cb_fn, + cb_param, scribble); + tx = async_xor(a, srcs, 0, 2, bytes, submit); + + return tx; + +} + +static struct dma_async_tx_descriptor * +__2data_recov_5(int disks, size_t bytes, int faila, int failb, + struct page **blocks, struct async_submit_ctl *submit) +{ + struct dma_async_tx_descriptor *tx = NULL; + struct page *p, *q, *g, *dp, *dq; + struct page *srcs[2]; + unsigned char coef[2]; + enum async_tx_flags flags = submit->flags; + dma_async_tx_callback cb_fn = submit->cb_fn; + void *cb_param = submit->cb_param; + void *scribble = submit->scribble; + int good_srcs, good, i; + + good_srcs = 0; + good = -1; + for (i = 0; i < disks-2; i++) { + if (blocks[i] == NULL) + continue; + if (i == faila || i == failb) + continue; + good = i; + good_srcs++; + } + BUG_ON(good_srcs > 1); + + p = blocks[disks-2]; + q = blocks[disks-1]; + g = blocks[good]; + + /* Compute syndrome with zero for the missing data pages + * Use the dead data pages as temporary storage for delta p and + * delta q + */ + dp = blocks[faila]; + dq = blocks[failb]; + + init_async_submit(submit, ASYNC_TX_FENCE, tx, NULL, NULL, scribble); + tx = async_memcpy(dp, g, 0, 0, bytes, submit); + init_async_submit(submit, ASYNC_TX_FENCE, tx, NULL, NULL, scribble); + tx = async_mult(dq, g, raid6_gfexp[good], bytes, submit); + + /* compute P + Pxy */ + srcs[0] = dp; + srcs[1] = p; + init_async_submit(submit, 
ASYNC_TX_FENCE|ASYNC_TX_XOR_DROP_DST, tx, + NULL, NULL, scribble); + tx = async_xor(dp, srcs, 0, 2, bytes, submit); + + /* compute Q + Qxy */ + srcs[0] = dq; + srcs[1] = q; + init_async_submit(submit, ASYNC_TX_FENCE|ASYNC_TX_XOR_DROP_DST, tx, + NULL, NULL, scribble); + tx = async_xor(dq, srcs, 0, 2, bytes, submit); + + /* Dx = A*(P+Pxy) + B*(Q+Qxy) */ + srcs[0] = dp; + srcs[1] = dq; + coef[0] = raid6_gfexi[failb-faila]; + coef[1] = raid6_gfinv[raid6_gfexp[faila]^raid6_gfexp[failb]]; + init_async_submit(submit, ASYNC_TX_FENCE, tx, NULL, NULL, scribble); + tx = async_sum_product(dq, srcs, coef, bytes, submit); + + /* Dy = P+Pxy+Dx */ + srcs[0] = dp; + srcs[1] = dq; + init_async_submit(submit, flags | ASYNC_TX_XOR_DROP_DST, tx, cb_fn, + cb_param, scribble); + tx = async_xor(dp, srcs, 0, 2, bytes, submit); + + return tx; +} + +static struct dma_async_tx_descriptor * +__2data_recov_n(int disks, size_t bytes, int faila, int failb, + struct page **blocks, struct async_submit_ctl *submit) +{ + struct dma_async_tx_descriptor *tx = NULL; + struct page *p, *q, *dp, *dq; + struct page *srcs[2]; + unsigned char coef[2]; + enum async_tx_flags flags = submit->flags; + dma_async_tx_callback cb_fn = submit->cb_fn; + void *cb_param = submit->cb_param; + void *scribble = submit->scribble; + + p = blocks[disks-2]; + q = blocks[disks-1]; + + /* Compute syndrome with zero for the missing data pages + * Use the dead data pages as temporary storage for + * delta p and delta q + */ + dp = blocks[faila]; + blocks[faila] = NULL; + blocks[disks-2] = dp; + dq = blocks[failb]; + blocks[failb] = NULL; + blocks[disks-1] = dq; + + init_async_submit(submit, ASYNC_TX_FENCE, tx, NULL, NULL, scribble); + tx = async_gen_syndrome(blocks, 0, disks, bytes, submit); + + /* Restore pointer table */ + blocks[faila] = dp; + blocks[failb] = dq; + blocks[disks-2] = p; + blocks[disks-1] = q; + + /* compute P + Pxy */ + srcs[0] = dp; + srcs[1] = p; + init_async_submit(submit, ASYNC_TX_FENCE|ASYNC_TX_XOR_DROP_DST, tx, + NULL, NULL, scribble); + tx = async_xor(dp, srcs, 0, 2, bytes, submit); + + /* compute Q + Qxy */ + srcs[0] = dq; + srcs[1] = q; + init_async_submit(submit, ASYNC_TX_FENCE|ASYNC_TX_XOR_DROP_DST, tx, + NULL, NULL, scribble); + tx = async_xor(dq, srcs, 0, 2, bytes, submit); + + /* Dx = A*(P+Pxy) + B*(Q+Qxy) */ + srcs[0] = dp; + srcs[1] = dq; + coef[0] = raid6_gfexi[failb-faila]; + coef[1] = raid6_gfinv[raid6_gfexp[faila]^raid6_gfexp[failb]]; + init_async_submit(submit, ASYNC_TX_FENCE, tx, NULL, NULL, scribble); + tx = async_sum_product(dq, srcs, coef, bytes, submit); + + /* Dy = P+Pxy+Dx */ + srcs[0] = dp; + srcs[1] = dq; + init_async_submit(submit, flags | ASYNC_TX_XOR_DROP_DST, tx, cb_fn, + cb_param, scribble); + tx = async_xor(dp, srcs, 0, 2, bytes, submit); + + return tx; +} + +/** + * async_raid6_2data_recov - asynchronously calculate two missing data blocks + * @disks: number of disks in the RAID-6 array + * @bytes: block size + * @faila: first failed drive index + * @failb: second failed drive index + * @blocks: array of source pointers where the last two entries are p and q + * @submit: submission/completion modifiers + */ +struct dma_async_tx_descriptor * +async_raid6_2data_recov(int disks, size_t bytes, int faila, int failb, + struct page **blocks, struct async_submit_ctl *submit) +{ + void *scribble = submit->scribble; + int non_zero_srcs, i; + + BUG_ON(faila == failb); + if (failb < faila) + swap(faila, failb); + + pr_debug("%s: disks: %d len: %zu\n", __func__, disks, bytes); + + /* if a dma resource is not 
available or a scribble buffer is not + * available punt to the synchronous path. In the 'dma not + * available' case be sure to use the scribble buffer to + * preserve the content of 'blocks' as the caller intended. + */ + if (!async_dma_find_channel(DMA_PQ) || !scribble) { + void **ptrs = scribble ? scribble : (void **) blocks; + + async_tx_quiesce(&submit->depend_tx); + for (i = 0; i < disks; i++) + if (blocks[i] == NULL) + ptrs[i] = (void *) raid6_empty_zero_page; + else + ptrs[i] = page_address(blocks[i]); + + raid6_2data_recov(disks, bytes, faila, failb, ptrs); + + async_tx_sync_epilog(submit); + + return NULL; + } + + non_zero_srcs = 0; + for (i = 0; i < disks-2 && non_zero_srcs < 4; i++) + if (blocks[i]) + non_zero_srcs++; + switch (non_zero_srcs) { + case 0: + case 1: + /* There must be at least 2 sources - the failed devices. */ + BUG(); + + case 2: + /* dma devices do not uniformly understand a zero source pq + * operation (in contrast to the synchronous case), so + * explicitly handle the special case of a 4 disk array with + * both data disks missing. + */ + return __2data_recov_4(disks, bytes, faila, failb, blocks, submit); + case 3: + /* dma devices do not uniformly understand a single + * source pq operation (in contrast to the synchronous + * case), so explicitly handle the special case of a 5 disk + * array with 2 of 3 data disks missing. + */ + return __2data_recov_5(disks, bytes, faila, failb, blocks, submit); + default: + return __2data_recov_n(disks, bytes, faila, failb, blocks, submit); + } +} +EXPORT_SYMBOL_GPL(async_raid6_2data_recov); + +/** + * async_raid6_datap_recov - asynchronously calculate a data and the 'p' block + * @disks: number of disks in the RAID-6 array + * @bytes: block size + * @faila: failed drive index + * @blocks: array of source pointers where the last two entries are p and q + * @submit: submission/completion modifiers + */ +struct dma_async_tx_descriptor * +async_raid6_datap_recov(int disks, size_t bytes, int faila, + struct page **blocks, struct async_submit_ctl *submit) +{ + struct dma_async_tx_descriptor *tx = NULL; + struct page *p, *q, *dq; + u8 coef; + enum async_tx_flags flags = submit->flags; + dma_async_tx_callback cb_fn = submit->cb_fn; + void *cb_param = submit->cb_param; + void *scribble = submit->scribble; + int good_srcs, good, i; + struct page *srcs[2]; + + pr_debug("%s: disks: %d len: %zu\n", __func__, disks, bytes); + + /* if a dma resource is not available or a scribble buffer is not + * available punt to the synchronous path. In the 'dma not + * available' case be sure to use the scribble buffer to + * preserve the content of 'blocks' as the caller intended. + */ + if (!async_dma_find_channel(DMA_PQ) || !scribble) { + void **ptrs = scribble ? 
scribble : (void **) blocks; + + async_tx_quiesce(&submit->depend_tx); + for (i = 0; i < disks; i++) + if (blocks[i] == NULL) + ptrs[i] = (void*)raid6_empty_zero_page; + else + ptrs[i] = page_address(blocks[i]); + + raid6_datap_recov(disks, bytes, faila, ptrs); + + async_tx_sync_epilog(submit); + + return NULL; + } + + good_srcs = 0; + good = -1; + for (i = 0; i < disks-2; i++) { + if (i == faila) + continue; + if (blocks[i]) { + good = i; + good_srcs++; + if (good_srcs > 1) + break; + } + } + BUG_ON(good_srcs == 0); + + p = blocks[disks-2]; + q = blocks[disks-1]; + + /* Compute syndrome with zero for the missing data page + * Use the dead data page as temporary storage for delta q + */ + dq = blocks[faila]; + blocks[faila] = NULL; + blocks[disks-1] = dq; + + /* in the 4-disk case we only need to perform a single source + * multiplication with the one good data block. + */ + if (good_srcs == 1) { + struct page *g = blocks[good]; + + init_async_submit(submit, ASYNC_TX_FENCE, tx, NULL, NULL, + scribble); + tx = async_memcpy(p, g, 0, 0, bytes, submit); + + init_async_submit(submit, ASYNC_TX_FENCE, tx, NULL, NULL, + scribble); + tx = async_mult(dq, g, raid6_gfexp[good], bytes, submit); + } else { + init_async_submit(submit, ASYNC_TX_FENCE, tx, NULL, NULL, + scribble); + tx = async_gen_syndrome(blocks, 0, disks, bytes, submit); + } + + /* Restore pointer table */ + blocks[faila] = dq; + blocks[disks-1] = q; + + /* calculate g^{-faila} */ + coef = raid6_gfinv[raid6_gfexp[faila]]; + + srcs[0] = dq; + srcs[1] = q; + init_async_submit(submit, ASYNC_TX_FENCE|ASYNC_TX_XOR_DROP_DST, tx, + NULL, NULL, scribble); + tx = async_xor(dq, srcs, 0, 2, bytes, submit); + + init_async_submit(submit, ASYNC_TX_FENCE, tx, NULL, NULL, scribble); + tx = async_mult(dq, dq, coef, bytes, submit); + + srcs[0] = p; + srcs[1] = dq; + init_async_submit(submit, flags | ASYNC_TX_XOR_DROP_DST, tx, cb_fn, + cb_param, scribble); + tx = async_xor(p, srcs, 0, 2, bytes, submit); + + return tx; +} +EXPORT_SYMBOL_GPL(async_raid6_datap_recov); + +MODULE_AUTHOR("Dan Williams <dan.j.williams@intel.com>"); +MODULE_DESCRIPTION("asynchronous RAID-6 recovery api"); +MODULE_LICENSE("GPL"); diff --git a/crypto/async_tx/async_tx.c b/crypto/async_tx/async_tx.c new file mode 100644 index 00000000..84212097 --- /dev/null +++ b/crypto/async_tx/async_tx.c @@ -0,0 +1,293 @@ +/* + * core routines for the asynchronous memory transfer/transform api + * + * Copyright © 2006, Intel Corporation. + * + * Dan Williams <dan.j.williams@intel.com> + * + * with architecture considerations by: + * Neil Brown <neilb@suse.de> + * Jeff Garzik <jeff@garzik.org> + * + * This program is free software; you can redistribute it and/or modify it + * under the terms and conditions of the GNU General Public License, + * version 2, as published by the Free Software Foundation. + * + * This program is distributed in the hope it will be useful, but WITHOUT + * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or + * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for + * more details. + * + * You should have received a copy of the GNU General Public License along with + * this program; if not, write to the Free Software Foundation, Inc., + * 51 Franklin St - Fifth Floor, Boston, MA 02110-1301 USA. 
+ * + */ +#include <linux/rculist.h> +#include <linux/module.h> +#include <linux/kernel.h> +#include <linux/async_tx.h> + +#ifdef CONFIG_DMA_ENGINE +static int __init async_tx_init(void) +{ + async_dmaengine_get(); + + printk(KERN_INFO "async_tx: api initialized (async)\n"); + + return 0; +} + +static void __exit async_tx_exit(void) +{ + async_dmaengine_put(); +} + +module_init(async_tx_init); +module_exit(async_tx_exit); + +/** + * __async_tx_find_channel - find a channel to carry out the operation or let + * the transaction execute synchronously + * @submit: transaction dependency and submission modifiers + * @tx_type: transaction type + */ +struct dma_chan * +__async_tx_find_channel(struct async_submit_ctl *submit, + enum dma_transaction_type tx_type) +{ + struct dma_async_tx_descriptor *depend_tx = submit->depend_tx; + + /* see if we can keep the chain on one channel */ + if (depend_tx && + dma_has_cap(tx_type, depend_tx->chan->device->cap_mask)) + return depend_tx->chan; + return async_dma_find_channel(tx_type); +} +EXPORT_SYMBOL_GPL(__async_tx_find_channel); +#endif + + +/** + * async_tx_channel_switch - queue an interrupt descriptor with a dependency + * pre-attached. + * @depend_tx: the operation that must finish before the new operation runs + * @tx: the new operation + */ +static void +async_tx_channel_switch(struct dma_async_tx_descriptor *depend_tx, + struct dma_async_tx_descriptor *tx) +{ + struct dma_chan *chan = depend_tx->chan; + struct dma_device *device = chan->device; + struct dma_async_tx_descriptor *intr_tx = (void *) ~0; + + /* first check to see if we can still append to depend_tx */ + txd_lock(depend_tx); + if (txd_parent(depend_tx) && depend_tx->chan == tx->chan) { + txd_chain(depend_tx, tx); + intr_tx = NULL; + } + txd_unlock(depend_tx); + + /* attached dependency, flush the parent channel */ + if (!intr_tx) { + device->device_issue_pending(chan); + return; + } + + /* see if we can schedule an interrupt + * otherwise poll for completion + */ + if (dma_has_cap(DMA_INTERRUPT, device->cap_mask)) + intr_tx = device->device_prep_dma_interrupt(chan, 0); + else + intr_tx = NULL; + + if (intr_tx) { + intr_tx->callback = NULL; + intr_tx->callback_param = NULL; + /* safe to chain outside the lock since we know we are + * not submitted yet + */ + txd_chain(intr_tx, tx); + + /* check if we need to append */ + txd_lock(depend_tx); + if (txd_parent(depend_tx)) { + txd_chain(depend_tx, intr_tx); + async_tx_ack(intr_tx); + intr_tx = NULL; + } + txd_unlock(depend_tx); + + if (intr_tx) { + txd_clear_parent(intr_tx); + intr_tx->tx_submit(intr_tx); + async_tx_ack(intr_tx); + } + device->device_issue_pending(chan); + } else { + if (dma_wait_for_async_tx(depend_tx) == DMA_ERROR) + panic("%s: DMA_ERROR waiting for depend_tx\n", + __func__); + tx->tx_submit(tx); + } +} + + +/** + * submit_disposition - flags for routing an incoming operation + * @ASYNC_TX_SUBMITTED: we were able to append the new operation under the lock + * @ASYNC_TX_CHANNEL_SWITCH: when the lock is dropped schedule a channel switch + * @ASYNC_TX_DIRECT_SUBMIT: when the lock is dropped submit directly + * + * while holding depend_tx->lock we must avoid submitting new operations + * to prevent a circular locking dependency with drivers that already + * hold a channel lock when calling async_tx_run_dependencies. 
+ */ +enum submit_disposition { + ASYNC_TX_SUBMITTED, + ASYNC_TX_CHANNEL_SWITCH, + ASYNC_TX_DIRECT_SUBMIT, +}; + +void +async_tx_submit(struct dma_chan *chan, struct dma_async_tx_descriptor *tx, + struct async_submit_ctl *submit) +{ + struct dma_async_tx_descriptor *depend_tx = submit->depend_tx; + + tx->callback = submit->cb_fn; + tx->callback_param = submit->cb_param; + + if (depend_tx) { + enum submit_disposition s; + + /* sanity check the dependency chain: + * 1/ if ack is already set then we cannot be sure + * we are referring to the correct operation + * 2/ dependencies are 1:1 i.e. two transactions can + * not depend on the same parent + */ + BUG_ON(async_tx_test_ack(depend_tx) || txd_next(depend_tx) || + txd_parent(tx)); + + /* the lock prevents async_tx_run_dependencies from missing + * the setting of ->next when ->parent != NULL + */ + txd_lock(depend_tx); + if (txd_parent(depend_tx)) { + /* we have a parent so we can not submit directly + * if we are staying on the same channel: append + * else: channel switch + */ + if (depend_tx->chan == chan) { + txd_chain(depend_tx, tx); + s = ASYNC_TX_SUBMITTED; + } else + s = ASYNC_TX_CHANNEL_SWITCH; + } else { + /* we do not have a parent so we may be able to submit + * directly if we are staying on the same channel + */ + if (depend_tx->chan == chan) + s = ASYNC_TX_DIRECT_SUBMIT; + else + s = ASYNC_TX_CHANNEL_SWITCH; + } + txd_unlock(depend_tx); + + switch (s) { + case ASYNC_TX_SUBMITTED: + break; + case ASYNC_TX_CHANNEL_SWITCH: + async_tx_channel_switch(depend_tx, tx); + break; + case ASYNC_TX_DIRECT_SUBMIT: + txd_clear_parent(tx); + tx->tx_submit(tx); + break; + } + } else { + txd_clear_parent(tx); + tx->tx_submit(tx); + } + + if (submit->flags & ASYNC_TX_ACK) + async_tx_ack(tx); + + if (depend_tx) + async_tx_ack(depend_tx); +} +EXPORT_SYMBOL_GPL(async_tx_submit); + +/** + * async_trigger_callback - schedules the callback function to be run + * @submit: submission and completion parameters + * + * honored flags: ASYNC_TX_ACK + * + * The callback is run after any dependent operations have completed. + */ +struct dma_async_tx_descriptor * +async_trigger_callback(struct async_submit_ctl *submit) +{ + struct dma_chan *chan; + struct dma_device *device; + struct dma_async_tx_descriptor *tx; + struct dma_async_tx_descriptor *depend_tx = submit->depend_tx; + + if (depend_tx) { + chan = depend_tx->chan; + device = chan->device; + + /* see if we can schedule an interrupt + * otherwise poll for completion + */ + if (device && !dma_has_cap(DMA_INTERRUPT, device->cap_mask)) + device = NULL; + + tx = device ? 
device->device_prep_dma_interrupt(chan, 0) : NULL; + } else + tx = NULL; + + if (tx) { + pr_debug("%s: (async)\n", __func__); + + async_tx_submit(chan, tx, submit); + } else { + pr_debug("%s: (sync)\n", __func__); + + /* wait for any prerequisite operations */ + async_tx_quiesce(&submit->depend_tx); + + async_tx_sync_epilog(submit); + } + + return tx; +} +EXPORT_SYMBOL_GPL(async_trigger_callback); + +/** + * async_tx_quiesce - ensure tx is complete and freeable upon return + * @tx - transaction to quiesce + */ +void async_tx_quiesce(struct dma_async_tx_descriptor **tx) +{ + if (*tx) { + /* if ack is already set then we cannot be sure + * we are referring to the correct operation + */ + BUG_ON(async_tx_test_ack(*tx)); + if (dma_wait_for_async_tx(*tx) == DMA_ERROR) + panic("DMA_ERROR waiting for transaction\n"); + async_tx_ack(*tx); + *tx = NULL; + } +} +EXPORT_SYMBOL_GPL(async_tx_quiesce); + +MODULE_AUTHOR("Intel Corporation"); +MODULE_DESCRIPTION("Asynchronous Bulk Memory Transactions API"); +MODULE_LICENSE("GPL"); diff --git a/crypto/async_tx/async_xor.c b/crypto/async_tx/async_xor.c new file mode 100644 index 00000000..154cc843 --- /dev/null +++ b/crypto/async_tx/async_xor.c @@ -0,0 +1,339 @@ +/* + * xor offload engine api + * + * Copyright © 2006, Intel Corporation. + * + * Dan Williams <dan.j.williams@intel.com> + * + * with architecture considerations by: + * Neil Brown <neilb@suse.de> + * Jeff Garzik <jeff@garzik.org> + * + * This program is free software; you can redistribute it and/or modify it + * under the terms and conditions of the GNU General Public License, + * version 2, as published by the Free Software Foundation. + * + * This program is distributed in the hope it will be useful, but WITHOUT + * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or + * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for + * more details. + * + * You should have received a copy of the GNU General Public License along with + * this program; if not, write to the Free Software Foundation, Inc., + * 51 Franklin St - Fifth Floor, Boston, MA 02110-1301 USA. 
+ * + */ +#include <linux/kernel.h> +#include <linux/interrupt.h> +#include <linux/module.h> +#include <linux/mm.h> +#include <linux/dma-mapping.h> +#include <linux/raid/xor.h> +#include <linux/async_tx.h> + +/* do_async_xor - dma map the pages and perform the xor with an engine */ +static __async_inline struct dma_async_tx_descriptor * +do_async_xor(struct dma_chan *chan, struct page *dest, struct page **src_list, + unsigned int offset, int src_cnt, size_t len, dma_addr_t *dma_src, + struct async_submit_ctl *submit) +{ + struct dma_device *dma = chan->device; + struct dma_async_tx_descriptor *tx = NULL; + int src_off = 0; + int i; + dma_async_tx_callback cb_fn_orig = submit->cb_fn; + void *cb_param_orig = submit->cb_param; + enum async_tx_flags flags_orig = submit->flags; + enum dma_ctrl_flags dma_flags; + int xor_src_cnt = 0; + dma_addr_t dma_dest; + + /* map the dest bidrectional in case it is re-used as a source */ + dma_dest = dma_map_page(dma->dev, dest, offset, len, DMA_BIDIRECTIONAL); + for (i = 0; i < src_cnt; i++) { + /* only map the dest once */ + if (!src_list[i]) + continue; + if (unlikely(src_list[i] == dest)) { + dma_src[xor_src_cnt++] = dma_dest; + continue; + } + dma_src[xor_src_cnt++] = dma_map_page(dma->dev, src_list[i], offset, + len, DMA_TO_DEVICE); + } + src_cnt = xor_src_cnt; + + while (src_cnt) { + submit->flags = flags_orig; + dma_flags = 0; + xor_src_cnt = min(src_cnt, (int)dma->max_xor); + /* if we are submitting additional xors, leave the chain open, + * clear the callback parameters, and leave the destination + * buffer mapped + */ + if (src_cnt > xor_src_cnt) { + submit->flags &= ~ASYNC_TX_ACK; + submit->flags |= ASYNC_TX_FENCE; + dma_flags = DMA_COMPL_SKIP_DEST_UNMAP; + submit->cb_fn = NULL; + submit->cb_param = NULL; + } else { + submit->cb_fn = cb_fn_orig; + submit->cb_param = cb_param_orig; + } + if (submit->cb_fn) + dma_flags |= DMA_PREP_INTERRUPT; + if (submit->flags & ASYNC_TX_FENCE) + dma_flags |= DMA_PREP_FENCE; + /* Since we have clobbered the src_list we are committed + * to doing this asynchronously. 
Drivers force forward progress + * in case they can not provide a descriptor + */ + tx = dma->device_prep_dma_xor(chan, dma_dest, &dma_src[src_off], + xor_src_cnt, len, dma_flags); + + if (unlikely(!tx)) + async_tx_quiesce(&submit->depend_tx); + + /* spin wait for the preceding transactions to complete */ + while (unlikely(!tx)) { + dma_async_issue_pending(chan); + tx = dma->device_prep_dma_xor(chan, dma_dest, + &dma_src[src_off], + xor_src_cnt, len, + dma_flags); + } + + async_tx_submit(chan, tx, submit); + submit->depend_tx = tx; + + if (src_cnt > xor_src_cnt) { + /* drop completed sources */ + src_cnt -= xor_src_cnt; + src_off += xor_src_cnt; + + /* use the intermediate result a source */ + dma_src[--src_off] = dma_dest; + src_cnt++; + } else + break; + } + + return tx; +} + +static void +do_sync_xor(struct page *dest, struct page **src_list, unsigned int offset, + int src_cnt, size_t len, struct async_submit_ctl *submit) +{ + int i; + int xor_src_cnt = 0; + int src_off = 0; + void *dest_buf; + void **srcs; + + if (submit->scribble) + srcs = submit->scribble; + else + srcs = (void **) src_list; + + /* convert to buffer pointers */ + for (i = 0; i < src_cnt; i++) + if (src_list[i]) + srcs[xor_src_cnt++] = page_address(src_list[i]) + offset; + src_cnt = xor_src_cnt; + /* set destination address */ + dest_buf = page_address(dest) + offset; + + if (submit->flags & ASYNC_TX_XOR_ZERO_DST) + memset(dest_buf, 0, len); + + while (src_cnt > 0) { + /* process up to 'MAX_XOR_BLOCKS' sources */ + xor_src_cnt = min(src_cnt, MAX_XOR_BLOCKS); + xor_blocks(xor_src_cnt, len, dest_buf, &srcs[src_off]); + + /* drop completed sources */ + src_cnt -= xor_src_cnt; + src_off += xor_src_cnt; + } + + async_tx_sync_epilog(submit); +} + +/** + * async_xor - attempt to xor a set of blocks with a dma engine. + * @dest: destination page + * @src_list: array of source pages + * @offset: common src/dst offset to start transaction + * @src_cnt: number of source pages + * @len: length in bytes + * @submit: submission / completion modifiers + * + * honored flags: ASYNC_TX_ACK, ASYNC_TX_XOR_ZERO_DST, ASYNC_TX_XOR_DROP_DST + * + * xor_blocks always uses the dest as a source so the + * ASYNC_TX_XOR_ZERO_DST flag must be set to not include dest data in + * the calculation. The assumption with dma eninges is that they only + * use the destination buffer as a source when it is explicity specified + * in the source list. + * + * src_list note: if the dest is also a source it must be at index zero. + * The contents of this array will be overwritten if a scribble region + * is not specified. 
+ */ +struct dma_async_tx_descriptor * +async_xor(struct page *dest, struct page **src_list, unsigned int offset, + int src_cnt, size_t len, struct async_submit_ctl *submit) +{ + struct dma_chan *chan = async_tx_find_channel(submit, DMA_XOR, + &dest, 1, src_list, + src_cnt, len); + dma_addr_t *dma_src = NULL; + + BUG_ON(src_cnt <= 1); + + if (submit->scribble) + dma_src = submit->scribble; + else if (sizeof(dma_addr_t) <= sizeof(struct page *)) + dma_src = (dma_addr_t *) src_list; + + if (dma_src && chan && is_dma_xor_aligned(chan->device, offset, 0, len)) { + /* run the xor asynchronously */ + pr_debug("%s (async): len: %zu\n", __func__, len); + + return do_async_xor(chan, dest, src_list, offset, src_cnt, len, + dma_src, submit); + } else { + /* run the xor synchronously */ + pr_debug("%s (sync): len: %zu\n", __func__, len); + WARN_ONCE(chan, "%s: no space for dma address conversion\n", + __func__); + + /* in the sync case the dest is an implied source + * (assumes the dest is the first source) + */ + if (submit->flags & ASYNC_TX_XOR_DROP_DST) { + src_cnt--; + src_list++; + } + + /* wait for any prerequisite operations */ + async_tx_quiesce(&submit->depend_tx); + + do_sync_xor(dest, src_list, offset, src_cnt, len, submit); + + return NULL; + } +} +EXPORT_SYMBOL_GPL(async_xor); + +static int page_is_zero(struct page *p, unsigned int offset, size_t len) +{ + char *a = page_address(p) + offset; + return ((*(u32 *) a) == 0 && + memcmp(a, a + 4, len - 4) == 0); +} + +static inline struct dma_chan * +xor_val_chan(struct async_submit_ctl *submit, struct page *dest, + struct page **src_list, int src_cnt, size_t len) +{ + #ifdef CONFIG_ASYNC_TX_DISABLE_XOR_VAL_DMA + return NULL; + #endif + return async_tx_find_channel(submit, DMA_XOR_VAL, &dest, 1, src_list, + src_cnt, len); +} + +/** + * async_xor_val - attempt a xor parity check with a dma engine. + * @dest: destination page used if the xor is performed synchronously + * @src_list: array of source pages + * @offset: offset in pages to start transaction + * @src_cnt: number of source pages + * @len: length in bytes + * @result: 0 if sum == 0 else non-zero + * @submit: submission / completion modifiers + * + * honored flags: ASYNC_TX_ACK + * + * src_list note: if the dest is also a source it must be at index zero. + * The contents of this array will be overwritten if a scribble region + * is not specified. + */ +struct dma_async_tx_descriptor * +async_xor_val(struct page *dest, struct page **src_list, unsigned int offset, + int src_cnt, size_t len, enum sum_check_flags *result, + struct async_submit_ctl *submit) +{ + struct dma_chan *chan = xor_val_chan(submit, dest, src_list, src_cnt, len); + struct dma_device *device = chan ? 
chan->device : NULL; + struct dma_async_tx_descriptor *tx = NULL; + dma_addr_t *dma_src = NULL; + + BUG_ON(src_cnt <= 1); + + if (submit->scribble) + dma_src = submit->scribble; + else if (sizeof(dma_addr_t) <= sizeof(struct page *)) + dma_src = (dma_addr_t *) src_list; + + if (dma_src && device && src_cnt <= device->max_xor && + is_dma_xor_aligned(device, offset, 0, len)) { + unsigned long dma_prep_flags = 0; + int i; + + pr_debug("%s: (async) len: %zu\n", __func__, len); + + if (submit->cb_fn) + dma_prep_flags |= DMA_PREP_INTERRUPT; + if (submit->flags & ASYNC_TX_FENCE) + dma_prep_flags |= DMA_PREP_FENCE; + for (i = 0; i < src_cnt; i++) + dma_src[i] = dma_map_page(device->dev, src_list[i], + offset, len, DMA_TO_DEVICE); + + tx = device->device_prep_dma_xor_val(chan, dma_src, src_cnt, + len, result, + dma_prep_flags); + if (unlikely(!tx)) { + async_tx_quiesce(&submit->depend_tx); + + while (!tx) { + dma_async_issue_pending(chan); + tx = device->device_prep_dma_xor_val(chan, + dma_src, src_cnt, len, result, + dma_prep_flags); + } + } + + async_tx_submit(chan, tx, submit); + } else { + enum async_tx_flags flags_orig = submit->flags; + + pr_debug("%s: (sync) len: %zu\n", __func__, len); + WARN_ONCE(device && src_cnt <= device->max_xor, + "%s: no space for dma address conversion\n", + __func__); + + submit->flags |= ASYNC_TX_XOR_DROP_DST; + submit->flags &= ~ASYNC_TX_ACK; + + tx = async_xor(dest, src_list, offset, src_cnt, len, submit); + + async_tx_quiesce(&tx); + + *result = !page_is_zero(dest, offset, len) << SUM_CHECK_P; + + async_tx_sync_epilog(submit); + submit->flags = flags_orig; + } + + return tx; +} +EXPORT_SYMBOL_GPL(async_xor_val); + +MODULE_AUTHOR("Intel Corporation"); +MODULE_DESCRIPTION("asynchronous xor/xor-zero-sum api"); +MODULE_LICENSE("GPL"); diff --git a/crypto/async_tx/raid6test.c b/crypto/async_tx/raid6test.c new file mode 100644 index 00000000..aa2b0270 --- /dev/null +++ b/crypto/async_tx/raid6test.c @@ -0,0 +1,250 @@ +/* + * asynchronous raid6 recovery self test + * Copyright (c) 2009, Intel Corporation. + * + * based on drivers/md/raid6test/test.c: + * Copyright 2002-2007 H. Peter Anvin + * + * This program is free software; you can redistribute it and/or modify it + * under the terms and conditions of the GNU General Public License, + * version 2, as published by the Free Software Foundation. + * + * This program is distributed in the hope it will be useful, but WITHOUT + * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or + * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for + * more details. + * + * You should have received a copy of the GNU General Public License along with + * this program; if not, write to the Free Software Foundation, Inc., + * 51 Franklin St - Fifth Floor, Boston, MA 02110-1301 USA. + * + */ +#include <linux/async_tx.h> +#include <linux/gfp.h> +#include <linux/mm.h> +#include <linux/random.h> +#include <linux/module.h> + +#undef pr +#define pr(fmt, args...) 
pr_info("raid6test: " fmt, ##args) + +#define NDISKS 16 /* Including P and Q */ + +static struct page *dataptrs[NDISKS]; +static addr_conv_t addr_conv[NDISKS]; +static struct page *data[NDISKS+3]; +static struct page *spare; +static struct page *recovi; +static struct page *recovj; + +static void callback(void *param) +{ + struct completion *cmp = param; + + complete(cmp); +} + +static void makedata(int disks) +{ + int i, j; + + for (i = 0; i < disks; i++) { + for (j = 0; j < PAGE_SIZE/sizeof(u32); j += sizeof(u32)) { + u32 *p = page_address(data[i]) + j; + + *p = random32(); + } + + dataptrs[i] = data[i]; + } +} + +static char disk_type(int d, int disks) +{ + if (d == disks - 2) + return 'P'; + else if (d == disks - 1) + return 'Q'; + else + return 'D'; +} + +/* Recover two failed blocks. */ +static void raid6_dual_recov(int disks, size_t bytes, int faila, int failb, struct page **ptrs) +{ + struct async_submit_ctl submit; + struct completion cmp; + struct dma_async_tx_descriptor *tx = NULL; + enum sum_check_flags result = ~0; + + if (faila > failb) + swap(faila, failb); + + if (failb == disks-1) { + if (faila == disks-2) { + /* P+Q failure. Just rebuild the syndrome. */ + init_async_submit(&submit, 0, NULL, NULL, NULL, addr_conv); + tx = async_gen_syndrome(ptrs, 0, disks, bytes, &submit); + } else { + struct page *blocks[disks]; + struct page *dest; + int count = 0; + int i; + + /* data+Q failure. Reconstruct data from P, + * then rebuild syndrome + */ + for (i = disks; i-- ; ) { + if (i == faila || i == failb) + continue; + blocks[count++] = ptrs[i]; + } + dest = ptrs[faila]; + init_async_submit(&submit, ASYNC_TX_XOR_ZERO_DST, NULL, + NULL, NULL, addr_conv); + tx = async_xor(dest, blocks, 0, count, bytes, &submit); + + init_async_submit(&submit, 0, tx, NULL, NULL, addr_conv); + tx = async_gen_syndrome(ptrs, 0, disks, bytes, &submit); + } + } else { + if (failb == disks-2) { + /* data+P failure. */ + init_async_submit(&submit, 0, NULL, NULL, NULL, addr_conv); + tx = async_raid6_datap_recov(disks, bytes, faila, ptrs, &submit); + } else { + /* data+data failure. */ + init_async_submit(&submit, 0, NULL, NULL, NULL, addr_conv); + tx = async_raid6_2data_recov(disks, bytes, faila, failb, ptrs, &submit); + } + } + init_completion(&cmp); + init_async_submit(&submit, ASYNC_TX_ACK, tx, callback, &cmp, addr_conv); + tx = async_syndrome_val(ptrs, 0, disks, bytes, &result, spare, &submit); + async_tx_issue_pending(tx); + + if (wait_for_completion_timeout(&cmp, msecs_to_jiffies(3000)) == 0) + pr("%s: timeout! (faila: %d failb: %d disks: %d)\n", + __func__, faila, failb, disks); + + if (result != 0) + pr("%s: validation failure! faila: %d failb: %d sum_check_flags: %x\n", + __func__, faila, failb, result); +} + +static int test_disks(int i, int j, int disks) +{ + int erra, errb; + + memset(page_address(recovi), 0xf0, PAGE_SIZE); + memset(page_address(recovj), 0xba, PAGE_SIZE); + + dataptrs[i] = recovi; + dataptrs[j] = recovj; + + raid6_dual_recov(disks, PAGE_SIZE, i, j, dataptrs); + + erra = memcmp(page_address(data[i]), page_address(recovi), PAGE_SIZE); + errb = memcmp(page_address(data[j]), page_address(recovj), PAGE_SIZE); + + pr("%s(%d, %d): faila=%3d(%c) failb=%3d(%c) %s\n", + __func__, i, j, i, disk_type(i, disks), j, disk_type(j, disks), + (!erra && !errb) ? "OK" : !erra ? "ERRB" : !errb ? 
"ERRA" : "ERRAB"); + + dataptrs[i] = data[i]; + dataptrs[j] = data[j]; + + return erra || errb; +} + +static int test(int disks, int *tests) +{ + struct dma_async_tx_descriptor *tx; + struct async_submit_ctl submit; + struct completion cmp; + int err = 0; + int i, j; + + recovi = data[disks]; + recovj = data[disks+1]; + spare = data[disks+2]; + + makedata(disks); + + /* Nuke syndromes */ + memset(page_address(data[disks-2]), 0xee, PAGE_SIZE); + memset(page_address(data[disks-1]), 0xee, PAGE_SIZE); + + /* Generate assumed good syndrome */ + init_completion(&cmp); + init_async_submit(&submit, ASYNC_TX_ACK, NULL, callback, &cmp, addr_conv); + tx = async_gen_syndrome(dataptrs, 0, disks, PAGE_SIZE, &submit); + async_tx_issue_pending(tx); + + if (wait_for_completion_timeout(&cmp, msecs_to_jiffies(3000)) == 0) { + pr("error: initial gen_syndrome(%d) timed out\n", disks); + return 1; + } + + pr("testing the %d-disk case...\n", disks); + for (i = 0; i < disks-1; i++) + for (j = i+1; j < disks; j++) { + (*tests)++; + err += test_disks(i, j, disks); + } + + return err; +} + + +static int raid6_test(void) +{ + int err = 0; + int tests = 0; + int i; + + for (i = 0; i < NDISKS+3; i++) { + data[i] = alloc_page(GFP_KERNEL); + if (!data[i]) { + while (i--) + put_page(data[i]); + return -ENOMEM; + } + } + + /* the 4-disk and 5-disk cases are special for the recovery code */ + if (NDISKS > 4) + err += test(4, &tests); + if (NDISKS > 5) + err += test(5, &tests); + /* the 11 and 12 disk cases are special for ioatdma (p-disabled + * q-continuation without extended descriptor) + */ + if (NDISKS > 12) { + err += test(11, &tests); + err += test(12, &tests); + } + err += test(NDISKS, &tests); + + pr("\n"); + pr("complete (%d tests, %d failure%s)\n", + tests, err, err == 1 ? "" : "s"); + + for (i = 0; i < NDISKS+3; i++) + put_page(data[i]); + + return 0; +} + +static void raid6_test_exit(void) +{ +} + +/* when compiled-in wait for drivers to load first (assumes dma drivers + * are also compliled-in) + */ +late_initcall(raid6_test); +module_exit(raid6_test_exit); +MODULE_AUTHOR("Dan Williams <dan.j.williams@intel.com>"); +MODULE_DESCRIPTION("asynchronous RAID-6 recovery self tests"); +MODULE_LICENSE("GPL"); diff --git a/crypto/authenc.c b/crypto/authenc.c new file mode 100644 index 00000000..5ef7ba6b --- /dev/null +++ b/crypto/authenc.c @@ -0,0 +1,711 @@ +/* + * Authenc: Simple AEAD wrapper for IPsec + * + * Copyright (c) 2007 Herbert Xu <herbert@gondor.apana.org.au> + * + * This program is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License as published by the Free + * Software Foundation; either version 2 of the License, or (at your option) + * any later version. 
+ * + */ + +#include <crypto/aead.h> +#include <crypto/internal/hash.h> +#include <crypto/internal/skcipher.h> +#include <crypto/authenc.h> +#include <crypto/scatterwalk.h> +#include <linux/err.h> +#include <linux/init.h> +#include <linux/kernel.h> +#include <linux/module.h> +#include <linux/rtnetlink.h> +#include <linux/slab.h> +#include <linux/spinlock.h> + +typedef u8 *(*authenc_ahash_t)(struct aead_request *req, unsigned int flags); + +struct authenc_instance_ctx { + struct crypto_ahash_spawn auth; + struct crypto_skcipher_spawn enc; +}; + +struct crypto_authenc_ctx { + unsigned int reqoff; + struct crypto_ahash *auth; + struct crypto_ablkcipher *enc; +}; + +struct authenc_request_ctx { + unsigned int cryptlen; + struct scatterlist *sg; + struct scatterlist asg[2]; + struct scatterlist cipher[2]; + crypto_completion_t complete; + crypto_completion_t update_complete; + char tail[]; +}; + +static void authenc_request_complete(struct aead_request *req, int err) +{ + if (err != -EINPROGRESS) + aead_request_complete(req, err); +} + +static int crypto_authenc_setkey(struct crypto_aead *authenc, const u8 *key, + unsigned int keylen) +{ + unsigned int authkeylen; + unsigned int enckeylen; + struct crypto_authenc_ctx *ctx = crypto_aead_ctx(authenc); + struct crypto_ahash *auth = ctx->auth; + struct crypto_ablkcipher *enc = ctx->enc; + struct rtattr *rta = (void *)key; + struct crypto_authenc_key_param *param; + int err = -EINVAL; + + if (!RTA_OK(rta, keylen)) + goto badkey; + if (rta->rta_type != CRYPTO_AUTHENC_KEYA_PARAM) + goto badkey; + if (RTA_PAYLOAD(rta) < sizeof(*param)) + goto badkey; + + param = RTA_DATA(rta); + enckeylen = be32_to_cpu(param->enckeylen); + + key += RTA_ALIGN(rta->rta_len); + keylen -= RTA_ALIGN(rta->rta_len); + + if (keylen < enckeylen) + goto badkey; + + authkeylen = keylen - enckeylen; + + crypto_ahash_clear_flags(auth, CRYPTO_TFM_REQ_MASK); + crypto_ahash_set_flags(auth, crypto_aead_get_flags(authenc) & + CRYPTO_TFM_REQ_MASK); + err = crypto_ahash_setkey(auth, key, authkeylen); + crypto_aead_set_flags(authenc, crypto_ahash_get_flags(auth) & + CRYPTO_TFM_RES_MASK); + + if (err) + goto out; + + crypto_ablkcipher_clear_flags(enc, CRYPTO_TFM_REQ_MASK); + crypto_ablkcipher_set_flags(enc, crypto_aead_get_flags(authenc) & + CRYPTO_TFM_REQ_MASK); + err = crypto_ablkcipher_setkey(enc, key + authkeylen, enckeylen); + crypto_aead_set_flags(authenc, crypto_ablkcipher_get_flags(enc) & + CRYPTO_TFM_RES_MASK); + +out: + return err; + +badkey: + crypto_aead_set_flags(authenc, CRYPTO_TFM_RES_BAD_KEY_LEN); + goto out; +} + +static void authenc_geniv_ahash_update_done(struct crypto_async_request *areq, + int err) +{ + struct aead_request *req = areq->data; + struct crypto_aead *authenc = crypto_aead_reqtfm(req); + struct crypto_authenc_ctx *ctx = crypto_aead_ctx(authenc); + struct authenc_request_ctx *areq_ctx = aead_request_ctx(req); + struct ahash_request *ahreq = (void *)(areq_ctx->tail + ctx->reqoff); + + if (err) + goto out; + + ahash_request_set_crypt(ahreq, areq_ctx->sg, ahreq->result, + areq_ctx->cryptlen); + ahash_request_set_callback(ahreq, aead_request_flags(req) & + CRYPTO_TFM_REQ_MAY_SLEEP, + areq_ctx->complete, req); + + err = crypto_ahash_finup(ahreq); + if (err) + goto out; + + scatterwalk_map_and_copy(ahreq->result, areq_ctx->sg, + areq_ctx->cryptlen, + crypto_aead_authsize(authenc), 1); + +out: + authenc_request_complete(req, err); +} + +static void authenc_geniv_ahash_done(struct crypto_async_request *areq, int err) +{ + struct aead_request *req = areq->data; + 
struct crypto_aead *authenc = crypto_aead_reqtfm(req); + struct crypto_authenc_ctx *ctx = crypto_aead_ctx(authenc); + struct authenc_request_ctx *areq_ctx = aead_request_ctx(req); + struct ahash_request *ahreq = (void *)(areq_ctx->tail + ctx->reqoff); + + if (err) + goto out; + + scatterwalk_map_and_copy(ahreq->result, areq_ctx->sg, + areq_ctx->cryptlen, + crypto_aead_authsize(authenc), 1); + +out: + aead_request_complete(req, err); +} + +static void authenc_verify_ahash_update_done(struct crypto_async_request *areq, + int err) +{ + u8 *ihash; + unsigned int authsize; + struct ablkcipher_request *abreq; + struct aead_request *req = areq->data; + struct crypto_aead *authenc = crypto_aead_reqtfm(req); + struct crypto_authenc_ctx *ctx = crypto_aead_ctx(authenc); + struct authenc_request_ctx *areq_ctx = aead_request_ctx(req); + struct ahash_request *ahreq = (void *)(areq_ctx->tail + ctx->reqoff); + unsigned int cryptlen = req->cryptlen; + + if (err) + goto out; + + ahash_request_set_crypt(ahreq, areq_ctx->sg, ahreq->result, + areq_ctx->cryptlen); + ahash_request_set_callback(ahreq, aead_request_flags(req) & + CRYPTO_TFM_REQ_MAY_SLEEP, + areq_ctx->complete, req); + + err = crypto_ahash_finup(ahreq); + if (err) + goto out; + + authsize = crypto_aead_authsize(authenc); + cryptlen -= authsize; + ihash = ahreq->result + authsize; + scatterwalk_map_and_copy(ihash, areq_ctx->sg, areq_ctx->cryptlen, + authsize, 0); + + err = memcmp(ihash, ahreq->result, authsize) ? -EBADMSG : 0; + if (err) + goto out; + + abreq = aead_request_ctx(req); + ablkcipher_request_set_tfm(abreq, ctx->enc); + ablkcipher_request_set_callback(abreq, aead_request_flags(req), + req->base.complete, req->base.data); + ablkcipher_request_set_crypt(abreq, req->src, req->dst, + cryptlen, req->iv); + + err = crypto_ablkcipher_decrypt(abreq); + +out: + authenc_request_complete(req, err); +} + +static void authenc_verify_ahash_done(struct crypto_async_request *areq, + int err) +{ + u8 *ihash; + unsigned int authsize; + struct ablkcipher_request *abreq; + struct aead_request *req = areq->data; + struct crypto_aead *authenc = crypto_aead_reqtfm(req); + struct crypto_authenc_ctx *ctx = crypto_aead_ctx(authenc); + struct authenc_request_ctx *areq_ctx = aead_request_ctx(req); + struct ahash_request *ahreq = (void *)(areq_ctx->tail + ctx->reqoff); + unsigned int cryptlen = req->cryptlen; + + if (err) + goto out; + + authsize = crypto_aead_authsize(authenc); + cryptlen -= authsize; + ihash = ahreq->result + authsize; + scatterwalk_map_and_copy(ihash, areq_ctx->sg, areq_ctx->cryptlen, + authsize, 0); + + err = memcmp(ihash, ahreq->result, authsize) ? 
-EBADMSG : 0; + if (err) + goto out; + + abreq = aead_request_ctx(req); + ablkcipher_request_set_tfm(abreq, ctx->enc); + ablkcipher_request_set_callback(abreq, aead_request_flags(req), + req->base.complete, req->base.data); + ablkcipher_request_set_crypt(abreq, req->src, req->dst, + cryptlen, req->iv); + + err = crypto_ablkcipher_decrypt(abreq); + +out: + authenc_request_complete(req, err); +} + +static u8 *crypto_authenc_ahash_fb(struct aead_request *req, unsigned int flags) +{ + struct crypto_aead *authenc = crypto_aead_reqtfm(req); + struct crypto_authenc_ctx *ctx = crypto_aead_ctx(authenc); + struct crypto_ahash *auth = ctx->auth; + struct authenc_request_ctx *areq_ctx = aead_request_ctx(req); + struct ahash_request *ahreq = (void *)(areq_ctx->tail + ctx->reqoff); + u8 *hash = areq_ctx->tail; + int err; + + hash = (u8 *)ALIGN((unsigned long)hash + crypto_ahash_alignmask(auth), + crypto_ahash_alignmask(auth) + 1); + + ahash_request_set_tfm(ahreq, auth); + + err = crypto_ahash_init(ahreq); + if (err) + return ERR_PTR(err); + + ahash_request_set_crypt(ahreq, req->assoc, hash, req->assoclen); + ahash_request_set_callback(ahreq, aead_request_flags(req) & flags, + areq_ctx->update_complete, req); + + err = crypto_ahash_update(ahreq); + if (err) + return ERR_PTR(err); + + ahash_request_set_crypt(ahreq, areq_ctx->sg, hash, + areq_ctx->cryptlen); + ahash_request_set_callback(ahreq, aead_request_flags(req) & flags, + areq_ctx->complete, req); + + err = crypto_ahash_finup(ahreq); + if (err) + return ERR_PTR(err); + + return hash; +} + +static u8 *crypto_authenc_ahash(struct aead_request *req, unsigned int flags) +{ + struct crypto_aead *authenc = crypto_aead_reqtfm(req); + struct crypto_authenc_ctx *ctx = crypto_aead_ctx(authenc); + struct crypto_ahash *auth = ctx->auth; + struct authenc_request_ctx *areq_ctx = aead_request_ctx(req); + struct ahash_request *ahreq = (void *)(areq_ctx->tail + ctx->reqoff); + u8 *hash = areq_ctx->tail; + int err; + + hash = (u8 *)ALIGN((unsigned long)hash + crypto_ahash_alignmask(auth), + crypto_ahash_alignmask(auth) + 1); + + ahash_request_set_tfm(ahreq, auth); + ahash_request_set_crypt(ahreq, areq_ctx->sg, hash, + areq_ctx->cryptlen); + ahash_request_set_callback(ahreq, aead_request_flags(req) & flags, + areq_ctx->complete, req); + + err = crypto_ahash_digest(ahreq); + if (err) + return ERR_PTR(err); + + return hash; +} + +static int crypto_authenc_genicv(struct aead_request *req, u8 *iv, + unsigned int flags) +{ + struct crypto_aead *authenc = crypto_aead_reqtfm(req); + struct authenc_request_ctx *areq_ctx = aead_request_ctx(req); + struct scatterlist *dst = req->dst; + struct scatterlist *assoc = req->assoc; + struct scatterlist *cipher = areq_ctx->cipher; + struct scatterlist *asg = areq_ctx->asg; + unsigned int ivsize = crypto_aead_ivsize(authenc); + unsigned int cryptlen = req->cryptlen; + authenc_ahash_t authenc_ahash_fn = crypto_authenc_ahash_fb; + struct page *dstp; + u8 *vdst; + u8 *hash; + + dstp = sg_page(dst); + vdst = PageHighMem(dstp) ? 
NULL : page_address(dstp) + dst->offset; + + if (ivsize) { + sg_init_table(cipher, 2); + sg_set_buf(cipher, iv, ivsize); + scatterwalk_crypto_chain(cipher, dst, vdst == iv + ivsize, 2); + dst = cipher; + cryptlen += ivsize; + } + + if (sg_is_last(assoc)) { + authenc_ahash_fn = crypto_authenc_ahash; + sg_init_table(asg, 2); + sg_set_page(asg, sg_page(assoc), assoc->length, assoc->offset); + scatterwalk_crypto_chain(asg, dst, 0, 2); + dst = asg; + cryptlen += req->assoclen; + } + + areq_ctx->cryptlen = cryptlen; + areq_ctx->sg = dst; + + areq_ctx->complete = authenc_geniv_ahash_done; + areq_ctx->update_complete = authenc_geniv_ahash_update_done; + + hash = authenc_ahash_fn(req, flags); + if (IS_ERR(hash)) + return PTR_ERR(hash); + + scatterwalk_map_and_copy(hash, dst, cryptlen, + crypto_aead_authsize(authenc), 1); + return 0; +} + +static void crypto_authenc_encrypt_done(struct crypto_async_request *req, + int err) +{ + struct aead_request *areq = req->data; + + if (!err) { + struct crypto_aead *authenc = crypto_aead_reqtfm(areq); + struct crypto_authenc_ctx *ctx = crypto_aead_ctx(authenc); + struct ablkcipher_request *abreq = aead_request_ctx(areq); + u8 *iv = (u8 *)(abreq + 1) + + crypto_ablkcipher_reqsize(ctx->enc); + + err = crypto_authenc_genicv(areq, iv, 0); + } + + authenc_request_complete(areq, err); +} + +static int crypto_authenc_encrypt(struct aead_request *req) +{ + struct crypto_aead *authenc = crypto_aead_reqtfm(req); + struct crypto_authenc_ctx *ctx = crypto_aead_ctx(authenc); + struct authenc_request_ctx *areq_ctx = aead_request_ctx(req); + struct crypto_ablkcipher *enc = ctx->enc; + struct scatterlist *dst = req->dst; + unsigned int cryptlen = req->cryptlen; + struct ablkcipher_request *abreq = (void *)(areq_ctx->tail + + ctx->reqoff); + u8 *iv = (u8 *)abreq - crypto_ablkcipher_ivsize(enc); + int err; + + ablkcipher_request_set_tfm(abreq, enc); + ablkcipher_request_set_callback(abreq, aead_request_flags(req), + crypto_authenc_encrypt_done, req); + ablkcipher_request_set_crypt(abreq, req->src, dst, cryptlen, req->iv); + + memcpy(iv, req->iv, crypto_aead_ivsize(authenc)); + + err = crypto_ablkcipher_encrypt(abreq); + if (err) + return err; + + return crypto_authenc_genicv(req, iv, CRYPTO_TFM_REQ_MAY_SLEEP); +} + +static void crypto_authenc_givencrypt_done(struct crypto_async_request *req, + int err) +{ + struct aead_request *areq = req->data; + + if (!err) { + struct skcipher_givcrypt_request *greq = aead_request_ctx(areq); + + err = crypto_authenc_genicv(areq, greq->giv, 0); + } + + authenc_request_complete(areq, err); +} + +static int crypto_authenc_givencrypt(struct aead_givcrypt_request *req) +{ + struct crypto_aead *authenc = aead_givcrypt_reqtfm(req); + struct crypto_authenc_ctx *ctx = crypto_aead_ctx(authenc); + struct aead_request *areq = &req->areq; + struct skcipher_givcrypt_request *greq = aead_request_ctx(areq); + u8 *iv = req->giv; + int err; + + skcipher_givcrypt_set_tfm(greq, ctx->enc); + skcipher_givcrypt_set_callback(greq, aead_request_flags(areq), + crypto_authenc_givencrypt_done, areq); + skcipher_givcrypt_set_crypt(greq, areq->src, areq->dst, areq->cryptlen, + areq->iv); + skcipher_givcrypt_set_giv(greq, iv, req->seq); + + err = crypto_skcipher_givencrypt(greq); + if (err) + return err; + + return crypto_authenc_genicv(areq, iv, CRYPTO_TFM_REQ_MAY_SLEEP); +} + +static int crypto_authenc_verify(struct aead_request *req, + authenc_ahash_t authenc_ahash_fn) +{ + struct crypto_aead *authenc = crypto_aead_reqtfm(req); + struct authenc_request_ctx *areq_ctx = 
aead_request_ctx(req); + u8 *ohash; + u8 *ihash; + unsigned int authsize; + + areq_ctx->complete = authenc_verify_ahash_done; + areq_ctx->update_complete = authenc_verify_ahash_update_done; + + ohash = authenc_ahash_fn(req, CRYPTO_TFM_REQ_MAY_SLEEP); + if (IS_ERR(ohash)) + return PTR_ERR(ohash); + + authsize = crypto_aead_authsize(authenc); + ihash = ohash + authsize; + scatterwalk_map_and_copy(ihash, areq_ctx->sg, areq_ctx->cryptlen, + authsize, 0); + return memcmp(ihash, ohash, authsize) ? -EBADMSG : 0; +} + +static int crypto_authenc_iverify(struct aead_request *req, u8 *iv, + unsigned int cryptlen) +{ + struct crypto_aead *authenc = crypto_aead_reqtfm(req); + struct authenc_request_ctx *areq_ctx = aead_request_ctx(req); + struct scatterlist *src = req->src; + struct scatterlist *assoc = req->assoc; + struct scatterlist *cipher = areq_ctx->cipher; + struct scatterlist *asg = areq_ctx->asg; + unsigned int ivsize = crypto_aead_ivsize(authenc); + authenc_ahash_t authenc_ahash_fn = crypto_authenc_ahash_fb; + struct page *srcp; + u8 *vsrc; + + srcp = sg_page(src); + vsrc = PageHighMem(srcp) ? NULL : page_address(srcp) + src->offset; + + if (ivsize) { + sg_init_table(cipher, 2); + sg_set_buf(cipher, iv, ivsize); + scatterwalk_crypto_chain(cipher, src, vsrc == iv + ivsize, 2); + src = cipher; + cryptlen += ivsize; + } + + if (sg_is_last(assoc)) { + authenc_ahash_fn = crypto_authenc_ahash; + sg_init_table(asg, 2); + sg_set_page(asg, sg_page(assoc), assoc->length, assoc->offset); + scatterwalk_crypto_chain(asg, src, 0, 2); + src = asg; + cryptlen += req->assoclen; + } + + areq_ctx->cryptlen = cryptlen; + areq_ctx->sg = src; + + return crypto_authenc_verify(req, authenc_ahash_fn); +} + +static int crypto_authenc_decrypt(struct aead_request *req) +{ + struct crypto_aead *authenc = crypto_aead_reqtfm(req); + struct crypto_authenc_ctx *ctx = crypto_aead_ctx(authenc); + struct ablkcipher_request *abreq = aead_request_ctx(req); + unsigned int cryptlen = req->cryptlen; + unsigned int authsize = crypto_aead_authsize(authenc); + u8 *iv = req->iv; + int err; + + if (cryptlen < authsize) + return -EINVAL; + cryptlen -= authsize; + + err = crypto_authenc_iverify(req, iv, cryptlen); + if (err) + return err; + + ablkcipher_request_set_tfm(abreq, ctx->enc); + ablkcipher_request_set_callback(abreq, aead_request_flags(req), + req->base.complete, req->base.data); + ablkcipher_request_set_crypt(abreq, req->src, req->dst, cryptlen, iv); + + return crypto_ablkcipher_decrypt(abreq); +} + +static int crypto_authenc_init_tfm(struct crypto_tfm *tfm) +{ + struct crypto_instance *inst = crypto_tfm_alg_instance(tfm); + struct authenc_instance_ctx *ictx = crypto_instance_ctx(inst); + struct crypto_authenc_ctx *ctx = crypto_tfm_ctx(tfm); + struct crypto_ahash *auth; + struct crypto_ablkcipher *enc; + int err; + + auth = crypto_spawn_ahash(&ictx->auth); + if (IS_ERR(auth)) + return PTR_ERR(auth); + + enc = crypto_spawn_skcipher(&ictx->enc); + err = PTR_ERR(enc); + if (IS_ERR(enc)) + goto err_free_ahash; + + ctx->auth = auth; + ctx->enc = enc; + + ctx->reqoff = ALIGN(2 * crypto_ahash_digestsize(auth) + + crypto_ahash_alignmask(auth), + crypto_ahash_alignmask(auth) + 1) + + crypto_ablkcipher_ivsize(enc); + + tfm->crt_aead.reqsize = sizeof(struct authenc_request_ctx) + + ctx->reqoff + + max_t(unsigned int, + crypto_ahash_reqsize(auth) + + sizeof(struct ahash_request), + sizeof(struct skcipher_givcrypt_request) + + crypto_ablkcipher_reqsize(enc)); + + return 0; + +err_free_ahash: + crypto_free_ahash(auth); + return err; +} + 
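+/* Editor's note (illustrative sketch, not part of the original patch):
+ * crypto_authenc_setkey() above expects the key blob to be an rtattr of
+ * type CRYPTO_AUTHENC_KEYA_PARAM carrying the big-endian encryption key
+ * length, followed by the authentication key and then the encryption key.
+ * A caller might assemble such a blob roughly as follows; the buffer and
+ * variable names here are assumptions for illustration only.
+ *
+ *	struct rtattr *rta = (void *)keybuf;
+ *	struct crypto_authenc_key_param *param = RTA_DATA(rta);
+ *
+ *	rta->rta_type = CRYPTO_AUTHENC_KEYA_PARAM;
+ *	rta->rta_len = RTA_LENGTH(sizeof(*param));
+ *	param->enckeylen = cpu_to_be32(enckeylen);
+ *	memcpy(keybuf + RTA_SPACE(sizeof(*param)), authkey, authkeylen);
+ *	memcpy(keybuf + RTA_SPACE(sizeof(*param)) + authkeylen,
+ *	       enckey, enckeylen);
+ *	crypto_aead_setkey(aead, keybuf,
+ *			   RTA_SPACE(sizeof(*param)) + authkeylen + enckeylen);
+ */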
+static void crypto_authenc_exit_tfm(struct crypto_tfm *tfm) +{ + struct crypto_authenc_ctx *ctx = crypto_tfm_ctx(tfm); + + crypto_free_ahash(ctx->auth); + crypto_free_ablkcipher(ctx->enc); +} + +static struct crypto_instance *crypto_authenc_alloc(struct rtattr **tb) +{ + struct crypto_attr_type *algt; + struct crypto_instance *inst; + struct hash_alg_common *auth; + struct crypto_alg *auth_base; + struct crypto_alg *enc; + struct authenc_instance_ctx *ctx; + const char *enc_name; + int err; + + algt = crypto_get_attr_type(tb); + err = PTR_ERR(algt); + if (IS_ERR(algt)) + return ERR_PTR(err); + + if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask) + return ERR_PTR(-EINVAL); + + auth = ahash_attr_alg(tb[1], CRYPTO_ALG_TYPE_HASH, + CRYPTO_ALG_TYPE_AHASH_MASK); + if (IS_ERR(auth)) + return ERR_CAST(auth); + + auth_base = &auth->base; + + enc_name = crypto_attr_alg_name(tb[2]); + err = PTR_ERR(enc_name); + if (IS_ERR(enc_name)) + goto out_put_auth; + + inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL); + err = -ENOMEM; + if (!inst) + goto out_put_auth; + + ctx = crypto_instance_ctx(inst); + + err = crypto_init_ahash_spawn(&ctx->auth, auth, inst); + if (err) + goto err_free_inst; + + crypto_set_skcipher_spawn(&ctx->enc, inst); + err = crypto_grab_skcipher(&ctx->enc, enc_name, 0, + crypto_requires_sync(algt->type, + algt->mask)); + if (err) + goto err_drop_auth; + + enc = crypto_skcipher_spawn_alg(&ctx->enc); + + err = -ENAMETOOLONG; + if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME, + "authenc(%s,%s)", auth_base->cra_name, enc->cra_name) >= + CRYPTO_MAX_ALG_NAME) + goto err_drop_enc; + + if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME, + "authenc(%s,%s)", auth_base->cra_driver_name, + enc->cra_driver_name) >= CRYPTO_MAX_ALG_NAME) + goto err_drop_enc; + + inst->alg.cra_flags = CRYPTO_ALG_TYPE_AEAD; + inst->alg.cra_flags |= enc->cra_flags & CRYPTO_ALG_ASYNC; + inst->alg.cra_priority = enc->cra_priority * + 10 + auth_base->cra_priority; + inst->alg.cra_blocksize = enc->cra_blocksize; + inst->alg.cra_alignmask = auth_base->cra_alignmask | enc->cra_alignmask; + inst->alg.cra_type = &crypto_aead_type; + + inst->alg.cra_aead.ivsize = enc->cra_ablkcipher.ivsize; + inst->alg.cra_aead.maxauthsize = auth->digestsize; + + inst->alg.cra_ctxsize = sizeof(struct crypto_authenc_ctx); + + inst->alg.cra_init = crypto_authenc_init_tfm; + inst->alg.cra_exit = crypto_authenc_exit_tfm; + + inst->alg.cra_aead.setkey = crypto_authenc_setkey; + inst->alg.cra_aead.encrypt = crypto_authenc_encrypt; + inst->alg.cra_aead.decrypt = crypto_authenc_decrypt; + inst->alg.cra_aead.givencrypt = crypto_authenc_givencrypt; + +out: + crypto_mod_put(auth_base); + return inst; + +err_drop_enc: + crypto_drop_skcipher(&ctx->enc); +err_drop_auth: + crypto_drop_ahash(&ctx->auth); +err_free_inst: + kfree(inst); +out_put_auth: + inst = ERR_PTR(err); + goto out; +} + +static void crypto_authenc_free(struct crypto_instance *inst) +{ + struct authenc_instance_ctx *ctx = crypto_instance_ctx(inst); + + crypto_drop_skcipher(&ctx->enc); + crypto_drop_ahash(&ctx->auth); + kfree(inst); +} + +static struct crypto_template crypto_authenc_tmpl = { + .name = "authenc", + .alloc = crypto_authenc_alloc, + .free = crypto_authenc_free, + .module = THIS_MODULE, +}; + +static int __init crypto_authenc_module_init(void) +{ + return crypto_register_template(&crypto_authenc_tmpl); +} + +static void __exit crypto_authenc_module_exit(void) +{ + crypto_unregister_template(&crypto_authenc_tmpl); +} + 
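+/* Editor's note (illustrative sketch, not part of the original patch):
+ * crypto_authenc_alloc() above instantiates templates named
+ * "authenc(<hash>,<cipher>)", so a user would typically obtain a transform
+ * through the generic AEAD API, for example:
+ *
+ *	struct crypto_aead *tfm;
+ *
+ *	tfm = crypto_alloc_aead("authenc(hmac(sha1),cbc(aes))", 0, 0);
+ *	if (IS_ERR(tfm))
+ *		return PTR_ERR(tfm);
+ *	crypto_aead_setauthsize(tfm, 12);	/* truncated HMAC-SHA1 ICV */
+ *	...
+ *	crypto_free_aead(tfm);
+ *
+ * The algorithm string and the 12-byte authsize are examples only; IPsec
+ * derives both from the negotiated SA.
+ */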
+module_init(crypto_authenc_module_init); +module_exit(crypto_authenc_module_exit); + +MODULE_LICENSE("GPL"); +MODULE_DESCRIPTION("Simple AEAD wrapper for IPsec"); diff --git a/crypto/authencesn.c b/crypto/authencesn.c new file mode 100644 index 00000000..136b68b9 --- /dev/null +++ b/crypto/authencesn.c @@ -0,0 +1,835 @@ +/* + * authencesn.c - AEAD wrapper for IPsec with extended sequence numbers, + * derived from authenc.c + * + * Copyright (C) 2010 secunet Security Networks AG + * Copyright (C) 2010 Steffen Klassert <steffen.klassert@secunet.com> + * + * This program is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License as published by the Free + * Software Foundation; either version 2 of the License, or (at your option) + * any later version. + * + */ + +#include <crypto/aead.h> +#include <crypto/internal/hash.h> +#include <crypto/internal/skcipher.h> +#include <crypto/authenc.h> +#include <crypto/scatterwalk.h> +#include <linux/err.h> +#include <linux/init.h> +#include <linux/kernel.h> +#include <linux/module.h> +#include <linux/rtnetlink.h> +#include <linux/slab.h> +#include <linux/spinlock.h> + +struct authenc_esn_instance_ctx { + struct crypto_ahash_spawn auth; + struct crypto_skcipher_spawn enc; +}; + +struct crypto_authenc_esn_ctx { + unsigned int reqoff; + struct crypto_ahash *auth; + struct crypto_ablkcipher *enc; +}; + +struct authenc_esn_request_ctx { + unsigned int cryptlen; + unsigned int headlen; + unsigned int trailen; + struct scatterlist *sg; + struct scatterlist hsg[2]; + struct scatterlist tsg[1]; + struct scatterlist cipher[2]; + crypto_completion_t complete; + crypto_completion_t update_complete; + crypto_completion_t update_complete2; + char tail[]; +}; + +static void authenc_esn_request_complete(struct aead_request *req, int err) +{ + if (err != -EINPROGRESS) + aead_request_complete(req, err); +} + +static int crypto_authenc_esn_setkey(struct crypto_aead *authenc_esn, const u8 *key, + unsigned int keylen) +{ + unsigned int authkeylen; + unsigned int enckeylen; + struct crypto_authenc_esn_ctx *ctx = crypto_aead_ctx(authenc_esn); + struct crypto_ahash *auth = ctx->auth; + struct crypto_ablkcipher *enc = ctx->enc; + struct rtattr *rta = (void *)key; + struct crypto_authenc_key_param *param; + int err = -EINVAL; + + if (!RTA_OK(rta, keylen)) + goto badkey; + if (rta->rta_type != CRYPTO_AUTHENC_KEYA_PARAM) + goto badkey; + if (RTA_PAYLOAD(rta) < sizeof(*param)) + goto badkey; + + param = RTA_DATA(rta); + enckeylen = be32_to_cpu(param->enckeylen); + + key += RTA_ALIGN(rta->rta_len); + keylen -= RTA_ALIGN(rta->rta_len); + + if (keylen < enckeylen) + goto badkey; + + authkeylen = keylen - enckeylen; + + crypto_ahash_clear_flags(auth, CRYPTO_TFM_REQ_MASK); + crypto_ahash_set_flags(auth, crypto_aead_get_flags(authenc_esn) & + CRYPTO_TFM_REQ_MASK); + err = crypto_ahash_setkey(auth, key, authkeylen); + crypto_aead_set_flags(authenc_esn, crypto_ahash_get_flags(auth) & + CRYPTO_TFM_RES_MASK); + + if (err) + goto out; + + crypto_ablkcipher_clear_flags(enc, CRYPTO_TFM_REQ_MASK); + crypto_ablkcipher_set_flags(enc, crypto_aead_get_flags(authenc_esn) & + CRYPTO_TFM_REQ_MASK); + err = crypto_ablkcipher_setkey(enc, key + authkeylen, enckeylen); + crypto_aead_set_flags(authenc_esn, crypto_ablkcipher_get_flags(enc) & + CRYPTO_TFM_RES_MASK); + +out: + return err; + +badkey: + crypto_aead_set_flags(authenc_esn, CRYPTO_TFM_RES_BAD_KEY_LEN); + goto out; +} + +static void authenc_esn_geniv_ahash_update_done(struct crypto_async_request 
*areq, + int err) +{ + struct aead_request *req = areq->data; + struct crypto_aead *authenc_esn = crypto_aead_reqtfm(req); + struct crypto_authenc_esn_ctx *ctx = crypto_aead_ctx(authenc_esn); + struct authenc_esn_request_ctx *areq_ctx = aead_request_ctx(req); + struct ahash_request *ahreq = (void *)(areq_ctx->tail + ctx->reqoff); + + if (err) + goto out; + + ahash_request_set_crypt(ahreq, areq_ctx->sg, ahreq->result, + areq_ctx->cryptlen); + ahash_request_set_callback(ahreq, aead_request_flags(req) & + CRYPTO_TFM_REQ_MAY_SLEEP, + areq_ctx->update_complete2, req); + + err = crypto_ahash_update(ahreq); + if (err) + goto out; + + ahash_request_set_crypt(ahreq, areq_ctx->tsg, ahreq->result, + areq_ctx->trailen); + ahash_request_set_callback(ahreq, aead_request_flags(req) & + CRYPTO_TFM_REQ_MAY_SLEEP, + areq_ctx->complete, req); + + err = crypto_ahash_finup(ahreq); + if (err) + goto out; + + scatterwalk_map_and_copy(ahreq->result, areq_ctx->sg, + areq_ctx->cryptlen, + crypto_aead_authsize(authenc_esn), 1); + +out: + authenc_esn_request_complete(req, err); +} + +static void authenc_esn_geniv_ahash_update_done2(struct crypto_async_request *areq, + int err) +{ + struct aead_request *req = areq->data; + struct crypto_aead *authenc_esn = crypto_aead_reqtfm(req); + struct crypto_authenc_esn_ctx *ctx = crypto_aead_ctx(authenc_esn); + struct authenc_esn_request_ctx *areq_ctx = aead_request_ctx(req); + struct ahash_request *ahreq = (void *)(areq_ctx->tail + ctx->reqoff); + + if (err) + goto out; + + ahash_request_set_crypt(ahreq, areq_ctx->tsg, ahreq->result, + areq_ctx->trailen); + ahash_request_set_callback(ahreq, aead_request_flags(req) & + CRYPTO_TFM_REQ_MAY_SLEEP, + areq_ctx->complete, req); + + err = crypto_ahash_finup(ahreq); + if (err) + goto out; + + scatterwalk_map_and_copy(ahreq->result, areq_ctx->sg, + areq_ctx->cryptlen, + crypto_aead_authsize(authenc_esn), 1); + +out: + authenc_esn_request_complete(req, err); +} + + +static void authenc_esn_geniv_ahash_done(struct crypto_async_request *areq, + int err) +{ + struct aead_request *req = areq->data; + struct crypto_aead *authenc_esn = crypto_aead_reqtfm(req); + struct crypto_authenc_esn_ctx *ctx = crypto_aead_ctx(authenc_esn); + struct authenc_esn_request_ctx *areq_ctx = aead_request_ctx(req); + struct ahash_request *ahreq = (void *)(areq_ctx->tail + ctx->reqoff); + + if (err) + goto out; + + scatterwalk_map_and_copy(ahreq->result, areq_ctx->sg, + areq_ctx->cryptlen, + crypto_aead_authsize(authenc_esn), 1); + +out: + aead_request_complete(req, err); +} + + +static void authenc_esn_verify_ahash_update_done(struct crypto_async_request *areq, + int err) +{ + u8 *ihash; + unsigned int authsize; + struct ablkcipher_request *abreq; + struct aead_request *req = areq->data; + struct crypto_aead *authenc_esn = crypto_aead_reqtfm(req); + struct crypto_authenc_esn_ctx *ctx = crypto_aead_ctx(authenc_esn); + struct authenc_esn_request_ctx *areq_ctx = aead_request_ctx(req); + struct ahash_request *ahreq = (void *)(areq_ctx->tail + ctx->reqoff); + unsigned int cryptlen = req->cryptlen; + + if (err) + goto out; + + ahash_request_set_crypt(ahreq, areq_ctx->sg, ahreq->result, + areq_ctx->cryptlen); + + ahash_request_set_callback(ahreq, + aead_request_flags(req) & + CRYPTO_TFM_REQ_MAY_SLEEP, + areq_ctx->update_complete2, req); + + err = crypto_ahash_update(ahreq); + if (err) + goto out; + + ahash_request_set_crypt(ahreq, areq_ctx->tsg, ahreq->result, + areq_ctx->trailen); + ahash_request_set_callback(ahreq, aead_request_flags(req) & + CRYPTO_TFM_REQ_MAY_SLEEP, + 
areq_ctx->complete, req); + + err = crypto_ahash_finup(ahreq); + if (err) + goto out; + + authsize = crypto_aead_authsize(authenc_esn); + cryptlen -= authsize; + ihash = ahreq->result + authsize; + scatterwalk_map_and_copy(ihash, areq_ctx->sg, areq_ctx->cryptlen, + authsize, 0); + + err = memcmp(ihash, ahreq->result, authsize) ? -EBADMSG : 0; + if (err) + goto out; + + abreq = aead_request_ctx(req); + ablkcipher_request_set_tfm(abreq, ctx->enc); + ablkcipher_request_set_callback(abreq, aead_request_flags(req), + req->base.complete, req->base.data); + ablkcipher_request_set_crypt(abreq, req->src, req->dst, + cryptlen, req->iv); + + err = crypto_ablkcipher_decrypt(abreq); + +out: + authenc_esn_request_complete(req, err); +} + +static void authenc_esn_verify_ahash_update_done2(struct crypto_async_request *areq, + int err) +{ + u8 *ihash; + unsigned int authsize; + struct ablkcipher_request *abreq; + struct aead_request *req = areq->data; + struct crypto_aead *authenc_esn = crypto_aead_reqtfm(req); + struct crypto_authenc_esn_ctx *ctx = crypto_aead_ctx(authenc_esn); + struct authenc_esn_request_ctx *areq_ctx = aead_request_ctx(req); + struct ahash_request *ahreq = (void *)(areq_ctx->tail + ctx->reqoff); + unsigned int cryptlen = req->cryptlen; + + if (err) + goto out; + + ahash_request_set_crypt(ahreq, areq_ctx->tsg, ahreq->result, + areq_ctx->trailen); + ahash_request_set_callback(ahreq, aead_request_flags(req) & + CRYPTO_TFM_REQ_MAY_SLEEP, + areq_ctx->complete, req); + + err = crypto_ahash_finup(ahreq); + if (err) + goto out; + + authsize = crypto_aead_authsize(authenc_esn); + cryptlen -= authsize; + ihash = ahreq->result + authsize; + scatterwalk_map_and_copy(ihash, areq_ctx->sg, areq_ctx->cryptlen, + authsize, 0); + + err = memcmp(ihash, ahreq->result, authsize) ? -EBADMSG : 0; + if (err) + goto out; + + abreq = aead_request_ctx(req); + ablkcipher_request_set_tfm(abreq, ctx->enc); + ablkcipher_request_set_callback(abreq, aead_request_flags(req), + req->base.complete, req->base.data); + ablkcipher_request_set_crypt(abreq, req->src, req->dst, + cryptlen, req->iv); + + err = crypto_ablkcipher_decrypt(abreq); + +out: + authenc_esn_request_complete(req, err); +} + + +static void authenc_esn_verify_ahash_done(struct crypto_async_request *areq, + int err) +{ + u8 *ihash; + unsigned int authsize; + struct ablkcipher_request *abreq; + struct aead_request *req = areq->data; + struct crypto_aead *authenc_esn = crypto_aead_reqtfm(req); + struct crypto_authenc_esn_ctx *ctx = crypto_aead_ctx(authenc_esn); + struct authenc_esn_request_ctx *areq_ctx = aead_request_ctx(req); + struct ahash_request *ahreq = (void *)(areq_ctx->tail + ctx->reqoff); + unsigned int cryptlen = req->cryptlen; + + if (err) + goto out; + + authsize = crypto_aead_authsize(authenc_esn); + cryptlen -= authsize; + ihash = ahreq->result + authsize; + scatterwalk_map_and_copy(ihash, areq_ctx->sg, areq_ctx->cryptlen, + authsize, 0); + + err = memcmp(ihash, ahreq->result, authsize) ? 
-EBADMSG : 0; + if (err) + goto out; + + abreq = aead_request_ctx(req); + ablkcipher_request_set_tfm(abreq, ctx->enc); + ablkcipher_request_set_callback(abreq, aead_request_flags(req), + req->base.complete, req->base.data); + ablkcipher_request_set_crypt(abreq, req->src, req->dst, + cryptlen, req->iv); + + err = crypto_ablkcipher_decrypt(abreq); + +out: + authenc_esn_request_complete(req, err); +} + +static u8 *crypto_authenc_esn_ahash(struct aead_request *req, + unsigned int flags) +{ + struct crypto_aead *authenc_esn = crypto_aead_reqtfm(req); + struct crypto_authenc_esn_ctx *ctx = crypto_aead_ctx(authenc_esn); + struct crypto_ahash *auth = ctx->auth; + struct authenc_esn_request_ctx *areq_ctx = aead_request_ctx(req); + struct ahash_request *ahreq = (void *)(areq_ctx->tail + ctx->reqoff); + u8 *hash = areq_ctx->tail; + int err; + + hash = (u8 *)ALIGN((unsigned long)hash + crypto_ahash_alignmask(auth), + crypto_ahash_alignmask(auth) + 1); + + ahash_request_set_tfm(ahreq, auth); + + err = crypto_ahash_init(ahreq); + if (err) + return ERR_PTR(err); + + ahash_request_set_crypt(ahreq, areq_ctx->hsg, hash, areq_ctx->headlen); + ahash_request_set_callback(ahreq, aead_request_flags(req) & flags, + areq_ctx->update_complete, req); + + err = crypto_ahash_update(ahreq); + if (err) + return ERR_PTR(err); + + ahash_request_set_crypt(ahreq, areq_ctx->sg, hash, areq_ctx->cryptlen); + ahash_request_set_callback(ahreq, aead_request_flags(req) & flags, + areq_ctx->update_complete2, req); + + err = crypto_ahash_update(ahreq); + if (err) + return ERR_PTR(err); + + ahash_request_set_crypt(ahreq, areq_ctx->tsg, hash, + areq_ctx->trailen); + ahash_request_set_callback(ahreq, aead_request_flags(req) & flags, + areq_ctx->complete, req); + + err = crypto_ahash_finup(ahreq); + if (err) + return ERR_PTR(err); + + return hash; +} + +static int crypto_authenc_esn_genicv(struct aead_request *req, u8 *iv, + unsigned int flags) +{ + struct crypto_aead *authenc_esn = crypto_aead_reqtfm(req); + struct authenc_esn_request_ctx *areq_ctx = aead_request_ctx(req); + struct scatterlist *dst = req->dst; + struct scatterlist *assoc = req->assoc; + struct scatterlist *cipher = areq_ctx->cipher; + struct scatterlist *hsg = areq_ctx->hsg; + struct scatterlist *tsg = areq_ctx->tsg; + struct scatterlist *assoc1; + struct scatterlist *assoc2; + unsigned int ivsize = crypto_aead_ivsize(authenc_esn); + unsigned int cryptlen = req->cryptlen; + struct page *dstp; + u8 *vdst; + u8 *hash; + + dstp = sg_page(dst); + vdst = PageHighMem(dstp) ? 
NULL : page_address(dstp) + dst->offset; + + if (ivsize) { + sg_init_table(cipher, 2); + sg_set_buf(cipher, iv, ivsize); + scatterwalk_crypto_chain(cipher, dst, vdst == iv + ivsize, 2); + dst = cipher; + cryptlen += ivsize; + } + + if (sg_is_last(assoc)) + return -EINVAL; + + assoc1 = assoc + 1; + if (sg_is_last(assoc1)) + return -EINVAL; + + assoc2 = assoc + 2; + if (!sg_is_last(assoc2)) + return -EINVAL; + + sg_init_table(hsg, 2); + sg_set_page(hsg, sg_page(assoc), assoc->length, assoc->offset); + sg_set_page(hsg + 1, sg_page(assoc2), assoc2->length, assoc2->offset); + + sg_init_table(tsg, 1); + sg_set_page(tsg, sg_page(assoc1), assoc1->length, assoc1->offset); + + areq_ctx->cryptlen = cryptlen; + areq_ctx->headlen = assoc->length + assoc2->length; + areq_ctx->trailen = assoc1->length; + areq_ctx->sg = dst; + + areq_ctx->complete = authenc_esn_geniv_ahash_done; + areq_ctx->update_complete = authenc_esn_geniv_ahash_update_done; + areq_ctx->update_complete2 = authenc_esn_geniv_ahash_update_done2; + + hash = crypto_authenc_esn_ahash(req, flags); + if (IS_ERR(hash)) + return PTR_ERR(hash); + + scatterwalk_map_and_copy(hash, dst, cryptlen, + crypto_aead_authsize(authenc_esn), 1); + return 0; +} + + +static void crypto_authenc_esn_encrypt_done(struct crypto_async_request *req, + int err) +{ + struct aead_request *areq = req->data; + + if (!err) { + struct crypto_aead *authenc_esn = crypto_aead_reqtfm(areq); + struct crypto_authenc_esn_ctx *ctx = crypto_aead_ctx(authenc_esn); + struct ablkcipher_request *abreq = aead_request_ctx(areq); + u8 *iv = (u8 *)(abreq + 1) + + crypto_ablkcipher_reqsize(ctx->enc); + + err = crypto_authenc_esn_genicv(areq, iv, 0); + } + + authenc_esn_request_complete(areq, err); +} + +static int crypto_authenc_esn_encrypt(struct aead_request *req) +{ + struct crypto_aead *authenc_esn = crypto_aead_reqtfm(req); + struct crypto_authenc_esn_ctx *ctx = crypto_aead_ctx(authenc_esn); + struct authenc_esn_request_ctx *areq_ctx = aead_request_ctx(req); + struct crypto_ablkcipher *enc = ctx->enc; + struct scatterlist *dst = req->dst; + unsigned int cryptlen = req->cryptlen; + struct ablkcipher_request *abreq = (void *)(areq_ctx->tail + + ctx->reqoff); + u8 *iv = (u8 *)abreq - crypto_ablkcipher_ivsize(enc); + int err; + + ablkcipher_request_set_tfm(abreq, enc); + ablkcipher_request_set_callback(abreq, aead_request_flags(req), + crypto_authenc_esn_encrypt_done, req); + ablkcipher_request_set_crypt(abreq, req->src, dst, cryptlen, req->iv); + + memcpy(iv, req->iv, crypto_aead_ivsize(authenc_esn)); + + err = crypto_ablkcipher_encrypt(abreq); + if (err) + return err; + + return crypto_authenc_esn_genicv(req, iv, CRYPTO_TFM_REQ_MAY_SLEEP); +} + +static void crypto_authenc_esn_givencrypt_done(struct crypto_async_request *req, + int err) +{ + struct aead_request *areq = req->data; + + if (!err) { + struct skcipher_givcrypt_request *greq = aead_request_ctx(areq); + + err = crypto_authenc_esn_genicv(areq, greq->giv, 0); + } + + authenc_esn_request_complete(areq, err); +} + +static int crypto_authenc_esn_givencrypt(struct aead_givcrypt_request *req) +{ + struct crypto_aead *authenc_esn = aead_givcrypt_reqtfm(req); + struct crypto_authenc_esn_ctx *ctx = crypto_aead_ctx(authenc_esn); + struct aead_request *areq = &req->areq; + struct skcipher_givcrypt_request *greq = aead_request_ctx(areq); + u8 *iv = req->giv; + int err; + + skcipher_givcrypt_set_tfm(greq, ctx->enc); + skcipher_givcrypt_set_callback(greq, aead_request_flags(areq), + crypto_authenc_esn_givencrypt_done, areq); + 
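+	/*
+	 * The givcrypt backend below generates the per-packet IV into
+	 * req->giv (seeded from req->seq) and performs the encryption in
+	 * the same request; the ICV over the associated data, IV and
+	 * ciphertext is appended afterwards by crypto_authenc_esn_genicv().
+	 */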
skcipher_givcrypt_set_crypt(greq, areq->src, areq->dst, areq->cryptlen, + areq->iv); + skcipher_givcrypt_set_giv(greq, iv, req->seq); + + err = crypto_skcipher_givencrypt(greq); + if (err) + return err; + + return crypto_authenc_esn_genicv(areq, iv, CRYPTO_TFM_REQ_MAY_SLEEP); +} + +static int crypto_authenc_esn_verify(struct aead_request *req) +{ + struct crypto_aead *authenc_esn = crypto_aead_reqtfm(req); + struct authenc_esn_request_ctx *areq_ctx = aead_request_ctx(req); + u8 *ohash; + u8 *ihash; + unsigned int authsize; + + areq_ctx->complete = authenc_esn_verify_ahash_done; + areq_ctx->update_complete = authenc_esn_verify_ahash_update_done; + + ohash = crypto_authenc_esn_ahash(req, CRYPTO_TFM_REQ_MAY_SLEEP); + if (IS_ERR(ohash)) + return PTR_ERR(ohash); + + authsize = crypto_aead_authsize(authenc_esn); + ihash = ohash + authsize; + scatterwalk_map_and_copy(ihash, areq_ctx->sg, areq_ctx->cryptlen, + authsize, 0); + return memcmp(ihash, ohash, authsize) ? -EBADMSG : 0; +} + +static int crypto_authenc_esn_iverify(struct aead_request *req, u8 *iv, + unsigned int cryptlen) +{ + struct crypto_aead *authenc_esn = crypto_aead_reqtfm(req); + struct authenc_esn_request_ctx *areq_ctx = aead_request_ctx(req); + struct scatterlist *src = req->src; + struct scatterlist *assoc = req->assoc; + struct scatterlist *cipher = areq_ctx->cipher; + struct scatterlist *hsg = areq_ctx->hsg; + struct scatterlist *tsg = areq_ctx->tsg; + struct scatterlist *assoc1; + struct scatterlist *assoc2; + unsigned int ivsize = crypto_aead_ivsize(authenc_esn); + struct page *srcp; + u8 *vsrc; + + srcp = sg_page(src); + vsrc = PageHighMem(srcp) ? NULL : page_address(srcp) + src->offset; + + if (ivsize) { + sg_init_table(cipher, 2); + sg_set_buf(cipher, iv, ivsize); + scatterwalk_crypto_chain(cipher, src, vsrc == iv + ivsize, 2); + src = cipher; + cryptlen += ivsize; + } + + if (sg_is_last(assoc)) + return -EINVAL; + + assoc1 = assoc + 1; + if (sg_is_last(assoc1)) + return -EINVAL; + + assoc2 = assoc + 2; + if (!sg_is_last(assoc2)) + return -EINVAL; + + sg_init_table(hsg, 2); + sg_set_page(hsg, sg_page(assoc), assoc->length, assoc->offset); + sg_set_page(hsg + 1, sg_page(assoc2), assoc2->length, assoc2->offset); + + sg_init_table(tsg, 1); + sg_set_page(tsg, sg_page(assoc1), assoc1->length, assoc1->offset); + + areq_ctx->cryptlen = cryptlen; + areq_ctx->headlen = assoc->length + assoc2->length; + areq_ctx->trailen = assoc1->length; + areq_ctx->sg = src; + + areq_ctx->complete = authenc_esn_verify_ahash_done; + areq_ctx->update_complete = authenc_esn_verify_ahash_update_done; + areq_ctx->update_complete2 = authenc_esn_verify_ahash_update_done2; + + return crypto_authenc_esn_verify(req); +} + +static int crypto_authenc_esn_decrypt(struct aead_request *req) +{ + struct crypto_aead *authenc_esn = crypto_aead_reqtfm(req); + struct crypto_authenc_esn_ctx *ctx = crypto_aead_ctx(authenc_esn); + struct ablkcipher_request *abreq = aead_request_ctx(req); + unsigned int cryptlen = req->cryptlen; + unsigned int authsize = crypto_aead_authsize(authenc_esn); + u8 *iv = req->iv; + int err; + + if (cryptlen < authsize) + return -EINVAL; + cryptlen -= authsize; + + err = crypto_authenc_esn_iverify(req, iv, cryptlen); + if (err) + return err; + + ablkcipher_request_set_tfm(abreq, ctx->enc); + ablkcipher_request_set_callback(abreq, aead_request_flags(req), + req->base.complete, req->base.data); + ablkcipher_request_set_crypt(abreq, req->src, req->dst, cryptlen, iv); + + return crypto_ablkcipher_decrypt(abreq); +} + +static int 
crypto_authenc_esn_init_tfm(struct crypto_tfm *tfm) +{ + struct crypto_instance *inst = crypto_tfm_alg_instance(tfm); + struct authenc_esn_instance_ctx *ictx = crypto_instance_ctx(inst); + struct crypto_authenc_esn_ctx *ctx = crypto_tfm_ctx(tfm); + struct crypto_ahash *auth; + struct crypto_ablkcipher *enc; + int err; + + auth = crypto_spawn_ahash(&ictx->auth); + if (IS_ERR(auth)) + return PTR_ERR(auth); + + enc = crypto_spawn_skcipher(&ictx->enc); + err = PTR_ERR(enc); + if (IS_ERR(enc)) + goto err_free_ahash; + + ctx->auth = auth; + ctx->enc = enc; + + ctx->reqoff = ALIGN(2 * crypto_ahash_digestsize(auth) + + crypto_ahash_alignmask(auth), + crypto_ahash_alignmask(auth) + 1) + + crypto_ablkcipher_ivsize(enc); + + tfm->crt_aead.reqsize = sizeof(struct authenc_esn_request_ctx) + + ctx->reqoff + + max_t(unsigned int, + crypto_ahash_reqsize(auth) + + sizeof(struct ahash_request), + sizeof(struct skcipher_givcrypt_request) + + crypto_ablkcipher_reqsize(enc)); + + return 0; + +err_free_ahash: + crypto_free_ahash(auth); + return err; +} + +static void crypto_authenc_esn_exit_tfm(struct crypto_tfm *tfm) +{ + struct crypto_authenc_esn_ctx *ctx = crypto_tfm_ctx(tfm); + + crypto_free_ahash(ctx->auth); + crypto_free_ablkcipher(ctx->enc); +} + +static struct crypto_instance *crypto_authenc_esn_alloc(struct rtattr **tb) +{ + struct crypto_attr_type *algt; + struct crypto_instance *inst; + struct hash_alg_common *auth; + struct crypto_alg *auth_base; + struct crypto_alg *enc; + struct authenc_esn_instance_ctx *ctx; + const char *enc_name; + int err; + + algt = crypto_get_attr_type(tb); + err = PTR_ERR(algt); + if (IS_ERR(algt)) + return ERR_PTR(err); + + if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask) + return ERR_PTR(-EINVAL); + + auth = ahash_attr_alg(tb[1], CRYPTO_ALG_TYPE_HASH, + CRYPTO_ALG_TYPE_AHASH_MASK); + if (IS_ERR(auth)) + return ERR_CAST(auth); + + auth_base = &auth->base; + + enc_name = crypto_attr_alg_name(tb[2]); + err = PTR_ERR(enc_name); + if (IS_ERR(enc_name)) + goto out_put_auth; + + inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL); + err = -ENOMEM; + if (!inst) + goto out_put_auth; + + ctx = crypto_instance_ctx(inst); + + err = crypto_init_ahash_spawn(&ctx->auth, auth, inst); + if (err) + goto err_free_inst; + + crypto_set_skcipher_spawn(&ctx->enc, inst); + err = crypto_grab_skcipher(&ctx->enc, enc_name, 0, + crypto_requires_sync(algt->type, + algt->mask)); + if (err) + goto err_drop_auth; + + enc = crypto_skcipher_spawn_alg(&ctx->enc); + + err = -ENAMETOOLONG; + if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME, + "authencesn(%s,%s)", auth_base->cra_name, enc->cra_name) >= + CRYPTO_MAX_ALG_NAME) + goto err_drop_enc; + + if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME, + "authencesn(%s,%s)", auth_base->cra_driver_name, + enc->cra_driver_name) >= CRYPTO_MAX_ALG_NAME) + goto err_drop_enc; + + inst->alg.cra_flags = CRYPTO_ALG_TYPE_AEAD; + inst->alg.cra_flags |= enc->cra_flags & CRYPTO_ALG_ASYNC; + inst->alg.cra_priority = enc->cra_priority * + 10 + auth_base->cra_priority; + inst->alg.cra_blocksize = enc->cra_blocksize; + inst->alg.cra_alignmask = auth_base->cra_alignmask | enc->cra_alignmask; + inst->alg.cra_type = &crypto_aead_type; + + inst->alg.cra_aead.ivsize = enc->cra_ablkcipher.ivsize; + inst->alg.cra_aead.maxauthsize = auth->digestsize; + + inst->alg.cra_ctxsize = sizeof(struct crypto_authenc_esn_ctx); + + inst->alg.cra_init = crypto_authenc_esn_init_tfm; + inst->alg.cra_exit = crypto_authenc_esn_exit_tfm; + + inst->alg.cra_aead.setkey = 
crypto_authenc_esn_setkey; + inst->alg.cra_aead.encrypt = crypto_authenc_esn_encrypt; + inst->alg.cra_aead.decrypt = crypto_authenc_esn_decrypt; + inst->alg.cra_aead.givencrypt = crypto_authenc_esn_givencrypt; + +out: + crypto_mod_put(auth_base); + return inst; + +err_drop_enc: + crypto_drop_skcipher(&ctx->enc); +err_drop_auth: + crypto_drop_ahash(&ctx->auth); +err_free_inst: + kfree(inst); +out_put_auth: + inst = ERR_PTR(err); + goto out; +} + +static void crypto_authenc_esn_free(struct crypto_instance *inst) +{ + struct authenc_esn_instance_ctx *ctx = crypto_instance_ctx(inst); + + crypto_drop_skcipher(&ctx->enc); + crypto_drop_ahash(&ctx->auth); + kfree(inst); +} + +static struct crypto_template crypto_authenc_esn_tmpl = { + .name = "authencesn", + .alloc = crypto_authenc_esn_alloc, + .free = crypto_authenc_esn_free, + .module = THIS_MODULE, +}; + +static int __init crypto_authenc_esn_module_init(void) +{ + return crypto_register_template(&crypto_authenc_esn_tmpl); +} + +static void __exit crypto_authenc_esn_module_exit(void) +{ + crypto_unregister_template(&crypto_authenc_esn_tmpl); +} + +module_init(crypto_authenc_esn_module_init); +module_exit(crypto_authenc_esn_module_exit); + +MODULE_LICENSE("GPL"); +MODULE_AUTHOR("Steffen Klassert <steffen.klassert@secunet.com>"); +MODULE_DESCRIPTION("AEAD wrapper for IPsec with extended sequence numbers"); diff --git a/crypto/blkcipher.c b/crypto/blkcipher.c new file mode 100644 index 00000000..4dd80c72 --- /dev/null +++ b/crypto/blkcipher.c @@ -0,0 +1,732 @@ +/* + * Block chaining cipher operations. + * + * Generic encrypt/decrypt wrapper for ciphers, handles operations across + * multiple page boundaries by using temporary blocks. In user context, + * the kernel is given a chance to schedule us once per page. + * + * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au> + * + * This program is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License as published by the Free + * Software Foundation; either version 2 of the License, or (at your option) + * any later version. + * + */ + +#include <crypto/internal/skcipher.h> +#include <crypto/scatterwalk.h> +#include <linux/errno.h> +#include <linux/hardirq.h> +#include <linux/kernel.h> +#include <linux/module.h> +#include <linux/scatterlist.h> +#include <linux/seq_file.h> +#include <linux/slab.h> +#include <linux/string.h> +#include <linux/cryptouser.h> +#include <net/netlink.h> + +#include "internal.h" + +enum { + BLKCIPHER_WALK_PHYS = 1 << 0, + BLKCIPHER_WALK_SLOW = 1 << 1, + BLKCIPHER_WALK_COPY = 1 << 2, + BLKCIPHER_WALK_DIFF = 1 << 3, +}; + +static int blkcipher_walk_next(struct blkcipher_desc *desc, + struct blkcipher_walk *walk); +static int blkcipher_walk_first(struct blkcipher_desc *desc, + struct blkcipher_walk *walk); + +static inline void blkcipher_map_src(struct blkcipher_walk *walk) +{ + walk->src.virt.addr = scatterwalk_map(&walk->in); +} + +static inline void blkcipher_map_dst(struct blkcipher_walk *walk) +{ + walk->dst.virt.addr = scatterwalk_map(&walk->out); +} + +static inline void blkcipher_unmap_src(struct blkcipher_walk *walk) +{ + scatterwalk_unmap(walk->src.virt.addr); +} + +static inline void blkcipher_unmap_dst(struct blkcipher_walk *walk) +{ + scatterwalk_unmap(walk->dst.virt.addr); +} + +/* Get a spot of the specified length that does not straddle a page. + * The caller needs to ensure that there is enough space for this operation. 
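+ *
+ * For example, with 4 KiB pages, a 16-byte spot starting 8 bytes before a
+ * page boundary would straddle it; the page containing the last byte then
+ * starts above 'start', so that page start is returned and the whole spot
+ * fits within a single page. If no boundary is crossed, 'start' is
+ * returned unchanged.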
+ */ +static inline u8 *blkcipher_get_spot(u8 *start, unsigned int len) +{ + u8 *end_page = (u8 *)(((unsigned long)(start + len - 1)) & PAGE_MASK); + return max(start, end_page); +} + +static inline unsigned int blkcipher_done_slow(struct crypto_blkcipher *tfm, + struct blkcipher_walk *walk, + unsigned int bsize) +{ + u8 *addr; + unsigned int alignmask = crypto_blkcipher_alignmask(tfm); + + addr = (u8 *)ALIGN((unsigned long)walk->buffer, alignmask + 1); + addr = blkcipher_get_spot(addr, bsize); + scatterwalk_copychunks(addr, &walk->out, bsize, 1); + return bsize; +} + +static inline unsigned int blkcipher_done_fast(struct blkcipher_walk *walk, + unsigned int n) +{ + if (walk->flags & BLKCIPHER_WALK_COPY) { + blkcipher_map_dst(walk); + memcpy(walk->dst.virt.addr, walk->page, n); + blkcipher_unmap_dst(walk); + } else if (!(walk->flags & BLKCIPHER_WALK_PHYS)) { + if (walk->flags & BLKCIPHER_WALK_DIFF) + blkcipher_unmap_dst(walk); + blkcipher_unmap_src(walk); + } + + scatterwalk_advance(&walk->in, n); + scatterwalk_advance(&walk->out, n); + + return n; +} + +int blkcipher_walk_done(struct blkcipher_desc *desc, + struct blkcipher_walk *walk, int err) +{ + struct crypto_blkcipher *tfm = desc->tfm; + unsigned int nbytes = 0; + + if (likely(err >= 0)) { + unsigned int n = walk->nbytes - err; + + if (likely(!(walk->flags & BLKCIPHER_WALK_SLOW))) + n = blkcipher_done_fast(walk, n); + else if (WARN_ON(err)) { + err = -EINVAL; + goto err; + } else + n = blkcipher_done_slow(tfm, walk, n); + + nbytes = walk->total - n; + err = 0; + } + + scatterwalk_done(&walk->in, 0, nbytes); + scatterwalk_done(&walk->out, 1, nbytes); + +err: + walk->total = nbytes; + walk->nbytes = nbytes; + + if (nbytes) { + crypto_yield(desc->flags); + return blkcipher_walk_next(desc, walk); + } + + if (walk->iv != desc->info) + memcpy(desc->info, walk->iv, crypto_blkcipher_ivsize(tfm)); + if (walk->buffer != walk->page) + kfree(walk->buffer); + if (walk->page) + free_page((unsigned long)walk->page); + + return err; +} +EXPORT_SYMBOL_GPL(blkcipher_walk_done); + +static inline int blkcipher_next_slow(struct blkcipher_desc *desc, + struct blkcipher_walk *walk, + unsigned int bsize, + unsigned int alignmask) +{ + unsigned int n; + unsigned aligned_bsize = ALIGN(bsize, alignmask + 1); + + if (walk->buffer) + goto ok; + + walk->buffer = walk->page; + if (walk->buffer) + goto ok; + + n = aligned_bsize * 3 - (alignmask + 1) + + (alignmask & ~(crypto_tfm_ctx_alignment() - 1)); + walk->buffer = kmalloc(n, GFP_ATOMIC); + if (!walk->buffer) + return blkcipher_walk_done(desc, walk, -ENOMEM); + +ok: + walk->dst.virt.addr = (u8 *)ALIGN((unsigned long)walk->buffer, + alignmask + 1); + walk->dst.virt.addr = blkcipher_get_spot(walk->dst.virt.addr, bsize); + walk->src.virt.addr = blkcipher_get_spot(walk->dst.virt.addr + + aligned_bsize, bsize); + + scatterwalk_copychunks(walk->src.virt.addr, &walk->in, bsize, 0); + + walk->nbytes = bsize; + walk->flags |= BLKCIPHER_WALK_SLOW; + + return 0; +} + +static inline int blkcipher_next_copy(struct blkcipher_walk *walk) +{ + u8 *tmp = walk->page; + + blkcipher_map_src(walk); + memcpy(tmp, walk->src.virt.addr, walk->nbytes); + blkcipher_unmap_src(walk); + + walk->src.virt.addr = tmp; + walk->dst.virt.addr = tmp; + + return 0; +} + +static inline int blkcipher_next_fast(struct blkcipher_desc *desc, + struct blkcipher_walk *walk) +{ + unsigned long diff; + + walk->src.phys.page = scatterwalk_page(&walk->in); + walk->src.phys.offset = offset_in_page(walk->in.offset); + walk->dst.phys.page = 
scatterwalk_page(&walk->out); + walk->dst.phys.offset = offset_in_page(walk->out.offset); + + if (walk->flags & BLKCIPHER_WALK_PHYS) + return 0; + + diff = walk->src.phys.offset - walk->dst.phys.offset; + diff |= walk->src.virt.page - walk->dst.virt.page; + + blkcipher_map_src(walk); + walk->dst.virt.addr = walk->src.virt.addr; + + if (diff) { + walk->flags |= BLKCIPHER_WALK_DIFF; + blkcipher_map_dst(walk); + } + + return 0; +} + +static int blkcipher_walk_next(struct blkcipher_desc *desc, + struct blkcipher_walk *walk) +{ + struct crypto_blkcipher *tfm = desc->tfm; + unsigned int alignmask = crypto_blkcipher_alignmask(tfm); + unsigned int bsize; + unsigned int n; + int err; + + n = walk->total; + if (unlikely(n < crypto_blkcipher_blocksize(tfm))) { + desc->flags |= CRYPTO_TFM_RES_BAD_BLOCK_LEN; + return blkcipher_walk_done(desc, walk, -EINVAL); + } + + walk->flags &= ~(BLKCIPHER_WALK_SLOW | BLKCIPHER_WALK_COPY | + BLKCIPHER_WALK_DIFF); + if (!scatterwalk_aligned(&walk->in, alignmask) || + !scatterwalk_aligned(&walk->out, alignmask)) { + walk->flags |= BLKCIPHER_WALK_COPY; + if (!walk->page) { + walk->page = (void *)__get_free_page(GFP_ATOMIC); + if (!walk->page) + n = 0; + } + } + + bsize = min(walk->blocksize, n); + n = scatterwalk_clamp(&walk->in, n); + n = scatterwalk_clamp(&walk->out, n); + + if (unlikely(n < bsize)) { + err = blkcipher_next_slow(desc, walk, bsize, alignmask); + goto set_phys_lowmem; + } + + walk->nbytes = n; + if (walk->flags & BLKCIPHER_WALK_COPY) { + err = blkcipher_next_copy(walk); + goto set_phys_lowmem; + } + + return blkcipher_next_fast(desc, walk); + +set_phys_lowmem: + if (walk->flags & BLKCIPHER_WALK_PHYS) { + walk->src.phys.page = virt_to_page(walk->src.virt.addr); + walk->dst.phys.page = virt_to_page(walk->dst.virt.addr); + walk->src.phys.offset &= PAGE_SIZE - 1; + walk->dst.phys.offset &= PAGE_SIZE - 1; + } + return err; +} + +static inline int blkcipher_copy_iv(struct blkcipher_walk *walk, + struct crypto_blkcipher *tfm, + unsigned int alignmask) +{ + unsigned bs = walk->blocksize; + unsigned int ivsize = crypto_blkcipher_ivsize(tfm); + unsigned aligned_bs = ALIGN(bs, alignmask + 1); + unsigned int size = aligned_bs * 2 + ivsize + max(aligned_bs, ivsize) - + (alignmask + 1); + u8 *iv; + + size += alignmask & ~(crypto_tfm_ctx_alignment() - 1); + walk->buffer = kmalloc(size, GFP_ATOMIC); + if (!walk->buffer) + return -ENOMEM; + + iv = (u8 *)ALIGN((unsigned long)walk->buffer, alignmask + 1); + iv = blkcipher_get_spot(iv, bs) + aligned_bs; + iv = blkcipher_get_spot(iv, bs) + aligned_bs; + iv = blkcipher_get_spot(iv, ivsize); + + walk->iv = memcpy(iv, walk->iv, ivsize); + return 0; +} + +int blkcipher_walk_virt(struct blkcipher_desc *desc, + struct blkcipher_walk *walk) +{ + walk->flags &= ~BLKCIPHER_WALK_PHYS; + walk->blocksize = crypto_blkcipher_blocksize(desc->tfm); + return blkcipher_walk_first(desc, walk); +} +EXPORT_SYMBOL_GPL(blkcipher_walk_virt); + +int blkcipher_walk_phys(struct blkcipher_desc *desc, + struct blkcipher_walk *walk) +{ + walk->flags |= BLKCIPHER_WALK_PHYS; + walk->blocksize = crypto_blkcipher_blocksize(desc->tfm); + return blkcipher_walk_first(desc, walk); +} +EXPORT_SYMBOL_GPL(blkcipher_walk_phys); + +static int blkcipher_walk_first(struct blkcipher_desc *desc, + struct blkcipher_walk *walk) +{ + struct crypto_blkcipher *tfm = desc->tfm; + unsigned int alignmask = crypto_blkcipher_alignmask(tfm); + + if (WARN_ON_ONCE(in_irq())) + return -EDEADLK; + + walk->nbytes = walk->total; + if (unlikely(!walk->total)) + return 0; + + 
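+	/*
+	 * If the caller's IV is not aligned to the cipher's alignmask, an
+	 * aligned copy is made by blkcipher_copy_iv() below and walk->iv is
+	 * pointed at it; the final IV is copied back into desc->info by
+	 * blkcipher_walk_done() once the walk completes.
+	 */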
walk->buffer = NULL; + walk->iv = desc->info; + if (unlikely(((unsigned long)walk->iv & alignmask))) { + int err = blkcipher_copy_iv(walk, tfm, alignmask); + if (err) + return err; + } + + scatterwalk_start(&walk->in, walk->in.sg); + scatterwalk_start(&walk->out, walk->out.sg); + walk->page = NULL; + + return blkcipher_walk_next(desc, walk); +} + +int blkcipher_walk_virt_block(struct blkcipher_desc *desc, + struct blkcipher_walk *walk, + unsigned int blocksize) +{ + walk->flags &= ~BLKCIPHER_WALK_PHYS; + walk->blocksize = blocksize; + return blkcipher_walk_first(desc, walk); +} +EXPORT_SYMBOL_GPL(blkcipher_walk_virt_block); + +static int setkey_unaligned(struct crypto_tfm *tfm, const u8 *key, + unsigned int keylen) +{ + struct blkcipher_alg *cipher = &tfm->__crt_alg->cra_blkcipher; + unsigned long alignmask = crypto_tfm_alg_alignmask(tfm); + int ret; + u8 *buffer, *alignbuffer; + unsigned long absize; + + absize = keylen + alignmask; + buffer = kmalloc(absize, GFP_ATOMIC); + if (!buffer) + return -ENOMEM; + + alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1); + memcpy(alignbuffer, key, keylen); + ret = cipher->setkey(tfm, alignbuffer, keylen); + memset(alignbuffer, 0, keylen); + kfree(buffer); + return ret; +} + +static int setkey(struct crypto_tfm *tfm, const u8 *key, unsigned int keylen) +{ + struct blkcipher_alg *cipher = &tfm->__crt_alg->cra_blkcipher; + unsigned long alignmask = crypto_tfm_alg_alignmask(tfm); + + if (keylen < cipher->min_keysize || keylen > cipher->max_keysize) { + tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN; + return -EINVAL; + } + + if ((unsigned long)key & alignmask) + return setkey_unaligned(tfm, key, keylen); + + return cipher->setkey(tfm, key, keylen); +} + +static int async_setkey(struct crypto_ablkcipher *tfm, const u8 *key, + unsigned int keylen) +{ + return setkey(crypto_ablkcipher_tfm(tfm), key, keylen); +} + +static int async_encrypt(struct ablkcipher_request *req) +{ + struct crypto_tfm *tfm = req->base.tfm; + struct blkcipher_alg *alg = &tfm->__crt_alg->cra_blkcipher; + struct blkcipher_desc desc = { + .tfm = __crypto_blkcipher_cast(tfm), + .info = req->info, + .flags = req->base.flags, + }; + + + return alg->encrypt(&desc, req->dst, req->src, req->nbytes); +} + +static int async_decrypt(struct ablkcipher_request *req) +{ + struct crypto_tfm *tfm = req->base.tfm; + struct blkcipher_alg *alg = &tfm->__crt_alg->cra_blkcipher; + struct blkcipher_desc desc = { + .tfm = __crypto_blkcipher_cast(tfm), + .info = req->info, + .flags = req->base.flags, + }; + + return alg->decrypt(&desc, req->dst, req->src, req->nbytes); +} + +static unsigned int crypto_blkcipher_ctxsize(struct crypto_alg *alg, u32 type, + u32 mask) +{ + struct blkcipher_alg *cipher = &alg->cra_blkcipher; + unsigned int len = alg->cra_ctxsize; + + if ((mask & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_MASK && + cipher->ivsize) { + len = ALIGN(len, (unsigned long)alg->cra_alignmask + 1); + len += cipher->ivsize; + } + + return len; +} + +static int crypto_init_blkcipher_ops_async(struct crypto_tfm *tfm) +{ + struct ablkcipher_tfm *crt = &tfm->crt_ablkcipher; + struct blkcipher_alg *alg = &tfm->__crt_alg->cra_blkcipher; + + crt->setkey = async_setkey; + crt->encrypt = async_encrypt; + crt->decrypt = async_decrypt; + if (!alg->ivsize) { + crt->givencrypt = skcipher_null_givencrypt; + crt->givdecrypt = skcipher_null_givdecrypt; + } + crt->base = __crypto_ablkcipher_cast(tfm); + crt->ivsize = alg->ivsize; + + return 0; +} + +static int crypto_init_blkcipher_ops_sync(struct crypto_tfm *tfm) 
+{ + struct blkcipher_tfm *crt = &tfm->crt_blkcipher; + struct blkcipher_alg *alg = &tfm->__crt_alg->cra_blkcipher; + unsigned long align = crypto_tfm_alg_alignmask(tfm) + 1; + unsigned long addr; + + crt->setkey = setkey; + crt->encrypt = alg->encrypt; + crt->decrypt = alg->decrypt; + + addr = (unsigned long)crypto_tfm_ctx(tfm); + addr = ALIGN(addr, align); + addr += ALIGN(tfm->__crt_alg->cra_ctxsize, align); + crt->iv = (void *)addr; + + return 0; +} + +static int crypto_init_blkcipher_ops(struct crypto_tfm *tfm, u32 type, u32 mask) +{ + struct blkcipher_alg *alg = &tfm->__crt_alg->cra_blkcipher; + + if (alg->ivsize > PAGE_SIZE / 8) + return -EINVAL; + + if ((mask & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_MASK) + return crypto_init_blkcipher_ops_sync(tfm); + else + return crypto_init_blkcipher_ops_async(tfm); +} + +#ifdef CONFIG_NET +static int crypto_blkcipher_report(struct sk_buff *skb, struct crypto_alg *alg) +{ + struct crypto_report_blkcipher rblkcipher; + + snprintf(rblkcipher.type, CRYPTO_MAX_ALG_NAME, "%s", "blkcipher"); + snprintf(rblkcipher.geniv, CRYPTO_MAX_ALG_NAME, "%s", + alg->cra_blkcipher.geniv ?: "<default>"); + + rblkcipher.blocksize = alg->cra_blocksize; + rblkcipher.min_keysize = alg->cra_blkcipher.min_keysize; + rblkcipher.max_keysize = alg->cra_blkcipher.max_keysize; + rblkcipher.ivsize = alg->cra_blkcipher.ivsize; + + NLA_PUT(skb, CRYPTOCFGA_REPORT_BLKCIPHER, + sizeof(struct crypto_report_blkcipher), &rblkcipher); + + return 0; + +nla_put_failure: + return -EMSGSIZE; +} +#else +static int crypto_blkcipher_report(struct sk_buff *skb, struct crypto_alg *alg) +{ + return -ENOSYS; +} +#endif + +static void crypto_blkcipher_show(struct seq_file *m, struct crypto_alg *alg) + __attribute__ ((unused)); +static void crypto_blkcipher_show(struct seq_file *m, struct crypto_alg *alg) +{ + seq_printf(m, "type : blkcipher\n"); + seq_printf(m, "blocksize : %u\n", alg->cra_blocksize); + seq_printf(m, "min keysize : %u\n", alg->cra_blkcipher.min_keysize); + seq_printf(m, "max keysize : %u\n", alg->cra_blkcipher.max_keysize); + seq_printf(m, "ivsize : %u\n", alg->cra_blkcipher.ivsize); + seq_printf(m, "geniv : %s\n", alg->cra_blkcipher.geniv ?: + "<default>"); +} + +const struct crypto_type crypto_blkcipher_type = { + .ctxsize = crypto_blkcipher_ctxsize, + .init = crypto_init_blkcipher_ops, +#ifdef CONFIG_PROC_FS + .show = crypto_blkcipher_show, +#endif + .report = crypto_blkcipher_report, +}; +EXPORT_SYMBOL_GPL(crypto_blkcipher_type); + +static int crypto_grab_nivcipher(struct crypto_skcipher_spawn *spawn, + const char *name, u32 type, u32 mask) +{ + struct crypto_alg *alg; + int err; + + type = crypto_skcipher_type(type); + mask = crypto_skcipher_mask(mask)| CRYPTO_ALG_GENIV; + + alg = crypto_alg_mod_lookup(name, type, mask); + if (IS_ERR(alg)) + return PTR_ERR(alg); + + err = crypto_init_spawn(&spawn->base, alg, spawn->base.inst, mask); + crypto_mod_put(alg); + return err; +} + +struct crypto_instance *skcipher_geniv_alloc(struct crypto_template *tmpl, + struct rtattr **tb, u32 type, + u32 mask) +{ + struct { + int (*setkey)(struct crypto_ablkcipher *tfm, const u8 *key, + unsigned int keylen); + int (*encrypt)(struct ablkcipher_request *req); + int (*decrypt)(struct ablkcipher_request *req); + + unsigned int min_keysize; + unsigned int max_keysize; + unsigned int ivsize; + + const char *geniv; + } balg; + const char *name; + struct crypto_skcipher_spawn *spawn; + struct crypto_attr_type *algt; + struct crypto_instance *inst; + struct crypto_alg *alg; + int err; + + algt = 
crypto_get_attr_type(tb); + err = PTR_ERR(algt); + if (IS_ERR(algt)) + return ERR_PTR(err); + + if ((algt->type ^ (CRYPTO_ALG_TYPE_GIVCIPHER | CRYPTO_ALG_GENIV)) & + algt->mask) + return ERR_PTR(-EINVAL); + + name = crypto_attr_alg_name(tb[1]); + err = PTR_ERR(name); + if (IS_ERR(name)) + return ERR_PTR(err); + + inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL); + if (!inst) + return ERR_PTR(-ENOMEM); + + spawn = crypto_instance_ctx(inst); + + /* Ignore async algorithms if necessary. */ + mask |= crypto_requires_sync(algt->type, algt->mask); + + crypto_set_skcipher_spawn(spawn, inst); + err = crypto_grab_nivcipher(spawn, name, type, mask); + if (err) + goto err_free_inst; + + alg = crypto_skcipher_spawn_alg(spawn); + + if ((alg->cra_flags & CRYPTO_ALG_TYPE_MASK) == + CRYPTO_ALG_TYPE_BLKCIPHER) { + balg.ivsize = alg->cra_blkcipher.ivsize; + balg.min_keysize = alg->cra_blkcipher.min_keysize; + balg.max_keysize = alg->cra_blkcipher.max_keysize; + + balg.setkey = async_setkey; + balg.encrypt = async_encrypt; + balg.decrypt = async_decrypt; + + balg.geniv = alg->cra_blkcipher.geniv; + } else { + balg.ivsize = alg->cra_ablkcipher.ivsize; + balg.min_keysize = alg->cra_ablkcipher.min_keysize; + balg.max_keysize = alg->cra_ablkcipher.max_keysize; + + balg.setkey = alg->cra_ablkcipher.setkey; + balg.encrypt = alg->cra_ablkcipher.encrypt; + balg.decrypt = alg->cra_ablkcipher.decrypt; + + balg.geniv = alg->cra_ablkcipher.geniv; + } + + err = -EINVAL; + if (!balg.ivsize) + goto err_drop_alg; + + /* + * This is only true if we're constructing an algorithm with its + * default IV generator. For the default generator we elide the + * template name and double-check the IV generator. + */ + if (algt->mask & CRYPTO_ALG_GENIV) { + if (!balg.geniv) + balg.geniv = crypto_default_geniv(alg); + err = -EAGAIN; + if (strcmp(tmpl->name, balg.geniv)) + goto err_drop_alg; + + memcpy(inst->alg.cra_name, alg->cra_name, CRYPTO_MAX_ALG_NAME); + memcpy(inst->alg.cra_driver_name, alg->cra_driver_name, + CRYPTO_MAX_ALG_NAME); + } else { + err = -ENAMETOOLONG; + if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME, + "%s(%s)", tmpl->name, alg->cra_name) >= + CRYPTO_MAX_ALG_NAME) + goto err_drop_alg; + if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME, + "%s(%s)", tmpl->name, alg->cra_driver_name) >= + CRYPTO_MAX_ALG_NAME) + goto err_drop_alg; + } + + inst->alg.cra_flags = CRYPTO_ALG_TYPE_GIVCIPHER | CRYPTO_ALG_GENIV; + inst->alg.cra_flags |= alg->cra_flags & CRYPTO_ALG_ASYNC; + inst->alg.cra_priority = alg->cra_priority; + inst->alg.cra_blocksize = alg->cra_blocksize; + inst->alg.cra_alignmask = alg->cra_alignmask; + inst->alg.cra_type = &crypto_givcipher_type; + + inst->alg.cra_ablkcipher.ivsize = balg.ivsize; + inst->alg.cra_ablkcipher.min_keysize = balg.min_keysize; + inst->alg.cra_ablkcipher.max_keysize = balg.max_keysize; + inst->alg.cra_ablkcipher.geniv = balg.geniv; + + inst->alg.cra_ablkcipher.setkey = balg.setkey; + inst->alg.cra_ablkcipher.encrypt = balg.encrypt; + inst->alg.cra_ablkcipher.decrypt = balg.decrypt; + +out: + return inst; + +err_drop_alg: + crypto_drop_skcipher(spawn); +err_free_inst: + kfree(inst); + inst = ERR_PTR(err); + goto out; +} +EXPORT_SYMBOL_GPL(skcipher_geniv_alloc); + +void skcipher_geniv_free(struct crypto_instance *inst) +{ + crypto_drop_skcipher(crypto_instance_ctx(inst)); + kfree(inst); +} +EXPORT_SYMBOL_GPL(skcipher_geniv_free); + +int skcipher_geniv_init(struct crypto_tfm *tfm) +{ + struct crypto_instance *inst = (void *)tfm->__crt_alg; + struct crypto_ablkcipher 
*cipher; + + cipher = crypto_spawn_skcipher(crypto_instance_ctx(inst)); + if (IS_ERR(cipher)) + return PTR_ERR(cipher); + + tfm->crt_ablkcipher.base = cipher; + tfm->crt_ablkcipher.reqsize += crypto_ablkcipher_reqsize(cipher); + + return 0; +} +EXPORT_SYMBOL_GPL(skcipher_geniv_init); + +void skcipher_geniv_exit(struct crypto_tfm *tfm) +{ + crypto_free_ablkcipher(tfm->crt_ablkcipher.base); +} +EXPORT_SYMBOL_GPL(skcipher_geniv_exit); + +MODULE_LICENSE("GPL"); +MODULE_DESCRIPTION("Generic block chaining cipher type"); diff --git a/crypto/blowfish_common.c b/crypto/blowfish_common.c new file mode 100644 index 00000000..f636aab0 --- /dev/null +++ b/crypto/blowfish_common.c @@ -0,0 +1,402 @@ +/* + * Cryptographic API. + * + * Common Blowfish algorithm parts shared between the c and assembler + * implementations. + * + * Blowfish Cipher Algorithm, by Bruce Schneier. + * http://www.counterpane.com/blowfish.html + * + * Adapted from Kerneli implementation. + * + * Copyright (c) Herbert Valerio Riedel <hvr@hvrlab.org> + * Copyright (c) Kyle McMartin <kyle@debian.org> + * Copyright (c) 2002 James Morris <jmorris@intercode.com.au> + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 2 of the License, or + * (at your option) any later version. + * + */ +#include <linux/init.h> +#include <linux/module.h> +#include <linux/mm.h> +#include <asm/byteorder.h> +#include <linux/crypto.h> +#include <linux/types.h> +#include <crypto/blowfish.h> + +static const u32 bf_pbox[16 + 2] = { + 0x243f6a88, 0x85a308d3, 0x13198a2e, 0x03707344, + 0xa4093822, 0x299f31d0, 0x082efa98, 0xec4e6c89, + 0x452821e6, 0x38d01377, 0xbe5466cf, 0x34e90c6c, + 0xc0ac29b7, 0xc97c50dd, 0x3f84d5b5, 0xb5470917, + 0x9216d5d9, 0x8979fb1b, +}; + +static const u32 bf_sbox[256 * 4] = { + 0xd1310ba6, 0x98dfb5ac, 0x2ffd72db, 0xd01adfb7, + 0xb8e1afed, 0x6a267e96, 0xba7c9045, 0xf12c7f99, + 0x24a19947, 0xb3916cf7, 0x0801f2e2, 0x858efc16, + 0x636920d8, 0x71574e69, 0xa458fea3, 0xf4933d7e, + 0x0d95748f, 0x728eb658, 0x718bcd58, 0x82154aee, + 0x7b54a41d, 0xc25a59b5, 0x9c30d539, 0x2af26013, + 0xc5d1b023, 0x286085f0, 0xca417918, 0xb8db38ef, + 0x8e79dcb0, 0x603a180e, 0x6c9e0e8b, 0xb01e8a3e, + 0xd71577c1, 0xbd314b27, 0x78af2fda, 0x55605c60, + 0xe65525f3, 0xaa55ab94, 0x57489862, 0x63e81440, + 0x55ca396a, 0x2aab10b6, 0xb4cc5c34, 0x1141e8ce, + 0xa15486af, 0x7c72e993, 0xb3ee1411, 0x636fbc2a, + 0x2ba9c55d, 0x741831f6, 0xce5c3e16, 0x9b87931e, + 0xafd6ba33, 0x6c24cf5c, 0x7a325381, 0x28958677, + 0x3b8f4898, 0x6b4bb9af, 0xc4bfe81b, 0x66282193, + 0x61d809cc, 0xfb21a991, 0x487cac60, 0x5dec8032, + 0xef845d5d, 0xe98575b1, 0xdc262302, 0xeb651b88, + 0x23893e81, 0xd396acc5, 0x0f6d6ff3, 0x83f44239, + 0x2e0b4482, 0xa4842004, 0x69c8f04a, 0x9e1f9b5e, + 0x21c66842, 0xf6e96c9a, 0x670c9c61, 0xabd388f0, + 0x6a51a0d2, 0xd8542f68, 0x960fa728, 0xab5133a3, + 0x6eef0b6c, 0x137a3be4, 0xba3bf050, 0x7efb2a98, + 0xa1f1651d, 0x39af0176, 0x66ca593e, 0x82430e88, + 0x8cee8619, 0x456f9fb4, 0x7d84a5c3, 0x3b8b5ebe, + 0xe06f75d8, 0x85c12073, 0x401a449f, 0x56c16aa6, + 0x4ed3aa62, 0x363f7706, 0x1bfedf72, 0x429b023d, + 0x37d0d724, 0xd00a1248, 0xdb0fead3, 0x49f1c09b, + 0x075372c9, 0x80991b7b, 0x25d479d8, 0xf6e8def7, + 0xe3fe501a, 0xb6794c3b, 0x976ce0bd, 0x04c006ba, + 0xc1a94fb6, 0x409f60c4, 0x5e5c9ec2, 0x196a2463, + 0x68fb6faf, 0x3e6c53b5, 0x1339b2eb, 0x3b52ec6f, + 0x6dfc511f, 0x9b30952c, 0xcc814544, 0xaf5ebd09, + 0xbee3d004, 0xde334afd, 0x660f2807, 0x192e4bb3, + 
0xc0cba857, 0x45c8740f, 0xd20b5f39, 0xb9d3fbdb, + 0x5579c0bd, 0x1a60320a, 0xd6a100c6, 0x402c7279, + 0x679f25fe, 0xfb1fa3cc, 0x8ea5e9f8, 0xdb3222f8, + 0x3c7516df, 0xfd616b15, 0x2f501ec8, 0xad0552ab, + 0x323db5fa, 0xfd238760, 0x53317b48, 0x3e00df82, + 0x9e5c57bb, 0xca6f8ca0, 0x1a87562e, 0xdf1769db, + 0xd542a8f6, 0x287effc3, 0xac6732c6, 0x8c4f5573, + 0x695b27b0, 0xbbca58c8, 0xe1ffa35d, 0xb8f011a0, + 0x10fa3d98, 0xfd2183b8, 0x4afcb56c, 0x2dd1d35b, + 0x9a53e479, 0xb6f84565, 0xd28e49bc, 0x4bfb9790, + 0xe1ddf2da, 0xa4cb7e33, 0x62fb1341, 0xcee4c6e8, + 0xef20cada, 0x36774c01, 0xd07e9efe, 0x2bf11fb4, + 0x95dbda4d, 0xae909198, 0xeaad8e71, 0x6b93d5a0, + 0xd08ed1d0, 0xafc725e0, 0x8e3c5b2f, 0x8e7594b7, + 0x8ff6e2fb, 0xf2122b64, 0x8888b812, 0x900df01c, + 0x4fad5ea0, 0x688fc31c, 0xd1cff191, 0xb3a8c1ad, + 0x2f2f2218, 0xbe0e1777, 0xea752dfe, 0x8b021fa1, + 0xe5a0cc0f, 0xb56f74e8, 0x18acf3d6, 0xce89e299, + 0xb4a84fe0, 0xfd13e0b7, 0x7cc43b81, 0xd2ada8d9, + 0x165fa266, 0x80957705, 0x93cc7314, 0x211a1477, + 0xe6ad2065, 0x77b5fa86, 0xc75442f5, 0xfb9d35cf, + 0xebcdaf0c, 0x7b3e89a0, 0xd6411bd3, 0xae1e7e49, + 0x00250e2d, 0x2071b35e, 0x226800bb, 0x57b8e0af, + 0x2464369b, 0xf009b91e, 0x5563911d, 0x59dfa6aa, + 0x78c14389, 0xd95a537f, 0x207d5ba2, 0x02e5b9c5, + 0x83260376, 0x6295cfa9, 0x11c81968, 0x4e734a41, + 0xb3472dca, 0x7b14a94a, 0x1b510052, 0x9a532915, + 0xd60f573f, 0xbc9bc6e4, 0x2b60a476, 0x81e67400, + 0x08ba6fb5, 0x571be91f, 0xf296ec6b, 0x2a0dd915, + 0xb6636521, 0xe7b9f9b6, 0xff34052e, 0xc5855664, + 0x53b02d5d, 0xa99f8fa1, 0x08ba4799, 0x6e85076a, + 0x4b7a70e9, 0xb5b32944, 0xdb75092e, 0xc4192623, + 0xad6ea6b0, 0x49a7df7d, 0x9cee60b8, 0x8fedb266, + 0xecaa8c71, 0x699a17ff, 0x5664526c, 0xc2b19ee1, + 0x193602a5, 0x75094c29, 0xa0591340, 0xe4183a3e, + 0x3f54989a, 0x5b429d65, 0x6b8fe4d6, 0x99f73fd6, + 0xa1d29c07, 0xefe830f5, 0x4d2d38e6, 0xf0255dc1, + 0x4cdd2086, 0x8470eb26, 0x6382e9c6, 0x021ecc5e, + 0x09686b3f, 0x3ebaefc9, 0x3c971814, 0x6b6a70a1, + 0x687f3584, 0x52a0e286, 0xb79c5305, 0xaa500737, + 0x3e07841c, 0x7fdeae5c, 0x8e7d44ec, 0x5716f2b8, + 0xb03ada37, 0xf0500c0d, 0xf01c1f04, 0x0200b3ff, + 0xae0cf51a, 0x3cb574b2, 0x25837a58, 0xdc0921bd, + 0xd19113f9, 0x7ca92ff6, 0x94324773, 0x22f54701, + 0x3ae5e581, 0x37c2dadc, 0xc8b57634, 0x9af3dda7, + 0xa9446146, 0x0fd0030e, 0xecc8c73e, 0xa4751e41, + 0xe238cd99, 0x3bea0e2f, 0x3280bba1, 0x183eb331, + 0x4e548b38, 0x4f6db908, 0x6f420d03, 0xf60a04bf, + 0x2cb81290, 0x24977c79, 0x5679b072, 0xbcaf89af, + 0xde9a771f, 0xd9930810, 0xb38bae12, 0xdccf3f2e, + 0x5512721f, 0x2e6b7124, 0x501adde6, 0x9f84cd87, + 0x7a584718, 0x7408da17, 0xbc9f9abc, 0xe94b7d8c, + 0xec7aec3a, 0xdb851dfa, 0x63094366, 0xc464c3d2, + 0xef1c1847, 0x3215d908, 0xdd433b37, 0x24c2ba16, + 0x12a14d43, 0x2a65c451, 0x50940002, 0x133ae4dd, + 0x71dff89e, 0x10314e55, 0x81ac77d6, 0x5f11199b, + 0x043556f1, 0xd7a3c76b, 0x3c11183b, 0x5924a509, + 0xf28fe6ed, 0x97f1fbfa, 0x9ebabf2c, 0x1e153c6e, + 0x86e34570, 0xeae96fb1, 0x860e5e0a, 0x5a3e2ab3, + 0x771fe71c, 0x4e3d06fa, 0x2965dcb9, 0x99e71d0f, + 0x803e89d6, 0x5266c825, 0x2e4cc978, 0x9c10b36a, + 0xc6150eba, 0x94e2ea78, 0xa5fc3c53, 0x1e0a2df4, + 0xf2f74ea7, 0x361d2b3d, 0x1939260f, 0x19c27960, + 0x5223a708, 0xf71312b6, 0xebadfe6e, 0xeac31f66, + 0xe3bc4595, 0xa67bc883, 0xb17f37d1, 0x018cff28, + 0xc332ddef, 0xbe6c5aa5, 0x65582185, 0x68ab9802, + 0xeecea50f, 0xdb2f953b, 0x2aef7dad, 0x5b6e2f84, + 0x1521b628, 0x29076170, 0xecdd4775, 0x619f1510, + 0x13cca830, 0xeb61bd96, 0x0334fe1e, 0xaa0363cf, + 0xb5735c90, 0x4c70a239, 0xd59e9e0b, 0xcbaade14, + 0xeecc86bc, 0x60622ca7, 0x9cab5cab, 0xb2f3846e, + 
0x648b1eaf, 0x19bdf0ca, 0xa02369b9, 0x655abb50, + 0x40685a32, 0x3c2ab4b3, 0x319ee9d5, 0xc021b8f7, + 0x9b540b19, 0x875fa099, 0x95f7997e, 0x623d7da8, + 0xf837889a, 0x97e32d77, 0x11ed935f, 0x16681281, + 0x0e358829, 0xc7e61fd6, 0x96dedfa1, 0x7858ba99, + 0x57f584a5, 0x1b227263, 0x9b83c3ff, 0x1ac24696, + 0xcdb30aeb, 0x532e3054, 0x8fd948e4, 0x6dbc3128, + 0x58ebf2ef, 0x34c6ffea, 0xfe28ed61, 0xee7c3c73, + 0x5d4a14d9, 0xe864b7e3, 0x42105d14, 0x203e13e0, + 0x45eee2b6, 0xa3aaabea, 0xdb6c4f15, 0xfacb4fd0, + 0xc742f442, 0xef6abbb5, 0x654f3b1d, 0x41cd2105, + 0xd81e799e, 0x86854dc7, 0xe44b476a, 0x3d816250, + 0xcf62a1f2, 0x5b8d2646, 0xfc8883a0, 0xc1c7b6a3, + 0x7f1524c3, 0x69cb7492, 0x47848a0b, 0x5692b285, + 0x095bbf00, 0xad19489d, 0x1462b174, 0x23820e00, + 0x58428d2a, 0x0c55f5ea, 0x1dadf43e, 0x233f7061, + 0x3372f092, 0x8d937e41, 0xd65fecf1, 0x6c223bdb, + 0x7cde3759, 0xcbee7460, 0x4085f2a7, 0xce77326e, + 0xa6078084, 0x19f8509e, 0xe8efd855, 0x61d99735, + 0xa969a7aa, 0xc50c06c2, 0x5a04abfc, 0x800bcadc, + 0x9e447a2e, 0xc3453484, 0xfdd56705, 0x0e1e9ec9, + 0xdb73dbd3, 0x105588cd, 0x675fda79, 0xe3674340, + 0xc5c43465, 0x713e38d8, 0x3d28f89e, 0xf16dff20, + 0x153e21e7, 0x8fb03d4a, 0xe6e39f2b, 0xdb83adf7, + 0xe93d5a68, 0x948140f7, 0xf64c261c, 0x94692934, + 0x411520f7, 0x7602d4f7, 0xbcf46b2e, 0xd4a20068, + 0xd4082471, 0x3320f46a, 0x43b7d4b7, 0x500061af, + 0x1e39f62e, 0x97244546, 0x14214f74, 0xbf8b8840, + 0x4d95fc1d, 0x96b591af, 0x70f4ddd3, 0x66a02f45, + 0xbfbc09ec, 0x03bd9785, 0x7fac6dd0, 0x31cb8504, + 0x96eb27b3, 0x55fd3941, 0xda2547e6, 0xabca0a9a, + 0x28507825, 0x530429f4, 0x0a2c86da, 0xe9b66dfb, + 0x68dc1462, 0xd7486900, 0x680ec0a4, 0x27a18dee, + 0x4f3ffea2, 0xe887ad8c, 0xb58ce006, 0x7af4d6b6, + 0xaace1e7c, 0xd3375fec, 0xce78a399, 0x406b2a42, + 0x20fe9e35, 0xd9f385b9, 0xee39d7ab, 0x3b124e8b, + 0x1dc9faf7, 0x4b6d1856, 0x26a36631, 0xeae397b2, + 0x3a6efa74, 0xdd5b4332, 0x6841e7f7, 0xca7820fb, + 0xfb0af54e, 0xd8feb397, 0x454056ac, 0xba489527, + 0x55533a3a, 0x20838d87, 0xfe6ba9b7, 0xd096954b, + 0x55a867bc, 0xa1159a58, 0xcca92963, 0x99e1db33, + 0xa62a4a56, 0x3f3125f9, 0x5ef47e1c, 0x9029317c, + 0xfdf8e802, 0x04272f70, 0x80bb155c, 0x05282ce3, + 0x95c11548, 0xe4c66d22, 0x48c1133f, 0xc70f86dc, + 0x07f9c9ee, 0x41041f0f, 0x404779a4, 0x5d886e17, + 0x325f51eb, 0xd59bc0d1, 0xf2bcc18f, 0x41113564, + 0x257b7834, 0x602a9c60, 0xdff8e8a3, 0x1f636c1b, + 0x0e12b4c2, 0x02e1329e, 0xaf664fd1, 0xcad18115, + 0x6b2395e0, 0x333e92e1, 0x3b240b62, 0xeebeb922, + 0x85b2a20e, 0xe6ba0d99, 0xde720c8c, 0x2da2f728, + 0xd0127845, 0x95b794fd, 0x647d0862, 0xe7ccf5f0, + 0x5449a36f, 0x877d48fa, 0xc39dfd27, 0xf33e8d1e, + 0x0a476341, 0x992eff74, 0x3a6f6eab, 0xf4f8fd37, + 0xa812dc60, 0xa1ebddf8, 0x991be14c, 0xdb6e6b0d, + 0xc67b5510, 0x6d672c37, 0x2765d43b, 0xdcd0e804, + 0xf1290dc7, 0xcc00ffa3, 0xb5390f92, 0x690fed0b, + 0x667b9ffb, 0xcedb7d9c, 0xa091cf0b, 0xd9155ea3, + 0xbb132f88, 0x515bad24, 0x7b9479bf, 0x763bd6eb, + 0x37392eb3, 0xcc115979, 0x8026e297, 0xf42e312d, + 0x6842ada7, 0xc66a2b3b, 0x12754ccc, 0x782ef11c, + 0x6a124237, 0xb79251e7, 0x06a1bbe6, 0x4bfb6350, + 0x1a6b1018, 0x11caedfa, 0x3d25bdd8, 0xe2e1c3c9, + 0x44421659, 0x0a121386, 0xd90cec6e, 0xd5abea2a, + 0x64af674e, 0xda86a85f, 0xbebfe988, 0x64e4c3fe, + 0x9dbc8057, 0xf0f7c086, 0x60787bf8, 0x6003604d, + 0xd1fd8346, 0xf6381fb0, 0x7745ae04, 0xd736fccc, + 0x83426b33, 0xf01eab71, 0xb0804187, 0x3c005e5f, + 0x77a057be, 0xbde8ae24, 0x55464299, 0xbf582e61, + 0x4e58f48f, 0xf2ddfda2, 0xf474ef38, 0x8789bdc2, + 0x5366f9c3, 0xc8b38e74, 0xb475f255, 0x46fcd9b9, + 0x7aeb2661, 0x8b1ddf84, 0x846a0e79, 0x915f95e2, + 
0x466e598e, 0x20b45770, 0x8cd55591, 0xc902de4c, + 0xb90bace1, 0xbb8205d0, 0x11a86248, 0x7574a99e, + 0xb77f19b6, 0xe0a9dc09, 0x662d09a1, 0xc4324633, + 0xe85a1f02, 0x09f0be8c, 0x4a99a025, 0x1d6efe10, + 0x1ab93d1d, 0x0ba5a4df, 0xa186f20f, 0x2868f169, + 0xdcb7da83, 0x573906fe, 0xa1e2ce9b, 0x4fcd7f52, + 0x50115e01, 0xa70683fa, 0xa002b5c4, 0x0de6d027, + 0x9af88c27, 0x773f8641, 0xc3604c06, 0x61a806b5, + 0xf0177a28, 0xc0f586e0, 0x006058aa, 0x30dc7d62, + 0x11e69ed7, 0x2338ea63, 0x53c2dd94, 0xc2c21634, + 0xbbcbee56, 0x90bcb6de, 0xebfc7da1, 0xce591d76, + 0x6f05e409, 0x4b7c0188, 0x39720a3d, 0x7c927c24, + 0x86e3725f, 0x724d9db9, 0x1ac15bb4, 0xd39eb8fc, + 0xed545578, 0x08fca5b5, 0xd83d7cd3, 0x4dad0fc4, + 0x1e50ef5e, 0xb161e6f8, 0xa28514d9, 0x6c51133c, + 0x6fd5c7e7, 0x56e14ec4, 0x362abfce, 0xddc6c837, + 0xd79a3234, 0x92638212, 0x670efa8e, 0x406000e0, + 0x3a39ce37, 0xd3faf5cf, 0xabc27737, 0x5ac52d1b, + 0x5cb0679e, 0x4fa33742, 0xd3822740, 0x99bc9bbe, + 0xd5118e9d, 0xbf0f7315, 0xd62d1c7e, 0xc700c47b, + 0xb78c1b6b, 0x21a19045, 0xb26eb1be, 0x6a366eb4, + 0x5748ab2f, 0xbc946e79, 0xc6a376d2, 0x6549c2c8, + 0x530ff8ee, 0x468dde7d, 0xd5730a1d, 0x4cd04dc6, + 0x2939bbdb, 0xa9ba4650, 0xac9526e8, 0xbe5ee304, + 0xa1fad5f0, 0x6a2d519a, 0x63ef8ce2, 0x9a86ee22, + 0xc089c2b8, 0x43242ef6, 0xa51e03aa, 0x9cf2d0a4, + 0x83c061ba, 0x9be96a4d, 0x8fe51550, 0xba645bd6, + 0x2826a2f9, 0xa73a3ae1, 0x4ba99586, 0xef5562e9, + 0xc72fefd3, 0xf752f7da, 0x3f046f69, 0x77fa0a59, + 0x80e4a915, 0x87b08601, 0x9b09e6ad, 0x3b3ee593, + 0xe990fd5a, 0x9e34d797, 0x2cf0b7d9, 0x022b8b51, + 0x96d5ac3a, 0x017da67d, 0xd1cf3ed6, 0x7c7d2d28, + 0x1f9f25cf, 0xadf2b89b, 0x5ad6b472, 0x5a88f54c, + 0xe029ac71, 0xe019a5e6, 0x47b0acfd, 0xed93fa9b, + 0xe8d3c48d, 0x283b57cc, 0xf8d56629, 0x79132e28, + 0x785f0191, 0xed756055, 0xf7960e44, 0xe3d35e8c, + 0x15056dd4, 0x88f46dba, 0x03a16125, 0x0564f0bd, + 0xc3eb9e15, 0x3c9057a2, 0x97271aec, 0xa93a072a, + 0x1b3f6d9b, 0x1e6321f5, 0xf59c66fb, 0x26dcf319, + 0x7533d928, 0xb155fdf5, 0x03563482, 0x8aba3cbb, + 0x28517711, 0xc20ad9f8, 0xabcc5167, 0xccad925f, + 0x4de81751, 0x3830dc8e, 0x379d5862, 0x9320f991, + 0xea7a90c2, 0xfb3e7bce, 0x5121ce64, 0x774fbe32, + 0xa8b6e37e, 0xc3293d46, 0x48de5369, 0x6413e680, + 0xa2ae0810, 0xdd6db224, 0x69852dfd, 0x09072166, + 0xb39a460a, 0x6445c0dd, 0x586cdecf, 0x1c20c8ae, + 0x5bbef7dd, 0x1b588d40, 0xccd2017f, 0x6bb4e3bb, + 0xdda26a7e, 0x3a59ff45, 0x3e350a44, 0xbcb4cdd5, + 0x72eacea8, 0xfa6484bb, 0x8d6612ae, 0xbf3c6f47, + 0xd29be463, 0x542f5d9e, 0xaec2771b, 0xf64e6370, + 0x740e0d8d, 0xe75b1357, 0xf8721671, 0xaf537d5d, + 0x4040cb08, 0x4eb4e2cc, 0x34d2466a, 0x0115af84, + 0xe1b00428, 0x95983a1d, 0x06b89fb4, 0xce6ea048, + 0x6f3f3b82, 0x3520ab82, 0x011a1d4b, 0x277227f8, + 0x611560b1, 0xe7933fdc, 0xbb3a792b, 0x344525bd, + 0xa08839e1, 0x51ce794b, 0x2f32c9b7, 0xa01fbac9, + 0xe01cc87e, 0xbcc7d1f6, 0xcf0111c3, 0xa1e8aac7, + 0x1a908749, 0xd44fbd9a, 0xd0dadecb, 0xd50ada38, + 0x0339c32a, 0xc6913667, 0x8df9317c, 0xe0b12b4f, + 0xf79e59b7, 0x43f5bb3a, 0xf2d519ff, 0x27d9459c, + 0xbf97222c, 0x15e6fc2a, 0x0f91fc71, 0x9b941525, + 0xfae59361, 0xceb69ceb, 0xc2a86459, 0x12baa8d1, + 0xb6c1075e, 0xe3056a0c, 0x10d25065, 0xcb03a442, + 0xe0ec6e0e, 0x1698db3b, 0x4c98a0be, 0x3278e964, + 0x9f1f9532, 0xe0d392df, 0xd3a0342b, 0x8971f21e, + 0x1b0a7441, 0x4ba3348c, 0xc5be7120, 0xc37632d8, + 0xdf359f8d, 0x9b992f2e, 0xe60b6f47, 0x0fe3f11d, + 0xe54cda54, 0x1edad891, 0xce6279cf, 0xcd3e7e6f, + 0x1618b166, 0xfd2c1d05, 0x848fd2c5, 0xf6fb2299, + 0xf523f357, 0xa6327623, 0x93a83531, 0x56cccd02, + 0xacf08162, 0x5a75ebb5, 0x6e163697, 0x88d273cc, + 
0xde966292, 0x81b949d0, 0x4c50901b, 0x71c65614, + 0xe6c6c7bd, 0x327a140a, 0x45e1d006, 0xc3f27b9a, + 0xc9aa53fd, 0x62a80f00, 0xbb25bfe2, 0x35bdd2f6, + 0x71126905, 0xb2040222, 0xb6cbcf7c, 0xcd769c2b, + 0x53113ec0, 0x1640e3d3, 0x38abbd60, 0x2547adf0, + 0xba38209c, 0xf746ce76, 0x77afa1c5, 0x20756060, + 0x85cbfe4e, 0x8ae88dd8, 0x7aaaf9b0, 0x4cf9aa7e, + 0x1948c25c, 0x02fb8a8c, 0x01c36ae4, 0xd6ebe1f9, + 0x90d4f869, 0xa65cdea0, 0x3f09252d, 0xc208e69f, + 0xb74e6132, 0xce77e25b, 0x578fdfe3, 0x3ac372e6, +}; + +/* + * Round loop unrolling macros, S is a pointer to a S-Box array + * organized in 4 unsigned longs at a row. + */ +#define GET32_3(x) (((x) & 0xff)) +#define GET32_2(x) (((x) >> (8)) & (0xff)) +#define GET32_1(x) (((x) >> (16)) & (0xff)) +#define GET32_0(x) (((x) >> (24)) & (0xff)) + +#define bf_F(x) (((S[GET32_0(x)] + S[256 + GET32_1(x)]) ^ \ + S[512 + GET32_2(x)]) + S[768 + GET32_3(x)]) + +#define ROUND(a, b, n) ({ b ^= P[n]; a ^= bf_F(b); }) + +/* + * The blowfish encipher, processes 64-bit blocks. + * NOTE: This function MUSTN'T respect endianess + */ +static void encrypt_block(struct bf_ctx *bctx, u32 *dst, u32 *src) +{ + const u32 *P = bctx->p; + const u32 *S = bctx->s; + u32 yl = src[0]; + u32 yr = src[1]; + + ROUND(yr, yl, 0); + ROUND(yl, yr, 1); + ROUND(yr, yl, 2); + ROUND(yl, yr, 3); + ROUND(yr, yl, 4); + ROUND(yl, yr, 5); + ROUND(yr, yl, 6); + ROUND(yl, yr, 7); + ROUND(yr, yl, 8); + ROUND(yl, yr, 9); + ROUND(yr, yl, 10); + ROUND(yl, yr, 11); + ROUND(yr, yl, 12); + ROUND(yl, yr, 13); + ROUND(yr, yl, 14); + ROUND(yl, yr, 15); + + yl ^= P[16]; + yr ^= P[17]; + + dst[0] = yr; + dst[1] = yl; +} + +/* + * Calculates the blowfish S and P boxes for encryption and decryption. + */ +int blowfish_setkey(struct crypto_tfm *tfm, const u8 *key, unsigned int keylen) +{ + struct bf_ctx *ctx = crypto_tfm_ctx(tfm); + u32 *P = ctx->p; + u32 *S = ctx->s; + short i, j, count; + u32 data[2], temp; + + /* Copy the initialization s-boxes */ + for (i = 0, count = 0; i < 256; i++) + for (j = 0; j < 4; j++, count++) + S[count] = bf_sbox[count]; + + /* Set the p-boxes */ + for (i = 0; i < 16 + 2; i++) + P[i] = bf_pbox[i]; + + /* Actual subkey generation */ + for (j = 0, i = 0; i < 16 + 2; i++) { + temp = (((u32)key[j] << 24) | + ((u32)key[(j + 1) % keylen] << 16) | + ((u32)key[(j + 2) % keylen] << 8) | + ((u32)key[(j + 3) % keylen])); + + P[i] = P[i] ^ temp; + j = (j + 4) % keylen; + } + + data[0] = 0x00000000; + data[1] = 0x00000000; + + for (i = 0; i < 16 + 2; i += 2) { + encrypt_block((struct bf_ctx *)ctx, data, data); + + P[i] = data[0]; + P[i + 1] = data[1]; + } + + for (i = 0; i < 4; i++) { + for (j = 0, count = i * 256; j < 256; j += 2, count += 2) { + encrypt_block((struct bf_ctx *)ctx, data, data); + + S[count] = data[0]; + S[count + 1] = data[1]; + } + } + + /* Bruce says not to bother with the weak key check. */ + return 0; +} +EXPORT_SYMBOL_GPL(blowfish_setkey); + +MODULE_LICENSE("GPL"); +MODULE_DESCRIPTION("Blowfish Cipher common functions"); diff --git a/crypto/blowfish_generic.c b/crypto/blowfish_generic.c new file mode 100644 index 00000000..6f269b5c --- /dev/null +++ b/crypto/blowfish_generic.c @@ -0,0 +1,142 @@ +/* + * Cryptographic API. + * + * Blowfish Cipher Algorithm, by Bruce Schneier. + * http://www.counterpane.com/blowfish.html + * + * Adapted from Kerneli implementation. 
+ * + * Copyright (c) Herbert Valerio Riedel <hvr@hvrlab.org> + * Copyright (c) Kyle McMartin <kyle@debian.org> + * Copyright (c) 2002 James Morris <jmorris@intercode.com.au> + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 2 of the License, or + * (at your option) any later version. + * + */ +#include <linux/init.h> +#include <linux/module.h> +#include <linux/mm.h> +#include <asm/byteorder.h> +#include <linux/crypto.h> +#include <linux/types.h> +#include <crypto/blowfish.h> + +/* + * Round loop unrolling macros, S is a pointer to a S-Box array + * organized in 4 unsigned longs at a row. + */ +#define GET32_3(x) (((x) & 0xff)) +#define GET32_2(x) (((x) >> (8)) & (0xff)) +#define GET32_1(x) (((x) >> (16)) & (0xff)) +#define GET32_0(x) (((x) >> (24)) & (0xff)) + +#define bf_F(x) (((S[GET32_0(x)] + S[256 + GET32_1(x)]) ^ \ + S[512 + GET32_2(x)]) + S[768 + GET32_3(x)]) + +#define ROUND(a, b, n) ({ b ^= P[n]; a ^= bf_F(b); }) + +static void bf_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) +{ + struct bf_ctx *ctx = crypto_tfm_ctx(tfm); + const __be32 *in_blk = (const __be32 *)src; + __be32 *const out_blk = (__be32 *)dst; + const u32 *P = ctx->p; + const u32 *S = ctx->s; + u32 yl = be32_to_cpu(in_blk[0]); + u32 yr = be32_to_cpu(in_blk[1]); + + ROUND(yr, yl, 0); + ROUND(yl, yr, 1); + ROUND(yr, yl, 2); + ROUND(yl, yr, 3); + ROUND(yr, yl, 4); + ROUND(yl, yr, 5); + ROUND(yr, yl, 6); + ROUND(yl, yr, 7); + ROUND(yr, yl, 8); + ROUND(yl, yr, 9); + ROUND(yr, yl, 10); + ROUND(yl, yr, 11); + ROUND(yr, yl, 12); + ROUND(yl, yr, 13); + ROUND(yr, yl, 14); + ROUND(yl, yr, 15); + + yl ^= P[16]; + yr ^= P[17]; + + out_blk[0] = cpu_to_be32(yr); + out_blk[1] = cpu_to_be32(yl); +} + +static void bf_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) +{ + struct bf_ctx *ctx = crypto_tfm_ctx(tfm); + const __be32 *in_blk = (const __be32 *)src; + __be32 *const out_blk = (__be32 *)dst; + const u32 *P = ctx->p; + const u32 *S = ctx->s; + u32 yl = be32_to_cpu(in_blk[0]); + u32 yr = be32_to_cpu(in_blk[1]); + + ROUND(yr, yl, 17); + ROUND(yl, yr, 16); + ROUND(yr, yl, 15); + ROUND(yl, yr, 14); + ROUND(yr, yl, 13); + ROUND(yl, yr, 12); + ROUND(yr, yl, 11); + ROUND(yl, yr, 10); + ROUND(yr, yl, 9); + ROUND(yl, yr, 8); + ROUND(yr, yl, 7); + ROUND(yl, yr, 6); + ROUND(yr, yl, 5); + ROUND(yl, yr, 4); + ROUND(yr, yl, 3); + ROUND(yl, yr, 2); + + yl ^= P[1]; + yr ^= P[0]; + + out_blk[0] = cpu_to_be32(yr); + out_blk[1] = cpu_to_be32(yl); +} + +static struct crypto_alg alg = { + .cra_name = "blowfish", + .cra_driver_name = "blowfish-generic", + .cra_priority = 100, + .cra_flags = CRYPTO_ALG_TYPE_CIPHER, + .cra_blocksize = BF_BLOCK_SIZE, + .cra_ctxsize = sizeof(struct bf_ctx), + .cra_alignmask = 3, + .cra_module = THIS_MODULE, + .cra_list = LIST_HEAD_INIT(alg.cra_list), + .cra_u = { .cipher = { + .cia_min_keysize = BF_MIN_KEY_SIZE, + .cia_max_keysize = BF_MAX_KEY_SIZE, + .cia_setkey = blowfish_setkey, + .cia_encrypt = bf_encrypt, + .cia_decrypt = bf_decrypt } } +}; + +static int __init blowfish_mod_init(void) +{ + return crypto_register_alg(&alg); +} + +static void __exit blowfish_mod_fini(void) +{ + crypto_unregister_alg(&alg); +} + +module_init(blowfish_mod_init); +module_exit(blowfish_mod_fini); + +MODULE_LICENSE("GPL"); +MODULE_DESCRIPTION("Blowfish Cipher Algorithm"); +MODULE_ALIAS("blowfish"); diff --git a/crypto/camellia_generic.c b/crypto/camellia_generic.c new file mode 
100644 index 00000000..f7aaaaf8 --- /dev/null +++ b/crypto/camellia_generic.c @@ -0,0 +1,1102 @@ +/* + * Copyright (C) 2006 + * NTT (Nippon Telegraph and Telephone Corporation). + * + * This program is free software; you can redistribute it and/or + * modify it under the terms of the GNU General Public License + * as published by the Free Software Foundation; either version 2 + * of the License, or (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. + */ + +/* + * Algorithm Specification + * http://info.isl.ntt.co.jp/crypt/eng/camellia/specifications.html + */ + +/* + * + * NOTE --- NOTE --- NOTE --- NOTE + * This implementation assumes that all memory addresses passed + * as parameters are four-byte aligned. + * + */ + +#include <linux/crypto.h> +#include <linux/errno.h> +#include <linux/init.h> +#include <linux/kernel.h> +#include <linux/module.h> +#include <linux/bitops.h> +#include <asm/unaligned.h> + +static const u32 camellia_sp1110[256] = { + 0x70707000, 0x82828200, 0x2c2c2c00, 0xececec00, + 0xb3b3b300, 0x27272700, 0xc0c0c000, 0xe5e5e500, + 0xe4e4e400, 0x85858500, 0x57575700, 0x35353500, + 0xeaeaea00, 0x0c0c0c00, 0xaeaeae00, 0x41414100, + 0x23232300, 0xefefef00, 0x6b6b6b00, 0x93939300, + 0x45454500, 0x19191900, 0xa5a5a500, 0x21212100, + 0xededed00, 0x0e0e0e00, 0x4f4f4f00, 0x4e4e4e00, + 0x1d1d1d00, 0x65656500, 0x92929200, 0xbdbdbd00, + 0x86868600, 0xb8b8b800, 0xafafaf00, 0x8f8f8f00, + 0x7c7c7c00, 0xebebeb00, 0x1f1f1f00, 0xcecece00, + 0x3e3e3e00, 0x30303000, 0xdcdcdc00, 0x5f5f5f00, + 0x5e5e5e00, 0xc5c5c500, 0x0b0b0b00, 0x1a1a1a00, + 0xa6a6a600, 0xe1e1e100, 0x39393900, 0xcacaca00, + 0xd5d5d500, 0x47474700, 0x5d5d5d00, 0x3d3d3d00, + 0xd9d9d900, 0x01010100, 0x5a5a5a00, 0xd6d6d600, + 0x51515100, 0x56565600, 0x6c6c6c00, 0x4d4d4d00, + 0x8b8b8b00, 0x0d0d0d00, 0x9a9a9a00, 0x66666600, + 0xfbfbfb00, 0xcccccc00, 0xb0b0b000, 0x2d2d2d00, + 0x74747400, 0x12121200, 0x2b2b2b00, 0x20202000, + 0xf0f0f000, 0xb1b1b100, 0x84848400, 0x99999900, + 0xdfdfdf00, 0x4c4c4c00, 0xcbcbcb00, 0xc2c2c200, + 0x34343400, 0x7e7e7e00, 0x76767600, 0x05050500, + 0x6d6d6d00, 0xb7b7b700, 0xa9a9a900, 0x31313100, + 0xd1d1d100, 0x17171700, 0x04040400, 0xd7d7d700, + 0x14141400, 0x58585800, 0x3a3a3a00, 0x61616100, + 0xdedede00, 0x1b1b1b00, 0x11111100, 0x1c1c1c00, + 0x32323200, 0x0f0f0f00, 0x9c9c9c00, 0x16161600, + 0x53535300, 0x18181800, 0xf2f2f200, 0x22222200, + 0xfefefe00, 0x44444400, 0xcfcfcf00, 0xb2b2b200, + 0xc3c3c300, 0xb5b5b500, 0x7a7a7a00, 0x91919100, + 0x24242400, 0x08080800, 0xe8e8e800, 0xa8a8a800, + 0x60606000, 0xfcfcfc00, 0x69696900, 0x50505000, + 0xaaaaaa00, 0xd0d0d000, 0xa0a0a000, 0x7d7d7d00, + 0xa1a1a100, 0x89898900, 0x62626200, 0x97979700, + 0x54545400, 0x5b5b5b00, 0x1e1e1e00, 0x95959500, + 0xe0e0e000, 0xffffff00, 0x64646400, 0xd2d2d200, + 0x10101000, 0xc4c4c400, 0x00000000, 0x48484800, + 0xa3a3a300, 0xf7f7f700, 0x75757500, 0xdbdbdb00, + 0x8a8a8a00, 0x03030300, 0xe6e6e600, 0xdadada00, + 0x09090900, 0x3f3f3f00, 0xdddddd00, 0x94949400, + 0x87878700, 0x5c5c5c00, 0x83838300, 0x02020200, + 0xcdcdcd00, 0x4a4a4a00, 0x90909000, 0x33333300, + 0x73737300, 0x67676700, 0xf6f6f600, 
0xf3f3f300, + 0x9d9d9d00, 0x7f7f7f00, 0xbfbfbf00, 0xe2e2e200, + 0x52525200, 0x9b9b9b00, 0xd8d8d800, 0x26262600, + 0xc8c8c800, 0x37373700, 0xc6c6c600, 0x3b3b3b00, + 0x81818100, 0x96969600, 0x6f6f6f00, 0x4b4b4b00, + 0x13131300, 0xbebebe00, 0x63636300, 0x2e2e2e00, + 0xe9e9e900, 0x79797900, 0xa7a7a700, 0x8c8c8c00, + 0x9f9f9f00, 0x6e6e6e00, 0xbcbcbc00, 0x8e8e8e00, + 0x29292900, 0xf5f5f500, 0xf9f9f900, 0xb6b6b600, + 0x2f2f2f00, 0xfdfdfd00, 0xb4b4b400, 0x59595900, + 0x78787800, 0x98989800, 0x06060600, 0x6a6a6a00, + 0xe7e7e700, 0x46464600, 0x71717100, 0xbababa00, + 0xd4d4d400, 0x25252500, 0xababab00, 0x42424200, + 0x88888800, 0xa2a2a200, 0x8d8d8d00, 0xfafafa00, + 0x72727200, 0x07070700, 0xb9b9b900, 0x55555500, + 0xf8f8f800, 0xeeeeee00, 0xacacac00, 0x0a0a0a00, + 0x36363600, 0x49494900, 0x2a2a2a00, 0x68686800, + 0x3c3c3c00, 0x38383800, 0xf1f1f100, 0xa4a4a400, + 0x40404000, 0x28282800, 0xd3d3d300, 0x7b7b7b00, + 0xbbbbbb00, 0xc9c9c900, 0x43434300, 0xc1c1c100, + 0x15151500, 0xe3e3e300, 0xadadad00, 0xf4f4f400, + 0x77777700, 0xc7c7c700, 0x80808000, 0x9e9e9e00, +}; + +static const u32 camellia_sp0222[256] = { + 0x00e0e0e0, 0x00050505, 0x00585858, 0x00d9d9d9, + 0x00676767, 0x004e4e4e, 0x00818181, 0x00cbcbcb, + 0x00c9c9c9, 0x000b0b0b, 0x00aeaeae, 0x006a6a6a, + 0x00d5d5d5, 0x00181818, 0x005d5d5d, 0x00828282, + 0x00464646, 0x00dfdfdf, 0x00d6d6d6, 0x00272727, + 0x008a8a8a, 0x00323232, 0x004b4b4b, 0x00424242, + 0x00dbdbdb, 0x001c1c1c, 0x009e9e9e, 0x009c9c9c, + 0x003a3a3a, 0x00cacaca, 0x00252525, 0x007b7b7b, + 0x000d0d0d, 0x00717171, 0x005f5f5f, 0x001f1f1f, + 0x00f8f8f8, 0x00d7d7d7, 0x003e3e3e, 0x009d9d9d, + 0x007c7c7c, 0x00606060, 0x00b9b9b9, 0x00bebebe, + 0x00bcbcbc, 0x008b8b8b, 0x00161616, 0x00343434, + 0x004d4d4d, 0x00c3c3c3, 0x00727272, 0x00959595, + 0x00ababab, 0x008e8e8e, 0x00bababa, 0x007a7a7a, + 0x00b3b3b3, 0x00020202, 0x00b4b4b4, 0x00adadad, + 0x00a2a2a2, 0x00acacac, 0x00d8d8d8, 0x009a9a9a, + 0x00171717, 0x001a1a1a, 0x00353535, 0x00cccccc, + 0x00f7f7f7, 0x00999999, 0x00616161, 0x005a5a5a, + 0x00e8e8e8, 0x00242424, 0x00565656, 0x00404040, + 0x00e1e1e1, 0x00636363, 0x00090909, 0x00333333, + 0x00bfbfbf, 0x00989898, 0x00979797, 0x00858585, + 0x00686868, 0x00fcfcfc, 0x00ececec, 0x000a0a0a, + 0x00dadada, 0x006f6f6f, 0x00535353, 0x00626262, + 0x00a3a3a3, 0x002e2e2e, 0x00080808, 0x00afafaf, + 0x00282828, 0x00b0b0b0, 0x00747474, 0x00c2c2c2, + 0x00bdbdbd, 0x00363636, 0x00222222, 0x00383838, + 0x00646464, 0x001e1e1e, 0x00393939, 0x002c2c2c, + 0x00a6a6a6, 0x00303030, 0x00e5e5e5, 0x00444444, + 0x00fdfdfd, 0x00888888, 0x009f9f9f, 0x00656565, + 0x00878787, 0x006b6b6b, 0x00f4f4f4, 0x00232323, + 0x00484848, 0x00101010, 0x00d1d1d1, 0x00515151, + 0x00c0c0c0, 0x00f9f9f9, 0x00d2d2d2, 0x00a0a0a0, + 0x00555555, 0x00a1a1a1, 0x00414141, 0x00fafafa, + 0x00434343, 0x00131313, 0x00c4c4c4, 0x002f2f2f, + 0x00a8a8a8, 0x00b6b6b6, 0x003c3c3c, 0x002b2b2b, + 0x00c1c1c1, 0x00ffffff, 0x00c8c8c8, 0x00a5a5a5, + 0x00202020, 0x00898989, 0x00000000, 0x00909090, + 0x00474747, 0x00efefef, 0x00eaeaea, 0x00b7b7b7, + 0x00151515, 0x00060606, 0x00cdcdcd, 0x00b5b5b5, + 0x00121212, 0x007e7e7e, 0x00bbbbbb, 0x00292929, + 0x000f0f0f, 0x00b8b8b8, 0x00070707, 0x00040404, + 0x009b9b9b, 0x00949494, 0x00212121, 0x00666666, + 0x00e6e6e6, 0x00cecece, 0x00ededed, 0x00e7e7e7, + 0x003b3b3b, 0x00fefefe, 0x007f7f7f, 0x00c5c5c5, + 0x00a4a4a4, 0x00373737, 0x00b1b1b1, 0x004c4c4c, + 0x00919191, 0x006e6e6e, 0x008d8d8d, 0x00767676, + 0x00030303, 0x002d2d2d, 0x00dedede, 0x00969696, + 0x00262626, 0x007d7d7d, 0x00c6c6c6, 0x005c5c5c, + 0x00d3d3d3, 0x00f2f2f2, 0x004f4f4f, 
0x00191919, + 0x003f3f3f, 0x00dcdcdc, 0x00797979, 0x001d1d1d, + 0x00525252, 0x00ebebeb, 0x00f3f3f3, 0x006d6d6d, + 0x005e5e5e, 0x00fbfbfb, 0x00696969, 0x00b2b2b2, + 0x00f0f0f0, 0x00313131, 0x000c0c0c, 0x00d4d4d4, + 0x00cfcfcf, 0x008c8c8c, 0x00e2e2e2, 0x00757575, + 0x00a9a9a9, 0x004a4a4a, 0x00575757, 0x00848484, + 0x00111111, 0x00454545, 0x001b1b1b, 0x00f5f5f5, + 0x00e4e4e4, 0x000e0e0e, 0x00737373, 0x00aaaaaa, + 0x00f1f1f1, 0x00dddddd, 0x00595959, 0x00141414, + 0x006c6c6c, 0x00929292, 0x00545454, 0x00d0d0d0, + 0x00787878, 0x00707070, 0x00e3e3e3, 0x00494949, + 0x00808080, 0x00505050, 0x00a7a7a7, 0x00f6f6f6, + 0x00777777, 0x00939393, 0x00868686, 0x00838383, + 0x002a2a2a, 0x00c7c7c7, 0x005b5b5b, 0x00e9e9e9, + 0x00eeeeee, 0x008f8f8f, 0x00010101, 0x003d3d3d, +}; + +static const u32 camellia_sp3033[256] = { + 0x38003838, 0x41004141, 0x16001616, 0x76007676, + 0xd900d9d9, 0x93009393, 0x60006060, 0xf200f2f2, + 0x72007272, 0xc200c2c2, 0xab00abab, 0x9a009a9a, + 0x75007575, 0x06000606, 0x57005757, 0xa000a0a0, + 0x91009191, 0xf700f7f7, 0xb500b5b5, 0xc900c9c9, + 0xa200a2a2, 0x8c008c8c, 0xd200d2d2, 0x90009090, + 0xf600f6f6, 0x07000707, 0xa700a7a7, 0x27002727, + 0x8e008e8e, 0xb200b2b2, 0x49004949, 0xde00dede, + 0x43004343, 0x5c005c5c, 0xd700d7d7, 0xc700c7c7, + 0x3e003e3e, 0xf500f5f5, 0x8f008f8f, 0x67006767, + 0x1f001f1f, 0x18001818, 0x6e006e6e, 0xaf00afaf, + 0x2f002f2f, 0xe200e2e2, 0x85008585, 0x0d000d0d, + 0x53005353, 0xf000f0f0, 0x9c009c9c, 0x65006565, + 0xea00eaea, 0xa300a3a3, 0xae00aeae, 0x9e009e9e, + 0xec00ecec, 0x80008080, 0x2d002d2d, 0x6b006b6b, + 0xa800a8a8, 0x2b002b2b, 0x36003636, 0xa600a6a6, + 0xc500c5c5, 0x86008686, 0x4d004d4d, 0x33003333, + 0xfd00fdfd, 0x66006666, 0x58005858, 0x96009696, + 0x3a003a3a, 0x09000909, 0x95009595, 0x10001010, + 0x78007878, 0xd800d8d8, 0x42004242, 0xcc00cccc, + 0xef00efef, 0x26002626, 0xe500e5e5, 0x61006161, + 0x1a001a1a, 0x3f003f3f, 0x3b003b3b, 0x82008282, + 0xb600b6b6, 0xdb00dbdb, 0xd400d4d4, 0x98009898, + 0xe800e8e8, 0x8b008b8b, 0x02000202, 0xeb00ebeb, + 0x0a000a0a, 0x2c002c2c, 0x1d001d1d, 0xb000b0b0, + 0x6f006f6f, 0x8d008d8d, 0x88008888, 0x0e000e0e, + 0x19001919, 0x87008787, 0x4e004e4e, 0x0b000b0b, + 0xa900a9a9, 0x0c000c0c, 0x79007979, 0x11001111, + 0x7f007f7f, 0x22002222, 0xe700e7e7, 0x59005959, + 0xe100e1e1, 0xda00dada, 0x3d003d3d, 0xc800c8c8, + 0x12001212, 0x04000404, 0x74007474, 0x54005454, + 0x30003030, 0x7e007e7e, 0xb400b4b4, 0x28002828, + 0x55005555, 0x68006868, 0x50005050, 0xbe00bebe, + 0xd000d0d0, 0xc400c4c4, 0x31003131, 0xcb00cbcb, + 0x2a002a2a, 0xad00adad, 0x0f000f0f, 0xca00caca, + 0x70007070, 0xff00ffff, 0x32003232, 0x69006969, + 0x08000808, 0x62006262, 0x00000000, 0x24002424, + 0xd100d1d1, 0xfb00fbfb, 0xba00baba, 0xed00eded, + 0x45004545, 0x81008181, 0x73007373, 0x6d006d6d, + 0x84008484, 0x9f009f9f, 0xee00eeee, 0x4a004a4a, + 0xc300c3c3, 0x2e002e2e, 0xc100c1c1, 0x01000101, + 0xe600e6e6, 0x25002525, 0x48004848, 0x99009999, + 0xb900b9b9, 0xb300b3b3, 0x7b007b7b, 0xf900f9f9, + 0xce00cece, 0xbf00bfbf, 0xdf00dfdf, 0x71007171, + 0x29002929, 0xcd00cdcd, 0x6c006c6c, 0x13001313, + 0x64006464, 0x9b009b9b, 0x63006363, 0x9d009d9d, + 0xc000c0c0, 0x4b004b4b, 0xb700b7b7, 0xa500a5a5, + 0x89008989, 0x5f005f5f, 0xb100b1b1, 0x17001717, + 0xf400f4f4, 0xbc00bcbc, 0xd300d3d3, 0x46004646, + 0xcf00cfcf, 0x37003737, 0x5e005e5e, 0x47004747, + 0x94009494, 0xfa00fafa, 0xfc00fcfc, 0x5b005b5b, + 0x97009797, 0xfe00fefe, 0x5a005a5a, 0xac00acac, + 0x3c003c3c, 0x4c004c4c, 0x03000303, 0x35003535, + 0xf300f3f3, 0x23002323, 0xb800b8b8, 0x5d005d5d, + 0x6a006a6a, 0x92009292, 0xd500d5d5, 
0x21002121, + 0x44004444, 0x51005151, 0xc600c6c6, 0x7d007d7d, + 0x39003939, 0x83008383, 0xdc00dcdc, 0xaa00aaaa, + 0x7c007c7c, 0x77007777, 0x56005656, 0x05000505, + 0x1b001b1b, 0xa400a4a4, 0x15001515, 0x34003434, + 0x1e001e1e, 0x1c001c1c, 0xf800f8f8, 0x52005252, + 0x20002020, 0x14001414, 0xe900e9e9, 0xbd00bdbd, + 0xdd00dddd, 0xe400e4e4, 0xa100a1a1, 0xe000e0e0, + 0x8a008a8a, 0xf100f1f1, 0xd600d6d6, 0x7a007a7a, + 0xbb00bbbb, 0xe300e3e3, 0x40004040, 0x4f004f4f, +}; + +static const u32 camellia_sp4404[256] = { + 0x70700070, 0x2c2c002c, 0xb3b300b3, 0xc0c000c0, + 0xe4e400e4, 0x57570057, 0xeaea00ea, 0xaeae00ae, + 0x23230023, 0x6b6b006b, 0x45450045, 0xa5a500a5, + 0xeded00ed, 0x4f4f004f, 0x1d1d001d, 0x92920092, + 0x86860086, 0xafaf00af, 0x7c7c007c, 0x1f1f001f, + 0x3e3e003e, 0xdcdc00dc, 0x5e5e005e, 0x0b0b000b, + 0xa6a600a6, 0x39390039, 0xd5d500d5, 0x5d5d005d, + 0xd9d900d9, 0x5a5a005a, 0x51510051, 0x6c6c006c, + 0x8b8b008b, 0x9a9a009a, 0xfbfb00fb, 0xb0b000b0, + 0x74740074, 0x2b2b002b, 0xf0f000f0, 0x84840084, + 0xdfdf00df, 0xcbcb00cb, 0x34340034, 0x76760076, + 0x6d6d006d, 0xa9a900a9, 0xd1d100d1, 0x04040004, + 0x14140014, 0x3a3a003a, 0xdede00de, 0x11110011, + 0x32320032, 0x9c9c009c, 0x53530053, 0xf2f200f2, + 0xfefe00fe, 0xcfcf00cf, 0xc3c300c3, 0x7a7a007a, + 0x24240024, 0xe8e800e8, 0x60600060, 0x69690069, + 0xaaaa00aa, 0xa0a000a0, 0xa1a100a1, 0x62620062, + 0x54540054, 0x1e1e001e, 0xe0e000e0, 0x64640064, + 0x10100010, 0x00000000, 0xa3a300a3, 0x75750075, + 0x8a8a008a, 0xe6e600e6, 0x09090009, 0xdddd00dd, + 0x87870087, 0x83830083, 0xcdcd00cd, 0x90900090, + 0x73730073, 0xf6f600f6, 0x9d9d009d, 0xbfbf00bf, + 0x52520052, 0xd8d800d8, 0xc8c800c8, 0xc6c600c6, + 0x81810081, 0x6f6f006f, 0x13130013, 0x63630063, + 0xe9e900e9, 0xa7a700a7, 0x9f9f009f, 0xbcbc00bc, + 0x29290029, 0xf9f900f9, 0x2f2f002f, 0xb4b400b4, + 0x78780078, 0x06060006, 0xe7e700e7, 0x71710071, + 0xd4d400d4, 0xabab00ab, 0x88880088, 0x8d8d008d, + 0x72720072, 0xb9b900b9, 0xf8f800f8, 0xacac00ac, + 0x36360036, 0x2a2a002a, 0x3c3c003c, 0xf1f100f1, + 0x40400040, 0xd3d300d3, 0xbbbb00bb, 0x43430043, + 0x15150015, 0xadad00ad, 0x77770077, 0x80800080, + 0x82820082, 0xecec00ec, 0x27270027, 0xe5e500e5, + 0x85850085, 0x35350035, 0x0c0c000c, 0x41410041, + 0xefef00ef, 0x93930093, 0x19190019, 0x21210021, + 0x0e0e000e, 0x4e4e004e, 0x65650065, 0xbdbd00bd, + 0xb8b800b8, 0x8f8f008f, 0xebeb00eb, 0xcece00ce, + 0x30300030, 0x5f5f005f, 0xc5c500c5, 0x1a1a001a, + 0xe1e100e1, 0xcaca00ca, 0x47470047, 0x3d3d003d, + 0x01010001, 0xd6d600d6, 0x56560056, 0x4d4d004d, + 0x0d0d000d, 0x66660066, 0xcccc00cc, 0x2d2d002d, + 0x12120012, 0x20200020, 0xb1b100b1, 0x99990099, + 0x4c4c004c, 0xc2c200c2, 0x7e7e007e, 0x05050005, + 0xb7b700b7, 0x31310031, 0x17170017, 0xd7d700d7, + 0x58580058, 0x61610061, 0x1b1b001b, 0x1c1c001c, + 0x0f0f000f, 0x16160016, 0x18180018, 0x22220022, + 0x44440044, 0xb2b200b2, 0xb5b500b5, 0x91910091, + 0x08080008, 0xa8a800a8, 0xfcfc00fc, 0x50500050, + 0xd0d000d0, 0x7d7d007d, 0x89890089, 0x97970097, + 0x5b5b005b, 0x95950095, 0xffff00ff, 0xd2d200d2, + 0xc4c400c4, 0x48480048, 0xf7f700f7, 0xdbdb00db, + 0x03030003, 0xdada00da, 0x3f3f003f, 0x94940094, + 0x5c5c005c, 0x02020002, 0x4a4a004a, 0x33330033, + 0x67670067, 0xf3f300f3, 0x7f7f007f, 0xe2e200e2, + 0x9b9b009b, 0x26260026, 0x37370037, 0x3b3b003b, + 0x96960096, 0x4b4b004b, 0xbebe00be, 0x2e2e002e, + 0x79790079, 0x8c8c008c, 0x6e6e006e, 0x8e8e008e, + 0xf5f500f5, 0xb6b600b6, 0xfdfd00fd, 0x59590059, + 0x98980098, 0x6a6a006a, 0x46460046, 0xbaba00ba, + 0x25250025, 0x42420042, 0xa2a200a2, 0xfafa00fa, + 0x07070007, 0x55550055, 0xeeee00ee, 
0x0a0a000a, + 0x49490049, 0x68680068, 0x38380038, 0xa4a400a4, + 0x28280028, 0x7b7b007b, 0xc9c900c9, 0xc1c100c1, + 0xe3e300e3, 0xf4f400f4, 0xc7c700c7, 0x9e9e009e, +}; + + +#define CAMELLIA_MIN_KEY_SIZE 16 +#define CAMELLIA_MAX_KEY_SIZE 32 +#define CAMELLIA_BLOCK_SIZE 16 +#define CAMELLIA_TABLE_BYTE_LEN 272 + +/* + * NB: L and R below stand for 'left' and 'right' as in written numbers. + * That is, in (xxxL,xxxR) pair xxxL holds most significant digits, + * _not_ least significant ones! + */ + + +/* key constants */ + +#define CAMELLIA_SIGMA1L (0xA09E667FL) +#define CAMELLIA_SIGMA1R (0x3BCC908BL) +#define CAMELLIA_SIGMA2L (0xB67AE858L) +#define CAMELLIA_SIGMA2R (0x4CAA73B2L) +#define CAMELLIA_SIGMA3L (0xC6EF372FL) +#define CAMELLIA_SIGMA3R (0xE94F82BEL) +#define CAMELLIA_SIGMA4L (0x54FF53A5L) +#define CAMELLIA_SIGMA4R (0xF1D36F1CL) +#define CAMELLIA_SIGMA5L (0x10E527FAL) +#define CAMELLIA_SIGMA5R (0xDE682D1DL) +#define CAMELLIA_SIGMA6L (0xB05688C2L) +#define CAMELLIA_SIGMA6R (0xB3E6C1FDL) + +/* + * macros + */ +#define ROLDQ(ll, lr, rl, rr, w0, w1, bits) ({ \ + w0 = ll; \ + ll = (ll << bits) + (lr >> (32 - bits)); \ + lr = (lr << bits) + (rl >> (32 - bits)); \ + rl = (rl << bits) + (rr >> (32 - bits)); \ + rr = (rr << bits) + (w0 >> (32 - bits)); \ +}) + +#define ROLDQo32(ll, lr, rl, rr, w0, w1, bits) ({ \ + w0 = ll; \ + w1 = lr; \ + ll = (lr << (bits - 32)) + (rl >> (64 - bits)); \ + lr = (rl << (bits - 32)) + (rr >> (64 - bits)); \ + rl = (rr << (bits - 32)) + (w0 >> (64 - bits)); \ + rr = (w0 << (bits - 32)) + (w1 >> (64 - bits)); \ +}) + +#define CAMELLIA_F(xl, xr, kl, kr, yl, yr, il, ir, t0, t1) ({ \ + il = xl ^ kl; \ + ir = xr ^ kr; \ + t0 = il >> 16; \ + t1 = ir >> 16; \ + yl = camellia_sp1110[(u8)(ir)] \ + ^ camellia_sp0222[(u8)(t1 >> 8)] \ + ^ camellia_sp3033[(u8)(t1)] \ + ^ camellia_sp4404[(u8)(ir >> 8)]; \ + yr = camellia_sp1110[(u8)(t0 >> 8)] \ + ^ camellia_sp0222[(u8)(t0)] \ + ^ camellia_sp3033[(u8)(il >> 8)] \ + ^ camellia_sp4404[(u8)(il)]; \ + yl ^= yr; \ + yr = ror32(yr, 8); \ + yr ^= yl; \ +}) + +#define SUBKEY_L(INDEX) (subkey[(INDEX)*2]) +#define SUBKEY_R(INDEX) (subkey[(INDEX)*2 + 1]) + +static void camellia_setup_tail(u32 *subkey, u32 *subL, u32 *subR, int max) +{ + u32 dw, tl, tr; + u32 kw4l, kw4r; + + /* absorb kw2 to other subkeys */ + /* round 2 */ + subL[3] ^= subL[1]; subR[3] ^= subR[1]; + /* round 4 */ + subL[5] ^= subL[1]; subR[5] ^= subR[1]; + /* round 6 */ + subL[7] ^= subL[1]; subR[7] ^= subR[1]; + subL[1] ^= subR[1] & ~subR[9]; + dw = subL[1] & subL[9], + subR[1] ^= rol32(dw, 1); /* modified for FLinv(kl2) */ + /* round 8 */ + subL[11] ^= subL[1]; subR[11] ^= subR[1]; + /* round 10 */ + subL[13] ^= subL[1]; subR[13] ^= subR[1]; + /* round 12 */ + subL[15] ^= subL[1]; subR[15] ^= subR[1]; + subL[1] ^= subR[1] & ~subR[17]; + dw = subL[1] & subL[17], + subR[1] ^= rol32(dw, 1); /* modified for FLinv(kl4) */ + /* round 14 */ + subL[19] ^= subL[1]; subR[19] ^= subR[1]; + /* round 16 */ + subL[21] ^= subL[1]; subR[21] ^= subR[1]; + /* round 18 */ + subL[23] ^= subL[1]; subR[23] ^= subR[1]; + if (max == 24) { + /* kw3 */ + subL[24] ^= subL[1]; subR[24] ^= subR[1]; + + /* absorb kw4 to other subkeys */ + kw4l = subL[25]; kw4r = subR[25]; + } else { + subL[1] ^= subR[1] & ~subR[25]; + dw = subL[1] & subL[25], + subR[1] ^= rol32(dw, 1); /* modified for FLinv(kl6) */ + /* round 20 */ + subL[27] ^= subL[1]; subR[27] ^= subR[1]; + /* round 22 */ + subL[29] ^= subL[1]; subR[29] ^= subR[1]; + /* round 24 */ + subL[31] ^= subL[1]; subR[31] ^= subR[1]; + /* kw3 */ + subL[32] 
^= subL[1]; subR[32] ^= subR[1]; + + /* absorb kw4 to other subkeys */ + kw4l = subL[33]; kw4r = subR[33]; + /* round 23 */ + subL[30] ^= kw4l; subR[30] ^= kw4r; + /* round 21 */ + subL[28] ^= kw4l; subR[28] ^= kw4r; + /* round 19 */ + subL[26] ^= kw4l; subR[26] ^= kw4r; + kw4l ^= kw4r & ~subR[24]; + dw = kw4l & subL[24], + kw4r ^= rol32(dw, 1); /* modified for FL(kl5) */ + } + /* round 17 */ + subL[22] ^= kw4l; subR[22] ^= kw4r; + /* round 15 */ + subL[20] ^= kw4l; subR[20] ^= kw4r; + /* round 13 */ + subL[18] ^= kw4l; subR[18] ^= kw4r; + kw4l ^= kw4r & ~subR[16]; + dw = kw4l & subL[16], + kw4r ^= rol32(dw, 1); /* modified for FL(kl3) */ + /* round 11 */ + subL[14] ^= kw4l; subR[14] ^= kw4r; + /* round 9 */ + subL[12] ^= kw4l; subR[12] ^= kw4r; + /* round 7 */ + subL[10] ^= kw4l; subR[10] ^= kw4r; + kw4l ^= kw4r & ~subR[8]; + dw = kw4l & subL[8], + kw4r ^= rol32(dw, 1); /* modified for FL(kl1) */ + /* round 5 */ + subL[6] ^= kw4l; subR[6] ^= kw4r; + /* round 3 */ + subL[4] ^= kw4l; subR[4] ^= kw4r; + /* round 1 */ + subL[2] ^= kw4l; subR[2] ^= kw4r; + /* kw1 */ + subL[0] ^= kw4l; subR[0] ^= kw4r; + + /* key XOR is end of F-function */ + SUBKEY_L(0) = subL[0] ^ subL[2];/* kw1 */ + SUBKEY_R(0) = subR[0] ^ subR[2]; + SUBKEY_L(2) = subL[3]; /* round 1 */ + SUBKEY_R(2) = subR[3]; + SUBKEY_L(3) = subL[2] ^ subL[4]; /* round 2 */ + SUBKEY_R(3) = subR[2] ^ subR[4]; + SUBKEY_L(4) = subL[3] ^ subL[5]; /* round 3 */ + SUBKEY_R(4) = subR[3] ^ subR[5]; + SUBKEY_L(5) = subL[4] ^ subL[6]; /* round 4 */ + SUBKEY_R(5) = subR[4] ^ subR[6]; + SUBKEY_L(6) = subL[5] ^ subL[7]; /* round 5 */ + SUBKEY_R(6) = subR[5] ^ subR[7]; + tl = subL[10] ^ (subR[10] & ~subR[8]); + dw = tl & subL[8], /* FL(kl1) */ + tr = subR[10] ^ rol32(dw, 1); + SUBKEY_L(7) = subL[6] ^ tl; /* round 6 */ + SUBKEY_R(7) = subR[6] ^ tr; + SUBKEY_L(8) = subL[8]; /* FL(kl1) */ + SUBKEY_R(8) = subR[8]; + SUBKEY_L(9) = subL[9]; /* FLinv(kl2) */ + SUBKEY_R(9) = subR[9]; + tl = subL[7] ^ (subR[7] & ~subR[9]); + dw = tl & subL[9], /* FLinv(kl2) */ + tr = subR[7] ^ rol32(dw, 1); + SUBKEY_L(10) = tl ^ subL[11]; /* round 7 */ + SUBKEY_R(10) = tr ^ subR[11]; + SUBKEY_L(11) = subL[10] ^ subL[12]; /* round 8 */ + SUBKEY_R(11) = subR[10] ^ subR[12]; + SUBKEY_L(12) = subL[11] ^ subL[13]; /* round 9 */ + SUBKEY_R(12) = subR[11] ^ subR[13]; + SUBKEY_L(13) = subL[12] ^ subL[14]; /* round 10 */ + SUBKEY_R(13) = subR[12] ^ subR[14]; + SUBKEY_L(14) = subL[13] ^ subL[15]; /* round 11 */ + SUBKEY_R(14) = subR[13] ^ subR[15]; + tl = subL[18] ^ (subR[18] & ~subR[16]); + dw = tl & subL[16], /* FL(kl3) */ + tr = subR[18] ^ rol32(dw, 1); + SUBKEY_L(15) = subL[14] ^ tl; /* round 12 */ + SUBKEY_R(15) = subR[14] ^ tr; + SUBKEY_L(16) = subL[16]; /* FL(kl3) */ + SUBKEY_R(16) = subR[16]; + SUBKEY_L(17) = subL[17]; /* FLinv(kl4) */ + SUBKEY_R(17) = subR[17]; + tl = subL[15] ^ (subR[15] & ~subR[17]); + dw = tl & subL[17], /* FLinv(kl4) */ + tr = subR[15] ^ rol32(dw, 1); + SUBKEY_L(18) = tl ^ subL[19]; /* round 13 */ + SUBKEY_R(18) = tr ^ subR[19]; + SUBKEY_L(19) = subL[18] ^ subL[20]; /* round 14 */ + SUBKEY_R(19) = subR[18] ^ subR[20]; + SUBKEY_L(20) = subL[19] ^ subL[21]; /* round 15 */ + SUBKEY_R(20) = subR[19] ^ subR[21]; + SUBKEY_L(21) = subL[20] ^ subL[22]; /* round 16 */ + SUBKEY_R(21) = subR[20] ^ subR[22]; + SUBKEY_L(22) = subL[21] ^ subL[23]; /* round 17 */ + SUBKEY_R(22) = subR[21] ^ subR[23]; + if (max == 24) { + SUBKEY_L(23) = subL[22]; /* round 18 */ + SUBKEY_R(23) = subR[22]; + SUBKEY_L(24) = subL[24] ^ subL[23]; /* kw3 */ + SUBKEY_R(24) = subR[24] ^ subR[23]; + 
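/* Note: 128-bit keys stop at 18 rounds and finish here; 192/256-bit keys continue in the branch below with a third FL/FLinv layer and rounds 19-24. */ +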
} else { + tl = subL[26] ^ (subR[26] & ~subR[24]); + dw = tl & subL[24], /* FL(kl5) */ + tr = subR[26] ^ rol32(dw, 1); + SUBKEY_L(23) = subL[22] ^ tl; /* round 18 */ + SUBKEY_R(23) = subR[22] ^ tr; + SUBKEY_L(24) = subL[24]; /* FL(kl5) */ + SUBKEY_R(24) = subR[24]; + SUBKEY_L(25) = subL[25]; /* FLinv(kl6) */ + SUBKEY_R(25) = subR[25]; + tl = subL[23] ^ (subR[23] & ~subR[25]); + dw = tl & subL[25], /* FLinv(kl6) */ + tr = subR[23] ^ rol32(dw, 1); + SUBKEY_L(26) = tl ^ subL[27]; /* round 19 */ + SUBKEY_R(26) = tr ^ subR[27]; + SUBKEY_L(27) = subL[26] ^ subL[28]; /* round 20 */ + SUBKEY_R(27) = subR[26] ^ subR[28]; + SUBKEY_L(28) = subL[27] ^ subL[29]; /* round 21 */ + SUBKEY_R(28) = subR[27] ^ subR[29]; + SUBKEY_L(29) = subL[28] ^ subL[30]; /* round 22 */ + SUBKEY_R(29) = subR[28] ^ subR[30]; + SUBKEY_L(30) = subL[29] ^ subL[31]; /* round 23 */ + SUBKEY_R(30) = subR[29] ^ subR[31]; + SUBKEY_L(31) = subL[30]; /* round 24 */ + SUBKEY_R(31) = subR[30]; + SUBKEY_L(32) = subL[32] ^ subL[31]; /* kw3 */ + SUBKEY_R(32) = subR[32] ^ subR[31]; + } +} + +static void camellia_setup128(const unsigned char *key, u32 *subkey) +{ + u32 kll, klr, krl, krr; + u32 il, ir, t0, t1, w0, w1; + u32 subL[26]; + u32 subR[26]; + + /** + * k == kll || klr || krl || krr (|| is concatenation) + */ + kll = get_unaligned_be32(key); + klr = get_unaligned_be32(key + 4); + krl = get_unaligned_be32(key + 8); + krr = get_unaligned_be32(key + 12); + + /* generate KL dependent subkeys */ + /* kw1 */ + subL[0] = kll; subR[0] = klr; + /* kw2 */ + subL[1] = krl; subR[1] = krr; + /* rotation left shift 15bit */ + ROLDQ(kll, klr, krl, krr, w0, w1, 15); + /* k3 */ + subL[4] = kll; subR[4] = klr; + /* k4 */ + subL[5] = krl; subR[5] = krr; + /* rotation left shift 15+30bit */ + ROLDQ(kll, klr, krl, krr, w0, w1, 30); + /* k7 */ + subL[10] = kll; subR[10] = klr; + /* k8 */ + subL[11] = krl; subR[11] = krr; + /* rotation left shift 15+30+15bit */ + ROLDQ(kll, klr, krl, krr, w0, w1, 15); + /* k10 */ + subL[13] = krl; subR[13] = krr; + /* rotation left shift 15+30+15+17 bit */ + ROLDQ(kll, klr, krl, krr, w0, w1, 17); + /* kl3 */ + subL[16] = kll; subR[16] = klr; + /* kl4 */ + subL[17] = krl; subR[17] = krr; + /* rotation left shift 15+30+15+17+17 bit */ + ROLDQ(kll, klr, krl, krr, w0, w1, 17); + /* k13 */ + subL[18] = kll; subR[18] = klr; + /* k14 */ + subL[19] = krl; subR[19] = krr; + /* rotation left shift 15+30+15+17+17+17 bit */ + ROLDQ(kll, klr, krl, krr, w0, w1, 17); + /* k17 */ + subL[22] = kll; subR[22] = klr; + /* k18 */ + subL[23] = krl; subR[23] = krr; + + /* generate KA */ + kll = subL[0]; klr = subR[0]; + krl = subL[1]; krr = subR[1]; + CAMELLIA_F(kll, klr, + CAMELLIA_SIGMA1L, CAMELLIA_SIGMA1R, + w0, w1, il, ir, t0, t1); + krl ^= w0; krr ^= w1; + CAMELLIA_F(krl, krr, + CAMELLIA_SIGMA2L, CAMELLIA_SIGMA2R, + kll, klr, il, ir, t0, t1); + /* current status == (kll, klr, w0, w1) */ + CAMELLIA_F(kll, klr, + CAMELLIA_SIGMA3L, CAMELLIA_SIGMA3R, + krl, krr, il, ir, t0, t1); + krl ^= w0; krr ^= w1; + CAMELLIA_F(krl, krr, + CAMELLIA_SIGMA4L, CAMELLIA_SIGMA4R, + w0, w1, il, ir, t0, t1); + kll ^= w0; klr ^= w1; + + /* generate KA dependent subkeys */ + /* k1, k2 */ + subL[2] = kll; subR[2] = klr; + subL[3] = krl; subR[3] = krr; + ROLDQ(kll, klr, krl, krr, w0, w1, 15); + /* k5,k6 */ + subL[6] = kll; subR[6] = klr; + subL[7] = krl; subR[7] = krr; + ROLDQ(kll, klr, krl, krr, w0, w1, 15); + /* kl1, kl2 */ + subL[8] = kll; subR[8] = klr; + subL[9] = krl; subR[9] = krr; + ROLDQ(kll, klr, krl, krr, w0, w1, 15); + /* k9 */ + subL[12] = kll; subR[12] 
= klr; + ROLDQ(kll, klr, krl, krr, w0, w1, 15); + /* k11, k12 */ + subL[14] = kll; subR[14] = klr; + subL[15] = krl; subR[15] = krr; + ROLDQo32(kll, klr, krl, krr, w0, w1, 34); + /* k15, k16 */ + subL[20] = kll; subR[20] = klr; + subL[21] = krl; subR[21] = krr; + ROLDQ(kll, klr, krl, krr, w0, w1, 17); + /* kw3, kw4 */ + subL[24] = kll; subR[24] = klr; + subL[25] = krl; subR[25] = krr; + + camellia_setup_tail(subkey, subL, subR, 24); +} + +static void camellia_setup256(const unsigned char *key, u32 *subkey) +{ + u32 kll, klr, krl, krr; /* left half of key */ + u32 krll, krlr, krrl, krrr; /* right half of key */ + u32 il, ir, t0, t1, w0, w1; /* temporary variables */ + u32 subL[34]; + u32 subR[34]; + + /** + * key = (kll || klr || krl || krr || krll || krlr || krrl || krrr) + * (|| is concatenation) + */ + kll = get_unaligned_be32(key); + klr = get_unaligned_be32(key + 4); + krl = get_unaligned_be32(key + 8); + krr = get_unaligned_be32(key + 12); + krll = get_unaligned_be32(key + 16); + krlr = get_unaligned_be32(key + 20); + krrl = get_unaligned_be32(key + 24); + krrr = get_unaligned_be32(key + 28); + + /* generate KL dependent subkeys */ + /* kw1 */ + subL[0] = kll; subR[0] = klr; + /* kw2 */ + subL[1] = krl; subR[1] = krr; + ROLDQo32(kll, klr, krl, krr, w0, w1, 45); + /* k9 */ + subL[12] = kll; subR[12] = klr; + /* k10 */ + subL[13] = krl; subR[13] = krr; + ROLDQ(kll, klr, krl, krr, w0, w1, 15); + /* kl3 */ + subL[16] = kll; subR[16] = klr; + /* kl4 */ + subL[17] = krl; subR[17] = krr; + ROLDQ(kll, klr, krl, krr, w0, w1, 17); + /* k17 */ + subL[22] = kll; subR[22] = klr; + /* k18 */ + subL[23] = krl; subR[23] = krr; + ROLDQo32(kll, klr, krl, krr, w0, w1, 34); + /* k23 */ + subL[30] = kll; subR[30] = klr; + /* k24 */ + subL[31] = krl; subR[31] = krr; + + /* generate KR dependent subkeys */ + ROLDQ(krll, krlr, krrl, krrr, w0, w1, 15); + /* k3 */ + subL[4] = krll; subR[4] = krlr; + /* k4 */ + subL[5] = krrl; subR[5] = krrr; + ROLDQ(krll, krlr, krrl, krrr, w0, w1, 15); + /* kl1 */ + subL[8] = krll; subR[8] = krlr; + /* kl2 */ + subL[9] = krrl; subR[9] = krrr; + ROLDQ(krll, krlr, krrl, krrr, w0, w1, 30); + /* k13 */ + subL[18] = krll; subR[18] = krlr; + /* k14 */ + subL[19] = krrl; subR[19] = krrr; + ROLDQo32(krll, krlr, krrl, krrr, w0, w1, 34); + /* k19 */ + subL[26] = krll; subR[26] = krlr; + /* k20 */ + subL[27] = krrl; subR[27] = krrr; + ROLDQo32(krll, krlr, krrl, krrr, w0, w1, 34); + + /* generate KA */ + kll = subL[0] ^ krll; klr = subR[0] ^ krlr; + krl = subL[1] ^ krrl; krr = subR[1] ^ krrr; + CAMELLIA_F(kll, klr, + CAMELLIA_SIGMA1L, CAMELLIA_SIGMA1R, + w0, w1, il, ir, t0, t1); + krl ^= w0; krr ^= w1; + CAMELLIA_F(krl, krr, + CAMELLIA_SIGMA2L, CAMELLIA_SIGMA2R, + kll, klr, il, ir, t0, t1); + kll ^= krll; klr ^= krlr; + CAMELLIA_F(kll, klr, + CAMELLIA_SIGMA3L, CAMELLIA_SIGMA3R, + krl, krr, il, ir, t0, t1); + krl ^= w0 ^ krrl; krr ^= w1 ^ krrr; + CAMELLIA_F(krl, krr, + CAMELLIA_SIGMA4L, CAMELLIA_SIGMA4R, + w0, w1, il, ir, t0, t1); + kll ^= w0; klr ^= w1; + + /* generate KB */ + krll ^= kll; krlr ^= klr; + krrl ^= krl; krrr ^= krr; + CAMELLIA_F(krll, krlr, + CAMELLIA_SIGMA5L, CAMELLIA_SIGMA5R, + w0, w1, il, ir, t0, t1); + krrl ^= w0; krrr ^= w1; + CAMELLIA_F(krrl, krrr, + CAMELLIA_SIGMA6L, CAMELLIA_SIGMA6R, + w0, w1, il, ir, t0, t1); + krll ^= w0; krlr ^= w1; + + /* generate KA dependent subkeys */ + ROLDQ(kll, klr, krl, krr, w0, w1, 15); + /* k5 */ + subL[6] = kll; subR[6] = klr; + /* k6 */ + subL[7] = krl; subR[7] = krr; + ROLDQ(kll, klr, krl, krr, w0, w1, 30); + /* k11 */ + subL[14] = 
kll; subR[14] = klr; + /* k12 */ + subL[15] = krl; subR[15] = krr; + /* rotation left shift 32bit */ + /* kl5 */ + subL[24] = klr; subR[24] = krl; + /* kl6 */ + subL[25] = krr; subR[25] = kll; + /* rotation left shift 49 from k11,k12 -> k21,k22 */ + ROLDQo32(kll, klr, krl, krr, w0, w1, 49); + /* k21 */ + subL[28] = kll; subR[28] = klr; + /* k22 */ + subL[29] = krl; subR[29] = krr; + + /* generate KB dependent subkeys */ + /* k1 */ + subL[2] = krll; subR[2] = krlr; + /* k2 */ + subL[3] = krrl; subR[3] = krrr; + ROLDQ(krll, krlr, krrl, krrr, w0, w1, 30); + /* k7 */ + subL[10] = krll; subR[10] = krlr; + /* k8 */ + subL[11] = krrl; subR[11] = krrr; + ROLDQ(krll, krlr, krrl, krrr, w0, w1, 30); + /* k15 */ + subL[20] = krll; subR[20] = krlr; + /* k16 */ + subL[21] = krrl; subR[21] = krrr; + ROLDQo32(krll, krlr, krrl, krrr, w0, w1, 51); + /* kw3 */ + subL[32] = krll; subR[32] = krlr; + /* kw4 */ + subL[33] = krrl; subR[33] = krrr; + + camellia_setup_tail(subkey, subL, subR, 32); +} + +static void camellia_setup192(const unsigned char *key, u32 *subkey) +{ + unsigned char kk[32]; + u32 krll, krlr, krrl, krrr; + + memcpy(kk, key, 24); + memcpy((unsigned char *)&krll, key+16, 4); + memcpy((unsigned char *)&krlr, key+20, 4); + krrl = ~krll; + krrr = ~krlr; + memcpy(kk+24, (unsigned char *)&krrl, 4); + memcpy(kk+28, (unsigned char *)&krrr, 4); + camellia_setup256(kk, subkey); +} + + +/* + * Encrypt/decrypt + */ +#define CAMELLIA_FLS(ll, lr, rl, rr, kll, klr, krl, krr, t0, t1, t2, t3) ({ \ + t0 = kll; \ + t2 = krr; \ + t0 &= ll; \ + t2 |= rr; \ + rl ^= t2; \ + lr ^= rol32(t0, 1); \ + t3 = krl; \ + t1 = klr; \ + t3 &= rl; \ + t1 |= lr; \ + ll ^= t1; \ + rr ^= rol32(t3, 1); \ +}) + +#define CAMELLIA_ROUNDSM(xl, xr, kl, kr, yl, yr, il, ir) ({ \ + yl ^= kl; \ + yr ^= kr; \ + ir = camellia_sp1110[(u8)xr]; \ + il = camellia_sp1110[(u8)(xl >> 24)]; \ + ir ^= camellia_sp0222[(u8)(xr >> 24)]; \ + il ^= camellia_sp0222[(u8)(xl >> 16)]; \ + ir ^= camellia_sp3033[(u8)(xr >> 16)]; \ + il ^= camellia_sp3033[(u8)(xl >> 8)]; \ + ir ^= camellia_sp4404[(u8)(xr >> 8)]; \ + il ^= camellia_sp4404[(u8)xl]; \ + ir ^= il; \ + yl ^= ir; \ + yr ^= ror32(il, 8) ^ ir; \ +}) + +/* max = 24: 128bit encrypt, max = 32: 256bit encrypt */ +static void camellia_do_encrypt(const u32 *subkey, u32 *io, unsigned max) +{ + u32 il, ir, t0, t1; /* temporary variables */ + + /* pre whitening but absorb kw2 */ + io[0] ^= SUBKEY_L(0); + io[1] ^= SUBKEY_R(0); + + /* main iteration */ +#define ROUNDS(i) ({ \ + CAMELLIA_ROUNDSM(io[0], io[1], \ + SUBKEY_L(i + 2), SUBKEY_R(i + 2), \ + io[2], io[3], il, ir); \ + CAMELLIA_ROUNDSM(io[2], io[3], \ + SUBKEY_L(i + 3), SUBKEY_R(i + 3), \ + io[0], io[1], il, ir); \ + CAMELLIA_ROUNDSM(io[0], io[1], \ + SUBKEY_L(i + 4), SUBKEY_R(i + 4), \ + io[2], io[3], il, ir); \ + CAMELLIA_ROUNDSM(io[2], io[3], \ + SUBKEY_L(i + 5), SUBKEY_R(i + 5), \ + io[0], io[1], il, ir); \ + CAMELLIA_ROUNDSM(io[0], io[1], \ + SUBKEY_L(i + 6), SUBKEY_R(i + 6), \ + io[2], io[3], il, ir); \ + CAMELLIA_ROUNDSM(io[2], io[3], \ + SUBKEY_L(i + 7), SUBKEY_R(i + 7), \ + io[0], io[1], il, ir); \ +}) +#define FLS(i) ({ \ + CAMELLIA_FLS(io[0], io[1], io[2], io[3], \ + SUBKEY_L(i + 0), SUBKEY_R(i + 0), \ + SUBKEY_L(i + 1), SUBKEY_R(i + 1), \ + t0, t1, il, ir); \ +}) + + ROUNDS(0); + FLS(8); + ROUNDS(8); + FLS(16); + ROUNDS(16); + if (max == 32) { + FLS(24); + ROUNDS(24); + } + +#undef ROUNDS +#undef FLS + + /* post whitening but kw4 */ + io[2] ^= SUBKEY_L(max); + io[3] ^= SUBKEY_R(max); + /* NB: io[0],[1] should be swapped with [2],[3] by caller! 
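Both camellia_encrypt() and camellia_decrypt() perform this swap when copying tmp[] back into the destination block.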
*/ +} + +static void camellia_do_decrypt(const u32 *subkey, u32 *io, unsigned i) +{ + u32 il, ir, t0, t1; /* temporary variables */ + + /* pre whitening but absorb kw2 */ + io[0] ^= SUBKEY_L(i); + io[1] ^= SUBKEY_R(i); + + /* main iteration */ +#define ROUNDS(i) ({ \ + CAMELLIA_ROUNDSM(io[0], io[1], \ + SUBKEY_L(i + 7), SUBKEY_R(i + 7), \ + io[2], io[3], il, ir); \ + CAMELLIA_ROUNDSM(io[2], io[3], \ + SUBKEY_L(i + 6), SUBKEY_R(i + 6), \ + io[0], io[1], il, ir); \ + CAMELLIA_ROUNDSM(io[0], io[1], \ + SUBKEY_L(i + 5), SUBKEY_R(i + 5), \ + io[2], io[3], il, ir); \ + CAMELLIA_ROUNDSM(io[2], io[3], \ + SUBKEY_L(i + 4), SUBKEY_R(i + 4), \ + io[0], io[1], il, ir); \ + CAMELLIA_ROUNDSM(io[0], io[1], \ + SUBKEY_L(i + 3), SUBKEY_R(i + 3), \ + io[2], io[3], il, ir); \ + CAMELLIA_ROUNDSM(io[2], io[3], \ + SUBKEY_L(i + 2), SUBKEY_R(i + 2), \ + io[0], io[1], il, ir); \ +}) +#define FLS(i) ({ \ + CAMELLIA_FLS(io[0], io[1], io[2], io[3], \ + SUBKEY_L(i + 1), SUBKEY_R(i + 1), \ + SUBKEY_L(i + 0), SUBKEY_R(i + 0), \ + t0, t1, il, ir); \ +}) + + if (i == 32) { + ROUNDS(24); + FLS(24); + } + ROUNDS(16); + FLS(16); + ROUNDS(8); + FLS(8); + ROUNDS(0); + +#undef ROUNDS +#undef FLS + + /* post whitening but kw4 */ + io[2] ^= SUBKEY_L(0); + io[3] ^= SUBKEY_R(0); + /* NB: 0,1 should be swapped with 2,3 by caller! */ +} + + +struct camellia_ctx { + int key_length; + u32 key_table[CAMELLIA_TABLE_BYTE_LEN / sizeof(u32)]; +}; + +static int +camellia_set_key(struct crypto_tfm *tfm, const u8 *in_key, + unsigned int key_len) +{ + struct camellia_ctx *cctx = crypto_tfm_ctx(tfm); + const unsigned char *key = (const unsigned char *)in_key; + u32 *flags = &tfm->crt_flags; + + if (key_len != 16 && key_len != 24 && key_len != 32) { + *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN; + return -EINVAL; + } + + cctx->key_length = key_len; + + switch (key_len) { + case 16: + camellia_setup128(key, cctx->key_table); + break; + case 24: + camellia_setup192(key, cctx->key_table); + break; + case 32: + camellia_setup256(key, cctx->key_table); + break; + } + + return 0; +} + +static void camellia_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in) +{ + const struct camellia_ctx *cctx = crypto_tfm_ctx(tfm); + const __be32 *src = (const __be32 *)in; + __be32 *dst = (__be32 *)out; + unsigned int max; + + u32 tmp[4]; + + tmp[0] = be32_to_cpu(src[0]); + tmp[1] = be32_to_cpu(src[1]); + tmp[2] = be32_to_cpu(src[2]); + tmp[3] = be32_to_cpu(src[3]); + + if (cctx->key_length == 16) + max = 24; + else + max = 32; /* for key lengths of 24 and 32 */ + + camellia_do_encrypt(cctx->key_table, tmp, max); + + /* do_encrypt returns 0,1 swapped with 2,3 */ + dst[0] = cpu_to_be32(tmp[2]); + dst[1] = cpu_to_be32(tmp[3]); + dst[2] = cpu_to_be32(tmp[0]); + dst[3] = cpu_to_be32(tmp[1]); +} + +static void camellia_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in) +{ + const struct camellia_ctx *cctx = crypto_tfm_ctx(tfm); + const __be32 *src = (const __be32 *)in; + __be32 *dst = (__be32 *)out; + unsigned int max; + + u32 tmp[4]; + + tmp[0] = be32_to_cpu(src[0]); + tmp[1] = be32_to_cpu(src[1]); + tmp[2] = be32_to_cpu(src[2]); + tmp[3] = be32_to_cpu(src[3]); + + if (cctx->key_length == 16) + max = 24; + else + max = 32; /* for key lengths of 24 and 32 */ + + camellia_do_decrypt(cctx->key_table, tmp, max); + + /* do_decrypt returns 0,1 swapped with 2,3 */ + dst[0] = cpu_to_be32(tmp[2]); + dst[1] = cpu_to_be32(tmp[3]); + dst[2] = cpu_to_be32(tmp[0]); + dst[3] = cpu_to_be32(tmp[1]); +} + +static struct crypto_alg camellia_alg = { + .cra_name = "camellia", + 
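/* generic C implementation: the low cra_priority (100) below lets architecture-optimized "camellia" drivers take precedence when available */ +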
.cra_driver_name = "camellia-generic", + .cra_priority = 100, + .cra_flags = CRYPTO_ALG_TYPE_CIPHER, + .cra_blocksize = CAMELLIA_BLOCK_SIZE, + .cra_ctxsize = sizeof(struct camellia_ctx), + .cra_alignmask = 3, + .cra_module = THIS_MODULE, + .cra_list = LIST_HEAD_INIT(camellia_alg.cra_list), + .cra_u = { + .cipher = { + .cia_min_keysize = CAMELLIA_MIN_KEY_SIZE, + .cia_max_keysize = CAMELLIA_MAX_KEY_SIZE, + .cia_setkey = camellia_set_key, + .cia_encrypt = camellia_encrypt, + .cia_decrypt = camellia_decrypt + } + } +}; + +static int __init camellia_init(void) +{ + return crypto_register_alg(&camellia_alg); +} + +static void __exit camellia_fini(void) +{ + crypto_unregister_alg(&camellia_alg); +} + +module_init(camellia_init); +module_exit(camellia_fini); + +MODULE_DESCRIPTION("Camellia Cipher Algorithm"); +MODULE_LICENSE("GPL"); +MODULE_ALIAS("camellia"); diff --git a/crypto/cast5.c b/crypto/cast5.c new file mode 100644 index 00000000..4a230dde --- /dev/null +++ b/crypto/cast5.c @@ -0,0 +1,809 @@ +/* Kernel cryptographic api. +* cast5.c - Cast5 cipher algorithm (rfc2144). +* +* Derived from GnuPG implementation of cast5. +* +* Major Changes. +* Complete conformance to rfc2144. +* Supports key size from 40 to 128 bits. +* +* Copyright (C) 1998, 1999, 2000, 2001 Free Software Foundation, Inc. +* Copyright (C) 2003 Kartikey Mahendra Bhatt <kartik_me@hotmail.com>. +* +* This program is free software; you can redistribute it and/or modify it +* under the terms of GNU General Public License as published by the Free +* Software Foundation; either version 2 of the License, or (at your option) +* any later version. +* +* You should have received a copy of the GNU General Public License +* along with this program; if not, write to the Free Software +* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA +*/ + + +#include <asm/byteorder.h> +#include <linux/init.h> +#include <linux/crypto.h> +#include <linux/module.h> +#include <linux/errno.h> +#include <linux/string.h> +#include <linux/types.h> + +#define CAST5_BLOCK_SIZE 8 +#define CAST5_MIN_KEY_SIZE 5 +#define CAST5_MAX_KEY_SIZE 16 + +struct cast5_ctx { + u32 Km[16]; + u8 Kr[16]; + int rr; /* rr?number of rounds = 16:number of rounds = 12; (rfc 2144) */ +}; + + +static const u32 s1[256] = { + 0x30fb40d4, 0x9fa0ff0b, 0x6beccd2f, 0x3f258c7a, 0x1e213f2f, + 0x9c004dd3, 0x6003e540, 0xcf9fc949, + 0xbfd4af27, 0x88bbbdb5, 0xe2034090, 0x98d09675, 0x6e63a0e0, + 0x15c361d2, 0xc2e7661d, 0x22d4ff8e, + 0x28683b6f, 0xc07fd059, 0xff2379c8, 0x775f50e2, 0x43c340d3, + 0xdf2f8656, 0x887ca41a, 0xa2d2bd2d, + 0xa1c9e0d6, 0x346c4819, 0x61b76d87, 0x22540f2f, 0x2abe32e1, + 0xaa54166b, 0x22568e3a, 0xa2d341d0, + 0x66db40c8, 0xa784392f, 0x004dff2f, 0x2db9d2de, 0x97943fac, + 0x4a97c1d8, 0x527644b7, 0xb5f437a7, + 0xb82cbaef, 0xd751d159, 0x6ff7f0ed, 0x5a097a1f, 0x827b68d0, + 0x90ecf52e, 0x22b0c054, 0xbc8e5935, + 0x4b6d2f7f, 0x50bb64a2, 0xd2664910, 0xbee5812d, 0xb7332290, + 0xe93b159f, 0xb48ee411, 0x4bff345d, + 0xfd45c240, 0xad31973f, 0xc4f6d02e, 0x55fc8165, 0xd5b1caad, + 0xa1ac2dae, 0xa2d4b76d, 0xc19b0c50, + 0x882240f2, 0x0c6e4f38, 0xa4e4bfd7, 0x4f5ba272, 0x564c1d2f, + 0xc59c5319, 0xb949e354, 0xb04669fe, + 0xb1b6ab8a, 0xc71358dd, 0x6385c545, 0x110f935d, 0x57538ad5, + 0x6a390493, 0xe63d37e0, 0x2a54f6b3, + 0x3a787d5f, 0x6276a0b5, 0x19a6fcdf, 0x7a42206a, 0x29f9d4d5, + 0xf61b1891, 0xbb72275e, 0xaa508167, + 0x38901091, 0xc6b505eb, 0x84c7cb8c, 0x2ad75a0f, 0x874a1427, + 0xa2d1936b, 0x2ad286af, 0xaa56d291, + 0xd7894360, 0x425c750d, 0x93b39e26, 0x187184c9, 0x6c00b32d, + 
0x73e2bb14, 0xa0bebc3c, 0x54623779, + 0x64459eab, 0x3f328b82, 0x7718cf82, 0x59a2cea6, 0x04ee002e, + 0x89fe78e6, 0x3fab0950, 0x325ff6c2, + 0x81383f05, 0x6963c5c8, 0x76cb5ad6, 0xd49974c9, 0xca180dcf, + 0x380782d5, 0xc7fa5cf6, 0x8ac31511, + 0x35e79e13, 0x47da91d0, 0xf40f9086, 0xa7e2419e, 0x31366241, + 0x051ef495, 0xaa573b04, 0x4a805d8d, + 0x548300d0, 0x00322a3c, 0xbf64cddf, 0xba57a68e, 0x75c6372b, + 0x50afd341, 0xa7c13275, 0x915a0bf5, + 0x6b54bfab, 0x2b0b1426, 0xab4cc9d7, 0x449ccd82, 0xf7fbf265, + 0xab85c5f3, 0x1b55db94, 0xaad4e324, + 0xcfa4bd3f, 0x2deaa3e2, 0x9e204d02, 0xc8bd25ac, 0xeadf55b3, + 0xd5bd9e98, 0xe31231b2, 0x2ad5ad6c, + 0x954329de, 0xadbe4528, 0xd8710f69, 0xaa51c90f, 0xaa786bf6, + 0x22513f1e, 0xaa51a79b, 0x2ad344cc, + 0x7b5a41f0, 0xd37cfbad, 0x1b069505, 0x41ece491, 0xb4c332e6, + 0x032268d4, 0xc9600acc, 0xce387e6d, + 0xbf6bb16c, 0x6a70fb78, 0x0d03d9c9, 0xd4df39de, 0xe01063da, + 0x4736f464, 0x5ad328d8, 0xb347cc96, + 0x75bb0fc3, 0x98511bfb, 0x4ffbcc35, 0xb58bcf6a, 0xe11f0abc, + 0xbfc5fe4a, 0xa70aec10, 0xac39570a, + 0x3f04442f, 0x6188b153, 0xe0397a2e, 0x5727cb79, 0x9ceb418f, + 0x1cacd68d, 0x2ad37c96, 0x0175cb9d, + 0xc69dff09, 0xc75b65f0, 0xd9db40d8, 0xec0e7779, 0x4744ead4, + 0xb11c3274, 0xdd24cb9e, 0x7e1c54bd, + 0xf01144f9, 0xd2240eb1, 0x9675b3fd, 0xa3ac3755, 0xd47c27af, + 0x51c85f4d, 0x56907596, 0xa5bb15e6, + 0x580304f0, 0xca042cf1, 0x011a37ea, 0x8dbfaadb, 0x35ba3e4a, + 0x3526ffa0, 0xc37b4d09, 0xbc306ed9, + 0x98a52666, 0x5648f725, 0xff5e569d, 0x0ced63d0, 0x7c63b2cf, + 0x700b45e1, 0xd5ea50f1, 0x85a92872, + 0xaf1fbda7, 0xd4234870, 0xa7870bf3, 0x2d3b4d79, 0x42e04198, + 0x0cd0ede7, 0x26470db8, 0xf881814c, + 0x474d6ad7, 0x7c0c5e5c, 0xd1231959, 0x381b7298, 0xf5d2f4db, + 0xab838653, 0x6e2f1e23, 0x83719c9e, + 0xbd91e046, 0x9a56456e, 0xdc39200c, 0x20c8c571, 0x962bda1c, + 0xe1e696ff, 0xb141ab08, 0x7cca89b9, + 0x1a69e783, 0x02cc4843, 0xa2f7c579, 0x429ef47d, 0x427b169c, + 0x5ac9f049, 0xdd8f0f00, 0x5c8165bf +}; +static const u32 s2[256] = { + 0x1f201094, 0xef0ba75b, 0x69e3cf7e, 0x393f4380, 0xfe61cf7a, + 0xeec5207a, 0x55889c94, 0x72fc0651, + 0xada7ef79, 0x4e1d7235, 0xd55a63ce, 0xde0436ba, 0x99c430ef, + 0x5f0c0794, 0x18dcdb7d, 0xa1d6eff3, + 0xa0b52f7b, 0x59e83605, 0xee15b094, 0xe9ffd909, 0xdc440086, + 0xef944459, 0xba83ccb3, 0xe0c3cdfb, + 0xd1da4181, 0x3b092ab1, 0xf997f1c1, 0xa5e6cf7b, 0x01420ddb, + 0xe4e7ef5b, 0x25a1ff41, 0xe180f806, + 0x1fc41080, 0x179bee7a, 0xd37ac6a9, 0xfe5830a4, 0x98de8b7f, + 0x77e83f4e, 0x79929269, 0x24fa9f7b, + 0xe113c85b, 0xacc40083, 0xd7503525, 0xf7ea615f, 0x62143154, + 0x0d554b63, 0x5d681121, 0xc866c359, + 0x3d63cf73, 0xcee234c0, 0xd4d87e87, 0x5c672b21, 0x071f6181, + 0x39f7627f, 0x361e3084, 0xe4eb573b, + 0x602f64a4, 0xd63acd9c, 0x1bbc4635, 0x9e81032d, 0x2701f50c, + 0x99847ab4, 0xa0e3df79, 0xba6cf38c, + 0x10843094, 0x2537a95e, 0xf46f6ffe, 0xa1ff3b1f, 0x208cfb6a, + 0x8f458c74, 0xd9e0a227, 0x4ec73a34, + 0xfc884f69, 0x3e4de8df, 0xef0e0088, 0x3559648d, 0x8a45388c, + 0x1d804366, 0x721d9bfd, 0xa58684bb, + 0xe8256333, 0x844e8212, 0x128d8098, 0xfed33fb4, 0xce280ae1, + 0x27e19ba5, 0xd5a6c252, 0xe49754bd, + 0xc5d655dd, 0xeb667064, 0x77840b4d, 0xa1b6a801, 0x84db26a9, + 0xe0b56714, 0x21f043b7, 0xe5d05860, + 0x54f03084, 0x066ff472, 0xa31aa153, 0xdadc4755, 0xb5625dbf, + 0x68561be6, 0x83ca6b94, 0x2d6ed23b, + 0xeccf01db, 0xa6d3d0ba, 0xb6803d5c, 0xaf77a709, 0x33b4a34c, + 0x397bc8d6, 0x5ee22b95, 0x5f0e5304, + 0x81ed6f61, 0x20e74364, 0xb45e1378, 0xde18639b, 0x881ca122, + 0xb96726d1, 0x8049a7e8, 0x22b7da7b, + 0x5e552d25, 0x5272d237, 0x79d2951c, 0xc60d894c, 0x488cb402, + 0x1ba4fe5b, 
0xa4b09f6b, 0x1ca815cf, + 0xa20c3005, 0x8871df63, 0xb9de2fcb, 0x0cc6c9e9, 0x0beeff53, + 0xe3214517, 0xb4542835, 0x9f63293c, + 0xee41e729, 0x6e1d2d7c, 0x50045286, 0x1e6685f3, 0xf33401c6, + 0x30a22c95, 0x31a70850, 0x60930f13, + 0x73f98417, 0xa1269859, 0xec645c44, 0x52c877a9, 0xcdff33a6, + 0xa02b1741, 0x7cbad9a2, 0x2180036f, + 0x50d99c08, 0xcb3f4861, 0xc26bd765, 0x64a3f6ab, 0x80342676, + 0x25a75e7b, 0xe4e6d1fc, 0x20c710e6, + 0xcdf0b680, 0x17844d3b, 0x31eef84d, 0x7e0824e4, 0x2ccb49eb, + 0x846a3bae, 0x8ff77888, 0xee5d60f6, + 0x7af75673, 0x2fdd5cdb, 0xa11631c1, 0x30f66f43, 0xb3faec54, + 0x157fd7fa, 0xef8579cc, 0xd152de58, + 0xdb2ffd5e, 0x8f32ce19, 0x306af97a, 0x02f03ef8, 0x99319ad5, + 0xc242fa0f, 0xa7e3ebb0, 0xc68e4906, + 0xb8da230c, 0x80823028, 0xdcdef3c8, 0xd35fb171, 0x088a1bc8, + 0xbec0c560, 0x61a3c9e8, 0xbca8f54d, + 0xc72feffa, 0x22822e99, 0x82c570b4, 0xd8d94e89, 0x8b1c34bc, + 0x301e16e6, 0x273be979, 0xb0ffeaa6, + 0x61d9b8c6, 0x00b24869, 0xb7ffce3f, 0x08dc283b, 0x43daf65a, + 0xf7e19798, 0x7619b72f, 0x8f1c9ba4, + 0xdc8637a0, 0x16a7d3b1, 0x9fc393b7, 0xa7136eeb, 0xc6bcc63e, + 0x1a513742, 0xef6828bc, 0x520365d6, + 0x2d6a77ab, 0x3527ed4b, 0x821fd216, 0x095c6e2e, 0xdb92f2fb, + 0x5eea29cb, 0x145892f5, 0x91584f7f, + 0x5483697b, 0x2667a8cc, 0x85196048, 0x8c4bacea, 0x833860d4, + 0x0d23e0f9, 0x6c387e8a, 0x0ae6d249, + 0xb284600c, 0xd835731d, 0xdcb1c647, 0xac4c56ea, 0x3ebd81b3, + 0x230eabb0, 0x6438bc87, 0xf0b5b1fa, + 0x8f5ea2b3, 0xfc184642, 0x0a036b7a, 0x4fb089bd, 0x649da589, + 0xa345415e, 0x5c038323, 0x3e5d3bb9, + 0x43d79572, 0x7e6dd07c, 0x06dfdf1e, 0x6c6cc4ef, 0x7160a539, + 0x73bfbe70, 0x83877605, 0x4523ecf1 +}; +static const u32 s3[256] = { + 0x8defc240, 0x25fa5d9f, 0xeb903dbf, 0xe810c907, 0x47607fff, + 0x369fe44b, 0x8c1fc644, 0xaececa90, + 0xbeb1f9bf, 0xeefbcaea, 0xe8cf1950, 0x51df07ae, 0x920e8806, + 0xf0ad0548, 0xe13c8d83, 0x927010d5, + 0x11107d9f, 0x07647db9, 0xb2e3e4d4, 0x3d4f285e, 0xb9afa820, + 0xfade82e0, 0xa067268b, 0x8272792e, + 0x553fb2c0, 0x489ae22b, 0xd4ef9794, 0x125e3fbc, 0x21fffcee, + 0x825b1bfd, 0x9255c5ed, 0x1257a240, + 0x4e1a8302, 0xbae07fff, 0x528246e7, 0x8e57140e, 0x3373f7bf, + 0x8c9f8188, 0xa6fc4ee8, 0xc982b5a5, + 0xa8c01db7, 0x579fc264, 0x67094f31, 0xf2bd3f5f, 0x40fff7c1, + 0x1fb78dfc, 0x8e6bd2c1, 0x437be59b, + 0x99b03dbf, 0xb5dbc64b, 0x638dc0e6, 0x55819d99, 0xa197c81c, + 0x4a012d6e, 0xc5884a28, 0xccc36f71, + 0xb843c213, 0x6c0743f1, 0x8309893c, 0x0feddd5f, 0x2f7fe850, + 0xd7c07f7e, 0x02507fbf, 0x5afb9a04, + 0xa747d2d0, 0x1651192e, 0xaf70bf3e, 0x58c31380, 0x5f98302e, + 0x727cc3c4, 0x0a0fb402, 0x0f7fef82, + 0x8c96fdad, 0x5d2c2aae, 0x8ee99a49, 0x50da88b8, 0x8427f4a0, + 0x1eac5790, 0x796fb449, 0x8252dc15, + 0xefbd7d9b, 0xa672597d, 0xada840d8, 0x45f54504, 0xfa5d7403, + 0xe83ec305, 0x4f91751a, 0x925669c2, + 0x23efe941, 0xa903f12e, 0x60270df2, 0x0276e4b6, 0x94fd6574, + 0x927985b2, 0x8276dbcb, 0x02778176, + 0xf8af918d, 0x4e48f79e, 0x8f616ddf, 0xe29d840e, 0x842f7d83, + 0x340ce5c8, 0x96bbb682, 0x93b4b148, + 0xef303cab, 0x984faf28, 0x779faf9b, 0x92dc560d, 0x224d1e20, + 0x8437aa88, 0x7d29dc96, 0x2756d3dc, + 0x8b907cee, 0xb51fd240, 0xe7c07ce3, 0xe566b4a1, 0xc3e9615e, + 0x3cf8209d, 0x6094d1e3, 0xcd9ca341, + 0x5c76460e, 0x00ea983b, 0xd4d67881, 0xfd47572c, 0xf76cedd9, + 0xbda8229c, 0x127dadaa, 0x438a074e, + 0x1f97c090, 0x081bdb8a, 0x93a07ebe, 0xb938ca15, 0x97b03cff, + 0x3dc2c0f8, 0x8d1ab2ec, 0x64380e51, + 0x68cc7bfb, 0xd90f2788, 0x12490181, 0x5de5ffd4, 0xdd7ef86a, + 0x76a2e214, 0xb9a40368, 0x925d958f, + 0x4b39fffa, 0xba39aee9, 0xa4ffd30b, 0xfaf7933b, 0x6d498623, + 0x193cbcfa, 0x27627545, 
0x825cf47a, + 0x61bd8ba0, 0xd11e42d1, 0xcead04f4, 0x127ea392, 0x10428db7, + 0x8272a972, 0x9270c4a8, 0x127de50b, + 0x285ba1c8, 0x3c62f44f, 0x35c0eaa5, 0xe805d231, 0x428929fb, + 0xb4fcdf82, 0x4fb66a53, 0x0e7dc15b, + 0x1f081fab, 0x108618ae, 0xfcfd086d, 0xf9ff2889, 0x694bcc11, + 0x236a5cae, 0x12deca4d, 0x2c3f8cc5, + 0xd2d02dfe, 0xf8ef5896, 0xe4cf52da, 0x95155b67, 0x494a488c, + 0xb9b6a80c, 0x5c8f82bc, 0x89d36b45, + 0x3a609437, 0xec00c9a9, 0x44715253, 0x0a874b49, 0xd773bc40, + 0x7c34671c, 0x02717ef6, 0x4feb5536, + 0xa2d02fff, 0xd2bf60c4, 0xd43f03c0, 0x50b4ef6d, 0x07478cd1, + 0x006e1888, 0xa2e53f55, 0xb9e6d4bc, + 0xa2048016, 0x97573833, 0xd7207d67, 0xde0f8f3d, 0x72f87b33, + 0xabcc4f33, 0x7688c55d, 0x7b00a6b0, + 0x947b0001, 0x570075d2, 0xf9bb88f8, 0x8942019e, 0x4264a5ff, + 0x856302e0, 0x72dbd92b, 0xee971b69, + 0x6ea22fde, 0x5f08ae2b, 0xaf7a616d, 0xe5c98767, 0xcf1febd2, + 0x61efc8c2, 0xf1ac2571, 0xcc8239c2, + 0x67214cb8, 0xb1e583d1, 0xb7dc3e62, 0x7f10bdce, 0xf90a5c38, + 0x0ff0443d, 0x606e6dc6, 0x60543a49, + 0x5727c148, 0x2be98a1d, 0x8ab41738, 0x20e1be24, 0xaf96da0f, + 0x68458425, 0x99833be5, 0x600d457d, + 0x282f9350, 0x8334b362, 0xd91d1120, 0x2b6d8da0, 0x642b1e31, + 0x9c305a00, 0x52bce688, 0x1b03588a, + 0xf7baefd5, 0x4142ed9c, 0xa4315c11, 0x83323ec5, 0xdfef4636, + 0xa133c501, 0xe9d3531c, 0xee353783 +}; +static const u32 s4[256] = { + 0x9db30420, 0x1fb6e9de, 0xa7be7bef, 0xd273a298, 0x4a4f7bdb, + 0x64ad8c57, 0x85510443, 0xfa020ed1, + 0x7e287aff, 0xe60fb663, 0x095f35a1, 0x79ebf120, 0xfd059d43, + 0x6497b7b1, 0xf3641f63, 0x241e4adf, + 0x28147f5f, 0x4fa2b8cd, 0xc9430040, 0x0cc32220, 0xfdd30b30, + 0xc0a5374f, 0x1d2d00d9, 0x24147b15, + 0xee4d111a, 0x0fca5167, 0x71ff904c, 0x2d195ffe, 0x1a05645f, + 0x0c13fefe, 0x081b08ca, 0x05170121, + 0x80530100, 0xe83e5efe, 0xac9af4f8, 0x7fe72701, 0xd2b8ee5f, + 0x06df4261, 0xbb9e9b8a, 0x7293ea25, + 0xce84ffdf, 0xf5718801, 0x3dd64b04, 0xa26f263b, 0x7ed48400, + 0x547eebe6, 0x446d4ca0, 0x6cf3d6f5, + 0x2649abdf, 0xaea0c7f5, 0x36338cc1, 0x503f7e93, 0xd3772061, + 0x11b638e1, 0x72500e03, 0xf80eb2bb, + 0xabe0502e, 0xec8d77de, 0x57971e81, 0xe14f6746, 0xc9335400, + 0x6920318f, 0x081dbb99, 0xffc304a5, + 0x4d351805, 0x7f3d5ce3, 0xa6c866c6, 0x5d5bcca9, 0xdaec6fea, + 0x9f926f91, 0x9f46222f, 0x3991467d, + 0xa5bf6d8e, 0x1143c44f, 0x43958302, 0xd0214eeb, 0x022083b8, + 0x3fb6180c, 0x18f8931e, 0x281658e6, + 0x26486e3e, 0x8bd78a70, 0x7477e4c1, 0xb506e07c, 0xf32d0a25, + 0x79098b02, 0xe4eabb81, 0x28123b23, + 0x69dead38, 0x1574ca16, 0xdf871b62, 0x211c40b7, 0xa51a9ef9, + 0x0014377b, 0x041e8ac8, 0x09114003, + 0xbd59e4d2, 0xe3d156d5, 0x4fe876d5, 0x2f91a340, 0x557be8de, + 0x00eae4a7, 0x0ce5c2ec, 0x4db4bba6, + 0xe756bdff, 0xdd3369ac, 0xec17b035, 0x06572327, 0x99afc8b0, + 0x56c8c391, 0x6b65811c, 0x5e146119, + 0x6e85cb75, 0xbe07c002, 0xc2325577, 0x893ff4ec, 0x5bbfc92d, + 0xd0ec3b25, 0xb7801ab7, 0x8d6d3b24, + 0x20c763ef, 0xc366a5fc, 0x9c382880, 0x0ace3205, 0xaac9548a, + 0xeca1d7c7, 0x041afa32, 0x1d16625a, + 0x6701902c, 0x9b757a54, 0x31d477f7, 0x9126b031, 0x36cc6fdb, + 0xc70b8b46, 0xd9e66a48, 0x56e55a79, + 0x026a4ceb, 0x52437eff, 0x2f8f76b4, 0x0df980a5, 0x8674cde3, + 0xedda04eb, 0x17a9be04, 0x2c18f4df, + 0xb7747f9d, 0xab2af7b4, 0xefc34d20, 0x2e096b7c, 0x1741a254, + 0xe5b6a035, 0x213d42f6, 0x2c1c7c26, + 0x61c2f50f, 0x6552daf9, 0xd2c231f8, 0x25130f69, 0xd8167fa2, + 0x0418f2c8, 0x001a96a6, 0x0d1526ab, + 0x63315c21, 0x5e0a72ec, 0x49bafefd, 0x187908d9, 0x8d0dbd86, + 0x311170a7, 0x3e9b640c, 0xcc3e10d7, + 0xd5cad3b6, 0x0caec388, 0xf73001e1, 0x6c728aff, 0x71eae2a1, + 0x1f9af36e, 0xcfcbd12f, 0xc1de8417, + 
0xac07be6b, 0xcb44a1d8, 0x8b9b0f56, 0x013988c3, 0xb1c52fca, + 0xb4be31cd, 0xd8782806, 0x12a3a4e2, + 0x6f7de532, 0x58fd7eb6, 0xd01ee900, 0x24adffc2, 0xf4990fc5, + 0x9711aac5, 0x001d7b95, 0x82e5e7d2, + 0x109873f6, 0x00613096, 0xc32d9521, 0xada121ff, 0x29908415, + 0x7fbb977f, 0xaf9eb3db, 0x29c9ed2a, + 0x5ce2a465, 0xa730f32c, 0xd0aa3fe8, 0x8a5cc091, 0xd49e2ce7, + 0x0ce454a9, 0xd60acd86, 0x015f1919, + 0x77079103, 0xdea03af6, 0x78a8565e, 0xdee356df, 0x21f05cbe, + 0x8b75e387, 0xb3c50651, 0xb8a5c3ef, + 0xd8eeb6d2, 0xe523be77, 0xc2154529, 0x2f69efdf, 0xafe67afb, + 0xf470c4b2, 0xf3e0eb5b, 0xd6cc9876, + 0x39e4460c, 0x1fda8538, 0x1987832f, 0xca007367, 0xa99144f8, + 0x296b299e, 0x492fc295, 0x9266beab, + 0xb5676e69, 0x9bd3ddda, 0xdf7e052f, 0xdb25701c, 0x1b5e51ee, + 0xf65324e6, 0x6afce36c, 0x0316cc04, + 0x8644213e, 0xb7dc59d0, 0x7965291f, 0xccd6fd43, 0x41823979, + 0x932bcdf6, 0xb657c34d, 0x4edfd282, + 0x7ae5290c, 0x3cb9536b, 0x851e20fe, 0x9833557e, 0x13ecf0b0, + 0xd3ffb372, 0x3f85c5c1, 0x0aef7ed2 +}; +static const u32 s5[256] = { + 0x7ec90c04, 0x2c6e74b9, 0x9b0e66df, 0xa6337911, 0xb86a7fff, + 0x1dd358f5, 0x44dd9d44, 0x1731167f, + 0x08fbf1fa, 0xe7f511cc, 0xd2051b00, 0x735aba00, 0x2ab722d8, + 0x386381cb, 0xacf6243a, 0x69befd7a, + 0xe6a2e77f, 0xf0c720cd, 0xc4494816, 0xccf5c180, 0x38851640, + 0x15b0a848, 0xe68b18cb, 0x4caadeff, + 0x5f480a01, 0x0412b2aa, 0x259814fc, 0x41d0efe2, 0x4e40b48d, + 0x248eb6fb, 0x8dba1cfe, 0x41a99b02, + 0x1a550a04, 0xba8f65cb, 0x7251f4e7, 0x95a51725, 0xc106ecd7, + 0x97a5980a, 0xc539b9aa, 0x4d79fe6a, + 0xf2f3f763, 0x68af8040, 0xed0c9e56, 0x11b4958b, 0xe1eb5a88, + 0x8709e6b0, 0xd7e07156, 0x4e29fea7, + 0x6366e52d, 0x02d1c000, 0xc4ac8e05, 0x9377f571, 0x0c05372a, + 0x578535f2, 0x2261be02, 0xd642a0c9, + 0xdf13a280, 0x74b55bd2, 0x682199c0, 0xd421e5ec, 0x53fb3ce8, + 0xc8adedb3, 0x28a87fc9, 0x3d959981, + 0x5c1ff900, 0xfe38d399, 0x0c4eff0b, 0x062407ea, 0xaa2f4fb1, + 0x4fb96976, 0x90c79505, 0xb0a8a774, + 0xef55a1ff, 0xe59ca2c2, 0xa6b62d27, 0xe66a4263, 0xdf65001f, + 0x0ec50966, 0xdfdd55bc, 0x29de0655, + 0x911e739a, 0x17af8975, 0x32c7911c, 0x89f89468, 0x0d01e980, + 0x524755f4, 0x03b63cc9, 0x0cc844b2, + 0xbcf3f0aa, 0x87ac36e9, 0xe53a7426, 0x01b3d82b, 0x1a9e7449, + 0x64ee2d7e, 0xcddbb1da, 0x01c94910, + 0xb868bf80, 0x0d26f3fd, 0x9342ede7, 0x04a5c284, 0x636737b6, + 0x50f5b616, 0xf24766e3, 0x8eca36c1, + 0x136e05db, 0xfef18391, 0xfb887a37, 0xd6e7f7d4, 0xc7fb7dc9, + 0x3063fcdf, 0xb6f589de, 0xec2941da, + 0x26e46695, 0xb7566419, 0xf654efc5, 0xd08d58b7, 0x48925401, + 0xc1bacb7f, 0xe5ff550f, 0xb6083049, + 0x5bb5d0e8, 0x87d72e5a, 0xab6a6ee1, 0x223a66ce, 0xc62bf3cd, + 0x9e0885f9, 0x68cb3e47, 0x086c010f, + 0xa21de820, 0xd18b69de, 0xf3f65777, 0xfa02c3f6, 0x407edac3, + 0xcbb3d550, 0x1793084d, 0xb0d70eba, + 0x0ab378d5, 0xd951fb0c, 0xded7da56, 0x4124bbe4, 0x94ca0b56, + 0x0f5755d1, 0xe0e1e56e, 0x6184b5be, + 0x580a249f, 0x94f74bc0, 0xe327888e, 0x9f7b5561, 0xc3dc0280, + 0x05687715, 0x646c6bd7, 0x44904db3, + 0x66b4f0a3, 0xc0f1648a, 0x697ed5af, 0x49e92ff6, 0x309e374f, + 0x2cb6356a, 0x85808573, 0x4991f840, + 0x76f0ae02, 0x083be84d, 0x28421c9a, 0x44489406, 0x736e4cb8, + 0xc1092910, 0x8bc95fc6, 0x7d869cf4, + 0x134f616f, 0x2e77118d, 0xb31b2be1, 0xaa90b472, 0x3ca5d717, + 0x7d161bba, 0x9cad9010, 0xaf462ba2, + 0x9fe459d2, 0x45d34559, 0xd9f2da13, 0xdbc65487, 0xf3e4f94e, + 0x176d486f, 0x097c13ea, 0x631da5c7, + 0x445f7382, 0x175683f4, 0xcdc66a97, 0x70be0288, 0xb3cdcf72, + 0x6e5dd2f3, 0x20936079, 0x459b80a5, + 0xbe60e2db, 0xa9c23101, 0xeba5315c, 0x224e42f2, 0x1c5c1572, + 0xf6721b2c, 0x1ad2fff3, 0x8c25404e, + 0x324ed72f, 
0x4067b7fd, 0x0523138e, 0x5ca3bc78, 0xdc0fd66e, + 0x75922283, 0x784d6b17, 0x58ebb16e, + 0x44094f85, 0x3f481d87, 0xfcfeae7b, 0x77b5ff76, 0x8c2302bf, + 0xaaf47556, 0x5f46b02a, 0x2b092801, + 0x3d38f5f7, 0x0ca81f36, 0x52af4a8a, 0x66d5e7c0, 0xdf3b0874, + 0x95055110, 0x1b5ad7a8, 0xf61ed5ad, + 0x6cf6e479, 0x20758184, 0xd0cefa65, 0x88f7be58, 0x4a046826, + 0x0ff6f8f3, 0xa09c7f70, 0x5346aba0, + 0x5ce96c28, 0xe176eda3, 0x6bac307f, 0x376829d2, 0x85360fa9, + 0x17e3fe2a, 0x24b79767, 0xf5a96b20, + 0xd6cd2595, 0x68ff1ebf, 0x7555442c, 0xf19f06be, 0xf9e0659a, + 0xeeb9491d, 0x34010718, 0xbb30cab8, + 0xe822fe15, 0x88570983, 0x750e6249, 0xda627e55, 0x5e76ffa8, + 0xb1534546, 0x6d47de08, 0xefe9e7d4 +}; +static const u32 s6[256] = { + 0xf6fa8f9d, 0x2cac6ce1, 0x4ca34867, 0xe2337f7c, 0x95db08e7, + 0x016843b4, 0xeced5cbc, 0x325553ac, + 0xbf9f0960, 0xdfa1e2ed, 0x83f0579d, 0x63ed86b9, 0x1ab6a6b8, + 0xde5ebe39, 0xf38ff732, 0x8989b138, + 0x33f14961, 0xc01937bd, 0xf506c6da, 0xe4625e7e, 0xa308ea99, + 0x4e23e33c, 0x79cbd7cc, 0x48a14367, + 0xa3149619, 0xfec94bd5, 0xa114174a, 0xeaa01866, 0xa084db2d, + 0x09a8486f, 0xa888614a, 0x2900af98, + 0x01665991, 0xe1992863, 0xc8f30c60, 0x2e78ef3c, 0xd0d51932, + 0xcf0fec14, 0xf7ca07d2, 0xd0a82072, + 0xfd41197e, 0x9305a6b0, 0xe86be3da, 0x74bed3cd, 0x372da53c, + 0x4c7f4448, 0xdab5d440, 0x6dba0ec3, + 0x083919a7, 0x9fbaeed9, 0x49dbcfb0, 0x4e670c53, 0x5c3d9c01, + 0x64bdb941, 0x2c0e636a, 0xba7dd9cd, + 0xea6f7388, 0xe70bc762, 0x35f29adb, 0x5c4cdd8d, 0xf0d48d8c, + 0xb88153e2, 0x08a19866, 0x1ae2eac8, + 0x284caf89, 0xaa928223, 0x9334be53, 0x3b3a21bf, 0x16434be3, + 0x9aea3906, 0xefe8c36e, 0xf890cdd9, + 0x80226dae, 0xc340a4a3, 0xdf7e9c09, 0xa694a807, 0x5b7c5ecc, + 0x221db3a6, 0x9a69a02f, 0x68818a54, + 0xceb2296f, 0x53c0843a, 0xfe893655, 0x25bfe68a, 0xb4628abc, + 0xcf222ebf, 0x25ac6f48, 0xa9a99387, + 0x53bddb65, 0xe76ffbe7, 0xe967fd78, 0x0ba93563, 0x8e342bc1, + 0xe8a11be9, 0x4980740d, 0xc8087dfc, + 0x8de4bf99, 0xa11101a0, 0x7fd37975, 0xda5a26c0, 0xe81f994f, + 0x9528cd89, 0xfd339fed, 0xb87834bf, + 0x5f04456d, 0x22258698, 0xc9c4c83b, 0x2dc156be, 0x4f628daa, + 0x57f55ec5, 0xe2220abe, 0xd2916ebf, + 0x4ec75b95, 0x24f2c3c0, 0x42d15d99, 0xcd0d7fa0, 0x7b6e27ff, + 0xa8dc8af0, 0x7345c106, 0xf41e232f, + 0x35162386, 0xe6ea8926, 0x3333b094, 0x157ec6f2, 0x372b74af, + 0x692573e4, 0xe9a9d848, 0xf3160289, + 0x3a62ef1d, 0xa787e238, 0xf3a5f676, 0x74364853, 0x20951063, + 0x4576698d, 0xb6fad407, 0x592af950, + 0x36f73523, 0x4cfb6e87, 0x7da4cec0, 0x6c152daa, 0xcb0396a8, + 0xc50dfe5d, 0xfcd707ab, 0x0921c42f, + 0x89dff0bb, 0x5fe2be78, 0x448f4f33, 0x754613c9, 0x2b05d08d, + 0x48b9d585, 0xdc049441, 0xc8098f9b, + 0x7dede786, 0xc39a3373, 0x42410005, 0x6a091751, 0x0ef3c8a6, + 0x890072d6, 0x28207682, 0xa9a9f7be, + 0xbf32679d, 0xd45b5b75, 0xb353fd00, 0xcbb0e358, 0x830f220a, + 0x1f8fb214, 0xd372cf08, 0xcc3c4a13, + 0x8cf63166, 0x061c87be, 0x88c98f88, 0x6062e397, 0x47cf8e7a, + 0xb6c85283, 0x3cc2acfb, 0x3fc06976, + 0x4e8f0252, 0x64d8314d, 0xda3870e3, 0x1e665459, 0xc10908f0, + 0x513021a5, 0x6c5b68b7, 0x822f8aa0, + 0x3007cd3e, 0x74719eef, 0xdc872681, 0x073340d4, 0x7e432fd9, + 0x0c5ec241, 0x8809286c, 0xf592d891, + 0x08a930f6, 0x957ef305, 0xb7fbffbd, 0xc266e96f, 0x6fe4ac98, + 0xb173ecc0, 0xbc60b42a, 0x953498da, + 0xfba1ae12, 0x2d4bd736, 0x0f25faab, 0xa4f3fceb, 0xe2969123, + 0x257f0c3d, 0x9348af49, 0x361400bc, + 0xe8816f4a, 0x3814f200, 0xa3f94043, 0x9c7a54c2, 0xbc704f57, + 0xda41e7f9, 0xc25ad33a, 0x54f4a084, + 0xb17f5505, 0x59357cbe, 0xedbd15c8, 0x7f97c5ab, 0xba5ac7b5, + 0xb6f6deaf, 0x3a479c3a, 0x5302da25, + 0x653d7e6a, 0x54268d49, 
0x51a477ea, 0x5017d55b, 0xd7d25d88, + 0x44136c76, 0x0404a8c8, 0xb8e5a121, + 0xb81a928a, 0x60ed5869, 0x97c55b96, 0xeaec991b, 0x29935913, + 0x01fdb7f1, 0x088e8dfa, 0x9ab6f6f5, + 0x3b4cbf9f, 0x4a5de3ab, 0xe6051d35, 0xa0e1d855, 0xd36b4cf1, + 0xf544edeb, 0xb0e93524, 0xbebb8fbd, + 0xa2d762cf, 0x49c92f54, 0x38b5f331, 0x7128a454, 0x48392905, + 0xa65b1db8, 0x851c97bd, 0xd675cf2f +}; +static const u32 s7[256] = { + 0x85e04019, 0x332bf567, 0x662dbfff, 0xcfc65693, 0x2a8d7f6f, + 0xab9bc912, 0xde6008a1, 0x2028da1f, + 0x0227bce7, 0x4d642916, 0x18fac300, 0x50f18b82, 0x2cb2cb11, + 0xb232e75c, 0x4b3695f2, 0xb28707de, + 0xa05fbcf6, 0xcd4181e9, 0xe150210c, 0xe24ef1bd, 0xb168c381, + 0xfde4e789, 0x5c79b0d8, 0x1e8bfd43, + 0x4d495001, 0x38be4341, 0x913cee1d, 0x92a79c3f, 0x089766be, + 0xbaeeadf4, 0x1286becf, 0xb6eacb19, + 0x2660c200, 0x7565bde4, 0x64241f7a, 0x8248dca9, 0xc3b3ad66, + 0x28136086, 0x0bd8dfa8, 0x356d1cf2, + 0x107789be, 0xb3b2e9ce, 0x0502aa8f, 0x0bc0351e, 0x166bf52a, + 0xeb12ff82, 0xe3486911, 0xd34d7516, + 0x4e7b3aff, 0x5f43671b, 0x9cf6e037, 0x4981ac83, 0x334266ce, + 0x8c9341b7, 0xd0d854c0, 0xcb3a6c88, + 0x47bc2829, 0x4725ba37, 0xa66ad22b, 0x7ad61f1e, 0x0c5cbafa, + 0x4437f107, 0xb6e79962, 0x42d2d816, + 0x0a961288, 0xe1a5c06e, 0x13749e67, 0x72fc081a, 0xb1d139f7, + 0xf9583745, 0xcf19df58, 0xbec3f756, + 0xc06eba30, 0x07211b24, 0x45c28829, 0xc95e317f, 0xbc8ec511, + 0x38bc46e9, 0xc6e6fa14, 0xbae8584a, + 0xad4ebc46, 0x468f508b, 0x7829435f, 0xf124183b, 0x821dba9f, + 0xaff60ff4, 0xea2c4e6d, 0x16e39264, + 0x92544a8b, 0x009b4fc3, 0xaba68ced, 0x9ac96f78, 0x06a5b79a, + 0xb2856e6e, 0x1aec3ca9, 0xbe838688, + 0x0e0804e9, 0x55f1be56, 0xe7e5363b, 0xb3a1f25d, 0xf7debb85, + 0x61fe033c, 0x16746233, 0x3c034c28, + 0xda6d0c74, 0x79aac56c, 0x3ce4e1ad, 0x51f0c802, 0x98f8f35a, + 0x1626a49f, 0xeed82b29, 0x1d382fe3, + 0x0c4fb99a, 0xbb325778, 0x3ec6d97b, 0x6e77a6a9, 0xcb658b5c, + 0xd45230c7, 0x2bd1408b, 0x60c03eb7, + 0xb9068d78, 0xa33754f4, 0xf430c87d, 0xc8a71302, 0xb96d8c32, + 0xebd4e7be, 0xbe8b9d2d, 0x7979fb06, + 0xe7225308, 0x8b75cf77, 0x11ef8da4, 0xe083c858, 0x8d6b786f, + 0x5a6317a6, 0xfa5cf7a0, 0x5dda0033, + 0xf28ebfb0, 0xf5b9c310, 0xa0eac280, 0x08b9767a, 0xa3d9d2b0, + 0x79d34217, 0x021a718d, 0x9ac6336a, + 0x2711fd60, 0x438050e3, 0x069908a8, 0x3d7fedc4, 0x826d2bef, + 0x4eeb8476, 0x488dcf25, 0x36c9d566, + 0x28e74e41, 0xc2610aca, 0x3d49a9cf, 0xbae3b9df, 0xb65f8de6, + 0x92aeaf64, 0x3ac7d5e6, 0x9ea80509, + 0xf22b017d, 0xa4173f70, 0xdd1e16c3, 0x15e0d7f9, 0x50b1b887, + 0x2b9f4fd5, 0x625aba82, 0x6a017962, + 0x2ec01b9c, 0x15488aa9, 0xd716e740, 0x40055a2c, 0x93d29a22, + 0xe32dbf9a, 0x058745b9, 0x3453dc1e, + 0xd699296e, 0x496cff6f, 0x1c9f4986, 0xdfe2ed07, 0xb87242d1, + 0x19de7eae, 0x053e561a, 0x15ad6f8c, + 0x66626c1c, 0x7154c24c, 0xea082b2a, 0x93eb2939, 0x17dcb0f0, + 0x58d4f2ae, 0x9ea294fb, 0x52cf564c, + 0x9883fe66, 0x2ec40581, 0x763953c3, 0x01d6692e, 0xd3a0c108, + 0xa1e7160e, 0xe4f2dfa6, 0x693ed285, + 0x74904698, 0x4c2b0edd, 0x4f757656, 0x5d393378, 0xa132234f, + 0x3d321c5d, 0xc3f5e194, 0x4b269301, + 0xc79f022f, 0x3c997e7e, 0x5e4f9504, 0x3ffafbbd, 0x76f7ad0e, + 0x296693f4, 0x3d1fce6f, 0xc61e45be, + 0xd3b5ab34, 0xf72bf9b7, 0x1b0434c0, 0x4e72b567, 0x5592a33d, + 0xb5229301, 0xcfd2a87f, 0x60aeb767, + 0x1814386b, 0x30bcc33d, 0x38a0c07d, 0xfd1606f2, 0xc363519b, + 0x589dd390, 0x5479f8e6, 0x1cb8d647, + 0x97fd61a9, 0xea7759f4, 0x2d57539d, 0x569a58cf, 0xe84e63ad, + 0x462e1b78, 0x6580f87e, 0xf3817914, + 0x91da55f4, 0x40a230f3, 0xd1988f35, 0xb6e318d2, 0x3ffa50bc, + 0x3d40f021, 0xc3c0bdae, 0x4958c24c, + 0x518f36b2, 0x84b1d370, 0x0fedce83, 
0x878ddada, 0xf2a279c7, + 0x94e01be8, 0x90716f4b, 0x954b8aa3 +}; +static const u32 sb8[256] = { + 0xe216300d, 0xbbddfffc, 0xa7ebdabd, 0x35648095, 0x7789f8b7, + 0xe6c1121b, 0x0e241600, 0x052ce8b5, + 0x11a9cfb0, 0xe5952f11, 0xece7990a, 0x9386d174, 0x2a42931c, + 0x76e38111, 0xb12def3a, 0x37ddddfc, + 0xde9adeb1, 0x0a0cc32c, 0xbe197029, 0x84a00940, 0xbb243a0f, + 0xb4d137cf, 0xb44e79f0, 0x049eedfd, + 0x0b15a15d, 0x480d3168, 0x8bbbde5a, 0x669ded42, 0xc7ece831, + 0x3f8f95e7, 0x72df191b, 0x7580330d, + 0x94074251, 0x5c7dcdfa, 0xabbe6d63, 0xaa402164, 0xb301d40a, + 0x02e7d1ca, 0x53571dae, 0x7a3182a2, + 0x12a8ddec, 0xfdaa335d, 0x176f43e8, 0x71fb46d4, 0x38129022, + 0xce949ad4, 0xb84769ad, 0x965bd862, + 0x82f3d055, 0x66fb9767, 0x15b80b4e, 0x1d5b47a0, 0x4cfde06f, + 0xc28ec4b8, 0x57e8726e, 0x647a78fc, + 0x99865d44, 0x608bd593, 0x6c200e03, 0x39dc5ff6, 0x5d0b00a3, + 0xae63aff2, 0x7e8bd632, 0x70108c0c, + 0xbbd35049, 0x2998df04, 0x980cf42a, 0x9b6df491, 0x9e7edd53, + 0x06918548, 0x58cb7e07, 0x3b74ef2e, + 0x522fffb1, 0xd24708cc, 0x1c7e27cd, 0xa4eb215b, 0x3cf1d2e2, + 0x19b47a38, 0x424f7618, 0x35856039, + 0x9d17dee7, 0x27eb35e6, 0xc9aff67b, 0x36baf5b8, 0x09c467cd, + 0xc18910b1, 0xe11dbf7b, 0x06cd1af8, + 0x7170c608, 0x2d5e3354, 0xd4de495a, 0x64c6d006, 0xbcc0c62c, + 0x3dd00db3, 0x708f8f34, 0x77d51b42, + 0x264f620f, 0x24b8d2bf, 0x15c1b79e, 0x46a52564, 0xf8d7e54e, + 0x3e378160, 0x7895cda5, 0x859c15a5, + 0xe6459788, 0xc37bc75f, 0xdb07ba0c, 0x0676a3ab, 0x7f229b1e, + 0x31842e7b, 0x24259fd7, 0xf8bef472, + 0x835ffcb8, 0x6df4c1f2, 0x96f5b195, 0xfd0af0fc, 0xb0fe134c, + 0xe2506d3d, 0x4f9b12ea, 0xf215f225, + 0xa223736f, 0x9fb4c428, 0x25d04979, 0x34c713f8, 0xc4618187, + 0xea7a6e98, 0x7cd16efc, 0x1436876c, + 0xf1544107, 0xbedeee14, 0x56e9af27, 0xa04aa441, 0x3cf7c899, + 0x92ecbae6, 0xdd67016d, 0x151682eb, + 0xa842eedf, 0xfdba60b4, 0xf1907b75, 0x20e3030f, 0x24d8c29e, + 0xe139673b, 0xefa63fb8, 0x71873054, + 0xb6f2cf3b, 0x9f326442, 0xcb15a4cc, 0xb01a4504, 0xf1e47d8d, + 0x844a1be5, 0xbae7dfdc, 0x42cbda70, + 0xcd7dae0a, 0x57e85b7a, 0xd53f5af6, 0x20cf4d8c, 0xcea4d428, + 0x79d130a4, 0x3486ebfb, 0x33d3cddc, + 0x77853b53, 0x37effcb5, 0xc5068778, 0xe580b3e6, 0x4e68b8f4, + 0xc5c8b37e, 0x0d809ea2, 0x398feb7c, + 0x132a4f94, 0x43b7950e, 0x2fee7d1c, 0x223613bd, 0xdd06caa2, + 0x37df932b, 0xc4248289, 0xacf3ebc3, + 0x5715f6b7, 0xef3478dd, 0xf267616f, 0xc148cbe4, 0x9052815e, + 0x5e410fab, 0xb48a2465, 0x2eda7fa4, + 0xe87b40e4, 0xe98ea084, 0x5889e9e1, 0xefd390fc, 0xdd07d35b, + 0xdb485694, 0x38d7e5b2, 0x57720101, + 0x730edebc, 0x5b643113, 0x94917e4f, 0x503c2fba, 0x646f1282, + 0x7523d24a, 0xe0779695, 0xf9c17a8f, + 0x7a5b2121, 0xd187b896, 0x29263a4d, 0xba510cdf, 0x81f47c9f, + 0xad1163ed, 0xea7b5965, 0x1a00726e, + 0x11403092, 0x00da6d77, 0x4a0cdd61, 0xad1f4603, 0x605bdfb0, + 0x9eedc364, 0x22ebe6a8, 0xcee7d28a, + 0xa0e736a0, 0x5564a6b9, 0x10853209, 0xc7eb8f37, 0x2de705ca, + 0x8951570f, 0xdf09822b, 0xbd691a6c, + 0xaa12e4f2, 0x87451c0f, 0xe0f6a27a, 0x3ada4819, 0x4cf1764f, + 0x0d771c2b, 0x67cdb156, 0x350d8384, + 0x5938fa0f, 0x42399ef3, 0x36997b07, 0x0e84093d, 0x4aa93e61, + 0x8360d87b, 0x1fa98b0c, 0x1149382c, + 0xe97625a5, 0x0614d1b7, 0x0e25244b, 0x0c768347, 0x589e8d82, + 0x0d2059d1, 0xa466bb1e, 0xf8da0a82, + 0x04f19130, 0xba6e4ec0, 0x99265164, 0x1ee7230d, 0x50b2ad80, + 0xeaee6801, 0x8db2a283, 0xea8bf59e +}; + +#define F1(D, m, r) ((I = ((m) + (D))), (I = rol32(I, (r))), \ + (((s1[I >> 24] ^ s2[(I>>16)&0xff]) - s3[(I>>8)&0xff]) + s4[I&0xff])) +#define F2(D, m, r) ((I = ((m) ^ (D))), (I = rol32(I, (r))), \ + (((s1[I >> 24] - s2[(I>>16)&0xff]) + 
s3[(I>>8)&0xff]) ^ s4[I&0xff])) +#define F3(D, m, r) ((I = ((m) - (D))), (I = rol32(I, (r))), \ + (((s1[I >> 24] + s2[(I>>16)&0xff]) ^ s3[(I>>8)&0xff]) - s4[I&0xff])) + + +static void cast5_encrypt(struct crypto_tfm *tfm, u8 *outbuf, const u8 *inbuf) +{ + struct cast5_ctx *c = crypto_tfm_ctx(tfm); + const __be32 *src = (const __be32 *)inbuf; + __be32 *dst = (__be32 *)outbuf; + u32 l, r, t; + u32 I; /* used by the Fx macros */ + u32 *Km; + u8 *Kr; + + Km = c->Km; + Kr = c->Kr; + + /* (L0,R0) <-- (m1...m64). (Split the plaintext into left and + * right 32-bit halves L0 = m1...m32 and R0 = m33...m64.) + */ + l = be32_to_cpu(src[0]); + r = be32_to_cpu(src[1]); + + /* (16 rounds) for i from 1 to 16, compute Li and Ri as follows: + * Li = Ri-1; + * Ri = Li-1 ^ f(Ri-1,Kmi,Kri), where f is defined in Section 2.2 + * Rounds 1, 4, 7, 10, 13, and 16 use f function Type 1. + * Rounds 2, 5, 8, 11, and 14 use f function Type 2. + * Rounds 3, 6, 9, 12, and 15 use f function Type 3. + */ + + t = l; l = r; r = t ^ F1(r, Km[0], Kr[0]); + t = l; l = r; r = t ^ F2(r, Km[1], Kr[1]); + t = l; l = r; r = t ^ F3(r, Km[2], Kr[2]); + t = l; l = r; r = t ^ F1(r, Km[3], Kr[3]); + t = l; l = r; r = t ^ F2(r, Km[4], Kr[4]); + t = l; l = r; r = t ^ F3(r, Km[5], Kr[5]); + t = l; l = r; r = t ^ F1(r, Km[6], Kr[6]); + t = l; l = r; r = t ^ F2(r, Km[7], Kr[7]); + t = l; l = r; r = t ^ F3(r, Km[8], Kr[8]); + t = l; l = r; r = t ^ F1(r, Km[9], Kr[9]); + t = l; l = r; r = t ^ F2(r, Km[10], Kr[10]); + t = l; l = r; r = t ^ F3(r, Km[11], Kr[11]); + if (!(c->rr)) { + t = l; l = r; r = t ^ F1(r, Km[12], Kr[12]); + t = l; l = r; r = t ^ F2(r, Km[13], Kr[13]); + t = l; l = r; r = t ^ F3(r, Km[14], Kr[14]); + t = l; l = r; r = t ^ F1(r, Km[15], Kr[15]); + } + + /* c1...c64 <-- (R16,L16). (Exchange final blocks L16, R16 and + * concatenate to form the ciphertext.) 
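+ * (After the rounds above, l and r already hold L16 and R16, so the final
+ * exchange is done simply by writing r out before l below.)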
*/ + dst[0] = cpu_to_be32(r); + dst[1] = cpu_to_be32(l); +} + +static void cast5_decrypt(struct crypto_tfm *tfm, u8 *outbuf, const u8 *inbuf) +{ + struct cast5_ctx *c = crypto_tfm_ctx(tfm); + const __be32 *src = (const __be32 *)inbuf; + __be32 *dst = (__be32 *)outbuf; + u32 l, r, t; + u32 I; + u32 *Km; + u8 *Kr; + + Km = c->Km; + Kr = c->Kr; + + l = be32_to_cpu(src[0]); + r = be32_to_cpu(src[1]); + + if (!(c->rr)) { + t = l; l = r; r = t ^ F1(r, Km[15], Kr[15]); + t = l; l = r; r = t ^ F3(r, Km[14], Kr[14]); + t = l; l = r; r = t ^ F2(r, Km[13], Kr[13]); + t = l; l = r; r = t ^ F1(r, Km[12], Kr[12]); + } + t = l; l = r; r = t ^ F3(r, Km[11], Kr[11]); + t = l; l = r; r = t ^ F2(r, Km[10], Kr[10]); + t = l; l = r; r = t ^ F1(r, Km[9], Kr[9]); + t = l; l = r; r = t ^ F3(r, Km[8], Kr[8]); + t = l; l = r; r = t ^ F2(r, Km[7], Kr[7]); + t = l; l = r; r = t ^ F1(r, Km[6], Kr[6]); + t = l; l = r; r = t ^ F3(r, Km[5], Kr[5]); + t = l; l = r; r = t ^ F2(r, Km[4], Kr[4]); + t = l; l = r; r = t ^ F1(r, Km[3], Kr[3]); + t = l; l = r; r = t ^ F3(r, Km[2], Kr[2]); + t = l; l = r; r = t ^ F2(r, Km[1], Kr[1]); + t = l; l = r; r = t ^ F1(r, Km[0], Kr[0]); + + dst[0] = cpu_to_be32(r); + dst[1] = cpu_to_be32(l); +} + +static void key_schedule(u32 *x, u32 *z, u32 *k) +{ + +#define xi(i) ((x[(i)/4] >> (8*(3-((i)%4)))) & 0xff) +#define zi(i) ((z[(i)/4] >> (8*(3-((i)%4)))) & 0xff) + + z[0] = x[0] ^ s5[xi(13)] ^ s6[xi(15)] ^ s7[xi(12)] ^ sb8[xi(14)] ^ + s7[xi(8)]; + z[1] = x[2] ^ s5[zi(0)] ^ s6[zi(2)] ^ s7[zi(1)] ^ sb8[zi(3)] ^ + sb8[xi(10)]; + z[2] = x[3] ^ s5[zi(7)] ^ s6[zi(6)] ^ s7[zi(5)] ^ sb8[zi(4)] ^ + s5[xi(9)]; + z[3] = x[1] ^ s5[zi(10)] ^ s6[zi(9)] ^ s7[zi(11)] ^ sb8[zi(8)] ^ + s6[xi(11)]; + k[0] = s5[zi(8)] ^ s6[zi(9)] ^ s7[zi(7)] ^ sb8[zi(6)] ^ s5[zi(2)]; + k[1] = s5[zi(10)] ^ s6[zi(11)] ^ s7[zi(5)] ^ sb8[zi(4)] ^ + s6[zi(6)]; + k[2] = s5[zi(12)] ^ s6[zi(13)] ^ s7[zi(3)] ^ sb8[zi(2)] ^ + s7[zi(9)]; + k[3] = s5[zi(14)] ^ s6[zi(15)] ^ s7[zi(1)] ^ sb8[zi(0)] ^ + sb8[zi(12)]; + + x[0] = z[2] ^ s5[zi(5)] ^ s6[zi(7)] ^ s7[zi(4)] ^ sb8[zi(6)] ^ + s7[zi(0)]; + x[1] = z[0] ^ s5[xi(0)] ^ s6[xi(2)] ^ s7[xi(1)] ^ sb8[xi(3)] ^ + sb8[zi(2)]; + x[2] = z[1] ^ s5[xi(7)] ^ s6[xi(6)] ^ s7[xi(5)] ^ sb8[xi(4)] ^ + s5[zi(1)]; + x[3] = z[3] ^ s5[xi(10)] ^ s6[xi(9)] ^ s7[xi(11)] ^ sb8[xi(8)] ^ + s6[zi(3)]; + k[4] = s5[xi(3)] ^ s6[xi(2)] ^ s7[xi(12)] ^ sb8[xi(13)] ^ + s5[xi(8)]; + k[5] = s5[xi(1)] ^ s6[xi(0)] ^ s7[xi(14)] ^ sb8[xi(15)] ^ + s6[xi(13)]; + k[6] = s5[xi(7)] ^ s6[xi(6)] ^ s7[xi(8)] ^ sb8[xi(9)] ^ s7[xi(3)]; + k[7] = s5[xi(5)] ^ s6[xi(4)] ^ s7[xi(10)] ^ sb8[xi(11)] ^ + sb8[xi(7)]; + + z[0] = x[0] ^ s5[xi(13)] ^ s6[xi(15)] ^ s7[xi(12)] ^ sb8[xi(14)] ^ + s7[xi(8)]; + z[1] = x[2] ^ s5[zi(0)] ^ s6[zi(2)] ^ s7[zi(1)] ^ sb8[zi(3)] ^ + sb8[xi(10)]; + z[2] = x[3] ^ s5[zi(7)] ^ s6[zi(6)] ^ s7[zi(5)] ^ sb8[zi(4)] ^ + s5[xi(9)]; + z[3] = x[1] ^ s5[zi(10)] ^ s6[zi(9)] ^ s7[zi(11)] ^ sb8[zi(8)] ^ + s6[xi(11)]; + k[8] = s5[zi(3)] ^ s6[zi(2)] ^ s7[zi(12)] ^ sb8[zi(13)] ^ + s5[zi(9)]; + k[9] = s5[zi(1)] ^ s6[zi(0)] ^ s7[zi(14)] ^ sb8[zi(15)] ^ + s6[zi(12)]; + k[10] = s5[zi(7)] ^ s6[zi(6)] ^ s7[zi(8)] ^ sb8[zi(9)] ^ s7[zi(2)]; + k[11] = s5[zi(5)] ^ s6[zi(4)] ^ s7[zi(10)] ^ sb8[zi(11)] ^ + sb8[zi(6)]; + + x[0] = z[2] ^ s5[zi(5)] ^ s6[zi(7)] ^ s7[zi(4)] ^ sb8[zi(6)] ^ + s7[zi(0)]; + x[1] = z[0] ^ s5[xi(0)] ^ s6[xi(2)] ^ s7[xi(1)] ^ sb8[xi(3)] ^ + sb8[zi(2)]; + x[2] = z[1] ^ s5[xi(7)] ^ s6[xi(6)] ^ s7[xi(5)] ^ sb8[xi(4)] ^ + s5[zi(1)]; + x[3] = z[3] ^ s5[xi(10)] ^ s6[xi(9)] ^ s7[xi(11)] ^ sb8[xi(8)] ^ + s6[zi(3)]; + k[12] = s5[xi(8)] ^ 
s6[xi(9)] ^ s7[xi(7)] ^ sb8[xi(6)] ^ s5[xi(3)]; + k[13] = s5[xi(10)] ^ s6[xi(11)] ^ s7[xi(5)] ^ sb8[xi(4)] ^ + s6[xi(7)]; + k[14] = s5[xi(12)] ^ s6[xi(13)] ^ s7[xi(3)] ^ sb8[xi(2)] ^ + s7[xi(8)]; + k[15] = s5[xi(14)] ^ s6[xi(15)] ^ s7[xi(1)] ^ sb8[xi(0)] ^ + sb8[xi(13)]; + +#undef xi +#undef zi +} + + +static int cast5_setkey(struct crypto_tfm *tfm, const u8 *key, unsigned key_len) +{ + struct cast5_ctx *c = crypto_tfm_ctx(tfm); + int i; + u32 x[4]; + u32 z[4]; + u32 k[16]; + __be32 p_key[4]; + + c->rr = key_len <= 10 ? 1 : 0; + + memset(p_key, 0, 16); + memcpy(p_key, key, key_len); + + + x[0] = be32_to_cpu(p_key[0]); + x[1] = be32_to_cpu(p_key[1]); + x[2] = be32_to_cpu(p_key[2]); + x[3] = be32_to_cpu(p_key[3]); + + key_schedule(x, z, k); + for (i = 0; i < 16; i++) + c->Km[i] = k[i]; + key_schedule(x, z, k); + for (i = 0; i < 16; i++) + c->Kr[i] = k[i] & 0x1f; + return 0; +} + +static struct crypto_alg alg = { + .cra_name = "cast5", + .cra_flags = CRYPTO_ALG_TYPE_CIPHER, + .cra_blocksize = CAST5_BLOCK_SIZE, + .cra_ctxsize = sizeof(struct cast5_ctx), + .cra_alignmask = 3, + .cra_module = THIS_MODULE, + .cra_list = LIST_HEAD_INIT(alg.cra_list), + .cra_u = { + .cipher = { + .cia_min_keysize = CAST5_MIN_KEY_SIZE, + .cia_max_keysize = CAST5_MAX_KEY_SIZE, + .cia_setkey = cast5_setkey, + .cia_encrypt = cast5_encrypt, + .cia_decrypt = cast5_decrypt + } + } +}; + +static int __init cast5_mod_init(void) +{ + return crypto_register_alg(&alg); +} + +static void __exit cast5_mod_fini(void) +{ + crypto_unregister_alg(&alg); +} + +module_init(cast5_mod_init); +module_exit(cast5_mod_fini); + +MODULE_LICENSE("GPL"); +MODULE_DESCRIPTION("Cast5 Cipher Algorithm"); + diff --git a/crypto/cast6.c b/crypto/cast6.c new file mode 100644 index 00000000..e0c15a6c --- /dev/null +++ b/crypto/cast6.c @@ -0,0 +1,547 @@ +/* Kernel cryptographic api. + * cast6.c - Cast6 cipher algorithm [rfc2612]. + * + * CAST-256 (*cast6*) is a DES like Substitution-Permutation Network (SPN) + * cryptosystem built upon the CAST-128 (*cast5*) [rfc2144] encryption + * algorithm. + * + * Copyright (C) 2003 Kartikey Mahendra Bhatt <kartik_me@hotmail.com>. + * + * This program is free software; you can redistribute it and/or modify it + * under the terms of GNU General Public License as published by the Free + * Software Foundation; either version 2 of the License, or (at your option) + * any later version. 
+ * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA + */ + + +#include <asm/byteorder.h> +#include <linux/init.h> +#include <linux/crypto.h> +#include <linux/module.h> +#include <linux/errno.h> +#include <linux/string.h> +#include <linux/types.h> + +#define CAST6_BLOCK_SIZE 16 +#define CAST6_MIN_KEY_SIZE 16 +#define CAST6_MAX_KEY_SIZE 32 + +struct cast6_ctx { + u32 Km[12][4]; + u8 Kr[12][4]; +}; + +#define F1(D, r, m) ((I = ((m) + (D))), (I = rol32(I, (r))), \ + (((s1[I >> 24] ^ s2[(I>>16)&0xff]) - s3[(I>>8)&0xff]) + s4[I&0xff])) +#define F2(D, r, m) ((I = ((m) ^ (D))), (I = rol32(I, (r))), \ + (((s1[I >> 24] - s2[(I>>16)&0xff]) + s3[(I>>8)&0xff]) ^ s4[I&0xff])) +#define F3(D, r, m) ((I = ((m) - (D))), (I = rol32(I, (r))), \ + (((s1[I >> 24] + s2[(I>>16)&0xff]) ^ s3[(I>>8)&0xff]) - s4[I&0xff])) + +static const u32 s1[256] = { + 0x30fb40d4, 0x9fa0ff0b, 0x6beccd2f, 0x3f258c7a, 0x1e213f2f, + 0x9c004dd3, 0x6003e540, 0xcf9fc949, + 0xbfd4af27, 0x88bbbdb5, 0xe2034090, 0x98d09675, 0x6e63a0e0, + 0x15c361d2, 0xc2e7661d, 0x22d4ff8e, + 0x28683b6f, 0xc07fd059, 0xff2379c8, 0x775f50e2, 0x43c340d3, + 0xdf2f8656, 0x887ca41a, 0xa2d2bd2d, + 0xa1c9e0d6, 0x346c4819, 0x61b76d87, 0x22540f2f, 0x2abe32e1, + 0xaa54166b, 0x22568e3a, 0xa2d341d0, + 0x66db40c8, 0xa784392f, 0x004dff2f, 0x2db9d2de, 0x97943fac, + 0x4a97c1d8, 0x527644b7, 0xb5f437a7, + 0xb82cbaef, 0xd751d159, 0x6ff7f0ed, 0x5a097a1f, 0x827b68d0, + 0x90ecf52e, 0x22b0c054, 0xbc8e5935, + 0x4b6d2f7f, 0x50bb64a2, 0xd2664910, 0xbee5812d, 0xb7332290, + 0xe93b159f, 0xb48ee411, 0x4bff345d, + 0xfd45c240, 0xad31973f, 0xc4f6d02e, 0x55fc8165, 0xd5b1caad, + 0xa1ac2dae, 0xa2d4b76d, 0xc19b0c50, + 0x882240f2, 0x0c6e4f38, 0xa4e4bfd7, 0x4f5ba272, 0x564c1d2f, + 0xc59c5319, 0xb949e354, 0xb04669fe, + 0xb1b6ab8a, 0xc71358dd, 0x6385c545, 0x110f935d, 0x57538ad5, + 0x6a390493, 0xe63d37e0, 0x2a54f6b3, + 0x3a787d5f, 0x6276a0b5, 0x19a6fcdf, 0x7a42206a, 0x29f9d4d5, + 0xf61b1891, 0xbb72275e, 0xaa508167, + 0x38901091, 0xc6b505eb, 0x84c7cb8c, 0x2ad75a0f, 0x874a1427, + 0xa2d1936b, 0x2ad286af, 0xaa56d291, + 0xd7894360, 0x425c750d, 0x93b39e26, 0x187184c9, 0x6c00b32d, + 0x73e2bb14, 0xa0bebc3c, 0x54623779, + 0x64459eab, 0x3f328b82, 0x7718cf82, 0x59a2cea6, 0x04ee002e, + 0x89fe78e6, 0x3fab0950, 0x325ff6c2, + 0x81383f05, 0x6963c5c8, 0x76cb5ad6, 0xd49974c9, 0xca180dcf, + 0x380782d5, 0xc7fa5cf6, 0x8ac31511, + 0x35e79e13, 0x47da91d0, 0xf40f9086, 0xa7e2419e, 0x31366241, + 0x051ef495, 0xaa573b04, 0x4a805d8d, + 0x548300d0, 0x00322a3c, 0xbf64cddf, 0xba57a68e, 0x75c6372b, + 0x50afd341, 0xa7c13275, 0x915a0bf5, + 0x6b54bfab, 0x2b0b1426, 0xab4cc9d7, 0x449ccd82, 0xf7fbf265, + 0xab85c5f3, 0x1b55db94, 0xaad4e324, + 0xcfa4bd3f, 0x2deaa3e2, 0x9e204d02, 0xc8bd25ac, 0xeadf55b3, + 0xd5bd9e98, 0xe31231b2, 0x2ad5ad6c, + 0x954329de, 0xadbe4528, 0xd8710f69, 0xaa51c90f, 0xaa786bf6, + 0x22513f1e, 0xaa51a79b, 0x2ad344cc, + 0x7b5a41f0, 0xd37cfbad, 0x1b069505, 0x41ece491, 0xb4c332e6, + 0x032268d4, 0xc9600acc, 0xce387e6d, + 0xbf6bb16c, 0x6a70fb78, 0x0d03d9c9, 0xd4df39de, 0xe01063da, + 0x4736f464, 0x5ad328d8, 0xb347cc96, + 0x75bb0fc3, 0x98511bfb, 0x4ffbcc35, 0xb58bcf6a, 0xe11f0abc, + 0xbfc5fe4a, 0xa70aec10, 0xac39570a, + 0x3f04442f, 0x6188b153, 0xe0397a2e, 0x5727cb79, 0x9ceb418f, + 0x1cacd68d, 0x2ad37c96, 0x0175cb9d, + 0xc69dff09, 0xc75b65f0, 0xd9db40d8, 0xec0e7779, 0x4744ead4, + 0xb11c3274, 0xdd24cb9e, 0x7e1c54bd, + 0xf01144f9, 0xd2240eb1, 0x9675b3fd, 0xa3ac3755, 
0xd47c27af, + 0x51c85f4d, 0x56907596, 0xa5bb15e6, + 0x580304f0, 0xca042cf1, 0x011a37ea, 0x8dbfaadb, 0x35ba3e4a, + 0x3526ffa0, 0xc37b4d09, 0xbc306ed9, + 0x98a52666, 0x5648f725, 0xff5e569d, 0x0ced63d0, 0x7c63b2cf, + 0x700b45e1, 0xd5ea50f1, 0x85a92872, + 0xaf1fbda7, 0xd4234870, 0xa7870bf3, 0x2d3b4d79, 0x42e04198, + 0x0cd0ede7, 0x26470db8, 0xf881814c, + 0x474d6ad7, 0x7c0c5e5c, 0xd1231959, 0x381b7298, 0xf5d2f4db, + 0xab838653, 0x6e2f1e23, 0x83719c9e, + 0xbd91e046, 0x9a56456e, 0xdc39200c, 0x20c8c571, 0x962bda1c, + 0xe1e696ff, 0xb141ab08, 0x7cca89b9, + 0x1a69e783, 0x02cc4843, 0xa2f7c579, 0x429ef47d, 0x427b169c, + 0x5ac9f049, 0xdd8f0f00, 0x5c8165bf +}; + +static const u32 s2[256] = { + 0x1f201094, 0xef0ba75b, 0x69e3cf7e, 0x393f4380, 0xfe61cf7a, + 0xeec5207a, 0x55889c94, 0x72fc0651, + 0xada7ef79, 0x4e1d7235, 0xd55a63ce, 0xde0436ba, 0x99c430ef, + 0x5f0c0794, 0x18dcdb7d, 0xa1d6eff3, + 0xa0b52f7b, 0x59e83605, 0xee15b094, 0xe9ffd909, 0xdc440086, + 0xef944459, 0xba83ccb3, 0xe0c3cdfb, + 0xd1da4181, 0x3b092ab1, 0xf997f1c1, 0xa5e6cf7b, 0x01420ddb, + 0xe4e7ef5b, 0x25a1ff41, 0xe180f806, + 0x1fc41080, 0x179bee7a, 0xd37ac6a9, 0xfe5830a4, 0x98de8b7f, + 0x77e83f4e, 0x79929269, 0x24fa9f7b, + 0xe113c85b, 0xacc40083, 0xd7503525, 0xf7ea615f, 0x62143154, + 0x0d554b63, 0x5d681121, 0xc866c359, + 0x3d63cf73, 0xcee234c0, 0xd4d87e87, 0x5c672b21, 0x071f6181, + 0x39f7627f, 0x361e3084, 0xe4eb573b, + 0x602f64a4, 0xd63acd9c, 0x1bbc4635, 0x9e81032d, 0x2701f50c, + 0x99847ab4, 0xa0e3df79, 0xba6cf38c, + 0x10843094, 0x2537a95e, 0xf46f6ffe, 0xa1ff3b1f, 0x208cfb6a, + 0x8f458c74, 0xd9e0a227, 0x4ec73a34, + 0xfc884f69, 0x3e4de8df, 0xef0e0088, 0x3559648d, 0x8a45388c, + 0x1d804366, 0x721d9bfd, 0xa58684bb, + 0xe8256333, 0x844e8212, 0x128d8098, 0xfed33fb4, 0xce280ae1, + 0x27e19ba5, 0xd5a6c252, 0xe49754bd, + 0xc5d655dd, 0xeb667064, 0x77840b4d, 0xa1b6a801, 0x84db26a9, + 0xe0b56714, 0x21f043b7, 0xe5d05860, + 0x54f03084, 0x066ff472, 0xa31aa153, 0xdadc4755, 0xb5625dbf, + 0x68561be6, 0x83ca6b94, 0x2d6ed23b, + 0xeccf01db, 0xa6d3d0ba, 0xb6803d5c, 0xaf77a709, 0x33b4a34c, + 0x397bc8d6, 0x5ee22b95, 0x5f0e5304, + 0x81ed6f61, 0x20e74364, 0xb45e1378, 0xde18639b, 0x881ca122, + 0xb96726d1, 0x8049a7e8, 0x22b7da7b, + 0x5e552d25, 0x5272d237, 0x79d2951c, 0xc60d894c, 0x488cb402, + 0x1ba4fe5b, 0xa4b09f6b, 0x1ca815cf, + 0xa20c3005, 0x8871df63, 0xb9de2fcb, 0x0cc6c9e9, 0x0beeff53, + 0xe3214517, 0xb4542835, 0x9f63293c, + 0xee41e729, 0x6e1d2d7c, 0x50045286, 0x1e6685f3, 0xf33401c6, + 0x30a22c95, 0x31a70850, 0x60930f13, + 0x73f98417, 0xa1269859, 0xec645c44, 0x52c877a9, 0xcdff33a6, + 0xa02b1741, 0x7cbad9a2, 0x2180036f, + 0x50d99c08, 0xcb3f4861, 0xc26bd765, 0x64a3f6ab, 0x80342676, + 0x25a75e7b, 0xe4e6d1fc, 0x20c710e6, + 0xcdf0b680, 0x17844d3b, 0x31eef84d, 0x7e0824e4, 0x2ccb49eb, + 0x846a3bae, 0x8ff77888, 0xee5d60f6, + 0x7af75673, 0x2fdd5cdb, 0xa11631c1, 0x30f66f43, 0xb3faec54, + 0x157fd7fa, 0xef8579cc, 0xd152de58, + 0xdb2ffd5e, 0x8f32ce19, 0x306af97a, 0x02f03ef8, 0x99319ad5, + 0xc242fa0f, 0xa7e3ebb0, 0xc68e4906, + 0xb8da230c, 0x80823028, 0xdcdef3c8, 0xd35fb171, 0x088a1bc8, + 0xbec0c560, 0x61a3c9e8, 0xbca8f54d, + 0xc72feffa, 0x22822e99, 0x82c570b4, 0xd8d94e89, 0x8b1c34bc, + 0x301e16e6, 0x273be979, 0xb0ffeaa6, + 0x61d9b8c6, 0x00b24869, 0xb7ffce3f, 0x08dc283b, 0x43daf65a, + 0xf7e19798, 0x7619b72f, 0x8f1c9ba4, + 0xdc8637a0, 0x16a7d3b1, 0x9fc393b7, 0xa7136eeb, 0xc6bcc63e, + 0x1a513742, 0xef6828bc, 0x520365d6, + 0x2d6a77ab, 0x3527ed4b, 0x821fd216, 0x095c6e2e, 0xdb92f2fb, + 0x5eea29cb, 0x145892f5, 0x91584f7f, + 0x5483697b, 0x2667a8cc, 0x85196048, 0x8c4bacea, 0x833860d4, + 
0x0d23e0f9, 0x6c387e8a, 0x0ae6d249, + 0xb284600c, 0xd835731d, 0xdcb1c647, 0xac4c56ea, 0x3ebd81b3, + 0x230eabb0, 0x6438bc87, 0xf0b5b1fa, + 0x8f5ea2b3, 0xfc184642, 0x0a036b7a, 0x4fb089bd, 0x649da589, + 0xa345415e, 0x5c038323, 0x3e5d3bb9, + 0x43d79572, 0x7e6dd07c, 0x06dfdf1e, 0x6c6cc4ef, 0x7160a539, + 0x73bfbe70, 0x83877605, 0x4523ecf1 +}; + +static const u32 s3[256] = { + 0x8defc240, 0x25fa5d9f, 0xeb903dbf, 0xe810c907, 0x47607fff, + 0x369fe44b, 0x8c1fc644, 0xaececa90, + 0xbeb1f9bf, 0xeefbcaea, 0xe8cf1950, 0x51df07ae, 0x920e8806, + 0xf0ad0548, 0xe13c8d83, 0x927010d5, + 0x11107d9f, 0x07647db9, 0xb2e3e4d4, 0x3d4f285e, 0xb9afa820, + 0xfade82e0, 0xa067268b, 0x8272792e, + 0x553fb2c0, 0x489ae22b, 0xd4ef9794, 0x125e3fbc, 0x21fffcee, + 0x825b1bfd, 0x9255c5ed, 0x1257a240, + 0x4e1a8302, 0xbae07fff, 0x528246e7, 0x8e57140e, 0x3373f7bf, + 0x8c9f8188, 0xa6fc4ee8, 0xc982b5a5, + 0xa8c01db7, 0x579fc264, 0x67094f31, 0xf2bd3f5f, 0x40fff7c1, + 0x1fb78dfc, 0x8e6bd2c1, 0x437be59b, + 0x99b03dbf, 0xb5dbc64b, 0x638dc0e6, 0x55819d99, 0xa197c81c, + 0x4a012d6e, 0xc5884a28, 0xccc36f71, + 0xb843c213, 0x6c0743f1, 0x8309893c, 0x0feddd5f, 0x2f7fe850, + 0xd7c07f7e, 0x02507fbf, 0x5afb9a04, + 0xa747d2d0, 0x1651192e, 0xaf70bf3e, 0x58c31380, 0x5f98302e, + 0x727cc3c4, 0x0a0fb402, 0x0f7fef82, + 0x8c96fdad, 0x5d2c2aae, 0x8ee99a49, 0x50da88b8, 0x8427f4a0, + 0x1eac5790, 0x796fb449, 0x8252dc15, + 0xefbd7d9b, 0xa672597d, 0xada840d8, 0x45f54504, 0xfa5d7403, + 0xe83ec305, 0x4f91751a, 0x925669c2, + 0x23efe941, 0xa903f12e, 0x60270df2, 0x0276e4b6, 0x94fd6574, + 0x927985b2, 0x8276dbcb, 0x02778176, + 0xf8af918d, 0x4e48f79e, 0x8f616ddf, 0xe29d840e, 0x842f7d83, + 0x340ce5c8, 0x96bbb682, 0x93b4b148, + 0xef303cab, 0x984faf28, 0x779faf9b, 0x92dc560d, 0x224d1e20, + 0x8437aa88, 0x7d29dc96, 0x2756d3dc, + 0x8b907cee, 0xb51fd240, 0xe7c07ce3, 0xe566b4a1, 0xc3e9615e, + 0x3cf8209d, 0x6094d1e3, 0xcd9ca341, + 0x5c76460e, 0x00ea983b, 0xd4d67881, 0xfd47572c, 0xf76cedd9, + 0xbda8229c, 0x127dadaa, 0x438a074e, + 0x1f97c090, 0x081bdb8a, 0x93a07ebe, 0xb938ca15, 0x97b03cff, + 0x3dc2c0f8, 0x8d1ab2ec, 0x64380e51, + 0x68cc7bfb, 0xd90f2788, 0x12490181, 0x5de5ffd4, 0xdd7ef86a, + 0x76a2e214, 0xb9a40368, 0x925d958f, + 0x4b39fffa, 0xba39aee9, 0xa4ffd30b, 0xfaf7933b, 0x6d498623, + 0x193cbcfa, 0x27627545, 0x825cf47a, + 0x61bd8ba0, 0xd11e42d1, 0xcead04f4, 0x127ea392, 0x10428db7, + 0x8272a972, 0x9270c4a8, 0x127de50b, + 0x285ba1c8, 0x3c62f44f, 0x35c0eaa5, 0xe805d231, 0x428929fb, + 0xb4fcdf82, 0x4fb66a53, 0x0e7dc15b, + 0x1f081fab, 0x108618ae, 0xfcfd086d, 0xf9ff2889, 0x694bcc11, + 0x236a5cae, 0x12deca4d, 0x2c3f8cc5, + 0xd2d02dfe, 0xf8ef5896, 0xe4cf52da, 0x95155b67, 0x494a488c, + 0xb9b6a80c, 0x5c8f82bc, 0x89d36b45, + 0x3a609437, 0xec00c9a9, 0x44715253, 0x0a874b49, 0xd773bc40, + 0x7c34671c, 0x02717ef6, 0x4feb5536, + 0xa2d02fff, 0xd2bf60c4, 0xd43f03c0, 0x50b4ef6d, 0x07478cd1, + 0x006e1888, 0xa2e53f55, 0xb9e6d4bc, + 0xa2048016, 0x97573833, 0xd7207d67, 0xde0f8f3d, 0x72f87b33, + 0xabcc4f33, 0x7688c55d, 0x7b00a6b0, + 0x947b0001, 0x570075d2, 0xf9bb88f8, 0x8942019e, 0x4264a5ff, + 0x856302e0, 0x72dbd92b, 0xee971b69, + 0x6ea22fde, 0x5f08ae2b, 0xaf7a616d, 0xe5c98767, 0xcf1febd2, + 0x61efc8c2, 0xf1ac2571, 0xcc8239c2, + 0x67214cb8, 0xb1e583d1, 0xb7dc3e62, 0x7f10bdce, 0xf90a5c38, + 0x0ff0443d, 0x606e6dc6, 0x60543a49, + 0x5727c148, 0x2be98a1d, 0x8ab41738, 0x20e1be24, 0xaf96da0f, + 0x68458425, 0x99833be5, 0x600d457d, + 0x282f9350, 0x8334b362, 0xd91d1120, 0x2b6d8da0, 0x642b1e31, + 0x9c305a00, 0x52bce688, 0x1b03588a, + 0xf7baefd5, 0x4142ed9c, 0xa4315c11, 0x83323ec5, 0xdfef4636, + 0xa133c501, 
0xe9d3531c, 0xee353783 +}; + +static const u32 s4[256] = { + 0x9db30420, 0x1fb6e9de, 0xa7be7bef, 0xd273a298, 0x4a4f7bdb, + 0x64ad8c57, 0x85510443, 0xfa020ed1, + 0x7e287aff, 0xe60fb663, 0x095f35a1, 0x79ebf120, 0xfd059d43, + 0x6497b7b1, 0xf3641f63, 0x241e4adf, + 0x28147f5f, 0x4fa2b8cd, 0xc9430040, 0x0cc32220, 0xfdd30b30, + 0xc0a5374f, 0x1d2d00d9, 0x24147b15, + 0xee4d111a, 0x0fca5167, 0x71ff904c, 0x2d195ffe, 0x1a05645f, + 0x0c13fefe, 0x081b08ca, 0x05170121, + 0x80530100, 0xe83e5efe, 0xac9af4f8, 0x7fe72701, 0xd2b8ee5f, + 0x06df4261, 0xbb9e9b8a, 0x7293ea25, + 0xce84ffdf, 0xf5718801, 0x3dd64b04, 0xa26f263b, 0x7ed48400, + 0x547eebe6, 0x446d4ca0, 0x6cf3d6f5, + 0x2649abdf, 0xaea0c7f5, 0x36338cc1, 0x503f7e93, 0xd3772061, + 0x11b638e1, 0x72500e03, 0xf80eb2bb, + 0xabe0502e, 0xec8d77de, 0x57971e81, 0xe14f6746, 0xc9335400, + 0x6920318f, 0x081dbb99, 0xffc304a5, + 0x4d351805, 0x7f3d5ce3, 0xa6c866c6, 0x5d5bcca9, 0xdaec6fea, + 0x9f926f91, 0x9f46222f, 0x3991467d, + 0xa5bf6d8e, 0x1143c44f, 0x43958302, 0xd0214eeb, 0x022083b8, + 0x3fb6180c, 0x18f8931e, 0x281658e6, + 0x26486e3e, 0x8bd78a70, 0x7477e4c1, 0xb506e07c, 0xf32d0a25, + 0x79098b02, 0xe4eabb81, 0x28123b23, + 0x69dead38, 0x1574ca16, 0xdf871b62, 0x211c40b7, 0xa51a9ef9, + 0x0014377b, 0x041e8ac8, 0x09114003, + 0xbd59e4d2, 0xe3d156d5, 0x4fe876d5, 0x2f91a340, 0x557be8de, + 0x00eae4a7, 0x0ce5c2ec, 0x4db4bba6, + 0xe756bdff, 0xdd3369ac, 0xec17b035, 0x06572327, 0x99afc8b0, + 0x56c8c391, 0x6b65811c, 0x5e146119, + 0x6e85cb75, 0xbe07c002, 0xc2325577, 0x893ff4ec, 0x5bbfc92d, + 0xd0ec3b25, 0xb7801ab7, 0x8d6d3b24, + 0x20c763ef, 0xc366a5fc, 0x9c382880, 0x0ace3205, 0xaac9548a, + 0xeca1d7c7, 0x041afa32, 0x1d16625a, + 0x6701902c, 0x9b757a54, 0x31d477f7, 0x9126b031, 0x36cc6fdb, + 0xc70b8b46, 0xd9e66a48, 0x56e55a79, + 0x026a4ceb, 0x52437eff, 0x2f8f76b4, 0x0df980a5, 0x8674cde3, + 0xedda04eb, 0x17a9be04, 0x2c18f4df, + 0xb7747f9d, 0xab2af7b4, 0xefc34d20, 0x2e096b7c, 0x1741a254, + 0xe5b6a035, 0x213d42f6, 0x2c1c7c26, + 0x61c2f50f, 0x6552daf9, 0xd2c231f8, 0x25130f69, 0xd8167fa2, + 0x0418f2c8, 0x001a96a6, 0x0d1526ab, + 0x63315c21, 0x5e0a72ec, 0x49bafefd, 0x187908d9, 0x8d0dbd86, + 0x311170a7, 0x3e9b640c, 0xcc3e10d7, + 0xd5cad3b6, 0x0caec388, 0xf73001e1, 0x6c728aff, 0x71eae2a1, + 0x1f9af36e, 0xcfcbd12f, 0xc1de8417, + 0xac07be6b, 0xcb44a1d8, 0x8b9b0f56, 0x013988c3, 0xb1c52fca, + 0xb4be31cd, 0xd8782806, 0x12a3a4e2, + 0x6f7de532, 0x58fd7eb6, 0xd01ee900, 0x24adffc2, 0xf4990fc5, + 0x9711aac5, 0x001d7b95, 0x82e5e7d2, + 0x109873f6, 0x00613096, 0xc32d9521, 0xada121ff, 0x29908415, + 0x7fbb977f, 0xaf9eb3db, 0x29c9ed2a, + 0x5ce2a465, 0xa730f32c, 0xd0aa3fe8, 0x8a5cc091, 0xd49e2ce7, + 0x0ce454a9, 0xd60acd86, 0x015f1919, + 0x77079103, 0xdea03af6, 0x78a8565e, 0xdee356df, 0x21f05cbe, + 0x8b75e387, 0xb3c50651, 0xb8a5c3ef, + 0xd8eeb6d2, 0xe523be77, 0xc2154529, 0x2f69efdf, 0xafe67afb, + 0xf470c4b2, 0xf3e0eb5b, 0xd6cc9876, + 0x39e4460c, 0x1fda8538, 0x1987832f, 0xca007367, 0xa99144f8, + 0x296b299e, 0x492fc295, 0x9266beab, + 0xb5676e69, 0x9bd3ddda, 0xdf7e052f, 0xdb25701c, 0x1b5e51ee, + 0xf65324e6, 0x6afce36c, 0x0316cc04, + 0x8644213e, 0xb7dc59d0, 0x7965291f, 0xccd6fd43, 0x41823979, + 0x932bcdf6, 0xb657c34d, 0x4edfd282, + 0x7ae5290c, 0x3cb9536b, 0x851e20fe, 0x9833557e, 0x13ecf0b0, + 0xd3ffb372, 0x3f85c5c1, 0x0aef7ed2 +}; + +static const u32 Tm[24][8] = { + { 0x5a827999, 0xc95c653a, 0x383650db, 0xa7103c7c, 0x15ea281d, + 0x84c413be, 0xf39dff5f, 0x6277eb00 } , + { 0xd151d6a1, 0x402bc242, 0xaf05ade3, 0x1ddf9984, 0x8cb98525, + 0xfb9370c6, 0x6a6d5c67, 0xd9474808 } , + { 0x482133a9, 0xb6fb1f4a, 0x25d50aeb, 
0x94aef68c, 0x0388e22d, + 0x7262cdce, 0xe13cb96f, 0x5016a510 } , + { 0xbef090b1, 0x2dca7c52, 0x9ca467f3, 0x0b7e5394, 0x7a583f35, + 0xe9322ad6, 0x580c1677, 0xc6e60218 } , + { 0x35bfedb9, 0xa499d95a, 0x1373c4fb, 0x824db09c, 0xf1279c3d, + 0x600187de, 0xcedb737f, 0x3db55f20 } , + { 0xac8f4ac1, 0x1b693662, 0x8a432203, 0xf91d0da4, 0x67f6f945, + 0xd6d0e4e6, 0x45aad087, 0xb484bc28 } , + { 0x235ea7c9, 0x9238936a, 0x01127f0b, 0x6fec6aac, 0xdec6564d, + 0x4da041ee, 0xbc7a2d8f, 0x2b541930 } , + { 0x9a2e04d1, 0x0907f072, 0x77e1dc13, 0xe6bbc7b4, 0x5595b355, + 0xc46f9ef6, 0x33498a97, 0xa2237638 } , + { 0x10fd61d9, 0x7fd74d7a, 0xeeb1391b, 0x5d8b24bc, 0xcc65105d, + 0x3b3efbfe, 0xaa18e79f, 0x18f2d340 } , + { 0x87ccbee1, 0xf6a6aa82, 0x65809623, 0xd45a81c4, 0x43346d65, + 0xb20e5906, 0x20e844a7, 0x8fc23048 } , + { 0xfe9c1be9, 0x6d76078a, 0xdc4ff32b, 0x4b29decc, 0xba03ca6d, + 0x28ddb60e, 0x97b7a1af, 0x06918d50 } , + { 0x756b78f1, 0xe4456492, 0x531f5033, 0xc1f93bd4, 0x30d32775, + 0x9fad1316, 0x0e86feb7, 0x7d60ea58 } , + { 0xec3ad5f9, 0x5b14c19a, 0xc9eead3b, 0x38c898dc, 0xa7a2847d, + 0x167c701e, 0x85565bbf, 0xf4304760 } , + { 0x630a3301, 0xd1e41ea2, 0x40be0a43, 0xaf97f5e4, 0x1e71e185, + 0x8d4bcd26, 0xfc25b8c7, 0x6affa468 } , + { 0xd9d99009, 0x48b37baa, 0xb78d674b, 0x266752ec, 0x95413e8d, + 0x041b2a2e, 0x72f515cf, 0xe1cf0170 } , + { 0x50a8ed11, 0xbf82d8b2, 0x2e5cc453, 0x9d36aff4, 0x0c109b95, + 0x7aea8736, 0xe9c472d7, 0x589e5e78 } , + { 0xc7784a19, 0x365235ba, 0xa52c215b, 0x14060cfc, 0x82dff89d, + 0xf1b9e43e, 0x6093cfdf, 0xcf6dbb80 } , + { 0x3e47a721, 0xad2192c2, 0x1bfb7e63, 0x8ad56a04, 0xf9af55a5, + 0x68894146, 0xd7632ce7, 0x463d1888 } , + { 0xb5170429, 0x23f0efca, 0x92cadb6b, 0x01a4c70c, 0x707eb2ad, + 0xdf589e4e, 0x4e3289ef, 0xbd0c7590 } , + { 0x2be66131, 0x9ac04cd2, 0x099a3873, 0x78742414, 0xe74e0fb5, + 0x5627fb56, 0xc501e6f7, 0x33dbd298 } , + { 0xa2b5be39, 0x118fa9da, 0x8069957b, 0xef43811c, 0x5e1d6cbd, + 0xccf7585e, 0x3bd143ff, 0xaaab2fa0 } , + { 0x19851b41, 0x885f06e2, 0xf738f283, 0x6612de24, 0xd4ecc9c5, + 0x43c6b566, 0xb2a0a107, 0x217a8ca8 } , + { 0x90547849, 0xff2e63ea, 0x6e084f8b, 0xdce23b2c, 0x4bbc26cd, + 0xba96126e, 0x296ffe0f, 0x9849e9b0 } , + { 0x0723d551, 0x75fdc0f2, 0xe4d7ac93, 0x53b19834, 0xc28b83d5, + 0x31656f76, 0xa03f5b17, 0x0f1946b8 } +}; + +static const u8 Tr[4][8] = { + { 0x13, 0x04, 0x15, 0x06, 0x17, 0x08, 0x19, 0x0a } , + { 0x1b, 0x0c, 0x1d, 0x0e, 0x1f, 0x10, 0x01, 0x12 } , + { 0x03, 0x14, 0x05, 0x16, 0x07, 0x18, 0x09, 0x1a } , + { 0x0b, 0x1c, 0x0d, 0x1e, 0x0f, 0x00, 0x11, 0x02 } +}; + +/* forward octave */ +static void W(u32 *key, unsigned int i) +{ + u32 I; + key[6] ^= F1(key[7], Tr[i % 4][0], Tm[i][0]); + key[5] ^= F2(key[6], Tr[i % 4][1], Tm[i][1]); + key[4] ^= F3(key[5], Tr[i % 4][2], Tm[i][2]); + key[3] ^= F1(key[4], Tr[i % 4][3], Tm[i][3]); + key[2] ^= F2(key[3], Tr[i % 4][4], Tm[i][4]); + key[1] ^= F3(key[2], Tr[i % 4][5], Tm[i][5]); + key[0] ^= F1(key[1], Tr[i % 4][6], Tm[i][6]); + key[7] ^= F2(key[0], Tr[i % 4][7], Tm[i][7]); +} + +static int cast6_setkey(struct crypto_tfm *tfm, const u8 *in_key, + unsigned key_len) +{ + int i; + u32 key[8]; + __be32 p_key[8]; /* padded key */ + struct cast6_ctx *c = crypto_tfm_ctx(tfm); + u32 *flags = &tfm->crt_flags; + + if (key_len % 4 != 0) { + *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN; + return -EINVAL; + } + + memset(p_key, 0, 32); + memcpy(p_key, in_key, key_len); + + key[0] = be32_to_cpu(p_key[0]); /* A */ + key[1] = be32_to_cpu(p_key[1]); /* B */ + key[2] = be32_to_cpu(p_key[2]); /* C */ + key[3] = be32_to_cpu(p_key[3]); /* D */ + key[4] = 
be32_to_cpu(p_key[4]); /* E */ + key[5] = be32_to_cpu(p_key[5]); /* F */ + key[6] = be32_to_cpu(p_key[6]); /* G */ + key[7] = be32_to_cpu(p_key[7]); /* H */ + + for (i = 0; i < 12; i++) { + W(key, 2 * i); + W(key, 2 * i + 1); + + c->Kr[i][0] = key[0] & 0x1f; + c->Kr[i][1] = key[2] & 0x1f; + c->Kr[i][2] = key[4] & 0x1f; + c->Kr[i][3] = key[6] & 0x1f; + + c->Km[i][0] = key[7]; + c->Km[i][1] = key[5]; + c->Km[i][2] = key[3]; + c->Km[i][3] = key[1]; + } + + return 0; +} + +/*forward quad round*/ +static void Q(u32 *block, u8 *Kr, u32 *Km) +{ + u32 I; + block[2] ^= F1(block[3], Kr[0], Km[0]); + block[1] ^= F2(block[2], Kr[1], Km[1]); + block[0] ^= F3(block[1], Kr[2], Km[2]); + block[3] ^= F1(block[0], Kr[3], Km[3]); +} + +/*reverse quad round*/ +static void QBAR(u32 *block, u8 *Kr, u32 *Km) +{ + u32 I; + block[3] ^= F1(block[0], Kr[3], Km[3]); + block[0] ^= F3(block[1], Kr[2], Km[2]); + block[1] ^= F2(block[2], Kr[1], Km[1]); + block[2] ^= F1(block[3], Kr[0], Km[0]); +} + +static void cast6_encrypt(struct crypto_tfm *tfm, u8 *outbuf, const u8 *inbuf) +{ + struct cast6_ctx *c = crypto_tfm_ctx(tfm); + const __be32 *src = (const __be32 *)inbuf; + __be32 *dst = (__be32 *)outbuf; + u32 block[4]; + u32 *Km; + u8 *Kr; + + block[0] = be32_to_cpu(src[0]); + block[1] = be32_to_cpu(src[1]); + block[2] = be32_to_cpu(src[2]); + block[3] = be32_to_cpu(src[3]); + + Km = c->Km[0]; Kr = c->Kr[0]; Q(block, Kr, Km); + Km = c->Km[1]; Kr = c->Kr[1]; Q(block, Kr, Km); + Km = c->Km[2]; Kr = c->Kr[2]; Q(block, Kr, Km); + Km = c->Km[3]; Kr = c->Kr[3]; Q(block, Kr, Km); + Km = c->Km[4]; Kr = c->Kr[4]; Q(block, Kr, Km); + Km = c->Km[5]; Kr = c->Kr[5]; Q(block, Kr, Km); + Km = c->Km[6]; Kr = c->Kr[6]; QBAR(block, Kr, Km); + Km = c->Km[7]; Kr = c->Kr[7]; QBAR(block, Kr, Km); + Km = c->Km[8]; Kr = c->Kr[8]; QBAR(block, Kr, Km); + Km = c->Km[9]; Kr = c->Kr[9]; QBAR(block, Kr, Km); + Km = c->Km[10]; Kr = c->Kr[10]; QBAR(block, Kr, Km); + Km = c->Km[11]; Kr = c->Kr[11]; QBAR(block, Kr, Km); + + dst[0] = cpu_to_be32(block[0]); + dst[1] = cpu_to_be32(block[1]); + dst[2] = cpu_to_be32(block[2]); + dst[3] = cpu_to_be32(block[3]); +} + +static void cast6_decrypt(struct crypto_tfm *tfm, u8 *outbuf, const u8 *inbuf) +{ + struct cast6_ctx *c = crypto_tfm_ctx(tfm); + const __be32 *src = (const __be32 *)inbuf; + __be32 *dst = (__be32 *)outbuf; + u32 block[4]; + u32 *Km; + u8 *Kr; + + block[0] = be32_to_cpu(src[0]); + block[1] = be32_to_cpu(src[1]); + block[2] = be32_to_cpu(src[2]); + block[3] = be32_to_cpu(src[3]); + + Km = c->Km[11]; Kr = c->Kr[11]; Q(block, Kr, Km); + Km = c->Km[10]; Kr = c->Kr[10]; Q(block, Kr, Km); + Km = c->Km[9]; Kr = c->Kr[9]; Q(block, Kr, Km); + Km = c->Km[8]; Kr = c->Kr[8]; Q(block, Kr, Km); + Km = c->Km[7]; Kr = c->Kr[7]; Q(block, Kr, Km); + Km = c->Km[6]; Kr = c->Kr[6]; Q(block, Kr, Km); + Km = c->Km[5]; Kr = c->Kr[5]; QBAR(block, Kr, Km); + Km = c->Km[4]; Kr = c->Kr[4]; QBAR(block, Kr, Km); + Km = c->Km[3]; Kr = c->Kr[3]; QBAR(block, Kr, Km); + Km = c->Km[2]; Kr = c->Kr[2]; QBAR(block, Kr, Km); + Km = c->Km[1]; Kr = c->Kr[1]; QBAR(block, Kr, Km); + Km = c->Km[0]; Kr = c->Kr[0]; QBAR(block, Kr, Km); + + dst[0] = cpu_to_be32(block[0]); + dst[1] = cpu_to_be32(block[1]); + dst[2] = cpu_to_be32(block[2]); + dst[3] = cpu_to_be32(block[3]); +} + +static struct crypto_alg alg = { + .cra_name = "cast6", + .cra_flags = CRYPTO_ALG_TYPE_CIPHER, + .cra_blocksize = CAST6_BLOCK_SIZE, + .cra_ctxsize = sizeof(struct cast6_ctx), + .cra_alignmask = 3, + .cra_module = THIS_MODULE, + .cra_list = LIST_HEAD_INIT(alg.cra_list), + 
.cra_u = { + .cipher = { + .cia_min_keysize = CAST6_MIN_KEY_SIZE, + .cia_max_keysize = CAST6_MAX_KEY_SIZE, + .cia_setkey = cast6_setkey, + .cia_encrypt = cast6_encrypt, + .cia_decrypt = cast6_decrypt} + } +}; + +static int __init cast6_mod_init(void) +{ + return crypto_register_alg(&alg); +} + +static void __exit cast6_mod_fini(void) +{ + crypto_unregister_alg(&alg); +} + +module_init(cast6_mod_init); +module_exit(cast6_mod_fini); + +MODULE_LICENSE("GPL"); +MODULE_DESCRIPTION("Cast6 Cipher Algorithm"); diff --git a/crypto/cbc.c b/crypto/cbc.c new file mode 100644 index 00000000..61ac42e1 --- /dev/null +++ b/crypto/cbc.c @@ -0,0 +1,291 @@ +/* + * CBC: Cipher Block Chaining mode + * + * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au> + * + * This program is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License as published by the Free + * Software Foundation; either version 2 of the License, or (at your option) + * any later version. + * + */ + +#include <crypto/algapi.h> +#include <linux/err.h> +#include <linux/init.h> +#include <linux/kernel.h> +#include <linux/log2.h> +#include <linux/module.h> +#include <linux/scatterlist.h> +#include <linux/slab.h> + +struct crypto_cbc_ctx { + struct crypto_cipher *child; +}; + +static int crypto_cbc_setkey(struct crypto_tfm *parent, const u8 *key, + unsigned int keylen) +{ + struct crypto_cbc_ctx *ctx = crypto_tfm_ctx(parent); + struct crypto_cipher *child = ctx->child; + int err; + + crypto_cipher_clear_flags(child, CRYPTO_TFM_REQ_MASK); + crypto_cipher_set_flags(child, crypto_tfm_get_flags(parent) & + CRYPTO_TFM_REQ_MASK); + err = crypto_cipher_setkey(child, key, keylen); + crypto_tfm_set_flags(parent, crypto_cipher_get_flags(child) & + CRYPTO_TFM_RES_MASK); + return err; +} + +static int crypto_cbc_encrypt_segment(struct blkcipher_desc *desc, + struct blkcipher_walk *walk, + struct crypto_cipher *tfm) +{ + void (*fn)(struct crypto_tfm *, u8 *, const u8 *) = + crypto_cipher_alg(tfm)->cia_encrypt; + int bsize = crypto_cipher_blocksize(tfm); + unsigned int nbytes = walk->nbytes; + u8 *src = walk->src.virt.addr; + u8 *dst = walk->dst.virt.addr; + u8 *iv = walk->iv; + + do { + crypto_xor(iv, src, bsize); + fn(crypto_cipher_tfm(tfm), dst, iv); + memcpy(iv, dst, bsize); + + src += bsize; + dst += bsize; + } while ((nbytes -= bsize) >= bsize); + + return nbytes; +} + +static int crypto_cbc_encrypt_inplace(struct blkcipher_desc *desc, + struct blkcipher_walk *walk, + struct crypto_cipher *tfm) +{ + void (*fn)(struct crypto_tfm *, u8 *, const u8 *) = + crypto_cipher_alg(tfm)->cia_encrypt; + int bsize = crypto_cipher_blocksize(tfm); + unsigned int nbytes = walk->nbytes; + u8 *src = walk->src.virt.addr; + u8 *iv = walk->iv; + + do { + crypto_xor(src, iv, bsize); + fn(crypto_cipher_tfm(tfm), src, src); + iv = src; + + src += bsize; + } while ((nbytes -= bsize) >= bsize); + + memcpy(walk->iv, iv, bsize); + + return nbytes; +} + +static int crypto_cbc_encrypt(struct blkcipher_desc *desc, + struct scatterlist *dst, struct scatterlist *src, + unsigned int nbytes) +{ + struct blkcipher_walk walk; + struct crypto_blkcipher *tfm = desc->tfm; + struct crypto_cbc_ctx *ctx = crypto_blkcipher_ctx(tfm); + struct crypto_cipher *child = ctx->child; + int err; + + blkcipher_walk_init(&walk, dst, src, nbytes); + err = blkcipher_walk_virt(desc, &walk); + + while ((nbytes = walk.nbytes)) { + if (walk.src.virt.addr == walk.dst.virt.addr) + nbytes = crypto_cbc_encrypt_inplace(desc, &walk, child); + else + nbytes = 
crypto_cbc_encrypt_segment(desc, &walk, child); + err = blkcipher_walk_done(desc, &walk, nbytes); + } + + return err; +} + +static int crypto_cbc_decrypt_segment(struct blkcipher_desc *desc, + struct blkcipher_walk *walk, + struct crypto_cipher *tfm) +{ + void (*fn)(struct crypto_tfm *, u8 *, const u8 *) = + crypto_cipher_alg(tfm)->cia_decrypt; + int bsize = crypto_cipher_blocksize(tfm); + unsigned int nbytes = walk->nbytes; + u8 *src = walk->src.virt.addr; + u8 *dst = walk->dst.virt.addr; + u8 *iv = walk->iv; + + do { + fn(crypto_cipher_tfm(tfm), dst, src); + crypto_xor(dst, iv, bsize); + iv = src; + + src += bsize; + dst += bsize; + } while ((nbytes -= bsize) >= bsize); + + memcpy(walk->iv, iv, bsize); + + return nbytes; +} + +static int crypto_cbc_decrypt_inplace(struct blkcipher_desc *desc, + struct blkcipher_walk *walk, + struct crypto_cipher *tfm) +{ + void (*fn)(struct crypto_tfm *, u8 *, const u8 *) = + crypto_cipher_alg(tfm)->cia_decrypt; + int bsize = crypto_cipher_blocksize(tfm); + unsigned int nbytes = walk->nbytes; + u8 *src = walk->src.virt.addr; + u8 last_iv[bsize]; + + /* Start of the last block. */ + src += nbytes - (nbytes & (bsize - 1)) - bsize; + memcpy(last_iv, src, bsize); + + for (;;) { + fn(crypto_cipher_tfm(tfm), src, src); + if ((nbytes -= bsize) < bsize) + break; + crypto_xor(src, src - bsize, bsize); + src -= bsize; + } + + crypto_xor(src, walk->iv, bsize); + memcpy(walk->iv, last_iv, bsize); + + return nbytes; +} + +static int crypto_cbc_decrypt(struct blkcipher_desc *desc, + struct scatterlist *dst, struct scatterlist *src, + unsigned int nbytes) +{ + struct blkcipher_walk walk; + struct crypto_blkcipher *tfm = desc->tfm; + struct crypto_cbc_ctx *ctx = crypto_blkcipher_ctx(tfm); + struct crypto_cipher *child = ctx->child; + int err; + + blkcipher_walk_init(&walk, dst, src, nbytes); + err = blkcipher_walk_virt(desc, &walk); + + while ((nbytes = walk.nbytes)) { + if (walk.src.virt.addr == walk.dst.virt.addr) + nbytes = crypto_cbc_decrypt_inplace(desc, &walk, child); + else + nbytes = crypto_cbc_decrypt_segment(desc, &walk, child); + err = blkcipher_walk_done(desc, &walk, nbytes); + } + + return err; +} + +static int crypto_cbc_init_tfm(struct crypto_tfm *tfm) +{ + struct crypto_instance *inst = (void *)tfm->__crt_alg; + struct crypto_spawn *spawn = crypto_instance_ctx(inst); + struct crypto_cbc_ctx *ctx = crypto_tfm_ctx(tfm); + struct crypto_cipher *cipher; + + cipher = crypto_spawn_cipher(spawn); + if (IS_ERR(cipher)) + return PTR_ERR(cipher); + + ctx->child = cipher; + return 0; +} + +static void crypto_cbc_exit_tfm(struct crypto_tfm *tfm) +{ + struct crypto_cbc_ctx *ctx = crypto_tfm_ctx(tfm); + crypto_free_cipher(ctx->child); +} + +static struct crypto_instance *crypto_cbc_alloc(struct rtattr **tb) +{ + struct crypto_instance *inst; + struct crypto_alg *alg; + int err; + + err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_BLKCIPHER); + if (err) + return ERR_PTR(err); + + alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_CIPHER, + CRYPTO_ALG_TYPE_MASK); + if (IS_ERR(alg)) + return ERR_CAST(alg); + + inst = ERR_PTR(-EINVAL); + if (!is_power_of_2(alg->cra_blocksize)) + goto out_put_alg; + + inst = crypto_alloc_instance("cbc", alg); + if (IS_ERR(inst)) + goto out_put_alg; + + inst->alg.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER; + inst->alg.cra_priority = alg->cra_priority; + inst->alg.cra_blocksize = alg->cra_blocksize; + inst->alg.cra_alignmask = alg->cra_alignmask; + inst->alg.cra_type = &crypto_blkcipher_type; + + /* We access the data as u32s when xoring. 
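+ * Raise the alignmask so walked buffers are at least u32-aligned, on top
+ * of the underlying cipher's own alignment requirement.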
*/ + inst->alg.cra_alignmask |= __alignof__(u32) - 1; + + inst->alg.cra_blkcipher.ivsize = alg->cra_blocksize; + inst->alg.cra_blkcipher.min_keysize = alg->cra_cipher.cia_min_keysize; + inst->alg.cra_blkcipher.max_keysize = alg->cra_cipher.cia_max_keysize; + + inst->alg.cra_ctxsize = sizeof(struct crypto_cbc_ctx); + + inst->alg.cra_init = crypto_cbc_init_tfm; + inst->alg.cra_exit = crypto_cbc_exit_tfm; + + inst->alg.cra_blkcipher.setkey = crypto_cbc_setkey; + inst->alg.cra_blkcipher.encrypt = crypto_cbc_encrypt; + inst->alg.cra_blkcipher.decrypt = crypto_cbc_decrypt; + +out_put_alg: + crypto_mod_put(alg); + return inst; +} + +static void crypto_cbc_free(struct crypto_instance *inst) +{ + crypto_drop_spawn(crypto_instance_ctx(inst)); + kfree(inst); +} + +static struct crypto_template crypto_cbc_tmpl = { + .name = "cbc", + .alloc = crypto_cbc_alloc, + .free = crypto_cbc_free, + .module = THIS_MODULE, +}; + +static int __init crypto_cbc_module_init(void) +{ + return crypto_register_template(&crypto_cbc_tmpl); +} + +static void __exit crypto_cbc_module_exit(void) +{ + crypto_unregister_template(&crypto_cbc_tmpl); +} + +module_init(crypto_cbc_module_init); +module_exit(crypto_cbc_module_exit); + +MODULE_LICENSE("GPL"); +MODULE_DESCRIPTION("CBC block cipher algorithm"); diff --git a/crypto/ccm.c b/crypto/ccm.c new file mode 100644 index 00000000..32fe1bb5 --- /dev/null +++ b/crypto/ccm.c @@ -0,0 +1,891 @@ +/* + * CCM: Counter with CBC-MAC + * + * (C) Copyright IBM Corp. 2007 - Joy Latten <latten@us.ibm.com> + * + * This program is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License as published by the Free + * Software Foundation; either version 2 of the License, or (at your option) + * any later version. 
+ * + */ + +#include <crypto/internal/aead.h> +#include <crypto/internal/skcipher.h> +#include <crypto/scatterwalk.h> +#include <linux/err.h> +#include <linux/init.h> +#include <linux/kernel.h> +#include <linux/module.h> +#include <linux/slab.h> + +#include "internal.h" + +struct ccm_instance_ctx { + struct crypto_skcipher_spawn ctr; + struct crypto_spawn cipher; +}; + +struct crypto_ccm_ctx { + struct crypto_cipher *cipher; + struct crypto_ablkcipher *ctr; +}; + +struct crypto_rfc4309_ctx { + struct crypto_aead *child; + u8 nonce[3]; +}; + +struct crypto_ccm_req_priv_ctx { + u8 odata[16]; + u8 idata[16]; + u8 auth_tag[16]; + u32 ilen; + u32 flags; + struct scatterlist src[2]; + struct scatterlist dst[2]; + struct ablkcipher_request abreq; +}; + +static inline struct crypto_ccm_req_priv_ctx *crypto_ccm_reqctx( + struct aead_request *req) +{ + unsigned long align = crypto_aead_alignmask(crypto_aead_reqtfm(req)); + + return (void *)PTR_ALIGN((u8 *)aead_request_ctx(req), align + 1); +} + +static int set_msg_len(u8 *block, unsigned int msglen, int csize) +{ + __be32 data; + + memset(block, 0, csize); + block += csize; + + if (csize >= 4) + csize = 4; + else if (msglen > (1 << (8 * csize))) + return -EOVERFLOW; + + data = cpu_to_be32(msglen); + memcpy(block - csize, (u8 *)&data + 4 - csize, csize); + + return 0; +} + +static int crypto_ccm_setkey(struct crypto_aead *aead, const u8 *key, + unsigned int keylen) +{ + struct crypto_ccm_ctx *ctx = crypto_aead_ctx(aead); + struct crypto_ablkcipher *ctr = ctx->ctr; + struct crypto_cipher *tfm = ctx->cipher; + int err = 0; + + crypto_ablkcipher_clear_flags(ctr, CRYPTO_TFM_REQ_MASK); + crypto_ablkcipher_set_flags(ctr, crypto_aead_get_flags(aead) & + CRYPTO_TFM_REQ_MASK); + err = crypto_ablkcipher_setkey(ctr, key, keylen); + crypto_aead_set_flags(aead, crypto_ablkcipher_get_flags(ctr) & + CRYPTO_TFM_RES_MASK); + if (err) + goto out; + + crypto_cipher_clear_flags(tfm, CRYPTO_TFM_REQ_MASK); + crypto_cipher_set_flags(tfm, crypto_aead_get_flags(aead) & + CRYPTO_TFM_REQ_MASK); + err = crypto_cipher_setkey(tfm, key, keylen); + crypto_aead_set_flags(aead, crypto_cipher_get_flags(tfm) & + CRYPTO_TFM_RES_MASK); + +out: + return err; +} + +static int crypto_ccm_setauthsize(struct crypto_aead *tfm, + unsigned int authsize) +{ + switch (authsize) { + case 4: + case 6: + case 8: + case 10: + case 12: + case 14: + case 16: + break; + default: + return -EINVAL; + } + + return 0; +} + +static int format_input(u8 *info, struct aead_request *req, + unsigned int cryptlen) +{ + struct crypto_aead *aead = crypto_aead_reqtfm(req); + unsigned int lp = req->iv[0]; + unsigned int l = lp + 1; + unsigned int m; + + m = crypto_aead_authsize(aead); + + memcpy(info, req->iv, 16); + + /* format control info per RFC 3610 and + * NIST Special Publication 800-38C + */ + *info |= (8 * ((m - 2) / 2)); + if (req->assoclen) + *info |= 64; + + return set_msg_len(info + 16 - l, cryptlen, l); +} + +static int format_adata(u8 *adata, unsigned int a) +{ + int len = 0; + + /* add control info for associated data + * RFC 3610 and NIST Special Publication 800-38C + */ + if (a < 65280) { + *(__be16 *)adata = cpu_to_be16(a); + len = 2; + } else { + *(__be16 *)adata = cpu_to_be16(0xfffe); + *(__be32 *)&adata[2] = cpu_to_be32(a); + len = 6; + } + + return len; +} + +static void compute_mac(struct crypto_cipher *tfm, u8 *data, int n, + struct crypto_ccm_req_priv_ctx *pctx) +{ + unsigned int bs = 16; + u8 *odata = pctx->odata; + u8 *idata = pctx->idata; + int datalen, getlen; + + datalen = n; + + /* first 
time in here, block may be partially filled. */ + getlen = bs - pctx->ilen; + if (datalen >= getlen) { + memcpy(idata + pctx->ilen, data, getlen); + crypto_xor(odata, idata, bs); + crypto_cipher_encrypt_one(tfm, odata, odata); + datalen -= getlen; + data += getlen; + pctx->ilen = 0; + } + + /* now encrypt rest of data */ + while (datalen >= bs) { + crypto_xor(odata, data, bs); + crypto_cipher_encrypt_one(tfm, odata, odata); + + datalen -= bs; + data += bs; + } + + /* check and see if there's leftover data that wasn't + * enough to fill a block. + */ + if (datalen) { + memcpy(idata + pctx->ilen, data, datalen); + pctx->ilen += datalen; + } +} + +static void get_data_to_compute(struct crypto_cipher *tfm, + struct crypto_ccm_req_priv_ctx *pctx, + struct scatterlist *sg, unsigned int len) +{ + struct scatter_walk walk; + u8 *data_src; + int n; + + scatterwalk_start(&walk, sg); + + while (len) { + n = scatterwalk_clamp(&walk, len); + if (!n) { + scatterwalk_start(&walk, sg_next(walk.sg)); + n = scatterwalk_clamp(&walk, len); + } + data_src = scatterwalk_map(&walk); + + compute_mac(tfm, data_src, n, pctx); + len -= n; + + scatterwalk_unmap(data_src); + scatterwalk_advance(&walk, n); + scatterwalk_done(&walk, 0, len); + if (len) + crypto_yield(pctx->flags); + } + + /* any leftover needs padding and then encrypted */ + if (pctx->ilen) { + int padlen; + u8 *odata = pctx->odata; + u8 *idata = pctx->idata; + + padlen = 16 - pctx->ilen; + memset(idata + pctx->ilen, 0, padlen); + crypto_xor(odata, idata, 16); + crypto_cipher_encrypt_one(tfm, odata, odata); + pctx->ilen = 0; + } +} + +static int crypto_ccm_auth(struct aead_request *req, struct scatterlist *plain, + unsigned int cryptlen) +{ + struct crypto_aead *aead = crypto_aead_reqtfm(req); + struct crypto_ccm_ctx *ctx = crypto_aead_ctx(aead); + struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req); + struct crypto_cipher *cipher = ctx->cipher; + unsigned int assoclen = req->assoclen; + u8 *odata = pctx->odata; + u8 *idata = pctx->idata; + int err; + + /* format control data for input */ + err = format_input(odata, req, cryptlen); + if (err) + goto out; + + /* encrypt first block to use as start in computing mac */ + crypto_cipher_encrypt_one(cipher, odata, odata); + + /* format associated data and compute into mac */ + if (assoclen) { + pctx->ilen = format_adata(idata, assoclen); + get_data_to_compute(cipher, pctx, req->assoc, req->assoclen); + } else { + pctx->ilen = 0; + } + + /* compute plaintext into mac */ + get_data_to_compute(cipher, pctx, plain, cryptlen); + +out: + return err; +} + +static void crypto_ccm_encrypt_done(struct crypto_async_request *areq, int err) +{ + struct aead_request *req = areq->data; + struct crypto_aead *aead = crypto_aead_reqtfm(req); + struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req); + u8 *odata = pctx->odata; + + if (!err) + scatterwalk_map_and_copy(odata, req->dst, req->cryptlen, + crypto_aead_authsize(aead), 1); + aead_request_complete(req, err); +} + +static inline int crypto_ccm_check_iv(const u8 *iv) +{ + /* 2 <= L <= 8, so 1 <= L' <= 7. 
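+ * iv[0] carries L', the octet size of the CCM length field minus one;
+ * rfc4309 below, for example, uses L' = 3 (a 4-octet length field and an
+ * 11-octet nonce).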
*/ + if (1 > iv[0] || iv[0] > 7) + return -EINVAL; + + return 0; +} + +static int crypto_ccm_encrypt(struct aead_request *req) +{ + struct crypto_aead *aead = crypto_aead_reqtfm(req); + struct crypto_ccm_ctx *ctx = crypto_aead_ctx(aead); + struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req); + struct ablkcipher_request *abreq = &pctx->abreq; + struct scatterlist *dst; + unsigned int cryptlen = req->cryptlen; + u8 *odata = pctx->odata; + u8 *iv = req->iv; + int err; + + err = crypto_ccm_check_iv(iv); + if (err) + return err; + + pctx->flags = aead_request_flags(req); + + err = crypto_ccm_auth(req, req->src, cryptlen); + if (err) + return err; + + /* Note: rfc 3610 and NIST 800-38C require counter of + * zero to encrypt auth tag. + */ + memset(iv + 15 - iv[0], 0, iv[0] + 1); + + sg_init_table(pctx->src, 2); + sg_set_buf(pctx->src, odata, 16); + scatterwalk_sg_chain(pctx->src, 2, req->src); + + dst = pctx->src; + if (req->src != req->dst) { + sg_init_table(pctx->dst, 2); + sg_set_buf(pctx->dst, odata, 16); + scatterwalk_sg_chain(pctx->dst, 2, req->dst); + dst = pctx->dst; + } + + ablkcipher_request_set_tfm(abreq, ctx->ctr); + ablkcipher_request_set_callback(abreq, pctx->flags, + crypto_ccm_encrypt_done, req); + ablkcipher_request_set_crypt(abreq, pctx->src, dst, cryptlen + 16, iv); + err = crypto_ablkcipher_encrypt(abreq); + if (err) + return err; + + /* copy authtag to end of dst */ + scatterwalk_map_and_copy(odata, req->dst, cryptlen, + crypto_aead_authsize(aead), 1); + return err; +} + +static void crypto_ccm_decrypt_done(struct crypto_async_request *areq, + int err) +{ + struct aead_request *req = areq->data; + struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req); + struct crypto_aead *aead = crypto_aead_reqtfm(req); + unsigned int authsize = crypto_aead_authsize(aead); + unsigned int cryptlen = req->cryptlen - authsize; + + if (!err) { + err = crypto_ccm_auth(req, req->dst, cryptlen); + if (!err && memcmp(pctx->auth_tag, pctx->odata, authsize)) + err = -EBADMSG; + } + aead_request_complete(req, err); +} + +static int crypto_ccm_decrypt(struct aead_request *req) +{ + struct crypto_aead *aead = crypto_aead_reqtfm(req); + struct crypto_ccm_ctx *ctx = crypto_aead_ctx(aead); + struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req); + struct ablkcipher_request *abreq = &pctx->abreq; + struct scatterlist *dst; + unsigned int authsize = crypto_aead_authsize(aead); + unsigned int cryptlen = req->cryptlen; + u8 *authtag = pctx->auth_tag; + u8 *odata = pctx->odata; + u8 *iv = req->iv; + int err; + + if (cryptlen < authsize) + return -EINVAL; + cryptlen -= authsize; + + err = crypto_ccm_check_iv(iv); + if (err) + return err; + + pctx->flags = aead_request_flags(req); + + scatterwalk_map_and_copy(authtag, req->src, cryptlen, authsize, 0); + + memset(iv + 15 - iv[0], 0, iv[0] + 1); + + sg_init_table(pctx->src, 2); + sg_set_buf(pctx->src, authtag, 16); + scatterwalk_sg_chain(pctx->src, 2, req->src); + + dst = pctx->src; + if (req->src != req->dst) { + sg_init_table(pctx->dst, 2); + sg_set_buf(pctx->dst, authtag, 16); + scatterwalk_sg_chain(pctx->dst, 2, req->dst); + dst = pctx->dst; + } + + ablkcipher_request_set_tfm(abreq, ctx->ctr); + ablkcipher_request_set_callback(abreq, pctx->flags, + crypto_ccm_decrypt_done, req); + ablkcipher_request_set_crypt(abreq, pctx->src, dst, cryptlen + 16, iv); + err = crypto_ablkcipher_decrypt(abreq); + if (err) + return err; + + err = crypto_ccm_auth(req, req->dst, cryptlen); + if (err) + return err; + + /* verify */ + if (memcmp(authtag, 
odata, authsize)) + return -EBADMSG; + + return err; +} + +static int crypto_ccm_init_tfm(struct crypto_tfm *tfm) +{ + struct crypto_instance *inst = (void *)tfm->__crt_alg; + struct ccm_instance_ctx *ictx = crypto_instance_ctx(inst); + struct crypto_ccm_ctx *ctx = crypto_tfm_ctx(tfm); + struct crypto_cipher *cipher; + struct crypto_ablkcipher *ctr; + unsigned long align; + int err; + + cipher = crypto_spawn_cipher(&ictx->cipher); + if (IS_ERR(cipher)) + return PTR_ERR(cipher); + + ctr = crypto_spawn_skcipher(&ictx->ctr); + err = PTR_ERR(ctr); + if (IS_ERR(ctr)) + goto err_free_cipher; + + ctx->cipher = cipher; + ctx->ctr = ctr; + + align = crypto_tfm_alg_alignmask(tfm); + align &= ~(crypto_tfm_ctx_alignment() - 1); + tfm->crt_aead.reqsize = align + + sizeof(struct crypto_ccm_req_priv_ctx) + + crypto_ablkcipher_reqsize(ctr); + + return 0; + +err_free_cipher: + crypto_free_cipher(cipher); + return err; +} + +static void crypto_ccm_exit_tfm(struct crypto_tfm *tfm) +{ + struct crypto_ccm_ctx *ctx = crypto_tfm_ctx(tfm); + + crypto_free_cipher(ctx->cipher); + crypto_free_ablkcipher(ctx->ctr); +} + +static struct crypto_instance *crypto_ccm_alloc_common(struct rtattr **tb, + const char *full_name, + const char *ctr_name, + const char *cipher_name) +{ + struct crypto_attr_type *algt; + struct crypto_instance *inst; + struct crypto_alg *ctr; + struct crypto_alg *cipher; + struct ccm_instance_ctx *ictx; + int err; + + algt = crypto_get_attr_type(tb); + err = PTR_ERR(algt); + if (IS_ERR(algt)) + return ERR_PTR(err); + + if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask) + return ERR_PTR(-EINVAL); + + cipher = crypto_alg_mod_lookup(cipher_name, CRYPTO_ALG_TYPE_CIPHER, + CRYPTO_ALG_TYPE_MASK); + err = PTR_ERR(cipher); + if (IS_ERR(cipher)) + return ERR_PTR(err); + + err = -EINVAL; + if (cipher->cra_blocksize != 16) + goto out_put_cipher; + + inst = kzalloc(sizeof(*inst) + sizeof(*ictx), GFP_KERNEL); + err = -ENOMEM; + if (!inst) + goto out_put_cipher; + + ictx = crypto_instance_ctx(inst); + + err = crypto_init_spawn(&ictx->cipher, cipher, inst, + CRYPTO_ALG_TYPE_MASK); + if (err) + goto err_free_inst; + + crypto_set_skcipher_spawn(&ictx->ctr, inst); + err = crypto_grab_skcipher(&ictx->ctr, ctr_name, 0, + crypto_requires_sync(algt->type, + algt->mask)); + if (err) + goto err_drop_cipher; + + ctr = crypto_skcipher_spawn_alg(&ictx->ctr); + + /* Not a stream cipher? */ + err = -EINVAL; + if (ctr->cra_blocksize != 1) + goto err_drop_ctr; + + /* We want the real thing! 
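+ * That is, a counter mode driven by a full 16-byte counter block, which is
+ * what the CCM construction hands it as the IV.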
*/ + if (ctr->cra_ablkcipher.ivsize != 16) + goto err_drop_ctr; + + err = -ENAMETOOLONG; + if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME, + "ccm_base(%s,%s)", ctr->cra_driver_name, + cipher->cra_driver_name) >= CRYPTO_MAX_ALG_NAME) + goto err_drop_ctr; + + memcpy(inst->alg.cra_name, full_name, CRYPTO_MAX_ALG_NAME); + + inst->alg.cra_flags = CRYPTO_ALG_TYPE_AEAD; + inst->alg.cra_flags |= ctr->cra_flags & CRYPTO_ALG_ASYNC; + inst->alg.cra_priority = cipher->cra_priority + ctr->cra_priority; + inst->alg.cra_blocksize = 1; + inst->alg.cra_alignmask = cipher->cra_alignmask | ctr->cra_alignmask | + (__alignof__(u32) - 1); + inst->alg.cra_type = &crypto_aead_type; + inst->alg.cra_aead.ivsize = 16; + inst->alg.cra_aead.maxauthsize = 16; + inst->alg.cra_ctxsize = sizeof(struct crypto_ccm_ctx); + inst->alg.cra_init = crypto_ccm_init_tfm; + inst->alg.cra_exit = crypto_ccm_exit_tfm; + inst->alg.cra_aead.setkey = crypto_ccm_setkey; + inst->alg.cra_aead.setauthsize = crypto_ccm_setauthsize; + inst->alg.cra_aead.encrypt = crypto_ccm_encrypt; + inst->alg.cra_aead.decrypt = crypto_ccm_decrypt; + +out: + crypto_mod_put(cipher); + return inst; + +err_drop_ctr: + crypto_drop_skcipher(&ictx->ctr); +err_drop_cipher: + crypto_drop_spawn(&ictx->cipher); +err_free_inst: + kfree(inst); +out_put_cipher: + inst = ERR_PTR(err); + goto out; +} + +static struct crypto_instance *crypto_ccm_alloc(struct rtattr **tb) +{ + int err; + const char *cipher_name; + char ctr_name[CRYPTO_MAX_ALG_NAME]; + char full_name[CRYPTO_MAX_ALG_NAME]; + + cipher_name = crypto_attr_alg_name(tb[1]); + err = PTR_ERR(cipher_name); + if (IS_ERR(cipher_name)) + return ERR_PTR(err); + + if (snprintf(ctr_name, CRYPTO_MAX_ALG_NAME, "ctr(%s)", + cipher_name) >= CRYPTO_MAX_ALG_NAME) + return ERR_PTR(-ENAMETOOLONG); + + if (snprintf(full_name, CRYPTO_MAX_ALG_NAME, "ccm(%s)", cipher_name) >= + CRYPTO_MAX_ALG_NAME) + return ERR_PTR(-ENAMETOOLONG); + + return crypto_ccm_alloc_common(tb, full_name, ctr_name, cipher_name); +} + +static void crypto_ccm_free(struct crypto_instance *inst) +{ + struct ccm_instance_ctx *ctx = crypto_instance_ctx(inst); + + crypto_drop_spawn(&ctx->cipher); + crypto_drop_skcipher(&ctx->ctr); + kfree(inst); +} + +static struct crypto_template crypto_ccm_tmpl = { + .name = "ccm", + .alloc = crypto_ccm_alloc, + .free = crypto_ccm_free, + .module = THIS_MODULE, +}; + +static struct crypto_instance *crypto_ccm_base_alloc(struct rtattr **tb) +{ + int err; + const char *ctr_name; + const char *cipher_name; + char full_name[CRYPTO_MAX_ALG_NAME]; + + ctr_name = crypto_attr_alg_name(tb[1]); + err = PTR_ERR(ctr_name); + if (IS_ERR(ctr_name)) + return ERR_PTR(err); + + cipher_name = crypto_attr_alg_name(tb[2]); + err = PTR_ERR(cipher_name); + if (IS_ERR(cipher_name)) + return ERR_PTR(err); + + if (snprintf(full_name, CRYPTO_MAX_ALG_NAME, "ccm_base(%s,%s)", + ctr_name, cipher_name) >= CRYPTO_MAX_ALG_NAME) + return ERR_PTR(-ENAMETOOLONG); + + return crypto_ccm_alloc_common(tb, full_name, ctr_name, cipher_name); +} + +static struct crypto_template crypto_ccm_base_tmpl = { + .name = "ccm_base", + .alloc = crypto_ccm_base_alloc, + .free = crypto_ccm_free, + .module = THIS_MODULE, +}; + +static int crypto_rfc4309_setkey(struct crypto_aead *parent, const u8 *key, + unsigned int keylen) +{ + struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(parent); + struct crypto_aead *child = ctx->child; + int err; + + if (keylen < 3) + return -EINVAL; + + keylen -= 3; + memcpy(ctx->nonce, key + keylen, 3); + + crypto_aead_clear_flags(child, 
CRYPTO_TFM_REQ_MASK); + crypto_aead_set_flags(child, crypto_aead_get_flags(parent) & + CRYPTO_TFM_REQ_MASK); + err = crypto_aead_setkey(child, key, keylen); + crypto_aead_set_flags(parent, crypto_aead_get_flags(child) & + CRYPTO_TFM_RES_MASK); + + return err; +} + +static int crypto_rfc4309_setauthsize(struct crypto_aead *parent, + unsigned int authsize) +{ + struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(parent); + + switch (authsize) { + case 8: + case 12: + case 16: + break; + default: + return -EINVAL; + } + + return crypto_aead_setauthsize(ctx->child, authsize); +} + +static struct aead_request *crypto_rfc4309_crypt(struct aead_request *req) +{ + struct aead_request *subreq = aead_request_ctx(req); + struct crypto_aead *aead = crypto_aead_reqtfm(req); + struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(aead); + struct crypto_aead *child = ctx->child; + u8 *iv = PTR_ALIGN((u8 *)(subreq + 1) + crypto_aead_reqsize(child), + crypto_aead_alignmask(child) + 1); + + /* L' */ + iv[0] = 3; + + memcpy(iv + 1, ctx->nonce, 3); + memcpy(iv + 4, req->iv, 8); + + aead_request_set_tfm(subreq, child); + aead_request_set_callback(subreq, req->base.flags, req->base.complete, + req->base.data); + aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen, iv); + aead_request_set_assoc(subreq, req->assoc, req->assoclen); + + return subreq; +} + +static int crypto_rfc4309_encrypt(struct aead_request *req) +{ + req = crypto_rfc4309_crypt(req); + + return crypto_aead_encrypt(req); +} + +static int crypto_rfc4309_decrypt(struct aead_request *req) +{ + req = crypto_rfc4309_crypt(req); + + return crypto_aead_decrypt(req); +} + +static int crypto_rfc4309_init_tfm(struct crypto_tfm *tfm) +{ + struct crypto_instance *inst = (void *)tfm->__crt_alg; + struct crypto_aead_spawn *spawn = crypto_instance_ctx(inst); + struct crypto_rfc4309_ctx *ctx = crypto_tfm_ctx(tfm); + struct crypto_aead *aead; + unsigned long align; + + aead = crypto_spawn_aead(spawn); + if (IS_ERR(aead)) + return PTR_ERR(aead); + + ctx->child = aead; + + align = crypto_aead_alignmask(aead); + align &= ~(crypto_tfm_ctx_alignment() - 1); + tfm->crt_aead.reqsize = sizeof(struct aead_request) + + ALIGN(crypto_aead_reqsize(aead), + crypto_tfm_ctx_alignment()) + + align + 16; + + return 0; +} + +static void crypto_rfc4309_exit_tfm(struct crypto_tfm *tfm) +{ + struct crypto_rfc4309_ctx *ctx = crypto_tfm_ctx(tfm); + + crypto_free_aead(ctx->child); +} + +static struct crypto_instance *crypto_rfc4309_alloc(struct rtattr **tb) +{ + struct crypto_attr_type *algt; + struct crypto_instance *inst; + struct crypto_aead_spawn *spawn; + struct crypto_alg *alg; + const char *ccm_name; + int err; + + algt = crypto_get_attr_type(tb); + err = PTR_ERR(algt); + if (IS_ERR(algt)) + return ERR_PTR(err); + + if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask) + return ERR_PTR(-EINVAL); + + ccm_name = crypto_attr_alg_name(tb[1]); + err = PTR_ERR(ccm_name); + if (IS_ERR(ccm_name)) + return ERR_PTR(err); + + inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL); + if (!inst) + return ERR_PTR(-ENOMEM); + + spawn = crypto_instance_ctx(inst); + crypto_set_aead_spawn(spawn, inst); + err = crypto_grab_aead(spawn, ccm_name, 0, + crypto_requires_sync(algt->type, algt->mask)); + if (err) + goto out_free_inst; + + alg = crypto_aead_spawn_alg(spawn); + + err = -EINVAL; + + /* We only support 16-byte blocks. */ + if (alg->cra_aead.ivsize != 16) + goto out_drop_alg; + + /* Not a stream cipher? 
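+	 * (A ccm aead, like the template built above, registers itself
+	 * with cra_blocksize == 1.)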
*/ + if (alg->cra_blocksize != 1) + goto out_drop_alg; + + err = -ENAMETOOLONG; + if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME, + "rfc4309(%s)", alg->cra_name) >= CRYPTO_MAX_ALG_NAME || + snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME, + "rfc4309(%s)", alg->cra_driver_name) >= + CRYPTO_MAX_ALG_NAME) + goto out_drop_alg; + + inst->alg.cra_flags = CRYPTO_ALG_TYPE_AEAD; + inst->alg.cra_flags |= alg->cra_flags & CRYPTO_ALG_ASYNC; + inst->alg.cra_priority = alg->cra_priority; + inst->alg.cra_blocksize = 1; + inst->alg.cra_alignmask = alg->cra_alignmask; + inst->alg.cra_type = &crypto_nivaead_type; + + inst->alg.cra_aead.ivsize = 8; + inst->alg.cra_aead.maxauthsize = 16; + + inst->alg.cra_ctxsize = sizeof(struct crypto_rfc4309_ctx); + + inst->alg.cra_init = crypto_rfc4309_init_tfm; + inst->alg.cra_exit = crypto_rfc4309_exit_tfm; + + inst->alg.cra_aead.setkey = crypto_rfc4309_setkey; + inst->alg.cra_aead.setauthsize = crypto_rfc4309_setauthsize; + inst->alg.cra_aead.encrypt = crypto_rfc4309_encrypt; + inst->alg.cra_aead.decrypt = crypto_rfc4309_decrypt; + + inst->alg.cra_aead.geniv = "seqiv"; + +out: + return inst; + +out_drop_alg: + crypto_drop_aead(spawn); +out_free_inst: + kfree(inst); + inst = ERR_PTR(err); + goto out; +} + +static void crypto_rfc4309_free(struct crypto_instance *inst) +{ + crypto_drop_spawn(crypto_instance_ctx(inst)); + kfree(inst); +} + +static struct crypto_template crypto_rfc4309_tmpl = { + .name = "rfc4309", + .alloc = crypto_rfc4309_alloc, + .free = crypto_rfc4309_free, + .module = THIS_MODULE, +}; + +static int __init crypto_ccm_module_init(void) +{ + int err; + + err = crypto_register_template(&crypto_ccm_base_tmpl); + if (err) + goto out; + + err = crypto_register_template(&crypto_ccm_tmpl); + if (err) + goto out_undo_base; + + err = crypto_register_template(&crypto_rfc4309_tmpl); + if (err) + goto out_undo_ccm; + +out: + return err; + +out_undo_ccm: + crypto_unregister_template(&crypto_ccm_tmpl); +out_undo_base: + crypto_unregister_template(&crypto_ccm_base_tmpl); + goto out; +} + +static void __exit crypto_ccm_module_exit(void) +{ + crypto_unregister_template(&crypto_rfc4309_tmpl); + crypto_unregister_template(&crypto_ccm_tmpl); + crypto_unregister_template(&crypto_ccm_base_tmpl); +} + +module_init(crypto_ccm_module_init); +module_exit(crypto_ccm_module_exit); + +MODULE_LICENSE("GPL"); +MODULE_DESCRIPTION("Counter with CBC MAC"); +MODULE_ALIAS("ccm_base"); +MODULE_ALIAS("rfc4309"); diff --git a/crypto/chainiv.c b/crypto/chainiv.c new file mode 100644 index 00000000..ba200b07 --- /dev/null +++ b/crypto/chainiv.c @@ -0,0 +1,362 @@ +/* + * chainiv: Chain IV Generator + * + * Generate IVs simply be using the last block of the previous encryption. + * This is mainly useful for CBC with a synchronous algorithm. + * + * Copyright (c) 2007 Herbert Xu <herbert@gondor.apana.org.au> + * + * This program is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License as published by the Free + * Software Foundation; either version 2 of the License, or (at your option) + * any later version. 
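+ *
+ * Two implementations live below: a synchronous one that serialises IV
+ * updates with a spinlock, and an asynchronous one that funnels
+ * requests through a crypto_queue and a work item so the IV chain is
+ * never advanced concurrently.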
+ * + */ + +#include <crypto/internal/skcipher.h> +#include <crypto/rng.h> +#include <crypto/crypto_wq.h> +#include <linux/err.h> +#include <linux/init.h> +#include <linux/kernel.h> +#include <linux/module.h> +#include <linux/spinlock.h> +#include <linux/string.h> +#include <linux/workqueue.h> + +enum { + CHAINIV_STATE_INUSE = 0, +}; + +struct chainiv_ctx { + spinlock_t lock; + char iv[]; +}; + +struct async_chainiv_ctx { + unsigned long state; + + spinlock_t lock; + int err; + + struct crypto_queue queue; + struct work_struct postponed; + + char iv[]; +}; + +static int chainiv_givencrypt(struct skcipher_givcrypt_request *req) +{ + struct crypto_ablkcipher *geniv = skcipher_givcrypt_reqtfm(req); + struct chainiv_ctx *ctx = crypto_ablkcipher_ctx(geniv); + struct ablkcipher_request *subreq = skcipher_givcrypt_reqctx(req); + unsigned int ivsize; + int err; + + ablkcipher_request_set_tfm(subreq, skcipher_geniv_cipher(geniv)); + ablkcipher_request_set_callback(subreq, req->creq.base.flags & + ~CRYPTO_TFM_REQ_MAY_SLEEP, + req->creq.base.complete, + req->creq.base.data); + ablkcipher_request_set_crypt(subreq, req->creq.src, req->creq.dst, + req->creq.nbytes, req->creq.info); + + spin_lock_bh(&ctx->lock); + + ivsize = crypto_ablkcipher_ivsize(geniv); + + memcpy(req->giv, ctx->iv, ivsize); + memcpy(subreq->info, ctx->iv, ivsize); + + err = crypto_ablkcipher_encrypt(subreq); + if (err) + goto unlock; + + memcpy(ctx->iv, subreq->info, ivsize); + +unlock: + spin_unlock_bh(&ctx->lock); + + return err; +} + +static int chainiv_givencrypt_first(struct skcipher_givcrypt_request *req) +{ + struct crypto_ablkcipher *geniv = skcipher_givcrypt_reqtfm(req); + struct chainiv_ctx *ctx = crypto_ablkcipher_ctx(geniv); + int err = 0; + + spin_lock_bh(&ctx->lock); + if (crypto_ablkcipher_crt(geniv)->givencrypt != + chainiv_givencrypt_first) + goto unlock; + + crypto_ablkcipher_crt(geniv)->givencrypt = chainiv_givencrypt; + err = crypto_rng_get_bytes(crypto_default_rng, ctx->iv, + crypto_ablkcipher_ivsize(geniv)); + +unlock: + spin_unlock_bh(&ctx->lock); + + if (err) + return err; + + return chainiv_givencrypt(req); +} + +static int chainiv_init_common(struct crypto_tfm *tfm) +{ + tfm->crt_ablkcipher.reqsize = sizeof(struct ablkcipher_request); + + return skcipher_geniv_init(tfm); +} + +static int chainiv_init(struct crypto_tfm *tfm) +{ + struct chainiv_ctx *ctx = crypto_tfm_ctx(tfm); + + spin_lock_init(&ctx->lock); + + return chainiv_init_common(tfm); +} + +static int async_chainiv_schedule_work(struct async_chainiv_ctx *ctx) +{ + int queued; + int err = ctx->err; + + if (!ctx->queue.qlen) { + smp_mb__before_clear_bit(); + clear_bit(CHAINIV_STATE_INUSE, &ctx->state); + + if (!ctx->queue.qlen || + test_and_set_bit(CHAINIV_STATE_INUSE, &ctx->state)) + goto out; + } + + queued = queue_work(kcrypto_wq, &ctx->postponed); + BUG_ON(!queued); + +out: + return err; +} + +static int async_chainiv_postpone_request(struct skcipher_givcrypt_request *req) +{ + struct crypto_ablkcipher *geniv = skcipher_givcrypt_reqtfm(req); + struct async_chainiv_ctx *ctx = crypto_ablkcipher_ctx(geniv); + int err; + + spin_lock_bh(&ctx->lock); + err = skcipher_enqueue_givcrypt(&ctx->queue, req); + spin_unlock_bh(&ctx->lock); + + if (test_and_set_bit(CHAINIV_STATE_INUSE, &ctx->state)) + return err; + + ctx->err = err; + return async_chainiv_schedule_work(ctx); +} + +static int async_chainiv_givencrypt_tail(struct skcipher_givcrypt_request *req) +{ + struct crypto_ablkcipher *geniv = skcipher_givcrypt_reqtfm(req); + struct async_chainiv_ctx *ctx = 
crypto_ablkcipher_ctx(geniv); + struct ablkcipher_request *subreq = skcipher_givcrypt_reqctx(req); + unsigned int ivsize = crypto_ablkcipher_ivsize(geniv); + + memcpy(req->giv, ctx->iv, ivsize); + memcpy(subreq->info, ctx->iv, ivsize); + + ctx->err = crypto_ablkcipher_encrypt(subreq); + if (ctx->err) + goto out; + + memcpy(ctx->iv, subreq->info, ivsize); + +out: + return async_chainiv_schedule_work(ctx); +} + +static int async_chainiv_givencrypt(struct skcipher_givcrypt_request *req) +{ + struct crypto_ablkcipher *geniv = skcipher_givcrypt_reqtfm(req); + struct async_chainiv_ctx *ctx = crypto_ablkcipher_ctx(geniv); + struct ablkcipher_request *subreq = skcipher_givcrypt_reqctx(req); + + ablkcipher_request_set_tfm(subreq, skcipher_geniv_cipher(geniv)); + ablkcipher_request_set_callback(subreq, req->creq.base.flags, + req->creq.base.complete, + req->creq.base.data); + ablkcipher_request_set_crypt(subreq, req->creq.src, req->creq.dst, + req->creq.nbytes, req->creq.info); + + if (test_and_set_bit(CHAINIV_STATE_INUSE, &ctx->state)) + goto postpone; + + if (ctx->queue.qlen) { + clear_bit(CHAINIV_STATE_INUSE, &ctx->state); + goto postpone; + } + + return async_chainiv_givencrypt_tail(req); + +postpone: + return async_chainiv_postpone_request(req); +} + +static int async_chainiv_givencrypt_first(struct skcipher_givcrypt_request *req) +{ + struct crypto_ablkcipher *geniv = skcipher_givcrypt_reqtfm(req); + struct async_chainiv_ctx *ctx = crypto_ablkcipher_ctx(geniv); + int err = 0; + + if (test_and_set_bit(CHAINIV_STATE_INUSE, &ctx->state)) + goto out; + + if (crypto_ablkcipher_crt(geniv)->givencrypt != + async_chainiv_givencrypt_first) + goto unlock; + + crypto_ablkcipher_crt(geniv)->givencrypt = async_chainiv_givencrypt; + err = crypto_rng_get_bytes(crypto_default_rng, ctx->iv, + crypto_ablkcipher_ivsize(geniv)); + +unlock: + clear_bit(CHAINIV_STATE_INUSE, &ctx->state); + + if (err) + return err; + +out: + return async_chainiv_givencrypt(req); +} + +static void async_chainiv_do_postponed(struct work_struct *work) +{ + struct async_chainiv_ctx *ctx = container_of(work, + struct async_chainiv_ctx, + postponed); + struct skcipher_givcrypt_request *req; + struct ablkcipher_request *subreq; + int err; + + /* Only handle one request at a time to avoid hogging keventd. 
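+	 * Requests still left on the queue are picked up again when
+	 * async_chainiv_givencrypt_tail() reschedules this work item
+	 * through async_chainiv_schedule_work().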
*/ + spin_lock_bh(&ctx->lock); + req = skcipher_dequeue_givcrypt(&ctx->queue); + spin_unlock_bh(&ctx->lock); + + if (!req) { + async_chainiv_schedule_work(ctx); + return; + } + + subreq = skcipher_givcrypt_reqctx(req); + subreq->base.flags |= CRYPTO_TFM_REQ_MAY_SLEEP; + + err = async_chainiv_givencrypt_tail(req); + + local_bh_disable(); + skcipher_givcrypt_complete(req, err); + local_bh_enable(); +} + +static int async_chainiv_init(struct crypto_tfm *tfm) +{ + struct async_chainiv_ctx *ctx = crypto_tfm_ctx(tfm); + + spin_lock_init(&ctx->lock); + + crypto_init_queue(&ctx->queue, 100); + INIT_WORK(&ctx->postponed, async_chainiv_do_postponed); + + return chainiv_init_common(tfm); +} + +static void async_chainiv_exit(struct crypto_tfm *tfm) +{ + struct async_chainiv_ctx *ctx = crypto_tfm_ctx(tfm); + + BUG_ON(test_bit(CHAINIV_STATE_INUSE, &ctx->state) || ctx->queue.qlen); + + skcipher_geniv_exit(tfm); +} + +static struct crypto_template chainiv_tmpl; + +static struct crypto_instance *chainiv_alloc(struct rtattr **tb) +{ + struct crypto_attr_type *algt; + struct crypto_instance *inst; + int err; + + algt = crypto_get_attr_type(tb); + err = PTR_ERR(algt); + if (IS_ERR(algt)) + return ERR_PTR(err); + + err = crypto_get_default_rng(); + if (err) + return ERR_PTR(err); + + inst = skcipher_geniv_alloc(&chainiv_tmpl, tb, 0, 0); + if (IS_ERR(inst)) + goto put_rng; + + inst->alg.cra_ablkcipher.givencrypt = chainiv_givencrypt_first; + + inst->alg.cra_init = chainiv_init; + inst->alg.cra_exit = skcipher_geniv_exit; + + inst->alg.cra_ctxsize = sizeof(struct chainiv_ctx); + + if (!crypto_requires_sync(algt->type, algt->mask)) { + inst->alg.cra_flags |= CRYPTO_ALG_ASYNC; + + inst->alg.cra_ablkcipher.givencrypt = + async_chainiv_givencrypt_first; + + inst->alg.cra_init = async_chainiv_init; + inst->alg.cra_exit = async_chainiv_exit; + + inst->alg.cra_ctxsize = sizeof(struct async_chainiv_ctx); + } + + inst->alg.cra_ctxsize += inst->alg.cra_ablkcipher.ivsize; + +out: + return inst; + +put_rng: + crypto_put_default_rng(); + goto out; +} + +static void chainiv_free(struct crypto_instance *inst) +{ + skcipher_geniv_free(inst); + crypto_put_default_rng(); +} + +static struct crypto_template chainiv_tmpl = { + .name = "chainiv", + .alloc = chainiv_alloc, + .free = chainiv_free, + .module = THIS_MODULE, +}; + +static int __init chainiv_module_init(void) +{ + return crypto_register_template(&chainiv_tmpl); +} + +static void chainiv_module_exit(void) +{ + crypto_unregister_template(&chainiv_tmpl); +} + +module_init(chainiv_module_init); +module_exit(chainiv_module_exit); + +MODULE_LICENSE("GPL"); +MODULE_DESCRIPTION("Chain IV Generator"); diff --git a/crypto/cipher.c b/crypto/cipher.c new file mode 100644 index 00000000..39541e0e --- /dev/null +++ b/crypto/cipher.c @@ -0,0 +1,122 @@ +/* + * Cryptographic API. + * + * Cipher operations. + * + * Copyright (c) 2002 James Morris <jmorris@intercode.com.au> + * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au> + * + * This program is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License as published by the Free + * Software Foundation; either version 2 of the License, or (at your option) + * any later version. 
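+ *
+ * This file wires up the single-block entry points (cit_encrypt_one and
+ * cit_decrypt_one) and copies misaligned keys or blocks through aligned
+ * bounce buffers when the algorithm's alignment mask requires it.
+ * A rough usage sketch (not part of this file; error handling omitted
+ * and "aes" is only an example algorithm name):
+ *
+ *	struct crypto_cipher *tfm = crypto_alloc_cipher("aes", 0, 0);
+ *	crypto_cipher_setkey(tfm, key, 16);
+ *	crypto_cipher_encrypt_one(tfm, dst, src);
+ *	crypto_free_cipher(tfm);
+ *
+ * Each _one call processes exactly one block of cra_blocksize bytes.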
+ * + */ + +#include <linux/kernel.h> +#include <linux/crypto.h> +#include <linux/errno.h> +#include <linux/slab.h> +#include <linux/string.h> +#include "internal.h" + +static int setkey_unaligned(struct crypto_tfm *tfm, const u8 *key, + unsigned int keylen) +{ + struct cipher_alg *cia = &tfm->__crt_alg->cra_cipher; + unsigned long alignmask = crypto_tfm_alg_alignmask(tfm); + int ret; + u8 *buffer, *alignbuffer; + unsigned long absize; + + absize = keylen + alignmask; + buffer = kmalloc(absize, GFP_ATOMIC); + if (!buffer) + return -ENOMEM; + + alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1); + memcpy(alignbuffer, key, keylen); + ret = cia->cia_setkey(tfm, alignbuffer, keylen); + memset(alignbuffer, 0, keylen); + kfree(buffer); + return ret; + +} + +static int setkey(struct crypto_tfm *tfm, const u8 *key, unsigned int keylen) +{ + struct cipher_alg *cia = &tfm->__crt_alg->cra_cipher; + unsigned long alignmask = crypto_tfm_alg_alignmask(tfm); + + tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK; + if (keylen < cia->cia_min_keysize || keylen > cia->cia_max_keysize) { + tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN; + return -EINVAL; + } + + if ((unsigned long)key & alignmask) + return setkey_unaligned(tfm, key, keylen); + + return cia->cia_setkey(tfm, key, keylen); +} + +static void cipher_crypt_unaligned(void (*fn)(struct crypto_tfm *, u8 *, + const u8 *), + struct crypto_tfm *tfm, + u8 *dst, const u8 *src) +{ + unsigned long alignmask = crypto_tfm_alg_alignmask(tfm); + unsigned int size = crypto_tfm_alg_blocksize(tfm); + u8 buffer[size + alignmask]; + u8 *tmp = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1); + + memcpy(tmp, src, size); + fn(tfm, tmp, tmp); + memcpy(dst, tmp, size); +} + +static void cipher_encrypt_unaligned(struct crypto_tfm *tfm, + u8 *dst, const u8 *src) +{ + unsigned long alignmask = crypto_tfm_alg_alignmask(tfm); + struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher; + + if (unlikely(((unsigned long)dst | (unsigned long)src) & alignmask)) { + cipher_crypt_unaligned(cipher->cia_encrypt, tfm, dst, src); + return; + } + + cipher->cia_encrypt(tfm, dst, src); +} + +static void cipher_decrypt_unaligned(struct crypto_tfm *tfm, + u8 *dst, const u8 *src) +{ + unsigned long alignmask = crypto_tfm_alg_alignmask(tfm); + struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher; + + if (unlikely(((unsigned long)dst | (unsigned long)src) & alignmask)) { + cipher_crypt_unaligned(cipher->cia_decrypt, tfm, dst, src); + return; + } + + cipher->cia_decrypt(tfm, dst, src); +} + +int crypto_init_cipher_ops(struct crypto_tfm *tfm) +{ + struct cipher_tfm *ops = &tfm->crt_cipher; + struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher; + + ops->cit_setkey = setkey; + ops->cit_encrypt_one = crypto_tfm_alg_alignmask(tfm) ? + cipher_encrypt_unaligned : cipher->cia_encrypt; + ops->cit_decrypt_one = crypto_tfm_alg_alignmask(tfm) ? + cipher_decrypt_unaligned : cipher->cia_decrypt; + + return 0; +} + +void crypto_exit_cipher_ops(struct crypto_tfm *tfm) +{ +} diff --git a/crypto/compress.c b/crypto/compress.c new file mode 100644 index 00000000..c33f0763 --- /dev/null +++ b/crypto/compress.c @@ -0,0 +1,48 @@ +/* + * Cryptographic API. + * + * Compression operations. + * + * Copyright (c) 2002 James Morris <jmorris@intercode.com.au> + * + * This program is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License as published by the Free + * Software Foundation; either version 2 of the License, or (at your option) + * any later version. 
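+ *
+ * These are thin wrappers that dispatch to an algorithm's coa_compress
+ * and coa_decompress hooks.  A rough calling sketch (not part of this
+ * file; error handling omitted and "deflate" is only an example name):
+ *
+ *	struct crypto_comp *tfm = crypto_alloc_comp("deflate", 0, 0);
+ *	unsigned int dlen = dst_len;
+ *	crypto_comp_compress(tfm, src, slen, dst, &dlen);
+ *	crypto_free_comp(tfm);
+ *
+ * On return dlen holds the number of bytes actually written to dst.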
+ * + */ +#include <linux/types.h> +#include <linux/crypto.h> +#include <linux/errno.h> +#include <linux/string.h> +#include "internal.h" + +static int crypto_compress(struct crypto_tfm *tfm, + const u8 *src, unsigned int slen, + u8 *dst, unsigned int *dlen) +{ + return tfm->__crt_alg->cra_compress.coa_compress(tfm, src, slen, dst, + dlen); +} + +static int crypto_decompress(struct crypto_tfm *tfm, + const u8 *src, unsigned int slen, + u8 *dst, unsigned int *dlen) +{ + return tfm->__crt_alg->cra_compress.coa_decompress(tfm, src, slen, dst, + dlen); +} + +int crypto_init_compress_ops(struct crypto_tfm *tfm) +{ + struct compress_tfm *ops = &tfm->crt_compress; + + ops->cot_compress = crypto_compress; + ops->cot_decompress = crypto_decompress; + + return 0; +} + +void crypto_exit_compress_ops(struct crypto_tfm *tfm) +{ +} diff --git a/crypto/crc32c.c b/crypto/crc32c.c new file mode 100644 index 00000000..06f7018c --- /dev/null +++ b/crypto/crc32c.c @@ -0,0 +1,172 @@ +/* + * Cryptographic API. + * + * CRC32C chksum + * + *@Article{castagnoli-crc, + * author = { Guy Castagnoli and Stefan Braeuer and Martin Herrman}, + * title = {{Optimization of Cyclic Redundancy-Check Codes with 24 + * and 32 Parity Bits}}, + * journal = IEEE Transactions on Communication, + * year = {1993}, + * volume = {41}, + * number = {6}, + * pages = {}, + * month = {June}, + *} + * Used by the iSCSI driver, possibly others, and derived from the + * the iscsi-crc.c module of the linux-iscsi driver at + * http://linux-iscsi.sourceforge.net. + * + * Following the example of lib/crc32, this function is intended to be + * flexible and useful for all users. Modules that currently have their + * own crc32c, but hopefully may be able to use this one are: + * net/sctp (please add all your doco to here if you change to + * use this one!) + * <endoflist> + * + * Copyright (c) 2004 Cisco Systems, Inc. + * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au> + * + * This program is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License as published by the Free + * Software Foundation; either version 2 of the License, or (at your option) + * any later version. + * + */ + +#include <crypto/internal/hash.h> +#include <linux/init.h> +#include <linux/module.h> +#include <linux/string.h> +#include <linux/kernel.h> +#include <linux/crc32.h> + +#define CHKSUM_BLOCK_SIZE 1 +#define CHKSUM_DIGEST_SIZE 4 + +struct chksum_ctx { + u32 key; +}; + +struct chksum_desc_ctx { + u32 crc; +}; + +/* + * Steps through buffer one byte at at time, calculates reflected + * crc using table. + */ + +static int chksum_init(struct shash_desc *desc) +{ + struct chksum_ctx *mctx = crypto_shash_ctx(desc->tfm); + struct chksum_desc_ctx *ctx = shash_desc_ctx(desc); + + ctx->crc = mctx->key; + + return 0; +} + +/* + * Setting the seed allows arbitrary accumulators and flexible XOR policy + * If your algorithm starts with ~0, then XOR with ~0 before you set + * the seed. 
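+ * The key passed to chksum_setkey() below is read as a little-endian
+ * 32-bit value, and chksum_final() complements the accumulator before
+ * writing out the digest.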
+ */ +static int chksum_setkey(struct crypto_shash *tfm, const u8 *key, + unsigned int keylen) +{ + struct chksum_ctx *mctx = crypto_shash_ctx(tfm); + + if (keylen != sizeof(mctx->key)) { + crypto_shash_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN); + return -EINVAL; + } + mctx->key = le32_to_cpu(*(__le32 *)key); + return 0; +} + +static int chksum_update(struct shash_desc *desc, const u8 *data, + unsigned int length) +{ + struct chksum_desc_ctx *ctx = shash_desc_ctx(desc); + + ctx->crc = __crc32c_le(ctx->crc, data, length); + return 0; +} + +static int chksum_final(struct shash_desc *desc, u8 *out) +{ + struct chksum_desc_ctx *ctx = shash_desc_ctx(desc); + + *(__le32 *)out = ~cpu_to_le32p(&ctx->crc); + return 0; +} + +static int __chksum_finup(u32 *crcp, const u8 *data, unsigned int len, u8 *out) +{ + *(__le32 *)out = ~cpu_to_le32(__crc32c_le(*crcp, data, len)); + return 0; +} + +static int chksum_finup(struct shash_desc *desc, const u8 *data, + unsigned int len, u8 *out) +{ + struct chksum_desc_ctx *ctx = shash_desc_ctx(desc); + + return __chksum_finup(&ctx->crc, data, len, out); +} + +static int chksum_digest(struct shash_desc *desc, const u8 *data, + unsigned int length, u8 *out) +{ + struct chksum_ctx *mctx = crypto_shash_ctx(desc->tfm); + + return __chksum_finup(&mctx->key, data, length, out); +} + +static int crc32c_cra_init(struct crypto_tfm *tfm) +{ + struct chksum_ctx *mctx = crypto_tfm_ctx(tfm); + + mctx->key = ~0; + return 0; +} + +static struct shash_alg alg = { + .digestsize = CHKSUM_DIGEST_SIZE, + .setkey = chksum_setkey, + .init = chksum_init, + .update = chksum_update, + .final = chksum_final, + .finup = chksum_finup, + .digest = chksum_digest, + .descsize = sizeof(struct chksum_desc_ctx), + .base = { + .cra_name = "crc32c", + .cra_driver_name = "crc32c-generic", + .cra_priority = 100, + .cra_blocksize = CHKSUM_BLOCK_SIZE, + .cra_alignmask = 3, + .cra_ctxsize = sizeof(struct chksum_ctx), + .cra_module = THIS_MODULE, + .cra_init = crc32c_cra_init, + } +}; + +static int __init crc32c_mod_init(void) +{ + return crypto_register_shash(&alg); +} + +static void __exit crc32c_mod_fini(void) +{ + crypto_unregister_shash(&alg); +} + +module_init(crc32c_mod_init); +module_exit(crc32c_mod_fini); + +MODULE_AUTHOR("Clay Haapala <chaapala@cisco.com>"); +MODULE_DESCRIPTION("CRC32c (Castagnoli) calculations wrapper for lib/crc32c"); +MODULE_LICENSE("GPL"); diff --git a/crypto/cryptd.c b/crypto/cryptd.c new file mode 100644 index 00000000..671d4d6d --- /dev/null +++ b/crypto/cryptd.c @@ -0,0 +1,952 @@ +/* + * Software async crypto daemon. + * + * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au> + * + * Added AEAD support to cryptd. + * Authors: Tadeusz Struk (tadeusz.struk@intel.com) + * Adrian Hoban <adrian.hoban@intel.com> + * Gabriele Paoloni <gabriele.paoloni@intel.com> + * Aidan O'Mahony (aidan.o.mahony@intel.com) + * Copyright (c) 2010, Intel Corporation. + * + * This program is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License as published by the Free + * Software Foundation; either version 2 of the License, or (at your option) + * any later version. 
+ * + */ + +#include <crypto/algapi.h> +#include <crypto/internal/hash.h> +#include <crypto/internal/aead.h> +#include <crypto/cryptd.h> +#include <crypto/crypto_wq.h> +#include <linux/err.h> +#include <linux/init.h> +#include <linux/kernel.h> +#include <linux/list.h> +#include <linux/module.h> +#include <linux/scatterlist.h> +#include <linux/sched.h> +#include <linux/slab.h> + +#define CRYPTD_MAX_CPU_QLEN 100 + +struct cryptd_cpu_queue { + struct crypto_queue queue; + struct work_struct work; +}; + +struct cryptd_queue { + struct cryptd_cpu_queue __percpu *cpu_queue; +}; + +struct cryptd_instance_ctx { + struct crypto_spawn spawn; + struct cryptd_queue *queue; +}; + +struct hashd_instance_ctx { + struct crypto_shash_spawn spawn; + struct cryptd_queue *queue; +}; + +struct aead_instance_ctx { + struct crypto_aead_spawn aead_spawn; + struct cryptd_queue *queue; +}; + +struct cryptd_blkcipher_ctx { + struct crypto_blkcipher *child; +}; + +struct cryptd_blkcipher_request_ctx { + crypto_completion_t complete; +}; + +struct cryptd_hash_ctx { + struct crypto_shash *child; +}; + +struct cryptd_hash_request_ctx { + crypto_completion_t complete; + struct shash_desc desc; +}; + +struct cryptd_aead_ctx { + struct crypto_aead *child; +}; + +struct cryptd_aead_request_ctx { + crypto_completion_t complete; +}; + +static void cryptd_queue_worker(struct work_struct *work); + +static int cryptd_init_queue(struct cryptd_queue *queue, + unsigned int max_cpu_qlen) +{ + int cpu; + struct cryptd_cpu_queue *cpu_queue; + + queue->cpu_queue = alloc_percpu(struct cryptd_cpu_queue); + if (!queue->cpu_queue) + return -ENOMEM; + for_each_possible_cpu(cpu) { + cpu_queue = per_cpu_ptr(queue->cpu_queue, cpu); + crypto_init_queue(&cpu_queue->queue, max_cpu_qlen); + INIT_WORK(&cpu_queue->work, cryptd_queue_worker); + } + return 0; +} + +static void cryptd_fini_queue(struct cryptd_queue *queue) +{ + int cpu; + struct cryptd_cpu_queue *cpu_queue; + + for_each_possible_cpu(cpu) { + cpu_queue = per_cpu_ptr(queue->cpu_queue, cpu); + BUG_ON(cpu_queue->queue.qlen); + } + free_percpu(queue->cpu_queue); +} + +static int cryptd_enqueue_request(struct cryptd_queue *queue, + struct crypto_async_request *request) +{ + int cpu, err; + struct cryptd_cpu_queue *cpu_queue; + + cpu = get_cpu(); + cpu_queue = this_cpu_ptr(queue->cpu_queue); + err = crypto_enqueue_request(&cpu_queue->queue, request); + queue_work_on(cpu, kcrypto_wq, &cpu_queue->work); + put_cpu(); + + return err; +} + +/* Called in workqueue context, do one real cryption work (via + * req->complete) and reschedule itself if there are more work to + * do. */ +static void cryptd_queue_worker(struct work_struct *work) +{ + struct cryptd_cpu_queue *cpu_queue; + struct crypto_async_request *req, *backlog; + + cpu_queue = container_of(work, struct cryptd_cpu_queue, work); + /* Only handle one request at a time to avoid hogging crypto + * workqueue. 
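+	 * (If requests remain afterwards, the work item is simply requeued
+	 * at the bottom of this function.)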
preempt_disable/enable is used to prevent + * being preempted by cryptd_enqueue_request() */ + preempt_disable(); + backlog = crypto_get_backlog(&cpu_queue->queue); + req = crypto_dequeue_request(&cpu_queue->queue); + preempt_enable(); + + if (!req) + return; + + if (backlog) + backlog->complete(backlog, -EINPROGRESS); + req->complete(req, 0); + + if (cpu_queue->queue.qlen) + queue_work(kcrypto_wq, &cpu_queue->work); +} + +static inline struct cryptd_queue *cryptd_get_queue(struct crypto_tfm *tfm) +{ + struct crypto_instance *inst = crypto_tfm_alg_instance(tfm); + struct cryptd_instance_ctx *ictx = crypto_instance_ctx(inst); + return ictx->queue; +} + +static int cryptd_blkcipher_setkey(struct crypto_ablkcipher *parent, + const u8 *key, unsigned int keylen) +{ + struct cryptd_blkcipher_ctx *ctx = crypto_ablkcipher_ctx(parent); + struct crypto_blkcipher *child = ctx->child; + int err; + + crypto_blkcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK); + crypto_blkcipher_set_flags(child, crypto_ablkcipher_get_flags(parent) & + CRYPTO_TFM_REQ_MASK); + err = crypto_blkcipher_setkey(child, key, keylen); + crypto_ablkcipher_set_flags(parent, crypto_blkcipher_get_flags(child) & + CRYPTO_TFM_RES_MASK); + return err; +} + +static void cryptd_blkcipher_crypt(struct ablkcipher_request *req, + struct crypto_blkcipher *child, + int err, + int (*crypt)(struct blkcipher_desc *desc, + struct scatterlist *dst, + struct scatterlist *src, + unsigned int len)) +{ + struct cryptd_blkcipher_request_ctx *rctx; + struct blkcipher_desc desc; + + rctx = ablkcipher_request_ctx(req); + + if (unlikely(err == -EINPROGRESS)) + goto out; + + desc.tfm = child; + desc.info = req->info; + desc.flags = CRYPTO_TFM_REQ_MAY_SLEEP; + + err = crypt(&desc, req->dst, req->src, req->nbytes); + + req->base.complete = rctx->complete; + +out: + local_bh_disable(); + rctx->complete(&req->base, err); + local_bh_enable(); +} + +static void cryptd_blkcipher_encrypt(struct crypto_async_request *req, int err) +{ + struct cryptd_blkcipher_ctx *ctx = crypto_tfm_ctx(req->tfm); + struct crypto_blkcipher *child = ctx->child; + + cryptd_blkcipher_crypt(ablkcipher_request_cast(req), child, err, + crypto_blkcipher_crt(child)->encrypt); +} + +static void cryptd_blkcipher_decrypt(struct crypto_async_request *req, int err) +{ + struct cryptd_blkcipher_ctx *ctx = crypto_tfm_ctx(req->tfm); + struct crypto_blkcipher *child = ctx->child; + + cryptd_blkcipher_crypt(ablkcipher_request_cast(req), child, err, + crypto_blkcipher_crt(child)->decrypt); +} + +static int cryptd_blkcipher_enqueue(struct ablkcipher_request *req, + crypto_completion_t complete) +{ + struct cryptd_blkcipher_request_ctx *rctx = ablkcipher_request_ctx(req); + struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req); + struct cryptd_queue *queue; + + queue = cryptd_get_queue(crypto_ablkcipher_tfm(tfm)); + rctx->complete = req->base.complete; + req->base.complete = complete; + + return cryptd_enqueue_request(queue, &req->base); +} + +static int cryptd_blkcipher_encrypt_enqueue(struct ablkcipher_request *req) +{ + return cryptd_blkcipher_enqueue(req, cryptd_blkcipher_encrypt); +} + +static int cryptd_blkcipher_decrypt_enqueue(struct ablkcipher_request *req) +{ + return cryptd_blkcipher_enqueue(req, cryptd_blkcipher_decrypt); +} + +static int cryptd_blkcipher_init_tfm(struct crypto_tfm *tfm) +{ + struct crypto_instance *inst = crypto_tfm_alg_instance(tfm); + struct cryptd_instance_ctx *ictx = crypto_instance_ctx(inst); + struct crypto_spawn *spawn = &ictx->spawn; + struct cryptd_blkcipher_ctx 
*ctx = crypto_tfm_ctx(tfm); + struct crypto_blkcipher *cipher; + + cipher = crypto_spawn_blkcipher(spawn); + if (IS_ERR(cipher)) + return PTR_ERR(cipher); + + ctx->child = cipher; + tfm->crt_ablkcipher.reqsize = + sizeof(struct cryptd_blkcipher_request_ctx); + return 0; +} + +static void cryptd_blkcipher_exit_tfm(struct crypto_tfm *tfm) +{ + struct cryptd_blkcipher_ctx *ctx = crypto_tfm_ctx(tfm); + + crypto_free_blkcipher(ctx->child); +} + +static void *cryptd_alloc_instance(struct crypto_alg *alg, unsigned int head, + unsigned int tail) +{ + char *p; + struct crypto_instance *inst; + int err; + + p = kzalloc(head + sizeof(*inst) + tail, GFP_KERNEL); + if (!p) + return ERR_PTR(-ENOMEM); + + inst = (void *)(p + head); + + err = -ENAMETOOLONG; + if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME, + "cryptd(%s)", alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME) + goto out_free_inst; + + memcpy(inst->alg.cra_name, alg->cra_name, CRYPTO_MAX_ALG_NAME); + + inst->alg.cra_priority = alg->cra_priority + 50; + inst->alg.cra_blocksize = alg->cra_blocksize; + inst->alg.cra_alignmask = alg->cra_alignmask; + +out: + return p; + +out_free_inst: + kfree(p); + p = ERR_PTR(err); + goto out; +} + +static int cryptd_create_blkcipher(struct crypto_template *tmpl, + struct rtattr **tb, + struct cryptd_queue *queue) +{ + struct cryptd_instance_ctx *ctx; + struct crypto_instance *inst; + struct crypto_alg *alg; + int err; + + alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_BLKCIPHER, + CRYPTO_ALG_TYPE_MASK); + if (IS_ERR(alg)) + return PTR_ERR(alg); + + inst = cryptd_alloc_instance(alg, 0, sizeof(*ctx)); + err = PTR_ERR(inst); + if (IS_ERR(inst)) + goto out_put_alg; + + ctx = crypto_instance_ctx(inst); + ctx->queue = queue; + + err = crypto_init_spawn(&ctx->spawn, alg, inst, + CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC); + if (err) + goto out_free_inst; + + inst->alg.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC; + inst->alg.cra_type = &crypto_ablkcipher_type; + + inst->alg.cra_ablkcipher.ivsize = alg->cra_blkcipher.ivsize; + inst->alg.cra_ablkcipher.min_keysize = alg->cra_blkcipher.min_keysize; + inst->alg.cra_ablkcipher.max_keysize = alg->cra_blkcipher.max_keysize; + + inst->alg.cra_ablkcipher.geniv = alg->cra_blkcipher.geniv; + + inst->alg.cra_ctxsize = sizeof(struct cryptd_blkcipher_ctx); + + inst->alg.cra_init = cryptd_blkcipher_init_tfm; + inst->alg.cra_exit = cryptd_blkcipher_exit_tfm; + + inst->alg.cra_ablkcipher.setkey = cryptd_blkcipher_setkey; + inst->alg.cra_ablkcipher.encrypt = cryptd_blkcipher_encrypt_enqueue; + inst->alg.cra_ablkcipher.decrypt = cryptd_blkcipher_decrypt_enqueue; + + err = crypto_register_instance(tmpl, inst); + if (err) { + crypto_drop_spawn(&ctx->spawn); +out_free_inst: + kfree(inst); + } + +out_put_alg: + crypto_mod_put(alg); + return err; +} + +static int cryptd_hash_init_tfm(struct crypto_tfm *tfm) +{ + struct crypto_instance *inst = crypto_tfm_alg_instance(tfm); + struct hashd_instance_ctx *ictx = crypto_instance_ctx(inst); + struct crypto_shash_spawn *spawn = &ictx->spawn; + struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(tfm); + struct crypto_shash *hash; + + hash = crypto_spawn_shash(spawn); + if (IS_ERR(hash)) + return PTR_ERR(hash); + + ctx->child = hash; + crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm), + sizeof(struct cryptd_hash_request_ctx) + + crypto_shash_descsize(hash)); + return 0; +} + +static void cryptd_hash_exit_tfm(struct crypto_tfm *tfm) +{ + struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(tfm); + + crypto_free_shash(ctx->child); +} + +static int 
cryptd_hash_setkey(struct crypto_ahash *parent, + const u8 *key, unsigned int keylen) +{ + struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(parent); + struct crypto_shash *child = ctx->child; + int err; + + crypto_shash_clear_flags(child, CRYPTO_TFM_REQ_MASK); + crypto_shash_set_flags(child, crypto_ahash_get_flags(parent) & + CRYPTO_TFM_REQ_MASK); + err = crypto_shash_setkey(child, key, keylen); + crypto_ahash_set_flags(parent, crypto_shash_get_flags(child) & + CRYPTO_TFM_RES_MASK); + return err; +} + +static int cryptd_hash_enqueue(struct ahash_request *req, + crypto_completion_t complete) +{ + struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req); + struct crypto_ahash *tfm = crypto_ahash_reqtfm(req); + struct cryptd_queue *queue = + cryptd_get_queue(crypto_ahash_tfm(tfm)); + + rctx->complete = req->base.complete; + req->base.complete = complete; + + return cryptd_enqueue_request(queue, &req->base); +} + +static void cryptd_hash_init(struct crypto_async_request *req_async, int err) +{ + struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm); + struct crypto_shash *child = ctx->child; + struct ahash_request *req = ahash_request_cast(req_async); + struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req); + struct shash_desc *desc = &rctx->desc; + + if (unlikely(err == -EINPROGRESS)) + goto out; + + desc->tfm = child; + desc->flags = CRYPTO_TFM_REQ_MAY_SLEEP; + + err = crypto_shash_init(desc); + + req->base.complete = rctx->complete; + +out: + local_bh_disable(); + rctx->complete(&req->base, err); + local_bh_enable(); +} + +static int cryptd_hash_init_enqueue(struct ahash_request *req) +{ + return cryptd_hash_enqueue(req, cryptd_hash_init); +} + +static void cryptd_hash_update(struct crypto_async_request *req_async, int err) +{ + struct ahash_request *req = ahash_request_cast(req_async); + struct cryptd_hash_request_ctx *rctx; + + rctx = ahash_request_ctx(req); + + if (unlikely(err == -EINPROGRESS)) + goto out; + + err = shash_ahash_update(req, &rctx->desc); + + req->base.complete = rctx->complete; + +out: + local_bh_disable(); + rctx->complete(&req->base, err); + local_bh_enable(); +} + +static int cryptd_hash_update_enqueue(struct ahash_request *req) +{ + return cryptd_hash_enqueue(req, cryptd_hash_update); +} + +static void cryptd_hash_final(struct crypto_async_request *req_async, int err) +{ + struct ahash_request *req = ahash_request_cast(req_async); + struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req); + + if (unlikely(err == -EINPROGRESS)) + goto out; + + err = crypto_shash_final(&rctx->desc, req->result); + + req->base.complete = rctx->complete; + +out: + local_bh_disable(); + rctx->complete(&req->base, err); + local_bh_enable(); +} + +static int cryptd_hash_final_enqueue(struct ahash_request *req) +{ + return cryptd_hash_enqueue(req, cryptd_hash_final); +} + +static void cryptd_hash_finup(struct crypto_async_request *req_async, int err) +{ + struct ahash_request *req = ahash_request_cast(req_async); + struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req); + + if (unlikely(err == -EINPROGRESS)) + goto out; + + err = shash_ahash_finup(req, &rctx->desc); + + req->base.complete = rctx->complete; + +out: + local_bh_disable(); + rctx->complete(&req->base, err); + local_bh_enable(); +} + +static int cryptd_hash_finup_enqueue(struct ahash_request *req) +{ + return cryptd_hash_enqueue(req, cryptd_hash_finup); +} + +static void cryptd_hash_digest(struct crypto_async_request *req_async, int err) +{ + struct cryptd_hash_ctx *ctx = 
crypto_tfm_ctx(req_async->tfm); + struct crypto_shash *child = ctx->child; + struct ahash_request *req = ahash_request_cast(req_async); + struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req); + struct shash_desc *desc = &rctx->desc; + + if (unlikely(err == -EINPROGRESS)) + goto out; + + desc->tfm = child; + desc->flags = CRYPTO_TFM_REQ_MAY_SLEEP; + + err = shash_ahash_digest(req, desc); + + req->base.complete = rctx->complete; + +out: + local_bh_disable(); + rctx->complete(&req->base, err); + local_bh_enable(); +} + +static int cryptd_hash_digest_enqueue(struct ahash_request *req) +{ + return cryptd_hash_enqueue(req, cryptd_hash_digest); +} + +static int cryptd_hash_export(struct ahash_request *req, void *out) +{ + struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req); + + return crypto_shash_export(&rctx->desc, out); +} + +static int cryptd_hash_import(struct ahash_request *req, const void *in) +{ + struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req); + + return crypto_shash_import(&rctx->desc, in); +} + +static int cryptd_create_hash(struct crypto_template *tmpl, struct rtattr **tb, + struct cryptd_queue *queue) +{ + struct hashd_instance_ctx *ctx; + struct ahash_instance *inst; + struct shash_alg *salg; + struct crypto_alg *alg; + int err; + + salg = shash_attr_alg(tb[1], 0, 0); + if (IS_ERR(salg)) + return PTR_ERR(salg); + + alg = &salg->base; + inst = cryptd_alloc_instance(alg, ahash_instance_headroom(), + sizeof(*ctx)); + err = PTR_ERR(inst); + if (IS_ERR(inst)) + goto out_put_alg; + + ctx = ahash_instance_ctx(inst); + ctx->queue = queue; + + err = crypto_init_shash_spawn(&ctx->spawn, salg, + ahash_crypto_instance(inst)); + if (err) + goto out_free_inst; + + inst->alg.halg.base.cra_flags = CRYPTO_ALG_ASYNC; + + inst->alg.halg.digestsize = salg->digestsize; + inst->alg.halg.base.cra_ctxsize = sizeof(struct cryptd_hash_ctx); + + inst->alg.halg.base.cra_init = cryptd_hash_init_tfm; + inst->alg.halg.base.cra_exit = cryptd_hash_exit_tfm; + + inst->alg.init = cryptd_hash_init_enqueue; + inst->alg.update = cryptd_hash_update_enqueue; + inst->alg.final = cryptd_hash_final_enqueue; + inst->alg.finup = cryptd_hash_finup_enqueue; + inst->alg.export = cryptd_hash_export; + inst->alg.import = cryptd_hash_import; + inst->alg.setkey = cryptd_hash_setkey; + inst->alg.digest = cryptd_hash_digest_enqueue; + + err = ahash_register_instance(tmpl, inst); + if (err) { + crypto_drop_shash(&ctx->spawn); +out_free_inst: + kfree(inst); + } + +out_put_alg: + crypto_mod_put(alg); + return err; +} + +static void cryptd_aead_crypt(struct aead_request *req, + struct crypto_aead *child, + int err, + int (*crypt)(struct aead_request *req)) +{ + struct cryptd_aead_request_ctx *rctx; + rctx = aead_request_ctx(req); + + if (unlikely(err == -EINPROGRESS)) + goto out; + aead_request_set_tfm(req, child); + err = crypt( req ); + req->base.complete = rctx->complete; +out: + local_bh_disable(); + rctx->complete(&req->base, err); + local_bh_enable(); +} + +static void cryptd_aead_encrypt(struct crypto_async_request *areq, int err) +{ + struct cryptd_aead_ctx *ctx = crypto_tfm_ctx(areq->tfm); + struct crypto_aead *child = ctx->child; + struct aead_request *req; + + req = container_of(areq, struct aead_request, base); + cryptd_aead_crypt(req, child, err, crypto_aead_crt(child)->encrypt); +} + +static void cryptd_aead_decrypt(struct crypto_async_request *areq, int err) +{ + struct cryptd_aead_ctx *ctx = crypto_tfm_ctx(areq->tfm); + struct crypto_aead *child = ctx->child; + struct aead_request *req; + + 
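+	/* Recover the outer aead_request from its embedded base request. */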
req = container_of(areq, struct aead_request, base); + cryptd_aead_crypt(req, child, err, crypto_aead_crt(child)->decrypt); +} + +static int cryptd_aead_enqueue(struct aead_request *req, + crypto_completion_t complete) +{ + struct cryptd_aead_request_ctx *rctx = aead_request_ctx(req); + struct crypto_aead *tfm = crypto_aead_reqtfm(req); + struct cryptd_queue *queue = cryptd_get_queue(crypto_aead_tfm(tfm)); + + rctx->complete = req->base.complete; + req->base.complete = complete; + return cryptd_enqueue_request(queue, &req->base); +} + +static int cryptd_aead_encrypt_enqueue(struct aead_request *req) +{ + return cryptd_aead_enqueue(req, cryptd_aead_encrypt ); +} + +static int cryptd_aead_decrypt_enqueue(struct aead_request *req) +{ + return cryptd_aead_enqueue(req, cryptd_aead_decrypt ); +} + +static int cryptd_aead_init_tfm(struct crypto_tfm *tfm) +{ + struct crypto_instance *inst = crypto_tfm_alg_instance(tfm); + struct aead_instance_ctx *ictx = crypto_instance_ctx(inst); + struct crypto_aead_spawn *spawn = &ictx->aead_spawn; + struct cryptd_aead_ctx *ctx = crypto_tfm_ctx(tfm); + struct crypto_aead *cipher; + + cipher = crypto_spawn_aead(spawn); + if (IS_ERR(cipher)) + return PTR_ERR(cipher); + + crypto_aead_set_flags(cipher, CRYPTO_TFM_REQ_MAY_SLEEP); + ctx->child = cipher; + tfm->crt_aead.reqsize = sizeof(struct cryptd_aead_request_ctx); + return 0; +} + +static void cryptd_aead_exit_tfm(struct crypto_tfm *tfm) +{ + struct cryptd_aead_ctx *ctx = crypto_tfm_ctx(tfm); + crypto_free_aead(ctx->child); +} + +static int cryptd_create_aead(struct crypto_template *tmpl, + struct rtattr **tb, + struct cryptd_queue *queue) +{ + struct aead_instance_ctx *ctx; + struct crypto_instance *inst; + struct crypto_alg *alg; + int err; + + alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_AEAD, + CRYPTO_ALG_TYPE_MASK); + if (IS_ERR(alg)) + return PTR_ERR(alg); + + inst = cryptd_alloc_instance(alg, 0, sizeof(*ctx)); + err = PTR_ERR(inst); + if (IS_ERR(inst)) + goto out_put_alg; + + ctx = crypto_instance_ctx(inst); + ctx->queue = queue; + + err = crypto_init_spawn(&ctx->aead_spawn.base, alg, inst, + CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC); + if (err) + goto out_free_inst; + + inst->alg.cra_flags = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_ASYNC; + inst->alg.cra_type = alg->cra_type; + inst->alg.cra_ctxsize = sizeof(struct cryptd_aead_ctx); + inst->alg.cra_init = cryptd_aead_init_tfm; + inst->alg.cra_exit = cryptd_aead_exit_tfm; + inst->alg.cra_aead.setkey = alg->cra_aead.setkey; + inst->alg.cra_aead.setauthsize = alg->cra_aead.setauthsize; + inst->alg.cra_aead.geniv = alg->cra_aead.geniv; + inst->alg.cra_aead.ivsize = alg->cra_aead.ivsize; + inst->alg.cra_aead.maxauthsize = alg->cra_aead.maxauthsize; + inst->alg.cra_aead.encrypt = cryptd_aead_encrypt_enqueue; + inst->alg.cra_aead.decrypt = cryptd_aead_decrypt_enqueue; + inst->alg.cra_aead.givencrypt = alg->cra_aead.givencrypt; + inst->alg.cra_aead.givdecrypt = alg->cra_aead.givdecrypt; + + err = crypto_register_instance(tmpl, inst); + if (err) { + crypto_drop_spawn(&ctx->aead_spawn.base); +out_free_inst: + kfree(inst); + } +out_put_alg: + crypto_mod_put(alg); + return err; +} + +static struct cryptd_queue queue; + +static int cryptd_create(struct crypto_template *tmpl, struct rtattr **tb) +{ + struct crypto_attr_type *algt; + + algt = crypto_get_attr_type(tb); + if (IS_ERR(algt)) + return PTR_ERR(algt); + + switch (algt->type & algt->mask & CRYPTO_ALG_TYPE_MASK) { + case CRYPTO_ALG_TYPE_BLKCIPHER: + return cryptd_create_blkcipher(tmpl, tb, &queue); + case 
CRYPTO_ALG_TYPE_DIGEST: + return cryptd_create_hash(tmpl, tb, &queue); + case CRYPTO_ALG_TYPE_AEAD: + return cryptd_create_aead(tmpl, tb, &queue); + } + + return -EINVAL; +} + +static void cryptd_free(struct crypto_instance *inst) +{ + struct cryptd_instance_ctx *ctx = crypto_instance_ctx(inst); + struct hashd_instance_ctx *hctx = crypto_instance_ctx(inst); + struct aead_instance_ctx *aead_ctx = crypto_instance_ctx(inst); + + switch (inst->alg.cra_flags & CRYPTO_ALG_TYPE_MASK) { + case CRYPTO_ALG_TYPE_AHASH: + crypto_drop_shash(&hctx->spawn); + kfree(ahash_instance(inst)); + return; + case CRYPTO_ALG_TYPE_AEAD: + crypto_drop_spawn(&aead_ctx->aead_spawn.base); + kfree(inst); + return; + default: + crypto_drop_spawn(&ctx->spawn); + kfree(inst); + } +} + +static struct crypto_template cryptd_tmpl = { + .name = "cryptd", + .create = cryptd_create, + .free = cryptd_free, + .module = THIS_MODULE, +}; + +struct cryptd_ablkcipher *cryptd_alloc_ablkcipher(const char *alg_name, + u32 type, u32 mask) +{ + char cryptd_alg_name[CRYPTO_MAX_ALG_NAME]; + struct crypto_tfm *tfm; + + if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME, + "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME) + return ERR_PTR(-EINVAL); + type &= ~(CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_GENIV); + type |= CRYPTO_ALG_TYPE_BLKCIPHER; + mask &= ~CRYPTO_ALG_TYPE_MASK; + mask |= (CRYPTO_ALG_GENIV | CRYPTO_ALG_TYPE_BLKCIPHER_MASK); + tfm = crypto_alloc_base(cryptd_alg_name, type, mask); + if (IS_ERR(tfm)) + return ERR_CAST(tfm); + if (tfm->__crt_alg->cra_module != THIS_MODULE) { + crypto_free_tfm(tfm); + return ERR_PTR(-EINVAL); + } + + return __cryptd_ablkcipher_cast(__crypto_ablkcipher_cast(tfm)); +} +EXPORT_SYMBOL_GPL(cryptd_alloc_ablkcipher); + +struct crypto_blkcipher *cryptd_ablkcipher_child(struct cryptd_ablkcipher *tfm) +{ + struct cryptd_blkcipher_ctx *ctx = crypto_ablkcipher_ctx(&tfm->base); + return ctx->child; +} +EXPORT_SYMBOL_GPL(cryptd_ablkcipher_child); + +void cryptd_free_ablkcipher(struct cryptd_ablkcipher *tfm) +{ + crypto_free_ablkcipher(&tfm->base); +} +EXPORT_SYMBOL_GPL(cryptd_free_ablkcipher); + +struct cryptd_ahash *cryptd_alloc_ahash(const char *alg_name, + u32 type, u32 mask) +{ + char cryptd_alg_name[CRYPTO_MAX_ALG_NAME]; + struct crypto_ahash *tfm; + + if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME, + "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME) + return ERR_PTR(-EINVAL); + tfm = crypto_alloc_ahash(cryptd_alg_name, type, mask); + if (IS_ERR(tfm)) + return ERR_CAST(tfm); + if (tfm->base.__crt_alg->cra_module != THIS_MODULE) { + crypto_free_ahash(tfm); + return ERR_PTR(-EINVAL); + } + + return __cryptd_ahash_cast(tfm); +} +EXPORT_SYMBOL_GPL(cryptd_alloc_ahash); + +struct crypto_shash *cryptd_ahash_child(struct cryptd_ahash *tfm) +{ + struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base); + + return ctx->child; +} +EXPORT_SYMBOL_GPL(cryptd_ahash_child); + +struct shash_desc *cryptd_shash_desc(struct ahash_request *req) +{ + struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req); + return &rctx->desc; +} +EXPORT_SYMBOL_GPL(cryptd_shash_desc); + +void cryptd_free_ahash(struct cryptd_ahash *tfm) +{ + crypto_free_ahash(&tfm->base); +} +EXPORT_SYMBOL_GPL(cryptd_free_ahash); + +struct cryptd_aead *cryptd_alloc_aead(const char *alg_name, + u32 type, u32 mask) +{ + char cryptd_alg_name[CRYPTO_MAX_ALG_NAME]; + struct crypto_aead *tfm; + + if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME, + "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME) + return ERR_PTR(-EINVAL); + tfm = crypto_alloc_aead(cryptd_alg_name, type, 
mask); + if (IS_ERR(tfm)) + return ERR_CAST(tfm); + if (tfm->base.__crt_alg->cra_module != THIS_MODULE) { + crypto_free_aead(tfm); + return ERR_PTR(-EINVAL); + } + return __cryptd_aead_cast(tfm); +} +EXPORT_SYMBOL_GPL(cryptd_alloc_aead); + +struct crypto_aead *cryptd_aead_child(struct cryptd_aead *tfm) +{ + struct cryptd_aead_ctx *ctx; + ctx = crypto_aead_ctx(&tfm->base); + return ctx->child; +} +EXPORT_SYMBOL_GPL(cryptd_aead_child); + +void cryptd_free_aead(struct cryptd_aead *tfm) +{ + crypto_free_aead(&tfm->base); +} +EXPORT_SYMBOL_GPL(cryptd_free_aead); + +static int __init cryptd_init(void) +{ + int err; + + err = cryptd_init_queue(&queue, CRYPTD_MAX_CPU_QLEN); + if (err) + return err; + + err = crypto_register_template(&cryptd_tmpl); + if (err) + cryptd_fini_queue(&queue); + + return err; +} + +static void __exit cryptd_exit(void) +{ + cryptd_fini_queue(&queue); + crypto_unregister_template(&cryptd_tmpl); +} + +subsys_initcall(cryptd_init); +module_exit(cryptd_exit); + +MODULE_LICENSE("GPL"); +MODULE_DESCRIPTION("Software async crypto daemon"); diff --git a/crypto/crypto_null.c b/crypto/crypto_null.c new file mode 100644 index 00000000..07a8a96d --- /dev/null +++ b/crypto/crypto_null.c @@ -0,0 +1,207 @@ +/* + * Cryptographic API. + * + * Null algorithms, aka Much Ado About Nothing. + * + * These are needed for IPsec, and may be useful in general for + * testing & debugging. + * + * The null cipher is compliant with RFC2410. + * + * Copyright (c) 2002 James Morris <jmorris@intercode.com.au> + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 2 of the License, or + * (at your option) any later version. 
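+ *
+ * The "ecb(cipher_null)" blkcipher below simply copies plaintext to
+ * ciphertext (and back), and digest_null produces a zero-length
+ * digest; IPsec uses these when NULL encryption or authentication has
+ * been negotiated.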
+ * + */ + +#include <crypto/internal/hash.h> +#include <crypto/internal/skcipher.h> +#include <linux/init.h> +#include <linux/module.h> +#include <linux/mm.h> +#include <linux/string.h> + +#define NULL_KEY_SIZE 0 +#define NULL_BLOCK_SIZE 1 +#define NULL_DIGEST_SIZE 0 +#define NULL_IV_SIZE 0 + +static int null_compress(struct crypto_tfm *tfm, const u8 *src, + unsigned int slen, u8 *dst, unsigned int *dlen) +{ + if (slen > *dlen) + return -EINVAL; + memcpy(dst, src, slen); + *dlen = slen; + return 0; +} + +static int null_init(struct shash_desc *desc) +{ + return 0; +} + +static int null_update(struct shash_desc *desc, const u8 *data, + unsigned int len) +{ + return 0; +} + +static int null_final(struct shash_desc *desc, u8 *out) +{ + return 0; +} + +static int null_digest(struct shash_desc *desc, const u8 *data, + unsigned int len, u8 *out) +{ + return 0; +} + +static int null_hash_setkey(struct crypto_shash *tfm, const u8 *key, + unsigned int keylen) +{ return 0; } + +static int null_setkey(struct crypto_tfm *tfm, const u8 *key, + unsigned int keylen) +{ return 0; } + +static void null_crypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) +{ + memcpy(dst, src, NULL_BLOCK_SIZE); +} + +static int skcipher_null_crypt(struct blkcipher_desc *desc, + struct scatterlist *dst, + struct scatterlist *src, unsigned int nbytes) +{ + struct blkcipher_walk walk; + int err; + + blkcipher_walk_init(&walk, dst, src, nbytes); + err = blkcipher_walk_virt(desc, &walk); + + while (walk.nbytes) { + if (walk.src.virt.addr != walk.dst.virt.addr) + memcpy(walk.dst.virt.addr, walk.src.virt.addr, + walk.nbytes); + err = blkcipher_walk_done(desc, &walk, 0); + } + + return err; +} + +static struct crypto_alg compress_null = { + .cra_name = "compress_null", + .cra_flags = CRYPTO_ALG_TYPE_COMPRESS, + .cra_blocksize = NULL_BLOCK_SIZE, + .cra_ctxsize = 0, + .cra_module = THIS_MODULE, + .cra_list = LIST_HEAD_INIT(compress_null.cra_list), + .cra_u = { .compress = { + .coa_compress = null_compress, + .coa_decompress = null_compress } } +}; + +static struct shash_alg digest_null = { + .digestsize = NULL_DIGEST_SIZE, + .setkey = null_hash_setkey, + .init = null_init, + .update = null_update, + .finup = null_digest, + .digest = null_digest, + .final = null_final, + .base = { + .cra_name = "digest_null", + .cra_flags = CRYPTO_ALG_TYPE_SHASH, + .cra_blocksize = NULL_BLOCK_SIZE, + .cra_module = THIS_MODULE, + } +}; + +static struct crypto_alg cipher_null = { + .cra_name = "cipher_null", + .cra_flags = CRYPTO_ALG_TYPE_CIPHER, + .cra_blocksize = NULL_BLOCK_SIZE, + .cra_ctxsize = 0, + .cra_module = THIS_MODULE, + .cra_list = LIST_HEAD_INIT(cipher_null.cra_list), + .cra_u = { .cipher = { + .cia_min_keysize = NULL_KEY_SIZE, + .cia_max_keysize = NULL_KEY_SIZE, + .cia_setkey = null_setkey, + .cia_encrypt = null_crypt, + .cia_decrypt = null_crypt } } +}; + +static struct crypto_alg skcipher_null = { + .cra_name = "ecb(cipher_null)", + .cra_driver_name = "ecb-cipher_null", + .cra_priority = 100, + .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER, + .cra_blocksize = NULL_BLOCK_SIZE, + .cra_type = &crypto_blkcipher_type, + .cra_ctxsize = 0, + .cra_module = THIS_MODULE, + .cra_list = LIST_HEAD_INIT(skcipher_null.cra_list), + .cra_u = { .blkcipher = { + .min_keysize = NULL_KEY_SIZE, + .max_keysize = NULL_KEY_SIZE, + .ivsize = NULL_IV_SIZE, + .setkey = null_setkey, + .encrypt = skcipher_null_crypt, + .decrypt = skcipher_null_crypt } } +}; + +MODULE_ALIAS("compress_null"); +MODULE_ALIAS("digest_null"); +MODULE_ALIAS("cipher_null"); + +static int __init 
crypto_null_mod_init(void) +{ + int ret = 0; + + ret = crypto_register_alg(&cipher_null); + if (ret < 0) + goto out; + + ret = crypto_register_alg(&skcipher_null); + if (ret < 0) + goto out_unregister_cipher; + + ret = crypto_register_shash(&digest_null); + if (ret < 0) + goto out_unregister_skcipher; + + ret = crypto_register_alg(&compress_null); + if (ret < 0) + goto out_unregister_digest; + +out: + return ret; + +out_unregister_digest: + crypto_unregister_shash(&digest_null); +out_unregister_skcipher: + crypto_unregister_alg(&skcipher_null); +out_unregister_cipher: + crypto_unregister_alg(&cipher_null); + goto out; +} + +static void __exit crypto_null_mod_fini(void) +{ + crypto_unregister_alg(&compress_null); + crypto_unregister_shash(&digest_null); + crypto_unregister_alg(&skcipher_null); + crypto_unregister_alg(&cipher_null); +} + +module_init(crypto_null_mod_init); +module_exit(crypto_null_mod_fini); + +MODULE_LICENSE("GPL"); +MODULE_DESCRIPTION("Null Cryptographic Algorithms"); diff --git a/crypto/crypto_user.c b/crypto/crypto_user.c new file mode 100644 index 00000000..f1ea0a06 --- /dev/null +++ b/crypto/crypto_user.c @@ -0,0 +1,517 @@ +/* + * Crypto user configuration API. + * + * Copyright (C) 2011 secunet Security Networks AG + * Copyright (C) 2011 Steffen Klassert <steffen.klassert@secunet.com> + * + * This program is free software; you can redistribute it and/or modify it + * under the terms and conditions of the GNU General Public License, + * version 2, as published by the Free Software Foundation. + * + * This program is distributed in the hope it will be useful, but WITHOUT + * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or + * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for + * more details. + * + * You should have received a copy of the GNU General Public License along with + * this program; if not, write to the Free Software Foundation, Inc., + * 51 Franklin St - Fifth Floor, Boston, MA 02110-1301 USA. 
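+ *
+ * Userspace talks to this interface over a netlink socket: a request
+ * carries a struct crypto_user_alg naming an algorithm (by cra_name,
+ * cra_driver_name or type/mask), and the reply describes the matching
+ * algorithm through CRYPTOCFGA_* attributes; see crypto_report_one()
+ * below.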
+ */ + +#include <linux/module.h> +#include <linux/crypto.h> +#include <linux/cryptouser.h> +#include <linux/sched.h> +#include <net/netlink.h> +#include <linux/security.h> +#include <net/net_namespace.h> +#include <crypto/internal/aead.h> +#include <crypto/internal/skcipher.h> + +#include "internal.h" + +DEFINE_MUTEX(crypto_cfg_mutex); + +/* The crypto netlink socket */ +static struct sock *crypto_nlsk; + +struct crypto_dump_info { + struct sk_buff *in_skb; + struct sk_buff *out_skb; + u32 nlmsg_seq; + u16 nlmsg_flags; +}; + +static struct crypto_alg *crypto_alg_match(struct crypto_user_alg *p, int exact) +{ + struct crypto_alg *q, *alg = NULL; + + down_read(&crypto_alg_sem); + + list_for_each_entry(q, &crypto_alg_list, cra_list) { + int match = 0; + + if ((q->cra_flags ^ p->cru_type) & p->cru_mask) + continue; + + if (strlen(p->cru_driver_name)) + match = !strcmp(q->cra_driver_name, + p->cru_driver_name); + else if (!exact) + match = !strcmp(q->cra_name, p->cru_name); + + if (match) { + alg = q; + break; + } + } + + up_read(&crypto_alg_sem); + + return alg; +} + +static int crypto_report_cipher(struct sk_buff *skb, struct crypto_alg *alg) +{ + struct crypto_report_cipher rcipher; + + snprintf(rcipher.type, CRYPTO_MAX_ALG_NAME, "%s", "cipher"); + + rcipher.blocksize = alg->cra_blocksize; + rcipher.min_keysize = alg->cra_cipher.cia_min_keysize; + rcipher.max_keysize = alg->cra_cipher.cia_max_keysize; + + NLA_PUT(skb, CRYPTOCFGA_REPORT_CIPHER, + sizeof(struct crypto_report_cipher), &rcipher); + + return 0; + +nla_put_failure: + return -EMSGSIZE; +} + +static int crypto_report_comp(struct sk_buff *skb, struct crypto_alg *alg) +{ + struct crypto_report_comp rcomp; + + snprintf(rcomp.type, CRYPTO_MAX_ALG_NAME, "%s", "compression"); + + NLA_PUT(skb, CRYPTOCFGA_REPORT_COMPRESS, + sizeof(struct crypto_report_comp), &rcomp); + + return 0; + +nla_put_failure: + return -EMSGSIZE; +} + +static int crypto_report_one(struct crypto_alg *alg, + struct crypto_user_alg *ualg, struct sk_buff *skb) +{ + memcpy(&ualg->cru_name, &alg->cra_name, sizeof(ualg->cru_name)); + memcpy(&ualg->cru_driver_name, &alg->cra_driver_name, + sizeof(ualg->cru_driver_name)); + memcpy(&ualg->cru_module_name, module_name(alg->cra_module), + CRYPTO_MAX_ALG_NAME); + + ualg->cru_flags = alg->cra_flags; + ualg->cru_refcnt = atomic_read(&alg->cra_refcnt); + + NLA_PUT_U32(skb, CRYPTOCFGA_PRIORITY_VAL, alg->cra_priority); + + if (alg->cra_flags & CRYPTO_ALG_LARVAL) { + struct crypto_report_larval rl; + + snprintf(rl.type, CRYPTO_MAX_ALG_NAME, "%s", "larval"); + + NLA_PUT(skb, CRYPTOCFGA_REPORT_LARVAL, + sizeof(struct crypto_report_larval), &rl); + + goto out; + } + + if (alg->cra_type && alg->cra_type->report) { + if (alg->cra_type->report(skb, alg)) + goto nla_put_failure; + + goto out; + } + + switch (alg->cra_flags & (CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_LARVAL)) { + case CRYPTO_ALG_TYPE_CIPHER: + if (crypto_report_cipher(skb, alg)) + goto nla_put_failure; + + break; + case CRYPTO_ALG_TYPE_COMPRESS: + if (crypto_report_comp(skb, alg)) + goto nla_put_failure; + + break; + } + +out: + return 0; + +nla_put_failure: + return -EMSGSIZE; +} + +static int crypto_report_alg(struct crypto_alg *alg, + struct crypto_dump_info *info) +{ + struct sk_buff *in_skb = info->in_skb; + struct sk_buff *skb = info->out_skb; + struct nlmsghdr *nlh; + struct crypto_user_alg *ualg; + int err = 0; + + nlh = nlmsg_put(skb, NETLINK_CB(in_skb).pid, info->nlmsg_seq, + CRYPTO_MSG_GETALG, sizeof(*ualg), info->nlmsg_flags); + if (!nlh) { + err = -EMSGSIZE; + goto out; 
+ }
+
+ ualg = nlmsg_data(nlh);
+
+ err = crypto_report_one(alg, ualg, skb);
+ if (err) {
+ nlmsg_cancel(skb, nlh);
+ goto out;
+ }
+
+ nlmsg_end(skb, nlh);
+
+out:
+ return err;
+}
+
+static int crypto_report(struct sk_buff *in_skb, struct nlmsghdr *in_nlh,
+ struct nlattr **attrs)
+{
+ struct crypto_user_alg *p = nlmsg_data(in_nlh);
+ struct crypto_alg *alg;
+ struct sk_buff *skb;
+ struct crypto_dump_info info;
+ int err;
+
+ if (!p->cru_driver_name)
+ return -EINVAL;
+
+ alg = crypto_alg_match(p, 1);
+ if (!alg)
+ return -ENOENT;
+
+ skb = nlmsg_new(NLMSG_DEFAULT_SIZE, GFP_ATOMIC);
+ if (!skb)
+ return -ENOMEM;
+
+ info.in_skb = in_skb;
+ info.out_skb = skb;
+ info.nlmsg_seq = in_nlh->nlmsg_seq;
+ info.nlmsg_flags = 0;
+
+ err = crypto_report_alg(alg, &info);
+ if (err)
+ return err;
+
+ return nlmsg_unicast(crypto_nlsk, skb, NETLINK_CB(in_skb).pid);
+}
+
+static int crypto_dump_report(struct sk_buff *skb, struct netlink_callback *cb)
+{
+ struct crypto_alg *alg;
+ struct crypto_dump_info info;
+ int err;
+
+ if (cb->args[0])
+ goto out;
+
+ cb->args[0] = 1;
+
+ info.in_skb = cb->skb;
+ info.out_skb = skb;
+ info.nlmsg_seq = cb->nlh->nlmsg_seq;
+ info.nlmsg_flags = NLM_F_MULTI;
+
+ list_for_each_entry(alg, &crypto_alg_list, cra_list) {
+ err = crypto_report_alg(alg, &info);
+ if (err)
+ goto out_err;
+ }
+
+out:
+ return skb->len;
+out_err:
+ return err;
+}
+
+static int crypto_dump_report_done(struct netlink_callback *cb)
+{
+ return 0;
+}
+
+static int crypto_update_alg(struct sk_buff *skb, struct nlmsghdr *nlh,
+ struct nlattr **attrs)
+{
+ struct crypto_alg *alg;
+ struct crypto_user_alg *p = nlmsg_data(nlh);
+ struct nlattr *priority = attrs[CRYPTOCFGA_PRIORITY_VAL];
+ LIST_HEAD(list);
+
+ if (priority && !strlen(p->cru_driver_name))
+ return -EINVAL;
+
+ alg = crypto_alg_match(p, 1);
+ if (!alg)
+ return -ENOENT;
+
+ down_write(&crypto_alg_sem);
+
+ crypto_remove_spawns(alg, &list, NULL);
+
+ if (priority)
+ alg->cra_priority = nla_get_u32(priority);
+
+ up_write(&crypto_alg_sem);
+
+ crypto_remove_final(&list);
+
+ return 0;
+}
+
+static int crypto_del_alg(struct sk_buff *skb, struct nlmsghdr *nlh,
+ struct nlattr **attrs)
+{
+ struct crypto_alg *alg;
+ struct crypto_user_alg *p = nlmsg_data(nlh);
+
+ alg = crypto_alg_match(p, 1);
+ if (!alg)
+ return -ENOENT;
+
+ /* We cannot unregister core algorithms such as aes-generic.
+ * We would lose the reference in the crypto_alg_list to this algorithm
+ * if we try to unregister. Unregistering such an algorithm without
+ * removing the module is not possible, so we restrict to crypto
+ * instances that are built from templates.
*/ + if (!(alg->cra_flags & CRYPTO_ALG_INSTANCE)) + return -EINVAL; + + if (atomic_read(&alg->cra_refcnt) != 1) + return -EBUSY; + + return crypto_unregister_instance(alg); +} + +static struct crypto_alg *crypto_user_skcipher_alg(const char *name, u32 type, + u32 mask) +{ + int err; + struct crypto_alg *alg; + + type = crypto_skcipher_type(type); + mask = crypto_skcipher_mask(mask); + + for (;;) { + alg = crypto_lookup_skcipher(name, type, mask); + if (!IS_ERR(alg)) + return alg; + + err = PTR_ERR(alg); + if (err != -EAGAIN) + break; + if (signal_pending(current)) { + err = -EINTR; + break; + } + } + + return ERR_PTR(err); +} + +static struct crypto_alg *crypto_user_aead_alg(const char *name, u32 type, + u32 mask) +{ + int err; + struct crypto_alg *alg; + + type &= ~(CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_GENIV); + type |= CRYPTO_ALG_TYPE_AEAD; + mask &= ~(CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_GENIV); + mask |= CRYPTO_ALG_TYPE_MASK; + + for (;;) { + alg = crypto_lookup_aead(name, type, mask); + if (!IS_ERR(alg)) + return alg; + + err = PTR_ERR(alg); + if (err != -EAGAIN) + break; + if (signal_pending(current)) { + err = -EINTR; + break; + } + } + + return ERR_PTR(err); +} + +static int crypto_add_alg(struct sk_buff *skb, struct nlmsghdr *nlh, + struct nlattr **attrs) +{ + int exact = 0; + const char *name; + struct crypto_alg *alg; + struct crypto_user_alg *p = nlmsg_data(nlh); + struct nlattr *priority = attrs[CRYPTOCFGA_PRIORITY_VAL]; + + if (strlen(p->cru_driver_name)) + exact = 1; + + if (priority && !exact) + return -EINVAL; + + alg = crypto_alg_match(p, exact); + if (alg) + return -EEXIST; + + if (strlen(p->cru_driver_name)) + name = p->cru_driver_name; + else + name = p->cru_name; + + switch (p->cru_type & p->cru_mask & CRYPTO_ALG_TYPE_MASK) { + case CRYPTO_ALG_TYPE_AEAD: + alg = crypto_user_aead_alg(name, p->cru_type, p->cru_mask); + break; + case CRYPTO_ALG_TYPE_GIVCIPHER: + case CRYPTO_ALG_TYPE_BLKCIPHER: + case CRYPTO_ALG_TYPE_ABLKCIPHER: + alg = crypto_user_skcipher_alg(name, p->cru_type, p->cru_mask); + break; + default: + alg = crypto_alg_mod_lookup(name, p->cru_type, p->cru_mask); + } + + if (IS_ERR(alg)) + return PTR_ERR(alg); + + down_write(&crypto_alg_sem); + + if (priority) + alg->cra_priority = nla_get_u32(priority); + + up_write(&crypto_alg_sem); + + crypto_mod_put(alg); + + return 0; +} + +#define MSGSIZE(type) sizeof(struct type) + +static const int crypto_msg_min[CRYPTO_NR_MSGTYPES] = { + [CRYPTO_MSG_NEWALG - CRYPTO_MSG_BASE] = MSGSIZE(crypto_user_alg), + [CRYPTO_MSG_DELALG - CRYPTO_MSG_BASE] = MSGSIZE(crypto_user_alg), + [CRYPTO_MSG_UPDATEALG - CRYPTO_MSG_BASE] = MSGSIZE(crypto_user_alg), + [CRYPTO_MSG_GETALG - CRYPTO_MSG_BASE] = MSGSIZE(crypto_user_alg), +}; + +static const struct nla_policy crypto_policy[CRYPTOCFGA_MAX+1] = { + [CRYPTOCFGA_PRIORITY_VAL] = { .type = NLA_U32}, +}; + +#undef MSGSIZE + +static struct crypto_link { + int (*doit)(struct sk_buff *, struct nlmsghdr *, struct nlattr **); + int (*dump)(struct sk_buff *, struct netlink_callback *); + int (*done)(struct netlink_callback *); +} crypto_dispatch[CRYPTO_NR_MSGTYPES] = { + [CRYPTO_MSG_NEWALG - CRYPTO_MSG_BASE] = { .doit = crypto_add_alg}, + [CRYPTO_MSG_DELALG - CRYPTO_MSG_BASE] = { .doit = crypto_del_alg}, + [CRYPTO_MSG_UPDATEALG - CRYPTO_MSG_BASE] = { .doit = crypto_update_alg}, + [CRYPTO_MSG_GETALG - CRYPTO_MSG_BASE] = { .doit = crypto_report, + .dump = crypto_dump_report, + .done = crypto_dump_report_done}, +}; + +static int crypto_user_rcv_msg(struct sk_buff *skb, struct nlmsghdr *nlh) +{ + struct 
nlattr *attrs[CRYPTOCFGA_MAX+1]; + struct crypto_link *link; + int type, err; + + type = nlh->nlmsg_type; + if (type > CRYPTO_MSG_MAX) + return -EINVAL; + + type -= CRYPTO_MSG_BASE; + link = &crypto_dispatch[type]; + + if (!capable(CAP_NET_ADMIN)) + return -EPERM; + + if ((type == (CRYPTO_MSG_GETALG - CRYPTO_MSG_BASE) && + (nlh->nlmsg_flags & NLM_F_DUMP))) { + struct crypto_alg *alg; + u16 dump_alloc = 0; + + if (link->dump == NULL) + return -EINVAL; + + list_for_each_entry(alg, &crypto_alg_list, cra_list) + dump_alloc += CRYPTO_REPORT_MAXSIZE; + + { + struct netlink_dump_control c = { + .dump = link->dump, + .done = link->done, + .min_dump_alloc = dump_alloc, + }; + return netlink_dump_start(crypto_nlsk, skb, nlh, &c); + } + } + + err = nlmsg_parse(nlh, crypto_msg_min[type], attrs, CRYPTOCFGA_MAX, + crypto_policy); + if (err < 0) + return err; + + if (link->doit == NULL) + return -EINVAL; + + return link->doit(skb, nlh, attrs); +} + +static void crypto_netlink_rcv(struct sk_buff *skb) +{ + mutex_lock(&crypto_cfg_mutex); + netlink_rcv_skb(skb, &crypto_user_rcv_msg); + mutex_unlock(&crypto_cfg_mutex); +} + +static int __init crypto_user_init(void) +{ + crypto_nlsk = netlink_kernel_create(&init_net, NETLINK_CRYPTO, + 0, crypto_netlink_rcv, + NULL, THIS_MODULE); + if (!crypto_nlsk) + return -ENOMEM; + + return 0; +} + +static void __exit crypto_user_exit(void) +{ + netlink_kernel_release(crypto_nlsk); +} + +module_init(crypto_user_init); +module_exit(crypto_user_exit); +MODULE_LICENSE("GPL"); +MODULE_AUTHOR("Steffen Klassert <steffen.klassert@secunet.com>"); +MODULE_DESCRIPTION("Crypto userspace configuration API"); diff --git a/crypto/crypto_wq.c b/crypto/crypto_wq.c new file mode 100644 index 00000000..adad92a4 --- /dev/null +++ b/crypto/crypto_wq.c @@ -0,0 +1,40 @@ +/* + * Workqueue for crypto subsystem + * + * Copyright (c) 2009 Intel Corp. + * Author: Huang Ying <ying.huang@intel.com> + * + * This program is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License as published by the Free + * Software Foundation; either version 2 of the License, or (at your option) + * any later version. + * + */ + +#include <linux/workqueue.h> +#include <linux/module.h> +#include <crypto/algapi.h> +#include <crypto/crypto_wq.h> + +struct workqueue_struct *kcrypto_wq; +EXPORT_SYMBOL_GPL(kcrypto_wq); + +static int __init crypto_wq_init(void) +{ + kcrypto_wq = alloc_workqueue("crypto", + WQ_MEM_RECLAIM | WQ_CPU_INTENSIVE, 1); + if (unlikely(!kcrypto_wq)) + return -ENOMEM; + return 0; +} + +static void __exit crypto_wq_exit(void) +{ + destroy_workqueue(kcrypto_wq); +} + +module_init(crypto_wq_init); +module_exit(crypto_wq_exit); + +MODULE_LICENSE("GPL"); +MODULE_DESCRIPTION("Workqueue for crypto subsystem"); diff --git a/crypto/ctr.c b/crypto/ctr.c new file mode 100644 index 00000000..4ca7222c --- /dev/null +++ b/crypto/ctr.c @@ -0,0 +1,424 @@ +/* + * CTR: Counter mode + * + * (C) Copyright IBM Corp. 2007 - Joy Latten <latten@us.ibm.com> + * + * This program is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License as published by the Free + * Software Foundation; either version 2 of the License, or (at your option) + * any later version. 
+ * + */ + +#include <crypto/algapi.h> +#include <crypto/ctr.h> +#include <linux/err.h> +#include <linux/init.h> +#include <linux/kernel.h> +#include <linux/module.h> +#include <linux/random.h> +#include <linux/scatterlist.h> +#include <linux/slab.h> + +struct crypto_ctr_ctx { + struct crypto_cipher *child; +}; + +struct crypto_rfc3686_ctx { + struct crypto_blkcipher *child; + u8 nonce[CTR_RFC3686_NONCE_SIZE]; +}; + +static int crypto_ctr_setkey(struct crypto_tfm *parent, const u8 *key, + unsigned int keylen) +{ + struct crypto_ctr_ctx *ctx = crypto_tfm_ctx(parent); + struct crypto_cipher *child = ctx->child; + int err; + + crypto_cipher_clear_flags(child, CRYPTO_TFM_REQ_MASK); + crypto_cipher_set_flags(child, crypto_tfm_get_flags(parent) & + CRYPTO_TFM_REQ_MASK); + err = crypto_cipher_setkey(child, key, keylen); + crypto_tfm_set_flags(parent, crypto_cipher_get_flags(child) & + CRYPTO_TFM_RES_MASK); + + return err; +} + +static void crypto_ctr_crypt_final(struct blkcipher_walk *walk, + struct crypto_cipher *tfm) +{ + unsigned int bsize = crypto_cipher_blocksize(tfm); + unsigned long alignmask = crypto_cipher_alignmask(tfm); + u8 *ctrblk = walk->iv; + u8 tmp[bsize + alignmask]; + u8 *keystream = PTR_ALIGN(tmp + 0, alignmask + 1); + u8 *src = walk->src.virt.addr; + u8 *dst = walk->dst.virt.addr; + unsigned int nbytes = walk->nbytes; + + crypto_cipher_encrypt_one(tfm, keystream, ctrblk); + crypto_xor(keystream, src, nbytes); + memcpy(dst, keystream, nbytes); + + crypto_inc(ctrblk, bsize); +} + +static int crypto_ctr_crypt_segment(struct blkcipher_walk *walk, + struct crypto_cipher *tfm) +{ + void (*fn)(struct crypto_tfm *, u8 *, const u8 *) = + crypto_cipher_alg(tfm)->cia_encrypt; + unsigned int bsize = crypto_cipher_blocksize(tfm); + u8 *ctrblk = walk->iv; + u8 *src = walk->src.virt.addr; + u8 *dst = walk->dst.virt.addr; + unsigned int nbytes = walk->nbytes; + + do { + /* create keystream */ + fn(crypto_cipher_tfm(tfm), dst, ctrblk); + crypto_xor(dst, src, bsize); + + /* increment counter in counterblock */ + crypto_inc(ctrblk, bsize); + + src += bsize; + dst += bsize; + } while ((nbytes -= bsize) >= bsize); + + return nbytes; +} + +static int crypto_ctr_crypt_inplace(struct blkcipher_walk *walk, + struct crypto_cipher *tfm) +{ + void (*fn)(struct crypto_tfm *, u8 *, const u8 *) = + crypto_cipher_alg(tfm)->cia_encrypt; + unsigned int bsize = crypto_cipher_blocksize(tfm); + unsigned long alignmask = crypto_cipher_alignmask(tfm); + unsigned int nbytes = walk->nbytes; + u8 *ctrblk = walk->iv; + u8 *src = walk->src.virt.addr; + u8 tmp[bsize + alignmask]; + u8 *keystream = PTR_ALIGN(tmp + 0, alignmask + 1); + + do { + /* create keystream */ + fn(crypto_cipher_tfm(tfm), keystream, ctrblk); + crypto_xor(src, keystream, bsize); + + /* increment counter in counterblock */ + crypto_inc(ctrblk, bsize); + + src += bsize; + } while ((nbytes -= bsize) >= bsize); + + return nbytes; +} + +static int crypto_ctr_crypt(struct blkcipher_desc *desc, + struct scatterlist *dst, struct scatterlist *src, + unsigned int nbytes) +{ + struct blkcipher_walk walk; + struct crypto_blkcipher *tfm = desc->tfm; + struct crypto_ctr_ctx *ctx = crypto_blkcipher_ctx(tfm); + struct crypto_cipher *child = ctx->child; + unsigned int bsize = crypto_cipher_blocksize(child); + int err; + + blkcipher_walk_init(&walk, dst, src, nbytes); + err = blkcipher_walk_virt_block(desc, &walk, bsize); + + while (walk.nbytes >= bsize) { + if (walk.src.virt.addr == walk.dst.virt.addr) + nbytes = crypto_ctr_crypt_inplace(&walk, child); + else + 
nbytes = crypto_ctr_crypt_segment(&walk, child); + + err = blkcipher_walk_done(desc, &walk, nbytes); + } + + if (walk.nbytes) { + crypto_ctr_crypt_final(&walk, child); + err = blkcipher_walk_done(desc, &walk, 0); + } + + return err; +} + +static int crypto_ctr_init_tfm(struct crypto_tfm *tfm) +{ + struct crypto_instance *inst = (void *)tfm->__crt_alg; + struct crypto_spawn *spawn = crypto_instance_ctx(inst); + struct crypto_ctr_ctx *ctx = crypto_tfm_ctx(tfm); + struct crypto_cipher *cipher; + + cipher = crypto_spawn_cipher(spawn); + if (IS_ERR(cipher)) + return PTR_ERR(cipher); + + ctx->child = cipher; + + return 0; +} + +static void crypto_ctr_exit_tfm(struct crypto_tfm *tfm) +{ + struct crypto_ctr_ctx *ctx = crypto_tfm_ctx(tfm); + + crypto_free_cipher(ctx->child); +} + +static struct crypto_instance *crypto_ctr_alloc(struct rtattr **tb) +{ + struct crypto_instance *inst; + struct crypto_alg *alg; + int err; + + err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_BLKCIPHER); + if (err) + return ERR_PTR(err); + + alg = crypto_attr_alg(tb[1], CRYPTO_ALG_TYPE_CIPHER, + CRYPTO_ALG_TYPE_MASK); + if (IS_ERR(alg)) + return ERR_CAST(alg); + + /* Block size must be >= 4 bytes. */ + err = -EINVAL; + if (alg->cra_blocksize < 4) + goto out_put_alg; + + /* If this is false we'd fail the alignment of crypto_inc. */ + if (alg->cra_blocksize % 4) + goto out_put_alg; + + inst = crypto_alloc_instance("ctr", alg); + if (IS_ERR(inst)) + goto out; + + inst->alg.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER; + inst->alg.cra_priority = alg->cra_priority; + inst->alg.cra_blocksize = 1; + inst->alg.cra_alignmask = alg->cra_alignmask | (__alignof__(u32) - 1); + inst->alg.cra_type = &crypto_blkcipher_type; + + inst->alg.cra_blkcipher.ivsize = alg->cra_blocksize; + inst->alg.cra_blkcipher.min_keysize = alg->cra_cipher.cia_min_keysize; + inst->alg.cra_blkcipher.max_keysize = alg->cra_cipher.cia_max_keysize; + + inst->alg.cra_ctxsize = sizeof(struct crypto_ctr_ctx); + + inst->alg.cra_init = crypto_ctr_init_tfm; + inst->alg.cra_exit = crypto_ctr_exit_tfm; + + inst->alg.cra_blkcipher.setkey = crypto_ctr_setkey; + inst->alg.cra_blkcipher.encrypt = crypto_ctr_crypt; + inst->alg.cra_blkcipher.decrypt = crypto_ctr_crypt; + + inst->alg.cra_blkcipher.geniv = "chainiv"; + +out: + crypto_mod_put(alg); + return inst; + +out_put_alg: + inst = ERR_PTR(err); + goto out; +} + +static void crypto_ctr_free(struct crypto_instance *inst) +{ + crypto_drop_spawn(crypto_instance_ctx(inst)); + kfree(inst); +} + +static struct crypto_template crypto_ctr_tmpl = { + .name = "ctr", + .alloc = crypto_ctr_alloc, + .free = crypto_ctr_free, + .module = THIS_MODULE, +}; + +static int crypto_rfc3686_setkey(struct crypto_tfm *parent, const u8 *key, + unsigned int keylen) +{ + struct crypto_rfc3686_ctx *ctx = crypto_tfm_ctx(parent); + struct crypto_blkcipher *child = ctx->child; + int err; + + /* the nonce is stored in bytes at end of key */ + if (keylen < CTR_RFC3686_NONCE_SIZE) + return -EINVAL; + + memcpy(ctx->nonce, key + (keylen - CTR_RFC3686_NONCE_SIZE), + CTR_RFC3686_NONCE_SIZE); + + keylen -= CTR_RFC3686_NONCE_SIZE; + + crypto_blkcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK); + crypto_blkcipher_set_flags(child, crypto_tfm_get_flags(parent) & + CRYPTO_TFM_REQ_MASK); + err = crypto_blkcipher_setkey(child, key, keylen); + crypto_tfm_set_flags(parent, crypto_blkcipher_get_flags(child) & + CRYPTO_TFM_RES_MASK); + + return err; +} + +static int crypto_rfc3686_crypt(struct blkcipher_desc *desc, + struct scatterlist *dst, + struct scatterlist *src, unsigned int 
nbytes) +{ + struct crypto_blkcipher *tfm = desc->tfm; + struct crypto_rfc3686_ctx *ctx = crypto_blkcipher_ctx(tfm); + struct crypto_blkcipher *child = ctx->child; + unsigned long alignmask = crypto_blkcipher_alignmask(tfm); + u8 ivblk[CTR_RFC3686_BLOCK_SIZE + alignmask]; + u8 *iv = PTR_ALIGN(ivblk + 0, alignmask + 1); + u8 *info = desc->info; + int err; + + /* set up counter block */ + memcpy(iv, ctx->nonce, CTR_RFC3686_NONCE_SIZE); + memcpy(iv + CTR_RFC3686_NONCE_SIZE, info, CTR_RFC3686_IV_SIZE); + + /* initialize counter portion of counter block */ + *(__be32 *)(iv + CTR_RFC3686_NONCE_SIZE + CTR_RFC3686_IV_SIZE) = + cpu_to_be32(1); + + desc->tfm = child; + desc->info = iv; + err = crypto_blkcipher_encrypt_iv(desc, dst, src, nbytes); + desc->tfm = tfm; + desc->info = info; + + return err; +} + +static int crypto_rfc3686_init_tfm(struct crypto_tfm *tfm) +{ + struct crypto_instance *inst = (void *)tfm->__crt_alg; + struct crypto_spawn *spawn = crypto_instance_ctx(inst); + struct crypto_rfc3686_ctx *ctx = crypto_tfm_ctx(tfm); + struct crypto_blkcipher *cipher; + + cipher = crypto_spawn_blkcipher(spawn); + if (IS_ERR(cipher)) + return PTR_ERR(cipher); + + ctx->child = cipher; + + return 0; +} + +static void crypto_rfc3686_exit_tfm(struct crypto_tfm *tfm) +{ + struct crypto_rfc3686_ctx *ctx = crypto_tfm_ctx(tfm); + + crypto_free_blkcipher(ctx->child); +} + +static struct crypto_instance *crypto_rfc3686_alloc(struct rtattr **tb) +{ + struct crypto_instance *inst; + struct crypto_alg *alg; + int err; + + err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_BLKCIPHER); + if (err) + return ERR_PTR(err); + + alg = crypto_attr_alg(tb[1], CRYPTO_ALG_TYPE_BLKCIPHER, + CRYPTO_ALG_TYPE_MASK); + err = PTR_ERR(alg); + if (IS_ERR(alg)) + return ERR_PTR(err); + + /* We only support 16-byte blocks. */ + err = -EINVAL; + if (alg->cra_blkcipher.ivsize != CTR_RFC3686_BLOCK_SIZE) + goto out_put_alg; + + /* Not a stream cipher? 
*/ + if (alg->cra_blocksize != 1) + goto out_put_alg; + + inst = crypto_alloc_instance("rfc3686", alg); + if (IS_ERR(inst)) + goto out; + + inst->alg.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER; + inst->alg.cra_priority = alg->cra_priority; + inst->alg.cra_blocksize = 1; + inst->alg.cra_alignmask = alg->cra_alignmask; + inst->alg.cra_type = &crypto_blkcipher_type; + + inst->alg.cra_blkcipher.ivsize = CTR_RFC3686_IV_SIZE; + inst->alg.cra_blkcipher.min_keysize = alg->cra_blkcipher.min_keysize + + CTR_RFC3686_NONCE_SIZE; + inst->alg.cra_blkcipher.max_keysize = alg->cra_blkcipher.max_keysize + + CTR_RFC3686_NONCE_SIZE; + + inst->alg.cra_blkcipher.geniv = "seqiv"; + + inst->alg.cra_ctxsize = sizeof(struct crypto_rfc3686_ctx); + + inst->alg.cra_init = crypto_rfc3686_init_tfm; + inst->alg.cra_exit = crypto_rfc3686_exit_tfm; + + inst->alg.cra_blkcipher.setkey = crypto_rfc3686_setkey; + inst->alg.cra_blkcipher.encrypt = crypto_rfc3686_crypt; + inst->alg.cra_blkcipher.decrypt = crypto_rfc3686_crypt; + +out: + crypto_mod_put(alg); + return inst; + +out_put_alg: + inst = ERR_PTR(err); + goto out; +} + +static struct crypto_template crypto_rfc3686_tmpl = { + .name = "rfc3686", + .alloc = crypto_rfc3686_alloc, + .free = crypto_ctr_free, + .module = THIS_MODULE, +}; + +static int __init crypto_ctr_module_init(void) +{ + int err; + + err = crypto_register_template(&crypto_ctr_tmpl); + if (err) + goto out; + + err = crypto_register_template(&crypto_rfc3686_tmpl); + if (err) + goto out_drop_ctr; + +out: + return err; + +out_drop_ctr: + crypto_unregister_template(&crypto_ctr_tmpl); + goto out; +} + +static void __exit crypto_ctr_module_exit(void) +{ + crypto_unregister_template(&crypto_rfc3686_tmpl); + crypto_unregister_template(&crypto_ctr_tmpl); +} + +module_init(crypto_ctr_module_init); +module_exit(crypto_ctr_module_exit); + +MODULE_LICENSE("GPL"); +MODULE_DESCRIPTION("CTR Counter block mode"); +MODULE_ALIAS("rfc3686"); diff --git a/crypto/cts.c b/crypto/cts.c new file mode 100644 index 00000000..ccf9c5de --- /dev/null +++ b/crypto/cts.c @@ -0,0 +1,353 @@ +/* + * CTS: Cipher Text Stealing mode + * + * COPYRIGHT (c) 2008 + * The Regents of the University of Michigan + * ALL RIGHTS RESERVED + * + * Permission is granted to use, copy, create derivative works + * and redistribute this software and such derivative works + * for any purpose, so long as the name of The University of + * Michigan is not used in any advertising or publicity + * pertaining to the use of distribution of this software + * without specific, written prior authorization. If the + * above copyright notice or any other identification of the + * University of Michigan is included in any copy of any + * portion of this software, then the disclaimer below must + * also be included. + * + * THIS SOFTWARE IS PROVIDED AS IS, WITHOUT REPRESENTATION + * FROM THE UNIVERSITY OF MICHIGAN AS TO ITS FITNESS FOR ANY + * PURPOSE, AND WITHOUT WARRANTY BY THE UNIVERSITY OF + * MICHIGAN OF ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING + * WITHOUT LIMITATION THE IMPLIED WARRANTIES OF + * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE + * REGENTS OF THE UNIVERSITY OF MICHIGAN SHALL NOT BE LIABLE + * FOR ANY DAMAGES, INCLUDING SPECIAL, INDIRECT, INCIDENTAL, OR + * CONSEQUENTIAL DAMAGES, WITH RESPECT TO ANY CLAIM ARISING + * OUT OF OR IN CONNECTION WITH THE USE OF THE SOFTWARE, EVEN + * IF IT HAS BEEN OR IS HEREAFTER ADVISED OF THE POSSIBILITY OF + * SUCH DAMAGES. 
+ */ + +/* Derived from various: + * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au> + */ + +/* + * This is the Cipher Text Stealing mode as described by + * Section 8 of rfc2040 and referenced by rfc3962. + * rfc3962 includes errata information in its Appendix A. + */ + +#include <crypto/algapi.h> +#include <linux/err.h> +#include <linux/init.h> +#include <linux/kernel.h> +#include <linux/log2.h> +#include <linux/module.h> +#include <linux/scatterlist.h> +#include <crypto/scatterwalk.h> +#include <linux/slab.h> + +struct crypto_cts_ctx { + struct crypto_blkcipher *child; +}; + +static int crypto_cts_setkey(struct crypto_tfm *parent, const u8 *key, + unsigned int keylen) +{ + struct crypto_cts_ctx *ctx = crypto_tfm_ctx(parent); + struct crypto_blkcipher *child = ctx->child; + int err; + + crypto_blkcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK); + crypto_blkcipher_set_flags(child, crypto_tfm_get_flags(parent) & + CRYPTO_TFM_REQ_MASK); + err = crypto_blkcipher_setkey(child, key, keylen); + crypto_tfm_set_flags(parent, crypto_blkcipher_get_flags(child) & + CRYPTO_TFM_RES_MASK); + return err; +} + +static int cts_cbc_encrypt(struct crypto_cts_ctx *ctx, + struct blkcipher_desc *desc, + struct scatterlist *dst, + struct scatterlist *src, + unsigned int offset, + unsigned int nbytes) +{ + int bsize = crypto_blkcipher_blocksize(desc->tfm); + u8 tmp[bsize], tmp2[bsize]; + struct blkcipher_desc lcldesc; + struct scatterlist sgsrc[1], sgdst[1]; + int lastn = nbytes - bsize; + u8 iv[bsize]; + u8 s[bsize * 2], d[bsize * 2]; + int err; + + if (lastn < 0) + return -EINVAL; + + sg_init_table(sgsrc, 1); + sg_init_table(sgdst, 1); + + memset(s, 0, sizeof(s)); + scatterwalk_map_and_copy(s, src, offset, nbytes, 0); + + memcpy(iv, desc->info, bsize); + + lcldesc.tfm = ctx->child; + lcldesc.info = iv; + lcldesc.flags = desc->flags; + + sg_set_buf(&sgsrc[0], s, bsize); + sg_set_buf(&sgdst[0], tmp, bsize); + err = crypto_blkcipher_encrypt_iv(&lcldesc, sgdst, sgsrc, bsize); + + memcpy(d + bsize, tmp, lastn); + + lcldesc.info = tmp; + + sg_set_buf(&sgsrc[0], s + bsize, bsize); + sg_set_buf(&sgdst[0], tmp2, bsize); + err = crypto_blkcipher_encrypt_iv(&lcldesc, sgdst, sgsrc, bsize); + + memcpy(d, tmp2, bsize); + + scatterwalk_map_and_copy(d, dst, offset, nbytes, 1); + + memcpy(desc->info, tmp2, bsize); + + return err; +} + +static int crypto_cts_encrypt(struct blkcipher_desc *desc, + struct scatterlist *dst, struct scatterlist *src, + unsigned int nbytes) +{ + struct crypto_cts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm); + int bsize = crypto_blkcipher_blocksize(desc->tfm); + int tot_blocks = (nbytes + bsize - 1) / bsize; + int cbc_blocks = tot_blocks > 2 ? 
tot_blocks - 2 : 0; + struct blkcipher_desc lcldesc; + int err; + + lcldesc.tfm = ctx->child; + lcldesc.info = desc->info; + lcldesc.flags = desc->flags; + + if (tot_blocks == 1) { + err = crypto_blkcipher_encrypt_iv(&lcldesc, dst, src, bsize); + } else if (nbytes <= bsize * 2) { + err = cts_cbc_encrypt(ctx, desc, dst, src, 0, nbytes); + } else { + /* do normal function for tot_blocks - 2 */ + err = crypto_blkcipher_encrypt_iv(&lcldesc, dst, src, + cbc_blocks * bsize); + if (err == 0) { + /* do cts for final two blocks */ + err = cts_cbc_encrypt(ctx, desc, dst, src, + cbc_blocks * bsize, + nbytes - (cbc_blocks * bsize)); + } + } + + return err; +} + +static int cts_cbc_decrypt(struct crypto_cts_ctx *ctx, + struct blkcipher_desc *desc, + struct scatterlist *dst, + struct scatterlist *src, + unsigned int offset, + unsigned int nbytes) +{ + int bsize = crypto_blkcipher_blocksize(desc->tfm); + u8 tmp[bsize]; + struct blkcipher_desc lcldesc; + struct scatterlist sgsrc[1], sgdst[1]; + int lastn = nbytes - bsize; + u8 iv[bsize]; + u8 s[bsize * 2], d[bsize * 2]; + int err; + + if (lastn < 0) + return -EINVAL; + + sg_init_table(sgsrc, 1); + sg_init_table(sgdst, 1); + + scatterwalk_map_and_copy(s, src, offset, nbytes, 0); + + lcldesc.tfm = ctx->child; + lcldesc.info = iv; + lcldesc.flags = desc->flags; + + /* 1. Decrypt Cn-1 (s) to create Dn (tmp)*/ + memset(iv, 0, sizeof(iv)); + sg_set_buf(&sgsrc[0], s, bsize); + sg_set_buf(&sgdst[0], tmp, bsize); + err = crypto_blkcipher_decrypt_iv(&lcldesc, sgdst, sgsrc, bsize); + if (err) + return err; + /* 2. Pad Cn with zeros at the end to create C of length BB */ + memset(iv, 0, sizeof(iv)); + memcpy(iv, s + bsize, lastn); + /* 3. Exclusive-or Dn (tmp) with C (iv) to create Xn (tmp) */ + crypto_xor(tmp, iv, bsize); + /* 4. Select the first Ln bytes of Xn (tmp) to create Pn */ + memcpy(d + bsize, tmp, lastn); + + /* 5. Append the tail (BB - Ln) bytes of Xn (tmp) to Cn to create En */ + memcpy(s + bsize + lastn, tmp + lastn, bsize - lastn); + /* 6. Decrypt En to create Pn-1 */ + memset(iv, 0, sizeof(iv)); + sg_set_buf(&sgsrc[0], s + bsize, bsize); + sg_set_buf(&sgdst[0], d, bsize); + err = crypto_blkcipher_decrypt_iv(&lcldesc, sgdst, sgsrc, bsize); + + /* XOR with previous block */ + crypto_xor(d, desc->info, bsize); + + scatterwalk_map_and_copy(d, dst, offset, nbytes, 1); + + memcpy(desc->info, s, bsize); + return err; +} + +static int crypto_cts_decrypt(struct blkcipher_desc *desc, + struct scatterlist *dst, struct scatterlist *src, + unsigned int nbytes) +{ + struct crypto_cts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm); + int bsize = crypto_blkcipher_blocksize(desc->tfm); + int tot_blocks = (nbytes + bsize - 1) / bsize; + int cbc_blocks = tot_blocks > 2 ? 
tot_blocks - 2 : 0; + struct blkcipher_desc lcldesc; + int err; + + lcldesc.tfm = ctx->child; + lcldesc.info = desc->info; + lcldesc.flags = desc->flags; + + if (tot_blocks == 1) { + err = crypto_blkcipher_decrypt_iv(&lcldesc, dst, src, bsize); + } else if (nbytes <= bsize * 2) { + err = cts_cbc_decrypt(ctx, desc, dst, src, 0, nbytes); + } else { + /* do normal function for tot_blocks - 2 */ + err = crypto_blkcipher_decrypt_iv(&lcldesc, dst, src, + cbc_blocks * bsize); + if (err == 0) { + /* do cts for final two blocks */ + err = cts_cbc_decrypt(ctx, desc, dst, src, + cbc_blocks * bsize, + nbytes - (cbc_blocks * bsize)); + } + } + return err; +} + +static int crypto_cts_init_tfm(struct crypto_tfm *tfm) +{ + struct crypto_instance *inst = (void *)tfm->__crt_alg; + struct crypto_spawn *spawn = crypto_instance_ctx(inst); + struct crypto_cts_ctx *ctx = crypto_tfm_ctx(tfm); + struct crypto_blkcipher *cipher; + + cipher = crypto_spawn_blkcipher(spawn); + if (IS_ERR(cipher)) + return PTR_ERR(cipher); + + ctx->child = cipher; + return 0; +} + +static void crypto_cts_exit_tfm(struct crypto_tfm *tfm) +{ + struct crypto_cts_ctx *ctx = crypto_tfm_ctx(tfm); + crypto_free_blkcipher(ctx->child); +} + +static struct crypto_instance *crypto_cts_alloc(struct rtattr **tb) +{ + struct crypto_instance *inst; + struct crypto_alg *alg; + int err; + + err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_BLKCIPHER); + if (err) + return ERR_PTR(err); + + alg = crypto_attr_alg(tb[1], CRYPTO_ALG_TYPE_BLKCIPHER, + CRYPTO_ALG_TYPE_MASK); + err = PTR_ERR(alg); + if (IS_ERR(alg)) + return ERR_PTR(err); + + inst = ERR_PTR(-EINVAL); + if (!is_power_of_2(alg->cra_blocksize)) + goto out_put_alg; + + inst = crypto_alloc_instance("cts", alg); + if (IS_ERR(inst)) + goto out_put_alg; + + inst->alg.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER; + inst->alg.cra_priority = alg->cra_priority; + inst->alg.cra_blocksize = alg->cra_blocksize; + inst->alg.cra_alignmask = alg->cra_alignmask; + inst->alg.cra_type = &crypto_blkcipher_type; + + /* We access the data as u32s when xoring. 
*/ + inst->alg.cra_alignmask |= __alignof__(u32) - 1; + + inst->alg.cra_blkcipher.ivsize = alg->cra_blocksize; + inst->alg.cra_blkcipher.min_keysize = alg->cra_blkcipher.min_keysize; + inst->alg.cra_blkcipher.max_keysize = alg->cra_blkcipher.max_keysize; + + inst->alg.cra_blkcipher.geniv = "seqiv"; + + inst->alg.cra_ctxsize = sizeof(struct crypto_cts_ctx); + + inst->alg.cra_init = crypto_cts_init_tfm; + inst->alg.cra_exit = crypto_cts_exit_tfm; + + inst->alg.cra_blkcipher.setkey = crypto_cts_setkey; + inst->alg.cra_blkcipher.encrypt = crypto_cts_encrypt; + inst->alg.cra_blkcipher.decrypt = crypto_cts_decrypt; + +out_put_alg: + crypto_mod_put(alg); + return inst; +} + +static void crypto_cts_free(struct crypto_instance *inst) +{ + crypto_drop_spawn(crypto_instance_ctx(inst)); + kfree(inst); +} + +static struct crypto_template crypto_cts_tmpl = { + .name = "cts", + .alloc = crypto_cts_alloc, + .free = crypto_cts_free, + .module = THIS_MODULE, +}; + +static int __init crypto_cts_module_init(void) +{ + return crypto_register_template(&crypto_cts_tmpl); +} + +static void __exit crypto_cts_module_exit(void) +{ + crypto_unregister_template(&crypto_cts_tmpl); +} + +module_init(crypto_cts_module_init); +module_exit(crypto_cts_module_exit); + +MODULE_LICENSE("Dual BSD/GPL"); +MODULE_DESCRIPTION("CTS-CBC CipherText Stealing for CBC"); diff --git a/crypto/deflate.c b/crypto/deflate.c new file mode 100644 index 00000000..b0165eca --- /dev/null +++ b/crypto/deflate.c @@ -0,0 +1,226 @@ +/* + * Cryptographic API. + * + * Deflate algorithm (RFC 1951), implemented here primarily for use + * by IPCOMP (RFC 3173 & RFC 2394). + * + * Copyright (c) 2003 James Morris <jmorris@intercode.com.au> + * + * This program is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License as published by the Free + * Software Foundation; either version 2 of the License, or (at your option) + * any later version. + * + * FIXME: deflate transforms will require up to a total of about 436k of kernel + * memory on i386 (390k for compression, the rest for decompression), as the + * current zlib kernel code uses a worst case pre-allocation system by default. + * This needs to be fixed so that the amount of memory required is properly + * related to the winbits and memlevel parameters. + * + * The default winbits of 11 should suit most packets, and it may be something + * to configure on a per-tfm basis in the future. + * + * Currently, compression history is not maintained between tfm calls, as + * it is not needed for IPCOMP and keeps the code simpler. It can be + * implemented if someone wants it. 
+ */ +#include <linux/init.h> +#include <linux/module.h> +#include <linux/crypto.h> +#include <linux/zlib.h> +#include <linux/vmalloc.h> +#include <linux/interrupt.h> +#include <linux/mm.h> +#include <linux/net.h> + +#define DEFLATE_DEF_LEVEL Z_DEFAULT_COMPRESSION +#define DEFLATE_DEF_WINBITS 11 +#define DEFLATE_DEF_MEMLEVEL MAX_MEM_LEVEL + +struct deflate_ctx { + struct z_stream_s comp_stream; + struct z_stream_s decomp_stream; +}; + +static int deflate_comp_init(struct deflate_ctx *ctx) +{ + int ret = 0; + struct z_stream_s *stream = &ctx->comp_stream; + + stream->workspace = vzalloc(zlib_deflate_workspacesize( + -DEFLATE_DEF_WINBITS, DEFLATE_DEF_MEMLEVEL)); + if (!stream->workspace) { + ret = -ENOMEM; + goto out; + } + ret = zlib_deflateInit2(stream, DEFLATE_DEF_LEVEL, Z_DEFLATED, + -DEFLATE_DEF_WINBITS, DEFLATE_DEF_MEMLEVEL, + Z_DEFAULT_STRATEGY); + if (ret != Z_OK) { + ret = -EINVAL; + goto out_free; + } +out: + return ret; +out_free: + vfree(stream->workspace); + goto out; +} + +static int deflate_decomp_init(struct deflate_ctx *ctx) +{ + int ret = 0; + struct z_stream_s *stream = &ctx->decomp_stream; + + stream->workspace = vzalloc(zlib_inflate_workspacesize()); + if (!stream->workspace) { + ret = -ENOMEM; + goto out; + } + ret = zlib_inflateInit2(stream, -DEFLATE_DEF_WINBITS); + if (ret != Z_OK) { + ret = -EINVAL; + goto out_free; + } +out: + return ret; +out_free: + vfree(stream->workspace); + goto out; +} + +static void deflate_comp_exit(struct deflate_ctx *ctx) +{ + zlib_deflateEnd(&ctx->comp_stream); + vfree(ctx->comp_stream.workspace); +} + +static void deflate_decomp_exit(struct deflate_ctx *ctx) +{ + zlib_inflateEnd(&ctx->decomp_stream); + vfree(ctx->decomp_stream.workspace); +} + +static int deflate_init(struct crypto_tfm *tfm) +{ + struct deflate_ctx *ctx = crypto_tfm_ctx(tfm); + int ret; + + ret = deflate_comp_init(ctx); + if (ret) + goto out; + ret = deflate_decomp_init(ctx); + if (ret) + deflate_comp_exit(ctx); +out: + return ret; +} + +static void deflate_exit(struct crypto_tfm *tfm) +{ + struct deflate_ctx *ctx = crypto_tfm_ctx(tfm); + + deflate_comp_exit(ctx); + deflate_decomp_exit(ctx); +} + +static int deflate_compress(struct crypto_tfm *tfm, const u8 *src, + unsigned int slen, u8 *dst, unsigned int *dlen) +{ + int ret = 0; + struct deflate_ctx *dctx = crypto_tfm_ctx(tfm); + struct z_stream_s *stream = &dctx->comp_stream; + + ret = zlib_deflateReset(stream); + if (ret != Z_OK) { + ret = -EINVAL; + goto out; + } + + stream->next_in = (u8 *)src; + stream->avail_in = slen; + stream->next_out = (u8 *)dst; + stream->avail_out = *dlen; + + ret = zlib_deflate(stream, Z_FINISH); + if (ret != Z_STREAM_END) { + ret = -EINVAL; + goto out; + } + ret = 0; + *dlen = stream->total_out; +out: + return ret; +} + +static int deflate_decompress(struct crypto_tfm *tfm, const u8 *src, + unsigned int slen, u8 *dst, unsigned int *dlen) +{ + + int ret = 0; + struct deflate_ctx *dctx = crypto_tfm_ctx(tfm); + struct z_stream_s *stream = &dctx->decomp_stream; + + ret = zlib_inflateReset(stream); + if (ret != Z_OK) { + ret = -EINVAL; + goto out; + } + + stream->next_in = (u8 *)src; + stream->avail_in = slen; + stream->next_out = (u8 *)dst; + stream->avail_out = *dlen; + + ret = zlib_inflate(stream, Z_SYNC_FLUSH); + /* + * Work around a bug in zlib, which sometimes wants to taste an extra + * byte when being used in the (undocumented) raw deflate mode. + * (From USAGI). 
+ */ + if (ret == Z_OK && !stream->avail_in && stream->avail_out) { + u8 zerostuff = 0; + stream->next_in = &zerostuff; + stream->avail_in = 1; + ret = zlib_inflate(stream, Z_FINISH); + } + if (ret != Z_STREAM_END) { + ret = -EINVAL; + goto out; + } + ret = 0; + *dlen = stream->total_out; +out: + return ret; +} + +static struct crypto_alg alg = { + .cra_name = "deflate", + .cra_flags = CRYPTO_ALG_TYPE_COMPRESS, + .cra_ctxsize = sizeof(struct deflate_ctx), + .cra_module = THIS_MODULE, + .cra_list = LIST_HEAD_INIT(alg.cra_list), + .cra_init = deflate_init, + .cra_exit = deflate_exit, + .cra_u = { .compress = { + .coa_compress = deflate_compress, + .coa_decompress = deflate_decompress } } +}; + +static int __init deflate_mod_init(void) +{ + return crypto_register_alg(&alg); +} + +static void __exit deflate_mod_fini(void) +{ + crypto_unregister_alg(&alg); +} + +module_init(deflate_mod_init); +module_exit(deflate_mod_fini); + +MODULE_LICENSE("GPL"); +MODULE_DESCRIPTION("Deflate Compression Algorithm for IPCOMP"); +MODULE_AUTHOR("James Morris <jmorris@intercode.com.au>"); + diff --git a/crypto/des_generic.c b/crypto/des_generic.c new file mode 100644 index 00000000..873818d4 --- /dev/null +++ b/crypto/des_generic.c @@ -0,0 +1,1007 @@ +/* + * Cryptographic API. + * + * DES & Triple DES EDE Cipher Algorithms. + * + * Copyright (c) 2005 Dag Arne Osvik <da@osvik.no> + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 2 of the License, or + * (at your option) any later version. + * + */ + +#include <asm/byteorder.h> +#include <linux/bitops.h> +#include <linux/init.h> +#include <linux/module.h> +#include <linux/errno.h> +#include <linux/crypto.h> +#include <linux/types.h> + +#include <crypto/des.h> + +#define ROL(x, r) ((x) = rol32((x), (r))) +#define ROR(x, r) ((x) = ror32((x), (r))) + +struct des_ctx { + u32 expkey[DES_EXPKEY_WORDS]; +}; + +struct des3_ede_ctx { + u32 expkey[DES3_EDE_EXPKEY_WORDS]; +}; + +/* Lookup tables for key expansion */ + +static const u8 pc1[256] = { + 0x00, 0x00, 0x40, 0x04, 0x10, 0x10, 0x50, 0x14, + 0x04, 0x40, 0x44, 0x44, 0x14, 0x50, 0x54, 0x54, + 0x02, 0x02, 0x42, 0x06, 0x12, 0x12, 0x52, 0x16, + 0x06, 0x42, 0x46, 0x46, 0x16, 0x52, 0x56, 0x56, + 0x80, 0x08, 0xc0, 0x0c, 0x90, 0x18, 0xd0, 0x1c, + 0x84, 0x48, 0xc4, 0x4c, 0x94, 0x58, 0xd4, 0x5c, + 0x82, 0x0a, 0xc2, 0x0e, 0x92, 0x1a, 0xd2, 0x1e, + 0x86, 0x4a, 0xc6, 0x4e, 0x96, 0x5a, 0xd6, 0x5e, + 0x20, 0x20, 0x60, 0x24, 0x30, 0x30, 0x70, 0x34, + 0x24, 0x60, 0x64, 0x64, 0x34, 0x70, 0x74, 0x74, + 0x22, 0x22, 0x62, 0x26, 0x32, 0x32, 0x72, 0x36, + 0x26, 0x62, 0x66, 0x66, 0x36, 0x72, 0x76, 0x76, + 0xa0, 0x28, 0xe0, 0x2c, 0xb0, 0x38, 0xf0, 0x3c, + 0xa4, 0x68, 0xe4, 0x6c, 0xb4, 0x78, 0xf4, 0x7c, + 0xa2, 0x2a, 0xe2, 0x2e, 0xb2, 0x3a, 0xf2, 0x3e, + 0xa6, 0x6a, 0xe6, 0x6e, 0xb6, 0x7a, 0xf6, 0x7e, + 0x08, 0x80, 0x48, 0x84, 0x18, 0x90, 0x58, 0x94, + 0x0c, 0xc0, 0x4c, 0xc4, 0x1c, 0xd0, 0x5c, 0xd4, + 0x0a, 0x82, 0x4a, 0x86, 0x1a, 0x92, 0x5a, 0x96, + 0x0e, 0xc2, 0x4e, 0xc6, 0x1e, 0xd2, 0x5e, 0xd6, + 0x88, 0x88, 0xc8, 0x8c, 0x98, 0x98, 0xd8, 0x9c, + 0x8c, 0xc8, 0xcc, 0xcc, 0x9c, 0xd8, 0xdc, 0xdc, + 0x8a, 0x8a, 0xca, 0x8e, 0x9a, 0x9a, 0xda, 0x9e, + 0x8e, 0xca, 0xce, 0xce, 0x9e, 0xda, 0xde, 0xde, + 0x28, 0xa0, 0x68, 0xa4, 0x38, 0xb0, 0x78, 0xb4, + 0x2c, 0xe0, 0x6c, 0xe4, 0x3c, 0xf0, 0x7c, 0xf4, + 0x2a, 0xa2, 0x6a, 0xa6, 0x3a, 0xb2, 0x7a, 0xb6, + 0x2e, 0xe2, 0x6e, 0xe6, 0x3e, 0xf2, 0x7e, 0xf6, + 0xa8, 
0xa8, 0xe8, 0xac, 0xb8, 0xb8, 0xf8, 0xbc, + 0xac, 0xe8, 0xec, 0xec, 0xbc, 0xf8, 0xfc, 0xfc, + 0xaa, 0xaa, 0xea, 0xae, 0xba, 0xba, 0xfa, 0xbe, + 0xae, 0xea, 0xee, 0xee, 0xbe, 0xfa, 0xfe, 0xfe +}; + +static const u8 rs[256] = { + 0x00, 0x00, 0x80, 0x80, 0x02, 0x02, 0x82, 0x82, + 0x04, 0x04, 0x84, 0x84, 0x06, 0x06, 0x86, 0x86, + 0x08, 0x08, 0x88, 0x88, 0x0a, 0x0a, 0x8a, 0x8a, + 0x0c, 0x0c, 0x8c, 0x8c, 0x0e, 0x0e, 0x8e, 0x8e, + 0x10, 0x10, 0x90, 0x90, 0x12, 0x12, 0x92, 0x92, + 0x14, 0x14, 0x94, 0x94, 0x16, 0x16, 0x96, 0x96, + 0x18, 0x18, 0x98, 0x98, 0x1a, 0x1a, 0x9a, 0x9a, + 0x1c, 0x1c, 0x9c, 0x9c, 0x1e, 0x1e, 0x9e, 0x9e, + 0x20, 0x20, 0xa0, 0xa0, 0x22, 0x22, 0xa2, 0xa2, + 0x24, 0x24, 0xa4, 0xa4, 0x26, 0x26, 0xa6, 0xa6, + 0x28, 0x28, 0xa8, 0xa8, 0x2a, 0x2a, 0xaa, 0xaa, + 0x2c, 0x2c, 0xac, 0xac, 0x2e, 0x2e, 0xae, 0xae, + 0x30, 0x30, 0xb0, 0xb0, 0x32, 0x32, 0xb2, 0xb2, + 0x34, 0x34, 0xb4, 0xb4, 0x36, 0x36, 0xb6, 0xb6, + 0x38, 0x38, 0xb8, 0xb8, 0x3a, 0x3a, 0xba, 0xba, + 0x3c, 0x3c, 0xbc, 0xbc, 0x3e, 0x3e, 0xbe, 0xbe, + 0x40, 0x40, 0xc0, 0xc0, 0x42, 0x42, 0xc2, 0xc2, + 0x44, 0x44, 0xc4, 0xc4, 0x46, 0x46, 0xc6, 0xc6, + 0x48, 0x48, 0xc8, 0xc8, 0x4a, 0x4a, 0xca, 0xca, + 0x4c, 0x4c, 0xcc, 0xcc, 0x4e, 0x4e, 0xce, 0xce, + 0x50, 0x50, 0xd0, 0xd0, 0x52, 0x52, 0xd2, 0xd2, + 0x54, 0x54, 0xd4, 0xd4, 0x56, 0x56, 0xd6, 0xd6, + 0x58, 0x58, 0xd8, 0xd8, 0x5a, 0x5a, 0xda, 0xda, + 0x5c, 0x5c, 0xdc, 0xdc, 0x5e, 0x5e, 0xde, 0xde, + 0x60, 0x60, 0xe0, 0xe0, 0x62, 0x62, 0xe2, 0xe2, + 0x64, 0x64, 0xe4, 0xe4, 0x66, 0x66, 0xe6, 0xe6, + 0x68, 0x68, 0xe8, 0xe8, 0x6a, 0x6a, 0xea, 0xea, + 0x6c, 0x6c, 0xec, 0xec, 0x6e, 0x6e, 0xee, 0xee, + 0x70, 0x70, 0xf0, 0xf0, 0x72, 0x72, 0xf2, 0xf2, + 0x74, 0x74, 0xf4, 0xf4, 0x76, 0x76, 0xf6, 0xf6, + 0x78, 0x78, 0xf8, 0xf8, 0x7a, 0x7a, 0xfa, 0xfa, + 0x7c, 0x7c, 0xfc, 0xfc, 0x7e, 0x7e, 0xfe, 0xfe +}; + +static const u32 pc2[1024] = { + 0x00000000, 0x00000000, 0x00000000, 0x00000000, + 0x00040000, 0x00000000, 0x04000000, 0x00100000, + 0x00400000, 0x00000008, 0x00000800, 0x40000000, + 0x00440000, 0x00000008, 0x04000800, 0x40100000, + 0x00000400, 0x00000020, 0x08000000, 0x00000100, + 0x00040400, 0x00000020, 0x0c000000, 0x00100100, + 0x00400400, 0x00000028, 0x08000800, 0x40000100, + 0x00440400, 0x00000028, 0x0c000800, 0x40100100, + 0x80000000, 0x00000010, 0x00000000, 0x00800000, + 0x80040000, 0x00000010, 0x04000000, 0x00900000, + 0x80400000, 0x00000018, 0x00000800, 0x40800000, + 0x80440000, 0x00000018, 0x04000800, 0x40900000, + 0x80000400, 0x00000030, 0x08000000, 0x00800100, + 0x80040400, 0x00000030, 0x0c000000, 0x00900100, + 0x80400400, 0x00000038, 0x08000800, 0x40800100, + 0x80440400, 0x00000038, 0x0c000800, 0x40900100, + 0x10000000, 0x00000000, 0x00200000, 0x00001000, + 0x10040000, 0x00000000, 0x04200000, 0x00101000, + 0x10400000, 0x00000008, 0x00200800, 0x40001000, + 0x10440000, 0x00000008, 0x04200800, 0x40101000, + 0x10000400, 0x00000020, 0x08200000, 0x00001100, + 0x10040400, 0x00000020, 0x0c200000, 0x00101100, + 0x10400400, 0x00000028, 0x08200800, 0x40001100, + 0x10440400, 0x00000028, 0x0c200800, 0x40101100, + 0x90000000, 0x00000010, 0x00200000, 0x00801000, + 0x90040000, 0x00000010, 0x04200000, 0x00901000, + 0x90400000, 0x00000018, 0x00200800, 0x40801000, + 0x90440000, 0x00000018, 0x04200800, 0x40901000, + 0x90000400, 0x00000030, 0x08200000, 0x00801100, + 0x90040400, 0x00000030, 0x0c200000, 0x00901100, + 0x90400400, 0x00000038, 0x08200800, 0x40801100, + 0x90440400, 0x00000038, 0x0c200800, 0x40901100, + 0x00000200, 0x00080000, 0x00000000, 0x00000004, + 0x00040200, 0x00080000, 0x04000000, 
0x00100004, + 0x00400200, 0x00080008, 0x00000800, 0x40000004, + 0x00440200, 0x00080008, 0x04000800, 0x40100004, + 0x00000600, 0x00080020, 0x08000000, 0x00000104, + 0x00040600, 0x00080020, 0x0c000000, 0x00100104, + 0x00400600, 0x00080028, 0x08000800, 0x40000104, + 0x00440600, 0x00080028, 0x0c000800, 0x40100104, + 0x80000200, 0x00080010, 0x00000000, 0x00800004, + 0x80040200, 0x00080010, 0x04000000, 0x00900004, + 0x80400200, 0x00080018, 0x00000800, 0x40800004, + 0x80440200, 0x00080018, 0x04000800, 0x40900004, + 0x80000600, 0x00080030, 0x08000000, 0x00800104, + 0x80040600, 0x00080030, 0x0c000000, 0x00900104, + 0x80400600, 0x00080038, 0x08000800, 0x40800104, + 0x80440600, 0x00080038, 0x0c000800, 0x40900104, + 0x10000200, 0x00080000, 0x00200000, 0x00001004, + 0x10040200, 0x00080000, 0x04200000, 0x00101004, + 0x10400200, 0x00080008, 0x00200800, 0x40001004, + 0x10440200, 0x00080008, 0x04200800, 0x40101004, + 0x10000600, 0x00080020, 0x08200000, 0x00001104, + 0x10040600, 0x00080020, 0x0c200000, 0x00101104, + 0x10400600, 0x00080028, 0x08200800, 0x40001104, + 0x10440600, 0x00080028, 0x0c200800, 0x40101104, + 0x90000200, 0x00080010, 0x00200000, 0x00801004, + 0x90040200, 0x00080010, 0x04200000, 0x00901004, + 0x90400200, 0x00080018, 0x00200800, 0x40801004, + 0x90440200, 0x00080018, 0x04200800, 0x40901004, + 0x90000600, 0x00080030, 0x08200000, 0x00801104, + 0x90040600, 0x00080030, 0x0c200000, 0x00901104, + 0x90400600, 0x00080038, 0x08200800, 0x40801104, + 0x90440600, 0x00080038, 0x0c200800, 0x40901104, + 0x00000002, 0x00002000, 0x20000000, 0x00000001, + 0x00040002, 0x00002000, 0x24000000, 0x00100001, + 0x00400002, 0x00002008, 0x20000800, 0x40000001, + 0x00440002, 0x00002008, 0x24000800, 0x40100001, + 0x00000402, 0x00002020, 0x28000000, 0x00000101, + 0x00040402, 0x00002020, 0x2c000000, 0x00100101, + 0x00400402, 0x00002028, 0x28000800, 0x40000101, + 0x00440402, 0x00002028, 0x2c000800, 0x40100101, + 0x80000002, 0x00002010, 0x20000000, 0x00800001, + 0x80040002, 0x00002010, 0x24000000, 0x00900001, + 0x80400002, 0x00002018, 0x20000800, 0x40800001, + 0x80440002, 0x00002018, 0x24000800, 0x40900001, + 0x80000402, 0x00002030, 0x28000000, 0x00800101, + 0x80040402, 0x00002030, 0x2c000000, 0x00900101, + 0x80400402, 0x00002038, 0x28000800, 0x40800101, + 0x80440402, 0x00002038, 0x2c000800, 0x40900101, + 0x10000002, 0x00002000, 0x20200000, 0x00001001, + 0x10040002, 0x00002000, 0x24200000, 0x00101001, + 0x10400002, 0x00002008, 0x20200800, 0x40001001, + 0x10440002, 0x00002008, 0x24200800, 0x40101001, + 0x10000402, 0x00002020, 0x28200000, 0x00001101, + 0x10040402, 0x00002020, 0x2c200000, 0x00101101, + 0x10400402, 0x00002028, 0x28200800, 0x40001101, + 0x10440402, 0x00002028, 0x2c200800, 0x40101101, + 0x90000002, 0x00002010, 0x20200000, 0x00801001, + 0x90040002, 0x00002010, 0x24200000, 0x00901001, + 0x90400002, 0x00002018, 0x20200800, 0x40801001, + 0x90440002, 0x00002018, 0x24200800, 0x40901001, + 0x90000402, 0x00002030, 0x28200000, 0x00801101, + 0x90040402, 0x00002030, 0x2c200000, 0x00901101, + 0x90400402, 0x00002038, 0x28200800, 0x40801101, + 0x90440402, 0x00002038, 0x2c200800, 0x40901101, + 0x00000202, 0x00082000, 0x20000000, 0x00000005, + 0x00040202, 0x00082000, 0x24000000, 0x00100005, + 0x00400202, 0x00082008, 0x20000800, 0x40000005, + 0x00440202, 0x00082008, 0x24000800, 0x40100005, + 0x00000602, 0x00082020, 0x28000000, 0x00000105, + 0x00040602, 0x00082020, 0x2c000000, 0x00100105, + 0x00400602, 0x00082028, 0x28000800, 0x40000105, + 0x00440602, 0x00082028, 0x2c000800, 0x40100105, + 0x80000202, 0x00082010, 0x20000000, 
0x00800005, + 0x80040202, 0x00082010, 0x24000000, 0x00900005, + 0x80400202, 0x00082018, 0x20000800, 0x40800005, + 0x80440202, 0x00082018, 0x24000800, 0x40900005, + 0x80000602, 0x00082030, 0x28000000, 0x00800105, + 0x80040602, 0x00082030, 0x2c000000, 0x00900105, + 0x80400602, 0x00082038, 0x28000800, 0x40800105, + 0x80440602, 0x00082038, 0x2c000800, 0x40900105, + 0x10000202, 0x00082000, 0x20200000, 0x00001005, + 0x10040202, 0x00082000, 0x24200000, 0x00101005, + 0x10400202, 0x00082008, 0x20200800, 0x40001005, + 0x10440202, 0x00082008, 0x24200800, 0x40101005, + 0x10000602, 0x00082020, 0x28200000, 0x00001105, + 0x10040602, 0x00082020, 0x2c200000, 0x00101105, + 0x10400602, 0x00082028, 0x28200800, 0x40001105, + 0x10440602, 0x00082028, 0x2c200800, 0x40101105, + 0x90000202, 0x00082010, 0x20200000, 0x00801005, + 0x90040202, 0x00082010, 0x24200000, 0x00901005, + 0x90400202, 0x00082018, 0x20200800, 0x40801005, + 0x90440202, 0x00082018, 0x24200800, 0x40901005, + 0x90000602, 0x00082030, 0x28200000, 0x00801105, + 0x90040602, 0x00082030, 0x2c200000, 0x00901105, + 0x90400602, 0x00082038, 0x28200800, 0x40801105, + 0x90440602, 0x00082038, 0x2c200800, 0x40901105, + + 0x00000000, 0x00000000, 0x00000000, 0x00000000, + 0x00000000, 0x00000008, 0x00080000, 0x10000000, + 0x02000000, 0x00000000, 0x00000080, 0x00001000, + 0x02000000, 0x00000008, 0x00080080, 0x10001000, + 0x00004000, 0x00000000, 0x00000040, 0x00040000, + 0x00004000, 0x00000008, 0x00080040, 0x10040000, + 0x02004000, 0x00000000, 0x000000c0, 0x00041000, + 0x02004000, 0x00000008, 0x000800c0, 0x10041000, + 0x00020000, 0x00008000, 0x08000000, 0x00200000, + 0x00020000, 0x00008008, 0x08080000, 0x10200000, + 0x02020000, 0x00008000, 0x08000080, 0x00201000, + 0x02020000, 0x00008008, 0x08080080, 0x10201000, + 0x00024000, 0x00008000, 0x08000040, 0x00240000, + 0x00024000, 0x00008008, 0x08080040, 0x10240000, + 0x02024000, 0x00008000, 0x080000c0, 0x00241000, + 0x02024000, 0x00008008, 0x080800c0, 0x10241000, + 0x00000000, 0x01000000, 0x00002000, 0x00000020, + 0x00000000, 0x01000008, 0x00082000, 0x10000020, + 0x02000000, 0x01000000, 0x00002080, 0x00001020, + 0x02000000, 0x01000008, 0x00082080, 0x10001020, + 0x00004000, 0x01000000, 0x00002040, 0x00040020, + 0x00004000, 0x01000008, 0x00082040, 0x10040020, + 0x02004000, 0x01000000, 0x000020c0, 0x00041020, + 0x02004000, 0x01000008, 0x000820c0, 0x10041020, + 0x00020000, 0x01008000, 0x08002000, 0x00200020, + 0x00020000, 0x01008008, 0x08082000, 0x10200020, + 0x02020000, 0x01008000, 0x08002080, 0x00201020, + 0x02020000, 0x01008008, 0x08082080, 0x10201020, + 0x00024000, 0x01008000, 0x08002040, 0x00240020, + 0x00024000, 0x01008008, 0x08082040, 0x10240020, + 0x02024000, 0x01008000, 0x080020c0, 0x00241020, + 0x02024000, 0x01008008, 0x080820c0, 0x10241020, + 0x00000400, 0x04000000, 0x00100000, 0x00000004, + 0x00000400, 0x04000008, 0x00180000, 0x10000004, + 0x02000400, 0x04000000, 0x00100080, 0x00001004, + 0x02000400, 0x04000008, 0x00180080, 0x10001004, + 0x00004400, 0x04000000, 0x00100040, 0x00040004, + 0x00004400, 0x04000008, 0x00180040, 0x10040004, + 0x02004400, 0x04000000, 0x001000c0, 0x00041004, + 0x02004400, 0x04000008, 0x001800c0, 0x10041004, + 0x00020400, 0x04008000, 0x08100000, 0x00200004, + 0x00020400, 0x04008008, 0x08180000, 0x10200004, + 0x02020400, 0x04008000, 0x08100080, 0x00201004, + 0x02020400, 0x04008008, 0x08180080, 0x10201004, + 0x00024400, 0x04008000, 0x08100040, 0x00240004, + 0x00024400, 0x04008008, 0x08180040, 0x10240004, + 0x02024400, 0x04008000, 0x081000c0, 0x00241004, + 0x02024400, 0x04008008, 0x081800c0, 
0x10241004, + 0x00000400, 0x05000000, 0x00102000, 0x00000024, + 0x00000400, 0x05000008, 0x00182000, 0x10000024, + 0x02000400, 0x05000000, 0x00102080, 0x00001024, + 0x02000400, 0x05000008, 0x00182080, 0x10001024, + 0x00004400, 0x05000000, 0x00102040, 0x00040024, + 0x00004400, 0x05000008, 0x00182040, 0x10040024, + 0x02004400, 0x05000000, 0x001020c0, 0x00041024, + 0x02004400, 0x05000008, 0x001820c0, 0x10041024, + 0x00020400, 0x05008000, 0x08102000, 0x00200024, + 0x00020400, 0x05008008, 0x08182000, 0x10200024, + 0x02020400, 0x05008000, 0x08102080, 0x00201024, + 0x02020400, 0x05008008, 0x08182080, 0x10201024, + 0x00024400, 0x05008000, 0x08102040, 0x00240024, + 0x00024400, 0x05008008, 0x08182040, 0x10240024, + 0x02024400, 0x05008000, 0x081020c0, 0x00241024, + 0x02024400, 0x05008008, 0x081820c0, 0x10241024, + 0x00000800, 0x00010000, 0x20000000, 0x00000010, + 0x00000800, 0x00010008, 0x20080000, 0x10000010, + 0x02000800, 0x00010000, 0x20000080, 0x00001010, + 0x02000800, 0x00010008, 0x20080080, 0x10001010, + 0x00004800, 0x00010000, 0x20000040, 0x00040010, + 0x00004800, 0x00010008, 0x20080040, 0x10040010, + 0x02004800, 0x00010000, 0x200000c0, 0x00041010, + 0x02004800, 0x00010008, 0x200800c0, 0x10041010, + 0x00020800, 0x00018000, 0x28000000, 0x00200010, + 0x00020800, 0x00018008, 0x28080000, 0x10200010, + 0x02020800, 0x00018000, 0x28000080, 0x00201010, + 0x02020800, 0x00018008, 0x28080080, 0x10201010, + 0x00024800, 0x00018000, 0x28000040, 0x00240010, + 0x00024800, 0x00018008, 0x28080040, 0x10240010, + 0x02024800, 0x00018000, 0x280000c0, 0x00241010, + 0x02024800, 0x00018008, 0x280800c0, 0x10241010, + 0x00000800, 0x01010000, 0x20002000, 0x00000030, + 0x00000800, 0x01010008, 0x20082000, 0x10000030, + 0x02000800, 0x01010000, 0x20002080, 0x00001030, + 0x02000800, 0x01010008, 0x20082080, 0x10001030, + 0x00004800, 0x01010000, 0x20002040, 0x00040030, + 0x00004800, 0x01010008, 0x20082040, 0x10040030, + 0x02004800, 0x01010000, 0x200020c0, 0x00041030, + 0x02004800, 0x01010008, 0x200820c0, 0x10041030, + 0x00020800, 0x01018000, 0x28002000, 0x00200030, + 0x00020800, 0x01018008, 0x28082000, 0x10200030, + 0x02020800, 0x01018000, 0x28002080, 0x00201030, + 0x02020800, 0x01018008, 0x28082080, 0x10201030, + 0x00024800, 0x01018000, 0x28002040, 0x00240030, + 0x00024800, 0x01018008, 0x28082040, 0x10240030, + 0x02024800, 0x01018000, 0x280020c0, 0x00241030, + 0x02024800, 0x01018008, 0x280820c0, 0x10241030, + 0x00000c00, 0x04010000, 0x20100000, 0x00000014, + 0x00000c00, 0x04010008, 0x20180000, 0x10000014, + 0x02000c00, 0x04010000, 0x20100080, 0x00001014, + 0x02000c00, 0x04010008, 0x20180080, 0x10001014, + 0x00004c00, 0x04010000, 0x20100040, 0x00040014, + 0x00004c00, 0x04010008, 0x20180040, 0x10040014, + 0x02004c00, 0x04010000, 0x201000c0, 0x00041014, + 0x02004c00, 0x04010008, 0x201800c0, 0x10041014, + 0x00020c00, 0x04018000, 0x28100000, 0x00200014, + 0x00020c00, 0x04018008, 0x28180000, 0x10200014, + 0x02020c00, 0x04018000, 0x28100080, 0x00201014, + 0x02020c00, 0x04018008, 0x28180080, 0x10201014, + 0x00024c00, 0x04018000, 0x28100040, 0x00240014, + 0x00024c00, 0x04018008, 0x28180040, 0x10240014, + 0x02024c00, 0x04018000, 0x281000c0, 0x00241014, + 0x02024c00, 0x04018008, 0x281800c0, 0x10241014, + 0x00000c00, 0x05010000, 0x20102000, 0x00000034, + 0x00000c00, 0x05010008, 0x20182000, 0x10000034, + 0x02000c00, 0x05010000, 0x20102080, 0x00001034, + 0x02000c00, 0x05010008, 0x20182080, 0x10001034, + 0x00004c00, 0x05010000, 0x20102040, 0x00040034, + 0x00004c00, 0x05010008, 0x20182040, 0x10040034, + 0x02004c00, 0x05010000, 0x201020c0, 
0x00041034, + 0x02004c00, 0x05010008, 0x201820c0, 0x10041034, + 0x00020c00, 0x05018000, 0x28102000, 0x00200034, + 0x00020c00, 0x05018008, 0x28182000, 0x10200034, + 0x02020c00, 0x05018000, 0x28102080, 0x00201034, + 0x02020c00, 0x05018008, 0x28182080, 0x10201034, + 0x00024c00, 0x05018000, 0x28102040, 0x00240034, + 0x00024c00, 0x05018008, 0x28182040, 0x10240034, + 0x02024c00, 0x05018000, 0x281020c0, 0x00241034, + 0x02024c00, 0x05018008, 0x281820c0, 0x10241034 +}; + +/* S-box lookup tables */ + +static const u32 S1[64] = { + 0x01010400, 0x00000000, 0x00010000, 0x01010404, + 0x01010004, 0x00010404, 0x00000004, 0x00010000, + 0x00000400, 0x01010400, 0x01010404, 0x00000400, + 0x01000404, 0x01010004, 0x01000000, 0x00000004, + 0x00000404, 0x01000400, 0x01000400, 0x00010400, + 0x00010400, 0x01010000, 0x01010000, 0x01000404, + 0x00010004, 0x01000004, 0x01000004, 0x00010004, + 0x00000000, 0x00000404, 0x00010404, 0x01000000, + 0x00010000, 0x01010404, 0x00000004, 0x01010000, + 0x01010400, 0x01000000, 0x01000000, 0x00000400, + 0x01010004, 0x00010000, 0x00010400, 0x01000004, + 0x00000400, 0x00000004, 0x01000404, 0x00010404, + 0x01010404, 0x00010004, 0x01010000, 0x01000404, + 0x01000004, 0x00000404, 0x00010404, 0x01010400, + 0x00000404, 0x01000400, 0x01000400, 0x00000000, + 0x00010004, 0x00010400, 0x00000000, 0x01010004 +}; + +static const u32 S2[64] = { + 0x80108020, 0x80008000, 0x00008000, 0x00108020, + 0x00100000, 0x00000020, 0x80100020, 0x80008020, + 0x80000020, 0x80108020, 0x80108000, 0x80000000, + 0x80008000, 0x00100000, 0x00000020, 0x80100020, + 0x00108000, 0x00100020, 0x80008020, 0x00000000, + 0x80000000, 0x00008000, 0x00108020, 0x80100000, + 0x00100020, 0x80000020, 0x00000000, 0x00108000, + 0x00008020, 0x80108000, 0x80100000, 0x00008020, + 0x00000000, 0x00108020, 0x80100020, 0x00100000, + 0x80008020, 0x80100000, 0x80108000, 0x00008000, + 0x80100000, 0x80008000, 0x00000020, 0x80108020, + 0x00108020, 0x00000020, 0x00008000, 0x80000000, + 0x00008020, 0x80108000, 0x00100000, 0x80000020, + 0x00100020, 0x80008020, 0x80000020, 0x00100020, + 0x00108000, 0x00000000, 0x80008000, 0x00008020, + 0x80000000, 0x80100020, 0x80108020, 0x00108000 +}; + +static const u32 S3[64] = { + 0x00000208, 0x08020200, 0x00000000, 0x08020008, + 0x08000200, 0x00000000, 0x00020208, 0x08000200, + 0x00020008, 0x08000008, 0x08000008, 0x00020000, + 0x08020208, 0x00020008, 0x08020000, 0x00000208, + 0x08000000, 0x00000008, 0x08020200, 0x00000200, + 0x00020200, 0x08020000, 0x08020008, 0x00020208, + 0x08000208, 0x00020200, 0x00020000, 0x08000208, + 0x00000008, 0x08020208, 0x00000200, 0x08000000, + 0x08020200, 0x08000000, 0x00020008, 0x00000208, + 0x00020000, 0x08020200, 0x08000200, 0x00000000, + 0x00000200, 0x00020008, 0x08020208, 0x08000200, + 0x08000008, 0x00000200, 0x00000000, 0x08020008, + 0x08000208, 0x00020000, 0x08000000, 0x08020208, + 0x00000008, 0x00020208, 0x00020200, 0x08000008, + 0x08020000, 0x08000208, 0x00000208, 0x08020000, + 0x00020208, 0x00000008, 0x08020008, 0x00020200 +}; + +static const u32 S4[64] = { + 0x00802001, 0x00002081, 0x00002081, 0x00000080, + 0x00802080, 0x00800081, 0x00800001, 0x00002001, + 0x00000000, 0x00802000, 0x00802000, 0x00802081, + 0x00000081, 0x00000000, 0x00800080, 0x00800001, + 0x00000001, 0x00002000, 0x00800000, 0x00802001, + 0x00000080, 0x00800000, 0x00002001, 0x00002080, + 0x00800081, 0x00000001, 0x00002080, 0x00800080, + 0x00002000, 0x00802080, 0x00802081, 0x00000081, + 0x00800080, 0x00800001, 0x00802000, 0x00802081, + 0x00000081, 0x00000000, 0x00000000, 0x00802000, + 0x00002080, 0x00800080, 
0x00800081, 0x00000001, + 0x00802001, 0x00002081, 0x00002081, 0x00000080, + 0x00802081, 0x00000081, 0x00000001, 0x00002000, + 0x00800001, 0x00002001, 0x00802080, 0x00800081, + 0x00002001, 0x00002080, 0x00800000, 0x00802001, + 0x00000080, 0x00800000, 0x00002000, 0x00802080 +}; + +static const u32 S5[64] = { + 0x00000100, 0x02080100, 0x02080000, 0x42000100, + 0x00080000, 0x00000100, 0x40000000, 0x02080000, + 0x40080100, 0x00080000, 0x02000100, 0x40080100, + 0x42000100, 0x42080000, 0x00080100, 0x40000000, + 0x02000000, 0x40080000, 0x40080000, 0x00000000, + 0x40000100, 0x42080100, 0x42080100, 0x02000100, + 0x42080000, 0x40000100, 0x00000000, 0x42000000, + 0x02080100, 0x02000000, 0x42000000, 0x00080100, + 0x00080000, 0x42000100, 0x00000100, 0x02000000, + 0x40000000, 0x02080000, 0x42000100, 0x40080100, + 0x02000100, 0x40000000, 0x42080000, 0x02080100, + 0x40080100, 0x00000100, 0x02000000, 0x42080000, + 0x42080100, 0x00080100, 0x42000000, 0x42080100, + 0x02080000, 0x00000000, 0x40080000, 0x42000000, + 0x00080100, 0x02000100, 0x40000100, 0x00080000, + 0x00000000, 0x40080000, 0x02080100, 0x40000100 +}; + +static const u32 S6[64] = { + 0x20000010, 0x20400000, 0x00004000, 0x20404010, + 0x20400000, 0x00000010, 0x20404010, 0x00400000, + 0x20004000, 0x00404010, 0x00400000, 0x20000010, + 0x00400010, 0x20004000, 0x20000000, 0x00004010, + 0x00000000, 0x00400010, 0x20004010, 0x00004000, + 0x00404000, 0x20004010, 0x00000010, 0x20400010, + 0x20400010, 0x00000000, 0x00404010, 0x20404000, + 0x00004010, 0x00404000, 0x20404000, 0x20000000, + 0x20004000, 0x00000010, 0x20400010, 0x00404000, + 0x20404010, 0x00400000, 0x00004010, 0x20000010, + 0x00400000, 0x20004000, 0x20000000, 0x00004010, + 0x20000010, 0x20404010, 0x00404000, 0x20400000, + 0x00404010, 0x20404000, 0x00000000, 0x20400010, + 0x00000010, 0x00004000, 0x20400000, 0x00404010, + 0x00004000, 0x00400010, 0x20004010, 0x00000000, + 0x20404000, 0x20000000, 0x00400010, 0x20004010 +}; + +static const u32 S7[64] = { + 0x00200000, 0x04200002, 0x04000802, 0x00000000, + 0x00000800, 0x04000802, 0x00200802, 0x04200800, + 0x04200802, 0x00200000, 0x00000000, 0x04000002, + 0x00000002, 0x04000000, 0x04200002, 0x00000802, + 0x04000800, 0x00200802, 0x00200002, 0x04000800, + 0x04000002, 0x04200000, 0x04200800, 0x00200002, + 0x04200000, 0x00000800, 0x00000802, 0x04200802, + 0x00200800, 0x00000002, 0x04000000, 0x00200800, + 0x04000000, 0x00200800, 0x00200000, 0x04000802, + 0x04000802, 0x04200002, 0x04200002, 0x00000002, + 0x00200002, 0x04000000, 0x04000800, 0x00200000, + 0x04200800, 0x00000802, 0x00200802, 0x04200800, + 0x00000802, 0x04000002, 0x04200802, 0x04200000, + 0x00200800, 0x00000000, 0x00000002, 0x04200802, + 0x00000000, 0x00200802, 0x04200000, 0x00000800, + 0x04000002, 0x04000800, 0x00000800, 0x00200002 +}; + +static const u32 S8[64] = { + 0x10001040, 0x00001000, 0x00040000, 0x10041040, + 0x10000000, 0x10001040, 0x00000040, 0x10000000, + 0x00040040, 0x10040000, 0x10041040, 0x00041000, + 0x10041000, 0x00041040, 0x00001000, 0x00000040, + 0x10040000, 0x10000040, 0x10001000, 0x00001040, + 0x00041000, 0x00040040, 0x10040040, 0x10041000, + 0x00001040, 0x00000000, 0x00000000, 0x10040040, + 0x10000040, 0x10001000, 0x00041040, 0x00040000, + 0x00041040, 0x00040000, 0x10041000, 0x00001000, + 0x00000040, 0x10040040, 0x00001000, 0x00041040, + 0x10001000, 0x00000040, 0x10000040, 0x10040000, + 0x10040040, 0x10000000, 0x00040000, 0x10001040, + 0x00000000, 0x10041040, 0x00040040, 0x10000040, + 0x10040000, 0x10001000, 0x10001040, 0x00000000, + 0x10041040, 0x00041000, 0x00041000, 
0x00001040, + 0x00001040, 0x00040040, 0x10000000, 0x10041000 +}; + +/* Encryption components: IP, FP, and round function */ + +#define IP(L, R, T) \ + ROL(R, 4); \ + T = L; \ + L ^= R; \ + L &= 0xf0f0f0f0; \ + R ^= L; \ + L ^= T; \ + ROL(R, 12); \ + T = L; \ + L ^= R; \ + L &= 0xffff0000; \ + R ^= L; \ + L ^= T; \ + ROR(R, 14); \ + T = L; \ + L ^= R; \ + L &= 0xcccccccc; \ + R ^= L; \ + L ^= T; \ + ROL(R, 6); \ + T = L; \ + L ^= R; \ + L &= 0xff00ff00; \ + R ^= L; \ + L ^= T; \ + ROR(R, 7); \ + T = L; \ + L ^= R; \ + L &= 0xaaaaaaaa; \ + R ^= L; \ + L ^= T; \ + ROL(L, 1); + +#define FP(L, R, T) \ + ROR(L, 1); \ + T = L; \ + L ^= R; \ + L &= 0xaaaaaaaa; \ + R ^= L; \ + L ^= T; \ + ROL(R, 7); \ + T = L; \ + L ^= R; \ + L &= 0xff00ff00; \ + R ^= L; \ + L ^= T; \ + ROR(R, 6); \ + T = L; \ + L ^= R; \ + L &= 0xcccccccc; \ + R ^= L; \ + L ^= T; \ + ROL(R, 14); \ + T = L; \ + L ^= R; \ + L &= 0xffff0000; \ + R ^= L; \ + L ^= T; \ + ROR(R, 12); \ + T = L; \ + L ^= R; \ + L &= 0xf0f0f0f0; \ + R ^= L; \ + L ^= T; \ + ROR(R, 4); + +#define ROUND(L, R, A, B, K, d) \ + B = K[0]; A = K[1]; K += d; \ + B ^= R; A ^= R; \ + B &= 0x3f3f3f3f; ROR(A, 4); \ + L ^= S8[0xff & B]; A &= 0x3f3f3f3f; \ + L ^= S6[0xff & (B >> 8)]; B >>= 16; \ + L ^= S7[0xff & A]; \ + L ^= S5[0xff & (A >> 8)]; A >>= 16; \ + L ^= S4[0xff & B]; \ + L ^= S2[0xff & (B >> 8)]; \ + L ^= S3[0xff & A]; \ + L ^= S1[0xff & (A >> 8)]; + +/* + * PC2 lookup tables are organized as 2 consecutive sets of 4 interleaved + * tables of 128 elements. One set is for C_i and the other for D_i, while + * the 4 interleaved tables correspond to four 7-bit subsets of C_i or D_i. + * + * After PC1 each of the variables a,b,c,d contains a 7 bit subset of C_i + * or D_i in bits 7-1 (bit 0 being the least significant). + */ + +#define T1(x) pt[2 * (x) + 0] +#define T2(x) pt[2 * (x) + 1] +#define T3(x) pt[2 * (x) + 2] +#define T4(x) pt[2 * (x) + 3] + +#define DES_PC2(a, b, c, d) (T4(d) | T3(c) | T2(b) | T1(a)) + +/* + * Encryption key expansion + * + * RFC2451: Weak key checks SHOULD be performed. + * + * FIPS 74: + * + * Keys having duals are keys which produce all zeros, all ones, or + * alternating zero-one patterns in the C and D registers after Permuted + * Choice 1 has operated on the key. 
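+ *
+ * The des_ekey() routine below checks for these patterns and returns
+ * zero when the key is weak, so callers can treat a zero return as
+ * "weak key".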
+ * + */ +unsigned long des_ekey(u32 *pe, const u8 *k) +{ + /* K&R: long is at least 32 bits */ + unsigned long a, b, c, d, w; + const u32 *pt = pc2; + + d = k[4]; d &= 0x0e; d <<= 4; d |= k[0] & 0x1e; d = pc1[d]; + c = k[5]; c &= 0x0e; c <<= 4; c |= k[1] & 0x1e; c = pc1[c]; + b = k[6]; b &= 0x0e; b <<= 4; b |= k[2] & 0x1e; b = pc1[b]; + a = k[7]; a &= 0x0e; a <<= 4; a |= k[3] & 0x1e; a = pc1[a]; + + pe[15 * 2 + 0] = DES_PC2(a, b, c, d); d = rs[d]; + pe[14 * 2 + 0] = DES_PC2(d, a, b, c); c = rs[c]; b = rs[b]; + pe[13 * 2 + 0] = DES_PC2(b, c, d, a); a = rs[a]; d = rs[d]; + pe[12 * 2 + 0] = DES_PC2(d, a, b, c); c = rs[c]; b = rs[b]; + pe[11 * 2 + 0] = DES_PC2(b, c, d, a); a = rs[a]; d = rs[d]; + pe[10 * 2 + 0] = DES_PC2(d, a, b, c); c = rs[c]; b = rs[b]; + pe[ 9 * 2 + 0] = DES_PC2(b, c, d, a); a = rs[a]; d = rs[d]; + pe[ 8 * 2 + 0] = DES_PC2(d, a, b, c); c = rs[c]; + pe[ 7 * 2 + 0] = DES_PC2(c, d, a, b); b = rs[b]; a = rs[a]; + pe[ 6 * 2 + 0] = DES_PC2(a, b, c, d); d = rs[d]; c = rs[c]; + pe[ 5 * 2 + 0] = DES_PC2(c, d, a, b); b = rs[b]; a = rs[a]; + pe[ 4 * 2 + 0] = DES_PC2(a, b, c, d); d = rs[d]; c = rs[c]; + pe[ 3 * 2 + 0] = DES_PC2(c, d, a, b); b = rs[b]; a = rs[a]; + pe[ 2 * 2 + 0] = DES_PC2(a, b, c, d); d = rs[d]; c = rs[c]; + pe[ 1 * 2 + 0] = DES_PC2(c, d, a, b); b = rs[b]; + pe[ 0 * 2 + 0] = DES_PC2(b, c, d, a); + + /* Check if first half is weak */ + w = (a ^ c) | (b ^ d) | (rs[a] ^ c) | (b ^ rs[d]); + + /* Skip to next table set */ + pt += 512; + + d = k[0]; d &= 0xe0; d >>= 4; d |= k[4] & 0xf0; d = pc1[d + 1]; + c = k[1]; c &= 0xe0; c >>= 4; c |= k[5] & 0xf0; c = pc1[c + 1]; + b = k[2]; b &= 0xe0; b >>= 4; b |= k[6] & 0xf0; b = pc1[b + 1]; + a = k[3]; a &= 0xe0; a >>= 4; a |= k[7] & 0xf0; a = pc1[a + 1]; + + /* Check if second half is weak */ + w |= (a ^ c) | (b ^ d) | (rs[a] ^ c) | (b ^ rs[d]); + + pe[15 * 2 + 1] = DES_PC2(a, b, c, d); d = rs[d]; + pe[14 * 2 + 1] = DES_PC2(d, a, b, c); c = rs[c]; b = rs[b]; + pe[13 * 2 + 1] = DES_PC2(b, c, d, a); a = rs[a]; d = rs[d]; + pe[12 * 2 + 1] = DES_PC2(d, a, b, c); c = rs[c]; b = rs[b]; + pe[11 * 2 + 1] = DES_PC2(b, c, d, a); a = rs[a]; d = rs[d]; + pe[10 * 2 + 1] = DES_PC2(d, a, b, c); c = rs[c]; b = rs[b]; + pe[ 9 * 2 + 1] = DES_PC2(b, c, d, a); a = rs[a]; d = rs[d]; + pe[ 8 * 2 + 1] = DES_PC2(d, a, b, c); c = rs[c]; + pe[ 7 * 2 + 1] = DES_PC2(c, d, a, b); b = rs[b]; a = rs[a]; + pe[ 6 * 2 + 1] = DES_PC2(a, b, c, d); d = rs[d]; c = rs[c]; + pe[ 5 * 2 + 1] = DES_PC2(c, d, a, b); b = rs[b]; a = rs[a]; + pe[ 4 * 2 + 1] = DES_PC2(a, b, c, d); d = rs[d]; c = rs[c]; + pe[ 3 * 2 + 1] = DES_PC2(c, d, a, b); b = rs[b]; a = rs[a]; + pe[ 2 * 2 + 1] = DES_PC2(a, b, c, d); d = rs[d]; c = rs[c]; + pe[ 1 * 2 + 1] = DES_PC2(c, d, a, b); b = rs[b]; + pe[ 0 * 2 + 1] = DES_PC2(b, c, d, a); + + /* Fixup: 2413 5768 -> 1357 2468 */ + for (d = 0; d < 16; ++d) { + a = pe[2 * d]; + b = pe[2 * d + 1]; + c = a ^ b; + c &= 0xffff0000; + a ^= c; + b ^= c; + ROL(b, 18); + pe[2 * d] = a; + pe[2 * d + 1] = b; + } + + /* Zero if weak key */ + return w; +} +EXPORT_SYMBOL_GPL(des_ekey); + +/* + * Decryption key expansion + * + * No weak key checking is performed, as this is only used by triple DES + * + */ +static void dkey(u32 *pe, const u8 *k) +{ + /* K&R: long is at least 32 bits */ + unsigned long a, b, c, d; + const u32 *pt = pc2; + + d = k[4]; d &= 0x0e; d <<= 4; d |= k[0] & 0x1e; d = pc1[d]; + c = k[5]; c &= 0x0e; c <<= 4; c |= k[1] & 0x1e; c = pc1[c]; + b = k[6]; b &= 0x0e; b <<= 4; b |= k[2] & 0x1e; b = pc1[b]; + a = k[7]; a &= 0x0e; a <<= 4; a |= k[3] & 0x1e; a 
= pc1[a]; + + pe[ 0 * 2] = DES_PC2(a, b, c, d); d = rs[d]; + pe[ 1 * 2] = DES_PC2(d, a, b, c); c = rs[c]; b = rs[b]; + pe[ 2 * 2] = DES_PC2(b, c, d, a); a = rs[a]; d = rs[d]; + pe[ 3 * 2] = DES_PC2(d, a, b, c); c = rs[c]; b = rs[b]; + pe[ 4 * 2] = DES_PC2(b, c, d, a); a = rs[a]; d = rs[d]; + pe[ 5 * 2] = DES_PC2(d, a, b, c); c = rs[c]; b = rs[b]; + pe[ 6 * 2] = DES_PC2(b, c, d, a); a = rs[a]; d = rs[d]; + pe[ 7 * 2] = DES_PC2(d, a, b, c); c = rs[c]; + pe[ 8 * 2] = DES_PC2(c, d, a, b); b = rs[b]; a = rs[a]; + pe[ 9 * 2] = DES_PC2(a, b, c, d); d = rs[d]; c = rs[c]; + pe[10 * 2] = DES_PC2(c, d, a, b); b = rs[b]; a = rs[a]; + pe[11 * 2] = DES_PC2(a, b, c, d); d = rs[d]; c = rs[c]; + pe[12 * 2] = DES_PC2(c, d, a, b); b = rs[b]; a = rs[a]; + pe[13 * 2] = DES_PC2(a, b, c, d); d = rs[d]; c = rs[c]; + pe[14 * 2] = DES_PC2(c, d, a, b); b = rs[b]; + pe[15 * 2] = DES_PC2(b, c, d, a); + + /* Skip to next table set */ + pt += 512; + + d = k[0]; d &= 0xe0; d >>= 4; d |= k[4] & 0xf0; d = pc1[d + 1]; + c = k[1]; c &= 0xe0; c >>= 4; c |= k[5] & 0xf0; c = pc1[c + 1]; + b = k[2]; b &= 0xe0; b >>= 4; b |= k[6] & 0xf0; b = pc1[b + 1]; + a = k[3]; a &= 0xe0; a >>= 4; a |= k[7] & 0xf0; a = pc1[a + 1]; + + pe[ 0 * 2 + 1] = DES_PC2(a, b, c, d); d = rs[d]; + pe[ 1 * 2 + 1] = DES_PC2(d, a, b, c); c = rs[c]; b = rs[b]; + pe[ 2 * 2 + 1] = DES_PC2(b, c, d, a); a = rs[a]; d = rs[d]; + pe[ 3 * 2 + 1] = DES_PC2(d, a, b, c); c = rs[c]; b = rs[b]; + pe[ 4 * 2 + 1] = DES_PC2(b, c, d, a); a = rs[a]; d = rs[d]; + pe[ 5 * 2 + 1] = DES_PC2(d, a, b, c); c = rs[c]; b = rs[b]; + pe[ 6 * 2 + 1] = DES_PC2(b, c, d, a); a = rs[a]; d = rs[d]; + pe[ 7 * 2 + 1] = DES_PC2(d, a, b, c); c = rs[c]; + pe[ 8 * 2 + 1] = DES_PC2(c, d, a, b); b = rs[b]; a = rs[a]; + pe[ 9 * 2 + 1] = DES_PC2(a, b, c, d); d = rs[d]; c = rs[c]; + pe[10 * 2 + 1] = DES_PC2(c, d, a, b); b = rs[b]; a = rs[a]; + pe[11 * 2 + 1] = DES_PC2(a, b, c, d); d = rs[d]; c = rs[c]; + pe[12 * 2 + 1] = DES_PC2(c, d, a, b); b = rs[b]; a = rs[a]; + pe[13 * 2 + 1] = DES_PC2(a, b, c, d); d = rs[d]; c = rs[c]; + pe[14 * 2 + 1] = DES_PC2(c, d, a, b); b = rs[b]; + pe[15 * 2 + 1] = DES_PC2(b, c, d, a); + + /* Fixup: 2413 5768 -> 1357 2468 */ + for (d = 0; d < 16; ++d) { + a = pe[2 * d]; + b = pe[2 * d + 1]; + c = a ^ b; + c &= 0xffff0000; + a ^= c; + b ^= c; + ROL(b, 18); + pe[2 * d] = a; + pe[2 * d + 1] = b; + } +} + +static int des_setkey(struct crypto_tfm *tfm, const u8 *key, + unsigned int keylen) +{ + struct des_ctx *dctx = crypto_tfm_ctx(tfm); + u32 *flags = &tfm->crt_flags; + u32 tmp[DES_EXPKEY_WORDS]; + int ret; + + /* Expand to tmp */ + ret = des_ekey(tmp, key); + + if (unlikely(ret == 0) && (*flags & CRYPTO_TFM_REQ_WEAK_KEY)) { + *flags |= CRYPTO_TFM_RES_WEAK_KEY; + return -EINVAL; + } + + /* Copy to output */ + memcpy(dctx->expkey, tmp, sizeof(dctx->expkey)); + + return 0; +} + +static void des_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) +{ + struct des_ctx *ctx = crypto_tfm_ctx(tfm); + const u32 *K = ctx->expkey; + const __le32 *s = (const __le32 *)src; + __le32 *d = (__le32 *)dst; + u32 L, R, A, B; + int i; + + L = le32_to_cpu(s[0]); + R = le32_to_cpu(s[1]); + + IP(L, R, A); + for (i = 0; i < 8; i++) { + ROUND(L, R, A, B, K, 2); + ROUND(R, L, A, B, K, 2); + } + FP(R, L, A); + + d[0] = cpu_to_le32(R); + d[1] = cpu_to_le32(L); +} + +static void des_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) +{ + struct des_ctx *ctx = crypto_tfm_ctx(tfm); + const u32 *K = ctx->expkey + DES_EXPKEY_WORDS - 2; + const __le32 *s = (const __le32 *)src; + __le32 *d = (__le32 *)dst; 
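+	/*
+	 * Decryption reuses the encryption schedule but walks it backwards:
+	 * K starts at the last subkey pair and each ROUND() steps it by -2.
+	 */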
+ u32 L, R, A, B; + int i; + + L = le32_to_cpu(s[0]); + R = le32_to_cpu(s[1]); + + IP(L, R, A); + for (i = 0; i < 8; i++) { + ROUND(L, R, A, B, K, -2); + ROUND(R, L, A, B, K, -2); + } + FP(R, L, A); + + d[0] = cpu_to_le32(R); + d[1] = cpu_to_le32(L); +} + +/* + * RFC2451: + * + * For DES-EDE3, there is no known need to reject weak or + * complementation keys. Any weakness is obviated by the use of + * multiple keys. + * + * However, if the first two or last two independent 64-bit keys are + * equal (k1 == k2 or k2 == k3), then the DES3 operation is simply the + * same as DES. Implementers MUST reject keys that exhibit this + * property. + * + */ +static int des3_ede_setkey(struct crypto_tfm *tfm, const u8 *key, + unsigned int keylen) +{ + const u32 *K = (const u32 *)key; + struct des3_ede_ctx *dctx = crypto_tfm_ctx(tfm); + u32 *expkey = dctx->expkey; + u32 *flags = &tfm->crt_flags; + + if (unlikely(!((K[0] ^ K[2]) | (K[1] ^ K[3])) || + !((K[2] ^ K[4]) | (K[3] ^ K[5]))) && + (*flags & CRYPTO_TFM_REQ_WEAK_KEY)) { + *flags |= CRYPTO_TFM_RES_WEAK_KEY; + return -EINVAL; + } + + des_ekey(expkey, key); expkey += DES_EXPKEY_WORDS; key += DES_KEY_SIZE; + dkey(expkey, key); expkey += DES_EXPKEY_WORDS; key += DES_KEY_SIZE; + des_ekey(expkey, key); + + return 0; +} + +static void des3_ede_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) +{ + struct des3_ede_ctx *dctx = crypto_tfm_ctx(tfm); + const u32 *K = dctx->expkey; + const __le32 *s = (const __le32 *)src; + __le32 *d = (__le32 *)dst; + u32 L, R, A, B; + int i; + + L = le32_to_cpu(s[0]); + R = le32_to_cpu(s[1]); + + IP(L, R, A); + for (i = 0; i < 8; i++) { + ROUND(L, R, A, B, K, 2); + ROUND(R, L, A, B, K, 2); + } + for (i = 0; i < 8; i++) { + ROUND(R, L, A, B, K, 2); + ROUND(L, R, A, B, K, 2); + } + for (i = 0; i < 8; i++) { + ROUND(L, R, A, B, K, 2); + ROUND(R, L, A, B, K, 2); + } + FP(R, L, A); + + d[0] = cpu_to_le32(R); + d[1] = cpu_to_le32(L); +} + +static void des3_ede_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) +{ + struct des3_ede_ctx *dctx = crypto_tfm_ctx(tfm); + const u32 *K = dctx->expkey + DES3_EDE_EXPKEY_WORDS - 2; + const __le32 *s = (const __le32 *)src; + __le32 *d = (__le32 *)dst; + u32 L, R, A, B; + int i; + + L = le32_to_cpu(s[0]); + R = le32_to_cpu(s[1]); + + IP(L, R, A); + for (i = 0; i < 8; i++) { + ROUND(L, R, A, B, K, -2); + ROUND(R, L, A, B, K, -2); + } + for (i = 0; i < 8; i++) { + ROUND(R, L, A, B, K, -2); + ROUND(L, R, A, B, K, -2); + } + for (i = 0; i < 8; i++) { + ROUND(L, R, A, B, K, -2); + ROUND(R, L, A, B, K, -2); + } + FP(R, L, A); + + d[0] = cpu_to_le32(R); + d[1] = cpu_to_le32(L); +} + +static struct crypto_alg des_alg = { + .cra_name = "des", + .cra_flags = CRYPTO_ALG_TYPE_CIPHER, + .cra_blocksize = DES_BLOCK_SIZE, + .cra_ctxsize = sizeof(struct des_ctx), + .cra_module = THIS_MODULE, + .cra_alignmask = 3, + .cra_list = LIST_HEAD_INIT(des_alg.cra_list), + .cra_u = { .cipher = { + .cia_min_keysize = DES_KEY_SIZE, + .cia_max_keysize = DES_KEY_SIZE, + .cia_setkey = des_setkey, + .cia_encrypt = des_encrypt, + .cia_decrypt = des_decrypt } } +}; + +static struct crypto_alg des3_ede_alg = { + .cra_name = "des3_ede", + .cra_flags = CRYPTO_ALG_TYPE_CIPHER, + .cra_blocksize = DES3_EDE_BLOCK_SIZE, + .cra_ctxsize = sizeof(struct des3_ede_ctx), + .cra_module = THIS_MODULE, + .cra_alignmask = 3, + .cra_list = LIST_HEAD_INIT(des3_ede_alg.cra_list), + .cra_u = { .cipher = { + .cia_min_keysize = DES3_EDE_KEY_SIZE, + .cia_max_keysize = DES3_EDE_KEY_SIZE, + .cia_setkey = des3_ede_setkey, + .cia_encrypt = 
des3_ede_encrypt, + .cia_decrypt = des3_ede_decrypt } } +}; + +MODULE_ALIAS("des3_ede"); + +static int __init des_generic_mod_init(void) +{ + int ret = 0; + + ret = crypto_register_alg(&des_alg); + if (ret < 0) + goto out; + + ret = crypto_register_alg(&des3_ede_alg); + if (ret < 0) + crypto_unregister_alg(&des_alg); +out: + return ret; +} + +static void __exit des_generic_mod_fini(void) +{ + crypto_unregister_alg(&des3_ede_alg); + crypto_unregister_alg(&des_alg); +} + +module_init(des_generic_mod_init); +module_exit(des_generic_mod_fini); + +MODULE_LICENSE("GPL"); +MODULE_DESCRIPTION("DES & Triple DES EDE Cipher Algorithms"); +MODULE_AUTHOR("Dag Arne Osvik <da@osvik.no>"); +MODULE_ALIAS("des"); diff --git a/crypto/ecb.c b/crypto/ecb.c new file mode 100644 index 00000000..935cfef4 --- /dev/null +++ b/crypto/ecb.c @@ -0,0 +1,187 @@ +/* + * ECB: Electronic CodeBook mode + * + * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au> + * + * This program is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License as published by the Free + * Software Foundation; either version 2 of the License, or (at your option) + * any later version. + * + */ + +#include <crypto/algapi.h> +#include <linux/err.h> +#include <linux/init.h> +#include <linux/kernel.h> +#include <linux/module.h> +#include <linux/scatterlist.h> +#include <linux/slab.h> + +struct crypto_ecb_ctx { + struct crypto_cipher *child; +}; + +static int crypto_ecb_setkey(struct crypto_tfm *parent, const u8 *key, + unsigned int keylen) +{ + struct crypto_ecb_ctx *ctx = crypto_tfm_ctx(parent); + struct crypto_cipher *child = ctx->child; + int err; + + crypto_cipher_clear_flags(child, CRYPTO_TFM_REQ_MASK); + crypto_cipher_set_flags(child, crypto_tfm_get_flags(parent) & + CRYPTO_TFM_REQ_MASK); + err = crypto_cipher_setkey(child, key, keylen); + crypto_tfm_set_flags(parent, crypto_cipher_get_flags(child) & + CRYPTO_TFM_RES_MASK); + return err; +} + +static int crypto_ecb_crypt(struct blkcipher_desc *desc, + struct blkcipher_walk *walk, + struct crypto_cipher *tfm, + void (*fn)(struct crypto_tfm *, u8 *, const u8 *)) +{ + int bsize = crypto_cipher_blocksize(tfm); + unsigned int nbytes; + int err; + + err = blkcipher_walk_virt(desc, walk); + + while ((nbytes = walk->nbytes)) { + u8 *wsrc = walk->src.virt.addr; + u8 *wdst = walk->dst.virt.addr; + + do { + fn(crypto_cipher_tfm(tfm), wdst, wsrc); + + wsrc += bsize; + wdst += bsize; + } while ((nbytes -= bsize) >= bsize); + + err = blkcipher_walk_done(desc, walk, nbytes); + } + + return err; +} + +static int crypto_ecb_encrypt(struct blkcipher_desc *desc, + struct scatterlist *dst, struct scatterlist *src, + unsigned int nbytes) +{ + struct blkcipher_walk walk; + struct crypto_blkcipher *tfm = desc->tfm; + struct crypto_ecb_ctx *ctx = crypto_blkcipher_ctx(tfm); + struct crypto_cipher *child = ctx->child; + + blkcipher_walk_init(&walk, dst, src, nbytes); + return crypto_ecb_crypt(desc, &walk, child, + crypto_cipher_alg(child)->cia_encrypt); +} + +static int crypto_ecb_decrypt(struct blkcipher_desc *desc, + struct scatterlist *dst, struct scatterlist *src, + unsigned int nbytes) +{ + struct blkcipher_walk walk; + struct crypto_blkcipher *tfm = desc->tfm; + struct crypto_ecb_ctx *ctx = crypto_blkcipher_ctx(tfm); + struct crypto_cipher *child = ctx->child; + + blkcipher_walk_init(&walk, dst, src, nbytes); + return crypto_ecb_crypt(desc, &walk, child, + crypto_cipher_alg(child)->cia_decrypt); +} + +static int crypto_ecb_init_tfm(struct crypto_tfm 
*tfm) +{ + struct crypto_instance *inst = (void *)tfm->__crt_alg; + struct crypto_spawn *spawn = crypto_instance_ctx(inst); + struct crypto_ecb_ctx *ctx = crypto_tfm_ctx(tfm); + struct crypto_cipher *cipher; + + cipher = crypto_spawn_cipher(spawn); + if (IS_ERR(cipher)) + return PTR_ERR(cipher); + + ctx->child = cipher; + return 0; +} + +static void crypto_ecb_exit_tfm(struct crypto_tfm *tfm) +{ + struct crypto_ecb_ctx *ctx = crypto_tfm_ctx(tfm); + crypto_free_cipher(ctx->child); +} + +static struct crypto_instance *crypto_ecb_alloc(struct rtattr **tb) +{ + struct crypto_instance *inst; + struct crypto_alg *alg; + int err; + + err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_BLKCIPHER); + if (err) + return ERR_PTR(err); + + alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_CIPHER, + CRYPTO_ALG_TYPE_MASK); + if (IS_ERR(alg)) + return ERR_CAST(alg); + + inst = crypto_alloc_instance("ecb", alg); + if (IS_ERR(inst)) + goto out_put_alg; + + inst->alg.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER; + inst->alg.cra_priority = alg->cra_priority; + inst->alg.cra_blocksize = alg->cra_blocksize; + inst->alg.cra_alignmask = alg->cra_alignmask; + inst->alg.cra_type = &crypto_blkcipher_type; + + inst->alg.cra_blkcipher.min_keysize = alg->cra_cipher.cia_min_keysize; + inst->alg.cra_blkcipher.max_keysize = alg->cra_cipher.cia_max_keysize; + + inst->alg.cra_ctxsize = sizeof(struct crypto_ecb_ctx); + + inst->alg.cra_init = crypto_ecb_init_tfm; + inst->alg.cra_exit = crypto_ecb_exit_tfm; + + inst->alg.cra_blkcipher.setkey = crypto_ecb_setkey; + inst->alg.cra_blkcipher.encrypt = crypto_ecb_encrypt; + inst->alg.cra_blkcipher.decrypt = crypto_ecb_decrypt; + +out_put_alg: + crypto_mod_put(alg); + return inst; +} + +static void crypto_ecb_free(struct crypto_instance *inst) +{ + crypto_drop_spawn(crypto_instance_ctx(inst)); + kfree(inst); +} + +static struct crypto_template crypto_ecb_tmpl = { + .name = "ecb", + .alloc = crypto_ecb_alloc, + .free = crypto_ecb_free, + .module = THIS_MODULE, +}; + +static int __init crypto_ecb_module_init(void) +{ + return crypto_register_template(&crypto_ecb_tmpl); +} + +static void __exit crypto_ecb_module_exit(void) +{ + crypto_unregister_template(&crypto_ecb_tmpl); +} + +module_init(crypto_ecb_module_init); +module_exit(crypto_ecb_module_exit); + +MODULE_LICENSE("GPL"); +MODULE_DESCRIPTION("ECB block cipher algorithm"); diff --git a/crypto/eseqiv.c b/crypto/eseqiv.c new file mode 100644 index 00000000..42ce9f57 --- /dev/null +++ b/crypto/eseqiv.c @@ -0,0 +1,269 @@ +/* + * eseqiv: Encrypted Sequence Number IV Generator + * + * This generator generates an IV based on a sequence number by xoring it + * with a salt and then encrypting it with the same key as used to encrypt + * the plain text. This algorithm requires that the block size be equal + * to the IV size. It is mainly useful for CBC. + * + * Copyright (c) 2007 Herbert Xu <herbert@gondor.apana.org.au> + * + * This program is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License as published by the Free + * Software Foundation; either version 2 of the License, or (at your option) + * any later version. 
+ * + */ + +#include <crypto/internal/skcipher.h> +#include <crypto/rng.h> +#include <crypto/scatterwalk.h> +#include <linux/err.h> +#include <linux/init.h> +#include <linux/kernel.h> +#include <linux/mm.h> +#include <linux/module.h> +#include <linux/scatterlist.h> +#include <linux/spinlock.h> +#include <linux/string.h> + +struct eseqiv_request_ctx { + struct scatterlist src[2]; + struct scatterlist dst[2]; + char tail[]; +}; + +struct eseqiv_ctx { + spinlock_t lock; + unsigned int reqoff; + char salt[]; +}; + +static void eseqiv_complete2(struct skcipher_givcrypt_request *req) +{ + struct crypto_ablkcipher *geniv = skcipher_givcrypt_reqtfm(req); + struct eseqiv_request_ctx *reqctx = skcipher_givcrypt_reqctx(req); + + memcpy(req->giv, PTR_ALIGN((u8 *)reqctx->tail, + crypto_ablkcipher_alignmask(geniv) + 1), + crypto_ablkcipher_ivsize(geniv)); +} + +static void eseqiv_complete(struct crypto_async_request *base, int err) +{ + struct skcipher_givcrypt_request *req = base->data; + + if (err) + goto out; + + eseqiv_complete2(req); + +out: + skcipher_givcrypt_complete(req, err); +} + +static int eseqiv_givencrypt(struct skcipher_givcrypt_request *req) +{ + struct crypto_ablkcipher *geniv = skcipher_givcrypt_reqtfm(req); + struct eseqiv_ctx *ctx = crypto_ablkcipher_ctx(geniv); + struct eseqiv_request_ctx *reqctx = skcipher_givcrypt_reqctx(req); + struct ablkcipher_request *subreq; + crypto_completion_t complete; + void *data; + struct scatterlist *osrc, *odst; + struct scatterlist *dst; + struct page *srcp; + struct page *dstp; + u8 *giv; + u8 *vsrc; + u8 *vdst; + __be64 seq; + unsigned int ivsize; + unsigned int len; + int err; + + subreq = (void *)(reqctx->tail + ctx->reqoff); + ablkcipher_request_set_tfm(subreq, skcipher_geniv_cipher(geniv)); + + giv = req->giv; + complete = req->creq.base.complete; + data = req->creq.base.data; + + osrc = req->creq.src; + odst = req->creq.dst; + srcp = sg_page(osrc); + dstp = sg_page(odst); + vsrc = PageHighMem(srcp) ? NULL : page_address(srcp) + osrc->offset; + vdst = PageHighMem(dstp) ? 
NULL : page_address(dstp) + odst->offset; + + ivsize = crypto_ablkcipher_ivsize(geniv); + + if (vsrc != giv + ivsize && vdst != giv + ivsize) { + giv = PTR_ALIGN((u8 *)reqctx->tail, + crypto_ablkcipher_alignmask(geniv) + 1); + complete = eseqiv_complete; + data = req; + } + + ablkcipher_request_set_callback(subreq, req->creq.base.flags, complete, + data); + + sg_init_table(reqctx->src, 2); + sg_set_buf(reqctx->src, giv, ivsize); + scatterwalk_crypto_chain(reqctx->src, osrc, vsrc == giv + ivsize, 2); + + dst = reqctx->src; + if (osrc != odst) { + sg_init_table(reqctx->dst, 2); + sg_set_buf(reqctx->dst, giv, ivsize); + scatterwalk_crypto_chain(reqctx->dst, odst, vdst == giv + ivsize, 2); + + dst = reqctx->dst; + } + + ablkcipher_request_set_crypt(subreq, reqctx->src, dst, + req->creq.nbytes + ivsize, + req->creq.info); + + memcpy(req->creq.info, ctx->salt, ivsize); + + len = ivsize; + if (ivsize > sizeof(u64)) { + memset(req->giv, 0, ivsize - sizeof(u64)); + len = sizeof(u64); + } + seq = cpu_to_be64(req->seq); + memcpy(req->giv + ivsize - len, &seq, len); + + err = crypto_ablkcipher_encrypt(subreq); + if (err) + goto out; + + if (giv != req->giv) + eseqiv_complete2(req); + +out: + return err; +} + +static int eseqiv_givencrypt_first(struct skcipher_givcrypt_request *req) +{ + struct crypto_ablkcipher *geniv = skcipher_givcrypt_reqtfm(req); + struct eseqiv_ctx *ctx = crypto_ablkcipher_ctx(geniv); + int err = 0; + + spin_lock_bh(&ctx->lock); + if (crypto_ablkcipher_crt(geniv)->givencrypt != eseqiv_givencrypt_first) + goto unlock; + + crypto_ablkcipher_crt(geniv)->givencrypt = eseqiv_givencrypt; + err = crypto_rng_get_bytes(crypto_default_rng, ctx->salt, + crypto_ablkcipher_ivsize(geniv)); + +unlock: + spin_unlock_bh(&ctx->lock); + + if (err) + return err; + + return eseqiv_givencrypt(req); +} + +static int eseqiv_init(struct crypto_tfm *tfm) +{ + struct crypto_ablkcipher *geniv = __crypto_ablkcipher_cast(tfm); + struct eseqiv_ctx *ctx = crypto_ablkcipher_ctx(geniv); + unsigned long alignmask; + unsigned int reqsize; + + spin_lock_init(&ctx->lock); + + alignmask = crypto_tfm_ctx_alignment() - 1; + reqsize = sizeof(struct eseqiv_request_ctx); + + if (alignmask & reqsize) { + alignmask &= reqsize; + alignmask--; + } + + alignmask = ~alignmask; + alignmask &= crypto_ablkcipher_alignmask(geniv); + + reqsize += alignmask; + reqsize += crypto_ablkcipher_ivsize(geniv); + reqsize = ALIGN(reqsize, crypto_tfm_ctx_alignment()); + + ctx->reqoff = reqsize - sizeof(struct eseqiv_request_ctx); + + tfm->crt_ablkcipher.reqsize = reqsize + + sizeof(struct ablkcipher_request); + + return skcipher_geniv_init(tfm); +} + +static struct crypto_template eseqiv_tmpl; + +static struct crypto_instance *eseqiv_alloc(struct rtattr **tb) +{ + struct crypto_instance *inst; + int err; + + err = crypto_get_default_rng(); + if (err) + return ERR_PTR(err); + + inst = skcipher_geniv_alloc(&eseqiv_tmpl, tb, 0, 0); + if (IS_ERR(inst)) + goto put_rng; + + err = -EINVAL; + if (inst->alg.cra_ablkcipher.ivsize != inst->alg.cra_blocksize) + goto free_inst; + + inst->alg.cra_ablkcipher.givencrypt = eseqiv_givencrypt_first; + + inst->alg.cra_init = eseqiv_init; + inst->alg.cra_exit = skcipher_geniv_exit; + + inst->alg.cra_ctxsize = sizeof(struct eseqiv_ctx); + inst->alg.cra_ctxsize += inst->alg.cra_ablkcipher.ivsize; + +out: + return inst; + +free_inst: + skcipher_geniv_free(inst); + inst = ERR_PTR(err); +put_rng: + crypto_put_default_rng(); + goto out; +} + +static void eseqiv_free(struct crypto_instance *inst) +{ + 
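+	/*
+	 * Tear down the geniv instance and drop the default-RNG reference
+	 * taken in eseqiv_alloc().
+	 */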
skcipher_geniv_free(inst); + crypto_put_default_rng(); +} + +static struct crypto_template eseqiv_tmpl = { + .name = "eseqiv", + .alloc = eseqiv_alloc, + .free = eseqiv_free, + .module = THIS_MODULE, +}; + +static int __init eseqiv_module_init(void) +{ + return crypto_register_template(&eseqiv_tmpl); +} + +static void __exit eseqiv_module_exit(void) +{ + crypto_unregister_template(&eseqiv_tmpl); +} + +module_init(eseqiv_module_init); +module_exit(eseqiv_module_exit); + +MODULE_LICENSE("GPL"); +MODULE_DESCRIPTION("Encrypted Sequence Number IV Generator"); diff --git a/crypto/fcrypt.c b/crypto/fcrypt.c new file mode 100644 index 00000000..c33107e3 --- /dev/null +++ b/crypto/fcrypt.c @@ -0,0 +1,423 @@ +/* FCrypt encryption algorithm + * + * Copyright (C) 2006 Red Hat, Inc. All Rights Reserved. + * Written by David Howells (dhowells@redhat.com) + * + * This program is free software; you can redistribute it and/or + * modify it under the terms of the GNU General Public License + * as published by the Free Software Foundation; either version + * 2 of the License, or (at your option) any later version. + * + * Based on code: + * + * Copyright (c) 1995 - 2000 Kungliga Tekniska Högskolan + * (Royal Institute of Technology, Stockholm, Sweden). + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions + * are met: + * + * 1. Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * + * 2. Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in the + * documentation and/or other materials provided with the distribution. + * + * 3. Neither the name of the Institute nor the names of its contributors + * may be used to endorse or promote products derived from this software + * without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE INSTITUTE AND CONTRIBUTORS ``AS IS'' AND + * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE INSTITUTE OR CONTRIBUTORS BE LIABLE + * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS + * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) + * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT + * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY + * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF + * SUCH DAMAGE. 
+ */ + +#include <asm/byteorder.h> +#include <linux/bitops.h> +#include <linux/init.h> +#include <linux/module.h> +#include <linux/crypto.h> + +#define ROUNDS 16 + +struct fcrypt_ctx { + __be32 sched[ROUNDS]; +}; + +/* Rotate right two 32 bit numbers as a 56 bit number */ +#define ror56(hi, lo, n) \ +do { \ + u32 t = lo & ((1 << n) - 1); \ + lo = (lo >> n) | ((hi & ((1 << n) - 1)) << (32 - n)); \ + hi = (hi >> n) | (t << (24-n)); \ +} while (0) + +/* Rotate right one 64 bit number as a 56 bit number */ +#define ror56_64(k, n) \ +do { \ + k = (k >> n) | ((k & ((1 << n) - 1)) << (56 - n)); \ +} while (0) + +/* + * Sboxes for Feistel network derived from + * /afs/transarc.com/public/afsps/afs.rel31b.export-src/rxkad/sboxes.h + */ +#undef Z +#define Z(x) cpu_to_be32(x << 3) +static const __be32 sbox0[256] = { + Z(0xea), Z(0x7f), Z(0xb2), Z(0x64), Z(0x9d), Z(0xb0), Z(0xd9), Z(0x11), + Z(0xcd), Z(0x86), Z(0x86), Z(0x91), Z(0x0a), Z(0xb2), Z(0x93), Z(0x06), + Z(0x0e), Z(0x06), Z(0xd2), Z(0x65), Z(0x73), Z(0xc5), Z(0x28), Z(0x60), + Z(0xf2), Z(0x20), Z(0xb5), Z(0x38), Z(0x7e), Z(0xda), Z(0x9f), Z(0xe3), + Z(0xd2), Z(0xcf), Z(0xc4), Z(0x3c), Z(0x61), Z(0xff), Z(0x4a), Z(0x4a), + Z(0x35), Z(0xac), Z(0xaa), Z(0x5f), Z(0x2b), Z(0xbb), Z(0xbc), Z(0x53), + Z(0x4e), Z(0x9d), Z(0x78), Z(0xa3), Z(0xdc), Z(0x09), Z(0x32), Z(0x10), + Z(0xc6), Z(0x6f), Z(0x66), Z(0xd6), Z(0xab), Z(0xa9), Z(0xaf), Z(0xfd), + Z(0x3b), Z(0x95), Z(0xe8), Z(0x34), Z(0x9a), Z(0x81), Z(0x72), Z(0x80), + Z(0x9c), Z(0xf3), Z(0xec), Z(0xda), Z(0x9f), Z(0x26), Z(0x76), Z(0x15), + Z(0x3e), Z(0x55), Z(0x4d), Z(0xde), Z(0x84), Z(0xee), Z(0xad), Z(0xc7), + Z(0xf1), Z(0x6b), Z(0x3d), Z(0xd3), Z(0x04), Z(0x49), Z(0xaa), Z(0x24), + Z(0x0b), Z(0x8a), Z(0x83), Z(0xba), Z(0xfa), Z(0x85), Z(0xa0), Z(0xa8), + Z(0xb1), Z(0xd4), Z(0x01), Z(0xd8), Z(0x70), Z(0x64), Z(0xf0), Z(0x51), + Z(0xd2), Z(0xc3), Z(0xa7), Z(0x75), Z(0x8c), Z(0xa5), Z(0x64), Z(0xef), + Z(0x10), Z(0x4e), Z(0xb7), Z(0xc6), Z(0x61), Z(0x03), Z(0xeb), Z(0x44), + Z(0x3d), Z(0xe5), Z(0xb3), Z(0x5b), Z(0xae), Z(0xd5), Z(0xad), Z(0x1d), + Z(0xfa), Z(0x5a), Z(0x1e), Z(0x33), Z(0xab), Z(0x93), Z(0xa2), Z(0xb7), + Z(0xe7), Z(0xa8), Z(0x45), Z(0xa4), Z(0xcd), Z(0x29), Z(0x63), Z(0x44), + Z(0xb6), Z(0x69), Z(0x7e), Z(0x2e), Z(0x62), Z(0x03), Z(0xc8), Z(0xe0), + Z(0x17), Z(0xbb), Z(0xc7), Z(0xf3), Z(0x3f), Z(0x36), Z(0xba), Z(0x71), + Z(0x8e), Z(0x97), Z(0x65), Z(0x60), Z(0x69), Z(0xb6), Z(0xf6), Z(0xe6), + Z(0x6e), Z(0xe0), Z(0x81), Z(0x59), Z(0xe8), Z(0xaf), Z(0xdd), Z(0x95), + Z(0x22), Z(0x99), Z(0xfd), Z(0x63), Z(0x19), Z(0x74), Z(0x61), Z(0xb1), + Z(0xb6), Z(0x5b), Z(0xae), Z(0x54), Z(0xb3), Z(0x70), Z(0xff), Z(0xc6), + Z(0x3b), Z(0x3e), Z(0xc1), Z(0xd7), Z(0xe1), Z(0x0e), Z(0x76), Z(0xe5), + Z(0x36), Z(0x4f), Z(0x59), Z(0xc7), Z(0x08), Z(0x6e), Z(0x82), Z(0xa6), + Z(0x93), Z(0xc4), Z(0xaa), Z(0x26), Z(0x49), Z(0xe0), Z(0x21), Z(0x64), + Z(0x07), Z(0x9f), Z(0x64), Z(0x81), Z(0x9c), Z(0xbf), Z(0xf9), Z(0xd1), + Z(0x43), Z(0xf8), Z(0xb6), Z(0xb9), Z(0xf1), Z(0x24), Z(0x75), Z(0x03), + Z(0xe4), Z(0xb0), Z(0x99), Z(0x46), Z(0x3d), Z(0xf5), Z(0xd1), Z(0x39), + Z(0x72), Z(0x12), Z(0xf6), Z(0xba), Z(0x0c), Z(0x0d), Z(0x42), Z(0x2e) +}; + +#undef Z +#define Z(x) cpu_to_be32((x << 27) | (x >> 5)) +static const __be32 sbox1[256] = { + Z(0x77), Z(0x14), Z(0xa6), Z(0xfe), Z(0xb2), Z(0x5e), Z(0x8c), Z(0x3e), + Z(0x67), Z(0x6c), Z(0xa1), Z(0x0d), Z(0xc2), Z(0xa2), Z(0xc1), Z(0x85), + Z(0x6c), Z(0x7b), Z(0x67), Z(0xc6), Z(0x23), Z(0xe3), Z(0xf2), Z(0x89), + Z(0x50), Z(0x9c), Z(0x03), Z(0xb7), Z(0x73), 
Z(0xe6), Z(0xe1), Z(0x39), + Z(0x31), Z(0x2c), Z(0x27), Z(0x9f), Z(0xa5), Z(0x69), Z(0x44), Z(0xd6), + Z(0x23), Z(0x83), Z(0x98), Z(0x7d), Z(0x3c), Z(0xb4), Z(0x2d), Z(0x99), + Z(0x1c), Z(0x1f), Z(0x8c), Z(0x20), Z(0x03), Z(0x7c), Z(0x5f), Z(0xad), + Z(0xf4), Z(0xfa), Z(0x95), Z(0xca), Z(0x76), Z(0x44), Z(0xcd), Z(0xb6), + Z(0xb8), Z(0xa1), Z(0xa1), Z(0xbe), Z(0x9e), Z(0x54), Z(0x8f), Z(0x0b), + Z(0x16), Z(0x74), Z(0x31), Z(0x8a), Z(0x23), Z(0x17), Z(0x04), Z(0xfa), + Z(0x79), Z(0x84), Z(0xb1), Z(0xf5), Z(0x13), Z(0xab), Z(0xb5), Z(0x2e), + Z(0xaa), Z(0x0c), Z(0x60), Z(0x6b), Z(0x5b), Z(0xc4), Z(0x4b), Z(0xbc), + Z(0xe2), Z(0xaf), Z(0x45), Z(0x73), Z(0xfa), Z(0xc9), Z(0x49), Z(0xcd), + Z(0x00), Z(0x92), Z(0x7d), Z(0x97), Z(0x7a), Z(0x18), Z(0x60), Z(0x3d), + Z(0xcf), Z(0x5b), Z(0xde), Z(0xc6), Z(0xe2), Z(0xe6), Z(0xbb), Z(0x8b), + Z(0x06), Z(0xda), Z(0x08), Z(0x15), Z(0x1b), Z(0x88), Z(0x6a), Z(0x17), + Z(0x89), Z(0xd0), Z(0xa9), Z(0xc1), Z(0xc9), Z(0x70), Z(0x6b), Z(0xe5), + Z(0x43), Z(0xf4), Z(0x68), Z(0xc8), Z(0xd3), Z(0x84), Z(0x28), Z(0x0a), + Z(0x52), Z(0x66), Z(0xa3), Z(0xca), Z(0xf2), Z(0xe3), Z(0x7f), Z(0x7a), + Z(0x31), Z(0xf7), Z(0x88), Z(0x94), Z(0x5e), Z(0x9c), Z(0x63), Z(0xd5), + Z(0x24), Z(0x66), Z(0xfc), Z(0xb3), Z(0x57), Z(0x25), Z(0xbe), Z(0x89), + Z(0x44), Z(0xc4), Z(0xe0), Z(0x8f), Z(0x23), Z(0x3c), Z(0x12), Z(0x52), + Z(0xf5), Z(0x1e), Z(0xf4), Z(0xcb), Z(0x18), Z(0x33), Z(0x1f), Z(0xf8), + Z(0x69), Z(0x10), Z(0x9d), Z(0xd3), Z(0xf7), Z(0x28), Z(0xf8), Z(0x30), + Z(0x05), Z(0x5e), Z(0x32), Z(0xc0), Z(0xd5), Z(0x19), Z(0xbd), Z(0x45), + Z(0x8b), Z(0x5b), Z(0xfd), Z(0xbc), Z(0xe2), Z(0x5c), Z(0xa9), Z(0x96), + Z(0xef), Z(0x70), Z(0xcf), Z(0xc2), Z(0x2a), Z(0xb3), Z(0x61), Z(0xad), + Z(0x80), Z(0x48), Z(0x81), Z(0xb7), Z(0x1d), Z(0x43), Z(0xd9), Z(0xd7), + Z(0x45), Z(0xf0), Z(0xd8), Z(0x8a), Z(0x59), Z(0x7c), Z(0x57), Z(0xc1), + Z(0x79), Z(0xc7), Z(0x34), Z(0xd6), Z(0x43), Z(0xdf), Z(0xe4), Z(0x78), + Z(0x16), Z(0x06), Z(0xda), Z(0x92), Z(0x76), Z(0x51), Z(0xe1), Z(0xd4), + Z(0x70), Z(0x03), Z(0xe0), Z(0x2f), Z(0x96), Z(0x91), Z(0x82), Z(0x80) +}; + +#undef Z +#define Z(x) cpu_to_be32(x << 11) +static const __be32 sbox2[256] = { + Z(0xf0), Z(0x37), Z(0x24), Z(0x53), Z(0x2a), Z(0x03), Z(0x83), Z(0x86), + Z(0xd1), Z(0xec), Z(0x50), Z(0xf0), Z(0x42), Z(0x78), Z(0x2f), Z(0x6d), + Z(0xbf), Z(0x80), Z(0x87), Z(0x27), Z(0x95), Z(0xe2), Z(0xc5), Z(0x5d), + Z(0xf9), Z(0x6f), Z(0xdb), Z(0xb4), Z(0x65), Z(0x6e), Z(0xe7), Z(0x24), + Z(0xc8), Z(0x1a), Z(0xbb), Z(0x49), Z(0xb5), Z(0x0a), Z(0x7d), Z(0xb9), + Z(0xe8), Z(0xdc), Z(0xb7), Z(0xd9), Z(0x45), Z(0x20), Z(0x1b), Z(0xce), + Z(0x59), Z(0x9d), Z(0x6b), Z(0xbd), Z(0x0e), Z(0x8f), Z(0xa3), Z(0xa9), + Z(0xbc), Z(0x74), Z(0xa6), Z(0xf6), Z(0x7f), Z(0x5f), Z(0xb1), Z(0x68), + Z(0x84), Z(0xbc), Z(0xa9), Z(0xfd), Z(0x55), Z(0x50), Z(0xe9), Z(0xb6), + Z(0x13), Z(0x5e), Z(0x07), Z(0xb8), Z(0x95), Z(0x02), Z(0xc0), Z(0xd0), + Z(0x6a), Z(0x1a), Z(0x85), Z(0xbd), Z(0xb6), Z(0xfd), Z(0xfe), Z(0x17), + Z(0x3f), Z(0x09), Z(0xa3), Z(0x8d), Z(0xfb), Z(0xed), Z(0xda), Z(0x1d), + Z(0x6d), Z(0x1c), Z(0x6c), Z(0x01), Z(0x5a), Z(0xe5), Z(0x71), Z(0x3e), + Z(0x8b), Z(0x6b), Z(0xbe), Z(0x29), Z(0xeb), Z(0x12), Z(0x19), Z(0x34), + Z(0xcd), Z(0xb3), Z(0xbd), Z(0x35), Z(0xea), Z(0x4b), Z(0xd5), Z(0xae), + Z(0x2a), Z(0x79), Z(0x5a), Z(0xa5), Z(0x32), Z(0x12), Z(0x7b), Z(0xdc), + Z(0x2c), Z(0xd0), Z(0x22), Z(0x4b), Z(0xb1), Z(0x85), Z(0x59), Z(0x80), + Z(0xc0), Z(0x30), Z(0x9f), Z(0x73), Z(0xd3), Z(0x14), Z(0x48), Z(0x40), + Z(0x07), Z(0x2d), Z(0x8f), Z(0x80), 
Z(0x0f), Z(0xce), Z(0x0b), Z(0x5e), + Z(0xb7), Z(0x5e), Z(0xac), Z(0x24), Z(0x94), Z(0x4a), Z(0x18), Z(0x15), + Z(0x05), Z(0xe8), Z(0x02), Z(0x77), Z(0xa9), Z(0xc7), Z(0x40), Z(0x45), + Z(0x89), Z(0xd1), Z(0xea), Z(0xde), Z(0x0c), Z(0x79), Z(0x2a), Z(0x99), + Z(0x6c), Z(0x3e), Z(0x95), Z(0xdd), Z(0x8c), Z(0x7d), Z(0xad), Z(0x6f), + Z(0xdc), Z(0xff), Z(0xfd), Z(0x62), Z(0x47), Z(0xb3), Z(0x21), Z(0x8a), + Z(0xec), Z(0x8e), Z(0x19), Z(0x18), Z(0xb4), Z(0x6e), Z(0x3d), Z(0xfd), + Z(0x74), Z(0x54), Z(0x1e), Z(0x04), Z(0x85), Z(0xd8), Z(0xbc), Z(0x1f), + Z(0x56), Z(0xe7), Z(0x3a), Z(0x56), Z(0x67), Z(0xd6), Z(0xc8), Z(0xa5), + Z(0xf3), Z(0x8e), Z(0xde), Z(0xae), Z(0x37), Z(0x49), Z(0xb7), Z(0xfa), + Z(0xc8), Z(0xf4), Z(0x1f), Z(0xe0), Z(0x2a), Z(0x9b), Z(0x15), Z(0xd1), + Z(0x34), Z(0x0e), Z(0xb5), Z(0xe0), Z(0x44), Z(0x78), Z(0x84), Z(0x59), + Z(0x56), Z(0x68), Z(0x77), Z(0xa5), Z(0x14), Z(0x06), Z(0xf5), Z(0x2f), + Z(0x8c), Z(0x8a), Z(0x73), Z(0x80), Z(0x76), Z(0xb4), Z(0x10), Z(0x86) +}; + +#undef Z +#define Z(x) cpu_to_be32(x << 19) +static const __be32 sbox3[256] = { + Z(0xa9), Z(0x2a), Z(0x48), Z(0x51), Z(0x84), Z(0x7e), Z(0x49), Z(0xe2), + Z(0xb5), Z(0xb7), Z(0x42), Z(0x33), Z(0x7d), Z(0x5d), Z(0xa6), Z(0x12), + Z(0x44), Z(0x48), Z(0x6d), Z(0x28), Z(0xaa), Z(0x20), Z(0x6d), Z(0x57), + Z(0xd6), Z(0x6b), Z(0x5d), Z(0x72), Z(0xf0), Z(0x92), Z(0x5a), Z(0x1b), + Z(0x53), Z(0x80), Z(0x24), Z(0x70), Z(0x9a), Z(0xcc), Z(0xa7), Z(0x66), + Z(0xa1), Z(0x01), Z(0xa5), Z(0x41), Z(0x97), Z(0x41), Z(0x31), Z(0x82), + Z(0xf1), Z(0x14), Z(0xcf), Z(0x53), Z(0x0d), Z(0xa0), Z(0x10), Z(0xcc), + Z(0x2a), Z(0x7d), Z(0xd2), Z(0xbf), Z(0x4b), Z(0x1a), Z(0xdb), Z(0x16), + Z(0x47), Z(0xf6), Z(0x51), Z(0x36), Z(0xed), Z(0xf3), Z(0xb9), Z(0x1a), + Z(0xa7), Z(0xdf), Z(0x29), Z(0x43), Z(0x01), Z(0x54), Z(0x70), Z(0xa4), + Z(0xbf), Z(0xd4), Z(0x0b), Z(0x53), Z(0x44), Z(0x60), Z(0x9e), Z(0x23), + Z(0xa1), Z(0x18), Z(0x68), Z(0x4f), Z(0xf0), Z(0x2f), Z(0x82), Z(0xc2), + Z(0x2a), Z(0x41), Z(0xb2), Z(0x42), Z(0x0c), Z(0xed), Z(0x0c), Z(0x1d), + Z(0x13), Z(0x3a), Z(0x3c), Z(0x6e), Z(0x35), Z(0xdc), Z(0x60), Z(0x65), + Z(0x85), Z(0xe9), Z(0x64), Z(0x02), Z(0x9a), Z(0x3f), Z(0x9f), Z(0x87), + Z(0x96), Z(0xdf), Z(0xbe), Z(0xf2), Z(0xcb), Z(0xe5), Z(0x6c), Z(0xd4), + Z(0x5a), Z(0x83), Z(0xbf), Z(0x92), Z(0x1b), Z(0x94), Z(0x00), Z(0x42), + Z(0xcf), Z(0x4b), Z(0x00), Z(0x75), Z(0xba), Z(0x8f), Z(0x76), Z(0x5f), + Z(0x5d), Z(0x3a), Z(0x4d), Z(0x09), Z(0x12), Z(0x08), Z(0x38), Z(0x95), + Z(0x17), Z(0xe4), Z(0x01), Z(0x1d), Z(0x4c), Z(0xa9), Z(0xcc), Z(0x85), + Z(0x82), Z(0x4c), Z(0x9d), Z(0x2f), Z(0x3b), Z(0x66), Z(0xa1), Z(0x34), + Z(0x10), Z(0xcd), Z(0x59), Z(0x89), Z(0xa5), Z(0x31), Z(0xcf), Z(0x05), + Z(0xc8), Z(0x84), Z(0xfa), Z(0xc7), Z(0xba), Z(0x4e), Z(0x8b), Z(0x1a), + Z(0x19), Z(0xf1), Z(0xa1), Z(0x3b), Z(0x18), Z(0x12), Z(0x17), Z(0xb0), + Z(0x98), Z(0x8d), Z(0x0b), Z(0x23), Z(0xc3), Z(0x3a), Z(0x2d), Z(0x20), + Z(0xdf), Z(0x13), Z(0xa0), Z(0xa8), Z(0x4c), Z(0x0d), Z(0x6c), Z(0x2f), + Z(0x47), Z(0x13), Z(0x13), Z(0x52), Z(0x1f), Z(0x2d), Z(0xf5), Z(0x79), + Z(0x3d), Z(0xa2), Z(0x54), Z(0xbd), Z(0x69), Z(0xc8), Z(0x6b), Z(0xf3), + Z(0x05), Z(0x28), Z(0xf1), Z(0x16), Z(0x46), Z(0x40), Z(0xb0), Z(0x11), + Z(0xd3), Z(0xb7), Z(0x95), Z(0x49), Z(0xcf), Z(0xc3), Z(0x1d), Z(0x8f), + Z(0xd8), Z(0xe1), Z(0x73), Z(0xdb), Z(0xad), Z(0xc8), Z(0xc9), Z(0xa9), + Z(0xa1), Z(0xc2), Z(0xc5), Z(0xe3), Z(0xba), Z(0xfc), Z(0x0e), Z(0x25) +}; + +/* + * This is a 16 round Feistel network with permutation F_ENCRYPT + */ +#define F_ENCRYPT(R, 
L, sched) \ +do { \ + union lc4 { __be32 l; u8 c[4]; } u; \ + u.l = sched ^ R; \ + L ^= sbox0[u.c[0]] ^ sbox1[u.c[1]] ^ sbox2[u.c[2]] ^ sbox3[u.c[3]]; \ +} while (0) + +/* + * encryptor + */ +static void fcrypt_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) +{ + const struct fcrypt_ctx *ctx = crypto_tfm_ctx(tfm); + struct { + __be32 l, r; + } X; + + memcpy(&X, src, sizeof(X)); + + F_ENCRYPT(X.r, X.l, ctx->sched[0x0]); + F_ENCRYPT(X.l, X.r, ctx->sched[0x1]); + F_ENCRYPT(X.r, X.l, ctx->sched[0x2]); + F_ENCRYPT(X.l, X.r, ctx->sched[0x3]); + F_ENCRYPT(X.r, X.l, ctx->sched[0x4]); + F_ENCRYPT(X.l, X.r, ctx->sched[0x5]); + F_ENCRYPT(X.r, X.l, ctx->sched[0x6]); + F_ENCRYPT(X.l, X.r, ctx->sched[0x7]); + F_ENCRYPT(X.r, X.l, ctx->sched[0x8]); + F_ENCRYPT(X.l, X.r, ctx->sched[0x9]); + F_ENCRYPT(X.r, X.l, ctx->sched[0xa]); + F_ENCRYPT(X.l, X.r, ctx->sched[0xb]); + F_ENCRYPT(X.r, X.l, ctx->sched[0xc]); + F_ENCRYPT(X.l, X.r, ctx->sched[0xd]); + F_ENCRYPT(X.r, X.l, ctx->sched[0xe]); + F_ENCRYPT(X.l, X.r, ctx->sched[0xf]); + + memcpy(dst, &X, sizeof(X)); +} + +/* + * decryptor + */ +static void fcrypt_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) +{ + const struct fcrypt_ctx *ctx = crypto_tfm_ctx(tfm); + struct { + __be32 l, r; + } X; + + memcpy(&X, src, sizeof(X)); + + F_ENCRYPT(X.l, X.r, ctx->sched[0xf]); + F_ENCRYPT(X.r, X.l, ctx->sched[0xe]); + F_ENCRYPT(X.l, X.r, ctx->sched[0xd]); + F_ENCRYPT(X.r, X.l, ctx->sched[0xc]); + F_ENCRYPT(X.l, X.r, ctx->sched[0xb]); + F_ENCRYPT(X.r, X.l, ctx->sched[0xa]); + F_ENCRYPT(X.l, X.r, ctx->sched[0x9]); + F_ENCRYPT(X.r, X.l, ctx->sched[0x8]); + F_ENCRYPT(X.l, X.r, ctx->sched[0x7]); + F_ENCRYPT(X.r, X.l, ctx->sched[0x6]); + F_ENCRYPT(X.l, X.r, ctx->sched[0x5]); + F_ENCRYPT(X.r, X.l, ctx->sched[0x4]); + F_ENCRYPT(X.l, X.r, ctx->sched[0x3]); + F_ENCRYPT(X.r, X.l, ctx->sched[0x2]); + F_ENCRYPT(X.l, X.r, ctx->sched[0x1]); + F_ENCRYPT(X.r, X.l, ctx->sched[0x0]); + + memcpy(dst, &X, sizeof(X)); +} + +/* + * Generate a key schedule from key, the least significant bit in each key byte + * is parity and shall be ignored. This leaves 56 significant bits in the key + * to scatter over the 16 key schedules. For each schedule extract the low + * order 32 bits and use as schedule, then rotate right by 11 bits. 
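+ *
+ * Two equivalent implementations follow: on 64-bit kernels all 56 bits
+ * are kept in a single u64, otherwise they are split across a hi/lo
+ * 32-bit pair; both yield the same schedule.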
+ */ +static int fcrypt_setkey(struct crypto_tfm *tfm, const u8 *key, unsigned int keylen) +{ + struct fcrypt_ctx *ctx = crypto_tfm_ctx(tfm); + +#if BITS_PER_LONG == 64 /* the 64-bit version can also be used for 32-bit + * kernels - it seems to be faster but the code is + * larger */ + + u64 k; /* k holds all 56 non-parity bits */ + + /* discard the parity bits */ + k = (*key++) >> 1; + k <<= 7; + k |= (*key++) >> 1; + k <<= 7; + k |= (*key++) >> 1; + k <<= 7; + k |= (*key++) >> 1; + k <<= 7; + k |= (*key++) >> 1; + k <<= 7; + k |= (*key++) >> 1; + k <<= 7; + k |= (*key++) >> 1; + k <<= 7; + k |= (*key) >> 1; + + /* Use lower 32 bits for schedule, rotate by 11 each round (16 times) */ + ctx->sched[0x0] = cpu_to_be32(k); ror56_64(k, 11); + ctx->sched[0x1] = cpu_to_be32(k); ror56_64(k, 11); + ctx->sched[0x2] = cpu_to_be32(k); ror56_64(k, 11); + ctx->sched[0x3] = cpu_to_be32(k); ror56_64(k, 11); + ctx->sched[0x4] = cpu_to_be32(k); ror56_64(k, 11); + ctx->sched[0x5] = cpu_to_be32(k); ror56_64(k, 11); + ctx->sched[0x6] = cpu_to_be32(k); ror56_64(k, 11); + ctx->sched[0x7] = cpu_to_be32(k); ror56_64(k, 11); + ctx->sched[0x8] = cpu_to_be32(k); ror56_64(k, 11); + ctx->sched[0x9] = cpu_to_be32(k); ror56_64(k, 11); + ctx->sched[0xa] = cpu_to_be32(k); ror56_64(k, 11); + ctx->sched[0xb] = cpu_to_be32(k); ror56_64(k, 11); + ctx->sched[0xc] = cpu_to_be32(k); ror56_64(k, 11); + ctx->sched[0xd] = cpu_to_be32(k); ror56_64(k, 11); + ctx->sched[0xe] = cpu_to_be32(k); ror56_64(k, 11); + ctx->sched[0xf] = cpu_to_be32(k); + + return 0; +#else + u32 hi, lo; /* hi is upper 24 bits and lo lower 32, total 56 */ + + /* discard the parity bits */ + lo = (*key++) >> 1; + lo <<= 7; + lo |= (*key++) >> 1; + lo <<= 7; + lo |= (*key++) >> 1; + lo <<= 7; + lo |= (*key++) >> 1; + hi = lo >> 4; + lo &= 0xf; + lo <<= 7; + lo |= (*key++) >> 1; + lo <<= 7; + lo |= (*key++) >> 1; + lo <<= 7; + lo |= (*key++) >> 1; + lo <<= 7; + lo |= (*key) >> 1; + + /* Use lower 32 bits for schedule, rotate by 11 each round (16 times) */ + ctx->sched[0x0] = cpu_to_be32(lo); ror56(hi, lo, 11); + ctx->sched[0x1] = cpu_to_be32(lo); ror56(hi, lo, 11); + ctx->sched[0x2] = cpu_to_be32(lo); ror56(hi, lo, 11); + ctx->sched[0x3] = cpu_to_be32(lo); ror56(hi, lo, 11); + ctx->sched[0x4] = cpu_to_be32(lo); ror56(hi, lo, 11); + ctx->sched[0x5] = cpu_to_be32(lo); ror56(hi, lo, 11); + ctx->sched[0x6] = cpu_to_be32(lo); ror56(hi, lo, 11); + ctx->sched[0x7] = cpu_to_be32(lo); ror56(hi, lo, 11); + ctx->sched[0x8] = cpu_to_be32(lo); ror56(hi, lo, 11); + ctx->sched[0x9] = cpu_to_be32(lo); ror56(hi, lo, 11); + ctx->sched[0xa] = cpu_to_be32(lo); ror56(hi, lo, 11); + ctx->sched[0xb] = cpu_to_be32(lo); ror56(hi, lo, 11); + ctx->sched[0xc] = cpu_to_be32(lo); ror56(hi, lo, 11); + ctx->sched[0xd] = cpu_to_be32(lo); ror56(hi, lo, 11); + ctx->sched[0xe] = cpu_to_be32(lo); ror56(hi, lo, 11); + ctx->sched[0xf] = cpu_to_be32(lo); + return 0; +#endif +} + +static struct crypto_alg fcrypt_alg = { + .cra_name = "fcrypt", + .cra_flags = CRYPTO_ALG_TYPE_CIPHER, + .cra_blocksize = 8, + .cra_ctxsize = sizeof(struct fcrypt_ctx), + .cra_module = THIS_MODULE, + .cra_alignmask = 3, + .cra_list = LIST_HEAD_INIT(fcrypt_alg.cra_list), + .cra_u = { .cipher = { + .cia_min_keysize = 8, + .cia_max_keysize = 8, + .cia_setkey = fcrypt_setkey, + .cia_encrypt = fcrypt_encrypt, + .cia_decrypt = fcrypt_decrypt } } +}; + +static int __init fcrypt_mod_init(void) +{ + return crypto_register_alg(&fcrypt_alg); +} + +static void __exit fcrypt_mod_fini(void) +{ + crypto_unregister_alg(&fcrypt_alg); +} + 
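+
+/*
+ * A minimal usage sketch, not part of the original fcrypt.c: it shows how
+ * a caller might drive the "fcrypt" single-block cipher through the
+ * kernel's cipher API of this era.  The function name, key bytes and
+ * buffers below are hypothetical, and <linux/err.h> is assumed for
+ * IS_ERR()/PTR_ERR().
+ */
+static int __maybe_unused fcrypt_usage_sketch(void)
+{
+	struct crypto_cipher *tfm;
+	static const u8 key[8] = { 0x01, 0x23, 0x45, 0x67, 0x89, 0xab, 0xcd, 0xef };
+	u8 in[8] = { 0 }, out[8];
+	int err;
+
+	/* Allocate a single-block cipher transform by algorithm name. */
+	tfm = crypto_alloc_cipher("fcrypt", 0, 0);
+	if (IS_ERR(tfm))
+		return PTR_ERR(tfm);
+
+	/* 8-byte key; the low bit of each byte is parity and is ignored. */
+	err = crypto_cipher_setkey(tfm, key, sizeof(key));
+	if (!err) {
+		/* Encrypt one 8-byte block: out = fcrypt(key, in). */
+		crypto_cipher_encrypt_one(tfm, out, in);
+	}
+
+	crypto_free_cipher(tfm);
+	return err;
+}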
+module_init(fcrypt_mod_init); +module_exit(fcrypt_mod_fini); + +MODULE_LICENSE("Dual BSD/GPL"); +MODULE_DESCRIPTION("FCrypt Cipher Algorithm"); +MODULE_AUTHOR("David Howells <dhowells@redhat.com>"); diff --git a/crypto/fips.c b/crypto/fips.c new file mode 100644 index 00000000..55397008 --- /dev/null +++ b/crypto/fips.c @@ -0,0 +1,27 @@ +/* + * FIPS 200 support. + * + * Copyright (c) 2008 Neil Horman <nhorman@tuxdriver.com> + * + * This program is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License as published by the Free + * Software Foundation; either version 2 of the License, or (at your option) + * any later version. + * + */ + +#include "internal.h" + +int fips_enabled; +EXPORT_SYMBOL_GPL(fips_enabled); + +/* Process kernel command-line parameter at boot time. fips=0 or fips=1 */ +static int fips_enable(char *str) +{ + fips_enabled = !!simple_strtol(str, NULL, 0); + printk(KERN_INFO "fips mode: %s\n", + fips_enabled ? "enabled" : "disabled"); + return 1; +} + +__setup("fips=", fips_enable); diff --git a/crypto/gcm.c b/crypto/gcm.c new file mode 100644 index 00000000..1a252639 --- /dev/null +++ b/crypto/gcm.c @@ -0,0 +1,1368 @@ +/* + * GCM: Galois/Counter Mode. + * + * Copyright (c) 2007 Nokia Siemens Networks - Mikko Herranen <mh1@iki.fi> + * + * This program is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License version 2 as published + * by the Free Software Foundation. + */ + +#include <crypto/gf128mul.h> +#include <crypto/internal/aead.h> +#include <crypto/internal/skcipher.h> +#include <crypto/internal/hash.h> +#include <crypto/scatterwalk.h> +#include <crypto/hash.h> +#include "internal.h" +#include <linux/completion.h> +#include <linux/err.h> +#include <linux/init.h> +#include <linux/kernel.h> +#include <linux/module.h> +#include <linux/slab.h> + +struct gcm_instance_ctx { + struct crypto_skcipher_spawn ctr; + struct crypto_ahash_spawn ghash; +}; + +struct crypto_gcm_ctx { + struct crypto_ablkcipher *ctr; + struct crypto_ahash *ghash; +}; + +struct crypto_rfc4106_ctx { + struct crypto_aead *child; + u8 nonce[4]; +}; + +struct crypto_rfc4543_ctx { + struct crypto_aead *child; + u8 nonce[4]; +}; + +struct crypto_rfc4543_req_ctx { + u8 auth_tag[16]; + struct scatterlist cipher[1]; + struct scatterlist payload[2]; + struct scatterlist assoc[2]; + struct aead_request subreq; +}; + +struct crypto_gcm_ghash_ctx { + unsigned int cryptlen; + struct scatterlist *src; + void (*complete)(struct aead_request *req, int err); +}; + +struct crypto_gcm_req_priv_ctx { + u8 auth_tag[16]; + u8 iauth_tag[16]; + struct scatterlist src[2]; + struct scatterlist dst[2]; + struct crypto_gcm_ghash_ctx ghash_ctx; + union { + struct ahash_request ahreq; + struct ablkcipher_request abreq; + } u; +}; + +struct crypto_gcm_setkey_result { + int err; + struct completion completion; +}; + +static void *gcm_zeroes; + +static inline struct crypto_gcm_req_priv_ctx *crypto_gcm_reqctx( + struct aead_request *req) +{ + unsigned long align = crypto_aead_alignmask(crypto_aead_reqtfm(req)); + + return (void *)PTR_ALIGN((u8 *)aead_request_ctx(req), align + 1); +} + +static void crypto_gcm_setkey_done(struct crypto_async_request *req, int err) +{ + struct crypto_gcm_setkey_result *result = req->data; + + if (err == -EINPROGRESS) + return; + + result->err = err; + complete(&result->completion); +} + +static int crypto_gcm_setkey(struct crypto_aead *aead, const u8 *key, + unsigned int keylen) +{ + struct 
crypto_gcm_ctx *ctx = crypto_aead_ctx(aead); + struct crypto_ahash *ghash = ctx->ghash; + struct crypto_ablkcipher *ctr = ctx->ctr; + struct { + be128 hash; + u8 iv[8]; + + struct crypto_gcm_setkey_result result; + + struct scatterlist sg[1]; + struct ablkcipher_request req; + } *data; + int err; + + crypto_ablkcipher_clear_flags(ctr, CRYPTO_TFM_REQ_MASK); + crypto_ablkcipher_set_flags(ctr, crypto_aead_get_flags(aead) & + CRYPTO_TFM_REQ_MASK); + + err = crypto_ablkcipher_setkey(ctr, key, keylen); + if (err) + return err; + + crypto_aead_set_flags(aead, crypto_ablkcipher_get_flags(ctr) & + CRYPTO_TFM_RES_MASK); + + data = kzalloc(sizeof(*data) + crypto_ablkcipher_reqsize(ctr), + GFP_KERNEL); + if (!data) + return -ENOMEM; + + init_completion(&data->result.completion); + sg_init_one(data->sg, &data->hash, sizeof(data->hash)); + ablkcipher_request_set_tfm(&data->req, ctr); + ablkcipher_request_set_callback(&data->req, CRYPTO_TFM_REQ_MAY_SLEEP | + CRYPTO_TFM_REQ_MAY_BACKLOG, + crypto_gcm_setkey_done, + &data->result); + ablkcipher_request_set_crypt(&data->req, data->sg, data->sg, + sizeof(data->hash), data->iv); + + err = crypto_ablkcipher_encrypt(&data->req); + if (err == -EINPROGRESS || err == -EBUSY) { + err = wait_for_completion_interruptible( + &data->result.completion); + if (!err) + err = data->result.err; + } + + if (err) + goto out; + + crypto_ahash_clear_flags(ghash, CRYPTO_TFM_REQ_MASK); + crypto_ahash_set_flags(ghash, crypto_aead_get_flags(aead) & + CRYPTO_TFM_REQ_MASK); + err = crypto_ahash_setkey(ghash, (u8 *)&data->hash, sizeof(be128)); + crypto_aead_set_flags(aead, crypto_ahash_get_flags(ghash) & + CRYPTO_TFM_RES_MASK); + +out: + kfree(data); + return err; +} + +static int crypto_gcm_setauthsize(struct crypto_aead *tfm, + unsigned int authsize) +{ + switch (authsize) { + case 4: + case 8: + case 12: + case 13: + case 14: + case 15: + case 16: + break; + default: + return -EINVAL; + } + + return 0; +} + +static void crypto_gcm_init_crypt(struct ablkcipher_request *ablk_req, + struct aead_request *req, + unsigned int cryptlen) +{ + struct crypto_aead *aead = crypto_aead_reqtfm(req); + struct crypto_gcm_ctx *ctx = crypto_aead_ctx(aead); + struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req); + struct scatterlist *dst; + __be32 counter = cpu_to_be32(1); + + memset(pctx->auth_tag, 0, sizeof(pctx->auth_tag)); + memcpy(req->iv + 12, &counter, 4); + + sg_init_table(pctx->src, 2); + sg_set_buf(pctx->src, pctx->auth_tag, sizeof(pctx->auth_tag)); + scatterwalk_sg_chain(pctx->src, 2, req->src); + + dst = pctx->src; + if (req->src != req->dst) { + sg_init_table(pctx->dst, 2); + sg_set_buf(pctx->dst, pctx->auth_tag, sizeof(pctx->auth_tag)); + scatterwalk_sg_chain(pctx->dst, 2, req->dst); + dst = pctx->dst; + } + + ablkcipher_request_set_tfm(ablk_req, ctx->ctr); + ablkcipher_request_set_crypt(ablk_req, pctx->src, dst, + cryptlen + sizeof(pctx->auth_tag), + req->iv); +} + +static inline unsigned int gcm_remain(unsigned int len) +{ + len &= 0xfU; + return len ? 
16 - len : 0; +} + +static void gcm_hash_len_done(struct crypto_async_request *areq, int err); +static void gcm_hash_final_done(struct crypto_async_request *areq, int err); + +static int gcm_hash_update(struct aead_request *req, + struct crypto_gcm_req_priv_ctx *pctx, + crypto_completion_t complete, + struct scatterlist *src, + unsigned int len) +{ + struct ahash_request *ahreq = &pctx->u.ahreq; + + ahash_request_set_callback(ahreq, aead_request_flags(req), + complete, req); + ahash_request_set_crypt(ahreq, src, NULL, len); + + return crypto_ahash_update(ahreq); +} + +static int gcm_hash_remain(struct aead_request *req, + struct crypto_gcm_req_priv_ctx *pctx, + unsigned int remain, + crypto_completion_t complete) +{ + struct ahash_request *ahreq = &pctx->u.ahreq; + + ahash_request_set_callback(ahreq, aead_request_flags(req), + complete, req); + sg_init_one(pctx->src, gcm_zeroes, remain); + ahash_request_set_crypt(ahreq, pctx->src, NULL, remain); + + return crypto_ahash_update(ahreq); +} + +static int gcm_hash_len(struct aead_request *req, + struct crypto_gcm_req_priv_ctx *pctx) +{ + struct ahash_request *ahreq = &pctx->u.ahreq; + struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx; + u128 lengths; + + lengths.a = cpu_to_be64(req->assoclen * 8); + lengths.b = cpu_to_be64(gctx->cryptlen * 8); + memcpy(pctx->iauth_tag, &lengths, 16); + sg_init_one(pctx->src, pctx->iauth_tag, 16); + ahash_request_set_callback(ahreq, aead_request_flags(req), + gcm_hash_len_done, req); + ahash_request_set_crypt(ahreq, pctx->src, + NULL, sizeof(lengths)); + + return crypto_ahash_update(ahreq); +} + +static int gcm_hash_final(struct aead_request *req, + struct crypto_gcm_req_priv_ctx *pctx) +{ + struct ahash_request *ahreq = &pctx->u.ahreq; + + ahash_request_set_callback(ahreq, aead_request_flags(req), + gcm_hash_final_done, req); + ahash_request_set_crypt(ahreq, NULL, pctx->iauth_tag, 0); + + return crypto_ahash_final(ahreq); +} + +static void __gcm_hash_final_done(struct aead_request *req, int err) +{ + struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req); + struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx; + + if (!err) + crypto_xor(pctx->auth_tag, pctx->iauth_tag, 16); + + gctx->complete(req, err); +} + +static void gcm_hash_final_done(struct crypto_async_request *areq, int err) +{ + struct aead_request *req = areq->data; + + __gcm_hash_final_done(req, err); +} + +static void __gcm_hash_len_done(struct aead_request *req, int err) +{ + struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req); + + if (!err) { + err = gcm_hash_final(req, pctx); + if (err == -EINPROGRESS || err == -EBUSY) + return; + } + + __gcm_hash_final_done(req, err); +} + +static void gcm_hash_len_done(struct crypto_async_request *areq, int err) +{ + struct aead_request *req = areq->data; + + __gcm_hash_len_done(req, err); +} + +static void __gcm_hash_crypt_remain_done(struct aead_request *req, int err) +{ + struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req); + + if (!err) { + err = gcm_hash_len(req, pctx); + if (err == -EINPROGRESS || err == -EBUSY) + return; + } + + __gcm_hash_len_done(req, err); +} + +static void gcm_hash_crypt_remain_done(struct crypto_async_request *areq, + int err) +{ + struct aead_request *req = areq->data; + + __gcm_hash_crypt_remain_done(req, err); +} + +static void __gcm_hash_crypt_done(struct aead_request *req, int err) +{ + struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req); + struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx; + unsigned int remain; + + if (!err) { + 
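+		/*
+		 * Only reached when the ciphertext length is not a multiple
+		 * of 16 (hence the BUG_ON below): hash zero padding to fill
+		 * the final GHASH block before hashing the lengths.
+		 */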
remain = gcm_remain(gctx->cryptlen); + BUG_ON(!remain); + err = gcm_hash_remain(req, pctx, remain, + gcm_hash_crypt_remain_done); + if (err == -EINPROGRESS || err == -EBUSY) + return; + } + + __gcm_hash_crypt_remain_done(req, err); +} + +static void gcm_hash_crypt_done(struct crypto_async_request *areq, int err) +{ + struct aead_request *req = areq->data; + + __gcm_hash_crypt_done(req, err); +} + +static void __gcm_hash_assoc_remain_done(struct aead_request *req, int err) +{ + struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req); + struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx; + crypto_completion_t complete; + unsigned int remain = 0; + + if (!err && gctx->cryptlen) { + remain = gcm_remain(gctx->cryptlen); + complete = remain ? gcm_hash_crypt_done : + gcm_hash_crypt_remain_done; + err = gcm_hash_update(req, pctx, complete, + gctx->src, gctx->cryptlen); + if (err == -EINPROGRESS || err == -EBUSY) + return; + } + + if (remain) + __gcm_hash_crypt_done(req, err); + else + __gcm_hash_crypt_remain_done(req, err); +} + +static void gcm_hash_assoc_remain_done(struct crypto_async_request *areq, + int err) +{ + struct aead_request *req = areq->data; + + __gcm_hash_assoc_remain_done(req, err); +} + +static void __gcm_hash_assoc_done(struct aead_request *req, int err) +{ + struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req); + unsigned int remain; + + if (!err) { + remain = gcm_remain(req->assoclen); + BUG_ON(!remain); + err = gcm_hash_remain(req, pctx, remain, + gcm_hash_assoc_remain_done); + if (err == -EINPROGRESS || err == -EBUSY) + return; + } + + __gcm_hash_assoc_remain_done(req, err); +} + +static void gcm_hash_assoc_done(struct crypto_async_request *areq, int err) +{ + struct aead_request *req = areq->data; + + __gcm_hash_assoc_done(req, err); +} + +static void __gcm_hash_init_done(struct aead_request *req, int err) +{ + struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req); + crypto_completion_t complete; + unsigned int remain = 0; + + if (!err && req->assoclen) { + remain = gcm_remain(req->assoclen); + complete = remain ? gcm_hash_assoc_done : + gcm_hash_assoc_remain_done; + err = gcm_hash_update(req, pctx, complete, + req->assoc, req->assoclen); + if (err == -EINPROGRESS || err == -EBUSY) + return; + } + + if (remain) + __gcm_hash_assoc_done(req, err); + else + __gcm_hash_assoc_remain_done(req, err); +} + +static void gcm_hash_init_done(struct crypto_async_request *areq, int err) +{ + struct aead_request *req = areq->data; + + __gcm_hash_init_done(req, err); +} + +static int gcm_hash(struct aead_request *req, + struct crypto_gcm_req_priv_ctx *pctx) +{ + struct ahash_request *ahreq = &pctx->u.ahreq; + struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx; + struct crypto_gcm_ctx *ctx = crypto_tfm_ctx(req->base.tfm); + unsigned int remain; + crypto_completion_t complete; + int err; + + ahash_request_set_tfm(ahreq, ctx->ghash); + + ahash_request_set_callback(ahreq, aead_request_flags(req), + gcm_hash_init_done, req); + err = crypto_ahash_init(ahreq); + if (err) + return err; + remain = gcm_remain(req->assoclen); + complete = remain ? gcm_hash_assoc_done : gcm_hash_assoc_remain_done; + err = gcm_hash_update(req, pctx, complete, req->assoc, req->assoclen); + if (err) + return err; + if (remain) { + err = gcm_hash_remain(req, pctx, remain, + gcm_hash_assoc_remain_done); + if (err) + return err; + } + remain = gcm_remain(gctx->cryptlen); + complete = remain ? 
gcm_hash_crypt_done : gcm_hash_crypt_remain_done; + err = gcm_hash_update(req, pctx, complete, gctx->src, gctx->cryptlen); + if (err) + return err; + if (remain) { + err = gcm_hash_remain(req, pctx, remain, + gcm_hash_crypt_remain_done); + if (err) + return err; + } + err = gcm_hash_len(req, pctx); + if (err) + return err; + err = gcm_hash_final(req, pctx); + if (err) + return err; + + return 0; +} + +static void gcm_enc_copy_hash(struct aead_request *req, + struct crypto_gcm_req_priv_ctx *pctx) +{ + struct crypto_aead *aead = crypto_aead_reqtfm(req); + u8 *auth_tag = pctx->auth_tag; + + scatterwalk_map_and_copy(auth_tag, req->dst, req->cryptlen, + crypto_aead_authsize(aead), 1); +} + +static void gcm_enc_hash_done(struct aead_request *req, int err) +{ + struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req); + + if (!err) + gcm_enc_copy_hash(req, pctx); + + aead_request_complete(req, err); +} + +static void gcm_encrypt_done(struct crypto_async_request *areq, int err) +{ + struct aead_request *req = areq->data; + struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req); + + if (!err) { + err = gcm_hash(req, pctx); + if (err == -EINPROGRESS || err == -EBUSY) + return; + else if (!err) { + crypto_xor(pctx->auth_tag, pctx->iauth_tag, 16); + gcm_enc_copy_hash(req, pctx); + } + } + + aead_request_complete(req, err); +} + +static int crypto_gcm_encrypt(struct aead_request *req) +{ + struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req); + struct ablkcipher_request *abreq = &pctx->u.abreq; + struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx; + int err; + + crypto_gcm_init_crypt(abreq, req, req->cryptlen); + ablkcipher_request_set_callback(abreq, aead_request_flags(req), + gcm_encrypt_done, req); + + gctx->src = req->dst; + gctx->cryptlen = req->cryptlen; + gctx->complete = gcm_enc_hash_done; + + err = crypto_ablkcipher_encrypt(abreq); + if (err) + return err; + + err = gcm_hash(req, pctx); + if (err) + return err; + + crypto_xor(pctx->auth_tag, pctx->iauth_tag, 16); + gcm_enc_copy_hash(req, pctx); + + return 0; +} + +static int crypto_gcm_verify(struct aead_request *req, + struct crypto_gcm_req_priv_ctx *pctx) +{ + struct crypto_aead *aead = crypto_aead_reqtfm(req); + u8 *auth_tag = pctx->auth_tag; + u8 *iauth_tag = pctx->iauth_tag; + unsigned int authsize = crypto_aead_authsize(aead); + unsigned int cryptlen = req->cryptlen - authsize; + + crypto_xor(auth_tag, iauth_tag, 16); + scatterwalk_map_and_copy(iauth_tag, req->src, cryptlen, authsize, 0); + return memcmp(iauth_tag, auth_tag, authsize) ? 
-EBADMSG : 0; +} + +static void gcm_decrypt_done(struct crypto_async_request *areq, int err) +{ + struct aead_request *req = areq->data; + struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req); + + if (!err) + err = crypto_gcm_verify(req, pctx); + + aead_request_complete(req, err); +} + +static void gcm_dec_hash_done(struct aead_request *req, int err) +{ + struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req); + struct ablkcipher_request *abreq = &pctx->u.abreq; + struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx; + + if (!err) { + ablkcipher_request_set_callback(abreq, aead_request_flags(req), + gcm_decrypt_done, req); + crypto_gcm_init_crypt(abreq, req, gctx->cryptlen); + err = crypto_ablkcipher_decrypt(abreq); + if (err == -EINPROGRESS || err == -EBUSY) + return; + else if (!err) + err = crypto_gcm_verify(req, pctx); + } + + aead_request_complete(req, err); +} + +static int crypto_gcm_decrypt(struct aead_request *req) +{ + struct crypto_aead *aead = crypto_aead_reqtfm(req); + struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req); + struct ablkcipher_request *abreq = &pctx->u.abreq; + struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx; + unsigned int authsize = crypto_aead_authsize(aead); + unsigned int cryptlen = req->cryptlen; + int err; + + if (cryptlen < authsize) + return -EINVAL; + cryptlen -= authsize; + + gctx->src = req->src; + gctx->cryptlen = cryptlen; + gctx->complete = gcm_dec_hash_done; + + err = gcm_hash(req, pctx); + if (err) + return err; + + ablkcipher_request_set_callback(abreq, aead_request_flags(req), + gcm_decrypt_done, req); + crypto_gcm_init_crypt(abreq, req, cryptlen); + err = crypto_ablkcipher_decrypt(abreq); + if (err) + return err; + + return crypto_gcm_verify(req, pctx); +} + +static int crypto_gcm_init_tfm(struct crypto_tfm *tfm) +{ + struct crypto_instance *inst = (void *)tfm->__crt_alg; + struct gcm_instance_ctx *ictx = crypto_instance_ctx(inst); + struct crypto_gcm_ctx *ctx = crypto_tfm_ctx(tfm); + struct crypto_ablkcipher *ctr; + struct crypto_ahash *ghash; + unsigned long align; + int err; + + ghash = crypto_spawn_ahash(&ictx->ghash); + if (IS_ERR(ghash)) + return PTR_ERR(ghash); + + ctr = crypto_spawn_skcipher(&ictx->ctr); + err = PTR_ERR(ctr); + if (IS_ERR(ctr)) + goto err_free_hash; + + ctx->ctr = ctr; + ctx->ghash = ghash; + + align = crypto_tfm_alg_alignmask(tfm); + align &= ~(crypto_tfm_ctx_alignment() - 1); + tfm->crt_aead.reqsize = align + + offsetof(struct crypto_gcm_req_priv_ctx, u) + + max(sizeof(struct ablkcipher_request) + + crypto_ablkcipher_reqsize(ctr), + sizeof(struct ahash_request) + + crypto_ahash_reqsize(ghash)); + + return 0; + +err_free_hash: + crypto_free_ahash(ghash); + return err; +} + +static void crypto_gcm_exit_tfm(struct crypto_tfm *tfm) +{ + struct crypto_gcm_ctx *ctx = crypto_tfm_ctx(tfm); + + crypto_free_ahash(ctx->ghash); + crypto_free_ablkcipher(ctx->ctr); +} + +static struct crypto_instance *crypto_gcm_alloc_common(struct rtattr **tb, + const char *full_name, + const char *ctr_name, + const char *ghash_name) +{ + struct crypto_attr_type *algt; + struct crypto_instance *inst; + struct crypto_alg *ctr; + struct crypto_alg *ghash_alg; + struct ahash_alg *ghash_ahash_alg; + struct gcm_instance_ctx *ctx; + int err; + + algt = crypto_get_attr_type(tb); + err = PTR_ERR(algt); + if (IS_ERR(algt)) + return ERR_PTR(err); + + if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask) + return ERR_PTR(-EINVAL); + + ghash_alg = crypto_find_alg(ghash_name, &crypto_ahash_type, + CRYPTO_ALG_TYPE_HASH, + 
CRYPTO_ALG_TYPE_AHASH_MASK); + err = PTR_ERR(ghash_alg); + if (IS_ERR(ghash_alg)) + return ERR_PTR(err); + + err = -ENOMEM; + inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL); + if (!inst) + goto out_put_ghash; + + ctx = crypto_instance_ctx(inst); + ghash_ahash_alg = container_of(ghash_alg, struct ahash_alg, halg.base); + err = crypto_init_ahash_spawn(&ctx->ghash, &ghash_ahash_alg->halg, + inst); + if (err) + goto err_free_inst; + + crypto_set_skcipher_spawn(&ctx->ctr, inst); + err = crypto_grab_skcipher(&ctx->ctr, ctr_name, 0, + crypto_requires_sync(algt->type, + algt->mask)); + if (err) + goto err_drop_ghash; + + ctr = crypto_skcipher_spawn_alg(&ctx->ctr); + + /* We only support 16-byte blocks. */ + if (ctr->cra_ablkcipher.ivsize != 16) + goto out_put_ctr; + + /* Not a stream cipher? */ + err = -EINVAL; + if (ctr->cra_blocksize != 1) + goto out_put_ctr; + + err = -ENAMETOOLONG; + if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME, + "gcm_base(%s,%s)", ctr->cra_driver_name, + ghash_alg->cra_driver_name) >= + CRYPTO_MAX_ALG_NAME) + goto out_put_ctr; + + memcpy(inst->alg.cra_name, full_name, CRYPTO_MAX_ALG_NAME); + + inst->alg.cra_flags = CRYPTO_ALG_TYPE_AEAD; + inst->alg.cra_flags |= ctr->cra_flags & CRYPTO_ALG_ASYNC; + inst->alg.cra_priority = ctr->cra_priority; + inst->alg.cra_blocksize = 1; + inst->alg.cra_alignmask = ctr->cra_alignmask | (__alignof__(u64) - 1); + inst->alg.cra_type = &crypto_aead_type; + inst->alg.cra_aead.ivsize = 16; + inst->alg.cra_aead.maxauthsize = 16; + inst->alg.cra_ctxsize = sizeof(struct crypto_gcm_ctx); + inst->alg.cra_init = crypto_gcm_init_tfm; + inst->alg.cra_exit = crypto_gcm_exit_tfm; + inst->alg.cra_aead.setkey = crypto_gcm_setkey; + inst->alg.cra_aead.setauthsize = crypto_gcm_setauthsize; + inst->alg.cra_aead.encrypt = crypto_gcm_encrypt; + inst->alg.cra_aead.decrypt = crypto_gcm_decrypt; + +out: + crypto_mod_put(ghash_alg); + return inst; + +out_put_ctr: + crypto_drop_skcipher(&ctx->ctr); +err_drop_ghash: + crypto_drop_ahash(&ctx->ghash); +err_free_inst: + kfree(inst); +out_put_ghash: + inst = ERR_PTR(err); + goto out; +} + +static struct crypto_instance *crypto_gcm_alloc(struct rtattr **tb) +{ + int err; + const char *cipher_name; + char ctr_name[CRYPTO_MAX_ALG_NAME]; + char full_name[CRYPTO_MAX_ALG_NAME]; + + cipher_name = crypto_attr_alg_name(tb[1]); + err = PTR_ERR(cipher_name); + if (IS_ERR(cipher_name)) + return ERR_PTR(err); + + if (snprintf(ctr_name, CRYPTO_MAX_ALG_NAME, "ctr(%s)", cipher_name) >= + CRYPTO_MAX_ALG_NAME) + return ERR_PTR(-ENAMETOOLONG); + + if (snprintf(full_name, CRYPTO_MAX_ALG_NAME, "gcm(%s)", cipher_name) >= + CRYPTO_MAX_ALG_NAME) + return ERR_PTR(-ENAMETOOLONG); + + return crypto_gcm_alloc_common(tb, full_name, ctr_name, "ghash"); +} + +static void crypto_gcm_free(struct crypto_instance *inst) +{ + struct gcm_instance_ctx *ctx = crypto_instance_ctx(inst); + + crypto_drop_skcipher(&ctx->ctr); + crypto_drop_ahash(&ctx->ghash); + kfree(inst); +} + +static struct crypto_template crypto_gcm_tmpl = { + .name = "gcm", + .alloc = crypto_gcm_alloc, + .free = crypto_gcm_free, + .module = THIS_MODULE, +}; + +static struct crypto_instance *crypto_gcm_base_alloc(struct rtattr **tb) +{ + int err; + const char *ctr_name; + const char *ghash_name; + char full_name[CRYPTO_MAX_ALG_NAME]; + + ctr_name = crypto_attr_alg_name(tb[1]); + err = PTR_ERR(ctr_name); + if (IS_ERR(ctr_name)) + return ERR_PTR(err); + + ghash_name = crypto_attr_alg_name(tb[2]); + err = PTR_ERR(ghash_name); + if (IS_ERR(ghash_name)) + return ERR_PTR(err); 
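
For orientation, a minimal consumer-side sketch of the template this file registers. The helper name example_alloc_gcm_aes and the 16-byte key are illustrative and not part of this file; the AEAD calls are the ones declared in <linux/crypto.h>, and an AES cipher must be registered for "gcm(aes)" to resolve:

        /* Hypothetical caller: allocate and key an AES-128-GCM transform. */
        static struct crypto_aead *example_alloc_gcm_aes(const u8 *key)
        {
                struct crypto_aead *tfm;
                int err;

                /* Resolved by crypto_gcm_alloc() above, which expands the
                 * request into gcm_base(ctr(aes),ghash). */
                tfm = crypto_alloc_aead("gcm(aes)", 0, 0);
                if (IS_ERR(tfm))
                        return tfm;

                err = crypto_aead_setkey(tfm, key, 16);         /* AES-128 key */
                if (!err)
                        err = crypto_aead_setauthsize(tfm, 16); /* full 16-byte GCM tag */
                if (err) {
                        crypto_free_aead(tfm);
                        return ERR_PTR(err);
                }

                return tfm;
        }

Callers that want a specific CTR or GHASH driver can instead request the explicit gcm_base(...,...) form, which is what crypto_gcm_base_alloc() here parses.
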
+ + if (snprintf(full_name, CRYPTO_MAX_ALG_NAME, "gcm_base(%s,%s)", + ctr_name, ghash_name) >= CRYPTO_MAX_ALG_NAME) + return ERR_PTR(-ENAMETOOLONG); + + return crypto_gcm_alloc_common(tb, full_name, ctr_name, ghash_name); +} + +static struct crypto_template crypto_gcm_base_tmpl = { + .name = "gcm_base", + .alloc = crypto_gcm_base_alloc, + .free = crypto_gcm_free, + .module = THIS_MODULE, +}; + +static int crypto_rfc4106_setkey(struct crypto_aead *parent, const u8 *key, + unsigned int keylen) +{ + struct crypto_rfc4106_ctx *ctx = crypto_aead_ctx(parent); + struct crypto_aead *child = ctx->child; + int err; + + if (keylen < 4) + return -EINVAL; + + keylen -= 4; + memcpy(ctx->nonce, key + keylen, 4); + + crypto_aead_clear_flags(child, CRYPTO_TFM_REQ_MASK); + crypto_aead_set_flags(child, crypto_aead_get_flags(parent) & + CRYPTO_TFM_REQ_MASK); + err = crypto_aead_setkey(child, key, keylen); + crypto_aead_set_flags(parent, crypto_aead_get_flags(child) & + CRYPTO_TFM_RES_MASK); + + return err; +} + +static int crypto_rfc4106_setauthsize(struct crypto_aead *parent, + unsigned int authsize) +{ + struct crypto_rfc4106_ctx *ctx = crypto_aead_ctx(parent); + + switch (authsize) { + case 8: + case 12: + case 16: + break; + default: + return -EINVAL; + } + + return crypto_aead_setauthsize(ctx->child, authsize); +} + +static struct aead_request *crypto_rfc4106_crypt(struct aead_request *req) +{ + struct aead_request *subreq = aead_request_ctx(req); + struct crypto_aead *aead = crypto_aead_reqtfm(req); + struct crypto_rfc4106_ctx *ctx = crypto_aead_ctx(aead); + struct crypto_aead *child = ctx->child; + u8 *iv = PTR_ALIGN((u8 *)(subreq + 1) + crypto_aead_reqsize(child), + crypto_aead_alignmask(child) + 1); + + memcpy(iv, ctx->nonce, 4); + memcpy(iv + 4, req->iv, 8); + + aead_request_set_tfm(subreq, child); + aead_request_set_callback(subreq, req->base.flags, req->base.complete, + req->base.data); + aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen, iv); + aead_request_set_assoc(subreq, req->assoc, req->assoclen); + + return subreq; +} + +static int crypto_rfc4106_encrypt(struct aead_request *req) +{ + req = crypto_rfc4106_crypt(req); + + return crypto_aead_encrypt(req); +} + +static int crypto_rfc4106_decrypt(struct aead_request *req) +{ + req = crypto_rfc4106_crypt(req); + + return crypto_aead_decrypt(req); +} + +static int crypto_rfc4106_init_tfm(struct crypto_tfm *tfm) +{ + struct crypto_instance *inst = (void *)tfm->__crt_alg; + struct crypto_aead_spawn *spawn = crypto_instance_ctx(inst); + struct crypto_rfc4106_ctx *ctx = crypto_tfm_ctx(tfm); + struct crypto_aead *aead; + unsigned long align; + + aead = crypto_spawn_aead(spawn); + if (IS_ERR(aead)) + return PTR_ERR(aead); + + ctx->child = aead; + + align = crypto_aead_alignmask(aead); + align &= ~(crypto_tfm_ctx_alignment() - 1); + tfm->crt_aead.reqsize = sizeof(struct aead_request) + + ALIGN(crypto_aead_reqsize(aead), + crypto_tfm_ctx_alignment()) + + align + 16; + + return 0; +} + +static void crypto_rfc4106_exit_tfm(struct crypto_tfm *tfm) +{ + struct crypto_rfc4106_ctx *ctx = crypto_tfm_ctx(tfm); + + crypto_free_aead(ctx->child); +} + +static struct crypto_instance *crypto_rfc4106_alloc(struct rtattr **tb) +{ + struct crypto_attr_type *algt; + struct crypto_instance *inst; + struct crypto_aead_spawn *spawn; + struct crypto_alg *alg; + const char *ccm_name; + int err; + + algt = crypto_get_attr_type(tb); + err = PTR_ERR(algt); + if (IS_ERR(algt)) + return ERR_PTR(err); + + if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask) + return 
ERR_PTR(-EINVAL); + + ccm_name = crypto_attr_alg_name(tb[1]); + err = PTR_ERR(ccm_name); + if (IS_ERR(ccm_name)) + return ERR_PTR(err); + + inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL); + if (!inst) + return ERR_PTR(-ENOMEM); + + spawn = crypto_instance_ctx(inst); + crypto_set_aead_spawn(spawn, inst); + err = crypto_grab_aead(spawn, ccm_name, 0, + crypto_requires_sync(algt->type, algt->mask)); + if (err) + goto out_free_inst; + + alg = crypto_aead_spawn_alg(spawn); + + err = -EINVAL; + + /* We only support 16-byte blocks. */ + if (alg->cra_aead.ivsize != 16) + goto out_drop_alg; + + /* Not a stream cipher? */ + if (alg->cra_blocksize != 1) + goto out_drop_alg; + + err = -ENAMETOOLONG; + if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME, + "rfc4106(%s)", alg->cra_name) >= CRYPTO_MAX_ALG_NAME || + snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME, + "rfc4106(%s)", alg->cra_driver_name) >= + CRYPTO_MAX_ALG_NAME) + goto out_drop_alg; + + inst->alg.cra_flags = CRYPTO_ALG_TYPE_AEAD; + inst->alg.cra_flags |= alg->cra_flags & CRYPTO_ALG_ASYNC; + inst->alg.cra_priority = alg->cra_priority; + inst->alg.cra_blocksize = 1; + inst->alg.cra_alignmask = alg->cra_alignmask; + inst->alg.cra_type = &crypto_nivaead_type; + + inst->alg.cra_aead.ivsize = 8; + inst->alg.cra_aead.maxauthsize = 16; + + inst->alg.cra_ctxsize = sizeof(struct crypto_rfc4106_ctx); + + inst->alg.cra_init = crypto_rfc4106_init_tfm; + inst->alg.cra_exit = crypto_rfc4106_exit_tfm; + + inst->alg.cra_aead.setkey = crypto_rfc4106_setkey; + inst->alg.cra_aead.setauthsize = crypto_rfc4106_setauthsize; + inst->alg.cra_aead.encrypt = crypto_rfc4106_encrypt; + inst->alg.cra_aead.decrypt = crypto_rfc4106_decrypt; + + inst->alg.cra_aead.geniv = "seqiv"; + +out: + return inst; + +out_drop_alg: + crypto_drop_aead(spawn); +out_free_inst: + kfree(inst); + inst = ERR_PTR(err); + goto out; +} + +static void crypto_rfc4106_free(struct crypto_instance *inst) +{ + crypto_drop_spawn(crypto_instance_ctx(inst)); + kfree(inst); +} + +static struct crypto_template crypto_rfc4106_tmpl = { + .name = "rfc4106", + .alloc = crypto_rfc4106_alloc, + .free = crypto_rfc4106_free, + .module = THIS_MODULE, +}; + +static inline struct crypto_rfc4543_req_ctx *crypto_rfc4543_reqctx( + struct aead_request *req) +{ + unsigned long align = crypto_aead_alignmask(crypto_aead_reqtfm(req)); + + return (void *)PTR_ALIGN((u8 *)aead_request_ctx(req), align + 1); +} + +static int crypto_rfc4543_setkey(struct crypto_aead *parent, const u8 *key, + unsigned int keylen) +{ + struct crypto_rfc4543_ctx *ctx = crypto_aead_ctx(parent); + struct crypto_aead *child = ctx->child; + int err; + + if (keylen < 4) + return -EINVAL; + + keylen -= 4; + memcpy(ctx->nonce, key + keylen, 4); + + crypto_aead_clear_flags(child, CRYPTO_TFM_REQ_MASK); + crypto_aead_set_flags(child, crypto_aead_get_flags(parent) & + CRYPTO_TFM_REQ_MASK); + err = crypto_aead_setkey(child, key, keylen); + crypto_aead_set_flags(parent, crypto_aead_get_flags(child) & + CRYPTO_TFM_RES_MASK); + + return err; +} + +static int crypto_rfc4543_setauthsize(struct crypto_aead *parent, + unsigned int authsize) +{ + struct crypto_rfc4543_ctx *ctx = crypto_aead_ctx(parent); + + if (authsize != 16) + return -EINVAL; + + return crypto_aead_setauthsize(ctx->child, authsize); +} + +static struct aead_request *crypto_rfc4543_crypt(struct aead_request *req, + int enc) +{ + struct crypto_aead *aead = crypto_aead_reqtfm(req); + struct crypto_rfc4543_ctx *ctx = crypto_aead_ctx(aead); + struct crypto_rfc4543_req_ctx *rctx = 
crypto_rfc4543_reqctx(req); + struct aead_request *subreq = &rctx->subreq; + struct scatterlist *dst = req->dst; + struct scatterlist *cipher = rctx->cipher; + struct scatterlist *payload = rctx->payload; + struct scatterlist *assoc = rctx->assoc; + unsigned int authsize = crypto_aead_authsize(aead); + unsigned int assoclen = req->assoclen; + struct page *dstp; + u8 *vdst; + u8 *iv = PTR_ALIGN((u8 *)(rctx + 1) + crypto_aead_reqsize(ctx->child), + crypto_aead_alignmask(ctx->child) + 1); + + memcpy(iv, ctx->nonce, 4); + memcpy(iv + 4, req->iv, 8); + + /* construct cipher/plaintext */ + if (enc) + memset(rctx->auth_tag, 0, authsize); + else + scatterwalk_map_and_copy(rctx->auth_tag, dst, + req->cryptlen - authsize, + authsize, 0); + + sg_init_one(cipher, rctx->auth_tag, authsize); + + /* construct the aad */ + dstp = sg_page(dst); + vdst = PageHighMem(dstp) ? NULL : page_address(dstp) + dst->offset; + + sg_init_table(payload, 2); + sg_set_buf(payload, req->iv, 8); + scatterwalk_crypto_chain(payload, dst, vdst == req->iv + 8, 2); + assoclen += 8 + req->cryptlen - (enc ? 0 : authsize); + + sg_init_table(assoc, 2); + sg_set_page(assoc, sg_page(req->assoc), req->assoc->length, + req->assoc->offset); + scatterwalk_crypto_chain(assoc, payload, 0, 2); + + aead_request_set_tfm(subreq, ctx->child); + aead_request_set_callback(subreq, req->base.flags, req->base.complete, + req->base.data); + aead_request_set_crypt(subreq, cipher, cipher, enc ? 0 : authsize, iv); + aead_request_set_assoc(subreq, assoc, assoclen); + + return subreq; +} + +static int crypto_rfc4543_encrypt(struct aead_request *req) +{ + struct crypto_aead *aead = crypto_aead_reqtfm(req); + struct crypto_rfc4543_req_ctx *rctx = crypto_rfc4543_reqctx(req); + struct aead_request *subreq; + int err; + + subreq = crypto_rfc4543_crypt(req, 1); + err = crypto_aead_encrypt(subreq); + if (err) + return err; + + scatterwalk_map_and_copy(rctx->auth_tag, req->dst, req->cryptlen, + crypto_aead_authsize(aead), 1); + + return 0; +} + +static int crypto_rfc4543_decrypt(struct aead_request *req) +{ + req = crypto_rfc4543_crypt(req, 0); + + return crypto_aead_decrypt(req); +} + +static int crypto_rfc4543_init_tfm(struct crypto_tfm *tfm) +{ + struct crypto_instance *inst = (void *)tfm->__crt_alg; + struct crypto_aead_spawn *spawn = crypto_instance_ctx(inst); + struct crypto_rfc4543_ctx *ctx = crypto_tfm_ctx(tfm); + struct crypto_aead *aead; + unsigned long align; + + aead = crypto_spawn_aead(spawn); + if (IS_ERR(aead)) + return PTR_ERR(aead); + + ctx->child = aead; + + align = crypto_aead_alignmask(aead); + align &= ~(crypto_tfm_ctx_alignment() - 1); + tfm->crt_aead.reqsize = sizeof(struct crypto_rfc4543_req_ctx) + + ALIGN(crypto_aead_reqsize(aead), + crypto_tfm_ctx_alignment()) + + align + 16; + + return 0; +} + +static void crypto_rfc4543_exit_tfm(struct crypto_tfm *tfm) +{ + struct crypto_rfc4543_ctx *ctx = crypto_tfm_ctx(tfm); + + crypto_free_aead(ctx->child); +} + +static struct crypto_instance *crypto_rfc4543_alloc(struct rtattr **tb) +{ + struct crypto_attr_type *algt; + struct crypto_instance *inst; + struct crypto_aead_spawn *spawn; + struct crypto_alg *alg; + const char *ccm_name; + int err; + + algt = crypto_get_attr_type(tb); + err = PTR_ERR(algt); + if (IS_ERR(algt)) + return ERR_PTR(err); + + if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask) + return ERR_PTR(-EINVAL); + + ccm_name = crypto_attr_alg_name(tb[1]); + err = PTR_ERR(ccm_name); + if (IS_ERR(ccm_name)) + return ERR_PTR(err); + + inst = kzalloc(sizeof(*inst) + sizeof(*spawn), 
GFP_KERNEL); + if (!inst) + return ERR_PTR(-ENOMEM); + + spawn = crypto_instance_ctx(inst); + crypto_set_aead_spawn(spawn, inst); + err = crypto_grab_aead(spawn, ccm_name, 0, + crypto_requires_sync(algt->type, algt->mask)); + if (err) + goto out_free_inst; + + alg = crypto_aead_spawn_alg(spawn); + + err = -EINVAL; + + /* We only support 16-byte blocks. */ + if (alg->cra_aead.ivsize != 16) + goto out_drop_alg; + + /* Not a stream cipher? */ + if (alg->cra_blocksize != 1) + goto out_drop_alg; + + err = -ENAMETOOLONG; + if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME, + "rfc4543(%s)", alg->cra_name) >= CRYPTO_MAX_ALG_NAME || + snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME, + "rfc4543(%s)", alg->cra_driver_name) >= + CRYPTO_MAX_ALG_NAME) + goto out_drop_alg; + + inst->alg.cra_flags = CRYPTO_ALG_TYPE_AEAD; + inst->alg.cra_flags |= alg->cra_flags & CRYPTO_ALG_ASYNC; + inst->alg.cra_priority = alg->cra_priority; + inst->alg.cra_blocksize = 1; + inst->alg.cra_alignmask = alg->cra_alignmask; + inst->alg.cra_type = &crypto_nivaead_type; + + inst->alg.cra_aead.ivsize = 8; + inst->alg.cra_aead.maxauthsize = 16; + + inst->alg.cra_ctxsize = sizeof(struct crypto_rfc4543_ctx); + + inst->alg.cra_init = crypto_rfc4543_init_tfm; + inst->alg.cra_exit = crypto_rfc4543_exit_tfm; + + inst->alg.cra_aead.setkey = crypto_rfc4543_setkey; + inst->alg.cra_aead.setauthsize = crypto_rfc4543_setauthsize; + inst->alg.cra_aead.encrypt = crypto_rfc4543_encrypt; + inst->alg.cra_aead.decrypt = crypto_rfc4543_decrypt; + + inst->alg.cra_aead.geniv = "seqiv"; + +out: + return inst; + +out_drop_alg: + crypto_drop_aead(spawn); +out_free_inst: + kfree(inst); + inst = ERR_PTR(err); + goto out; +} + +static void crypto_rfc4543_free(struct crypto_instance *inst) +{ + crypto_drop_spawn(crypto_instance_ctx(inst)); + kfree(inst); +} + +static struct crypto_template crypto_rfc4543_tmpl = { + .name = "rfc4543", + .alloc = crypto_rfc4543_alloc, + .free = crypto_rfc4543_free, + .module = THIS_MODULE, +}; + +static int __init crypto_gcm_module_init(void) +{ + int err; + + gcm_zeroes = kzalloc(16, GFP_KERNEL); + if (!gcm_zeroes) + return -ENOMEM; + + err = crypto_register_template(&crypto_gcm_base_tmpl); + if (err) + goto out; + + err = crypto_register_template(&crypto_gcm_tmpl); + if (err) + goto out_undo_base; + + err = crypto_register_template(&crypto_rfc4106_tmpl); + if (err) + goto out_undo_gcm; + + err = crypto_register_template(&crypto_rfc4543_tmpl); + if (err) + goto out_undo_rfc4106; + + return 0; + +out_undo_rfc4106: + crypto_unregister_template(&crypto_rfc4106_tmpl); +out_undo_gcm: + crypto_unregister_template(&crypto_gcm_tmpl); +out_undo_base: + crypto_unregister_template(&crypto_gcm_base_tmpl); +out: + kfree(gcm_zeroes); + return err; +} + +static void __exit crypto_gcm_module_exit(void) +{ + kfree(gcm_zeroes); + crypto_unregister_template(&crypto_rfc4543_tmpl); + crypto_unregister_template(&crypto_rfc4106_tmpl); + crypto_unregister_template(&crypto_gcm_tmpl); + crypto_unregister_template(&crypto_gcm_base_tmpl); +} + +module_init(crypto_gcm_module_init); +module_exit(crypto_gcm_module_exit); + +MODULE_LICENSE("GPL"); +MODULE_DESCRIPTION("Galois/Counter Mode"); +MODULE_AUTHOR("Mikko Herranen <mh1@iki.fi>"); +MODULE_ALIAS("gcm_base"); +MODULE_ALIAS("rfc4106"); +MODULE_ALIAS("rfc4543"); diff --git a/crypto/gf128mul.c b/crypto/gf128mul.c new file mode 100644 index 00000000..5276607c --- /dev/null +++ b/crypto/gf128mul.c @@ -0,0 +1,477 @@ +/* gf128mul.c - GF(2^128) multiplication functions + * + * Copyright (c) 2003, Dr 
Brian Gladman, Worcester, UK. + * Copyright (c) 2006, Rik Snel <rsnel@cube.dyndns.org> + * + * Based on Dr Brian Gladman's (GPL'd) work published at + * http://gladman.plushost.co.uk/oldsite/cryptography_technology/index.php + * See the original copyright notice below. + * + * This program is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License as published by the Free + * Software Foundation; either version 2 of the License, or (at your option) + * any later version. + */ + +/* + --------------------------------------------------------------------------- + Copyright (c) 2003, Dr Brian Gladman, Worcester, UK. All rights reserved. + + LICENSE TERMS + + The free distribution and use of this software in both source and binary + form is allowed (with or without changes) provided that: + + 1. distributions of this source code include the above copyright + notice, this list of conditions and the following disclaimer; + + 2. distributions in binary form include the above copyright + notice, this list of conditions and the following disclaimer + in the documentation and/or other associated materials; + + 3. the copyright holder's name is not used to endorse products + built using this software without specific written permission. + + ALTERNATIVELY, provided that this notice is retained in full, this product + may be distributed under the terms of the GNU General Public License (GPL), + in which case the provisions of the GPL apply INSTEAD OF those given above. + + DISCLAIMER + + This software is provided 'as is' with no explicit or implied warranties + in respect of its properties, including, but not limited to, correctness + and/or fitness for purpose. + --------------------------------------------------------------------------- + Issue 31/01/2006 + + This file provides fast multiplication in GF(128) as required by several + cryptographic authentication modes +*/ + +#include <crypto/gf128mul.h> +#include <linux/kernel.h> +#include <linux/module.h> +#include <linux/slab.h> + +#define gf128mul_dat(q) { \ + q(0x00), q(0x01), q(0x02), q(0x03), q(0x04), q(0x05), q(0x06), q(0x07),\ + q(0x08), q(0x09), q(0x0a), q(0x0b), q(0x0c), q(0x0d), q(0x0e), q(0x0f),\ + q(0x10), q(0x11), q(0x12), q(0x13), q(0x14), q(0x15), q(0x16), q(0x17),\ + q(0x18), q(0x19), q(0x1a), q(0x1b), q(0x1c), q(0x1d), q(0x1e), q(0x1f),\ + q(0x20), q(0x21), q(0x22), q(0x23), q(0x24), q(0x25), q(0x26), q(0x27),\ + q(0x28), q(0x29), q(0x2a), q(0x2b), q(0x2c), q(0x2d), q(0x2e), q(0x2f),\ + q(0x30), q(0x31), q(0x32), q(0x33), q(0x34), q(0x35), q(0x36), q(0x37),\ + q(0x38), q(0x39), q(0x3a), q(0x3b), q(0x3c), q(0x3d), q(0x3e), q(0x3f),\ + q(0x40), q(0x41), q(0x42), q(0x43), q(0x44), q(0x45), q(0x46), q(0x47),\ + q(0x48), q(0x49), q(0x4a), q(0x4b), q(0x4c), q(0x4d), q(0x4e), q(0x4f),\ + q(0x50), q(0x51), q(0x52), q(0x53), q(0x54), q(0x55), q(0x56), q(0x57),\ + q(0x58), q(0x59), q(0x5a), q(0x5b), q(0x5c), q(0x5d), q(0x5e), q(0x5f),\ + q(0x60), q(0x61), q(0x62), q(0x63), q(0x64), q(0x65), q(0x66), q(0x67),\ + q(0x68), q(0x69), q(0x6a), q(0x6b), q(0x6c), q(0x6d), q(0x6e), q(0x6f),\ + q(0x70), q(0x71), q(0x72), q(0x73), q(0x74), q(0x75), q(0x76), q(0x77),\ + q(0x78), q(0x79), q(0x7a), q(0x7b), q(0x7c), q(0x7d), q(0x7e), q(0x7f),\ + q(0x80), q(0x81), q(0x82), q(0x83), q(0x84), q(0x85), q(0x86), q(0x87),\ + q(0x88), q(0x89), q(0x8a), q(0x8b), q(0x8c), q(0x8d), q(0x8e), q(0x8f),\ + q(0x90), q(0x91), q(0x92), q(0x93), q(0x94), q(0x95), q(0x96), q(0x97),\ + q(0x98), q(0x99), q(0x9a), q(0x9b), 
q(0x9c), q(0x9d), q(0x9e), q(0x9f),\ + q(0xa0), q(0xa1), q(0xa2), q(0xa3), q(0xa4), q(0xa5), q(0xa6), q(0xa7),\ + q(0xa8), q(0xa9), q(0xaa), q(0xab), q(0xac), q(0xad), q(0xae), q(0xaf),\ + q(0xb0), q(0xb1), q(0xb2), q(0xb3), q(0xb4), q(0xb5), q(0xb6), q(0xb7),\ + q(0xb8), q(0xb9), q(0xba), q(0xbb), q(0xbc), q(0xbd), q(0xbe), q(0xbf),\ + q(0xc0), q(0xc1), q(0xc2), q(0xc3), q(0xc4), q(0xc5), q(0xc6), q(0xc7),\ + q(0xc8), q(0xc9), q(0xca), q(0xcb), q(0xcc), q(0xcd), q(0xce), q(0xcf),\ + q(0xd0), q(0xd1), q(0xd2), q(0xd3), q(0xd4), q(0xd5), q(0xd6), q(0xd7),\ + q(0xd8), q(0xd9), q(0xda), q(0xdb), q(0xdc), q(0xdd), q(0xde), q(0xdf),\ + q(0xe0), q(0xe1), q(0xe2), q(0xe3), q(0xe4), q(0xe5), q(0xe6), q(0xe7),\ + q(0xe8), q(0xe9), q(0xea), q(0xeb), q(0xec), q(0xed), q(0xee), q(0xef),\ + q(0xf0), q(0xf1), q(0xf2), q(0xf3), q(0xf4), q(0xf5), q(0xf6), q(0xf7),\ + q(0xf8), q(0xf9), q(0xfa), q(0xfb), q(0xfc), q(0xfd), q(0xfe), q(0xff) \ +} + +/* Given the value i in 0..255 as the byte overflow when a field element + in GHASH is multiplied by x^8, this function will return the values that + are generated in the lo 16-bit word of the field value by applying the + modular polynomial. The values lo_byte and hi_byte are returned via the + macro xp_fun(lo_byte, hi_byte) so that the values can be assembled into + memory as required by a suitable definition of this macro operating on + the table above +*/ + +#define xx(p, q) 0x##p##q + +#define xda_bbe(i) ( \ + (i & 0x80 ? xx(43, 80) : 0) ^ (i & 0x40 ? xx(21, c0) : 0) ^ \ + (i & 0x20 ? xx(10, e0) : 0) ^ (i & 0x10 ? xx(08, 70) : 0) ^ \ + (i & 0x08 ? xx(04, 38) : 0) ^ (i & 0x04 ? xx(02, 1c) : 0) ^ \ + (i & 0x02 ? xx(01, 0e) : 0) ^ (i & 0x01 ? xx(00, 87) : 0) \ +) + +#define xda_lle(i) ( \ + (i & 0x80 ? xx(e1, 00) : 0) ^ (i & 0x40 ? xx(70, 80) : 0) ^ \ + (i & 0x20 ? xx(38, 40) : 0) ^ (i & 0x10 ? xx(1c, 20) : 0) ^ \ + (i & 0x08 ? xx(0e, 10) : 0) ^ (i & 0x04 ? xx(07, 08) : 0) ^ \ + (i & 0x02 ? xx(03, 84) : 0) ^ (i & 0x01 ? xx(01, c2) : 0) \ +) + +static const u16 gf128mul_table_lle[256] = gf128mul_dat(xda_lle); +static const u16 gf128mul_table_bbe[256] = gf128mul_dat(xda_bbe); + +/* These functions multiply a field element by x, by x^4 and by x^8 + * in the polynomial field representation. It uses 32-bit word operations + * to gain speed but compensates for machine endianess and hence works + * correctly on both styles of machine. 
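
For reference, multiplying by x in the bbe convention is a one-bit left shift of the 128-bit value with the overflow bit folded back through the field polynomial x^128 + x^7 + x^2 + x + 1; that is where the 0x87 in xda_bbe() comes from (the lle tables encode the same polynomial in GHASH's bit-reflected form, hence the 0xe1 constants). A byte-wise, in-place sketch of the operation that gf128mul_x_bbe() below performs via the table; the helper name is illustrative only, u8 being the usual <linux/types.h> type:

        static void mul_x_bbe_bytewise(u8 a[16])
        {
                int i;
                int carry = a[0] & 0x80;        /* coefficient of x^127 */

                for (i = 0; i < 15; i++)
                        a[i] = (a[i] << 1) | (a[i + 1] >> 7);
                a[15] <<= 1;
                if (carry)
                        a[15] ^= 0x87;          /* x^7 + x^2 + x + 1 */
        }
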
+ */ + +static void gf128mul_x_lle(be128 *r, const be128 *x) +{ + u64 a = be64_to_cpu(x->a); + u64 b = be64_to_cpu(x->b); + u64 _tt = gf128mul_table_lle[(b << 7) & 0xff]; + + r->b = cpu_to_be64((b >> 1) | (a << 63)); + r->a = cpu_to_be64((a >> 1) ^ (_tt << 48)); +} + +static void gf128mul_x_bbe(be128 *r, const be128 *x) +{ + u64 a = be64_to_cpu(x->a); + u64 b = be64_to_cpu(x->b); + u64 _tt = gf128mul_table_bbe[a >> 63]; + + r->a = cpu_to_be64((a << 1) | (b >> 63)); + r->b = cpu_to_be64((b << 1) ^ _tt); +} + +void gf128mul_x_ble(be128 *r, const be128 *x) +{ + u64 a = le64_to_cpu(x->a); + u64 b = le64_to_cpu(x->b); + u64 _tt = gf128mul_table_bbe[b >> 63]; + + r->a = cpu_to_le64((a << 1) ^ _tt); + r->b = cpu_to_le64((b << 1) | (a >> 63)); +} +EXPORT_SYMBOL(gf128mul_x_ble); + +static void gf128mul_x8_lle(be128 *x) +{ + u64 a = be64_to_cpu(x->a); + u64 b = be64_to_cpu(x->b); + u64 _tt = gf128mul_table_lle[b & 0xff]; + + x->b = cpu_to_be64((b >> 8) | (a << 56)); + x->a = cpu_to_be64((a >> 8) ^ (_tt << 48)); +} + +static void gf128mul_x8_bbe(be128 *x) +{ + u64 a = be64_to_cpu(x->a); + u64 b = be64_to_cpu(x->b); + u64 _tt = gf128mul_table_bbe[a >> 56]; + + x->a = cpu_to_be64((a << 8) | (b >> 56)); + x->b = cpu_to_be64((b << 8) ^ _tt); +} + +void gf128mul_lle(be128 *r, const be128 *b) +{ + be128 p[8]; + int i; + + p[0] = *r; + for (i = 0; i < 7; ++i) + gf128mul_x_lle(&p[i + 1], &p[i]); + + memset(r, 0, sizeof(*r)); + for (i = 0;;) { + u8 ch = ((u8 *)b)[15 - i]; + + if (ch & 0x80) + be128_xor(r, r, &p[0]); + if (ch & 0x40) + be128_xor(r, r, &p[1]); + if (ch & 0x20) + be128_xor(r, r, &p[2]); + if (ch & 0x10) + be128_xor(r, r, &p[3]); + if (ch & 0x08) + be128_xor(r, r, &p[4]); + if (ch & 0x04) + be128_xor(r, r, &p[5]); + if (ch & 0x02) + be128_xor(r, r, &p[6]); + if (ch & 0x01) + be128_xor(r, r, &p[7]); + + if (++i >= 16) + break; + + gf128mul_x8_lle(r); + } +} +EXPORT_SYMBOL(gf128mul_lle); + +void gf128mul_bbe(be128 *r, const be128 *b) +{ + be128 p[8]; + int i; + + p[0] = *r; + for (i = 0; i < 7; ++i) + gf128mul_x_bbe(&p[i + 1], &p[i]); + + memset(r, 0, sizeof(*r)); + for (i = 0;;) { + u8 ch = ((u8 *)b)[i]; + + if (ch & 0x80) + be128_xor(r, r, &p[7]); + if (ch & 0x40) + be128_xor(r, r, &p[6]); + if (ch & 0x20) + be128_xor(r, r, &p[5]); + if (ch & 0x10) + be128_xor(r, r, &p[4]); + if (ch & 0x08) + be128_xor(r, r, &p[3]); + if (ch & 0x04) + be128_xor(r, r, &p[2]); + if (ch & 0x02) + be128_xor(r, r, &p[1]); + if (ch & 0x01) + be128_xor(r, r, &p[0]); + + if (++i >= 16) + break; + + gf128mul_x8_bbe(r); + } +} +EXPORT_SYMBOL(gf128mul_bbe); + +/* This version uses 64k bytes of table space. + A 16 byte buffer has to be multiplied by a 16 byte key + value in GF(128). If we consider a GF(128) value in + the buffer's lowest byte, we can construct a table of + the 256 16 byte values that result from the 256 values + of this byte. This requires 4096 bytes. But we also + need tables for each of the 16 higher bytes in the + buffer as well, which makes 64 kbytes in total. +*/ +/* additional explanation + * t[0][BYTE] contains g*BYTE + * t[1][BYTE] contains g*x^8*BYTE + * .. 
+ * t[15][BYTE] contains g*x^120*BYTE */ +struct gf128mul_64k *gf128mul_init_64k_lle(const be128 *g) +{ + struct gf128mul_64k *t; + int i, j, k; + + t = kzalloc(sizeof(*t), GFP_KERNEL); + if (!t) + goto out; + + for (i = 0; i < 16; i++) { + t->t[i] = kzalloc(sizeof(*t->t[i]), GFP_KERNEL); + if (!t->t[i]) { + gf128mul_free_64k(t); + t = NULL; + goto out; + } + } + + t->t[0]->t[128] = *g; + for (j = 64; j > 0; j >>= 1) + gf128mul_x_lle(&t->t[0]->t[j], &t->t[0]->t[j + j]); + + for (i = 0;;) { + for (j = 2; j < 256; j += j) + for (k = 1; k < j; ++k) + be128_xor(&t->t[i]->t[j + k], + &t->t[i]->t[j], &t->t[i]->t[k]); + + if (++i >= 16) + break; + + for (j = 128; j > 0; j >>= 1) { + t->t[i]->t[j] = t->t[i - 1]->t[j]; + gf128mul_x8_lle(&t->t[i]->t[j]); + } + } + +out: + return t; +} +EXPORT_SYMBOL(gf128mul_init_64k_lle); + +struct gf128mul_64k *gf128mul_init_64k_bbe(const be128 *g) +{ + struct gf128mul_64k *t; + int i, j, k; + + t = kzalloc(sizeof(*t), GFP_KERNEL); + if (!t) + goto out; + + for (i = 0; i < 16; i++) { + t->t[i] = kzalloc(sizeof(*t->t[i]), GFP_KERNEL); + if (!t->t[i]) { + gf128mul_free_64k(t); + t = NULL; + goto out; + } + } + + t->t[0]->t[1] = *g; + for (j = 1; j <= 64; j <<= 1) + gf128mul_x_bbe(&t->t[0]->t[j + j], &t->t[0]->t[j]); + + for (i = 0;;) { + for (j = 2; j < 256; j += j) + for (k = 1; k < j; ++k) + be128_xor(&t->t[i]->t[j + k], + &t->t[i]->t[j], &t->t[i]->t[k]); + + if (++i >= 16) + break; + + for (j = 128; j > 0; j >>= 1) { + t->t[i]->t[j] = t->t[i - 1]->t[j]; + gf128mul_x8_bbe(&t->t[i]->t[j]); + } + } + +out: + return t; +} +EXPORT_SYMBOL(gf128mul_init_64k_bbe); + +void gf128mul_free_64k(struct gf128mul_64k *t) +{ + int i; + + for (i = 0; i < 16; i++) + kfree(t->t[i]); + kfree(t); +} +EXPORT_SYMBOL(gf128mul_free_64k); + +void gf128mul_64k_lle(be128 *a, struct gf128mul_64k *t) +{ + u8 *ap = (u8 *)a; + be128 r[1]; + int i; + + *r = t->t[0]->t[ap[0]]; + for (i = 1; i < 16; ++i) + be128_xor(r, r, &t->t[i]->t[ap[i]]); + *a = *r; +} +EXPORT_SYMBOL(gf128mul_64k_lle); + +void gf128mul_64k_bbe(be128 *a, struct gf128mul_64k *t) +{ + u8 *ap = (u8 *)a; + be128 r[1]; + int i; + + *r = t->t[0]->t[ap[15]]; + for (i = 1; i < 16; ++i) + be128_xor(r, r, &t->t[i]->t[ap[15 - i]]); + *a = *r; +} +EXPORT_SYMBOL(gf128mul_64k_bbe); + +/* This version uses 4k bytes of table space. + A 16 byte buffer has to be multiplied by a 16 byte key + value in GF(128). If we consider a GF(128) value in a + single byte, we can construct a table of the 256 16 byte + values that result from the 256 values of this byte. + This requires 4096 bytes. If we take the highest byte in + the buffer and use this table to get the result, we then + have to multiply by x^120 to get the final value. For the + next highest byte the result has to be multiplied by x^112 + and so on. But we can do this by accumulating the result + in an accumulator starting with the result for the top + byte. We repeatedly multiply the accumulator value by + x^8 and then add in (i.e. xor) the 16 bytes of the next + lower byte in the buffer, stopping when we reach the + lowest byte. This requires a 4096 byte table. 
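
Spelled out, with t[b] denoting the precomputed product b*g and '+' meaning XOR (addition in GF(2^128)), gf128mul_4k_lle() below evaluates the product Horner-style over the buffer bytes a15..a0:

        a*g = ((...((t[a15])*x^8 + t[a14])*x^8 + ...)*x^8 + t[a1])*x^8 + t[a0]
            = t[a15]*x^120 + t[a14]*x^112 + ... + t[a1]*x^8 + t[a0]

gf128mul_4k_bbe() does the same walk with the byte order reversed (a0 first), matching the bbe bit ordering.
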
+*/ +struct gf128mul_4k *gf128mul_init_4k_lle(const be128 *g) +{ + struct gf128mul_4k *t; + int j, k; + + t = kzalloc(sizeof(*t), GFP_KERNEL); + if (!t) + goto out; + + t->t[128] = *g; + for (j = 64; j > 0; j >>= 1) + gf128mul_x_lle(&t->t[j], &t->t[j+j]); + + for (j = 2; j < 256; j += j) + for (k = 1; k < j; ++k) + be128_xor(&t->t[j + k], &t->t[j], &t->t[k]); + +out: + return t; +} +EXPORT_SYMBOL(gf128mul_init_4k_lle); + +struct gf128mul_4k *gf128mul_init_4k_bbe(const be128 *g) +{ + struct gf128mul_4k *t; + int j, k; + + t = kzalloc(sizeof(*t), GFP_KERNEL); + if (!t) + goto out; + + t->t[1] = *g; + for (j = 1; j <= 64; j <<= 1) + gf128mul_x_bbe(&t->t[j + j], &t->t[j]); + + for (j = 2; j < 256; j += j) + for (k = 1; k < j; ++k) + be128_xor(&t->t[j + k], &t->t[j], &t->t[k]); + +out: + return t; +} +EXPORT_SYMBOL(gf128mul_init_4k_bbe); + +void gf128mul_4k_lle(be128 *a, struct gf128mul_4k *t) +{ + u8 *ap = (u8 *)a; + be128 r[1]; + int i = 15; + + *r = t->t[ap[15]]; + while (i--) { + gf128mul_x8_lle(r); + be128_xor(r, r, &t->t[ap[i]]); + } + *a = *r; +} +EXPORT_SYMBOL(gf128mul_4k_lle); + +void gf128mul_4k_bbe(be128 *a, struct gf128mul_4k *t) +{ + u8 *ap = (u8 *)a; + be128 r[1]; + int i = 0; + + *r = t->t[ap[0]]; + while (++i < 16) { + gf128mul_x8_bbe(r); + be128_xor(r, r, &t->t[ap[i]]); + } + *a = *r; +} +EXPORT_SYMBOL(gf128mul_4k_bbe); + +MODULE_LICENSE("GPL"); +MODULE_DESCRIPTION("Functions for multiplying elements of GF(2^128)"); diff --git a/crypto/ghash-generic.c b/crypto/ghash-generic.c new file mode 100644 index 00000000..7835b8fc --- /dev/null +++ b/crypto/ghash-generic.c @@ -0,0 +1,176 @@ +/* + * GHASH: digest algorithm for GCM (Galois/Counter Mode). + * + * Copyright (c) 2007 Nokia Siemens Networks - Mikko Herranen <mh1@iki.fi> + * Copyright (c) 2009 Intel Corp. + * Author: Huang Ying <ying.huang@intel.com> + * + * The algorithm implementation is copied from gcm.c. + * + * This program is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License version 2 as published + * by the Free Software Foundation. 
+ */ + +#include <crypto/algapi.h> +#include <crypto/gf128mul.h> +#include <crypto/internal/hash.h> +#include <linux/crypto.h> +#include <linux/init.h> +#include <linux/kernel.h> +#include <linux/module.h> + +#define GHASH_BLOCK_SIZE 16 +#define GHASH_DIGEST_SIZE 16 + +struct ghash_ctx { + struct gf128mul_4k *gf128; +}; + +struct ghash_desc_ctx { + u8 buffer[GHASH_BLOCK_SIZE]; + u32 bytes; +}; + +static int ghash_init(struct shash_desc *desc) +{ + struct ghash_desc_ctx *dctx = shash_desc_ctx(desc); + + memset(dctx, 0, sizeof(*dctx)); + + return 0; +} + +static int ghash_setkey(struct crypto_shash *tfm, + const u8 *key, unsigned int keylen) +{ + struct ghash_ctx *ctx = crypto_shash_ctx(tfm); + + if (keylen != GHASH_BLOCK_SIZE) { + crypto_shash_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN); + return -EINVAL; + } + + if (ctx->gf128) + gf128mul_free_4k(ctx->gf128); + ctx->gf128 = gf128mul_init_4k_lle((be128 *)key); + if (!ctx->gf128) + return -ENOMEM; + + return 0; +} + +static int ghash_update(struct shash_desc *desc, + const u8 *src, unsigned int srclen) +{ + struct ghash_desc_ctx *dctx = shash_desc_ctx(desc); + struct ghash_ctx *ctx = crypto_shash_ctx(desc->tfm); + u8 *dst = dctx->buffer; + + if (!ctx->gf128) + return -ENOKEY; + + if (dctx->bytes) { + int n = min(srclen, dctx->bytes); + u8 *pos = dst + (GHASH_BLOCK_SIZE - dctx->bytes); + + dctx->bytes -= n; + srclen -= n; + + while (n--) + *pos++ ^= *src++; + + if (!dctx->bytes) + gf128mul_4k_lle((be128 *)dst, ctx->gf128); + } + + while (srclen >= GHASH_BLOCK_SIZE) { + crypto_xor(dst, src, GHASH_BLOCK_SIZE); + gf128mul_4k_lle((be128 *)dst, ctx->gf128); + src += GHASH_BLOCK_SIZE; + srclen -= GHASH_BLOCK_SIZE; + } + + if (srclen) { + dctx->bytes = GHASH_BLOCK_SIZE - srclen; + while (srclen--) + *dst++ ^= *src++; + } + + return 0; +} + +static void ghash_flush(struct ghash_ctx *ctx, struct ghash_desc_ctx *dctx) +{ + u8 *dst = dctx->buffer; + + if (dctx->bytes) { + u8 *tmp = dst + (GHASH_BLOCK_SIZE - dctx->bytes); + + while (dctx->bytes--) + *tmp++ ^= 0; + + gf128mul_4k_lle((be128 *)dst, ctx->gf128); + } + + dctx->bytes = 0; +} + +static int ghash_final(struct shash_desc *desc, u8 *dst) +{ + struct ghash_desc_ctx *dctx = shash_desc_ctx(desc); + struct ghash_ctx *ctx = crypto_shash_ctx(desc->tfm); + u8 *buf = dctx->buffer; + + if (!ctx->gf128) + return -ENOKEY; + + ghash_flush(ctx, dctx); + memcpy(dst, buf, GHASH_BLOCK_SIZE); + + return 0; +} + +static void ghash_exit_tfm(struct crypto_tfm *tfm) +{ + struct ghash_ctx *ctx = crypto_tfm_ctx(tfm); + if (ctx->gf128) + gf128mul_free_4k(ctx->gf128); +} + +static struct shash_alg ghash_alg = { + .digestsize = GHASH_DIGEST_SIZE, + .init = ghash_init, + .update = ghash_update, + .final = ghash_final, + .setkey = ghash_setkey, + .descsize = sizeof(struct ghash_desc_ctx), + .base = { + .cra_name = "ghash", + .cra_driver_name = "ghash-generic", + .cra_priority = 100, + .cra_flags = CRYPTO_ALG_TYPE_SHASH, + .cra_blocksize = GHASH_BLOCK_SIZE, + .cra_ctxsize = sizeof(struct ghash_ctx), + .cra_module = THIS_MODULE, + .cra_list = LIST_HEAD_INIT(ghash_alg.base.cra_list), + .cra_exit = ghash_exit_tfm, + }, +}; + +static int __init ghash_mod_init(void) +{ + return crypto_register_shash(&ghash_alg); +} + +static void __exit ghash_mod_exit(void) +{ + crypto_unregister_shash(&ghash_alg); +} + +module_init(ghash_mod_init); +module_exit(ghash_mod_exit); + +MODULE_LICENSE("GPL"); +MODULE_DESCRIPTION("GHASH Message Digest Algorithm"); +MODULE_ALIAS("ghash"); diff --git a/crypto/hmac.c b/crypto/hmac.c new file mode 100644 
index 00000000..8d9544cf --- /dev/null +++ b/crypto/hmac.c @@ -0,0 +1,273 @@ +/* + * Cryptographic API. + * + * HMAC: Keyed-Hashing for Message Authentication (RFC2104). + * + * Copyright (c) 2002 James Morris <jmorris@intercode.com.au> + * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au> + * + * The HMAC implementation is derived from USAGI. + * Copyright (c) 2002 Kazunori Miyazawa <miyazawa@linux-ipv6.org> / USAGI + * + * This program is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License as published by the Free + * Software Foundation; either version 2 of the License, or (at your option) + * any later version. + * + */ + +#include <crypto/internal/hash.h> +#include <crypto/scatterwalk.h> +#include <linux/err.h> +#include <linux/init.h> +#include <linux/kernel.h> +#include <linux/module.h> +#include <linux/scatterlist.h> +#include <linux/string.h> + +struct hmac_ctx { + struct crypto_shash *hash; +}; + +static inline void *align_ptr(void *p, unsigned int align) +{ + return (void *)ALIGN((unsigned long)p, align); +} + +static inline struct hmac_ctx *hmac_ctx(struct crypto_shash *tfm) +{ + return align_ptr(crypto_shash_ctx_aligned(tfm) + + crypto_shash_statesize(tfm) * 2, + crypto_tfm_ctx_alignment()); +} + +static int hmac_setkey(struct crypto_shash *parent, + const u8 *inkey, unsigned int keylen) +{ + int bs = crypto_shash_blocksize(parent); + int ds = crypto_shash_digestsize(parent); + int ss = crypto_shash_statesize(parent); + char *ipad = crypto_shash_ctx_aligned(parent); + char *opad = ipad + ss; + struct hmac_ctx *ctx = align_ptr(opad + ss, + crypto_tfm_ctx_alignment()); + struct crypto_shash *hash = ctx->hash; + struct { + struct shash_desc shash; + char ctx[crypto_shash_descsize(hash)]; + } desc; + unsigned int i; + + desc.shash.tfm = hash; + desc.shash.flags = crypto_shash_get_flags(parent) & + CRYPTO_TFM_REQ_MAY_SLEEP; + + if (keylen > bs) { + int err; + + err = crypto_shash_digest(&desc.shash, inkey, keylen, ipad); + if (err) + return err; + + keylen = ds; + } else + memcpy(ipad, inkey, keylen); + + memset(ipad + keylen, 0, bs - keylen); + memcpy(opad, ipad, bs); + + for (i = 0; i < bs; i++) { + ipad[i] ^= 0x36; + opad[i] ^= 0x5c; + } + + return crypto_shash_init(&desc.shash) ?: + crypto_shash_update(&desc.shash, ipad, bs) ?: + crypto_shash_export(&desc.shash, ipad) ?: + crypto_shash_init(&desc.shash) ?: + crypto_shash_update(&desc.shash, opad, bs) ?: + crypto_shash_export(&desc.shash, opad); +} + +static int hmac_export(struct shash_desc *pdesc, void *out) +{ + struct shash_desc *desc = shash_desc_ctx(pdesc); + + desc->flags = pdesc->flags & CRYPTO_TFM_REQ_MAY_SLEEP; + + return crypto_shash_export(desc, out); +} + +static int hmac_import(struct shash_desc *pdesc, const void *in) +{ + struct shash_desc *desc = shash_desc_ctx(pdesc); + struct hmac_ctx *ctx = hmac_ctx(pdesc->tfm); + + desc->tfm = ctx->hash; + desc->flags = pdesc->flags & CRYPTO_TFM_REQ_MAY_SLEEP; + + return crypto_shash_import(desc, in); +} + +static int hmac_init(struct shash_desc *pdesc) +{ + return hmac_import(pdesc, crypto_shash_ctx_aligned(pdesc->tfm)); +} + +static int hmac_update(struct shash_desc *pdesc, + const u8 *data, unsigned int nbytes) +{ + struct shash_desc *desc = shash_desc_ctx(pdesc); + + desc->flags = pdesc->flags & CRYPTO_TFM_REQ_MAY_SLEEP; + + return crypto_shash_update(desc, data, nbytes); +} + +static int hmac_final(struct shash_desc *pdesc, u8 *out) +{ + struct crypto_shash *parent = pdesc->tfm; + int ds = 
crypto_shash_digestsize(parent); + int ss = crypto_shash_statesize(parent); + char *opad = crypto_shash_ctx_aligned(parent) + ss; + struct shash_desc *desc = shash_desc_ctx(pdesc); + + desc->flags = pdesc->flags & CRYPTO_TFM_REQ_MAY_SLEEP; + + return crypto_shash_final(desc, out) ?: + crypto_shash_import(desc, opad) ?: + crypto_shash_finup(desc, out, ds, out); +} + +static int hmac_finup(struct shash_desc *pdesc, const u8 *data, + unsigned int nbytes, u8 *out) +{ + + struct crypto_shash *parent = pdesc->tfm; + int ds = crypto_shash_digestsize(parent); + int ss = crypto_shash_statesize(parent); + char *opad = crypto_shash_ctx_aligned(parent) + ss; + struct shash_desc *desc = shash_desc_ctx(pdesc); + + desc->flags = pdesc->flags & CRYPTO_TFM_REQ_MAY_SLEEP; + + return crypto_shash_finup(desc, data, nbytes, out) ?: + crypto_shash_import(desc, opad) ?: + crypto_shash_finup(desc, out, ds, out); +} + +static int hmac_init_tfm(struct crypto_tfm *tfm) +{ + struct crypto_shash *parent = __crypto_shash_cast(tfm); + struct crypto_shash *hash; + struct crypto_instance *inst = (void *)tfm->__crt_alg; + struct crypto_shash_spawn *spawn = crypto_instance_ctx(inst); + struct hmac_ctx *ctx = hmac_ctx(parent); + + hash = crypto_spawn_shash(spawn); + if (IS_ERR(hash)) + return PTR_ERR(hash); + + parent->descsize = sizeof(struct shash_desc) + + crypto_shash_descsize(hash); + + ctx->hash = hash; + return 0; +} + +static void hmac_exit_tfm(struct crypto_tfm *tfm) +{ + struct hmac_ctx *ctx = hmac_ctx(__crypto_shash_cast(tfm)); + crypto_free_shash(ctx->hash); +} + +static int hmac_create(struct crypto_template *tmpl, struct rtattr **tb) +{ + struct shash_instance *inst; + struct crypto_alg *alg; + struct shash_alg *salg; + int err; + int ds; + int ss; + + err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SHASH); + if (err) + return err; + + salg = shash_attr_alg(tb[1], 0, 0); + if (IS_ERR(salg)) + return PTR_ERR(salg); + + err = -EINVAL; + ds = salg->digestsize; + ss = salg->statesize; + alg = &salg->base; + if (ds > alg->cra_blocksize || + ss < alg->cra_blocksize) + goto out_put_alg; + + inst = shash_alloc_instance("hmac", alg); + err = PTR_ERR(inst); + if (IS_ERR(inst)) + goto out_put_alg; + + err = crypto_init_shash_spawn(shash_instance_ctx(inst), salg, + shash_crypto_instance(inst)); + if (err) + goto out_free_inst; + + inst->alg.base.cra_priority = alg->cra_priority; + inst->alg.base.cra_blocksize = alg->cra_blocksize; + inst->alg.base.cra_alignmask = alg->cra_alignmask; + + ss = ALIGN(ss, alg->cra_alignmask + 1); + inst->alg.digestsize = ds; + inst->alg.statesize = ss; + + inst->alg.base.cra_ctxsize = sizeof(struct hmac_ctx) + + ALIGN(ss * 2, crypto_tfm_ctx_alignment()); + + inst->alg.base.cra_init = hmac_init_tfm; + inst->alg.base.cra_exit = hmac_exit_tfm; + + inst->alg.init = hmac_init; + inst->alg.update = hmac_update; + inst->alg.final = hmac_final; + inst->alg.finup = hmac_finup; + inst->alg.export = hmac_export; + inst->alg.import = hmac_import; + inst->alg.setkey = hmac_setkey; + + err = shash_register_instance(tmpl, inst); + if (err) { +out_free_inst: + shash_free_instance(shash_crypto_instance(inst)); + } + +out_put_alg: + crypto_mod_put(alg); + return err; +} + +static struct crypto_template hmac_tmpl = { + .name = "hmac", + .create = hmac_create, + .free = shash_free_instance, + .module = THIS_MODULE, +}; + +static int __init hmac_module_init(void) +{ + return crypto_register_template(&hmac_tmpl); +} + +static void __exit hmac_module_exit(void) +{ + crypto_unregister_template(&hmac_tmpl); +} + 
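
A minimal sketch of driving an instance of this template through the shash interface. The helper name hmac_sha256_digest and the key/data/out parameters are illustrative; it assumes a SHA-256 implementation is registered so that "hmac(sha256)" resolves, and it needs <crypto/hash.h>, <linux/slab.h> and <linux/err.h>:

        static int hmac_sha256_digest(const u8 *key, unsigned int keylen,
                                      const u8 *data, unsigned int len, u8 *out)
        {
                struct crypto_shash *tfm;
                struct shash_desc *desc;
                int err;

                tfm = crypto_alloc_shash("hmac(sha256)", 0, 0);
                if (IS_ERR(tfm))
                        return PTR_ERR(tfm);

                err = crypto_shash_setkey(tfm, key, keylen);    /* hmac_setkey() above */
                if (err)
                        goto out_free_tfm;

                desc = kzalloc(sizeof(*desc) + crypto_shash_descsize(tfm), GFP_KERNEL);
                if (!desc) {
                        err = -ENOMEM;
                        goto out_free_tfm;
                }

                desc->tfm = tfm;
                desc->flags = CRYPTO_TFM_REQ_MAY_SLEEP;

                /* One-shot digest: hmac_init() + hmac_update() + hmac_final(). */
                err = crypto_shash_digest(desc, data, len, out);
                kfree(desc);

        out_free_tfm:
                crypto_free_shash(tfm);
                return err;
        }

out must hold at least crypto_shash_digestsize(tfm) bytes (32 for SHA-256).
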
+module_init(hmac_module_init); +module_exit(hmac_module_exit); + +MODULE_LICENSE("GPL"); +MODULE_DESCRIPTION("HMAC hash algorithm"); diff --git a/crypto/internal.h b/crypto/internal.h new file mode 100644 index 00000000..b865ca1a --- /dev/null +++ b/crypto/internal.h @@ -0,0 +1,144 @@ +/* + * Cryptographic API. + * + * Copyright (c) 2002 James Morris <jmorris@intercode.com.au> + * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au> + * + * This program is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License as published by the Free + * Software Foundation; either version 2 of the License, or (at your option) + * any later version. + * + */ +#ifndef _CRYPTO_INTERNAL_H +#define _CRYPTO_INTERNAL_H + +#include <crypto/algapi.h> +#include <linux/completion.h> +#include <linux/mm.h> +#include <linux/highmem.h> +#include <linux/interrupt.h> +#include <linux/init.h> +#include <linux/list.h> +#include <linux/module.h> +#include <linux/kernel.h> +#include <linux/notifier.h> +#include <linux/rwsem.h> +#include <linux/slab.h> +#include <linux/fips.h> + +/* Crypto notification events. */ +enum { + CRYPTO_MSG_ALG_REQUEST, + CRYPTO_MSG_ALG_REGISTER, + CRYPTO_MSG_ALG_UNREGISTER, + CRYPTO_MSG_TMPL_REGISTER, + CRYPTO_MSG_TMPL_UNREGISTER, +}; + +struct crypto_instance; +struct crypto_template; + +struct crypto_larval { + struct crypto_alg alg; + struct crypto_alg *adult; + struct completion completion; + u32 mask; +}; + +extern struct list_head crypto_alg_list; +extern struct rw_semaphore crypto_alg_sem; +extern struct blocking_notifier_head crypto_chain; + +#ifdef CONFIG_PROC_FS +void __init crypto_init_proc(void); +void __exit crypto_exit_proc(void); +#else +static inline void crypto_init_proc(void) +{ } +static inline void crypto_exit_proc(void) +{ } +#endif + +static inline unsigned int crypto_cipher_ctxsize(struct crypto_alg *alg) +{ + return alg->cra_ctxsize; +} + +static inline unsigned int crypto_compress_ctxsize(struct crypto_alg *alg) +{ + return alg->cra_ctxsize; +} + +struct crypto_alg *crypto_mod_get(struct crypto_alg *alg); +struct crypto_alg *crypto_alg_lookup(const char *name, u32 type, u32 mask); +struct crypto_alg *crypto_alg_mod_lookup(const char *name, u32 type, u32 mask); + +int crypto_init_cipher_ops(struct crypto_tfm *tfm); +int crypto_init_compress_ops(struct crypto_tfm *tfm); + +void crypto_exit_cipher_ops(struct crypto_tfm *tfm); +void crypto_exit_compress_ops(struct crypto_tfm *tfm); + +struct crypto_larval *crypto_larval_alloc(const char *name, u32 type, u32 mask); +void crypto_larval_kill(struct crypto_alg *alg); +struct crypto_alg *crypto_larval_lookup(const char *name, u32 type, u32 mask); +void crypto_larval_error(const char *name, u32 type, u32 mask); +void crypto_alg_tested(const char *name, int err); + +void crypto_remove_spawns(struct crypto_alg *alg, struct list_head *list, + struct crypto_alg *nalg); +void crypto_remove_final(struct list_head *list); +void crypto_shoot_alg(struct crypto_alg *alg); +struct crypto_tfm *__crypto_alloc_tfm(struct crypto_alg *alg, u32 type, + u32 mask); +void *crypto_create_tfm(struct crypto_alg *alg, + const struct crypto_type *frontend); +struct crypto_alg *crypto_find_alg(const char *alg_name, + const struct crypto_type *frontend, + u32 type, u32 mask); +void *crypto_alloc_tfm(const char *alg_name, + const struct crypto_type *frontend, u32 type, u32 mask); + +int crypto_register_notifier(struct notifier_block *nb); +int crypto_unregister_notifier(struct notifier_block *nb); 
+int crypto_probing_notify(unsigned long val, void *v); + +static inline void crypto_alg_put(struct crypto_alg *alg) +{ + if (atomic_dec_and_test(&alg->cra_refcnt) && alg->cra_destroy) + alg->cra_destroy(alg); +} + +static inline int crypto_tmpl_get(struct crypto_template *tmpl) +{ + return try_module_get(tmpl->module); +} + +static inline void crypto_tmpl_put(struct crypto_template *tmpl) +{ + module_put(tmpl->module); +} + +static inline int crypto_is_larval(struct crypto_alg *alg) +{ + return alg->cra_flags & CRYPTO_ALG_LARVAL; +} + +static inline int crypto_is_dead(struct crypto_alg *alg) +{ + return alg->cra_flags & CRYPTO_ALG_DEAD; +} + +static inline int crypto_is_moribund(struct crypto_alg *alg) +{ + return alg->cra_flags & (CRYPTO_ALG_DEAD | CRYPTO_ALG_DYING); +} + +static inline void crypto_notify(unsigned long val, void *v) +{ + blocking_notifier_call_chain(&crypto_chain, val, v); +} + +#endif /* _CRYPTO_INTERNAL_H */ + diff --git a/crypto/khazad.c b/crypto/khazad.c new file mode 100644 index 00000000..527e4e39 --- /dev/null +++ b/crypto/khazad.c @@ -0,0 +1,883 @@ +/* + * Cryptographic API. + * + * Khazad Algorithm + * + * The Khazad algorithm was developed by Paulo S. L. M. Barreto and + * Vincent Rijmen. It was a finalist in the NESSIE encryption contest. + * + * The original authors have disclaimed all copyright interest in this + * code and thus put it in the public domain. The subsequent authors + * have put this under the GNU General Public License. + * + * By Aaron Grothe ajgrothe@yahoo.com, August 1, 2004 + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 2 of the License, or + * (at your option) any later version. 
+ * + */ + +#include <linux/init.h> +#include <linux/module.h> +#include <linux/mm.h> +#include <asm/byteorder.h> +#include <linux/crypto.h> +#include <linux/types.h> + +#define KHAZAD_KEY_SIZE 16 +#define KHAZAD_BLOCK_SIZE 8 +#define KHAZAD_ROUNDS 8 + +struct khazad_ctx { + u64 E[KHAZAD_ROUNDS + 1]; + u64 D[KHAZAD_ROUNDS + 1]; +}; + +static const u64 T0[256] = { + 0xbad3d268bbb96a01ULL, 0x54fc4d19e59a66b1ULL, 0x2f71bc93e26514cdULL, + 0x749ccdb925871b51ULL, 0x53f55102f7a257a4ULL, 0xd3686bb8d0d6be03ULL, + 0xd26b6fbdd6deb504ULL, 0x4dd72964b35285feULL, 0x50f05d0dfdba4aadULL, + 0xace98a26cf09e063ULL, 0x8d8a0e83091c9684ULL, 0xbfdcc679a5914d1aULL, + 0x7090ddad3da7374dULL, 0x52f65507f1aa5ca3ULL, 0x9ab352c87ba417e1ULL, + 0x4cd42d61b55a8ef9ULL, 0xea238f65460320acULL, 0xd56273a6c4e68411ULL, + 0x97a466f155cc68c2ULL, 0xd16e63b2dcc6a80dULL, 0x3355ccffaa85d099ULL, + 0x51f35908fbb241aaULL, 0x5bed712ac7e20f9cULL, 0xa6f7a204f359ae55ULL, + 0xde7f5f81febec120ULL, 0x48d83d75ad7aa2e5ULL, 0xa8e59a32d729cc7fULL, + 0x99b65ec771bc0ae8ULL, 0xdb704b90e096e63bULL, 0x3256c8faac8ddb9eULL, + 0xb7c4e65195d11522ULL, 0xfc19d72b32b3aaceULL, 0xe338ab48704b7393ULL, + 0x9ebf42dc63843bfdULL, 0x91ae7eef41fc52d0ULL, 0x9bb056cd7dac1ce6ULL, + 0xe23baf4d76437894ULL, 0xbbd0d66dbdb16106ULL, 0x41c319589b32f1daULL, + 0x6eb2a5cb7957e517ULL, 0xa5f2ae0bf941b35cULL, 0xcb400bc08016564bULL, + 0x6bbdb1da677fc20cULL, 0x95a26efb59dc7eccULL, 0xa1febe1fe1619f40ULL, + 0xf308eb1810cbc3e3ULL, 0xb1cefe4f81e12f30ULL, 0x0206080a0c10160eULL, + 0xcc4917db922e675eULL, 0xc45137f3a26e3f66ULL, 0x1d2774694ee8cf53ULL, + 0x143c504478a09c6cULL, 0xc3582be8b0560e73ULL, 0x63a591f2573f9a34ULL, + 0xda734f95e69eed3cULL, 0x5de76934d3d2358eULL, 0x5fe1613edfc22380ULL, + 0xdc79578bf2aed72eULL, 0x7d87e99413cf486eULL, 0xcd4a13de94266c59ULL, + 0x7f81e19e1fdf5e60ULL, 0x5aee752fc1ea049bULL, 0x6cb4adc17547f319ULL, + 0x5ce46d31d5da3e89ULL, 0xf704fb0c08ebefffULL, 0x266a98bed42d47f2ULL, + 0xff1cdb2438abb7c7ULL, 0xed2a937e543b11b9ULL, 0xe825876f4a1336a2ULL, + 0x9dba4ed3699c26f4ULL, 0x6fb1a1ce7f5fee10ULL, 0x8e8f028c03048b8dULL, + 0x192b647d56c8e34fULL, 0xa0fdba1ae7699447ULL, 0xf00de7171ad3deeaULL, + 0x89861e97113cba98ULL, 0x0f113c332278692dULL, 0x07091c1b12383115ULL, + 0xafec8629c511fd6aULL, 0xfb10cb30208b9bdbULL, 0x0818202830405838ULL, + 0x153f54417ea8976bULL, 0x0d1734392e687f23ULL, 0x040c101418202c1cULL, + 0x0103040506080b07ULL, 0x64ac8de94507ab21ULL, 0xdf7c5b84f8b6ca27ULL, + 0x769ac5b329970d5fULL, 0x798bf9800bef6472ULL, 0xdd7a538ef4a6dc29ULL, + 0x3d47f4c98ef5b2b3ULL, 0x163a584e74b08a62ULL, 0x3f41fcc382e5a4bdULL, + 0x3759dcebb2a5fc85ULL, 0x6db7a9c4734ff81eULL, 0x3848e0d890dd95a8ULL, + 0xb9d6de67b1a17708ULL, 0x7395d1a237bf2a44ULL, 0xe926836a4c1b3da5ULL, + 0x355fd4e1beb5ea8bULL, 0x55ff491ce3926db6ULL, 0x7193d9a83baf3c4aULL, + 0x7b8df18a07ff727cULL, 0x8c890a860f149d83ULL, 0x7296d5a731b72143ULL, + 0x88851a921734b19fULL, 0xf607ff090ee3e4f8ULL, 0x2a7ea882fc4d33d6ULL, + 0x3e42f8c684edafbaULL, 0x5ee2653bd9ca2887ULL, 0x27699cbbd2254cf5ULL, + 0x46ca0543890ac0cfULL, 0x0c14303c28607424ULL, 0x65af89ec430fa026ULL, + 0x68b8bdd56d67df05ULL, 0x61a399f85b2f8c3aULL, 0x03050c0f0a181d09ULL, + 0xc15e23e2bc46187dULL, 0x57f94116ef827bb8ULL, 0xd6677fa9cefe9918ULL, + 0xd976439aec86f035ULL, 0x58e87d25cdfa1295ULL, 0xd875479fea8efb32ULL, + 0x66aa85e34917bd2fULL, 0xd7647bacc8f6921fULL, 0x3a4ee8d29ccd83a6ULL, + 0xc84507cf8a0e4b42ULL, 0x3c44f0cc88fdb9b4ULL, 0xfa13cf35268390dcULL, + 0x96a762f453c463c5ULL, 0xa7f4a601f551a552ULL, 0x98b55ac277b401efULL, + 0xec29977b52331abeULL, 0xb8d5da62b7a97c0fULL, 
0xc7543bfca876226fULL, + 0xaeef822cc319f66dULL, 0x69bbb9d06b6fd402ULL, 0x4bdd317aa762bfecULL, + 0xabe0963ddd31d176ULL, 0xa9e69e37d121c778ULL, 0x67a981e64f1fb628ULL, + 0x0a1e28223c504e36ULL, 0x47c901468f02cbc8ULL, 0xf20bef1d16c3c8e4ULL, + 0xb5c2ee5b99c1032cULL, 0x226688aacc0d6beeULL, 0xe532b356647b4981ULL, + 0xee2f9f715e230cb0ULL, 0xbedfc27ca399461dULL, 0x2b7dac87fa4538d1ULL, + 0x819e3ebf217ce2a0ULL, 0x1236485a6c90a67eULL, 0x839836b52d6cf4aeULL, + 0x1b2d6c775ad8f541ULL, 0x0e1238362470622aULL, 0x23658cafca0560e9ULL, + 0xf502f30604fbf9f1ULL, 0x45cf094c8312ddc6ULL, 0x216384a5c61576e7ULL, + 0xce4f1fd19e3e7150ULL, 0x49db3970ab72a9e2ULL, 0x2c74b09ce87d09c4ULL, + 0xf916c33a2c9b8dd5ULL, 0xe637bf596e635488ULL, 0xb6c7e25493d91e25ULL, + 0x2878a088f05d25d8ULL, 0x17395c4b72b88165ULL, 0x829b32b02b64ffa9ULL, + 0x1a2e68725cd0fe46ULL, 0x8b80169d1d2cac96ULL, 0xfe1fdf213ea3bcc0ULL, + 0x8a8312981b24a791ULL, 0x091b242d3648533fULL, 0xc94603ca8c064045ULL, + 0x879426a1354cd8b2ULL, 0x4ed2256bb94a98f7ULL, 0xe13ea3427c5b659dULL, + 0x2e72b896e46d1fcaULL, 0xe431b75362734286ULL, 0xe03da7477a536e9aULL, + 0xeb208b60400b2babULL, 0x90ad7aea47f459d7ULL, 0xa4f1aa0eff49b85bULL, + 0x1e22786644f0d25aULL, 0x85922eab395ccebcULL, 0x60a09dfd5d27873dULL, + 0x0000000000000000ULL, 0x256f94b1de355afbULL, 0xf401f70302f3f2f6ULL, + 0xf10ee3121cdbd5edULL, 0x94a16afe5fd475cbULL, 0x0b1d2c273a584531ULL, + 0xe734bb5c686b5f8fULL, 0x759fc9bc238f1056ULL, 0xef2c9b74582b07b7ULL, + 0x345cd0e4b8bde18cULL, 0x3153c4f5a695c697ULL, 0xd46177a3c2ee8f16ULL, + 0xd06d67b7dacea30aULL, 0x869722a43344d3b5ULL, 0x7e82e59b19d75567ULL, + 0xadea8e23c901eb64ULL, 0xfd1ad32e34bba1c9ULL, 0x297ba48df6552edfULL, + 0x3050c0f0a09dcd90ULL, 0x3b4decd79ac588a1ULL, 0x9fbc46d9658c30faULL, + 0xf815c73f2a9386d2ULL, 0xc6573ff9ae7e2968ULL, 0x13354c5f6a98ad79ULL, + 0x060a181e14303a12ULL, 0x050f14111e28271bULL, 0xc55233f6a4663461ULL, + 0x113344556688bb77ULL, 0x7799c1b62f9f0658ULL, 0x7c84ed9115c74369ULL, + 0x7a8ef58f01f7797bULL, 0x7888fd850de76f75ULL, 0x365ad8eeb4adf782ULL, + 0x1c24706c48e0c454ULL, 0x394be4dd96d59eafULL, 0x59eb7920cbf21992ULL, + 0x1828607850c0e848ULL, 0x56fa4513e98a70bfULL, 0xb3c8f6458df1393eULL, + 0xb0cdfa4a87e92437ULL, 0x246c90b4d83d51fcULL, 0x206080a0c01d7de0ULL, + 0xb2cbf2408bf93239ULL, 0x92ab72e04be44fd9ULL, 0xa3f8b615ed71894eULL, + 0xc05d27e7ba4e137aULL, 0x44cc0d49851ad6c1ULL, 0x62a695f751379133ULL, + 0x103040506080b070ULL, 0xb4c1ea5e9fc9082bULL, 0x84912aae3f54c5bbULL, + 0x43c511529722e7d4ULL, 0x93a876e54dec44deULL, 0xc25b2fedb65e0574ULL, + 0x4ade357fa16ab4ebULL, 0xbddace73a9815b14ULL, 0x8f8c0689050c808aULL, + 0x2d77b499ee7502c3ULL, 0xbcd9ca76af895013ULL, 0x9cb94ad66f942df3ULL, + 0x6abeb5df6177c90bULL, 0x40c01d5d9d3afaddULL, 0xcf4c1bd498367a57ULL, + 0xa2fbb210eb798249ULL, 0x809d3aba2774e9a7ULL, 0x4fd1216ebf4293f0ULL, + 0x1f217c6342f8d95dULL, 0xca430fc5861e5d4cULL, 0xaae39238db39da71ULL, + 0x42c61557912aecd3ULL +}; + +static const u64 T1[256] = { + 0xd3ba68d2b9bb016aULL, 0xfc54194d9ae5b166ULL, 0x712f93bc65e2cd14ULL, + 0x9c74b9cd8725511bULL, 0xf5530251a2f7a457ULL, 0x68d3b86bd6d003beULL, + 0x6bd2bd6fded604b5ULL, 0xd74d642952b3fe85ULL, 0xf0500d5dbafdad4aULL, + 0xe9ac268a09cf63e0ULL, 0x8a8d830e1c098496ULL, 0xdcbf79c691a51a4dULL, + 0x9070addda73d4d37ULL, 0xf6520755aaf1a35cULL, 0xb39ac852a47be117ULL, + 0xd44c612d5ab5f98eULL, 0x23ea658f0346ac20ULL, 0x62d5a673e6c41184ULL, + 0xa497f166cc55c268ULL, 0x6ed1b263c6dc0da8ULL, 0x5533ffcc85aa99d0ULL, + 0xf3510859b2fbaa41ULL, 0xed5b2a71e2c79c0fULL, 0xf7a604a259f355aeULL, + 0x7fde815fbefe20c1ULL, 0xd848753d7aade5a2ULL, 
0xe5a8329a29d77fccULL, + 0xb699c75ebc71e80aULL, 0x70db904b96e03be6ULL, 0x5632fac88dac9edbULL, + 0xc4b751e6d1952215ULL, 0x19fc2bd7b332ceaaULL, 0x38e348ab4b709373ULL, + 0xbf9edc428463fd3bULL, 0xae91ef7efc41d052ULL, 0xb09bcd56ac7de61cULL, + 0x3be24daf43769478ULL, 0xd0bb6dd6b1bd0661ULL, 0xc3415819329bdaf1ULL, + 0xb26ecba5577917e5ULL, 0xf2a50bae41f95cb3ULL, 0x40cbc00b16804b56ULL, + 0xbd6bdab17f670cc2ULL, 0xa295fb6edc59cc7eULL, 0xfea11fbe61e1409fULL, + 0x08f318ebcb10e3c3ULL, 0xceb14ffee181302fULL, 0x06020a08100c0e16ULL, + 0x49ccdb172e925e67ULL, 0x51c4f3376ea2663fULL, 0x271d6974e84e53cfULL, + 0x3c144450a0786c9cULL, 0x58c3e82b56b0730eULL, 0xa563f2913f57349aULL, + 0x73da954f9ee63cedULL, 0xe75d3469d2d38e35ULL, 0xe15f3e61c2df8023ULL, + 0x79dc8b57aef22ed7ULL, 0x877d94e9cf136e48ULL, 0x4acdde132694596cULL, + 0x817f9ee1df1f605eULL, 0xee5a2f75eac19b04ULL, 0xb46cc1ad477519f3ULL, + 0xe45c316ddad5893eULL, 0x04f70cfbeb08ffefULL, 0x6a26be982dd4f247ULL, + 0x1cff24dbab38c7b7ULL, 0x2aed7e933b54b911ULL, 0x25e86f87134aa236ULL, + 0xba9dd34e9c69f426ULL, 0xb16fcea15f7f10eeULL, 0x8f8e8c0204038d8bULL, + 0x2b197d64c8564fe3ULL, 0xfda01aba69e74794ULL, 0x0df017e7d31aeadeULL, + 0x8689971e3c1198baULL, 0x110f333c78222d69ULL, 0x09071b1c38121531ULL, + 0xecaf298611c56afdULL, 0x10fb30cb8b20db9bULL, 0x1808282040303858ULL, + 0x3f154154a87e6b97ULL, 0x170d3934682e237fULL, 0x0c04141020181c2cULL, + 0x030105040806070bULL, 0xac64e98d074521abULL, 0x7cdf845bb6f827caULL, + 0x9a76b3c597295f0dULL, 0x8b7980f9ef0b7264ULL, 0x7add8e53a6f429dcULL, + 0x473dc9f4f58eb3b2ULL, 0x3a164e58b074628aULL, 0x413fc3fce582bda4ULL, + 0x5937ebdca5b285fcULL, 0xb76dc4a94f731ef8ULL, 0x4838d8e0dd90a895ULL, + 0xd6b967dea1b10877ULL, 0x9573a2d1bf37442aULL, 0x26e96a831b4ca53dULL, + 0x5f35e1d4b5be8beaULL, 0xff551c4992e3b66dULL, 0x9371a8d9af3b4a3cULL, + 0x8d7b8af1ff077c72ULL, 0x898c860a140f839dULL, 0x9672a7d5b7314321ULL, + 0x8588921a34179fb1ULL, 0x07f609ffe30ef8e4ULL, 0x7e2a82a84dfcd633ULL, + 0x423ec6f8ed84baafULL, 0xe25e3b65cad98728ULL, 0x6927bb9c25d2f54cULL, + 0xca4643050a89cfc0ULL, 0x140c3c3060282474ULL, 0xaf65ec890f4326a0ULL, + 0xb868d5bd676d05dfULL, 0xa361f8992f5b3a8cULL, 0x05030f0c180a091dULL, + 0x5ec1e22346bc7d18ULL, 0xf957164182efb87bULL, 0x67d6a97ffece1899ULL, + 0x76d99a4386ec35f0ULL, 0xe858257dfacd9512ULL, 0x75d89f478eea32fbULL, + 0xaa66e38517492fbdULL, 0x64d7ac7bf6c81f92ULL, 0x4e3ad2e8cd9ca683ULL, + 0x45c8cf070e8a424bULL, 0x443cccf0fd88b4b9ULL, 0x13fa35cf8326dc90ULL, + 0xa796f462c453c563ULL, 0xf4a701a651f552a5ULL, 0xb598c25ab477ef01ULL, + 0x29ec7b973352be1aULL, 0xd5b862daa9b70f7cULL, 0x54c7fc3b76a86f22ULL, + 0xefae2c8219c36df6ULL, 0xbb69d0b96f6b02d4ULL, 0xdd4b7a3162a7ecbfULL, + 0xe0ab3d9631dd76d1ULL, 0xe6a9379e21d178c7ULL, 0xa967e6811f4f28b6ULL, + 0x1e0a2228503c364eULL, 0xc9474601028fc8cbULL, 0x0bf21defc316e4c8ULL, + 0xc2b55beec1992c03ULL, 0x6622aa880dccee6bULL, 0x32e556b37b648149ULL, + 0x2fee719f235eb00cULL, 0xdfbe7cc299a31d46ULL, 0x7d2b87ac45fad138ULL, + 0x9e81bf3e7c21a0e2ULL, 0x36125a48906c7ea6ULL, 0x9883b5366c2daef4ULL, + 0x2d1b776cd85a41f5ULL, 0x120e363870242a62ULL, 0x6523af8c05cae960ULL, + 0x02f506f3fb04f1f9ULL, 0xcf454c091283c6ddULL, 0x6321a58415c6e776ULL, + 0x4fced11f3e9e5071ULL, 0xdb49703972abe2a9ULL, 0x742c9cb07de8c409ULL, + 0x16f93ac39b2cd58dULL, 0x37e659bf636e8854ULL, 0xc7b654e2d993251eULL, + 0x782888a05df0d825ULL, 0x39174b5cb8726581ULL, 0x9b82b032642ba9ffULL, + 0x2e1a7268d05c46feULL, 0x808b9d162c1d96acULL, 0x1ffe21dfa33ec0bcULL, + 0x838a9812241b91a7ULL, 0x1b092d2448363f53ULL, 0x46c9ca03068c4540ULL, + 0x9487a1264c35b2d8ULL, 0xd24e6b254ab9f798ULL, 
0x3ee142a35b7c9d65ULL, + 0x722e96b86de4ca1fULL, 0x31e453b773628642ULL, 0x3de047a7537a9a6eULL, + 0x20eb608b0b40ab2bULL, 0xad90ea7af447d759ULL, 0xf1a40eaa49ff5bb8ULL, + 0x221e6678f0445ad2ULL, 0x9285ab2e5c39bcceULL, 0xa060fd9d275d3d87ULL, + 0x0000000000000000ULL, 0x6f25b19435defb5aULL, 0x01f403f7f302f6f2ULL, + 0x0ef112e3db1cedd5ULL, 0xa194fe6ad45fcb75ULL, 0x1d0b272c583a3145ULL, + 0x34e75cbb6b688f5fULL, 0x9f75bcc98f235610ULL, 0x2cef749b2b58b707ULL, + 0x5c34e4d0bdb88ce1ULL, 0x5331f5c495a697c6ULL, 0x61d4a377eec2168fULL, + 0x6dd0b767ceda0aa3ULL, 0x9786a4224433b5d3ULL, 0x827e9be5d7196755ULL, + 0xeaad238e01c964ebULL, 0x1afd2ed3bb34c9a1ULL, 0x7b298da455f6df2eULL, + 0x5030f0c09da090cdULL, 0x4d3bd7ecc59aa188ULL, 0xbc9fd9468c65fa30ULL, + 0x15f83fc7932ad286ULL, 0x57c6f93f7eae6829ULL, 0x35135f4c986a79adULL, + 0x0a061e183014123aULL, 0x0f051114281e1b27ULL, 0x52c5f63366a46134ULL, + 0x33115544886677bbULL, 0x9977b6c19f2f5806ULL, 0x847c91edc7156943ULL, + 0x8e7a8ff5f7017b79ULL, 0x887885fde70d756fULL, 0x5a36eed8adb482f7ULL, + 0x241c6c70e04854c4ULL, 0x4b39dde4d596af9eULL, 0xeb592079f2cb9219ULL, + 0x28187860c05048e8ULL, 0xfa5613458ae9bf70ULL, 0xc8b345f6f18d3e39ULL, + 0xcdb04afae9873724ULL, 0x6c24b4903dd8fc51ULL, 0x6020a0801dc0e07dULL, + 0xcbb240f2f98b3932ULL, 0xab92e072e44bd94fULL, 0xf8a315b671ed4e89ULL, + 0x5dc0e7274eba7a13ULL, 0xcc44490d1a85c1d6ULL, 0xa662f79537513391ULL, + 0x30105040806070b0ULL, 0xc1b45eeac99f2b08ULL, 0x9184ae2a543fbbc5ULL, + 0xc54352112297d4e7ULL, 0xa893e576ec4dde44ULL, 0x5bc2ed2f5eb67405ULL, + 0xde4a7f356aa1ebb4ULL, 0xdabd73ce81a9145bULL, 0x8c8f89060c058a80ULL, + 0x772d99b475eec302ULL, 0xd9bc76ca89af1350ULL, 0xb99cd64a946ff32dULL, + 0xbe6adfb577610bc9ULL, 0xc0405d1d3a9dddfaULL, 0x4ccfd41b3698577aULL, + 0xfba210b279eb4982ULL, 0x9d80ba3a7427a7e9ULL, 0xd14f6e2142bff093ULL, + 0x211f637cf8425dd9ULL, 0x43cac50f1e864c5dULL, 0xe3aa389239db71daULL, + 0xc64257152a91d3ecULL +}; + +static const u64 T2[256] = { + 0xd268bad36a01bbb9ULL, 0x4d1954fc66b1e59aULL, 0xbc932f7114cde265ULL, + 0xcdb9749c1b512587ULL, 0x510253f557a4f7a2ULL, 0x6bb8d368be03d0d6ULL, + 0x6fbdd26bb504d6deULL, 0x29644dd785feb352ULL, 0x5d0d50f04aadfdbaULL, + 0x8a26ace9e063cf09ULL, 0x0e838d8a9684091cULL, 0xc679bfdc4d1aa591ULL, + 0xddad7090374d3da7ULL, 0x550752f65ca3f1aaULL, 0x52c89ab317e17ba4ULL, + 0x2d614cd48ef9b55aULL, 0x8f65ea2320ac4603ULL, 0x73a6d5628411c4e6ULL, + 0x66f197a468c255ccULL, 0x63b2d16ea80ddcc6ULL, 0xccff3355d099aa85ULL, + 0x590851f341aafbb2ULL, 0x712a5bed0f9cc7e2ULL, 0xa204a6f7ae55f359ULL, + 0x5f81de7fc120febeULL, 0x3d7548d8a2e5ad7aULL, 0x9a32a8e5cc7fd729ULL, + 0x5ec799b60ae871bcULL, 0x4b90db70e63be096ULL, 0xc8fa3256db9eac8dULL, + 0xe651b7c4152295d1ULL, 0xd72bfc19aace32b3ULL, 0xab48e3387393704bULL, + 0x42dc9ebf3bfd6384ULL, 0x7eef91ae52d041fcULL, 0x56cd9bb01ce67dacULL, + 0xaf4de23b78947643ULL, 0xd66dbbd06106bdb1ULL, 0x195841c3f1da9b32ULL, + 0xa5cb6eb2e5177957ULL, 0xae0ba5f2b35cf941ULL, 0x0bc0cb40564b8016ULL, + 0xb1da6bbdc20c677fULL, 0x6efb95a27ecc59dcULL, 0xbe1fa1fe9f40e161ULL, + 0xeb18f308c3e310cbULL, 0xfe4fb1ce2f3081e1ULL, 0x080a0206160e0c10ULL, + 0x17dbcc49675e922eULL, 0x37f3c4513f66a26eULL, 0x74691d27cf534ee8ULL, + 0x5044143c9c6c78a0ULL, 0x2be8c3580e73b056ULL, 0x91f263a59a34573fULL, + 0x4f95da73ed3ce69eULL, 0x69345de7358ed3d2ULL, 0x613e5fe12380dfc2ULL, + 0x578bdc79d72ef2aeULL, 0xe9947d87486e13cfULL, 0x13decd4a6c599426ULL, + 0xe19e7f815e601fdfULL, 0x752f5aee049bc1eaULL, 0xadc16cb4f3197547ULL, + 0x6d315ce43e89d5daULL, 0xfb0cf704efff08ebULL, 0x98be266a47f2d42dULL, + 0xdb24ff1cb7c738abULL, 0x937eed2a11b9543bULL, 
0x876fe82536a24a13ULL, + 0x4ed39dba26f4699cULL, 0xa1ce6fb1ee107f5fULL, 0x028c8e8f8b8d0304ULL, + 0x647d192be34f56c8ULL, 0xba1aa0fd9447e769ULL, 0xe717f00ddeea1ad3ULL, + 0x1e978986ba98113cULL, 0x3c330f11692d2278ULL, 0x1c1b070931151238ULL, + 0x8629afecfd6ac511ULL, 0xcb30fb109bdb208bULL, 0x2028081858383040ULL, + 0x5441153f976b7ea8ULL, 0x34390d177f232e68ULL, 0x1014040c2c1c1820ULL, + 0x040501030b070608ULL, 0x8de964acab214507ULL, 0x5b84df7cca27f8b6ULL, + 0xc5b3769a0d5f2997ULL, 0xf980798b64720befULL, 0x538edd7adc29f4a6ULL, + 0xf4c93d47b2b38ef5ULL, 0x584e163a8a6274b0ULL, 0xfcc33f41a4bd82e5ULL, + 0xdceb3759fc85b2a5ULL, 0xa9c46db7f81e734fULL, 0xe0d8384895a890ddULL, + 0xde67b9d67708b1a1ULL, 0xd1a273952a4437bfULL, 0x836ae9263da54c1bULL, + 0xd4e1355fea8bbeb5ULL, 0x491c55ff6db6e392ULL, 0xd9a871933c4a3bafULL, + 0xf18a7b8d727c07ffULL, 0x0a868c899d830f14ULL, 0xd5a77296214331b7ULL, + 0x1a928885b19f1734ULL, 0xff09f607e4f80ee3ULL, 0xa8822a7e33d6fc4dULL, + 0xf8c63e42afba84edULL, 0x653b5ee22887d9caULL, 0x9cbb27694cf5d225ULL, + 0x054346cac0cf890aULL, 0x303c0c1474242860ULL, 0x89ec65afa026430fULL, + 0xbdd568b8df056d67ULL, 0x99f861a38c3a5b2fULL, 0x0c0f03051d090a18ULL, + 0x23e2c15e187dbc46ULL, 0x411657f97bb8ef82ULL, 0x7fa9d6679918cefeULL, + 0x439ad976f035ec86ULL, 0x7d2558e81295cdfaULL, 0x479fd875fb32ea8eULL, + 0x85e366aabd2f4917ULL, 0x7bacd764921fc8f6ULL, 0xe8d23a4e83a69ccdULL, + 0x07cfc8454b428a0eULL, 0xf0cc3c44b9b488fdULL, 0xcf35fa1390dc2683ULL, + 0x62f496a763c553c4ULL, 0xa601a7f4a552f551ULL, 0x5ac298b501ef77b4ULL, + 0x977bec291abe5233ULL, 0xda62b8d57c0fb7a9ULL, 0x3bfcc754226fa876ULL, + 0x822caeeff66dc319ULL, 0xb9d069bbd4026b6fULL, 0x317a4bddbfeca762ULL, + 0x963dabe0d176dd31ULL, 0x9e37a9e6c778d121ULL, 0x81e667a9b6284f1fULL, + 0x28220a1e4e363c50ULL, 0x014647c9cbc88f02ULL, 0xef1df20bc8e416c3ULL, + 0xee5bb5c2032c99c1ULL, 0x88aa22666beecc0dULL, 0xb356e5324981647bULL, + 0x9f71ee2f0cb05e23ULL, 0xc27cbedf461da399ULL, 0xac872b7d38d1fa45ULL, + 0x3ebf819ee2a0217cULL, 0x485a1236a67e6c90ULL, 0x36b58398f4ae2d6cULL, + 0x6c771b2df5415ad8ULL, 0x38360e12622a2470ULL, 0x8caf236560e9ca05ULL, + 0xf306f502f9f104fbULL, 0x094c45cfddc68312ULL, 0x84a5216376e7c615ULL, + 0x1fd1ce4f71509e3eULL, 0x397049dba9e2ab72ULL, 0xb09c2c7409c4e87dULL, + 0xc33af9168dd52c9bULL, 0xbf59e63754886e63ULL, 0xe254b6c71e2593d9ULL, + 0xa088287825d8f05dULL, 0x5c4b1739816572b8ULL, 0x32b0829bffa92b64ULL, + 0x68721a2efe465cd0ULL, 0x169d8b80ac961d2cULL, 0xdf21fe1fbcc03ea3ULL, + 0x12988a83a7911b24ULL, 0x242d091b533f3648ULL, 0x03cac94640458c06ULL, + 0x26a18794d8b2354cULL, 0x256b4ed298f7b94aULL, 0xa342e13e659d7c5bULL, + 0xb8962e721fcae46dULL, 0xb753e43142866273ULL, 0xa747e03d6e9a7a53ULL, + 0x8b60eb202bab400bULL, 0x7aea90ad59d747f4ULL, 0xaa0ea4f1b85bff49ULL, + 0x78661e22d25a44f0ULL, 0x2eab8592cebc395cULL, 0x9dfd60a0873d5d27ULL, + 0x0000000000000000ULL, 0x94b1256f5afbde35ULL, 0xf703f401f2f602f3ULL, + 0xe312f10ed5ed1cdbULL, 0x6afe94a175cb5fd4ULL, 0x2c270b1d45313a58ULL, + 0xbb5ce7345f8f686bULL, 0xc9bc759f1056238fULL, 0x9b74ef2c07b7582bULL, + 0xd0e4345ce18cb8bdULL, 0xc4f53153c697a695ULL, 0x77a3d4618f16c2eeULL, + 0x67b7d06da30adaceULL, 0x22a48697d3b53344ULL, 0xe59b7e82556719d7ULL, + 0x8e23adeaeb64c901ULL, 0xd32efd1aa1c934bbULL, 0xa48d297b2edff655ULL, + 0xc0f03050cd90a09dULL, 0xecd73b4d88a19ac5ULL, 0x46d99fbc30fa658cULL, + 0xc73ff81586d22a93ULL, 0x3ff9c6572968ae7eULL, 0x4c5f1335ad796a98ULL, + 0x181e060a3a121430ULL, 0x1411050f271b1e28ULL, 0x33f6c5523461a466ULL, + 0x44551133bb776688ULL, 0xc1b6779906582f9fULL, 0xed917c84436915c7ULL, + 0xf58f7a8e797b01f7ULL, 0xfd8578886f750de7ULL, 
0xd8ee365af782b4adULL, + 0x706c1c24c45448e0ULL, 0xe4dd394b9eaf96d5ULL, 0x792059eb1992cbf2ULL, + 0x60781828e84850c0ULL, 0x451356fa70bfe98aULL, 0xf645b3c8393e8df1ULL, + 0xfa4ab0cd243787e9ULL, 0x90b4246c51fcd83dULL, 0x80a020607de0c01dULL, + 0xf240b2cb32398bf9ULL, 0x72e092ab4fd94be4ULL, 0xb615a3f8894eed71ULL, + 0x27e7c05d137aba4eULL, 0x0d4944ccd6c1851aULL, 0x95f762a691335137ULL, + 0x40501030b0706080ULL, 0xea5eb4c1082b9fc9ULL, 0x2aae8491c5bb3f54ULL, + 0x115243c5e7d49722ULL, 0x76e593a844de4decULL, 0x2fedc25b0574b65eULL, + 0x357f4adeb4eba16aULL, 0xce73bdda5b14a981ULL, 0x06898f8c808a050cULL, + 0xb4992d7702c3ee75ULL, 0xca76bcd95013af89ULL, 0x4ad69cb92df36f94ULL, + 0xb5df6abec90b6177ULL, 0x1d5d40c0fadd9d3aULL, 0x1bd4cf4c7a579836ULL, + 0xb210a2fb8249eb79ULL, 0x3aba809de9a72774ULL, 0x216e4fd193f0bf42ULL, + 0x7c631f21d95d42f8ULL, 0x0fc5ca435d4c861eULL, 0x9238aae3da71db39ULL, + 0x155742c6ecd3912aULL +}; + +static const u64 T3[256] = { + 0x68d2d3ba016ab9bbULL, 0x194dfc54b1669ae5ULL, 0x93bc712fcd1465e2ULL, + 0xb9cd9c74511b8725ULL, 0x0251f553a457a2f7ULL, 0xb86b68d303bed6d0ULL, + 0xbd6f6bd204b5ded6ULL, 0x6429d74dfe8552b3ULL, 0x0d5df050ad4abafdULL, + 0x268ae9ac63e009cfULL, 0x830e8a8d84961c09ULL, 0x79c6dcbf1a4d91a5ULL, + 0xaddd90704d37a73dULL, 0x0755f652a35caaf1ULL, 0xc852b39ae117a47bULL, + 0x612dd44cf98e5ab5ULL, 0x658f23eaac200346ULL, 0xa67362d51184e6c4ULL, + 0xf166a497c268cc55ULL, 0xb2636ed10da8c6dcULL, 0xffcc553399d085aaULL, + 0x0859f351aa41b2fbULL, 0x2a71ed5b9c0fe2c7ULL, 0x04a2f7a655ae59f3ULL, + 0x815f7fde20c1befeULL, 0x753dd848e5a27aadULL, 0x329ae5a87fcc29d7ULL, + 0xc75eb699e80abc71ULL, 0x904b70db3be696e0ULL, 0xfac856329edb8dacULL, + 0x51e6c4b72215d195ULL, 0x2bd719fcceaab332ULL, 0x48ab38e393734b70ULL, + 0xdc42bf9efd3b8463ULL, 0xef7eae91d052fc41ULL, 0xcd56b09be61cac7dULL, + 0x4daf3be294784376ULL, 0x6dd6d0bb0661b1bdULL, 0x5819c341daf1329bULL, + 0xcba5b26e17e55779ULL, 0x0baef2a55cb341f9ULL, 0xc00b40cb4b561680ULL, + 0xdab1bd6b0cc27f67ULL, 0xfb6ea295cc7edc59ULL, 0x1fbefea1409f61e1ULL, + 0x18eb08f3e3c3cb10ULL, 0x4ffeceb1302fe181ULL, 0x0a0806020e16100cULL, + 0xdb1749cc5e672e92ULL, 0xf33751c4663f6ea2ULL, 0x6974271d53cfe84eULL, + 0x44503c146c9ca078ULL, 0xe82b58c3730e56b0ULL, 0xf291a563349a3f57ULL, + 0x954f73da3ced9ee6ULL, 0x3469e75d8e35d2d3ULL, 0x3e61e15f8023c2dfULL, + 0x8b5779dc2ed7aef2ULL, 0x94e9877d6e48cf13ULL, 0xde134acd596c2694ULL, + 0x9ee1817f605edf1fULL, 0x2f75ee5a9b04eac1ULL, 0xc1adb46c19f34775ULL, + 0x316de45c893edad5ULL, 0x0cfb04f7ffefeb08ULL, 0xbe986a26f2472dd4ULL, + 0x24db1cffc7b7ab38ULL, 0x7e932aedb9113b54ULL, 0x6f8725e8a236134aULL, + 0xd34eba9df4269c69ULL, 0xcea1b16f10ee5f7fULL, 0x8c028f8e8d8b0403ULL, + 0x7d642b194fe3c856ULL, 0x1abafda0479469e7ULL, 0x17e70df0eaded31aULL, + 0x971e868998ba3c11ULL, 0x333c110f2d697822ULL, 0x1b1c090715313812ULL, + 0x2986ecaf6afd11c5ULL, 0x30cb10fbdb9b8b20ULL, 0x2820180838584030ULL, + 0x41543f156b97a87eULL, 0x3934170d237f682eULL, 0x14100c041c2c2018ULL, + 0x05040301070b0806ULL, 0xe98dac6421ab0745ULL, 0x845b7cdf27cab6f8ULL, + 0xb3c59a765f0d9729ULL, 0x80f98b797264ef0bULL, 0x8e537add29dca6f4ULL, + 0xc9f4473db3b2f58eULL, 0x4e583a16628ab074ULL, 0xc3fc413fbda4e582ULL, + 0xebdc593785fca5b2ULL, 0xc4a9b76d1ef84f73ULL, 0xd8e04838a895dd90ULL, + 0x67ded6b90877a1b1ULL, 0xa2d19573442abf37ULL, 0x6a8326e9a53d1b4cULL, + 0xe1d45f358beab5beULL, 0x1c49ff55b66d92e3ULL, 0xa8d993714a3caf3bULL, + 0x8af18d7b7c72ff07ULL, 0x860a898c839d140fULL, 0xa7d596724321b731ULL, + 0x921a85889fb13417ULL, 0x09ff07f6f8e4e30eULL, 0x82a87e2ad6334dfcULL, + 0xc6f8423ebaafed84ULL, 0x3b65e25e8728cad9ULL, 
0xbb9c6927f54c25d2ULL, + 0x4305ca46cfc00a89ULL, 0x3c30140c24746028ULL, 0xec89af6526a00f43ULL, + 0xd5bdb86805df676dULL, 0xf899a3613a8c2f5bULL, 0x0f0c0503091d180aULL, + 0xe2235ec17d1846bcULL, 0x1641f957b87b82efULL, 0xa97f67d61899feceULL, + 0x9a4376d935f086ecULL, 0x257de8589512facdULL, 0x9f4775d832fb8eeaULL, + 0xe385aa662fbd1749ULL, 0xac7b64d71f92f6c8ULL, 0xd2e84e3aa683cd9cULL, + 0xcf0745c8424b0e8aULL, 0xccf0443cb4b9fd88ULL, 0x35cf13fadc908326ULL, + 0xf462a796c563c453ULL, 0x01a6f4a752a551f5ULL, 0xc25ab598ef01b477ULL, + 0x7b9729ecbe1a3352ULL, 0x62dad5b80f7ca9b7ULL, 0xfc3b54c76f2276a8ULL, + 0x2c82efae6df619c3ULL, 0xd0b9bb6902d46f6bULL, 0x7a31dd4becbf62a7ULL, + 0x3d96e0ab76d131ddULL, 0x379ee6a978c721d1ULL, 0xe681a96728b61f4fULL, + 0x22281e0a364e503cULL, 0x4601c947c8cb028fULL, 0x1def0bf2e4c8c316ULL, + 0x5beec2b52c03c199ULL, 0xaa886622ee6b0dccULL, 0x56b332e581497b64ULL, + 0x719f2feeb00c235eULL, 0x7cc2dfbe1d4699a3ULL, 0x87ac7d2bd13845faULL, + 0xbf3e9e81a0e27c21ULL, 0x5a4836127ea6906cULL, 0xb5369883aef46c2dULL, + 0x776c2d1b41f5d85aULL, 0x3638120e2a627024ULL, 0xaf8c6523e96005caULL, + 0x06f302f5f1f9fb04ULL, 0x4c09cf45c6dd1283ULL, 0xa5846321e77615c6ULL, + 0xd11f4fce50713e9eULL, 0x7039db49e2a972abULL, 0x9cb0742cc4097de8ULL, + 0x3ac316f9d58d9b2cULL, 0x59bf37e68854636eULL, 0x54e2c7b6251ed993ULL, + 0x88a07828d8255df0ULL, 0x4b5c39176581b872ULL, 0xb0329b82a9ff642bULL, + 0x72682e1a46fed05cULL, 0x9d16808b96ac2c1dULL, 0x21df1ffec0bca33eULL, + 0x9812838a91a7241bULL, 0x2d241b093f534836ULL, 0xca0346c94540068cULL, + 0xa1269487b2d84c35ULL, 0x6b25d24ef7984ab9ULL, 0x42a33ee19d655b7cULL, + 0x96b8722eca1f6de4ULL, 0x53b731e486427362ULL, 0x47a73de09a6e537aULL, + 0x608b20ebab2b0b40ULL, 0xea7aad90d759f447ULL, 0x0eaaf1a45bb849ffULL, + 0x6678221e5ad2f044ULL, 0xab2e9285bcce5c39ULL, 0xfd9da0603d87275dULL, + 0x0000000000000000ULL, 0xb1946f25fb5a35deULL, 0x03f701f4f6f2f302ULL, + 0x12e30ef1edd5db1cULL, 0xfe6aa194cb75d45fULL, 0x272c1d0b3145583aULL, + 0x5cbb34e78f5f6b68ULL, 0xbcc99f7556108f23ULL, 0x749b2cefb7072b58ULL, + 0xe4d05c348ce1bdb8ULL, 0xf5c4533197c695a6ULL, 0xa37761d4168feec2ULL, + 0xb7676dd00aa3cedaULL, 0xa4229786b5d34433ULL, 0x9be5827e6755d719ULL, + 0x238eeaad64eb01c9ULL, 0x2ed31afdc9a1bb34ULL, 0x8da47b29df2e55f6ULL, + 0xf0c0503090cd9da0ULL, 0xd7ec4d3ba188c59aULL, 0xd946bc9ffa308c65ULL, + 0x3fc715f8d286932aULL, 0xf93f57c668297eaeULL, 0x5f4c351379ad986aULL, + 0x1e180a06123a3014ULL, 0x11140f051b27281eULL, 0xf63352c5613466a4ULL, + 0x5544331177bb8866ULL, 0xb6c1997758069f2fULL, 0x91ed847c6943c715ULL, + 0x8ff58e7a7b79f701ULL, 0x85fd8878756fe70dULL, 0xeed85a3682f7adb4ULL, + 0x6c70241c54c4e048ULL, 0xdde44b39af9ed596ULL, 0x2079eb599219f2cbULL, + 0x7860281848e8c050ULL, 0x1345fa56bf708ae9ULL, 0x45f6c8b33e39f18dULL, + 0x4afacdb03724e987ULL, 0xb4906c24fc513dd8ULL, 0xa0806020e07d1dc0ULL, + 0x40f2cbb23932f98bULL, 0xe072ab92d94fe44bULL, 0x15b6f8a34e8971edULL, + 0xe7275dc07a134ebaULL, 0x490dcc44c1d61a85ULL, 0xf795a66233913751ULL, + 0x5040301070b08060ULL, 0x5eeac1b42b08c99fULL, 0xae2a9184bbc5543fULL, + 0x5211c543d4e72297ULL, 0xe576a893de44ec4dULL, 0xed2f5bc274055eb6ULL, + 0x7f35de4aebb46aa1ULL, 0x73cedabd145b81a9ULL, 0x89068c8f8a800c05ULL, + 0x99b4772dc30275eeULL, 0x76cad9bc135089afULL, 0xd64ab99cf32d946fULL, + 0xdfb5be6a0bc97761ULL, 0x5d1dc040ddfa3a9dULL, 0xd41b4ccf577a3698ULL, + 0x10b2fba2498279ebULL, 0xba3a9d80a7e97427ULL, 0x6e21d14ff09342bfULL, + 0x637c211f5dd9f842ULL, 0xc50f43ca4c5d1e86ULL, 0x3892e3aa71da39dbULL, + 0x5715c642d3ec2a91ULL +}; + +static const u64 T4[256] = { + 0xbbb96a01bad3d268ULL, 0xe59a66b154fc4d19ULL, 
0xe26514cd2f71bc93ULL, + 0x25871b51749ccdb9ULL, 0xf7a257a453f55102ULL, 0xd0d6be03d3686bb8ULL, + 0xd6deb504d26b6fbdULL, 0xb35285fe4dd72964ULL, 0xfdba4aad50f05d0dULL, + 0xcf09e063ace98a26ULL, 0x091c96848d8a0e83ULL, 0xa5914d1abfdcc679ULL, + 0x3da7374d7090ddadULL, 0xf1aa5ca352f65507ULL, 0x7ba417e19ab352c8ULL, + 0xb55a8ef94cd42d61ULL, 0x460320acea238f65ULL, 0xc4e68411d56273a6ULL, + 0x55cc68c297a466f1ULL, 0xdcc6a80dd16e63b2ULL, 0xaa85d0993355ccffULL, + 0xfbb241aa51f35908ULL, 0xc7e20f9c5bed712aULL, 0xf359ae55a6f7a204ULL, + 0xfebec120de7f5f81ULL, 0xad7aa2e548d83d75ULL, 0xd729cc7fa8e59a32ULL, + 0x71bc0ae899b65ec7ULL, 0xe096e63bdb704b90ULL, 0xac8ddb9e3256c8faULL, + 0x95d11522b7c4e651ULL, 0x32b3aacefc19d72bULL, 0x704b7393e338ab48ULL, + 0x63843bfd9ebf42dcULL, 0x41fc52d091ae7eefULL, 0x7dac1ce69bb056cdULL, + 0x76437894e23baf4dULL, 0xbdb16106bbd0d66dULL, 0x9b32f1da41c31958ULL, + 0x7957e5176eb2a5cbULL, 0xf941b35ca5f2ae0bULL, 0x8016564bcb400bc0ULL, + 0x677fc20c6bbdb1daULL, 0x59dc7ecc95a26efbULL, 0xe1619f40a1febe1fULL, + 0x10cbc3e3f308eb18ULL, 0x81e12f30b1cefe4fULL, 0x0c10160e0206080aULL, + 0x922e675ecc4917dbULL, 0xa26e3f66c45137f3ULL, 0x4ee8cf531d277469ULL, + 0x78a09c6c143c5044ULL, 0xb0560e73c3582be8ULL, 0x573f9a3463a591f2ULL, + 0xe69eed3cda734f95ULL, 0xd3d2358e5de76934ULL, 0xdfc223805fe1613eULL, + 0xf2aed72edc79578bULL, 0x13cf486e7d87e994ULL, 0x94266c59cd4a13deULL, + 0x1fdf5e607f81e19eULL, 0xc1ea049b5aee752fULL, 0x7547f3196cb4adc1ULL, + 0xd5da3e895ce46d31ULL, 0x08ebeffff704fb0cULL, 0xd42d47f2266a98beULL, + 0x38abb7c7ff1cdb24ULL, 0x543b11b9ed2a937eULL, 0x4a1336a2e825876fULL, + 0x699c26f49dba4ed3ULL, 0x7f5fee106fb1a1ceULL, 0x03048b8d8e8f028cULL, + 0x56c8e34f192b647dULL, 0xe7699447a0fdba1aULL, 0x1ad3deeaf00de717ULL, + 0x113cba9889861e97ULL, 0x2278692d0f113c33ULL, 0x1238311507091c1bULL, + 0xc511fd6aafec8629ULL, 0x208b9bdbfb10cb30ULL, 0x3040583808182028ULL, + 0x7ea8976b153f5441ULL, 0x2e687f230d173439ULL, 0x18202c1c040c1014ULL, + 0x06080b0701030405ULL, 0x4507ab2164ac8de9ULL, 0xf8b6ca27df7c5b84ULL, + 0x29970d5f769ac5b3ULL, 0x0bef6472798bf980ULL, 0xf4a6dc29dd7a538eULL, + 0x8ef5b2b33d47f4c9ULL, 0x74b08a62163a584eULL, 0x82e5a4bd3f41fcc3ULL, + 0xb2a5fc853759dcebULL, 0x734ff81e6db7a9c4ULL, 0x90dd95a83848e0d8ULL, + 0xb1a17708b9d6de67ULL, 0x37bf2a447395d1a2ULL, 0x4c1b3da5e926836aULL, + 0xbeb5ea8b355fd4e1ULL, 0xe3926db655ff491cULL, 0x3baf3c4a7193d9a8ULL, + 0x07ff727c7b8df18aULL, 0x0f149d838c890a86ULL, 0x31b721437296d5a7ULL, + 0x1734b19f88851a92ULL, 0x0ee3e4f8f607ff09ULL, 0xfc4d33d62a7ea882ULL, + 0x84edafba3e42f8c6ULL, 0xd9ca28875ee2653bULL, 0xd2254cf527699cbbULL, + 0x890ac0cf46ca0543ULL, 0x286074240c14303cULL, 0x430fa02665af89ecULL, + 0x6d67df0568b8bdd5ULL, 0x5b2f8c3a61a399f8ULL, 0x0a181d0903050c0fULL, + 0xbc46187dc15e23e2ULL, 0xef827bb857f94116ULL, 0xcefe9918d6677fa9ULL, + 0xec86f035d976439aULL, 0xcdfa129558e87d25ULL, 0xea8efb32d875479fULL, + 0x4917bd2f66aa85e3ULL, 0xc8f6921fd7647bacULL, 0x9ccd83a63a4ee8d2ULL, + 0x8a0e4b42c84507cfULL, 0x88fdb9b43c44f0ccULL, 0x268390dcfa13cf35ULL, + 0x53c463c596a762f4ULL, 0xf551a552a7f4a601ULL, 0x77b401ef98b55ac2ULL, + 0x52331abeec29977bULL, 0xb7a97c0fb8d5da62ULL, 0xa876226fc7543bfcULL, + 0xc319f66daeef822cULL, 0x6b6fd40269bbb9d0ULL, 0xa762bfec4bdd317aULL, + 0xdd31d176abe0963dULL, 0xd121c778a9e69e37ULL, 0x4f1fb62867a981e6ULL, + 0x3c504e360a1e2822ULL, 0x8f02cbc847c90146ULL, 0x16c3c8e4f20bef1dULL, + 0x99c1032cb5c2ee5bULL, 0xcc0d6bee226688aaULL, 0x647b4981e532b356ULL, + 0x5e230cb0ee2f9f71ULL, 0xa399461dbedfc27cULL, 0xfa4538d12b7dac87ULL, + 0x217ce2a0819e3ebfULL, 0x6c90a67e1236485aULL, 
0x2d6cf4ae839836b5ULL, + 0x5ad8f5411b2d6c77ULL, 0x2470622a0e123836ULL, 0xca0560e923658cafULL, + 0x04fbf9f1f502f306ULL, 0x8312ddc645cf094cULL, 0xc61576e7216384a5ULL, + 0x9e3e7150ce4f1fd1ULL, 0xab72a9e249db3970ULL, 0xe87d09c42c74b09cULL, + 0x2c9b8dd5f916c33aULL, 0x6e635488e637bf59ULL, 0x93d91e25b6c7e254ULL, + 0xf05d25d82878a088ULL, 0x72b8816517395c4bULL, 0x2b64ffa9829b32b0ULL, + 0x5cd0fe461a2e6872ULL, 0x1d2cac968b80169dULL, 0x3ea3bcc0fe1fdf21ULL, + 0x1b24a7918a831298ULL, 0x3648533f091b242dULL, 0x8c064045c94603caULL, + 0x354cd8b2879426a1ULL, 0xb94a98f74ed2256bULL, 0x7c5b659de13ea342ULL, + 0xe46d1fca2e72b896ULL, 0x62734286e431b753ULL, 0x7a536e9ae03da747ULL, + 0x400b2babeb208b60ULL, 0x47f459d790ad7aeaULL, 0xff49b85ba4f1aa0eULL, + 0x44f0d25a1e227866ULL, 0x395ccebc85922eabULL, 0x5d27873d60a09dfdULL, + 0x0000000000000000ULL, 0xde355afb256f94b1ULL, 0x02f3f2f6f401f703ULL, + 0x1cdbd5edf10ee312ULL, 0x5fd475cb94a16afeULL, 0x3a5845310b1d2c27ULL, + 0x686b5f8fe734bb5cULL, 0x238f1056759fc9bcULL, 0x582b07b7ef2c9b74ULL, + 0xb8bde18c345cd0e4ULL, 0xa695c6973153c4f5ULL, 0xc2ee8f16d46177a3ULL, + 0xdacea30ad06d67b7ULL, 0x3344d3b5869722a4ULL, 0x19d755677e82e59bULL, + 0xc901eb64adea8e23ULL, 0x34bba1c9fd1ad32eULL, 0xf6552edf297ba48dULL, + 0xa09dcd903050c0f0ULL, 0x9ac588a13b4decd7ULL, 0x658c30fa9fbc46d9ULL, + 0x2a9386d2f815c73fULL, 0xae7e2968c6573ff9ULL, 0x6a98ad7913354c5fULL, + 0x14303a12060a181eULL, 0x1e28271b050f1411ULL, 0xa4663461c55233f6ULL, + 0x6688bb7711334455ULL, 0x2f9f06587799c1b6ULL, 0x15c743697c84ed91ULL, + 0x01f7797b7a8ef58fULL, 0x0de76f757888fd85ULL, 0xb4adf782365ad8eeULL, + 0x48e0c4541c24706cULL, 0x96d59eaf394be4ddULL, 0xcbf2199259eb7920ULL, + 0x50c0e84818286078ULL, 0xe98a70bf56fa4513ULL, 0x8df1393eb3c8f645ULL, + 0x87e92437b0cdfa4aULL, 0xd83d51fc246c90b4ULL, 0xc01d7de0206080a0ULL, + 0x8bf93239b2cbf240ULL, 0x4be44fd992ab72e0ULL, 0xed71894ea3f8b615ULL, + 0xba4e137ac05d27e7ULL, 0x851ad6c144cc0d49ULL, 0x5137913362a695f7ULL, + 0x6080b07010304050ULL, 0x9fc9082bb4c1ea5eULL, 0x3f54c5bb84912aaeULL, + 0x9722e7d443c51152ULL, 0x4dec44de93a876e5ULL, 0xb65e0574c25b2fedULL, + 0xa16ab4eb4ade357fULL, 0xa9815b14bddace73ULL, 0x050c808a8f8c0689ULL, + 0xee7502c32d77b499ULL, 0xaf895013bcd9ca76ULL, 0x6f942df39cb94ad6ULL, + 0x6177c90b6abeb5dfULL, 0x9d3afadd40c01d5dULL, 0x98367a57cf4c1bd4ULL, + 0xeb798249a2fbb210ULL, 0x2774e9a7809d3abaULL, 0xbf4293f04fd1216eULL, + 0x42f8d95d1f217c63ULL, 0x861e5d4cca430fc5ULL, 0xdb39da71aae39238ULL, + 0x912aecd342c61557ULL +}; + +static const u64 T5[256] = { + 0xb9bb016ad3ba68d2ULL, 0x9ae5b166fc54194dULL, 0x65e2cd14712f93bcULL, + 0x8725511b9c74b9cdULL, 0xa2f7a457f5530251ULL, 0xd6d003be68d3b86bULL, + 0xded604b56bd2bd6fULL, 0x52b3fe85d74d6429ULL, 0xbafdad4af0500d5dULL, + 0x09cf63e0e9ac268aULL, 0x1c0984968a8d830eULL, 0x91a51a4ddcbf79c6ULL, + 0xa73d4d379070adddULL, 0xaaf1a35cf6520755ULL, 0xa47be117b39ac852ULL, + 0x5ab5f98ed44c612dULL, 0x0346ac2023ea658fULL, 0xe6c4118462d5a673ULL, + 0xcc55c268a497f166ULL, 0xc6dc0da86ed1b263ULL, 0x85aa99d05533ffccULL, + 0xb2fbaa41f3510859ULL, 0xe2c79c0fed5b2a71ULL, 0x59f355aef7a604a2ULL, + 0xbefe20c17fde815fULL, 0x7aade5a2d848753dULL, 0x29d77fcce5a8329aULL, + 0xbc71e80ab699c75eULL, 0x96e03be670db904bULL, 0x8dac9edb5632fac8ULL, + 0xd1952215c4b751e6ULL, 0xb332ceaa19fc2bd7ULL, 0x4b70937338e348abULL, + 0x8463fd3bbf9edc42ULL, 0xfc41d052ae91ef7eULL, 0xac7de61cb09bcd56ULL, + 0x437694783be24dafULL, 0xb1bd0661d0bb6dd6ULL, 0x329bdaf1c3415819ULL, + 0x577917e5b26ecba5ULL, 0x41f95cb3f2a50baeULL, 0x16804b5640cbc00bULL, + 0x7f670cc2bd6bdab1ULL, 0xdc59cc7ea295fb6eULL, 
0x61e1409ffea11fbeULL, + 0xcb10e3c308f318ebULL, 0xe181302fceb14ffeULL, 0x100c0e1606020a08ULL, + 0x2e925e6749ccdb17ULL, 0x6ea2663f51c4f337ULL, 0xe84e53cf271d6974ULL, + 0xa0786c9c3c144450ULL, 0x56b0730e58c3e82bULL, 0x3f57349aa563f291ULL, + 0x9ee63ced73da954fULL, 0xd2d38e35e75d3469ULL, 0xc2df8023e15f3e61ULL, + 0xaef22ed779dc8b57ULL, 0xcf136e48877d94e9ULL, 0x2694596c4acdde13ULL, + 0xdf1f605e817f9ee1ULL, 0xeac19b04ee5a2f75ULL, 0x477519f3b46cc1adULL, + 0xdad5893ee45c316dULL, 0xeb08ffef04f70cfbULL, 0x2dd4f2476a26be98ULL, + 0xab38c7b71cff24dbULL, 0x3b54b9112aed7e93ULL, 0x134aa23625e86f87ULL, + 0x9c69f426ba9dd34eULL, 0x5f7f10eeb16fcea1ULL, 0x04038d8b8f8e8c02ULL, + 0xc8564fe32b197d64ULL, 0x69e74794fda01abaULL, 0xd31aeade0df017e7ULL, + 0x3c1198ba8689971eULL, 0x78222d69110f333cULL, 0x3812153109071b1cULL, + 0x11c56afdecaf2986ULL, 0x8b20db9b10fb30cbULL, 0x4030385818082820ULL, + 0xa87e6b973f154154ULL, 0x682e237f170d3934ULL, 0x20181c2c0c041410ULL, + 0x0806070b03010504ULL, 0x074521abac64e98dULL, 0xb6f827ca7cdf845bULL, + 0x97295f0d9a76b3c5ULL, 0xef0b72648b7980f9ULL, 0xa6f429dc7add8e53ULL, + 0xf58eb3b2473dc9f4ULL, 0xb074628a3a164e58ULL, 0xe582bda4413fc3fcULL, + 0xa5b285fc5937ebdcULL, 0x4f731ef8b76dc4a9ULL, 0xdd90a8954838d8e0ULL, + 0xa1b10877d6b967deULL, 0xbf37442a9573a2d1ULL, 0x1b4ca53d26e96a83ULL, + 0xb5be8bea5f35e1d4ULL, 0x92e3b66dff551c49ULL, 0xaf3b4a3c9371a8d9ULL, + 0xff077c728d7b8af1ULL, 0x140f839d898c860aULL, 0xb73143219672a7d5ULL, + 0x34179fb18588921aULL, 0xe30ef8e407f609ffULL, 0x4dfcd6337e2a82a8ULL, + 0xed84baaf423ec6f8ULL, 0xcad98728e25e3b65ULL, 0x25d2f54c6927bb9cULL, + 0x0a89cfc0ca464305ULL, 0x60282474140c3c30ULL, 0x0f4326a0af65ec89ULL, + 0x676d05dfb868d5bdULL, 0x2f5b3a8ca361f899ULL, 0x180a091d05030f0cULL, + 0x46bc7d185ec1e223ULL, 0x82efb87bf9571641ULL, 0xfece189967d6a97fULL, + 0x86ec35f076d99a43ULL, 0xfacd9512e858257dULL, 0x8eea32fb75d89f47ULL, + 0x17492fbdaa66e385ULL, 0xf6c81f9264d7ac7bULL, 0xcd9ca6834e3ad2e8ULL, + 0x0e8a424b45c8cf07ULL, 0xfd88b4b9443cccf0ULL, 0x8326dc9013fa35cfULL, + 0xc453c563a796f462ULL, 0x51f552a5f4a701a6ULL, 0xb477ef01b598c25aULL, + 0x3352be1a29ec7b97ULL, 0xa9b70f7cd5b862daULL, 0x76a86f2254c7fc3bULL, + 0x19c36df6efae2c82ULL, 0x6f6b02d4bb69d0b9ULL, 0x62a7ecbfdd4b7a31ULL, + 0x31dd76d1e0ab3d96ULL, 0x21d178c7e6a9379eULL, 0x1f4f28b6a967e681ULL, + 0x503c364e1e0a2228ULL, 0x028fc8cbc9474601ULL, 0xc316e4c80bf21defULL, + 0xc1992c03c2b55beeULL, 0x0dccee6b6622aa88ULL, 0x7b64814932e556b3ULL, + 0x235eb00c2fee719fULL, 0x99a31d46dfbe7cc2ULL, 0x45fad1387d2b87acULL, + 0x7c21a0e29e81bf3eULL, 0x906c7ea636125a48ULL, 0x6c2daef49883b536ULL, + 0xd85a41f52d1b776cULL, 0x70242a62120e3638ULL, 0x05cae9606523af8cULL, + 0xfb04f1f902f506f3ULL, 0x1283c6ddcf454c09ULL, 0x15c6e7766321a584ULL, + 0x3e9e50714fced11fULL, 0x72abe2a9db497039ULL, 0x7de8c409742c9cb0ULL, + 0x9b2cd58d16f93ac3ULL, 0x636e885437e659bfULL, 0xd993251ec7b654e2ULL, + 0x5df0d825782888a0ULL, 0xb872658139174b5cULL, 0x642ba9ff9b82b032ULL, + 0xd05c46fe2e1a7268ULL, 0x2c1d96ac808b9d16ULL, 0xa33ec0bc1ffe21dfULL, + 0x241b91a7838a9812ULL, 0x48363f531b092d24ULL, 0x068c454046c9ca03ULL, + 0x4c35b2d89487a126ULL, 0x4ab9f798d24e6b25ULL, 0x5b7c9d653ee142a3ULL, + 0x6de4ca1f722e96b8ULL, 0x7362864231e453b7ULL, 0x537a9a6e3de047a7ULL, + 0x0b40ab2b20eb608bULL, 0xf447d759ad90ea7aULL, 0x49ff5bb8f1a40eaaULL, + 0xf0445ad2221e6678ULL, 0x5c39bcce9285ab2eULL, 0x275d3d87a060fd9dULL, + 0x0000000000000000ULL, 0x35defb5a6f25b194ULL, 0xf302f6f201f403f7ULL, + 0xdb1cedd50ef112e3ULL, 0xd45fcb75a194fe6aULL, 0x583a31451d0b272cULL, + 0x6b688f5f34e75cbbULL, 0x8f2356109f75bcc9ULL, 
0x2b58b7072cef749bULL, + 0xbdb88ce15c34e4d0ULL, 0x95a697c65331f5c4ULL, 0xeec2168f61d4a377ULL, + 0xceda0aa36dd0b767ULL, 0x4433b5d39786a422ULL, 0xd7196755827e9be5ULL, + 0x01c964ebeaad238eULL, 0xbb34c9a11afd2ed3ULL, 0x55f6df2e7b298da4ULL, + 0x9da090cd5030f0c0ULL, 0xc59aa1884d3bd7ecULL, 0x8c65fa30bc9fd946ULL, + 0x932ad28615f83fc7ULL, 0x7eae682957c6f93fULL, 0x986a79ad35135f4cULL, + 0x3014123a0a061e18ULL, 0x281e1b270f051114ULL, 0x66a4613452c5f633ULL, + 0x886677bb33115544ULL, 0x9f2f58069977b6c1ULL, 0xc7156943847c91edULL, + 0xf7017b798e7a8ff5ULL, 0xe70d756f887885fdULL, 0xadb482f75a36eed8ULL, + 0xe04854c4241c6c70ULL, 0xd596af9e4b39dde4ULL, 0xf2cb9219eb592079ULL, + 0xc05048e828187860ULL, 0x8ae9bf70fa561345ULL, 0xf18d3e39c8b345f6ULL, + 0xe9873724cdb04afaULL, 0x3dd8fc516c24b490ULL, 0x1dc0e07d6020a080ULL, + 0xf98b3932cbb240f2ULL, 0xe44bd94fab92e072ULL, 0x71ed4e89f8a315b6ULL, + 0x4eba7a135dc0e727ULL, 0x1a85c1d6cc44490dULL, 0x37513391a662f795ULL, + 0x806070b030105040ULL, 0xc99f2b08c1b45eeaULL, 0x543fbbc59184ae2aULL, + 0x2297d4e7c5435211ULL, 0xec4dde44a893e576ULL, 0x5eb674055bc2ed2fULL, + 0x6aa1ebb4de4a7f35ULL, 0x81a9145bdabd73ceULL, 0x0c058a808c8f8906ULL, + 0x75eec302772d99b4ULL, 0x89af1350d9bc76caULL, 0x946ff32db99cd64aULL, + 0x77610bc9be6adfb5ULL, 0x3a9dddfac0405d1dULL, 0x3698577a4ccfd41bULL, + 0x79eb4982fba210b2ULL, 0x7427a7e99d80ba3aULL, 0x42bff093d14f6e21ULL, + 0xf8425dd9211f637cULL, 0x1e864c5d43cac50fULL, 0x39db71dae3aa3892ULL, + 0x2a91d3ecc6425715ULL +}; + +static const u64 T6[256] = { + 0x6a01bbb9d268bad3ULL, 0x66b1e59a4d1954fcULL, 0x14cde265bc932f71ULL, + 0x1b512587cdb9749cULL, 0x57a4f7a2510253f5ULL, 0xbe03d0d66bb8d368ULL, + 0xb504d6de6fbdd26bULL, 0x85feb35229644dd7ULL, 0x4aadfdba5d0d50f0ULL, + 0xe063cf098a26ace9ULL, 0x9684091c0e838d8aULL, 0x4d1aa591c679bfdcULL, + 0x374d3da7ddad7090ULL, 0x5ca3f1aa550752f6ULL, 0x17e17ba452c89ab3ULL, + 0x8ef9b55a2d614cd4ULL, 0x20ac46038f65ea23ULL, 0x8411c4e673a6d562ULL, + 0x68c255cc66f197a4ULL, 0xa80ddcc663b2d16eULL, 0xd099aa85ccff3355ULL, + 0x41aafbb2590851f3ULL, 0x0f9cc7e2712a5bedULL, 0xae55f359a204a6f7ULL, + 0xc120febe5f81de7fULL, 0xa2e5ad7a3d7548d8ULL, 0xcc7fd7299a32a8e5ULL, + 0x0ae871bc5ec799b6ULL, 0xe63be0964b90db70ULL, 0xdb9eac8dc8fa3256ULL, + 0x152295d1e651b7c4ULL, 0xaace32b3d72bfc19ULL, 0x7393704bab48e338ULL, + 0x3bfd638442dc9ebfULL, 0x52d041fc7eef91aeULL, 0x1ce67dac56cd9bb0ULL, + 0x78947643af4de23bULL, 0x6106bdb1d66dbbd0ULL, 0xf1da9b32195841c3ULL, + 0xe5177957a5cb6eb2ULL, 0xb35cf941ae0ba5f2ULL, 0x564b80160bc0cb40ULL, + 0xc20c677fb1da6bbdULL, 0x7ecc59dc6efb95a2ULL, 0x9f40e161be1fa1feULL, + 0xc3e310cbeb18f308ULL, 0x2f3081e1fe4fb1ceULL, 0x160e0c10080a0206ULL, + 0x675e922e17dbcc49ULL, 0x3f66a26e37f3c451ULL, 0xcf534ee874691d27ULL, + 0x9c6c78a05044143cULL, 0x0e73b0562be8c358ULL, 0x9a34573f91f263a5ULL, + 0xed3ce69e4f95da73ULL, 0x358ed3d269345de7ULL, 0x2380dfc2613e5fe1ULL, + 0xd72ef2ae578bdc79ULL, 0x486e13cfe9947d87ULL, 0x6c59942613decd4aULL, + 0x5e601fdfe19e7f81ULL, 0x049bc1ea752f5aeeULL, 0xf3197547adc16cb4ULL, + 0x3e89d5da6d315ce4ULL, 0xefff08ebfb0cf704ULL, 0x47f2d42d98be266aULL, + 0xb7c738abdb24ff1cULL, 0x11b9543b937eed2aULL, 0x36a24a13876fe825ULL, + 0x26f4699c4ed39dbaULL, 0xee107f5fa1ce6fb1ULL, 0x8b8d0304028c8e8fULL, + 0xe34f56c8647d192bULL, 0x9447e769ba1aa0fdULL, 0xdeea1ad3e717f00dULL, + 0xba98113c1e978986ULL, 0x692d22783c330f11ULL, 0x311512381c1b0709ULL, + 0xfd6ac5118629afecULL, 0x9bdb208bcb30fb10ULL, 0x5838304020280818ULL, + 0x976b7ea85441153fULL, 0x7f232e6834390d17ULL, 0x2c1c18201014040cULL, + 0x0b07060804050103ULL, 0xab2145078de964acULL, 
0xca27f8b65b84df7cULL, + 0x0d5f2997c5b3769aULL, 0x64720beff980798bULL, 0xdc29f4a6538edd7aULL, + 0xb2b38ef5f4c93d47ULL, 0x8a6274b0584e163aULL, 0xa4bd82e5fcc33f41ULL, + 0xfc85b2a5dceb3759ULL, 0xf81e734fa9c46db7ULL, 0x95a890dde0d83848ULL, + 0x7708b1a1de67b9d6ULL, 0x2a4437bfd1a27395ULL, 0x3da54c1b836ae926ULL, + 0xea8bbeb5d4e1355fULL, 0x6db6e392491c55ffULL, 0x3c4a3bafd9a87193ULL, + 0x727c07fff18a7b8dULL, 0x9d830f140a868c89ULL, 0x214331b7d5a77296ULL, + 0xb19f17341a928885ULL, 0xe4f80ee3ff09f607ULL, 0x33d6fc4da8822a7eULL, + 0xafba84edf8c63e42ULL, 0x2887d9ca653b5ee2ULL, 0x4cf5d2259cbb2769ULL, + 0xc0cf890a054346caULL, 0x74242860303c0c14ULL, 0xa026430f89ec65afULL, + 0xdf056d67bdd568b8ULL, 0x8c3a5b2f99f861a3ULL, 0x1d090a180c0f0305ULL, + 0x187dbc4623e2c15eULL, 0x7bb8ef82411657f9ULL, 0x9918cefe7fa9d667ULL, + 0xf035ec86439ad976ULL, 0x1295cdfa7d2558e8ULL, 0xfb32ea8e479fd875ULL, + 0xbd2f491785e366aaULL, 0x921fc8f67bacd764ULL, 0x83a69ccde8d23a4eULL, + 0x4b428a0e07cfc845ULL, 0xb9b488fdf0cc3c44ULL, 0x90dc2683cf35fa13ULL, + 0x63c553c462f496a7ULL, 0xa552f551a601a7f4ULL, 0x01ef77b45ac298b5ULL, + 0x1abe5233977bec29ULL, 0x7c0fb7a9da62b8d5ULL, 0x226fa8763bfcc754ULL, + 0xf66dc319822caeefULL, 0xd4026b6fb9d069bbULL, 0xbfeca762317a4bddULL, + 0xd176dd31963dabe0ULL, 0xc778d1219e37a9e6ULL, 0xb6284f1f81e667a9ULL, + 0x4e363c5028220a1eULL, 0xcbc88f02014647c9ULL, 0xc8e416c3ef1df20bULL, + 0x032c99c1ee5bb5c2ULL, 0x6beecc0d88aa2266ULL, 0x4981647bb356e532ULL, + 0x0cb05e239f71ee2fULL, 0x461da399c27cbedfULL, 0x38d1fa45ac872b7dULL, + 0xe2a0217c3ebf819eULL, 0xa67e6c90485a1236ULL, 0xf4ae2d6c36b58398ULL, + 0xf5415ad86c771b2dULL, 0x622a247038360e12ULL, 0x60e9ca058caf2365ULL, + 0xf9f104fbf306f502ULL, 0xddc68312094c45cfULL, 0x76e7c61584a52163ULL, + 0x71509e3e1fd1ce4fULL, 0xa9e2ab72397049dbULL, 0x09c4e87db09c2c74ULL, + 0x8dd52c9bc33af916ULL, 0x54886e63bf59e637ULL, 0x1e2593d9e254b6c7ULL, + 0x25d8f05da0882878ULL, 0x816572b85c4b1739ULL, 0xffa92b6432b0829bULL, + 0xfe465cd068721a2eULL, 0xac961d2c169d8b80ULL, 0xbcc03ea3df21fe1fULL, + 0xa7911b2412988a83ULL, 0x533f3648242d091bULL, 0x40458c0603cac946ULL, + 0xd8b2354c26a18794ULL, 0x98f7b94a256b4ed2ULL, 0x659d7c5ba342e13eULL, + 0x1fcae46db8962e72ULL, 0x42866273b753e431ULL, 0x6e9a7a53a747e03dULL, + 0x2bab400b8b60eb20ULL, 0x59d747f47aea90adULL, 0xb85bff49aa0ea4f1ULL, + 0xd25a44f078661e22ULL, 0xcebc395c2eab8592ULL, 0x873d5d279dfd60a0ULL, + 0x0000000000000000ULL, 0x5afbde3594b1256fULL, 0xf2f602f3f703f401ULL, + 0xd5ed1cdbe312f10eULL, 0x75cb5fd46afe94a1ULL, 0x45313a582c270b1dULL, + 0x5f8f686bbb5ce734ULL, 0x1056238fc9bc759fULL, 0x07b7582b9b74ef2cULL, + 0xe18cb8bdd0e4345cULL, 0xc697a695c4f53153ULL, 0x8f16c2ee77a3d461ULL, + 0xa30adace67b7d06dULL, 0xd3b5334422a48697ULL, 0x556719d7e59b7e82ULL, + 0xeb64c9018e23adeaULL, 0xa1c934bbd32efd1aULL, 0x2edff655a48d297bULL, + 0xcd90a09dc0f03050ULL, 0x88a19ac5ecd73b4dULL, 0x30fa658c46d99fbcULL, + 0x86d22a93c73ff815ULL, 0x2968ae7e3ff9c657ULL, 0xad796a984c5f1335ULL, + 0x3a121430181e060aULL, 0x271b1e281411050fULL, 0x3461a46633f6c552ULL, + 0xbb77668844551133ULL, 0x06582f9fc1b67799ULL, 0x436915c7ed917c84ULL, + 0x797b01f7f58f7a8eULL, 0x6f750de7fd857888ULL, 0xf782b4add8ee365aULL, + 0xc45448e0706c1c24ULL, 0x9eaf96d5e4dd394bULL, 0x1992cbf2792059ebULL, + 0xe84850c060781828ULL, 0x70bfe98a451356faULL, 0x393e8df1f645b3c8ULL, + 0x243787e9fa4ab0cdULL, 0x51fcd83d90b4246cULL, 0x7de0c01d80a02060ULL, + 0x32398bf9f240b2cbULL, 0x4fd94be472e092abULL, 0x894eed71b615a3f8ULL, + 0x137aba4e27e7c05dULL, 0xd6c1851a0d4944ccULL, 0x9133513795f762a6ULL, + 0xb070608040501030ULL, 0x082b9fc9ea5eb4c1ULL, 
0xc5bb3f542aae8491ULL, + 0xe7d49722115243c5ULL, 0x44de4dec76e593a8ULL, 0x0574b65e2fedc25bULL, + 0xb4eba16a357f4adeULL, 0x5b14a981ce73bddaULL, 0x808a050c06898f8cULL, + 0x02c3ee75b4992d77ULL, 0x5013af89ca76bcd9ULL, 0x2df36f944ad69cb9ULL, + 0xc90b6177b5df6abeULL, 0xfadd9d3a1d5d40c0ULL, 0x7a5798361bd4cf4cULL, + 0x8249eb79b210a2fbULL, 0xe9a727743aba809dULL, 0x93f0bf42216e4fd1ULL, + 0xd95d42f87c631f21ULL, 0x5d4c861e0fc5ca43ULL, 0xda71db399238aae3ULL, + 0xecd3912a155742c6ULL +}; + +static const u64 T7[256] = { + 0x016ab9bb68d2d3baULL, 0xb1669ae5194dfc54ULL, 0xcd1465e293bc712fULL, + 0x511b8725b9cd9c74ULL, 0xa457a2f70251f553ULL, 0x03bed6d0b86b68d3ULL, + 0x04b5ded6bd6f6bd2ULL, 0xfe8552b36429d74dULL, 0xad4abafd0d5df050ULL, + 0x63e009cf268ae9acULL, 0x84961c09830e8a8dULL, 0x1a4d91a579c6dcbfULL, + 0x4d37a73daddd9070ULL, 0xa35caaf10755f652ULL, 0xe117a47bc852b39aULL, + 0xf98e5ab5612dd44cULL, 0xac200346658f23eaULL, 0x1184e6c4a67362d5ULL, + 0xc268cc55f166a497ULL, 0x0da8c6dcb2636ed1ULL, 0x99d085aaffcc5533ULL, + 0xaa41b2fb0859f351ULL, 0x9c0fe2c72a71ed5bULL, 0x55ae59f304a2f7a6ULL, + 0x20c1befe815f7fdeULL, 0xe5a27aad753dd848ULL, 0x7fcc29d7329ae5a8ULL, + 0xe80abc71c75eb699ULL, 0x3be696e0904b70dbULL, 0x9edb8dacfac85632ULL, + 0x2215d19551e6c4b7ULL, 0xceaab3322bd719fcULL, 0x93734b7048ab38e3ULL, + 0xfd3b8463dc42bf9eULL, 0xd052fc41ef7eae91ULL, 0xe61cac7dcd56b09bULL, + 0x947843764daf3be2ULL, 0x0661b1bd6dd6d0bbULL, 0xdaf1329b5819c341ULL, + 0x17e55779cba5b26eULL, 0x5cb341f90baef2a5ULL, 0x4b561680c00b40cbULL, + 0x0cc27f67dab1bd6bULL, 0xcc7edc59fb6ea295ULL, 0x409f61e11fbefea1ULL, + 0xe3c3cb1018eb08f3ULL, 0x302fe1814ffeceb1ULL, 0x0e16100c0a080602ULL, + 0x5e672e92db1749ccULL, 0x663f6ea2f33751c4ULL, 0x53cfe84e6974271dULL, + 0x6c9ca07844503c14ULL, 0x730e56b0e82b58c3ULL, 0x349a3f57f291a563ULL, + 0x3ced9ee6954f73daULL, 0x8e35d2d33469e75dULL, 0x8023c2df3e61e15fULL, + 0x2ed7aef28b5779dcULL, 0x6e48cf1394e9877dULL, 0x596c2694de134acdULL, + 0x605edf1f9ee1817fULL, 0x9b04eac12f75ee5aULL, 0x19f34775c1adb46cULL, + 0x893edad5316de45cULL, 0xffefeb080cfb04f7ULL, 0xf2472dd4be986a26ULL, + 0xc7b7ab3824db1cffULL, 0xb9113b547e932aedULL, 0xa236134a6f8725e8ULL, + 0xf4269c69d34eba9dULL, 0x10ee5f7fcea1b16fULL, 0x8d8b04038c028f8eULL, + 0x4fe3c8567d642b19ULL, 0x479469e71abafda0ULL, 0xeaded31a17e70df0ULL, + 0x98ba3c11971e8689ULL, 0x2d697822333c110fULL, 0x153138121b1c0907ULL, + 0x6afd11c52986ecafULL, 0xdb9b8b2030cb10fbULL, 0x3858403028201808ULL, + 0x6b97a87e41543f15ULL, 0x237f682e3934170dULL, 0x1c2c201814100c04ULL, + 0x070b080605040301ULL, 0x21ab0745e98dac64ULL, 0x27cab6f8845b7cdfULL, + 0x5f0d9729b3c59a76ULL, 0x7264ef0b80f98b79ULL, 0x29dca6f48e537addULL, + 0xb3b2f58ec9f4473dULL, 0x628ab0744e583a16ULL, 0xbda4e582c3fc413fULL, + 0x85fca5b2ebdc5937ULL, 0x1ef84f73c4a9b76dULL, 0xa895dd90d8e04838ULL, + 0x0877a1b167ded6b9ULL, 0x442abf37a2d19573ULL, 0xa53d1b4c6a8326e9ULL, + 0x8beab5bee1d45f35ULL, 0xb66d92e31c49ff55ULL, 0x4a3caf3ba8d99371ULL, + 0x7c72ff078af18d7bULL, 0x839d140f860a898cULL, 0x4321b731a7d59672ULL, + 0x9fb13417921a8588ULL, 0xf8e4e30e09ff07f6ULL, 0xd6334dfc82a87e2aULL, + 0xbaafed84c6f8423eULL, 0x8728cad93b65e25eULL, 0xf54c25d2bb9c6927ULL, + 0xcfc00a894305ca46ULL, 0x247460283c30140cULL, 0x26a00f43ec89af65ULL, + 0x05df676dd5bdb868ULL, 0x3a8c2f5bf899a361ULL, 0x091d180a0f0c0503ULL, + 0x7d1846bce2235ec1ULL, 0xb87b82ef1641f957ULL, 0x1899fecea97f67d6ULL, + 0x35f086ec9a4376d9ULL, 0x9512facd257de858ULL, 0x32fb8eea9f4775d8ULL, + 0x2fbd1749e385aa66ULL, 0x1f92f6c8ac7b64d7ULL, 0xa683cd9cd2e84e3aULL, + 0x424b0e8acf0745c8ULL, 0xb4b9fd88ccf0443cULL, 
0xdc90832635cf13faULL, + 0xc563c453f462a796ULL, 0x52a551f501a6f4a7ULL, 0xef01b477c25ab598ULL, + 0xbe1a33527b9729ecULL, 0x0f7ca9b762dad5b8ULL, 0x6f2276a8fc3b54c7ULL, + 0x6df619c32c82efaeULL, 0x02d46f6bd0b9bb69ULL, 0xecbf62a77a31dd4bULL, + 0x76d131dd3d96e0abULL, 0x78c721d1379ee6a9ULL, 0x28b61f4fe681a967ULL, + 0x364e503c22281e0aULL, 0xc8cb028f4601c947ULL, 0xe4c8c3161def0bf2ULL, + 0x2c03c1995beec2b5ULL, 0xee6b0dccaa886622ULL, 0x81497b6456b332e5ULL, + 0xb00c235e719f2feeULL, 0x1d4699a37cc2dfbeULL, 0xd13845fa87ac7d2bULL, + 0xa0e27c21bf3e9e81ULL, 0x7ea6906c5a483612ULL, 0xaef46c2db5369883ULL, + 0x41f5d85a776c2d1bULL, 0x2a6270243638120eULL, 0xe96005caaf8c6523ULL, + 0xf1f9fb0406f302f5ULL, 0xc6dd12834c09cf45ULL, 0xe77615c6a5846321ULL, + 0x50713e9ed11f4fceULL, 0xe2a972ab7039db49ULL, 0xc4097de89cb0742cULL, + 0xd58d9b2c3ac316f9ULL, 0x8854636e59bf37e6ULL, 0x251ed99354e2c7b6ULL, + 0xd8255df088a07828ULL, 0x6581b8724b5c3917ULL, 0xa9ff642bb0329b82ULL, + 0x46fed05c72682e1aULL, 0x96ac2c1d9d16808bULL, 0xc0bca33e21df1ffeULL, + 0x91a7241b9812838aULL, 0x3f5348362d241b09ULL, 0x4540068cca0346c9ULL, + 0xb2d84c35a1269487ULL, 0xf7984ab96b25d24eULL, 0x9d655b7c42a33ee1ULL, + 0xca1f6de496b8722eULL, 0x8642736253b731e4ULL, 0x9a6e537a47a73de0ULL, + 0xab2b0b40608b20ebULL, 0xd759f447ea7aad90ULL, 0x5bb849ff0eaaf1a4ULL, + 0x5ad2f0446678221eULL, 0xbcce5c39ab2e9285ULL, 0x3d87275dfd9da060ULL, + 0x0000000000000000ULL, 0xfb5a35deb1946f25ULL, 0xf6f2f30203f701f4ULL, + 0xedd5db1c12e30ef1ULL, 0xcb75d45ffe6aa194ULL, 0x3145583a272c1d0bULL, + 0x8f5f6b685cbb34e7ULL, 0x56108f23bcc99f75ULL, 0xb7072b58749b2cefULL, + 0x8ce1bdb8e4d05c34ULL, 0x97c695a6f5c45331ULL, 0x168feec2a37761d4ULL, + 0x0aa3cedab7676dd0ULL, 0xb5d34433a4229786ULL, 0x6755d7199be5827eULL, + 0x64eb01c9238eeaadULL, 0xc9a1bb342ed31afdULL, 0xdf2e55f68da47b29ULL, + 0x90cd9da0f0c05030ULL, 0xa188c59ad7ec4d3bULL, 0xfa308c65d946bc9fULL, + 0xd286932a3fc715f8ULL, 0x68297eaef93f57c6ULL, 0x79ad986a5f4c3513ULL, + 0x123a30141e180a06ULL, 0x1b27281e11140f05ULL, 0x613466a4f63352c5ULL, + 0x77bb886655443311ULL, 0x58069f2fb6c19977ULL, 0x6943c71591ed847cULL, + 0x7b79f7018ff58e7aULL, 0x756fe70d85fd8878ULL, 0x82f7adb4eed85a36ULL, + 0x54c4e0486c70241cULL, 0xaf9ed596dde44b39ULL, 0x9219f2cb2079eb59ULL, + 0x48e8c05078602818ULL, 0xbf708ae91345fa56ULL, 0x3e39f18d45f6c8b3ULL, + 0x3724e9874afacdb0ULL, 0xfc513dd8b4906c24ULL, 0xe07d1dc0a0806020ULL, + 0x3932f98b40f2cbb2ULL, 0xd94fe44be072ab92ULL, 0x4e8971ed15b6f8a3ULL, + 0x7a134ebae7275dc0ULL, 0xc1d61a85490dcc44ULL, 0x33913751f795a662ULL, + 0x70b0806050403010ULL, 0x2b08c99f5eeac1b4ULL, 0xbbc5543fae2a9184ULL, + 0xd4e722975211c543ULL, 0xde44ec4de576a893ULL, 0x74055eb6ed2f5bc2ULL, + 0xebb46aa17f35de4aULL, 0x145b81a973cedabdULL, 0x8a800c0589068c8fULL, + 0xc30275ee99b4772dULL, 0x135089af76cad9bcULL, 0xf32d946fd64ab99cULL, + 0x0bc97761dfb5be6aULL, 0xddfa3a9d5d1dc040ULL, 0x577a3698d41b4ccfULL, + 0x498279eb10b2fba2ULL, 0xa7e97427ba3a9d80ULL, 0xf09342bf6e21d14fULL, + 0x5dd9f842637c211fULL, 0x4c5d1e86c50f43caULL, 0x71da39db3892e3aaULL, + 0xd3ec2a915715c642ULL +}; + +static const u64 c[KHAZAD_ROUNDS + 1] = { + 0xba542f7453d3d24dULL, 0x50ac8dbf70529a4cULL, 0xead597d133515ba6ULL, + 0xde48a899db32b7fcULL, 0xe39e919be2bb416eULL, 0xa5cb6b95a1f3b102ULL, + 0xccc41d14c363da5dULL, 0x5fdc7dcd7f5a6c5cULL, 0xf726ffede89d6f8eULL +}; + +static int khazad_setkey(struct crypto_tfm *tfm, const u8 *in_key, + unsigned int key_len) +{ + struct khazad_ctx *ctx = crypto_tfm_ctx(tfm); + const __be32 *key = (const __be32 *)in_key; + int r; + const u64 *S = T7; + u64 K2, K1; + + /* key is supposed to 
be 32-bit aligned */ + K2 = ((u64)be32_to_cpu(key[0]) << 32) | be32_to_cpu(key[1]); + K1 = ((u64)be32_to_cpu(key[2]) << 32) | be32_to_cpu(key[3]); + + /* setup the encrypt key */ + for (r = 0; r <= KHAZAD_ROUNDS; r++) { + ctx->E[r] = T0[(int)(K1 >> 56) ] ^ + T1[(int)(K1 >> 48) & 0xff] ^ + T2[(int)(K1 >> 40) & 0xff] ^ + T3[(int)(K1 >> 32) & 0xff] ^ + T4[(int)(K1 >> 24) & 0xff] ^ + T5[(int)(K1 >> 16) & 0xff] ^ + T6[(int)(K1 >> 8) & 0xff] ^ + T7[(int)(K1 ) & 0xff] ^ + c[r] ^ K2; + K2 = K1; + K1 = ctx->E[r]; + } + /* Setup the decrypt key */ + ctx->D[0] = ctx->E[KHAZAD_ROUNDS]; + for (r = 1; r < KHAZAD_ROUNDS; r++) { + K1 = ctx->E[KHAZAD_ROUNDS - r]; + ctx->D[r] = T0[(int)S[(int)(K1 >> 56) ] & 0xff] ^ + T1[(int)S[(int)(K1 >> 48) & 0xff] & 0xff] ^ + T2[(int)S[(int)(K1 >> 40) & 0xff] & 0xff] ^ + T3[(int)S[(int)(K1 >> 32) & 0xff] & 0xff] ^ + T4[(int)S[(int)(K1 >> 24) & 0xff] & 0xff] ^ + T5[(int)S[(int)(K1 >> 16) & 0xff] & 0xff] ^ + T6[(int)S[(int)(K1 >> 8) & 0xff] & 0xff] ^ + T7[(int)S[(int)(K1 ) & 0xff] & 0xff]; + } + ctx->D[KHAZAD_ROUNDS] = ctx->E[0]; + + return 0; + +} + +static void khazad_crypt(const u64 roundKey[KHAZAD_ROUNDS + 1], + u8 *ciphertext, const u8 *plaintext) +{ + const __be64 *src = (const __be64 *)plaintext; + __be64 *dst = (__be64 *)ciphertext; + int r; + u64 state; + + state = be64_to_cpu(*src) ^ roundKey[0]; + + for (r = 1; r < KHAZAD_ROUNDS; r++) { + state = T0[(int)(state >> 56) ] ^ + T1[(int)(state >> 48) & 0xff] ^ + T2[(int)(state >> 40) & 0xff] ^ + T3[(int)(state >> 32) & 0xff] ^ + T4[(int)(state >> 24) & 0xff] ^ + T5[(int)(state >> 16) & 0xff] ^ + T6[(int)(state >> 8) & 0xff] ^ + T7[(int)(state ) & 0xff] ^ + roundKey[r]; + } + + state = (T0[(int)(state >> 56) ] & 0xff00000000000000ULL) ^ + (T1[(int)(state >> 48) & 0xff] & 0x00ff000000000000ULL) ^ + (T2[(int)(state >> 40) & 0xff] & 0x0000ff0000000000ULL) ^ + (T3[(int)(state >> 32) & 0xff] & 0x000000ff00000000ULL) ^ + (T4[(int)(state >> 24) & 0xff] & 0x00000000ff000000ULL) ^ + (T5[(int)(state >> 16) & 0xff] & 0x0000000000ff0000ULL) ^ + (T6[(int)(state >> 8) & 0xff] & 0x000000000000ff00ULL) ^ + (T7[(int)(state ) & 0xff] & 0x00000000000000ffULL) ^ + roundKey[KHAZAD_ROUNDS]; + + *dst = cpu_to_be64(state); +} + +static void khazad_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) +{ + struct khazad_ctx *ctx = crypto_tfm_ctx(tfm); + khazad_crypt(ctx->E, dst, src); +} + +static void khazad_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) +{ + struct khazad_ctx *ctx = crypto_tfm_ctx(tfm); + khazad_crypt(ctx->D, dst, src); +} + +static struct crypto_alg khazad_alg = { + .cra_name = "khazad", + .cra_flags = CRYPTO_ALG_TYPE_CIPHER, + .cra_blocksize = KHAZAD_BLOCK_SIZE, + .cra_ctxsize = sizeof (struct khazad_ctx), + .cra_alignmask = 7, + .cra_module = THIS_MODULE, + .cra_list = LIST_HEAD_INIT(khazad_alg.cra_list), + .cra_u = { .cipher = { + .cia_min_keysize = KHAZAD_KEY_SIZE, + .cia_max_keysize = KHAZAD_KEY_SIZE, + .cia_setkey = khazad_setkey, + .cia_encrypt = khazad_encrypt, + .cia_decrypt = khazad_decrypt } } +}; + +static int __init khazad_mod_init(void) +{ + int ret = 0; + + ret = crypto_register_alg(&khazad_alg); + return ret; +} + +static void __exit khazad_mod_fini(void) +{ + crypto_unregister_alg(&khazad_alg); +} + + +module_init(khazad_mod_init); +module_exit(khazad_mod_fini); + +MODULE_LICENSE("GPL"); +MODULE_DESCRIPTION("Khazad Cryptographic Algorithm"); diff --git a/crypto/krng.c b/crypto/krng.c new file mode 100644 index 00000000..4328bb34 --- /dev/null +++ b/crypto/krng.c @@ -0,0 +1,66 @@ +/* + * RNG 
implementation using standard kernel RNG.
+ *
+ * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
+ *
+ * This program is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License as published by the
+ * Free Software Foundation; either version 2 of the License, or (at your
+ * option) any later version.
+ *
+ */
+
+#include <crypto/internal/rng.h>
+#include <linux/err.h>
+#include <linux/init.h>
+#include <linux/module.h>
+#include <linux/random.h>
+
+static int krng_get_random(struct crypto_rng *tfm, u8 *rdata, unsigned int dlen)
+{
+	get_random_bytes(rdata, dlen);
+	return 0;
+}
+
+static int krng_reset(struct crypto_rng *tfm, u8 *seed, unsigned int slen)
+{
+	return 0;
+}
+
+static struct crypto_alg krng_alg = {
+	.cra_name = "stdrng",
+	.cra_driver_name = "krng",
+	.cra_priority = 200,
+	.cra_flags = CRYPTO_ALG_TYPE_RNG,
+	.cra_ctxsize = 0,
+	.cra_type = &crypto_rng_type,
+	.cra_module = THIS_MODULE,
+	.cra_list = LIST_HEAD_INIT(krng_alg.cra_list),
+	.cra_u = {
+		.rng = {
+			.rng_make_random = krng_get_random,
+			.rng_reset = krng_reset,
+			.seedsize = 0,
+		}
+	}
+};
+
+
+/* Module initialization */
+static int __init krng_mod_init(void)
+{
+	return crypto_register_alg(&krng_alg);
+}
+
+static void __exit krng_mod_fini(void)
+{
+	crypto_unregister_alg(&krng_alg);
+	return;
+}
+
+module_init(krng_mod_init);
+module_exit(krng_mod_fini);
+
+MODULE_LICENSE("GPL");
+MODULE_DESCRIPTION("Kernel Random Number Generator");
+MODULE_ALIAS("stdrng");
diff --git a/crypto/lrw.c b/crypto/lrw.c
new file mode 100644
index 00000000..ba42acc4
--- /dev/null
+++ b/crypto/lrw.c
@@ -0,0 +1,402 @@
+/* LRW: as defined by Cyril Guyot in
+ * http://grouper.ieee.org/groups/1619/email/pdf00017.pdf
+ *
+ * Copyright (c) 2006 Rik Snel <rsnel@cube.dyndns.org>
+ *
+ * Based on ecb.c
+ * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
+ *
+ * This program is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License as published by the Free
+ * Software Foundation; either version 2 of the License, or (at your option)
+ * any later version.
+ */
+/* This implementation is checked against the test vectors in the above
+ * document and by a test vector provided by Ken Buchanan at
+ * http://www.mail-archive.com/stds-p1619@listserv.ieee.org/msg00173.html
+ *
+ * The test vectors are included in the testing module tcrypt.[ch] */
+
+#include <crypto/algapi.h>
+#include <linux/err.h>
+#include <linux/init.h>
+#include <linux/kernel.h>
+#include <linux/module.h>
+#include <linux/scatterlist.h>
+#include <linux/slab.h>
+
+#include <crypto/b128ops.h>
+#include <crypto/gf128mul.h>
+#include <crypto/lrw.h>
+
+struct priv {
+	struct crypto_cipher *child;
+	struct lrw_table_ctx table;
+};
+
+static inline void setbit128_bbe(void *b, int bit)
+{
+	__set_bit(bit ^ (0x80 -
+#ifdef __BIG_ENDIAN
+			 BITS_PER_LONG
+#else
+			 BITS_PER_BYTE
+#endif
+			), b);
+}
+
+int lrw_init_table(struct lrw_table_ctx *ctx, const u8 *tweak)
+{
+	be128 tmp = { 0 };
+	int i;
+
+	if (ctx->table)
+		gf128mul_free_64k(ctx->table);
+
+	/* initialize multiplication table for Key2 */
+	ctx->table = gf128mul_init_64k_bbe((be128 *)tweak);
+	if (!ctx->table)
+		return -ENOMEM;
+
+	/* initialize optimization table */
+	for (i = 0; i < 128; i++) {
+		setbit128_bbe(&tmp, i);
+		ctx->mulinc[i] = tmp;
+		gf128mul_64k_bbe(&ctx->mulinc[i], ctx->table);
+	}
+
+	return 0;
+}
+EXPORT_SYMBOL_GPL(lrw_init_table);
+
+void lrw_free_table(struct lrw_table_ctx *ctx)
+{
+	if (ctx->table)
+		gf128mul_free_64k(ctx->table);
+}
+EXPORT_SYMBOL_GPL(lrw_free_table);
+
+static int setkey(struct crypto_tfm *parent, const u8 *key,
+		  unsigned int keylen)
+{
+	struct priv *ctx = crypto_tfm_ctx(parent);
+	struct crypto_cipher *child = ctx->child;
+	int err, bsize = LRW_BLOCK_SIZE;
+	const u8 *tweak = key + keylen - bsize;
+
+	crypto_cipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
+	crypto_cipher_set_flags(child, crypto_tfm_get_flags(parent) &
+				CRYPTO_TFM_REQ_MASK);
+	err = crypto_cipher_setkey(child, key, keylen - bsize);
+	if (err)
+		return err;
+	crypto_tfm_set_flags(parent, crypto_cipher_get_flags(child) &
+			     CRYPTO_TFM_RES_MASK);
+
+	return lrw_init_table(&ctx->table, tweak);
+}
+
+struct sinfo {
+	be128 t;
+	struct crypto_tfm *tfm;
+	void (*fn)(struct crypto_tfm *, u8 *, const u8 *);
+};
+
+static inline void inc(be128 *iv)
+{
+	be64_add_cpu(&iv->b, 1);
+	if (!iv->b)
+		be64_add_cpu(&iv->a, 1);
+}
+
+static inline void lrw_round(struct sinfo *s, void *dst, const void *src)
+{
+	be128_xor(dst, &s->t, src);	/* PP <- T xor P */
+	s->fn(s->tfm, dst, dst);	/* CC <- E(Key2,PP) */
+	be128_xor(dst, dst, &s->t);	/* C <- T xor CC */
+}
+
+/* this returns the number of consecutive 1 bits starting
+ * from the right, get_index128(00 00 00 00 00 00 ...
00 00 10 FB) = 2 */ +static inline int get_index128(be128 *block) +{ + int x; + __be32 *p = (__be32 *) block; + + for (p += 3, x = 0; x < 128; p--, x += 32) { + u32 val = be32_to_cpup(p); + + if (!~val) + continue; + + return x + ffz(val); + } + + return x; +} + +static int crypt(struct blkcipher_desc *d, + struct blkcipher_walk *w, struct priv *ctx, + void (*fn)(struct crypto_tfm *, u8 *, const u8 *)) +{ + int err; + unsigned int avail; + const int bs = LRW_BLOCK_SIZE; + struct sinfo s = { + .tfm = crypto_cipher_tfm(ctx->child), + .fn = fn + }; + be128 *iv; + u8 *wsrc; + u8 *wdst; + + err = blkcipher_walk_virt(d, w); + if (!(avail = w->nbytes)) + return err; + + wsrc = w->src.virt.addr; + wdst = w->dst.virt.addr; + + /* calculate first value of T */ + iv = (be128 *)w->iv; + s.t = *iv; + + /* T <- I*Key2 */ + gf128mul_64k_bbe(&s.t, ctx->table.table); + + goto first; + + for (;;) { + do { + /* T <- I*Key2, using the optimization + * discussed in the specification */ + be128_xor(&s.t, &s.t, + &ctx->table.mulinc[get_index128(iv)]); + inc(iv); + +first: + lrw_round(&s, wdst, wsrc); + + wsrc += bs; + wdst += bs; + } while ((avail -= bs) >= bs); + + err = blkcipher_walk_done(d, w, avail); + if (!(avail = w->nbytes)) + break; + + wsrc = w->src.virt.addr; + wdst = w->dst.virt.addr; + } + + return err; +} + +static int encrypt(struct blkcipher_desc *desc, struct scatterlist *dst, + struct scatterlist *src, unsigned int nbytes) +{ + struct priv *ctx = crypto_blkcipher_ctx(desc->tfm); + struct blkcipher_walk w; + + blkcipher_walk_init(&w, dst, src, nbytes); + return crypt(desc, &w, ctx, + crypto_cipher_alg(ctx->child)->cia_encrypt); +} + +static int decrypt(struct blkcipher_desc *desc, struct scatterlist *dst, + struct scatterlist *src, unsigned int nbytes) +{ + struct priv *ctx = crypto_blkcipher_ctx(desc->tfm); + struct blkcipher_walk w; + + blkcipher_walk_init(&w, dst, src, nbytes); + return crypt(desc, &w, ctx, + crypto_cipher_alg(ctx->child)->cia_decrypt); +} + +int lrw_crypt(struct blkcipher_desc *desc, struct scatterlist *sdst, + struct scatterlist *ssrc, unsigned int nbytes, + struct lrw_crypt_req *req) +{ + const unsigned int bsize = LRW_BLOCK_SIZE; + const unsigned int max_blks = req->tbuflen / bsize; + struct lrw_table_ctx *ctx = req->table_ctx; + struct blkcipher_walk walk; + unsigned int nblocks; + be128 *iv, *src, *dst, *t; + be128 *t_buf = req->tbuf; + int err, i; + + BUG_ON(max_blks < 1); + + blkcipher_walk_init(&walk, sdst, ssrc, nbytes); + + err = blkcipher_walk_virt(desc, &walk); + nbytes = walk.nbytes; + if (!nbytes) + return err; + + nblocks = min(walk.nbytes / bsize, max_blks); + src = (be128 *)walk.src.virt.addr; + dst = (be128 *)walk.dst.virt.addr; + + /* calculate first value of T */ + iv = (be128 *)walk.iv; + t_buf[0] = *iv; + + /* T <- I*Key2 */ + gf128mul_64k_bbe(&t_buf[0], ctx->table); + + i = 0; + goto first; + + for (;;) { + do { + for (i = 0; i < nblocks; i++) { + /* T <- I*Key2, using the optimization + * discussed in the specification */ + be128_xor(&t_buf[i], t, + &ctx->mulinc[get_index128(iv)]); + inc(iv); +first: + t = &t_buf[i]; + + /* PP <- T xor P */ + be128_xor(dst + i, t, src + i); + } + + /* CC <- E(Key2,PP) */ + req->crypt_fn(req->crypt_ctx, (u8 *)dst, + nblocks * bsize); + + /* C <- T xor CC */ + for (i = 0; i < nblocks; i++) + be128_xor(dst + i, dst + i, &t_buf[i]); + + src += nblocks; + dst += nblocks; + nbytes -= nblocks * bsize; + nblocks = min(nbytes / bsize, max_blks); + } while (nblocks > 0); + + err = blkcipher_walk_done(desc, &walk, nbytes); + 
nbytes = walk.nbytes; + if (!nbytes) + break; + + nblocks = min(nbytes / bsize, max_blks); + src = (be128 *)walk.src.virt.addr; + dst = (be128 *)walk.dst.virt.addr; + } + + return err; +} +EXPORT_SYMBOL_GPL(lrw_crypt); + +static int init_tfm(struct crypto_tfm *tfm) +{ + struct crypto_cipher *cipher; + struct crypto_instance *inst = (void *)tfm->__crt_alg; + struct crypto_spawn *spawn = crypto_instance_ctx(inst); + struct priv *ctx = crypto_tfm_ctx(tfm); + u32 *flags = &tfm->crt_flags; + + cipher = crypto_spawn_cipher(spawn); + if (IS_ERR(cipher)) + return PTR_ERR(cipher); + + if (crypto_cipher_blocksize(cipher) != LRW_BLOCK_SIZE) { + *flags |= CRYPTO_TFM_RES_BAD_BLOCK_LEN; + crypto_free_cipher(cipher); + return -EINVAL; + } + + ctx->child = cipher; + return 0; +} + +static void exit_tfm(struct crypto_tfm *tfm) +{ + struct priv *ctx = crypto_tfm_ctx(tfm); + + lrw_free_table(&ctx->table); + crypto_free_cipher(ctx->child); +} + +static struct crypto_instance *alloc(struct rtattr **tb) +{ + struct crypto_instance *inst; + struct crypto_alg *alg; + int err; + + err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_BLKCIPHER); + if (err) + return ERR_PTR(err); + + alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_CIPHER, + CRYPTO_ALG_TYPE_MASK); + if (IS_ERR(alg)) + return ERR_CAST(alg); + + inst = crypto_alloc_instance("lrw", alg); + if (IS_ERR(inst)) + goto out_put_alg; + + inst->alg.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER; + inst->alg.cra_priority = alg->cra_priority; + inst->alg.cra_blocksize = alg->cra_blocksize; + + if (alg->cra_alignmask < 7) inst->alg.cra_alignmask = 7; + else inst->alg.cra_alignmask = alg->cra_alignmask; + inst->alg.cra_type = &crypto_blkcipher_type; + + if (!(alg->cra_blocksize % 4)) + inst->alg.cra_alignmask |= 3; + inst->alg.cra_blkcipher.ivsize = alg->cra_blocksize; + inst->alg.cra_blkcipher.min_keysize = + alg->cra_cipher.cia_min_keysize + alg->cra_blocksize; + inst->alg.cra_blkcipher.max_keysize = + alg->cra_cipher.cia_max_keysize + alg->cra_blocksize; + + inst->alg.cra_ctxsize = sizeof(struct priv); + + inst->alg.cra_init = init_tfm; + inst->alg.cra_exit = exit_tfm; + + inst->alg.cra_blkcipher.setkey = setkey; + inst->alg.cra_blkcipher.encrypt = encrypt; + inst->alg.cra_blkcipher.decrypt = decrypt; + +out_put_alg: + crypto_mod_put(alg); + return inst; +} + +static void free(struct crypto_instance *inst) +{ + crypto_drop_spawn(crypto_instance_ctx(inst)); + kfree(inst); +} + +static struct crypto_template crypto_tmpl = { + .name = "lrw", + .alloc = alloc, + .free = free, + .module = THIS_MODULE, +}; + +static int __init crypto_module_init(void) +{ + return crypto_register_template(&crypto_tmpl); +} + +static void __exit crypto_module_exit(void) +{ + crypto_unregister_template(&crypto_tmpl); +} + +module_init(crypto_module_init); +module_exit(crypto_module_exit); + +MODULE_LICENSE("GPL"); +MODULE_DESCRIPTION("LRW block cipher mode"); diff --git a/crypto/lzo.c b/crypto/lzo.c new file mode 100644 index 00000000..b5e77077 --- /dev/null +++ b/crypto/lzo.c @@ -0,0 +1,106 @@ +/* + * Cryptographic API. + * + * This program is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License version 2 as published by + * the Free Software Foundation. + * + * This program is distributed in the hope that it will be useful, but WITHOUT + * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or + * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for + * more details. 
+ * + * You should have received a copy of the GNU General Public License along with + * this program; if not, write to the Free Software Foundation, Inc., 51 + * Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + * + */ + +#include <linux/init.h> +#include <linux/module.h> +#include <linux/crypto.h> +#include <linux/vmalloc.h> +#include <linux/lzo.h> + +struct lzo_ctx { + void *lzo_comp_mem; +}; + +static int lzo_init(struct crypto_tfm *tfm) +{ + struct lzo_ctx *ctx = crypto_tfm_ctx(tfm); + + ctx->lzo_comp_mem = vmalloc(LZO1X_MEM_COMPRESS); + if (!ctx->lzo_comp_mem) + return -ENOMEM; + + return 0; +} + +static void lzo_exit(struct crypto_tfm *tfm) +{ + struct lzo_ctx *ctx = crypto_tfm_ctx(tfm); + + vfree(ctx->lzo_comp_mem); +} + +static int lzo_compress(struct crypto_tfm *tfm, const u8 *src, + unsigned int slen, u8 *dst, unsigned int *dlen) +{ + struct lzo_ctx *ctx = crypto_tfm_ctx(tfm); + size_t tmp_len = *dlen; /* size_t(ulong) <-> uint on 64 bit */ + int err; + + err = lzo1x_1_compress(src, slen, dst, &tmp_len, ctx->lzo_comp_mem); + + if (err != LZO_E_OK) + return -EINVAL; + + *dlen = tmp_len; + return 0; +} + +static int lzo_decompress(struct crypto_tfm *tfm, const u8 *src, + unsigned int slen, u8 *dst, unsigned int *dlen) +{ + int err; + size_t tmp_len = *dlen; /* size_t(ulong) <-> uint on 64 bit */ + + err = lzo1x_decompress_safe(src, slen, dst, &tmp_len); + + if (err != LZO_E_OK) + return -EINVAL; + + *dlen = tmp_len; + return 0; + +} + +static struct crypto_alg alg = { + .cra_name = "lzo", + .cra_flags = CRYPTO_ALG_TYPE_COMPRESS, + .cra_ctxsize = sizeof(struct lzo_ctx), + .cra_module = THIS_MODULE, + .cra_list = LIST_HEAD_INIT(alg.cra_list), + .cra_init = lzo_init, + .cra_exit = lzo_exit, + .cra_u = { .compress = { + .coa_compress = lzo_compress, + .coa_decompress = lzo_decompress } } +}; + +static int __init lzo_mod_init(void) +{ + return crypto_register_alg(&alg); +} + +static void __exit lzo_mod_fini(void) +{ + crypto_unregister_alg(&alg); +} + +module_init(lzo_mod_init); +module_exit(lzo_mod_fini); + +MODULE_LICENSE("GPL"); +MODULE_DESCRIPTION("LZO Compression Algorithm"); diff --git a/crypto/md4.c b/crypto/md4.c new file mode 100644 index 00000000..0477a6a0 --- /dev/null +++ b/crypto/md4.c @@ -0,0 +1,258 @@ +/* + * Cryptographic API. + * + * MD4 Message Digest Algorithm (RFC1320). + * + * Implementation derived from Andrew Tridgell and Steve French's + * CIFS MD4 implementation, and the cryptoapi implementation + * originally based on the public domain implementation written + * by Colin Plumb in 1993. + * + * Copyright (c) Andrew Tridgell 1997-1998. + * Modified by Steve French (sfrench@us.ibm.com) 2002 + * Copyright (c) Cryptoapi developers. + * Copyright (c) 2002 David S. Miller (davem@redhat.com) + * Copyright (c) 2002 James Morris <jmorris@intercode.com.au> + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 2 of the License, or + * (at your option) any later version. 
+ * + */ +#include <crypto/internal/hash.h> +#include <linux/init.h> +#include <linux/kernel.h> +#include <linux/module.h> +#include <linux/string.h> +#include <linux/types.h> +#include <asm/byteorder.h> + +#define MD4_DIGEST_SIZE 16 +#define MD4_HMAC_BLOCK_SIZE 64 +#define MD4_BLOCK_WORDS 16 +#define MD4_HASH_WORDS 4 + +struct md4_ctx { + u32 hash[MD4_HASH_WORDS]; + u32 block[MD4_BLOCK_WORDS]; + u64 byte_count; +}; + +static inline u32 lshift(u32 x, unsigned int s) +{ + x &= 0xFFFFFFFF; + return ((x << s) & 0xFFFFFFFF) | (x >> (32 - s)); +} + +static inline u32 F(u32 x, u32 y, u32 z) +{ + return (x & y) | ((~x) & z); +} + +static inline u32 G(u32 x, u32 y, u32 z) +{ + return (x & y) | (x & z) | (y & z); +} + +static inline u32 H(u32 x, u32 y, u32 z) +{ + return x ^ y ^ z; +} + +#define ROUND1(a,b,c,d,k,s) (a = lshift(a + F(b,c,d) + k, s)) +#define ROUND2(a,b,c,d,k,s) (a = lshift(a + G(b,c,d) + k + (u32)0x5A827999,s)) +#define ROUND3(a,b,c,d,k,s) (a = lshift(a + H(b,c,d) + k + (u32)0x6ED9EBA1,s)) + +/* XXX: this stuff can be optimized */ +static inline void le32_to_cpu_array(u32 *buf, unsigned int words) +{ + while (words--) { + __le32_to_cpus(buf); + buf++; + } +} + +static inline void cpu_to_le32_array(u32 *buf, unsigned int words) +{ + while (words--) { + __cpu_to_le32s(buf); + buf++; + } +} + +static void md4_transform(u32 *hash, u32 const *in) +{ + u32 a, b, c, d; + + a = hash[0]; + b = hash[1]; + c = hash[2]; + d = hash[3]; + + ROUND1(a, b, c, d, in[0], 3); + ROUND1(d, a, b, c, in[1], 7); + ROUND1(c, d, a, b, in[2], 11); + ROUND1(b, c, d, a, in[3], 19); + ROUND1(a, b, c, d, in[4], 3); + ROUND1(d, a, b, c, in[5], 7); + ROUND1(c, d, a, b, in[6], 11); + ROUND1(b, c, d, a, in[7], 19); + ROUND1(a, b, c, d, in[8], 3); + ROUND1(d, a, b, c, in[9], 7); + ROUND1(c, d, a, b, in[10], 11); + ROUND1(b, c, d, a, in[11], 19); + ROUND1(a, b, c, d, in[12], 3); + ROUND1(d, a, b, c, in[13], 7); + ROUND1(c, d, a, b, in[14], 11); + ROUND1(b, c, d, a, in[15], 19); + + ROUND2(a, b, c, d,in[ 0], 3); + ROUND2(d, a, b, c, in[4], 5); + ROUND2(c, d, a, b, in[8], 9); + ROUND2(b, c, d, a, in[12], 13); + ROUND2(a, b, c, d, in[1], 3); + ROUND2(d, a, b, c, in[5], 5); + ROUND2(c, d, a, b, in[9], 9); + ROUND2(b, c, d, a, in[13], 13); + ROUND2(a, b, c, d, in[2], 3); + ROUND2(d, a, b, c, in[6], 5); + ROUND2(c, d, a, b, in[10], 9); + ROUND2(b, c, d, a, in[14], 13); + ROUND2(a, b, c, d, in[3], 3); + ROUND2(d, a, b, c, in[7], 5); + ROUND2(c, d, a, b, in[11], 9); + ROUND2(b, c, d, a, in[15], 13); + + ROUND3(a, b, c, d,in[ 0], 3); + ROUND3(d, a, b, c, in[8], 9); + ROUND3(c, d, a, b, in[4], 11); + ROUND3(b, c, d, a, in[12], 15); + ROUND3(a, b, c, d, in[2], 3); + ROUND3(d, a, b, c, in[10], 9); + ROUND3(c, d, a, b, in[6], 11); + ROUND3(b, c, d, a, in[14], 15); + ROUND3(a, b, c, d, in[1], 3); + ROUND3(d, a, b, c, in[9], 9); + ROUND3(c, d, a, b, in[5], 11); + ROUND3(b, c, d, a, in[13], 15); + ROUND3(a, b, c, d, in[3], 3); + ROUND3(d, a, b, c, in[11], 9); + ROUND3(c, d, a, b, in[7], 11); + ROUND3(b, c, d, a, in[15], 15); + + hash[0] += a; + hash[1] += b; + hash[2] += c; + hash[3] += d; +} + +static inline void md4_transform_helper(struct md4_ctx *ctx) +{ + le32_to_cpu_array(ctx->block, ARRAY_SIZE(ctx->block)); + md4_transform(ctx->hash, ctx->block); +} + +static int md4_init(struct shash_desc *desc) +{ + struct md4_ctx *mctx = shash_desc_ctx(desc); + + mctx->hash[0] = 0x67452301; + mctx->hash[1] = 0xefcdab89; + mctx->hash[2] = 0x98badcfe; + mctx->hash[3] = 0x10325476; + mctx->byte_count = 0; + + return 0; +} + +static int 
md4_update(struct shash_desc *desc, const u8 *data, unsigned int len) +{ + struct md4_ctx *mctx = shash_desc_ctx(desc); + const u32 avail = sizeof(mctx->block) - (mctx->byte_count & 0x3f); + + mctx->byte_count += len; + + if (avail > len) { + memcpy((char *)mctx->block + (sizeof(mctx->block) - avail), + data, len); + return 0; + } + + memcpy((char *)mctx->block + (sizeof(mctx->block) - avail), + data, avail); + + md4_transform_helper(mctx); + data += avail; + len -= avail; + + while (len >= sizeof(mctx->block)) { + memcpy(mctx->block, data, sizeof(mctx->block)); + md4_transform_helper(mctx); + data += sizeof(mctx->block); + len -= sizeof(mctx->block); + } + + memcpy(mctx->block, data, len); + + return 0; +} + +static int md4_final(struct shash_desc *desc, u8 *out) +{ + struct md4_ctx *mctx = shash_desc_ctx(desc); + const unsigned int offset = mctx->byte_count & 0x3f; + char *p = (char *)mctx->block + offset; + int padding = 56 - (offset + 1); + + *p++ = 0x80; + if (padding < 0) { + memset(p, 0x00, padding + sizeof (u64)); + md4_transform_helper(mctx); + p = (char *)mctx->block; + padding = 56; + } + + memset(p, 0, padding); + mctx->block[14] = mctx->byte_count << 3; + mctx->block[15] = mctx->byte_count >> 29; + le32_to_cpu_array(mctx->block, (sizeof(mctx->block) - + sizeof(u64)) / sizeof(u32)); + md4_transform(mctx->hash, mctx->block); + cpu_to_le32_array(mctx->hash, ARRAY_SIZE(mctx->hash)); + memcpy(out, mctx->hash, sizeof(mctx->hash)); + memset(mctx, 0, sizeof(*mctx)); + + return 0; +} + +static struct shash_alg alg = { + .digestsize = MD4_DIGEST_SIZE, + .init = md4_init, + .update = md4_update, + .final = md4_final, + .descsize = sizeof(struct md4_ctx), + .base = { + .cra_name = "md4", + .cra_flags = CRYPTO_ALG_TYPE_SHASH, + .cra_blocksize = MD4_HMAC_BLOCK_SIZE, + .cra_module = THIS_MODULE, + } +}; + +static int __init md4_mod_init(void) +{ + return crypto_register_shash(&alg); +} + +static void __exit md4_mod_fini(void) +{ + crypto_unregister_shash(&alg); +} + +module_init(md4_mod_init); +module_exit(md4_mod_fini); + +MODULE_LICENSE("GPL"); +MODULE_DESCRIPTION("MD4 Message Digest Algorithm"); + diff --git a/crypto/md5.c b/crypto/md5.c new file mode 100644 index 00000000..7febeaab --- /dev/null +++ b/crypto/md5.c @@ -0,0 +1,170 @@ +/* + * Cryptographic API. + * + * MD5 Message Digest Algorithm (RFC1321). + * + * Derived from cryptoapi implementation, originally based on the + * public domain implementation written by Colin Plumb in 1993. + * + * Copyright (c) Cryptoapi developers. + * Copyright (c) 2002 James Morris <jmorris@intercode.com.au> + * + * This program is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License as published by the Free + * Software Foundation; either version 2 of the License, or (at your option) + * any later version. 
+ * + */ +#include <crypto/internal/hash.h> +#include <crypto/md5.h> +#include <linux/init.h> +#include <linux/module.h> +#include <linux/string.h> +#include <linux/types.h> +#include <linux/cryptohash.h> +#include <asm/byteorder.h> + +/* XXX: this stuff can be optimized */ +static inline void le32_to_cpu_array(u32 *buf, unsigned int words) +{ + while (words--) { + __le32_to_cpus(buf); + buf++; + } +} + +static inline void cpu_to_le32_array(u32 *buf, unsigned int words) +{ + while (words--) { + __cpu_to_le32s(buf); + buf++; + } +} + +static inline void md5_transform_helper(struct md5_state *ctx) +{ + le32_to_cpu_array(ctx->block, sizeof(ctx->block) / sizeof(u32)); + md5_transform(ctx->hash, ctx->block); +} + +static int md5_init(struct shash_desc *desc) +{ + struct md5_state *mctx = shash_desc_ctx(desc); + + mctx->hash[0] = 0x67452301; + mctx->hash[1] = 0xefcdab89; + mctx->hash[2] = 0x98badcfe; + mctx->hash[3] = 0x10325476; + mctx->byte_count = 0; + + return 0; +} + +static int md5_update(struct shash_desc *desc, const u8 *data, unsigned int len) +{ + struct md5_state *mctx = shash_desc_ctx(desc); + const u32 avail = sizeof(mctx->block) - (mctx->byte_count & 0x3f); + + mctx->byte_count += len; + + if (avail > len) { + memcpy((char *)mctx->block + (sizeof(mctx->block) - avail), + data, len); + return 0; + } + + memcpy((char *)mctx->block + (sizeof(mctx->block) - avail), + data, avail); + + md5_transform_helper(mctx); + data += avail; + len -= avail; + + while (len >= sizeof(mctx->block)) { + memcpy(mctx->block, data, sizeof(mctx->block)); + md5_transform_helper(mctx); + data += sizeof(mctx->block); + len -= sizeof(mctx->block); + } + + memcpy(mctx->block, data, len); + + return 0; +} + +static int md5_final(struct shash_desc *desc, u8 *out) +{ + struct md5_state *mctx = shash_desc_ctx(desc); + const unsigned int offset = mctx->byte_count & 0x3f; + char *p = (char *)mctx->block + offset; + int padding = 56 - (offset + 1); + + *p++ = 0x80; + if (padding < 0) { + memset(p, 0x00, padding + sizeof (u64)); + md5_transform_helper(mctx); + p = (char *)mctx->block; + padding = 56; + } + + memset(p, 0, padding); + mctx->block[14] = mctx->byte_count << 3; + mctx->block[15] = mctx->byte_count >> 29; + le32_to_cpu_array(mctx->block, (sizeof(mctx->block) - + sizeof(u64)) / sizeof(u32)); + md5_transform(mctx->hash, mctx->block); + cpu_to_le32_array(mctx->hash, sizeof(mctx->hash) / sizeof(u32)); + memcpy(out, mctx->hash, sizeof(mctx->hash)); + memset(mctx, 0, sizeof(*mctx)); + + return 0; +} + +static int md5_export(struct shash_desc *desc, void *out) +{ + struct md5_state *ctx = shash_desc_ctx(desc); + + memcpy(out, ctx, sizeof(*ctx)); + return 0; +} + +static int md5_import(struct shash_desc *desc, const void *in) +{ + struct md5_state *ctx = shash_desc_ctx(desc); + + memcpy(ctx, in, sizeof(*ctx)); + return 0; +} + +static struct shash_alg alg = { + .digestsize = MD5_DIGEST_SIZE, + .init = md5_init, + .update = md5_update, + .final = md5_final, + .export = md5_export, + .import = md5_import, + .descsize = sizeof(struct md5_state), + .statesize = sizeof(struct md5_state), + .base = { + .cra_name = "md5", + .cra_flags = CRYPTO_ALG_TYPE_SHASH, + .cra_blocksize = MD5_HMAC_BLOCK_SIZE, + .cra_module = THIS_MODULE, + } +}; + +static int __init md5_mod_init(void) +{ + return crypto_register_shash(&alg); +} + +static void __exit md5_mod_fini(void) +{ + crypto_unregister_shash(&alg); +} + +module_init(md5_mod_init); +module_exit(md5_mod_fini); + +MODULE_LICENSE("GPL"); +MODULE_DESCRIPTION("MD5 Message Digest 
Algorithm"); diff --git a/crypto/michael_mic.c b/crypto/michael_mic.c new file mode 100644 index 00000000..079b761b --- /dev/null +++ b/crypto/michael_mic.c @@ -0,0 +1,186 @@ +/* + * Cryptographic API + * + * Michael MIC (IEEE 802.11i/TKIP) keyed digest + * + * Copyright (c) 2004 Jouni Malinen <j@w1.fi> + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License version 2 as + * published by the Free Software Foundation. + */ +#include <crypto/internal/hash.h> +#include <asm/byteorder.h> +#include <linux/init.h> +#include <linux/module.h> +#include <linux/string.h> +#include <linux/types.h> + + +struct michael_mic_ctx { + u32 l, r; +}; + +struct michael_mic_desc_ctx { + u8 pending[4]; + size_t pending_len; + + u32 l, r; +}; + +static inline u32 xswap(u32 val) +{ + return ((val & 0x00ff00ff) << 8) | ((val & 0xff00ff00) >> 8); +} + + +#define michael_block(l, r) \ +do { \ + r ^= rol32(l, 17); \ + l += r; \ + r ^= xswap(l); \ + l += r; \ + r ^= rol32(l, 3); \ + l += r; \ + r ^= ror32(l, 2); \ + l += r; \ +} while (0) + + +static int michael_init(struct shash_desc *desc) +{ + struct michael_mic_desc_ctx *mctx = shash_desc_ctx(desc); + struct michael_mic_ctx *ctx = crypto_shash_ctx(desc->tfm); + mctx->pending_len = 0; + mctx->l = ctx->l; + mctx->r = ctx->r; + + return 0; +} + + +static int michael_update(struct shash_desc *desc, const u8 *data, + unsigned int len) +{ + struct michael_mic_desc_ctx *mctx = shash_desc_ctx(desc); + const __le32 *src; + + if (mctx->pending_len) { + int flen = 4 - mctx->pending_len; + if (flen > len) + flen = len; + memcpy(&mctx->pending[mctx->pending_len], data, flen); + mctx->pending_len += flen; + data += flen; + len -= flen; + + if (mctx->pending_len < 4) + return 0; + + src = (const __le32 *)mctx->pending; + mctx->l ^= le32_to_cpup(src); + michael_block(mctx->l, mctx->r); + mctx->pending_len = 0; + } + + src = (const __le32 *)data; + + while (len >= 4) { + mctx->l ^= le32_to_cpup(src++); + michael_block(mctx->l, mctx->r); + len -= 4; + } + + if (len > 0) { + mctx->pending_len = len; + memcpy(mctx->pending, src, len); + } + + return 0; +} + + +static int michael_final(struct shash_desc *desc, u8 *out) +{ + struct michael_mic_desc_ctx *mctx = shash_desc_ctx(desc); + u8 *data = mctx->pending; + __le32 *dst = (__le32 *)out; + + /* Last block and padding (0x5a, 4..7 x 0) */ + switch (mctx->pending_len) { + case 0: + mctx->l ^= 0x5a; + break; + case 1: + mctx->l ^= data[0] | 0x5a00; + break; + case 2: + mctx->l ^= data[0] | (data[1] << 8) | 0x5a0000; + break; + case 3: + mctx->l ^= data[0] | (data[1] << 8) | (data[2] << 16) | + 0x5a000000; + break; + } + michael_block(mctx->l, mctx->r); + /* l ^= 0; */ + michael_block(mctx->l, mctx->r); + + dst[0] = cpu_to_le32(mctx->l); + dst[1] = cpu_to_le32(mctx->r); + + return 0; +} + + +static int michael_setkey(struct crypto_shash *tfm, const u8 *key, + unsigned int keylen) +{ + struct michael_mic_ctx *mctx = crypto_shash_ctx(tfm); + + const __le32 *data = (const __le32 *)key; + + if (keylen != 8) { + crypto_shash_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN); + return -EINVAL; + } + + mctx->l = le32_to_cpu(data[0]); + mctx->r = le32_to_cpu(data[1]); + return 0; +} + +static struct shash_alg alg = { + .digestsize = 8, + .setkey = michael_setkey, + .init = michael_init, + .update = michael_update, + .final = michael_final, + .descsize = sizeof(struct michael_mic_desc_ctx), + .base = { + .cra_name = "michael_mic", + .cra_blocksize = 8, + .cra_alignmask = 3, + 
.cra_ctxsize = sizeof(struct michael_mic_ctx), + .cra_module = THIS_MODULE, + } +}; + +static int __init michael_mic_init(void) +{ + return crypto_register_shash(&alg); +} + + +static void __exit michael_mic_exit(void) +{ + crypto_unregister_shash(&alg); +} + + +module_init(michael_mic_init); +module_exit(michael_mic_exit); + +MODULE_LICENSE("GPL v2"); +MODULE_DESCRIPTION("Michael MIC"); +MODULE_AUTHOR("Jouni Malinen <j@w1.fi>"); diff --git a/crypto/pcbc.c b/crypto/pcbc.c new file mode 100644 index 00000000..d1b8bdfb --- /dev/null +++ b/crypto/pcbc.c @@ -0,0 +1,297 @@ +/* + * PCBC: Propagating Cipher Block Chaining mode + * + * Copyright (C) 2006 Red Hat, Inc. All Rights Reserved. + * Written by David Howells (dhowells@redhat.com) + * + * Derived from cbc.c + * - Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au> + * + * This program is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License as published by the Free + * Software Foundation; either version 2 of the License, or (at your option) + * any later version. + * + */ + +#include <crypto/algapi.h> +#include <linux/err.h> +#include <linux/init.h> +#include <linux/kernel.h> +#include <linux/module.h> +#include <linux/scatterlist.h> +#include <linux/slab.h> + +struct crypto_pcbc_ctx { + struct crypto_cipher *child; +}; + +static int crypto_pcbc_setkey(struct crypto_tfm *parent, const u8 *key, + unsigned int keylen) +{ + struct crypto_pcbc_ctx *ctx = crypto_tfm_ctx(parent); + struct crypto_cipher *child = ctx->child; + int err; + + crypto_cipher_clear_flags(child, CRYPTO_TFM_REQ_MASK); + crypto_cipher_set_flags(child, crypto_tfm_get_flags(parent) & + CRYPTO_TFM_REQ_MASK); + err = crypto_cipher_setkey(child, key, keylen); + crypto_tfm_set_flags(parent, crypto_cipher_get_flags(child) & + CRYPTO_TFM_RES_MASK); + return err; +} + +static int crypto_pcbc_encrypt_segment(struct blkcipher_desc *desc, + struct blkcipher_walk *walk, + struct crypto_cipher *tfm) +{ + void (*fn)(struct crypto_tfm *, u8 *, const u8 *) = + crypto_cipher_alg(tfm)->cia_encrypt; + int bsize = crypto_cipher_blocksize(tfm); + unsigned int nbytes = walk->nbytes; + u8 *src = walk->src.virt.addr; + u8 *dst = walk->dst.virt.addr; + u8 *iv = walk->iv; + + do { + crypto_xor(iv, src, bsize); + fn(crypto_cipher_tfm(tfm), dst, iv); + memcpy(iv, dst, bsize); + crypto_xor(iv, src, bsize); + + src += bsize; + dst += bsize; + } while ((nbytes -= bsize) >= bsize); + + return nbytes; +} + +static int crypto_pcbc_encrypt_inplace(struct blkcipher_desc *desc, + struct blkcipher_walk *walk, + struct crypto_cipher *tfm) +{ + void (*fn)(struct crypto_tfm *, u8 *, const u8 *) = + crypto_cipher_alg(tfm)->cia_encrypt; + int bsize = crypto_cipher_blocksize(tfm); + unsigned int nbytes = walk->nbytes; + u8 *src = walk->src.virt.addr; + u8 *iv = walk->iv; + u8 tmpbuf[bsize]; + + do { + memcpy(tmpbuf, src, bsize); + crypto_xor(iv, src, bsize); + fn(crypto_cipher_tfm(tfm), src, iv); + memcpy(iv, tmpbuf, bsize); + crypto_xor(iv, src, bsize); + + src += bsize; + } while ((nbytes -= bsize) >= bsize); + + memcpy(walk->iv, iv, bsize); + + return nbytes; +} + +static int crypto_pcbc_encrypt(struct blkcipher_desc *desc, + struct scatterlist *dst, struct scatterlist *src, + unsigned int nbytes) +{ + struct blkcipher_walk walk; + struct crypto_blkcipher *tfm = desc->tfm; + struct crypto_pcbc_ctx *ctx = crypto_blkcipher_ctx(tfm); + struct crypto_cipher *child = ctx->child; + int err; + + blkcipher_walk_init(&walk, dst, src, nbytes); + err = 
blkcipher_walk_virt(desc, &walk); + + while ((nbytes = walk.nbytes)) { + if (walk.src.virt.addr == walk.dst.virt.addr) + nbytes = crypto_pcbc_encrypt_inplace(desc, &walk, + child); + else + nbytes = crypto_pcbc_encrypt_segment(desc, &walk, + child); + err = blkcipher_walk_done(desc, &walk, nbytes); + } + + return err; +} + +static int crypto_pcbc_decrypt_segment(struct blkcipher_desc *desc, + struct blkcipher_walk *walk, + struct crypto_cipher *tfm) +{ + void (*fn)(struct crypto_tfm *, u8 *, const u8 *) = + crypto_cipher_alg(tfm)->cia_decrypt; + int bsize = crypto_cipher_blocksize(tfm); + unsigned int nbytes = walk->nbytes; + u8 *src = walk->src.virt.addr; + u8 *dst = walk->dst.virt.addr; + u8 *iv = walk->iv; + + do { + fn(crypto_cipher_tfm(tfm), dst, src); + crypto_xor(dst, iv, bsize); + memcpy(iv, src, bsize); + crypto_xor(iv, dst, bsize); + + src += bsize; + dst += bsize; + } while ((nbytes -= bsize) >= bsize); + + memcpy(walk->iv, iv, bsize); + + return nbytes; +} + +static int crypto_pcbc_decrypt_inplace(struct blkcipher_desc *desc, + struct blkcipher_walk *walk, + struct crypto_cipher *tfm) +{ + void (*fn)(struct crypto_tfm *, u8 *, const u8 *) = + crypto_cipher_alg(tfm)->cia_decrypt; + int bsize = crypto_cipher_blocksize(tfm); + unsigned int nbytes = walk->nbytes; + u8 *src = walk->src.virt.addr; + u8 *iv = walk->iv; + u8 tmpbuf[bsize]; + + do { + memcpy(tmpbuf, src, bsize); + fn(crypto_cipher_tfm(tfm), src, src); + crypto_xor(src, iv, bsize); + memcpy(iv, tmpbuf, bsize); + crypto_xor(iv, src, bsize); + + src += bsize; + } while ((nbytes -= bsize) >= bsize); + + memcpy(walk->iv, iv, bsize); + + return nbytes; +} + +static int crypto_pcbc_decrypt(struct blkcipher_desc *desc, + struct scatterlist *dst, struct scatterlist *src, + unsigned int nbytes) +{ + struct blkcipher_walk walk; + struct crypto_blkcipher *tfm = desc->tfm; + struct crypto_pcbc_ctx *ctx = crypto_blkcipher_ctx(tfm); + struct crypto_cipher *child = ctx->child; + int err; + + blkcipher_walk_init(&walk, dst, src, nbytes); + err = blkcipher_walk_virt(desc, &walk); + + while ((nbytes = walk.nbytes)) { + if (walk.src.virt.addr == walk.dst.virt.addr) + nbytes = crypto_pcbc_decrypt_inplace(desc, &walk, + child); + else + nbytes = crypto_pcbc_decrypt_segment(desc, &walk, + child); + err = blkcipher_walk_done(desc, &walk, nbytes); + } + + return err; +} + +static int crypto_pcbc_init_tfm(struct crypto_tfm *tfm) +{ + struct crypto_instance *inst = (void *)tfm->__crt_alg; + struct crypto_spawn *spawn = crypto_instance_ctx(inst); + struct crypto_pcbc_ctx *ctx = crypto_tfm_ctx(tfm); + struct crypto_cipher *cipher; + + cipher = crypto_spawn_cipher(spawn); + if (IS_ERR(cipher)) + return PTR_ERR(cipher); + + ctx->child = cipher; + return 0; +} + +static void crypto_pcbc_exit_tfm(struct crypto_tfm *tfm) +{ + struct crypto_pcbc_ctx *ctx = crypto_tfm_ctx(tfm); + crypto_free_cipher(ctx->child); +} + +static struct crypto_instance *crypto_pcbc_alloc(struct rtattr **tb) +{ + struct crypto_instance *inst; + struct crypto_alg *alg; + int err; + + err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_BLKCIPHER); + if (err) + return ERR_PTR(err); + + alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_CIPHER, + CRYPTO_ALG_TYPE_MASK); + if (IS_ERR(alg)) + return ERR_CAST(alg); + + inst = crypto_alloc_instance("pcbc", alg); + if (IS_ERR(inst)) + goto out_put_alg; + + inst->alg.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER; + inst->alg.cra_priority = alg->cra_priority; + inst->alg.cra_blocksize = alg->cra_blocksize; + inst->alg.cra_alignmask = alg->cra_alignmask; + 
inst->alg.cra_type = &crypto_blkcipher_type; + + /* We access the data as u32s when xoring. */ + inst->alg.cra_alignmask |= __alignof__(u32) - 1; + + inst->alg.cra_blkcipher.ivsize = alg->cra_blocksize; + inst->alg.cra_blkcipher.min_keysize = alg->cra_cipher.cia_min_keysize; + inst->alg.cra_blkcipher.max_keysize = alg->cra_cipher.cia_max_keysize; + + inst->alg.cra_ctxsize = sizeof(struct crypto_pcbc_ctx); + + inst->alg.cra_init = crypto_pcbc_init_tfm; + inst->alg.cra_exit = crypto_pcbc_exit_tfm; + + inst->alg.cra_blkcipher.setkey = crypto_pcbc_setkey; + inst->alg.cra_blkcipher.encrypt = crypto_pcbc_encrypt; + inst->alg.cra_blkcipher.decrypt = crypto_pcbc_decrypt; + +out_put_alg: + crypto_mod_put(alg); + return inst; +} + +static void crypto_pcbc_free(struct crypto_instance *inst) +{ + crypto_drop_spawn(crypto_instance_ctx(inst)); + kfree(inst); +} + +static struct crypto_template crypto_pcbc_tmpl = { + .name = "pcbc", + .alloc = crypto_pcbc_alloc, + .free = crypto_pcbc_free, + .module = THIS_MODULE, +}; + +static int __init crypto_pcbc_module_init(void) +{ + return crypto_register_template(&crypto_pcbc_tmpl); +} + +static void __exit crypto_pcbc_module_exit(void) +{ + crypto_unregister_template(&crypto_pcbc_tmpl); +} + +module_init(crypto_pcbc_module_init); +module_exit(crypto_pcbc_module_exit); + +MODULE_LICENSE("GPL"); +MODULE_DESCRIPTION("PCBC block cipher algorithm"); diff --git a/crypto/pcompress.c b/crypto/pcompress.c new file mode 100644 index 00000000..2e458e54 --- /dev/null +++ b/crypto/pcompress.c @@ -0,0 +1,121 @@ +/* + * Cryptographic API. + * + * Partial (de)compression operations. + * + * Copyright 2008 Sony Corporation + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. + * If not, see <http://www.gnu.org/licenses/>. 
+ */ + +#include <linux/crypto.h> +#include <linux/errno.h> +#include <linux/module.h> +#include <linux/seq_file.h> +#include <linux/string.h> +#include <linux/cryptouser.h> +#include <net/netlink.h> + +#include <crypto/compress.h> +#include <crypto/internal/compress.h> + +#include "internal.h" + + +static int crypto_pcomp_init(struct crypto_tfm *tfm, u32 type, u32 mask) +{ + return 0; +} + +static unsigned int crypto_pcomp_extsize(struct crypto_alg *alg) +{ + return alg->cra_ctxsize; +} + +static int crypto_pcomp_init_tfm(struct crypto_tfm *tfm) +{ + return 0; +} + +#ifdef CONFIG_NET +static int crypto_pcomp_report(struct sk_buff *skb, struct crypto_alg *alg) +{ + struct crypto_report_comp rpcomp; + + snprintf(rpcomp.type, CRYPTO_MAX_ALG_NAME, "%s", "pcomp"); + + NLA_PUT(skb, CRYPTOCFGA_REPORT_COMPRESS, + sizeof(struct crypto_report_comp), &rpcomp); + + return 0; + +nla_put_failure: + return -EMSGSIZE; +} +#else +static int crypto_pcomp_report(struct sk_buff *skb, struct crypto_alg *alg) +{ + return -ENOSYS; +} +#endif + +static void crypto_pcomp_show(struct seq_file *m, struct crypto_alg *alg) + __attribute__ ((unused)); +static void crypto_pcomp_show(struct seq_file *m, struct crypto_alg *alg) +{ + seq_printf(m, "type : pcomp\n"); +} + +static const struct crypto_type crypto_pcomp_type = { + .extsize = crypto_pcomp_extsize, + .init = crypto_pcomp_init, + .init_tfm = crypto_pcomp_init_tfm, +#ifdef CONFIG_PROC_FS + .show = crypto_pcomp_show, +#endif + .report = crypto_pcomp_report, + .maskclear = ~CRYPTO_ALG_TYPE_MASK, + .maskset = CRYPTO_ALG_TYPE_MASK, + .type = CRYPTO_ALG_TYPE_PCOMPRESS, + .tfmsize = offsetof(struct crypto_pcomp, base), +}; + +struct crypto_pcomp *crypto_alloc_pcomp(const char *alg_name, u32 type, + u32 mask) +{ + return crypto_alloc_tfm(alg_name, &crypto_pcomp_type, type, mask); +} +EXPORT_SYMBOL_GPL(crypto_alloc_pcomp); + +int crypto_register_pcomp(struct pcomp_alg *alg) +{ + struct crypto_alg *base = &alg->base; + + base->cra_type = &crypto_pcomp_type; + base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK; + base->cra_flags |= CRYPTO_ALG_TYPE_PCOMPRESS; + + return crypto_register_alg(base); +} +EXPORT_SYMBOL_GPL(crypto_register_pcomp); + +int crypto_unregister_pcomp(struct pcomp_alg *alg) +{ + return crypto_unregister_alg(&alg->base); +} +EXPORT_SYMBOL_GPL(crypto_unregister_pcomp); + +MODULE_LICENSE("GPL"); +MODULE_DESCRIPTION("Partial (de)compression type"); +MODULE_AUTHOR("Sony Corporation"); diff --git a/crypto/pcrypt.c b/crypto/pcrypt.c new file mode 100644 index 00000000..b2c99dc1 --- /dev/null +++ b/crypto/pcrypt.c @@ -0,0 +1,567 @@ +/* + * pcrypt - Parallel crypto wrapper. + * + * Copyright (C) 2009 secunet Security Networks AG + * Copyright (C) 2009 Steffen Klassert <steffen.klassert@secunet.com> + * + * This program is free software; you can redistribute it and/or modify it + * under the terms and conditions of the GNU General Public License, + * version 2, as published by the Free Software Foundation. + * + * This program is distributed in the hope it will be useful, but WITHOUT + * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or + * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for + * more details. + * + * You should have received a copy of the GNU General Public License along with + * this program; if not, write to the Free Software Foundation, Inc., + * 51 Franklin St - Fifth Floor, Boston, MA 02110-1301 USA. 
+ */ + +#include <crypto/algapi.h> +#include <crypto/internal/aead.h> +#include <linux/err.h> +#include <linux/init.h> +#include <linux/module.h> +#include <linux/slab.h> +#include <linux/notifier.h> +#include <linux/kobject.h> +#include <linux/cpu.h> +#include <crypto/pcrypt.h> + +struct padata_pcrypt { + struct padata_instance *pinst; + struct workqueue_struct *wq; + + /* + * Cpumask for callback CPUs. It should be + * equal to serial cpumask of corresponding padata instance, + * so it is updated when padata notifies us about serial + * cpumask change. + * + * cb_cpumask is protected by RCU. This fact prevents us from + * using cpumask_var_t directly because the actual type of + * cpumsak_var_t depends on kernel configuration(particularly on + * CONFIG_CPUMASK_OFFSTACK macro). Depending on the configuration + * cpumask_var_t may be either a pointer to the struct cpumask + * or a variable allocated on the stack. Thus we can not safely use + * cpumask_var_t with RCU operations such as rcu_assign_pointer or + * rcu_dereference. So cpumask_var_t is wrapped with struct + * pcrypt_cpumask which makes possible to use it with RCU. + */ + struct pcrypt_cpumask { + cpumask_var_t mask; + } *cb_cpumask; + struct notifier_block nblock; +}; + +static struct padata_pcrypt pencrypt; +static struct padata_pcrypt pdecrypt; +static struct kset *pcrypt_kset; + +struct pcrypt_instance_ctx { + struct crypto_spawn spawn; + unsigned int tfm_count; +}; + +struct pcrypt_aead_ctx { + struct crypto_aead *child; + unsigned int cb_cpu; +}; + +static int pcrypt_do_parallel(struct padata_priv *padata, unsigned int *cb_cpu, + struct padata_pcrypt *pcrypt) +{ + unsigned int cpu_index, cpu, i; + struct pcrypt_cpumask *cpumask; + + cpu = *cb_cpu; + + rcu_read_lock_bh(); + cpumask = rcu_dereference(pcrypt->cb_cpumask); + if (cpumask_test_cpu(cpu, cpumask->mask)) + goto out; + + if (!cpumask_weight(cpumask->mask)) + goto out; + + cpu_index = cpu % cpumask_weight(cpumask->mask); + + cpu = cpumask_first(cpumask->mask); + for (i = 0; i < cpu_index; i++) + cpu = cpumask_next(cpu, cpumask->mask); + + *cb_cpu = cpu; + +out: + rcu_read_unlock_bh(); + return padata_do_parallel(pcrypt->pinst, padata, cpu); +} + +static int pcrypt_aead_setkey(struct crypto_aead *parent, + const u8 *key, unsigned int keylen) +{ + struct pcrypt_aead_ctx *ctx = crypto_aead_ctx(parent); + + return crypto_aead_setkey(ctx->child, key, keylen); +} + +static int pcrypt_aead_setauthsize(struct crypto_aead *parent, + unsigned int authsize) +{ + struct pcrypt_aead_ctx *ctx = crypto_aead_ctx(parent); + + return crypto_aead_setauthsize(ctx->child, authsize); +} + +static void pcrypt_aead_serial(struct padata_priv *padata) +{ + struct pcrypt_request *preq = pcrypt_padata_request(padata); + struct aead_request *req = pcrypt_request_ctx(preq); + + aead_request_complete(req->base.data, padata->info); +} + +static void pcrypt_aead_giv_serial(struct padata_priv *padata) +{ + struct pcrypt_request *preq = pcrypt_padata_request(padata); + struct aead_givcrypt_request *req = pcrypt_request_ctx(preq); + + aead_request_complete(req->areq.base.data, padata->info); +} + +static void pcrypt_aead_done(struct crypto_async_request *areq, int err) +{ + struct aead_request *req = areq->data; + struct pcrypt_request *preq = aead_request_ctx(req); + struct padata_priv *padata = pcrypt_request_padata(preq); + + padata->info = err; + req->base.flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP; + + padata_do_serial(padata); +} + +static void pcrypt_aead_enc(struct padata_priv *padata) +{ + struct 
pcrypt_request *preq = pcrypt_padata_request(padata); + struct aead_request *req = pcrypt_request_ctx(preq); + + padata->info = crypto_aead_encrypt(req); + + if (padata->info == -EINPROGRESS) + return; + + padata_do_serial(padata); +} + +static int pcrypt_aead_encrypt(struct aead_request *req) +{ + int err; + struct pcrypt_request *preq = aead_request_ctx(req); + struct aead_request *creq = pcrypt_request_ctx(preq); + struct padata_priv *padata = pcrypt_request_padata(preq); + struct crypto_aead *aead = crypto_aead_reqtfm(req); + struct pcrypt_aead_ctx *ctx = crypto_aead_ctx(aead); + u32 flags = aead_request_flags(req); + + memset(padata, 0, sizeof(struct padata_priv)); + + padata->parallel = pcrypt_aead_enc; + padata->serial = pcrypt_aead_serial; + + aead_request_set_tfm(creq, ctx->child); + aead_request_set_callback(creq, flags & ~CRYPTO_TFM_REQ_MAY_SLEEP, + pcrypt_aead_done, req); + aead_request_set_crypt(creq, req->src, req->dst, + req->cryptlen, req->iv); + aead_request_set_assoc(creq, req->assoc, req->assoclen); + + err = pcrypt_do_parallel(padata, &ctx->cb_cpu, &pencrypt); + if (!err) + return -EINPROGRESS; + + return err; +} + +static void pcrypt_aead_dec(struct padata_priv *padata) +{ + struct pcrypt_request *preq = pcrypt_padata_request(padata); + struct aead_request *req = pcrypt_request_ctx(preq); + + padata->info = crypto_aead_decrypt(req); + + if (padata->info == -EINPROGRESS) + return; + + padata_do_serial(padata); +} + +static int pcrypt_aead_decrypt(struct aead_request *req) +{ + int err; + struct pcrypt_request *preq = aead_request_ctx(req); + struct aead_request *creq = pcrypt_request_ctx(preq); + struct padata_priv *padata = pcrypt_request_padata(preq); + struct crypto_aead *aead = crypto_aead_reqtfm(req); + struct pcrypt_aead_ctx *ctx = crypto_aead_ctx(aead); + u32 flags = aead_request_flags(req); + + memset(padata, 0, sizeof(struct padata_priv)); + + padata->parallel = pcrypt_aead_dec; + padata->serial = pcrypt_aead_serial; + + aead_request_set_tfm(creq, ctx->child); + aead_request_set_callback(creq, flags & ~CRYPTO_TFM_REQ_MAY_SLEEP, + pcrypt_aead_done, req); + aead_request_set_crypt(creq, req->src, req->dst, + req->cryptlen, req->iv); + aead_request_set_assoc(creq, req->assoc, req->assoclen); + + err = pcrypt_do_parallel(padata, &ctx->cb_cpu, &pdecrypt); + if (!err) + return -EINPROGRESS; + + return err; +} + +static void pcrypt_aead_givenc(struct padata_priv *padata) +{ + struct pcrypt_request *preq = pcrypt_padata_request(padata); + struct aead_givcrypt_request *req = pcrypt_request_ctx(preq); + + padata->info = crypto_aead_givencrypt(req); + + if (padata->info == -EINPROGRESS) + return; + + padata_do_serial(padata); +} + +static int pcrypt_aead_givencrypt(struct aead_givcrypt_request *req) +{ + int err; + struct aead_request *areq = &req->areq; + struct pcrypt_request *preq = aead_request_ctx(areq); + struct aead_givcrypt_request *creq = pcrypt_request_ctx(preq); + struct padata_priv *padata = pcrypt_request_padata(preq); + struct crypto_aead *aead = aead_givcrypt_reqtfm(req); + struct pcrypt_aead_ctx *ctx = crypto_aead_ctx(aead); + u32 flags = aead_request_flags(areq); + + memset(padata, 0, sizeof(struct padata_priv)); + + padata->parallel = pcrypt_aead_givenc; + padata->serial = pcrypt_aead_giv_serial; + + aead_givcrypt_set_tfm(creq, ctx->child); + aead_givcrypt_set_callback(creq, flags & ~CRYPTO_TFM_REQ_MAY_SLEEP, + pcrypt_aead_done, areq); + aead_givcrypt_set_crypt(creq, areq->src, areq->dst, + areq->cryptlen, areq->iv); + aead_givcrypt_set_assoc(creq, 
areq->assoc, areq->assoclen); + aead_givcrypt_set_giv(creq, req->giv, req->seq); + + err = pcrypt_do_parallel(padata, &ctx->cb_cpu, &pencrypt); + if (!err) + return -EINPROGRESS; + + return err; +} + +static int pcrypt_aead_init_tfm(struct crypto_tfm *tfm) +{ + int cpu, cpu_index; + struct crypto_instance *inst = crypto_tfm_alg_instance(tfm); + struct pcrypt_instance_ctx *ictx = crypto_instance_ctx(inst); + struct pcrypt_aead_ctx *ctx = crypto_tfm_ctx(tfm); + struct crypto_aead *cipher; + + ictx->tfm_count++; + + cpu_index = ictx->tfm_count % cpumask_weight(cpu_online_mask); + + ctx->cb_cpu = cpumask_first(cpu_online_mask); + for (cpu = 0; cpu < cpu_index; cpu++) + ctx->cb_cpu = cpumask_next(ctx->cb_cpu, cpu_online_mask); + + cipher = crypto_spawn_aead(crypto_instance_ctx(inst)); + + if (IS_ERR(cipher)) + return PTR_ERR(cipher); + + ctx->child = cipher; + tfm->crt_aead.reqsize = sizeof(struct pcrypt_request) + + sizeof(struct aead_givcrypt_request) + + crypto_aead_reqsize(cipher); + + return 0; +} + +static void pcrypt_aead_exit_tfm(struct crypto_tfm *tfm) +{ + struct pcrypt_aead_ctx *ctx = crypto_tfm_ctx(tfm); + + crypto_free_aead(ctx->child); +} + +static struct crypto_instance *pcrypt_alloc_instance(struct crypto_alg *alg) +{ + struct crypto_instance *inst; + struct pcrypt_instance_ctx *ctx; + int err; + + inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL); + if (!inst) { + inst = ERR_PTR(-ENOMEM); + goto out; + } + + err = -ENAMETOOLONG; + if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME, + "pcrypt(%s)", alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME) + goto out_free_inst; + + memcpy(inst->alg.cra_name, alg->cra_name, CRYPTO_MAX_ALG_NAME); + + ctx = crypto_instance_ctx(inst); + err = crypto_init_spawn(&ctx->spawn, alg, inst, + CRYPTO_ALG_TYPE_MASK); + if (err) + goto out_free_inst; + + inst->alg.cra_priority = alg->cra_priority + 100; + inst->alg.cra_blocksize = alg->cra_blocksize; + inst->alg.cra_alignmask = alg->cra_alignmask; + +out: + return inst; + +out_free_inst: + kfree(inst); + inst = ERR_PTR(err); + goto out; +} + +static struct crypto_instance *pcrypt_alloc_aead(struct rtattr **tb, + u32 type, u32 mask) +{ + struct crypto_instance *inst; + struct crypto_alg *alg; + + alg = crypto_get_attr_alg(tb, type, (mask & CRYPTO_ALG_TYPE_MASK)); + if (IS_ERR(alg)) + return ERR_CAST(alg); + + inst = pcrypt_alloc_instance(alg); + if (IS_ERR(inst)) + goto out_put_alg; + + inst->alg.cra_flags = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_ASYNC; + inst->alg.cra_type = &crypto_aead_type; + + inst->alg.cra_aead.ivsize = alg->cra_aead.ivsize; + inst->alg.cra_aead.geniv = alg->cra_aead.geniv; + inst->alg.cra_aead.maxauthsize = alg->cra_aead.maxauthsize; + + inst->alg.cra_ctxsize = sizeof(struct pcrypt_aead_ctx); + + inst->alg.cra_init = pcrypt_aead_init_tfm; + inst->alg.cra_exit = pcrypt_aead_exit_tfm; + + inst->alg.cra_aead.setkey = pcrypt_aead_setkey; + inst->alg.cra_aead.setauthsize = pcrypt_aead_setauthsize; + inst->alg.cra_aead.encrypt = pcrypt_aead_encrypt; + inst->alg.cra_aead.decrypt = pcrypt_aead_decrypt; + inst->alg.cra_aead.givencrypt = pcrypt_aead_givencrypt; + +out_put_alg: + crypto_mod_put(alg); + return inst; +} + +static struct crypto_instance *pcrypt_alloc(struct rtattr **tb) +{ + struct crypto_attr_type *algt; + + algt = crypto_get_attr_type(tb); + if (IS_ERR(algt)) + return ERR_CAST(algt); + + switch (algt->type & algt->mask & CRYPTO_ALG_TYPE_MASK) { + case CRYPTO_ALG_TYPE_AEAD: + return pcrypt_alloc_aead(tb, algt->type, algt->mask); + } + + return ERR_PTR(-EINVAL); +} + 
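
Editorial note (illustrative sketch, not part of this commit): the "pcrypt" template registered below is reached through the normal crypto API name syntax, so a caller wraps an existing AEAD simply by prefixing its algorithm string with pcrypt(...). The following minimal sketch assumes a kernel-module context; the helper name pcrypt_usage_sketch() and the inner algorithm authenc(hmac(sha1),cbc(aes)) are example choices, not anything defined by this patch.

#include <linux/crypto.h>
#include <linux/err.h>

/* Hypothetical helper: request the parallel wrapper that pcrypt_alloc()
 * above instantiates, then release it again. */
static int pcrypt_usage_sketch(void)
{
	struct crypto_aead *tfm;

	/* The crypto manager resolves the "pcrypt" prefix through this
	 * template and wraps the inner AEAD in the parallel engine. */
	tfm = crypto_alloc_aead("pcrypt(authenc(hmac(sha1),cbc(aes)))", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	/* ... crypto_aead_setkey(), aead_request_alloc(), etc. ... */

	crypto_free_aead(tfm);
	return 0;
}
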
+static void pcrypt_free(struct crypto_instance *inst) +{ + struct pcrypt_instance_ctx *ctx = crypto_instance_ctx(inst); + + crypto_drop_spawn(&ctx->spawn); + kfree(inst); +} + +static int pcrypt_cpumask_change_notify(struct notifier_block *self, + unsigned long val, void *data) +{ + struct padata_pcrypt *pcrypt; + struct pcrypt_cpumask *new_mask, *old_mask; + struct padata_cpumask *cpumask = (struct padata_cpumask *)data; + + if (!(val & PADATA_CPU_SERIAL)) + return 0; + + pcrypt = container_of(self, struct padata_pcrypt, nblock); + new_mask = kmalloc(sizeof(*new_mask), GFP_KERNEL); + if (!new_mask) + return -ENOMEM; + if (!alloc_cpumask_var(&new_mask->mask, GFP_KERNEL)) { + kfree(new_mask); + return -ENOMEM; + } + + old_mask = pcrypt->cb_cpumask; + + cpumask_copy(new_mask->mask, cpumask->cbcpu); + rcu_assign_pointer(pcrypt->cb_cpumask, new_mask); + synchronize_rcu_bh(); + + free_cpumask_var(old_mask->mask); + kfree(old_mask); + return 0; +} + +static int pcrypt_sysfs_add(struct padata_instance *pinst, const char *name) +{ + int ret; + + pinst->kobj.kset = pcrypt_kset; + ret = kobject_add(&pinst->kobj, NULL, name); + if (!ret) + kobject_uevent(&pinst->kobj, KOBJ_ADD); + + return ret; +} + +static int pcrypt_init_padata(struct padata_pcrypt *pcrypt, + const char *name) +{ + int ret = -ENOMEM; + struct pcrypt_cpumask *mask; + + get_online_cpus(); + + pcrypt->wq = alloc_workqueue(name, + WQ_MEM_RECLAIM | WQ_CPU_INTENSIVE, 1); + if (!pcrypt->wq) + goto err; + + pcrypt->pinst = padata_alloc_possible(pcrypt->wq); + if (!pcrypt->pinst) + goto err_destroy_workqueue; + + mask = kmalloc(sizeof(*mask), GFP_KERNEL); + if (!mask) + goto err_free_padata; + if (!alloc_cpumask_var(&mask->mask, GFP_KERNEL)) { + kfree(mask); + goto err_free_padata; + } + + cpumask_and(mask->mask, cpu_possible_mask, cpu_online_mask); + rcu_assign_pointer(pcrypt->cb_cpumask, mask); + + pcrypt->nblock.notifier_call = pcrypt_cpumask_change_notify; + ret = padata_register_cpumask_notifier(pcrypt->pinst, &pcrypt->nblock); + if (ret) + goto err_free_cpumask; + + ret = pcrypt_sysfs_add(pcrypt->pinst, name); + if (ret) + goto err_unregister_notifier; + + put_online_cpus(); + + return ret; + +err_unregister_notifier: + padata_unregister_cpumask_notifier(pcrypt->pinst, &pcrypt->nblock); +err_free_cpumask: + free_cpumask_var(mask->mask); + kfree(mask); +err_free_padata: + padata_free(pcrypt->pinst); +err_destroy_workqueue: + destroy_workqueue(pcrypt->wq); +err: + put_online_cpus(); + + return ret; +} + +static void pcrypt_fini_padata(struct padata_pcrypt *pcrypt) +{ + free_cpumask_var(pcrypt->cb_cpumask->mask); + kfree(pcrypt->cb_cpumask); + + padata_stop(pcrypt->pinst); + padata_unregister_cpumask_notifier(pcrypt->pinst, &pcrypt->nblock); + destroy_workqueue(pcrypt->wq); + padata_free(pcrypt->pinst); +} + +static struct crypto_template pcrypt_tmpl = { + .name = "pcrypt", + .alloc = pcrypt_alloc, + .free = pcrypt_free, + .module = THIS_MODULE, +}; + +static int __init pcrypt_init(void) +{ + int err = -ENOMEM; + + pcrypt_kset = kset_create_and_add("pcrypt", NULL, kernel_kobj); + if (!pcrypt_kset) + goto err; + + err = pcrypt_init_padata(&pencrypt, "pencrypt"); + if (err) + goto err_unreg_kset; + + err = pcrypt_init_padata(&pdecrypt, "pdecrypt"); + if (err) + goto err_deinit_pencrypt; + + padata_start(pencrypt.pinst); + padata_start(pdecrypt.pinst); + + return crypto_register_template(&pcrypt_tmpl); + +err_deinit_pencrypt: + pcrypt_fini_padata(&pencrypt); +err_unreg_kset: + kset_unregister(pcrypt_kset); +err: + return err; +} + +static 
void __exit pcrypt_exit(void) +{ + pcrypt_fini_padata(&pencrypt); + pcrypt_fini_padata(&pdecrypt); + + kset_unregister(pcrypt_kset); + crypto_unregister_template(&pcrypt_tmpl); +} + +module_init(pcrypt_init); +module_exit(pcrypt_exit); + +MODULE_LICENSE("GPL"); +MODULE_AUTHOR("Steffen Klassert <steffen.klassert@secunet.com>"); +MODULE_DESCRIPTION("Parallel crypto wrapper"); diff --git a/crypto/proc.c b/crypto/proc.c new file mode 100644 index 00000000..4a0a7aad --- /dev/null +++ b/crypto/proc.c @@ -0,0 +1,155 @@ +/* + * Scatterlist Cryptographic API. + * + * Procfs information. + * + * Copyright (c) 2002 James Morris <jmorris@intercode.com.au> + * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au> + * + * This program is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License as published by the Free + * Software Foundation; either version 2 of the License, or (at your option) + * any later version. + * + */ + +#include <linux/atomic.h> +#include <linux/init.h> +#include <linux/crypto.h> +#include <linux/module.h> /* for module_name() */ +#include <linux/rwsem.h> +#include <linux/proc_fs.h> +#include <linux/seq_file.h> +#include <linux/sysctl.h> +#include "internal.h" + +#ifdef CONFIG_CRYPTO_FIPS +static struct ctl_table crypto_sysctl_table[] = { + { + .procname = "fips_enabled", + .data = &fips_enabled, + .maxlen = sizeof(int), + .mode = 0444, + .proc_handler = proc_dointvec + }, + {} +}; + +static struct ctl_table crypto_dir_table[] = { + { + .procname = "crypto", + .mode = 0555, + .child = crypto_sysctl_table + }, + {} +}; + +static struct ctl_table_header *crypto_sysctls; + +static void crypto_proc_fips_init(void) +{ + crypto_sysctls = register_sysctl_table(crypto_dir_table); +} + +static void crypto_proc_fips_exit(void) +{ + if (crypto_sysctls) + unregister_sysctl_table(crypto_sysctls); +} +#else +#define crypto_proc_fips_init() +#define crypto_proc_fips_exit() +#endif + +static void *c_start(struct seq_file *m, loff_t *pos) +{ + down_read(&crypto_alg_sem); + return seq_list_start(&crypto_alg_list, *pos); +} + +static void *c_next(struct seq_file *m, void *p, loff_t *pos) +{ + return seq_list_next(p, &crypto_alg_list, pos); +} + +static void c_stop(struct seq_file *m, void *p) +{ + up_read(&crypto_alg_sem); +} + +static int c_show(struct seq_file *m, void *p) +{ + struct crypto_alg *alg = list_entry(p, struct crypto_alg, cra_list); + + seq_printf(m, "name : %s\n", alg->cra_name); + seq_printf(m, "driver : %s\n", alg->cra_driver_name); + seq_printf(m, "module : %s\n", module_name(alg->cra_module)); + seq_printf(m, "priority : %d\n", alg->cra_priority); + seq_printf(m, "refcnt : %d\n", atomic_read(&alg->cra_refcnt)); + seq_printf(m, "selftest : %s\n", + (alg->cra_flags & CRYPTO_ALG_TESTED) ? 
+ "passed" : "unknown"); + + if (alg->cra_flags & CRYPTO_ALG_LARVAL) { + seq_printf(m, "type : larval\n"); + seq_printf(m, "flags : 0x%x\n", alg->cra_flags); + goto out; + } + + if (alg->cra_type && alg->cra_type->show) { + alg->cra_type->show(m, alg); + goto out; + } + + switch (alg->cra_flags & (CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_LARVAL)) { + case CRYPTO_ALG_TYPE_CIPHER: + seq_printf(m, "type : cipher\n"); + seq_printf(m, "blocksize : %u\n", alg->cra_blocksize); + seq_printf(m, "min keysize : %u\n", + alg->cra_cipher.cia_min_keysize); + seq_printf(m, "max keysize : %u\n", + alg->cra_cipher.cia_max_keysize); + break; + case CRYPTO_ALG_TYPE_COMPRESS: + seq_printf(m, "type : compression\n"); + break; + default: + seq_printf(m, "type : unknown\n"); + break; + } + +out: + seq_putc(m, '\n'); + return 0; +} + +static const struct seq_operations crypto_seq_ops = { + .start = c_start, + .next = c_next, + .stop = c_stop, + .show = c_show +}; + +static int crypto_info_open(struct inode *inode, struct file *file) +{ + return seq_open(file, &crypto_seq_ops); +} + +static const struct file_operations proc_crypto_ops = { + .open = crypto_info_open, + .read = seq_read, + .llseek = seq_lseek, + .release = seq_release +}; + +void __init crypto_init_proc(void) +{ + proc_create("crypto", 0, NULL, &proc_crypto_ops); + crypto_proc_fips_init(); +} + +void __exit crypto_exit_proc(void) +{ + crypto_proc_fips_exit(); + remove_proc_entry("crypto", NULL); +} diff --git a/crypto/ripemd.h b/crypto/ripemd.h new file mode 100644 index 00000000..c57a2d4c --- /dev/null +++ b/crypto/ripemd.h @@ -0,0 +1,43 @@ +/* + * Common values for RIPEMD algorithms + */ + +#ifndef _CRYPTO_RMD_H +#define _CRYPTO_RMD_H + +#define RMD128_DIGEST_SIZE 16 +#define RMD128_BLOCK_SIZE 64 + +#define RMD160_DIGEST_SIZE 20 +#define RMD160_BLOCK_SIZE 64 + +#define RMD256_DIGEST_SIZE 32 +#define RMD256_BLOCK_SIZE 64 + +#define RMD320_DIGEST_SIZE 40 +#define RMD320_BLOCK_SIZE 64 + +/* initial values */ +#define RMD_H0 0x67452301UL +#define RMD_H1 0xefcdab89UL +#define RMD_H2 0x98badcfeUL +#define RMD_H3 0x10325476UL +#define RMD_H4 0xc3d2e1f0UL +#define RMD_H5 0x76543210UL +#define RMD_H6 0xfedcba98UL +#define RMD_H7 0x89abcdefUL +#define RMD_H8 0x01234567UL +#define RMD_H9 0x3c2d1e0fUL + +/* constants */ +#define RMD_K1 0x00000000UL +#define RMD_K2 0x5a827999UL +#define RMD_K3 0x6ed9eba1UL +#define RMD_K4 0x8f1bbcdcUL +#define RMD_K5 0xa953fd4eUL +#define RMD_K6 0x50a28be6UL +#define RMD_K7 0x5c4dd124UL +#define RMD_K8 0x6d703ef3UL +#define RMD_K9 0x7a6d76e9UL + +#endif diff --git a/crypto/rmd128.c b/crypto/rmd128.c new file mode 100644 index 00000000..8a0f68b7 --- /dev/null +++ b/crypto/rmd128.c @@ -0,0 +1,329 @@ +/* + * Cryptographic API. + * + * RIPEMD-128 - RACE Integrity Primitives Evaluation Message Digest. + * + * Based on the reference implementation by Antoon Bosselaers, ESAT-COSIC + * + * Copyright (c) 2008 Adrian-Ken Rueegsegger <ken@codelabs.ch> + * + * This program is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License as published by the Free + * Software Foundation; either version 2 of the License, or (at your option) + * any later version. 
+ * + */ +#include <crypto/internal/hash.h> +#include <linux/init.h> +#include <linux/module.h> +#include <linux/mm.h> +#include <linux/types.h> +#include <asm/byteorder.h> + +#include "ripemd.h" + +struct rmd128_ctx { + u64 byte_count; + u32 state[4]; + __le32 buffer[16]; +}; + +#define K1 RMD_K1 +#define K2 RMD_K2 +#define K3 RMD_K3 +#define K4 RMD_K4 +#define KK1 RMD_K6 +#define KK2 RMD_K7 +#define KK3 RMD_K8 +#define KK4 RMD_K1 + +#define F1(x, y, z) (x ^ y ^ z) /* XOR */ +#define F2(x, y, z) (z ^ (x & (y ^ z))) /* x ? y : z */ +#define F3(x, y, z) ((x | ~y) ^ z) +#define F4(x, y, z) (y ^ (z & (x ^ y))) /* z ? x : y */ + +#define ROUND(a, b, c, d, f, k, x, s) { \ + (a) += f((b), (c), (d)) + le32_to_cpup(&(x)) + (k); \ + (a) = rol32((a), (s)); \ +} + +static void rmd128_transform(u32 *state, const __le32 *in) +{ + u32 aa, bb, cc, dd, aaa, bbb, ccc, ddd; + + /* Initialize left lane */ + aa = state[0]; + bb = state[1]; + cc = state[2]; + dd = state[3]; + + /* Initialize right lane */ + aaa = state[0]; + bbb = state[1]; + ccc = state[2]; + ddd = state[3]; + + /* round 1: left lane */ + ROUND(aa, bb, cc, dd, F1, K1, in[0], 11); + ROUND(dd, aa, bb, cc, F1, K1, in[1], 14); + ROUND(cc, dd, aa, bb, F1, K1, in[2], 15); + ROUND(bb, cc, dd, aa, F1, K1, in[3], 12); + ROUND(aa, bb, cc, dd, F1, K1, in[4], 5); + ROUND(dd, aa, bb, cc, F1, K1, in[5], 8); + ROUND(cc, dd, aa, bb, F1, K1, in[6], 7); + ROUND(bb, cc, dd, aa, F1, K1, in[7], 9); + ROUND(aa, bb, cc, dd, F1, K1, in[8], 11); + ROUND(dd, aa, bb, cc, F1, K1, in[9], 13); + ROUND(cc, dd, aa, bb, F1, K1, in[10], 14); + ROUND(bb, cc, dd, aa, F1, K1, in[11], 15); + ROUND(aa, bb, cc, dd, F1, K1, in[12], 6); + ROUND(dd, aa, bb, cc, F1, K1, in[13], 7); + ROUND(cc, dd, aa, bb, F1, K1, in[14], 9); + ROUND(bb, cc, dd, aa, F1, K1, in[15], 8); + + /* round 2: left lane */ + ROUND(aa, bb, cc, dd, F2, K2, in[7], 7); + ROUND(dd, aa, bb, cc, F2, K2, in[4], 6); + ROUND(cc, dd, aa, bb, F2, K2, in[13], 8); + ROUND(bb, cc, dd, aa, F2, K2, in[1], 13); + ROUND(aa, bb, cc, dd, F2, K2, in[10], 11); + ROUND(dd, aa, bb, cc, F2, K2, in[6], 9); + ROUND(cc, dd, aa, bb, F2, K2, in[15], 7); + ROUND(bb, cc, dd, aa, F2, K2, in[3], 15); + ROUND(aa, bb, cc, dd, F2, K2, in[12], 7); + ROUND(dd, aa, bb, cc, F2, K2, in[0], 12); + ROUND(cc, dd, aa, bb, F2, K2, in[9], 15); + ROUND(bb, cc, dd, aa, F2, K2, in[5], 9); + ROUND(aa, bb, cc, dd, F2, K2, in[2], 11); + ROUND(dd, aa, bb, cc, F2, K2, in[14], 7); + ROUND(cc, dd, aa, bb, F2, K2, in[11], 13); + ROUND(bb, cc, dd, aa, F2, K2, in[8], 12); + + /* round 3: left lane */ + ROUND(aa, bb, cc, dd, F3, K3, in[3], 11); + ROUND(dd, aa, bb, cc, F3, K3, in[10], 13); + ROUND(cc, dd, aa, bb, F3, K3, in[14], 6); + ROUND(bb, cc, dd, aa, F3, K3, in[4], 7); + ROUND(aa, bb, cc, dd, F3, K3, in[9], 14); + ROUND(dd, aa, bb, cc, F3, K3, in[15], 9); + ROUND(cc, dd, aa, bb, F3, K3, in[8], 13); + ROUND(bb, cc, dd, aa, F3, K3, in[1], 15); + ROUND(aa, bb, cc, dd, F3, K3, in[2], 14); + ROUND(dd, aa, bb, cc, F3, K3, in[7], 8); + ROUND(cc, dd, aa, bb, F3, K3, in[0], 13); + ROUND(bb, cc, dd, aa, F3, K3, in[6], 6); + ROUND(aa, bb, cc, dd, F3, K3, in[13], 5); + ROUND(dd, aa, bb, cc, F3, K3, in[11], 12); + ROUND(cc, dd, aa, bb, F3, K3, in[5], 7); + ROUND(bb, cc, dd, aa, F3, K3, in[12], 5); + + /* round 4: left lane */ + ROUND(aa, bb, cc, dd, F4, K4, in[1], 11); + ROUND(dd, aa, bb, cc, F4, K4, in[9], 12); + ROUND(cc, dd, aa, bb, F4, K4, in[11], 14); + ROUND(bb, cc, dd, aa, F4, K4, in[10], 15); + ROUND(aa, bb, cc, dd, F4, K4, in[0], 14); + ROUND(dd, aa, bb, cc, F4, K4, in[8], 
15); + ROUND(cc, dd, aa, bb, F4, K4, in[12], 9); + ROUND(bb, cc, dd, aa, F4, K4, in[4], 8); + ROUND(aa, bb, cc, dd, F4, K4, in[13], 9); + ROUND(dd, aa, bb, cc, F4, K4, in[3], 14); + ROUND(cc, dd, aa, bb, F4, K4, in[7], 5); + ROUND(bb, cc, dd, aa, F4, K4, in[15], 6); + ROUND(aa, bb, cc, dd, F4, K4, in[14], 8); + ROUND(dd, aa, bb, cc, F4, K4, in[5], 6); + ROUND(cc, dd, aa, bb, F4, K4, in[6], 5); + ROUND(bb, cc, dd, aa, F4, K4, in[2], 12); + + /* round 1: right lane */ + ROUND(aaa, bbb, ccc, ddd, F4, KK1, in[5], 8); + ROUND(ddd, aaa, bbb, ccc, F4, KK1, in[14], 9); + ROUND(ccc, ddd, aaa, bbb, F4, KK1, in[7], 9); + ROUND(bbb, ccc, ddd, aaa, F4, KK1, in[0], 11); + ROUND(aaa, bbb, ccc, ddd, F4, KK1, in[9], 13); + ROUND(ddd, aaa, bbb, ccc, F4, KK1, in[2], 15); + ROUND(ccc, ddd, aaa, bbb, F4, KK1, in[11], 15); + ROUND(bbb, ccc, ddd, aaa, F4, KK1, in[4], 5); + ROUND(aaa, bbb, ccc, ddd, F4, KK1, in[13], 7); + ROUND(ddd, aaa, bbb, ccc, F4, KK1, in[6], 7); + ROUND(ccc, ddd, aaa, bbb, F4, KK1, in[15], 8); + ROUND(bbb, ccc, ddd, aaa, F4, KK1, in[8], 11); + ROUND(aaa, bbb, ccc, ddd, F4, KK1, in[1], 14); + ROUND(ddd, aaa, bbb, ccc, F4, KK1, in[10], 14); + ROUND(ccc, ddd, aaa, bbb, F4, KK1, in[3], 12); + ROUND(bbb, ccc, ddd, aaa, F4, KK1, in[12], 6); + + /* round 2: right lane */ + ROUND(aaa, bbb, ccc, ddd, F3, KK2, in[6], 9); + ROUND(ddd, aaa, bbb, ccc, F3, KK2, in[11], 13); + ROUND(ccc, ddd, aaa, bbb, F3, KK2, in[3], 15); + ROUND(bbb, ccc, ddd, aaa, F3, KK2, in[7], 7); + ROUND(aaa, bbb, ccc, ddd, F3, KK2, in[0], 12); + ROUND(ddd, aaa, bbb, ccc, F3, KK2, in[13], 8); + ROUND(ccc, ddd, aaa, bbb, F3, KK2, in[5], 9); + ROUND(bbb, ccc, ddd, aaa, F3, KK2, in[10], 11); + ROUND(aaa, bbb, ccc, ddd, F3, KK2, in[14], 7); + ROUND(ddd, aaa, bbb, ccc, F3, KK2, in[15], 7); + ROUND(ccc, ddd, aaa, bbb, F3, KK2, in[8], 12); + ROUND(bbb, ccc, ddd, aaa, F3, KK2, in[12], 7); + ROUND(aaa, bbb, ccc, ddd, F3, KK2, in[4], 6); + ROUND(ddd, aaa, bbb, ccc, F3, KK2, in[9], 15); + ROUND(ccc, ddd, aaa, bbb, F3, KK2, in[1], 13); + ROUND(bbb, ccc, ddd, aaa, F3, KK2, in[2], 11); + + /* round 3: right lane */ + ROUND(aaa, bbb, ccc, ddd, F2, KK3, in[15], 9); + ROUND(ddd, aaa, bbb, ccc, F2, KK3, in[5], 7); + ROUND(ccc, ddd, aaa, bbb, F2, KK3, in[1], 15); + ROUND(bbb, ccc, ddd, aaa, F2, KK3, in[3], 11); + ROUND(aaa, bbb, ccc, ddd, F2, KK3, in[7], 8); + ROUND(ddd, aaa, bbb, ccc, F2, KK3, in[14], 6); + ROUND(ccc, ddd, aaa, bbb, F2, KK3, in[6], 6); + ROUND(bbb, ccc, ddd, aaa, F2, KK3, in[9], 14); + ROUND(aaa, bbb, ccc, ddd, F2, KK3, in[11], 12); + ROUND(ddd, aaa, bbb, ccc, F2, KK3, in[8], 13); + ROUND(ccc, ddd, aaa, bbb, F2, KK3, in[12], 5); + ROUND(bbb, ccc, ddd, aaa, F2, KK3, in[2], 14); + ROUND(aaa, bbb, ccc, ddd, F2, KK3, in[10], 13); + ROUND(ddd, aaa, bbb, ccc, F2, KK3, in[0], 13); + ROUND(ccc, ddd, aaa, bbb, F2, KK3, in[4], 7); + ROUND(bbb, ccc, ddd, aaa, F2, KK3, in[13], 5); + + /* round 4: right lane */ + ROUND(aaa, bbb, ccc, ddd, F1, KK4, in[8], 15); + ROUND(ddd, aaa, bbb, ccc, F1, KK4, in[6], 5); + ROUND(ccc, ddd, aaa, bbb, F1, KK4, in[4], 8); + ROUND(bbb, ccc, ddd, aaa, F1, KK4, in[1], 11); + ROUND(aaa, bbb, ccc, ddd, F1, KK4, in[3], 14); + ROUND(ddd, aaa, bbb, ccc, F1, KK4, in[11], 14); + ROUND(ccc, ddd, aaa, bbb, F1, KK4, in[15], 6); + ROUND(bbb, ccc, ddd, aaa, F1, KK4, in[0], 14); + ROUND(aaa, bbb, ccc, ddd, F1, KK4, in[5], 6); + ROUND(ddd, aaa, bbb, ccc, F1, KK4, in[12], 9); + ROUND(ccc, ddd, aaa, bbb, F1, KK4, in[2], 12); + ROUND(bbb, ccc, ddd, aaa, F1, KK4, in[13], 9); + ROUND(aaa, bbb, ccc, ddd, F1, KK4, in[9], 12); + ROUND(ddd, 
aaa, bbb, ccc, F1, KK4, in[7], 5); + ROUND(ccc, ddd, aaa, bbb, F1, KK4, in[10], 15); + ROUND(bbb, ccc, ddd, aaa, F1, KK4, in[14], 8); + + /* combine results */ + ddd += cc + state[1]; /* final result for state[0] */ + state[1] = state[2] + dd + aaa; + state[2] = state[3] + aa + bbb; + state[3] = state[0] + bb + ccc; + state[0] = ddd; + + return; +} + +static int rmd128_init(struct shash_desc *desc) +{ + struct rmd128_ctx *rctx = shash_desc_ctx(desc); + + rctx->byte_count = 0; + + rctx->state[0] = RMD_H0; + rctx->state[1] = RMD_H1; + rctx->state[2] = RMD_H2; + rctx->state[3] = RMD_H3; + + memset(rctx->buffer, 0, sizeof(rctx->buffer)); + + return 0; +} + +static int rmd128_update(struct shash_desc *desc, const u8 *data, + unsigned int len) +{ + struct rmd128_ctx *rctx = shash_desc_ctx(desc); + const u32 avail = sizeof(rctx->buffer) - (rctx->byte_count & 0x3f); + + rctx->byte_count += len; + + /* Enough space in buffer? If so copy and we're done */ + if (avail > len) { + memcpy((char *)rctx->buffer + (sizeof(rctx->buffer) - avail), + data, len); + goto out; + } + + memcpy((char *)rctx->buffer + (sizeof(rctx->buffer) - avail), + data, avail); + + rmd128_transform(rctx->state, rctx->buffer); + data += avail; + len -= avail; + + while (len >= sizeof(rctx->buffer)) { + memcpy(rctx->buffer, data, sizeof(rctx->buffer)); + rmd128_transform(rctx->state, rctx->buffer); + data += sizeof(rctx->buffer); + len -= sizeof(rctx->buffer); + } + + memcpy(rctx->buffer, data, len); + +out: + return 0; +} + +/* Add padding and return the message digest. */ +static int rmd128_final(struct shash_desc *desc, u8 *out) +{ + struct rmd128_ctx *rctx = shash_desc_ctx(desc); + u32 i, index, padlen; + __le64 bits; + __le32 *dst = (__le32 *)out; + static const u8 padding[64] = { 0x80, }; + + bits = cpu_to_le64(rctx->byte_count << 3); + + /* Pad out to 56 mod 64 */ + index = rctx->byte_count & 0x3f; + padlen = (index < 56) ? (56 - index) : ((64+56) - index); + rmd128_update(desc, padding, padlen); + + /* Append length */ + rmd128_update(desc, (const u8 *)&bits, sizeof(bits)); + + /* Store state in digest */ + for (i = 0; i < 4; i++) + dst[i] = cpu_to_le32p(&rctx->state[i]); + + /* Wipe context */ + memset(rctx, 0, sizeof(*rctx)); + + return 0; +} + +static struct shash_alg alg = { + .digestsize = RMD128_DIGEST_SIZE, + .init = rmd128_init, + .update = rmd128_update, + .final = rmd128_final, + .descsize = sizeof(struct rmd128_ctx), + .base = { + .cra_name = "rmd128", + .cra_flags = CRYPTO_ALG_TYPE_SHASH, + .cra_blocksize = RMD128_BLOCK_SIZE, + .cra_module = THIS_MODULE, + } +}; + +static int __init rmd128_mod_init(void) +{ + return crypto_register_shash(&alg); +} + +static void __exit rmd128_mod_fini(void) +{ + crypto_unregister_shash(&alg); +} + +module_init(rmd128_mod_init); +module_exit(rmd128_mod_fini); + +MODULE_LICENSE("GPL"); +MODULE_AUTHOR("Adrian-Ken Rueegsegger <ken@codelabs.ch>"); +MODULE_DESCRIPTION("RIPEMD-128 Message Digest"); diff --git a/crypto/rmd160.c b/crypto/rmd160.c new file mode 100644 index 00000000..525d7bb7 --- /dev/null +++ b/crypto/rmd160.c @@ -0,0 +1,373 @@ +/* + * Cryptographic API. + * + * RIPEMD-160 - RACE Integrity Primitives Evaluation Message Digest. 
+ * + * Based on the reference implementation by Antoon Bosselaers, ESAT-COSIC + * + * Copyright (c) 2008 Adrian-Ken Rueegsegger <ken@codelabs.ch> + * + * This program is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License as published by the Free + * Software Foundation; either version 2 of the License, or (at your option) + * any later version. + * + */ +#include <crypto/internal/hash.h> +#include <linux/init.h> +#include <linux/module.h> +#include <linux/mm.h> +#include <linux/types.h> +#include <asm/byteorder.h> + +#include "ripemd.h" + +struct rmd160_ctx { + u64 byte_count; + u32 state[5]; + __le32 buffer[16]; +}; + +#define K1 RMD_K1 +#define K2 RMD_K2 +#define K3 RMD_K3 +#define K4 RMD_K4 +#define K5 RMD_K5 +#define KK1 RMD_K6 +#define KK2 RMD_K7 +#define KK3 RMD_K8 +#define KK4 RMD_K9 +#define KK5 RMD_K1 + +#define F1(x, y, z) (x ^ y ^ z) /* XOR */ +#define F2(x, y, z) (z ^ (x & (y ^ z))) /* x ? y : z */ +#define F3(x, y, z) ((x | ~y) ^ z) +#define F4(x, y, z) (y ^ (z & (x ^ y))) /* z ? x : y */ +#define F5(x, y, z) (x ^ (y | ~z)) + +#define ROUND(a, b, c, d, e, f, k, x, s) { \ + (a) += f((b), (c), (d)) + le32_to_cpup(&(x)) + (k); \ + (a) = rol32((a), (s)) + (e); \ + (c) = rol32((c), 10); \ +} + +static void rmd160_transform(u32 *state, const __le32 *in) +{ + u32 aa, bb, cc, dd, ee, aaa, bbb, ccc, ddd, eee; + + /* Initialize left lane */ + aa = state[0]; + bb = state[1]; + cc = state[2]; + dd = state[3]; + ee = state[4]; + + /* Initialize right lane */ + aaa = state[0]; + bbb = state[1]; + ccc = state[2]; + ddd = state[3]; + eee = state[4]; + + /* round 1: left lane */ + ROUND(aa, bb, cc, dd, ee, F1, K1, in[0], 11); + ROUND(ee, aa, bb, cc, dd, F1, K1, in[1], 14); + ROUND(dd, ee, aa, bb, cc, F1, K1, in[2], 15); + ROUND(cc, dd, ee, aa, bb, F1, K1, in[3], 12); + ROUND(bb, cc, dd, ee, aa, F1, K1, in[4], 5); + ROUND(aa, bb, cc, dd, ee, F1, K1, in[5], 8); + ROUND(ee, aa, bb, cc, dd, F1, K1, in[6], 7); + ROUND(dd, ee, aa, bb, cc, F1, K1, in[7], 9); + ROUND(cc, dd, ee, aa, bb, F1, K1, in[8], 11); + ROUND(bb, cc, dd, ee, aa, F1, K1, in[9], 13); + ROUND(aa, bb, cc, dd, ee, F1, K1, in[10], 14); + ROUND(ee, aa, bb, cc, dd, F1, K1, in[11], 15); + ROUND(dd, ee, aa, bb, cc, F1, K1, in[12], 6); + ROUND(cc, dd, ee, aa, bb, F1, K1, in[13], 7); + ROUND(bb, cc, dd, ee, aa, F1, K1, in[14], 9); + ROUND(aa, bb, cc, dd, ee, F1, K1, in[15], 8); + + /* round 2: left lane" */ + ROUND(ee, aa, bb, cc, dd, F2, K2, in[7], 7); + ROUND(dd, ee, aa, bb, cc, F2, K2, in[4], 6); + ROUND(cc, dd, ee, aa, bb, F2, K2, in[13], 8); + ROUND(bb, cc, dd, ee, aa, F2, K2, in[1], 13); + ROUND(aa, bb, cc, dd, ee, F2, K2, in[10], 11); + ROUND(ee, aa, bb, cc, dd, F2, K2, in[6], 9); + ROUND(dd, ee, aa, bb, cc, F2, K2, in[15], 7); + ROUND(cc, dd, ee, aa, bb, F2, K2, in[3], 15); + ROUND(bb, cc, dd, ee, aa, F2, K2, in[12], 7); + ROUND(aa, bb, cc, dd, ee, F2, K2, in[0], 12); + ROUND(ee, aa, bb, cc, dd, F2, K2, in[9], 15); + ROUND(dd, ee, aa, bb, cc, F2, K2, in[5], 9); + ROUND(cc, dd, ee, aa, bb, F2, K2, in[2], 11); + ROUND(bb, cc, dd, ee, aa, F2, K2, in[14], 7); + ROUND(aa, bb, cc, dd, ee, F2, K2, in[11], 13); + ROUND(ee, aa, bb, cc, dd, F2, K2, in[8], 12); + + /* round 3: left lane" */ + ROUND(dd, ee, aa, bb, cc, F3, K3, in[3], 11); + ROUND(cc, dd, ee, aa, bb, F3, K3, in[10], 13); + ROUND(bb, cc, dd, ee, aa, F3, K3, in[14], 6); + ROUND(aa, bb, cc, dd, ee, F3, K3, in[4], 7); + ROUND(ee, aa, bb, cc, dd, F3, K3, in[9], 14); + ROUND(dd, ee, aa, bb, cc, F3, K3, in[15], 9); + ROUND(cc, 
dd, ee, aa, bb, F3, K3, in[8], 13); + ROUND(bb, cc, dd, ee, aa, F3, K3, in[1], 15); + ROUND(aa, bb, cc, dd, ee, F3, K3, in[2], 14); + ROUND(ee, aa, bb, cc, dd, F3, K3, in[7], 8); + ROUND(dd, ee, aa, bb, cc, F3, K3, in[0], 13); + ROUND(cc, dd, ee, aa, bb, F3, K3, in[6], 6); + ROUND(bb, cc, dd, ee, aa, F3, K3, in[13], 5); + ROUND(aa, bb, cc, dd, ee, F3, K3, in[11], 12); + ROUND(ee, aa, bb, cc, dd, F3, K3, in[5], 7); + ROUND(dd, ee, aa, bb, cc, F3, K3, in[12], 5); + + /* round 4: left lane" */ + ROUND(cc, dd, ee, aa, bb, F4, K4, in[1], 11); + ROUND(bb, cc, dd, ee, aa, F4, K4, in[9], 12); + ROUND(aa, bb, cc, dd, ee, F4, K4, in[11], 14); + ROUND(ee, aa, bb, cc, dd, F4, K4, in[10], 15); + ROUND(dd, ee, aa, bb, cc, F4, K4, in[0], 14); + ROUND(cc, dd, ee, aa, bb, F4, K4, in[8], 15); + ROUND(bb, cc, dd, ee, aa, F4, K4, in[12], 9); + ROUND(aa, bb, cc, dd, ee, F4, K4, in[4], 8); + ROUND(ee, aa, bb, cc, dd, F4, K4, in[13], 9); + ROUND(dd, ee, aa, bb, cc, F4, K4, in[3], 14); + ROUND(cc, dd, ee, aa, bb, F4, K4, in[7], 5); + ROUND(bb, cc, dd, ee, aa, F4, K4, in[15], 6); + ROUND(aa, bb, cc, dd, ee, F4, K4, in[14], 8); + ROUND(ee, aa, bb, cc, dd, F4, K4, in[5], 6); + ROUND(dd, ee, aa, bb, cc, F4, K4, in[6], 5); + ROUND(cc, dd, ee, aa, bb, F4, K4, in[2], 12); + + /* round 5: left lane" */ + ROUND(bb, cc, dd, ee, aa, F5, K5, in[4], 9); + ROUND(aa, bb, cc, dd, ee, F5, K5, in[0], 15); + ROUND(ee, aa, bb, cc, dd, F5, K5, in[5], 5); + ROUND(dd, ee, aa, bb, cc, F5, K5, in[9], 11); + ROUND(cc, dd, ee, aa, bb, F5, K5, in[7], 6); + ROUND(bb, cc, dd, ee, aa, F5, K5, in[12], 8); + ROUND(aa, bb, cc, dd, ee, F5, K5, in[2], 13); + ROUND(ee, aa, bb, cc, dd, F5, K5, in[10], 12); + ROUND(dd, ee, aa, bb, cc, F5, K5, in[14], 5); + ROUND(cc, dd, ee, aa, bb, F5, K5, in[1], 12); + ROUND(bb, cc, dd, ee, aa, F5, K5, in[3], 13); + ROUND(aa, bb, cc, dd, ee, F5, K5, in[8], 14); + ROUND(ee, aa, bb, cc, dd, F5, K5, in[11], 11); + ROUND(dd, ee, aa, bb, cc, F5, K5, in[6], 8); + ROUND(cc, dd, ee, aa, bb, F5, K5, in[15], 5); + ROUND(bb, cc, dd, ee, aa, F5, K5, in[13], 6); + + /* round 1: right lane */ + ROUND(aaa, bbb, ccc, ddd, eee, F5, KK1, in[5], 8); + ROUND(eee, aaa, bbb, ccc, ddd, F5, KK1, in[14], 9); + ROUND(ddd, eee, aaa, bbb, ccc, F5, KK1, in[7], 9); + ROUND(ccc, ddd, eee, aaa, bbb, F5, KK1, in[0], 11); + ROUND(bbb, ccc, ddd, eee, aaa, F5, KK1, in[9], 13); + ROUND(aaa, bbb, ccc, ddd, eee, F5, KK1, in[2], 15); + ROUND(eee, aaa, bbb, ccc, ddd, F5, KK1, in[11], 15); + ROUND(ddd, eee, aaa, bbb, ccc, F5, KK1, in[4], 5); + ROUND(ccc, ddd, eee, aaa, bbb, F5, KK1, in[13], 7); + ROUND(bbb, ccc, ddd, eee, aaa, F5, KK1, in[6], 7); + ROUND(aaa, bbb, ccc, ddd, eee, F5, KK1, in[15], 8); + ROUND(eee, aaa, bbb, ccc, ddd, F5, KK1, in[8], 11); + ROUND(ddd, eee, aaa, bbb, ccc, F5, KK1, in[1], 14); + ROUND(ccc, ddd, eee, aaa, bbb, F5, KK1, in[10], 14); + ROUND(bbb, ccc, ddd, eee, aaa, F5, KK1, in[3], 12); + ROUND(aaa, bbb, ccc, ddd, eee, F5, KK1, in[12], 6); + + /* round 2: right lane */ + ROUND(eee, aaa, bbb, ccc, ddd, F4, KK2, in[6], 9); + ROUND(ddd, eee, aaa, bbb, ccc, F4, KK2, in[11], 13); + ROUND(ccc, ddd, eee, aaa, bbb, F4, KK2, in[3], 15); + ROUND(bbb, ccc, ddd, eee, aaa, F4, KK2, in[7], 7); + ROUND(aaa, bbb, ccc, ddd, eee, F4, KK2, in[0], 12); + ROUND(eee, aaa, bbb, ccc, ddd, F4, KK2, in[13], 8); + ROUND(ddd, eee, aaa, bbb, ccc, F4, KK2, in[5], 9); + ROUND(ccc, ddd, eee, aaa, bbb, F4, KK2, in[10], 11); + ROUND(bbb, ccc, ddd, eee, aaa, F4, KK2, in[14], 7); + ROUND(aaa, bbb, ccc, ddd, eee, F4, KK2, in[15], 7); + ROUND(eee, aaa, bbb, ccc, ddd, 
F4, KK2, in[8], 12); + ROUND(ddd, eee, aaa, bbb, ccc, F4, KK2, in[12], 7); + ROUND(ccc, ddd, eee, aaa, bbb, F4, KK2, in[4], 6); + ROUND(bbb, ccc, ddd, eee, aaa, F4, KK2, in[9], 15); + ROUND(aaa, bbb, ccc, ddd, eee, F4, KK2, in[1], 13); + ROUND(eee, aaa, bbb, ccc, ddd, F4, KK2, in[2], 11); + + /* round 3: right lane */ + ROUND(ddd, eee, aaa, bbb, ccc, F3, KK3, in[15], 9); + ROUND(ccc, ddd, eee, aaa, bbb, F3, KK3, in[5], 7); + ROUND(bbb, ccc, ddd, eee, aaa, F3, KK3, in[1], 15); + ROUND(aaa, bbb, ccc, ddd, eee, F3, KK3, in[3], 11); + ROUND(eee, aaa, bbb, ccc, ddd, F3, KK3, in[7], 8); + ROUND(ddd, eee, aaa, bbb, ccc, F3, KK3, in[14], 6); + ROUND(ccc, ddd, eee, aaa, bbb, F3, KK3, in[6], 6); + ROUND(bbb, ccc, ddd, eee, aaa, F3, KK3, in[9], 14); + ROUND(aaa, bbb, ccc, ddd, eee, F3, KK3, in[11], 12); + ROUND(eee, aaa, bbb, ccc, ddd, F3, KK3, in[8], 13); + ROUND(ddd, eee, aaa, bbb, ccc, F3, KK3, in[12], 5); + ROUND(ccc, ddd, eee, aaa, bbb, F3, KK3, in[2], 14); + ROUND(bbb, ccc, ddd, eee, aaa, F3, KK3, in[10], 13); + ROUND(aaa, bbb, ccc, ddd, eee, F3, KK3, in[0], 13); + ROUND(eee, aaa, bbb, ccc, ddd, F3, KK3, in[4], 7); + ROUND(ddd, eee, aaa, bbb, ccc, F3, KK3, in[13], 5); + + /* round 4: right lane */ + ROUND(ccc, ddd, eee, aaa, bbb, F2, KK4, in[8], 15); + ROUND(bbb, ccc, ddd, eee, aaa, F2, KK4, in[6], 5); + ROUND(aaa, bbb, ccc, ddd, eee, F2, KK4, in[4], 8); + ROUND(eee, aaa, bbb, ccc, ddd, F2, KK4, in[1], 11); + ROUND(ddd, eee, aaa, bbb, ccc, F2, KK4, in[3], 14); + ROUND(ccc, ddd, eee, aaa, bbb, F2, KK4, in[11], 14); + ROUND(bbb, ccc, ddd, eee, aaa, F2, KK4, in[15], 6); + ROUND(aaa, bbb, ccc, ddd, eee, F2, KK4, in[0], 14); + ROUND(eee, aaa, bbb, ccc, ddd, F2, KK4, in[5], 6); + ROUND(ddd, eee, aaa, bbb, ccc, F2, KK4, in[12], 9); + ROUND(ccc, ddd, eee, aaa, bbb, F2, KK4, in[2], 12); + ROUND(bbb, ccc, ddd, eee, aaa, F2, KK4, in[13], 9); + ROUND(aaa, bbb, ccc, ddd, eee, F2, KK4, in[9], 12); + ROUND(eee, aaa, bbb, ccc, ddd, F2, KK4, in[7], 5); + ROUND(ddd, eee, aaa, bbb, ccc, F2, KK4, in[10], 15); + ROUND(ccc, ddd, eee, aaa, bbb, F2, KK4, in[14], 8); + + /* round 5: right lane */ + ROUND(bbb, ccc, ddd, eee, aaa, F1, KK5, in[12], 8); + ROUND(aaa, bbb, ccc, ddd, eee, F1, KK5, in[15], 5); + ROUND(eee, aaa, bbb, ccc, ddd, F1, KK5, in[10], 12); + ROUND(ddd, eee, aaa, bbb, ccc, F1, KK5, in[4], 9); + ROUND(ccc, ddd, eee, aaa, bbb, F1, KK5, in[1], 12); + ROUND(bbb, ccc, ddd, eee, aaa, F1, KK5, in[5], 5); + ROUND(aaa, bbb, ccc, ddd, eee, F1, KK5, in[8], 14); + ROUND(eee, aaa, bbb, ccc, ddd, F1, KK5, in[7], 6); + ROUND(ddd, eee, aaa, bbb, ccc, F1, KK5, in[6], 8); + ROUND(ccc, ddd, eee, aaa, bbb, F1, KK5, in[2], 13); + ROUND(bbb, ccc, ddd, eee, aaa, F1, KK5, in[13], 6); + ROUND(aaa, bbb, ccc, ddd, eee, F1, KK5, in[14], 5); + ROUND(eee, aaa, bbb, ccc, ddd, F1, KK5, in[0], 15); + ROUND(ddd, eee, aaa, bbb, ccc, F1, KK5, in[3], 13); + ROUND(ccc, ddd, eee, aaa, bbb, F1, KK5, in[9], 11); + ROUND(bbb, ccc, ddd, eee, aaa, F1, KK5, in[11], 11); + + /* combine results */ + ddd += cc + state[1]; /* final result for state[0] */ + state[1] = state[2] + dd + eee; + state[2] = state[3] + ee + aaa; + state[3] = state[4] + aa + bbb; + state[4] = state[0] + bb + ccc; + state[0] = ddd; + + return; +} + +static int rmd160_init(struct shash_desc *desc) +{ + struct rmd160_ctx *rctx = shash_desc_ctx(desc); + + rctx->byte_count = 0; + + rctx->state[0] = RMD_H0; + rctx->state[1] = RMD_H1; + rctx->state[2] = RMD_H2; + rctx->state[3] = RMD_H3; + rctx->state[4] = RMD_H4; + + memset(rctx->buffer, 0, sizeof(rctx->buffer)); + + return 0; +} + 
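For reference, a minimal sketch (not part of this patch) of how a kernel caller could compute a RIPEMD-160 digest through the "rmd160" shash registered by this file, assuming the synchronous hash (shash) API of this kernel generation; the helper name rmd160_digest_example is illustrative only, and the output buffer must hold RMD160_DIGEST_SIZE (20) bytes.

#include <crypto/hash.h>
#include <linux/err.h>
#include <linux/slab.h>

/* Illustrative sketch only -- not part of the patch above. */
static int rmd160_digest_example(const u8 *data, unsigned int len, u8 *out)
{
	struct crypto_shash *tfm;
	struct shash_desc *desc;
	int err;

	/* Look up the "rmd160" algorithm registered by this module. */
	tfm = crypto_alloc_shash("rmd160", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	/* The descriptor is followed by descsize() bytes of per-request
	 * state (here, struct rmd160_ctx). */
	desc = kzalloc(sizeof(*desc) + crypto_shash_descsize(tfm), GFP_KERNEL);
	if (!desc) {
		crypto_free_shash(tfm);
		return -ENOMEM;
	}

	desc->tfm = tfm;
	desc->flags = 0;

	/* digest() runs init(), update() and final() in one call. */
	err = crypto_shash_digest(desc, data, len, out);

	kfree(desc);
	crypto_free_shash(tfm);
	return err;
}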
+static int rmd160_update(struct shash_desc *desc, const u8 *data, + unsigned int len) +{ + struct rmd160_ctx *rctx = shash_desc_ctx(desc); + const u32 avail = sizeof(rctx->buffer) - (rctx->byte_count & 0x3f); + + rctx->byte_count += len; + + /* Enough space in buffer? If so copy and we're done */ + if (avail > len) { + memcpy((char *)rctx->buffer + (sizeof(rctx->buffer) - avail), + data, len); + goto out; + } + + memcpy((char *)rctx->buffer + (sizeof(rctx->buffer) - avail), + data, avail); + + rmd160_transform(rctx->state, rctx->buffer); + data += avail; + len -= avail; + + while (len >= sizeof(rctx->buffer)) { + memcpy(rctx->buffer, data, sizeof(rctx->buffer)); + rmd160_transform(rctx->state, rctx->buffer); + data += sizeof(rctx->buffer); + len -= sizeof(rctx->buffer); + } + + memcpy(rctx->buffer, data, len); + +out: + return 0; +} + +/* Add padding and return the message digest. */ +static int rmd160_final(struct shash_desc *desc, u8 *out) +{ + struct rmd160_ctx *rctx = shash_desc_ctx(desc); + u32 i, index, padlen; + __le64 bits; + __le32 *dst = (__le32 *)out; + static const u8 padding[64] = { 0x80, }; + + bits = cpu_to_le64(rctx->byte_count << 3); + + /* Pad out to 56 mod 64 */ + index = rctx->byte_count & 0x3f; + padlen = (index < 56) ? (56 - index) : ((64+56) - index); + rmd160_update(desc, padding, padlen); + + /* Append length */ + rmd160_update(desc, (const u8 *)&bits, sizeof(bits)); + + /* Store state in digest */ + for (i = 0; i < 5; i++) + dst[i] = cpu_to_le32p(&rctx->state[i]); + + /* Wipe context */ + memset(rctx, 0, sizeof(*rctx)); + + return 0; +} + +static struct shash_alg alg = { + .digestsize = RMD160_DIGEST_SIZE, + .init = rmd160_init, + .update = rmd160_update, + .final = rmd160_final, + .descsize = sizeof(struct rmd160_ctx), + .base = { + .cra_name = "rmd160", + .cra_flags = CRYPTO_ALG_TYPE_SHASH, + .cra_blocksize = RMD160_BLOCK_SIZE, + .cra_module = THIS_MODULE, + } +}; + +static int __init rmd160_mod_init(void) +{ + return crypto_register_shash(&alg); +} + +static void __exit rmd160_mod_fini(void) +{ + crypto_unregister_shash(&alg); +} + +module_init(rmd160_mod_init); +module_exit(rmd160_mod_fini); + +MODULE_LICENSE("GPL"); +MODULE_AUTHOR("Adrian-Ken Rueegsegger <ken@codelabs.ch>"); +MODULE_DESCRIPTION("RIPEMD-160 Message Digest"); diff --git a/crypto/rmd256.c b/crypto/rmd256.c new file mode 100644 index 00000000..69293d9b --- /dev/null +++ b/crypto/rmd256.c @@ -0,0 +1,348 @@ +/* + * Cryptographic API. + * + * RIPEMD-256 - RACE Integrity Primitives Evaluation Message Digest. + * + * Based on the reference implementation by Antoon Bosselaers, ESAT-COSIC + * + * Copyright (c) 2008 Adrian-Ken Rueegsegger <ken@codelabs.ch> + * + * This program is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License as published by the Free + * Software Foundation; either version 2 of the License, or (at your option) + * any later version. + * + */ +#include <crypto/internal/hash.h> +#include <linux/init.h> +#include <linux/module.h> +#include <linux/mm.h> +#include <linux/types.h> +#include <asm/byteorder.h> + +#include "ripemd.h" + +struct rmd256_ctx { + u64 byte_count; + u32 state[8]; + __le32 buffer[16]; +}; + +#define K1 RMD_K1 +#define K2 RMD_K2 +#define K3 RMD_K3 +#define K4 RMD_K4 +#define KK1 RMD_K6 +#define KK2 RMD_K7 +#define KK3 RMD_K8 +#define KK4 RMD_K1 + +#define F1(x, y, z) (x ^ y ^ z) /* XOR */ +#define F2(x, y, z) (z ^ (x & (y ^ z))) /* x ? 
y : z */ +#define F3(x, y, z) ((x | ~y) ^ z) +#define F4(x, y, z) (y ^ (z & (x ^ y))) /* z ? x : y */ + +#define ROUND(a, b, c, d, f, k, x, s) { \ + (a) += f((b), (c), (d)) + le32_to_cpup(&(x)) + (k); \ + (a) = rol32((a), (s)); \ +} + +static void rmd256_transform(u32 *state, const __le32 *in) +{ + u32 aa, bb, cc, dd, aaa, bbb, ccc, ddd, tmp; + + /* Initialize left lane */ + aa = state[0]; + bb = state[1]; + cc = state[2]; + dd = state[3]; + + /* Initialize right lane */ + aaa = state[4]; + bbb = state[5]; + ccc = state[6]; + ddd = state[7]; + + /* round 1: left lane */ + ROUND(aa, bb, cc, dd, F1, K1, in[0], 11); + ROUND(dd, aa, bb, cc, F1, K1, in[1], 14); + ROUND(cc, dd, aa, bb, F1, K1, in[2], 15); + ROUND(bb, cc, dd, aa, F1, K1, in[3], 12); + ROUND(aa, bb, cc, dd, F1, K1, in[4], 5); + ROUND(dd, aa, bb, cc, F1, K1, in[5], 8); + ROUND(cc, dd, aa, bb, F1, K1, in[6], 7); + ROUND(bb, cc, dd, aa, F1, K1, in[7], 9); + ROUND(aa, bb, cc, dd, F1, K1, in[8], 11); + ROUND(dd, aa, bb, cc, F1, K1, in[9], 13); + ROUND(cc, dd, aa, bb, F1, K1, in[10], 14); + ROUND(bb, cc, dd, aa, F1, K1, in[11], 15); + ROUND(aa, bb, cc, dd, F1, K1, in[12], 6); + ROUND(dd, aa, bb, cc, F1, K1, in[13], 7); + ROUND(cc, dd, aa, bb, F1, K1, in[14], 9); + ROUND(bb, cc, dd, aa, F1, K1, in[15], 8); + + /* round 1: right lane */ + ROUND(aaa, bbb, ccc, ddd, F4, KK1, in[5], 8); + ROUND(ddd, aaa, bbb, ccc, F4, KK1, in[14], 9); + ROUND(ccc, ddd, aaa, bbb, F4, KK1, in[7], 9); + ROUND(bbb, ccc, ddd, aaa, F4, KK1, in[0], 11); + ROUND(aaa, bbb, ccc, ddd, F4, KK1, in[9], 13); + ROUND(ddd, aaa, bbb, ccc, F4, KK1, in[2], 15); + ROUND(ccc, ddd, aaa, bbb, F4, KK1, in[11], 15); + ROUND(bbb, ccc, ddd, aaa, F4, KK1, in[4], 5); + ROUND(aaa, bbb, ccc, ddd, F4, KK1, in[13], 7); + ROUND(ddd, aaa, bbb, ccc, F4, KK1, in[6], 7); + ROUND(ccc, ddd, aaa, bbb, F4, KK1, in[15], 8); + ROUND(bbb, ccc, ddd, aaa, F4, KK1, in[8], 11); + ROUND(aaa, bbb, ccc, ddd, F4, KK1, in[1], 14); + ROUND(ddd, aaa, bbb, ccc, F4, KK1, in[10], 14); + ROUND(ccc, ddd, aaa, bbb, F4, KK1, in[3], 12); + ROUND(bbb, ccc, ddd, aaa, F4, KK1, in[12], 6); + + /* Swap contents of "a" registers */ + tmp = aa; aa = aaa; aaa = tmp; + + /* round 2: left lane */ + ROUND(aa, bb, cc, dd, F2, K2, in[7], 7); + ROUND(dd, aa, bb, cc, F2, K2, in[4], 6); + ROUND(cc, dd, aa, bb, F2, K2, in[13], 8); + ROUND(bb, cc, dd, aa, F2, K2, in[1], 13); + ROUND(aa, bb, cc, dd, F2, K2, in[10], 11); + ROUND(dd, aa, bb, cc, F2, K2, in[6], 9); + ROUND(cc, dd, aa, bb, F2, K2, in[15], 7); + ROUND(bb, cc, dd, aa, F2, K2, in[3], 15); + ROUND(aa, bb, cc, dd, F2, K2, in[12], 7); + ROUND(dd, aa, bb, cc, F2, K2, in[0], 12); + ROUND(cc, dd, aa, bb, F2, K2, in[9], 15); + ROUND(bb, cc, dd, aa, F2, K2, in[5], 9); + ROUND(aa, bb, cc, dd, F2, K2, in[2], 11); + ROUND(dd, aa, bb, cc, F2, K2, in[14], 7); + ROUND(cc, dd, aa, bb, F2, K2, in[11], 13); + ROUND(bb, cc, dd, aa, F2, K2, in[8], 12); + + /* round 2: right lane */ + ROUND(aaa, bbb, ccc, ddd, F3, KK2, in[6], 9); + ROUND(ddd, aaa, bbb, ccc, F3, KK2, in[11], 13); + ROUND(ccc, ddd, aaa, bbb, F3, KK2, in[3], 15); + ROUND(bbb, ccc, ddd, aaa, F3, KK2, in[7], 7); + ROUND(aaa, bbb, ccc, ddd, F3, KK2, in[0], 12); + ROUND(ddd, aaa, bbb, ccc, F3, KK2, in[13], 8); + ROUND(ccc, ddd, aaa, bbb, F3, KK2, in[5], 9); + ROUND(bbb, ccc, ddd, aaa, F3, KK2, in[10], 11); + ROUND(aaa, bbb, ccc, ddd, F3, KK2, in[14], 7); + ROUND(ddd, aaa, bbb, ccc, F3, KK2, in[15], 7); + ROUND(ccc, ddd, aaa, bbb, F3, KK2, in[8], 12); + ROUND(bbb, ccc, ddd, aaa, F3, KK2, in[12], 7); + ROUND(aaa, bbb, ccc, ddd, F3, KK2, 
in[4], 6); + ROUND(ddd, aaa, bbb, ccc, F3, KK2, in[9], 15); + ROUND(ccc, ddd, aaa, bbb, F3, KK2, in[1], 13); + ROUND(bbb, ccc, ddd, aaa, F3, KK2, in[2], 11); + + /* Swap contents of "b" registers */ + tmp = bb; bb = bbb; bbb = tmp; + + /* round 3: left lane */ + ROUND(aa, bb, cc, dd, F3, K3, in[3], 11); + ROUND(dd, aa, bb, cc, F3, K3, in[10], 13); + ROUND(cc, dd, aa, bb, F3, K3, in[14], 6); + ROUND(bb, cc, dd, aa, F3, K3, in[4], 7); + ROUND(aa, bb, cc, dd, F3, K3, in[9], 14); + ROUND(dd, aa, bb, cc, F3, K3, in[15], 9); + ROUND(cc, dd, aa, bb, F3, K3, in[8], 13); + ROUND(bb, cc, dd, aa, F3, K3, in[1], 15); + ROUND(aa, bb, cc, dd, F3, K3, in[2], 14); + ROUND(dd, aa, bb, cc, F3, K3, in[7], 8); + ROUND(cc, dd, aa, bb, F3, K3, in[0], 13); + ROUND(bb, cc, dd, aa, F3, K3, in[6], 6); + ROUND(aa, bb, cc, dd, F3, K3, in[13], 5); + ROUND(dd, aa, bb, cc, F3, K3, in[11], 12); + ROUND(cc, dd, aa, bb, F3, K3, in[5], 7); + ROUND(bb, cc, dd, aa, F3, K3, in[12], 5); + + /* round 3: right lane */ + ROUND(aaa, bbb, ccc, ddd, F2, KK3, in[15], 9); + ROUND(ddd, aaa, bbb, ccc, F2, KK3, in[5], 7); + ROUND(ccc, ddd, aaa, bbb, F2, KK3, in[1], 15); + ROUND(bbb, ccc, ddd, aaa, F2, KK3, in[3], 11); + ROUND(aaa, bbb, ccc, ddd, F2, KK3, in[7], 8); + ROUND(ddd, aaa, bbb, ccc, F2, KK3, in[14], 6); + ROUND(ccc, ddd, aaa, bbb, F2, KK3, in[6], 6); + ROUND(bbb, ccc, ddd, aaa, F2, KK3, in[9], 14); + ROUND(aaa, bbb, ccc, ddd, F2, KK3, in[11], 12); + ROUND(ddd, aaa, bbb, ccc, F2, KK3, in[8], 13); + ROUND(ccc, ddd, aaa, bbb, F2, KK3, in[12], 5); + ROUND(bbb, ccc, ddd, aaa, F2, KK3, in[2], 14); + ROUND(aaa, bbb, ccc, ddd, F2, KK3, in[10], 13); + ROUND(ddd, aaa, bbb, ccc, F2, KK3, in[0], 13); + ROUND(ccc, ddd, aaa, bbb, F2, KK3, in[4], 7); + ROUND(bbb, ccc, ddd, aaa, F2, KK3, in[13], 5); + + /* Swap contents of "c" registers */ + tmp = cc; cc = ccc; ccc = tmp; + + /* round 4: left lane */ + ROUND(aa, bb, cc, dd, F4, K4, in[1], 11); + ROUND(dd, aa, bb, cc, F4, K4, in[9], 12); + ROUND(cc, dd, aa, bb, F4, K4, in[11], 14); + ROUND(bb, cc, dd, aa, F4, K4, in[10], 15); + ROUND(aa, bb, cc, dd, F4, K4, in[0], 14); + ROUND(dd, aa, bb, cc, F4, K4, in[8], 15); + ROUND(cc, dd, aa, bb, F4, K4, in[12], 9); + ROUND(bb, cc, dd, aa, F4, K4, in[4], 8); + ROUND(aa, bb, cc, dd, F4, K4, in[13], 9); + ROUND(dd, aa, bb, cc, F4, K4, in[3], 14); + ROUND(cc, dd, aa, bb, F4, K4, in[7], 5); + ROUND(bb, cc, dd, aa, F4, K4, in[15], 6); + ROUND(aa, bb, cc, dd, F4, K4, in[14], 8); + ROUND(dd, aa, bb, cc, F4, K4, in[5], 6); + ROUND(cc, dd, aa, bb, F4, K4, in[6], 5); + ROUND(bb, cc, dd, aa, F4, K4, in[2], 12); + + /* round 4: right lane */ + ROUND(aaa, bbb, ccc, ddd, F1, KK4, in[8], 15); + ROUND(ddd, aaa, bbb, ccc, F1, KK4, in[6], 5); + ROUND(ccc, ddd, aaa, bbb, F1, KK4, in[4], 8); + ROUND(bbb, ccc, ddd, aaa, F1, KK4, in[1], 11); + ROUND(aaa, bbb, ccc, ddd, F1, KK4, in[3], 14); + ROUND(ddd, aaa, bbb, ccc, F1, KK4, in[11], 14); + ROUND(ccc, ddd, aaa, bbb, F1, KK4, in[15], 6); + ROUND(bbb, ccc, ddd, aaa, F1, KK4, in[0], 14); + ROUND(aaa, bbb, ccc, ddd, F1, KK4, in[5], 6); + ROUND(ddd, aaa, bbb, ccc, F1, KK4, in[12], 9); + ROUND(ccc, ddd, aaa, bbb, F1, KK4, in[2], 12); + ROUND(bbb, ccc, ddd, aaa, F1, KK4, in[13], 9); + ROUND(aaa, bbb, ccc, ddd, F1, KK4, in[9], 12); + ROUND(ddd, aaa, bbb, ccc, F1, KK4, in[7], 5); + ROUND(ccc, ddd, aaa, bbb, F1, KK4, in[10], 15); + ROUND(bbb, ccc, ddd, aaa, F1, KK4, in[14], 8); + + /* Swap contents of "d" registers */ + tmp = dd; dd = ddd; ddd = tmp; + + /* combine results */ + state[0] += aa; + state[1] += bb; + state[2] += cc; + 
state[3] += dd; + state[4] += aaa; + state[5] += bbb; + state[6] += ccc; + state[7] += ddd; + + return; +} + +static int rmd256_init(struct shash_desc *desc) +{ + struct rmd256_ctx *rctx = shash_desc_ctx(desc); + + rctx->byte_count = 0; + + rctx->state[0] = RMD_H0; + rctx->state[1] = RMD_H1; + rctx->state[2] = RMD_H2; + rctx->state[3] = RMD_H3; + rctx->state[4] = RMD_H5; + rctx->state[5] = RMD_H6; + rctx->state[6] = RMD_H7; + rctx->state[7] = RMD_H8; + + memset(rctx->buffer, 0, sizeof(rctx->buffer)); + + return 0; +} + +static int rmd256_update(struct shash_desc *desc, const u8 *data, + unsigned int len) +{ + struct rmd256_ctx *rctx = shash_desc_ctx(desc); + const u32 avail = sizeof(rctx->buffer) - (rctx->byte_count & 0x3f); + + rctx->byte_count += len; + + /* Enough space in buffer? If so copy and we're done */ + if (avail > len) { + memcpy((char *)rctx->buffer + (sizeof(rctx->buffer) - avail), + data, len); + goto out; + } + + memcpy((char *)rctx->buffer + (sizeof(rctx->buffer) - avail), + data, avail); + + rmd256_transform(rctx->state, rctx->buffer); + data += avail; + len -= avail; + + while (len >= sizeof(rctx->buffer)) { + memcpy(rctx->buffer, data, sizeof(rctx->buffer)); + rmd256_transform(rctx->state, rctx->buffer); + data += sizeof(rctx->buffer); + len -= sizeof(rctx->buffer); + } + + memcpy(rctx->buffer, data, len); + +out: + return 0; +} + +/* Add padding and return the message digest. */ +static int rmd256_final(struct shash_desc *desc, u8 *out) +{ + struct rmd256_ctx *rctx = shash_desc_ctx(desc); + u32 i, index, padlen; + __le64 bits; + __le32 *dst = (__le32 *)out; + static const u8 padding[64] = { 0x80, }; + + bits = cpu_to_le64(rctx->byte_count << 3); + + /* Pad out to 56 mod 64 */ + index = rctx->byte_count & 0x3f; + padlen = (index < 56) ? (56 - index) : ((64+56) - index); + rmd256_update(desc, padding, padlen); + + /* Append length */ + rmd256_update(desc, (const u8 *)&bits, sizeof(bits)); + + /* Store state in digest */ + for (i = 0; i < 8; i++) + dst[i] = cpu_to_le32p(&rctx->state[i]); + + /* Wipe context */ + memset(rctx, 0, sizeof(*rctx)); + + return 0; +} + +static struct shash_alg alg = { + .digestsize = RMD256_DIGEST_SIZE, + .init = rmd256_init, + .update = rmd256_update, + .final = rmd256_final, + .descsize = sizeof(struct rmd256_ctx), + .base = { + .cra_name = "rmd256", + .cra_flags = CRYPTO_ALG_TYPE_SHASH, + .cra_blocksize = RMD256_BLOCK_SIZE, + .cra_module = THIS_MODULE, + } +}; + +static int __init rmd256_mod_init(void) +{ + return crypto_register_shash(&alg); +} + +static void __exit rmd256_mod_fini(void) +{ + crypto_unregister_shash(&alg); +} + +module_init(rmd256_mod_init); +module_exit(rmd256_mod_fini); + +MODULE_LICENSE("GPL"); +MODULE_AUTHOR("Adrian-Ken Rueegsegger <ken@codelabs.ch>"); +MODULE_DESCRIPTION("RIPEMD-256 Message Digest"); diff --git a/crypto/rmd320.c b/crypto/rmd320.c new file mode 100644 index 00000000..09f97dfd --- /dev/null +++ b/crypto/rmd320.c @@ -0,0 +1,397 @@ +/* + * Cryptographic API. + * + * RIPEMD-320 - RACE Integrity Primitives Evaluation Message Digest. + * + * Based on the reference implementation by Antoon Bosselaers, ESAT-COSIC + * + * Copyright (c) 2008 Adrian-Ken Rueegsegger <ken@codelabs.ch> + * + * This program is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License as published by the Free + * Software Foundation; either version 2 of the License, or (at your option) + * any later version. 
+ * + */ +#include <crypto/internal/hash.h> +#include <linux/init.h> +#include <linux/module.h> +#include <linux/mm.h> +#include <linux/types.h> +#include <asm/byteorder.h> + +#include "ripemd.h" + +struct rmd320_ctx { + u64 byte_count; + u32 state[10]; + __le32 buffer[16]; +}; + +#define K1 RMD_K1 +#define K2 RMD_K2 +#define K3 RMD_K3 +#define K4 RMD_K4 +#define K5 RMD_K5 +#define KK1 RMD_K6 +#define KK2 RMD_K7 +#define KK3 RMD_K8 +#define KK4 RMD_K9 +#define KK5 RMD_K1 + +#define F1(x, y, z) (x ^ y ^ z) /* XOR */ +#define F2(x, y, z) (z ^ (x & (y ^ z))) /* x ? y : z */ +#define F3(x, y, z) ((x | ~y) ^ z) +#define F4(x, y, z) (y ^ (z & (x ^ y))) /* z ? x : y */ +#define F5(x, y, z) (x ^ (y | ~z)) + +#define ROUND(a, b, c, d, e, f, k, x, s) { \ + (a) += f((b), (c), (d)) + le32_to_cpup(&(x)) + (k); \ + (a) = rol32((a), (s)) + (e); \ + (c) = rol32((c), 10); \ +} + +static void rmd320_transform(u32 *state, const __le32 *in) +{ + u32 aa, bb, cc, dd, ee, aaa, bbb, ccc, ddd, eee, tmp; + + /* Initialize left lane */ + aa = state[0]; + bb = state[1]; + cc = state[2]; + dd = state[3]; + ee = state[4]; + + /* Initialize right lane */ + aaa = state[5]; + bbb = state[6]; + ccc = state[7]; + ddd = state[8]; + eee = state[9]; + + /* round 1: left lane */ + ROUND(aa, bb, cc, dd, ee, F1, K1, in[0], 11); + ROUND(ee, aa, bb, cc, dd, F1, K1, in[1], 14); + ROUND(dd, ee, aa, bb, cc, F1, K1, in[2], 15); + ROUND(cc, dd, ee, aa, bb, F1, K1, in[3], 12); + ROUND(bb, cc, dd, ee, aa, F1, K1, in[4], 5); + ROUND(aa, bb, cc, dd, ee, F1, K1, in[5], 8); + ROUND(ee, aa, bb, cc, dd, F1, K1, in[6], 7); + ROUND(dd, ee, aa, bb, cc, F1, K1, in[7], 9); + ROUND(cc, dd, ee, aa, bb, F1, K1, in[8], 11); + ROUND(bb, cc, dd, ee, aa, F1, K1, in[9], 13); + ROUND(aa, bb, cc, dd, ee, F1, K1, in[10], 14); + ROUND(ee, aa, bb, cc, dd, F1, K1, in[11], 15); + ROUND(dd, ee, aa, bb, cc, F1, K1, in[12], 6); + ROUND(cc, dd, ee, aa, bb, F1, K1, in[13], 7); + ROUND(bb, cc, dd, ee, aa, F1, K1, in[14], 9); + ROUND(aa, bb, cc, dd, ee, F1, K1, in[15], 8); + + /* round 1: right lane */ + ROUND(aaa, bbb, ccc, ddd, eee, F5, KK1, in[5], 8); + ROUND(eee, aaa, bbb, ccc, ddd, F5, KK1, in[14], 9); + ROUND(ddd, eee, aaa, bbb, ccc, F5, KK1, in[7], 9); + ROUND(ccc, ddd, eee, aaa, bbb, F5, KK1, in[0], 11); + ROUND(bbb, ccc, ddd, eee, aaa, F5, KK1, in[9], 13); + ROUND(aaa, bbb, ccc, ddd, eee, F5, KK1, in[2], 15); + ROUND(eee, aaa, bbb, ccc, ddd, F5, KK1, in[11], 15); + ROUND(ddd, eee, aaa, bbb, ccc, F5, KK1, in[4], 5); + ROUND(ccc, ddd, eee, aaa, bbb, F5, KK1, in[13], 7); + ROUND(bbb, ccc, ddd, eee, aaa, F5, KK1, in[6], 7); + ROUND(aaa, bbb, ccc, ddd, eee, F5, KK1, in[15], 8); + ROUND(eee, aaa, bbb, ccc, ddd, F5, KK1, in[8], 11); + ROUND(ddd, eee, aaa, bbb, ccc, F5, KK1, in[1], 14); + ROUND(ccc, ddd, eee, aaa, bbb, F5, KK1, in[10], 14); + ROUND(bbb, ccc, ddd, eee, aaa, F5, KK1, in[3], 12); + ROUND(aaa, bbb, ccc, ddd, eee, F5, KK1, in[12], 6); + + /* Swap contents of "a" registers */ + tmp = aa; aa = aaa; aaa = tmp; + + /* round 2: left lane" */ + ROUND(ee, aa, bb, cc, dd, F2, K2, in[7], 7); + ROUND(dd, ee, aa, bb, cc, F2, K2, in[4], 6); + ROUND(cc, dd, ee, aa, bb, F2, K2, in[13], 8); + ROUND(bb, cc, dd, ee, aa, F2, K2, in[1], 13); + ROUND(aa, bb, cc, dd, ee, F2, K2, in[10], 11); + ROUND(ee, aa, bb, cc, dd, F2, K2, in[6], 9); + ROUND(dd, ee, aa, bb, cc, F2, K2, in[15], 7); + ROUND(cc, dd, ee, aa, bb, F2, K2, in[3], 15); + ROUND(bb, cc, dd, ee, aa, F2, K2, in[12], 7); + ROUND(aa, bb, cc, dd, ee, F2, K2, in[0], 12); + ROUND(ee, aa, bb, cc, dd, F2, K2, in[9], 15); + 
ROUND(dd, ee, aa, bb, cc, F2, K2, in[5], 9); + ROUND(cc, dd, ee, aa, bb, F2, K2, in[2], 11); + ROUND(bb, cc, dd, ee, aa, F2, K2, in[14], 7); + ROUND(aa, bb, cc, dd, ee, F2, K2, in[11], 13); + ROUND(ee, aa, bb, cc, dd, F2, K2, in[8], 12); + + /* round 2: right lane */ + ROUND(eee, aaa, bbb, ccc, ddd, F4, KK2, in[6], 9); + ROUND(ddd, eee, aaa, bbb, ccc, F4, KK2, in[11], 13); + ROUND(ccc, ddd, eee, aaa, bbb, F4, KK2, in[3], 15); + ROUND(bbb, ccc, ddd, eee, aaa, F4, KK2, in[7], 7); + ROUND(aaa, bbb, ccc, ddd, eee, F4, KK2, in[0], 12); + ROUND(eee, aaa, bbb, ccc, ddd, F4, KK2, in[13], 8); + ROUND(ddd, eee, aaa, bbb, ccc, F4, KK2, in[5], 9); + ROUND(ccc, ddd, eee, aaa, bbb, F4, KK2, in[10], 11); + ROUND(bbb, ccc, ddd, eee, aaa, F4, KK2, in[14], 7); + ROUND(aaa, bbb, ccc, ddd, eee, F4, KK2, in[15], 7); + ROUND(eee, aaa, bbb, ccc, ddd, F4, KK2, in[8], 12); + ROUND(ddd, eee, aaa, bbb, ccc, F4, KK2, in[12], 7); + ROUND(ccc, ddd, eee, aaa, bbb, F4, KK2, in[4], 6); + ROUND(bbb, ccc, ddd, eee, aaa, F4, KK2, in[9], 15); + ROUND(aaa, bbb, ccc, ddd, eee, F4, KK2, in[1], 13); + ROUND(eee, aaa, bbb, ccc, ddd, F4, KK2, in[2], 11); + + /* Swap contents of "b" registers */ + tmp = bb; bb = bbb; bbb = tmp; + + /* round 3: left lane" */ + ROUND(dd, ee, aa, bb, cc, F3, K3, in[3], 11); + ROUND(cc, dd, ee, aa, bb, F3, K3, in[10], 13); + ROUND(bb, cc, dd, ee, aa, F3, K3, in[14], 6); + ROUND(aa, bb, cc, dd, ee, F3, K3, in[4], 7); + ROUND(ee, aa, bb, cc, dd, F3, K3, in[9], 14); + ROUND(dd, ee, aa, bb, cc, F3, K3, in[15], 9); + ROUND(cc, dd, ee, aa, bb, F3, K3, in[8], 13); + ROUND(bb, cc, dd, ee, aa, F3, K3, in[1], 15); + ROUND(aa, bb, cc, dd, ee, F3, K3, in[2], 14); + ROUND(ee, aa, bb, cc, dd, F3, K3, in[7], 8); + ROUND(dd, ee, aa, bb, cc, F3, K3, in[0], 13); + ROUND(cc, dd, ee, aa, bb, F3, K3, in[6], 6); + ROUND(bb, cc, dd, ee, aa, F3, K3, in[13], 5); + ROUND(aa, bb, cc, dd, ee, F3, K3, in[11], 12); + ROUND(ee, aa, bb, cc, dd, F3, K3, in[5], 7); + ROUND(dd, ee, aa, bb, cc, F3, K3, in[12], 5); + + /* round 3: right lane */ + ROUND(ddd, eee, aaa, bbb, ccc, F3, KK3, in[15], 9); + ROUND(ccc, ddd, eee, aaa, bbb, F3, KK3, in[5], 7); + ROUND(bbb, ccc, ddd, eee, aaa, F3, KK3, in[1], 15); + ROUND(aaa, bbb, ccc, ddd, eee, F3, KK3, in[3], 11); + ROUND(eee, aaa, bbb, ccc, ddd, F3, KK3, in[7], 8); + ROUND(ddd, eee, aaa, bbb, ccc, F3, KK3, in[14], 6); + ROUND(ccc, ddd, eee, aaa, bbb, F3, KK3, in[6], 6); + ROUND(bbb, ccc, ddd, eee, aaa, F3, KK3, in[9], 14); + ROUND(aaa, bbb, ccc, ddd, eee, F3, KK3, in[11], 12); + ROUND(eee, aaa, bbb, ccc, ddd, F3, KK3, in[8], 13); + ROUND(ddd, eee, aaa, bbb, ccc, F3, KK3, in[12], 5); + ROUND(ccc, ddd, eee, aaa, bbb, F3, KK3, in[2], 14); + ROUND(bbb, ccc, ddd, eee, aaa, F3, KK3, in[10], 13); + ROUND(aaa, bbb, ccc, ddd, eee, F3, KK3, in[0], 13); + ROUND(eee, aaa, bbb, ccc, ddd, F3, KK3, in[4], 7); + ROUND(ddd, eee, aaa, bbb, ccc, F3, KK3, in[13], 5); + + /* Swap contents of "c" registers */ + tmp = cc; cc = ccc; ccc = tmp; + + /* round 4: left lane" */ + ROUND(cc, dd, ee, aa, bb, F4, K4, in[1], 11); + ROUND(bb, cc, dd, ee, aa, F4, K4, in[9], 12); + ROUND(aa, bb, cc, dd, ee, F4, K4, in[11], 14); + ROUND(ee, aa, bb, cc, dd, F4, K4, in[10], 15); + ROUND(dd, ee, aa, bb, cc, F4, K4, in[0], 14); + ROUND(cc, dd, ee, aa, bb, F4, K4, in[8], 15); + ROUND(bb, cc, dd, ee, aa, F4, K4, in[12], 9); + ROUND(aa, bb, cc, dd, ee, F4, K4, in[4], 8); + ROUND(ee, aa, bb, cc, dd, F4, K4, in[13], 9); + ROUND(dd, ee, aa, bb, cc, F4, K4, in[3], 14); + ROUND(cc, dd, ee, aa, bb, F4, K4, in[7], 5); + ROUND(bb, cc, dd, ee, aa, 
F4, K4, in[15], 6); + ROUND(aa, bb, cc, dd, ee, F4, K4, in[14], 8); + ROUND(ee, aa, bb, cc, dd, F4, K4, in[5], 6); + ROUND(dd, ee, aa, bb, cc, F4, K4, in[6], 5); + ROUND(cc, dd, ee, aa, bb, F4, K4, in[2], 12); + + /* round 4: right lane */ + ROUND(ccc, ddd, eee, aaa, bbb, F2, KK4, in[8], 15); + ROUND(bbb, ccc, ddd, eee, aaa, F2, KK4, in[6], 5); + ROUND(aaa, bbb, ccc, ddd, eee, F2, KK4, in[4], 8); + ROUND(eee, aaa, bbb, ccc, ddd, F2, KK4, in[1], 11); + ROUND(ddd, eee, aaa, bbb, ccc, F2, KK4, in[3], 14); + ROUND(ccc, ddd, eee, aaa, bbb, F2, KK4, in[11], 14); + ROUND(bbb, ccc, ddd, eee, aaa, F2, KK4, in[15], 6); + ROUND(aaa, bbb, ccc, ddd, eee, F2, KK4, in[0], 14); + ROUND(eee, aaa, bbb, ccc, ddd, F2, KK4, in[5], 6); + ROUND(ddd, eee, aaa, bbb, ccc, F2, KK4, in[12], 9); + ROUND(ccc, ddd, eee, aaa, bbb, F2, KK4, in[2], 12); + ROUND(bbb, ccc, ddd, eee, aaa, F2, KK4, in[13], 9); + ROUND(aaa, bbb, ccc, ddd, eee, F2, KK4, in[9], 12); + ROUND(eee, aaa, bbb, ccc, ddd, F2, KK4, in[7], 5); + ROUND(ddd, eee, aaa, bbb, ccc, F2, KK4, in[10], 15); + ROUND(ccc, ddd, eee, aaa, bbb, F2, KK4, in[14], 8); + + /* Swap contents of "d" registers */ + tmp = dd; dd = ddd; ddd = tmp; + + /* round 5: left lane" */ + ROUND(bb, cc, dd, ee, aa, F5, K5, in[4], 9); + ROUND(aa, bb, cc, dd, ee, F5, K5, in[0], 15); + ROUND(ee, aa, bb, cc, dd, F5, K5, in[5], 5); + ROUND(dd, ee, aa, bb, cc, F5, K5, in[9], 11); + ROUND(cc, dd, ee, aa, bb, F5, K5, in[7], 6); + ROUND(bb, cc, dd, ee, aa, F5, K5, in[12], 8); + ROUND(aa, bb, cc, dd, ee, F5, K5, in[2], 13); + ROUND(ee, aa, bb, cc, dd, F5, K5, in[10], 12); + ROUND(dd, ee, aa, bb, cc, F5, K5, in[14], 5); + ROUND(cc, dd, ee, aa, bb, F5, K5, in[1], 12); + ROUND(bb, cc, dd, ee, aa, F5, K5, in[3], 13); + ROUND(aa, bb, cc, dd, ee, F5, K5, in[8], 14); + ROUND(ee, aa, bb, cc, dd, F5, K5, in[11], 11); + ROUND(dd, ee, aa, bb, cc, F5, K5, in[6], 8); + ROUND(cc, dd, ee, aa, bb, F5, K5, in[15], 5); + ROUND(bb, cc, dd, ee, aa, F5, K5, in[13], 6); + + /* round 5: right lane */ + ROUND(bbb, ccc, ddd, eee, aaa, F1, KK5, in[12], 8); + ROUND(aaa, bbb, ccc, ddd, eee, F1, KK5, in[15], 5); + ROUND(eee, aaa, bbb, ccc, ddd, F1, KK5, in[10], 12); + ROUND(ddd, eee, aaa, bbb, ccc, F1, KK5, in[4], 9); + ROUND(ccc, ddd, eee, aaa, bbb, F1, KK5, in[1], 12); + ROUND(bbb, ccc, ddd, eee, aaa, F1, KK5, in[5], 5); + ROUND(aaa, bbb, ccc, ddd, eee, F1, KK5, in[8], 14); + ROUND(eee, aaa, bbb, ccc, ddd, F1, KK5, in[7], 6); + ROUND(ddd, eee, aaa, bbb, ccc, F1, KK5, in[6], 8); + ROUND(ccc, ddd, eee, aaa, bbb, F1, KK5, in[2], 13); + ROUND(bbb, ccc, ddd, eee, aaa, F1, KK5, in[13], 6); + ROUND(aaa, bbb, ccc, ddd, eee, F1, KK5, in[14], 5); + ROUND(eee, aaa, bbb, ccc, ddd, F1, KK5, in[0], 15); + ROUND(ddd, eee, aaa, bbb, ccc, F1, KK5, in[3], 13); + ROUND(ccc, ddd, eee, aaa, bbb, F1, KK5, in[9], 11); + ROUND(bbb, ccc, ddd, eee, aaa, F1, KK5, in[11], 11); + + /* Swap contents of "e" registers */ + tmp = ee; ee = eee; eee = tmp; + + /* combine results */ + state[0] += aa; + state[1] += bb; + state[2] += cc; + state[3] += dd; + state[4] += ee; + state[5] += aaa; + state[6] += bbb; + state[7] += ccc; + state[8] += ddd; + state[9] += eee; + + return; +} + +static int rmd320_init(struct shash_desc *desc) +{ + struct rmd320_ctx *rctx = shash_desc_ctx(desc); + + rctx->byte_count = 0; + + rctx->state[0] = RMD_H0; + rctx->state[1] = RMD_H1; + rctx->state[2] = RMD_H2; + rctx->state[3] = RMD_H3; + rctx->state[4] = RMD_H4; + rctx->state[5] = RMD_H5; + rctx->state[6] = RMD_H6; + rctx->state[7] = RMD_H7; + rctx->state[8] = RMD_H8; + 
rctx->state[9] = RMD_H9; + + memset(rctx->buffer, 0, sizeof(rctx->buffer)); + + return 0; +} + +static int rmd320_update(struct shash_desc *desc, const u8 *data, + unsigned int len) +{ + struct rmd320_ctx *rctx = shash_desc_ctx(desc); + const u32 avail = sizeof(rctx->buffer) - (rctx->byte_count & 0x3f); + + rctx->byte_count += len; + + /* Enough space in buffer? If so copy and we're done */ + if (avail > len) { + memcpy((char *)rctx->buffer + (sizeof(rctx->buffer) - avail), + data, len); + goto out; + } + + memcpy((char *)rctx->buffer + (sizeof(rctx->buffer) - avail), + data, avail); + + rmd320_transform(rctx->state, rctx->buffer); + data += avail; + len -= avail; + + while (len >= sizeof(rctx->buffer)) { + memcpy(rctx->buffer, data, sizeof(rctx->buffer)); + rmd320_transform(rctx->state, rctx->buffer); + data += sizeof(rctx->buffer); + len -= sizeof(rctx->buffer); + } + + memcpy(rctx->buffer, data, len); + +out: + return 0; +} + +/* Add padding and return the message digest. */ +static int rmd320_final(struct shash_desc *desc, u8 *out) +{ + struct rmd320_ctx *rctx = shash_desc_ctx(desc); + u32 i, index, padlen; + __le64 bits; + __le32 *dst = (__le32 *)out; + static const u8 padding[64] = { 0x80, }; + + bits = cpu_to_le64(rctx->byte_count << 3); + + /* Pad out to 56 mod 64 */ + index = rctx->byte_count & 0x3f; + padlen = (index < 56) ? (56 - index) : ((64+56) - index); + rmd320_update(desc, padding, padlen); + + /* Append length */ + rmd320_update(desc, (const u8 *)&bits, sizeof(bits)); + + /* Store state in digest */ + for (i = 0; i < 10; i++) + dst[i] = cpu_to_le32p(&rctx->state[i]); + + /* Wipe context */ + memset(rctx, 0, sizeof(*rctx)); + + return 0; +} + +static struct shash_alg alg = { + .digestsize = RMD320_DIGEST_SIZE, + .init = rmd320_init, + .update = rmd320_update, + .final = rmd320_final, + .descsize = sizeof(struct rmd320_ctx), + .base = { + .cra_name = "rmd320", + .cra_flags = CRYPTO_ALG_TYPE_SHASH, + .cra_blocksize = RMD320_BLOCK_SIZE, + .cra_module = THIS_MODULE, + } +}; + +static int __init rmd320_mod_init(void) +{ + return crypto_register_shash(&alg); +} + +static void __exit rmd320_mod_fini(void) +{ + crypto_unregister_shash(&alg); +} + +module_init(rmd320_mod_init); +module_exit(rmd320_mod_fini); + +MODULE_LICENSE("GPL"); +MODULE_AUTHOR("Adrian-Ken Rueegsegger <ken@codelabs.ch>"); +MODULE_DESCRIPTION("RIPEMD-320 Message Digest"); diff --git a/crypto/rng.c b/crypto/rng.c new file mode 100644 index 00000000..64f864fa --- /dev/null +++ b/crypto/rng.c @@ -0,0 +1,154 @@ +/* + * Cryptographic API. + * + * RNG operations. + * + * Copyright (c) 2008 Neil Horman <nhorman@tuxdriver.com> + * + * This program is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License as published by the Free + * Software Foundation; either version 2 of the License, or (at your option) + * any later version. 
+ * + */ + +#include <linux/atomic.h> +#include <crypto/internal/rng.h> +#include <linux/err.h> +#include <linux/module.h> +#include <linux/mutex.h> +#include <linux/random.h> +#include <linux/seq_file.h> +#include <linux/slab.h> +#include <linux/string.h> +#include <linux/cryptouser.h> +#include <net/netlink.h> + +static DEFINE_MUTEX(crypto_default_rng_lock); +struct crypto_rng *crypto_default_rng; +EXPORT_SYMBOL_GPL(crypto_default_rng); +static int crypto_default_rng_refcnt; + +static int rngapi_reset(struct crypto_rng *tfm, u8 *seed, unsigned int slen) +{ + u8 *buf = NULL; + int err; + + if (!seed && slen) { + buf = kmalloc(slen, GFP_KERNEL); + if (!buf) + return -ENOMEM; + + get_random_bytes(buf, slen); + seed = buf; + } + + err = crypto_rng_alg(tfm)->rng_reset(tfm, seed, slen); + + kfree(buf); + return err; +} + +static int crypto_init_rng_ops(struct crypto_tfm *tfm, u32 type, u32 mask) +{ + struct rng_alg *alg = &tfm->__crt_alg->cra_rng; + struct rng_tfm *ops = &tfm->crt_rng; + + ops->rng_gen_random = alg->rng_make_random; + ops->rng_reset = rngapi_reset; + + return 0; +} + +#ifdef CONFIG_NET +static int crypto_rng_report(struct sk_buff *skb, struct crypto_alg *alg) +{ + struct crypto_report_rng rrng; + + snprintf(rrng.type, CRYPTO_MAX_ALG_NAME, "%s", "rng"); + + rrng.seedsize = alg->cra_rng.seedsize; + + NLA_PUT(skb, CRYPTOCFGA_REPORT_RNG, + sizeof(struct crypto_report_rng), &rrng); + + return 0; + +nla_put_failure: + return -EMSGSIZE; +} +#else +static int crypto_rng_report(struct sk_buff *skb, struct crypto_alg *alg) +{ + return -ENOSYS; +} +#endif + +static void crypto_rng_show(struct seq_file *m, struct crypto_alg *alg) + __attribute__ ((unused)); +static void crypto_rng_show(struct seq_file *m, struct crypto_alg *alg) +{ + seq_printf(m, "type : rng\n"); + seq_printf(m, "seedsize : %u\n", alg->cra_rng.seedsize); +} + +static unsigned int crypto_rng_ctxsize(struct crypto_alg *alg, u32 type, + u32 mask) +{ + return alg->cra_ctxsize; +} + +const struct crypto_type crypto_rng_type = { + .ctxsize = crypto_rng_ctxsize, + .init = crypto_init_rng_ops, +#ifdef CONFIG_PROC_FS + .show = crypto_rng_show, +#endif + .report = crypto_rng_report, +}; +EXPORT_SYMBOL_GPL(crypto_rng_type); + +int crypto_get_default_rng(void) +{ + struct crypto_rng *rng; + int err; + + mutex_lock(&crypto_default_rng_lock); + if (!crypto_default_rng) { + rng = crypto_alloc_rng("stdrng", 0, 0); + err = PTR_ERR(rng); + if (IS_ERR(rng)) + goto unlock; + + err = crypto_rng_reset(rng, NULL, crypto_rng_seedsize(rng)); + if (err) { + crypto_free_rng(rng); + goto unlock; + } + + crypto_default_rng = rng; + } + + crypto_default_rng_refcnt++; + err = 0; + +unlock: + mutex_unlock(&crypto_default_rng_lock); + + return err; +} +EXPORT_SYMBOL_GPL(crypto_get_default_rng); + +void crypto_put_default_rng(void) +{ + mutex_lock(&crypto_default_rng_lock); + if (!--crypto_default_rng_refcnt) { + crypto_free_rng(crypto_default_rng); + crypto_default_rng = NULL; + } + mutex_unlock(&crypto_default_rng_lock); +} +EXPORT_SYMBOL_GPL(crypto_put_default_rng); + +MODULE_LICENSE("GPL"); +MODULE_DESCRIPTION("Random Number Generator"); diff --git a/crypto/salsa20_generic.c b/crypto/salsa20_generic.c new file mode 100644 index 00000000..eac10c11 --- /dev/null +++ b/crypto/salsa20_generic.c @@ -0,0 +1,252 @@ +/* + * Salsa20: Salsa20 stream cipher algorithm + * + * Copyright (c) 2007 Tan Swee Heng <thesweeheng@gmail.com> + * + * Derived from: + * - salsa20.c: Public domain C code by Daniel J. 
Bernstein <djb@cr.yp.to> + * + * Salsa20 is a stream cipher candidate in eSTREAM, the ECRYPT Stream + * Cipher Project. It is designed by Daniel J. Bernstein <djb@cr.yp.to>. + * More information about eSTREAM and Salsa20 can be found here: + * http://www.ecrypt.eu.org/stream/ + * http://cr.yp.to/snuffle.html + * + * This program is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License as published by the Free + * Software Foundation; either version 2 of the License, or (at your option) + * any later version. + * + */ + +#include <linux/init.h> +#include <linux/module.h> +#include <linux/errno.h> +#include <linux/crypto.h> +#include <linux/types.h> +#include <linux/bitops.h> +#include <crypto/algapi.h> +#include <asm/byteorder.h> + +#define SALSA20_IV_SIZE 8U +#define SALSA20_MIN_KEY_SIZE 16U +#define SALSA20_MAX_KEY_SIZE 32U + +/* + * Start of code taken from D. J. Bernstein's reference implementation. + * With some modifications and optimizations made to suit our needs. + */ + +/* +salsa20-ref.c version 20051118 +D. J. Bernstein +Public domain. +*/ + +#define U32TO8_LITTLE(p, v) \ + { (p)[0] = (v >> 0) & 0xff; (p)[1] = (v >> 8) & 0xff; \ + (p)[2] = (v >> 16) & 0xff; (p)[3] = (v >> 24) & 0xff; } +#define U8TO32_LITTLE(p) \ + (((u32)((p)[0]) ) | ((u32)((p)[1]) << 8) | \ + ((u32)((p)[2]) << 16) | ((u32)((p)[3]) << 24) ) + +struct salsa20_ctx +{ + u32 input[16]; +}; + +static void salsa20_wordtobyte(u8 output[64], const u32 input[16]) +{ + u32 x[16]; + int i; + + memcpy(x, input, sizeof(x)); + for (i = 20; i > 0; i -= 2) { + x[ 4] ^= rol32((x[ 0] + x[12]), 7); + x[ 8] ^= rol32((x[ 4] + x[ 0]), 9); + x[12] ^= rol32((x[ 8] + x[ 4]), 13); + x[ 0] ^= rol32((x[12] + x[ 8]), 18); + x[ 9] ^= rol32((x[ 5] + x[ 1]), 7); + x[13] ^= rol32((x[ 9] + x[ 5]), 9); + x[ 1] ^= rol32((x[13] + x[ 9]), 13); + x[ 5] ^= rol32((x[ 1] + x[13]), 18); + x[14] ^= rol32((x[10] + x[ 6]), 7); + x[ 2] ^= rol32((x[14] + x[10]), 9); + x[ 6] ^= rol32((x[ 2] + x[14]), 13); + x[10] ^= rol32((x[ 6] + x[ 2]), 18); + x[ 3] ^= rol32((x[15] + x[11]), 7); + x[ 7] ^= rol32((x[ 3] + x[15]), 9); + x[11] ^= rol32((x[ 7] + x[ 3]), 13); + x[15] ^= rol32((x[11] + x[ 7]), 18); + x[ 1] ^= rol32((x[ 0] + x[ 3]), 7); + x[ 2] ^= rol32((x[ 1] + x[ 0]), 9); + x[ 3] ^= rol32((x[ 2] + x[ 1]), 13); + x[ 0] ^= rol32((x[ 3] + x[ 2]), 18); + x[ 6] ^= rol32((x[ 5] + x[ 4]), 7); + x[ 7] ^= rol32((x[ 6] + x[ 5]), 9); + x[ 4] ^= rol32((x[ 7] + x[ 6]), 13); + x[ 5] ^= rol32((x[ 4] + x[ 7]), 18); + x[11] ^= rol32((x[10] + x[ 9]), 7); + x[ 8] ^= rol32((x[11] + x[10]), 9); + x[ 9] ^= rol32((x[ 8] + x[11]), 13); + x[10] ^= rol32((x[ 9] + x[ 8]), 18); + x[12] ^= rol32((x[15] + x[14]), 7); + x[13] ^= rol32((x[12] + x[15]), 9); + x[14] ^= rol32((x[13] + x[12]), 13); + x[15] ^= rol32((x[14] + x[13]), 18); + } + for (i = 0; i < 16; ++i) + x[i] += input[i]; + for (i = 0; i < 16; ++i) + U32TO8_LITTLE(output + 4 * i,x[i]); +} + +static const char sigma[16] = "expand 32-byte k"; +static const char tau[16] = "expand 16-byte k"; + +static void salsa20_keysetup(struct salsa20_ctx *ctx, const u8 *k, u32 kbytes) +{ + const char *constants; + + ctx->input[1] = U8TO32_LITTLE(k + 0); + ctx->input[2] = U8TO32_LITTLE(k + 4); + ctx->input[3] = U8TO32_LITTLE(k + 8); + ctx->input[4] = U8TO32_LITTLE(k + 12); + if (kbytes == 32) { /* recommended */ + k += 16; + constants = sigma; + } else { /* kbytes == 16 */ + constants = tau; + } + ctx->input[11] = U8TO32_LITTLE(k + 0); + ctx->input[12] = U8TO32_LITTLE(k + 4); + ctx->input[13] = 
U8TO32_LITTLE(k + 8); + ctx->input[14] = U8TO32_LITTLE(k + 12); + ctx->input[0] = U8TO32_LITTLE(constants + 0); + ctx->input[5] = U8TO32_LITTLE(constants + 4); + ctx->input[10] = U8TO32_LITTLE(constants + 8); + ctx->input[15] = U8TO32_LITTLE(constants + 12); +} + +static void salsa20_ivsetup(struct salsa20_ctx *ctx, const u8 *iv) +{ + ctx->input[6] = U8TO32_LITTLE(iv + 0); + ctx->input[7] = U8TO32_LITTLE(iv + 4); + ctx->input[8] = 0; + ctx->input[9] = 0; +} + +static void salsa20_encrypt_bytes(struct salsa20_ctx *ctx, u8 *dst, + const u8 *src, unsigned int bytes) +{ + u8 buf[64]; + + if (dst != src) + memcpy(dst, src, bytes); + + while (bytes) { + salsa20_wordtobyte(buf, ctx->input); + + ctx->input[8]++; + if (!ctx->input[8]) + ctx->input[9]++; + + if (bytes <= 64) { + crypto_xor(dst, buf, bytes); + return; + } + + crypto_xor(dst, buf, 64); + bytes -= 64; + dst += 64; + } +} + +/* + * End of code taken from D. J. Bernstein's reference implementation. + */ + +static int setkey(struct crypto_tfm *tfm, const u8 *key, + unsigned int keysize) +{ + struct salsa20_ctx *ctx = crypto_tfm_ctx(tfm); + salsa20_keysetup(ctx, key, keysize); + return 0; +} + +static int encrypt(struct blkcipher_desc *desc, + struct scatterlist *dst, struct scatterlist *src, + unsigned int nbytes) +{ + struct blkcipher_walk walk; + struct crypto_blkcipher *tfm = desc->tfm; + struct salsa20_ctx *ctx = crypto_blkcipher_ctx(tfm); + int err; + + blkcipher_walk_init(&walk, dst, src, nbytes); + err = blkcipher_walk_virt_block(desc, &walk, 64); + + salsa20_ivsetup(ctx, walk.iv); + + if (likely(walk.nbytes == nbytes)) + { + salsa20_encrypt_bytes(ctx, walk.dst.virt.addr, + walk.src.virt.addr, nbytes); + return blkcipher_walk_done(desc, &walk, 0); + } + + while (walk.nbytes >= 64) { + salsa20_encrypt_bytes(ctx, walk.dst.virt.addr, + walk.src.virt.addr, + walk.nbytes - (walk.nbytes % 64)); + err = blkcipher_walk_done(desc, &walk, walk.nbytes % 64); + } + + if (walk.nbytes) { + salsa20_encrypt_bytes(ctx, walk.dst.virt.addr, + walk.src.virt.addr, walk.nbytes); + err = blkcipher_walk_done(desc, &walk, 0); + } + + return err; +} + +static struct crypto_alg alg = { + .cra_name = "salsa20", + .cra_driver_name = "salsa20-generic", + .cra_priority = 100, + .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER, + .cra_type = &crypto_blkcipher_type, + .cra_blocksize = 1, + .cra_ctxsize = sizeof(struct salsa20_ctx), + .cra_alignmask = 3, + .cra_module = THIS_MODULE, + .cra_list = LIST_HEAD_INIT(alg.cra_list), + .cra_u = { + .blkcipher = { + .setkey = setkey, + .encrypt = encrypt, + .decrypt = encrypt, + .min_keysize = SALSA20_MIN_KEY_SIZE, + .max_keysize = SALSA20_MAX_KEY_SIZE, + .ivsize = SALSA20_IV_SIZE, + } + } +}; + +static int __init salsa20_generic_mod_init(void) +{ + return crypto_register_alg(&alg); +} + +static void __exit salsa20_generic_mod_fini(void) +{ + crypto_unregister_alg(&alg); +} + +module_init(salsa20_generic_mod_init); +module_exit(salsa20_generic_mod_fini); + +MODULE_LICENSE("GPL"); +MODULE_DESCRIPTION ("Salsa20 stream cipher algorithm"); +MODULE_ALIAS("salsa20"); diff --git a/crypto/scatterwalk.c b/crypto/scatterwalk.c new file mode 100644 index 00000000..7281b8a9 --- /dev/null +++ b/crypto/scatterwalk.c @@ -0,0 +1,126 @@ +/* + * Cryptographic API. + * + * Cipher operations. + * + * Copyright (c) 2002 James Morris <jmorris@intercode.com.au> + * 2002 Adam J. 
Richter <adam@yggdrasil.com> + * 2004 Jean-Luc Cooke <jlcooke@certainkey.com> + * + * This program is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License as published by the Free + * Software Foundation; either version 2 of the License, or (at your option) + * any later version. + * + */ + +#include <crypto/scatterwalk.h> +#include <linux/kernel.h> +#include <linux/mm.h> +#include <linux/module.h> +#include <linux/pagemap.h> +#include <linux/highmem.h> +#include <linux/scatterlist.h> + +static inline void memcpy_dir(void *buf, void *sgdata, size_t nbytes, int out) +{ + void *src = out ? buf : sgdata; + void *dst = out ? sgdata : buf; + + memcpy(dst, src, nbytes); +} + +void scatterwalk_start(struct scatter_walk *walk, struct scatterlist *sg) +{ + walk->sg = sg; + + BUG_ON(!sg->length); + + walk->offset = sg->offset; +} +EXPORT_SYMBOL_GPL(scatterwalk_start); + +void *scatterwalk_map(struct scatter_walk *walk) +{ + return kmap_atomic(scatterwalk_page(walk)) + + offset_in_page(walk->offset); +} +EXPORT_SYMBOL_GPL(scatterwalk_map); + +static void scatterwalk_pagedone(struct scatter_walk *walk, int out, + unsigned int more) +{ + if (out) { + struct page *page; + + page = sg_page(walk->sg) + ((walk->offset - 1) >> PAGE_SHIFT); + if (!PageSlab(page)) + flush_dcache_page(page); + } + + if (more) { + walk->offset += PAGE_SIZE - 1; + walk->offset &= PAGE_MASK; + if (walk->offset >= walk->sg->offset + walk->sg->length) + scatterwalk_start(walk, scatterwalk_sg_next(walk->sg)); + } +} + +void scatterwalk_done(struct scatter_walk *walk, int out, int more) +{ + if (!(scatterwalk_pagelen(walk) & (PAGE_SIZE - 1)) || !more) + scatterwalk_pagedone(walk, out, more); +} +EXPORT_SYMBOL_GPL(scatterwalk_done); + +void scatterwalk_copychunks(void *buf, struct scatter_walk *walk, + size_t nbytes, int out) +{ + for (;;) { + unsigned int len_this_page = scatterwalk_pagelen(walk); + u8 *vaddr; + + if (len_this_page > nbytes) + len_this_page = nbytes; + + vaddr = scatterwalk_map(walk); + memcpy_dir(buf, vaddr, len_this_page, out); + scatterwalk_unmap(vaddr); + + scatterwalk_advance(walk, len_this_page); + + if (nbytes == len_this_page) + break; + + buf += len_this_page; + nbytes -= len_this_page; + + scatterwalk_pagedone(walk, out, 1); + } +} +EXPORT_SYMBOL_GPL(scatterwalk_copychunks); + +void scatterwalk_map_and_copy(void *buf, struct scatterlist *sg, + unsigned int start, unsigned int nbytes, int out) +{ + struct scatter_walk walk; + unsigned int offset = 0; + + if (!nbytes) + return; + + for (;;) { + scatterwalk_start(&walk, sg); + + if (start < offset + sg->length) + break; + + offset += sg->length; + sg = scatterwalk_sg_next(sg); + } + + scatterwalk_advance(&walk, start - offset); + scatterwalk_copychunks(buf, &walk, nbytes, out); + scatterwalk_done(&walk, out, 0); +} +EXPORT_SYMBOL_GPL(scatterwalk_map_and_copy); diff --git a/crypto/seed.c b/crypto/seed.c new file mode 100644 index 00000000..d3e422f6 --- /dev/null +++ b/crypto/seed.c @@ -0,0 +1,479 @@ +/* + * Cryptographic API. + * + * SEED Cipher Algorithm. + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 2 of the License, or + * (at your option) any later version. + * + * Documentation of SEED can be found in RFC 4269. + * Copyright (C) 2007 Korea Information Security Agency (KISA). 
+ */ + +#include <linux/module.h> +#include <linux/init.h> +#include <linux/types.h> +#include <linux/errno.h> +#include <linux/crypto.h> +#include <asm/byteorder.h> + +#define SEED_NUM_KCONSTANTS 16 +#define SEED_KEY_SIZE 16 +#define SEED_BLOCK_SIZE 16 +#define SEED_KEYSCHED_LEN 32 + +/* + * #define byte(x, nr) ((unsigned char)((x) >> (nr*8))) + */ +static inline u8 +byte(const u32 x, const unsigned n) +{ + return x >> (n << 3); +} + +struct seed_ctx { + u32 keysched[SEED_KEYSCHED_LEN]; +}; + +static const u32 SS0[256] = { + 0x2989a1a8, 0x05858184, 0x16c6d2d4, 0x13c3d3d0, + 0x14445054, 0x1d0d111c, 0x2c8ca0ac, 0x25052124, + 0x1d4d515c, 0x03434340, 0x18081018, 0x1e0e121c, + 0x11415150, 0x3cccf0fc, 0x0acac2c8, 0x23436360, + 0x28082028, 0x04444044, 0x20002020, 0x1d8d919c, + 0x20c0e0e0, 0x22c2e2e0, 0x08c8c0c8, 0x17071314, + 0x2585a1a4, 0x0f8f838c, 0x03030300, 0x3b4b7378, + 0x3b8bb3b8, 0x13031310, 0x12c2d2d0, 0x2ecee2ec, + 0x30407070, 0x0c8c808c, 0x3f0f333c, 0x2888a0a8, + 0x32023230, 0x1dcdd1dc, 0x36c6f2f4, 0x34447074, + 0x2ccce0ec, 0x15859194, 0x0b0b0308, 0x17475354, + 0x1c4c505c, 0x1b4b5358, 0x3d8db1bc, 0x01010100, + 0x24042024, 0x1c0c101c, 0x33437370, 0x18889098, + 0x10001010, 0x0cccc0cc, 0x32c2f2f0, 0x19c9d1d8, + 0x2c0c202c, 0x27c7e3e4, 0x32427270, 0x03838380, + 0x1b8b9398, 0x11c1d1d0, 0x06868284, 0x09c9c1c8, + 0x20406060, 0x10405050, 0x2383a3a0, 0x2bcbe3e8, + 0x0d0d010c, 0x3686b2b4, 0x1e8e929c, 0x0f4f434c, + 0x3787b3b4, 0x1a4a5258, 0x06c6c2c4, 0x38487078, + 0x2686a2a4, 0x12021210, 0x2f8fa3ac, 0x15c5d1d4, + 0x21416160, 0x03c3c3c0, 0x3484b0b4, 0x01414140, + 0x12425250, 0x3d4d717c, 0x0d8d818c, 0x08080008, + 0x1f0f131c, 0x19899198, 0x00000000, 0x19091118, + 0x04040004, 0x13435350, 0x37c7f3f4, 0x21c1e1e0, + 0x3dcdf1fc, 0x36467274, 0x2f0f232c, 0x27072324, + 0x3080b0b0, 0x0b8b8388, 0x0e0e020c, 0x2b8ba3a8, + 0x2282a2a0, 0x2e4e626c, 0x13839390, 0x0d4d414c, + 0x29496168, 0x3c4c707c, 0x09090108, 0x0a0a0208, + 0x3f8fb3bc, 0x2fcfe3ec, 0x33c3f3f0, 0x05c5c1c4, + 0x07878384, 0x14041014, 0x3ecef2fc, 0x24446064, + 0x1eced2dc, 0x2e0e222c, 0x0b4b4348, 0x1a0a1218, + 0x06060204, 0x21012120, 0x2b4b6368, 0x26466264, + 0x02020200, 0x35c5f1f4, 0x12829290, 0x0a8a8288, + 0x0c0c000c, 0x3383b3b0, 0x3e4e727c, 0x10c0d0d0, + 0x3a4a7278, 0x07474344, 0x16869294, 0x25c5e1e4, + 0x26062224, 0x00808080, 0x2d8da1ac, 0x1fcfd3dc, + 0x2181a1a0, 0x30003030, 0x37073334, 0x2e8ea2ac, + 0x36063234, 0x15051114, 0x22022220, 0x38083038, + 0x34c4f0f4, 0x2787a3a4, 0x05454144, 0x0c4c404c, + 0x01818180, 0x29c9e1e8, 0x04848084, 0x17879394, + 0x35053134, 0x0bcbc3c8, 0x0ecec2cc, 0x3c0c303c, + 0x31417170, 0x11011110, 0x07c7c3c4, 0x09898188, + 0x35457174, 0x3bcbf3f8, 0x1acad2d8, 0x38c8f0f8, + 0x14849094, 0x19495158, 0x02828280, 0x04c4c0c4, + 0x3fcff3fc, 0x09494148, 0x39093138, 0x27476364, + 0x00c0c0c0, 0x0fcfc3cc, 0x17c7d3d4, 0x3888b0b8, + 0x0f0f030c, 0x0e8e828c, 0x02424240, 0x23032320, + 0x11819190, 0x2c4c606c, 0x1bcbd3d8, 0x2484a0a4, + 0x34043034, 0x31c1f1f0, 0x08484048, 0x02c2c2c0, + 0x2f4f636c, 0x3d0d313c, 0x2d0d212c, 0x00404040, + 0x3e8eb2bc, 0x3e0e323c, 0x3c8cb0bc, 0x01c1c1c0, + 0x2a8aa2a8, 0x3a8ab2b8, 0x0e4e424c, 0x15455154, + 0x3b0b3338, 0x1cccd0dc, 0x28486068, 0x3f4f737c, + 0x1c8c909c, 0x18c8d0d8, 0x0a4a4248, 0x16465254, + 0x37477374, 0x2080a0a0, 0x2dcde1ec, 0x06464244, + 0x3585b1b4, 0x2b0b2328, 0x25456164, 0x3acaf2f8, + 0x23c3e3e0, 0x3989b1b8, 0x3181b1b0, 0x1f8f939c, + 0x1e4e525c, 0x39c9f1f8, 0x26c6e2e4, 0x3282b2b0, + 0x31013130, 0x2acae2e8, 0x2d4d616c, 0x1f4f535c, + 0x24c4e0e4, 0x30c0f0f0, 0x0dcdc1cc, 0x08888088, + 0x16061214, 
0x3a0a3238, 0x18485058, 0x14c4d0d4, + 0x22426260, 0x29092128, 0x07070304, 0x33033330, + 0x28c8e0e8, 0x1b0b1318, 0x05050104, 0x39497178, + 0x10809090, 0x2a4a6268, 0x2a0a2228, 0x1a8a9298, +}; + +static const u32 SS1[256] = { + 0x38380830, 0xe828c8e0, 0x2c2d0d21, 0xa42686a2, + 0xcc0fcfc3, 0xdc1eced2, 0xb03383b3, 0xb83888b0, + 0xac2f8fa3, 0x60204060, 0x54154551, 0xc407c7c3, + 0x44044440, 0x6c2f4f63, 0x682b4b63, 0x581b4b53, + 0xc003c3c3, 0x60224262, 0x30330333, 0xb43585b1, + 0x28290921, 0xa02080a0, 0xe022c2e2, 0xa42787a3, + 0xd013c3d3, 0x90118191, 0x10110111, 0x04060602, + 0x1c1c0c10, 0xbc3c8cb0, 0x34360632, 0x480b4b43, + 0xec2fcfe3, 0x88088880, 0x6c2c4c60, 0xa82888a0, + 0x14170713, 0xc404c4c0, 0x14160612, 0xf434c4f0, + 0xc002c2c2, 0x44054541, 0xe021c1e1, 0xd416c6d2, + 0x3c3f0f33, 0x3c3d0d31, 0x8c0e8e82, 0x98188890, + 0x28280820, 0x4c0e4e42, 0xf436c6f2, 0x3c3e0e32, + 0xa42585a1, 0xf839c9f1, 0x0c0d0d01, 0xdc1fcfd3, + 0xd818c8d0, 0x282b0b23, 0x64264662, 0x783a4a72, + 0x24270723, 0x2c2f0f23, 0xf031c1f1, 0x70324272, + 0x40024242, 0xd414c4d0, 0x40014141, 0xc000c0c0, + 0x70334373, 0x64274763, 0xac2c8ca0, 0x880b8b83, + 0xf437c7f3, 0xac2d8da1, 0x80008080, 0x1c1f0f13, + 0xc80acac2, 0x2c2c0c20, 0xa82a8aa2, 0x34340430, + 0xd012c2d2, 0x080b0b03, 0xec2ecee2, 0xe829c9e1, + 0x5c1d4d51, 0x94148490, 0x18180810, 0xf838c8f0, + 0x54174753, 0xac2e8ea2, 0x08080800, 0xc405c5c1, + 0x10130313, 0xcc0dcdc1, 0x84068682, 0xb83989b1, + 0xfc3fcff3, 0x7c3d4d71, 0xc001c1c1, 0x30310131, + 0xf435c5f1, 0x880a8a82, 0x682a4a62, 0xb03181b1, + 0xd011c1d1, 0x20200020, 0xd417c7d3, 0x00020202, + 0x20220222, 0x04040400, 0x68284860, 0x70314171, + 0x04070703, 0xd81bcbd3, 0x9c1d8d91, 0x98198991, + 0x60214161, 0xbc3e8eb2, 0xe426c6e2, 0x58194951, + 0xdc1dcdd1, 0x50114151, 0x90108090, 0xdc1cccd0, + 0x981a8a92, 0xa02383a3, 0xa82b8ba3, 0xd010c0d0, + 0x80018181, 0x0c0f0f03, 0x44074743, 0x181a0a12, + 0xe023c3e3, 0xec2ccce0, 0x8c0d8d81, 0xbc3f8fb3, + 0x94168692, 0x783b4b73, 0x5c1c4c50, 0xa02282a2, + 0xa02181a1, 0x60234363, 0x20230323, 0x4c0d4d41, + 0xc808c8c0, 0x9c1e8e92, 0x9c1c8c90, 0x383a0a32, + 0x0c0c0c00, 0x2c2e0e22, 0xb83a8ab2, 0x6c2e4e62, + 0x9c1f8f93, 0x581a4a52, 0xf032c2f2, 0x90128292, + 0xf033c3f3, 0x48094941, 0x78384870, 0xcc0cccc0, + 0x14150511, 0xf83bcbf3, 0x70304070, 0x74354571, + 0x7c3f4f73, 0x34350531, 0x10100010, 0x00030303, + 0x64244460, 0x6c2d4d61, 0xc406c6c2, 0x74344470, + 0xd415c5d1, 0xb43484b0, 0xe82acae2, 0x08090901, + 0x74364672, 0x18190911, 0xfc3ecef2, 0x40004040, + 0x10120212, 0xe020c0e0, 0xbc3d8db1, 0x04050501, + 0xf83acaf2, 0x00010101, 0xf030c0f0, 0x282a0a22, + 0x5c1e4e52, 0xa82989a1, 0x54164652, 0x40034343, + 0x84058581, 0x14140410, 0x88098981, 0x981b8b93, + 0xb03080b0, 0xe425c5e1, 0x48084840, 0x78394971, + 0x94178793, 0xfc3cccf0, 0x1c1e0e12, 0x80028282, + 0x20210121, 0x8c0c8c80, 0x181b0b13, 0x5c1f4f53, + 0x74374773, 0x54144450, 0xb03282b2, 0x1c1d0d11, + 0x24250521, 0x4c0f4f43, 0x00000000, 0x44064642, + 0xec2dcde1, 0x58184850, 0x50124252, 0xe82bcbe3, + 0x7c3e4e72, 0xd81acad2, 0xc809c9c1, 0xfc3dcdf1, + 0x30300030, 0x94158591, 0x64254561, 0x3c3c0c30, + 0xb43686b2, 0xe424c4e0, 0xb83b8bb3, 0x7c3c4c70, + 0x0c0e0e02, 0x50104050, 0x38390931, 0x24260622, + 0x30320232, 0x84048480, 0x68294961, 0x90138393, + 0x34370733, 0xe427c7e3, 0x24240420, 0xa42484a0, + 0xc80bcbc3, 0x50134353, 0x080a0a02, 0x84078783, + 0xd819c9d1, 0x4c0c4c40, 0x80038383, 0x8c0f8f83, + 0xcc0ecec2, 0x383b0b33, 0x480a4a42, 0xb43787b3, +}; + +static const u32 SS2[256] = { + 0xa1a82989, 0x81840585, 0xd2d416c6, 0xd3d013c3, + 0x50541444, 0x111c1d0d, 0xa0ac2c8c, 
0x21242505, + 0x515c1d4d, 0x43400343, 0x10181808, 0x121c1e0e, + 0x51501141, 0xf0fc3ccc, 0xc2c80aca, 0x63602343, + 0x20282808, 0x40440444, 0x20202000, 0x919c1d8d, + 0xe0e020c0, 0xe2e022c2, 0xc0c808c8, 0x13141707, + 0xa1a42585, 0x838c0f8f, 0x03000303, 0x73783b4b, + 0xb3b83b8b, 0x13101303, 0xd2d012c2, 0xe2ec2ece, + 0x70703040, 0x808c0c8c, 0x333c3f0f, 0xa0a82888, + 0x32303202, 0xd1dc1dcd, 0xf2f436c6, 0x70743444, + 0xe0ec2ccc, 0x91941585, 0x03080b0b, 0x53541747, + 0x505c1c4c, 0x53581b4b, 0xb1bc3d8d, 0x01000101, + 0x20242404, 0x101c1c0c, 0x73703343, 0x90981888, + 0x10101000, 0xc0cc0ccc, 0xf2f032c2, 0xd1d819c9, + 0x202c2c0c, 0xe3e427c7, 0x72703242, 0x83800383, + 0x93981b8b, 0xd1d011c1, 0x82840686, 0xc1c809c9, + 0x60602040, 0x50501040, 0xa3a02383, 0xe3e82bcb, + 0x010c0d0d, 0xb2b43686, 0x929c1e8e, 0x434c0f4f, + 0xb3b43787, 0x52581a4a, 0xc2c406c6, 0x70783848, + 0xa2a42686, 0x12101202, 0xa3ac2f8f, 0xd1d415c5, + 0x61602141, 0xc3c003c3, 0xb0b43484, 0x41400141, + 0x52501242, 0x717c3d4d, 0x818c0d8d, 0x00080808, + 0x131c1f0f, 0x91981989, 0x00000000, 0x11181909, + 0x00040404, 0x53501343, 0xf3f437c7, 0xe1e021c1, + 0xf1fc3dcd, 0x72743646, 0x232c2f0f, 0x23242707, + 0xb0b03080, 0x83880b8b, 0x020c0e0e, 0xa3a82b8b, + 0xa2a02282, 0x626c2e4e, 0x93901383, 0x414c0d4d, + 0x61682949, 0x707c3c4c, 0x01080909, 0x02080a0a, + 0xb3bc3f8f, 0xe3ec2fcf, 0xf3f033c3, 0xc1c405c5, + 0x83840787, 0x10141404, 0xf2fc3ece, 0x60642444, + 0xd2dc1ece, 0x222c2e0e, 0x43480b4b, 0x12181a0a, + 0x02040606, 0x21202101, 0x63682b4b, 0x62642646, + 0x02000202, 0xf1f435c5, 0x92901282, 0x82880a8a, + 0x000c0c0c, 0xb3b03383, 0x727c3e4e, 0xd0d010c0, + 0x72783a4a, 0x43440747, 0x92941686, 0xe1e425c5, + 0x22242606, 0x80800080, 0xa1ac2d8d, 0xd3dc1fcf, + 0xa1a02181, 0x30303000, 0x33343707, 0xa2ac2e8e, + 0x32343606, 0x11141505, 0x22202202, 0x30383808, + 0xf0f434c4, 0xa3a42787, 0x41440545, 0x404c0c4c, + 0x81800181, 0xe1e829c9, 0x80840484, 0x93941787, + 0x31343505, 0xc3c80bcb, 0xc2cc0ece, 0x303c3c0c, + 0x71703141, 0x11101101, 0xc3c407c7, 0x81880989, + 0x71743545, 0xf3f83bcb, 0xd2d81aca, 0xf0f838c8, + 0x90941484, 0x51581949, 0x82800282, 0xc0c404c4, + 0xf3fc3fcf, 0x41480949, 0x31383909, 0x63642747, + 0xc0c000c0, 0xc3cc0fcf, 0xd3d417c7, 0xb0b83888, + 0x030c0f0f, 0x828c0e8e, 0x42400242, 0x23202303, + 0x91901181, 0x606c2c4c, 0xd3d81bcb, 0xa0a42484, + 0x30343404, 0xf1f031c1, 0x40480848, 0xc2c002c2, + 0x636c2f4f, 0x313c3d0d, 0x212c2d0d, 0x40400040, + 0xb2bc3e8e, 0x323c3e0e, 0xb0bc3c8c, 0xc1c001c1, + 0xa2a82a8a, 0xb2b83a8a, 0x424c0e4e, 0x51541545, + 0x33383b0b, 0xd0dc1ccc, 0x60682848, 0x737c3f4f, + 0x909c1c8c, 0xd0d818c8, 0x42480a4a, 0x52541646, + 0x73743747, 0xa0a02080, 0xe1ec2dcd, 0x42440646, + 0xb1b43585, 0x23282b0b, 0x61642545, 0xf2f83aca, + 0xe3e023c3, 0xb1b83989, 0xb1b03181, 0x939c1f8f, + 0x525c1e4e, 0xf1f839c9, 0xe2e426c6, 0xb2b03282, + 0x31303101, 0xe2e82aca, 0x616c2d4d, 0x535c1f4f, + 0xe0e424c4, 0xf0f030c0, 0xc1cc0dcd, 0x80880888, + 0x12141606, 0x32383a0a, 0x50581848, 0xd0d414c4, + 0x62602242, 0x21282909, 0x03040707, 0x33303303, + 0xe0e828c8, 0x13181b0b, 0x01040505, 0x71783949, + 0x90901080, 0x62682a4a, 0x22282a0a, 0x92981a8a, +}; + +static const u32 SS3[256] = { + 0x08303838, 0xc8e0e828, 0x0d212c2d, 0x86a2a426, + 0xcfc3cc0f, 0xced2dc1e, 0x83b3b033, 0x88b0b838, + 0x8fa3ac2f, 0x40606020, 0x45515415, 0xc7c3c407, + 0x44404404, 0x4f636c2f, 0x4b63682b, 0x4b53581b, + 0xc3c3c003, 0x42626022, 0x03333033, 0x85b1b435, + 0x09212829, 0x80a0a020, 0xc2e2e022, 0x87a3a427, + 0xc3d3d013, 0x81919011, 0x01111011, 0x06020406, + 0x0c101c1c, 0x8cb0bc3c, 0x06323436, 0x4b43480b, + 
0xcfe3ec2f, 0x88808808, 0x4c606c2c, 0x88a0a828, + 0x07131417, 0xc4c0c404, 0x06121416, 0xc4f0f434, + 0xc2c2c002, 0x45414405, 0xc1e1e021, 0xc6d2d416, + 0x0f333c3f, 0x0d313c3d, 0x8e828c0e, 0x88909818, + 0x08202828, 0x4e424c0e, 0xc6f2f436, 0x0e323c3e, + 0x85a1a425, 0xc9f1f839, 0x0d010c0d, 0xcfd3dc1f, + 0xc8d0d818, 0x0b23282b, 0x46626426, 0x4a72783a, + 0x07232427, 0x0f232c2f, 0xc1f1f031, 0x42727032, + 0x42424002, 0xc4d0d414, 0x41414001, 0xc0c0c000, + 0x43737033, 0x47636427, 0x8ca0ac2c, 0x8b83880b, + 0xc7f3f437, 0x8da1ac2d, 0x80808000, 0x0f131c1f, + 0xcac2c80a, 0x0c202c2c, 0x8aa2a82a, 0x04303434, + 0xc2d2d012, 0x0b03080b, 0xcee2ec2e, 0xc9e1e829, + 0x4d515c1d, 0x84909414, 0x08101818, 0xc8f0f838, + 0x47535417, 0x8ea2ac2e, 0x08000808, 0xc5c1c405, + 0x03131013, 0xcdc1cc0d, 0x86828406, 0x89b1b839, + 0xcff3fc3f, 0x4d717c3d, 0xc1c1c001, 0x01313031, + 0xc5f1f435, 0x8a82880a, 0x4a62682a, 0x81b1b031, + 0xc1d1d011, 0x00202020, 0xc7d3d417, 0x02020002, + 0x02222022, 0x04000404, 0x48606828, 0x41717031, + 0x07030407, 0xcbd3d81b, 0x8d919c1d, 0x89919819, + 0x41616021, 0x8eb2bc3e, 0xc6e2e426, 0x49515819, + 0xcdd1dc1d, 0x41515011, 0x80909010, 0xccd0dc1c, + 0x8a92981a, 0x83a3a023, 0x8ba3a82b, 0xc0d0d010, + 0x81818001, 0x0f030c0f, 0x47434407, 0x0a12181a, + 0xc3e3e023, 0xcce0ec2c, 0x8d818c0d, 0x8fb3bc3f, + 0x86929416, 0x4b73783b, 0x4c505c1c, 0x82a2a022, + 0x81a1a021, 0x43636023, 0x03232023, 0x4d414c0d, + 0xc8c0c808, 0x8e929c1e, 0x8c909c1c, 0x0a32383a, + 0x0c000c0c, 0x0e222c2e, 0x8ab2b83a, 0x4e626c2e, + 0x8f939c1f, 0x4a52581a, 0xc2f2f032, 0x82929012, + 0xc3f3f033, 0x49414809, 0x48707838, 0xccc0cc0c, + 0x05111415, 0xcbf3f83b, 0x40707030, 0x45717435, + 0x4f737c3f, 0x05313435, 0x00101010, 0x03030003, + 0x44606424, 0x4d616c2d, 0xc6c2c406, 0x44707434, + 0xc5d1d415, 0x84b0b434, 0xcae2e82a, 0x09010809, + 0x46727436, 0x09111819, 0xcef2fc3e, 0x40404000, + 0x02121012, 0xc0e0e020, 0x8db1bc3d, 0x05010405, + 0xcaf2f83a, 0x01010001, 0xc0f0f030, 0x0a22282a, + 0x4e525c1e, 0x89a1a829, 0x46525416, 0x43434003, + 0x85818405, 0x04101414, 0x89818809, 0x8b93981b, + 0x80b0b030, 0xc5e1e425, 0x48404808, 0x49717839, + 0x87939417, 0xccf0fc3c, 0x0e121c1e, 0x82828002, + 0x01212021, 0x8c808c0c, 0x0b13181b, 0x4f535c1f, + 0x47737437, 0x44505414, 0x82b2b032, 0x0d111c1d, + 0x05212425, 0x4f434c0f, 0x00000000, 0x46424406, + 0xcde1ec2d, 0x48505818, 0x42525012, 0xcbe3e82b, + 0x4e727c3e, 0xcad2d81a, 0xc9c1c809, 0xcdf1fc3d, + 0x00303030, 0x85919415, 0x45616425, 0x0c303c3c, + 0x86b2b436, 0xc4e0e424, 0x8bb3b83b, 0x4c707c3c, + 0x0e020c0e, 0x40505010, 0x09313839, 0x06222426, + 0x02323032, 0x84808404, 0x49616829, 0x83939013, + 0x07333437, 0xc7e3e427, 0x04202424, 0x84a0a424, + 0xcbc3c80b, 0x43535013, 0x0a02080a, 0x87838407, + 0xc9d1d819, 0x4c404c0c, 0x83838003, 0x8f838c0f, + 0xcec2cc0e, 0x0b33383b, 0x4a42480a, 0x87b3b437, +}; + +static const u32 KC[SEED_NUM_KCONSTANTS] = { + 0x9e3779b9, 0x3c6ef373, 0x78dde6e6, 0xf1bbcdcc, + 0xe3779b99, 0xc6ef3733, 0x8dde6e67, 0x1bbcdccf, + 0x3779b99e, 0x6ef3733c, 0xdde6e678, 0xbbcdccf1, + 0x779b99e3, 0xef3733c6, 0xde6e678d, 0xbcdccf1b, +}; + +#define OP(X1, X2, X3, X4, rbase) \ + t0 = X3 ^ ks[rbase]; \ + t1 = X4 ^ ks[rbase+1]; \ + t1 ^= t0; \ + t1 = SS0[byte(t1, 0)] ^ SS1[byte(t1, 1)] ^ \ + SS2[byte(t1, 2)] ^ SS3[byte(t1, 3)]; \ + t0 += t1; \ + t0 = SS0[byte(t0, 0)] ^ SS1[byte(t0, 1)] ^ \ + SS2[byte(t0, 2)] ^ SS3[byte(t0, 3)]; \ + t1 += t0; \ + t1 = SS0[byte(t1, 0)] ^ SS1[byte(t1, 1)] ^ \ + SS2[byte(t1, 2)] ^ SS3[byte(t1, 3)]; \ + t0 += t1; \ + X1 ^= t0; \ + X2 ^= t1; + +static int seed_set_key(struct crypto_tfm *tfm, const u8 
*in_key, + unsigned int key_len) +{ + struct seed_ctx *ctx = crypto_tfm_ctx(tfm); + u32 *keyout = ctx->keysched; + const __be32 *key = (const __be32 *)in_key; + u32 i, t0, t1, x1, x2, x3, x4; + + x1 = be32_to_cpu(key[0]); + x2 = be32_to_cpu(key[1]); + x3 = be32_to_cpu(key[2]); + x4 = be32_to_cpu(key[3]); + + for (i = 0; i < SEED_NUM_KCONSTANTS; i++) { + t0 = x1 + x3 - KC[i]; + t1 = x2 + KC[i] - x4; + *(keyout++) = SS0[byte(t0, 0)] ^ SS1[byte(t0, 1)] ^ + SS2[byte(t0, 2)] ^ SS3[byte(t0, 3)]; + *(keyout++) = SS0[byte(t1, 0)] ^ SS1[byte(t1, 1)] ^ + SS2[byte(t1, 2)] ^ SS3[byte(t1, 3)]; + + if (i % 2 == 0) { + t0 = x1; + x1 = (x1 >> 8) ^ (x2 << 24); + x2 = (x2 >> 8) ^ (t0 << 24); + } else { + t0 = x3; + x3 = (x3 << 8) ^ (x4 >> 24); + x4 = (x4 << 8) ^ (t0 >> 24); + } + } + + return 0; +} + +/* encrypt a block of text */ + +static void seed_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in) +{ + const struct seed_ctx *ctx = crypto_tfm_ctx(tfm); + const __be32 *src = (const __be32 *)in; + __be32 *dst = (__be32 *)out; + u32 x1, x2, x3, x4, t0, t1; + const u32 *ks = ctx->keysched; + + x1 = be32_to_cpu(src[0]); + x2 = be32_to_cpu(src[1]); + x3 = be32_to_cpu(src[2]); + x4 = be32_to_cpu(src[3]); + + OP(x1, x2, x3, x4, 0); + OP(x3, x4, x1, x2, 2); + OP(x1, x2, x3, x4, 4); + OP(x3, x4, x1, x2, 6); + OP(x1, x2, x3, x4, 8); + OP(x3, x4, x1, x2, 10); + OP(x1, x2, x3, x4, 12); + OP(x3, x4, x1, x2, 14); + OP(x1, x2, x3, x4, 16); + OP(x3, x4, x1, x2, 18); + OP(x1, x2, x3, x4, 20); + OP(x3, x4, x1, x2, 22); + OP(x1, x2, x3, x4, 24); + OP(x3, x4, x1, x2, 26); + OP(x1, x2, x3, x4, 28); + OP(x3, x4, x1, x2, 30); + + dst[0] = cpu_to_be32(x3); + dst[1] = cpu_to_be32(x4); + dst[2] = cpu_to_be32(x1); + dst[3] = cpu_to_be32(x2); +} + +/* decrypt a block of text */ + +static void seed_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in) +{ + const struct seed_ctx *ctx = crypto_tfm_ctx(tfm); + const __be32 *src = (const __be32 *)in; + __be32 *dst = (__be32 *)out; + u32 x1, x2, x3, x4, t0, t1; + const u32 *ks = ctx->keysched; + + x1 = be32_to_cpu(src[0]); + x2 = be32_to_cpu(src[1]); + x3 = be32_to_cpu(src[2]); + x4 = be32_to_cpu(src[3]); + + OP(x1, x2, x3, x4, 30); + OP(x3, x4, x1, x2, 28); + OP(x1, x2, x3, x4, 26); + OP(x3, x4, x1, x2, 24); + OP(x1, x2, x3, x4, 22); + OP(x3, x4, x1, x2, 20); + OP(x1, x2, x3, x4, 18); + OP(x3, x4, x1, x2, 16); + OP(x1, x2, x3, x4, 14); + OP(x3, x4, x1, x2, 12); + OP(x1, x2, x3, x4, 10); + OP(x3, x4, x1, x2, 8); + OP(x1, x2, x3, x4, 6); + OP(x3, x4, x1, x2, 4); + OP(x1, x2, x3, x4, 2); + OP(x3, x4, x1, x2, 0); + + dst[0] = cpu_to_be32(x3); + dst[1] = cpu_to_be32(x4); + dst[2] = cpu_to_be32(x1); + dst[3] = cpu_to_be32(x2); +} + + +static struct crypto_alg seed_alg = { + .cra_name = "seed", + .cra_driver_name = "seed-generic", + .cra_priority = 100, + .cra_flags = CRYPTO_ALG_TYPE_CIPHER, + .cra_blocksize = SEED_BLOCK_SIZE, + .cra_ctxsize = sizeof(struct seed_ctx), + .cra_alignmask = 3, + .cra_module = THIS_MODULE, + .cra_list = LIST_HEAD_INIT(seed_alg.cra_list), + .cra_u = { + .cipher = { + .cia_min_keysize = SEED_KEY_SIZE, + .cia_max_keysize = SEED_KEY_SIZE, + .cia_setkey = seed_set_key, + .cia_encrypt = seed_encrypt, + .cia_decrypt = seed_decrypt + } + } +}; + +static int __init seed_init(void) +{ + return crypto_register_alg(&seed_alg); +} + +static void __exit seed_fini(void) +{ + crypto_unregister_alg(&seed_alg); +} + +module_init(seed_init); +module_exit(seed_fini); + +MODULE_DESCRIPTION("SEED Cipher Algorithm"); +MODULE_LICENSE("GPL"); +MODULE_AUTHOR("Hye-Shik Chang 
<perky@FreeBSD.org>, Kim Hyun <hkim@kisa.or.kr>"); diff --git a/crypto/seqiv.c b/crypto/seqiv.c new file mode 100644 index 00000000..4c449122 --- /dev/null +++ b/crypto/seqiv.c @@ -0,0 +1,365 @@ +/* + * seqiv: Sequence Number IV Generator + * + * This generator generates an IV based on a sequence number by xoring it + * with a salt. This algorithm is mainly useful for CTR and similar modes. + * + * Copyright (c) 2007 Herbert Xu <herbert@gondor.apana.org.au> + * + * This program is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License as published by the Free + * Software Foundation; either version 2 of the License, or (at your option) + * any later version. + * + */ + +#include <crypto/internal/aead.h> +#include <crypto/internal/skcipher.h> +#include <crypto/rng.h> +#include <linux/err.h> +#include <linux/init.h> +#include <linux/kernel.h> +#include <linux/module.h> +#include <linux/slab.h> +#include <linux/spinlock.h> +#include <linux/string.h> + +struct seqiv_ctx { + spinlock_t lock; + u8 salt[] __attribute__ ((aligned(__alignof__(u32)))); +}; + +static void seqiv_complete2(struct skcipher_givcrypt_request *req, int err) +{ + struct ablkcipher_request *subreq = skcipher_givcrypt_reqctx(req); + struct crypto_ablkcipher *geniv; + + if (err == -EINPROGRESS) + return; + + if (err) + goto out; + + geniv = skcipher_givcrypt_reqtfm(req); + memcpy(req->creq.info, subreq->info, crypto_ablkcipher_ivsize(geniv)); + +out: + kfree(subreq->info); +} + +static void seqiv_complete(struct crypto_async_request *base, int err) +{ + struct skcipher_givcrypt_request *req = base->data; + + seqiv_complete2(req, err); + skcipher_givcrypt_complete(req, err); +} + +static void seqiv_aead_complete2(struct aead_givcrypt_request *req, int err) +{ + struct aead_request *subreq = aead_givcrypt_reqctx(req); + struct crypto_aead *geniv; + + if (err == -EINPROGRESS) + return; + + if (err) + goto out; + + geniv = aead_givcrypt_reqtfm(req); + memcpy(req->areq.iv, subreq->iv, crypto_aead_ivsize(geniv)); + +out: + kfree(subreq->iv); +} + +static void seqiv_aead_complete(struct crypto_async_request *base, int err) +{ + struct aead_givcrypt_request *req = base->data; + + seqiv_aead_complete2(req, err); + aead_givcrypt_complete(req, err); +} + +static void seqiv_geniv(struct seqiv_ctx *ctx, u8 *info, u64 seq, + unsigned int ivsize) +{ + unsigned int len = ivsize; + + if (ivsize > sizeof(u64)) { + memset(info, 0, ivsize - sizeof(u64)); + len = sizeof(u64); + } + seq = cpu_to_be64(seq); + memcpy(info + ivsize - len, &seq, len); + crypto_xor(info, ctx->salt, ivsize); +} + +static int seqiv_givencrypt(struct skcipher_givcrypt_request *req) +{ + struct crypto_ablkcipher *geniv = skcipher_givcrypt_reqtfm(req); + struct seqiv_ctx *ctx = crypto_ablkcipher_ctx(geniv); + struct ablkcipher_request *subreq = skcipher_givcrypt_reqctx(req); + crypto_completion_t complete; + void *data; + u8 *info; + unsigned int ivsize; + int err; + + ablkcipher_request_set_tfm(subreq, skcipher_geniv_cipher(geniv)); + + complete = req->creq.base.complete; + data = req->creq.base.data; + info = req->creq.info; + + ivsize = crypto_ablkcipher_ivsize(geniv); + + if (unlikely(!IS_ALIGNED((unsigned long)info, + crypto_ablkcipher_alignmask(geniv) + 1))) { + info = kmalloc(ivsize, req->creq.base.flags & + CRYPTO_TFM_REQ_MAY_SLEEP ? 
GFP_KERNEL: + GFP_ATOMIC); + if (!info) + return -ENOMEM; + + complete = seqiv_complete; + data = req; + } + + ablkcipher_request_set_callback(subreq, req->creq.base.flags, complete, + data); + ablkcipher_request_set_crypt(subreq, req->creq.src, req->creq.dst, + req->creq.nbytes, info); + + seqiv_geniv(ctx, info, req->seq, ivsize); + memcpy(req->giv, info, ivsize); + + err = crypto_ablkcipher_encrypt(subreq); + if (unlikely(info != req->creq.info)) + seqiv_complete2(req, err); + return err; +} + +static int seqiv_aead_givencrypt(struct aead_givcrypt_request *req) +{ + struct crypto_aead *geniv = aead_givcrypt_reqtfm(req); + struct seqiv_ctx *ctx = crypto_aead_ctx(geniv); + struct aead_request *areq = &req->areq; + struct aead_request *subreq = aead_givcrypt_reqctx(req); + crypto_completion_t complete; + void *data; + u8 *info; + unsigned int ivsize; + int err; + + aead_request_set_tfm(subreq, aead_geniv_base(geniv)); + + complete = areq->base.complete; + data = areq->base.data; + info = areq->iv; + + ivsize = crypto_aead_ivsize(geniv); + + if (unlikely(!IS_ALIGNED((unsigned long)info, + crypto_aead_alignmask(geniv) + 1))) { + info = kmalloc(ivsize, areq->base.flags & + CRYPTO_TFM_REQ_MAY_SLEEP ? GFP_KERNEL: + GFP_ATOMIC); + if (!info) + return -ENOMEM; + + complete = seqiv_aead_complete; + data = req; + } + + aead_request_set_callback(subreq, areq->base.flags, complete, data); + aead_request_set_crypt(subreq, areq->src, areq->dst, areq->cryptlen, + info); + aead_request_set_assoc(subreq, areq->assoc, areq->assoclen); + + seqiv_geniv(ctx, info, req->seq, ivsize); + memcpy(req->giv, info, ivsize); + + err = crypto_aead_encrypt(subreq); + if (unlikely(info != areq->iv)) + seqiv_aead_complete2(req, err); + return err; +} + +static int seqiv_givencrypt_first(struct skcipher_givcrypt_request *req) +{ + struct crypto_ablkcipher *geniv = skcipher_givcrypt_reqtfm(req); + struct seqiv_ctx *ctx = crypto_ablkcipher_ctx(geniv); + int err = 0; + + spin_lock_bh(&ctx->lock); + if (crypto_ablkcipher_crt(geniv)->givencrypt != seqiv_givencrypt_first) + goto unlock; + + crypto_ablkcipher_crt(geniv)->givencrypt = seqiv_givencrypt; + err = crypto_rng_get_bytes(crypto_default_rng, ctx->salt, + crypto_ablkcipher_ivsize(geniv)); + +unlock: + spin_unlock_bh(&ctx->lock); + + if (err) + return err; + + return seqiv_givencrypt(req); +} + +static int seqiv_aead_givencrypt_first(struct aead_givcrypt_request *req) +{ + struct crypto_aead *geniv = aead_givcrypt_reqtfm(req); + struct seqiv_ctx *ctx = crypto_aead_ctx(geniv); + int err = 0; + + spin_lock_bh(&ctx->lock); + if (crypto_aead_crt(geniv)->givencrypt != seqiv_aead_givencrypt_first) + goto unlock; + + crypto_aead_crt(geniv)->givencrypt = seqiv_aead_givencrypt; + err = crypto_rng_get_bytes(crypto_default_rng, ctx->salt, + crypto_aead_ivsize(geniv)); + +unlock: + spin_unlock_bh(&ctx->lock); + + if (err) + return err; + + return seqiv_aead_givencrypt(req); +} + +static int seqiv_init(struct crypto_tfm *tfm) +{ + struct crypto_ablkcipher *geniv = __crypto_ablkcipher_cast(tfm); + struct seqiv_ctx *ctx = crypto_ablkcipher_ctx(geniv); + + spin_lock_init(&ctx->lock); + + tfm->crt_ablkcipher.reqsize = sizeof(struct ablkcipher_request); + + return skcipher_geniv_init(tfm); +} + +static int seqiv_aead_init(struct crypto_tfm *tfm) +{ + struct crypto_aead *geniv = __crypto_aead_cast(tfm); + struct seqiv_ctx *ctx = crypto_aead_ctx(geniv); + + spin_lock_init(&ctx->lock); + + tfm->crt_aead.reqsize = sizeof(struct aead_request); + + return aead_geniv_init(tfm); +} + +static struct 
crypto_template seqiv_tmpl; + +static struct crypto_instance *seqiv_ablkcipher_alloc(struct rtattr **tb) +{ + struct crypto_instance *inst; + + inst = skcipher_geniv_alloc(&seqiv_tmpl, tb, 0, 0); + + if (IS_ERR(inst)) + goto out; + + inst->alg.cra_ablkcipher.givencrypt = seqiv_givencrypt_first; + + inst->alg.cra_init = seqiv_init; + inst->alg.cra_exit = skcipher_geniv_exit; + + inst->alg.cra_ctxsize += inst->alg.cra_ablkcipher.ivsize; + +out: + return inst; +} + +static struct crypto_instance *seqiv_aead_alloc(struct rtattr **tb) +{ + struct crypto_instance *inst; + + inst = aead_geniv_alloc(&seqiv_tmpl, tb, 0, 0); + + if (IS_ERR(inst)) + goto out; + + inst->alg.cra_aead.givencrypt = seqiv_aead_givencrypt_first; + + inst->alg.cra_init = seqiv_aead_init; + inst->alg.cra_exit = aead_geniv_exit; + + inst->alg.cra_ctxsize = inst->alg.cra_aead.ivsize; + +out: + return inst; +} + +static struct crypto_instance *seqiv_alloc(struct rtattr **tb) +{ + struct crypto_attr_type *algt; + struct crypto_instance *inst; + int err; + + algt = crypto_get_attr_type(tb); + err = PTR_ERR(algt); + if (IS_ERR(algt)) + return ERR_PTR(err); + + err = crypto_get_default_rng(); + if (err) + return ERR_PTR(err); + + if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & CRYPTO_ALG_TYPE_MASK) + inst = seqiv_ablkcipher_alloc(tb); + else + inst = seqiv_aead_alloc(tb); + + if (IS_ERR(inst)) + goto put_rng; + + inst->alg.cra_alignmask |= __alignof__(u32) - 1; + inst->alg.cra_ctxsize += sizeof(struct seqiv_ctx); + +out: + return inst; + +put_rng: + crypto_put_default_rng(); + goto out; +} + +static void seqiv_free(struct crypto_instance *inst) +{ + if ((inst->alg.cra_flags ^ CRYPTO_ALG_TYPE_AEAD) & CRYPTO_ALG_TYPE_MASK) + skcipher_geniv_free(inst); + else + aead_geniv_free(inst); + crypto_put_default_rng(); +} + +static struct crypto_template seqiv_tmpl = { + .name = "seqiv", + .alloc = seqiv_alloc, + .free = seqiv_free, + .module = THIS_MODULE, +}; + +static int __init seqiv_module_init(void) +{ + return crypto_register_template(&seqiv_tmpl); +} + +static void __exit seqiv_module_exit(void) +{ + crypto_unregister_template(&seqiv_tmpl); +} + +module_init(seqiv_module_init); +module_exit(seqiv_module_exit); + +MODULE_LICENSE("GPL"); +MODULE_DESCRIPTION("Sequence Number IV Generator"); diff --git a/crypto/serpent_generic.c b/crypto/serpent_generic.c new file mode 100644 index 00000000..8f32cf35 --- /dev/null +++ b/crypto/serpent_generic.c @@ -0,0 +1,684 @@ +/* + * Cryptographic API. + * + * Serpent Cipher Algorithm. + * + * Copyright (C) 2002 Dag Arne Osvik <osvik@ii.uib.no> + * 2003 Herbert Valerio Riedel <hvr@gnu.org> + * + * Added tnepres support: + * Ruben Jesus Garcia Hernandez <ruben@ugr.es>, 18.10.2004 + * Based on code by hvr + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 2 of the License, or + * (at your option) any later version. + */ + +#include <linux/init.h> +#include <linux/module.h> +#include <linux/errno.h> +#include <asm/byteorder.h> +#include <linux/crypto.h> +#include <linux/types.h> +#include <crypto/serpent.h> + +/* Key is padded to the maximum of 256 bits before round key generation. + * Any key length <= 256 bits (32 bytes) is allowed by the algorithm. 
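+ * A key shorter than 32 bytes is extended with a single 0x01 byte
+ * followed by zero bytes; this is what the "Copy key, add padding"
+ * loop at the top of __serpent_setkey() below implements.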
+ */ + +#define PHI 0x9e3779b9UL + +#define keyiter(a, b, c, d, i, j) \ + ({ b ^= d; b ^= c; b ^= a; b ^= PHI ^ i; b = rol32(b, 11); k[j] = b; }) + +#define loadkeys(x0, x1, x2, x3, i) \ + ({ x0 = k[i]; x1 = k[i+1]; x2 = k[i+2]; x3 = k[i+3]; }) + +#define storekeys(x0, x1, x2, x3, i) \ + ({ k[i] = x0; k[i+1] = x1; k[i+2] = x2; k[i+3] = x3; }) + +#define store_and_load_keys(x0, x1, x2, x3, s, l) \ + ({ storekeys(x0, x1, x2, x3, s); loadkeys(x0, x1, x2, x3, l); }) + +#define K(x0, x1, x2, x3, i) ({ \ + x3 ^= k[4*(i)+3]; x2 ^= k[4*(i)+2]; \ + x1 ^= k[4*(i)+1]; x0 ^= k[4*(i)+0]; \ + }) + +#define LK(x0, x1, x2, x3, x4, i) ({ \ + x0 = rol32(x0, 13);\ + x2 = rol32(x2, 3); x1 ^= x0; x4 = x0 << 3; \ + x3 ^= x2; x1 ^= x2; \ + x1 = rol32(x1, 1); x3 ^= x4; \ + x3 = rol32(x3, 7); x4 = x1; \ + x0 ^= x1; x4 <<= 7; x2 ^= x3; \ + x0 ^= x3; x2 ^= x4; x3 ^= k[4*i+3]; \ + x1 ^= k[4*i+1]; x0 = rol32(x0, 5); x2 = rol32(x2, 22);\ + x0 ^= k[4*i+0]; x2 ^= k[4*i+2]; \ + }) + +#define KL(x0, x1, x2, x3, x4, i) ({ \ + x0 ^= k[4*i+0]; x1 ^= k[4*i+1]; x2 ^= k[4*i+2]; \ + x3 ^= k[4*i+3]; x0 = ror32(x0, 5); x2 = ror32(x2, 22);\ + x4 = x1; x2 ^= x3; x0 ^= x3; \ + x4 <<= 7; x0 ^= x1; x1 = ror32(x1, 1); \ + x2 ^= x4; x3 = ror32(x3, 7); x4 = x0 << 3; \ + x1 ^= x0; x3 ^= x4; x0 = ror32(x0, 13);\ + x1 ^= x2; x3 ^= x2; x2 = ror32(x2, 3); \ + }) + +#define S0(x0, x1, x2, x3, x4) ({ \ + x4 = x3; \ + x3 |= x0; x0 ^= x4; x4 ^= x2; \ + x4 = ~x4; x3 ^= x1; x1 &= x0; \ + x1 ^= x4; x2 ^= x0; x0 ^= x3; \ + x4 |= x0; x0 ^= x2; x2 &= x1; \ + x3 ^= x2; x1 = ~x1; x2 ^= x4; \ + x1 ^= x2; \ + }) + +#define S1(x0, x1, x2, x3, x4) ({ \ + x4 = x1; \ + x1 ^= x0; x0 ^= x3; x3 = ~x3; \ + x4 &= x1; x0 |= x1; x3 ^= x2; \ + x0 ^= x3; x1 ^= x3; x3 ^= x4; \ + x1 |= x4; x4 ^= x2; x2 &= x0; \ + x2 ^= x1; x1 |= x0; x0 = ~x0; \ + x0 ^= x2; x4 ^= x1; \ + }) + +#define S2(x0, x1, x2, x3, x4) ({ \ + x3 = ~x3; \ + x1 ^= x0; x4 = x0; x0 &= x2; \ + x0 ^= x3; x3 |= x4; x2 ^= x1; \ + x3 ^= x1; x1 &= x0; x0 ^= x2; \ + x2 &= x3; x3 |= x1; x0 = ~x0; \ + x3 ^= x0; x4 ^= x0; x0 ^= x2; \ + x1 |= x2; \ + }) + +#define S3(x0, x1, x2, x3, x4) ({ \ + x4 = x1; \ + x1 ^= x3; x3 |= x0; x4 &= x0; \ + x0 ^= x2; x2 ^= x1; x1 &= x3; \ + x2 ^= x3; x0 |= x4; x4 ^= x3; \ + x1 ^= x0; x0 &= x3; x3 &= x4; \ + x3 ^= x2; x4 |= x1; x2 &= x1; \ + x4 ^= x3; x0 ^= x3; x3 ^= x2; \ + }) + +#define S4(x0, x1, x2, x3, x4) ({ \ + x4 = x3; \ + x3 &= x0; x0 ^= x4; \ + x3 ^= x2; x2 |= x4; x0 ^= x1; \ + x4 ^= x3; x2 |= x0; \ + x2 ^= x1; x1 &= x0; \ + x1 ^= x4; x4 &= x2; x2 ^= x3; \ + x4 ^= x0; x3 |= x1; x1 = ~x1; \ + x3 ^= x0; \ + }) + +#define S5(x0, x1, x2, x3, x4) ({ \ + x4 = x1; x1 |= x0; \ + x2 ^= x1; x3 = ~x3; x4 ^= x0; \ + x0 ^= x2; x1 &= x4; x4 |= x3; \ + x4 ^= x0; x0 &= x3; x1 ^= x3; \ + x3 ^= x2; x0 ^= x1; x2 &= x4; \ + x1 ^= x2; x2 &= x0; \ + x3 ^= x2; \ + }) + +#define S6(x0, x1, x2, x3, x4) ({ \ + x4 = x1; \ + x3 ^= x0; x1 ^= x2; x2 ^= x0; \ + x0 &= x3; x1 |= x3; x4 = ~x4; \ + x0 ^= x1; x1 ^= x2; \ + x3 ^= x4; x4 ^= x0; x2 &= x0; \ + x4 ^= x1; x2 ^= x3; x3 &= x1; \ + x3 ^= x0; x1 ^= x2; \ + }) + +#define S7(x0, x1, x2, x3, x4) ({ \ + x1 = ~x1; \ + x4 = x1; x0 = ~x0; x1 &= x2; \ + x1 ^= x3; x3 |= x4; x4 ^= x2; \ + x2 ^= x3; x3 ^= x0; x0 |= x1; \ + x2 &= x0; x0 ^= x4; x4 ^= x3; \ + x3 &= x0; x4 ^= x1; \ + x2 ^= x4; x3 ^= x1; x4 |= x0; \ + x4 ^= x1; \ + }) + +#define SI0(x0, x1, x2, x3, x4) ({ \ + x4 = x3; x1 ^= x0; \ + x3 |= x1; x4 ^= x1; x0 = ~x0; \ + x2 ^= x3; x3 ^= x0; x0 &= x1; \ + x0 ^= x2; x2 &= x3; x3 ^= x4; \ + x2 ^= x3; x1 ^= x3; x3 &= x0; \ + x1 ^= x0; x0 ^= x2; x4 ^= x3; \ + }) + 
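+/*
+ * SI0 above and SI1-SI7 below are the inverse S-boxes, written as
+ * sequences of Boolean operations on 32-bit words; they are used only
+ * by the decryption path in __serpent_decrypt().
+ */
+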
+#define SI1(x0, x1, x2, x3, x4) ({ \ + x1 ^= x3; x4 = x0; \ + x0 ^= x2; x2 = ~x2; x4 |= x1; \ + x4 ^= x3; x3 &= x1; x1 ^= x2; \ + x2 &= x4; x4 ^= x1; x1 |= x3; \ + x3 ^= x0; x2 ^= x0; x0 |= x4; \ + x2 ^= x4; x1 ^= x0; \ + x4 ^= x1; \ + }) + +#define SI2(x0, x1, x2, x3, x4) ({ \ + x2 ^= x1; x4 = x3; x3 = ~x3; \ + x3 |= x2; x2 ^= x4; x4 ^= x0; \ + x3 ^= x1; x1 |= x2; x2 ^= x0; \ + x1 ^= x4; x4 |= x3; x2 ^= x3; \ + x4 ^= x2; x2 &= x1; \ + x2 ^= x3; x3 ^= x4; x4 ^= x0; \ + }) + +#define SI3(x0, x1, x2, x3, x4) ({ \ + x2 ^= x1; \ + x4 = x1; x1 &= x2; \ + x1 ^= x0; x0 |= x4; x4 ^= x3; \ + x0 ^= x3; x3 |= x1; x1 ^= x2; \ + x1 ^= x3; x0 ^= x2; x2 ^= x3; \ + x3 &= x1; x1 ^= x0; x0 &= x2; \ + x4 ^= x3; x3 ^= x0; x0 ^= x1; \ + }) + +#define SI4(x0, x1, x2, x3, x4) ({ \ + x2 ^= x3; x4 = x0; x0 &= x1; \ + x0 ^= x2; x2 |= x3; x4 = ~x4; \ + x1 ^= x0; x0 ^= x2; x2 &= x4; \ + x2 ^= x0; x0 |= x4; \ + x0 ^= x3; x3 &= x2; \ + x4 ^= x3; x3 ^= x1; x1 &= x0; \ + x4 ^= x1; x0 ^= x3; \ + }) + +#define SI5(x0, x1, x2, x3, x4) ({ \ + x4 = x1; x1 |= x2; \ + x2 ^= x4; x1 ^= x3; x3 &= x4; \ + x2 ^= x3; x3 |= x0; x0 = ~x0; \ + x3 ^= x2; x2 |= x0; x4 ^= x1; \ + x2 ^= x4; x4 &= x0; x0 ^= x1; \ + x1 ^= x3; x0 &= x2; x2 ^= x3; \ + x0 ^= x2; x2 ^= x4; x4 ^= x3; \ + }) + +#define SI6(x0, x1, x2, x3, x4) ({ \ + x0 ^= x2; \ + x4 = x0; x0 &= x3; x2 ^= x3; \ + x0 ^= x2; x3 ^= x1; x2 |= x4; \ + x2 ^= x3; x3 &= x0; x0 = ~x0; \ + x3 ^= x1; x1 &= x2; x4 ^= x0; \ + x3 ^= x4; x4 ^= x2; x0 ^= x1; \ + x2 ^= x0; \ + }) + +#define SI7(x0, x1, x2, x3, x4) ({ \ + x4 = x3; x3 &= x0; x0 ^= x2; \ + x2 |= x4; x4 ^= x1; x0 = ~x0; \ + x1 |= x3; x4 ^= x0; x0 &= x2; \ + x0 ^= x1; x1 &= x2; x3 ^= x2; \ + x4 ^= x3; x2 &= x3; x3 |= x0; \ + x1 ^= x4; x3 ^= x4; x4 &= x0; \ + x4 ^= x2; \ + }) + +int __serpent_setkey(struct serpent_ctx *ctx, const u8 *key, + unsigned int keylen) +{ + u32 *k = ctx->expkey; + u8 *k8 = (u8 *)k; + u32 r0, r1, r2, r3, r4; + int i; + + /* Copy key, add padding */ + + for (i = 0; i < keylen; ++i) + k8[i] = key[i]; + if (i < SERPENT_MAX_KEY_SIZE) + k8[i++] = 1; + while (i < SERPENT_MAX_KEY_SIZE) + k8[i++] = 0; + + /* Expand key using polynomial */ + + r0 = le32_to_cpu(k[3]); + r1 = le32_to_cpu(k[4]); + r2 = le32_to_cpu(k[5]); + r3 = le32_to_cpu(k[6]); + r4 = le32_to_cpu(k[7]); + + keyiter(le32_to_cpu(k[0]), r0, r4, r2, 0, 0); + keyiter(le32_to_cpu(k[1]), r1, r0, r3, 1, 1); + keyiter(le32_to_cpu(k[2]), r2, r1, r4, 2, 2); + keyiter(le32_to_cpu(k[3]), r3, r2, r0, 3, 3); + keyiter(le32_to_cpu(k[4]), r4, r3, r1, 4, 4); + keyiter(le32_to_cpu(k[5]), r0, r4, r2, 5, 5); + keyiter(le32_to_cpu(k[6]), r1, r0, r3, 6, 6); + keyiter(le32_to_cpu(k[7]), r2, r1, r4, 7, 7); + + keyiter(k[0], r3, r2, r0, 8, 8); + keyiter(k[1], r4, r3, r1, 9, 9); + keyiter(k[2], r0, r4, r2, 10, 10); + keyiter(k[3], r1, r0, r3, 11, 11); + keyiter(k[4], r2, r1, r4, 12, 12); + keyiter(k[5], r3, r2, r0, 13, 13); + keyiter(k[6], r4, r3, r1, 14, 14); + keyiter(k[7], r0, r4, r2, 15, 15); + keyiter(k[8], r1, r0, r3, 16, 16); + keyiter(k[9], r2, r1, r4, 17, 17); + keyiter(k[10], r3, r2, r0, 18, 18); + keyiter(k[11], r4, r3, r1, 19, 19); + keyiter(k[12], r0, r4, r2, 20, 20); + keyiter(k[13], r1, r0, r3, 21, 21); + keyiter(k[14], r2, r1, r4, 22, 22); + keyiter(k[15], r3, r2, r0, 23, 23); + keyiter(k[16], r4, r3, r1, 24, 24); + keyiter(k[17], r0, r4, r2, 25, 25); + keyiter(k[18], r1, r0, r3, 26, 26); + keyiter(k[19], r2, r1, r4, 27, 27); + keyiter(k[20], r3, r2, r0, 28, 28); + keyiter(k[21], r4, r3, r1, 29, 29); + keyiter(k[22], r0, r4, r2, 30, 30); + keyiter(k[23], r1, r0, r3, 
31, 31); + + k += 50; + + keyiter(k[-26], r2, r1, r4, 32, -18); + keyiter(k[-25], r3, r2, r0, 33, -17); + keyiter(k[-24], r4, r3, r1, 34, -16); + keyiter(k[-23], r0, r4, r2, 35, -15); + keyiter(k[-22], r1, r0, r3, 36, -14); + keyiter(k[-21], r2, r1, r4, 37, -13); + keyiter(k[-20], r3, r2, r0, 38, -12); + keyiter(k[-19], r4, r3, r1, 39, -11); + keyiter(k[-18], r0, r4, r2, 40, -10); + keyiter(k[-17], r1, r0, r3, 41, -9); + keyiter(k[-16], r2, r1, r4, 42, -8); + keyiter(k[-15], r3, r2, r0, 43, -7); + keyiter(k[-14], r4, r3, r1, 44, -6); + keyiter(k[-13], r0, r4, r2, 45, -5); + keyiter(k[-12], r1, r0, r3, 46, -4); + keyiter(k[-11], r2, r1, r4, 47, -3); + keyiter(k[-10], r3, r2, r0, 48, -2); + keyiter(k[-9], r4, r3, r1, 49, -1); + keyiter(k[-8], r0, r4, r2, 50, 0); + keyiter(k[-7], r1, r0, r3, 51, 1); + keyiter(k[-6], r2, r1, r4, 52, 2); + keyiter(k[-5], r3, r2, r0, 53, 3); + keyiter(k[-4], r4, r3, r1, 54, 4); + keyiter(k[-3], r0, r4, r2, 55, 5); + keyiter(k[-2], r1, r0, r3, 56, 6); + keyiter(k[-1], r2, r1, r4, 57, 7); + keyiter(k[0], r3, r2, r0, 58, 8); + keyiter(k[1], r4, r3, r1, 59, 9); + keyiter(k[2], r0, r4, r2, 60, 10); + keyiter(k[3], r1, r0, r3, 61, 11); + keyiter(k[4], r2, r1, r4, 62, 12); + keyiter(k[5], r3, r2, r0, 63, 13); + keyiter(k[6], r4, r3, r1, 64, 14); + keyiter(k[7], r0, r4, r2, 65, 15); + keyiter(k[8], r1, r0, r3, 66, 16); + keyiter(k[9], r2, r1, r4, 67, 17); + keyiter(k[10], r3, r2, r0, 68, 18); + keyiter(k[11], r4, r3, r1, 69, 19); + keyiter(k[12], r0, r4, r2, 70, 20); + keyiter(k[13], r1, r0, r3, 71, 21); + keyiter(k[14], r2, r1, r4, 72, 22); + keyiter(k[15], r3, r2, r0, 73, 23); + keyiter(k[16], r4, r3, r1, 74, 24); + keyiter(k[17], r0, r4, r2, 75, 25); + keyiter(k[18], r1, r0, r3, 76, 26); + keyiter(k[19], r2, r1, r4, 77, 27); + keyiter(k[20], r3, r2, r0, 78, 28); + keyiter(k[21], r4, r3, r1, 79, 29); + keyiter(k[22], r0, r4, r2, 80, 30); + keyiter(k[23], r1, r0, r3, 81, 31); + + k += 50; + + keyiter(k[-26], r2, r1, r4, 82, -18); + keyiter(k[-25], r3, r2, r0, 83, -17); + keyiter(k[-24], r4, r3, r1, 84, -16); + keyiter(k[-23], r0, r4, r2, 85, -15); + keyiter(k[-22], r1, r0, r3, 86, -14); + keyiter(k[-21], r2, r1, r4, 87, -13); + keyiter(k[-20], r3, r2, r0, 88, -12); + keyiter(k[-19], r4, r3, r1, 89, -11); + keyiter(k[-18], r0, r4, r2, 90, -10); + keyiter(k[-17], r1, r0, r3, 91, -9); + keyiter(k[-16], r2, r1, r4, 92, -8); + keyiter(k[-15], r3, r2, r0, 93, -7); + keyiter(k[-14], r4, r3, r1, 94, -6); + keyiter(k[-13], r0, r4, r2, 95, -5); + keyiter(k[-12], r1, r0, r3, 96, -4); + keyiter(k[-11], r2, r1, r4, 97, -3); + keyiter(k[-10], r3, r2, r0, 98, -2); + keyiter(k[-9], r4, r3, r1, 99, -1); + keyiter(k[-8], r0, r4, r2, 100, 0); + keyiter(k[-7], r1, r0, r3, 101, 1); + keyiter(k[-6], r2, r1, r4, 102, 2); + keyiter(k[-5], r3, r2, r0, 103, 3); + keyiter(k[-4], r4, r3, r1, 104, 4); + keyiter(k[-3], r0, r4, r2, 105, 5); + keyiter(k[-2], r1, r0, r3, 106, 6); + keyiter(k[-1], r2, r1, r4, 107, 7); + keyiter(k[0], r3, r2, r0, 108, 8); + keyiter(k[1], r4, r3, r1, 109, 9); + keyiter(k[2], r0, r4, r2, 110, 10); + keyiter(k[3], r1, r0, r3, 111, 11); + keyiter(k[4], r2, r1, r4, 112, 12); + keyiter(k[5], r3, r2, r0, 113, 13); + keyiter(k[6], r4, r3, r1, 114, 14); + keyiter(k[7], r0, r4, r2, 115, 15); + keyiter(k[8], r1, r0, r3, 116, 16); + keyiter(k[9], r2, r1, r4, 117, 17); + keyiter(k[10], r3, r2, r0, 118, 18); + keyiter(k[11], r4, r3, r1, 119, 19); + keyiter(k[12], r0, r4, r2, 120, 20); + keyiter(k[13], r1, r0, r3, 121, 21); + keyiter(k[14], r2, r1, r4, 122, 22); + keyiter(k[15], r3, 
r2, r0, 123, 23); + keyiter(k[16], r4, r3, r1, 124, 24); + keyiter(k[17], r0, r4, r2, 125, 25); + keyiter(k[18], r1, r0, r3, 126, 26); + keyiter(k[19], r2, r1, r4, 127, 27); + keyiter(k[20], r3, r2, r0, 128, 28); + keyiter(k[21], r4, r3, r1, 129, 29); + keyiter(k[22], r0, r4, r2, 130, 30); + keyiter(k[23], r1, r0, r3, 131, 31); + + /* Apply S-boxes */ + + S3(r3, r4, r0, r1, r2); store_and_load_keys(r1, r2, r4, r3, 28, 24); + S4(r1, r2, r4, r3, r0); store_and_load_keys(r2, r4, r3, r0, 24, 20); + S5(r2, r4, r3, r0, r1); store_and_load_keys(r1, r2, r4, r0, 20, 16); + S6(r1, r2, r4, r0, r3); store_and_load_keys(r4, r3, r2, r0, 16, 12); + S7(r4, r3, r2, r0, r1); store_and_load_keys(r1, r2, r0, r4, 12, 8); + S0(r1, r2, r0, r4, r3); store_and_load_keys(r0, r2, r4, r1, 8, 4); + S1(r0, r2, r4, r1, r3); store_and_load_keys(r3, r4, r1, r0, 4, 0); + S2(r3, r4, r1, r0, r2); store_and_load_keys(r2, r4, r3, r0, 0, -4); + S3(r2, r4, r3, r0, r1); store_and_load_keys(r0, r1, r4, r2, -4, -8); + S4(r0, r1, r4, r2, r3); store_and_load_keys(r1, r4, r2, r3, -8, -12); + S5(r1, r4, r2, r3, r0); store_and_load_keys(r0, r1, r4, r3, -12, -16); + S6(r0, r1, r4, r3, r2); store_and_load_keys(r4, r2, r1, r3, -16, -20); + S7(r4, r2, r1, r3, r0); store_and_load_keys(r0, r1, r3, r4, -20, -24); + S0(r0, r1, r3, r4, r2); store_and_load_keys(r3, r1, r4, r0, -24, -28); + k -= 50; + S1(r3, r1, r4, r0, r2); store_and_load_keys(r2, r4, r0, r3, 22, 18); + S2(r2, r4, r0, r3, r1); store_and_load_keys(r1, r4, r2, r3, 18, 14); + S3(r1, r4, r2, r3, r0); store_and_load_keys(r3, r0, r4, r1, 14, 10); + S4(r3, r0, r4, r1, r2); store_and_load_keys(r0, r4, r1, r2, 10, 6); + S5(r0, r4, r1, r2, r3); store_and_load_keys(r3, r0, r4, r2, 6, 2); + S6(r3, r0, r4, r2, r1); store_and_load_keys(r4, r1, r0, r2, 2, -2); + S7(r4, r1, r0, r2, r3); store_and_load_keys(r3, r0, r2, r4, -2, -6); + S0(r3, r0, r2, r4, r1); store_and_load_keys(r2, r0, r4, r3, -6, -10); + S1(r2, r0, r4, r3, r1); store_and_load_keys(r1, r4, r3, r2, -10, -14); + S2(r1, r4, r3, r2, r0); store_and_load_keys(r0, r4, r1, r2, -14, -18); + S3(r0, r4, r1, r2, r3); store_and_load_keys(r2, r3, r4, r0, -18, -22); + k -= 50; + S4(r2, r3, r4, r0, r1); store_and_load_keys(r3, r4, r0, r1, 28, 24); + S5(r3, r4, r0, r1, r2); store_and_load_keys(r2, r3, r4, r1, 24, 20); + S6(r2, r3, r4, r1, r0); store_and_load_keys(r4, r0, r3, r1, 20, 16); + S7(r4, r0, r3, r1, r2); store_and_load_keys(r2, r3, r1, r4, 16, 12); + S0(r2, r3, r1, r4, r0); store_and_load_keys(r1, r3, r4, r2, 12, 8); + S1(r1, r3, r4, r2, r0); store_and_load_keys(r0, r4, r2, r1, 8, 4); + S2(r0, r4, r2, r1, r3); store_and_load_keys(r3, r4, r0, r1, 4, 0); + S3(r3, r4, r0, r1, r2); storekeys(r1, r2, r4, r3, 0); + + return 0; +} +EXPORT_SYMBOL_GPL(__serpent_setkey); + +int serpent_setkey(struct crypto_tfm *tfm, const u8 *key, unsigned int keylen) +{ + return __serpent_setkey(crypto_tfm_ctx(tfm), key, keylen); +} +EXPORT_SYMBOL_GPL(serpent_setkey); + +void __serpent_encrypt(struct serpent_ctx *ctx, u8 *dst, const u8 *src) +{ + const u32 *k = ctx->expkey; + const __le32 *s = (const __le32 *)src; + __le32 *d = (__le32 *)dst; + u32 r0, r1, r2, r3, r4; + +/* + * Note: The conversions between u8* and u32* might cause trouble + * on architectures with stricter alignment rules than x86 + */ + + r0 = le32_to_cpu(s[0]); + r1 = le32_to_cpu(s[1]); + r2 = le32_to_cpu(s[2]); + r3 = le32_to_cpu(s[3]); + + K(r0, r1, r2, r3, 0); + S0(r0, r1, r2, r3, r4); LK(r2, r1, r3, r0, r4, 1); + S1(r2, r1, r3, r0, r4); LK(r4, r3, r0, r2, r1, 2); + S2(r4, r3, r0, r2, r1); 
LK(r1, r3, r4, r2, r0, 3); + S3(r1, r3, r4, r2, r0); LK(r2, r0, r3, r1, r4, 4); + S4(r2, r0, r3, r1, r4); LK(r0, r3, r1, r4, r2, 5); + S5(r0, r3, r1, r4, r2); LK(r2, r0, r3, r4, r1, 6); + S6(r2, r0, r3, r4, r1); LK(r3, r1, r0, r4, r2, 7); + S7(r3, r1, r0, r4, r2); LK(r2, r0, r4, r3, r1, 8); + S0(r2, r0, r4, r3, r1); LK(r4, r0, r3, r2, r1, 9); + S1(r4, r0, r3, r2, r1); LK(r1, r3, r2, r4, r0, 10); + S2(r1, r3, r2, r4, r0); LK(r0, r3, r1, r4, r2, 11); + S3(r0, r3, r1, r4, r2); LK(r4, r2, r3, r0, r1, 12); + S4(r4, r2, r3, r0, r1); LK(r2, r3, r0, r1, r4, 13); + S5(r2, r3, r0, r1, r4); LK(r4, r2, r3, r1, r0, 14); + S6(r4, r2, r3, r1, r0); LK(r3, r0, r2, r1, r4, 15); + S7(r3, r0, r2, r1, r4); LK(r4, r2, r1, r3, r0, 16); + S0(r4, r2, r1, r3, r0); LK(r1, r2, r3, r4, r0, 17); + S1(r1, r2, r3, r4, r0); LK(r0, r3, r4, r1, r2, 18); + S2(r0, r3, r4, r1, r2); LK(r2, r3, r0, r1, r4, 19); + S3(r2, r3, r0, r1, r4); LK(r1, r4, r3, r2, r0, 20); + S4(r1, r4, r3, r2, r0); LK(r4, r3, r2, r0, r1, 21); + S5(r4, r3, r2, r0, r1); LK(r1, r4, r3, r0, r2, 22); + S6(r1, r4, r3, r0, r2); LK(r3, r2, r4, r0, r1, 23); + S7(r3, r2, r4, r0, r1); LK(r1, r4, r0, r3, r2, 24); + S0(r1, r4, r0, r3, r2); LK(r0, r4, r3, r1, r2, 25); + S1(r0, r4, r3, r1, r2); LK(r2, r3, r1, r0, r4, 26); + S2(r2, r3, r1, r0, r4); LK(r4, r3, r2, r0, r1, 27); + S3(r4, r3, r2, r0, r1); LK(r0, r1, r3, r4, r2, 28); + S4(r0, r1, r3, r4, r2); LK(r1, r3, r4, r2, r0, 29); + S5(r1, r3, r4, r2, r0); LK(r0, r1, r3, r2, r4, 30); + S6(r0, r1, r3, r2, r4); LK(r3, r4, r1, r2, r0, 31); + S7(r3, r4, r1, r2, r0); K(r0, r1, r2, r3, 32); + + d[0] = cpu_to_le32(r0); + d[1] = cpu_to_le32(r1); + d[2] = cpu_to_le32(r2); + d[3] = cpu_to_le32(r3); +} +EXPORT_SYMBOL_GPL(__serpent_encrypt); + +static void serpent_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) +{ + struct serpent_ctx *ctx = crypto_tfm_ctx(tfm); + + __serpent_encrypt(ctx, dst, src); +} + +void __serpent_decrypt(struct serpent_ctx *ctx, u8 *dst, const u8 *src) +{ + const u32 *k = ctx->expkey; + const __le32 *s = (const __le32 *)src; + __le32 *d = (__le32 *)dst; + u32 r0, r1, r2, r3, r4; + + r0 = le32_to_cpu(s[0]); + r1 = le32_to_cpu(s[1]); + r2 = le32_to_cpu(s[2]); + r3 = le32_to_cpu(s[3]); + + K(r0, r1, r2, r3, 32); + SI7(r0, r1, r2, r3, r4); KL(r1, r3, r0, r4, r2, 31); + SI6(r1, r3, r0, r4, r2); KL(r0, r2, r4, r1, r3, 30); + SI5(r0, r2, r4, r1, r3); KL(r2, r3, r0, r4, r1, 29); + SI4(r2, r3, r0, r4, r1); KL(r2, r0, r1, r4, r3, 28); + SI3(r2, r0, r1, r4, r3); KL(r1, r2, r3, r4, r0, 27); + SI2(r1, r2, r3, r4, r0); KL(r2, r0, r4, r3, r1, 26); + SI1(r2, r0, r4, r3, r1); KL(r1, r0, r4, r3, r2, 25); + SI0(r1, r0, r4, r3, r2); KL(r4, r2, r0, r1, r3, 24); + SI7(r4, r2, r0, r1, r3); KL(r2, r1, r4, r3, r0, 23); + SI6(r2, r1, r4, r3, r0); KL(r4, r0, r3, r2, r1, 22); + SI5(r4, r0, r3, r2, r1); KL(r0, r1, r4, r3, r2, 21); + SI4(r0, r1, r4, r3, r2); KL(r0, r4, r2, r3, r1, 20); + SI3(r0, r4, r2, r3, r1); KL(r2, r0, r1, r3, r4, 19); + SI2(r2, r0, r1, r3, r4); KL(r0, r4, r3, r1, r2, 18); + SI1(r0, r4, r3, r1, r2); KL(r2, r4, r3, r1, r0, 17); + SI0(r2, r4, r3, r1, r0); KL(r3, r0, r4, r2, r1, 16); + SI7(r3, r0, r4, r2, r1); KL(r0, r2, r3, r1, r4, 15); + SI6(r0, r2, r3, r1, r4); KL(r3, r4, r1, r0, r2, 14); + SI5(r3, r4, r1, r0, r2); KL(r4, r2, r3, r1, r0, 13); + SI4(r4, r2, r3, r1, r0); KL(r4, r3, r0, r1, r2, 12); + SI3(r4, r3, r0, r1, r2); KL(r0, r4, r2, r1, r3, 11); + SI2(r0, r4, r2, r1, r3); KL(r4, r3, r1, r2, r0, 10); + SI1(r4, r3, r1, r2, r0); KL(r0, r3, r1, r2, r4, 9); + SI0(r0, r3, r1, r2, r4); KL(r1, r4, r3, r0, r2, 
8); + SI7(r1, r4, r3, r0, r2); KL(r4, r0, r1, r2, r3, 7); + SI6(r4, r0, r1, r2, r3); KL(r1, r3, r2, r4, r0, 6); + SI5(r1, r3, r2, r4, r0); KL(r3, r0, r1, r2, r4, 5); + SI4(r3, r0, r1, r2, r4); KL(r3, r1, r4, r2, r0, 4); + SI3(r3, r1, r4, r2, r0); KL(r4, r3, r0, r2, r1, 3); + SI2(r4, r3, r0, r2, r1); KL(r3, r1, r2, r0, r4, 2); + SI1(r3, r1, r2, r0, r4); KL(r4, r1, r2, r0, r3, 1); + SI0(r4, r1, r2, r0, r3); K(r2, r3, r1, r4, 0); + + d[0] = cpu_to_le32(r2); + d[1] = cpu_to_le32(r3); + d[2] = cpu_to_le32(r1); + d[3] = cpu_to_le32(r4); +} +EXPORT_SYMBOL_GPL(__serpent_decrypt); + +static void serpent_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) +{ + struct serpent_ctx *ctx = crypto_tfm_ctx(tfm); + + __serpent_decrypt(ctx, dst, src); +} + +static struct crypto_alg serpent_alg = { + .cra_name = "serpent", + .cra_driver_name = "serpent-generic", + .cra_priority = 100, + .cra_flags = CRYPTO_ALG_TYPE_CIPHER, + .cra_blocksize = SERPENT_BLOCK_SIZE, + .cra_ctxsize = sizeof(struct serpent_ctx), + .cra_alignmask = 3, + .cra_module = THIS_MODULE, + .cra_list = LIST_HEAD_INIT(serpent_alg.cra_list), + .cra_u = { .cipher = { + .cia_min_keysize = SERPENT_MIN_KEY_SIZE, + .cia_max_keysize = SERPENT_MAX_KEY_SIZE, + .cia_setkey = serpent_setkey, + .cia_encrypt = serpent_encrypt, + .cia_decrypt = serpent_decrypt } } +}; + +static int tnepres_setkey(struct crypto_tfm *tfm, const u8 *key, + unsigned int keylen) +{ + u8 rev_key[SERPENT_MAX_KEY_SIZE]; + int i; + + for (i = 0; i < keylen; ++i) + rev_key[keylen - i - 1] = key[i]; + + return serpent_setkey(tfm, rev_key, keylen); +} + +static void tnepres_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) +{ + const u32 * const s = (const u32 * const)src; + u32 * const d = (u32 * const)dst; + + u32 rs[4], rd[4]; + + rs[0] = swab32(s[3]); + rs[1] = swab32(s[2]); + rs[2] = swab32(s[1]); + rs[3] = swab32(s[0]); + + serpent_encrypt(tfm, (u8 *)rd, (u8 *)rs); + + d[0] = swab32(rd[3]); + d[1] = swab32(rd[2]); + d[2] = swab32(rd[1]); + d[3] = swab32(rd[0]); +} + +static void tnepres_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) +{ + const u32 * const s = (const u32 * const)src; + u32 * const d = (u32 * const)dst; + + u32 rs[4], rd[4]; + + rs[0] = swab32(s[3]); + rs[1] = swab32(s[2]); + rs[2] = swab32(s[1]); + rs[3] = swab32(s[0]); + + serpent_decrypt(tfm, (u8 *)rd, (u8 *)rs); + + d[0] = swab32(rd[3]); + d[1] = swab32(rd[2]); + d[2] = swab32(rd[1]); + d[3] = swab32(rd[0]); +} + +static struct crypto_alg tnepres_alg = { + .cra_name = "tnepres", + .cra_flags = CRYPTO_ALG_TYPE_CIPHER, + .cra_blocksize = SERPENT_BLOCK_SIZE, + .cra_ctxsize = sizeof(struct serpent_ctx), + .cra_alignmask = 3, + .cra_module = THIS_MODULE, + .cra_list = LIST_HEAD_INIT(serpent_alg.cra_list), + .cra_u = { .cipher = { + .cia_min_keysize = SERPENT_MIN_KEY_SIZE, + .cia_max_keysize = SERPENT_MAX_KEY_SIZE, + .cia_setkey = tnepres_setkey, + .cia_encrypt = tnepres_encrypt, + .cia_decrypt = tnepres_decrypt } } +}; + +static int __init serpent_mod_init(void) +{ + int ret = crypto_register_alg(&serpent_alg); + + if (ret) + return ret; + + ret = crypto_register_alg(&tnepres_alg); + + if (ret) + crypto_unregister_alg(&serpent_alg); + + return ret; +} + +static void __exit serpent_mod_fini(void) +{ + crypto_unregister_alg(&tnepres_alg); + crypto_unregister_alg(&serpent_alg); +} + +module_init(serpent_mod_init); +module_exit(serpent_mod_fini); + +MODULE_LICENSE("GPL"); +MODULE_DESCRIPTION("Serpent and tnepres (kerneli compatible serpent reversed) Cipher Algorithm"); +MODULE_AUTHOR("Dag Arne Osvik 
<osvik@ii.uib.no>"); +MODULE_ALIAS("tnepres"); +MODULE_ALIAS("serpent"); diff --git a/crypto/sha1_generic.c b/crypto/sha1_generic.c new file mode 100644 index 00000000..42794803 --- /dev/null +++ b/crypto/sha1_generic.c @@ -0,0 +1,156 @@ +/* + * Cryptographic API. + * + * SHA1 Secure Hash Algorithm. + * + * Derived from cryptoapi implementation, adapted for in-place + * scatterlist interface. + * + * Copyright (c) Alan Smithee. + * Copyright (c) Andrew McDonald <andrew@mcdonald.org.uk> + * Copyright (c) Jean-Francois Dive <jef@linuxbe.org> + * + * This program is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License as published by the Free + * Software Foundation; either version 2 of the License, or (at your option) + * any later version. + * + */ +#include <crypto/internal/hash.h> +#include <linux/init.h> +#include <linux/module.h> +#include <linux/mm.h> +#include <linux/cryptohash.h> +#include <linux/types.h> +#include <crypto/sha.h> +#include <asm/byteorder.h> + +static int sha1_init(struct shash_desc *desc) +{ + struct sha1_state *sctx = shash_desc_ctx(desc); + + *sctx = (struct sha1_state){ + .state = { SHA1_H0, SHA1_H1, SHA1_H2, SHA1_H3, SHA1_H4 }, + }; + + return 0; +} + +int crypto_sha1_update(struct shash_desc *desc, const u8 *data, + unsigned int len) +{ + struct sha1_state *sctx = shash_desc_ctx(desc); + unsigned int partial, done; + const u8 *src; + + partial = sctx->count % SHA1_BLOCK_SIZE; + sctx->count += len; + done = 0; + src = data; + + if ((partial + len) >= SHA1_BLOCK_SIZE) { + u32 temp[SHA_WORKSPACE_WORDS]; + + if (partial) { + done = -partial; + memcpy(sctx->buffer + partial, data, + done + SHA1_BLOCK_SIZE); + src = sctx->buffer; + } + + do { + sha_transform(sctx->state, src, temp); + done += SHA1_BLOCK_SIZE; + src = data + done; + } while (done + SHA1_BLOCK_SIZE <= len); + + memset(temp, 0, sizeof(temp)); + partial = 0; + } + memcpy(sctx->buffer + partial, src, len - done); + + return 0; +} +EXPORT_SYMBOL(crypto_sha1_update); + + +/* Add padding and return the message digest. */ +static int sha1_final(struct shash_desc *desc, u8 *out) +{ + struct sha1_state *sctx = shash_desc_ctx(desc); + __be32 *dst = (__be32 *)out; + u32 i, index, padlen; + __be64 bits; + static const u8 padding[64] = { 0x80, }; + + bits = cpu_to_be64(sctx->count << 3); + + /* Pad out to 56 mod 64 */ + index = sctx->count & 0x3f; + padlen = (index < 56) ? 
(56 - index) : ((64+56) - index); + crypto_sha1_update(desc, padding, padlen); + + /* Append length */ + crypto_sha1_update(desc, (const u8 *)&bits, sizeof(bits)); + + /* Store state in digest */ + for (i = 0; i < 5; i++) + dst[i] = cpu_to_be32(sctx->state[i]); + + /* Wipe context */ + memset(sctx, 0, sizeof *sctx); + + return 0; +} + +static int sha1_export(struct shash_desc *desc, void *out) +{ + struct sha1_state *sctx = shash_desc_ctx(desc); + + memcpy(out, sctx, sizeof(*sctx)); + return 0; +} + +static int sha1_import(struct shash_desc *desc, const void *in) +{ + struct sha1_state *sctx = shash_desc_ctx(desc); + + memcpy(sctx, in, sizeof(*sctx)); + return 0; +} + +static struct shash_alg alg = { + .digestsize = SHA1_DIGEST_SIZE, + .init = sha1_init, + .update = crypto_sha1_update, + .final = sha1_final, + .export = sha1_export, + .import = sha1_import, + .descsize = sizeof(struct sha1_state), + .statesize = sizeof(struct sha1_state), + .base = { + .cra_name = "sha1", + .cra_driver_name= "sha1-generic", + .cra_flags = CRYPTO_ALG_TYPE_SHASH, + .cra_blocksize = SHA1_BLOCK_SIZE, + .cra_module = THIS_MODULE, + } +}; + +static int __init sha1_generic_mod_init(void) +{ + return crypto_register_shash(&alg); +} + +static void __exit sha1_generic_mod_fini(void) +{ + crypto_unregister_shash(&alg); +} + +module_init(sha1_generic_mod_init); +module_exit(sha1_generic_mod_fini); + +MODULE_LICENSE("GPL"); +MODULE_DESCRIPTION("SHA1 Secure Hash Algorithm"); + +MODULE_ALIAS("sha1"); diff --git a/crypto/sha256_generic.c b/crypto/sha256_generic.c new file mode 100644 index 00000000..c48459eb --- /dev/null +++ b/crypto/sha256_generic.c @@ -0,0 +1,402 @@ +/* + * Cryptographic API. + * + * SHA-256, as specified in + * http://csrc.nist.gov/groups/STM/cavp/documents/shs/sha256-384-512.pdf + * + * SHA-256 code by Jean-Luc Cooke <jlcooke@certainkey.com>. + * + * Copyright (c) Jean-Luc Cooke <jlcooke@certainkey.com> + * Copyright (c) Andrew McDonald <andrew@mcdonald.org.uk> + * Copyright (c) 2002 James Morris <jmorris@intercode.com.au> + * SHA224 Support Copyright 2007 Intel Corporation <jonathan.lynch@intel.com> + * + * This program is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License as published by the Free + * Software Foundation; either version 2 of the License, or (at your option) + * any later version. 
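+ *
+ * sha256_transform() below processes one 64-byte block per call: the
+ * 16 input words are expanded to the 64-entry message schedule W[]
+ * with BLEND_OP() and then folded into the eight 32-bit state words
+ * over 64 unrolled rounds built from the Ch, Maj, e0 and e1 functions
+ * defined below.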
+ * + */ +#include <crypto/internal/hash.h> +#include <linux/init.h> +#include <linux/module.h> +#include <linux/mm.h> +#include <linux/types.h> +#include <crypto/sha.h> +#include <asm/byteorder.h> + +static inline u32 Ch(u32 x, u32 y, u32 z) +{ + return z ^ (x & (y ^ z)); +} + +static inline u32 Maj(u32 x, u32 y, u32 z) +{ + return (x & y) | (z & (x | y)); +} + +#define e0(x) (ror32(x, 2) ^ ror32(x,13) ^ ror32(x,22)) +#define e1(x) (ror32(x, 6) ^ ror32(x,11) ^ ror32(x,25)) +#define s0(x) (ror32(x, 7) ^ ror32(x,18) ^ (x >> 3)) +#define s1(x) (ror32(x,17) ^ ror32(x,19) ^ (x >> 10)) + +static inline void LOAD_OP(int I, u32 *W, const u8 *input) +{ + W[I] = __be32_to_cpu( ((__be32*)(input))[I] ); +} + +static inline void BLEND_OP(int I, u32 *W) +{ + W[I] = s1(W[I-2]) + W[I-7] + s0(W[I-15]) + W[I-16]; +} + +static void sha256_transform(u32 *state, const u8 *input) +{ + u32 a, b, c, d, e, f, g, h, t1, t2; + u32 W[64]; + int i; + + /* load the input */ + for (i = 0; i < 16; i++) + LOAD_OP(i, W, input); + + /* now blend */ + for (i = 16; i < 64; i++) + BLEND_OP(i, W); + + /* load the state into our registers */ + a=state[0]; b=state[1]; c=state[2]; d=state[3]; + e=state[4]; f=state[5]; g=state[6]; h=state[7]; + + /* now iterate */ + t1 = h + e1(e) + Ch(e,f,g) + 0x428a2f98 + W[ 0]; + t2 = e0(a) + Maj(a,b,c); d+=t1; h=t1+t2; + t1 = g + e1(d) + Ch(d,e,f) + 0x71374491 + W[ 1]; + t2 = e0(h) + Maj(h,a,b); c+=t1; g=t1+t2; + t1 = f + e1(c) + Ch(c,d,e) + 0xb5c0fbcf + W[ 2]; + t2 = e0(g) + Maj(g,h,a); b+=t1; f=t1+t2; + t1 = e + e1(b) + Ch(b,c,d) + 0xe9b5dba5 + W[ 3]; + t2 = e0(f) + Maj(f,g,h); a+=t1; e=t1+t2; + t1 = d + e1(a) + Ch(a,b,c) + 0x3956c25b + W[ 4]; + t2 = e0(e) + Maj(e,f,g); h+=t1; d=t1+t2; + t1 = c + e1(h) + Ch(h,a,b) + 0x59f111f1 + W[ 5]; + t2 = e0(d) + Maj(d,e,f); g+=t1; c=t1+t2; + t1 = b + e1(g) + Ch(g,h,a) + 0x923f82a4 + W[ 6]; + t2 = e0(c) + Maj(c,d,e); f+=t1; b=t1+t2; + t1 = a + e1(f) + Ch(f,g,h) + 0xab1c5ed5 + W[ 7]; + t2 = e0(b) + Maj(b,c,d); e+=t1; a=t1+t2; + + t1 = h + e1(e) + Ch(e,f,g) + 0xd807aa98 + W[ 8]; + t2 = e0(a) + Maj(a,b,c); d+=t1; h=t1+t2; + t1 = g + e1(d) + Ch(d,e,f) + 0x12835b01 + W[ 9]; + t2 = e0(h) + Maj(h,a,b); c+=t1; g=t1+t2; + t1 = f + e1(c) + Ch(c,d,e) + 0x243185be + W[10]; + t2 = e0(g) + Maj(g,h,a); b+=t1; f=t1+t2; + t1 = e + e1(b) + Ch(b,c,d) + 0x550c7dc3 + W[11]; + t2 = e0(f) + Maj(f,g,h); a+=t1; e=t1+t2; + t1 = d + e1(a) + Ch(a,b,c) + 0x72be5d74 + W[12]; + t2 = e0(e) + Maj(e,f,g); h+=t1; d=t1+t2; + t1 = c + e1(h) + Ch(h,a,b) + 0x80deb1fe + W[13]; + t2 = e0(d) + Maj(d,e,f); g+=t1; c=t1+t2; + t1 = b + e1(g) + Ch(g,h,a) + 0x9bdc06a7 + W[14]; + t2 = e0(c) + Maj(c,d,e); f+=t1; b=t1+t2; + t1 = a + e1(f) + Ch(f,g,h) + 0xc19bf174 + W[15]; + t2 = e0(b) + Maj(b,c,d); e+=t1; a=t1+t2; + + t1 = h + e1(e) + Ch(e,f,g) + 0xe49b69c1 + W[16]; + t2 = e0(a) + Maj(a,b,c); d+=t1; h=t1+t2; + t1 = g + e1(d) + Ch(d,e,f) + 0xefbe4786 + W[17]; + t2 = e0(h) + Maj(h,a,b); c+=t1; g=t1+t2; + t1 = f + e1(c) + Ch(c,d,e) + 0x0fc19dc6 + W[18]; + t2 = e0(g) + Maj(g,h,a); b+=t1; f=t1+t2; + t1 = e + e1(b) + Ch(b,c,d) + 0x240ca1cc + W[19]; + t2 = e0(f) + Maj(f,g,h); a+=t1; e=t1+t2; + t1 = d + e1(a) + Ch(a,b,c) + 0x2de92c6f + W[20]; + t2 = e0(e) + Maj(e,f,g); h+=t1; d=t1+t2; + t1 = c + e1(h) + Ch(h,a,b) + 0x4a7484aa + W[21]; + t2 = e0(d) + Maj(d,e,f); g+=t1; c=t1+t2; + t1 = b + e1(g) + Ch(g,h,a) + 0x5cb0a9dc + W[22]; + t2 = e0(c) + Maj(c,d,e); f+=t1; b=t1+t2; + t1 = a + e1(f) + Ch(f,g,h) + 0x76f988da + W[23]; + t2 = e0(b) + Maj(b,c,d); e+=t1; a=t1+t2; + + t1 = h + e1(e) + Ch(e,f,g) + 0x983e5152 + 
W[24]; + t2 = e0(a) + Maj(a,b,c); d+=t1; h=t1+t2; + t1 = g + e1(d) + Ch(d,e,f) + 0xa831c66d + W[25]; + t2 = e0(h) + Maj(h,a,b); c+=t1; g=t1+t2; + t1 = f + e1(c) + Ch(c,d,e) + 0xb00327c8 + W[26]; + t2 = e0(g) + Maj(g,h,a); b+=t1; f=t1+t2; + t1 = e + e1(b) + Ch(b,c,d) + 0xbf597fc7 + W[27]; + t2 = e0(f) + Maj(f,g,h); a+=t1; e=t1+t2; + t1 = d + e1(a) + Ch(a,b,c) + 0xc6e00bf3 + W[28]; + t2 = e0(e) + Maj(e,f,g); h+=t1; d=t1+t2; + t1 = c + e1(h) + Ch(h,a,b) + 0xd5a79147 + W[29]; + t2 = e0(d) + Maj(d,e,f); g+=t1; c=t1+t2; + t1 = b + e1(g) + Ch(g,h,a) + 0x06ca6351 + W[30]; + t2 = e0(c) + Maj(c,d,e); f+=t1; b=t1+t2; + t1 = a + e1(f) + Ch(f,g,h) + 0x14292967 + W[31]; + t2 = e0(b) + Maj(b,c,d); e+=t1; a=t1+t2; + + t1 = h + e1(e) + Ch(e,f,g) + 0x27b70a85 + W[32]; + t2 = e0(a) + Maj(a,b,c); d+=t1; h=t1+t2; + t1 = g + e1(d) + Ch(d,e,f) + 0x2e1b2138 + W[33]; + t2 = e0(h) + Maj(h,a,b); c+=t1; g=t1+t2; + t1 = f + e1(c) + Ch(c,d,e) + 0x4d2c6dfc + W[34]; + t2 = e0(g) + Maj(g,h,a); b+=t1; f=t1+t2; + t1 = e + e1(b) + Ch(b,c,d) + 0x53380d13 + W[35]; + t2 = e0(f) + Maj(f,g,h); a+=t1; e=t1+t2; + t1 = d + e1(a) + Ch(a,b,c) + 0x650a7354 + W[36]; + t2 = e0(e) + Maj(e,f,g); h+=t1; d=t1+t2; + t1 = c + e1(h) + Ch(h,a,b) + 0x766a0abb + W[37]; + t2 = e0(d) + Maj(d,e,f); g+=t1; c=t1+t2; + t1 = b + e1(g) + Ch(g,h,a) + 0x81c2c92e + W[38]; + t2 = e0(c) + Maj(c,d,e); f+=t1; b=t1+t2; + t1 = a + e1(f) + Ch(f,g,h) + 0x92722c85 + W[39]; + t2 = e0(b) + Maj(b,c,d); e+=t1; a=t1+t2; + + t1 = h + e1(e) + Ch(e,f,g) + 0xa2bfe8a1 + W[40]; + t2 = e0(a) + Maj(a,b,c); d+=t1; h=t1+t2; + t1 = g + e1(d) + Ch(d,e,f) + 0xa81a664b + W[41]; + t2 = e0(h) + Maj(h,a,b); c+=t1; g=t1+t2; + t1 = f + e1(c) + Ch(c,d,e) + 0xc24b8b70 + W[42]; + t2 = e0(g) + Maj(g,h,a); b+=t1; f=t1+t2; + t1 = e + e1(b) + Ch(b,c,d) + 0xc76c51a3 + W[43]; + t2 = e0(f) + Maj(f,g,h); a+=t1; e=t1+t2; + t1 = d + e1(a) + Ch(a,b,c) + 0xd192e819 + W[44]; + t2 = e0(e) + Maj(e,f,g); h+=t1; d=t1+t2; + t1 = c + e1(h) + Ch(h,a,b) + 0xd6990624 + W[45]; + t2 = e0(d) + Maj(d,e,f); g+=t1; c=t1+t2; + t1 = b + e1(g) + Ch(g,h,a) + 0xf40e3585 + W[46]; + t2 = e0(c) + Maj(c,d,e); f+=t1; b=t1+t2; + t1 = a + e1(f) + Ch(f,g,h) + 0x106aa070 + W[47]; + t2 = e0(b) + Maj(b,c,d); e+=t1; a=t1+t2; + + t1 = h + e1(e) + Ch(e,f,g) + 0x19a4c116 + W[48]; + t2 = e0(a) + Maj(a,b,c); d+=t1; h=t1+t2; + t1 = g + e1(d) + Ch(d,e,f) + 0x1e376c08 + W[49]; + t2 = e0(h) + Maj(h,a,b); c+=t1; g=t1+t2; + t1 = f + e1(c) + Ch(c,d,e) + 0x2748774c + W[50]; + t2 = e0(g) + Maj(g,h,a); b+=t1; f=t1+t2; + t1 = e + e1(b) + Ch(b,c,d) + 0x34b0bcb5 + W[51]; + t2 = e0(f) + Maj(f,g,h); a+=t1; e=t1+t2; + t1 = d + e1(a) + Ch(a,b,c) + 0x391c0cb3 + W[52]; + t2 = e0(e) + Maj(e,f,g); h+=t1; d=t1+t2; + t1 = c + e1(h) + Ch(h,a,b) + 0x4ed8aa4a + W[53]; + t2 = e0(d) + Maj(d,e,f); g+=t1; c=t1+t2; + t1 = b + e1(g) + Ch(g,h,a) + 0x5b9cca4f + W[54]; + t2 = e0(c) + Maj(c,d,e); f+=t1; b=t1+t2; + t1 = a + e1(f) + Ch(f,g,h) + 0x682e6ff3 + W[55]; + t2 = e0(b) + Maj(b,c,d); e+=t1; a=t1+t2; + + t1 = h + e1(e) + Ch(e,f,g) + 0x748f82ee + W[56]; + t2 = e0(a) + Maj(a,b,c); d+=t1; h=t1+t2; + t1 = g + e1(d) + Ch(d,e,f) + 0x78a5636f + W[57]; + t2 = e0(h) + Maj(h,a,b); c+=t1; g=t1+t2; + t1 = f + e1(c) + Ch(c,d,e) + 0x84c87814 + W[58]; + t2 = e0(g) + Maj(g,h,a); b+=t1; f=t1+t2; + t1 = e + e1(b) + Ch(b,c,d) + 0x8cc70208 + W[59]; + t2 = e0(f) + Maj(f,g,h); a+=t1; e=t1+t2; + t1 = d + e1(a) + Ch(a,b,c) + 0x90befffa + W[60]; + t2 = e0(e) + Maj(e,f,g); h+=t1; d=t1+t2; + t1 = c + e1(h) + Ch(h,a,b) + 0xa4506ceb + W[61]; + t2 = e0(d) + Maj(d,e,f); g+=t1; c=t1+t2; + t1 = b + e1(g) + 
Ch(g,h,a) + 0xbef9a3f7 + W[62]; + t2 = e0(c) + Maj(c,d,e); f+=t1; b=t1+t2; + t1 = a + e1(f) + Ch(f,g,h) + 0xc67178f2 + W[63]; + t2 = e0(b) + Maj(b,c,d); e+=t1; a=t1+t2; + + state[0] += a; state[1] += b; state[2] += c; state[3] += d; + state[4] += e; state[5] += f; state[6] += g; state[7] += h; + + /* clear any sensitive info... */ + a = b = c = d = e = f = g = h = t1 = t2 = 0; + memset(W, 0, 64 * sizeof(u32)); +} + + +static int sha224_init(struct shash_desc *desc) +{ + struct sha256_state *sctx = shash_desc_ctx(desc); + sctx->state[0] = SHA224_H0; + sctx->state[1] = SHA224_H1; + sctx->state[2] = SHA224_H2; + sctx->state[3] = SHA224_H3; + sctx->state[4] = SHA224_H4; + sctx->state[5] = SHA224_H5; + sctx->state[6] = SHA224_H6; + sctx->state[7] = SHA224_H7; + sctx->count = 0; + + return 0; +} + +static int sha256_init(struct shash_desc *desc) +{ + struct sha256_state *sctx = shash_desc_ctx(desc); + sctx->state[0] = SHA256_H0; + sctx->state[1] = SHA256_H1; + sctx->state[2] = SHA256_H2; + sctx->state[3] = SHA256_H3; + sctx->state[4] = SHA256_H4; + sctx->state[5] = SHA256_H5; + sctx->state[6] = SHA256_H6; + sctx->state[7] = SHA256_H7; + sctx->count = 0; + + return 0; +} + +static int sha256_update(struct shash_desc *desc, const u8 *data, + unsigned int len) +{ + struct sha256_state *sctx = shash_desc_ctx(desc); + unsigned int partial, done; + const u8 *src; + + partial = sctx->count & 0x3f; + sctx->count += len; + done = 0; + src = data; + + if ((partial + len) > 63) { + if (partial) { + done = -partial; + memcpy(sctx->buf + partial, data, done + 64); + src = sctx->buf; + } + + do { + sha256_transform(sctx->state, src); + done += 64; + src = data + done; + } while (done + 63 < len); + + partial = 0; + } + memcpy(sctx->buf + partial, src, len - done); + + return 0; +} + +static int sha256_final(struct shash_desc *desc, u8 *out) +{ + struct sha256_state *sctx = shash_desc_ctx(desc); + __be32 *dst = (__be32 *)out; + __be64 bits; + unsigned int index, pad_len; + int i; + static const u8 padding[64] = { 0x80, }; + + /* Save number of bits */ + bits = cpu_to_be64(sctx->count << 3); + + /* Pad out to 56 mod 64. */ + index = sctx->count & 0x3f; + pad_len = (index < 56) ? (56 - index) : ((64+56) - index); + sha256_update(desc, padding, pad_len); + + /* Append length (before padding) */ + sha256_update(desc, (const u8 *)&bits, sizeof(bits)); + + /* Store state in digest */ + for (i = 0; i < 8; i++) + dst[i] = cpu_to_be32(sctx->state[i]); + + /* Zeroize sensitive information. 
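Clearing the whole sha256_state wipes the chaining values, the byte count and the 64-byte partial-block buffer, so no message material lingers in the descriptor after the digest has been copied out.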
*/ + memset(sctx, 0, sizeof(*sctx)); + + return 0; +} + +static int sha224_final(struct shash_desc *desc, u8 *hash) +{ + u8 D[SHA256_DIGEST_SIZE]; + + sha256_final(desc, D); + + memcpy(hash, D, SHA224_DIGEST_SIZE); + memset(D, 0, SHA256_DIGEST_SIZE); + + return 0; +} + +static int sha256_export(struct shash_desc *desc, void *out) +{ + struct sha256_state *sctx = shash_desc_ctx(desc); + + memcpy(out, sctx, sizeof(*sctx)); + return 0; +} + +static int sha256_import(struct shash_desc *desc, const void *in) +{ + struct sha256_state *sctx = shash_desc_ctx(desc); + + memcpy(sctx, in, sizeof(*sctx)); + return 0; +} + +static struct shash_alg sha256 = { + .digestsize = SHA256_DIGEST_SIZE, + .init = sha256_init, + .update = sha256_update, + .final = sha256_final, + .export = sha256_export, + .import = sha256_import, + .descsize = sizeof(struct sha256_state), + .statesize = sizeof(struct sha256_state), + .base = { + .cra_name = "sha256", + .cra_driver_name= "sha256-generic", + .cra_flags = CRYPTO_ALG_TYPE_SHASH, + .cra_blocksize = SHA256_BLOCK_SIZE, + .cra_module = THIS_MODULE, + } +}; + +static struct shash_alg sha224 = { + .digestsize = SHA224_DIGEST_SIZE, + .init = sha224_init, + .update = sha256_update, + .final = sha224_final, + .descsize = sizeof(struct sha256_state), + .base = { + .cra_name = "sha224", + .cra_driver_name= "sha224-generic", + .cra_flags = CRYPTO_ALG_TYPE_SHASH, + .cra_blocksize = SHA224_BLOCK_SIZE, + .cra_module = THIS_MODULE, + } +}; + +static int __init sha256_generic_mod_init(void) +{ + int ret = 0; + + ret = crypto_register_shash(&sha224); + + if (ret < 0) + return ret; + + ret = crypto_register_shash(&sha256); + + if (ret < 0) + crypto_unregister_shash(&sha224); + + return ret; +} + +static void __exit sha256_generic_mod_fini(void) +{ + crypto_unregister_shash(&sha224); + crypto_unregister_shash(&sha256); +} + +module_init(sha256_generic_mod_init); +module_exit(sha256_generic_mod_fini); + +MODULE_LICENSE("GPL"); +MODULE_DESCRIPTION("SHA-224 and SHA-256 Secure Hash Algorithm"); + +MODULE_ALIAS("sha224"); +MODULE_ALIAS("sha256"); diff --git a/crypto/sha512_generic.c b/crypto/sha512_generic.c new file mode 100644 index 00000000..dd30f40a --- /dev/null +++ b/crypto/sha512_generic.c @@ -0,0 +1,298 @@ +/* SHA-512 code by Jean-Luc Cooke <jlcooke@certainkey.com> + * + * Copyright (c) Jean-Luc Cooke <jlcooke@certainkey.com> + * Copyright (c) Andrew McDonald <andrew@mcdonald.org.uk> + * Copyright (c) 2003 Kyle McMartin <kyle@debian.org> + * + * This program is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License as published by the + * Free Software Foundation; either version 2, or (at your option) any + * later version. 
+ * + */ +#include <crypto/internal/hash.h> +#include <linux/kernel.h> +#include <linux/module.h> +#include <linux/mm.h> +#include <linux/init.h> +#include <linux/crypto.h> +#include <linux/types.h> +#include <crypto/sha.h> +#include <linux/percpu.h> +#include <asm/byteorder.h> + +static inline u64 Ch(u64 x, u64 y, u64 z) +{ + return z ^ (x & (y ^ z)); +} + +static inline u64 Maj(u64 x, u64 y, u64 z) +{ + return (x & y) | (z & (x | y)); +} + +static const u64 sha512_K[80] = { + 0x428a2f98d728ae22ULL, 0x7137449123ef65cdULL, 0xb5c0fbcfec4d3b2fULL, + 0xe9b5dba58189dbbcULL, 0x3956c25bf348b538ULL, 0x59f111f1b605d019ULL, + 0x923f82a4af194f9bULL, 0xab1c5ed5da6d8118ULL, 0xd807aa98a3030242ULL, + 0x12835b0145706fbeULL, 0x243185be4ee4b28cULL, 0x550c7dc3d5ffb4e2ULL, + 0x72be5d74f27b896fULL, 0x80deb1fe3b1696b1ULL, 0x9bdc06a725c71235ULL, + 0xc19bf174cf692694ULL, 0xe49b69c19ef14ad2ULL, 0xefbe4786384f25e3ULL, + 0x0fc19dc68b8cd5b5ULL, 0x240ca1cc77ac9c65ULL, 0x2de92c6f592b0275ULL, + 0x4a7484aa6ea6e483ULL, 0x5cb0a9dcbd41fbd4ULL, 0x76f988da831153b5ULL, + 0x983e5152ee66dfabULL, 0xa831c66d2db43210ULL, 0xb00327c898fb213fULL, + 0xbf597fc7beef0ee4ULL, 0xc6e00bf33da88fc2ULL, 0xd5a79147930aa725ULL, + 0x06ca6351e003826fULL, 0x142929670a0e6e70ULL, 0x27b70a8546d22ffcULL, + 0x2e1b21385c26c926ULL, 0x4d2c6dfc5ac42aedULL, 0x53380d139d95b3dfULL, + 0x650a73548baf63deULL, 0x766a0abb3c77b2a8ULL, 0x81c2c92e47edaee6ULL, + 0x92722c851482353bULL, 0xa2bfe8a14cf10364ULL, 0xa81a664bbc423001ULL, + 0xc24b8b70d0f89791ULL, 0xc76c51a30654be30ULL, 0xd192e819d6ef5218ULL, + 0xd69906245565a910ULL, 0xf40e35855771202aULL, 0x106aa07032bbd1b8ULL, + 0x19a4c116b8d2d0c8ULL, 0x1e376c085141ab53ULL, 0x2748774cdf8eeb99ULL, + 0x34b0bcb5e19b48a8ULL, 0x391c0cb3c5c95a63ULL, 0x4ed8aa4ae3418acbULL, + 0x5b9cca4f7763e373ULL, 0x682e6ff3d6b2b8a3ULL, 0x748f82ee5defb2fcULL, + 0x78a5636f43172f60ULL, 0x84c87814a1f0ab72ULL, 0x8cc702081a6439ecULL, + 0x90befffa23631e28ULL, 0xa4506cebde82bde9ULL, 0xbef9a3f7b2c67915ULL, + 0xc67178f2e372532bULL, 0xca273eceea26619cULL, 0xd186b8c721c0c207ULL, + 0xeada7dd6cde0eb1eULL, 0xf57d4f7fee6ed178ULL, 0x06f067aa72176fbaULL, + 0x0a637dc5a2c898a6ULL, 0x113f9804bef90daeULL, 0x1b710b35131c471bULL, + 0x28db77f523047d84ULL, 0x32caab7b40c72493ULL, 0x3c9ebe0a15c9bebcULL, + 0x431d67c49c100d4cULL, 0x4cc5d4becb3e42b6ULL, 0x597f299cfc657e2aULL, + 0x5fcb6fab3ad6faecULL, 0x6c44198c4a475817ULL, +}; + +#define e0(x) (ror64(x,28) ^ ror64(x,34) ^ ror64(x,39)) +#define e1(x) (ror64(x,14) ^ ror64(x,18) ^ ror64(x,41)) +#define s0(x) (ror64(x, 1) ^ ror64(x, 8) ^ (x >> 7)) +#define s1(x) (ror64(x,19) ^ ror64(x,61) ^ (x >> 6)) + +static inline void LOAD_OP(int I, u64 *W, const u8 *input) +{ + W[I] = __be64_to_cpu( ((__be64*)(input))[I] ); +} + +static inline void BLEND_OP(int I, u64 *W) +{ + W[I & 15] += s1(W[(I-2) & 15]) + W[(I-7) & 15] + s0(W[(I-15) & 15]); +} + +static void +sha512_transform(u64 *state, const u8 *input) +{ + u64 a, b, c, d, e, f, g, h, t1, t2; + + int i; + u64 W[16]; + + /* load the state into our registers */ + a=state[0]; b=state[1]; c=state[2]; d=state[3]; + e=state[4]; f=state[5]; g=state[6]; h=state[7]; + + /* now iterate */ + for (i=0; i<80; i+=8) { + if (!(i & 8)) { + int j; + + if (i < 16) { + /* load the input */ + for (j = 0; j < 16; j++) + LOAD_OP(i + j, W, input); + } else { + for (j = 0; j < 16; j++) { + BLEND_OP(i + j, W); + } + } + } + + t1 = h + e1(e) + Ch(e,f,g) + sha512_K[i ] + W[(i & 15)]; + t2 = e0(a) + Maj(a,b,c); d+=t1; h=t1+t2; + t1 = g + e1(d) + Ch(d,e,f) + sha512_K[i+1] + W[(i & 15) + 1]; + t2 = e0(h) + Maj(h,a,b); 
c+=t1; g=t1+t2; + t1 = f + e1(c) + Ch(c,d,e) + sha512_K[i+2] + W[(i & 15) + 2]; + t2 = e0(g) + Maj(g,h,a); b+=t1; f=t1+t2; + t1 = e + e1(b) + Ch(b,c,d) + sha512_K[i+3] + W[(i & 15) + 3]; + t2 = e0(f) + Maj(f,g,h); a+=t1; e=t1+t2; + t1 = d + e1(a) + Ch(a,b,c) + sha512_K[i+4] + W[(i & 15) + 4]; + t2 = e0(e) + Maj(e,f,g); h+=t1; d=t1+t2; + t1 = c + e1(h) + Ch(h,a,b) + sha512_K[i+5] + W[(i & 15) + 5]; + t2 = e0(d) + Maj(d,e,f); g+=t1; c=t1+t2; + t1 = b + e1(g) + Ch(g,h,a) + sha512_K[i+6] + W[(i & 15) + 6]; + t2 = e0(c) + Maj(c,d,e); f+=t1; b=t1+t2; + t1 = a + e1(f) + Ch(f,g,h) + sha512_K[i+7] + W[(i & 15) + 7]; + t2 = e0(b) + Maj(b,c,d); e+=t1; a=t1+t2; + } + + state[0] += a; state[1] += b; state[2] += c; state[3] += d; + state[4] += e; state[5] += f; state[6] += g; state[7] += h; + + /* erase our data */ + a = b = c = d = e = f = g = h = t1 = t2 = 0; +} + +static int +sha512_init(struct shash_desc *desc) +{ + struct sha512_state *sctx = shash_desc_ctx(desc); + sctx->state[0] = SHA512_H0; + sctx->state[1] = SHA512_H1; + sctx->state[2] = SHA512_H2; + sctx->state[3] = SHA512_H3; + sctx->state[4] = SHA512_H4; + sctx->state[5] = SHA512_H5; + sctx->state[6] = SHA512_H6; + sctx->state[7] = SHA512_H7; + sctx->count[0] = sctx->count[1] = 0; + + return 0; +} + +static int +sha384_init(struct shash_desc *desc) +{ + struct sha512_state *sctx = shash_desc_ctx(desc); + sctx->state[0] = SHA384_H0; + sctx->state[1] = SHA384_H1; + sctx->state[2] = SHA384_H2; + sctx->state[3] = SHA384_H3; + sctx->state[4] = SHA384_H4; + sctx->state[5] = SHA384_H5; + sctx->state[6] = SHA384_H6; + sctx->state[7] = SHA384_H7; + sctx->count[0] = sctx->count[1] = 0; + + return 0; +} + +static int +sha512_update(struct shash_desc *desc, const u8 *data, unsigned int len) +{ + struct sha512_state *sctx = shash_desc_ctx(desc); + + unsigned int i, index, part_len; + + /* Compute number of bytes mod 128 */ + index = sctx->count[0] & 0x7f; + + /* Update number of bytes */ + if ((sctx->count[0] += len) < len) + sctx->count[1]++; + + part_len = 128 - index; + + /* Transform as many times as possible. */ + if (len >= part_len) { + memcpy(&sctx->buf[index], data, part_len); + sha512_transform(sctx->state, sctx->buf); + + for (i = part_len; i + 127 < len; i+=128) + sha512_transform(sctx->state, &data[i]); + + index = 0; + } else { + i = 0; + } + + /* Buffer remaining input */ + memcpy(&sctx->buf[index], &data[i], len - i); + + return 0; +} + +static int +sha512_final(struct shash_desc *desc, u8 *hash) +{ + struct sha512_state *sctx = shash_desc_ctx(desc); + static u8 padding[128] = { 0x80, }; + __be64 *dst = (__be64 *)hash; + __be64 bits[2]; + unsigned int index, pad_len; + int i; + + /* Save number of bits */ + bits[1] = cpu_to_be64(sctx->count[0] << 3); + bits[0] = cpu_to_be64(sctx->count[1] << 3 | sctx->count[0] >> 61); + + /* Pad out to 112 mod 128. */ + index = sctx->count[0] & 0x7f; + pad_len = (index < 112) ? (112 - index) : ((128+112) - index); + sha512_update(desc, padding, pad_len); + + /* Append length (before padding) */ + sha512_update(desc, (const u8 *)bits, sizeof(bits)); + + /* Store state in digest */ + for (i = 0; i < 8; i++) + dst[i] = cpu_to_be64(sctx->state[i]); + + /* Zeroize sensitive information. 
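The entire sha512_state is cleared here: chaining values, the 128-bit length counter and the 128-byte block buffer. sha384_final() below relies on this and additionally wipes its temporary digest buffer D.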
*/ + memset(sctx, 0, sizeof(struct sha512_state)); + + return 0; +} + +static int sha384_final(struct shash_desc *desc, u8 *hash) +{ + u8 D[64]; + + sha512_final(desc, D); + + memcpy(hash, D, 48); + memset(D, 0, 64); + + return 0; +} + +static struct shash_alg sha512 = { + .digestsize = SHA512_DIGEST_SIZE, + .init = sha512_init, + .update = sha512_update, + .final = sha512_final, + .descsize = sizeof(struct sha512_state), + .base = { + .cra_name = "sha512", + .cra_flags = CRYPTO_ALG_TYPE_SHASH, + .cra_blocksize = SHA512_BLOCK_SIZE, + .cra_module = THIS_MODULE, + } +}; + +static struct shash_alg sha384 = { + .digestsize = SHA384_DIGEST_SIZE, + .init = sha384_init, + .update = sha512_update, + .final = sha384_final, + .descsize = sizeof(struct sha512_state), + .base = { + .cra_name = "sha384", + .cra_flags = CRYPTO_ALG_TYPE_SHASH, + .cra_blocksize = SHA384_BLOCK_SIZE, + .cra_module = THIS_MODULE, + } +}; + +static int __init sha512_generic_mod_init(void) +{ + int ret = 0; + + if ((ret = crypto_register_shash(&sha384)) < 0) + goto out; + if ((ret = crypto_register_shash(&sha512)) < 0) + crypto_unregister_shash(&sha384); +out: + return ret; +} + +static void __exit sha512_generic_mod_fini(void) +{ + crypto_unregister_shash(&sha384); + crypto_unregister_shash(&sha512); +} + +module_init(sha512_generic_mod_init); +module_exit(sha512_generic_mod_fini); + +MODULE_LICENSE("GPL"); +MODULE_DESCRIPTION("SHA-512 and SHA-384 Secure Hash Algorithms"); + +MODULE_ALIAS("sha384"); +MODULE_ALIAS("sha512"); diff --git a/crypto/shash.c b/crypto/shash.c new file mode 100644 index 00000000..21fc12e2 --- /dev/null +++ b/crypto/shash.c @@ -0,0 +1,672 @@ +/* + * Synchronous Cryptographic Hash operations. + * + * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au> + * + * This program is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License as published by the Free + * Software Foundation; either version 2 of the License, or (at your option) + * any later version. 
+ * + */ + +#include <crypto/scatterwalk.h> +#include <crypto/internal/hash.h> +#include <linux/err.h> +#include <linux/kernel.h> +#include <linux/module.h> +#include <linux/slab.h> +#include <linux/seq_file.h> +#include <linux/cryptouser.h> +#include <net/netlink.h> + +#include "internal.h" + +static const struct crypto_type crypto_shash_type; + +static int shash_no_setkey(struct crypto_shash *tfm, const u8 *key, + unsigned int keylen) +{ + return -ENOSYS; +} + +static int shash_setkey_unaligned(struct crypto_shash *tfm, const u8 *key, + unsigned int keylen) +{ + struct shash_alg *shash = crypto_shash_alg(tfm); + unsigned long alignmask = crypto_shash_alignmask(tfm); + unsigned long absize; + u8 *buffer, *alignbuffer; + int err; + + absize = keylen + (alignmask & ~(crypto_tfm_ctx_alignment() - 1)); + buffer = kmalloc(absize, GFP_KERNEL); + if (!buffer) + return -ENOMEM; + + alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1); + memcpy(alignbuffer, key, keylen); + err = shash->setkey(tfm, alignbuffer, keylen); + kzfree(buffer); + return err; +} + +int crypto_shash_setkey(struct crypto_shash *tfm, const u8 *key, + unsigned int keylen) +{ + struct shash_alg *shash = crypto_shash_alg(tfm); + unsigned long alignmask = crypto_shash_alignmask(tfm); + + if ((unsigned long)key & alignmask) + return shash_setkey_unaligned(tfm, key, keylen); + + return shash->setkey(tfm, key, keylen); +} +EXPORT_SYMBOL_GPL(crypto_shash_setkey); + +static inline unsigned int shash_align_buffer_size(unsigned len, + unsigned long mask) +{ + return len + (mask & ~(__alignof__(u8 __attribute__ ((aligned))) - 1)); +} + +static int shash_update_unaligned(struct shash_desc *desc, const u8 *data, + unsigned int len) +{ + struct crypto_shash *tfm = desc->tfm; + struct shash_alg *shash = crypto_shash_alg(tfm); + unsigned long alignmask = crypto_shash_alignmask(tfm); + unsigned int unaligned_len = alignmask + 1 - + ((unsigned long)data & alignmask); + u8 ubuf[shash_align_buffer_size(unaligned_len, alignmask)] + __attribute__ ((aligned)); + u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1); + int err; + + if (unaligned_len > len) + unaligned_len = len; + + memcpy(buf, data, unaligned_len); + err = shash->update(desc, buf, unaligned_len); + memset(buf, 0, unaligned_len); + + return err ?: + shash->update(desc, data + unaligned_len, len - unaligned_len); +} + +int crypto_shash_update(struct shash_desc *desc, const u8 *data, + unsigned int len) +{ + struct crypto_shash *tfm = desc->tfm; + struct shash_alg *shash = crypto_shash_alg(tfm); + unsigned long alignmask = crypto_shash_alignmask(tfm); + + if ((unsigned long)data & alignmask) + return shash_update_unaligned(desc, data, len); + + return shash->update(desc, data, len); +} +EXPORT_SYMBOL_GPL(crypto_shash_update); + +static int shash_final_unaligned(struct shash_desc *desc, u8 *out) +{ + struct crypto_shash *tfm = desc->tfm; + unsigned long alignmask = crypto_shash_alignmask(tfm); + struct shash_alg *shash = crypto_shash_alg(tfm); + unsigned int ds = crypto_shash_digestsize(tfm); + u8 ubuf[shash_align_buffer_size(ds, alignmask)] + __attribute__ ((aligned)); + u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1); + int err; + + err = shash->final(desc, buf); + if (err) + goto out; + + memcpy(out, buf, ds); + +out: + memset(buf, 0, ds); + return err; +} + +int crypto_shash_final(struct shash_desc *desc, u8 *out) +{ + struct crypto_shash *tfm = desc->tfm; + struct shash_alg *shash = crypto_shash_alg(tfm); + unsigned long alignmask = crypto_shash_alignmask(tfm); + + if ((unsigned 
long)out & alignmask) + return shash_final_unaligned(desc, out); + + return shash->final(desc, out); +} +EXPORT_SYMBOL_GPL(crypto_shash_final); + +static int shash_finup_unaligned(struct shash_desc *desc, const u8 *data, + unsigned int len, u8 *out) +{ + return crypto_shash_update(desc, data, len) ?: + crypto_shash_final(desc, out); +} + +int crypto_shash_finup(struct shash_desc *desc, const u8 *data, + unsigned int len, u8 *out) +{ + struct crypto_shash *tfm = desc->tfm; + struct shash_alg *shash = crypto_shash_alg(tfm); + unsigned long alignmask = crypto_shash_alignmask(tfm); + + if (((unsigned long)data | (unsigned long)out) & alignmask) + return shash_finup_unaligned(desc, data, len, out); + + return shash->finup(desc, data, len, out); +} +EXPORT_SYMBOL_GPL(crypto_shash_finup); + +static int shash_digest_unaligned(struct shash_desc *desc, const u8 *data, + unsigned int len, u8 *out) +{ + return crypto_shash_init(desc) ?: + crypto_shash_finup(desc, data, len, out); +} + +int crypto_shash_digest(struct shash_desc *desc, const u8 *data, + unsigned int len, u8 *out) +{ + struct crypto_shash *tfm = desc->tfm; + struct shash_alg *shash = crypto_shash_alg(tfm); + unsigned long alignmask = crypto_shash_alignmask(tfm); + + if (((unsigned long)data | (unsigned long)out) & alignmask) + return shash_digest_unaligned(desc, data, len, out); + + return shash->digest(desc, data, len, out); +} +EXPORT_SYMBOL_GPL(crypto_shash_digest); + +static int shash_default_export(struct shash_desc *desc, void *out) +{ + memcpy(out, shash_desc_ctx(desc), crypto_shash_descsize(desc->tfm)); + return 0; +} + +static int shash_default_import(struct shash_desc *desc, const void *in) +{ + memcpy(shash_desc_ctx(desc), in, crypto_shash_descsize(desc->tfm)); + return 0; +} + +static int shash_async_setkey(struct crypto_ahash *tfm, const u8 *key, + unsigned int keylen) +{ + struct crypto_shash **ctx = crypto_ahash_ctx(tfm); + + return crypto_shash_setkey(*ctx, key, keylen); +} + +static int shash_async_init(struct ahash_request *req) +{ + struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req)); + struct shash_desc *desc = ahash_request_ctx(req); + + desc->tfm = *ctx; + desc->flags = req->base.flags; + + return crypto_shash_init(desc); +} + +int shash_ahash_update(struct ahash_request *req, struct shash_desc *desc) +{ + struct crypto_hash_walk walk; + int nbytes; + + for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0; + nbytes = crypto_hash_walk_done(&walk, nbytes)) + nbytes = crypto_shash_update(desc, walk.data, nbytes); + + return nbytes; +} +EXPORT_SYMBOL_GPL(shash_ahash_update); + +static int shash_async_update(struct ahash_request *req) +{ + return shash_ahash_update(req, ahash_request_ctx(req)); +} + +static int shash_async_final(struct ahash_request *req) +{ + return crypto_shash_final(ahash_request_ctx(req), req->result); +} + +int shash_ahash_finup(struct ahash_request *req, struct shash_desc *desc) +{ + struct crypto_hash_walk walk; + int nbytes; + + nbytes = crypto_hash_walk_first(req, &walk); + if (!nbytes) + return crypto_shash_final(desc, req->result); + + do { + nbytes = crypto_hash_walk_last(&walk) ? 
+ crypto_shash_finup(desc, walk.data, nbytes, + req->result) : + crypto_shash_update(desc, walk.data, nbytes); + nbytes = crypto_hash_walk_done(&walk, nbytes); + } while (nbytes > 0); + + return nbytes; +} +EXPORT_SYMBOL_GPL(shash_ahash_finup); + +static int shash_async_finup(struct ahash_request *req) +{ + struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req)); + struct shash_desc *desc = ahash_request_ctx(req); + + desc->tfm = *ctx; + desc->flags = req->base.flags; + + return shash_ahash_finup(req, desc); +} + +int shash_ahash_digest(struct ahash_request *req, struct shash_desc *desc) +{ + struct scatterlist *sg = req->src; + unsigned int offset = sg->offset; + unsigned int nbytes = req->nbytes; + int err; + + if (nbytes < min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset)) { + void *data; + + data = kmap_atomic(sg_page(sg)); + err = crypto_shash_digest(desc, data + offset, nbytes, + req->result); + kunmap_atomic(data); + crypto_yield(desc->flags); + } else + err = crypto_shash_init(desc) ?: + shash_ahash_finup(req, desc); + + return err; +} +EXPORT_SYMBOL_GPL(shash_ahash_digest); + +static int shash_async_digest(struct ahash_request *req) +{ + struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req)); + struct shash_desc *desc = ahash_request_ctx(req); + + desc->tfm = *ctx; + desc->flags = req->base.flags; + + return shash_ahash_digest(req, desc); +} + +static int shash_async_export(struct ahash_request *req, void *out) +{ + return crypto_shash_export(ahash_request_ctx(req), out); +} + +static int shash_async_import(struct ahash_request *req, const void *in) +{ + struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req)); + struct shash_desc *desc = ahash_request_ctx(req); + + desc->tfm = *ctx; + desc->flags = req->base.flags; + + return crypto_shash_import(desc, in); +} + +static void crypto_exit_shash_ops_async(struct crypto_tfm *tfm) +{ + struct crypto_shash **ctx = crypto_tfm_ctx(tfm); + + crypto_free_shash(*ctx); +} + +int crypto_init_shash_ops_async(struct crypto_tfm *tfm) +{ + struct crypto_alg *calg = tfm->__crt_alg; + struct shash_alg *alg = __crypto_shash_alg(calg); + struct crypto_ahash *crt = __crypto_ahash_cast(tfm); + struct crypto_shash **ctx = crypto_tfm_ctx(tfm); + struct crypto_shash *shash; + + if (!crypto_mod_get(calg)) + return -EAGAIN; + + shash = crypto_create_tfm(calg, &crypto_shash_type); + if (IS_ERR(shash)) { + crypto_mod_put(calg); + return PTR_ERR(shash); + } + + *ctx = shash; + tfm->exit = crypto_exit_shash_ops_async; + + crt->init = shash_async_init; + crt->update = shash_async_update; + crt->final = shash_async_final; + crt->finup = shash_async_finup; + crt->digest = shash_async_digest; + + if (alg->setkey) + crt->setkey = shash_async_setkey; + if (alg->export) + crt->export = shash_async_export; + if (alg->import) + crt->import = shash_async_import; + + crt->reqsize = sizeof(struct shash_desc) + crypto_shash_descsize(shash); + + return 0; +} + +static int shash_compat_setkey(struct crypto_hash *tfm, const u8 *key, + unsigned int keylen) +{ + struct shash_desc **descp = crypto_hash_ctx(tfm); + struct shash_desc *desc = *descp; + + return crypto_shash_setkey(desc->tfm, key, keylen); +} + +static int shash_compat_init(struct hash_desc *hdesc) +{ + struct shash_desc **descp = crypto_hash_ctx(hdesc->tfm); + struct shash_desc *desc = *descp; + + desc->flags = hdesc->flags; + + return crypto_shash_init(desc); +} + +static int shash_compat_update(struct hash_desc *hdesc, struct scatterlist *sg, + unsigned int len) +{ + 
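/* Legacy crypto_hash entry point: walk the scatterlist with crypto_hash_walk_first_compat() and feed each mapped segment to the underlying shash via crypto_shash_update(). */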
struct shash_desc **descp = crypto_hash_ctx(hdesc->tfm); + struct shash_desc *desc = *descp; + struct crypto_hash_walk walk; + int nbytes; + + for (nbytes = crypto_hash_walk_first_compat(hdesc, &walk, sg, len); + nbytes > 0; nbytes = crypto_hash_walk_done(&walk, nbytes)) + nbytes = crypto_shash_update(desc, walk.data, nbytes); + + return nbytes; +} + +static int shash_compat_final(struct hash_desc *hdesc, u8 *out) +{ + struct shash_desc **descp = crypto_hash_ctx(hdesc->tfm); + + return crypto_shash_final(*descp, out); +} + +static int shash_compat_digest(struct hash_desc *hdesc, struct scatterlist *sg, + unsigned int nbytes, u8 *out) +{ + unsigned int offset = sg->offset; + int err; + + if (nbytes < min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset)) { + struct shash_desc **descp = crypto_hash_ctx(hdesc->tfm); + struct shash_desc *desc = *descp; + void *data; + + desc->flags = hdesc->flags; + + data = kmap_atomic(sg_page(sg)); + err = crypto_shash_digest(desc, data + offset, nbytes, out); + kunmap_atomic(data); + crypto_yield(desc->flags); + goto out; + } + + err = shash_compat_init(hdesc); + if (err) + goto out; + + err = shash_compat_update(hdesc, sg, nbytes); + if (err) + goto out; + + err = shash_compat_final(hdesc, out); + +out: + return err; +} + +static void crypto_exit_shash_ops_compat(struct crypto_tfm *tfm) +{ + struct shash_desc **descp = crypto_tfm_ctx(tfm); + struct shash_desc *desc = *descp; + + crypto_free_shash(desc->tfm); + kzfree(desc); +} + +static int crypto_init_shash_ops_compat(struct crypto_tfm *tfm) +{ + struct hash_tfm *crt = &tfm->crt_hash; + struct crypto_alg *calg = tfm->__crt_alg; + struct shash_alg *alg = __crypto_shash_alg(calg); + struct shash_desc **descp = crypto_tfm_ctx(tfm); + struct crypto_shash *shash; + struct shash_desc *desc; + + if (!crypto_mod_get(calg)) + return -EAGAIN; + + shash = crypto_create_tfm(calg, &crypto_shash_type); + if (IS_ERR(shash)) { + crypto_mod_put(calg); + return PTR_ERR(shash); + } + + desc = kmalloc(sizeof(*desc) + crypto_shash_descsize(shash), + GFP_KERNEL); + if (!desc) { + crypto_free_shash(shash); + return -ENOMEM; + } + + *descp = desc; + desc->tfm = shash; + tfm->exit = crypto_exit_shash_ops_compat; + + crt->init = shash_compat_init; + crt->update = shash_compat_update; + crt->final = shash_compat_final; + crt->digest = shash_compat_digest; + crt->setkey = shash_compat_setkey; + + crt->digestsize = alg->digestsize; + + return 0; +} + +static int crypto_init_shash_ops(struct crypto_tfm *tfm, u32 type, u32 mask) +{ + switch (mask & CRYPTO_ALG_TYPE_MASK) { + case CRYPTO_ALG_TYPE_HASH_MASK: + return crypto_init_shash_ops_compat(tfm); + } + + return -EINVAL; +} + +static unsigned int crypto_shash_ctxsize(struct crypto_alg *alg, u32 type, + u32 mask) +{ + switch (mask & CRYPTO_ALG_TYPE_MASK) { + case CRYPTO_ALG_TYPE_HASH_MASK: + return sizeof(struct shash_desc *); + } + + return 0; +} + +static int crypto_shash_init_tfm(struct crypto_tfm *tfm) +{ + struct crypto_shash *hash = __crypto_shash_cast(tfm); + + hash->descsize = crypto_shash_alg(hash)->descsize; + return 0; +} + +static unsigned int crypto_shash_extsize(struct crypto_alg *alg) +{ + return alg->cra_ctxsize; +} + +#ifdef CONFIG_NET +static int crypto_shash_report(struct sk_buff *skb, struct crypto_alg *alg) +{ + struct crypto_report_hash rhash; + struct shash_alg *salg = __crypto_shash_alg(alg); + + snprintf(rhash.type, CRYPTO_MAX_ALG_NAME, "%s", "shash"); + rhash.blocksize = alg->cra_blocksize; + rhash.digestsize = salg->digestsize; + + NLA_PUT(skb, 
CRYPTOCFGA_REPORT_HASH, + sizeof(struct crypto_report_hash), &rhash); + + return 0; + +nla_put_failure: + return -EMSGSIZE; +} +#else +static int crypto_shash_report(struct sk_buff *skb, struct crypto_alg *alg) +{ + return -ENOSYS; +} +#endif + +static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg) + __attribute__ ((unused)); +static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg) +{ + struct shash_alg *salg = __crypto_shash_alg(alg); + + seq_printf(m, "type : shash\n"); + seq_printf(m, "blocksize : %u\n", alg->cra_blocksize); + seq_printf(m, "digestsize : %u\n", salg->digestsize); +} + +static const struct crypto_type crypto_shash_type = { + .ctxsize = crypto_shash_ctxsize, + .extsize = crypto_shash_extsize, + .init = crypto_init_shash_ops, + .init_tfm = crypto_shash_init_tfm, +#ifdef CONFIG_PROC_FS + .show = crypto_shash_show, +#endif + .report = crypto_shash_report, + .maskclear = ~CRYPTO_ALG_TYPE_MASK, + .maskset = CRYPTO_ALG_TYPE_MASK, + .type = CRYPTO_ALG_TYPE_SHASH, + .tfmsize = offsetof(struct crypto_shash, base), +}; + +struct crypto_shash *crypto_alloc_shash(const char *alg_name, u32 type, + u32 mask) +{ + return crypto_alloc_tfm(alg_name, &crypto_shash_type, type, mask); +} +EXPORT_SYMBOL_GPL(crypto_alloc_shash); + +static int shash_prepare_alg(struct shash_alg *alg) +{ + struct crypto_alg *base = &alg->base; + + if (alg->digestsize > PAGE_SIZE / 8 || + alg->descsize > PAGE_SIZE / 8 || + alg->statesize > PAGE_SIZE / 8) + return -EINVAL; + + base->cra_type = &crypto_shash_type; + base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK; + base->cra_flags |= CRYPTO_ALG_TYPE_SHASH; + + if (!alg->finup) + alg->finup = shash_finup_unaligned; + if (!alg->digest) + alg->digest = shash_digest_unaligned; + if (!alg->export) { + alg->export = shash_default_export; + alg->import = shash_default_import; + alg->statesize = alg->descsize; + } + if (!alg->setkey) + alg->setkey = shash_no_setkey; + + return 0; +} + +int crypto_register_shash(struct shash_alg *alg) +{ + struct crypto_alg *base = &alg->base; + int err; + + err = shash_prepare_alg(alg); + if (err) + return err; + + return crypto_register_alg(base); +} +EXPORT_SYMBOL_GPL(crypto_register_shash); + +int crypto_unregister_shash(struct shash_alg *alg) +{ + return crypto_unregister_alg(&alg->base); +} +EXPORT_SYMBOL_GPL(crypto_unregister_shash); + +int shash_register_instance(struct crypto_template *tmpl, + struct shash_instance *inst) +{ + int err; + + err = shash_prepare_alg(&inst->alg); + if (err) + return err; + + return crypto_register_instance(tmpl, shash_crypto_instance(inst)); +} +EXPORT_SYMBOL_GPL(shash_register_instance); + +void shash_free_instance(struct crypto_instance *inst) +{ + crypto_drop_spawn(crypto_instance_ctx(inst)); + kfree(shash_instance(inst)); +} +EXPORT_SYMBOL_GPL(shash_free_instance); + +int crypto_init_shash_spawn(struct crypto_shash_spawn *spawn, + struct shash_alg *alg, + struct crypto_instance *inst) +{ + return crypto_init_spawn2(&spawn->base, &alg->base, inst, + &crypto_shash_type); +} +EXPORT_SYMBOL_GPL(crypto_init_shash_spawn); + +struct shash_alg *shash_attr_alg(struct rtattr *rta, u32 type, u32 mask) +{ + struct crypto_alg *alg; + + alg = crypto_attr_alg2(rta, &crypto_shash_type, type, mask); + return IS_ERR(alg) ? 
ERR_CAST(alg) : + container_of(alg, struct shash_alg, base); +} +EXPORT_SYMBOL_GPL(shash_attr_alg); + +MODULE_LICENSE("GPL"); +MODULE_DESCRIPTION("Synchronous cryptographic hash type"); diff --git a/crypto/tcrypt.c b/crypto/tcrypt.c new file mode 100644 index 00000000..8f147bff --- /dev/null +++ b/crypto/tcrypt.c @@ -0,0 +1,1637 @@ +/* + * Quick & dirty crypto testing module. + * + * This will only exist until we have a better testing mechanism + * (e.g. a char device). + * + * Copyright (c) 2002 James Morris <jmorris@intercode.com.au> + * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org> + * Copyright (c) 2007 Nokia Siemens Networks + * + * Updated RFC4106 AES-GCM testing. + * Authors: Aidan O'Mahony (aidan.o.mahony@intel.com) + * Adrian Hoban <adrian.hoban@intel.com> + * Gabriele Paoloni <gabriele.paoloni@intel.com> + * Tadeusz Struk (tadeusz.struk@intel.com) + * Copyright (c) 2010, Intel Corporation. + * + * This program is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License as published by the Free + * Software Foundation; either version 2 of the License, or (at your option) + * any later version. + * + */ + +#include <crypto/hash.h> +#include <linux/err.h> +#include <linux/init.h> +#include <linux/gfp.h> +#include <linux/module.h> +#include <linux/scatterlist.h> +#include <linux/string.h> +#include <linux/moduleparam.h> +#include <linux/jiffies.h> +#include <linux/timex.h> +#include <linux/interrupt.h> +#include "tcrypt.h" +#include "internal.h" + +/* + * Need slab memory for testing (size in number of pages). + */ +#define TVMEMSIZE 4 + +/* +* Used by test_cipher_speed() +*/ +#define ENCRYPT 1 +#define DECRYPT 0 + +/* + * Used by test_cipher_speed() + */ +static unsigned int sec; + +static char *alg = NULL; +static u32 type; +static u32 mask; +static int mode; +static char *tvmem[TVMEMSIZE]; + +static char *check[] = { + "des", "md5", "des3_ede", "rot13", "sha1", "sha224", "sha256", + "blowfish", "twofish", "serpent", "sha384", "sha512", "md4", "aes", + "cast6", "arc4", "michael_mic", "deflate", "crc32c", "tea", "xtea", + "khazad", "wp512", "wp384", "wp256", "tnepres", "xeta", "fcrypt", + "camellia", "seed", "salsa20", "rmd128", "rmd160", "rmd256", "rmd320", + "lzo", "cts", "zlib", NULL +}; + +static int test_cipher_jiffies(struct blkcipher_desc *desc, int enc, + struct scatterlist *sg, int blen, int sec) +{ + unsigned long start, end; + int bcount; + int ret; + + for (start = jiffies, end = start + sec * HZ, bcount = 0; + time_before(jiffies, end); bcount++) { + if (enc) + ret = crypto_blkcipher_encrypt(desc, sg, sg, blen); + else + ret = crypto_blkcipher_decrypt(desc, sg, sg, blen); + + if (ret) + return ret; + } + + printk("%d operations in %d seconds (%ld bytes)\n", + bcount, sec, (long)bcount * blen); + return 0; +} + +static int test_cipher_cycles(struct blkcipher_desc *desc, int enc, + struct scatterlist *sg, int blen) +{ + unsigned long cycles = 0; + int ret = 0; + int i; + + local_bh_disable(); + local_irq_disable(); + + /* Warm-up run. */ + for (i = 0; i < 4; i++) { + if (enc) + ret = crypto_blkcipher_encrypt(desc, sg, sg, blen); + else + ret = crypto_blkcipher_decrypt(desc, sg, sg, blen); + + if (ret) + goto out; + } + + /* The real thing. 
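Interrupts and softirqs remain disabled for these eight timed calls; the figure printed below is the total cycle count divided by eight, rounded to the nearest cycle.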
*/ + for (i = 0; i < 8; i++) { + cycles_t start, end; + + start = get_cycles(); + if (enc) + ret = crypto_blkcipher_encrypt(desc, sg, sg, blen); + else + ret = crypto_blkcipher_decrypt(desc, sg, sg, blen); + end = get_cycles(); + + if (ret) + goto out; + + cycles += end - start; + } + +out: + local_irq_enable(); + local_bh_enable(); + + if (ret == 0) + printk("1 operation in %lu cycles (%d bytes)\n", + (cycles + 4) / 8, blen); + + return ret; +} + +static u32 block_sizes[] = { 16, 64, 256, 1024, 8192, 0 }; + +static void test_cipher_speed(const char *algo, int enc, unsigned int sec, + struct cipher_speed_template *template, + unsigned int tcount, u8 *keysize) +{ + unsigned int ret, i, j, iv_len; + const char *key; + char iv[128]; + struct crypto_blkcipher *tfm; + struct blkcipher_desc desc; + const char *e; + u32 *b_size; + + if (enc == ENCRYPT) + e = "encryption"; + else + e = "decryption"; + + printk("\ntesting speed of %s %s\n", algo, e); + + tfm = crypto_alloc_blkcipher(algo, 0, CRYPTO_ALG_ASYNC); + + if (IS_ERR(tfm)) { + printk("failed to load transform for %s: %ld\n", algo, + PTR_ERR(tfm)); + return; + } + desc.tfm = tfm; + desc.flags = 0; + + i = 0; + do { + + b_size = block_sizes; + do { + struct scatterlist sg[TVMEMSIZE]; + + if ((*keysize + *b_size) > TVMEMSIZE * PAGE_SIZE) { + printk("template (%u) too big for " + "tvmem (%lu)\n", *keysize + *b_size, + TVMEMSIZE * PAGE_SIZE); + goto out; + } + + printk("test %u (%d bit key, %d byte blocks): ", i, + *keysize * 8, *b_size); + + memset(tvmem[0], 0xff, PAGE_SIZE); + + /* set key, plain text and IV */ + key = tvmem[0]; + for (j = 0; j < tcount; j++) { + if (template[j].klen == *keysize) { + key = template[j].key; + break; + } + } + + ret = crypto_blkcipher_setkey(tfm, key, *keysize); + if (ret) { + printk("setkey() failed flags=%x\n", + crypto_blkcipher_get_flags(tfm)); + goto out; + } + + sg_init_table(sg, TVMEMSIZE); + sg_set_buf(sg, tvmem[0] + *keysize, + PAGE_SIZE - *keysize); + for (j = 1; j < TVMEMSIZE; j++) { + sg_set_buf(sg + j, tvmem[j], PAGE_SIZE); + memset (tvmem[j], 0xff, PAGE_SIZE); + } + + iv_len = crypto_blkcipher_ivsize(tfm); + if (iv_len) { + memset(&iv, 0xff, iv_len); + crypto_blkcipher_set_iv(tfm, iv, iv_len); + } + + if (sec) + ret = test_cipher_jiffies(&desc, enc, sg, + *b_size, sec); + else + ret = test_cipher_cycles(&desc, enc, sg, + *b_size); + + if (ret) { + printk("%s() failed flags=%x\n", e, desc.flags); + break; + } + b_size++; + i++; + } while (*b_size); + keysize++; + } while (*keysize); + +out: + crypto_free_blkcipher(tfm); +} + +static int test_hash_jiffies_digest(struct hash_desc *desc, + struct scatterlist *sg, int blen, + char *out, int sec) +{ + unsigned long start, end; + int bcount; + int ret; + + for (start = jiffies, end = start + sec * HZ, bcount = 0; + time_before(jiffies, end); bcount++) { + ret = crypto_hash_digest(desc, sg, blen, out); + if (ret) + return ret; + } + + printk("%6u opers/sec, %9lu bytes/sec\n", + bcount / sec, ((long)bcount * blen) / sec); + + return 0; +} + +static int test_hash_jiffies(struct hash_desc *desc, struct scatterlist *sg, + int blen, int plen, char *out, int sec) +{ + unsigned long start, end; + int bcount, pcount; + int ret; + + if (plen == blen) + return test_hash_jiffies_digest(desc, sg, blen, out, sec); + + for (start = jiffies, end = start + sec * HZ, bcount = 0; + time_before(jiffies, end); bcount++) { + ret = crypto_hash_init(desc); + if (ret) + return ret; + for (pcount = 0; pcount < blen; pcount += plen) { + ret = crypto_hash_update(desc, sg, plen); + 
if (ret) + return ret; + } + /* we assume there is enough space in 'out' for the result */ + ret = crypto_hash_final(desc, out); + if (ret) + return ret; + } + + printk("%6u opers/sec, %9lu bytes/sec\n", + bcount / sec, ((long)bcount * blen) / sec); + + return 0; +} + +static int test_hash_cycles_digest(struct hash_desc *desc, + struct scatterlist *sg, int blen, char *out) +{ + unsigned long cycles = 0; + int i; + int ret; + + local_bh_disable(); + local_irq_disable(); + + /* Warm-up run. */ + for (i = 0; i < 4; i++) { + ret = crypto_hash_digest(desc, sg, blen, out); + if (ret) + goto out; + } + + /* The real thing. */ + for (i = 0; i < 8; i++) { + cycles_t start, end; + + start = get_cycles(); + + ret = crypto_hash_digest(desc, sg, blen, out); + if (ret) + goto out; + + end = get_cycles(); + + cycles += end - start; + } + +out: + local_irq_enable(); + local_bh_enable(); + + if (ret) + return ret; + + printk("%6lu cycles/operation, %4lu cycles/byte\n", + cycles / 8, cycles / (8 * blen)); + + return 0; +} + +static int test_hash_cycles(struct hash_desc *desc, struct scatterlist *sg, + int blen, int plen, char *out) +{ + unsigned long cycles = 0; + int i, pcount; + int ret; + + if (plen == blen) + return test_hash_cycles_digest(desc, sg, blen, out); + + local_bh_disable(); + local_irq_disable(); + + /* Warm-up run. */ + for (i = 0; i < 4; i++) { + ret = crypto_hash_init(desc); + if (ret) + goto out; + for (pcount = 0; pcount < blen; pcount += plen) { + ret = crypto_hash_update(desc, sg, plen); + if (ret) + goto out; + } + ret = crypto_hash_final(desc, out); + if (ret) + goto out; + } + + /* The real thing. */ + for (i = 0; i < 8; i++) { + cycles_t start, end; + + start = get_cycles(); + + ret = crypto_hash_init(desc); + if (ret) + goto out; + for (pcount = 0; pcount < blen; pcount += plen) { + ret = crypto_hash_update(desc, sg, plen); + if (ret) + goto out; + } + ret = crypto_hash_final(desc, out); + if (ret) + goto out; + + end = get_cycles(); + + cycles += end - start; + } + +out: + local_irq_enable(); + local_bh_enable(); + + if (ret) + return ret; + + printk("%6lu cycles/operation, %4lu cycles/byte\n", + cycles / 8, cycles / (8 * blen)); + + return 0; +} + +static void test_hash_sg_init(struct scatterlist *sg) +{ + int i; + + sg_init_table(sg, TVMEMSIZE); + for (i = 0; i < TVMEMSIZE; i++) { + sg_set_buf(sg + i, tvmem[i], PAGE_SIZE); + memset(tvmem[i], 0xff, PAGE_SIZE); + } +} + +static void test_hash_speed(const char *algo, unsigned int sec, + struct hash_speed *speed) +{ + struct scatterlist sg[TVMEMSIZE]; + struct crypto_hash *tfm; + struct hash_desc desc; + static char output[1024]; + int i; + int ret; + + printk(KERN_INFO "\ntesting speed of %s\n", algo); + + tfm = crypto_alloc_hash(algo, 0, CRYPTO_ALG_ASYNC); + + if (IS_ERR(tfm)) { + printk(KERN_ERR "failed to load transform for %s: %ld\n", algo, + PTR_ERR(tfm)); + return; + } + + desc.tfm = tfm; + desc.flags = 0; + + if (crypto_hash_digestsize(tfm) > sizeof(output)) { + printk(KERN_ERR "digestsize(%u) > outputbuffer(%zu)\n", + crypto_hash_digestsize(tfm), sizeof(output)); + goto out; + } + + test_hash_sg_init(sg); + for (i = 0; speed[i].blen != 0; i++) { + if (speed[i].blen > TVMEMSIZE * PAGE_SIZE) { + printk(KERN_ERR + "template (%u) too big for tvmem (%lu)\n", + speed[i].blen, TVMEMSIZE * PAGE_SIZE); + goto out; + } + + if (speed[i].klen) + crypto_hash_setkey(tfm, tvmem[0], speed[i].klen); + + printk(KERN_INFO "test%3u " + "(%5u byte blocks,%5u bytes per update,%4u updates): ", + i, speed[i].blen, speed[i].plen, speed[i].blen / 
speed[i].plen); + + if (sec) + ret = test_hash_jiffies(&desc, sg, speed[i].blen, + speed[i].plen, output, sec); + else + ret = test_hash_cycles(&desc, sg, speed[i].blen, + speed[i].plen, output); + + if (ret) { + printk(KERN_ERR "hashing failed ret=%d\n", ret); + break; + } + } + +out: + crypto_free_hash(tfm); +} + +struct tcrypt_result { + struct completion completion; + int err; +}; + +static void tcrypt_complete(struct crypto_async_request *req, int err) +{ + struct tcrypt_result *res = req->data; + + if (err == -EINPROGRESS) + return; + + res->err = err; + complete(&res->completion); +} + +static inline int do_one_ahash_op(struct ahash_request *req, int ret) +{ + if (ret == -EINPROGRESS || ret == -EBUSY) { + struct tcrypt_result *tr = req->base.data; + + ret = wait_for_completion_interruptible(&tr->completion); + if (!ret) + ret = tr->err; + INIT_COMPLETION(tr->completion); + } + return ret; +} + +static int test_ahash_jiffies_digest(struct ahash_request *req, int blen, + char *out, int sec) +{ + unsigned long start, end; + int bcount; + int ret; + + for (start = jiffies, end = start + sec * HZ, bcount = 0; + time_before(jiffies, end); bcount++) { + ret = do_one_ahash_op(req, crypto_ahash_digest(req)); + if (ret) + return ret; + } + + printk("%6u opers/sec, %9lu bytes/sec\n", + bcount / sec, ((long)bcount * blen) / sec); + + return 0; +} + +static int test_ahash_jiffies(struct ahash_request *req, int blen, + int plen, char *out, int sec) +{ + unsigned long start, end; + int bcount, pcount; + int ret; + + if (plen == blen) + return test_ahash_jiffies_digest(req, blen, out, sec); + + for (start = jiffies, end = start + sec * HZ, bcount = 0; + time_before(jiffies, end); bcount++) { + ret = crypto_ahash_init(req); + if (ret) + return ret; + for (pcount = 0; pcount < blen; pcount += plen) { + ret = do_one_ahash_op(req, crypto_ahash_update(req)); + if (ret) + return ret; + } + /* we assume there is enough space in 'out' for the result */ + ret = do_one_ahash_op(req, crypto_ahash_final(req)); + if (ret) + return ret; + } + + pr_cont("%6u opers/sec, %9lu bytes/sec\n", + bcount / sec, ((long)bcount * blen) / sec); + + return 0; +} + +static int test_ahash_cycles_digest(struct ahash_request *req, int blen, + char *out) +{ + unsigned long cycles = 0; + int ret, i; + + /* Warm-up run. */ + for (i = 0; i < 4; i++) { + ret = do_one_ahash_op(req, crypto_ahash_digest(req)); + if (ret) + goto out; + } + + /* The real thing. */ + for (i = 0; i < 8; i++) { + cycles_t start, end; + + start = get_cycles(); + + ret = do_one_ahash_op(req, crypto_ahash_digest(req)); + if (ret) + goto out; + + end = get_cycles(); + + cycles += end - start; + } + +out: + if (ret) + return ret; + + pr_cont("%6lu cycles/operation, %4lu cycles/byte\n", + cycles / 8, cycles / (8 * blen)); + + return 0; +} + +static int test_ahash_cycles(struct ahash_request *req, int blen, + int plen, char *out) +{ + unsigned long cycles = 0; + int i, pcount, ret; + + if (plen == blen) + return test_ahash_cycles_digest(req, blen, out); + + /* Warm-up run. */ + for (i = 0; i < 4; i++) { + ret = crypto_ahash_init(req); + if (ret) + goto out; + for (pcount = 0; pcount < blen; pcount += plen) { + ret = do_one_ahash_op(req, crypto_ahash_update(req)); + if (ret) + goto out; + } + ret = do_one_ahash_op(req, crypto_ahash_final(req)); + if (ret) + goto out; + } + + /* The real thing. 
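Each of the eight timed iterations replays the full init/update/final sequence, so the per-byte figure also carries the fixed init and final overhead.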
*/ + for (i = 0; i < 8; i++) { + cycles_t start, end; + + start = get_cycles(); + + ret = crypto_ahash_init(req); + if (ret) + goto out; + for (pcount = 0; pcount < blen; pcount += plen) { + ret = do_one_ahash_op(req, crypto_ahash_update(req)); + if (ret) + goto out; + } + ret = do_one_ahash_op(req, crypto_ahash_final(req)); + if (ret) + goto out; + + end = get_cycles(); + + cycles += end - start; + } + +out: + if (ret) + return ret; + + pr_cont("%6lu cycles/operation, %4lu cycles/byte\n", + cycles / 8, cycles / (8 * blen)); + + return 0; +} + +static void test_ahash_speed(const char *algo, unsigned int sec, + struct hash_speed *speed) +{ + struct scatterlist sg[TVMEMSIZE]; + struct tcrypt_result tresult; + struct ahash_request *req; + struct crypto_ahash *tfm; + static char output[1024]; + int i, ret; + + printk(KERN_INFO "\ntesting speed of async %s\n", algo); + + tfm = crypto_alloc_ahash(algo, 0, 0); + if (IS_ERR(tfm)) { + pr_err("failed to load transform for %s: %ld\n", + algo, PTR_ERR(tfm)); + return; + } + + if (crypto_ahash_digestsize(tfm) > sizeof(output)) { + pr_err("digestsize(%u) > outputbuffer(%zu)\n", + crypto_ahash_digestsize(tfm), sizeof(output)); + goto out; + } + + test_hash_sg_init(sg); + req = ahash_request_alloc(tfm, GFP_KERNEL); + if (!req) { + pr_err("ahash request allocation failure\n"); + goto out; + } + + init_completion(&tresult.completion); + ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG, + tcrypt_complete, &tresult); + + for (i = 0; speed[i].blen != 0; i++) { + if (speed[i].blen > TVMEMSIZE * PAGE_SIZE) { + pr_err("template (%u) too big for tvmem (%lu)\n", + speed[i].blen, TVMEMSIZE * PAGE_SIZE); + break; + } + + pr_info("test%3u " + "(%5u byte blocks,%5u bytes per update,%4u updates): ", + i, speed[i].blen, speed[i].plen, speed[i].blen / speed[i].plen); + + ahash_request_set_crypt(req, sg, output, speed[i].plen); + + if (sec) + ret = test_ahash_jiffies(req, speed[i].blen, + speed[i].plen, output, sec); + else + ret = test_ahash_cycles(req, speed[i].blen, + speed[i].plen, output); + + if (ret) { + pr_err("hashing failed ret=%d\n", ret); + break; + } + } + + ahash_request_free(req); + +out: + crypto_free_ahash(tfm); +} + +static inline int do_one_acipher_op(struct ablkcipher_request *req, int ret) +{ + if (ret == -EINPROGRESS || ret == -EBUSY) { + struct tcrypt_result *tr = req->base.data; + + ret = wait_for_completion_interruptible(&tr->completion); + if (!ret) + ret = tr->err; + INIT_COMPLETION(tr->completion); + } + + return ret; +} + +static int test_acipher_jiffies(struct ablkcipher_request *req, int enc, + int blen, int sec) +{ + unsigned long start, end; + int bcount; + int ret; + + for (start = jiffies, end = start + sec * HZ, bcount = 0; + time_before(jiffies, end); bcount++) { + if (enc) + ret = do_one_acipher_op(req, + crypto_ablkcipher_encrypt(req)); + else + ret = do_one_acipher_op(req, + crypto_ablkcipher_decrypt(req)); + + if (ret) + return ret; + } + + pr_cont("%d operations in %d seconds (%ld bytes)\n", + bcount, sec, (long)bcount * blen); + return 0; +} + +static int test_acipher_cycles(struct ablkcipher_request *req, int enc, + int blen) +{ + unsigned long cycles = 0; + int ret = 0; + int i; + + /* Warm-up run. */ + for (i = 0; i < 4; i++) { + if (enc) + ret = do_one_acipher_op(req, + crypto_ablkcipher_encrypt(req)); + else + ret = do_one_acipher_op(req, + crypto_ablkcipher_decrypt(req)); + + if (ret) + goto out; + } + + /* The real thing. 
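Same eight-iteration average for the async block cipher; do_one_acipher_op() waits for -EINPROGRESS/-EBUSY submissions to complete, so asynchronous completion time is included in the measurement.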
*/ + for (i = 0; i < 8; i++) { + cycles_t start, end; + + start = get_cycles(); + if (enc) + ret = do_one_acipher_op(req, + crypto_ablkcipher_encrypt(req)); + else + ret = do_one_acipher_op(req, + crypto_ablkcipher_decrypt(req)); + end = get_cycles(); + + if (ret) + goto out; + + cycles += end - start; + } + +out: + if (ret == 0) + pr_cont("1 operation in %lu cycles (%d bytes)\n", + (cycles + 4) / 8, blen); + + return ret; +} + +static void test_acipher_speed(const char *algo, int enc, unsigned int sec, + struct cipher_speed_template *template, + unsigned int tcount, u8 *keysize) +{ + unsigned int ret, i, j, iv_len; + struct tcrypt_result tresult; + const char *key; + char iv[128]; + struct ablkcipher_request *req; + struct crypto_ablkcipher *tfm; + const char *e; + u32 *b_size; + + if (enc == ENCRYPT) + e = "encryption"; + else + e = "decryption"; + + pr_info("\ntesting speed of async %s %s\n", algo, e); + + init_completion(&tresult.completion); + + tfm = crypto_alloc_ablkcipher(algo, 0, 0); + + if (IS_ERR(tfm)) { + pr_err("failed to load transform for %s: %ld\n", algo, + PTR_ERR(tfm)); + return; + } + + req = ablkcipher_request_alloc(tfm, GFP_KERNEL); + if (!req) { + pr_err("tcrypt: skcipher: Failed to allocate request for %s\n", + algo); + goto out; + } + + ablkcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG, + tcrypt_complete, &tresult); + + i = 0; + do { + b_size = block_sizes; + + do { + struct scatterlist sg[TVMEMSIZE]; + + if ((*keysize + *b_size) > TVMEMSIZE * PAGE_SIZE) { + pr_err("template (%u) too big for " + "tvmem (%lu)\n", *keysize + *b_size, + TVMEMSIZE * PAGE_SIZE); + goto out_free_req; + } + + pr_info("test %u (%d bit key, %d byte blocks): ", i, + *keysize * 8, *b_size); + + memset(tvmem[0], 0xff, PAGE_SIZE); + + /* set key, plain text and IV */ + key = tvmem[0]; + for (j = 0; j < tcount; j++) { + if (template[j].klen == *keysize) { + key = template[j].key; + break; + } + } + + crypto_ablkcipher_clear_flags(tfm, ~0); + + ret = crypto_ablkcipher_setkey(tfm, key, *keysize); + if (ret) { + pr_err("setkey() failed flags=%x\n", + crypto_ablkcipher_get_flags(tfm)); + goto out_free_req; + } + + sg_init_table(sg, TVMEMSIZE); + sg_set_buf(sg, tvmem[0] + *keysize, + PAGE_SIZE - *keysize); + for (j = 1; j < TVMEMSIZE; j++) { + sg_set_buf(sg + j, tvmem[j], PAGE_SIZE); + memset(tvmem[j], 0xff, PAGE_SIZE); + } + + iv_len = crypto_ablkcipher_ivsize(tfm); + if (iv_len) + memset(&iv, 0xff, iv_len); + + ablkcipher_request_set_crypt(req, sg, sg, *b_size, iv); + + if (sec) + ret = test_acipher_jiffies(req, enc, + *b_size, sec); + else + ret = test_acipher_cycles(req, enc, + *b_size); + + if (ret) { + pr_err("%s() failed flags=%x\n", e, + crypto_ablkcipher_get_flags(tfm)); + break; + } + b_size++; + i++; + } while (*b_size); + keysize++; + } while (*keysize); + +out_free_req: + ablkcipher_request_free(req); +out: + crypto_free_ablkcipher(tfm); +} + +static void test_available(void) +{ + char **name = check; + + while (*name) { + printk("alg %s ", *name); + printk(crypto_has_alg(*name, 0, 0) ? 
+ "found\n" : "not found\n"); + name++; + } +} + +static inline int tcrypt_test(const char *alg) +{ + int ret; + + ret = alg_test(alg, alg, 0, 0); + /* non-fips algs return -EINVAL in fips mode */ + if (fips_enabled && ret == -EINVAL) + ret = 0; + return ret; +} + +static int do_test(int m) +{ + int i; + int ret = 0; + + switch (m) { + case 0: + for (i = 1; i < 200; i++) + ret += do_test(i); + break; + + case 1: + ret += tcrypt_test("md5"); + break; + + case 2: + ret += tcrypt_test("sha1"); + break; + + case 3: + ret += tcrypt_test("ecb(des)"); + ret += tcrypt_test("cbc(des)"); + break; + + case 4: + ret += tcrypt_test("ecb(des3_ede)"); + ret += tcrypt_test("cbc(des3_ede)"); + break; + + case 5: + ret += tcrypt_test("md4"); + break; + + case 6: + ret += tcrypt_test("sha256"); + break; + + case 7: + ret += tcrypt_test("ecb(blowfish)"); + ret += tcrypt_test("cbc(blowfish)"); + ret += tcrypt_test("ctr(blowfish)"); + break; + + case 8: + ret += tcrypt_test("ecb(twofish)"); + ret += tcrypt_test("cbc(twofish)"); + ret += tcrypt_test("ctr(twofish)"); + ret += tcrypt_test("lrw(twofish)"); + ret += tcrypt_test("xts(twofish)"); + break; + + case 9: + ret += tcrypt_test("ecb(serpent)"); + ret += tcrypt_test("cbc(serpent)"); + ret += tcrypt_test("ctr(serpent)"); + ret += tcrypt_test("lrw(serpent)"); + ret += tcrypt_test("xts(serpent)"); + break; + + case 10: + ret += tcrypt_test("ecb(aes)"); + ret += tcrypt_test("cbc(aes)"); + ret += tcrypt_test("lrw(aes)"); + ret += tcrypt_test("xts(aes)"); + ret += tcrypt_test("ctr(aes)"); + ret += tcrypt_test("rfc3686(ctr(aes))"); + break; + + case 11: + ret += tcrypt_test("sha384"); + break; + + case 12: + ret += tcrypt_test("sha512"); + break; + + case 13: + ret += tcrypt_test("deflate"); + break; + + case 14: + ret += tcrypt_test("ecb(cast5)"); + break; + + case 15: + ret += tcrypt_test("ecb(cast6)"); + break; + + case 16: + ret += tcrypt_test("ecb(arc4)"); + break; + + case 17: + ret += tcrypt_test("michael_mic"); + break; + + case 18: + ret += tcrypt_test("crc32c"); + break; + + case 19: + ret += tcrypt_test("ecb(tea)"); + break; + + case 20: + ret += tcrypt_test("ecb(xtea)"); + break; + + case 21: + ret += tcrypt_test("ecb(khazad)"); + break; + + case 22: + ret += tcrypt_test("wp512"); + break; + + case 23: + ret += tcrypt_test("wp384"); + break; + + case 24: + ret += tcrypt_test("wp256"); + break; + + case 25: + ret += tcrypt_test("ecb(tnepres)"); + break; + + case 26: + ret += tcrypt_test("ecb(anubis)"); + ret += tcrypt_test("cbc(anubis)"); + break; + + case 27: + ret += tcrypt_test("tgr192"); + break; + + case 28: + + ret += tcrypt_test("tgr160"); + break; + + case 29: + ret += tcrypt_test("tgr128"); + break; + + case 30: + ret += tcrypt_test("ecb(xeta)"); + break; + + case 31: + ret += tcrypt_test("pcbc(fcrypt)"); + break; + + case 32: + ret += tcrypt_test("ecb(camellia)"); + ret += tcrypt_test("cbc(camellia)"); + break; + case 33: + ret += tcrypt_test("sha224"); + break; + + case 34: + ret += tcrypt_test("salsa20"); + break; + + case 35: + ret += tcrypt_test("gcm(aes)"); + break; + + case 36: + ret += tcrypt_test("lzo"); + break; + + case 37: + ret += tcrypt_test("ccm(aes)"); + break; + + case 38: + ret += tcrypt_test("cts(cbc(aes))"); + break; + + case 39: + ret += tcrypt_test("rmd128"); + break; + + case 40: + ret += tcrypt_test("rmd160"); + break; + + case 41: + ret += tcrypt_test("rmd256"); + break; + + case 42: + ret += tcrypt_test("rmd320"); + break; + + case 43: + ret += tcrypt_test("ecb(seed)"); + break; + + case 44: + ret += tcrypt_test("zlib"); 
+ break; + + case 45: + ret += tcrypt_test("rfc4309(ccm(aes))"); + break; + + case 100: + ret += tcrypt_test("hmac(md5)"); + break; + + case 101: + ret += tcrypt_test("hmac(sha1)"); + break; + + case 102: + ret += tcrypt_test("hmac(sha256)"); + break; + + case 103: + ret += tcrypt_test("hmac(sha384)"); + break; + + case 104: + ret += tcrypt_test("hmac(sha512)"); + break; + + case 105: + ret += tcrypt_test("hmac(sha224)"); + break; + + case 106: + ret += tcrypt_test("xcbc(aes)"); + break; + + case 107: + ret += tcrypt_test("hmac(rmd128)"); + break; + + case 108: + ret += tcrypt_test("hmac(rmd160)"); + break; + + case 109: + ret += tcrypt_test("vmac(aes)"); + break; + + case 150: + ret += tcrypt_test("ansi_cprng"); + break; + + case 151: + ret += tcrypt_test("rfc4106(gcm(aes))"); + break; + + case 200: + test_cipher_speed("ecb(aes)", ENCRYPT, sec, NULL, 0, + speed_template_16_24_32); + test_cipher_speed("ecb(aes)", DECRYPT, sec, NULL, 0, + speed_template_16_24_32); + test_cipher_speed("cbc(aes)", ENCRYPT, sec, NULL, 0, + speed_template_16_24_32); + test_cipher_speed("cbc(aes)", DECRYPT, sec, NULL, 0, + speed_template_16_24_32); + test_cipher_speed("lrw(aes)", ENCRYPT, sec, NULL, 0, + speed_template_32_40_48); + test_cipher_speed("lrw(aes)", DECRYPT, sec, NULL, 0, + speed_template_32_40_48); + test_cipher_speed("xts(aes)", ENCRYPT, sec, NULL, 0, + speed_template_32_48_64); + test_cipher_speed("xts(aes)", DECRYPT, sec, NULL, 0, + speed_template_32_48_64); + test_cipher_speed("ctr(aes)", ENCRYPT, sec, NULL, 0, + speed_template_16_24_32); + test_cipher_speed("ctr(aes)", DECRYPT, sec, NULL, 0, + speed_template_16_24_32); + break; + + case 201: + test_cipher_speed("ecb(des3_ede)", ENCRYPT, sec, + des3_speed_template, DES3_SPEED_VECTORS, + speed_template_24); + test_cipher_speed("ecb(des3_ede)", DECRYPT, sec, + des3_speed_template, DES3_SPEED_VECTORS, + speed_template_24); + test_cipher_speed("cbc(des3_ede)", ENCRYPT, sec, + des3_speed_template, DES3_SPEED_VECTORS, + speed_template_24); + test_cipher_speed("cbc(des3_ede)", DECRYPT, sec, + des3_speed_template, DES3_SPEED_VECTORS, + speed_template_24); + break; + + case 202: + test_cipher_speed("ecb(twofish)", ENCRYPT, sec, NULL, 0, + speed_template_16_24_32); + test_cipher_speed("ecb(twofish)", DECRYPT, sec, NULL, 0, + speed_template_16_24_32); + test_cipher_speed("cbc(twofish)", ENCRYPT, sec, NULL, 0, + speed_template_16_24_32); + test_cipher_speed("cbc(twofish)", DECRYPT, sec, NULL, 0, + speed_template_16_24_32); + test_cipher_speed("ctr(twofish)", ENCRYPT, sec, NULL, 0, + speed_template_16_24_32); + test_cipher_speed("ctr(twofish)", DECRYPT, sec, NULL, 0, + speed_template_16_24_32); + test_cipher_speed("lrw(twofish)", ENCRYPT, sec, NULL, 0, + speed_template_32_40_48); + test_cipher_speed("lrw(twofish)", DECRYPT, sec, NULL, 0, + speed_template_32_40_48); + test_cipher_speed("xts(twofish)", ENCRYPT, sec, NULL, 0, + speed_template_32_48_64); + test_cipher_speed("xts(twofish)", DECRYPT, sec, NULL, 0, + speed_template_32_48_64); + break; + + case 203: + test_cipher_speed("ecb(blowfish)", ENCRYPT, sec, NULL, 0, + speed_template_8_32); + test_cipher_speed("ecb(blowfish)", DECRYPT, sec, NULL, 0, + speed_template_8_32); + test_cipher_speed("cbc(blowfish)", ENCRYPT, sec, NULL, 0, + speed_template_8_32); + test_cipher_speed("cbc(blowfish)", DECRYPT, sec, NULL, 0, + speed_template_8_32); + test_cipher_speed("ctr(blowfish)", ENCRYPT, sec, NULL, 0, + speed_template_8_32); + test_cipher_speed("ctr(blowfish)", DECRYPT, sec, NULL, 0, + speed_template_8_32); + 
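+ /* For the 2xx speed cases the speed_template_* arrays list the key
+  * sizes in bytes to benchmark, and the "sec" module parameter sets the
+  * measurement window: sec=0 (the default) counts CPU cycles per
+  * operation, sec>0 measures throughput for that many seconds. */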
break; + + case 204: + test_cipher_speed("ecb(des)", ENCRYPT, sec, NULL, 0, + speed_template_8); + test_cipher_speed("ecb(des)", DECRYPT, sec, NULL, 0, + speed_template_8); + test_cipher_speed("cbc(des)", ENCRYPT, sec, NULL, 0, + speed_template_8); + test_cipher_speed("cbc(des)", DECRYPT, sec, NULL, 0, + speed_template_8); + break; + + case 205: + test_cipher_speed("ecb(camellia)", ENCRYPT, sec, NULL, 0, + speed_template_16_24_32); + test_cipher_speed("ecb(camellia)", DECRYPT, sec, NULL, 0, + speed_template_16_24_32); + test_cipher_speed("cbc(camellia)", ENCRYPT, sec, NULL, 0, + speed_template_16_24_32); + test_cipher_speed("cbc(camellia)", DECRYPT, sec, NULL, 0, + speed_template_16_24_32); + test_cipher_speed("ctr(camellia)", ENCRYPT, sec, NULL, 0, + speed_template_16_24_32); + test_cipher_speed("ctr(camellia)", DECRYPT, sec, NULL, 0, + speed_template_16_24_32); + test_cipher_speed("lrw(camellia)", ENCRYPT, sec, NULL, 0, + speed_template_32_40_48); + test_cipher_speed("lrw(camellia)", DECRYPT, sec, NULL, 0, + speed_template_32_40_48); + test_cipher_speed("xts(camellia)", ENCRYPT, sec, NULL, 0, + speed_template_32_48_64); + test_cipher_speed("xts(camellia)", DECRYPT, sec, NULL, 0, + speed_template_32_48_64); + break; + + case 206: + test_cipher_speed("salsa20", ENCRYPT, sec, NULL, 0, + speed_template_16_32); + break; + + case 207: + test_cipher_speed("ecb(serpent)", ENCRYPT, sec, NULL, 0, + speed_template_16_32); + test_cipher_speed("ecb(serpent)", DECRYPT, sec, NULL, 0, + speed_template_16_32); + test_cipher_speed("cbc(serpent)", ENCRYPT, sec, NULL, 0, + speed_template_16_32); + test_cipher_speed("cbc(serpent)", DECRYPT, sec, NULL, 0, + speed_template_16_32); + test_cipher_speed("ctr(serpent)", ENCRYPT, sec, NULL, 0, + speed_template_16_32); + test_cipher_speed("ctr(serpent)", DECRYPT, sec, NULL, 0, + speed_template_16_32); + test_cipher_speed("lrw(serpent)", ENCRYPT, sec, NULL, 0, + speed_template_32_48); + test_cipher_speed("lrw(serpent)", DECRYPT, sec, NULL, 0, + speed_template_32_48); + test_cipher_speed("xts(serpent)", ENCRYPT, sec, NULL, 0, + speed_template_32_64); + test_cipher_speed("xts(serpent)", DECRYPT, sec, NULL, 0, + speed_template_32_64); + break; + + case 300: + /* fall through */ + + case 301: + test_hash_speed("md4", sec, generic_hash_speed_template); + if (mode > 300 && mode < 400) break; + + case 302: + test_hash_speed("md5", sec, generic_hash_speed_template); + if (mode > 300 && mode < 400) break; + + case 303: + test_hash_speed("sha1", sec, generic_hash_speed_template); + if (mode > 300 && mode < 400) break; + + case 304: + test_hash_speed("sha256", sec, generic_hash_speed_template); + if (mode > 300 && mode < 400) break; + + case 305: + test_hash_speed("sha384", sec, generic_hash_speed_template); + if (mode > 300 && mode < 400) break; + + case 306: + test_hash_speed("sha512", sec, generic_hash_speed_template); + if (mode > 300 && mode < 400) break; + + case 307: + test_hash_speed("wp256", sec, generic_hash_speed_template); + if (mode > 300 && mode < 400) break; + + case 308: + test_hash_speed("wp384", sec, generic_hash_speed_template); + if (mode > 300 && mode < 400) break; + + case 309: + test_hash_speed("wp512", sec, generic_hash_speed_template); + if (mode > 300 && mode < 400) break; + + case 310: + test_hash_speed("tgr128", sec, generic_hash_speed_template); + if (mode > 300 && mode < 400) break; + + case 311: + test_hash_speed("tgr160", sec, generic_hash_speed_template); + if (mode > 300 && mode < 400) break; + + case 312: + test_hash_speed("tgr192", sec, 
generic_hash_speed_template); + if (mode > 300 && mode < 400) break; + + case 313: + test_hash_speed("sha224", sec, generic_hash_speed_template); + if (mode > 300 && mode < 400) break; + + case 314: + test_hash_speed("rmd128", sec, generic_hash_speed_template); + if (mode > 300 && mode < 400) break; + + case 315: + test_hash_speed("rmd160", sec, generic_hash_speed_template); + if (mode > 300 && mode < 400) break; + + case 316: + test_hash_speed("rmd256", sec, generic_hash_speed_template); + if (mode > 300 && mode < 400) break; + + case 317: + test_hash_speed("rmd320", sec, generic_hash_speed_template); + if (mode > 300 && mode < 400) break; + + case 318: + test_hash_speed("ghash-generic", sec, hash_speed_template_16); + if (mode > 300 && mode < 400) break; + + case 399: + break; + + case 400: + /* fall through */ + + case 401: + test_ahash_speed("md4", sec, generic_hash_speed_template); + if (mode > 400 && mode < 500) break; + + case 402: + test_ahash_speed("md5", sec, generic_hash_speed_template); + if (mode > 400 && mode < 500) break; + + case 403: + test_ahash_speed("sha1", sec, generic_hash_speed_template); + if (mode > 400 && mode < 500) break; + + case 404: + test_ahash_speed("sha256", sec, generic_hash_speed_template); + if (mode > 400 && mode < 500) break; + + case 405: + test_ahash_speed("sha384", sec, generic_hash_speed_template); + if (mode > 400 && mode < 500) break; + + case 406: + test_ahash_speed("sha512", sec, generic_hash_speed_template); + if (mode > 400 && mode < 500) break; + + case 407: + test_ahash_speed("wp256", sec, generic_hash_speed_template); + if (mode > 400 && mode < 500) break; + + case 408: + test_ahash_speed("wp384", sec, generic_hash_speed_template); + if (mode > 400 && mode < 500) break; + + case 409: + test_ahash_speed("wp512", sec, generic_hash_speed_template); + if (mode > 400 && mode < 500) break; + + case 410: + test_ahash_speed("tgr128", sec, generic_hash_speed_template); + if (mode > 400 && mode < 500) break; + + case 411: + test_ahash_speed("tgr160", sec, generic_hash_speed_template); + if (mode > 400 && mode < 500) break; + + case 412: + test_ahash_speed("tgr192", sec, generic_hash_speed_template); + if (mode > 400 && mode < 500) break; + + case 413: + test_ahash_speed("sha224", sec, generic_hash_speed_template); + if (mode > 400 && mode < 500) break; + + case 414: + test_ahash_speed("rmd128", sec, generic_hash_speed_template); + if (mode > 400 && mode < 500) break; + + case 415: + test_ahash_speed("rmd160", sec, generic_hash_speed_template); + if (mode > 400 && mode < 500) break; + + case 416: + test_ahash_speed("rmd256", sec, generic_hash_speed_template); + if (mode > 400 && mode < 500) break; + + case 417: + test_ahash_speed("rmd320", sec, generic_hash_speed_template); + if (mode > 400 && mode < 500) break; + + case 499: + break; + + case 500: + test_acipher_speed("ecb(aes)", ENCRYPT, sec, NULL, 0, + speed_template_16_24_32); + test_acipher_speed("ecb(aes)", DECRYPT, sec, NULL, 0, + speed_template_16_24_32); + test_acipher_speed("cbc(aes)", ENCRYPT, sec, NULL, 0, + speed_template_16_24_32); + test_acipher_speed("cbc(aes)", DECRYPT, sec, NULL, 0, + speed_template_16_24_32); + test_acipher_speed("lrw(aes)", ENCRYPT, sec, NULL, 0, + speed_template_32_40_48); + test_acipher_speed("lrw(aes)", DECRYPT, sec, NULL, 0, + speed_template_32_40_48); + test_acipher_speed("xts(aes)", ENCRYPT, sec, NULL, 0, + speed_template_32_48_64); + test_acipher_speed("xts(aes)", DECRYPT, sec, NULL, 0, + speed_template_32_48_64); + test_acipher_speed("ctr(aes)", ENCRYPT, 
sec, NULL, 0, + speed_template_16_24_32); + test_acipher_speed("ctr(aes)", DECRYPT, sec, NULL, 0, + speed_template_16_24_32); + break; + + case 501: + test_acipher_speed("ecb(des3_ede)", ENCRYPT, sec, + des3_speed_template, DES3_SPEED_VECTORS, + speed_template_24); + test_acipher_speed("ecb(des3_ede)", DECRYPT, sec, + des3_speed_template, DES3_SPEED_VECTORS, + speed_template_24); + test_acipher_speed("cbc(des3_ede)", ENCRYPT, sec, + des3_speed_template, DES3_SPEED_VECTORS, + speed_template_24); + test_acipher_speed("cbc(des3_ede)", DECRYPT, sec, + des3_speed_template, DES3_SPEED_VECTORS, + speed_template_24); + break; + + case 502: + test_acipher_speed("ecb(des)", ENCRYPT, sec, NULL, 0, + speed_template_8); + test_acipher_speed("ecb(des)", DECRYPT, sec, NULL, 0, + speed_template_8); + test_acipher_speed("cbc(des)", ENCRYPT, sec, NULL, 0, + speed_template_8); + test_acipher_speed("cbc(des)", DECRYPT, sec, NULL, 0, + speed_template_8); + break; + + case 503: + test_acipher_speed("ecb(serpent)", ENCRYPT, sec, NULL, 0, + speed_template_16_32); + test_acipher_speed("ecb(serpent)", DECRYPT, sec, NULL, 0, + speed_template_16_32); + test_acipher_speed("cbc(serpent)", ENCRYPT, sec, NULL, 0, + speed_template_16_32); + test_acipher_speed("cbc(serpent)", DECRYPT, sec, NULL, 0, + speed_template_16_32); + test_acipher_speed("ctr(serpent)", ENCRYPT, sec, NULL, 0, + speed_template_16_32); + test_acipher_speed("ctr(serpent)", DECRYPT, sec, NULL, 0, + speed_template_16_32); + test_acipher_speed("lrw(serpent)", ENCRYPT, sec, NULL, 0, + speed_template_32_48); + test_acipher_speed("lrw(serpent)", DECRYPT, sec, NULL, 0, + speed_template_32_48); + test_acipher_speed("xts(serpent)", ENCRYPT, sec, NULL, 0, + speed_template_32_64); + test_acipher_speed("xts(serpent)", DECRYPT, sec, NULL, 0, + speed_template_32_64); + break; + + case 1000: + test_available(); + break; + } + + return ret; +} + +static int do_alg_test(const char *alg, u32 type, u32 mask) +{ + return crypto_has_alg(alg, type, mask ?: CRYPTO_ALG_TYPE_MASK) ? + 0 : -ENOENT; +} + +static int __init tcrypt_mod_init(void) +{ + int err = -ENOMEM; + int i; + + for (i = 0; i < TVMEMSIZE; i++) { + tvmem[i] = (void *)__get_free_page(GFP_KERNEL); + if (!tvmem[i]) + goto err_free_tv; + } + + if (alg) + err = do_alg_test(alg, type, mask); + else + err = do_test(mode); + + if (err) { + printk(KERN_ERR "tcrypt: one or more tests failed!\n"); + goto err_free_tv; + } + + /* We intentionaly return -EAGAIN to prevent keeping the module, + * unless we're running in fips mode. It does all its work from + * init() and doesn't offer any runtime functionality, but in + * the fips case, checking for a successful load is helpful. + * => we don't need it in the memory, do we? + * -- mludvig + */ + if (!fips_enabled) + err = -EAGAIN; + +err_free_tv: + for (i = 0; i < TVMEMSIZE && tvmem[i]; i++) + free_page((unsigned long)tvmem[i]); + + return err; +} + +/* + * If an init function is provided, an exit function must also be provided + * to allow module unload. 
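+ *
+ * Typical usage (illustrative): "modprobe tcrypt mode=200 sec=1" runs
+ * the AES speed tests of case 200, while mode=0 (case 0 above) runs
+ * the whole battery of correctness tests 1-199.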
+ */ +static void __exit tcrypt_mod_fini(void) { } + +module_init(tcrypt_mod_init); +module_exit(tcrypt_mod_fini); + +module_param(alg, charp, 0); +module_param(type, uint, 0); +module_param(mask, uint, 0); +module_param(mode, int, 0); +module_param(sec, uint, 0); +MODULE_PARM_DESC(sec, "Length in seconds of speed tests " + "(defaults to zero which uses CPU cycles instead)"); + +MODULE_LICENSE("GPL"); +MODULE_DESCRIPTION("Quick & dirty crypto testing module"); +MODULE_AUTHOR("James Morris <jmorris@intercode.com.au>"); diff --git a/crypto/tcrypt.h b/crypto/tcrypt.h new file mode 100644 index 00000000..5be1fc8c --- /dev/null +++ b/crypto/tcrypt.h @@ -0,0 +1,117 @@ +/* + * Quick & dirty crypto testing module. + * + * This will only exist until we have a better testing mechanism + * (e.g. a char device). + * + * Copyright (c) 2002 James Morris <jmorris@intercode.com.au> + * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org> + * Copyright (c) 2007 Nokia Siemens Networks + * + * This program is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License as published by the Free + * Software Foundation; either version 2 of the License, or (at your option) + * any later version. + * + */ +#ifndef _CRYPTO_TCRYPT_H +#define _CRYPTO_TCRYPT_H + +struct cipher_speed_template { + const char *key; + unsigned int klen; +}; + +struct hash_speed { + unsigned int blen; /* buffer length */ + unsigned int plen; /* per-update length */ + unsigned int klen; /* key length */ +}; + +/* + * DES test vectors. + */ +#define DES3_SPEED_VECTORS 1 + +static struct cipher_speed_template des3_speed_template[] = { + { + .key = "\x01\x23\x45\x67\x89\xab\xcd\xef" + "\x55\x55\x55\x55\x55\x55\x55\x55" + "\xfe\xdc\xba\x98\x76\x54\x32\x10", + .klen = 24, + } +}; + +/* + * Cipher speed tests + */ +static u8 speed_template_8[] = {8, 0}; +static u8 speed_template_24[] = {24, 0}; +static u8 speed_template_8_32[] = {8, 32, 0}; +static u8 speed_template_16_32[] = {16, 32, 0}; +static u8 speed_template_16_24_32[] = {16, 24, 32, 0}; +static u8 speed_template_32_40_48[] = {32, 40, 48, 0}; +static u8 speed_template_32_48[] = {32, 48, 0}; +static u8 speed_template_32_48_64[] = {32, 48, 64, 0}; +static u8 speed_template_32_64[] = {32, 64, 0}; + +/* + * Digest speed tests + */ +static struct hash_speed generic_hash_speed_template[] = { + { .blen = 16, .plen = 16, }, + { .blen = 64, .plen = 16, }, + { .blen = 64, .plen = 64, }, + { .blen = 256, .plen = 16, }, + { .blen = 256, .plen = 64, }, + { .blen = 256, .plen = 256, }, + { .blen = 1024, .plen = 16, }, + { .blen = 1024, .plen = 256, }, + { .blen = 1024, .plen = 1024, }, + { .blen = 2048, .plen = 16, }, + { .blen = 2048, .plen = 256, }, + { .blen = 2048, .plen = 1024, }, + { .blen = 2048, .plen = 2048, }, + { .blen = 4096, .plen = 16, }, + { .blen = 4096, .plen = 256, }, + { .blen = 4096, .plen = 1024, }, + { .blen = 4096, .plen = 4096, }, + { .blen = 8192, .plen = 16, }, + { .blen = 8192, .plen = 256, }, + { .blen = 8192, .plen = 1024, }, + { .blen = 8192, .plen = 4096, }, + { .blen = 8192, .plen = 8192, }, + + /* End marker */ + { .blen = 0, .plen = 0, } +}; + +static struct hash_speed hash_speed_template_16[] = { + { .blen = 16, .plen = 16, .klen = 16, }, + { .blen = 64, .plen = 16, .klen = 16, }, + { .blen = 64, .plen = 64, .klen = 16, }, + { .blen = 256, .plen = 16, .klen = 16, }, + { .blen = 256, .plen = 64, .klen = 16, }, + { .blen = 256, .plen = 256, .klen = 16, }, + { .blen = 1024, .plen = 16, .klen = 16, }, + { .blen = 1024, 
.plen = 256, .klen = 16, }, + { .blen = 1024, .plen = 1024, .klen = 16, }, + { .blen = 2048, .plen = 16, .klen = 16, }, + { .blen = 2048, .plen = 256, .klen = 16, }, + { .blen = 2048, .plen = 1024, .klen = 16, }, + { .blen = 2048, .plen = 2048, .klen = 16, }, + { .blen = 4096, .plen = 16, .klen = 16, }, + { .blen = 4096, .plen = 256, .klen = 16, }, + { .blen = 4096, .plen = 1024, .klen = 16, }, + { .blen = 4096, .plen = 4096, .klen = 16, }, + { .blen = 8192, .plen = 16, .klen = 16, }, + { .blen = 8192, .plen = 256, .klen = 16, }, + { .blen = 8192, .plen = 1024, .klen = 16, }, + { .blen = 8192, .plen = 4096, .klen = 16, }, + { .blen = 8192, .plen = 8192, .klen = 16, }, + + /* End marker */ + { .blen = 0, .plen = 0, .klen = 0, } +}; + +#endif /* _CRYPTO_TCRYPT_H */ diff --git a/crypto/tea.c b/crypto/tea.c new file mode 100644 index 00000000..412bc74f --- /dev/null +++ b/crypto/tea.c @@ -0,0 +1,309 @@ +/* + * Cryptographic API. + * + * TEA, XTEA, and XETA crypto alogrithms + * + * The TEA and Xtended TEA algorithms were developed by David Wheeler + * and Roger Needham at the Computer Laboratory of Cambridge University. + * + * Due to the order of evaluation in XTEA many people have incorrectly + * implemented it. XETA (XTEA in the wrong order), exists for + * compatibility with these implementations. + * + * Copyright (c) 2004 Aaron Grothe ajgrothe@yahoo.com + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 2 of the License, or + * (at your option) any later version. + * + */ + +#include <linux/init.h> +#include <linux/module.h> +#include <linux/mm.h> +#include <asm/byteorder.h> +#include <linux/crypto.h> +#include <linux/types.h> + +#define TEA_KEY_SIZE 16 +#define TEA_BLOCK_SIZE 8 +#define TEA_ROUNDS 32 +#define TEA_DELTA 0x9e3779b9 + +#define XTEA_KEY_SIZE 16 +#define XTEA_BLOCK_SIZE 8 +#define XTEA_ROUNDS 32 +#define XTEA_DELTA 0x9e3779b9 + +struct tea_ctx { + u32 KEY[4]; +}; + +struct xtea_ctx { + u32 KEY[4]; +}; + +static int tea_setkey(struct crypto_tfm *tfm, const u8 *in_key, + unsigned int key_len) +{ + struct tea_ctx *ctx = crypto_tfm_ctx(tfm); + const __le32 *key = (const __le32 *)in_key; + + ctx->KEY[0] = le32_to_cpu(key[0]); + ctx->KEY[1] = le32_to_cpu(key[1]); + ctx->KEY[2] = le32_to_cpu(key[2]); + ctx->KEY[3] = le32_to_cpu(key[3]); + + return 0; + +} + +static void tea_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) +{ + u32 y, z, n, sum = 0; + u32 k0, k1, k2, k3; + struct tea_ctx *ctx = crypto_tfm_ctx(tfm); + const __le32 *in = (const __le32 *)src; + __le32 *out = (__le32 *)dst; + + y = le32_to_cpu(in[0]); + z = le32_to_cpu(in[1]); + + k0 = ctx->KEY[0]; + k1 = ctx->KEY[1]; + k2 = ctx->KEY[2]; + k3 = ctx->KEY[3]; + + n = TEA_ROUNDS; + + while (n-- > 0) { + sum += TEA_DELTA; + y += ((z << 4) + k0) ^ (z + sum) ^ ((z >> 5) + k1); + z += ((y << 4) + k2) ^ (y + sum) ^ ((y >> 5) + k3); + } + + out[0] = cpu_to_le32(y); + out[1] = cpu_to_le32(z); +} + +static void tea_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) +{ + u32 y, z, n, sum; + u32 k0, k1, k2, k3; + struct tea_ctx *ctx = crypto_tfm_ctx(tfm); + const __le32 *in = (const __le32 *)src; + __le32 *out = (__le32 *)dst; + + y = le32_to_cpu(in[0]); + z = le32_to_cpu(in[1]); + + k0 = ctx->KEY[0]; + k1 = ctx->KEY[1]; + k2 = ctx->KEY[2]; + k3 = ctx->KEY[3]; + + sum = TEA_DELTA << 5; + + n = TEA_ROUNDS; + + while (n-- > 0) { + z -= ((y << 4) + k2) ^ (y + sum) ^ ((y >> 5) + 
k3); + y -= ((z << 4) + k0) ^ (z + sum) ^ ((z >> 5) + k1); + sum -= TEA_DELTA; + } + + out[0] = cpu_to_le32(y); + out[1] = cpu_to_le32(z); +} + +static int xtea_setkey(struct crypto_tfm *tfm, const u8 *in_key, + unsigned int key_len) +{ + struct xtea_ctx *ctx = crypto_tfm_ctx(tfm); + const __le32 *key = (const __le32 *)in_key; + + ctx->KEY[0] = le32_to_cpu(key[0]); + ctx->KEY[1] = le32_to_cpu(key[1]); + ctx->KEY[2] = le32_to_cpu(key[2]); + ctx->KEY[3] = le32_to_cpu(key[3]); + + return 0; + +} + +static void xtea_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) +{ + u32 y, z, sum = 0; + u32 limit = XTEA_DELTA * XTEA_ROUNDS; + struct xtea_ctx *ctx = crypto_tfm_ctx(tfm); + const __le32 *in = (const __le32 *)src; + __le32 *out = (__le32 *)dst; + + y = le32_to_cpu(in[0]); + z = le32_to_cpu(in[1]); + + while (sum != limit) { + y += ((z << 4 ^ z >> 5) + z) ^ (sum + ctx->KEY[sum&3]); + sum += XTEA_DELTA; + z += ((y << 4 ^ y >> 5) + y) ^ (sum + ctx->KEY[sum>>11 &3]); + } + + out[0] = cpu_to_le32(y); + out[1] = cpu_to_le32(z); +} + +static void xtea_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) +{ + u32 y, z, sum; + struct tea_ctx *ctx = crypto_tfm_ctx(tfm); + const __le32 *in = (const __le32 *)src; + __le32 *out = (__le32 *)dst; + + y = le32_to_cpu(in[0]); + z = le32_to_cpu(in[1]); + + sum = XTEA_DELTA * XTEA_ROUNDS; + + while (sum) { + z -= ((y << 4 ^ y >> 5) + y) ^ (sum + ctx->KEY[sum>>11 & 3]); + sum -= XTEA_DELTA; + y -= ((z << 4 ^ z >> 5) + z) ^ (sum + ctx->KEY[sum & 3]); + } + + out[0] = cpu_to_le32(y); + out[1] = cpu_to_le32(z); +} + + +static void xeta_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) +{ + u32 y, z, sum = 0; + u32 limit = XTEA_DELTA * XTEA_ROUNDS; + struct xtea_ctx *ctx = crypto_tfm_ctx(tfm); + const __le32 *in = (const __le32 *)src; + __le32 *out = (__le32 *)dst; + + y = le32_to_cpu(in[0]); + z = le32_to_cpu(in[1]); + + while (sum != limit) { + y += (z << 4 ^ z >> 5) + (z ^ sum) + ctx->KEY[sum&3]; + sum += XTEA_DELTA; + z += (y << 4 ^ y >> 5) + (y ^ sum) + ctx->KEY[sum>>11 &3]; + } + + out[0] = cpu_to_le32(y); + out[1] = cpu_to_le32(z); +} + +static void xeta_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) +{ + u32 y, z, sum; + struct tea_ctx *ctx = crypto_tfm_ctx(tfm); + const __le32 *in = (const __le32 *)src; + __le32 *out = (__le32 *)dst; + + y = le32_to_cpu(in[0]); + z = le32_to_cpu(in[1]); + + sum = XTEA_DELTA * XTEA_ROUNDS; + + while (sum) { + z -= (y << 4 ^ y >> 5) + (y ^ sum) + ctx->KEY[sum>>11 & 3]; + sum -= XTEA_DELTA; + y -= (z << 4 ^ z >> 5) + (z ^ sum) + ctx->KEY[sum & 3]; + } + + out[0] = cpu_to_le32(y); + out[1] = cpu_to_le32(z); +} + +static struct crypto_alg tea_alg = { + .cra_name = "tea", + .cra_flags = CRYPTO_ALG_TYPE_CIPHER, + .cra_blocksize = TEA_BLOCK_SIZE, + .cra_ctxsize = sizeof (struct tea_ctx), + .cra_alignmask = 3, + .cra_module = THIS_MODULE, + .cra_list = LIST_HEAD_INIT(tea_alg.cra_list), + .cra_u = { .cipher = { + .cia_min_keysize = TEA_KEY_SIZE, + .cia_max_keysize = TEA_KEY_SIZE, + .cia_setkey = tea_setkey, + .cia_encrypt = tea_encrypt, + .cia_decrypt = tea_decrypt } } +}; + +static struct crypto_alg xtea_alg = { + .cra_name = "xtea", + .cra_flags = CRYPTO_ALG_TYPE_CIPHER, + .cra_blocksize = XTEA_BLOCK_SIZE, + .cra_ctxsize = sizeof (struct xtea_ctx), + .cra_alignmask = 3, + .cra_module = THIS_MODULE, + .cra_list = LIST_HEAD_INIT(xtea_alg.cra_list), + .cra_u = { .cipher = { + .cia_min_keysize = XTEA_KEY_SIZE, + .cia_max_keysize = XTEA_KEY_SIZE, + .cia_setkey = xtea_setkey, + .cia_encrypt = xtea_encrypt, + 
.cia_decrypt = xtea_decrypt } } +}; + +static struct crypto_alg xeta_alg = { + .cra_name = "xeta", + .cra_flags = CRYPTO_ALG_TYPE_CIPHER, + .cra_blocksize = XTEA_BLOCK_SIZE, + .cra_ctxsize = sizeof (struct xtea_ctx), + .cra_alignmask = 3, + .cra_module = THIS_MODULE, + .cra_list = LIST_HEAD_INIT(xtea_alg.cra_list), + .cra_u = { .cipher = { + .cia_min_keysize = XTEA_KEY_SIZE, + .cia_max_keysize = XTEA_KEY_SIZE, + .cia_setkey = xtea_setkey, + .cia_encrypt = xeta_encrypt, + .cia_decrypt = xeta_decrypt } } +}; + +static int __init tea_mod_init(void) +{ + int ret = 0; + + ret = crypto_register_alg(&tea_alg); + if (ret < 0) + goto out; + + ret = crypto_register_alg(&xtea_alg); + if (ret < 0) { + crypto_unregister_alg(&tea_alg); + goto out; + } + + ret = crypto_register_alg(&xeta_alg); + if (ret < 0) { + crypto_unregister_alg(&tea_alg); + crypto_unregister_alg(&xtea_alg); + goto out; + } + +out: + return ret; +} + +static void __exit tea_mod_fini(void) +{ + crypto_unregister_alg(&tea_alg); + crypto_unregister_alg(&xtea_alg); + crypto_unregister_alg(&xeta_alg); +} + +MODULE_ALIAS("xtea"); +MODULE_ALIAS("xeta"); + +module_init(tea_mod_init); +module_exit(tea_mod_fini); + +MODULE_LICENSE("GPL"); +MODULE_DESCRIPTION("TEA, XTEA & XETA Cryptographic Algorithms"); diff --git a/crypto/testmgr.c b/crypto/testmgr.c new file mode 100644 index 00000000..5674878f --- /dev/null +++ b/crypto/testmgr.c @@ -0,0 +1,2814 @@ +/* + * Algorithm testing framework and tests. + * + * Copyright (c) 2002 James Morris <jmorris@intercode.com.au> + * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org> + * Copyright (c) 2007 Nokia Siemens Networks + * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au> + * + * Updated RFC4106 AES-GCM testing. + * Authors: Aidan O'Mahony (aidan.o.mahony@intel.com) + * Adrian Hoban <adrian.hoban@intel.com> + * Gabriele Paoloni <gabriele.paoloni@intel.com> + * Tadeusz Struk (tadeusz.struk@intel.com) + * Copyright (c) 2010, Intel Corporation. + * + * This program is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License as published by the Free + * Software Foundation; either version 2 of the License, or (at your option) + * any later version. + * + */ + +#include <crypto/hash.h> +#include <linux/err.h> +#include <linux/module.h> +#include <linux/scatterlist.h> +#include <linux/slab.h> +#include <linux/string.h> +#include <crypto/rng.h> + +#include "internal.h" + +#ifdef CONFIG_CRYPTO_MANAGER_DISABLE_TESTS + +/* a perfect nop */ +int alg_test(const char *driver, const char *alg, u32 type, u32 mask) +{ + return 0; +} + +#else + +#include "testmgr.h" + +/* + * Need slab memory for testing (size in number of pages). + */ +#define XBUFSIZE 8 + +/* + * Indexes into the xbuf to simulate cross-page access. 
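+ *
+ * Each IDX value is decomposed as xbuf[IDX >> PAGE_SHIFT] +
+ * offset_in_page(IDX), so chunked ("np") test vectors are copied to
+ * odd offsets and may straddle page boundaries.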
+ */ +#define IDX1 32 +#define IDX2 32400 +#define IDX3 1 +#define IDX4 8193 +#define IDX5 22222 +#define IDX6 17101 +#define IDX7 27333 +#define IDX8 3000 + +/* +* Used by test_cipher() +*/ +#define ENCRYPT 1 +#define DECRYPT 0 + +struct tcrypt_result { + struct completion completion; + int err; +}; + +struct aead_test_suite { + struct { + struct aead_testvec *vecs; + unsigned int count; + } enc, dec; +}; + +struct cipher_test_suite { + struct { + struct cipher_testvec *vecs; + unsigned int count; + } enc, dec; +}; + +struct comp_test_suite { + struct { + struct comp_testvec *vecs; + unsigned int count; + } comp, decomp; +}; + +struct pcomp_test_suite { + struct { + struct pcomp_testvec *vecs; + unsigned int count; + } comp, decomp; +}; + +struct hash_test_suite { + struct hash_testvec *vecs; + unsigned int count; +}; + +struct cprng_test_suite { + struct cprng_testvec *vecs; + unsigned int count; +}; + +struct alg_test_desc { + const char *alg; + int (*test)(const struct alg_test_desc *desc, const char *driver, + u32 type, u32 mask); + int fips_allowed; /* set if alg is allowed in fips mode */ + + union { + struct aead_test_suite aead; + struct cipher_test_suite cipher; + struct comp_test_suite comp; + struct pcomp_test_suite pcomp; + struct hash_test_suite hash; + struct cprng_test_suite cprng; + } suite; +}; + +static unsigned int IDX[8] = { IDX1, IDX2, IDX3, IDX4, IDX5, IDX6, IDX7, IDX8 }; + +static void hexdump(unsigned char *buf, unsigned int len) +{ + print_hex_dump(KERN_CONT, "", DUMP_PREFIX_OFFSET, + 16, 1, + buf, len, false); +} + +static void tcrypt_complete(struct crypto_async_request *req, int err) +{ + struct tcrypt_result *res = req->data; + + if (err == -EINPROGRESS) + return; + + res->err = err; + complete(&res->completion); +} + +static int testmgr_alloc_buf(char *buf[XBUFSIZE]) +{ + int i; + + for (i = 0; i < XBUFSIZE; i++) { + buf[i] = (void *)__get_free_page(GFP_KERNEL); + if (!buf[i]) + goto err_free_buf; + } + + return 0; + +err_free_buf: + while (i-- > 0) + free_page((unsigned long)buf[i]); + + return -ENOMEM; +} + +static void testmgr_free_buf(char *buf[XBUFSIZE]) +{ + int i; + + for (i = 0; i < XBUFSIZE; i++) + free_page((unsigned long)buf[i]); +} + +static int do_one_async_hash_op(struct ahash_request *req, + struct tcrypt_result *tr, + int ret) +{ + if (ret == -EINPROGRESS || ret == -EBUSY) { + ret = wait_for_completion_interruptible(&tr->completion); + if (!ret) + ret = tr->err; + INIT_COMPLETION(tr->completion); + } + return ret; +} + +static int test_hash(struct crypto_ahash *tfm, struct hash_testvec *template, + unsigned int tcount, bool use_digest) +{ + const char *algo = crypto_tfm_alg_driver_name(crypto_ahash_tfm(tfm)); + unsigned int i, j, k, temp; + struct scatterlist sg[8]; + char result[64]; + struct ahash_request *req; + struct tcrypt_result tresult; + void *hash_buff; + char *xbuf[XBUFSIZE]; + int ret = -ENOMEM; + + if (testmgr_alloc_buf(xbuf)) + goto out_nobuf; + + init_completion(&tresult.completion); + + req = ahash_request_alloc(tfm, GFP_KERNEL); + if (!req) { + printk(KERN_ERR "alg: hash: Failed to allocate request for " + "%s\n", algo); + goto out_noreq; + } + ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG, + tcrypt_complete, &tresult); + + j = 0; + for (i = 0; i < tcount; i++) { + if (template[i].np) + continue; + + j++; + memset(result, 0, 64); + + hash_buff = xbuf[0]; + + memcpy(hash_buff, template[i].plaintext, template[i].psize); + sg_init_one(&sg[0], hash_buff, template[i].psize); + + if (template[i].ksize) { + 
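+ /* Keyed hashes (e.g. hmac(sha1), xcbc(aes)) carry a key in the
+  * test vector; clear any leftover flags before setting it. */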
crypto_ahash_clear_flags(tfm, ~0); + ret = crypto_ahash_setkey(tfm, template[i].key, + template[i].ksize); + if (ret) { + printk(KERN_ERR "alg: hash: setkey failed on " + "test %d for %s: ret=%d\n", j, algo, + -ret); + goto out; + } + } + + ahash_request_set_crypt(req, sg, result, template[i].psize); + if (use_digest) { + ret = do_one_async_hash_op(req, &tresult, + crypto_ahash_digest(req)); + if (ret) { + pr_err("alg: hash: digest failed on test %d " + "for %s: ret=%d\n", j, algo, -ret); + goto out; + } + } else { + ret = do_one_async_hash_op(req, &tresult, + crypto_ahash_init(req)); + if (ret) { + pr_err("alt: hash: init failed on test %d " + "for %s: ret=%d\n", j, algo, -ret); + goto out; + } + ret = do_one_async_hash_op(req, &tresult, + crypto_ahash_update(req)); + if (ret) { + pr_err("alt: hash: update failed on test %d " + "for %s: ret=%d\n", j, algo, -ret); + goto out; + } + ret = do_one_async_hash_op(req, &tresult, + crypto_ahash_final(req)); + if (ret) { + pr_err("alt: hash: final failed on test %d " + "for %s: ret=%d\n", j, algo, -ret); + goto out; + } + } + + if (memcmp(result, template[i].digest, + crypto_ahash_digestsize(tfm))) { + printk(KERN_ERR "alg: hash: Test %d failed for %s\n", + j, algo); + hexdump(result, crypto_ahash_digestsize(tfm)); + ret = -EINVAL; + goto out; + } + } + + j = 0; + for (i = 0; i < tcount; i++) { + if (template[i].np) { + j++; + memset(result, 0, 64); + + temp = 0; + sg_init_table(sg, template[i].np); + ret = -EINVAL; + for (k = 0; k < template[i].np; k++) { + if (WARN_ON(offset_in_page(IDX[k]) + + template[i].tap[k] > PAGE_SIZE)) + goto out; + sg_set_buf(&sg[k], + memcpy(xbuf[IDX[k] >> PAGE_SHIFT] + + offset_in_page(IDX[k]), + template[i].plaintext + temp, + template[i].tap[k]), + template[i].tap[k]); + temp += template[i].tap[k]; + } + + if (template[i].ksize) { + crypto_ahash_clear_flags(tfm, ~0); + ret = crypto_ahash_setkey(tfm, template[i].key, + template[i].ksize); + + if (ret) { + printk(KERN_ERR "alg: hash: setkey " + "failed on chunking test %d " + "for %s: ret=%d\n", j, algo, + -ret); + goto out; + } + } + + ahash_request_set_crypt(req, sg, result, + template[i].psize); + ret = crypto_ahash_digest(req); + switch (ret) { + case 0: + break; + case -EINPROGRESS: + case -EBUSY: + ret = wait_for_completion_interruptible( + &tresult.completion); + if (!ret && !(ret = tresult.err)) { + INIT_COMPLETION(tresult.completion); + break; + } + /* fall through */ + default: + printk(KERN_ERR "alg: hash: digest failed " + "on chunking test %d for %s: " + "ret=%d\n", j, algo, -ret); + goto out; + } + + if (memcmp(result, template[i].digest, + crypto_ahash_digestsize(tfm))) { + printk(KERN_ERR "alg: hash: Chunking test %d " + "failed for %s\n", j, algo); + hexdump(result, crypto_ahash_digestsize(tfm)); + ret = -EINVAL; + goto out; + } + } + } + + ret = 0; + +out: + ahash_request_free(req); +out_noreq: + testmgr_free_buf(xbuf); +out_nobuf: + return ret; +} + +static int test_aead(struct crypto_aead *tfm, int enc, + struct aead_testvec *template, unsigned int tcount) +{ + const char *algo = crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm)); + unsigned int i, j, k, n, temp; + int ret = -ENOMEM; + char *q; + char *key; + struct aead_request *req; + struct scatterlist sg[8]; + struct scatterlist asg[8]; + const char *e; + struct tcrypt_result result; + unsigned int authsize; + void *input; + void *assoc; + char iv[MAX_IVLEN]; + char *xbuf[XBUFSIZE]; + char *axbuf[XBUFSIZE]; + + if (testmgr_alloc_buf(xbuf)) + goto out_noxbuf; + if (testmgr_alloc_buf(axbuf)) + goto 
out_noaxbuf; + + if (enc == ENCRYPT) + e = "encryption"; + else + e = "decryption"; + + init_completion(&result.completion); + + req = aead_request_alloc(tfm, GFP_KERNEL); + if (!req) { + printk(KERN_ERR "alg: aead: Failed to allocate request for " + "%s\n", algo); + goto out; + } + + aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG, + tcrypt_complete, &result); + + for (i = 0, j = 0; i < tcount; i++) { + if (!template[i].np) { + j++; + + /* some tepmplates have no input data but they will + * touch input + */ + input = xbuf[0]; + assoc = axbuf[0]; + + ret = -EINVAL; + if (WARN_ON(template[i].ilen > PAGE_SIZE || + template[i].alen > PAGE_SIZE)) + goto out; + + memcpy(input, template[i].input, template[i].ilen); + memcpy(assoc, template[i].assoc, template[i].alen); + if (template[i].iv) + memcpy(iv, template[i].iv, MAX_IVLEN); + else + memset(iv, 0, MAX_IVLEN); + + crypto_aead_clear_flags(tfm, ~0); + if (template[i].wk) + crypto_aead_set_flags( + tfm, CRYPTO_TFM_REQ_WEAK_KEY); + + key = template[i].key; + + ret = crypto_aead_setkey(tfm, key, + template[i].klen); + if (!ret == template[i].fail) { + printk(KERN_ERR "alg: aead: setkey failed on " + "test %d for %s: flags=%x\n", j, algo, + crypto_aead_get_flags(tfm)); + goto out; + } else if (ret) + continue; + + authsize = abs(template[i].rlen - template[i].ilen); + ret = crypto_aead_setauthsize(tfm, authsize); + if (ret) { + printk(KERN_ERR "alg: aead: Failed to set " + "authsize to %u on test %d for %s\n", + authsize, j, algo); + goto out; + } + + sg_init_one(&sg[0], input, + template[i].ilen + (enc ? authsize : 0)); + + sg_init_one(&asg[0], assoc, template[i].alen); + + aead_request_set_crypt(req, sg, sg, + template[i].ilen, iv); + + aead_request_set_assoc(req, asg, template[i].alen); + + ret = enc ? 
+ crypto_aead_encrypt(req) : + crypto_aead_decrypt(req); + + switch (ret) { + case 0: + if (template[i].novrfy) { + /* verification was supposed to fail */ + printk(KERN_ERR "alg: aead: %s failed " + "on test %d for %s: ret was 0, " + "expected -EBADMSG\n", + e, j, algo); + /* so really, we got a bad message */ + ret = -EBADMSG; + goto out; + } + break; + case -EINPROGRESS: + case -EBUSY: + ret = wait_for_completion_interruptible( + &result.completion); + if (!ret && !(ret = result.err)) { + INIT_COMPLETION(result.completion); + break; + } + case -EBADMSG: + if (template[i].novrfy) + /* verification failure was expected */ + continue; + /* fall through */ + default: + printk(KERN_ERR "alg: aead: %s failed on test " + "%d for %s: ret=%d\n", e, j, algo, -ret); + goto out; + } + + q = input; + if (memcmp(q, template[i].result, template[i].rlen)) { + printk(KERN_ERR "alg: aead: Test %d failed on " + "%s for %s\n", j, e, algo); + hexdump(q, template[i].rlen); + ret = -EINVAL; + goto out; + } + } + } + + for (i = 0, j = 0; i < tcount; i++) { + if (template[i].np) { + j++; + + if (template[i].iv) + memcpy(iv, template[i].iv, MAX_IVLEN); + else + memset(iv, 0, MAX_IVLEN); + + crypto_aead_clear_flags(tfm, ~0); + if (template[i].wk) + crypto_aead_set_flags( + tfm, CRYPTO_TFM_REQ_WEAK_KEY); + key = template[i].key; + + ret = crypto_aead_setkey(tfm, key, template[i].klen); + if (!ret == template[i].fail) { + printk(KERN_ERR "alg: aead: setkey failed on " + "chunk test %d for %s: flags=%x\n", j, + algo, crypto_aead_get_flags(tfm)); + goto out; + } else if (ret) + continue; + + authsize = abs(template[i].rlen - template[i].ilen); + + ret = -EINVAL; + sg_init_table(sg, template[i].np); + for (k = 0, temp = 0; k < template[i].np; k++) { + if (WARN_ON(offset_in_page(IDX[k]) + + template[i].tap[k] > PAGE_SIZE)) + goto out; + + q = xbuf[IDX[k] >> PAGE_SHIFT] + + offset_in_page(IDX[k]); + + memcpy(q, template[i].input + temp, + template[i].tap[k]); + + n = template[i].tap[k]; + if (k == template[i].np - 1 && enc) + n += authsize; + if (offset_in_page(q) + n < PAGE_SIZE) + q[n] = 0; + + sg_set_buf(&sg[k], q, template[i].tap[k]); + temp += template[i].tap[k]; + } + + ret = crypto_aead_setauthsize(tfm, authsize); + if (ret) { + printk(KERN_ERR "alg: aead: Failed to set " + "authsize to %u on chunk test %d for " + "%s\n", authsize, j, algo); + goto out; + } + + if (enc) { + if (WARN_ON(sg[k - 1].offset + + sg[k - 1].length + authsize > + PAGE_SIZE)) { + ret = -EINVAL; + goto out; + } + + sg[k - 1].length += authsize; + } + + sg_init_table(asg, template[i].anp); + ret = -EINVAL; + for (k = 0, temp = 0; k < template[i].anp; k++) { + if (WARN_ON(offset_in_page(IDX[k]) + + template[i].atap[k] > PAGE_SIZE)) + goto out; + sg_set_buf(&asg[k], + memcpy(axbuf[IDX[k] >> PAGE_SHIFT] + + offset_in_page(IDX[k]), + template[i].assoc + temp, + template[i].atap[k]), + template[i].atap[k]); + temp += template[i].atap[k]; + } + + aead_request_set_crypt(req, sg, sg, + template[i].ilen, + iv); + + aead_request_set_assoc(req, asg, template[i].alen); + + ret = enc ? 
+ crypto_aead_encrypt(req) : + crypto_aead_decrypt(req); + + switch (ret) { + case 0: + if (template[i].novrfy) { + /* verification was supposed to fail */ + printk(KERN_ERR "alg: aead: %s failed " + "on chunk test %d for %s: ret " + "was 0, expected -EBADMSG\n", + e, j, algo); + /* so really, we got a bad message */ + ret = -EBADMSG; + goto out; + } + break; + case -EINPROGRESS: + case -EBUSY: + ret = wait_for_completion_interruptible( + &result.completion); + if (!ret && !(ret = result.err)) { + INIT_COMPLETION(result.completion); + break; + } + case -EBADMSG: + if (template[i].novrfy) + /* verification failure was expected */ + continue; + /* fall through */ + default: + printk(KERN_ERR "alg: aead: %s failed on " + "chunk test %d for %s: ret=%d\n", e, j, + algo, -ret); + goto out; + } + + ret = -EINVAL; + for (k = 0, temp = 0; k < template[i].np; k++) { + q = xbuf[IDX[k] >> PAGE_SHIFT] + + offset_in_page(IDX[k]); + + n = template[i].tap[k]; + if (k == template[i].np - 1) + n += enc ? authsize : -authsize; + + if (memcmp(q, template[i].result + temp, n)) { + printk(KERN_ERR "alg: aead: Chunk " + "test %d failed on %s at page " + "%u for %s\n", j, e, k, algo); + hexdump(q, n); + goto out; + } + + q += n; + if (k == template[i].np - 1 && !enc) { + if (memcmp(q, template[i].input + + temp + n, authsize)) + n = authsize; + else + n = 0; + } else { + for (n = 0; offset_in_page(q + n) && + q[n]; n++) + ; + } + if (n) { + printk(KERN_ERR "alg: aead: Result " + "buffer corruption in chunk " + "test %d on %s at page %u for " + "%s: %u bytes:\n", j, e, k, + algo, n); + hexdump(q, n); + goto out; + } + + temp += template[i].tap[k]; + } + } + } + + ret = 0; + +out: + aead_request_free(req); + testmgr_free_buf(axbuf); +out_noaxbuf: + testmgr_free_buf(xbuf); +out_noxbuf: + return ret; +} + +static int test_cipher(struct crypto_cipher *tfm, int enc, + struct cipher_testvec *template, unsigned int tcount) +{ + const char *algo = crypto_tfm_alg_driver_name(crypto_cipher_tfm(tfm)); + unsigned int i, j, k; + char *q; + const char *e; + void *data; + char *xbuf[XBUFSIZE]; + int ret = -ENOMEM; + + if (testmgr_alloc_buf(xbuf)) + goto out_nobuf; + + if (enc == ENCRYPT) + e = "encryption"; + else + e = "decryption"; + + j = 0; + for (i = 0; i < tcount; i++) { + if (template[i].np) + continue; + + j++; + + ret = -EINVAL; + if (WARN_ON(template[i].ilen > PAGE_SIZE)) + goto out; + + data = xbuf[0]; + memcpy(data, template[i].input, template[i].ilen); + + crypto_cipher_clear_flags(tfm, ~0); + if (template[i].wk) + crypto_cipher_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY); + + ret = crypto_cipher_setkey(tfm, template[i].key, + template[i].klen); + if (!ret == template[i].fail) { + printk(KERN_ERR "alg: cipher: setkey failed " + "on test %d for %s: flags=%x\n", j, + algo, crypto_cipher_get_flags(tfm)); + goto out; + } else if (ret) + continue; + + for (k = 0; k < template[i].ilen; + k += crypto_cipher_blocksize(tfm)) { + if (enc) + crypto_cipher_encrypt_one(tfm, data + k, + data + k); + else + crypto_cipher_decrypt_one(tfm, data + k, + data + k); + } + + q = data; + if (memcmp(q, template[i].result, template[i].rlen)) { + printk(KERN_ERR "alg: cipher: Test %d failed " + "on %s for %s\n", j, e, algo); + hexdump(q, template[i].rlen); + ret = -EINVAL; + goto out; + } + } + + ret = 0; + +out: + testmgr_free_buf(xbuf); +out_nobuf: + return ret; +} + +static int test_skcipher(struct crypto_ablkcipher *tfm, int enc, + struct cipher_testvec *template, unsigned int tcount) +{ + const char *algo = + 
crypto_tfm_alg_driver_name(crypto_ablkcipher_tfm(tfm)); + unsigned int i, j, k, n, temp; + char *q; + struct ablkcipher_request *req; + struct scatterlist sg[8]; + const char *e; + struct tcrypt_result result; + void *data; + char iv[MAX_IVLEN]; + char *xbuf[XBUFSIZE]; + int ret = -ENOMEM; + + if (testmgr_alloc_buf(xbuf)) + goto out_nobuf; + + if (enc == ENCRYPT) + e = "encryption"; + else + e = "decryption"; + + init_completion(&result.completion); + + req = ablkcipher_request_alloc(tfm, GFP_KERNEL); + if (!req) { + printk(KERN_ERR "alg: skcipher: Failed to allocate request " + "for %s\n", algo); + goto out; + } + + ablkcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG, + tcrypt_complete, &result); + + j = 0; + for (i = 0; i < tcount; i++) { + if (template[i].iv) + memcpy(iv, template[i].iv, MAX_IVLEN); + else + memset(iv, 0, MAX_IVLEN); + + if (!(template[i].np)) { + j++; + + ret = -EINVAL; + if (WARN_ON(template[i].ilen > PAGE_SIZE)) + goto out; + + data = xbuf[0]; + memcpy(data, template[i].input, template[i].ilen); + + crypto_ablkcipher_clear_flags(tfm, ~0); + if (template[i].wk) + crypto_ablkcipher_set_flags( + tfm, CRYPTO_TFM_REQ_WEAK_KEY); + + ret = crypto_ablkcipher_setkey(tfm, template[i].key, + template[i].klen); + if (!ret == template[i].fail) { + printk(KERN_ERR "alg: skcipher: setkey failed " + "on test %d for %s: flags=%x\n", j, + algo, crypto_ablkcipher_get_flags(tfm)); + goto out; + } else if (ret) + continue; + + sg_init_one(&sg[0], data, template[i].ilen); + + ablkcipher_request_set_crypt(req, sg, sg, + template[i].ilen, iv); + ret = enc ? + crypto_ablkcipher_encrypt(req) : + crypto_ablkcipher_decrypt(req); + + switch (ret) { + case 0: + break; + case -EINPROGRESS: + case -EBUSY: + ret = wait_for_completion_interruptible( + &result.completion); + if (!ret && !((ret = result.err))) { + INIT_COMPLETION(result.completion); + break; + } + /* fall through */ + default: + printk(KERN_ERR "alg: skcipher: %s failed on " + "test %d for %s: ret=%d\n", e, j, algo, + -ret); + goto out; + } + + q = data; + if (memcmp(q, template[i].result, template[i].rlen)) { + printk(KERN_ERR "alg: skcipher: Test %d " + "failed on %s for %s\n", j, e, algo); + hexdump(q, template[i].rlen); + ret = -EINVAL; + goto out; + } + } + } + + j = 0; + for (i = 0; i < tcount; i++) { + + if (template[i].iv) + memcpy(iv, template[i].iv, MAX_IVLEN); + else + memset(iv, 0, MAX_IVLEN); + + if (template[i].np) { + j++; + + crypto_ablkcipher_clear_flags(tfm, ~0); + if (template[i].wk) + crypto_ablkcipher_set_flags( + tfm, CRYPTO_TFM_REQ_WEAK_KEY); + + ret = crypto_ablkcipher_setkey(tfm, template[i].key, + template[i].klen); + if (!ret == template[i].fail) { + printk(KERN_ERR "alg: skcipher: setkey failed " + "on chunk test %d for %s: flags=%x\n", + j, algo, + crypto_ablkcipher_get_flags(tfm)); + goto out; + } else if (ret) + continue; + + temp = 0; + ret = -EINVAL; + sg_init_table(sg, template[i].np); + for (k = 0; k < template[i].np; k++) { + if (WARN_ON(offset_in_page(IDX[k]) + + template[i].tap[k] > PAGE_SIZE)) + goto out; + + q = xbuf[IDX[k] >> PAGE_SHIFT] + + offset_in_page(IDX[k]); + + memcpy(q, template[i].input + temp, + template[i].tap[k]); + + if (offset_in_page(q) + template[i].tap[k] < + PAGE_SIZE) + q[template[i].tap[k]] = 0; + + sg_set_buf(&sg[k], q, template[i].tap[k]); + + temp += template[i].tap[k]; + } + + ablkcipher_request_set_crypt(req, sg, sg, + template[i].ilen, iv); + + ret = enc ? 
+ crypto_ablkcipher_encrypt(req) : + crypto_ablkcipher_decrypt(req); + + switch (ret) { + case 0: + break; + case -EINPROGRESS: + case -EBUSY: + ret = wait_for_completion_interruptible( + &result.completion); + if (!ret && !((ret = result.err))) { + INIT_COMPLETION(result.completion); + break; + } + /* fall through */ + default: + printk(KERN_ERR "alg: skcipher: %s failed on " + "chunk test %d for %s: ret=%d\n", e, j, + algo, -ret); + goto out; + } + + temp = 0; + ret = -EINVAL; + for (k = 0; k < template[i].np; k++) { + q = xbuf[IDX[k] >> PAGE_SHIFT] + + offset_in_page(IDX[k]); + + if (memcmp(q, template[i].result + temp, + template[i].tap[k])) { + printk(KERN_ERR "alg: skcipher: Chunk " + "test %d failed on %s at page " + "%u for %s\n", j, e, k, algo); + hexdump(q, template[i].tap[k]); + goto out; + } + + q += template[i].tap[k]; + for (n = 0; offset_in_page(q + n) && q[n]; n++) + ; + if (n) { + printk(KERN_ERR "alg: skcipher: " + "Result buffer corruption in " + "chunk test %d on %s at page " + "%u for %s: %u bytes:\n", j, e, + k, algo, n); + hexdump(q, n); + goto out; + } + temp += template[i].tap[k]; + } + } + } + + ret = 0; + +out: + ablkcipher_request_free(req); + testmgr_free_buf(xbuf); +out_nobuf: + return ret; +} + +static int test_comp(struct crypto_comp *tfm, struct comp_testvec *ctemplate, + struct comp_testvec *dtemplate, int ctcount, int dtcount) +{ + const char *algo = crypto_tfm_alg_driver_name(crypto_comp_tfm(tfm)); + unsigned int i; + char result[COMP_BUF_SIZE]; + int ret; + + for (i = 0; i < ctcount; i++) { + int ilen; + unsigned int dlen = COMP_BUF_SIZE; + + memset(result, 0, sizeof (result)); + + ilen = ctemplate[i].inlen; + ret = crypto_comp_compress(tfm, ctemplate[i].input, + ilen, result, &dlen); + if (ret) { + printk(KERN_ERR "alg: comp: compression failed " + "on test %d for %s: ret=%d\n", i + 1, algo, + -ret); + goto out; + } + + if (dlen != ctemplate[i].outlen) { + printk(KERN_ERR "alg: comp: Compression test %d " + "failed for %s: output len = %d\n", i + 1, algo, + dlen); + ret = -EINVAL; + goto out; + } + + if (memcmp(result, ctemplate[i].output, dlen)) { + printk(KERN_ERR "alg: comp: Compression test %d " + "failed for %s\n", i + 1, algo); + hexdump(result, dlen); + ret = -EINVAL; + goto out; + } + } + + for (i = 0; i < dtcount; i++) { + int ilen; + unsigned int dlen = COMP_BUF_SIZE; + + memset(result, 0, sizeof (result)); + + ilen = dtemplate[i].inlen; + ret = crypto_comp_decompress(tfm, dtemplate[i].input, + ilen, result, &dlen); + if (ret) { + printk(KERN_ERR "alg: comp: decompression failed " + "on test %d for %s: ret=%d\n", i + 1, algo, + -ret); + goto out; + } + + if (dlen != dtemplate[i].outlen) { + printk(KERN_ERR "alg: comp: Decompression test %d " + "failed for %s: output len = %d\n", i + 1, algo, + dlen); + ret = -EINVAL; + goto out; + } + + if (memcmp(result, dtemplate[i].output, dlen)) { + printk(KERN_ERR "alg: comp: Decompression test %d " + "failed for %s\n", i + 1, algo); + hexdump(result, dlen); + ret = -EINVAL; + goto out; + } + } + + ret = 0; + +out: + return ret; +} + +static int test_pcomp(struct crypto_pcomp *tfm, + struct pcomp_testvec *ctemplate, + struct pcomp_testvec *dtemplate, int ctcount, + int dtcount) +{ + const char *algo = crypto_tfm_alg_driver_name(crypto_pcomp_tfm(tfm)); + unsigned int i; + char result[COMP_BUF_SIZE]; + int res; + + for (i = 0; i < ctcount; i++) { + struct comp_request req; + unsigned int produced = 0; + + res = crypto_compress_setup(tfm, ctemplate[i].params, + ctemplate[i].paramsize); + if (res) { + 
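+ /* A setup failure aborts the whole pcomp suite: the error is
+  * reported and returned immediately. */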
pr_err("alg: pcomp: compression setup failed on test " + "%d for %s: error=%d\n", i + 1, algo, res); + return res; + } + + res = crypto_compress_init(tfm); + if (res) { + pr_err("alg: pcomp: compression init failed on test " + "%d for %s: error=%d\n", i + 1, algo, res); + return res; + } + + memset(result, 0, sizeof(result)); + + req.next_in = ctemplate[i].input; + req.avail_in = ctemplate[i].inlen / 2; + req.next_out = result; + req.avail_out = ctemplate[i].outlen / 2; + + res = crypto_compress_update(tfm, &req); + if (res < 0 && (res != -EAGAIN || req.avail_in)) { + pr_err("alg: pcomp: compression update failed on test " + "%d for %s: error=%d\n", i + 1, algo, res); + return res; + } + if (res > 0) + produced += res; + + /* Add remaining input data */ + req.avail_in += (ctemplate[i].inlen + 1) / 2; + + res = crypto_compress_update(tfm, &req); + if (res < 0 && (res != -EAGAIN || req.avail_in)) { + pr_err("alg: pcomp: compression update failed on test " + "%d for %s: error=%d\n", i + 1, algo, res); + return res; + } + if (res > 0) + produced += res; + + /* Provide remaining output space */ + req.avail_out += COMP_BUF_SIZE - ctemplate[i].outlen / 2; + + res = crypto_compress_final(tfm, &req); + if (res < 0) { + pr_err("alg: pcomp: compression final failed on test " + "%d for %s: error=%d\n", i + 1, algo, res); + return res; + } + produced += res; + + if (COMP_BUF_SIZE - req.avail_out != ctemplate[i].outlen) { + pr_err("alg: comp: Compression test %d failed for %s: " + "output len = %d (expected %d)\n", i + 1, algo, + COMP_BUF_SIZE - req.avail_out, + ctemplate[i].outlen); + return -EINVAL; + } + + if (produced != ctemplate[i].outlen) { + pr_err("alg: comp: Compression test %d failed for %s: " + "returned len = %u (expected %d)\n", i + 1, + algo, produced, ctemplate[i].outlen); + return -EINVAL; + } + + if (memcmp(result, ctemplate[i].output, ctemplate[i].outlen)) { + pr_err("alg: pcomp: Compression test %d failed for " + "%s\n", i + 1, algo); + hexdump(result, ctemplate[i].outlen); + return -EINVAL; + } + } + + for (i = 0; i < dtcount; i++) { + struct comp_request req; + unsigned int produced = 0; + + res = crypto_decompress_setup(tfm, dtemplate[i].params, + dtemplate[i].paramsize); + if (res) { + pr_err("alg: pcomp: decompression setup failed on " + "test %d for %s: error=%d\n", i + 1, algo, res); + return res; + } + + res = crypto_decompress_init(tfm); + if (res) { + pr_err("alg: pcomp: decompression init failed on test " + "%d for %s: error=%d\n", i + 1, algo, res); + return res; + } + + memset(result, 0, sizeof(result)); + + req.next_in = dtemplate[i].input; + req.avail_in = dtemplate[i].inlen / 2; + req.next_out = result; + req.avail_out = dtemplate[i].outlen / 2; + + res = crypto_decompress_update(tfm, &req); + if (res < 0 && (res != -EAGAIN || req.avail_in)) { + pr_err("alg: pcomp: decompression update failed on " + "test %d for %s: error=%d\n", i + 1, algo, res); + return res; + } + if (res > 0) + produced += res; + + /* Add remaining input data */ + req.avail_in += (dtemplate[i].inlen + 1) / 2; + + res = crypto_decompress_update(tfm, &req); + if (res < 0 && (res != -EAGAIN || req.avail_in)) { + pr_err("alg: pcomp: decompression update failed on " + "test %d for %s: error=%d\n", i + 1, algo, res); + return res; + } + if (res > 0) + produced += res; + + /* Provide remaining output space */ + req.avail_out += COMP_BUF_SIZE - dtemplate[i].outlen / 2; + + res = crypto_decompress_final(tfm, &req); + if (res < 0 && (res != -EAGAIN || req.avail_in)) { + pr_err("alg: pcomp: decompression 
final failed on " + "test %d for %s: error=%d\n", i + 1, algo, res); + return res; + } + if (res > 0) + produced += res; + + if (COMP_BUF_SIZE - req.avail_out != dtemplate[i].outlen) { + pr_err("alg: comp: Decompression test %d failed for " + "%s: output len = %d (expected %d)\n", i + 1, + algo, COMP_BUF_SIZE - req.avail_out, + dtemplate[i].outlen); + return -EINVAL; + } + + if (produced != dtemplate[i].outlen) { + pr_err("alg: comp: Decompression test %d failed for " + "%s: returned len = %u (expected %d)\n", i + 1, + algo, produced, dtemplate[i].outlen); + return -EINVAL; + } + + if (memcmp(result, dtemplate[i].output, dtemplate[i].outlen)) { + pr_err("alg: pcomp: Decompression test %d failed for " + "%s\n", i + 1, algo); + hexdump(result, dtemplate[i].outlen); + return -EINVAL; + } + } + + return 0; +} + + +static int test_cprng(struct crypto_rng *tfm, struct cprng_testvec *template, + unsigned int tcount) +{ + const char *algo = crypto_tfm_alg_driver_name(crypto_rng_tfm(tfm)); + int err = 0, i, j, seedsize; + u8 *seed; + char result[32]; + + seedsize = crypto_rng_seedsize(tfm); + + seed = kmalloc(seedsize, GFP_KERNEL); + if (!seed) { + printk(KERN_ERR "alg: cprng: Failed to allocate seed space " + "for %s\n", algo); + return -ENOMEM; + } + + for (i = 0; i < tcount; i++) { + memset(result, 0, 32); + + memcpy(seed, template[i].v, template[i].vlen); + memcpy(seed + template[i].vlen, template[i].key, + template[i].klen); + memcpy(seed + template[i].vlen + template[i].klen, + template[i].dt, template[i].dtlen); + + err = crypto_rng_reset(tfm, seed, seedsize); + if (err) { + printk(KERN_ERR "alg: cprng: Failed to reset rng " + "for %s\n", algo); + goto out; + } + + for (j = 0; j < template[i].loops; j++) { + err = crypto_rng_get_bytes(tfm, result, + template[i].rlen); + if (err != template[i].rlen) { + printk(KERN_ERR "alg: cprng: Failed to obtain " + "the correct amount of random data for " + "%s (requested %d, got %d)\n", algo, + template[i].rlen, err); + goto out; + } + } + + err = memcmp(result, template[i].result, + template[i].rlen); + if (err) { + printk(KERN_ERR "alg: cprng: Test %d failed for %s\n", + i, algo); + hexdump(result, template[i].rlen); + err = -EINVAL; + goto out; + } + } + +out: + kfree(seed); + return err; +} + +static int alg_test_aead(const struct alg_test_desc *desc, const char *driver, + u32 type, u32 mask) +{ + struct crypto_aead *tfm; + int err = 0; + + tfm = crypto_alloc_aead(driver, type, mask); + if (IS_ERR(tfm)) { + printk(KERN_ERR "alg: aead: Failed to load transform for %s: " + "%ld\n", driver, PTR_ERR(tfm)); + return PTR_ERR(tfm); + } + + if (desc->suite.aead.enc.vecs) { + err = test_aead(tfm, ENCRYPT, desc->suite.aead.enc.vecs, + desc->suite.aead.enc.count); + if (err) + goto out; + } + + if (!err && desc->suite.aead.dec.vecs) + err = test_aead(tfm, DECRYPT, desc->suite.aead.dec.vecs, + desc->suite.aead.dec.count); + +out: + crypto_free_aead(tfm); + return err; +} + +static int alg_test_cipher(const struct alg_test_desc *desc, + const char *driver, u32 type, u32 mask) +{ + struct crypto_cipher *tfm; + int err = 0; + + tfm = crypto_alloc_cipher(driver, type, mask); + if (IS_ERR(tfm)) { + printk(KERN_ERR "alg: cipher: Failed to load transform for " + "%s: %ld\n", driver, PTR_ERR(tfm)); + return PTR_ERR(tfm); + } + + if (desc->suite.cipher.enc.vecs) { + err = test_cipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs, + desc->suite.cipher.enc.count); + if (err) + goto out; + } + + if (desc->suite.cipher.dec.vecs) + err = test_cipher(tfm, DECRYPT, 
desc->suite.cipher.dec.vecs, + desc->suite.cipher.dec.count); + +out: + crypto_free_cipher(tfm); + return err; +} + +static int alg_test_skcipher(const struct alg_test_desc *desc, + const char *driver, u32 type, u32 mask) +{ + struct crypto_ablkcipher *tfm; + int err = 0; + + tfm = crypto_alloc_ablkcipher(driver, type, mask); + if (IS_ERR(tfm)) { + printk(KERN_ERR "alg: skcipher: Failed to load transform for " + "%s: %ld\n", driver, PTR_ERR(tfm)); + return PTR_ERR(tfm); + } + + if (desc->suite.cipher.enc.vecs) { + err = test_skcipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs, + desc->suite.cipher.enc.count); + if (err) + goto out; + } + + if (desc->suite.cipher.dec.vecs) + err = test_skcipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs, + desc->suite.cipher.dec.count); + +out: + crypto_free_ablkcipher(tfm); + return err; +} + +static int alg_test_comp(const struct alg_test_desc *desc, const char *driver, + u32 type, u32 mask) +{ + struct crypto_comp *tfm; + int err; + + tfm = crypto_alloc_comp(driver, type, mask); + if (IS_ERR(tfm)) { + printk(KERN_ERR "alg: comp: Failed to load transform for %s: " + "%ld\n", driver, PTR_ERR(tfm)); + return PTR_ERR(tfm); + } + + err = test_comp(tfm, desc->suite.comp.comp.vecs, + desc->suite.comp.decomp.vecs, + desc->suite.comp.comp.count, + desc->suite.comp.decomp.count); + + crypto_free_comp(tfm); + return err; +} + +static int alg_test_pcomp(const struct alg_test_desc *desc, const char *driver, + u32 type, u32 mask) +{ + struct crypto_pcomp *tfm; + int err; + + tfm = crypto_alloc_pcomp(driver, type, mask); + if (IS_ERR(tfm)) { + pr_err("alg: pcomp: Failed to load transform for %s: %ld\n", + driver, PTR_ERR(tfm)); + return PTR_ERR(tfm); + } + + err = test_pcomp(tfm, desc->suite.pcomp.comp.vecs, + desc->suite.pcomp.decomp.vecs, + desc->suite.pcomp.comp.count, + desc->suite.pcomp.decomp.count); + + crypto_free_pcomp(tfm); + return err; +} + +static int alg_test_hash(const struct alg_test_desc *desc, const char *driver, + u32 type, u32 mask) +{ + struct crypto_ahash *tfm; + int err; + + tfm = crypto_alloc_ahash(driver, type, mask); + if (IS_ERR(tfm)) { + printk(KERN_ERR "alg: hash: Failed to load transform for %s: " + "%ld\n", driver, PTR_ERR(tfm)); + return PTR_ERR(tfm); + } + + err = test_hash(tfm, desc->suite.hash.vecs, + desc->suite.hash.count, true); + if (!err) + err = test_hash(tfm, desc->suite.hash.vecs, + desc->suite.hash.count, false); + + crypto_free_ahash(tfm); + return err; +} + +static int alg_test_crc32c(const struct alg_test_desc *desc, + const char *driver, u32 type, u32 mask) +{ + struct crypto_shash *tfm; + u32 val; + int err; + + err = alg_test_hash(desc, driver, type, mask); + if (err) + goto out; + + tfm = crypto_alloc_shash(driver, type, mask); + if (IS_ERR(tfm)) { + printk(KERN_ERR "alg: crc32c: Failed to load transform for %s: " + "%ld\n", driver, PTR_ERR(tfm)); + err = PTR_ERR(tfm); + goto out; + } + + do { + struct { + struct shash_desc shash; + char ctx[crypto_shash_descsize(tfm)]; + } sdesc; + + sdesc.shash.tfm = tfm; + sdesc.shash.flags = 0; + + *(u32 *)sdesc.ctx = le32_to_cpu(420553207); + err = crypto_shash_final(&sdesc.shash, (u8 *)&val); + if (err) { + printk(KERN_ERR "alg: crc32c: Operation failed for " + "%s: %d\n", driver, err); + break; + } + + if (val != ~420553207) { + printk(KERN_ERR "alg: crc32c: Test failed for %s: " + "%d\n", driver, val); + err = -EINVAL; + } + } while (0); + + crypto_free_shash(tfm); + +out: + return err; +} + +static int alg_test_cprng(const struct alg_test_desc *desc, const char *driver, + u32 
type, u32 mask) +{ + struct crypto_rng *rng; + int err; + + rng = crypto_alloc_rng(driver, type, mask); + if (IS_ERR(rng)) { + printk(KERN_ERR "alg: cprng: Failed to load transform for %s: " + "%ld\n", driver, PTR_ERR(rng)); + return PTR_ERR(rng); + } + + err = test_cprng(rng, desc->suite.cprng.vecs, desc->suite.cprng.count); + + crypto_free_rng(rng); + + return err; +} + +static int alg_test_null(const struct alg_test_desc *desc, + const char *driver, u32 type, u32 mask) +{ + return 0; +} + +/* Please keep this list sorted by algorithm name. */ +static const struct alg_test_desc alg_test_descs[] = { + { + .alg = "__cbc-serpent-sse2", + .test = alg_test_null, + .suite = { + .cipher = { + .enc = { + .vecs = NULL, + .count = 0 + }, + .dec = { + .vecs = NULL, + .count = 0 + } + } + } + }, { + .alg = "__driver-cbc-aes-aesni", + .test = alg_test_null, + .suite = { + .cipher = { + .enc = { + .vecs = NULL, + .count = 0 + }, + .dec = { + .vecs = NULL, + .count = 0 + } + } + } + }, { + .alg = "__driver-cbc-serpent-sse2", + .test = alg_test_null, + .suite = { + .cipher = { + .enc = { + .vecs = NULL, + .count = 0 + }, + .dec = { + .vecs = NULL, + .count = 0 + } + } + } + }, { + .alg = "__driver-ecb-aes-aesni", + .test = alg_test_null, + .suite = { + .cipher = { + .enc = { + .vecs = NULL, + .count = 0 + }, + .dec = { + .vecs = NULL, + .count = 0 + } + } + } + }, { + .alg = "__driver-ecb-serpent-sse2", + .test = alg_test_null, + .suite = { + .cipher = { + .enc = { + .vecs = NULL, + .count = 0 + }, + .dec = { + .vecs = NULL, + .count = 0 + } + } + } + }, { + .alg = "__ghash-pclmulqdqni", + .test = alg_test_null, + .suite = { + .hash = { + .vecs = NULL, + .count = 0 + } + } + }, { + .alg = "ansi_cprng", + .test = alg_test_cprng, + .fips_allowed = 1, + .suite = { + .cprng = { + .vecs = ansi_cprng_aes_tv_template, + .count = ANSI_CPRNG_AES_TEST_VECTORS + } + } + }, { + .alg = "cbc(aes)", + .test = alg_test_skcipher, + .fips_allowed = 1, + .suite = { + .cipher = { + .enc = { + .vecs = aes_cbc_enc_tv_template, + .count = AES_CBC_ENC_TEST_VECTORS + }, + .dec = { + .vecs = aes_cbc_dec_tv_template, + .count = AES_CBC_DEC_TEST_VECTORS + } + } + } + }, { + .alg = "cbc(anubis)", + .test = alg_test_skcipher, + .suite = { + .cipher = { + .enc = { + .vecs = anubis_cbc_enc_tv_template, + .count = ANUBIS_CBC_ENC_TEST_VECTORS + }, + .dec = { + .vecs = anubis_cbc_dec_tv_template, + .count = ANUBIS_CBC_DEC_TEST_VECTORS + } + } + } + }, { + .alg = "cbc(blowfish)", + .test = alg_test_skcipher, + .suite = { + .cipher = { + .enc = { + .vecs = bf_cbc_enc_tv_template, + .count = BF_CBC_ENC_TEST_VECTORS + }, + .dec = { + .vecs = bf_cbc_dec_tv_template, + .count = BF_CBC_DEC_TEST_VECTORS + } + } + } + }, { + .alg = "cbc(camellia)", + .test = alg_test_skcipher, + .suite = { + .cipher = { + .enc = { + .vecs = camellia_cbc_enc_tv_template, + .count = CAMELLIA_CBC_ENC_TEST_VECTORS + }, + .dec = { + .vecs = camellia_cbc_dec_tv_template, + .count = CAMELLIA_CBC_DEC_TEST_VECTORS + } + } + } + }, { + .alg = "cbc(des)", + .test = alg_test_skcipher, + .suite = { + .cipher = { + .enc = { + .vecs = des_cbc_enc_tv_template, + .count = DES_CBC_ENC_TEST_VECTORS + }, + .dec = { + .vecs = des_cbc_dec_tv_template, + .count = DES_CBC_DEC_TEST_VECTORS + } + } + } + }, { + .alg = "cbc(des3_ede)", + .test = alg_test_skcipher, + .fips_allowed = 1, + .suite = { + .cipher = { + .enc = { + .vecs = des3_ede_cbc_enc_tv_template, + .count = DES3_EDE_CBC_ENC_TEST_VECTORS + }, + .dec = { + .vecs = des3_ede_cbc_dec_tv_template, + .count = 
DES3_EDE_CBC_DEC_TEST_VECTORS + } + } + } + }, { + .alg = "cbc(serpent)", + .test = alg_test_skcipher, + .suite = { + .cipher = { + .enc = { + .vecs = serpent_cbc_enc_tv_template, + .count = SERPENT_CBC_ENC_TEST_VECTORS + }, + .dec = { + .vecs = serpent_cbc_dec_tv_template, + .count = SERPENT_CBC_DEC_TEST_VECTORS + } + } + } + }, { + .alg = "cbc(twofish)", + .test = alg_test_skcipher, + .suite = { + .cipher = { + .enc = { + .vecs = tf_cbc_enc_tv_template, + .count = TF_CBC_ENC_TEST_VECTORS + }, + .dec = { + .vecs = tf_cbc_dec_tv_template, + .count = TF_CBC_DEC_TEST_VECTORS + } + } + } + }, { + .alg = "ccm(aes)", + .test = alg_test_aead, + .fips_allowed = 1, + .suite = { + .aead = { + .enc = { + .vecs = aes_ccm_enc_tv_template, + .count = AES_CCM_ENC_TEST_VECTORS + }, + .dec = { + .vecs = aes_ccm_dec_tv_template, + .count = AES_CCM_DEC_TEST_VECTORS + } + } + } + }, { + .alg = "crc32c", + .test = alg_test_crc32c, + .fips_allowed = 1, + .suite = { + .hash = { + .vecs = crc32c_tv_template, + .count = CRC32C_TEST_VECTORS + } + } + }, { + .alg = "cryptd(__driver-ecb-aes-aesni)", + .test = alg_test_null, + .suite = { + .cipher = { + .enc = { + .vecs = NULL, + .count = 0 + }, + .dec = { + .vecs = NULL, + .count = 0 + } + } + } + }, { + .alg = "cryptd(__driver-ecb-serpent-sse2)", + .test = alg_test_null, + .suite = { + .cipher = { + .enc = { + .vecs = NULL, + .count = 0 + }, + .dec = { + .vecs = NULL, + .count = 0 + } + } + } + }, { + .alg = "cryptd(__ghash-pclmulqdqni)", + .test = alg_test_null, + .suite = { + .hash = { + .vecs = NULL, + .count = 0 + } + } + }, { + .alg = "ctr(aes)", + .test = alg_test_skcipher, + .fips_allowed = 1, + .suite = { + .cipher = { + .enc = { + .vecs = aes_ctr_enc_tv_template, + .count = AES_CTR_ENC_TEST_VECTORS + }, + .dec = { + .vecs = aes_ctr_dec_tv_template, + .count = AES_CTR_DEC_TEST_VECTORS + } + } + } + }, { + .alg = "ctr(blowfish)", + .test = alg_test_skcipher, + .suite = { + .cipher = { + .enc = { + .vecs = bf_ctr_enc_tv_template, + .count = BF_CTR_ENC_TEST_VECTORS + }, + .dec = { + .vecs = bf_ctr_dec_tv_template, + .count = BF_CTR_DEC_TEST_VECTORS + } + } + } + }, { + .alg = "ctr(camellia)", + .test = alg_test_skcipher, + .suite = { + .cipher = { + .enc = { + .vecs = camellia_ctr_enc_tv_template, + .count = CAMELLIA_CTR_ENC_TEST_VECTORS + }, + .dec = { + .vecs = camellia_ctr_dec_tv_template, + .count = CAMELLIA_CTR_DEC_TEST_VECTORS + } + } + } + }, { + .alg = "ctr(serpent)", + .test = alg_test_skcipher, + .suite = { + .cipher = { + .enc = { + .vecs = serpent_ctr_enc_tv_template, + .count = SERPENT_CTR_ENC_TEST_VECTORS + }, + .dec = { + .vecs = serpent_ctr_dec_tv_template, + .count = SERPENT_CTR_DEC_TEST_VECTORS + } + } + } + }, { + .alg = "ctr(twofish)", + .test = alg_test_skcipher, + .suite = { + .cipher = { + .enc = { + .vecs = tf_ctr_enc_tv_template, + .count = TF_CTR_ENC_TEST_VECTORS + }, + .dec = { + .vecs = tf_ctr_dec_tv_template, + .count = TF_CTR_DEC_TEST_VECTORS + } + } + } + }, { + .alg = "cts(cbc(aes))", + .test = alg_test_skcipher, + .suite = { + .cipher = { + .enc = { + .vecs = cts_mode_enc_tv_template, + .count = CTS_MODE_ENC_TEST_VECTORS + }, + .dec = { + .vecs = cts_mode_dec_tv_template, + .count = CTS_MODE_DEC_TEST_VECTORS + } + } + } + }, { + .alg = "deflate", + .test = alg_test_comp, + .suite = { + .comp = { + .comp = { + .vecs = deflate_comp_tv_template, + .count = DEFLATE_COMP_TEST_VECTORS + }, + .decomp = { + .vecs = deflate_decomp_tv_template, + .count = DEFLATE_DECOMP_TEST_VECTORS + } + } + } + }, { + .alg = "ecb(__aes-aesni)", + .test 
= alg_test_null, + .suite = { + .cipher = { + .enc = { + .vecs = NULL, + .count = 0 + }, + .dec = { + .vecs = NULL, + .count = 0 + } + } + } + }, { + .alg = "ecb(aes)", + .test = alg_test_skcipher, + .fips_allowed = 1, + .suite = { + .cipher = { + .enc = { + .vecs = aes_enc_tv_template, + .count = AES_ENC_TEST_VECTORS + }, + .dec = { + .vecs = aes_dec_tv_template, + .count = AES_DEC_TEST_VECTORS + } + } + } + }, { + .alg = "ecb(anubis)", + .test = alg_test_skcipher, + .suite = { + .cipher = { + .enc = { + .vecs = anubis_enc_tv_template, + .count = ANUBIS_ENC_TEST_VECTORS + }, + .dec = { + .vecs = anubis_dec_tv_template, + .count = ANUBIS_DEC_TEST_VECTORS + } + } + } + }, { + .alg = "ecb(arc4)", + .test = alg_test_skcipher, + .suite = { + .cipher = { + .enc = { + .vecs = arc4_enc_tv_template, + .count = ARC4_ENC_TEST_VECTORS + }, + .dec = { + .vecs = arc4_dec_tv_template, + .count = ARC4_DEC_TEST_VECTORS + } + } + } + }, { + .alg = "ecb(blowfish)", + .test = alg_test_skcipher, + .suite = { + .cipher = { + .enc = { + .vecs = bf_enc_tv_template, + .count = BF_ENC_TEST_VECTORS + }, + .dec = { + .vecs = bf_dec_tv_template, + .count = BF_DEC_TEST_VECTORS + } + } + } + }, { + .alg = "ecb(camellia)", + .test = alg_test_skcipher, + .suite = { + .cipher = { + .enc = { + .vecs = camellia_enc_tv_template, + .count = CAMELLIA_ENC_TEST_VECTORS + }, + .dec = { + .vecs = camellia_dec_tv_template, + .count = CAMELLIA_DEC_TEST_VECTORS + } + } + } + }, { + .alg = "ecb(cast5)", + .test = alg_test_skcipher, + .suite = { + .cipher = { + .enc = { + .vecs = cast5_enc_tv_template, + .count = CAST5_ENC_TEST_VECTORS + }, + .dec = { + .vecs = cast5_dec_tv_template, + .count = CAST5_DEC_TEST_VECTORS + } + } + } + }, { + .alg = "ecb(cast6)", + .test = alg_test_skcipher, + .suite = { + .cipher = { + .enc = { + .vecs = cast6_enc_tv_template, + .count = CAST6_ENC_TEST_VECTORS + }, + .dec = { + .vecs = cast6_dec_tv_template, + .count = CAST6_DEC_TEST_VECTORS + } + } + } + }, { + .alg = "ecb(des)", + .test = alg_test_skcipher, + .fips_allowed = 1, + .suite = { + .cipher = { + .enc = { + .vecs = des_enc_tv_template, + .count = DES_ENC_TEST_VECTORS + }, + .dec = { + .vecs = des_dec_tv_template, + .count = DES_DEC_TEST_VECTORS + } + } + } + }, { + .alg = "ecb(des3_ede)", + .test = alg_test_skcipher, + .fips_allowed = 1, + .suite = { + .cipher = { + .enc = { + .vecs = des3_ede_enc_tv_template, + .count = DES3_EDE_ENC_TEST_VECTORS + }, + .dec = { + .vecs = des3_ede_dec_tv_template, + .count = DES3_EDE_DEC_TEST_VECTORS + } + } + } + }, { + .alg = "ecb(khazad)", + .test = alg_test_skcipher, + .suite = { + .cipher = { + .enc = { + .vecs = khazad_enc_tv_template, + .count = KHAZAD_ENC_TEST_VECTORS + }, + .dec = { + .vecs = khazad_dec_tv_template, + .count = KHAZAD_DEC_TEST_VECTORS + } + } + } + }, { + .alg = "ecb(seed)", + .test = alg_test_skcipher, + .suite = { + .cipher = { + .enc = { + .vecs = seed_enc_tv_template, + .count = SEED_ENC_TEST_VECTORS + }, + .dec = { + .vecs = seed_dec_tv_template, + .count = SEED_DEC_TEST_VECTORS + } + } + } + }, { + .alg = "ecb(serpent)", + .test = alg_test_skcipher, + .suite = { + .cipher = { + .enc = { + .vecs = serpent_enc_tv_template, + .count = SERPENT_ENC_TEST_VECTORS + }, + .dec = { + .vecs = serpent_dec_tv_template, + .count = SERPENT_DEC_TEST_VECTORS + } + } + } + }, { + .alg = "ecb(tea)", + .test = alg_test_skcipher, + .suite = { + .cipher = { + .enc = { + .vecs = tea_enc_tv_template, + .count = TEA_ENC_TEST_VECTORS + }, + .dec = { + .vecs = tea_dec_tv_template, + .count = 
TEA_DEC_TEST_VECTORS + } + } + } + }, { + .alg = "ecb(tnepres)", + .test = alg_test_skcipher, + .suite = { + .cipher = { + .enc = { + .vecs = tnepres_enc_tv_template, + .count = TNEPRES_ENC_TEST_VECTORS + }, + .dec = { + .vecs = tnepres_dec_tv_template, + .count = TNEPRES_DEC_TEST_VECTORS + } + } + } + }, { + .alg = "ecb(twofish)", + .test = alg_test_skcipher, + .suite = { + .cipher = { + .enc = { + .vecs = tf_enc_tv_template, + .count = TF_ENC_TEST_VECTORS + }, + .dec = { + .vecs = tf_dec_tv_template, + .count = TF_DEC_TEST_VECTORS + } + } + } + }, { + .alg = "ecb(xeta)", + .test = alg_test_skcipher, + .suite = { + .cipher = { + .enc = { + .vecs = xeta_enc_tv_template, + .count = XETA_ENC_TEST_VECTORS + }, + .dec = { + .vecs = xeta_dec_tv_template, + .count = XETA_DEC_TEST_VECTORS + } + } + } + }, { + .alg = "ecb(xtea)", + .test = alg_test_skcipher, + .suite = { + .cipher = { + .enc = { + .vecs = xtea_enc_tv_template, + .count = XTEA_ENC_TEST_VECTORS + }, + .dec = { + .vecs = xtea_dec_tv_template, + .count = XTEA_DEC_TEST_VECTORS + } + } + } + }, { + .alg = "gcm(aes)", + .test = alg_test_aead, + .fips_allowed = 1, + .suite = { + .aead = { + .enc = { + .vecs = aes_gcm_enc_tv_template, + .count = AES_GCM_ENC_TEST_VECTORS + }, + .dec = { + .vecs = aes_gcm_dec_tv_template, + .count = AES_GCM_DEC_TEST_VECTORS + } + } + } + }, { + .alg = "ghash", + .test = alg_test_hash, + .fips_allowed = 1, + .suite = { + .hash = { + .vecs = ghash_tv_template, + .count = GHASH_TEST_VECTORS + } + } + }, { + .alg = "hmac(md5)", + .test = alg_test_hash, + .suite = { + .hash = { + .vecs = hmac_md5_tv_template, + .count = HMAC_MD5_TEST_VECTORS + } + } + }, { + .alg = "hmac(rmd128)", + .test = alg_test_hash, + .suite = { + .hash = { + .vecs = hmac_rmd128_tv_template, + .count = HMAC_RMD128_TEST_VECTORS + } + } + }, { + .alg = "hmac(rmd160)", + .test = alg_test_hash, + .suite = { + .hash = { + .vecs = hmac_rmd160_tv_template, + .count = HMAC_RMD160_TEST_VECTORS + } + } + }, { + .alg = "hmac(sha1)", + .test = alg_test_hash, + .fips_allowed = 1, + .suite = { + .hash = { + .vecs = hmac_sha1_tv_template, + .count = HMAC_SHA1_TEST_VECTORS + } + } + }, { + .alg = "hmac(sha224)", + .test = alg_test_hash, + .fips_allowed = 1, + .suite = { + .hash = { + .vecs = hmac_sha224_tv_template, + .count = HMAC_SHA224_TEST_VECTORS + } + } + }, { + .alg = "hmac(sha256)", + .test = alg_test_hash, + .fips_allowed = 1, + .suite = { + .hash = { + .vecs = hmac_sha256_tv_template, + .count = HMAC_SHA256_TEST_VECTORS + } + } + }, { + .alg = "hmac(sha384)", + .test = alg_test_hash, + .fips_allowed = 1, + .suite = { + .hash = { + .vecs = hmac_sha384_tv_template, + .count = HMAC_SHA384_TEST_VECTORS + } + } + }, { + .alg = "hmac(sha512)", + .test = alg_test_hash, + .fips_allowed = 1, + .suite = { + .hash = { + .vecs = hmac_sha512_tv_template, + .count = HMAC_SHA512_TEST_VECTORS + } + } + }, { + .alg = "lrw(aes)", + .test = alg_test_skcipher, + .suite = { + .cipher = { + .enc = { + .vecs = aes_lrw_enc_tv_template, + .count = AES_LRW_ENC_TEST_VECTORS + }, + .dec = { + .vecs = aes_lrw_dec_tv_template, + .count = AES_LRW_DEC_TEST_VECTORS + } + } + } + }, { + .alg = "lrw(camellia)", + .test = alg_test_skcipher, + .suite = { + .cipher = { + .enc = { + .vecs = camellia_lrw_enc_tv_template, + .count = CAMELLIA_LRW_ENC_TEST_VECTORS + }, + .dec = { + .vecs = camellia_lrw_dec_tv_template, + .count = CAMELLIA_LRW_DEC_TEST_VECTORS + } + } + } + }, { + .alg = "lrw(serpent)", + .test = alg_test_skcipher, + .suite = { + .cipher = { + .enc = { + .vecs = 
serpent_lrw_enc_tv_template, + .count = SERPENT_LRW_ENC_TEST_VECTORS + }, + .dec = { + .vecs = serpent_lrw_dec_tv_template, + .count = SERPENT_LRW_DEC_TEST_VECTORS + } + } + } + }, { + .alg = "lrw(twofish)", + .test = alg_test_skcipher, + .suite = { + .cipher = { + .enc = { + .vecs = tf_lrw_enc_tv_template, + .count = TF_LRW_ENC_TEST_VECTORS + }, + .dec = { + .vecs = tf_lrw_dec_tv_template, + .count = TF_LRW_DEC_TEST_VECTORS + } + } + } + }, { + .alg = "lzo", + .test = alg_test_comp, + .suite = { + .comp = { + .comp = { + .vecs = lzo_comp_tv_template, + .count = LZO_COMP_TEST_VECTORS + }, + .decomp = { + .vecs = lzo_decomp_tv_template, + .count = LZO_DECOMP_TEST_VECTORS + } + } + } + }, { + .alg = "md4", + .test = alg_test_hash, + .suite = { + .hash = { + .vecs = md4_tv_template, + .count = MD4_TEST_VECTORS + } + } + }, { + .alg = "md5", + .test = alg_test_hash, + .suite = { + .hash = { + .vecs = md5_tv_template, + .count = MD5_TEST_VECTORS + } + } + }, { + .alg = "michael_mic", + .test = alg_test_hash, + .suite = { + .hash = { + .vecs = michael_mic_tv_template, + .count = MICHAEL_MIC_TEST_VECTORS + } + } + }, { + .alg = "ofb(aes)", + .test = alg_test_skcipher, + .fips_allowed = 1, + .suite = { + .cipher = { + .enc = { + .vecs = aes_ofb_enc_tv_template, + .count = AES_OFB_ENC_TEST_VECTORS + }, + .dec = { + .vecs = aes_ofb_dec_tv_template, + .count = AES_OFB_DEC_TEST_VECTORS + } + } + } + }, { + .alg = "pcbc(fcrypt)", + .test = alg_test_skcipher, + .suite = { + .cipher = { + .enc = { + .vecs = fcrypt_pcbc_enc_tv_template, + .count = FCRYPT_ENC_TEST_VECTORS + }, + .dec = { + .vecs = fcrypt_pcbc_dec_tv_template, + .count = FCRYPT_DEC_TEST_VECTORS + } + } + } + }, { + .alg = "rfc3686(ctr(aes))", + .test = alg_test_skcipher, + .fips_allowed = 1, + .suite = { + .cipher = { + .enc = { + .vecs = aes_ctr_rfc3686_enc_tv_template, + .count = AES_CTR_3686_ENC_TEST_VECTORS + }, + .dec = { + .vecs = aes_ctr_rfc3686_dec_tv_template, + .count = AES_CTR_3686_DEC_TEST_VECTORS + } + } + } + }, { + .alg = "rfc4106(gcm(aes))", + .test = alg_test_aead, + .suite = { + .aead = { + .enc = { + .vecs = aes_gcm_rfc4106_enc_tv_template, + .count = AES_GCM_4106_ENC_TEST_VECTORS + }, + .dec = { + .vecs = aes_gcm_rfc4106_dec_tv_template, + .count = AES_GCM_4106_DEC_TEST_VECTORS + } + } + } + }, { + + + .alg = "rfc4309(ccm(aes))", + .test = alg_test_aead, + .fips_allowed = 1, + .suite = { + .aead = { + .enc = { + .vecs = aes_ccm_rfc4309_enc_tv_template, + .count = AES_CCM_4309_ENC_TEST_VECTORS + }, + .dec = { + .vecs = aes_ccm_rfc4309_dec_tv_template, + .count = AES_CCM_4309_DEC_TEST_VECTORS + } + } + } + }, { + .alg = "rmd128", + .test = alg_test_hash, + .suite = { + .hash = { + .vecs = rmd128_tv_template, + .count = RMD128_TEST_VECTORS + } + } + }, { + .alg = "rmd160", + .test = alg_test_hash, + .suite = { + .hash = { + .vecs = rmd160_tv_template, + .count = RMD160_TEST_VECTORS + } + } + }, { + .alg = "rmd256", + .test = alg_test_hash, + .suite = { + .hash = { + .vecs = rmd256_tv_template, + .count = RMD256_TEST_VECTORS + } + } + }, { + .alg = "rmd320", + .test = alg_test_hash, + .suite = { + .hash = { + .vecs = rmd320_tv_template, + .count = RMD320_TEST_VECTORS + } + } + }, { + .alg = "salsa20", + .test = alg_test_skcipher, + .suite = { + .cipher = { + .enc = { + .vecs = salsa20_stream_enc_tv_template, + .count = SALSA20_STREAM_ENC_TEST_VECTORS + } + } + } + }, { + .alg = "sha1", + .test = alg_test_hash, + .fips_allowed = 1, + .suite = { + .hash = { + .vecs = sha1_tv_template, + .count = SHA1_TEST_VECTORS + } + } + }, 
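+	/* The SHA-2 entries that follow (sha224/sha256/sha384/sha512) are all
+	 * driven through alg_test_hash() with the vectors defined in
+	 * testmgr.h and, like sha1 above, are marked fips_allowed. */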
{ + .alg = "sha224", + .test = alg_test_hash, + .fips_allowed = 1, + .suite = { + .hash = { + .vecs = sha224_tv_template, + .count = SHA224_TEST_VECTORS + } + } + }, { + .alg = "sha256", + .test = alg_test_hash, + .fips_allowed = 1, + .suite = { + .hash = { + .vecs = sha256_tv_template, + .count = SHA256_TEST_VECTORS + } + } + }, { + .alg = "sha384", + .test = alg_test_hash, + .fips_allowed = 1, + .suite = { + .hash = { + .vecs = sha384_tv_template, + .count = SHA384_TEST_VECTORS + } + } + }, { + .alg = "sha512", + .test = alg_test_hash, + .fips_allowed = 1, + .suite = { + .hash = { + .vecs = sha512_tv_template, + .count = SHA512_TEST_VECTORS + } + } + }, { + .alg = "tgr128", + .test = alg_test_hash, + .suite = { + .hash = { + .vecs = tgr128_tv_template, + .count = TGR128_TEST_VECTORS + } + } + }, { + .alg = "tgr160", + .test = alg_test_hash, + .suite = { + .hash = { + .vecs = tgr160_tv_template, + .count = TGR160_TEST_VECTORS + } + } + }, { + .alg = "tgr192", + .test = alg_test_hash, + .suite = { + .hash = { + .vecs = tgr192_tv_template, + .count = TGR192_TEST_VECTORS + } + } + }, { + .alg = "vmac(aes)", + .test = alg_test_hash, + .suite = { + .hash = { + .vecs = aes_vmac128_tv_template, + .count = VMAC_AES_TEST_VECTORS + } + } + }, { + .alg = "wp256", + .test = alg_test_hash, + .suite = { + .hash = { + .vecs = wp256_tv_template, + .count = WP256_TEST_VECTORS + } + } + }, { + .alg = "wp384", + .test = alg_test_hash, + .suite = { + .hash = { + .vecs = wp384_tv_template, + .count = WP384_TEST_VECTORS + } + } + }, { + .alg = "wp512", + .test = alg_test_hash, + .suite = { + .hash = { + .vecs = wp512_tv_template, + .count = WP512_TEST_VECTORS + } + } + }, { + .alg = "xcbc(aes)", + .test = alg_test_hash, + .suite = { + .hash = { + .vecs = aes_xcbc128_tv_template, + .count = XCBC_AES_TEST_VECTORS + } + } + }, { + .alg = "xts(aes)", + .test = alg_test_skcipher, + .fips_allowed = 1, + .suite = { + .cipher = { + .enc = { + .vecs = aes_xts_enc_tv_template, + .count = AES_XTS_ENC_TEST_VECTORS + }, + .dec = { + .vecs = aes_xts_dec_tv_template, + .count = AES_XTS_DEC_TEST_VECTORS + } + } + } + }, { + .alg = "xts(camellia)", + .test = alg_test_skcipher, + .suite = { + .cipher = { + .enc = { + .vecs = camellia_xts_enc_tv_template, + .count = CAMELLIA_XTS_ENC_TEST_VECTORS + }, + .dec = { + .vecs = camellia_xts_dec_tv_template, + .count = CAMELLIA_XTS_DEC_TEST_VECTORS + } + } + } + }, { + .alg = "xts(serpent)", + .test = alg_test_skcipher, + .suite = { + .cipher = { + .enc = { + .vecs = serpent_xts_enc_tv_template, + .count = SERPENT_XTS_ENC_TEST_VECTORS + }, + .dec = { + .vecs = serpent_xts_dec_tv_template, + .count = SERPENT_XTS_DEC_TEST_VECTORS + } + } + } + }, { + .alg = "xts(twofish)", + .test = alg_test_skcipher, + .suite = { + .cipher = { + .enc = { + .vecs = tf_xts_enc_tv_template, + .count = TF_XTS_ENC_TEST_VECTORS + }, + .dec = { + .vecs = tf_xts_dec_tv_template, + .count = TF_XTS_DEC_TEST_VECTORS + } + } + } + }, { + .alg = "zlib", + .test = alg_test_pcomp, + .suite = { + .pcomp = { + .comp = { + .vecs = zlib_comp_tv_template, + .count = ZLIB_COMP_TEST_VECTORS + }, + .decomp = { + .vecs = zlib_decomp_tv_template, + .count = ZLIB_DECOMP_TEST_VECTORS + } + } + } + } +}; + +static int alg_find_test(const char *alg) +{ + int start = 0; + int end = ARRAY_SIZE(alg_test_descs); + + while (start < end) { + int i = (start + end) / 2; + int diff = strcmp(alg_test_descs[i].alg, alg); + + if (diff > 0) { + end = i; + continue; + } + + if (diff < 0) { + start = i + 1; + continue; + } + + return i; + } + + 
return -1; +} + +int alg_test(const char *driver, const char *alg, u32 type, u32 mask) +{ + int i; + int j; + int rc; + + if ((type & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_CIPHER) { + char nalg[CRYPTO_MAX_ALG_NAME]; + + if (snprintf(nalg, sizeof(nalg), "ecb(%s)", alg) >= + sizeof(nalg)) + return -ENAMETOOLONG; + + i = alg_find_test(nalg); + if (i < 0) + goto notest; + + if (fips_enabled && !alg_test_descs[i].fips_allowed) + goto non_fips_alg; + + rc = alg_test_cipher(alg_test_descs + i, driver, type, mask); + goto test_done; + } + + i = alg_find_test(alg); + j = alg_find_test(driver); + if (i < 0 && j < 0) + goto notest; + + if (fips_enabled && ((i >= 0 && !alg_test_descs[i].fips_allowed) || + (j >= 0 && !alg_test_descs[j].fips_allowed))) + goto non_fips_alg; + + rc = 0; + if (i >= 0) + rc |= alg_test_descs[i].test(alg_test_descs + i, driver, + type, mask); + if (j >= 0) + rc |= alg_test_descs[j].test(alg_test_descs + j, driver, + type, mask); + +test_done: + if (fips_enabled && rc) + panic("%s: %s alg self test failed in fips mode!\n", driver, alg); + + if (fips_enabled && !rc) + printk(KERN_INFO "alg: self-tests for %s (%s) passed\n", + driver, alg); + + return rc; + +notest: + printk(KERN_INFO "alg: No test for %s (%s)\n", alg, driver); + return 0; +non_fips_alg: + return -EINVAL; +} + +#endif /* CONFIG_CRYPTO_MANAGER_DISABLE_TESTS */ + +EXPORT_SYMBOL_GPL(alg_test); diff --git a/crypto/testmgr.h b/crypto/testmgr.h new file mode 100644 index 00000000..36e5a8ee --- /dev/null +++ b/crypto/testmgr.h @@ -0,0 +1,14861 @@ +/* + * Algorithm testing framework and tests. + * + * Copyright (c) 2002 James Morris <jmorris@intercode.com.au> + * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org> + * Copyright (c) 2007 Nokia Siemens Networks + * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au> + * + * Updated RFC4106 AES-GCM testing. Some test vectors were taken from + * http://csrc.nist.gov/groups/ST/toolkit/BCM/documents/proposedmodes/ + * gcm/gcm-test-vectors.tar.gz + * Authors: Aidan O'Mahony (aidan.o.mahony@intel.com) + * Adrian Hoban <adrian.hoban@intel.com> + * Gabriele Paoloni <gabriele.paoloni@intel.com> + * Tadeusz Struk (tadeusz.struk@intel.com) + * Copyright (c) 2010, Intel Corporation. + * + * This program is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License as published by the Free + * Software Foundation; either version 2 of the License, or (at your option) + * any later version. 
+ * + */ +#ifndef _CRYPTO_TESTMGR_H +#define _CRYPTO_TESTMGR_H + +#include <linux/netlink.h> +#include <linux/zlib.h> + +#include <crypto/compress.h> + +#define MAX_DIGEST_SIZE 64 +#define MAX_TAP 8 + +#define MAX_KEYLEN 56 +#define MAX_IVLEN 32 + +struct hash_testvec { + /* only used with keyed hash algorithms */ + char *key; + char *plaintext; + char *digest; + unsigned char tap[MAX_TAP]; + unsigned char psize; + unsigned char np; + unsigned char ksize; +}; + +struct cipher_testvec { + char *key; + char *iv; + char *input; + char *result; + unsigned short tap[MAX_TAP]; + int np; + unsigned char fail; + unsigned char wk; /* weak key flag */ + unsigned char klen; + unsigned short ilen; + unsigned short rlen; +}; + +struct aead_testvec { + char *key; + char *iv; + char *input; + char *assoc; + char *result; + unsigned char tap[MAX_TAP]; + unsigned char atap[MAX_TAP]; + int np; + int anp; + unsigned char fail; + unsigned char novrfy; /* ccm dec verification failure expected */ + unsigned char wk; /* weak key flag */ + unsigned char klen; + unsigned short ilen; + unsigned short alen; + unsigned short rlen; +}; + +struct cprng_testvec { + char *key; + char *dt; + char *v; + char *result; + unsigned char klen; + unsigned short dtlen; + unsigned short vlen; + unsigned short rlen; + unsigned short loops; +}; + +static char zeroed_string[48]; + +/* + * MD4 test vectors from RFC1320 + */ +#define MD4_TEST_VECTORS 7 + +static struct hash_testvec md4_tv_template [] = { + { + .plaintext = "", + .digest = "\x31\xd6\xcf\xe0\xd1\x6a\xe9\x31" + "\xb7\x3c\x59\xd7\xe0\xc0\x89\xc0", + }, { + .plaintext = "a", + .psize = 1, + .digest = "\xbd\xe5\x2c\xb3\x1d\xe3\x3e\x46" + "\x24\x5e\x05\xfb\xdb\xd6\xfb\x24", + }, { + .plaintext = "abc", + .psize = 3, + .digest = "\xa4\x48\x01\x7a\xaf\x21\xd8\x52" + "\x5f\xc1\x0a\xe8\x7a\xa6\x72\x9d", + }, { + .plaintext = "message digest", + .psize = 14, + .digest = "\xd9\x13\x0a\x81\x64\x54\x9f\xe8" + "\x18\x87\x48\x06\xe1\xc7\x01\x4b", + }, { + .plaintext = "abcdefghijklmnopqrstuvwxyz", + .psize = 26, + .digest = "\xd7\x9e\x1c\x30\x8a\xa5\xbb\xcd" + "\xee\xa8\xed\x63\xdf\x41\x2d\xa9", + .np = 2, + .tap = { 13, 13 }, + }, { + .plaintext = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789", + .psize = 62, + .digest = "\x04\x3f\x85\x82\xf2\x41\xdb\x35" + "\x1c\xe6\x27\xe1\x53\xe7\xf0\xe4", + }, { + .plaintext = "123456789012345678901234567890123456789012345678901234567890123" + "45678901234567890", + .psize = 80, + .digest = "\xe3\x3b\x4d\xdc\x9c\x38\xf2\x19" + "\x9c\x3e\x7b\x16\x4f\xcc\x05\x36", + }, +}; + +/* + * MD5 test vectors from RFC1321 + */ +#define MD5_TEST_VECTORS 7 + +static struct hash_testvec md5_tv_template[] = { + { + .digest = "\xd4\x1d\x8c\xd9\x8f\x00\xb2\x04" + "\xe9\x80\x09\x98\xec\xf8\x42\x7e", + }, { + .plaintext = "a", + .psize = 1, + .digest = "\x0c\xc1\x75\xb9\xc0\xf1\xb6\xa8" + "\x31\xc3\x99\xe2\x69\x77\x26\x61", + }, { + .plaintext = "abc", + .psize = 3, + .digest = "\x90\x01\x50\x98\x3c\xd2\x4f\xb0" + "\xd6\x96\x3f\x7d\x28\xe1\x7f\x72", + }, { + .plaintext = "message digest", + .psize = 14, + .digest = "\xf9\x6b\x69\x7d\x7c\xb7\x93\x8d" + "\x52\x5a\x2f\x31\xaa\xf1\x61\xd0", + }, { + .plaintext = "abcdefghijklmnopqrstuvwxyz", + .psize = 26, + .digest = "\xc3\xfc\xd3\xd7\x61\x92\xe4\x00" + "\x7d\xfb\x49\x6c\xca\x67\xe1\x3b", + .np = 2, + .tap = {13, 13} + }, { + .plaintext = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789", + .psize = 62, + .digest = "\xd1\x74\xab\x98\xd2\x77\xd9\xf5" + "\xa5\x61\x1c\x2c\x9f\x41\x9d\x9f", 
+ }, { + .plaintext = "12345678901234567890123456789012345678901234567890123456789012" + "345678901234567890", + .psize = 80, + .digest = "\x57\xed\xf4\xa2\x2b\xe3\xc9\x55" + "\xac\x49\xda\x2e\x21\x07\xb6\x7a", + } + +}; + +/* + * RIPEMD-128 test vectors from ISO/IEC 10118-3:2004(E) + */ +#define RMD128_TEST_VECTORS 10 + +static struct hash_testvec rmd128_tv_template[] = { + { + .digest = "\xcd\xf2\x62\x13\xa1\x50\xdc\x3e" + "\xcb\x61\x0f\x18\xf6\xb3\x8b\x46", + }, { + .plaintext = "a", + .psize = 1, + .digest = "\x86\xbe\x7a\xfa\x33\x9d\x0f\xc7" + "\xcf\xc7\x85\xe7\x2f\x57\x8d\x33", + }, { + .plaintext = "abc", + .psize = 3, + .digest = "\xc1\x4a\x12\x19\x9c\x66\xe4\xba" + "\x84\x63\x6b\x0f\x69\x14\x4c\x77", + }, { + .plaintext = "message digest", + .psize = 14, + .digest = "\x9e\x32\x7b\x3d\x6e\x52\x30\x62" + "\xaf\xc1\x13\x2d\x7d\xf9\xd1\xb8", + }, { + .plaintext = "abcdefghijklmnopqrstuvwxyz", + .psize = 26, + .digest = "\xfd\x2a\xa6\x07\xf7\x1d\xc8\xf5" + "\x10\x71\x49\x22\xb3\x71\x83\x4e", + }, { + .plaintext = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcde" + "fghijklmnopqrstuvwxyz0123456789", + .psize = 62, + .digest = "\xd1\xe9\x59\xeb\x17\x9c\x91\x1f" + "\xae\xa4\x62\x4c\x60\xc5\xc7\x02", + }, { + .plaintext = "1234567890123456789012345678901234567890" + "1234567890123456789012345678901234567890", + .psize = 80, + .digest = "\x3f\x45\xef\x19\x47\x32\xc2\xdb" + "\xb2\xc4\xa2\xc7\x69\x79\x5f\xa3", + }, { + .plaintext = "abcdbcdecdefdefgefghfghighij" + "hijkijkljklmklmnlmnomnopnopq", + .psize = 56, + .digest = "\xa1\xaa\x06\x89\xd0\xfa\xfa\x2d" + "\xdc\x22\xe8\x8b\x49\x13\x3a\x06", + .np = 2, + .tap = { 28, 28 }, + }, { + .plaintext = "abcdefghbcdefghicdefghijdefghijkefghijklfghi" + "jklmghijklmnhijklmnoijklmnopjklmnopqklmnopqr" + "lmnopqrsmnopqrstnopqrstu", + .psize = 112, + .digest = "\xd4\xec\xc9\x13\xe1\xdf\x77\x6b" + "\xf4\x8d\xe9\xd5\x5b\x1f\x25\x46", + }, { + .plaintext = "abcdbcdecdefdefgefghfghighijhijk", + .psize = 32, + .digest = "\x13\xfc\x13\xe8\xef\xff\x34\x7d" + "\xe1\x93\xff\x46\xdb\xac\xcf\xd4", + } +}; + +/* + * RIPEMD-160 test vectors from ISO/IEC 10118-3:2004(E) + */ +#define RMD160_TEST_VECTORS 10 + +static struct hash_testvec rmd160_tv_template[] = { + { + .digest = "\x9c\x11\x85\xa5\xc5\xe9\xfc\x54\x61\x28" + "\x08\x97\x7e\xe8\xf5\x48\xb2\x25\x8d\x31", + }, { + .plaintext = "a", + .psize = 1, + .digest = "\x0b\xdc\x9d\x2d\x25\x6b\x3e\xe9\xda\xae" + "\x34\x7b\xe6\xf4\xdc\x83\x5a\x46\x7f\xfe", + }, { + .plaintext = "abc", + .psize = 3, + .digest = "\x8e\xb2\x08\xf7\xe0\x5d\x98\x7a\x9b\x04" + "\x4a\x8e\x98\xc6\xb0\x87\xf1\x5a\x0b\xfc", + }, { + .plaintext = "message digest", + .psize = 14, + .digest = "\x5d\x06\x89\xef\x49\xd2\xfa\xe5\x72\xb8" + "\x81\xb1\x23\xa8\x5f\xfa\x21\x59\x5f\x36", + }, { + .plaintext = "abcdefghijklmnopqrstuvwxyz", + .psize = 26, + .digest = "\xf7\x1c\x27\x10\x9c\x69\x2c\x1b\x56\xbb" + "\xdc\xeb\x5b\x9d\x28\x65\xb3\x70\x8d\xbc", + }, { + .plaintext = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcde" + "fghijklmnopqrstuvwxyz0123456789", + .psize = 62, + .digest = "\xb0\xe2\x0b\x6e\x31\x16\x64\x02\x86\xed" + "\x3a\x87\xa5\x71\x30\x79\xb2\x1f\x51\x89", + }, { + .plaintext = "1234567890123456789012345678901234567890" + "1234567890123456789012345678901234567890", + .psize = 80, + .digest = "\x9b\x75\x2e\x45\x57\x3d\x4b\x39\xf4\xdb" + "\xd3\x32\x3c\xab\x82\xbf\x63\x32\x6b\xfb", + }, { + .plaintext = "abcdbcdecdefdefgefghfghighij" + "hijkijkljklmklmnlmnomnopnopq", + .psize = 56, + .digest = "\x12\xa0\x53\x38\x4a\x9c\x0c\x88\xe4\x05" + 
"\xa0\x6c\x27\xdc\xf4\x9a\xda\x62\xeb\x2b", + .np = 2, + .tap = { 28, 28 }, + }, { + .plaintext = "abcdefghbcdefghicdefghijdefghijkefghijklfghi" + "jklmghijklmnhijklmnoijklmnopjklmnopqklmnopqr" + "lmnopqrsmnopqrstnopqrstu", + .psize = 112, + .digest = "\x6f\x3f\xa3\x9b\x6b\x50\x3c\x38\x4f\x91" + "\x9a\x49\xa7\xaa\x5c\x2c\x08\xbd\xfb\x45", + }, { + .plaintext = "abcdbcdecdefdefgefghfghighijhijk", + .psize = 32, + .digest = "\x94\xc2\x64\x11\x54\x04\xe6\x33\x79\x0d" + "\xfc\xc8\x7b\x58\x7d\x36\x77\x06\x7d\x9f", + } +}; + +/* + * RIPEMD-256 test vectors + */ +#define RMD256_TEST_VECTORS 8 + +static struct hash_testvec rmd256_tv_template[] = { + { + .digest = "\x02\xba\x4c\x4e\x5f\x8e\xcd\x18" + "\x77\xfc\x52\xd6\x4d\x30\xe3\x7a" + "\x2d\x97\x74\xfb\x1e\x5d\x02\x63" + "\x80\xae\x01\x68\xe3\xc5\x52\x2d", + }, { + .plaintext = "a", + .psize = 1, + .digest = "\xf9\x33\x3e\x45\xd8\x57\xf5\xd9" + "\x0a\x91\xba\xb7\x0a\x1e\xba\x0c" + "\xfb\x1b\xe4\xb0\x78\x3c\x9a\xcf" + "\xcd\x88\x3a\x91\x34\x69\x29\x25", + }, { + .plaintext = "abc", + .psize = 3, + .digest = "\xaf\xbd\x6e\x22\x8b\x9d\x8c\xbb" + "\xce\xf5\xca\x2d\x03\xe6\xdb\xa1" + "\x0a\xc0\xbc\x7d\xcb\xe4\x68\x0e" + "\x1e\x42\xd2\xe9\x75\x45\x9b\x65", + }, { + .plaintext = "message digest", + .psize = 14, + .digest = "\x87\xe9\x71\x75\x9a\x1c\xe4\x7a" + "\x51\x4d\x5c\x91\x4c\x39\x2c\x90" + "\x18\xc7\xc4\x6b\xc1\x44\x65\x55" + "\x4a\xfc\xdf\x54\xa5\x07\x0c\x0e", + }, { + .plaintext = "abcdefghijklmnopqrstuvwxyz", + .psize = 26, + .digest = "\x64\x9d\x30\x34\x75\x1e\xa2\x16" + "\x77\x6b\xf9\xa1\x8a\xcc\x81\xbc" + "\x78\x96\x11\x8a\x51\x97\x96\x87" + "\x82\xdd\x1f\xd9\x7d\x8d\x51\x33", + }, { + .plaintext = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcde" + "fghijklmnopqrstuvwxyz0123456789", + .psize = 62, + .digest = "\x57\x40\xa4\x08\xac\x16\xb7\x20" + "\xb8\x44\x24\xae\x93\x1c\xbb\x1f" + "\xe3\x63\xd1\xd0\xbf\x40\x17\xf1" + "\xa8\x9f\x7e\xa6\xde\x77\xa0\xb8", + }, { + .plaintext = "1234567890123456789012345678901234567890" + "1234567890123456789012345678901234567890", + .psize = 80, + .digest = "\x06\xfd\xcc\x7a\x40\x95\x48\xaa" + "\xf9\x13\x68\xc0\x6a\x62\x75\xb5" + "\x53\xe3\xf0\x99\xbf\x0e\xa4\xed" + "\xfd\x67\x78\xdf\x89\xa8\x90\xdd", + }, { + .plaintext = "abcdbcdecdefdefgefghfghighij" + "hijkijkljklmklmnlmnomnopnopq", + .psize = 56, + .digest = "\x38\x43\x04\x55\x83\xaa\xc6\xc8" + "\xc8\xd9\x12\x85\x73\xe7\xa9\x80" + "\x9a\xfb\x2a\x0f\x34\xcc\xc3\x6e" + "\xa9\xe7\x2f\x16\xf6\x36\x8e\x3f", + .np = 2, + .tap = { 28, 28 }, + } +}; + +/* + * RIPEMD-320 test vectors + */ +#define RMD320_TEST_VECTORS 8 + +static struct hash_testvec rmd320_tv_template[] = { + { + .digest = "\x22\xd6\x5d\x56\x61\x53\x6c\xdc\x75\xc1" + "\xfd\xf5\xc6\xde\x7b\x41\xb9\xf2\x73\x25" + "\xeb\xc6\x1e\x85\x57\x17\x7d\x70\x5a\x0e" + "\xc8\x80\x15\x1c\x3a\x32\xa0\x08\x99\xb8", + }, { + .plaintext = "a", + .psize = 1, + .digest = "\xce\x78\x85\x06\x38\xf9\x26\x58\xa5\xa5" + "\x85\x09\x75\x79\x92\x6d\xda\x66\x7a\x57" + "\x16\x56\x2c\xfc\xf6\xfb\xe7\x7f\x63\x54" + "\x2f\x99\xb0\x47\x05\xd6\x97\x0d\xff\x5d", + }, { + .plaintext = "abc", + .psize = 3, + .digest = "\xde\x4c\x01\xb3\x05\x4f\x89\x30\xa7\x9d" + "\x09\xae\x73\x8e\x92\x30\x1e\x5a\x17\x08" + "\x5b\xef\xfd\xc1\xb8\xd1\x16\x71\x3e\x74" + "\xf8\x2f\xa9\x42\xd6\x4c\xdb\xc4\x68\x2d", + }, { + .plaintext = "message digest", + .psize = 14, + .digest = "\x3a\x8e\x28\x50\x2e\xd4\x5d\x42\x2f\x68" + "\x84\x4f\x9d\xd3\x16\xe7\xb9\x85\x33\xfa" + "\x3f\x2a\x91\xd2\x9f\x84\xd4\x25\xc8\x8d" + "\x6b\x4e\xff\x72\x7d\xf6\x6a\x7c\x01\x97", + }, { + 
.plaintext = "abcdefghijklmnopqrstuvwxyz", + .psize = 26, + .digest = "\xca\xbd\xb1\x81\x0b\x92\x47\x0a\x20\x93" + "\xaa\x6b\xce\x05\x95\x2c\x28\x34\x8c\xf4" + "\x3f\xf6\x08\x41\x97\x51\x66\xbb\x40\xed" + "\x23\x40\x04\xb8\x82\x44\x63\xe6\xb0\x09", + }, { + .plaintext = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcde" + "fghijklmnopqrstuvwxyz0123456789", + .psize = 62, + .digest = "\xed\x54\x49\x40\xc8\x6d\x67\xf2\x50\xd2" + "\x32\xc3\x0b\x7b\x3e\x57\x70\xe0\xc6\x0c" + "\x8c\xb9\xa4\xca\xfe\x3b\x11\x38\x8a\xf9" + "\x92\x0e\x1b\x99\x23\x0b\x84\x3c\x86\xa4", + }, { + .plaintext = "1234567890123456789012345678901234567890" + "1234567890123456789012345678901234567890", + .psize = 80, + .digest = "\x55\x78\x88\xaf\x5f\x6d\x8e\xd6\x2a\xb6" + "\x69\x45\xc6\xd2\xa0\xa4\x7e\xcd\x53\x41" + "\xe9\x15\xeb\x8f\xea\x1d\x05\x24\x95\x5f" + "\x82\x5d\xc7\x17\xe4\xa0\x08\xab\x2d\x42", + }, { + .plaintext = "abcdbcdecdefdefgefghfghighij" + "hijkijkljklmklmnlmnomnopnopq", + .psize = 56, + .digest = "\xd0\x34\xa7\x95\x0c\xf7\x22\x02\x1b\xa4" + "\xb8\x4d\xf7\x69\xa5\xde\x20\x60\xe2\x59" + "\xdf\x4c\x9b\xb4\xa4\x26\x8c\x0e\x93\x5b" + "\xbc\x74\x70\xa9\x69\xc9\xd0\x72\xa1\xac", + .np = 2, + .tap = { 28, 28 }, + } +}; + +/* + * SHA1 test vectors from from FIPS PUB 180-1 + * Long vector from CAVS 5.0 + */ +#define SHA1_TEST_VECTORS 3 + +static struct hash_testvec sha1_tv_template[] = { + { + .plaintext = "abc", + .psize = 3, + .digest = "\xa9\x99\x3e\x36\x47\x06\x81\x6a\xba\x3e" + "\x25\x71\x78\x50\xc2\x6c\x9c\xd0\xd8\x9d", + }, { + .plaintext = "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq", + .psize = 56, + .digest = "\x84\x98\x3e\x44\x1c\x3b\xd2\x6e\xba\xae" + "\x4a\xa1\xf9\x51\x29\xe5\xe5\x46\x70\xf1", + .np = 2, + .tap = { 28, 28 } + }, { + .plaintext = "\xec\x29\x56\x12\x44\xed\xe7\x06" + "\xb6\xeb\x30\xa1\xc3\x71\xd7\x44" + "\x50\xa1\x05\xc3\xf9\x73\x5f\x7f" + "\xa9\xfe\x38\xcf\x67\xf3\x04\xa5" + "\x73\x6a\x10\x6e\x92\xe1\x71\x39" + "\xa6\x81\x3b\x1c\x81\xa4\xf3\xd3" + "\xfb\x95\x46\xab\x42\x96\xfa\x9f" + "\x72\x28\x26\xc0\x66\x86\x9e\xda" + "\xcd\x73\xb2\x54\x80\x35\x18\x58" + "\x13\xe2\x26\x34\xa9\xda\x44\x00" + "\x0d\x95\xa2\x81\xff\x9f\x26\x4e" + "\xcc\xe0\xa9\x31\x22\x21\x62\xd0" + "\x21\xcc\xa2\x8d\xb5\xf3\xc2\xaa" + "\x24\x94\x5a\xb1\xe3\x1c\xb4\x13" + "\xae\x29\x81\x0f\xd7\x94\xca\xd5" + "\xdf\xaf\x29\xec\x43\xcb\x38\xd1" + "\x98\xfe\x4a\xe1\xda\x23\x59\x78" + "\x02\x21\x40\x5b\xd6\x71\x2a\x53" + "\x05\xda\x4b\x1b\x73\x7f\xce\x7c" + "\xd2\x1c\x0e\xb7\x72\x8d\x08\x23" + "\x5a\x90\x11", + .psize = 163, + .digest = "\x97\x01\x11\xc4\xe7\x7b\xcc\x88\xcc\x20" + "\x45\x9c\x02\xb6\x9b\x4a\xa8\xf5\x82\x17", + .np = 4, + .tap = { 63, 64, 31, 5 } + } +}; + + +/* + * SHA224 test vectors from from FIPS PUB 180-2 + */ +#define SHA224_TEST_VECTORS 2 + +static struct hash_testvec sha224_tv_template[] = { + { + .plaintext = "abc", + .psize = 3, + .digest = "\x23\x09\x7D\x22\x34\x05\xD8\x22" + "\x86\x42\xA4\x77\xBD\xA2\x55\xB3" + "\x2A\xAD\xBC\xE4\xBD\xA0\xB3\xF7" + "\xE3\x6C\x9D\xA7", + }, { + .plaintext = + "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq", + .psize = 56, + .digest = "\x75\x38\x8B\x16\x51\x27\x76\xCC" + "\x5D\xBA\x5D\xA1\xFD\x89\x01\x50" + "\xB0\xC6\x45\x5C\xB4\xF5\x8B\x19" + "\x52\x52\x25\x25", + .np = 2, + .tap = { 28, 28 } + } +}; + +/* + * SHA256 test vectors from from NIST + */ +#define SHA256_TEST_VECTORS 2 + +static struct hash_testvec sha256_tv_template[] = { + { + .plaintext = "abc", + .psize = 3, + .digest = "\xba\x78\x16\xbf\x8f\x01\xcf\xea" + "\x41\x41\x40\xde\x5d\xae\x22\x23" + 
"\xb0\x03\x61\xa3\x96\x17\x7a\x9c" + "\xb4\x10\xff\x61\xf2\x00\x15\xad", + }, { + .plaintext = "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq", + .psize = 56, + .digest = "\x24\x8d\x6a\x61\xd2\x06\x38\xb8" + "\xe5\xc0\x26\x93\x0c\x3e\x60\x39" + "\xa3\x3c\xe4\x59\x64\xff\x21\x67" + "\xf6\xec\xed\xd4\x19\xdb\x06\xc1", + .np = 2, + .tap = { 28, 28 } + }, +}; + +/* + * SHA384 test vectors from from NIST and kerneli + */ +#define SHA384_TEST_VECTORS 4 + +static struct hash_testvec sha384_tv_template[] = { + { + .plaintext= "abc", + .psize = 3, + .digest = "\xcb\x00\x75\x3f\x45\xa3\x5e\x8b" + "\xb5\xa0\x3d\x69\x9a\xc6\x50\x07" + "\x27\x2c\x32\xab\x0e\xde\xd1\x63" + "\x1a\x8b\x60\x5a\x43\xff\x5b\xed" + "\x80\x86\x07\x2b\xa1\xe7\xcc\x23" + "\x58\xba\xec\xa1\x34\xc8\x25\xa7", + }, { + .plaintext = "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq", + .psize = 56, + .digest = "\x33\x91\xfd\xdd\xfc\x8d\xc7\x39" + "\x37\x07\xa6\x5b\x1b\x47\x09\x39" + "\x7c\xf8\xb1\xd1\x62\xaf\x05\xab" + "\xfe\x8f\x45\x0d\xe5\xf3\x6b\xc6" + "\xb0\x45\x5a\x85\x20\xbc\x4e\x6f" + "\x5f\xe9\x5b\x1f\xe3\xc8\x45\x2b", + }, { + .plaintext = "abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmn" + "hijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu", + .psize = 112, + .digest = "\x09\x33\x0c\x33\xf7\x11\x47\xe8" + "\x3d\x19\x2f\xc7\x82\xcd\x1b\x47" + "\x53\x11\x1b\x17\x3b\x3b\x05\xd2" + "\x2f\xa0\x80\x86\xe3\xb0\xf7\x12" + "\xfc\xc7\xc7\x1a\x55\x7e\x2d\xb9" + "\x66\xc3\xe9\xfa\x91\x74\x60\x39", + }, { + .plaintext = "abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcd" + "efghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyz", + .psize = 104, + .digest = "\x3d\x20\x89\x73\xab\x35\x08\xdb" + "\xbd\x7e\x2c\x28\x62\xba\x29\x0a" + "\xd3\x01\x0e\x49\x78\xc1\x98\xdc" + "\x4d\x8f\xd0\x14\xe5\x82\x82\x3a" + "\x89\xe1\x6f\x9b\x2a\x7b\xbc\x1a" + "\xc9\x38\xe2\xd1\x99\xe8\xbe\xa4", + .np = 4, + .tap = { 26, 26, 26, 26 } + }, +}; + +/* + * SHA512 test vectors from from NIST and kerneli + */ +#define SHA512_TEST_VECTORS 4 + +static struct hash_testvec sha512_tv_template[] = { + { + .plaintext = "abc", + .psize = 3, + .digest = "\xdd\xaf\x35\xa1\x93\x61\x7a\xba" + "\xcc\x41\x73\x49\xae\x20\x41\x31" + "\x12\xe6\xfa\x4e\x89\xa9\x7e\xa2" + "\x0a\x9e\xee\xe6\x4b\x55\xd3\x9a" + "\x21\x92\x99\x2a\x27\x4f\xc1\xa8" + "\x36\xba\x3c\x23\xa3\xfe\xeb\xbd" + "\x45\x4d\x44\x23\x64\x3c\xe8\x0e" + "\x2a\x9a\xc9\x4f\xa5\x4c\xa4\x9f", + }, { + .plaintext = "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq", + .psize = 56, + .digest = "\x20\x4a\x8f\xc6\xdd\xa8\x2f\x0a" + "\x0c\xed\x7b\xeb\x8e\x08\xa4\x16" + "\x57\xc1\x6e\xf4\x68\xb2\x28\xa8" + "\x27\x9b\xe3\x31\xa7\x03\xc3\x35" + "\x96\xfd\x15\xc1\x3b\x1b\x07\xf9" + "\xaa\x1d\x3b\xea\x57\x78\x9c\xa0" + "\x31\xad\x85\xc7\xa7\x1d\xd7\x03" + "\x54\xec\x63\x12\x38\xca\x34\x45", + }, { + .plaintext = "abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmn" + "hijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu", + .psize = 112, + .digest = "\x8e\x95\x9b\x75\xda\xe3\x13\xda" + "\x8c\xf4\xf7\x28\x14\xfc\x14\x3f" + "\x8f\x77\x79\xc6\xeb\x9f\x7f\xa1" + "\x72\x99\xae\xad\xb6\x88\x90\x18" + "\x50\x1d\x28\x9e\x49\x00\xf7\xe4" + "\x33\x1b\x99\xde\xc4\xb5\x43\x3a" + "\xc7\xd3\x29\xee\xb6\xdd\x26\x54" + "\x5e\x96\xe5\x5b\x87\x4b\xe9\x09", + }, { + .plaintext = "abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcd" + "efghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyz", + .psize = 104, + .digest = "\x93\x0d\x0c\xef\xcb\x30\xff\x11" + "\x33\xb6\x89\x81\x21\xf1\xcf\x3d" + 
"\x27\x57\x8a\xfc\xaf\xe8\x67\x7c" + "\x52\x57\xcf\x06\x99\x11\xf7\x5d" + "\x8f\x58\x31\xb5\x6e\xbf\xda\x67" + "\xb2\x78\xe6\x6d\xff\x8b\x84\xfe" + "\x2b\x28\x70\xf7\x42\xa5\x80\xd8" + "\xed\xb4\x19\x87\x23\x28\x50\xc9", + .np = 4, + .tap = { 26, 26, 26, 26 } + }, +}; + + +/* + * WHIRLPOOL test vectors from Whirlpool package + * by Vincent Rijmen and Paulo S. L. M. Barreto as part of the NESSIE + * submission + */ +#define WP512_TEST_VECTORS 8 + +static struct hash_testvec wp512_tv_template[] = { + { + .plaintext = "", + .psize = 0, + .digest = "\x19\xFA\x61\xD7\x55\x22\xA4\x66" + "\x9B\x44\xE3\x9C\x1D\x2E\x17\x26" + "\xC5\x30\x23\x21\x30\xD4\x07\xF8" + "\x9A\xFE\xE0\x96\x49\x97\xF7\xA7" + "\x3E\x83\xBE\x69\x8B\x28\x8F\xEB" + "\xCF\x88\xE3\xE0\x3C\x4F\x07\x57" + "\xEA\x89\x64\xE5\x9B\x63\xD9\x37" + "\x08\xB1\x38\xCC\x42\xA6\x6E\xB3", + + + }, { + .plaintext = "a", + .psize = 1, + .digest = "\x8A\xCA\x26\x02\x79\x2A\xEC\x6F" + "\x11\xA6\x72\x06\x53\x1F\xB7\xD7" + "\xF0\xDF\xF5\x94\x13\x14\x5E\x69" + "\x73\xC4\x50\x01\xD0\x08\x7B\x42" + "\xD1\x1B\xC6\x45\x41\x3A\xEF\xF6" + "\x3A\x42\x39\x1A\x39\x14\x5A\x59" + "\x1A\x92\x20\x0D\x56\x01\x95\xE5" + "\x3B\x47\x85\x84\xFD\xAE\x23\x1A", + }, { + .plaintext = "abc", + .psize = 3, + .digest = "\x4E\x24\x48\xA4\xC6\xF4\x86\xBB" + "\x16\xB6\x56\x2C\x73\xB4\x02\x0B" + "\xF3\x04\x3E\x3A\x73\x1B\xCE\x72" + "\x1A\xE1\xB3\x03\xD9\x7E\x6D\x4C" + "\x71\x81\xEE\xBD\xB6\xC5\x7E\x27" + "\x7D\x0E\x34\x95\x71\x14\xCB\xD6" + "\xC7\x97\xFC\x9D\x95\xD8\xB5\x82" + "\xD2\x25\x29\x20\x76\xD4\xEE\xF5", + }, { + .plaintext = "message digest", + .psize = 14, + .digest = "\x37\x8C\x84\xA4\x12\x6E\x2D\xC6" + "\xE5\x6D\xCC\x74\x58\x37\x7A\xAC" + "\x83\x8D\x00\x03\x22\x30\xF5\x3C" + "\xE1\xF5\x70\x0C\x0F\xFB\x4D\x3B" + "\x84\x21\x55\x76\x59\xEF\x55\xC1" + "\x06\xB4\xB5\x2A\xC5\xA4\xAA\xA6" + "\x92\xED\x92\x00\x52\x83\x8F\x33" + "\x62\xE8\x6D\xBD\x37\xA8\x90\x3E", + }, { + .plaintext = "abcdefghijklmnopqrstuvwxyz", + .psize = 26, + .digest = "\xF1\xD7\x54\x66\x26\x36\xFF\xE9" + "\x2C\x82\xEB\xB9\x21\x2A\x48\x4A" + "\x8D\x38\x63\x1E\xAD\x42\x38\xF5" + "\x44\x2E\xE1\x3B\x80\x54\xE4\x1B" + "\x08\xBF\x2A\x92\x51\xC3\x0B\x6A" + "\x0B\x8A\xAE\x86\x17\x7A\xB4\xA6" + "\xF6\x8F\x67\x3E\x72\x07\x86\x5D" + "\x5D\x98\x19\xA3\xDB\xA4\xEB\x3B", + }, { + .plaintext = "ABCDEFGHIJKLMNOPQRSTUVWXYZ" + "abcdefghijklmnopqrstuvwxyz0123456789", + .psize = 62, + .digest = "\xDC\x37\xE0\x08\xCF\x9E\xE6\x9B" + "\xF1\x1F\x00\xED\x9A\xBA\x26\x90" + "\x1D\xD7\xC2\x8C\xDE\xC0\x66\xCC" + "\x6A\xF4\x2E\x40\xF8\x2F\x3A\x1E" + "\x08\xEB\xA2\x66\x29\x12\x9D\x8F" + "\xB7\xCB\x57\x21\x1B\x92\x81\xA6" + "\x55\x17\xCC\x87\x9D\x7B\x96\x21" + "\x42\xC6\x5F\x5A\x7A\xF0\x14\x67", + }, { + .plaintext = "1234567890123456789012345678901234567890" + "1234567890123456789012345678901234567890", + .psize = 80, + .digest = "\x46\x6E\xF1\x8B\xAB\xB0\x15\x4D" + "\x25\xB9\xD3\x8A\x64\x14\xF5\xC0" + "\x87\x84\x37\x2B\xCC\xB2\x04\xD6" + "\x54\x9C\x4A\xFA\xDB\x60\x14\x29" + "\x4D\x5B\xD8\xDF\x2A\x6C\x44\xE5" + "\x38\xCD\x04\x7B\x26\x81\xA5\x1A" + "\x2C\x60\x48\x1E\x88\xC5\xA2\x0B" + "\x2C\x2A\x80\xCF\x3A\x9A\x08\x3B", + }, { + .plaintext = "abcdbcdecdefdefgefghfghighijhijk", + .psize = 32, + .digest = "\x2A\x98\x7E\xA4\x0F\x91\x70\x61" + "\xF5\xD6\xF0\xA0\xE4\x64\x4F\x48" + "\x8A\x7A\x5A\x52\xDE\xEE\x65\x62" + "\x07\xC5\x62\xF9\x88\xE9\x5C\x69" + "\x16\xBD\xC8\x03\x1B\xC5\xBE\x1B" + "\x7B\x94\x76\x39\xFE\x05\x0B\x56" + "\x93\x9B\xAA\xA0\xAD\xFF\x9A\xE6" + "\x74\x5B\x7B\x18\x1C\x3B\xE3\xFD", + }, +}; + +#define WP384_TEST_VECTORS 8 + 
+static struct hash_testvec wp384_tv_template[] = { + { + .plaintext = "", + .psize = 0, + .digest = "\x19\xFA\x61\xD7\x55\x22\xA4\x66" + "\x9B\x44\xE3\x9C\x1D\x2E\x17\x26" + "\xC5\x30\x23\x21\x30\xD4\x07\xF8" + "\x9A\xFE\xE0\x96\x49\x97\xF7\xA7" + "\x3E\x83\xBE\x69\x8B\x28\x8F\xEB" + "\xCF\x88\xE3\xE0\x3C\x4F\x07\x57", + + + }, { + .plaintext = "a", + .psize = 1, + .digest = "\x8A\xCA\x26\x02\x79\x2A\xEC\x6F" + "\x11\xA6\x72\x06\x53\x1F\xB7\xD7" + "\xF0\xDF\xF5\x94\x13\x14\x5E\x69" + "\x73\xC4\x50\x01\xD0\x08\x7B\x42" + "\xD1\x1B\xC6\x45\x41\x3A\xEF\xF6" + "\x3A\x42\x39\x1A\x39\x14\x5A\x59", + }, { + .plaintext = "abc", + .psize = 3, + .digest = "\x4E\x24\x48\xA4\xC6\xF4\x86\xBB" + "\x16\xB6\x56\x2C\x73\xB4\x02\x0B" + "\xF3\x04\x3E\x3A\x73\x1B\xCE\x72" + "\x1A\xE1\xB3\x03\xD9\x7E\x6D\x4C" + "\x71\x81\xEE\xBD\xB6\xC5\x7E\x27" + "\x7D\x0E\x34\x95\x71\x14\xCB\xD6", + }, { + .plaintext = "message digest", + .psize = 14, + .digest = "\x37\x8C\x84\xA4\x12\x6E\x2D\xC6" + "\xE5\x6D\xCC\x74\x58\x37\x7A\xAC" + "\x83\x8D\x00\x03\x22\x30\xF5\x3C" + "\xE1\xF5\x70\x0C\x0F\xFB\x4D\x3B" + "\x84\x21\x55\x76\x59\xEF\x55\xC1" + "\x06\xB4\xB5\x2A\xC5\xA4\xAA\xA6", + }, { + .plaintext = "abcdefghijklmnopqrstuvwxyz", + .psize = 26, + .digest = "\xF1\xD7\x54\x66\x26\x36\xFF\xE9" + "\x2C\x82\xEB\xB9\x21\x2A\x48\x4A" + "\x8D\x38\x63\x1E\xAD\x42\x38\xF5" + "\x44\x2E\xE1\x3B\x80\x54\xE4\x1B" + "\x08\xBF\x2A\x92\x51\xC3\x0B\x6A" + "\x0B\x8A\xAE\x86\x17\x7A\xB4\xA6", + }, { + .plaintext = "ABCDEFGHIJKLMNOPQRSTUVWXYZ" + "abcdefghijklmnopqrstuvwxyz0123456789", + .psize = 62, + .digest = "\xDC\x37\xE0\x08\xCF\x9E\xE6\x9B" + "\xF1\x1F\x00\xED\x9A\xBA\x26\x90" + "\x1D\xD7\xC2\x8C\xDE\xC0\x66\xCC" + "\x6A\xF4\x2E\x40\xF8\x2F\x3A\x1E" + "\x08\xEB\xA2\x66\x29\x12\x9D\x8F" + "\xB7\xCB\x57\x21\x1B\x92\x81\xA6", + }, { + .plaintext = "1234567890123456789012345678901234567890" + "1234567890123456789012345678901234567890", + .psize = 80, + .digest = "\x46\x6E\xF1\x8B\xAB\xB0\x15\x4D" + "\x25\xB9\xD3\x8A\x64\x14\xF5\xC0" + "\x87\x84\x37\x2B\xCC\xB2\x04\xD6" + "\x54\x9C\x4A\xFA\xDB\x60\x14\x29" + "\x4D\x5B\xD8\xDF\x2A\x6C\x44\xE5" + "\x38\xCD\x04\x7B\x26\x81\xA5\x1A", + }, { + .plaintext = "abcdbcdecdefdefgefghfghighijhijk", + .psize = 32, + .digest = "\x2A\x98\x7E\xA4\x0F\x91\x70\x61" + "\xF5\xD6\xF0\xA0\xE4\x64\x4F\x48" + "\x8A\x7A\x5A\x52\xDE\xEE\x65\x62" + "\x07\xC5\x62\xF9\x88\xE9\x5C\x69" + "\x16\xBD\xC8\x03\x1B\xC5\xBE\x1B" + "\x7B\x94\x76\x39\xFE\x05\x0B\x56", + }, +}; + +#define WP256_TEST_VECTORS 8 + +static struct hash_testvec wp256_tv_template[] = { + { + .plaintext = "", + .psize = 0, + .digest = "\x19\xFA\x61\xD7\x55\x22\xA4\x66" + "\x9B\x44\xE3\x9C\x1D\x2E\x17\x26" + "\xC5\x30\x23\x21\x30\xD4\x07\xF8" + "\x9A\xFE\xE0\x96\x49\x97\xF7\xA7", + + + }, { + .plaintext = "a", + .psize = 1, + .digest = "\x8A\xCA\x26\x02\x79\x2A\xEC\x6F" + "\x11\xA6\x72\x06\x53\x1F\xB7\xD7" + "\xF0\xDF\xF5\x94\x13\x14\x5E\x69" + "\x73\xC4\x50\x01\xD0\x08\x7B\x42", + }, { + .plaintext = "abc", + .psize = 3, + .digest = "\x4E\x24\x48\xA4\xC6\xF4\x86\xBB" + "\x16\xB6\x56\x2C\x73\xB4\x02\x0B" + "\xF3\x04\x3E\x3A\x73\x1B\xCE\x72" + "\x1A\xE1\xB3\x03\xD9\x7E\x6D\x4C", + }, { + .plaintext = "message digest", + .psize = 14, + .digest = "\x37\x8C\x84\xA4\x12\x6E\x2D\xC6" + "\xE5\x6D\xCC\x74\x58\x37\x7A\xAC" + "\x83\x8D\x00\x03\x22\x30\xF5\x3C" + "\xE1\xF5\x70\x0C\x0F\xFB\x4D\x3B", + }, { + .plaintext = "abcdefghijklmnopqrstuvwxyz", + .psize = 26, + .digest = "\xF1\xD7\x54\x66\x26\x36\xFF\xE9" + "\x2C\x82\xEB\xB9\x21\x2A\x48\x4A" + 
"\x8D\x38\x63\x1E\xAD\x42\x38\xF5" + "\x44\x2E\xE1\x3B\x80\x54\xE4\x1B", + }, { + .plaintext = "ABCDEFGHIJKLMNOPQRSTUVWXYZ" + "abcdefghijklmnopqrstuvwxyz0123456789", + .psize = 62, + .digest = "\xDC\x37\xE0\x08\xCF\x9E\xE6\x9B" + "\xF1\x1F\x00\xED\x9A\xBA\x26\x90" + "\x1D\xD7\xC2\x8C\xDE\xC0\x66\xCC" + "\x6A\xF4\x2E\x40\xF8\x2F\x3A\x1E", + }, { + .plaintext = "1234567890123456789012345678901234567890" + "1234567890123456789012345678901234567890", + .psize = 80, + .digest = "\x46\x6E\xF1\x8B\xAB\xB0\x15\x4D" + "\x25\xB9\xD3\x8A\x64\x14\xF5\xC0" + "\x87\x84\x37\x2B\xCC\xB2\x04\xD6" + "\x54\x9C\x4A\xFA\xDB\x60\x14\x29", + }, { + .plaintext = "abcdbcdecdefdefgefghfghighijhijk", + .psize = 32, + .digest = "\x2A\x98\x7E\xA4\x0F\x91\x70\x61" + "\xF5\xD6\xF0\xA0\xE4\x64\x4F\x48" + "\x8A\x7A\x5A\x52\xDE\xEE\x65\x62" + "\x07\xC5\x62\xF9\x88\xE9\x5C\x69", + }, +}; + +/* + * TIGER test vectors from Tiger website + */ +#define TGR192_TEST_VECTORS 6 + +static struct hash_testvec tgr192_tv_template[] = { + { + .plaintext = "", + .psize = 0, + .digest = "\x24\xf0\x13\x0c\x63\xac\x93\x32" + "\x16\x16\x6e\x76\xb1\xbb\x92\x5f" + "\xf3\x73\xde\x2d\x49\x58\x4e\x7a", + }, { + .plaintext = "abc", + .psize = 3, + .digest = "\xf2\x58\xc1\xe8\x84\x14\xab\x2a" + "\x52\x7a\xb5\x41\xff\xc5\xb8\xbf" + "\x93\x5f\x7b\x95\x1c\x13\x29\x51", + }, { + .plaintext = "Tiger", + .psize = 5, + .digest = "\x9f\x00\xf5\x99\x07\x23\x00\xdd" + "\x27\x6a\xbb\x38\xc8\xeb\x6d\xec" + "\x37\x79\x0c\x11\x6f\x9d\x2b\xdf", + }, { + .plaintext = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+-", + .psize = 64, + .digest = "\x87\xfb\x2a\x90\x83\x85\x1c\xf7" + "\x47\x0d\x2c\xf8\x10\xe6\xdf\x9e" + "\xb5\x86\x44\x50\x34\xa5\xa3\x86", + }, { + .plaintext = "ABCDEFGHIJKLMNOPQRSTUVWXYZ=abcdefghijklmnopqrstuvwxyz+0123456789", + .psize = 64, + .digest = "\x46\x7d\xb8\x08\x63\xeb\xce\x48" + "\x8d\xf1\xcd\x12\x61\x65\x5d\xe9" + "\x57\x89\x65\x65\x97\x5f\x91\x97", + }, { + .plaintext = "Tiger - A Fast New Hash Function, " + "by Ross Anderson and Eli Biham, " + "proceedings of Fast Software Encryption 3, " + "Cambridge, 1996.", + .psize = 125, + .digest = "\x3d\x9a\xeb\x03\xd1\xbd\x1a\x63" + "\x57\xb2\x77\x4d\xfd\x6d\x5b\x24" + "\xdd\x68\x15\x1d\x50\x39\x74\xfc", + }, +}; + +#define TGR160_TEST_VECTORS 6 + +static struct hash_testvec tgr160_tv_template[] = { + { + .plaintext = "", + .psize = 0, + .digest = "\x24\xf0\x13\x0c\x63\xac\x93\x32" + "\x16\x16\x6e\x76\xb1\xbb\x92\x5f" + "\xf3\x73\xde\x2d", + }, { + .plaintext = "abc", + .psize = 3, + .digest = "\xf2\x58\xc1\xe8\x84\x14\xab\x2a" + "\x52\x7a\xb5\x41\xff\xc5\xb8\xbf" + "\x93\x5f\x7b\x95", + }, { + .plaintext = "Tiger", + .psize = 5, + .digest = "\x9f\x00\xf5\x99\x07\x23\x00\xdd" + "\x27\x6a\xbb\x38\xc8\xeb\x6d\xec" + "\x37\x79\x0c\x11", + }, { + .plaintext = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+-", + .psize = 64, + .digest = "\x87\xfb\x2a\x90\x83\x85\x1c\xf7" + "\x47\x0d\x2c\xf8\x10\xe6\xdf\x9e" + "\xb5\x86\x44\x50", + }, { + .plaintext = "ABCDEFGHIJKLMNOPQRSTUVWXYZ=abcdefghijklmnopqrstuvwxyz+0123456789", + .psize = 64, + .digest = "\x46\x7d\xb8\x08\x63\xeb\xce\x48" + "\x8d\xf1\xcd\x12\x61\x65\x5d\xe9" + "\x57\x89\x65\x65", + }, { + .plaintext = "Tiger - A Fast New Hash Function, " + "by Ross Anderson and Eli Biham, " + "proceedings of Fast Software Encryption 3, " + "Cambridge, 1996.", + .psize = 125, + .digest = "\x3d\x9a\xeb\x03\xd1\xbd\x1a\x63" + "\x57\xb2\x77\x4d\xfd\x6d\x5b\x24" + "\xdd\x68\x15\x1d", + }, +}; + +#define TGR128_TEST_VECTORS 6 + 
+static struct hash_testvec tgr128_tv_template[] = { + { + .plaintext = "", + .psize = 0, + .digest = "\x24\xf0\x13\x0c\x63\xac\x93\x32" + "\x16\x16\x6e\x76\xb1\xbb\x92\x5f", + }, { + .plaintext = "abc", + .psize = 3, + .digest = "\xf2\x58\xc1\xe8\x84\x14\xab\x2a" + "\x52\x7a\xb5\x41\xff\xc5\xb8\xbf", + }, { + .plaintext = "Tiger", + .psize = 5, + .digest = "\x9f\x00\xf5\x99\x07\x23\x00\xdd" + "\x27\x6a\xbb\x38\xc8\xeb\x6d\xec", + }, { + .plaintext = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+-", + .psize = 64, + .digest = "\x87\xfb\x2a\x90\x83\x85\x1c\xf7" + "\x47\x0d\x2c\xf8\x10\xe6\xdf\x9e", + }, { + .plaintext = "ABCDEFGHIJKLMNOPQRSTUVWXYZ=abcdefghijklmnopqrstuvwxyz+0123456789", + .psize = 64, + .digest = "\x46\x7d\xb8\x08\x63\xeb\xce\x48" + "\x8d\xf1\xcd\x12\x61\x65\x5d\xe9", + }, { + .plaintext = "Tiger - A Fast New Hash Function, " + "by Ross Anderson and Eli Biham, " + "proceedings of Fast Software Encryption 3, " + "Cambridge, 1996.", + .psize = 125, + .digest = "\x3d\x9a\xeb\x03\xd1\xbd\x1a\x63" + "\x57\xb2\x77\x4d\xfd\x6d\x5b\x24", + }, +}; + +#define GHASH_TEST_VECTORS 1 + +static struct hash_testvec ghash_tv_template[] = +{ + { + + .key = "\xdf\xa6\xbf\x4d\xed\x81\xdb\x03\xff\xca\xff\x95\xf8\x30\xf0\x61", + .ksize = 16, + .plaintext = "\x95\x2b\x2a\x56\xa5\x60\x04a\xc0\xb3\x2b\x66\x56\xa0\x5b\x40\xb6", + .psize = 16, + .digest = "\xda\x53\xeb\x0a\xd2\xc5\x5b\xb6" + "\x4f\xc4\x80\x2c\xc3\xfe\xda\x60", + }, +}; + +/* + * HMAC-MD5 test vectors from RFC2202 + * (These need to be fixed to not use strlen). + */ +#define HMAC_MD5_TEST_VECTORS 7 + +static struct hash_testvec hmac_md5_tv_template[] = +{ + { + .key = "\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b", + .ksize = 16, + .plaintext = "Hi There", + .psize = 8, + .digest = "\x92\x94\x72\x7a\x36\x38\xbb\x1c" + "\x13\xf4\x8e\xf8\x15\x8b\xfc\x9d", + }, { + .key = "Jefe", + .ksize = 4, + .plaintext = "what do ya want for nothing?", + .psize = 28, + .digest = "\x75\x0c\x78\x3e\x6a\xb0\xb5\x03" + "\xea\xa8\x6e\x31\x0a\x5d\xb7\x38", + .np = 2, + .tap = {14, 14} + }, { + .key = "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa", + .ksize = 16, + .plaintext = "\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd" + "\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd" + "\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd" + "\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd", + .psize = 50, + .digest = "\x56\xbe\x34\x52\x1d\x14\x4c\x88" + "\xdb\xb8\xc7\x33\xf0\xe8\xb3\xf6", + }, { + .key = "\x01\x02\x03\x04\x05\x06\x07\x08" + "\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19", + .ksize = 25, + .plaintext = "\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd" + "\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd" + "\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd" + "\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd", + .psize = 50, + .digest = "\x69\x7e\xaf\x0a\xca\x3a\x3a\xea" + "\x3a\x75\x16\x47\x46\xff\xaa\x79", + }, { + .key = "\x0c\x0c\x0c\x0c\x0c\x0c\x0c\x0c\x0c\x0c\x0c\x0c\x0c\x0c\x0c\x0c", + .ksize = 16, + .plaintext = "Test With Truncation", + .psize = 20, + .digest = "\x56\x46\x1e\xf2\x34\x2e\xdc\x00" + "\xf9\xba\xb9\x95\x69\x0e\xfd\x4c", + }, { + .key = "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + 
"\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa", + .ksize = 80, + .plaintext = "Test Using Larger Than Block-Size Key - Hash Key First", + .psize = 54, + .digest = "\x6b\x1a\xb7\xfe\x4b\xd7\xbf\x8f" + "\x0b\x62\xe6\xce\x61\xb9\xd0\xcd", + }, { + .key = "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa", + .ksize = 80, + .plaintext = "Test Using Larger Than Block-Size Key and Larger Than One " + "Block-Size Data", + .psize = 73, + .digest = "\x6f\x63\x0f\xad\x67\xcd\xa0\xee" + "\x1f\xb1\xf5\x62\xdb\x3a\xa5\x3e", + }, +}; + +/* + * HMAC-RIPEMD128 test vectors from RFC2286 + */ +#define HMAC_RMD128_TEST_VECTORS 7 + +static struct hash_testvec hmac_rmd128_tv_template[] = { + { + .key = "\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b", + .ksize = 16, + .plaintext = "Hi There", + .psize = 8, + .digest = "\xfb\xf6\x1f\x94\x92\xaa\x4b\xbf" + "\x81\xc1\x72\xe8\x4e\x07\x34\xdb", + }, { + .key = "Jefe", + .ksize = 4, + .plaintext = "what do ya want for nothing?", + .psize = 28, + .digest = "\x87\x5f\x82\x88\x62\xb6\xb3\x34" + "\xb4\x27\xc5\x5f\x9f\x7f\xf0\x9b", + .np = 2, + .tap = { 14, 14 }, + }, { + .key = "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa", + .ksize = 16, + .plaintext = "\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd" + "\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd" + "\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd" + "\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd", + .psize = 50, + .digest = "\x09\xf0\xb2\x84\x6d\x2f\x54\x3d" + "\xa3\x63\xcb\xec\x8d\x62\xa3\x8d", + }, { + .key = "\x01\x02\x03\x04\x05\x06\x07\x08" + "\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19", + .ksize = 25, + .plaintext = "\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd" + "\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd" + "\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd" + "\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd", + .psize = 50, + .digest = "\xbd\xbb\xd7\xcf\x03\xe4\x4b\x5a" + "\xa6\x0a\xf8\x15\xbe\x4d\x22\x94", + }, { + .key = "\x0c\x0c\x0c\x0c\x0c\x0c\x0c\x0c\x0c\x0c\x0c\x0c\x0c\x0c\x0c\x0c", + .ksize = 16, + .plaintext = "Test With Truncation", + .psize = 20, + .digest = "\xe7\x98\x08\xf2\x4b\x25\xfd\x03" + "\x1c\x15\x5f\x0d\x55\x1d\x9a\x3a", + }, { + .key = "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa", + .ksize = 80, + .plaintext = "Test Using Larger Than Block-Size Key - Hash Key First", + .psize = 54, + .digest = "\xdc\x73\x29\x28\xde\x98\x10\x4a" + "\x1f\x59\xd3\x73\xc1\x50\xac\xbb", + }, { + .key = "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa", + .ksize = 80, + .plaintext = "Test 
Using Larger Than Block-Size Key and Larger Than One " + "Block-Size Data", + .psize = 73, + .digest = "\x5c\x6b\xec\x96\x79\x3e\x16\xd4" + "\x06\x90\xc2\x37\x63\x5f\x30\xc5", + }, +}; + +/* + * HMAC-RIPEMD160 test vectors from RFC2286 + */ +#define HMAC_RMD160_TEST_VECTORS 7 + +static struct hash_testvec hmac_rmd160_tv_template[] = { + { + .key = "\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b", + .ksize = 20, + .plaintext = "Hi There", + .psize = 8, + .digest = "\x24\xcb\x4b\xd6\x7d\x20\xfc\x1a\x5d\x2e" + "\xd7\x73\x2d\xcc\x39\x37\x7f\x0a\x56\x68", + }, { + .key = "Jefe", + .ksize = 4, + .plaintext = "what do ya want for nothing?", + .psize = 28, + .digest = "\xdd\xa6\xc0\x21\x3a\x48\x5a\x9e\x24\xf4" + "\x74\x20\x64\xa7\xf0\x33\xb4\x3c\x40\x69", + .np = 2, + .tap = { 14, 14 }, + }, { + .key = "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa", + .ksize = 20, + .plaintext = "\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd" + "\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd" + "\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd" + "\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd", + .psize = 50, + .digest = "\xb0\xb1\x05\x36\x0d\xe7\x59\x96\x0a\xb4" + "\xf3\x52\x98\xe1\x16\xe2\x95\xd8\xe7\xc1", + }, { + .key = "\x01\x02\x03\x04\x05\x06\x07\x08" + "\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19", + .ksize = 25, + .plaintext = "\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd" + "\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd" + "\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd" + "\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd", + .psize = 50, + .digest = "\xd5\xca\x86\x2f\x4d\x21\xd5\xe6\x10\xe1" + "\x8b\x4c\xf1\xbe\xb9\x7a\x43\x65\xec\xf4", + }, { + .key = "\x0c\x0c\x0c\x0c\x0c\x0c\x0c\x0c\x0c\x0c\x0c\x0c\x0c\x0c\x0c\x0c\x0c\x0c\x0c\x0c", + .ksize = 20, + .plaintext = "Test With Truncation", + .psize = 20, + .digest = "\x76\x19\x69\x39\x78\xf9\x1d\x90\x53\x9a" + "\xe7\x86\x50\x0f\xf3\xd8\xe0\x51\x8e\x39", + }, { + .key = "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa", + .ksize = 80, + .plaintext = "Test Using Larger Than Block-Size Key - Hash Key First", + .psize = 54, + .digest = "\x64\x66\xca\x07\xac\x5e\xac\x29\xe1\xbd" + "\x52\x3e\x5a\xda\x76\x05\xb7\x91\xfd\x8b", + }, { + .key = "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa", + .ksize = 80, + .plaintext = "Test Using Larger Than Block-Size Key and Larger Than One " + "Block-Size Data", + .psize = 73, + .digest = "\x69\xea\x60\x79\x8d\x71\x61\x6c\xce\x5f" + "\xd0\x87\x1e\x23\x75\x4c\xd7\x5d\x5a\x0a", + }, +}; + +/* + * HMAC-SHA1 test vectors from RFC2202 + */ +#define HMAC_SHA1_TEST_VECTORS 7 + +static struct hash_testvec hmac_sha1_tv_template[] = { + { + .key = "\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b", + .ksize = 20, + .plaintext = "Hi There", + .psize = 8, + .digest = 
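+/*
+ * Illustrative note (an editorial addition, not part of the original patch):
+ * the RFC 2202/2286 templates above and below all exercise the generic HMAC
+ * construction of RFC 2104,
+ *
+ *	HMAC(K, m) = H((K' XOR opad) || H((K' XOR ipad) || m))
+ *
+ * with ipad = 0x36.., opad = 0x5c.., and K' the key zero-padded to the hash
+ * block size, or H(K) when the key is longer than one block.  That key
+ * pre-hashing is what the "Larger Than Block-Size Key - Hash Key First"
+ * cases are checking.
+ */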
"\xb6\x17\x31\x86\x55\x05\x72\x64" + "\xe2\x8b\xc0\xb6\xfb\x37\x8c\x8e\xf1" + "\x46\xbe", + }, { + .key = "Jefe", + .ksize = 4, + .plaintext = "what do ya want for nothing?", + .psize = 28, + .digest = "\xef\xfc\xdf\x6a\xe5\xeb\x2f\xa2\xd2\x74" + "\x16\xd5\xf1\x84\xdf\x9c\x25\x9a\x7c\x79", + .np = 2, + .tap = { 14, 14 } + }, { + .key = "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa", + .ksize = 20, + .plaintext = "\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd" + "\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd" + "\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd" + "\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd", + .psize = 50, + .digest = "\x12\x5d\x73\x42\xb9\xac\x11\xcd\x91\xa3" + "\x9a\xf4\x8a\xa1\x7b\x4f\x63\xf1\x75\xd3", + }, { + .key = "\x01\x02\x03\x04\x05\x06\x07\x08" + "\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19", + .ksize = 25, + .plaintext = "\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd" + "\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd" + "\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd" + "\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd", + .psize = 50, + .digest = "\x4c\x90\x07\xf4\x02\x62\x50\xc6\xbc\x84" + "\x14\xf9\xbf\x50\xc8\x6c\x2d\x72\x35\xda", + }, { + .key = "\x0c\x0c\x0c\x0c\x0c\x0c\x0c\x0c\x0c\x0c\x0c\x0c\x0c\x0c\x0c\x0c\x0c\x0c\x0c\x0c", + .ksize = 20, + .plaintext = "Test With Truncation", + .psize = 20, + .digest = "\x4c\x1a\x03\x42\x4b\x55\xe0\x7f\xe7\xf2" + "\x7b\xe1\xd5\x8b\xb9\x32\x4a\x9a\x5a\x04", + }, { + .key = "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa", + .ksize = 80, + .plaintext = "Test Using Larger Than Block-Size Key - Hash Key First", + .psize = 54, + .digest = "\xaa\x4a\xe5\xe1\x52\x72\xd0\x0e\x95\x70" + "\x56\x37\xce\x8a\x3b\x55\xed\x40\x21\x12", + }, { + .key = "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa", + .ksize = 80, + .plaintext = "Test Using Larger Than Block-Size Key and Larger Than One " + "Block-Size Data", + .psize = 73, + .digest = "\xe8\xe9\x9d\x0f\x45\x23\x7d\x78\x6d\x6b" + "\xba\xa7\x96\x5c\x78\x08\xbb\xff\x1a\x91", + }, +}; + + +/* + * SHA224 HMAC test vectors from RFC4231 + */ +#define HMAC_SHA224_TEST_VECTORS 4 + +static struct hash_testvec hmac_sha224_tv_template[] = { + { + .key = "\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b" + "\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b" + "\x0b\x0b\x0b\x0b", + .ksize = 20, + /* ("Hi There") */ + .plaintext = "\x48\x69\x20\x54\x68\x65\x72\x65", + .psize = 8, + .digest = "\x89\x6f\xb1\x12\x8a\xbb\xdf\x19" + "\x68\x32\x10\x7c\xd4\x9d\xf3\x3f" + "\x47\xb4\xb1\x16\x99\x12\xba\x4f" + "\x53\x68\x4b\x22", + }, { + .key = "Jefe", + .ksize = 4, + /* ("what do ya want for nothing?") */ + .plaintext = "\x77\x68\x61\x74\x20\x64\x6f\x20" + "\x79\x61\x20\x77\x61\x6e\x74\x20" + "\x66\x6f\x72\x20\x6e\x6f\x74\x68" + "\x69\x6e\x67\x3f", + .psize = 28, + .digest = "\xa3\x0e\x01\x09\x8b\xc6\xdb\xbf" + 
"\x45\x69\x0f\x3a\x7e\x9e\x6d\x0f" + "\x8b\xbe\xa2\xa3\x9e\x61\x48\x00" + "\x8f\xd0\x5e\x44", + .np = 4, + .tap = { 7, 7, 7, 7 } + }, { + .key = "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa", + .ksize = 131, + /* ("Test Using Larger Than Block-Size Key - Hash Key First") */ + .plaintext = "\x54\x65\x73\x74\x20\x55\x73\x69" + "\x6e\x67\x20\x4c\x61\x72\x67\x65" + "\x72\x20\x54\x68\x61\x6e\x20\x42" + "\x6c\x6f\x63\x6b\x2d\x53\x69\x7a" + "\x65\x20\x4b\x65\x79\x20\x2d\x20" + "\x48\x61\x73\x68\x20\x4b\x65\x79" + "\x20\x46\x69\x72\x73\x74", + .psize = 54, + .digest = "\x95\xe9\xa0\xdb\x96\x20\x95\xad" + "\xae\xbe\x9b\x2d\x6f\x0d\xbc\xe2" + "\xd4\x99\xf1\x12\xf2\xd2\xb7\x27" + "\x3f\xa6\x87\x0e", + }, { + .key = "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa", + .ksize = 131, + /* ("This is a test using a larger than block-size key and a") + (" larger than block-size data. 
The key needs to be") + (" hashed before being used by the HMAC algorithm.") */ + .plaintext = "\x54\x68\x69\x73\x20\x69\x73\x20" + "\x61\x20\x74\x65\x73\x74\x20\x75" + "\x73\x69\x6e\x67\x20\x61\x20\x6c" + "\x61\x72\x67\x65\x72\x20\x74\x68" + "\x61\x6e\x20\x62\x6c\x6f\x63\x6b" + "\x2d\x73\x69\x7a\x65\x20\x6b\x65" + "\x79\x20\x61\x6e\x64\x20\x61\x20" + "\x6c\x61\x72\x67\x65\x72\x20\x74" + "\x68\x61\x6e\x20\x62\x6c\x6f\x63" + "\x6b\x2d\x73\x69\x7a\x65\x20\x64" + "\x61\x74\x61\x2e\x20\x54\x68\x65" + "\x20\x6b\x65\x79\x20\x6e\x65\x65" + "\x64\x73\x20\x74\x6f\x20\x62\x65" + "\x20\x68\x61\x73\x68\x65\x64\x20" + "\x62\x65\x66\x6f\x72\x65\x20\x62" + "\x65\x69\x6e\x67\x20\x75\x73\x65" + "\x64\x20\x62\x79\x20\x74\x68\x65" + "\x20\x48\x4d\x41\x43\x20\x61\x6c" + "\x67\x6f\x72\x69\x74\x68\x6d\x2e", + .psize = 152, + .digest = "\x3a\x85\x41\x66\xac\x5d\x9f\x02" + "\x3f\x54\xd5\x17\xd0\xb3\x9d\xbd" + "\x94\x67\x70\xdb\x9c\x2b\x95\xc9" + "\xf6\xf5\x65\xd1", + }, +}; + +/* + * HMAC-SHA256 test vectors from + * draft-ietf-ipsec-ciph-sha-256-01.txt + */ +#define HMAC_SHA256_TEST_VECTORS 10 + +static struct hash_testvec hmac_sha256_tv_template[] = { + { + .key = "\x01\x02\x03\x04\x05\x06\x07\x08" + "\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18" + "\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20", + .ksize = 32, + .plaintext = "abc", + .psize = 3, + .digest = "\xa2\x1b\x1f\x5d\x4c\xf4\xf7\x3a" + "\x4d\xd9\x39\x75\x0f\x7a\x06\x6a" + "\x7f\x98\xcc\x13\x1c\xb1\x6a\x66" + "\x92\x75\x90\x21\xcf\xab\x81\x81", + }, { + .key = "\x01\x02\x03\x04\x05\x06\x07\x08" + "\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18" + "\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20", + .ksize = 32, + .plaintext = "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq", + .psize = 56, + .digest = "\x10\x4f\xdc\x12\x57\x32\x8f\x08" + "\x18\x4b\xa7\x31\x31\xc5\x3c\xae" + "\xe6\x98\xe3\x61\x19\x42\x11\x49" + "\xea\x8c\x71\x24\x56\x69\x7d\x30", + }, { + .key = "\x01\x02\x03\x04\x05\x06\x07\x08" + "\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18" + "\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20", + .ksize = 32, + .plaintext = "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq" + "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq", + .psize = 112, + .digest = "\x47\x03\x05\xfc\x7e\x40\xfe\x34" + "\xd3\xee\xb3\xe7\x73\xd9\x5a\xab" + "\x73\xac\xf0\xfd\x06\x04\x47\xa5" + "\xeb\x45\x95\xbf\x33\xa9\xd1\xa3", + }, { + .key = "\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b" + "\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b" + "\x0b\x0b\x0b\x0b\x0b\x0b", + .ksize = 32, + .plaintext = "Hi There", + .psize = 8, + .digest = "\x19\x8a\x60\x7e\xb4\x4b\xfb\xc6" + "\x99\x03\xa0\xf1\xcf\x2b\xbd\xc5" + "\xba\x0a\xa3\xf3\xd9\xae\x3c\x1c" + "\x7a\x3b\x16\x96\xa0\xb6\x8c\xf7", + }, { + .key = "Jefe", + .ksize = 4, + .plaintext = "what do ya want for nothing?", + .psize = 28, + .digest = "\x5b\xdc\xc1\x46\xbf\x60\x75\x4e" + "\x6a\x04\x24\x26\x08\x95\x75\xc7" + "\x5a\x00\x3f\x08\x9d\x27\x39\x83" + "\x9d\xec\x58\xb9\x64\xec\x38\x43", + .np = 2, + .tap = { 14, 14 } + }, { + .key = "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa", + .ksize = 32, + .plaintext = "\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd" + "\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd" + "\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd" + "\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd", + .psize = 50, + .digest = 
"\xcd\xcb\x12\x20\xd1\xec\xcc\xea" + "\x91\xe5\x3a\xba\x30\x92\xf9\x62" + "\xe5\x49\xfe\x6c\xe9\xed\x7f\xdc" + "\x43\x19\x1f\xbd\xe4\x5c\x30\xb0", + }, { + .key = "\x01\x02\x03\x04\x05\x06\x07\x08" + "\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18" + "\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20" + "\x21\x22\x23\x24\x25", + .ksize = 37, + .plaintext = "\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd" + "\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd" + "\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd" + "\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd", + .psize = 50, + .digest = "\xd4\x63\x3c\x17\xf6\xfb\x8d\x74" + "\x4c\x66\xde\xe0\xf8\xf0\x74\x55" + "\x6e\xc4\xaf\x55\xef\x07\x99\x85" + "\x41\x46\x8e\xb4\x9b\xd2\xe9\x17", + }, { + .key = "\x0c\x0c\x0c\x0c\x0c\x0c\x0c\x0c\x0c\x0c\x0c\x0c\x0c" + "\x0c\x0c\x0c\x0c\x0c\x0c\x0c\x0c\x0c\x0c\x0c\x0c\x0c" + "\x0c\x0c\x0c\x0c\x0c\x0c", + .ksize = 32, + .plaintext = "Test With Truncation", + .psize = 20, + .digest = "\x75\x46\xaf\x01\x84\x1f\xc0\x9b" + "\x1a\xb9\xc3\x74\x9a\x5f\x1c\x17" + "\xd4\xf5\x89\x66\x8a\x58\x7b\x27" + "\x00\xa9\xc9\x7c\x11\x93\xcf\x42", + }, { + .key = "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa", + .ksize = 80, + .plaintext = "Test Using Larger Than Block-Size Key - Hash Key First", + .psize = 54, + .digest = "\x69\x53\x02\x5e\xd9\x6f\x0c\x09" + "\xf8\x0a\x96\xf7\x8e\x65\x38\xdb" + "\xe2\xe7\xb8\x20\xe3\xdd\x97\x0e" + "\x7d\xdd\x39\x09\x1b\x32\x35\x2f", + }, { + .key = "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa", + .ksize = 80, + .plaintext = "Test Using Larger Than Block-Size Key and Larger Than " + "One Block-Size Data", + .psize = 73, + .digest = "\x63\x55\xac\x22\xe8\x90\xd0\xa3" + "\xc8\x48\x1a\x5c\xa4\x82\x5b\xc8" + "\x84\xd3\xe7\xa1\xff\x98\xa2\xfc" + "\x2a\xc7\xd8\xe0\x64\xc3\xb2\xe6", + }, +}; + +#define XCBC_AES_TEST_VECTORS 6 + +static struct hash_testvec aes_xcbc128_tv_template[] = { + { + .key = "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + .plaintext = zeroed_string, + .digest = "\x75\xf0\x25\x1d\x52\x8a\xc0\x1c" + "\x45\x73\xdf\xd5\x84\xd7\x9f\x29", + .psize = 0, + .ksize = 16, + }, { + .key = "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + .plaintext = "\x00\x01\x02", + .digest = "\x5b\x37\x65\x80\xae\x2f\x19\xaf" + "\xe7\x21\x9c\xee\xf1\x72\x75\x6f", + .psize = 3, + .ksize = 16, + } , { + .key = "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + .plaintext = "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + .digest = "\xd2\xa2\x46\xfa\x34\x9b\x68\xa7" + "\x99\x98\xa4\x39\x4f\xf7\xa2\x63", + .psize = 16, + .ksize = 16, + }, { + .key = "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + .plaintext = "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f" + "\x10\x11\x12\x13", + .digest = "\x47\xf5\x1b\x45\x64\x96\x62\x15" + 
"\xb8\x98\x5c\x63\x05\x5e\xd3\x08", + .tap = { 10, 10 }, + .psize = 20, + .np = 2, + .ksize = 16, + }, { + .key = "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + .plaintext = "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f" + "\x10\x11\x12\x13\x14\x15\x16\x17" + "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + .digest = "\xf5\x4f\x0e\xc8\xd2\xb9\xf3\xd3" + "\x68\x07\x73\x4b\xd5\x28\x3f\xd4", + .psize = 32, + .ksize = 16, + }, { + .key = "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + .plaintext = "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f" + "\x10\x11\x12\x13\x14\x15\x16\x17" + "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f" + "\x20\x21", + .digest = "\xbe\xcb\xb3\xbc\xcd\xb5\x18\xa3" + "\x06\x77\xd5\x48\x1f\xb6\xb4\xd8", + .tap = { 17, 17 }, + .psize = 34, + .np = 2, + .ksize = 16, + } +}; + +#define VMAC_AES_TEST_VECTORS 8 +static char vmac_string1[128] = {'\x01', '\x01', '\x01', '\x01', + '\x02', '\x03', '\x02', '\x02', + '\x02', '\x04', '\x01', '\x07', + '\x04', '\x01', '\x04', '\x03',}; +static char vmac_string2[128] = {'a', 'b', 'c',}; +static char vmac_string3[128] = {'a', 'b', 'c', 'a', 'b', 'c', + 'a', 'b', 'c', 'a', 'b', 'c', + 'a', 'b', 'c', 'a', 'b', 'c', + 'a', 'b', 'c', 'a', 'b', 'c', + 'a', 'b', 'c', 'a', 'b', 'c', + 'a', 'b', 'c', 'a', 'b', 'c', + 'a', 'b', 'c', 'a', 'b', 'c', + 'a', 'b', 'c', 'a', 'b', 'c', + }; + +static struct hash_testvec aes_vmac128_tv_template[] = { + { + .key = "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + .plaintext = NULL, + .digest = "\x07\x58\x80\x35\x77\xa4\x7b\x54", + .psize = 0, + .ksize = 16, + }, { + .key = "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + .plaintext = vmac_string1, + .digest = "\xce\xf5\x3c\xd3\xae\x68\x8c\xa1", + .psize = 128, + .ksize = 16, + }, { + .key = "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + .plaintext = vmac_string2, + .digest = "\xc9\x27\xb0\x73\x81\xbd\x14\x2d", + .psize = 128, + .ksize = 16, + }, { + .key = "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + .plaintext = vmac_string3, + .digest = "\x8d\x1a\x95\x8c\x98\x47\x0b\x19", + .psize = 128, + .ksize = 16, + }, { + .key = "abcdefghijklmnop", + .plaintext = NULL, + .digest = "\x3b\x89\xa1\x26\x9e\x55\x8f\x84", + .psize = 0, + .ksize = 16, + }, { + .key = "abcdefghijklmnop", + .plaintext = vmac_string1, + .digest = "\xab\x5e\xab\xb0\xf6\x8d\x74\xc2", + .psize = 128, + .ksize = 16, + }, { + .key = "abcdefghijklmnop", + .plaintext = vmac_string2, + .digest = "\x11\x15\x68\x42\x3d\x7b\x09\xdf", + .psize = 128, + .ksize = 16, + }, { + .key = "abcdefghijklmnop", + .plaintext = vmac_string3, + .digest = "\x8b\x32\x8f\xe1\xed\x8f\xfa\xd4", + .psize = 128, + .ksize = 16, + }, +}; + +/* + * SHA384 HMAC test vectors from RFC4231 + */ + +#define HMAC_SHA384_TEST_VECTORS 4 + +static struct hash_testvec hmac_sha384_tv_template[] = { + { + .key = "\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b" + "\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b" + "\x0b\x0b\x0b\x0b", + .ksize = 20, + .plaintext = "Hi There", + .psize = 8, + .digest = "\xaf\xd0\x39\x44\xd8\x48\x95\x62" + "\x6b\x08\x25\xf4\xab\x46\x90\x7f" + "\x15\xf9\xda\xdb\xe4\x10\x1e\xc6" + "\x82\xaa\x03\x4c\x7c\xeb\xc5\x9c" + "\xfa\xea\x9e\xa9\x07\x6e\xde\x7f" + "\x4a\xf1\x52\xe8\xb2\xfa\x9c\xb6", + }, { + .key = "Jefe", + .ksize = 4, + .plaintext = "what do ya want for nothing?", + .psize = 28, + .digest = "\xaf\x45\xd2\xe3\x76\x48\x40\x31" + 
"\x61\x7f\x78\xd2\xb5\x8a\x6b\x1b" + "\x9c\x7e\xf4\x64\xf5\xa0\x1b\x47" + "\xe4\x2e\xc3\x73\x63\x22\x44\x5e" + "\x8e\x22\x40\xca\x5e\x69\xe2\xc7" + "\x8b\x32\x39\xec\xfa\xb2\x16\x49", + .np = 4, + .tap = { 7, 7, 7, 7 } + }, { + .key = "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa", + .ksize = 131, + .plaintext = "Test Using Larger Than Block-Siz" + "e Key - Hash Key First", + .psize = 54, + .digest = "\x4e\xce\x08\x44\x85\x81\x3e\x90" + "\x88\xd2\xc6\x3a\x04\x1b\xc5\xb4" + "\x4f\x9e\xf1\x01\x2a\x2b\x58\x8f" + "\x3c\xd1\x1f\x05\x03\x3a\xc4\xc6" + "\x0c\x2e\xf6\xab\x40\x30\xfe\x82" + "\x96\x24\x8d\xf1\x63\xf4\x49\x52", + }, { + .key = "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa", + .ksize = 131, + .plaintext = "This is a test u" + "sing a larger th" + "an block-size ke" + "y and a larger t" + "han block-size d" + "ata. 
The key nee" + "ds to be hashed " + "before being use" + "d by the HMAC al" + "gorithm.", + .psize = 152, + .digest = "\x66\x17\x17\x8e\x94\x1f\x02\x0d" + "\x35\x1e\x2f\x25\x4e\x8f\xd3\x2c" + "\x60\x24\x20\xfe\xb0\xb8\xfb\x9a" + "\xdc\xce\xbb\x82\x46\x1e\x99\xc5" + "\xa6\x78\xcc\x31\xe7\x99\x17\x6d" + "\x38\x60\xe6\x11\x0c\x46\x52\x3e", + }, +}; + +/* + * SHA512 HMAC test vectors from RFC4231 + */ + +#define HMAC_SHA512_TEST_VECTORS 4 + +static struct hash_testvec hmac_sha512_tv_template[] = { + { + .key = "\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b" + "\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b" + "\x0b\x0b\x0b\x0b", + .ksize = 20, + .plaintext = "Hi There", + .psize = 8, + .digest = "\x87\xaa\x7c\xde\xa5\xef\x61\x9d" + "\x4f\xf0\xb4\x24\x1a\x1d\x6c\xb0" + "\x23\x79\xf4\xe2\xce\x4e\xc2\x78" + "\x7a\xd0\xb3\x05\x45\xe1\x7c\xde" + "\xda\xa8\x33\xb7\xd6\xb8\xa7\x02" + "\x03\x8b\x27\x4e\xae\xa3\xf4\xe4" + "\xbe\x9d\x91\x4e\xeb\x61\xf1\x70" + "\x2e\x69\x6c\x20\x3a\x12\x68\x54", + }, { + .key = "Jefe", + .ksize = 4, + .plaintext = "what do ya want for nothing?", + .psize = 28, + .digest = "\x16\x4b\x7a\x7b\xfc\xf8\x19\xe2" + "\xe3\x95\xfb\xe7\x3b\x56\xe0\xa3" + "\x87\xbd\x64\x22\x2e\x83\x1f\xd6" + "\x10\x27\x0c\xd7\xea\x25\x05\x54" + "\x97\x58\xbf\x75\xc0\x5a\x99\x4a" + "\x6d\x03\x4f\x65\xf8\xf0\xe6\xfd" + "\xca\xea\xb1\xa3\x4d\x4a\x6b\x4b" + "\x63\x6e\x07\x0a\x38\xbc\xe7\x37", + .np = 4, + .tap = { 7, 7, 7, 7 } + }, { + .key = "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa", + .ksize = 131, + .plaintext = "Test Using Large" + "r Than Block-Siz" + "e Key - Hash Key" + " First", + .psize = 54, + .digest = "\x80\xb2\x42\x63\xc7\xc1\xa3\xeb" + "\xb7\x14\x93\xc1\xdd\x7b\xe8\xb4" + "\x9b\x46\xd1\xf4\x1b\x4a\xee\xc1" + "\x12\x1b\x01\x37\x83\xf8\xf3\x52" + "\x6b\x56\xd0\x37\xe0\x5f\x25\x98" + "\xbd\x0f\xd2\x21\x5d\x6a\x1e\x52" + "\x95\xe6\x4f\x73\xf6\x3f\x0a\xec" + "\x8b\x91\x5a\x98\x5d\x78\x65\x98", + }, { + .key = "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa", + .ksize = 131, + .plaintext = + "This is a test u" + "sing a larger th" + "an block-size ke" + "y and a larger t" + "han block-size d" + "ata. 
The key nee" + "ds to be hashed " + "before being use" + "d by the HMAC al" + "gorithm.", + .psize = 152, + .digest = "\xe3\x7b\x6a\x77\x5d\xc8\x7d\xba" + "\xa4\xdf\xa9\xf9\x6e\x5e\x3f\xfd" + "\xde\xbd\x71\xf8\x86\x72\x89\x86" + "\x5d\xf5\xa3\x2d\x20\xcd\xc9\x44" + "\xb6\x02\x2c\xac\x3c\x49\x82\xb1" + "\x0d\x5e\xeb\x55\xc3\xe4\xde\x15" + "\x13\x46\x76\xfb\x6d\xe0\x44\x60" + "\x65\xc9\x74\x40\xfa\x8c\x6a\x58", + }, +}; + +/* + * DES test vectors. + */ +#define DES_ENC_TEST_VECTORS 10 +#define DES_DEC_TEST_VECTORS 4 +#define DES_CBC_ENC_TEST_VECTORS 5 +#define DES_CBC_DEC_TEST_VECTORS 4 +#define DES3_EDE_ENC_TEST_VECTORS 3 +#define DES3_EDE_DEC_TEST_VECTORS 3 +#define DES3_EDE_CBC_ENC_TEST_VECTORS 1 +#define DES3_EDE_CBC_DEC_TEST_VECTORS 1 + +static struct cipher_testvec des_enc_tv_template[] = { + { /* From Applied Cryptography */ + .key = "\x01\x23\x45\x67\x89\xab\xcd\xef", + .klen = 8, + .input = "\x01\x23\x45\x67\x89\xab\xcd\xe7", + .ilen = 8, + .result = "\xc9\x57\x44\x25\x6a\x5e\xd3\x1d", + .rlen = 8, + }, { /* Same key, different plaintext block */ + .key = "\x01\x23\x45\x67\x89\xab\xcd\xef", + .klen = 8, + .input = "\x22\x33\x44\x55\x66\x77\x88\x99", + .ilen = 8, + .result = "\xf7\x9c\x89\x2a\x33\x8f\x4a\x8b", + .rlen = 8, + }, { /* Sbox test from NBS */ + .key = "\x7c\xa1\x10\x45\x4a\x1a\x6e\x57", + .klen = 8, + .input = "\x01\xa1\xd6\xd0\x39\x77\x67\x42", + .ilen = 8, + .result = "\x69\x0f\x5b\x0d\x9a\x26\x93\x9b", + .rlen = 8, + }, { /* Three blocks */ + .key = "\x01\x23\x45\x67\x89\xab\xcd\xef", + .klen = 8, + .input = "\x01\x23\x45\x67\x89\xab\xcd\xe7" + "\x22\x33\x44\x55\x66\x77\x88\x99" + "\xca\xfe\xba\xbe\xfe\xed\xbe\xef", + .ilen = 24, + .result = "\xc9\x57\x44\x25\x6a\x5e\xd3\x1d" + "\xf7\x9c\x89\x2a\x33\x8f\x4a\x8b" + "\xb4\x99\x26\xf7\x1f\xe1\xd4\x90", + .rlen = 24, + }, { /* Weak key */ + .fail = 1, + .wk = 1, + .key = "\x01\x01\x01\x01\x01\x01\x01\x01", + .klen = 8, + .input = "\x01\x23\x45\x67\x89\xab\xcd\xe7", + .ilen = 8, + .result = "\xc9\x57\x44\x25\x6a\x5e\xd3\x1d", + .rlen = 8, + }, { /* Two blocks -- for testing encryption across pages */ + .key = "\x01\x23\x45\x67\x89\xab\xcd\xef", + .klen = 8, + .input = "\x01\x23\x45\x67\x89\xab\xcd\xe7" + "\x22\x33\x44\x55\x66\x77\x88\x99", + .ilen = 16, + .result = "\xc9\x57\x44\x25\x6a\x5e\xd3\x1d" + "\xf7\x9c\x89\x2a\x33\x8f\x4a\x8b", + .rlen = 16, + .np = 2, + .tap = { 8, 8 } + }, { /* Four blocks -- for testing encryption with chunking */ + .key = "\x01\x23\x45\x67\x89\xab\xcd\xef", + .klen = 8, + .input = "\x01\x23\x45\x67\x89\xab\xcd\xe7" + "\x22\x33\x44\x55\x66\x77\x88\x99" + "\xca\xfe\xba\xbe\xfe\xed\xbe\xef" + "\x22\x33\x44\x55\x66\x77\x88\x99", + .ilen = 32, + .result = "\xc9\x57\x44\x25\x6a\x5e\xd3\x1d" + "\xf7\x9c\x89\x2a\x33\x8f\x4a\x8b" + "\xb4\x99\x26\xf7\x1f\xe1\xd4\x90" + "\xf7\x9c\x89\x2a\x33\x8f\x4a\x8b", + .rlen = 32, + .np = 3, + .tap = { 14, 10, 8 } + }, { + .key = "\x01\x23\x45\x67\x89\xab\xcd\xef", + .klen = 8, + .input = "\x01\x23\x45\x67\x89\xab\xcd\xe7" + "\x22\x33\x44\x55\x66\x77\x88\x99" + "\xca\xfe\xba\xbe\xfe\xed\xbe\xef", + .ilen = 24, + .result = "\xc9\x57\x44\x25\x6a\x5e\xd3\x1d" + "\xf7\x9c\x89\x2a\x33\x8f\x4a\x8b" + "\xb4\x99\x26\xf7\x1f\xe1\xd4\x90", + .rlen = 24, + .np = 4, + .tap = { 2, 1, 3, 18 } + }, { + .key = "\x01\x23\x45\x67\x89\xab\xcd\xef", + .klen = 8, + .input = "\x01\x23\x45\x67\x89\xab\xcd\xe7" + "\x22\x33\x44\x55\x66\x77\x88\x99", + .ilen = 16, + .result = "\xc9\x57\x44\x25\x6a\x5e\xd3\x1d" + "\xf7\x9c\x89\x2a\x33\x8f\x4a\x8b", + .rlen = 16, + .np = 5, + .tap = { 2, 2, 2, 
2, 8 } + }, { + .key = "\x01\x23\x45\x67\x89\xab\xcd\xef", + .klen = 8, + .input = "\x01\x23\x45\x67\x89\xab\xcd\xe7", + .ilen = 8, + .result = "\xc9\x57\x44\x25\x6a\x5e\xd3\x1d", + .rlen = 8, + .np = 8, + .tap = { 1, 1, 1, 1, 1, 1, 1, 1 } + }, +}; + +static struct cipher_testvec des_dec_tv_template[] = { + { /* From Applied Cryptography */ + .key = "\x01\x23\x45\x67\x89\xab\xcd\xef", + .klen = 8, + .input = "\xc9\x57\x44\x25\x6a\x5e\xd3\x1d", + .ilen = 8, + .result = "\x01\x23\x45\x67\x89\xab\xcd\xe7", + .rlen = 8, + }, { /* Sbox test from NBS */ + .key = "\x7c\xa1\x10\x45\x4a\x1a\x6e\x57", + .klen = 8, + .input = "\x69\x0f\x5b\x0d\x9a\x26\x93\x9b", + .ilen = 8, + .result = "\x01\xa1\xd6\xd0\x39\x77\x67\x42", + .rlen = 8, + }, { /* Two blocks, for chunking test */ + .key = "\x01\x23\x45\x67\x89\xab\xcd\xef", + .klen = 8, + .input = "\xc9\x57\x44\x25\x6a\x5e\xd3\x1d" + "\x69\x0f\x5b\x0d\x9a\x26\x93\x9b", + .ilen = 16, + .result = "\x01\x23\x45\x67\x89\xab\xcd\xe7" + "\xa3\x99\x7b\xca\xaf\x69\xa0\xf5", + .rlen = 16, + .np = 2, + .tap = { 8, 8 } + }, { + .key = "\x01\x23\x45\x67\x89\xab\xcd\xef", + .klen = 8, + .input = "\xc9\x57\x44\x25\x6a\x5e\xd3\x1d" + "\x69\x0f\x5b\x0d\x9a\x26\x93\x9b", + .ilen = 16, + .result = "\x01\x23\x45\x67\x89\xab\xcd\xe7" + "\xa3\x99\x7b\xca\xaf\x69\xa0\xf5", + .rlen = 16, + .np = 3, + .tap = { 3, 12, 1 } + }, +}; + +static struct cipher_testvec des_cbc_enc_tv_template[] = { + { /* From OpenSSL */ + .key = "\x01\x23\x45\x67\x89\xab\xcd\xef", + .klen = 8, + .iv = "\xfe\xdc\xba\x98\x76\x54\x32\x10", + .input = "\x37\x36\x35\x34\x33\x32\x31\x20" + "\x4e\x6f\x77\x20\x69\x73\x20\x74" + "\x68\x65\x20\x74\x69\x6d\x65\x20", + .ilen = 24, + .result = "\xcc\xd1\x73\xff\xab\x20\x39\xf4" + "\xac\xd8\xae\xfd\xdf\xd8\xa1\xeb" + "\x46\x8e\x91\x15\x78\x88\xba\x68", + .rlen = 24, + }, { /* FIPS Pub 81 */ + .key = "\x01\x23\x45\x67\x89\xab\xcd\xef", + .klen = 8, + .iv = "\x12\x34\x56\x78\x90\xab\xcd\xef", + .input = "\x4e\x6f\x77\x20\x69\x73\x20\x74", + .ilen = 8, + .result = "\xe5\xc7\xcd\xde\x87\x2b\xf2\x7c", + .rlen = 8, + }, { + .key = "\x01\x23\x45\x67\x89\xab\xcd\xef", + .klen = 8, + .iv = "\xe5\xc7\xcd\xde\x87\x2b\xf2\x7c", + .input = "\x68\x65\x20\x74\x69\x6d\x65\x20", + .ilen = 8, + .result = "\x43\xe9\x34\x00\x8c\x38\x9c\x0f", + .rlen = 8, + }, { + .key = "\x01\x23\x45\x67\x89\xab\xcd\xef", + .klen = 8, + .iv = "\x43\xe9\x34\x00\x8c\x38\x9c\x0f", + .input = "\x66\x6f\x72\x20\x61\x6c\x6c\x20", + .ilen = 8, + .result = "\x68\x37\x88\x49\x9a\x7c\x05\xf6", + .rlen = 8, + }, { /* Copy of openssl vector for chunk testing */ + /* From OpenSSL */ + .key = "\x01\x23\x45\x67\x89\xab\xcd\xef", + .klen = 8, + .iv = "\xfe\xdc\xba\x98\x76\x54\x32\x10", + .input = "\x37\x36\x35\x34\x33\x32\x31\x20" + "\x4e\x6f\x77\x20\x69\x73\x20\x74" + "\x68\x65\x20\x74\x69\x6d\x65\x20", + .ilen = 24, + .result = "\xcc\xd1\x73\xff\xab\x20\x39\xf4" + "\xac\xd8\xae\xfd\xdf\xd8\xa1\xeb" + "\x46\x8e\x91\x15\x78\x88\xba\x68", + .rlen = 24, + .np = 2, + .tap = { 13, 11 } + }, +}; + +static struct cipher_testvec des_cbc_dec_tv_template[] = { + { /* FIPS Pub 81 */ + .key = "\x01\x23\x45\x67\x89\xab\xcd\xef", + .klen = 8, + .iv = "\x12\x34\x56\x78\x90\xab\xcd\xef", + .input = "\xe5\xc7\xcd\xde\x87\x2b\xf2\x7c", + .ilen = 8, + .result = "\x4e\x6f\x77\x20\x69\x73\x20\x74", + .rlen = 8, + }, { + .key = "\x01\x23\x45\x67\x89\xab\xcd\xef", + .klen = 8, + .iv = "\xe5\xc7\xcd\xde\x87\x2b\xf2\x7c", + .input = "\x43\xe9\x34\x00\x8c\x38\x9c\x0f", + .ilen = 8, + .result = "\x68\x65\x20\x74\x69\x6d\x65\x20", + .rlen = 8, 
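+/*
+ * Illustrative sketch (an editorial addition, not part of the original
+ * patch): in the chunked cases above, .np appears to give the number of
+ * scatterlist pieces the input is split into and .tap[] their lengths,
+ * which should sum to .ilen (or .psize for the hash templates); .wk/.fail
+ * on the weak-key DES entry mark a key whose setkey is expected to be
+ * rejected when weak keys are disallowed.  A minimal way such a split could
+ * be built from a contiguous buffer, assuming <linux/scatterlist.h> and a
+ * hypothetical helper name:
+ *
+ *	static void sketch_build_sg(struct scatterlist *sg, const char *buf,
+ *				    const struct cipher_testvec *tv)
+ *	{
+ *		unsigned int i, off = 0;
+ *
+ *		sg_init_table(sg, tv->np);
+ *		for (i = 0; i < tv->np; i++) {
+ *			sg_set_buf(&sg[i], buf + off, tv->tap[i]);
+ *			off += tv->tap[i];
+ *		}
+ *	}
+ */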
+ }, { + .key = "\x01\x23\x45\x67\x89\xab\xcd\xef", + .klen = 8, + .iv = "\x43\xe9\x34\x00\x8c\x38\x9c\x0f", + .input = "\x68\x37\x88\x49\x9a\x7c\x05\xf6", + .ilen = 8, + .result = "\x66\x6f\x72\x20\x61\x6c\x6c\x20", + .rlen = 8, + }, { /* Copy of above, for chunk testing */ + .key = "\x01\x23\x45\x67\x89\xab\xcd\xef", + .klen = 8, + .iv = "\x43\xe9\x34\x00\x8c\x38\x9c\x0f", + .input = "\x68\x37\x88\x49\x9a\x7c\x05\xf6", + .ilen = 8, + .result = "\x66\x6f\x72\x20\x61\x6c\x6c\x20", + .rlen = 8, + .np = 2, + .tap = { 4, 4 } + }, +}; + +static struct cipher_testvec des3_ede_enc_tv_template[] = { + { /* These are from openssl */ + .key = "\x01\x23\x45\x67\x89\xab\xcd\xef" + "\x55\x55\x55\x55\x55\x55\x55\x55" + "\xfe\xdc\xba\x98\x76\x54\x32\x10", + .klen = 24, + .input = "\x73\x6f\x6d\x65\x64\x61\x74\x61", + .ilen = 8, + .result = "\x18\xd7\x48\xe5\x63\x62\x05\x72", + .rlen = 8, + }, { + .key = "\x03\x52\x02\x07\x67\x20\x82\x17" + "\x86\x02\x87\x66\x59\x08\x21\x98" + "\x64\x05\x6a\xbd\xfe\xa9\x34\x57", + .klen = 24, + .input = "\x73\x71\x75\x69\x67\x67\x6c\x65", + .ilen = 8, + .result = "\xc0\x7d\x2a\x0f\xa5\x66\xfa\x30", + .rlen = 8, + }, { + .key = "\x10\x46\x10\x34\x89\x98\x80\x20" + "\x91\x07\xd0\x15\x89\x19\x01\x01" + "\x19\x07\x92\x10\x98\x1a\x01\x01", + .klen = 24, + .input = "\x00\x00\x00\x00\x00\x00\x00\x00", + .ilen = 8, + .result = "\xe1\xef\x62\xc3\x32\xfe\x82\x5b", + .rlen = 8, + }, +}; + +static struct cipher_testvec des3_ede_dec_tv_template[] = { + { /* These are from openssl */ + .key = "\x01\x23\x45\x67\x89\xab\xcd\xef" + "\x55\x55\x55\x55\x55\x55\x55\x55" + "\xfe\xdc\xba\x98\x76\x54\x32\x10", + .klen = 24, + .input = "\x18\xd7\x48\xe5\x63\x62\x05\x72", + .ilen = 8, + .result = "\x73\x6f\x6d\x65\x64\x61\x74\x61", + .rlen = 8, + }, { + .key = "\x03\x52\x02\x07\x67\x20\x82\x17" + "\x86\x02\x87\x66\x59\x08\x21\x98" + "\x64\x05\x6a\xbd\xfe\xa9\x34\x57", + .klen = 24, + .input = "\xc0\x7d\x2a\x0f\xa5\x66\xfa\x30", + .ilen = 8, + .result = "\x73\x71\x75\x69\x67\x67\x6c\x65", + .rlen = 8, + }, { + .key = "\x10\x46\x10\x34\x89\x98\x80\x20" + "\x91\x07\xd0\x15\x89\x19\x01\x01" + "\x19\x07\x92\x10\x98\x1a\x01\x01", + .klen = 24, + .input = "\xe1\xef\x62\xc3\x32\xfe\x82\x5b", + .ilen = 8, + .result = "\x00\x00\x00\x00\x00\x00\x00\x00", + .rlen = 8, + }, +}; + +static struct cipher_testvec des3_ede_cbc_enc_tv_template[] = { + { /* Generated from openssl */ + .key = "\xE9\xC0\xFF\x2E\x76\x0B\x64\x24" + "\x44\x4D\x99\x5A\x12\xD6\x40\xC0" + "\xEA\xC2\x84\xE8\x14\x95\xDB\xE8", + .klen = 24, + .iv = "\x7D\x33\x88\x93\x0F\x93\xB2\x42", + .input = "\x6f\x54\x20\x6f\x61\x4d\x79\x6e" + "\x53\x20\x63\x65\x65\x72\x73\x74" + "\x54\x20\x6f\x6f\x4d\x20\x6e\x61" + "\x20\x79\x65\x53\x72\x63\x74\x65" + "\x20\x73\x6f\x54\x20\x6f\x61\x4d" + "\x79\x6e\x53\x20\x63\x65\x65\x72" + "\x73\x74\x54\x20\x6f\x6f\x4d\x20" + "\x6e\x61\x20\x79\x65\x53\x72\x63" + "\x74\x65\x20\x73\x6f\x54\x20\x6f" + "\x61\x4d\x79\x6e\x53\x20\x63\x65" + "\x65\x72\x73\x74\x54\x20\x6f\x6f" + "\x4d\x20\x6e\x61\x20\x79\x65\x53" + "\x72\x63\x74\x65\x20\x73\x6f\x54" + "\x20\x6f\x61\x4d\x79\x6e\x53\x20" + "\x63\x65\x65\x72\x73\x74\x54\x20" + "\x6f\x6f\x4d\x20\x6e\x61\x0a\x79", + .ilen = 128, + .result = "\x0e\x2d\xb6\x97\x3c\x56\x33\xf4" + "\x67\x17\x21\xc7\x6e\x8a\xd5\x49" + "\x74\xb3\x49\x05\xc5\x1c\xd0\xed" + "\x12\x56\x5c\x53\x96\xb6\x00\x7d" + "\x90\x48\xfc\xf5\x8d\x29\x39\xcc" + "\x8a\xd5\x35\x18\x36\x23\x4e\xd7" + "\x76\xd1\xda\x0c\x94\x67\xbb\x04" + "\x8b\xf2\x03\x6c\xa8\xcf\xb6\xea" + "\x22\x64\x47\xaa\x8f\x75\x13\xbf" + 
"\x9f\xc2\xc3\xf0\xc9\x56\xc5\x7a" + "\x71\x63\x2e\x89\x7b\x1e\x12\xca" + "\xe2\x5f\xaf\xd8\xa4\xf8\xc9\x7a" + "\xd6\xf9\x21\x31\x62\x44\x45\xa6" + "\xd6\xbc\x5a\xd3\x2d\x54\x43\xcc" + "\x9d\xde\xa5\x70\xe9\x42\x45\x8a" + "\x6b\xfa\xb1\x91\x13\xb0\xd9\x19", + .rlen = 128, + }, +}; + +static struct cipher_testvec des3_ede_cbc_dec_tv_template[] = { + { /* Generated from openssl */ + .key = "\xE9\xC0\xFF\x2E\x76\x0B\x64\x24" + "\x44\x4D\x99\x5A\x12\xD6\x40\xC0" + "\xEA\xC2\x84\xE8\x14\x95\xDB\xE8", + .klen = 24, + .iv = "\x7D\x33\x88\x93\x0F\x93\xB2\x42", + .input = "\x0e\x2d\xb6\x97\x3c\x56\x33\xf4" + "\x67\x17\x21\xc7\x6e\x8a\xd5\x49" + "\x74\xb3\x49\x05\xc5\x1c\xd0\xed" + "\x12\x56\x5c\x53\x96\xb6\x00\x7d" + "\x90\x48\xfc\xf5\x8d\x29\x39\xcc" + "\x8a\xd5\x35\x18\x36\x23\x4e\xd7" + "\x76\xd1\xda\x0c\x94\x67\xbb\x04" + "\x8b\xf2\x03\x6c\xa8\xcf\xb6\xea" + "\x22\x64\x47\xaa\x8f\x75\x13\xbf" + "\x9f\xc2\xc3\xf0\xc9\x56\xc5\x7a" + "\x71\x63\x2e\x89\x7b\x1e\x12\xca" + "\xe2\x5f\xaf\xd8\xa4\xf8\xc9\x7a" + "\xd6\xf9\x21\x31\x62\x44\x45\xa6" + "\xd6\xbc\x5a\xd3\x2d\x54\x43\xcc" + "\x9d\xde\xa5\x70\xe9\x42\x45\x8a" + "\x6b\xfa\xb1\x91\x13\xb0\xd9\x19", + .ilen = 128, + .result = "\x6f\x54\x20\x6f\x61\x4d\x79\x6e" + "\x53\x20\x63\x65\x65\x72\x73\x74" + "\x54\x20\x6f\x6f\x4d\x20\x6e\x61" + "\x20\x79\x65\x53\x72\x63\x74\x65" + "\x20\x73\x6f\x54\x20\x6f\x61\x4d" + "\x79\x6e\x53\x20\x63\x65\x65\x72" + "\x73\x74\x54\x20\x6f\x6f\x4d\x20" + "\x6e\x61\x20\x79\x65\x53\x72\x63" + "\x74\x65\x20\x73\x6f\x54\x20\x6f" + "\x61\x4d\x79\x6e\x53\x20\x63\x65" + "\x65\x72\x73\x74\x54\x20\x6f\x6f" + "\x4d\x20\x6e\x61\x20\x79\x65\x53" + "\x72\x63\x74\x65\x20\x73\x6f\x54" + "\x20\x6f\x61\x4d\x79\x6e\x53\x20" + "\x63\x65\x65\x72\x73\x74\x54\x20" + "\x6f\x6f\x4d\x20\x6e\x61\x0a\x79", + .rlen = 128, + }, +}; + +/* + * Blowfish test vectors. + */ +#define BF_ENC_TEST_VECTORS 7 +#define BF_DEC_TEST_VECTORS 7 +#define BF_CBC_ENC_TEST_VECTORS 2 +#define BF_CBC_DEC_TEST_VECTORS 2 +#define BF_CTR_ENC_TEST_VECTORS 2 +#define BF_CTR_DEC_TEST_VECTORS 2 + +static struct cipher_testvec bf_enc_tv_template[] = { + { /* DES test vectors from OpenSSL */ + .key = "\x00\x00\x00\x00\x00\x00\x00\x00", + .klen = 8, + .input = "\x00\x00\x00\x00\x00\x00\x00\x00", + .ilen = 8, + .result = "\x4e\xf9\x97\x45\x61\x98\xdd\x78", + .rlen = 8, + }, { + .key = "\x1f\x1f\x1f\x1f\x0e\x0e\x0e\x0e", + .klen = 8, + .input = "\x01\x23\x45\x67\x89\xab\xcd\xef", + .ilen = 8, + .result = "\xa7\x90\x79\x51\x08\xea\x3c\xae", + .rlen = 8, + }, { + .key = "\xf0\xe1\xd2\xc3\xb4\xa5\x96\x87", + .klen = 8, + .input = "\xfe\xdc\xba\x98\x76\x54\x32\x10", + .ilen = 8, + .result = "\xe8\x7a\x24\x4e\x2c\xc8\x5e\x82", + .rlen = 8, + }, { /* Vary the keylength... 
*/ + .key = "\xf0\xe1\xd2\xc3\xb4\xa5\x96\x87" + "\x78\x69\x5a\x4b\x3c\x2d\x1e\x0f", + .klen = 16, + .input = "\xfe\xdc\xba\x98\x76\x54\x32\x10", + .ilen = 8, + .result = "\x93\x14\x28\x87\xee\x3b\xe1\x5c", + .rlen = 8, + }, { + .key = "\xf0\xe1\xd2\xc3\xb4\xa5\x96\x87" + "\x78\x69\x5a\x4b\x3c\x2d\x1e\x0f" + "\x00\x11\x22\x33\x44", + .klen = 21, + .input = "\xfe\xdc\xba\x98\x76\x54\x32\x10", + .ilen = 8, + .result = "\xe6\xf5\x1e\xd7\x9b\x9d\xb2\x1f", + .rlen = 8, + }, { /* Generated with bf488 */ + .key = "\xf0\xe1\xd2\xc3\xb4\xa5\x96\x87" + "\x78\x69\x5a\x4b\x3c\x2d\x1e\x0f" + "\x00\x11\x22\x33\x44\x55\x66\x77" + "\x04\x68\x91\x04\xc2\xfd\x3b\x2f" + "\x58\x40\x23\x64\x1a\xba\x61\x76" + "\x1f\x1f\x1f\x1f\x0e\x0e\x0e\x0e" + "\xff\xff\xff\xff\xff\xff\xff\xff", + .klen = 56, + .input = "\xfe\xdc\xba\x98\x76\x54\x32\x10", + .ilen = 8, + .result = "\xc0\x45\x04\x01\x2e\x4e\x1f\x53", + .rlen = 8, + }, { /* Generated with Crypto++ */ + .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9" + "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A" + "\x27\x04\xE1\x27\x04\xE1\xBE\x9B" + "\x78\xBE\x9B\x78\x55\x32\x0F\x55", + .klen = 32, + .input = "\x56\xED\x84\x1B\x8F\x26\xBD\x31" + "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3" + "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15" + "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87" + "\x1E\x92\x29\xC0\x34\xCB\x62\xF9", + .ilen = 40, + .result = "\x96\x87\x3D\x0C\x7B\xFB\xBD\x1F" + "\xE3\xC1\x99\x6D\x39\xD4\xC2\x7D" + "\xD7\x87\xA1\xF2\xDF\x51\x71\x26" + "\xC2\xF4\x6D\xFF\xF6\xCD\x6B\x40" + "\xE1\xB3\xBF\xD4\x38\x2B\xC8\x3B", + .rlen = 40, + }, +}; + +static struct cipher_testvec bf_dec_tv_template[] = { + { /* DES test vectors from OpenSSL */ + .key = "\x00\x00\x00\x00\x00\x00\x00\x00", + .klen = 8, + .input = "\x4e\xf9\x97\x45\x61\x98\xdd\x78", + .ilen = 8, + .result = "\x00\x00\x00\x00\x00\x00\x00\x00", + .rlen = 8, + }, { + .key = "\x1f\x1f\x1f\x1f\x0e\x0e\x0e\x0e", + .klen = 8, + .input = "\xa7\x90\x79\x51\x08\xea\x3c\xae", + .ilen = 8, + .result = "\x01\x23\x45\x67\x89\xab\xcd\xef", + .rlen = 8, + }, { + .key = "\xf0\xe1\xd2\xc3\xb4\xa5\x96\x87", + .klen = 8, + .input = "\xe8\x7a\x24\x4e\x2c\xc8\x5e\x82", + .ilen = 8, + .result = "\xfe\xdc\xba\x98\x76\x54\x32\x10", + .rlen = 8, + }, { /* Vary the keylength... 
*/ + .key = "\xf0\xe1\xd2\xc3\xb4\xa5\x96\x87" + "\x78\x69\x5a\x4b\x3c\x2d\x1e\x0f", + .klen = 16, + .input = "\x93\x14\x28\x87\xee\x3b\xe1\x5c", + .ilen = 8, + .result = "\xfe\xdc\xba\x98\x76\x54\x32\x10", + .rlen = 8, + }, { + .key = "\xf0\xe1\xd2\xc3\xb4\xa5\x96\x87" + "\x78\x69\x5a\x4b\x3c\x2d\x1e\x0f" + "\x00\x11\x22\x33\x44", + .klen = 21, + .input = "\xe6\xf5\x1e\xd7\x9b\x9d\xb2\x1f", + .ilen = 8, + .result = "\xfe\xdc\xba\x98\x76\x54\x32\x10", + .rlen = 8, + }, { /* Generated with bf488, using OpenSSL, Libgcrypt and Nettle */ + .key = "\xf0\xe1\xd2\xc3\xb4\xa5\x96\x87" + "\x78\x69\x5a\x4b\x3c\x2d\x1e\x0f" + "\x00\x11\x22\x33\x44\x55\x66\x77" + "\x04\x68\x91\x04\xc2\xfd\x3b\x2f" + "\x58\x40\x23\x64\x1a\xba\x61\x76" + "\x1f\x1f\x1f\x1f\x0e\x0e\x0e\x0e" + "\xff\xff\xff\xff\xff\xff\xff\xff", + .klen = 56, + .input = "\xc0\x45\x04\x01\x2e\x4e\x1f\x53", + .ilen = 8, + .result = "\xfe\xdc\xba\x98\x76\x54\x32\x10", + .rlen = 8, + }, { /* Generated with Crypto++ */ + .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9" + "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A" + "\x27\x04\xE1\x27\x04\xE1\xBE\x9B" + "\x78\xBE\x9B\x78\x55\x32\x0F\x55", + .klen = 32, + .input = "\x96\x87\x3D\x0C\x7B\xFB\xBD\x1F" + "\xE3\xC1\x99\x6D\x39\xD4\xC2\x7D" + "\xD7\x87\xA1\xF2\xDF\x51\x71\x26" + "\xC2\xF4\x6D\xFF\xF6\xCD\x6B\x40" + "\xE1\xB3\xBF\xD4\x38\x2B\xC8\x3B", + .ilen = 40, + .result = "\x56\xED\x84\x1B\x8F\x26\xBD\x31" + "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3" + "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15" + "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87" + "\x1E\x92\x29\xC0\x34\xCB\x62\xF9", + .rlen = 40, + }, +}; + +static struct cipher_testvec bf_cbc_enc_tv_template[] = { + { /* From OpenSSL */ + .key = "\x01\x23\x45\x67\x89\xab\xcd\xef" + "\xf0\xe1\xd2\xc3\xb4\xa5\x96\x87", + .klen = 16, + .iv = "\xfe\xdc\xba\x98\x76\x54\x32\x10", + .input = "\x37\x36\x35\x34\x33\x32\x31\x20" + "\x4e\x6f\x77\x20\x69\x73\x20\x74" + "\x68\x65\x20\x74\x69\x6d\x65\x20" + "\x66\x6f\x72\x20\x00\x00\x00\x00", + .ilen = 32, + .result = "\x6b\x77\xb4\xd6\x30\x06\xde\xe6" + "\x05\xb1\x56\xe2\x74\x03\x97\x93" + "\x58\xde\xb9\xe7\x15\x46\x16\xd9" + "\x59\xf1\x65\x2b\xd5\xff\x92\xcc", + .rlen = 32, + }, { /* Generated with Crypto++ */ + .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9" + "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A" + "\x27\x04\xE1\x27\x04\xE1\xBE\x9B" + "\x78\xBE\x9B\x78\x55\x32\x0F\x55", + .klen = 32, + .iv = "\xE2\x24\x89\xEE\x53\xB8\x1D\x5F", + .input = "\x56\xED\x84\x1B\x8F\x26\xBD\x31" + "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3" + "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15" + "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87" + "\x1E\x92\x29\xC0\x34\xCB\x62\xF9", + .ilen = 40, + .result = "\xB4\xFE\xA5\xBB\x3D\x2C\x27\x06" + "\x06\x2B\x3A\x92\xB2\xF5\x5E\x62" + "\x84\xCD\xF7\x66\x7E\x41\x6C\x8E" + "\x1B\xD9\x02\xB6\x48\xB0\x87\x25" + "\x01\x9C\x93\x63\x51\x60\x82\xD2", + .rlen = 40, + }, +}; + +static struct cipher_testvec bf_cbc_dec_tv_template[] = { + { /* From OpenSSL */ + .key = "\x01\x23\x45\x67\x89\xab\xcd\xef" + "\xf0\xe1\xd2\xc3\xb4\xa5\x96\x87", + .klen = 16, + .iv = "\xfe\xdc\xba\x98\x76\x54\x32\x10", + .input = "\x6b\x77\xb4\xd6\x30\x06\xde\xe6" + "\x05\xb1\x56\xe2\x74\x03\x97\x93" + "\x58\xde\xb9\xe7\x15\x46\x16\xd9" + "\x59\xf1\x65\x2b\xd5\xff\x92\xcc", + .ilen = 32, + .result = "\x37\x36\x35\x34\x33\x32\x31\x20" + "\x4e\x6f\x77\x20\x69\x73\x20\x74" + "\x68\x65\x20\x74\x69\x6d\x65\x20" + "\x66\x6f\x72\x20\x00\x00\x00\x00", + .rlen = 32, + }, { /* Generated with Crypto++ */ + .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9" + "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A" + "\x27\x04\xE1\x27\x04\xE1\xBE\x9B" + 
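+/*
+ * Illustrative note (an editorial addition, not part of the original patch):
+ * Blowfish accepts keys of 32 to 448 bits, which is why the vectors above
+ * deliberately vary .klen (8, 16, 21, 32 and 56 bytes) instead of sticking
+ * to a single key size.
+ */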
"\x78\xBE\x9B\x78\x55\x32\x0F\x55", + .klen = 32, + .iv = "\xE2\x24\x89\xEE\x53\xB8\x1D\x5F", + .input = "\xB4\xFE\xA5\xBB\x3D\x2C\x27\x06" + "\x06\x2B\x3A\x92\xB2\xF5\x5E\x62" + "\x84\xCD\xF7\x66\x7E\x41\x6C\x8E" + "\x1B\xD9\x02\xB6\x48\xB0\x87\x25" + "\x01\x9C\x93\x63\x51\x60\x82\xD2", + .ilen = 40, + .result = "\x56\xED\x84\x1B\x8F\x26\xBD\x31" + "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3" + "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15" + "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87" + "\x1E\x92\x29\xC0\x34\xCB\x62\xF9", + .rlen = 40, + }, +}; + +static struct cipher_testvec bf_ctr_enc_tv_template[] = { + { /* Generated with Crypto++ */ + .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9" + "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A" + "\x27\x04\xE1\x27\x04\xE1\xBE\x9B" + "\x78\xBE\x9B\x78\x55\x32\x0F\x55", + .klen = 32, + .iv = "\xE2\x24\x89\xEE\x53\xB8\x1D\x5F", + .input = "\x56\xED\x84\x1B\x8F\x26\xBD\x31" + "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3" + "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15" + "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87" + "\x1E\x92\x29\xC0\x34\xCB\x62\xF9", + .ilen = 40, + .result = "\xC7\xA3\xDF\xB9\x05\xF4\x9E\x8D" + "\x9E\xDF\x38\x18\x83\x07\xEF\xC1" + "\x93\x3C\xAA\xAA\xFE\x06\x42\xCC" + "\x0D\x70\x86\x5A\x44\xAD\x85\x17" + "\xE4\x1F\x5E\xA5\x89\xAC\x32\xBC", + .rlen = 40, + }, { /* Generated with Crypto++ */ + .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9" + "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A" + "\x27\x04\xE1\x27\x04\xE1\xBE\x9B" + "\x78\xBE\x9B\x78\x55\x32\x0F\x55", + .klen = 32, + .iv = "\xE2\x24\x89\xEE\x53\xB8\x1D\x5F", + .input = "\x56\xED\x84\x1B\x8F\x26\xBD\x31" + "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3" + "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15" + "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87" + "\x1E\x92\x29\xC0\x34\xCB\x62\xF9" + "\x6D\x04\x9B", + .ilen = 43, + .result = "\xC7\xA3\xDF\xB9\x05\xF4\x9E\x8D" + "\x9E\xDF\x38\x18\x83\x07\xEF\xC1" + "\x93\x3C\xAA\xAA\xFE\x06\x42\xCC" + "\x0D\x70\x86\x5A\x44\xAD\x85\x17" + "\xE4\x1F\x5E\xA5\x89\xAC\x32\xBC" + "\x3D\xA7\xE9", + .rlen = 43, + }, +}; + +static struct cipher_testvec bf_ctr_dec_tv_template[] = { + { /* Generated with Crypto++ */ + .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9" + "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A" + "\x27\x04\xE1\x27\x04\xE1\xBE\x9B" + "\x78\xBE\x9B\x78\x55\x32\x0F\x55", + .klen = 32, + .iv = "\xE2\x24\x89\xEE\x53\xB8\x1D\x5F", + .input = "\xC7\xA3\xDF\xB9\x05\xF4\x9E\x8D" + "\x9E\xDF\x38\x18\x83\x07\xEF\xC1" + "\x93\x3C\xAA\xAA\xFE\x06\x42\xCC" + "\x0D\x70\x86\x5A\x44\xAD\x85\x17" + "\xE4\x1F\x5E\xA5\x89\xAC\x32\xBC", + .ilen = 40, + .result = "\x56\xED\x84\x1B\x8F\x26\xBD\x31" + "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3" + "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15" + "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87" + "\x1E\x92\x29\xC0\x34\xCB\x62\xF9", + .rlen = 40, + }, { /* Generated with Crypto++ */ + .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9" + "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A" + "\x27\x04\xE1\x27\x04\xE1\xBE\x9B" + "\x78\xBE\x9B\x78\x55\x32\x0F\x55", + .klen = 32, + .iv = "\xE2\x24\x89\xEE\x53\xB8\x1D\x5F", + .input = "\xC7\xA3\xDF\xB9\x05\xF4\x9E\x8D" + "\x9E\xDF\x38\x18\x83\x07\xEF\xC1" + "\x93\x3C\xAA\xAA\xFE\x06\x42\xCC" + "\x0D\x70\x86\x5A\x44\xAD\x85\x17" + "\xE4\x1F\x5E\xA5\x89\xAC\x32\xBC" + "\x3D\xA7\xE9", + .ilen = 43, + .result = "\x56\xED\x84\x1B\x8F\x26\xBD\x31" + "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3" + "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15" + "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87" + "\x1E\x92\x29\xC0\x34\xCB\x62\xF9" + "\x6D\x04\x9B", + .rlen = 43, + }, +}; + +/* + * Twofish test vectors. 
+ */ +#define TF_ENC_TEST_VECTORS 4 +#define TF_DEC_TEST_VECTORS 4 +#define TF_CBC_ENC_TEST_VECTORS 5 +#define TF_CBC_DEC_TEST_VECTORS 5 +#define TF_CTR_ENC_TEST_VECTORS 2 +#define TF_CTR_DEC_TEST_VECTORS 2 +#define TF_LRW_ENC_TEST_VECTORS 8 +#define TF_LRW_DEC_TEST_VECTORS 8 +#define TF_XTS_ENC_TEST_VECTORS 5 +#define TF_XTS_DEC_TEST_VECTORS 5 + +static struct cipher_testvec tf_enc_tv_template[] = { + { + .key = zeroed_string, + .klen = 16, + .input = zeroed_string, + .ilen = 16, + .result = "\x9f\x58\x9f\x5c\xf6\x12\x2c\x32" + "\xb6\xbf\xec\x2f\x2a\xe8\xc3\x5a", + .rlen = 16, + }, { + .key = "\x01\x23\x45\x67\x89\xab\xcd\xef" + "\xfe\xdc\xba\x98\x76\x54\x32\x10" + "\x00\x11\x22\x33\x44\x55\x66\x77", + .klen = 24, + .input = zeroed_string, + .ilen = 16, + .result = "\xcf\xd1\xd2\xe5\xa9\xbe\x9c\xdf" + "\x50\x1f\x13\xb8\x92\xbd\x22\x48", + .rlen = 16, + }, { + .key = "\x01\x23\x45\x67\x89\xab\xcd\xef" + "\xfe\xdc\xba\x98\x76\x54\x32\x10" + "\x00\x11\x22\x33\x44\x55\x66\x77" + "\x88\x99\xaa\xbb\xcc\xdd\xee\xff", + .klen = 32, + .input = zeroed_string, + .ilen = 16, + .result = "\x37\x52\x7b\xe0\x05\x23\x34\xb8" + "\x9f\x0c\xfc\xca\xe8\x7c\xfa\x20", + .rlen = 16, + }, { /* Generated with Crypto++ */ + .key = "\x3F\x85\x62\x3F\x1C\xF9\xD6\x1C" + "\xF9\xD6\xB3\x90\x6D\x4A\x90\x6D" + "\x4A\x27\x04\xE1\x27\x04\xE1\xBE" + "\x9B\x78\xBE\x9B\x78\x55\x32\x0F", + .klen = 32, + .input = "\x56\xED\x84\x1B\x8F\x26\xBD\x31" + "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3" + "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15" + "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87" + "\x1E\x92\x29\xC0\x34\xCB\x62\xF9" + "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48" + "\xDF\x76\x0D\x81\x18\xAF\x23\xBA" + "\x51\xE8\x5C\xF3\x8A\x21\x95\x2C", + .ilen = 64, + .result = "\x88\xCB\x1E\xC2\xAF\x8A\x97\xFF" + "\xF6\x90\x46\x9C\x4A\x0F\x08\xDC" + "\xDE\xAB\xAD\xFA\xFC\xA8\xC2\x3D" + "\xE0\xE4\x8B\x3F\xD5\xA3\xF7\x14" + "\x34\x9E\xB6\x08\xB2\xDD\xA8\xF5" + "\xDF\xFA\xC7\xE8\x09\x50\x76\x08" + "\xA2\xB6\x6A\x59\xC0\x2B\x6D\x05" + "\x89\xF6\x82\xF0\xD3\xDB\x06\x02", + .rlen = 64, + }, +}; + +static struct cipher_testvec tf_dec_tv_template[] = { + { + .key = zeroed_string, + .klen = 16, + .input = "\x9f\x58\x9f\x5c\xf6\x12\x2c\x32" + "\xb6\xbf\xec\x2f\x2a\xe8\xc3\x5a", + .ilen = 16, + .result = zeroed_string, + .rlen = 16, + }, { + .key = "\x01\x23\x45\x67\x89\xab\xcd\xef" + "\xfe\xdc\xba\x98\x76\x54\x32\x10" + "\x00\x11\x22\x33\x44\x55\x66\x77", + .klen = 24, + .input = "\xcf\xd1\xd2\xe5\xa9\xbe\x9c\xdf" + "\x50\x1f\x13\xb8\x92\xbd\x22\x48", + .ilen = 16, + .result = zeroed_string, + .rlen = 16, + }, { + .key = "\x01\x23\x45\x67\x89\xab\xcd\xef" + "\xfe\xdc\xba\x98\x76\x54\x32\x10" + "\x00\x11\x22\x33\x44\x55\x66\x77" + "\x88\x99\xaa\xbb\xcc\xdd\xee\xff", + .klen = 32, + .input = "\x37\x52\x7b\xe0\x05\x23\x34\xb8" + "\x9f\x0c\xfc\xca\xe8\x7c\xfa\x20", + .ilen = 16, + .result = zeroed_string, + .rlen = 16, + }, { /* Generated with Crypto++ */ + .key = "\x3F\x85\x62\x3F\x1C\xF9\xD6\x1C" + "\xF9\xD6\xB3\x90\x6D\x4A\x90\x6D" + "\x4A\x27\x04\xE1\x27\x04\xE1\xBE" + "\x9B\x78\xBE\x9B\x78\x55\x32\x0F", + .klen = 32, + .input = "\x88\xCB\x1E\xC2\xAF\x8A\x97\xFF" + "\xF6\x90\x46\x9C\x4A\x0F\x08\xDC" + "\xDE\xAB\xAD\xFA\xFC\xA8\xC2\x3D" + "\xE0\xE4\x8B\x3F\xD5\xA3\xF7\x14" + "\x34\x9E\xB6\x08\xB2\xDD\xA8\xF5" + "\xDF\xFA\xC7\xE8\x09\x50\x76\x08" + "\xA2\xB6\x6A\x59\xC0\x2B\x6D\x05" + "\x89\xF6\x82\xF0\xD3\xDB\x06\x02", + .ilen = 64, + .result = "\x56\xED\x84\x1B\x8F\x26\xBD\x31" + "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3" + "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15" + "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87" 
+ "\x1E\x92\x29\xC0\x34\xCB\x62\xF9" + "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48" + "\xDF\x76\x0D\x81\x18\xAF\x23\xBA" + "\x51\xE8\x5C\xF3\x8A\x21\x95\x2C", + .rlen = 64, + }, +}; + +static struct cipher_testvec tf_cbc_enc_tv_template[] = { + { /* Generated with Nettle */ + .key = zeroed_string, + .klen = 16, + .iv = zeroed_string, + .input = zeroed_string, + .ilen = 16, + .result = "\x9f\x58\x9f\x5c\xf6\x12\x2c\x32" + "\xb6\xbf\xec\x2f\x2a\xe8\xc3\x5a", + .rlen = 16, + }, { + .key = zeroed_string, + .klen = 16, + .iv = "\x9f\x58\x9f\x5c\xf6\x12\x2c\x32" + "\xb6\xbf\xec\x2f\x2a\xe8\xc3\x5a", + .input = zeroed_string, + .ilen = 16, + .result = "\xd4\x91\xdb\x16\xe7\xb1\xc3\x9e" + "\x86\xcb\x08\x6b\x78\x9f\x54\x19", + .rlen = 16, + }, { + .key = zeroed_string, + .klen = 16, + .iv = "\xd4\x91\xdb\x16\xe7\xb1\xc3\x9e" + "\x86\xcb\x08\x6b\x78\x9f\x54\x19", + .input = zeroed_string, + .ilen = 16, + .result = "\x05\xef\x8c\x61\xa8\x11\x58\x26" + "\x34\xba\x5c\xb7\x10\x6a\xa6\x41", + .rlen = 16, + }, { + .key = zeroed_string, + .klen = 16, + .iv = zeroed_string, + .input = zeroed_string, + .ilen = 48, + .result = "\x9f\x58\x9f\x5c\xf6\x12\x2c\x32" + "\xb6\xbf\xec\x2f\x2a\xe8\xc3\x5a" + "\xd4\x91\xdb\x16\xe7\xb1\xc3\x9e" + "\x86\xcb\x08\x6b\x78\x9f\x54\x19" + "\x05\xef\x8c\x61\xa8\x11\x58\x26" + "\x34\xba\x5c\xb7\x10\x6a\xa6\x41", + .rlen = 48, + }, { /* Generated with Crypto++ */ + .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9" + "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A" + "\x27\x04\xE1\x27\x04\xE1\xBE\x9B" + "\x78\xBE\x9B\x78\x55\x32\x0F\x55", + .klen = 32, + .iv = "\xE2\x24\x89\xEE\x53\xB8\x1D\x5F" + "\xC4\x29\x8E\xF3\x35\x9A\xFF\x64", + .input = "\x56\xED\x84\x1B\x8F\x26\xBD\x31" + "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3" + "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15" + "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87" + "\x1E\x92\x29\xC0\x34\xCB\x62\xF9" + "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48" + "\xDF\x76\x0D\x81\x18\xAF\x23\xBA" + "\x51\xE8\x5C\xF3\x8A\x21\x95\x2C", + .ilen = 64, + .result = "\xC8\xFF\xF2\x53\xA6\x27\x09\xD1" + "\x33\x38\xC2\xC0\x0C\x14\x7E\xB5" + "\x26\x1B\x05\x0C\x05\x12\x3F\xC0" + "\xF9\x1C\x02\x28\x40\x96\x6F\xD0" + "\x3D\x32\xDF\xDA\x56\x00\x6E\xEE" + "\x5B\x2A\x72\x9D\xC2\x4D\x19\xBC" + "\x8C\x53\xFA\x87\x6F\xDD\x81\xA3" + "\xB1\xD3\x44\x65\xDF\xE7\x63\x38", + .rlen = 64, + }, +}; + +static struct cipher_testvec tf_cbc_dec_tv_template[] = { + { /* Reverse of the first four above */ + .key = zeroed_string, + .klen = 16, + .iv = zeroed_string, + .input = "\x9f\x58\x9f\x5c\xf6\x12\x2c\x32" + "\xb6\xbf\xec\x2f\x2a\xe8\xc3\x5a", + .ilen = 16, + .result = zeroed_string, + .rlen = 16, + }, { + .key = zeroed_string, + .klen = 16, + .iv = "\x9f\x58\x9f\x5c\xf6\x12\x2c\x32" + "\xb6\xbf\xec\x2f\x2a\xe8\xc3\x5a", + .input = "\xd4\x91\xdb\x16\xe7\xb1\xc3\x9e" + "\x86\xcb\x08\x6b\x78\x9f\x54\x19", + .ilen = 16, + .result = zeroed_string, + .rlen = 16, + }, { + .key = zeroed_string, + .klen = 16, + .iv = "\xd4\x91\xdb\x16\xe7\xb1\xc3\x9e" + "\x86\xcb\x08\x6b\x78\x9f\x54\x19", + .input = "\x05\xef\x8c\x61\xa8\x11\x58\x26" + "\x34\xba\x5c\xb7\x10\x6a\xa6\x41", + .ilen = 16, + .result = zeroed_string, + .rlen = 16, + }, { + .key = zeroed_string, + .klen = 16, + .iv = zeroed_string, + .input = "\x9f\x58\x9f\x5c\xf6\x12\x2c\x32" + "\xb6\xbf\xec\x2f\x2a\xe8\xc3\x5a" + "\xd4\x91\xdb\x16\xe7\xb1\xc3\x9e" + "\x86\xcb\x08\x6b\x78\x9f\x54\x19" + "\x05\xef\x8c\x61\xa8\x11\x58\x26" + "\x34\xba\x5c\xb7\x10\x6a\xa6\x41", + .ilen = 48, + .result = zeroed_string, + .rlen = 48, + }, { /* Generated with Crypto++ */ + .key = 
"\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9" + "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A" + "\x27\x04\xE1\x27\x04\xE1\xBE\x9B" + "\x78\xBE\x9B\x78\x55\x32\x0F\x55", + .klen = 32, + .iv = "\xE2\x24\x89\xEE\x53\xB8\x1D\x5F" + "\xC4\x29\x8E\xF3\x35\x9A\xFF\x64", + .input = "\xC8\xFF\xF2\x53\xA6\x27\x09\xD1" + "\x33\x38\xC2\xC0\x0C\x14\x7E\xB5" + "\x26\x1B\x05\x0C\x05\x12\x3F\xC0" + "\xF9\x1C\x02\x28\x40\x96\x6F\xD0" + "\x3D\x32\xDF\xDA\x56\x00\x6E\xEE" + "\x5B\x2A\x72\x9D\xC2\x4D\x19\xBC" + "\x8C\x53\xFA\x87\x6F\xDD\x81\xA3" + "\xB1\xD3\x44\x65\xDF\xE7\x63\x38", + .ilen = 64, + .result = "\x56\xED\x84\x1B\x8F\x26\xBD\x31" + "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3" + "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15" + "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87" + "\x1E\x92\x29\xC0\x34\xCB\x62\xF9" + "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48" + "\xDF\x76\x0D\x81\x18\xAF\x23\xBA" + "\x51\xE8\x5C\xF3\x8A\x21\x95\x2C", + .rlen = 64, + }, +}; + +static struct cipher_testvec tf_ctr_enc_tv_template[] = { + { /* Generated with Crypto++ */ + .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9" + "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A" + "\x27\x04\xE1\x27\x04\xE1\xBE\x9B" + "\x78\xBE\x9B\x78\x55\x32\x0F\x55", + .klen = 32, + .iv = "\xE2\x24\x89\xEE\x53\xB8\x1D\x5F" + "\xC4\x29\x8E\xF3\x35\x9A\xFF\x64", + .input = "\x56\xED\x84\x1B\x8F\x26\xBD\x31" + "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3" + "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15" + "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87" + "\x1E\x92\x29\xC0\x34\xCB\x62\xF9" + "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48" + "\xDF\x76\x0D\x81\x18\xAF\x23\xBA" + "\x51\xE8\x5C\xF3\x8A\x21\x95\x2C", + .ilen = 64, + .result = "\xDF\xDD\x69\xFA\xB0\x2E\xFD\xFE" + "\x70\x9E\xC5\x4B\xC9\xD4\xA1\x30" + "\x26\x9B\x89\xA1\xEE\x43\xE0\x52" + "\x55\x17\x4E\xC7\x0E\x33\x1F\xF1" + "\x9F\x8D\x40\x9F\x24\xFD\x92\xA0" + "\xBC\x8F\x35\xDD\x67\x38\xD8\xAA" + "\xCF\xF8\x48\xCA\xFB\xE4\x5C\x60" + "\x01\x41\x21\x12\x38\xAB\x52\x4F", + .rlen = 64, + }, { /* Generated with Crypto++ */ + .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9" + "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A" + "\x27\x04\xE1\x27\x04\xE1\xBE\x9B" + "\x78\xBE\x9B\x78\x55\x32\x0F\x55", + .klen = 32, + .iv = "\xE2\x24\x89\xEE\x53\xB8\x1D\x5F" + "\xC4\x29\x8E\xF3\x35\x9A\xFF\x64", + .input = "\x56\xED\x84\x1B\x8F\x26\xBD\x31" + "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3" + "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15" + "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87" + "\x1E\x92\x29\xC0\x34\xCB\x62\xF9" + "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48" + "\xDF\x76\x0D\x81\x18\xAF\x23\xBA" + "\x51\xE8\x5C\xF3\x8A\x21\x95\x2C" + "\xC3\x37\xCE", + .ilen = 67, + .result = "\xDF\xDD\x69\xFA\xB0\x2E\xFD\xFE" + "\x70\x9E\xC5\x4B\xC9\xD4\xA1\x30" + "\x26\x9B\x89\xA1\xEE\x43\xE0\x52" + "\x55\x17\x4E\xC7\x0E\x33\x1F\xF1" + "\x9F\x8D\x40\x9F\x24\xFD\x92\xA0" + "\xBC\x8F\x35\xDD\x67\x38\xD8\xAA" + "\xCF\xF8\x48\xCA\xFB\xE4\x5C\x60" + "\x01\x41\x21\x12\x38\xAB\x52\x4F" + "\xA8\x57\x20", + .rlen = 67, + }, +}; + +static struct cipher_testvec tf_ctr_dec_tv_template[] = { + { /* Generated with Crypto++ */ + .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9" + "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A" + "\x27\x04\xE1\x27\x04\xE1\xBE\x9B" + "\x78\xBE\x9B\x78\x55\x32\x0F\x55", + .klen = 32, + .iv = "\xE2\x24\x89\xEE\x53\xB8\x1D\x5F" + "\xC4\x29\x8E\xF3\x35\x9A\xFF\x64", + .input = "\xDF\xDD\x69\xFA\xB0\x2E\xFD\xFE" + "\x70\x9E\xC5\x4B\xC9\xD4\xA1\x30" + "\x26\x9B\x89\xA1\xEE\x43\xE0\x52" + "\x55\x17\x4E\xC7\x0E\x33\x1F\xF1" + "\x9F\x8D\x40\x9F\x24\xFD\x92\xA0" + "\xBC\x8F\x35\xDD\x67\x38\xD8\xAA" + "\xCF\xF8\x48\xCA\xFB\xE4\x5C\x60" + "\x01\x41\x21\x12\x38\xAB\x52\x4F", + .ilen = 64, + .result = 
"\x56\xED\x84\x1B\x8F\x26\xBD\x31" + "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3" + "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15" + "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87" + "\x1E\x92\x29\xC0\x34\xCB\x62\xF9" + "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48" + "\xDF\x76\x0D\x81\x18\xAF\x23\xBA" + "\x51\xE8\x5C\xF3\x8A\x21\x95\x2C", + .rlen = 64, + }, { /* Generated with Crypto++ */ + .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9" + "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A" + "\x27\x04\xE1\x27\x04\xE1\xBE\x9B" + "\x78\xBE\x9B\x78\x55\x32\x0F\x55", + .klen = 32, + .iv = "\xE2\x24\x89\xEE\x53\xB8\x1D\x5F" + "\xC4\x29\x8E\xF3\x35\x9A\xFF\x64", + .input = "\xDF\xDD\x69\xFA\xB0\x2E\xFD\xFE" + "\x70\x9E\xC5\x4B\xC9\xD4\xA1\x30" + "\x26\x9B\x89\xA1\xEE\x43\xE0\x52" + "\x55\x17\x4E\xC7\x0E\x33\x1F\xF1" + "\x9F\x8D\x40\x9F\x24\xFD\x92\xA0" + "\xBC\x8F\x35\xDD\x67\x38\xD8\xAA" + "\xCF\xF8\x48\xCA\xFB\xE4\x5C\x60" + "\x01\x41\x21\x12\x38\xAB\x52\x4F" + "\xA8\x57\x20", + .ilen = 67, + .result = "\x56\xED\x84\x1B\x8F\x26\xBD\x31" + "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3" + "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15" + "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87" + "\x1E\x92\x29\xC0\x34\xCB\x62\xF9" + "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48" + "\xDF\x76\x0D\x81\x18\xAF\x23\xBA" + "\x51\xE8\x5C\xF3\x8A\x21\x95\x2C" + "\xC3\x37\xCE", + .rlen = 67, + }, +}; + +static struct cipher_testvec tf_lrw_enc_tv_template[] = { + /* Generated from AES-LRW test vectors */ + { + .key = "\x45\x62\xac\x25\xf8\x28\x17\x6d" + "\x4c\x26\x84\x14\xb5\x68\x01\x85" + "\x25\x8e\x2a\x05\xe7\x3e\x9d\x03" + "\xee\x5a\x83\x0c\xcc\x09\x4c\x87", + .klen = 32, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x01", + .input = "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x41\x42\x43\x44\x45\x46", + .ilen = 16, + .result = "\xa1\x6c\x50\x69\x26\xa4\xef\x7b" + "\x7c\xc6\x91\xeb\x72\xdd\x9b\xee", + .rlen = 16, + }, { + .key = "\x59\x70\x47\x14\xf5\x57\x47\x8c" + "\xd7\x79\xe8\x0f\x54\x88\x79\x44" + "\x0d\x48\xf0\xb7\xb1\x5a\x53\xea" + "\x1c\xaa\x6b\x29\xc2\xca\xfb\xaf", + .klen = 32, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x02", + .input = "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x41\x42\x43\x44\x45\x46", + .ilen = 16, + .result = "\xab\x72\x0a\xad\x3b\x0c\xf0\xc9" + "\x42\x2f\xf1\xae\xf1\x3c\xb1\xbd", + .rlen = 16, + }, { + .key = "\xd8\x2a\x91\x34\xb2\x6a\x56\x50" + "\x30\xfe\x69\xe2\x37\x7f\x98\x47" + "\xcd\xf9\x0b\x16\x0c\x64\x8f\xb6" + "\xb0\x0d\x0d\x1b\xae\x85\x87\x1f", + .klen = 32, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x02\x00\x00\x00\x00", + .input = "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x41\x42\x43\x44\x45\x46", + .ilen = 16, + .result = "\x85\xa7\x56\x67\x08\xfa\x42\xe1" + "\x22\xe6\x82\xfc\xd9\xb4\xd7\xd4", + .rlen = 16, + }, { + .key = "\x0f\x6a\xef\xf8\xd3\xd2\xbb\x15" + "\x25\x83\xf7\x3c\x1f\x01\x28\x74" + "\xca\xc6\xbc\x35\x4d\x4a\x65\x54" + "\x90\xae\x61\xcf\x7b\xae\xbd\xcc" + "\xad\xe4\x94\xc5\x4a\x29\xae\x70", + .klen = 40, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x01", + .input = "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x41\x42\x43\x44\x45\x46", + .ilen = 16, + .result = "\xd2\xaf\x69\x35\x24\x1d\x0e\x1c" + "\x84\x8b\x05\xe4\xa2\x2f\x16\xf5", + .rlen = 16, + }, { + .key = "\x8a\xd4\xee\x10\x2f\xbd\x81\xff" + "\xf8\x86\xce\xac\x93\xc5\xad\xc6" + "\xa0\x19\x07\xc0\x9d\xf7\xbb\xdd" + "\x52\x13\xb2\xb7\xf0\xff\x11\xd8" + "\xd6\x08\xd0\xcd\x2e\xb1\x17\x6f", + .klen = 40, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x02\x00\x00\x00\x00", + .input = 
"\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x41\x42\x43\x44\x45\x46", + .ilen = 16, + .result = "\x4a\x23\x56\xd7\xff\x90\xd0\x9a" + "\x0d\x7c\x26\xfc\xf0\xf0\xf6\xe4", + .rlen = 16, + }, { + .key = "\xf8\xd4\x76\xff\xd6\x46\xee\x6c" + "\x23\x84\xcb\x1c\x77\xd6\x19\x5d" + "\xfe\xf1\xa9\xf3\x7b\xbc\x8d\x21" + "\xa7\x9c\x21\xf8\xcb\x90\x02\x89" + "\xa8\x45\x34\x8e\xc8\xc5\xb5\xf1" + "\x26\xf5\x0e\x76\xfe\xfd\x1b\x1e", + .klen = 48, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x01", + .input = "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x41\x42\x43\x44\x45\x46", + .ilen = 16, + .result = "\x30\xaf\x26\x05\x9d\x5d\x0a\x58" + "\xe2\xe7\xce\x8a\xb2\x56\x6d\x76", + .rlen = 16, + }, { + .key = "\xfb\x76\x15\xb2\x3d\x80\x89\x1d" + "\xd4\x70\x98\x0b\xc7\x95\x84\xc8" + "\xb2\xfb\x64\xce\x60\x97\x87\x8d" + "\x17\xfc\xe4\x5a\x49\xe8\x30\xb7" + "\x6e\x78\x17\xe7\x2d\x5e\x12\xd4" + "\x60\x64\x04\x7a\xf1\x2f\x9e\x0c", + .klen = 48, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x02\x00\x00\x00\x00", + .input = "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x41\x42\x43\x44\x45\x46", + .ilen = 16, + .result = "\xdf\xcf\xdc\xd2\xe1\xcf\x86\x75" + "\x17\x66\x5e\x0c\x14\xa1\x3d\x40", + .rlen = 16, + }, { + .key = "\xf8\xd4\x76\xff\xd6\x46\xee\x6c" + "\x23\x84\xcb\x1c\x77\xd6\x19\x5d" + "\xfe\xf1\xa9\xf3\x7b\xbc\x8d\x21" + "\xa7\x9c\x21\xf8\xcb\x90\x02\x89" + "\xa8\x45\x34\x8e\xc8\xc5\xb5\xf1" + "\x26\xf5\x0e\x76\xfe\xfd\x1b\x1e", + .klen = 48, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x01", + .input = "\x05\x11\xb7\x18\xab\xc6\x2d\xac" + "\x70\x5d\xf6\x22\x94\xcd\xe5\x6c" + "\x17\x6b\xf6\x1c\xf0\xf3\x6e\xf8" + "\x50\x38\x1f\x71\x49\xb6\x57\xd6" + "\x8f\xcb\x8d\x6b\xe3\xa6\x29\x90" + "\xfe\x2a\x62\x82\xae\x6d\x8b\xf6" + "\xad\x1e\x9e\x20\x5f\x38\xbe\x04" + "\xda\x10\x8e\xed\xa2\xa4\x87\xab" + "\xda\x6b\xb4\x0c\x75\xba\xd3\x7c" + "\xc9\xac\x42\x31\x95\x7c\xc9\x04" + "\xeb\xd5\x6e\x32\x69\x8a\xdb\xa6" + "\x15\xd7\x3f\x4f\x2f\x66\x69\x03" + "\x9c\x1f\x54\x0f\xde\x1f\xf3\x65" + "\x4c\x96\x12\xed\x7c\x92\x03\x01" + "\x6f\xbc\x35\x93\xac\xf1\x27\xf1" + "\xb4\x96\x82\x5a\x5f\xb0\xa0\x50" + "\x89\xa4\x8e\x66\x44\x85\xcc\xfd" + "\x33\x14\x70\xe3\x96\xb2\xc3\xd3" + "\xbb\x54\x5a\x1a\xf9\x74\xa2\xc5" + "\x2d\x64\x75\xdd\xb4\x54\xe6\x74" + "\x8c\xd3\x9d\x9e\x86\xab\x51\x53" + "\xb7\x93\x3e\x6f\xd0\x4e\x2c\x40" + "\xf6\xa8\x2e\x3e\x9d\xf4\x66\xa5" + "\x76\x12\x73\x44\x1a\x56\xd7\x72" + "\x88\xcd\x21\x8c\x4c\x0f\xfe\xda" + "\x95\xe0\x3a\xa6\xa5\x84\x46\xcd" + "\xd5\x3e\x9d\x3a\xe2\x67\xe6\x60" + "\x1a\xe2\x70\x85\x58\xc2\x1b\x09" + "\xe1\xd7\x2c\xca\xad\xa8\x8f\xf9" + "\xac\xb3\x0e\xdb\xca\x2e\xe2\xb8" + "\x51\x71\xd9\x3c\x6c\xf1\x56\xf8" + "\xea\x9c\xf1\xfb\x0c\xe6\xb7\x10" + "\x1c\xf8\xa9\x7c\xe8\x53\x35\xc1" + "\x90\x3e\x76\x4a\x74\xa4\x21\x2c" + "\xf6\x2c\x4e\x0f\x94\x3a\x88\x2e" + "\x41\x09\x6a\x33\x7d\xf6\xdd\x3f" + "\x8d\x23\x31\x74\x84\xeb\x88\x6e" + "\xcc\xb9\xbc\x22\x83\x19\x07\x22" + "\xa5\x2d\xdf\xa5\xf3\x80\x85\x78" + "\x84\x39\x6a\x6d\x6a\x99\x4f\xa5" + "\x15\xfe\x46\xb0\xe4\x6c\xa5\x41" + "\x3c\xce\x8f\x42\x60\x71\xa7\x75" + "\x08\x40\x65\x8a\x82\xbf\xf5\x43" + "\x71\x96\xa9\x4d\x44\x8a\x20\xbe" + "\xfa\x4d\xbb\xc0\x7d\x31\x96\x65" + "\xe7\x75\xe5\x3e\xfd\x92\x3b\xc9" + "\x55\xbb\x16\x7e\xf7\xc2\x8c\xa4" + "\x40\x1d\xe5\xef\x0e\xdf\xe4\x9a" + "\x62\x73\x65\xfd\x46\x63\x25\x3d" + "\x2b\xaf\xe5\x64\xfe\xa5\x5c\xcf" + "\x24\xf3\xb4\xac\x64\xba\xdf\x4b" + "\xc6\x96\x7d\x81\x2d\x8d\x97\xf7" + "\xc5\x68\x77\x84\x32\x2b\xcc\x85" + 
"\x74\x96\xf0\x12\x77\x61\xb9\xeb" + "\x71\xaa\x82\xcb\x1c\xdb\x89\xc8" + "\xc6\xb5\xe3\x5c\x7d\x39\x07\x24" + "\xda\x39\x87\x45\xc0\x2b\xbb\x01" + "\xac\xbc\x2a\x5c\x7f\xfc\xe8\xce" + "\x6d\x9c\x6f\xed\xd3\xc1\xa1\xd6" + "\xc5\x55\xa9\x66\x2f\xe1\xc8\x32" + "\xa6\x5d\xa4\x3a\x98\x73\xe8\x45" + "\xa4\xc7\xa8\xb4\xf6\x13\x03\xf6" + "\xe9\x2e\xc4\x29\x0f\x84\xdb\xc4" + "\x21\xc4\xc2\x75\x67\x89\x37\x0a", + .ilen = 512, + .result = "\x30\x38\xeb\xaf\x12\x43\x1a\x89" + "\x62\xa2\x36\xe5\xcf\x77\x1e\xd9" + "\x08\xc3\x0d\xdd\x95\xab\x19\x96" + "\x27\x52\x41\xc3\xca\xfb\xf6\xee" + "\x40\x2d\xdf\xdd\x00\x0c\xb9\x0a" + "\x3a\xf0\xc0\xd1\xda\x63\x9e\x45" + "\x42\xe9\x29\xc0\xb4\x07\xb4\x31" + "\x66\x77\x72\xb5\xb6\xb3\x57\x46" + "\x34\x9a\xfe\x03\xaf\x6b\x36\x07" + "\x63\x8e\xc2\x5d\xa6\x0f\xb6\x7d" + "\xfb\x6d\x82\x51\xb6\x98\xd0\x71" + "\xe7\x10\x7a\xdf\xb2\xbd\xf1\x1d" + "\x72\x2b\x54\x13\xe3\x6d\x79\x37" + "\xa9\x39\x2c\xdf\x21\xab\x87\xd5" + "\xee\xef\x9a\x12\x50\x39\x2e\x1b" + "\x7d\xe6\x6a\x27\x48\xb9\xe7\xac" + "\xaa\xcd\x79\x5f\xf2\xf3\xa0\x08" + "\x6f\x2c\xf4\x0e\xd1\xb8\x89\x25" + "\x31\x9d\xef\xb1\x1d\x27\x55\x04" + "\xc9\x8c\xb7\x68\xdc\xb6\x67\x8a" + "\xdb\xcf\x22\xf2\x3b\x6f\xce\xbb" + "\x26\xbe\x4f\x27\x04\x42\xd1\x44" + "\x4c\x08\xa3\x95\x4c\x7f\x1a\xaf" + "\x1d\x28\x14\xfd\xb1\x1a\x34\x18" + "\xf5\x1e\x28\x69\x95\x6a\x5a\xba" + "\x8e\xb2\x58\x1d\x28\x17\x13\x3d" + "\x38\x7d\x14\x8d\xab\x5d\xf9\xe8" + "\x3c\x0f\x2b\x0d\x2b\x08\xb4\x4b" + "\x6b\x0d\xc8\xa7\x84\xc2\x3a\x1a" + "\xb7\xbd\xda\x92\x29\xb8\x5b\x5a" + "\x63\xa5\x99\x82\x09\x72\x8f\xc6" + "\xa4\x62\x24\x69\x8c\x2d\x26\x00" + "\x99\x83\x91\xd6\xc6\xcf\x57\x67" + "\x38\xea\xf2\xfc\x29\xe0\x73\x39" + "\xf9\x13\x94\x6d\xe2\x58\x28\x75" + "\x3e\xae\x71\x90\x07\x70\x1c\x38" + "\x5b\x4c\x1e\xb5\xa5\x3b\x20\xef" + "\xb1\x4c\x3e\x1a\x72\x62\xbb\x22" + "\x82\x09\xe3\x18\x3f\x4f\x48\xfc" + "\xdd\xac\xfc\xb6\x09\xdb\xd2\x7b" + "\xd6\xb7\x7e\x41\x2f\x14\xf5\x0e" + "\xc3\xac\x4a\xed\xe7\x82\xef\x31" + "\x1f\x1a\x51\x1e\x29\x60\xc8\x98" + "\x93\x51\x1d\x3d\x62\x59\x83\x82" + "\x0c\xf1\xd7\x8d\xac\x33\x44\x81" + "\x3c\x59\xb7\xd4\x5b\x65\x82\xc4" + "\xec\xdc\x24\xfd\x0e\x1a\x79\x94" + "\x34\xb0\x62\xfa\x98\x49\x26\x1f" + "\xf4\x9e\x40\x44\x5b\x1f\xf8\xbe" + "\x36\xff\xc6\xc6\x9d\xf2\xd6\xcc" + "\x63\x93\x29\xb9\x0b\x6d\xd7\x6c" + "\xdb\xf6\x21\x80\xf7\x5a\x37\x15" + "\x0c\xe3\x36\xc8\x74\x75\x20\x91" + "\xdf\x52\x2d\x0c\xe7\x45\xff\x46" + "\xb3\xf4\xec\xc2\xbd\xd3\x37\xb6" + "\x26\xa2\x5d\x7d\x61\xbf\x10\x46" + "\x57\x8d\x05\x96\x70\x0b\xd6\x41" + "\x5c\xe9\xd3\x54\x81\x39\x3a\xdd" + "\x5f\x92\x81\x6e\x35\x03\xd4\x72" + "\x3d\x5a\xe7\xb9\x3b\x0c\x84\x23" + "\x45\x5d\xec\x72\xc1\x52\xef\x2e" + "\x81\x00\xd3\xfe\x4c\x3c\x05\x61" + "\x80\x18\xc4\x6c\x03\xd3\xb7\xba" + "\x11\xd7\xb8\x6e\xea\xe1\x80\x30", + .rlen = 512, + }, +}; + +static struct cipher_testvec tf_lrw_dec_tv_template[] = { + /* Generated from AES-LRW test vectors */ + /* same as enc vectors with input and result reversed */ + { + .key = "\x45\x62\xac\x25\xf8\x28\x17\x6d" + "\x4c\x26\x84\x14\xb5\x68\x01\x85" + "\x25\x8e\x2a\x05\xe7\x3e\x9d\x03" + "\xee\x5a\x83\x0c\xcc\x09\x4c\x87", + .klen = 32, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x01", + .input = "\xa1\x6c\x50\x69\x26\xa4\xef\x7b" + "\x7c\xc6\x91\xeb\x72\xdd\x9b\xee", + .ilen = 16, + .result = "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x41\x42\x43\x44\x45\x46", + .rlen = 16, + }, { + .key = "\x59\x70\x47\x14\xf5\x57\x47\x8c" + "\xd7\x79\xe8\x0f\x54\x88\x79\x44" + 
"\x0d\x48\xf0\xb7\xb1\x5a\x53\xea" + "\x1c\xaa\x6b\x29\xc2\xca\xfb\xaf", + .klen = 32, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x02", + .input = "\xab\x72\x0a\xad\x3b\x0c\xf0\xc9" + "\x42\x2f\xf1\xae\xf1\x3c\xb1\xbd", + .ilen = 16, + .result = "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x41\x42\x43\x44\x45\x46", + .rlen = 16, + }, { + .key = "\xd8\x2a\x91\x34\xb2\x6a\x56\x50" + "\x30\xfe\x69\xe2\x37\x7f\x98\x47" + "\xcd\xf9\x0b\x16\x0c\x64\x8f\xb6" + "\xb0\x0d\x0d\x1b\xae\x85\x87\x1f", + .klen = 32, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x02\x00\x00\x00\x00", + .input = "\x85\xa7\x56\x67\x08\xfa\x42\xe1" + "\x22\xe6\x82\xfc\xd9\xb4\xd7\xd4", + .ilen = 16, + .result = "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x41\x42\x43\x44\x45\x46", + .rlen = 16, + }, { + .key = "\x0f\x6a\xef\xf8\xd3\xd2\xbb\x15" + "\x25\x83\xf7\x3c\x1f\x01\x28\x74" + "\xca\xc6\xbc\x35\x4d\x4a\x65\x54" + "\x90\xae\x61\xcf\x7b\xae\xbd\xcc" + "\xad\xe4\x94\xc5\x4a\x29\xae\x70", + .klen = 40, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x01", + .input = "\xd2\xaf\x69\x35\x24\x1d\x0e\x1c" + "\x84\x8b\x05\xe4\xa2\x2f\x16\xf5", + .ilen = 16, + .result = "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x41\x42\x43\x44\x45\x46", + .rlen = 16, + }, { + .key = "\x8a\xd4\xee\x10\x2f\xbd\x81\xff" + "\xf8\x86\xce\xac\x93\xc5\xad\xc6" + "\xa0\x19\x07\xc0\x9d\xf7\xbb\xdd" + "\x52\x13\xb2\xb7\xf0\xff\x11\xd8" + "\xd6\x08\xd0\xcd\x2e\xb1\x17\x6f", + .klen = 40, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x02\x00\x00\x00\x00", + .input = "\x4a\x23\x56\xd7\xff\x90\xd0\x9a" + "\x0d\x7c\x26\xfc\xf0\xf0\xf6\xe4", + .ilen = 16, + .result = "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x41\x42\x43\x44\x45\x46", + .rlen = 16, + }, { + .key = "\xf8\xd4\x76\xff\xd6\x46\xee\x6c" + "\x23\x84\xcb\x1c\x77\xd6\x19\x5d" + "\xfe\xf1\xa9\xf3\x7b\xbc\x8d\x21" + "\xa7\x9c\x21\xf8\xcb\x90\x02\x89" + "\xa8\x45\x34\x8e\xc8\xc5\xb5\xf1" + "\x26\xf5\x0e\x76\xfe\xfd\x1b\x1e", + .klen = 48, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x01", + .input = "\x30\xaf\x26\x05\x9d\x5d\x0a\x58" + "\xe2\xe7\xce\x8a\xb2\x56\x6d\x76", + .ilen = 16, + .result = "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x41\x42\x43\x44\x45\x46", + .rlen = 16, + }, { + .key = "\xfb\x76\x15\xb2\x3d\x80\x89\x1d" + "\xd4\x70\x98\x0b\xc7\x95\x84\xc8" + "\xb2\xfb\x64\xce\x60\x97\x87\x8d" + "\x17\xfc\xe4\x5a\x49\xe8\x30\xb7" + "\x6e\x78\x17\xe7\x2d\x5e\x12\xd4" + "\x60\x64\x04\x7a\xf1\x2f\x9e\x0c", + .klen = 48, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x02\x00\x00\x00\x00", + .input = "\xdf\xcf\xdc\xd2\xe1\xcf\x86\x75" + "\x17\x66\x5e\x0c\x14\xa1\x3d\x40", + .ilen = 16, + .result = "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x41\x42\x43\x44\x45\x46", + .rlen = 16, + }, { + .key = "\xf8\xd4\x76\xff\xd6\x46\xee\x6c" + "\x23\x84\xcb\x1c\x77\xd6\x19\x5d" + "\xfe\xf1\xa9\xf3\x7b\xbc\x8d\x21" + "\xa7\x9c\x21\xf8\xcb\x90\x02\x89" + "\xa8\x45\x34\x8e\xc8\xc5\xb5\xf1" + "\x26\xf5\x0e\x76\xfe\xfd\x1b\x1e", + .klen = 48, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x01", + .input = "\x30\x38\xeb\xaf\x12\x43\x1a\x89" + "\x62\xa2\x36\xe5\xcf\x77\x1e\xd9" + "\x08\xc3\x0d\xdd\x95\xab\x19\x96" + "\x27\x52\x41\xc3\xca\xfb\xf6\xee" + "\x40\x2d\xdf\xdd\x00\x0c\xb9\x0a" + "\x3a\xf0\xc0\xd1\xda\x63\x9e\x45" + "\x42\xe9\x29\xc0\xb4\x07\xb4\x31" + "\x66\x77\x72\xb5\xb6\xb3\x57\x46" + "\x34\x9a\xfe\x03\xaf\x6b\x36\x07" + 
"\x63\x8e\xc2\x5d\xa6\x0f\xb6\x7d" + "\xfb\x6d\x82\x51\xb6\x98\xd0\x71" + "\xe7\x10\x7a\xdf\xb2\xbd\xf1\x1d" + "\x72\x2b\x54\x13\xe3\x6d\x79\x37" + "\xa9\x39\x2c\xdf\x21\xab\x87\xd5" + "\xee\xef\x9a\x12\x50\x39\x2e\x1b" + "\x7d\xe6\x6a\x27\x48\xb9\xe7\xac" + "\xaa\xcd\x79\x5f\xf2\xf3\xa0\x08" + "\x6f\x2c\xf4\x0e\xd1\xb8\x89\x25" + "\x31\x9d\xef\xb1\x1d\x27\x55\x04" + "\xc9\x8c\xb7\x68\xdc\xb6\x67\x8a" + "\xdb\xcf\x22\xf2\x3b\x6f\xce\xbb" + "\x26\xbe\x4f\x27\x04\x42\xd1\x44" + "\x4c\x08\xa3\x95\x4c\x7f\x1a\xaf" + "\x1d\x28\x14\xfd\xb1\x1a\x34\x18" + "\xf5\x1e\x28\x69\x95\x6a\x5a\xba" + "\x8e\xb2\x58\x1d\x28\x17\x13\x3d" + "\x38\x7d\x14\x8d\xab\x5d\xf9\xe8" + "\x3c\x0f\x2b\x0d\x2b\x08\xb4\x4b" + "\x6b\x0d\xc8\xa7\x84\xc2\x3a\x1a" + "\xb7\xbd\xda\x92\x29\xb8\x5b\x5a" + "\x63\xa5\x99\x82\x09\x72\x8f\xc6" + "\xa4\x62\x24\x69\x8c\x2d\x26\x00" + "\x99\x83\x91\xd6\xc6\xcf\x57\x67" + "\x38\xea\xf2\xfc\x29\xe0\x73\x39" + "\xf9\x13\x94\x6d\xe2\x58\x28\x75" + "\x3e\xae\x71\x90\x07\x70\x1c\x38" + "\x5b\x4c\x1e\xb5\xa5\x3b\x20\xef" + "\xb1\x4c\x3e\x1a\x72\x62\xbb\x22" + "\x82\x09\xe3\x18\x3f\x4f\x48\xfc" + "\xdd\xac\xfc\xb6\x09\xdb\xd2\x7b" + "\xd6\xb7\x7e\x41\x2f\x14\xf5\x0e" + "\xc3\xac\x4a\xed\xe7\x82\xef\x31" + "\x1f\x1a\x51\x1e\x29\x60\xc8\x98" + "\x93\x51\x1d\x3d\x62\x59\x83\x82" + "\x0c\xf1\xd7\x8d\xac\x33\x44\x81" + "\x3c\x59\xb7\xd4\x5b\x65\x82\xc4" + "\xec\xdc\x24\xfd\x0e\x1a\x79\x94" + "\x34\xb0\x62\xfa\x98\x49\x26\x1f" + "\xf4\x9e\x40\x44\x5b\x1f\xf8\xbe" + "\x36\xff\xc6\xc6\x9d\xf2\xd6\xcc" + "\x63\x93\x29\xb9\x0b\x6d\xd7\x6c" + "\xdb\xf6\x21\x80\xf7\x5a\x37\x15" + "\x0c\xe3\x36\xc8\x74\x75\x20\x91" + "\xdf\x52\x2d\x0c\xe7\x45\xff\x46" + "\xb3\xf4\xec\xc2\xbd\xd3\x37\xb6" + "\x26\xa2\x5d\x7d\x61\xbf\x10\x46" + "\x57\x8d\x05\x96\x70\x0b\xd6\x41" + "\x5c\xe9\xd3\x54\x81\x39\x3a\xdd" + "\x5f\x92\x81\x6e\x35\x03\xd4\x72" + "\x3d\x5a\xe7\xb9\x3b\x0c\x84\x23" + "\x45\x5d\xec\x72\xc1\x52\xef\x2e" + "\x81\x00\xd3\xfe\x4c\x3c\x05\x61" + "\x80\x18\xc4\x6c\x03\xd3\xb7\xba" + "\x11\xd7\xb8\x6e\xea\xe1\x80\x30", + .ilen = 512, + .result = "\x05\x11\xb7\x18\xab\xc6\x2d\xac" + "\x70\x5d\xf6\x22\x94\xcd\xe5\x6c" + "\x17\x6b\xf6\x1c\xf0\xf3\x6e\xf8" + "\x50\x38\x1f\x71\x49\xb6\x57\xd6" + "\x8f\xcb\x8d\x6b\xe3\xa6\x29\x90" + "\xfe\x2a\x62\x82\xae\x6d\x8b\xf6" + "\xad\x1e\x9e\x20\x5f\x38\xbe\x04" + "\xda\x10\x8e\xed\xa2\xa4\x87\xab" + "\xda\x6b\xb4\x0c\x75\xba\xd3\x7c" + "\xc9\xac\x42\x31\x95\x7c\xc9\x04" + "\xeb\xd5\x6e\x32\x69\x8a\xdb\xa6" + "\x15\xd7\x3f\x4f\x2f\x66\x69\x03" + "\x9c\x1f\x54\x0f\xde\x1f\xf3\x65" + "\x4c\x96\x12\xed\x7c\x92\x03\x01" + "\x6f\xbc\x35\x93\xac\xf1\x27\xf1" + "\xb4\x96\x82\x5a\x5f\xb0\xa0\x50" + "\x89\xa4\x8e\x66\x44\x85\xcc\xfd" + "\x33\x14\x70\xe3\x96\xb2\xc3\xd3" + "\xbb\x54\x5a\x1a\xf9\x74\xa2\xc5" + "\x2d\x64\x75\xdd\xb4\x54\xe6\x74" + "\x8c\xd3\x9d\x9e\x86\xab\x51\x53" + "\xb7\x93\x3e\x6f\xd0\x4e\x2c\x40" + "\xf6\xa8\x2e\x3e\x9d\xf4\x66\xa5" + "\x76\x12\x73\x44\x1a\x56\xd7\x72" + "\x88\xcd\x21\x8c\x4c\x0f\xfe\xda" + "\x95\xe0\x3a\xa6\xa5\x84\x46\xcd" + "\xd5\x3e\x9d\x3a\xe2\x67\xe6\x60" + "\x1a\xe2\x70\x85\x58\xc2\x1b\x09" + "\xe1\xd7\x2c\xca\xad\xa8\x8f\xf9" + "\xac\xb3\x0e\xdb\xca\x2e\xe2\xb8" + "\x51\x71\xd9\x3c\x6c\xf1\x56\xf8" + "\xea\x9c\xf1\xfb\x0c\xe6\xb7\x10" + "\x1c\xf8\xa9\x7c\xe8\x53\x35\xc1" + "\x90\x3e\x76\x4a\x74\xa4\x21\x2c" + "\xf6\x2c\x4e\x0f\x94\x3a\x88\x2e" + "\x41\x09\x6a\x33\x7d\xf6\xdd\x3f" + "\x8d\x23\x31\x74\x84\xeb\x88\x6e" + "\xcc\xb9\xbc\x22\x83\x19\x07\x22" + "\xa5\x2d\xdf\xa5\xf3\x80\x85\x78" + "\x84\x39\x6a\x6d\x6a\x99\x4f\xa5" + 
"\x15\xfe\x46\xb0\xe4\x6c\xa5\x41" + "\x3c\xce\x8f\x42\x60\x71\xa7\x75" + "\x08\x40\x65\x8a\x82\xbf\xf5\x43" + "\x71\x96\xa9\x4d\x44\x8a\x20\xbe" + "\xfa\x4d\xbb\xc0\x7d\x31\x96\x65" + "\xe7\x75\xe5\x3e\xfd\x92\x3b\xc9" + "\x55\xbb\x16\x7e\xf7\xc2\x8c\xa4" + "\x40\x1d\xe5\xef\x0e\xdf\xe4\x9a" + "\x62\x73\x65\xfd\x46\x63\x25\x3d" + "\x2b\xaf\xe5\x64\xfe\xa5\x5c\xcf" + "\x24\xf3\xb4\xac\x64\xba\xdf\x4b" + "\xc6\x96\x7d\x81\x2d\x8d\x97\xf7" + "\xc5\x68\x77\x84\x32\x2b\xcc\x85" + "\x74\x96\xf0\x12\x77\x61\xb9\xeb" + "\x71\xaa\x82\xcb\x1c\xdb\x89\xc8" + "\xc6\xb5\xe3\x5c\x7d\x39\x07\x24" + "\xda\x39\x87\x45\xc0\x2b\xbb\x01" + "\xac\xbc\x2a\x5c\x7f\xfc\xe8\xce" + "\x6d\x9c\x6f\xed\xd3\xc1\xa1\xd6" + "\xc5\x55\xa9\x66\x2f\xe1\xc8\x32" + "\xa6\x5d\xa4\x3a\x98\x73\xe8\x45" + "\xa4\xc7\xa8\xb4\xf6\x13\x03\xf6" + "\xe9\x2e\xc4\x29\x0f\x84\xdb\xc4" + "\x21\xc4\xc2\x75\x67\x89\x37\x0a", + .rlen = 512, + }, +}; + +static struct cipher_testvec tf_xts_enc_tv_template[] = { + /* Generated from AES-XTS test vectors */ +{ + .key = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00", + .klen = 32, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00", + .input = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00", + .ilen = 32, + .result = "\x4b\xc9\x44\x4a\x11\xa3\xef\xac" + "\x30\x74\xe4\x44\x52\x77\x97\x43" + "\xa7\x60\xb2\x45\x2e\xf9\x00\x90" + "\x9f\xaa\xfd\x89\x6e\x9d\x4a\xe0", + .rlen = 32, + }, { + .key = "\x11\x11\x11\x11\x11\x11\x11\x11" + "\x11\x11\x11\x11\x11\x11\x11\x11" + "\x22\x22\x22\x22\x22\x22\x22\x22" + "\x22\x22\x22\x22\x22\x22\x22\x22", + .klen = 32, + .iv = "\x33\x33\x33\x33\x33\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00", + .input = "\x44\x44\x44\x44\x44\x44\x44\x44" + "\x44\x44\x44\x44\x44\x44\x44\x44" + "\x44\x44\x44\x44\x44\x44\x44\x44" + "\x44\x44\x44\x44\x44\x44\x44\x44", + .ilen = 32, + .result = "\x57\x0e\x8f\xe5\x2a\x35\x61\x4f" + "\x32\xd3\xbd\x36\x05\x15\x44\x2c" + "\x58\x06\xf7\xf8\x00\xa8\xb6\xd5" + "\xc6\x28\x92\xdb\xd8\x34\xa2\xe9", + .rlen = 32, + }, { + .key = "\xff\xfe\xfd\xfc\xfb\xfa\xf9\xf8" + "\xf7\xf6\xf5\xf4\xf3\xf2\xf1\xf0" + "\x22\x22\x22\x22\x22\x22\x22\x22" + "\x22\x22\x22\x22\x22\x22\x22\x22", + .klen = 32, + .iv = "\x33\x33\x33\x33\x33\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00", + .input = "\x44\x44\x44\x44\x44\x44\x44\x44" + "\x44\x44\x44\x44\x44\x44\x44\x44" + "\x44\x44\x44\x44\x44\x44\x44\x44" + "\x44\x44\x44\x44\x44\x44\x44\x44", + .ilen = 32, + .result = "\x96\x45\x8f\x8d\x7a\x75\xb1\xde" + "\x40\x0c\x89\x56\xf6\x4d\xa7\x07" + "\x38\xbb\x5b\xe9\xcd\x84\xae\xb2" + "\x7b\x6a\x62\xf4\x8c\xb5\x37\xea", + .rlen = 32, + }, { + .key = "\x27\x18\x28\x18\x28\x45\x90\x45" + "\x23\x53\x60\x28\x74\x71\x35\x26" + "\x31\x41\x59\x26\x53\x58\x97\x93" + "\x23\x84\x62\x64\x33\x83\x27\x95", + .klen = 32, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00", + .input = "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f" + "\x10\x11\x12\x13\x14\x15\x16\x17" + "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f" + "\x20\x21\x22\x23\x24\x25\x26\x27" + "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f" + "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f" + "\x40\x41\x42\x43\x44\x45\x46\x47" + "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f" + "\x50\x51\x52\x53\x54\x55\x56\x57" + "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f" + 
"\x60\x61\x62\x63\x64\x65\x66\x67" + "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f" + "\x70\x71\x72\x73\x74\x75\x76\x77" + "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f" + "\x80\x81\x82\x83\x84\x85\x86\x87" + "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f" + "\x90\x91\x92\x93\x94\x95\x96\x97" + "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f" + "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7" + "\xa8\xa9\xaa\xab\xac\xad\xae\xaf" + "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7" + "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf" + "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7" + "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf" + "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7" + "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf" + "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7" + "\xe8\xe9\xea\xeb\xec\xed\xee\xef" + "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7" + "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff" + "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f" + "\x10\x11\x12\x13\x14\x15\x16\x17" + "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f" + "\x20\x21\x22\x23\x24\x25\x26\x27" + "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f" + "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f" + "\x40\x41\x42\x43\x44\x45\x46\x47" + "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f" + "\x50\x51\x52\x53\x54\x55\x56\x57" + "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f" + "\x60\x61\x62\x63\x64\x65\x66\x67" + "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f" + "\x70\x71\x72\x73\x74\x75\x76\x77" + "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f" + "\x80\x81\x82\x83\x84\x85\x86\x87" + "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f" + "\x90\x91\x92\x93\x94\x95\x96\x97" + "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f" + "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7" + "\xa8\xa9\xaa\xab\xac\xad\xae\xaf" + "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7" + "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf" + "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7" + "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf" + "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7" + "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf" + "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7" + "\xe8\xe9\xea\xeb\xec\xed\xee\xef" + "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7" + "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff", + .ilen = 512, + .result = "\xa9\x78\xae\x1e\xea\xa2\x44\x4c" + "\xa2\x7a\x64\x1f\xaf\x46\xc1\xe0" + "\x6c\xb2\xf3\x92\x9a\xd6\x7d\x58" + "\xb8\x2d\xb9\x5d\x58\x07\x66\x50" + "\xea\x35\x35\x8c\xb2\x46\x61\x06" + "\x5d\x65\xfc\x57\x8f\x69\x74\xab" + "\x8a\x06\x69\xb5\x6c\xda\x66\xc7" + "\x52\x90\xbb\x8e\x6d\x8b\xb5\xa2" + "\x78\x1d\xc2\xa9\xc2\x73\x00\xc3" + "\x32\x36\x7c\x97\x6b\x4e\x8a\x50" + "\xe4\x91\x83\x96\x8f\xf4\x94\x1a" + "\xa6\x27\xe1\x33\xcb\x91\xc6\x5f" + "\x94\x75\xbc\xd7\x3e\x3e\x6f\x9e" + "\xa9\x31\x80\x5e\xe5\xdb\xc8\x53" + "\x01\x73\x68\x32\x25\x19\xfa\xfb" + "\xe4\xcf\xb9\x3e\xa2\xa0\x8f\x31" + "\xbf\x54\x06\x93\xa8\xb1\x0f\xb6" + "\x7c\x3c\xde\x6f\x0f\xfb\x0c\x11" + "\x39\x80\x39\x09\x97\x65\xf2\x83" + "\xae\xe6\xa1\x6f\x47\xb8\x49\xde" + "\x99\x36\x20\x7d\x97\x3b\xec\xfa" + "\xb4\x33\x6e\x7a\xc7\x46\x84\x49" + "\x91\xcd\xe1\x57\x0d\xed\x40\x08" + "\x13\xf1\x4e\x3e\xa4\xa4\x5c\xe6" + "\xd2\x0c\x20\x8f\x3e\xdf\x3f\x47" + "\x9a\x2f\xde\x6d\x66\xc9\x99\x4a" + "\x2d\x9e\x9d\x4b\x1a\x27\xa2\x12" + "\x99\xf0\xf8\xb1\xb6\xf6\x57\xc3" + "\xca\x1c\xa3\x8e\xed\x39\x28\xb5" + "\x10\x1b\x4b\x08\x42\x00\x4a\xd3" + "\xad\x5a\xc6\x8e\xc8\xbb\x95\xc4" + "\x4b\xaa\xfe\xd5\x42\xa8\xa3\x6d" + "\x3c\xf3\x34\x91\x2d\xb4\xdd\x20" + "\x0c\x90\x6d\xa3\x9b\x66\x9d\x24" + "\x02\xa6\xa9\x3f\x3f\x58\x5d\x47" + "\x24\x65\x63\x7e\xbd\x8c\xe6\x52" + "\x7d\xef\x33\x53\x63\xec\xaa\x0b" + "\x64\x15\xa9\xa6\x1f\x10\x00\x38" + "\x35\xa8\xe7\xbe\x23\x70\x22\xe0" + "\xd3\xb9\xe6\xfd\xe6\xaa\x03\x50" + "\xf3\x3c\x27\x36\x8b\xcc\xfe\x9c" + "\x9c\xa3\xb3\xe7\x68\x9b\xa2\x71" + "\xe0\x07\xd9\x1f\x68\x1f\xac\x5e" + 
"\x7a\x74\x85\xa9\x6a\x90\xab\x2c" + "\x38\x51\xbc\x1f\x43\x4a\x56\x1c" + "\xf8\x47\x03\x4e\x67\xa8\x1f\x99" + "\x04\x39\x73\x32\xb2\x86\x79\xe7" + "\x14\x28\x70\xb8\xe2\x7d\x69\x85" + "\xb6\x0f\xc5\xd0\xd0\x01\x5c\xe6" + "\x09\x0f\x75\xf7\xb6\x81\xd2\x11" + "\x20\x9c\xa1\xee\x11\x44\x79\xd0" + "\xb2\x34\x77\xda\x10\x9a\x6f\x6f" + "\xef\x7c\xd9\xdc\x35\xb7\x61\xdd" + "\xf1\xa4\xc6\x1c\xbf\x05\x22\xac" + "\xfe\x2f\x85\x00\x44\xdf\x33\x16" + "\x35\xb6\xa3\xd3\x70\xdf\x69\x35" + "\x6a\xc7\xb4\x99\x45\x27\xc8\x8e" + "\x5a\x14\x30\xd0\x55\x3e\x4f\x64" + "\x0d\x38\xe3\xdf\x8b\xa8\x93\x26" + "\x75\xae\xf6\xb5\x23\x0b\x17\x31" + "\xbf\x27\xb8\xb5\x94\x31\xa7\x8f" + "\x43\xc4\x46\x24\x22\x4f\x8f\x7e" + "\xe5\xf4\x6d\x1e\x0e\x18\x7a\xbb" + "\xa6\x8f\xfb\x49\x49\xd8\x7e\x5a", + .rlen = 512, + }, { + .key = "\x27\x18\x28\x18\x28\x45\x90\x45" + "\x23\x53\x60\x28\x74\x71\x35\x26" + "\x62\x49\x77\x57\x24\x70\x93\x69" + "\x99\x59\x57\x49\x66\x96\x76\x27" + "\x31\x41\x59\x26\x53\x58\x97\x93" + "\x23\x84\x62\x64\x33\x83\x27\x95" + "\x02\x88\x41\x97\x16\x93\x99\x37" + "\x51\x05\x82\x09\x74\x94\x45\x92", + .klen = 64, + .iv = "\xff\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00", + .input = "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f" + "\x10\x11\x12\x13\x14\x15\x16\x17" + "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f" + "\x20\x21\x22\x23\x24\x25\x26\x27" + "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f" + "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f" + "\x40\x41\x42\x43\x44\x45\x46\x47" + "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f" + "\x50\x51\x52\x53\x54\x55\x56\x57" + "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f" + "\x60\x61\x62\x63\x64\x65\x66\x67" + "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f" + "\x70\x71\x72\x73\x74\x75\x76\x77" + "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f" + "\x80\x81\x82\x83\x84\x85\x86\x87" + "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f" + "\x90\x91\x92\x93\x94\x95\x96\x97" + "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f" + "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7" + "\xa8\xa9\xaa\xab\xac\xad\xae\xaf" + "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7" + "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf" + "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7" + "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf" + "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7" + "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf" + "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7" + "\xe8\xe9\xea\xeb\xec\xed\xee\xef" + "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7" + "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff" + "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f" + "\x10\x11\x12\x13\x14\x15\x16\x17" + "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f" + "\x20\x21\x22\x23\x24\x25\x26\x27" + "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f" + "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f" + "\x40\x41\x42\x43\x44\x45\x46\x47" + "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f" + "\x50\x51\x52\x53\x54\x55\x56\x57" + "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f" + "\x60\x61\x62\x63\x64\x65\x66\x67" + "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f" + "\x70\x71\x72\x73\x74\x75\x76\x77" + "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f" + "\x80\x81\x82\x83\x84\x85\x86\x87" + "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f" + "\x90\x91\x92\x93\x94\x95\x96\x97" + "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f" + "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7" + "\xa8\xa9\xaa\xab\xac\xad\xae\xaf" + "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7" + "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf" + "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7" + "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf" + "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7" + "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf" + "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7" + "\xe8\xe9\xea\xeb\xec\xed\xee\xef" + "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7" + 
"\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff", + .ilen = 512, + .result = "\xd7\x4b\x93\x7d\x13\xa2\xa2\xe1" + "\x35\x39\x71\x88\x76\x1e\xc9\xea" + "\x86\xad\xf3\x14\x48\x3d\x5e\xe9" + "\xe9\x2d\xb2\x56\x59\x35\x9d\xec" + "\x84\xfa\x7e\x9d\x6d\x33\x36\x8f" + "\xce\xf4\xa9\x21\x0b\x5f\x96\xec" + "\xcb\xf9\x57\x68\x33\x88\x39\xbf" + "\x2f\xbb\x59\x03\xbd\x66\x8b\x11" + "\x11\x65\x51\x2e\xb8\x67\x05\xd1" + "\x27\x11\x5c\xd4\xcc\x97\xc2\xb3" + "\xa9\x55\xaf\x07\x56\xd1\xdc\xf5" + "\x85\xdc\x46\xe6\xf0\x24\xeb\x93" + "\x4d\xf0\x9b\xf5\x73\x1c\xda\x03" + "\x22\xc8\x3a\x4f\xb4\x19\x91\x09" + "\x54\x0b\xf6\xfe\x17\x3d\x1a\x53" + "\x72\x60\x79\xcb\x0e\x32\x8a\x77" + "\xd5\xed\xdb\x33\xd7\x62\x16\x69" + "\x63\xe0\xab\xb5\xf6\x9c\x5f\x3d" + "\x69\x35\x61\x86\xf8\x86\xb9\x89" + "\x6e\x59\x35\xac\xf6\x6b\x33\xa0" + "\xea\xef\x96\x62\xd8\xa9\xcf\x56" + "\xbf\xdb\x8a\xfd\xa1\x82\x77\x73" + "\x3d\x94\x4a\x49\x42\x6d\x08\x60" + "\xa1\xea\xab\xb6\x88\x13\x94\xb8" + "\x51\x98\xdb\x35\x85\xdf\xf6\xb9" + "\x8f\xcd\xdf\x80\xd3\x40\x2d\x72" + "\xb8\xb2\x6c\x02\x43\x35\x22\x2a" + "\x31\xed\xcd\x16\x19\xdf\x62\x0f" + "\x29\xcf\x87\x04\xec\x02\x4f\xe4" + "\xa2\xed\x73\xc6\x69\xd3\x7e\x89" + "\x0b\x76\x10\x7c\xd6\xf9\x6a\x25" + "\xed\xcc\x60\x5d\x61\x20\xc1\x97" + "\x56\x91\x57\x28\xbe\x71\x0d\xcd" + "\xde\xc4\x9e\x55\x91\xbe\xd1\x28" + "\x9b\x90\xeb\x73\xf3\x68\x51\xc6" + "\xdf\x82\xcc\xd8\x1f\xce\x5b\x27" + "\xc0\x60\x5e\x33\xd6\xa7\x20\xea" + "\xb2\x54\xc7\x5d\x6a\x3b\x67\x47" + "\xcf\xa0\xe3\xab\x86\xaf\xc1\x42" + "\xe6\xb0\x23\x4a\xaf\x53\xdf\xa0" + "\xad\x12\x32\x31\x03\xf7\x21\xbe" + "\x2d\xd5\x82\x42\xb6\x4a\x3d\xcd" + "\xd8\x81\x77\xa9\x49\x98\x6c\x09" + "\xc5\xa3\x61\x12\x62\x85\x6b\xcd" + "\xb3\xf4\x20\x0c\x41\xc4\x05\x37" + "\x46\x5f\xeb\x71\x8b\xf1\xaf\x6e" + "\xba\xf3\x50\x2e\xfe\xa8\x37\xeb" + "\xe8\x8c\x4f\xa4\x0c\xf1\x31\xc8" + "\x6e\x71\x4f\xa5\xd7\x97\x73\xe0" + "\x93\x4a\x2f\xda\x7b\xe0\x20\x54" + "\x1f\x8d\x85\x79\x0b\x7b\x5e\x75" + "\xb9\x07\x67\xcc\xc8\xe7\x21\x15" + "\xa7\xc8\x98\xff\x4b\x80\x1c\x12" + "\xa8\x54\xe1\x38\x52\xe6\x74\x81" + "\x97\x47\xa1\x41\x0e\xc0\x50\xe3" + "\x55\x0e\xc3\xa7\x70\x77\xce\x07" + "\xed\x8c\x88\xe6\xa1\x5b\x14\xec" + "\xe6\xde\x06\x6d\x74\xc5\xd9\xfa" + "\xe5\x2f\x5a\xff\xc8\x05\xee\x27" + "\x35\x61\xbf\x0b\x19\x78\x9b\xd2" + "\x04\xc7\x05\xb1\x79\xb4\xff\x5f" + "\xf3\xea\x67\x52\x78\xc2\xce\x70" + "\xa4\x05\x0b\xb2\xb3\xa8\x30\x97" + "\x37\x30\xe1\x91\x8d\xb3\x2a\xff", + .rlen = 512, + }, +}; + +static struct cipher_testvec tf_xts_dec_tv_template[] = { + /* Generated from AES-XTS test vectors */ + /* same as enc vectors with input and result reversed */ + { + .key = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00", + .klen = 32, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00", + .input = "\x4b\xc9\x44\x4a\x11\xa3\xef\xac" + "\x30\x74\xe4\x44\x52\x77\x97\x43" + "\xa7\x60\xb2\x45\x2e\xf9\x00\x90" + "\x9f\xaa\xfd\x89\x6e\x9d\x4a\xe0", + .ilen = 32, + .result = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00", + .rlen = 32, + }, { + .key = "\x11\x11\x11\x11\x11\x11\x11\x11" + "\x11\x11\x11\x11\x11\x11\x11\x11" + "\x22\x22\x22\x22\x22\x22\x22\x22" + "\x22\x22\x22\x22\x22\x22\x22\x22", + .klen = 32, + .iv = "\x33\x33\x33\x33\x33\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00", + .input = "\x57\x0e\x8f\xe5\x2a\x35\x61\x4f" + 
"\x32\xd3\xbd\x36\x05\x15\x44\x2c" + "\x58\x06\xf7\xf8\x00\xa8\xb6\xd5" + "\xc6\x28\x92\xdb\xd8\x34\xa2\xe9", + .ilen = 32, + .result = "\x44\x44\x44\x44\x44\x44\x44\x44" + "\x44\x44\x44\x44\x44\x44\x44\x44" + "\x44\x44\x44\x44\x44\x44\x44\x44" + "\x44\x44\x44\x44\x44\x44\x44\x44", + .rlen = 32, + }, { + .key = "\xff\xfe\xfd\xfc\xfb\xfa\xf9\xf8" + "\xf7\xf6\xf5\xf4\xf3\xf2\xf1\xf0" + "\x22\x22\x22\x22\x22\x22\x22\x22" + "\x22\x22\x22\x22\x22\x22\x22\x22", + .klen = 32, + .iv = "\x33\x33\x33\x33\x33\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00", + .input = "\x96\x45\x8f\x8d\x7a\x75\xb1\xde" + "\x40\x0c\x89\x56\xf6\x4d\xa7\x07" + "\x38\xbb\x5b\xe9\xcd\x84\xae\xb2" + "\x7b\x6a\x62\xf4\x8c\xb5\x37\xea", + .ilen = 32, + .result = "\x44\x44\x44\x44\x44\x44\x44\x44" + "\x44\x44\x44\x44\x44\x44\x44\x44" + "\x44\x44\x44\x44\x44\x44\x44\x44" + "\x44\x44\x44\x44\x44\x44\x44\x44", + .rlen = 32, + }, { + .key = "\x27\x18\x28\x18\x28\x45\x90\x45" + "\x23\x53\x60\x28\x74\x71\x35\x26" + "\x31\x41\x59\x26\x53\x58\x97\x93" + "\x23\x84\x62\x64\x33\x83\x27\x95", + .klen = 32, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00", + .input = "\xa9\x78\xae\x1e\xea\xa2\x44\x4c" + "\xa2\x7a\x64\x1f\xaf\x46\xc1\xe0" + "\x6c\xb2\xf3\x92\x9a\xd6\x7d\x58" + "\xb8\x2d\xb9\x5d\x58\x07\x66\x50" + "\xea\x35\x35\x8c\xb2\x46\x61\x06" + "\x5d\x65\xfc\x57\x8f\x69\x74\xab" + "\x8a\x06\x69\xb5\x6c\xda\x66\xc7" + "\x52\x90\xbb\x8e\x6d\x8b\xb5\xa2" + "\x78\x1d\xc2\xa9\xc2\x73\x00\xc3" + "\x32\x36\x7c\x97\x6b\x4e\x8a\x50" + "\xe4\x91\x83\x96\x8f\xf4\x94\x1a" + "\xa6\x27\xe1\x33\xcb\x91\xc6\x5f" + "\x94\x75\xbc\xd7\x3e\x3e\x6f\x9e" + "\xa9\x31\x80\x5e\xe5\xdb\xc8\x53" + "\x01\x73\x68\x32\x25\x19\xfa\xfb" + "\xe4\xcf\xb9\x3e\xa2\xa0\x8f\x31" + "\xbf\x54\x06\x93\xa8\xb1\x0f\xb6" + "\x7c\x3c\xde\x6f\x0f\xfb\x0c\x11" + "\x39\x80\x39\x09\x97\x65\xf2\x83" + "\xae\xe6\xa1\x6f\x47\xb8\x49\xde" + "\x99\x36\x20\x7d\x97\x3b\xec\xfa" + "\xb4\x33\x6e\x7a\xc7\x46\x84\x49" + "\x91\xcd\xe1\x57\x0d\xed\x40\x08" + "\x13\xf1\x4e\x3e\xa4\xa4\x5c\xe6" + "\xd2\x0c\x20\x8f\x3e\xdf\x3f\x47" + "\x9a\x2f\xde\x6d\x66\xc9\x99\x4a" + "\x2d\x9e\x9d\x4b\x1a\x27\xa2\x12" + "\x99\xf0\xf8\xb1\xb6\xf6\x57\xc3" + "\xca\x1c\xa3\x8e\xed\x39\x28\xb5" + "\x10\x1b\x4b\x08\x42\x00\x4a\xd3" + "\xad\x5a\xc6\x8e\xc8\xbb\x95\xc4" + "\x4b\xaa\xfe\xd5\x42\xa8\xa3\x6d" + "\x3c\xf3\x34\x91\x2d\xb4\xdd\x20" + "\x0c\x90\x6d\xa3\x9b\x66\x9d\x24" + "\x02\xa6\xa9\x3f\x3f\x58\x5d\x47" + "\x24\x65\x63\x7e\xbd\x8c\xe6\x52" + "\x7d\xef\x33\x53\x63\xec\xaa\x0b" + "\x64\x15\xa9\xa6\x1f\x10\x00\x38" + "\x35\xa8\xe7\xbe\x23\x70\x22\xe0" + "\xd3\xb9\xe6\xfd\xe6\xaa\x03\x50" + "\xf3\x3c\x27\x36\x8b\xcc\xfe\x9c" + "\x9c\xa3\xb3\xe7\x68\x9b\xa2\x71" + "\xe0\x07\xd9\x1f\x68\x1f\xac\x5e" + "\x7a\x74\x85\xa9\x6a\x90\xab\x2c" + "\x38\x51\xbc\x1f\x43\x4a\x56\x1c" + "\xf8\x47\x03\x4e\x67\xa8\x1f\x99" + "\x04\x39\x73\x32\xb2\x86\x79\xe7" + "\x14\x28\x70\xb8\xe2\x7d\x69\x85" + "\xb6\x0f\xc5\xd0\xd0\x01\x5c\xe6" + "\x09\x0f\x75\xf7\xb6\x81\xd2\x11" + "\x20\x9c\xa1\xee\x11\x44\x79\xd0" + "\xb2\x34\x77\xda\x10\x9a\x6f\x6f" + "\xef\x7c\xd9\xdc\x35\xb7\x61\xdd" + "\xf1\xa4\xc6\x1c\xbf\x05\x22\xac" + "\xfe\x2f\x85\x00\x44\xdf\x33\x16" + "\x35\xb6\xa3\xd3\x70\xdf\x69\x35" + "\x6a\xc7\xb4\x99\x45\x27\xc8\x8e" + "\x5a\x14\x30\xd0\x55\x3e\x4f\x64" + "\x0d\x38\xe3\xdf\x8b\xa8\x93\x26" + "\x75\xae\xf6\xb5\x23\x0b\x17\x31" + "\xbf\x27\xb8\xb5\x94\x31\xa7\x8f" + "\x43\xc4\x46\x24\x22\x4f\x8f\x7e" + "\xe5\xf4\x6d\x1e\x0e\x18\x7a\xbb" + "\xa6\x8f\xfb\x49\x49\xd8\x7e\x5a", + .ilen = 512, + 
.result = "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f" + "\x10\x11\x12\x13\x14\x15\x16\x17" + "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f" + "\x20\x21\x22\x23\x24\x25\x26\x27" + "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f" + "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f" + "\x40\x41\x42\x43\x44\x45\x46\x47" + "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f" + "\x50\x51\x52\x53\x54\x55\x56\x57" + "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f" + "\x60\x61\x62\x63\x64\x65\x66\x67" + "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f" + "\x70\x71\x72\x73\x74\x75\x76\x77" + "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f" + "\x80\x81\x82\x83\x84\x85\x86\x87" + "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f" + "\x90\x91\x92\x93\x94\x95\x96\x97" + "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f" + "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7" + "\xa8\xa9\xaa\xab\xac\xad\xae\xaf" + "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7" + "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf" + "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7" + "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf" + "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7" + "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf" + "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7" + "\xe8\xe9\xea\xeb\xec\xed\xee\xef" + "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7" + "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff" + "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f" + "\x10\x11\x12\x13\x14\x15\x16\x17" + "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f" + "\x20\x21\x22\x23\x24\x25\x26\x27" + "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f" + "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f" + "\x40\x41\x42\x43\x44\x45\x46\x47" + "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f" + "\x50\x51\x52\x53\x54\x55\x56\x57" + "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f" + "\x60\x61\x62\x63\x64\x65\x66\x67" + "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f" + "\x70\x71\x72\x73\x74\x75\x76\x77" + "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f" + "\x80\x81\x82\x83\x84\x85\x86\x87" + "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f" + "\x90\x91\x92\x93\x94\x95\x96\x97" + "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f" + "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7" + "\xa8\xa9\xaa\xab\xac\xad\xae\xaf" + "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7" + "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf" + "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7" + "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf" + "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7" + "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf" + "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7" + "\xe8\xe9\xea\xeb\xec\xed\xee\xef" + "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7" + "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff", + .rlen = 512, + }, { + .key = "\x27\x18\x28\x18\x28\x45\x90\x45" + "\x23\x53\x60\x28\x74\x71\x35\x26" + "\x62\x49\x77\x57\x24\x70\x93\x69" + "\x99\x59\x57\x49\x66\x96\x76\x27" + "\x31\x41\x59\x26\x53\x58\x97\x93" + "\x23\x84\x62\x64\x33\x83\x27\x95" + "\x02\x88\x41\x97\x16\x93\x99\x37" + "\x51\x05\x82\x09\x74\x94\x45\x92", + .klen = 64, + .iv = "\xff\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00", + .input = "\xd7\x4b\x93\x7d\x13\xa2\xa2\xe1" + "\x35\x39\x71\x88\x76\x1e\xc9\xea" + "\x86\xad\xf3\x14\x48\x3d\x5e\xe9" + "\xe9\x2d\xb2\x56\x59\x35\x9d\xec" + "\x84\xfa\x7e\x9d\x6d\x33\x36\x8f" + "\xce\xf4\xa9\x21\x0b\x5f\x96\xec" + "\xcb\xf9\x57\x68\x33\x88\x39\xbf" + "\x2f\xbb\x59\x03\xbd\x66\x8b\x11" + "\x11\x65\x51\x2e\xb8\x67\x05\xd1" + "\x27\x11\x5c\xd4\xcc\x97\xc2\xb3" + "\xa9\x55\xaf\x07\x56\xd1\xdc\xf5" + "\x85\xdc\x46\xe6\xf0\x24\xeb\x93" + "\x4d\xf0\x9b\xf5\x73\x1c\xda\x03" + "\x22\xc8\x3a\x4f\xb4\x19\x91\x09" + "\x54\x0b\xf6\xfe\x17\x3d\x1a\x53" + "\x72\x60\x79\xcb\x0e\x32\x8a\x77" + "\xd5\xed\xdb\x33\xd7\x62\x16\x69" + "\x63\xe0\xab\xb5\xf6\x9c\x5f\x3d" + "\x69\x35\x61\x86\xf8\x86\xb9\x89" + "\x6e\x59\x35\xac\xf6\x6b\x33\xa0" + 
"\xea\xef\x96\x62\xd8\xa9\xcf\x56" + "\xbf\xdb\x8a\xfd\xa1\x82\x77\x73" + "\x3d\x94\x4a\x49\x42\x6d\x08\x60" + "\xa1\xea\xab\xb6\x88\x13\x94\xb8" + "\x51\x98\xdb\x35\x85\xdf\xf6\xb9" + "\x8f\xcd\xdf\x80\xd3\x40\x2d\x72" + "\xb8\xb2\x6c\x02\x43\x35\x22\x2a" + "\x31\xed\xcd\x16\x19\xdf\x62\x0f" + "\x29\xcf\x87\x04\xec\x02\x4f\xe4" + "\xa2\xed\x73\xc6\x69\xd3\x7e\x89" + "\x0b\x76\x10\x7c\xd6\xf9\x6a\x25" + "\xed\xcc\x60\x5d\x61\x20\xc1\x97" + "\x56\x91\x57\x28\xbe\x71\x0d\xcd" + "\xde\xc4\x9e\x55\x91\xbe\xd1\x28" + "\x9b\x90\xeb\x73\xf3\x68\x51\xc6" + "\xdf\x82\xcc\xd8\x1f\xce\x5b\x27" + "\xc0\x60\x5e\x33\xd6\xa7\x20\xea" + "\xb2\x54\xc7\x5d\x6a\x3b\x67\x47" + "\xcf\xa0\xe3\xab\x86\xaf\xc1\x42" + "\xe6\xb0\x23\x4a\xaf\x53\xdf\xa0" + "\xad\x12\x32\x31\x03\xf7\x21\xbe" + "\x2d\xd5\x82\x42\xb6\x4a\x3d\xcd" + "\xd8\x81\x77\xa9\x49\x98\x6c\x09" + "\xc5\xa3\x61\x12\x62\x85\x6b\xcd" + "\xb3\xf4\x20\x0c\x41\xc4\x05\x37" + "\x46\x5f\xeb\x71\x8b\xf1\xaf\x6e" + "\xba\xf3\x50\x2e\xfe\xa8\x37\xeb" + "\xe8\x8c\x4f\xa4\x0c\xf1\x31\xc8" + "\x6e\x71\x4f\xa5\xd7\x97\x73\xe0" + "\x93\x4a\x2f\xda\x7b\xe0\x20\x54" + "\x1f\x8d\x85\x79\x0b\x7b\x5e\x75" + "\xb9\x07\x67\xcc\xc8\xe7\x21\x15" + "\xa7\xc8\x98\xff\x4b\x80\x1c\x12" + "\xa8\x54\xe1\x38\x52\xe6\x74\x81" + "\x97\x47\xa1\x41\x0e\xc0\x50\xe3" + "\x55\x0e\xc3\xa7\x70\x77\xce\x07" + "\xed\x8c\x88\xe6\xa1\x5b\x14\xec" + "\xe6\xde\x06\x6d\x74\xc5\xd9\xfa" + "\xe5\x2f\x5a\xff\xc8\x05\xee\x27" + "\x35\x61\xbf\x0b\x19\x78\x9b\xd2" + "\x04\xc7\x05\xb1\x79\xb4\xff\x5f" + "\xf3\xea\x67\x52\x78\xc2\xce\x70" + "\xa4\x05\x0b\xb2\xb3\xa8\x30\x97" + "\x37\x30\xe1\x91\x8d\xb3\x2a\xff", + .ilen = 512, + .result = "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f" + "\x10\x11\x12\x13\x14\x15\x16\x17" + "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f" + "\x20\x21\x22\x23\x24\x25\x26\x27" + "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f" + "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f" + "\x40\x41\x42\x43\x44\x45\x46\x47" + "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f" + "\x50\x51\x52\x53\x54\x55\x56\x57" + "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f" + "\x60\x61\x62\x63\x64\x65\x66\x67" + "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f" + "\x70\x71\x72\x73\x74\x75\x76\x77" + "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f" + "\x80\x81\x82\x83\x84\x85\x86\x87" + "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f" + "\x90\x91\x92\x93\x94\x95\x96\x97" + "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f" + "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7" + "\xa8\xa9\xaa\xab\xac\xad\xae\xaf" + "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7" + "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf" + "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7" + "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf" + "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7" + "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf" + "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7" + "\xe8\xe9\xea\xeb\xec\xed\xee\xef" + "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7" + "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff" + "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f" + "\x10\x11\x12\x13\x14\x15\x16\x17" + "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f" + "\x20\x21\x22\x23\x24\x25\x26\x27" + "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f" + "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f" + "\x40\x41\x42\x43\x44\x45\x46\x47" + "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f" + "\x50\x51\x52\x53\x54\x55\x56\x57" + "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f" + "\x60\x61\x62\x63\x64\x65\x66\x67" + "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f" + "\x70\x71\x72\x73\x74\x75\x76\x77" + "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f" + "\x80\x81\x82\x83\x84\x85\x86\x87" + "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f" + "\x90\x91\x92\x93\x94\x95\x96\x97" + 
"\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f" + "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7" + "\xa8\xa9\xaa\xab\xac\xad\xae\xaf" + "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7" + "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf" + "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7" + "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf" + "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7" + "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf" + "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7" + "\xe8\xe9\xea\xeb\xec\xed\xee\xef" + "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7" + "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff", + .rlen = 512, + }, +}; + +/* + * Serpent test vectors. These are backwards because Serpent writes + * octet sequences in right-to-left mode. + */ +#define SERPENT_ENC_TEST_VECTORS 5 +#define SERPENT_DEC_TEST_VECTORS 5 + +#define TNEPRES_ENC_TEST_VECTORS 4 +#define TNEPRES_DEC_TEST_VECTORS 4 + +#define SERPENT_CBC_ENC_TEST_VECTORS 1 +#define SERPENT_CBC_DEC_TEST_VECTORS 1 + +#define SERPENT_CTR_ENC_TEST_VECTORS 2 +#define SERPENT_CTR_DEC_TEST_VECTORS 2 + +#define SERPENT_LRW_ENC_TEST_VECTORS 8 +#define SERPENT_LRW_DEC_TEST_VECTORS 8 + +#define SERPENT_XTS_ENC_TEST_VECTORS 5 +#define SERPENT_XTS_DEC_TEST_VECTORS 5 + +static struct cipher_testvec serpent_enc_tv_template[] = { + { + .input = "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + .ilen = 16, + .result = "\x12\x07\xfc\xce\x9b\xd0\xd6\x47" + "\x6a\xe9\x8f\xbe\xd1\x43\xa0\xe2", + .rlen = 16, + }, { + .key = "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + .klen = 16, + .input = "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + .ilen = 16, + .result = "\x4c\x7d\x8a\x32\x80\x72\xa2\x2c" + "\x82\x3e\x4a\x1f\x3a\xcd\xa1\x6d", + .rlen = 16, + }, { + .key = "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f" + "\x10\x11\x12\x13\x14\x15\x16\x17" + "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + .klen = 32, + .input = "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + .ilen = 16, + .result = "\xde\x26\x9f\xf8\x33\xe4\x32\xb8" + "\x5b\x2e\x88\xd2\x70\x1c\xe7\x5c", + .rlen = 16, + }, { + .key = "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x80", + .klen = 16, + .input = zeroed_string, + .ilen = 16, + .result = "\xdd\xd2\x6b\x98\xa5\xff\xd8\x2c" + "\x05\x34\x5a\x9d\xad\xbf\xaf\x49", + .rlen = 16, + }, { /* Generated with Crypto++ */ + .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9" + "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A" + "\x27\x04\xE1\x27\x04\xE1\xBE\x9B" + "\x78\xBE\x9B\x78\x55\x32\x0F\x55", + .klen = 32, + .input = "\x56\xED\x84\x1B\x8F\x26\xBD\x31" + "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3" + "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15" + "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87" + "\x1E\x92\x29\xC0\x34\xCB\x62\xF9" + "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48" + "\xDF\x76\x0D\x81\x18\xAF\x23\xBA" + "\x51\xE8\x5C\xF3\x8A\x21\x95\x2C" + "\xC3\x37\xCE\x65\xFC\x70\x07\x9E" + "\x12\xA9\x40\xD7\x4B\xE2\x79\x10" + "\x84\x1B\xB2\x26\xBD\x54\xEB\x5F" + "\xF6\x8D\x01\x98\x2F\xC6\x3A\xD1" + "\x68\xFF\x73\x0A\xA1\x15\xAC\x43" + "\xDA\x4E\xE5\x7C\x13\x87\x1E\xB5" + "\x29\xC0\x57\xEE\x62\xF9\x90\x04" + "\x9B\x32\xC9\x3D\xD4\x6B\x02\x76" + "\x0D\xA4\x18\xAF\x46\xDD\x51\xE8" + "\x7F\x16\x8A\x21\xB8\x2C\xC3\x5A", + .ilen = 144, + .result = "\xFB\xB0\x5D\xDE\xC0\xFE\xFC\xEB" + "\xB1\x80\x10\x43\xDE\x62\x70\xBD" + "\xFA\x8A\x93\xEA\x6B\xF7\xC5\xD7" + "\x0C\xD1\xBB\x29\x25\x14\x4C\x22" + "\x77\xA6\x38\x00\xDB\xB9\xE2\x07" + "\xD1\xAC\x82\xBA\xEA\x67\xAA\x39" + "\x99\x34\x89\x5B\x54\xE9\x12\x13" + "\x3B\x04\xE5\x12\x42\xC5\x79\xAB" + "\x0D\xC7\x3C\x58\x2D\xA3\x98\xF6" + "\xE4\x61\x9E\x17\x0B\xCE\xE8\xAA" + 
"\xB5\x6C\x1A\x3A\x67\x52\x81\x6A" + "\x04\xFF\x8A\x1B\x96\xFE\xE6\x87" + "\x3C\xD4\x39\x7D\x36\x9B\x03\xD5" + "\xB6\xA0\x75\x3C\x83\xE6\x1C\x73" + "\x9D\x74\x2B\x77\x53\x2D\xE5\xBD" + "\x69\xDA\x7A\x01\xF5\x6A\x70\x39" + "\x30\xD4\x2C\xF2\x8E\x06\x4B\x39" + "\xB3\x12\x1D\xB3\x17\x46\xE6\xD6", + .rlen = 144, + }, +}; + +static struct cipher_testvec tnepres_enc_tv_template[] = { + { /* KeySize=128, PT=0, I=1 */ + .input = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00", + .key = "\x80\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00", + .klen = 16, + .ilen = 16, + .result = "\x49\xaf\xbf\xad\x9d\x5a\x34\x05" + "\x2c\xd8\xff\xa5\x98\x6b\xd2\xdd", + .rlen = 16, + }, { /* KeySize=192, PT=0, I=1 */ + .key = "\x80\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00", + .klen = 24, + .input = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00", + .ilen = 16, + .result = "\xe7\x8e\x54\x02\xc7\x19\x55\x68" + "\xac\x36\x78\xf7\xa3\xf6\x0c\x66", + .rlen = 16, + }, { /* KeySize=256, PT=0, I=1 */ + .key = "\x80\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00", + .klen = 32, + .input = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00", + .ilen = 16, + .result = "\xab\xed\x96\xe7\x66\xbf\x28\xcb" + "\xc0\xeb\xd2\x1a\x82\xef\x08\x19", + .rlen = 16, + }, { /* KeySize=256, I=257 */ + .key = "\x1f\x1e\x1d\x1c\x1b\x1a\x19\x18" + "\x17\x16\x15\x14\x13\x12\x11\x10" + "\x0f\x0e\x0d\x0c\x0b\x0a\x09\x08" + "\x07\x06\x05\x04\x03\x02\x01\x00", + .klen = 32, + .input = "\x0f\x0e\x0d\x0c\x0b\x0a\x09\x08" + "\x07\x06\x05\x04\x03\x02\x01\x00", + .ilen = 16, + .result = "\x5c\xe7\x1c\x70\xd2\x88\x2e\x5b" + "\xb8\x32\xe4\x33\xf8\x9f\x26\xde", + .rlen = 16, + }, +}; + + +static struct cipher_testvec serpent_dec_tv_template[] = { + { + .input = "\x12\x07\xfc\xce\x9b\xd0\xd6\x47" + "\x6a\xe9\x8f\xbe\xd1\x43\xa0\xe2", + .ilen = 16, + .result = "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + .rlen = 16, + }, { + .key = "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + .klen = 16, + .input = "\x4c\x7d\x8a\x32\x80\x72\xa2\x2c" + "\x82\x3e\x4a\x1f\x3a\xcd\xa1\x6d", + .ilen = 16, + .result = "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + .rlen = 16, + }, { + .key = "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f" + "\x10\x11\x12\x13\x14\x15\x16\x17" + "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + .klen = 32, + .input = "\xde\x26\x9f\xf8\x33\xe4\x32\xb8" + "\x5b\x2e\x88\xd2\x70\x1c\xe7\x5c", + .ilen = 16, + .result = "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + .rlen = 16, + }, { + .key = "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x80", + .klen = 16, + .input = "\xdd\xd2\x6b\x98\xa5\xff\xd8\x2c" + "\x05\x34\x5a\x9d\xad\xbf\xaf\x49", + .ilen = 16, + .result = zeroed_string, + .rlen = 16, + }, { /* Generated with Crypto++ */ + .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9" + "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A" + "\x27\x04\xE1\x27\x04\xE1\xBE\x9B" + "\x78\xBE\x9B\x78\x55\x32\x0F\x55", + .klen = 32, + .input = "\xFB\xB0\x5D\xDE\xC0\xFE\xFC\xEB" + "\xB1\x80\x10\x43\xDE\x62\x70\xBD" + "\xFA\x8A\x93\xEA\x6B\xF7\xC5\xD7" + "\x0C\xD1\xBB\x29\x25\x14\x4C\x22" + "\x77\xA6\x38\x00\xDB\xB9\xE2\x07" + "\xD1\xAC\x82\xBA\xEA\x67\xAA\x39" + "\x99\x34\x89\x5B\x54\xE9\x12\x13" + 
"\x3B\x04\xE5\x12\x42\xC5\x79\xAB" + "\x0D\xC7\x3C\x58\x2D\xA3\x98\xF6" + "\xE4\x61\x9E\x17\x0B\xCE\xE8\xAA" + "\xB5\x6C\x1A\x3A\x67\x52\x81\x6A" + "\x04\xFF\x8A\x1B\x96\xFE\xE6\x87" + "\x3C\xD4\x39\x7D\x36\x9B\x03\xD5" + "\xB6\xA0\x75\x3C\x83\xE6\x1C\x73" + "\x9D\x74\x2B\x77\x53\x2D\xE5\xBD" + "\x69\xDA\x7A\x01\xF5\x6A\x70\x39" + "\x30\xD4\x2C\xF2\x8E\x06\x4B\x39" + "\xB3\x12\x1D\xB3\x17\x46\xE6\xD6", + .ilen = 144, + .result = "\x56\xED\x84\x1B\x8F\x26\xBD\x31" + "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3" + "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15" + "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87" + "\x1E\x92\x29\xC0\x34\xCB\x62\xF9" + "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48" + "\xDF\x76\x0D\x81\x18\xAF\x23\xBA" + "\x51\xE8\x5C\xF3\x8A\x21\x95\x2C" + "\xC3\x37\xCE\x65\xFC\x70\x07\x9E" + "\x12\xA9\x40\xD7\x4B\xE2\x79\x10" + "\x84\x1B\xB2\x26\xBD\x54\xEB\x5F" + "\xF6\x8D\x01\x98\x2F\xC6\x3A\xD1" + "\x68\xFF\x73\x0A\xA1\x15\xAC\x43" + "\xDA\x4E\xE5\x7C\x13\x87\x1E\xB5" + "\x29\xC0\x57\xEE\x62\xF9\x90\x04" + "\x9B\x32\xC9\x3D\xD4\x6B\x02\x76" + "\x0D\xA4\x18\xAF\x46\xDD\x51\xE8" + "\x7F\x16\x8A\x21\xB8\x2C\xC3\x5A", + .rlen = 144, + }, +}; + +static struct cipher_testvec tnepres_dec_tv_template[] = { + { + .input = "\x41\xcc\x6b\x31\x59\x31\x45\x97" + "\x6d\x6f\xbb\x38\x4b\x37\x21\x28", + .ilen = 16, + .result = "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + .rlen = 16, + }, { + .key = "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + .klen = 16, + .input = "\xea\xf4\xd7\xfc\xd8\x01\x34\x47" + "\x81\x45\x0b\xfa\x0c\xd6\xad\x6e", + .ilen = 16, + .result = "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + .rlen = 16, + }, { + .key = "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f" + "\x10\x11\x12\x13\x14\x15\x16\x17" + "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + .klen = 32, + .input = "\x64\xa9\x1a\x37\xed\x9f\xe7\x49" + "\xa8\x4e\x76\xd6\xf5\x0d\x78\xee", + .ilen = 16, + .result = "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + .rlen = 16, + }, { /* KeySize=128, I=121 */ + .key = "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x80", + .klen = 16, + .input = "\x3d\xda\xbf\xc0\x06\xda\xab\x06" + "\x46\x2a\xf4\xef\x81\x54\x4e\x26", + .ilen = 16, + .result = zeroed_string, + .rlen = 16, + }, +}; + +static struct cipher_testvec serpent_cbc_enc_tv_template[] = { + { /* Generated with Crypto++ */ + .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9" + "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A" + "\x27\x04\xE1\x27\x04\xE1\xBE\x9B" + "\x78\xBE\x9B\x78\x55\x32\x0F\x55", + .klen = 32, + .iv = "\xE2\x24\x89\xEE\x53\xB8\x1D\x5F" + "\xC4\x29\x8E\xF3\x35\x9A\xFF\x64", + .input = "\x56\xED\x84\x1B\x8F\x26\xBD\x31" + "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3" + "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15" + "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87" + "\x1E\x92\x29\xC0\x34\xCB\x62\xF9" + "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48" + "\xDF\x76\x0D\x81\x18\xAF\x23\xBA" + "\x51\xE8\x5C\xF3\x8A\x21\x95\x2C" + "\xC3\x37\xCE\x65\xFC\x70\x07\x9E" + "\x12\xA9\x40\xD7\x4B\xE2\x79\x10" + "\x84\x1B\xB2\x26\xBD\x54\xEB\x5F" + "\xF6\x8D\x01\x98\x2F\xC6\x3A\xD1" + "\x68\xFF\x73\x0A\xA1\x15\xAC\x43" + "\xDA\x4E\xE5\x7C\x13\x87\x1E\xB5" + "\x29\xC0\x57\xEE\x62\xF9\x90\x04" + "\x9B\x32\xC9\x3D\xD4\x6B\x02\x76" + "\x0D\xA4\x18\xAF\x46\xDD\x51\xE8" + "\x7F\x16\x8A\x21\xB8\x2C\xC3\x5A", + .ilen = 144, + .result = "\x80\xCF\x11\x41\x1A\xB9\x4B\x9C" + "\xFF\xB7\x6C\xEA\xF0\xAF\x77\x6E" + "\x71\x75\x95\x9D\x4E\x1C\xCF\xAD" + "\x81\x34\xE9\x8F\xAE\x5A\x91\x1C" + 
"\x38\x63\x35\x7E\x79\x18\x0A\xE8" + "\x67\x06\x76\xD5\xFF\x22\x2F\xDA" + "\xB6\x2D\x57\x13\xB6\x3C\xBC\x97" + "\xFE\x53\x75\x35\x97\x7F\x51\xEA" + "\xDF\x5D\xE8\x9D\xCC\xD9\xAE\xE7" + "\x62\x67\xFF\x04\xC2\x18\x22\x5F" + "\x2E\x06\xC1\xE2\x26\xCD\xC6\x1E" + "\xE5\x2C\x4E\x87\x23\xDD\xF0\x41" + "\x08\xA5\xB4\x3E\x07\x1E\x0B\xBB" + "\x72\x84\xF8\x0A\x3F\x38\x5E\x91" + "\x15\x26\xE1\xDB\xA4\x3D\x74\xD2" + "\x41\x1E\x3F\xA9\xC6\x7D\x2A\xAB" + "\x27\xDF\x89\x1D\x86\x3E\xF7\x5A" + "\xF6\xE3\x0F\xC7\x6B\x4C\x96\x7C", + .rlen = 144, + }, +}; + +static struct cipher_testvec serpent_cbc_dec_tv_template[] = { + { /* Generated with Crypto++ */ + .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9" + "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A" + "\x27\x04\xE1\x27\x04\xE1\xBE\x9B" + "\x78\xBE\x9B\x78\x55\x32\x0F\x55", + .klen = 32, + .iv = "\xE2\x24\x89\xEE\x53\xB8\x1D\x5F" + "\xC4\x29\x8E\xF3\x35\x9A\xFF\x64", + .input = "\x80\xCF\x11\x41\x1A\xB9\x4B\x9C" + "\xFF\xB7\x6C\xEA\xF0\xAF\x77\x6E" + "\x71\x75\x95\x9D\x4E\x1C\xCF\xAD" + "\x81\x34\xE9\x8F\xAE\x5A\x91\x1C" + "\x38\x63\x35\x7E\x79\x18\x0A\xE8" + "\x67\x06\x76\xD5\xFF\x22\x2F\xDA" + "\xB6\x2D\x57\x13\xB6\x3C\xBC\x97" + "\xFE\x53\x75\x35\x97\x7F\x51\xEA" + "\xDF\x5D\xE8\x9D\xCC\xD9\xAE\xE7" + "\x62\x67\xFF\x04\xC2\x18\x22\x5F" + "\x2E\x06\xC1\xE2\x26\xCD\xC6\x1E" + "\xE5\x2C\x4E\x87\x23\xDD\xF0\x41" + "\x08\xA5\xB4\x3E\x07\x1E\x0B\xBB" + "\x72\x84\xF8\x0A\x3F\x38\x5E\x91" + "\x15\x26\xE1\xDB\xA4\x3D\x74\xD2" + "\x41\x1E\x3F\xA9\xC6\x7D\x2A\xAB" + "\x27\xDF\x89\x1D\x86\x3E\xF7\x5A" + "\xF6\xE3\x0F\xC7\x6B\x4C\x96\x7C", + .ilen = 144, + .result = "\x56\xED\x84\x1B\x8F\x26\xBD\x31" + "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3" + "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15" + "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87" + "\x1E\x92\x29\xC0\x34\xCB\x62\xF9" + "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48" + "\xDF\x76\x0D\x81\x18\xAF\x23\xBA" + "\x51\xE8\x5C\xF3\x8A\x21\x95\x2C" + "\xC3\x37\xCE\x65\xFC\x70\x07\x9E" + "\x12\xA9\x40\xD7\x4B\xE2\x79\x10" + "\x84\x1B\xB2\x26\xBD\x54\xEB\x5F" + "\xF6\x8D\x01\x98\x2F\xC6\x3A\xD1" + "\x68\xFF\x73\x0A\xA1\x15\xAC\x43" + "\xDA\x4E\xE5\x7C\x13\x87\x1E\xB5" + "\x29\xC0\x57\xEE\x62\xF9\x90\x04" + "\x9B\x32\xC9\x3D\xD4\x6B\x02\x76" + "\x0D\xA4\x18\xAF\x46\xDD\x51\xE8" + "\x7F\x16\x8A\x21\xB8\x2C\xC3\x5A", + .rlen = 144, + }, +}; + +static struct cipher_testvec serpent_ctr_enc_tv_template[] = { + { /* Generated with Crypto++ */ + .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9" + "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A" + "\x27\x04\xE1\x27\x04\xE1\xBE\x9B" + "\x78\xBE\x9B\x78\x55\x32\x0F\x55", + .klen = 32, + .iv = "\xE2\x24\x89\xEE\x53\xB8\x1D\x5F" + "\xC4\x29\x8E\xF3\x35\x9A\xFF\x64", + .input = "\x56\xED\x84\x1B\x8F\x26\xBD\x31" + "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3" + "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15" + "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87" + "\x1E\x92\x29\xC0\x34\xCB\x62\xF9" + "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48" + "\xDF\x76\x0D\x81\x18\xAF\x23\xBA" + "\x51\xE8\x5C\xF3\x8A\x21\x95\x2C" + "\xC3\x37\xCE\x65\xFC\x70\x07\x9E" + "\x12\xA9\x40\xD7\x4B\xE2\x79\x10" + "\x84\x1B\xB2\x26\xBD\x54\xEB\x5F" + "\xF6\x8D\x01\x98\x2F\xC6\x3A\xD1" + "\x68\xFF\x73\x0A\xA1\x15\xAC\x43" + "\xDA\x4E\xE5\x7C\x13\x87\x1E\xB5" + "\x29\xC0\x57\xEE\x62\xF9\x90\x04" + "\x9B\x32\xC9\x3D\xD4\x6B\x02\x76" + "\x0D\xA4\x18\xAF\x46\xDD\x51\xE8" + "\x7F\x16\x8A\x21\xB8\x2C\xC3\x5A", + .ilen = 144, + .result = "\x84\x68\xEC\xF2\x1C\x88\x20\xCA" + "\x37\x69\xE3\x3A\x22\x85\x48\x46" + "\x70\xAA\x25\xB4\xCD\x8B\x04\x4E" + "\x8D\x15\x2B\x98\xDF\x7B\x6D\xB9" + "\xE0\x4A\x73\x00\x65\xB6\x1A\x0D" + 
"\x5C\x60\xDF\x34\xDC\x60\x4C\xDF" + "\xB5\x1F\x26\x8C\xDA\xC1\x11\xA8" + "\x80\xFA\x37\x7A\x89\xAA\xAE\x7B" + "\x92\x6E\xB9\xDC\xC9\x62\x4F\x88" + "\x0A\x5D\x97\x2F\x6B\xAC\x03\x7C" + "\x22\xF6\x55\x5A\xFA\x35\xA5\x17" + "\xA1\x5C\x5E\x2B\x63\x2D\xB9\x91" + "\x3E\x83\x26\x00\x4E\xD5\xBE\xCE" + "\x79\xC4\x3D\xFC\x70\xA0\xAD\x96" + "\xBA\x58\x2A\x1C\xDF\xC2\x3A\xA5" + "\x7C\xB5\x12\x89\xED\xBF\xB6\x09" + "\x13\x4F\x7D\x61\x3C\x5C\x27\xFC" + "\x5D\xE1\x4F\xA1\xEA\xB3\xCA\xB9", + .rlen = 144, + }, { /* Generated with Crypto++ */ + .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9" + "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A" + "\x27\x04\xE1\x27\x04\xE1\xBE\x9B" + "\x78\xBE\x9B\x78\x55\x32\x0F\x55", + .klen = 32, + .iv = "\xE2\x24\x89\xEE\x53\xB8\x1D\x5F" + "\xC4\x29\x8E\xF3\x35\x9A\xFF\x64", + .input = "\x56\xED\x84\x1B\x8F\x26\xBD\x31" + "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3" + "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15" + "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87" + "\x1E\x92\x29\xC0\x34\xCB\x62\xF9" + "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48" + "\xDF\x76\x0D\x81\x18\xAF\x23\xBA" + "\x51\xE8\x5C\xF3\x8A\x21\x95\x2C" + "\xC3\x37\xCE\x65\xFC\x70\x07\x9E" + "\x12\xA9\x40\xD7\x4B\xE2\x79\x10" + "\x84\x1B\xB2\x26\xBD\x54\xEB\x5F" + "\xF6\x8D\x01\x98\x2F\xC6\x3A\xD1" + "\x68\xFF\x73\x0A\xA1\x15\xAC\x43" + "\xDA\x4E\xE5\x7C\x13\x87\x1E\xB5" + "\x29\xC0\x57\xEE\x62\xF9\x90\x04" + "\x9B\x32\xC9\x3D\xD4\x6B\x02\x76" + "\x0D\xA4\x18\xAF\x46\xDD\x51\xE8" + "\x7F\x16\x8A\x21\xB8\x2C\xC3\x5A" + "\xF1\x65\xFC", + .ilen = 147, + .result = "\x84\x68\xEC\xF2\x1C\x88\x20\xCA" + "\x37\x69\xE3\x3A\x22\x85\x48\x46" + "\x70\xAA\x25\xB4\xCD\x8B\x04\x4E" + "\x8D\x15\x2B\x98\xDF\x7B\x6D\xB9" + "\xE0\x4A\x73\x00\x65\xB6\x1A\x0D" + "\x5C\x60\xDF\x34\xDC\x60\x4C\xDF" + "\xB5\x1F\x26\x8C\xDA\xC1\x11\xA8" + "\x80\xFA\x37\x7A\x89\xAA\xAE\x7B" + "\x92\x6E\xB9\xDC\xC9\x62\x4F\x88" + "\x0A\x5D\x97\x2F\x6B\xAC\x03\x7C" + "\x22\xF6\x55\x5A\xFA\x35\xA5\x17" + "\xA1\x5C\x5E\x2B\x63\x2D\xB9\x91" + "\x3E\x83\x26\x00\x4E\xD5\xBE\xCE" + "\x79\xC4\x3D\xFC\x70\xA0\xAD\x96" + "\xBA\x58\x2A\x1C\xDF\xC2\x3A\xA5" + "\x7C\xB5\x12\x89\xED\xBF\xB6\x09" + "\x13\x4F\x7D\x61\x3C\x5C\x27\xFC" + "\x5D\xE1\x4F\xA1\xEA\xB3\xCA\xB9" + "\xE6\xD0\x97", + .rlen = 147, + }, +}; + +static struct cipher_testvec serpent_ctr_dec_tv_template[] = { + { /* Generated with Crypto++ */ + .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9" + "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A" + "\x27\x04\xE1\x27\x04\xE1\xBE\x9B" + "\x78\xBE\x9B\x78\x55\x32\x0F\x55", + .klen = 32, + .iv = "\xE2\x24\x89\xEE\x53\xB8\x1D\x5F" + "\xC4\x29\x8E\xF3\x35\x9A\xFF\x64", + .input = "\x84\x68\xEC\xF2\x1C\x88\x20\xCA" + "\x37\x69\xE3\x3A\x22\x85\x48\x46" + "\x70\xAA\x25\xB4\xCD\x8B\x04\x4E" + "\x8D\x15\x2B\x98\xDF\x7B\x6D\xB9" + "\xE0\x4A\x73\x00\x65\xB6\x1A\x0D" + "\x5C\x60\xDF\x34\xDC\x60\x4C\xDF" + "\xB5\x1F\x26\x8C\xDA\xC1\x11\xA8" + "\x80\xFA\x37\x7A\x89\xAA\xAE\x7B" + "\x92\x6E\xB9\xDC\xC9\x62\x4F\x88" + "\x0A\x5D\x97\x2F\x6B\xAC\x03\x7C" + "\x22\xF6\x55\x5A\xFA\x35\xA5\x17" + "\xA1\x5C\x5E\x2B\x63\x2D\xB9\x91" + "\x3E\x83\x26\x00\x4E\xD5\xBE\xCE" + "\x79\xC4\x3D\xFC\x70\xA0\xAD\x96" + "\xBA\x58\x2A\x1C\xDF\xC2\x3A\xA5" + "\x7C\xB5\x12\x89\xED\xBF\xB6\x09" + "\x13\x4F\x7D\x61\x3C\x5C\x27\xFC" + "\x5D\xE1\x4F\xA1\xEA\xB3\xCA\xB9", + .ilen = 144, + .result = "\x56\xED\x84\x1B\x8F\x26\xBD\x31" + "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3" + "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15" + "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87" + "\x1E\x92\x29\xC0\x34\xCB\x62\xF9" + "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48" + "\xDF\x76\x0D\x81\x18\xAF\x23\xBA" + 
"\x51\xE8\x5C\xF3\x8A\x21\x95\x2C" + "\xC3\x37\xCE\x65\xFC\x70\x07\x9E" + "\x12\xA9\x40\xD7\x4B\xE2\x79\x10" + "\x84\x1B\xB2\x26\xBD\x54\xEB\x5F" + "\xF6\x8D\x01\x98\x2F\xC6\x3A\xD1" + "\x68\xFF\x73\x0A\xA1\x15\xAC\x43" + "\xDA\x4E\xE5\x7C\x13\x87\x1E\xB5" + "\x29\xC0\x57\xEE\x62\xF9\x90\x04" + "\x9B\x32\xC9\x3D\xD4\x6B\x02\x76" + "\x0D\xA4\x18\xAF\x46\xDD\x51\xE8" + "\x7F\x16\x8A\x21\xB8\x2C\xC3\x5A", + .rlen = 144, + }, { /* Generated with Crypto++ */ + .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9" + "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A" + "\x27\x04\xE1\x27\x04\xE1\xBE\x9B" + "\x78\xBE\x9B\x78\x55\x32\x0F\x55", + .klen = 32, + .iv = "\xE2\x24\x89\xEE\x53\xB8\x1D\x5F" + "\xC4\x29\x8E\xF3\x35\x9A\xFF\x64", + .input = "\x84\x68\xEC\xF2\x1C\x88\x20\xCA" + "\x37\x69\xE3\x3A\x22\x85\x48\x46" + "\x70\xAA\x25\xB4\xCD\x8B\x04\x4E" + "\x8D\x15\x2B\x98\xDF\x7B\x6D\xB9" + "\xE0\x4A\x73\x00\x65\xB6\x1A\x0D" + "\x5C\x60\xDF\x34\xDC\x60\x4C\xDF" + "\xB5\x1F\x26\x8C\xDA\xC1\x11\xA8" + "\x80\xFA\x37\x7A\x89\xAA\xAE\x7B" + "\x92\x6E\xB9\xDC\xC9\x62\x4F\x88" + "\x0A\x5D\x97\x2F\x6B\xAC\x03\x7C" + "\x22\xF6\x55\x5A\xFA\x35\xA5\x17" + "\xA1\x5C\x5E\x2B\x63\x2D\xB9\x91" + "\x3E\x83\x26\x00\x4E\xD5\xBE\xCE" + "\x79\xC4\x3D\xFC\x70\xA0\xAD\x96" + "\xBA\x58\x2A\x1C\xDF\xC2\x3A\xA5" + "\x7C\xB5\x12\x89\xED\xBF\xB6\x09" + "\x13\x4F\x7D\x61\x3C\x5C\x27\xFC" + "\x5D\xE1\x4F\xA1\xEA\xB3\xCA\xB9" + "\xE6\xD0\x97", + .ilen = 147, + .result = "\x56\xED\x84\x1B\x8F\x26\xBD\x31" + "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3" + "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15" + "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87" + "\x1E\x92\x29\xC0\x34\xCB\x62\xF9" + "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48" + "\xDF\x76\x0D\x81\x18\xAF\x23\xBA" + "\x51\xE8\x5C\xF3\x8A\x21\x95\x2C" + "\xC3\x37\xCE\x65\xFC\x70\x07\x9E" + "\x12\xA9\x40\xD7\x4B\xE2\x79\x10" + "\x84\x1B\xB2\x26\xBD\x54\xEB\x5F" + "\xF6\x8D\x01\x98\x2F\xC6\x3A\xD1" + "\x68\xFF\x73\x0A\xA1\x15\xAC\x43" + "\xDA\x4E\xE5\x7C\x13\x87\x1E\xB5" + "\x29\xC0\x57\xEE\x62\xF9\x90\x04" + "\x9B\x32\xC9\x3D\xD4\x6B\x02\x76" + "\x0D\xA4\x18\xAF\x46\xDD\x51\xE8" + "\x7F\x16\x8A\x21\xB8\x2C\xC3\x5A" + "\xF1\x65\xFC", + .rlen = 147, + }, +}; + +static struct cipher_testvec serpent_lrw_enc_tv_template[] = { + /* Generated from AES-LRW test vectors */ + { + .key = "\x45\x62\xac\x25\xf8\x28\x17\x6d" + "\x4c\x26\x84\x14\xb5\x68\x01\x85" + "\x25\x8e\x2a\x05\xe7\x3e\x9d\x03" + "\xee\x5a\x83\x0c\xcc\x09\x4c\x87", + .klen = 32, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x01", + .input = "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x41\x42\x43\x44\x45\x46", + .ilen = 16, + .result = "\x6f\xbf\xd4\xa4\x5d\x71\x16\x79" + "\x63\x9c\xa6\x8e\x40\xbe\x0d\x8a", + .rlen = 16, + }, { + .key = "\x59\x70\x47\x14\xf5\x57\x47\x8c" + "\xd7\x79\xe8\x0f\x54\x88\x79\x44" + "\x0d\x48\xf0\xb7\xb1\x5a\x53\xea" + "\x1c\xaa\x6b\x29\xc2\xca\xfb\xaf", + .klen = 32, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x02", + .input = "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x41\x42\x43\x44\x45\x46", + .ilen = 16, + .result = "\xfd\xb2\x66\x98\x80\x96\x55\xad" + "\x08\x94\x54\x9c\x21\x7c\x69\xe3", + .rlen = 16, + }, { + .key = "\xd8\x2a\x91\x34\xb2\x6a\x56\x50" + "\x30\xfe\x69\xe2\x37\x7f\x98\x47" + "\xcd\xf9\x0b\x16\x0c\x64\x8f\xb6" + "\xb0\x0d\x0d\x1b\xae\x85\x87\x1f", + .klen = 32, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x02\x00\x00\x00\x00", + .input = "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x41\x42\x43\x44\x45\x46", + .ilen = 16, + .result = "\x14\x5e\x3d\x70\xc0\x6e\x9c\x34" 
+ "\x5b\x5e\xcf\x0f\xe4\x8c\x21\x5c", + .rlen = 16, + }, { + .key = "\x0f\x6a\xef\xf8\xd3\xd2\xbb\x15" + "\x25\x83\xf7\x3c\x1f\x01\x28\x74" + "\xca\xc6\xbc\x35\x4d\x4a\x65\x54" + "\x90\xae\x61\xcf\x7b\xae\xbd\xcc" + "\xad\xe4\x94\xc5\x4a\x29\xae\x70", + .klen = 40, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x01", + .input = "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x41\x42\x43\x44\x45\x46", + .ilen = 16, + .result = "\x25\x39\xaa\xa5\xf0\x65\xc8\xdc" + "\x5d\x45\x95\x30\x8f\xff\x2f\x1b", + .rlen = 16, + }, { + .key = "\x8a\xd4\xee\x10\x2f\xbd\x81\xff" + "\xf8\x86\xce\xac\x93\xc5\xad\xc6" + "\xa0\x19\x07\xc0\x9d\xf7\xbb\xdd" + "\x52\x13\xb2\xb7\xf0\xff\x11\xd8" + "\xd6\x08\xd0\xcd\x2e\xb1\x17\x6f", + .klen = 40, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x02\x00\x00\x00\x00", + .input = "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x41\x42\x43\x44\x45\x46", + .ilen = 16, + .result = "\x0c\x20\x20\x63\xd6\x8b\xfc\x8f" + "\xc0\xe2\x17\xbb\xd2\x59\x6f\x26", + .rlen = 16, + }, { + .key = "\xf8\xd4\x76\xff\xd6\x46\xee\x6c" + "\x23\x84\xcb\x1c\x77\xd6\x19\x5d" + "\xfe\xf1\xa9\xf3\x7b\xbc\x8d\x21" + "\xa7\x9c\x21\xf8\xcb\x90\x02\x89" + "\xa8\x45\x34\x8e\xc8\xc5\xb5\xf1" + "\x26\xf5\x0e\x76\xfe\xfd\x1b\x1e", + .klen = 48, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x01", + .input = "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x41\x42\x43\x44\x45\x46", + .ilen = 16, + .result = "\xc1\x35\x2e\x53\xf0\x96\x4d\x9c" + "\x2e\x18\xe6\x99\xcd\xd3\x15\x68", + .rlen = 16, + }, { + .key = "\xfb\x76\x15\xb2\x3d\x80\x89\x1d" + "\xd4\x70\x98\x0b\xc7\x95\x84\xc8" + "\xb2\xfb\x64\xce\x60\x97\x87\x8d" + "\x17\xfc\xe4\x5a\x49\xe8\x30\xb7" + "\x6e\x78\x17\xe7\x2d\x5e\x12\xd4" + "\x60\x64\x04\x7a\xf1\x2f\x9e\x0c", + .klen = 48, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x02\x00\x00\x00\x00", + .input = "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x41\x42\x43\x44\x45\x46", + .ilen = 16, + .result = "\x86\x0a\xc6\xa9\x1a\x9f\xe7\xe6" + "\x64\x3b\x33\xd6\xd5\x84\xd6\xdf", + .rlen = 16, + }, { + .key = "\xf8\xd4\x76\xff\xd6\x46\xee\x6c" + "\x23\x84\xcb\x1c\x77\xd6\x19\x5d" + "\xfe\xf1\xa9\xf3\x7b\xbc\x8d\x21" + "\xa7\x9c\x21\xf8\xcb\x90\x02\x89" + "\xa8\x45\x34\x8e\xc8\xc5\xb5\xf1" + "\x26\xf5\x0e\x76\xfe\xfd\x1b\x1e", + .klen = 48, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x01", + .input = "\x05\x11\xb7\x18\xab\xc6\x2d\xac" + "\x70\x5d\xf6\x22\x94\xcd\xe5\x6c" + "\x17\x6b\xf6\x1c\xf0\xf3\x6e\xf8" + "\x50\x38\x1f\x71\x49\xb6\x57\xd6" + "\x8f\xcb\x8d\x6b\xe3\xa6\x29\x90" + "\xfe\x2a\x62\x82\xae\x6d\x8b\xf6" + "\xad\x1e\x9e\x20\x5f\x38\xbe\x04" + "\xda\x10\x8e\xed\xa2\xa4\x87\xab" + "\xda\x6b\xb4\x0c\x75\xba\xd3\x7c" + "\xc9\xac\x42\x31\x95\x7c\xc9\x04" + "\xeb\xd5\x6e\x32\x69\x8a\xdb\xa6" + "\x15\xd7\x3f\x4f\x2f\x66\x69\x03" + "\x9c\x1f\x54\x0f\xde\x1f\xf3\x65" + "\x4c\x96\x12\xed\x7c\x92\x03\x01" + "\x6f\xbc\x35\x93\xac\xf1\x27\xf1" + "\xb4\x96\x82\x5a\x5f\xb0\xa0\x50" + "\x89\xa4\x8e\x66\x44\x85\xcc\xfd" + "\x33\x14\x70\xe3\x96\xb2\xc3\xd3" + "\xbb\x54\x5a\x1a\xf9\x74\xa2\xc5" + "\x2d\x64\x75\xdd\xb4\x54\xe6\x74" + "\x8c\xd3\x9d\x9e\x86\xab\x51\x53" + "\xb7\x93\x3e\x6f\xd0\x4e\x2c\x40" + "\xf6\xa8\x2e\x3e\x9d\xf4\x66\xa5" + "\x76\x12\x73\x44\x1a\x56\xd7\x72" + "\x88\xcd\x21\x8c\x4c\x0f\xfe\xda" + "\x95\xe0\x3a\xa6\xa5\x84\x46\xcd" + "\xd5\x3e\x9d\x3a\xe2\x67\xe6\x60" + "\x1a\xe2\x70\x85\x58\xc2\x1b\x09" + "\xe1\xd7\x2c\xca\xad\xa8\x8f\xf9" + "\xac\xb3\x0e\xdb\xca\x2e\xe2\xb8" + 
"\x51\x71\xd9\x3c\x6c\xf1\x56\xf8" + "\xea\x9c\xf1\xfb\x0c\xe6\xb7\x10" + "\x1c\xf8\xa9\x7c\xe8\x53\x35\xc1" + "\x90\x3e\x76\x4a\x74\xa4\x21\x2c" + "\xf6\x2c\x4e\x0f\x94\x3a\x88\x2e" + "\x41\x09\x6a\x33\x7d\xf6\xdd\x3f" + "\x8d\x23\x31\x74\x84\xeb\x88\x6e" + "\xcc\xb9\xbc\x22\x83\x19\x07\x22" + "\xa5\x2d\xdf\xa5\xf3\x80\x85\x78" + "\x84\x39\x6a\x6d\x6a\x99\x4f\xa5" + "\x15\xfe\x46\xb0\xe4\x6c\xa5\x41" + "\x3c\xce\x8f\x42\x60\x71\xa7\x75" + "\x08\x40\x65\x8a\x82\xbf\xf5\x43" + "\x71\x96\xa9\x4d\x44\x8a\x20\xbe" + "\xfa\x4d\xbb\xc0\x7d\x31\x96\x65" + "\xe7\x75\xe5\x3e\xfd\x92\x3b\xc9" + "\x55\xbb\x16\x7e\xf7\xc2\x8c\xa4" + "\x40\x1d\xe5\xef\x0e\xdf\xe4\x9a" + "\x62\x73\x65\xfd\x46\x63\x25\x3d" + "\x2b\xaf\xe5\x64\xfe\xa5\x5c\xcf" + "\x24\xf3\xb4\xac\x64\xba\xdf\x4b" + "\xc6\x96\x7d\x81\x2d\x8d\x97\xf7" + "\xc5\x68\x77\x84\x32\x2b\xcc\x85" + "\x74\x96\xf0\x12\x77\x61\xb9\xeb" + "\x71\xaa\x82\xcb\x1c\xdb\x89\xc8" + "\xc6\xb5\xe3\x5c\x7d\x39\x07\x24" + "\xda\x39\x87\x45\xc0\x2b\xbb\x01" + "\xac\xbc\x2a\x5c\x7f\xfc\xe8\xce" + "\x6d\x9c\x6f\xed\xd3\xc1\xa1\xd6" + "\xc5\x55\xa9\x66\x2f\xe1\xc8\x32" + "\xa6\x5d\xa4\x3a\x98\x73\xe8\x45" + "\xa4\xc7\xa8\xb4\xf6\x13\x03\xf6" + "\xe9\x2e\xc4\x29\x0f\x84\xdb\xc4" + "\x21\xc4\xc2\x75\x67\x89\x37\x0a", + .ilen = 512, + .result = "\xe3\x5a\x38\x0f\x4d\x92\x3a\x74" + "\x15\xb1\x50\x8c\x9a\xd8\x99\x1d" + "\x82\xec\xf1\x5f\x03\x6d\x02\x58" + "\x90\x67\xfc\xdd\x8d\xe1\x38\x08" + "\x7b\xc9\x9b\x4b\x04\x09\x50\x15" + "\xce\xab\xda\x33\x30\x20\x12\xfa" + "\x83\xc4\xa6\x9a\x2e\x7d\x90\xd9" + "\xa6\xa6\x67\x43\xb4\xa7\xa8\x5c" + "\xbb\x6a\x49\x2b\x8b\xf8\xd0\x22" + "\xe5\x9e\xba\xe8\x8c\x67\xb8\x5b" + "\x60\xbc\xf5\xa4\x95\x4e\x66\xe5" + "\x6d\x8e\xa9\xf6\x65\x2e\x04\xf5" + "\xba\xb5\xdb\x88\xc2\xf6\x7a\x4b" + "\x89\x58\x7c\x9a\xae\x26\xe8\xb7" + "\xb7\x28\xcc\xd6\xcc\xa5\x98\x4d" + "\xb9\x91\xcb\xb4\xe4\x8b\x96\x47" + "\x5f\x03\x8b\xdd\x94\xd1\xee\x12" + "\xa7\x83\x80\xf2\xc1\x15\x74\x4f" + "\x49\xf9\xb0\x7e\x6f\xdc\x73\x2f" + "\xe2\xcf\xe0\x1b\x34\xa5\xa0\x52" + "\xfb\x3c\x5d\x85\x91\xe6\x6d\x98" + "\x04\xd6\xdd\x4c\x00\x64\xd9\x54" + "\x5c\x3c\x08\x1d\x4c\x06\x9f\xb8" + "\x1c\x4d\x8d\xdc\xa4\x3c\xb9\x3b" + "\x9e\x85\xce\xc3\xa8\x4a\x0c\xd9" + "\x04\xc3\x6f\x17\x66\xa9\x1f\x59" + "\xd9\xe2\x19\x36\xa3\x88\xb8\x0b" + "\x0f\x4a\x4d\xf8\xc8\x6f\xd5\x43" + "\xeb\xa0\xab\x1f\x61\xc0\x06\xeb" + "\x93\xb7\xb8\x6f\x0d\xbd\x07\x49" + "\xb3\xac\x5d\xcf\x31\xa0\x27\x26" + "\x21\xbe\x94\x2e\x19\xea\xf4\xee" + "\xb5\x13\x89\xf7\x94\x0b\xef\x59" + "\x44\xc5\x78\x8b\x3c\x3b\x71\x20" + "\xf9\x35\x0c\x70\x74\xdc\x5b\xc2" + "\xb4\x11\x0e\x2c\x61\xa1\x52\x46" + "\x18\x11\x16\xc6\x86\x44\xa7\xaf" + "\xd5\x0c\x7d\xa6\x9e\x25\x2d\x1b" + "\x9a\x8f\x0f\xf8\x6a\x61\xa0\xea" + "\x3f\x0e\x90\xd6\x8f\x83\x30\x64" + "\xb5\x51\x2d\x08\x3c\xcd\x99\x36" + "\x96\xd4\xb1\xb5\x48\x30\xca\x48" + "\xf7\x11\xa8\xf5\x97\x8a\x6a\x6d" + "\x12\x33\x2f\xc0\xe8\xda\xec\x8a" + "\xe1\x88\x72\x63\xde\x20\xa3\xe1" + "\x8e\xac\x84\x37\x35\xf5\xf7\x3f" + "\x00\x02\x0e\xe4\xc1\x53\x68\x3f" + "\xaa\xd5\xac\x52\x3d\x20\x2f\x4d" + "\x7c\x83\xd0\xbd\xaa\x97\x35\x36" + "\x98\x88\x59\x5d\xe7\x24\xe3\x90" + "\x9d\x30\x47\xa7\xc3\x60\x35\xf4" + "\xd5\xdb\x0e\x4d\x44\xc1\x81\x8b" + "\xfd\xbd\xc3\x2b\xba\x68\xfe\x8d" + "\x49\x5a\x3c\x8a\xa3\x01\xae\x25" + "\x42\xab\xd2\x87\x1b\x35\xd6\xd2" + "\xd7\x70\x1c\x1f\x72\xd1\xe1\x39" + "\x1c\x58\xa2\xb4\xd0\x78\x55\x72" + "\x76\x59\xea\xd9\xd7\x6e\x63\x8b" + "\xcc\x9b\xa7\x74\x89\xfc\xa3\x68" + "\x86\x28\xd1\xbb\x54\x8d\x66\xad" + "\x2a\x92\xf9\x4e\x04\x3d\xae\xfd" + 
"\x1b\x2b\x7f\xc3\x2f\x1a\x78\x0a" + "\x5c\xc6\x84\xfe\x7c\xcb\x26\xfd" + "\xd9\x51\x0f\xd7\x94\x2f\xc5\xa7", + .rlen = 512, + }, +}; + +static struct cipher_testvec serpent_lrw_dec_tv_template[] = { + /* Generated from AES-LRW test vectors */ + /* same as enc vectors with input and result reversed */ + { + .key = "\x45\x62\xac\x25\xf8\x28\x17\x6d" + "\x4c\x26\x84\x14\xb5\x68\x01\x85" + "\x25\x8e\x2a\x05\xe7\x3e\x9d\x03" + "\xee\x5a\x83\x0c\xcc\x09\x4c\x87", + .klen = 32, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x01", + .input = "\x6f\xbf\xd4\xa4\x5d\x71\x16\x79" + "\x63\x9c\xa6\x8e\x40\xbe\x0d\x8a", + .ilen = 16, + .result = "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x41\x42\x43\x44\x45\x46", + .rlen = 16, + }, { + .key = "\x59\x70\x47\x14\xf5\x57\x47\x8c" + "\xd7\x79\xe8\x0f\x54\x88\x79\x44" + "\x0d\x48\xf0\xb7\xb1\x5a\x53\xea" + "\x1c\xaa\x6b\x29\xc2\xca\xfb\xaf", + .klen = 32, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x02", + .input = "\xfd\xb2\x66\x98\x80\x96\x55\xad" + "\x08\x94\x54\x9c\x21\x7c\x69\xe3", + .ilen = 16, + .result = "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x41\x42\x43\x44\x45\x46", + .rlen = 16, + }, { + .key = "\xd8\x2a\x91\x34\xb2\x6a\x56\x50" + "\x30\xfe\x69\xe2\x37\x7f\x98\x47" + "\xcd\xf9\x0b\x16\x0c\x64\x8f\xb6" + "\xb0\x0d\x0d\x1b\xae\x85\x87\x1f", + .klen = 32, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x02\x00\x00\x00\x00", + .input = "\x14\x5e\x3d\x70\xc0\x6e\x9c\x34" + "\x5b\x5e\xcf\x0f\xe4\x8c\x21\x5c", + .ilen = 16, + .result = "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x41\x42\x43\x44\x45\x46", + .rlen = 16, + }, { + .key = "\x0f\x6a\xef\xf8\xd3\xd2\xbb\x15" + "\x25\x83\xf7\x3c\x1f\x01\x28\x74" + "\xca\xc6\xbc\x35\x4d\x4a\x65\x54" + "\x90\xae\x61\xcf\x7b\xae\xbd\xcc" + "\xad\xe4\x94\xc5\x4a\x29\xae\x70", + .klen = 40, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x01", + .input = "\x25\x39\xaa\xa5\xf0\x65\xc8\xdc" + "\x5d\x45\x95\x30\x8f\xff\x2f\x1b", + .ilen = 16, + .result = "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x41\x42\x43\x44\x45\x46", + .rlen = 16, + }, { + .key = "\x8a\xd4\xee\x10\x2f\xbd\x81\xff" + "\xf8\x86\xce\xac\x93\xc5\xad\xc6" + "\xa0\x19\x07\xc0\x9d\xf7\xbb\xdd" + "\x52\x13\xb2\xb7\xf0\xff\x11\xd8" + "\xd6\x08\xd0\xcd\x2e\xb1\x17\x6f", + .klen = 40, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x02\x00\x00\x00\x00", + .input = "\x0c\x20\x20\x63\xd6\x8b\xfc\x8f" + "\xc0\xe2\x17\xbb\xd2\x59\x6f\x26", + .ilen = 16, + .result = "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x41\x42\x43\x44\x45\x46", + .rlen = 16, + }, { + .key = "\xf8\xd4\x76\xff\xd6\x46\xee\x6c" + "\x23\x84\xcb\x1c\x77\xd6\x19\x5d" + "\xfe\xf1\xa9\xf3\x7b\xbc\x8d\x21" + "\xa7\x9c\x21\xf8\xcb\x90\x02\x89" + "\xa8\x45\x34\x8e\xc8\xc5\xb5\xf1" + "\x26\xf5\x0e\x76\xfe\xfd\x1b\x1e", + .klen = 48, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x01", + .input = "\xc1\x35\x2e\x53\xf0\x96\x4d\x9c" + "\x2e\x18\xe6\x99\xcd\xd3\x15\x68", + .ilen = 16, + .result = "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x41\x42\x43\x44\x45\x46", + .rlen = 16, + }, { + .key = "\xfb\x76\x15\xb2\x3d\x80\x89\x1d" + "\xd4\x70\x98\x0b\xc7\x95\x84\xc8" + "\xb2\xfb\x64\xce\x60\x97\x87\x8d" + "\x17\xfc\xe4\x5a\x49\xe8\x30\xb7" + "\x6e\x78\x17\xe7\x2d\x5e\x12\xd4" + "\x60\x64\x04\x7a\xf1\x2f\x9e\x0c", + .klen = 48, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x02\x00\x00\x00\x00", + .input = 
"\x86\x0a\xc6\xa9\x1a\x9f\xe7\xe6" + "\x64\x3b\x33\xd6\xd5\x84\xd6\xdf", + .ilen = 16, + .result = "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x41\x42\x43\x44\x45\x46", + .rlen = 16, + }, { + .key = "\xf8\xd4\x76\xff\xd6\x46\xee\x6c" + "\x23\x84\xcb\x1c\x77\xd6\x19\x5d" + "\xfe\xf1\xa9\xf3\x7b\xbc\x8d\x21" + "\xa7\x9c\x21\xf8\xcb\x90\x02\x89" + "\xa8\x45\x34\x8e\xc8\xc5\xb5\xf1" + "\x26\xf5\x0e\x76\xfe\xfd\x1b\x1e", + .klen = 48, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x01", + .input = "\xe3\x5a\x38\x0f\x4d\x92\x3a\x74" + "\x15\xb1\x50\x8c\x9a\xd8\x99\x1d" + "\x82\xec\xf1\x5f\x03\x6d\x02\x58" + "\x90\x67\xfc\xdd\x8d\xe1\x38\x08" + "\x7b\xc9\x9b\x4b\x04\x09\x50\x15" + "\xce\xab\xda\x33\x30\x20\x12\xfa" + "\x83\xc4\xa6\x9a\x2e\x7d\x90\xd9" + "\xa6\xa6\x67\x43\xb4\xa7\xa8\x5c" + "\xbb\x6a\x49\x2b\x8b\xf8\xd0\x22" + "\xe5\x9e\xba\xe8\x8c\x67\xb8\x5b" + "\x60\xbc\xf5\xa4\x95\x4e\x66\xe5" + "\x6d\x8e\xa9\xf6\x65\x2e\x04\xf5" + "\xba\xb5\xdb\x88\xc2\xf6\x7a\x4b" + "\x89\x58\x7c\x9a\xae\x26\xe8\xb7" + "\xb7\x28\xcc\xd6\xcc\xa5\x98\x4d" + "\xb9\x91\xcb\xb4\xe4\x8b\x96\x47" + "\x5f\x03\x8b\xdd\x94\xd1\xee\x12" + "\xa7\x83\x80\xf2\xc1\x15\x74\x4f" + "\x49\xf9\xb0\x7e\x6f\xdc\x73\x2f" + "\xe2\xcf\xe0\x1b\x34\xa5\xa0\x52" + "\xfb\x3c\x5d\x85\x91\xe6\x6d\x98" + "\x04\xd6\xdd\x4c\x00\x64\xd9\x54" + "\x5c\x3c\x08\x1d\x4c\x06\x9f\xb8" + "\x1c\x4d\x8d\xdc\xa4\x3c\xb9\x3b" + "\x9e\x85\xce\xc3\xa8\x4a\x0c\xd9" + "\x04\xc3\x6f\x17\x66\xa9\x1f\x59" + "\xd9\xe2\x19\x36\xa3\x88\xb8\x0b" + "\x0f\x4a\x4d\xf8\xc8\x6f\xd5\x43" + "\xeb\xa0\xab\x1f\x61\xc0\x06\xeb" + "\x93\xb7\xb8\x6f\x0d\xbd\x07\x49" + "\xb3\xac\x5d\xcf\x31\xa0\x27\x26" + "\x21\xbe\x94\x2e\x19\xea\xf4\xee" + "\xb5\x13\x89\xf7\x94\x0b\xef\x59" + "\x44\xc5\x78\x8b\x3c\x3b\x71\x20" + "\xf9\x35\x0c\x70\x74\xdc\x5b\xc2" + "\xb4\x11\x0e\x2c\x61\xa1\x52\x46" + "\x18\x11\x16\xc6\x86\x44\xa7\xaf" + "\xd5\x0c\x7d\xa6\x9e\x25\x2d\x1b" + "\x9a\x8f\x0f\xf8\x6a\x61\xa0\xea" + "\x3f\x0e\x90\xd6\x8f\x83\x30\x64" + "\xb5\x51\x2d\x08\x3c\xcd\x99\x36" + "\x96\xd4\xb1\xb5\x48\x30\xca\x48" + "\xf7\x11\xa8\xf5\x97\x8a\x6a\x6d" + "\x12\x33\x2f\xc0\xe8\xda\xec\x8a" + "\xe1\x88\x72\x63\xde\x20\xa3\xe1" + "\x8e\xac\x84\x37\x35\xf5\xf7\x3f" + "\x00\x02\x0e\xe4\xc1\x53\x68\x3f" + "\xaa\xd5\xac\x52\x3d\x20\x2f\x4d" + "\x7c\x83\xd0\xbd\xaa\x97\x35\x36" + "\x98\x88\x59\x5d\xe7\x24\xe3\x90" + "\x9d\x30\x47\xa7\xc3\x60\x35\xf4" + "\xd5\xdb\x0e\x4d\x44\xc1\x81\x8b" + "\xfd\xbd\xc3\x2b\xba\x68\xfe\x8d" + "\x49\x5a\x3c\x8a\xa3\x01\xae\x25" + "\x42\xab\xd2\x87\x1b\x35\xd6\xd2" + "\xd7\x70\x1c\x1f\x72\xd1\xe1\x39" + "\x1c\x58\xa2\xb4\xd0\x78\x55\x72" + "\x76\x59\xea\xd9\xd7\x6e\x63\x8b" + "\xcc\x9b\xa7\x74\x89\xfc\xa3\x68" + "\x86\x28\xd1\xbb\x54\x8d\x66\xad" + "\x2a\x92\xf9\x4e\x04\x3d\xae\xfd" + "\x1b\x2b\x7f\xc3\x2f\x1a\x78\x0a" + "\x5c\xc6\x84\xfe\x7c\xcb\x26\xfd" + "\xd9\x51\x0f\xd7\x94\x2f\xc5\xa7", + .ilen = 512, + .result = "\x05\x11\xb7\x18\xab\xc6\x2d\xac" + "\x70\x5d\xf6\x22\x94\xcd\xe5\x6c" + "\x17\x6b\xf6\x1c\xf0\xf3\x6e\xf8" + "\x50\x38\x1f\x71\x49\xb6\x57\xd6" + "\x8f\xcb\x8d\x6b\xe3\xa6\x29\x90" + "\xfe\x2a\x62\x82\xae\x6d\x8b\xf6" + "\xad\x1e\x9e\x20\x5f\x38\xbe\x04" + "\xda\x10\x8e\xed\xa2\xa4\x87\xab" + "\xda\x6b\xb4\x0c\x75\xba\xd3\x7c" + "\xc9\xac\x42\x31\x95\x7c\xc9\x04" + "\xeb\xd5\x6e\x32\x69\x8a\xdb\xa6" + "\x15\xd7\x3f\x4f\x2f\x66\x69\x03" + "\x9c\x1f\x54\x0f\xde\x1f\xf3\x65" + "\x4c\x96\x12\xed\x7c\x92\x03\x01" + "\x6f\xbc\x35\x93\xac\xf1\x27\xf1" + "\xb4\x96\x82\x5a\x5f\xb0\xa0\x50" + "\x89\xa4\x8e\x66\x44\x85\xcc\xfd" + 
"\x33\x14\x70\xe3\x96\xb2\xc3\xd3" + "\xbb\x54\x5a\x1a\xf9\x74\xa2\xc5" + "\x2d\x64\x75\xdd\xb4\x54\xe6\x74" + "\x8c\xd3\x9d\x9e\x86\xab\x51\x53" + "\xb7\x93\x3e\x6f\xd0\x4e\x2c\x40" + "\xf6\xa8\x2e\x3e\x9d\xf4\x66\xa5" + "\x76\x12\x73\x44\x1a\x56\xd7\x72" + "\x88\xcd\x21\x8c\x4c\x0f\xfe\xda" + "\x95\xe0\x3a\xa6\xa5\x84\x46\xcd" + "\xd5\x3e\x9d\x3a\xe2\x67\xe6\x60" + "\x1a\xe2\x70\x85\x58\xc2\x1b\x09" + "\xe1\xd7\x2c\xca\xad\xa8\x8f\xf9" + "\xac\xb3\x0e\xdb\xca\x2e\xe2\xb8" + "\x51\x71\xd9\x3c\x6c\xf1\x56\xf8" + "\xea\x9c\xf1\xfb\x0c\xe6\xb7\x10" + "\x1c\xf8\xa9\x7c\xe8\x53\x35\xc1" + "\x90\x3e\x76\x4a\x74\xa4\x21\x2c" + "\xf6\x2c\x4e\x0f\x94\x3a\x88\x2e" + "\x41\x09\x6a\x33\x7d\xf6\xdd\x3f" + "\x8d\x23\x31\x74\x84\xeb\x88\x6e" + "\xcc\xb9\xbc\x22\x83\x19\x07\x22" + "\xa5\x2d\xdf\xa5\xf3\x80\x85\x78" + "\x84\x39\x6a\x6d\x6a\x99\x4f\xa5" + "\x15\xfe\x46\xb0\xe4\x6c\xa5\x41" + "\x3c\xce\x8f\x42\x60\x71\xa7\x75" + "\x08\x40\x65\x8a\x82\xbf\xf5\x43" + "\x71\x96\xa9\x4d\x44\x8a\x20\xbe" + "\xfa\x4d\xbb\xc0\x7d\x31\x96\x65" + "\xe7\x75\xe5\x3e\xfd\x92\x3b\xc9" + "\x55\xbb\x16\x7e\xf7\xc2\x8c\xa4" + "\x40\x1d\xe5\xef\x0e\xdf\xe4\x9a" + "\x62\x73\x65\xfd\x46\x63\x25\x3d" + "\x2b\xaf\xe5\x64\xfe\xa5\x5c\xcf" + "\x24\xf3\xb4\xac\x64\xba\xdf\x4b" + "\xc6\x96\x7d\x81\x2d\x8d\x97\xf7" + "\xc5\x68\x77\x84\x32\x2b\xcc\x85" + "\x74\x96\xf0\x12\x77\x61\xb9\xeb" + "\x71\xaa\x82\xcb\x1c\xdb\x89\xc8" + "\xc6\xb5\xe3\x5c\x7d\x39\x07\x24" + "\xda\x39\x87\x45\xc0\x2b\xbb\x01" + "\xac\xbc\x2a\x5c\x7f\xfc\xe8\xce" + "\x6d\x9c\x6f\xed\xd3\xc1\xa1\xd6" + "\xc5\x55\xa9\x66\x2f\xe1\xc8\x32" + "\xa6\x5d\xa4\x3a\x98\x73\xe8\x45" + "\xa4\xc7\xa8\xb4\xf6\x13\x03\xf6" + "\xe9\x2e\xc4\x29\x0f\x84\xdb\xc4" + "\x21\xc4\xc2\x75\x67\x89\x37\x0a", + .rlen = 512, + }, +}; + +static struct cipher_testvec serpent_xts_enc_tv_template[] = { + /* Generated from AES-XTS test vectors */ + { + .key = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00", + .klen = 32, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00", + .input = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00", + .ilen = 32, + .result = "\xe1\x08\xb8\x1d\x2c\xf5\x33\x64" + "\xc8\x12\x04\xc7\xb3\x70\xe8\xc4" + "\x6a\x31\xc5\xf3\x00\xca\xb9\x16" + "\xde\xe2\x77\x66\xf7\xfe\x62\x08", + .rlen = 32, + }, { + .key = "\x11\x11\x11\x11\x11\x11\x11\x11" + "\x11\x11\x11\x11\x11\x11\x11\x11" + "\x22\x22\x22\x22\x22\x22\x22\x22" + "\x22\x22\x22\x22\x22\x22\x22\x22", + .klen = 32, + .iv = "\x33\x33\x33\x33\x33\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00", + .input = "\x44\x44\x44\x44\x44\x44\x44\x44" + "\x44\x44\x44\x44\x44\x44\x44\x44" + "\x44\x44\x44\x44\x44\x44\x44\x44" + "\x44\x44\x44\x44\x44\x44\x44\x44", + .ilen = 32, + .result = "\x1a\x0a\x09\x5f\xcd\x07\x07\x98" + "\x41\x86\x12\xaf\xb3\xd7\x68\x13" + "\xed\x81\xcd\x06\x87\x43\x1a\xbb" + "\x13\x3d\xd6\x1e\x2b\xe1\x77\xbe", + .rlen = 32, + }, { + .key = "\xff\xfe\xfd\xfc\xfb\xfa\xf9\xf8" + "\xf7\xf6\xf5\xf4\xf3\xf2\xf1\xf0" + "\x22\x22\x22\x22\x22\x22\x22\x22" + "\x22\x22\x22\x22\x22\x22\x22\x22", + .klen = 32, + .iv = "\x33\x33\x33\x33\x33\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00", + .input = "\x44\x44\x44\x44\x44\x44\x44\x44" + "\x44\x44\x44\x44\x44\x44\x44\x44" + "\x44\x44\x44\x44\x44\x44\x44\x44" + "\x44\x44\x44\x44\x44\x44\x44\x44", + .ilen = 32, + .result = "\xf9\x9b\x28\xb8\x5c\xaf\x8c\x61" + 
"\xb6\x1c\x81\x8f\x2c\x87\x60\x89" + "\x0d\x8d\x7a\xe8\x60\x48\xcc\x86" + "\xc1\x68\x45\xaa\x00\xe9\x24\xc5", + .rlen = 32, + }, { + .key = "\x27\x18\x28\x18\x28\x45\x90\x45" + "\x23\x53\x60\x28\x74\x71\x35\x26" + "\x31\x41\x59\x26\x53\x58\x97\x93" + "\x23\x84\x62\x64\x33\x83\x27\x95", + .klen = 32, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00", + .input = "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f" + "\x10\x11\x12\x13\x14\x15\x16\x17" + "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f" + "\x20\x21\x22\x23\x24\x25\x26\x27" + "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f" + "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f" + "\x40\x41\x42\x43\x44\x45\x46\x47" + "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f" + "\x50\x51\x52\x53\x54\x55\x56\x57" + "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f" + "\x60\x61\x62\x63\x64\x65\x66\x67" + "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f" + "\x70\x71\x72\x73\x74\x75\x76\x77" + "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f" + "\x80\x81\x82\x83\x84\x85\x86\x87" + "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f" + "\x90\x91\x92\x93\x94\x95\x96\x97" + "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f" + "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7" + "\xa8\xa9\xaa\xab\xac\xad\xae\xaf" + "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7" + "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf" + "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7" + "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf" + "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7" + "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf" + "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7" + "\xe8\xe9\xea\xeb\xec\xed\xee\xef" + "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7" + "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff" + "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f" + "\x10\x11\x12\x13\x14\x15\x16\x17" + "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f" + "\x20\x21\x22\x23\x24\x25\x26\x27" + "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f" + "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f" + "\x40\x41\x42\x43\x44\x45\x46\x47" + "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f" + "\x50\x51\x52\x53\x54\x55\x56\x57" + "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f" + "\x60\x61\x62\x63\x64\x65\x66\x67" + "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f" + "\x70\x71\x72\x73\x74\x75\x76\x77" + "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f" + "\x80\x81\x82\x83\x84\x85\x86\x87" + "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f" + "\x90\x91\x92\x93\x94\x95\x96\x97" + "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f" + "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7" + "\xa8\xa9\xaa\xab\xac\xad\xae\xaf" + "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7" + "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf" + "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7" + "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf" + "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7" + "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf" + "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7" + "\xe8\xe9\xea\xeb\xec\xed\xee\xef" + "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7" + "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff", + .ilen = 512, + .result = "\xfe\x47\x4a\xc8\x60\x7e\xb4\x8b" + "\x0d\x10\xf4\xb0\x0d\xba\xf8\x53" + "\x65\x6e\x38\x4b\xdb\xaa\xb1\x9e" + "\x28\xca\xb0\x22\xb3\x85\x75\xf4" + "\x00\x5c\x75\x14\x06\xd6\x25\x82" + "\xe6\xcb\x08\xf7\x29\x90\x23\x8e" + "\xa4\x68\x57\xe4\xf0\xd8\x32\xf3" + "\x80\x51\x67\xb5\x0b\x85\x69\xe8" + "\x19\xfe\xc4\xc7\x3e\xea\x90\xd3" + "\x8f\xa3\xf2\x0a\xac\x17\x4b\xa0" + "\x63\x5a\x16\x0f\xf0\xce\x66\x1f" + "\x2c\x21\x07\xf1\xa4\x03\xa3\x44" + "\x41\x61\x87\x5d\x6b\xb3\xef\xd4" + "\xfc\xaa\x32\x7e\x55\x58\x04\x41" + "\xc9\x07\x33\xc6\xa2\x68\xd6\x5a" + "\x55\x79\x4b\x6f\xcf\x89\xb9\x19" + "\xe5\x54\x13\x15\xb2\x1a\xfa\x15" + "\xc2\xf0\x06\x59\xfa\xa0\x25\x05" + "\x58\xfa\x43\x91\x16\x85\x40\xbb" + "\x0d\x34\x4d\xc5\x1e\x20\xd5\x08" + 
"\xcd\x22\x22\x41\x11\x9f\x6c\x7c" + "\x8d\x57\xc9\xba\x57\xe8\x2c\xf7" + "\xa0\x42\xa8\xde\xfc\xa3\xca\x98" + "\x4b\x43\xb1\xce\x4b\xbf\x01\x67" + "\x6e\x29\x60\xbd\x10\x14\x84\x82" + "\x83\x82\x0c\x63\x73\x92\x02\x7c" + "\x55\x37\x20\x80\x17\x51\xc8\xbc" + "\x46\x02\xcb\x38\x07\x6d\xe2\x85" + "\xaa\x29\xaf\x24\x58\x0d\xf0\x75" + "\x08\x0a\xa5\x34\x25\x16\xf3\x74" + "\xa7\x0b\x97\xbe\xc1\xa9\xdc\x29" + "\x1a\x0a\x56\xc1\x1a\x91\x97\x8c" + "\x0b\xc7\x16\xed\x5a\x22\xa6\x2e" + "\x8c\x2b\x4f\x54\x76\x47\x53\x8e" + "\xe8\x00\xec\x92\xb9\x55\xe6\xa2" + "\xf3\xe2\x4f\x6a\x66\x60\xd0\x87" + "\xe6\xd1\xcc\xe3\x6a\xc5\x2d\x21" + "\xcc\x9d\x6a\xb6\x75\xaa\xe2\x19" + "\x21\x9f\xa1\x5e\x4c\xfd\x72\xf9" + "\x94\x4e\x63\xc7\xae\xfc\xed\x47" + "\xe2\xfe\x7a\x63\x77\xfe\x97\x82" + "\xb1\x10\x6e\x36\x1d\xe1\xc4\x80" + "\xec\x69\x41\xec\xa7\x8a\xe0\x2f" + "\xe3\x49\x26\xa2\x41\xb2\x08\x0f" + "\x28\xb4\xa7\x39\xa1\x99\x2d\x1e" + "\x43\x42\x35\xd0\xcf\xec\x77\x67" + "\xb2\x3b\x9e\x1c\x35\xde\x4f\x5e" + "\x73\x3f\x5d\x6f\x07\x4b\x2e\x50" + "\xab\x6c\x6b\xff\xea\x00\x67\xaa" + "\x0e\x82\x32\xdd\x3d\xb5\xe5\x76" + "\x2b\x77\x3f\xbe\x12\x75\xfb\x92" + "\xc6\x89\x67\x4d\xca\xf7\xd4\x50" + "\xc0\x74\x47\xcc\xd9\x0a\xd4\xc6" + "\x3b\x17\x2e\xe3\x35\xbb\x53\xb5" + "\x86\xad\x51\xcc\xd5\x96\xb8\xdc" + "\x03\x57\xe6\x98\x52\x2f\x61\x62" + "\xc4\x5c\x9c\x36\x71\x07\xfb\x94" + "\xe3\x02\xc4\x2b\x08\x75\xc7\x35" + "\xfb\x2e\x88\x7b\xbb\x67\x00\xe1" + "\xc9\xdd\x99\xb2\x13\x53\x1a\x4e" + "\x76\x87\x19\x04\x1a\x2f\x38\x3e" + "\xef\x91\x64\x1d\x18\x07\x4e\x31" + "\x88\x21\x7c\xb0\xa5\x12\x4c\x3c" + "\xb0\x20\xbd\xda\xdf\xf9\x7c\xdd", + .rlen = 512, + }, { + .key = "\x27\x18\x28\x18\x28\x45\x90\x45" + "\x23\x53\x60\x28\x74\x71\x35\x26" + "\x62\x49\x77\x57\x24\x70\x93\x69" + "\x99\x59\x57\x49\x66\x96\x76\x27" + "\x31\x41\x59\x26\x53\x58\x97\x93" + "\x23\x84\x62\x64\x33\x83\x27\x95" + "\x02\x88\x41\x97\x16\x93\x99\x37" + "\x51\x05\x82\x09\x74\x94\x45\x92", + .klen = 64, + .iv = "\xff\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00", + .input = "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f" + "\x10\x11\x12\x13\x14\x15\x16\x17" + "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f" + "\x20\x21\x22\x23\x24\x25\x26\x27" + "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f" + "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f" + "\x40\x41\x42\x43\x44\x45\x46\x47" + "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f" + "\x50\x51\x52\x53\x54\x55\x56\x57" + "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f" + "\x60\x61\x62\x63\x64\x65\x66\x67" + "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f" + "\x70\x71\x72\x73\x74\x75\x76\x77" + "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f" + "\x80\x81\x82\x83\x84\x85\x86\x87" + "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f" + "\x90\x91\x92\x93\x94\x95\x96\x97" + "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f" + "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7" + "\xa8\xa9\xaa\xab\xac\xad\xae\xaf" + "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7" + "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf" + "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7" + "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf" + "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7" + "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf" + "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7" + "\xe8\xe9\xea\xeb\xec\xed\xee\xef" + "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7" + "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff" + "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f" + "\x10\x11\x12\x13\x14\x15\x16\x17" + "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f" + "\x20\x21\x22\x23\x24\x25\x26\x27" + "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f" + "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f" + 
"\x40\x41\x42\x43\x44\x45\x46\x47" + "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f" + "\x50\x51\x52\x53\x54\x55\x56\x57" + "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f" + "\x60\x61\x62\x63\x64\x65\x66\x67" + "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f" + "\x70\x71\x72\x73\x74\x75\x76\x77" + "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f" + "\x80\x81\x82\x83\x84\x85\x86\x87" + "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f" + "\x90\x91\x92\x93\x94\x95\x96\x97" + "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f" + "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7" + "\xa8\xa9\xaa\xab\xac\xad\xae\xaf" + "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7" + "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf" + "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7" + "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf" + "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7" + "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf" + "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7" + "\xe8\xe9\xea\xeb\xec\xed\xee\xef" + "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7" + "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff", + .ilen = 512, + .result = "\x2b\xc9\xb4\x6b\x10\x94\xa9\x32" + "\xaa\xb0\x20\xc6\x44\x3d\x74\x1f" + "\x75\x01\xa7\xf6\xf5\xf7\x62\x1b" + "\x80\x1b\x82\xcb\x01\x59\x91\x7f" + "\x80\x3a\x98\xf0\xd2\xca\xc4\xc3" + "\x34\xfd\xe6\x11\xf9\x33\x45\x12" + "\x48\xc5\x8c\x25\xf1\xc5\xc5\x23" + "\xd3\x44\xb4\x73\xd5\x04\xc0\xb7" + "\xca\x2f\xf5\xcd\xc5\xb4\xdd\xb0" + "\xf4\x60\xe8\xfb\xc6\x9c\xc5\x78" + "\xcd\xec\x7d\xdc\x19\x9c\x72\x64" + "\x63\x0b\x38\x2e\x76\xdd\x2d\x36" + "\x49\xb0\x1d\xea\x78\x9e\x00\xca" + "\x20\xcc\x1b\x1e\x98\x74\xab\xed" + "\x79\xf7\xd0\x6c\xd8\x93\x80\x29" + "\xac\xa5\x5e\x34\xa9\xab\xa0\x55" + "\x9a\xea\xaa\x95\x4d\x7b\xfe\x46" + "\x26\x8a\xfd\x88\xa2\xa8\xa6\xae" + "\x25\x42\x17\xbf\x76\x8f\x1c\x3d" + "\xec\x9a\xda\x64\x96\xb5\x61\xff" + "\x99\xeb\x12\x96\x85\x82\x9d\xd5" + "\x81\x85\x14\xa8\x59\xac\x8c\x94" + "\xbb\x3b\x85\x2b\xdf\xb3\x0c\xba" + "\x82\xc6\x4d\xca\x86\xea\x53\x28" + "\x4c\xe0\x4e\x31\xe3\x73\x2f\x79" + "\x9d\x42\xe1\x03\xe3\x8b\xc4\xff" + "\x05\xca\x81\x7b\xda\xa2\xde\x63" + "\x3a\x10\xbe\xc2\xac\x32\xc4\x05" + "\x47\x7e\xef\x67\xe2\x5f\x5b\xae" + "\xed\xf1\x70\x34\x16\x9a\x07\x7b" + "\xf2\x25\x2b\xb0\xf8\x3c\x15\x9a" + "\xa6\x59\x55\x5f\xc1\xf4\x1e\xcd" + "\x93\x1f\x06\xba\xd4\x9a\x22\x69" + "\xfa\x8e\x95\x0d\xf3\x23\x59\x2c" + "\xfe\x00\xba\xf0\x0e\xbc\x6d\xd6" + "\x62\xf0\x7a\x0e\x83\x3e\xdb\x32" + "\xfd\x43\x7d\xda\x42\x51\x87\x43" + "\x9d\xf9\xef\xf4\x30\x97\xf8\x09" + "\x88\xfc\x3f\x93\x70\xc1\x4a\xec" + "\x27\x5f\x11\xac\x71\xc7\x48\x46" + "\x2f\xf9\xdf\x8d\x9f\xf7\x2e\x56" + "\x0d\x4e\xb0\x32\x76\xce\x86\x81" + "\xcd\xdf\xe4\x00\xbf\xfd\x5f\x24" + "\xaf\xf7\x9a\xde\xff\x18\xac\x14" + "\x90\xc5\x01\x39\x34\x0f\x24\xf3" + "\x13\x2f\x5e\x4f\x30\x9a\x36\x40" + "\xec\xea\xbc\xcd\x9e\x0e\x5b\x23" + "\x50\x88\x97\x40\x69\xb1\x37\xf5" + "\xc3\x15\xf9\x3f\xb7\x79\x64\xe8" + "\x7b\x10\x20\xb9\x2b\x46\x83\x5b" + "\xd8\x39\xfc\xe4\xfa\x88\x52\xf2" + "\x72\xb0\x97\x4e\x89\xb3\x48\x00" + "\xc1\x16\x73\x50\x77\xba\xa6\x65" + "\x20\x2d\xb0\x02\x27\x89\xda\x99" + "\x45\xfb\xe9\xd3\x1d\x39\x2f\xd6" + "\x2a\xda\x09\x12\x11\xaf\xe6\x57" + "\x01\x04\x8a\xff\x86\x8b\xac\xf8" + "\xee\xe4\x1c\x98\x5b\xcf\x6b\x76" + "\xa3\x0e\x33\x74\x40\x18\x39\x72" + "\x66\x50\x31\xfd\x70\xdf\xe8\x51" + "\x96\x21\x36\xb2\x9b\xfa\x85\xd1" + "\x30\x05\xc8\x92\x98\x80\xff\x7a" + "\xaf\x43\x0b\xc5\x20\x41\x92\x20" + "\xd4\xa0\x91\x98\x11\x5f\x4d\xb1", + .rlen = 512, + }, +}; + +static struct cipher_testvec serpent_xts_dec_tv_template[] = { + /* Generated from AES-XTS test vectors */ + /* same as enc vectors with input and result reversed */ + { + .key = "\x00\x00\x00\x00\x00\x00\x00\x00" + 
"\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00", + .klen = 32, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00", + .input = "\xe1\x08\xb8\x1d\x2c\xf5\x33\x64" + "\xc8\x12\x04\xc7\xb3\x70\xe8\xc4" + "\x6a\x31\xc5\xf3\x00\xca\xb9\x16" + "\xde\xe2\x77\x66\xf7\xfe\x62\x08", + .ilen = 32, + .result = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00", + .rlen = 32, + }, { + .key = "\x11\x11\x11\x11\x11\x11\x11\x11" + "\x11\x11\x11\x11\x11\x11\x11\x11" + "\x22\x22\x22\x22\x22\x22\x22\x22" + "\x22\x22\x22\x22\x22\x22\x22\x22", + .klen = 32, + .iv = "\x33\x33\x33\x33\x33\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00", + .input = "\x1a\x0a\x09\x5f\xcd\x07\x07\x98" + "\x41\x86\x12\xaf\xb3\xd7\x68\x13" + "\xed\x81\xcd\x06\x87\x43\x1a\xbb" + "\x13\x3d\xd6\x1e\x2b\xe1\x77\xbe", + .ilen = 32, + .result = "\x44\x44\x44\x44\x44\x44\x44\x44" + "\x44\x44\x44\x44\x44\x44\x44\x44" + "\x44\x44\x44\x44\x44\x44\x44\x44" + "\x44\x44\x44\x44\x44\x44\x44\x44", + .rlen = 32, + }, { + .key = "\xff\xfe\xfd\xfc\xfb\xfa\xf9\xf8" + "\xf7\xf6\xf5\xf4\xf3\xf2\xf1\xf0" + "\x22\x22\x22\x22\x22\x22\x22\x22" + "\x22\x22\x22\x22\x22\x22\x22\x22", + .klen = 32, + .iv = "\x33\x33\x33\x33\x33\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00", + .input = "\xf9\x9b\x28\xb8\x5c\xaf\x8c\x61" + "\xb6\x1c\x81\x8f\x2c\x87\x60\x89" + "\x0d\x8d\x7a\xe8\x60\x48\xcc\x86" + "\xc1\x68\x45\xaa\x00\xe9\x24\xc5", + .ilen = 32, + .result = "\x44\x44\x44\x44\x44\x44\x44\x44" + "\x44\x44\x44\x44\x44\x44\x44\x44" + "\x44\x44\x44\x44\x44\x44\x44\x44" + "\x44\x44\x44\x44\x44\x44\x44\x44", + .rlen = 32, + }, { + .key = "\x27\x18\x28\x18\x28\x45\x90\x45" + "\x23\x53\x60\x28\x74\x71\x35\x26" + "\x31\x41\x59\x26\x53\x58\x97\x93" + "\x23\x84\x62\x64\x33\x83\x27\x95", + .klen = 32, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00", + .input = "\xfe\x47\x4a\xc8\x60\x7e\xb4\x8b" + "\x0d\x10\xf4\xb0\x0d\xba\xf8\x53" + "\x65\x6e\x38\x4b\xdb\xaa\xb1\x9e" + "\x28\xca\xb0\x22\xb3\x85\x75\xf4" + "\x00\x5c\x75\x14\x06\xd6\x25\x82" + "\xe6\xcb\x08\xf7\x29\x90\x23\x8e" + "\xa4\x68\x57\xe4\xf0\xd8\x32\xf3" + "\x80\x51\x67\xb5\x0b\x85\x69\xe8" + "\x19\xfe\xc4\xc7\x3e\xea\x90\xd3" + "\x8f\xa3\xf2\x0a\xac\x17\x4b\xa0" + "\x63\x5a\x16\x0f\xf0\xce\x66\x1f" + "\x2c\x21\x07\xf1\xa4\x03\xa3\x44" + "\x41\x61\x87\x5d\x6b\xb3\xef\xd4" + "\xfc\xaa\x32\x7e\x55\x58\x04\x41" + "\xc9\x07\x33\xc6\xa2\x68\xd6\x5a" + "\x55\x79\x4b\x6f\xcf\x89\xb9\x19" + "\xe5\x54\x13\x15\xb2\x1a\xfa\x15" + "\xc2\xf0\x06\x59\xfa\xa0\x25\x05" + "\x58\xfa\x43\x91\x16\x85\x40\xbb" + "\x0d\x34\x4d\xc5\x1e\x20\xd5\x08" + "\xcd\x22\x22\x41\x11\x9f\x6c\x7c" + "\x8d\x57\xc9\xba\x57\xe8\x2c\xf7" + "\xa0\x42\xa8\xde\xfc\xa3\xca\x98" + "\x4b\x43\xb1\xce\x4b\xbf\x01\x67" + "\x6e\x29\x60\xbd\x10\x14\x84\x82" + "\x83\x82\x0c\x63\x73\x92\x02\x7c" + "\x55\x37\x20\x80\x17\x51\xc8\xbc" + "\x46\x02\xcb\x38\x07\x6d\xe2\x85" + "\xaa\x29\xaf\x24\x58\x0d\xf0\x75" + "\x08\x0a\xa5\x34\x25\x16\xf3\x74" + "\xa7\x0b\x97\xbe\xc1\xa9\xdc\x29" + "\x1a\x0a\x56\xc1\x1a\x91\x97\x8c" + "\x0b\xc7\x16\xed\x5a\x22\xa6\x2e" + "\x8c\x2b\x4f\x54\x76\x47\x53\x8e" + "\xe8\x00\xec\x92\xb9\x55\xe6\xa2" + "\xf3\xe2\x4f\x6a\x66\x60\xd0\x87" + "\xe6\xd1\xcc\xe3\x6a\xc5\x2d\x21" + "\xcc\x9d\x6a\xb6\x75\xaa\xe2\x19" + "\x21\x9f\xa1\x5e\x4c\xfd\x72\xf9" + "\x94\x4e\x63\xc7\xae\xfc\xed\x47" + "\xe2\xfe\x7a\x63\x77\xfe\x97\x82" + 
"\xb1\x10\x6e\x36\x1d\xe1\xc4\x80" + "\xec\x69\x41\xec\xa7\x8a\xe0\x2f" + "\xe3\x49\x26\xa2\x41\xb2\x08\x0f" + "\x28\xb4\xa7\x39\xa1\x99\x2d\x1e" + "\x43\x42\x35\xd0\xcf\xec\x77\x67" + "\xb2\x3b\x9e\x1c\x35\xde\x4f\x5e" + "\x73\x3f\x5d\x6f\x07\x4b\x2e\x50" + "\xab\x6c\x6b\xff\xea\x00\x67\xaa" + "\x0e\x82\x32\xdd\x3d\xb5\xe5\x76" + "\x2b\x77\x3f\xbe\x12\x75\xfb\x92" + "\xc6\x89\x67\x4d\xca\xf7\xd4\x50" + "\xc0\x74\x47\xcc\xd9\x0a\xd4\xc6" + "\x3b\x17\x2e\xe3\x35\xbb\x53\xb5" + "\x86\xad\x51\xcc\xd5\x96\xb8\xdc" + "\x03\x57\xe6\x98\x52\x2f\x61\x62" + "\xc4\x5c\x9c\x36\x71\x07\xfb\x94" + "\xe3\x02\xc4\x2b\x08\x75\xc7\x35" + "\xfb\x2e\x88\x7b\xbb\x67\x00\xe1" + "\xc9\xdd\x99\xb2\x13\x53\x1a\x4e" + "\x76\x87\x19\x04\x1a\x2f\x38\x3e" + "\xef\x91\x64\x1d\x18\x07\x4e\x31" + "\x88\x21\x7c\xb0\xa5\x12\x4c\x3c" + "\xb0\x20\xbd\xda\xdf\xf9\x7c\xdd", + .ilen = 512, + .result = "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f" + "\x10\x11\x12\x13\x14\x15\x16\x17" + "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f" + "\x20\x21\x22\x23\x24\x25\x26\x27" + "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f" + "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f" + "\x40\x41\x42\x43\x44\x45\x46\x47" + "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f" + "\x50\x51\x52\x53\x54\x55\x56\x57" + "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f" + "\x60\x61\x62\x63\x64\x65\x66\x67" + "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f" + "\x70\x71\x72\x73\x74\x75\x76\x77" + "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f" + "\x80\x81\x82\x83\x84\x85\x86\x87" + "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f" + "\x90\x91\x92\x93\x94\x95\x96\x97" + "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f" + "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7" + "\xa8\xa9\xaa\xab\xac\xad\xae\xaf" + "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7" + "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf" + "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7" + "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf" + "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7" + "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf" + "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7" + "\xe8\xe9\xea\xeb\xec\xed\xee\xef" + "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7" + "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff" + "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f" + "\x10\x11\x12\x13\x14\x15\x16\x17" + "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f" + "\x20\x21\x22\x23\x24\x25\x26\x27" + "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f" + "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f" + "\x40\x41\x42\x43\x44\x45\x46\x47" + "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f" + "\x50\x51\x52\x53\x54\x55\x56\x57" + "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f" + "\x60\x61\x62\x63\x64\x65\x66\x67" + "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f" + "\x70\x71\x72\x73\x74\x75\x76\x77" + "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f" + "\x80\x81\x82\x83\x84\x85\x86\x87" + "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f" + "\x90\x91\x92\x93\x94\x95\x96\x97" + "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f" + "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7" + "\xa8\xa9\xaa\xab\xac\xad\xae\xaf" + "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7" + "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf" + "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7" + "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf" + "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7" + "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf" + "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7" + "\xe8\xe9\xea\xeb\xec\xed\xee\xef" + "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7" + "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff", + .rlen = 512, + }, { + .key = "\x27\x18\x28\x18\x28\x45\x90\x45" + "\x23\x53\x60\x28\x74\x71\x35\x26" + "\x62\x49\x77\x57\x24\x70\x93\x69" + "\x99\x59\x57\x49\x66\x96\x76\x27" + "\x31\x41\x59\x26\x53\x58\x97\x93" + "\x23\x84\x62\x64\x33\x83\x27\x95" + "\x02\x88\x41\x97\x16\x93\x99\x37" + 
"\x51\x05\x82\x09\x74\x94\x45\x92", + .klen = 64, + .iv = "\xff\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00", + .input = "\x2b\xc9\xb4\x6b\x10\x94\xa9\x32" + "\xaa\xb0\x20\xc6\x44\x3d\x74\x1f" + "\x75\x01\xa7\xf6\xf5\xf7\x62\x1b" + "\x80\x1b\x82\xcb\x01\x59\x91\x7f" + "\x80\x3a\x98\xf0\xd2\xca\xc4\xc3" + "\x34\xfd\xe6\x11\xf9\x33\x45\x12" + "\x48\xc5\x8c\x25\xf1\xc5\xc5\x23" + "\xd3\x44\xb4\x73\xd5\x04\xc0\xb7" + "\xca\x2f\xf5\xcd\xc5\xb4\xdd\xb0" + "\xf4\x60\xe8\xfb\xc6\x9c\xc5\x78" + "\xcd\xec\x7d\xdc\x19\x9c\x72\x64" + "\x63\x0b\x38\x2e\x76\xdd\x2d\x36" + "\x49\xb0\x1d\xea\x78\x9e\x00\xca" + "\x20\xcc\x1b\x1e\x98\x74\xab\xed" + "\x79\xf7\xd0\x6c\xd8\x93\x80\x29" + "\xac\xa5\x5e\x34\xa9\xab\xa0\x55" + "\x9a\xea\xaa\x95\x4d\x7b\xfe\x46" + "\x26\x8a\xfd\x88\xa2\xa8\xa6\xae" + "\x25\x42\x17\xbf\x76\x8f\x1c\x3d" + "\xec\x9a\xda\x64\x96\xb5\x61\xff" + "\x99\xeb\x12\x96\x85\x82\x9d\xd5" + "\x81\x85\x14\xa8\x59\xac\x8c\x94" + "\xbb\x3b\x85\x2b\xdf\xb3\x0c\xba" + "\x82\xc6\x4d\xca\x86\xea\x53\x28" + "\x4c\xe0\x4e\x31\xe3\x73\x2f\x79" + "\x9d\x42\xe1\x03\xe3\x8b\xc4\xff" + "\x05\xca\x81\x7b\xda\xa2\xde\x63" + "\x3a\x10\xbe\xc2\xac\x32\xc4\x05" + "\x47\x7e\xef\x67\xe2\x5f\x5b\xae" + "\xed\xf1\x70\x34\x16\x9a\x07\x7b" + "\xf2\x25\x2b\xb0\xf8\x3c\x15\x9a" + "\xa6\x59\x55\x5f\xc1\xf4\x1e\xcd" + "\x93\x1f\x06\xba\xd4\x9a\x22\x69" + "\xfa\x8e\x95\x0d\xf3\x23\x59\x2c" + "\xfe\x00\xba\xf0\x0e\xbc\x6d\xd6" + "\x62\xf0\x7a\x0e\x83\x3e\xdb\x32" + "\xfd\x43\x7d\xda\x42\x51\x87\x43" + "\x9d\xf9\xef\xf4\x30\x97\xf8\x09" + "\x88\xfc\x3f\x93\x70\xc1\x4a\xec" + "\x27\x5f\x11\xac\x71\xc7\x48\x46" + "\x2f\xf9\xdf\x8d\x9f\xf7\x2e\x56" + "\x0d\x4e\xb0\x32\x76\xce\x86\x81" + "\xcd\xdf\xe4\x00\xbf\xfd\x5f\x24" + "\xaf\xf7\x9a\xde\xff\x18\xac\x14" + "\x90\xc5\x01\x39\x34\x0f\x24\xf3" + "\x13\x2f\x5e\x4f\x30\x9a\x36\x40" + "\xec\xea\xbc\xcd\x9e\x0e\x5b\x23" + "\x50\x88\x97\x40\x69\xb1\x37\xf5" + "\xc3\x15\xf9\x3f\xb7\x79\x64\xe8" + "\x7b\x10\x20\xb9\x2b\x46\x83\x5b" + "\xd8\x39\xfc\xe4\xfa\x88\x52\xf2" + "\x72\xb0\x97\x4e\x89\xb3\x48\x00" + "\xc1\x16\x73\x50\x77\xba\xa6\x65" + "\x20\x2d\xb0\x02\x27\x89\xda\x99" + "\x45\xfb\xe9\xd3\x1d\x39\x2f\xd6" + "\x2a\xda\x09\x12\x11\xaf\xe6\x57" + "\x01\x04\x8a\xff\x86\x8b\xac\xf8" + "\xee\xe4\x1c\x98\x5b\xcf\x6b\x76" + "\xa3\x0e\x33\x74\x40\x18\x39\x72" + "\x66\x50\x31\xfd\x70\xdf\xe8\x51" + "\x96\x21\x36\xb2\x9b\xfa\x85\xd1" + "\x30\x05\xc8\x92\x98\x80\xff\x7a" + "\xaf\x43\x0b\xc5\x20\x41\x92\x20" + "\xd4\xa0\x91\x98\x11\x5f\x4d\xb1", + .ilen = 512, + .result = "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f" + "\x10\x11\x12\x13\x14\x15\x16\x17" + "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f" + "\x20\x21\x22\x23\x24\x25\x26\x27" + "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f" + "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f" + "\x40\x41\x42\x43\x44\x45\x46\x47" + "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f" + "\x50\x51\x52\x53\x54\x55\x56\x57" + "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f" + "\x60\x61\x62\x63\x64\x65\x66\x67" + "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f" + "\x70\x71\x72\x73\x74\x75\x76\x77" + "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f" + "\x80\x81\x82\x83\x84\x85\x86\x87" + "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f" + "\x90\x91\x92\x93\x94\x95\x96\x97" + "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f" + "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7" + "\xa8\xa9\xaa\xab\xac\xad\xae\xaf" + "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7" + "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf" + "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7" + "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf" + "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7" + 
"\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf" + "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7" + "\xe8\xe9\xea\xeb\xec\xed\xee\xef" + "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7" + "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff" + "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f" + "\x10\x11\x12\x13\x14\x15\x16\x17" + "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f" + "\x20\x21\x22\x23\x24\x25\x26\x27" + "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f" + "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f" + "\x40\x41\x42\x43\x44\x45\x46\x47" + "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f" + "\x50\x51\x52\x53\x54\x55\x56\x57" + "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f" + "\x60\x61\x62\x63\x64\x65\x66\x67" + "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f" + "\x70\x71\x72\x73\x74\x75\x76\x77" + "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f" + "\x80\x81\x82\x83\x84\x85\x86\x87" + "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f" + "\x90\x91\x92\x93\x94\x95\x96\x97" + "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f" + "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7" + "\xa8\xa9\xaa\xab\xac\xad\xae\xaf" + "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7" + "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf" + "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7" + "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf" + "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7" + "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf" + "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7" + "\xe8\xe9\xea\xeb\xec\xed\xee\xef" + "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7" + "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff", + .rlen = 512, + }, +}; + +/* Cast6 test vectors from RFC 2612 */ +#define CAST6_ENC_TEST_VECTORS 3 +#define CAST6_DEC_TEST_VECTORS 3 + +static struct cipher_testvec cast6_enc_tv_template[] = { + { + .key = "\x23\x42\xbb\x9e\xfa\x38\x54\x2c" + "\x0a\xf7\x56\x47\xf2\x9f\x61\x5d", + .klen = 16, + .input = zeroed_string, + .ilen = 16, + .result = "\xc8\x42\xa0\x89\x72\xb4\x3d\x20" + "\x83\x6c\x91\xd1\xb7\x53\x0f\x6b", + .rlen = 16, + }, { + .key = "\x23\x42\xbb\x9e\xfa\x38\x54\x2c" + "\xbe\xd0\xac\x83\x94\x0a\xc2\x98" + "\xba\xc7\x7a\x77\x17\x94\x28\x63", + .klen = 24, + .input = zeroed_string, + .ilen = 16, + .result = "\x1b\x38\x6c\x02\x10\xdc\xad\xcb" + "\xdd\x0e\x41\xaa\x08\xa7\xa7\xe8", + .rlen = 16, + }, { + .key = "\x23\x42\xbb\x9e\xfa\x38\x54\x2c" + "\xbe\xd0\xac\x83\x94\x0a\xc2\x98" + "\x8d\x7c\x47\xce\x26\x49\x08\x46" + "\x1c\xc1\xb5\x13\x7a\xe6\xb6\x04", + .klen = 32, + .input = zeroed_string, + .ilen = 16, + .result = "\x4f\x6a\x20\x38\x28\x68\x97\xb9" + "\xc9\x87\x01\x36\x55\x33\x17\xfa", + .rlen = 16, + }, +}; + +static struct cipher_testvec cast6_dec_tv_template[] = { + { + .key = "\x23\x42\xbb\x9e\xfa\x38\x54\x2c" + "\x0a\xf7\x56\x47\xf2\x9f\x61\x5d", + .klen = 16, + .input = "\xc8\x42\xa0\x89\x72\xb4\x3d\x20" + "\x83\x6c\x91\xd1\xb7\x53\x0f\x6b", + .ilen = 16, + .result = zeroed_string, + .rlen = 16, + }, { + .key = "\x23\x42\xbb\x9e\xfa\x38\x54\x2c" + "\xbe\xd0\xac\x83\x94\x0a\xc2\x98" + "\xba\xc7\x7a\x77\x17\x94\x28\x63", + .klen = 24, + .input = "\x1b\x38\x6c\x02\x10\xdc\xad\xcb" + "\xdd\x0e\x41\xaa\x08\xa7\xa7\xe8", + .ilen = 16, + .result = zeroed_string, + .rlen = 16, + }, { + .key = "\x23\x42\xbb\x9e\xfa\x38\x54\x2c" + "\xbe\xd0\xac\x83\x94\x0a\xc2\x98" + "\x8d\x7c\x47\xce\x26\x49\x08\x46" + "\x1c\xc1\xb5\x13\x7a\xe6\xb6\x04", + .klen = 32, + .input = "\x4f\x6a\x20\x38\x28\x68\x97\xb9" + "\xc9\x87\x01\x36\x55\x33\x17\xfa", + .ilen = 16, + .result = zeroed_string, + .rlen = 16, + }, +}; + + +/* + * AES test vectors. 
+ */ +#define AES_ENC_TEST_VECTORS 3 +#define AES_DEC_TEST_VECTORS 3 +#define AES_CBC_ENC_TEST_VECTORS 4 +#define AES_CBC_DEC_TEST_VECTORS 4 +#define AES_LRW_ENC_TEST_VECTORS 8 +#define AES_LRW_DEC_TEST_VECTORS 8 +#define AES_XTS_ENC_TEST_VECTORS 5 +#define AES_XTS_DEC_TEST_VECTORS 5 +#define AES_CTR_ENC_TEST_VECTORS 3 +#define AES_CTR_DEC_TEST_VECTORS 3 +#define AES_OFB_ENC_TEST_VECTORS 1 +#define AES_OFB_DEC_TEST_VECTORS 1 +#define AES_CTR_3686_ENC_TEST_VECTORS 7 +#define AES_CTR_3686_DEC_TEST_VECTORS 6 +#define AES_GCM_ENC_TEST_VECTORS 9 +#define AES_GCM_DEC_TEST_VECTORS 8 +#define AES_GCM_4106_ENC_TEST_VECTORS 7 +#define AES_GCM_4106_DEC_TEST_VECTORS 7 +#define AES_CCM_ENC_TEST_VECTORS 7 +#define AES_CCM_DEC_TEST_VECTORS 7 +#define AES_CCM_4309_ENC_TEST_VECTORS 7 +#define AES_CCM_4309_DEC_TEST_VECTORS 10 + +static struct cipher_testvec aes_enc_tv_template[] = { + { /* From FIPS-197 */ + .key = "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + .klen = 16, + .input = "\x00\x11\x22\x33\x44\x55\x66\x77" + "\x88\x99\xaa\xbb\xcc\xdd\xee\xff", + .ilen = 16, + .result = "\x69\xc4\xe0\xd8\x6a\x7b\x04\x30" + "\xd8\xcd\xb7\x80\x70\xb4\xc5\x5a", + .rlen = 16, + }, { + .key = "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f" + "\x10\x11\x12\x13\x14\x15\x16\x17", + .klen = 24, + .input = "\x00\x11\x22\x33\x44\x55\x66\x77" + "\x88\x99\xaa\xbb\xcc\xdd\xee\xff", + .ilen = 16, + .result = "\xdd\xa9\x7c\xa4\x86\x4c\xdf\xe0" + "\x6e\xaf\x70\xa0\xec\x0d\x71\x91", + .rlen = 16, + }, { + .key = "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f" + "\x10\x11\x12\x13\x14\x15\x16\x17" + "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + .klen = 32, + .input = "\x00\x11\x22\x33\x44\x55\x66\x77" + "\x88\x99\xaa\xbb\xcc\xdd\xee\xff", + .ilen = 16, + .result = "\x8e\xa2\xb7\xca\x51\x67\x45\xbf" + "\xea\xfc\x49\x90\x4b\x49\x60\x89", + .rlen = 16, + }, +}; + +static struct cipher_testvec aes_dec_tv_template[] = { + { /* From FIPS-197 */ + .key = "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + .klen = 16, + .input = "\x69\xc4\xe0\xd8\x6a\x7b\x04\x30" + "\xd8\xcd\xb7\x80\x70\xb4\xc5\x5a", + .ilen = 16, + .result = "\x00\x11\x22\x33\x44\x55\x66\x77" + "\x88\x99\xaa\xbb\xcc\xdd\xee\xff", + .rlen = 16, + }, { + .key = "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f" + "\x10\x11\x12\x13\x14\x15\x16\x17", + .klen = 24, + .input = "\xdd\xa9\x7c\xa4\x86\x4c\xdf\xe0" + "\x6e\xaf\x70\xa0\xec\x0d\x71\x91", + .ilen = 16, + .result = "\x00\x11\x22\x33\x44\x55\x66\x77" + "\x88\x99\xaa\xbb\xcc\xdd\xee\xff", + .rlen = 16, + }, { + .key = "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f" + "\x10\x11\x12\x13\x14\x15\x16\x17" + "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + .klen = 32, + .input = "\x8e\xa2\xb7\xca\x51\x67\x45\xbf" + "\xea\xfc\x49\x90\x4b\x49\x60\x89", + .ilen = 16, + .result = "\x00\x11\x22\x33\x44\x55\x66\x77" + "\x88\x99\xaa\xbb\xcc\xdd\xee\xff", + .rlen = 16, + }, +}; + +static struct cipher_testvec aes_cbc_enc_tv_template[] = { + { /* From RFC 3602 */ + .key = "\x06\xa9\x21\x40\x36\xb8\xa1\x5b" + "\x51\x2e\x03\xd5\x34\x12\x00\x06", + .klen = 16, + .iv = "\x3d\xaf\xba\x42\x9d\x9e\xb4\x30" + "\xb4\x22\xda\x80\x2c\x9f\xac\x41", + .input = "Single block msg", + .ilen = 16, + .result = "\xe3\x53\x77\x9c\x10\x79\xae\xb8" + "\x27\x08\x94\x2d\xbe\x77\x18\x1a", + .rlen = 16, + }, { + .key = "\xc2\x86\x69\x6d\x88\x7c\x9a\xa0" + "\x61\x1b\xbb\x3e\x20\x25\xa4\x5a", + .klen = 16, + .iv = 
"\x56\x2e\x17\x99\x6d\x09\x3d\x28" + "\xdd\xb3\xba\x69\x5a\x2e\x6f\x58", + .input = "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f" + "\x10\x11\x12\x13\x14\x15\x16\x17" + "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + .ilen = 32, + .result = "\xd2\x96\xcd\x94\xc2\xcc\xcf\x8a" + "\x3a\x86\x30\x28\xb5\xe1\xdc\x0a" + "\x75\x86\x60\x2d\x25\x3c\xff\xf9" + "\x1b\x82\x66\xbe\xa6\xd6\x1a\xb1", + .rlen = 32, + }, { /* From NIST SP800-38A */ + .key = "\x8e\x73\xb0\xf7\xda\x0e\x64\x52" + "\xc8\x10\xf3\x2b\x80\x90\x79\xe5" + "\x62\xf8\xea\xd2\x52\x2c\x6b\x7b", + .klen = 24, + .iv = "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + .input = "\x6b\xc1\xbe\xe2\x2e\x40\x9f\x96" + "\xe9\x3d\x7e\x11\x73\x93\x17\x2a" + "\xae\x2d\x8a\x57\x1e\x03\xac\x9c" + "\x9e\xb7\x6f\xac\x45\xaf\x8e\x51" + "\x30\xc8\x1c\x46\xa3\x5c\xe4\x11" + "\xe5\xfb\xc1\x19\x1a\x0a\x52\xef" + "\xf6\x9f\x24\x45\xdf\x4f\x9b\x17" + "\xad\x2b\x41\x7b\xe6\x6c\x37\x10", + .ilen = 64, + .result = "\x4f\x02\x1d\xb2\x43\xbc\x63\x3d" + "\x71\x78\x18\x3a\x9f\xa0\x71\xe8" + "\xb4\xd9\xad\xa9\xad\x7d\xed\xf4" + "\xe5\xe7\x38\x76\x3f\x69\x14\x5a" + "\x57\x1b\x24\x20\x12\xfb\x7a\xe0" + "\x7f\xa9\xba\xac\x3d\xf1\x02\xe0" + "\x08\xb0\xe2\x79\x88\x59\x88\x81" + "\xd9\x20\xa9\xe6\x4f\x56\x15\xcd", + .rlen = 64, + }, { + .key = "\x60\x3d\xeb\x10\x15\xca\x71\xbe" + "\x2b\x73\xae\xf0\x85\x7d\x77\x81" + "\x1f\x35\x2c\x07\x3b\x61\x08\xd7" + "\x2d\x98\x10\xa3\x09\x14\xdf\xf4", + .klen = 32, + .iv = "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + .input = "\x6b\xc1\xbe\xe2\x2e\x40\x9f\x96" + "\xe9\x3d\x7e\x11\x73\x93\x17\x2a" + "\xae\x2d\x8a\x57\x1e\x03\xac\x9c" + "\x9e\xb7\x6f\xac\x45\xaf\x8e\x51" + "\x30\xc8\x1c\x46\xa3\x5c\xe4\x11" + "\xe5\xfb\xc1\x19\x1a\x0a\x52\xef" + "\xf6\x9f\x24\x45\xdf\x4f\x9b\x17" + "\xad\x2b\x41\x7b\xe6\x6c\x37\x10", + .ilen = 64, + .result = "\xf5\x8c\x4c\x04\xd6\xe5\xf1\xba" + "\x77\x9e\xab\xfb\x5f\x7b\xfb\xd6" + "\x9c\xfc\x4e\x96\x7e\xdb\x80\x8d" + "\x67\x9f\x77\x7b\xc6\x70\x2c\x7d" + "\x39\xf2\x33\x69\xa9\xd9\xba\xcf" + "\xa5\x30\xe2\x63\x04\x23\x14\x61" + "\xb2\xeb\x05\xe2\xc3\x9b\xe9\xfc" + "\xda\x6c\x19\x07\x8c\x6a\x9d\x1b", + .rlen = 64, + }, +}; + +static struct cipher_testvec aes_cbc_dec_tv_template[] = { + { /* From RFC 3602 */ + .key = "\x06\xa9\x21\x40\x36\xb8\xa1\x5b" + "\x51\x2e\x03\xd5\x34\x12\x00\x06", + .klen = 16, + .iv = "\x3d\xaf\xba\x42\x9d\x9e\xb4\x30" + "\xb4\x22\xda\x80\x2c\x9f\xac\x41", + .input = "\xe3\x53\x77\x9c\x10\x79\xae\xb8" + "\x27\x08\x94\x2d\xbe\x77\x18\x1a", + .ilen = 16, + .result = "Single block msg", + .rlen = 16, + }, { + .key = "\xc2\x86\x69\x6d\x88\x7c\x9a\xa0" + "\x61\x1b\xbb\x3e\x20\x25\xa4\x5a", + .klen = 16, + .iv = "\x56\x2e\x17\x99\x6d\x09\x3d\x28" + "\xdd\xb3\xba\x69\x5a\x2e\x6f\x58", + .input = "\xd2\x96\xcd\x94\xc2\xcc\xcf\x8a" + "\x3a\x86\x30\x28\xb5\xe1\xdc\x0a" + "\x75\x86\x60\x2d\x25\x3c\xff\xf9" + "\x1b\x82\x66\xbe\xa6\xd6\x1a\xb1", + .ilen = 32, + .result = "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f" + "\x10\x11\x12\x13\x14\x15\x16\x17" + "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + .rlen = 32, + }, { /* From NIST SP800-38A */ + .key = "\x8e\x73\xb0\xf7\xda\x0e\x64\x52" + "\xc8\x10\xf3\x2b\x80\x90\x79\xe5" + "\x62\xf8\xea\xd2\x52\x2c\x6b\x7b", + .klen = 24, + .iv = "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + .input = "\x4f\x02\x1d\xb2\x43\xbc\x63\x3d" + "\x71\x78\x18\x3a\x9f\xa0\x71\xe8" + "\xb4\xd9\xad\xa9\xad\x7d\xed\xf4" + "\xe5\xe7\x38\x76\x3f\x69\x14\x5a" 
+ "\x57\x1b\x24\x20\x12\xfb\x7a\xe0" + "\x7f\xa9\xba\xac\x3d\xf1\x02\xe0" + "\x08\xb0\xe2\x79\x88\x59\x88\x81" + "\xd9\x20\xa9\xe6\x4f\x56\x15\xcd", + .ilen = 64, + .result = "\x6b\xc1\xbe\xe2\x2e\x40\x9f\x96" + "\xe9\x3d\x7e\x11\x73\x93\x17\x2a" + "\xae\x2d\x8a\x57\x1e\x03\xac\x9c" + "\x9e\xb7\x6f\xac\x45\xaf\x8e\x51" + "\x30\xc8\x1c\x46\xa3\x5c\xe4\x11" + "\xe5\xfb\xc1\x19\x1a\x0a\x52\xef" + "\xf6\x9f\x24\x45\xdf\x4f\x9b\x17" + "\xad\x2b\x41\x7b\xe6\x6c\x37\x10", + .rlen = 64, + }, { + .key = "\x60\x3d\xeb\x10\x15\xca\x71\xbe" + "\x2b\x73\xae\xf0\x85\x7d\x77\x81" + "\x1f\x35\x2c\x07\x3b\x61\x08\xd7" + "\x2d\x98\x10\xa3\x09\x14\xdf\xf4", + .klen = 32, + .iv = "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + .input = "\xf5\x8c\x4c\x04\xd6\xe5\xf1\xba" + "\x77\x9e\xab\xfb\x5f\x7b\xfb\xd6" + "\x9c\xfc\x4e\x96\x7e\xdb\x80\x8d" + "\x67\x9f\x77\x7b\xc6\x70\x2c\x7d" + "\x39\xf2\x33\x69\xa9\xd9\xba\xcf" + "\xa5\x30\xe2\x63\x04\x23\x14\x61" + "\xb2\xeb\x05\xe2\xc3\x9b\xe9\xfc" + "\xda\x6c\x19\x07\x8c\x6a\x9d\x1b", + .ilen = 64, + .result = "\x6b\xc1\xbe\xe2\x2e\x40\x9f\x96" + "\xe9\x3d\x7e\x11\x73\x93\x17\x2a" + "\xae\x2d\x8a\x57\x1e\x03\xac\x9c" + "\x9e\xb7\x6f\xac\x45\xaf\x8e\x51" + "\x30\xc8\x1c\x46\xa3\x5c\xe4\x11" + "\xe5\xfb\xc1\x19\x1a\x0a\x52\xef" + "\xf6\x9f\x24\x45\xdf\x4f\x9b\x17" + "\xad\x2b\x41\x7b\xe6\x6c\x37\x10", + .rlen = 64, + }, +}; + +static struct cipher_testvec aes_lrw_enc_tv_template[] = { + /* from http://grouper.ieee.org/groups/1619/email/pdf00017.pdf */ + { /* LRW-32-AES 1 */ + .key = "\x45\x62\xac\x25\xf8\x28\x17\x6d" + "\x4c\x26\x84\x14\xb5\x68\x01\x85" + "\x25\x8e\x2a\x05\xe7\x3e\x9d\x03" + "\xee\x5a\x83\x0c\xcc\x09\x4c\x87", + .klen = 32, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x01", + .input = "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x41\x42\x43\x44\x45\x46", + .ilen = 16, + .result = "\xf1\xb2\x73\xcd\x65\xa3\xdf\x5f" + "\xe9\x5d\x48\x92\x54\x63\x4e\xb8", + .rlen = 16, + }, { /* LRW-32-AES 2 */ + .key = "\x59\x70\x47\x14\xf5\x57\x47\x8c" + "\xd7\x79\xe8\x0f\x54\x88\x79\x44" + "\x0d\x48\xf0\xb7\xb1\x5a\x53\xea" + "\x1c\xaa\x6b\x29\xc2\xca\xfb\xaf", + .klen = 32, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x02", + .input = "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x41\x42\x43\x44\x45\x46", + .ilen = 16, + .result = "\x00\xc8\x2b\xae\x95\xbb\xcd\xe5" + "\x27\x4f\x07\x69\xb2\x60\xe1\x36", + .rlen = 16, + }, { /* LRW-32-AES 3 */ + .key = "\xd8\x2a\x91\x34\xb2\x6a\x56\x50" + "\x30\xfe\x69\xe2\x37\x7f\x98\x47" + "\xcd\xf9\x0b\x16\x0c\x64\x8f\xb6" + "\xb0\x0d\x0d\x1b\xae\x85\x87\x1f", + .klen = 32, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x02\x00\x00\x00\x00", + .input = "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x41\x42\x43\x44\x45\x46", + .ilen = 16, + .result = "\x76\x32\x21\x83\xed\x8f\xf1\x82" + "\xf9\x59\x62\x03\x69\x0e\x5e\x01", + .rlen = 16, + }, { /* LRW-32-AES 4 */ + .key = "\x0f\x6a\xef\xf8\xd3\xd2\xbb\x15" + "\x25\x83\xf7\x3c\x1f\x01\x28\x74" + "\xca\xc6\xbc\x35\x4d\x4a\x65\x54" + "\x90\xae\x61\xcf\x7b\xae\xbd\xcc" + "\xad\xe4\x94\xc5\x4a\x29\xae\x70", + .klen = 40, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x01", + .input = "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x41\x42\x43\x44\x45\x46", + .ilen = 16, + .result = "\x9c\x0f\x15\x2f\x55\xa2\xd8\xf0" + "\xd6\x7b\x8f\x9e\x28\x22\xbc\x41", + .rlen = 16, + }, { /* LRW-32-AES 5 */ + .key = "\x8a\xd4\xee\x10\x2f\xbd\x81\xff" + 
"\xf8\x86\xce\xac\x93\xc5\xad\xc6" + "\xa0\x19\x07\xc0\x9d\xf7\xbb\xdd" + "\x52\x13\xb2\xb7\xf0\xff\x11\xd8" + "\xd6\x08\xd0\xcd\x2e\xb1\x17\x6f", + .klen = 40, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x02\x00\x00\x00\x00", + .input = "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x41\x42\x43\x44\x45\x46", + .ilen = 16, + .result = "\xd4\x27\x6a\x7f\x14\x91\x3d\x65" + "\xc8\x60\x48\x02\x87\xe3\x34\x06", + .rlen = 16, + }, { /* LRW-32-AES 6 */ + .key = "\xf8\xd4\x76\xff\xd6\x46\xee\x6c" + "\x23\x84\xcb\x1c\x77\xd6\x19\x5d" + "\xfe\xf1\xa9\xf3\x7b\xbc\x8d\x21" + "\xa7\x9c\x21\xf8\xcb\x90\x02\x89" + "\xa8\x45\x34\x8e\xc8\xc5\xb5\xf1" + "\x26\xf5\x0e\x76\xfe\xfd\x1b\x1e", + .klen = 48, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x01", + .input = "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x41\x42\x43\x44\x45\x46", + .ilen = 16, + .result = "\xbd\x06\xb8\xe1\xdb\x98\x89\x9e" + "\xc4\x98\xe4\x91\xcf\x1c\x70\x2b", + .rlen = 16, + }, { /* LRW-32-AES 7 */ + .key = "\xfb\x76\x15\xb2\x3d\x80\x89\x1d" + "\xd4\x70\x98\x0b\xc7\x95\x84\xc8" + "\xb2\xfb\x64\xce\x60\x97\x87\x8d" + "\x17\xfc\xe4\x5a\x49\xe8\x30\xb7" + "\x6e\x78\x17\xe7\x2d\x5e\x12\xd4" + "\x60\x64\x04\x7a\xf1\x2f\x9e\x0c", + .klen = 48, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x02\x00\x00\x00\x00", + .input = "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x41\x42\x43\x44\x45\x46", + .ilen = 16, + .result = "\x5b\x90\x8e\xc1\xab\xdd\x67\x5f" + "\x3d\x69\x8a\x95\x53\xc8\x9c\xe5", + .rlen = 16, + }, { +/* http://www.mail-archive.com/stds-p1619@listserv.ieee.org/msg00173.html */ + .key = "\xf8\xd4\x76\xff\xd6\x46\xee\x6c" + "\x23\x84\xcb\x1c\x77\xd6\x19\x5d" + "\xfe\xf1\xa9\xf3\x7b\xbc\x8d\x21" + "\xa7\x9c\x21\xf8\xcb\x90\x02\x89" + "\xa8\x45\x34\x8e\xc8\xc5\xb5\xf1" + "\x26\xf5\x0e\x76\xfe\xfd\x1b\x1e", + .klen = 48, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x01", + .input = "\x05\x11\xb7\x18\xab\xc6\x2d\xac" + "\x70\x5d\xf6\x22\x94\xcd\xe5\x6c" + "\x17\x6b\xf6\x1c\xf0\xf3\x6e\xf8" + "\x50\x38\x1f\x71\x49\xb6\x57\xd6" + "\x8f\xcb\x8d\x6b\xe3\xa6\x29\x90" + "\xfe\x2a\x62\x82\xae\x6d\x8b\xf6" + "\xad\x1e\x9e\x20\x5f\x38\xbe\x04" + "\xda\x10\x8e\xed\xa2\xa4\x87\xab" + "\xda\x6b\xb4\x0c\x75\xba\xd3\x7c" + "\xc9\xac\x42\x31\x95\x7c\xc9\x04" + "\xeb\xd5\x6e\x32\x69\x8a\xdb\xa6" + "\x15\xd7\x3f\x4f\x2f\x66\x69\x03" + "\x9c\x1f\x54\x0f\xde\x1f\xf3\x65" + "\x4c\x96\x12\xed\x7c\x92\x03\x01" + "\x6f\xbc\x35\x93\xac\xf1\x27\xf1" + "\xb4\x96\x82\x5a\x5f\xb0\xa0\x50" + "\x89\xa4\x8e\x66\x44\x85\xcc\xfd" + "\x33\x14\x70\xe3\x96\xb2\xc3\xd3" + "\xbb\x54\x5a\x1a\xf9\x74\xa2\xc5" + "\x2d\x64\x75\xdd\xb4\x54\xe6\x74" + "\x8c\xd3\x9d\x9e\x86\xab\x51\x53" + "\xb7\x93\x3e\x6f\xd0\x4e\x2c\x40" + "\xf6\xa8\x2e\x3e\x9d\xf4\x66\xa5" + "\x76\x12\x73\x44\x1a\x56\xd7\x72" + "\x88\xcd\x21\x8c\x4c\x0f\xfe\xda" + "\x95\xe0\x3a\xa6\xa5\x84\x46\xcd" + "\xd5\x3e\x9d\x3a\xe2\x67\xe6\x60" + "\x1a\xe2\x70\x85\x58\xc2\x1b\x09" + "\xe1\xd7\x2c\xca\xad\xa8\x8f\xf9" + "\xac\xb3\x0e\xdb\xca\x2e\xe2\xb8" + "\x51\x71\xd9\x3c\x6c\xf1\x56\xf8" + "\xea\x9c\xf1\xfb\x0c\xe6\xb7\x10" + "\x1c\xf8\xa9\x7c\xe8\x53\x35\xc1" + "\x90\x3e\x76\x4a\x74\xa4\x21\x2c" + "\xf6\x2c\x4e\x0f\x94\x3a\x88\x2e" + "\x41\x09\x6a\x33\x7d\xf6\xdd\x3f" + "\x8d\x23\x31\x74\x84\xeb\x88\x6e" + "\xcc\xb9\xbc\x22\x83\x19\x07\x22" + "\xa5\x2d\xdf\xa5\xf3\x80\x85\x78" + "\x84\x39\x6a\x6d\x6a\x99\x4f\xa5" + "\x15\xfe\x46\xb0\xe4\x6c\xa5\x41" + "\x3c\xce\x8f\x42\x60\x71\xa7\x75" + "\x08\x40\x65\x8a\x82\xbf\xf5\x43" + 
"\x71\x96\xa9\x4d\x44\x8a\x20\xbe" + "\xfa\x4d\xbb\xc0\x7d\x31\x96\x65" + "\xe7\x75\xe5\x3e\xfd\x92\x3b\xc9" + "\x55\xbb\x16\x7e\xf7\xc2\x8c\xa4" + "\x40\x1d\xe5\xef\x0e\xdf\xe4\x9a" + "\x62\x73\x65\xfd\x46\x63\x25\x3d" + "\x2b\xaf\xe5\x64\xfe\xa5\x5c\xcf" + "\x24\xf3\xb4\xac\x64\xba\xdf\x4b" + "\xc6\x96\x7d\x81\x2d\x8d\x97\xf7" + "\xc5\x68\x77\x84\x32\x2b\xcc\x85" + "\x74\x96\xf0\x12\x77\x61\xb9\xeb" + "\x71\xaa\x82\xcb\x1c\xdb\x89\xc8" + "\xc6\xb5\xe3\x5c\x7d\x39\x07\x24" + "\xda\x39\x87\x45\xc0\x2b\xbb\x01" + "\xac\xbc\x2a\x5c\x7f\xfc\xe8\xce" + "\x6d\x9c\x6f\xed\xd3\xc1\xa1\xd6" + "\xc5\x55\xa9\x66\x2f\xe1\xc8\x32" + "\xa6\x5d\xa4\x3a\x98\x73\xe8\x45" + "\xa4\xc7\xa8\xb4\xf6\x13\x03\xf6" + "\xe9\x2e\xc4\x29\x0f\x84\xdb\xc4" + "\x21\xc4\xc2\x75\x67\x89\x37\x0a", + .ilen = 512, + .result = "\x1a\x1d\xa9\x30\xad\xf9\x2f\x9b" + "\xb6\x1d\xae\xef\xf0\x2f\xf8\x5a" + "\x39\x3c\xbf\x2a\xb2\x45\xb2\x23" + "\x1b\x63\x3c\xcf\xaa\xbe\xcf\x4e" + "\xfa\xe8\x29\xc2\x20\x68\x2b\x3c" + "\x2e\x8b\xf7\x6e\x25\xbd\xe3\x3d" + "\x66\x27\xd6\xaf\xd6\x64\x3e\xe3" + "\xe8\x58\x46\x97\x39\x51\x07\xde" + "\xcb\x37\xbc\xa9\xc0\x5f\x75\xc3" + "\x0e\x84\x23\x1d\x16\xd4\x1c\x59" + "\x9c\x1a\x02\x55\xab\x3a\x97\x1d" + "\xdf\xdd\xc7\x06\x51\xd7\x70\xae" + "\x23\xc6\x8c\xf5\x1e\xa0\xe5\x82" + "\xb8\xb2\xbf\x04\xa0\x32\x8e\x68" + "\xeb\xaf\x6e\x2d\x94\x22\x2f\xce" + "\x4c\xb5\x59\xe2\xa2\x2f\xa0\x98" + "\x1a\x97\xc6\xd4\xb5\x00\x59\xf2" + "\x84\x14\x72\xb1\x9a\x6e\xa3\x7f" + "\xea\x20\xe7\xcb\x65\x77\x3a\xdf" + "\xc8\x97\x67\x15\xc2\x2a\x27\xcc" + "\x18\x55\xa1\x24\x0b\x24\x24\xaf" + "\x5b\xec\x68\xb8\xc8\xf5\xba\x63" + "\xff\xed\x89\xce\xd5\x3d\x88\xf3" + "\x25\xef\x05\x7c\x3a\xef\xeb\xd8" + "\x7a\x32\x0d\xd1\x1e\x58\x59\x99" + "\x90\x25\xb5\x26\xb0\xe3\x2b\x6c" + "\x4c\xa9\x8b\x84\x4f\x5e\x01\x50" + "\x41\x30\x58\xc5\x62\x74\x52\x1d" + "\x45\x24\x6a\x42\x64\x4f\x97\x1c" + "\xa8\x66\xb5\x6d\x79\xd4\x0d\x48" + "\xc5\x5f\xf3\x90\x32\xdd\xdd\xe1" + "\xe4\xa9\x9f\xfc\xc3\x52\x5a\x46" + "\xe4\x81\x84\x95\x36\x59\x7a\x6b" + "\xaa\xb3\x60\xad\xce\x9f\x9f\x28" + "\xe0\x01\x75\x22\xc4\x4e\xa9\x62" + "\x5c\x62\x0d\x00\xcb\x13\xe8\x43" + "\x72\xd4\x2d\x53\x46\xb5\xd1\x16" + "\x22\x18\xdf\x34\x33\xf5\xd6\x1c" + "\xb8\x79\x78\x97\x94\xff\x72\x13" + "\x4c\x27\xfc\xcb\xbf\x01\x53\xa6" + "\xb4\x50\x6e\xde\xdf\xb5\x43\xa4" + "\x59\xdf\x52\xf9\x7c\xe0\x11\x6f" + "\x2d\x14\x8e\x24\x61\x2c\xe1\x17" + "\xcc\xce\x51\x0c\x19\x8a\x82\x30" + "\x94\xd5\x3d\x6a\x53\x06\x5e\xbd" + "\xb7\xeb\xfa\xfd\x27\x51\xde\x85" + "\x1e\x86\x53\x11\x53\x94\x00\xee" + "\x2b\x8c\x08\x2a\xbf\xdd\xae\x11" + "\xcb\x1e\xa2\x07\x9a\x80\xcf\x62" + "\x9b\x09\xdc\x95\x3c\x96\x8e\xb1" + "\x09\xbd\xe4\xeb\xdb\xca\x70\x7a" + "\x9e\xfa\x31\x18\x45\x3c\x21\x33" + "\xb0\xb3\x2b\xea\xf3\x71\x2d\xe1" + "\x03\xad\x1b\x48\xd4\x67\x27\xf0" + "\x62\xe4\x3d\xfb\x9b\x08\x76\xe7" + "\xdd\x2b\x01\x39\x04\x5a\x58\x7a" + "\xf7\x11\x90\xec\xbd\x51\x5c\x32" + "\x6b\xd7\x35\x39\x02\x6b\xf2\xa6" + "\xd0\x0d\x07\xe1\x06\xc4\x5b\x7d" + "\xe4\x6a\xd7\xee\x15\x1f\x83\xb4" + "\xa3\xa7\x5e\xc3\x90\xb7\xef\xd3" + "\xb7\x4f\xf8\x92\x4c\xb7\x3c\x29" + "\xcd\x7e\x2b\x5d\x43\xea\x42\xe7" + "\x74\x3f\x7d\x58\x88\x75\xde\x3e", + .rlen = 512, + } +}; + +static struct cipher_testvec aes_lrw_dec_tv_template[] = { + /* from http://grouper.ieee.org/groups/1619/email/pdf00017.pdf */ + /* same as enc vectors with input and result reversed */ + { /* LRW-32-AES 1 */ + .key = "\x45\x62\xac\x25\xf8\x28\x17\x6d" + "\x4c\x26\x84\x14\xb5\x68\x01\x85" + "\x25\x8e\x2a\x05\xe7\x3e\x9d\x03" + 
"\xee\x5a\x83\x0c\xcc\x09\x4c\x87", + .klen = 32, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x01", + .input = "\xf1\xb2\x73\xcd\x65\xa3\xdf\x5f" + "\xe9\x5d\x48\x92\x54\x63\x4e\xb8", + .ilen = 16, + .result = "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x41\x42\x43\x44\x45\x46", + .rlen = 16, + }, { /* LRW-32-AES 2 */ + .key = "\x59\x70\x47\x14\xf5\x57\x47\x8c" + "\xd7\x79\xe8\x0f\x54\x88\x79\x44" + "\x0d\x48\xf0\xb7\xb1\x5a\x53\xea" + "\x1c\xaa\x6b\x29\xc2\xca\xfb\xaf", + .klen = 32, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x02", + .input = "\x00\xc8\x2b\xae\x95\xbb\xcd\xe5" + "\x27\x4f\x07\x69\xb2\x60\xe1\x36", + .ilen = 16, + .result = "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x41\x42\x43\x44\x45\x46", + .rlen = 16, + }, { /* LRW-32-AES 3 */ + .key = "\xd8\x2a\x91\x34\xb2\x6a\x56\x50" + "\x30\xfe\x69\xe2\x37\x7f\x98\x47" + "\xcd\xf9\x0b\x16\x0c\x64\x8f\xb6" + "\xb0\x0d\x0d\x1b\xae\x85\x87\x1f", + .klen = 32, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x02\x00\x00\x00\x00", + .input = "\x76\x32\x21\x83\xed\x8f\xf1\x82" + "\xf9\x59\x62\x03\x69\x0e\x5e\x01", + .ilen = 16, + .result = "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x41\x42\x43\x44\x45\x46", + .rlen = 16, + }, { /* LRW-32-AES 4 */ + .key = "\x0f\x6a\xef\xf8\xd3\xd2\xbb\x15" + "\x25\x83\xf7\x3c\x1f\x01\x28\x74" + "\xca\xc6\xbc\x35\x4d\x4a\x65\x54" + "\x90\xae\x61\xcf\x7b\xae\xbd\xcc" + "\xad\xe4\x94\xc5\x4a\x29\xae\x70", + .klen = 40, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x01", + .input = "\x9c\x0f\x15\x2f\x55\xa2\xd8\xf0" + "\xd6\x7b\x8f\x9e\x28\x22\xbc\x41", + .ilen = 16, + .result = "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x41\x42\x43\x44\x45\x46", + .rlen = 16, + }, { /* LRW-32-AES 5 */ + .key = "\x8a\xd4\xee\x10\x2f\xbd\x81\xff" + "\xf8\x86\xce\xac\x93\xc5\xad\xc6" + "\xa0\x19\x07\xc0\x9d\xf7\xbb\xdd" + "\x52\x13\xb2\xb7\xf0\xff\x11\xd8" + "\xd6\x08\xd0\xcd\x2e\xb1\x17\x6f", + .klen = 40, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x02\x00\x00\x00\x00", + .input = "\xd4\x27\x6a\x7f\x14\x91\x3d\x65" + "\xc8\x60\x48\x02\x87\xe3\x34\x06", + .ilen = 16, + .result = "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x41\x42\x43\x44\x45\x46", + .rlen = 16, + }, { /* LRW-32-AES 6 */ + .key = "\xf8\xd4\x76\xff\xd6\x46\xee\x6c" + "\x23\x84\xcb\x1c\x77\xd6\x19\x5d" + "\xfe\xf1\xa9\xf3\x7b\xbc\x8d\x21" + "\xa7\x9c\x21\xf8\xcb\x90\x02\x89" + "\xa8\x45\x34\x8e\xc8\xc5\xb5\xf1" + "\x26\xf5\x0e\x76\xfe\xfd\x1b\x1e", + .klen = 48, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x01", + .input = "\xbd\x06\xb8\xe1\xdb\x98\x89\x9e" + "\xc4\x98\xe4\x91\xcf\x1c\x70\x2b", + .ilen = 16, + .result = "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x41\x42\x43\x44\x45\x46", + .rlen = 16, + }, { /* LRW-32-AES 7 */ + .key = "\xfb\x76\x15\xb2\x3d\x80\x89\x1d" + "\xd4\x70\x98\x0b\xc7\x95\x84\xc8" + "\xb2\xfb\x64\xce\x60\x97\x87\x8d" + "\x17\xfc\xe4\x5a\x49\xe8\x30\xb7" + "\x6e\x78\x17\xe7\x2d\x5e\x12\xd4" + "\x60\x64\x04\x7a\xf1\x2f\x9e\x0c", + .klen = 48, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x02\x00\x00\x00\x00", + .input = "\x5b\x90\x8e\xc1\xab\xdd\x67\x5f" + "\x3d\x69\x8a\x95\x53\xc8\x9c\xe5", + .ilen = 16, + .result = "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x41\x42\x43\x44\x45\x46", + .rlen = 16, + }, { +/* http://www.mail-archive.com/stds-p1619@listserv.ieee.org/msg00173.html */ + .key = "\xf8\xd4\x76\xff\xd6\x46\xee\x6c" + 
"\x23\x84\xcb\x1c\x77\xd6\x19\x5d" + "\xfe\xf1\xa9\xf3\x7b\xbc\x8d\x21" + "\xa7\x9c\x21\xf8\xcb\x90\x02\x89" + "\xa8\x45\x34\x8e\xc8\xc5\xb5\xf1" + "\x26\xf5\x0e\x76\xfe\xfd\x1b\x1e", + .klen = 48, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x01", + .input = "\x1a\x1d\xa9\x30\xad\xf9\x2f\x9b" + "\xb6\x1d\xae\xef\xf0\x2f\xf8\x5a" + "\x39\x3c\xbf\x2a\xb2\x45\xb2\x23" + "\x1b\x63\x3c\xcf\xaa\xbe\xcf\x4e" + "\xfa\xe8\x29\xc2\x20\x68\x2b\x3c" + "\x2e\x8b\xf7\x6e\x25\xbd\xe3\x3d" + "\x66\x27\xd6\xaf\xd6\x64\x3e\xe3" + "\xe8\x58\x46\x97\x39\x51\x07\xde" + "\xcb\x37\xbc\xa9\xc0\x5f\x75\xc3" + "\x0e\x84\x23\x1d\x16\xd4\x1c\x59" + "\x9c\x1a\x02\x55\xab\x3a\x97\x1d" + "\xdf\xdd\xc7\x06\x51\xd7\x70\xae" + "\x23\xc6\x8c\xf5\x1e\xa0\xe5\x82" + "\xb8\xb2\xbf\x04\xa0\x32\x8e\x68" + "\xeb\xaf\x6e\x2d\x94\x22\x2f\xce" + "\x4c\xb5\x59\xe2\xa2\x2f\xa0\x98" + "\x1a\x97\xc6\xd4\xb5\x00\x59\xf2" + "\x84\x14\x72\xb1\x9a\x6e\xa3\x7f" + "\xea\x20\xe7\xcb\x65\x77\x3a\xdf" + "\xc8\x97\x67\x15\xc2\x2a\x27\xcc" + "\x18\x55\xa1\x24\x0b\x24\x24\xaf" + "\x5b\xec\x68\xb8\xc8\xf5\xba\x63" + "\xff\xed\x89\xce\xd5\x3d\x88\xf3" + "\x25\xef\x05\x7c\x3a\xef\xeb\xd8" + "\x7a\x32\x0d\xd1\x1e\x58\x59\x99" + "\x90\x25\xb5\x26\xb0\xe3\x2b\x6c" + "\x4c\xa9\x8b\x84\x4f\x5e\x01\x50" + "\x41\x30\x58\xc5\x62\x74\x52\x1d" + "\x45\x24\x6a\x42\x64\x4f\x97\x1c" + "\xa8\x66\xb5\x6d\x79\xd4\x0d\x48" + "\xc5\x5f\xf3\x90\x32\xdd\xdd\xe1" + "\xe4\xa9\x9f\xfc\xc3\x52\x5a\x46" + "\xe4\x81\x84\x95\x36\x59\x7a\x6b" + "\xaa\xb3\x60\xad\xce\x9f\x9f\x28" + "\xe0\x01\x75\x22\xc4\x4e\xa9\x62" + "\x5c\x62\x0d\x00\xcb\x13\xe8\x43" + "\x72\xd4\x2d\x53\x46\xb5\xd1\x16" + "\x22\x18\xdf\x34\x33\xf5\xd6\x1c" + "\xb8\x79\x78\x97\x94\xff\x72\x13" + "\x4c\x27\xfc\xcb\xbf\x01\x53\xa6" + "\xb4\x50\x6e\xde\xdf\xb5\x43\xa4" + "\x59\xdf\x52\xf9\x7c\xe0\x11\x6f" + "\x2d\x14\x8e\x24\x61\x2c\xe1\x17" + "\xcc\xce\x51\x0c\x19\x8a\x82\x30" + "\x94\xd5\x3d\x6a\x53\x06\x5e\xbd" + "\xb7\xeb\xfa\xfd\x27\x51\xde\x85" + "\x1e\x86\x53\x11\x53\x94\x00\xee" + "\x2b\x8c\x08\x2a\xbf\xdd\xae\x11" + "\xcb\x1e\xa2\x07\x9a\x80\xcf\x62" + "\x9b\x09\xdc\x95\x3c\x96\x8e\xb1" + "\x09\xbd\xe4\xeb\xdb\xca\x70\x7a" + "\x9e\xfa\x31\x18\x45\x3c\x21\x33" + "\xb0\xb3\x2b\xea\xf3\x71\x2d\xe1" + "\x03\xad\x1b\x48\xd4\x67\x27\xf0" + "\x62\xe4\x3d\xfb\x9b\x08\x76\xe7" + "\xdd\x2b\x01\x39\x04\x5a\x58\x7a" + "\xf7\x11\x90\xec\xbd\x51\x5c\x32" + "\x6b\xd7\x35\x39\x02\x6b\xf2\xa6" + "\xd0\x0d\x07\xe1\x06\xc4\x5b\x7d" + "\xe4\x6a\xd7\xee\x15\x1f\x83\xb4" + "\xa3\xa7\x5e\xc3\x90\xb7\xef\xd3" + "\xb7\x4f\xf8\x92\x4c\xb7\x3c\x29" + "\xcd\x7e\x2b\x5d\x43\xea\x42\xe7" + "\x74\x3f\x7d\x58\x88\x75\xde\x3e", + .ilen = 512, + .result = "\x05\x11\xb7\x18\xab\xc6\x2d\xac" + "\x70\x5d\xf6\x22\x94\xcd\xe5\x6c" + "\x17\x6b\xf6\x1c\xf0\xf3\x6e\xf8" + "\x50\x38\x1f\x71\x49\xb6\x57\xd6" + "\x8f\xcb\x8d\x6b\xe3\xa6\x29\x90" + "\xfe\x2a\x62\x82\xae\x6d\x8b\xf6" + "\xad\x1e\x9e\x20\x5f\x38\xbe\x04" + "\xda\x10\x8e\xed\xa2\xa4\x87\xab" + "\xda\x6b\xb4\x0c\x75\xba\xd3\x7c" + "\xc9\xac\x42\x31\x95\x7c\xc9\x04" + "\xeb\xd5\x6e\x32\x69\x8a\xdb\xa6" + "\x15\xd7\x3f\x4f\x2f\x66\x69\x03" + "\x9c\x1f\x54\x0f\xde\x1f\xf3\x65" + "\x4c\x96\x12\xed\x7c\x92\x03\x01" + "\x6f\xbc\x35\x93\xac\xf1\x27\xf1" + "\xb4\x96\x82\x5a\x5f\xb0\xa0\x50" + "\x89\xa4\x8e\x66\x44\x85\xcc\xfd" + "\x33\x14\x70\xe3\x96\xb2\xc3\xd3" + "\xbb\x54\x5a\x1a\xf9\x74\xa2\xc5" + "\x2d\x64\x75\xdd\xb4\x54\xe6\x74" + "\x8c\xd3\x9d\x9e\x86\xab\x51\x53" + "\xb7\x93\x3e\x6f\xd0\x4e\x2c\x40" + "\xf6\xa8\x2e\x3e\x9d\xf4\x66\xa5" + 
"\x76\x12\x73\x44\x1a\x56\xd7\x72" + "\x88\xcd\x21\x8c\x4c\x0f\xfe\xda" + "\x95\xe0\x3a\xa6\xa5\x84\x46\xcd" + "\xd5\x3e\x9d\x3a\xe2\x67\xe6\x60" + "\x1a\xe2\x70\x85\x58\xc2\x1b\x09" + "\xe1\xd7\x2c\xca\xad\xa8\x8f\xf9" + "\xac\xb3\x0e\xdb\xca\x2e\xe2\xb8" + "\x51\x71\xd9\x3c\x6c\xf1\x56\xf8" + "\xea\x9c\xf1\xfb\x0c\xe6\xb7\x10" + "\x1c\xf8\xa9\x7c\xe8\x53\x35\xc1" + "\x90\x3e\x76\x4a\x74\xa4\x21\x2c" + "\xf6\x2c\x4e\x0f\x94\x3a\x88\x2e" + "\x41\x09\x6a\x33\x7d\xf6\xdd\x3f" + "\x8d\x23\x31\x74\x84\xeb\x88\x6e" + "\xcc\xb9\xbc\x22\x83\x19\x07\x22" + "\xa5\x2d\xdf\xa5\xf3\x80\x85\x78" + "\x84\x39\x6a\x6d\x6a\x99\x4f\xa5" + "\x15\xfe\x46\xb0\xe4\x6c\xa5\x41" + "\x3c\xce\x8f\x42\x60\x71\xa7\x75" + "\x08\x40\x65\x8a\x82\xbf\xf5\x43" + "\x71\x96\xa9\x4d\x44\x8a\x20\xbe" + "\xfa\x4d\xbb\xc0\x7d\x31\x96\x65" + "\xe7\x75\xe5\x3e\xfd\x92\x3b\xc9" + "\x55\xbb\x16\x7e\xf7\xc2\x8c\xa4" + "\x40\x1d\xe5\xef\x0e\xdf\xe4\x9a" + "\x62\x73\x65\xfd\x46\x63\x25\x3d" + "\x2b\xaf\xe5\x64\xfe\xa5\x5c\xcf" + "\x24\xf3\xb4\xac\x64\xba\xdf\x4b" + "\xc6\x96\x7d\x81\x2d\x8d\x97\xf7" + "\xc5\x68\x77\x84\x32\x2b\xcc\x85" + "\x74\x96\xf0\x12\x77\x61\xb9\xeb" + "\x71\xaa\x82\xcb\x1c\xdb\x89\xc8" + "\xc6\xb5\xe3\x5c\x7d\x39\x07\x24" + "\xda\x39\x87\x45\xc0\x2b\xbb\x01" + "\xac\xbc\x2a\x5c\x7f\xfc\xe8\xce" + "\x6d\x9c\x6f\xed\xd3\xc1\xa1\xd6" + "\xc5\x55\xa9\x66\x2f\xe1\xc8\x32" + "\xa6\x5d\xa4\x3a\x98\x73\xe8\x45" + "\xa4\xc7\xa8\xb4\xf6\x13\x03\xf6" + "\xe9\x2e\xc4\x29\x0f\x84\xdb\xc4" + "\x21\xc4\xc2\x75\x67\x89\x37\x0a", + .rlen = 512, + } +}; + +static struct cipher_testvec aes_xts_enc_tv_template[] = { + /* http://grouper.ieee.org/groups/1619/email/pdf00086.pdf */ + { /* XTS-AES 1 */ + .key = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00", + .klen = 32, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00", + .input = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00", + .ilen = 32, + .result = "\x91\x7c\xf6\x9e\xbd\x68\xb2\xec" + "\x9b\x9f\xe9\xa3\xea\xdd\xa6\x92" + "\xcd\x43\xd2\xf5\x95\x98\xed\x85" + "\x8c\x02\xc2\x65\x2f\xbf\x92\x2e", + .rlen = 32, + }, { /* XTS-AES 2 */ + .key = "\x11\x11\x11\x11\x11\x11\x11\x11" + "\x11\x11\x11\x11\x11\x11\x11\x11" + "\x22\x22\x22\x22\x22\x22\x22\x22" + "\x22\x22\x22\x22\x22\x22\x22\x22", + .klen = 32, + .iv = "\x33\x33\x33\x33\x33\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00", + .input = "\x44\x44\x44\x44\x44\x44\x44\x44" + "\x44\x44\x44\x44\x44\x44\x44\x44" + "\x44\x44\x44\x44\x44\x44\x44\x44" + "\x44\x44\x44\x44\x44\x44\x44\x44", + .ilen = 32, + .result = "\xc4\x54\x18\x5e\x6a\x16\x93\x6e" + "\x39\x33\x40\x38\xac\xef\x83\x8b" + "\xfb\x18\x6f\xff\x74\x80\xad\xc4" + "\x28\x93\x82\xec\xd6\xd3\x94\xf0", + .rlen = 32, + }, { /* XTS-AES 3 */ + .key = "\xff\xfe\xfd\xfc\xfb\xfa\xf9\xf8" + "\xf7\xf6\xf5\xf4\xf3\xf2\xf1\xf0" + "\x22\x22\x22\x22\x22\x22\x22\x22" + "\x22\x22\x22\x22\x22\x22\x22\x22", + .klen = 32, + .iv = "\x33\x33\x33\x33\x33\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00", + .input = "\x44\x44\x44\x44\x44\x44\x44\x44" + "\x44\x44\x44\x44\x44\x44\x44\x44" + "\x44\x44\x44\x44\x44\x44\x44\x44" + "\x44\x44\x44\x44\x44\x44\x44\x44", + .ilen = 32, + .result = "\xaf\x85\x33\x6b\x59\x7a\xfc\x1a" + "\x90\x0b\x2e\xb2\x1e\xc9\x49\xd2" + "\x92\xdf\x4c\x04\x7e\x0b\x21\x53" + "\x21\x86\xa5\x97\x1a\x22\x7a\x89", + .rlen = 32, + }, { /* XTS-AES 4 */ + .key = 
"\x27\x18\x28\x18\x28\x45\x90\x45" + "\x23\x53\x60\x28\x74\x71\x35\x26" + "\x31\x41\x59\x26\x53\x58\x97\x93" + "\x23\x84\x62\x64\x33\x83\x27\x95", + .klen = 32, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00", + .input = "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f" + "\x10\x11\x12\x13\x14\x15\x16\x17" + "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f" + "\x20\x21\x22\x23\x24\x25\x26\x27" + "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f" + "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f" + "\x40\x41\x42\x43\x44\x45\x46\x47" + "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f" + "\x50\x51\x52\x53\x54\x55\x56\x57" + "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f" + "\x60\x61\x62\x63\x64\x65\x66\x67" + "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f" + "\x70\x71\x72\x73\x74\x75\x76\x77" + "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f" + "\x80\x81\x82\x83\x84\x85\x86\x87" + "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f" + "\x90\x91\x92\x93\x94\x95\x96\x97" + "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f" + "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7" + "\xa8\xa9\xaa\xab\xac\xad\xae\xaf" + "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7" + "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf" + "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7" + "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf" + "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7" + "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf" + "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7" + "\xe8\xe9\xea\xeb\xec\xed\xee\xef" + "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7" + "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff" + "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f" + "\x10\x11\x12\x13\x14\x15\x16\x17" + "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f" + "\x20\x21\x22\x23\x24\x25\x26\x27" + "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f" + "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f" + "\x40\x41\x42\x43\x44\x45\x46\x47" + "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f" + "\x50\x51\x52\x53\x54\x55\x56\x57" + "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f" + "\x60\x61\x62\x63\x64\x65\x66\x67" + "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f" + "\x70\x71\x72\x73\x74\x75\x76\x77" + "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f" + "\x80\x81\x82\x83\x84\x85\x86\x87" + "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f" + "\x90\x91\x92\x93\x94\x95\x96\x97" + "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f" + "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7" + "\xa8\xa9\xaa\xab\xac\xad\xae\xaf" + "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7" + "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf" + "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7" + "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf" + "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7" + "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf" + "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7" + "\xe8\xe9\xea\xeb\xec\xed\xee\xef" + "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7" + "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff", + .ilen = 512, + .result = "\x27\xa7\x47\x9b\xef\xa1\xd4\x76" + "\x48\x9f\x30\x8c\xd4\xcf\xa6\xe2" + "\xa9\x6e\x4b\xbe\x32\x08\xff\x25" + "\x28\x7d\xd3\x81\x96\x16\xe8\x9c" + "\xc7\x8c\xf7\xf5\xe5\x43\x44\x5f" + "\x83\x33\xd8\xfa\x7f\x56\x00\x00" + "\x05\x27\x9f\xa5\xd8\xb5\xe4\xad" + "\x40\xe7\x36\xdd\xb4\xd3\x54\x12" + "\x32\x80\x63\xfd\x2a\xab\x53\xe5" + "\xea\x1e\x0a\x9f\x33\x25\x00\xa5" + "\xdf\x94\x87\xd0\x7a\x5c\x92\xcc" + "\x51\x2c\x88\x66\xc7\xe8\x60\xce" + "\x93\xfd\xf1\x66\xa2\x49\x12\xb4" + "\x22\x97\x61\x46\xae\x20\xce\x84" + "\x6b\xb7\xdc\x9b\xa9\x4a\x76\x7a" + "\xae\xf2\x0c\x0d\x61\xad\x02\x65" + "\x5e\xa9\x2d\xc4\xc4\xe4\x1a\x89" + "\x52\xc6\x51\xd3\x31\x74\xbe\x51" + "\xa1\x0c\x42\x11\x10\xe6\xd8\x15" + "\x88\xed\xe8\x21\x03\xa2\x52\xd8" + "\xa7\x50\xe8\x76\x8d\xef\xff\xed" + "\x91\x22\x81\x0a\xae\xb9\x9f\x91" + "\x72\xaf\x82\xb6\x04\xdc\x4b\x8e" + "\x51\xbc\xb0\x82\x35\xa6\xf4\x34" + 
"\x13\x32\xe4\xca\x60\x48\x2a\x4b" + "\xa1\xa0\x3b\x3e\x65\x00\x8f\xc5" + "\xda\x76\xb7\x0b\xf1\x69\x0d\xb4" + "\xea\xe2\x9c\x5f\x1b\xad\xd0\x3c" + "\x5c\xcf\x2a\x55\xd7\x05\xdd\xcd" + "\x86\xd4\x49\x51\x1c\xeb\x7e\xc3" + "\x0b\xf1\x2b\x1f\xa3\x5b\x91\x3f" + "\x9f\x74\x7a\x8a\xfd\x1b\x13\x0e" + "\x94\xbf\xf9\x4e\xff\xd0\x1a\x91" + "\x73\x5c\xa1\x72\x6a\xcd\x0b\x19" + "\x7c\x4e\x5b\x03\x39\x36\x97\xe1" + "\x26\x82\x6f\xb6\xbb\xde\x8e\xcc" + "\x1e\x08\x29\x85\x16\xe2\xc9\xed" + "\x03\xff\x3c\x1b\x78\x60\xf6\xde" + "\x76\xd4\xce\xcd\x94\xc8\x11\x98" + "\x55\xef\x52\x97\xca\x67\xe9\xf3" + "\xe7\xff\x72\xb1\xe9\x97\x85\xca" + "\x0a\x7e\x77\x20\xc5\xb3\x6d\xc6" + "\xd7\x2c\xac\x95\x74\xc8\xcb\xbc" + "\x2f\x80\x1e\x23\xe5\x6f\xd3\x44" + "\xb0\x7f\x22\x15\x4b\xeb\xa0\xf0" + "\x8c\xe8\x89\x1e\x64\x3e\xd9\x95" + "\xc9\x4d\x9a\x69\xc9\xf1\xb5\xf4" + "\x99\x02\x7a\x78\x57\x2a\xee\xbd" + "\x74\xd2\x0c\xc3\x98\x81\xc2\x13" + "\xee\x77\x0b\x10\x10\xe4\xbe\xa7" + "\x18\x84\x69\x77\xae\x11\x9f\x7a" + "\x02\x3a\xb5\x8c\xca\x0a\xd7\x52" + "\xaf\xe6\x56\xbb\x3c\x17\x25\x6a" + "\x9f\x6e\x9b\xf1\x9f\xdd\x5a\x38" + "\xfc\x82\xbb\xe8\x72\xc5\x53\x9e" + "\xdb\x60\x9e\xf4\xf7\x9c\x20\x3e" + "\xbb\x14\x0f\x2e\x58\x3c\xb2\xad" + "\x15\xb4\xaa\x5b\x65\x50\x16\xa8" + "\x44\x92\x77\xdb\xd4\x77\xef\x2c" + "\x8d\x6c\x01\x7d\xb7\x38\xb1\x8d" + "\xeb\x4a\x42\x7d\x19\x23\xce\x3f" + "\xf2\x62\x73\x57\x79\xa4\x18\xf2" + "\x0a\x28\x2d\xf9\x20\x14\x7b\xea" + "\xbe\x42\x1e\xe5\x31\x9d\x05\x68", + .rlen = 512, + }, { /* XTS-AES 10, XTS-AES-256, data unit 512 bytes */ + .key = "\x27\x18\x28\x18\x28\x45\x90\x45" + "\x23\x53\x60\x28\x74\x71\x35\x26" + "\x62\x49\x77\x57\x24\x70\x93\x69" + "\x99\x59\x57\x49\x66\x96\x76\x27" + "\x31\x41\x59\x26\x53\x58\x97\x93" + "\x23\x84\x62\x64\x33\x83\x27\x95" + "\x02\x88\x41\x97\x16\x93\x99\x37" + "\x51\x05\x82\x09\x74\x94\x45\x92", + .klen = 64, + .iv = "\xff\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00", + "\x00\x00\x00\x00\x00\x00\x00\x00", + "\x00\x00\x00\x00\x00\x00\x00\x00", + .input = "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f" + "\x10\x11\x12\x13\x14\x15\x16\x17" + "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f" + "\x20\x21\x22\x23\x24\x25\x26\x27" + "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f" + "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f" + "\x40\x41\x42\x43\x44\x45\x46\x47" + "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f" + "\x50\x51\x52\x53\x54\x55\x56\x57" + "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f" + "\x60\x61\x62\x63\x64\x65\x66\x67" + "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f" + "\x70\x71\x72\x73\x74\x75\x76\x77" + "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f" + "\x80\x81\x82\x83\x84\x85\x86\x87" + "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f" + "\x90\x91\x92\x93\x94\x95\x96\x97" + "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f" + "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7" + "\xa8\xa9\xaa\xab\xac\xad\xae\xaf" + "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7" + "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf" + "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7" + "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf" + "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7" + "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf" + "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7" + "\xe8\xe9\xea\xeb\xec\xed\xee\xef" + "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7" + "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff" + "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f" + "\x10\x11\x12\x13\x14\x15\x16\x17" + "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f" + "\x20\x21\x22\x23\x24\x25\x26\x27" + "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f" + "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f" + "\x40\x41\x42\x43\x44\x45\x46\x47" + 
"\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f" + "\x50\x51\x52\x53\x54\x55\x56\x57" + "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f" + "\x60\x61\x62\x63\x64\x65\x66\x67" + "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f" + "\x70\x71\x72\x73\x74\x75\x76\x77" + "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f" + "\x80\x81\x82\x83\x84\x85\x86\x87" + "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f" + "\x90\x91\x92\x93\x94\x95\x96\x97" + "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f" + "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7" + "\xa8\xa9\xaa\xab\xac\xad\xae\xaf" + "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7" + "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf" + "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7" + "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf" + "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7" + "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf" + "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7" + "\xe8\xe9\xea\xeb\xec\xed\xee\xef" + "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7" + "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff", + .ilen = 512, + .result = "\x1c\x3b\x3a\x10\x2f\x77\x03\x86" + "\xe4\x83\x6c\x99\xe3\x70\xcf\x9b" + "\xea\x00\x80\x3f\x5e\x48\x23\x57" + "\xa4\xae\x12\xd4\x14\xa3\xe6\x3b" + "\x5d\x31\xe2\x76\xf8\xfe\x4a\x8d" + "\x66\xb3\x17\xf9\xac\x68\x3f\x44" + "\x68\x0a\x86\xac\x35\xad\xfc\x33" + "\x45\xbe\xfe\xcb\x4b\xb1\x88\xfd" + "\x57\x76\x92\x6c\x49\xa3\x09\x5e" + "\xb1\x08\xfd\x10\x98\xba\xec\x70" + "\xaa\xa6\x69\x99\xa7\x2a\x82\xf2" + "\x7d\x84\x8b\x21\xd4\xa7\x41\xb0" + "\xc5\xcd\x4d\x5f\xff\x9d\xac\x89" + "\xae\xba\x12\x29\x61\xd0\x3a\x75" + "\x71\x23\xe9\x87\x0f\x8a\xcf\x10" + "\x00\x02\x08\x87\x89\x14\x29\xca" + "\x2a\x3e\x7a\x7d\x7d\xf7\xb1\x03" + "\x55\x16\x5c\x8b\x9a\x6d\x0a\x7d" + "\xe8\xb0\x62\xc4\x50\x0d\xc4\xcd" + "\x12\x0c\x0f\x74\x18\xda\xe3\xd0" + "\xb5\x78\x1c\x34\x80\x3f\xa7\x54" + "\x21\xc7\x90\xdf\xe1\xde\x18\x34" + "\xf2\x80\xd7\x66\x7b\x32\x7f\x6c" + "\x8c\xd7\x55\x7e\x12\xac\x3a\x0f" + "\x93\xec\x05\xc5\x2e\x04\x93\xef" + "\x31\xa1\x2d\x3d\x92\x60\xf7\x9a" + "\x28\x9d\x6a\x37\x9b\xc7\x0c\x50" + "\x84\x14\x73\xd1\xa8\xcc\x81\xec" + "\x58\x3e\x96\x45\xe0\x7b\x8d\x96" + "\x70\x65\x5b\xa5\xbb\xcf\xec\xc6" + "\xdc\x39\x66\x38\x0a\xd8\xfe\xcb" + "\x17\xb6\xba\x02\x46\x9a\x02\x0a" + "\x84\xe1\x8e\x8f\x84\x25\x20\x70" + "\xc1\x3e\x9f\x1f\x28\x9b\xe5\x4f" + "\xbc\x48\x14\x57\x77\x8f\x61\x60" + "\x15\xe1\x32\x7a\x02\xb1\x40\xf1" + "\x50\x5e\xb3\x09\x32\x6d\x68\x37" + "\x8f\x83\x74\x59\x5c\x84\x9d\x84" + "\xf4\xc3\x33\xec\x44\x23\x88\x51" + "\x43\xcb\x47\xbd\x71\xc5\xed\xae" + "\x9b\xe6\x9a\x2f\xfe\xce\xb1\xbe" + "\xc9\xde\x24\x4f\xbe\x15\x99\x2b" + "\x11\xb7\x7c\x04\x0f\x12\xbd\x8f" + "\x6a\x97\x5a\x44\xa0\xf9\x0c\x29" + "\xa9\xab\xc3\xd4\xd8\x93\x92\x72" + "\x84\xc5\x87\x54\xcc\xe2\x94\x52" + "\x9f\x86\x14\xdc\xd2\xab\xa9\x91" + "\x92\x5f\xed\xc4\xae\x74\xff\xac" + "\x6e\x33\x3b\x93\xeb\x4a\xff\x04" + "\x79\xda\x9a\x41\x0e\x44\x50\xe0" + "\xdd\x7a\xe4\xc6\xe2\x91\x09\x00" + "\x57\x5d\xa4\x01\xfc\x07\x05\x9f" + "\x64\x5e\x8b\x7e\x9b\xfd\xef\x33" + "\x94\x30\x54\xff\x84\x01\x14\x93" + "\xc2\x7b\x34\x29\xea\xed\xb4\xed" + "\x53\x76\x44\x1a\x77\xed\x43\x85" + "\x1a\xd7\x7f\x16\xf5\x41\xdf\xd2" + "\x69\xd5\x0d\x6a\x5f\x14\xfb\x0a" + "\xab\x1c\xbb\x4c\x15\x50\xbe\x97" + "\xf7\xab\x40\x66\x19\x3c\x4c\xaa" + "\x77\x3d\xad\x38\x01\x4b\xd2\x09" + "\x2f\xa7\x55\xc8\x24\xbb\x5e\x54" + "\xc4\xf3\x6f\xfd\xa9\xfc\xea\x70" + "\xb9\xc6\xe6\x93\xe1\x48\xc1\x51", + .rlen = 512, + } +}; + +static struct cipher_testvec aes_xts_dec_tv_template[] = { + /* http://grouper.ieee.org/groups/1619/email/pdf00086.pdf */ + { /* XTS-AES 1 */ + .key = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00" + 
"\x00\x00\x00\x00\x00\x00\x00\x00", + .klen = 32, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00", + .input = "\x91\x7c\xf6\x9e\xbd\x68\xb2\xec" + "\x9b\x9f\xe9\xa3\xea\xdd\xa6\x92" + "\xcd\x43\xd2\xf5\x95\x98\xed\x85" + "\x8c\x02\xc2\x65\x2f\xbf\x92\x2e", + .ilen = 32, + .result = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00", + .rlen = 32, + }, { /* XTS-AES 2 */ + .key = "\x11\x11\x11\x11\x11\x11\x11\x11" + "\x11\x11\x11\x11\x11\x11\x11\x11" + "\x22\x22\x22\x22\x22\x22\x22\x22" + "\x22\x22\x22\x22\x22\x22\x22\x22", + .klen = 32, + .iv = "\x33\x33\x33\x33\x33\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00", + .input = "\xc4\x54\x18\x5e\x6a\x16\x93\x6e" + "\x39\x33\x40\x38\xac\xef\x83\x8b" + "\xfb\x18\x6f\xff\x74\x80\xad\xc4" + "\x28\x93\x82\xec\xd6\xd3\x94\xf0", + .ilen = 32, + .result = "\x44\x44\x44\x44\x44\x44\x44\x44" + "\x44\x44\x44\x44\x44\x44\x44\x44" + "\x44\x44\x44\x44\x44\x44\x44\x44" + "\x44\x44\x44\x44\x44\x44\x44\x44", + .rlen = 32, + }, { /* XTS-AES 3 */ + .key = "\xff\xfe\xfd\xfc\xfb\xfa\xf9\xf8" + "\xf7\xf6\xf5\xf4\xf3\xf2\xf1\xf0" + "\x22\x22\x22\x22\x22\x22\x22\x22" + "\x22\x22\x22\x22\x22\x22\x22\x22", + .klen = 32, + .iv = "\x33\x33\x33\x33\x33\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00", + .input = "\xaf\x85\x33\x6b\x59\x7a\xfc\x1a" + "\x90\x0b\x2e\xb2\x1e\xc9\x49\xd2" + "\x92\xdf\x4c\x04\x7e\x0b\x21\x53" + "\x21\x86\xa5\x97\x1a\x22\x7a\x89", + .ilen = 32, + .result = "\x44\x44\x44\x44\x44\x44\x44\x44" + "\x44\x44\x44\x44\x44\x44\x44\x44" + "\x44\x44\x44\x44\x44\x44\x44\x44" + "\x44\x44\x44\x44\x44\x44\x44\x44", + .rlen = 32, + }, { /* XTS-AES 4 */ + .key = "\x27\x18\x28\x18\x28\x45\x90\x45" + "\x23\x53\x60\x28\x74\x71\x35\x26" + "\x31\x41\x59\x26\x53\x58\x97\x93" + "\x23\x84\x62\x64\x33\x83\x27\x95", + .klen = 32, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00", + .input = "\x27\xa7\x47\x9b\xef\xa1\xd4\x76" + "\x48\x9f\x30\x8c\xd4\xcf\xa6\xe2" + "\xa9\x6e\x4b\xbe\x32\x08\xff\x25" + "\x28\x7d\xd3\x81\x96\x16\xe8\x9c" + "\xc7\x8c\xf7\xf5\xe5\x43\x44\x5f" + "\x83\x33\xd8\xfa\x7f\x56\x00\x00" + "\x05\x27\x9f\xa5\xd8\xb5\xe4\xad" + "\x40\xe7\x36\xdd\xb4\xd3\x54\x12" + "\x32\x80\x63\xfd\x2a\xab\x53\xe5" + "\xea\x1e\x0a\x9f\x33\x25\x00\xa5" + "\xdf\x94\x87\xd0\x7a\x5c\x92\xcc" + "\x51\x2c\x88\x66\xc7\xe8\x60\xce" + "\x93\xfd\xf1\x66\xa2\x49\x12\xb4" + "\x22\x97\x61\x46\xae\x20\xce\x84" + "\x6b\xb7\xdc\x9b\xa9\x4a\x76\x7a" + "\xae\xf2\x0c\x0d\x61\xad\x02\x65" + "\x5e\xa9\x2d\xc4\xc4\xe4\x1a\x89" + "\x52\xc6\x51\xd3\x31\x74\xbe\x51" + "\xa1\x0c\x42\x11\x10\xe6\xd8\x15" + "\x88\xed\xe8\x21\x03\xa2\x52\xd8" + "\xa7\x50\xe8\x76\x8d\xef\xff\xed" + "\x91\x22\x81\x0a\xae\xb9\x9f\x91" + "\x72\xaf\x82\xb6\x04\xdc\x4b\x8e" + "\x51\xbc\xb0\x82\x35\xa6\xf4\x34" + "\x13\x32\xe4\xca\x60\x48\x2a\x4b" + "\xa1\xa0\x3b\x3e\x65\x00\x8f\xc5" + "\xda\x76\xb7\x0b\xf1\x69\x0d\xb4" + "\xea\xe2\x9c\x5f\x1b\xad\xd0\x3c" + "\x5c\xcf\x2a\x55\xd7\x05\xdd\xcd" + "\x86\xd4\x49\x51\x1c\xeb\x7e\xc3" + "\x0b\xf1\x2b\x1f\xa3\x5b\x91\x3f" + "\x9f\x74\x7a\x8a\xfd\x1b\x13\x0e" + "\x94\xbf\xf9\x4e\xff\xd0\x1a\x91" + "\x73\x5c\xa1\x72\x6a\xcd\x0b\x19" + "\x7c\x4e\x5b\x03\x39\x36\x97\xe1" + "\x26\x82\x6f\xb6\xbb\xde\x8e\xcc" + "\x1e\x08\x29\x85\x16\xe2\xc9\xed" + "\x03\xff\x3c\x1b\x78\x60\xf6\xde" + "\x76\xd4\xce\xcd\x94\xc8\x11\x98" + "\x55\xef\x52\x97\xca\x67\xe9\xf3" + "\xe7\xff\x72\xb1\xe9\x97\x85\xca" + "\x0a\x7e\x77\x20\xc5\xb3\x6d\xc6" + 
"\xd7\x2c\xac\x95\x74\xc8\xcb\xbc" + "\x2f\x80\x1e\x23\xe5\x6f\xd3\x44" + "\xb0\x7f\x22\x15\x4b\xeb\xa0\xf0" + "\x8c\xe8\x89\x1e\x64\x3e\xd9\x95" + "\xc9\x4d\x9a\x69\xc9\xf1\xb5\xf4" + "\x99\x02\x7a\x78\x57\x2a\xee\xbd" + "\x74\xd2\x0c\xc3\x98\x81\xc2\x13" + "\xee\x77\x0b\x10\x10\xe4\xbe\xa7" + "\x18\x84\x69\x77\xae\x11\x9f\x7a" + "\x02\x3a\xb5\x8c\xca\x0a\xd7\x52" + "\xaf\xe6\x56\xbb\x3c\x17\x25\x6a" + "\x9f\x6e\x9b\xf1\x9f\xdd\x5a\x38" + "\xfc\x82\xbb\xe8\x72\xc5\x53\x9e" + "\xdb\x60\x9e\xf4\xf7\x9c\x20\x3e" + "\xbb\x14\x0f\x2e\x58\x3c\xb2\xad" + "\x15\xb4\xaa\x5b\x65\x50\x16\xa8" + "\x44\x92\x77\xdb\xd4\x77\xef\x2c" + "\x8d\x6c\x01\x7d\xb7\x38\xb1\x8d" + "\xeb\x4a\x42\x7d\x19\x23\xce\x3f" + "\xf2\x62\x73\x57\x79\xa4\x18\xf2" + "\x0a\x28\x2d\xf9\x20\x14\x7b\xea" + "\xbe\x42\x1e\xe5\x31\x9d\x05\x68", + .ilen = 512, + .result = "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f" + "\x10\x11\x12\x13\x14\x15\x16\x17" + "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f" + "\x20\x21\x22\x23\x24\x25\x26\x27" + "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f" + "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f" + "\x40\x41\x42\x43\x44\x45\x46\x47" + "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f" + "\x50\x51\x52\x53\x54\x55\x56\x57" + "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f" + "\x60\x61\x62\x63\x64\x65\x66\x67" + "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f" + "\x70\x71\x72\x73\x74\x75\x76\x77" + "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f" + "\x80\x81\x82\x83\x84\x85\x86\x87" + "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f" + "\x90\x91\x92\x93\x94\x95\x96\x97" + "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f" + "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7" + "\xa8\xa9\xaa\xab\xac\xad\xae\xaf" + "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7" + "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf" + "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7" + "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf" + "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7" + "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf" + "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7" + "\xe8\xe9\xea\xeb\xec\xed\xee\xef" + "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7" + "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff" + "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f" + "\x10\x11\x12\x13\x14\x15\x16\x17" + "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f" + "\x20\x21\x22\x23\x24\x25\x26\x27" + "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f" + "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f" + "\x40\x41\x42\x43\x44\x45\x46\x47" + "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f" + "\x50\x51\x52\x53\x54\x55\x56\x57" + "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f" + "\x60\x61\x62\x63\x64\x65\x66\x67" + "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f" + "\x70\x71\x72\x73\x74\x75\x76\x77" + "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f" + "\x80\x81\x82\x83\x84\x85\x86\x87" + "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f" + "\x90\x91\x92\x93\x94\x95\x96\x97" + "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f" + "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7" + "\xa8\xa9\xaa\xab\xac\xad\xae\xaf" + "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7" + "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf" + "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7" + "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf" + "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7" + "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf" + "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7" + "\xe8\xe9\xea\xeb\xec\xed\xee\xef" + "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7" + "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff", + .rlen = 512, + }, { /* XTS-AES 10, XTS-AES-256, data unit 512 bytes */ + .key = "\x27\x18\x28\x18\x28\x45\x90\x45" + "\x23\x53\x60\x28\x74\x71\x35\x26" + "\x62\x49\x77\x57\x24\x70\x93\x69" + "\x99\x59\x57\x49\x66\x96\x76\x27" + "\x31\x41\x59\x26\x53\x58\x97\x93" + "\x23\x84\x62\x64\x33\x83\x27\x95" + "\x02\x88\x41\x97\x16\x93\x99\x37" + 
"\x51\x05\x82\x09\x74\x94\x45\x92", + .klen = 64, + .iv = "\xff\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00", + "\x00\x00\x00\x00\x00\x00\x00\x00", + "\x00\x00\x00\x00\x00\x00\x00\x00", + .input = "\x1c\x3b\x3a\x10\x2f\x77\x03\x86" + "\xe4\x83\x6c\x99\xe3\x70\xcf\x9b" + "\xea\x00\x80\x3f\x5e\x48\x23\x57" + "\xa4\xae\x12\xd4\x14\xa3\xe6\x3b" + "\x5d\x31\xe2\x76\xf8\xfe\x4a\x8d" + "\x66\xb3\x17\xf9\xac\x68\x3f\x44" + "\x68\x0a\x86\xac\x35\xad\xfc\x33" + "\x45\xbe\xfe\xcb\x4b\xb1\x88\xfd" + "\x57\x76\x92\x6c\x49\xa3\x09\x5e" + "\xb1\x08\xfd\x10\x98\xba\xec\x70" + "\xaa\xa6\x69\x99\xa7\x2a\x82\xf2" + "\x7d\x84\x8b\x21\xd4\xa7\x41\xb0" + "\xc5\xcd\x4d\x5f\xff\x9d\xac\x89" + "\xae\xba\x12\x29\x61\xd0\x3a\x75" + "\x71\x23\xe9\x87\x0f\x8a\xcf\x10" + "\x00\x02\x08\x87\x89\x14\x29\xca" + "\x2a\x3e\x7a\x7d\x7d\xf7\xb1\x03" + "\x55\x16\x5c\x8b\x9a\x6d\x0a\x7d" + "\xe8\xb0\x62\xc4\x50\x0d\xc4\xcd" + "\x12\x0c\x0f\x74\x18\xda\xe3\xd0" + "\xb5\x78\x1c\x34\x80\x3f\xa7\x54" + "\x21\xc7\x90\xdf\xe1\xde\x18\x34" + "\xf2\x80\xd7\x66\x7b\x32\x7f\x6c" + "\x8c\xd7\x55\x7e\x12\xac\x3a\x0f" + "\x93\xec\x05\xc5\x2e\x04\x93\xef" + "\x31\xa1\x2d\x3d\x92\x60\xf7\x9a" + "\x28\x9d\x6a\x37\x9b\xc7\x0c\x50" + "\x84\x14\x73\xd1\xa8\xcc\x81\xec" + "\x58\x3e\x96\x45\xe0\x7b\x8d\x96" + "\x70\x65\x5b\xa5\xbb\xcf\xec\xc6" + "\xdc\x39\x66\x38\x0a\xd8\xfe\xcb" + "\x17\xb6\xba\x02\x46\x9a\x02\x0a" + "\x84\xe1\x8e\x8f\x84\x25\x20\x70" + "\xc1\x3e\x9f\x1f\x28\x9b\xe5\x4f" + "\xbc\x48\x14\x57\x77\x8f\x61\x60" + "\x15\xe1\x32\x7a\x02\xb1\x40\xf1" + "\x50\x5e\xb3\x09\x32\x6d\x68\x37" + "\x8f\x83\x74\x59\x5c\x84\x9d\x84" + "\xf4\xc3\x33\xec\x44\x23\x88\x51" + "\x43\xcb\x47\xbd\x71\xc5\xed\xae" + "\x9b\xe6\x9a\x2f\xfe\xce\xb1\xbe" + "\xc9\xde\x24\x4f\xbe\x15\x99\x2b" + "\x11\xb7\x7c\x04\x0f\x12\xbd\x8f" + "\x6a\x97\x5a\x44\xa0\xf9\x0c\x29" + "\xa9\xab\xc3\xd4\xd8\x93\x92\x72" + "\x84\xc5\x87\x54\xcc\xe2\x94\x52" + "\x9f\x86\x14\xdc\xd2\xab\xa9\x91" + "\x92\x5f\xed\xc4\xae\x74\xff\xac" + "\x6e\x33\x3b\x93\xeb\x4a\xff\x04" + "\x79\xda\x9a\x41\x0e\x44\x50\xe0" + "\xdd\x7a\xe4\xc6\xe2\x91\x09\x00" + "\x57\x5d\xa4\x01\xfc\x07\x05\x9f" + "\x64\x5e\x8b\x7e\x9b\xfd\xef\x33" + "\x94\x30\x54\xff\x84\x01\x14\x93" + "\xc2\x7b\x34\x29\xea\xed\xb4\xed" + "\x53\x76\x44\x1a\x77\xed\x43\x85" + "\x1a\xd7\x7f\x16\xf5\x41\xdf\xd2" + "\x69\xd5\x0d\x6a\x5f\x14\xfb\x0a" + "\xab\x1c\xbb\x4c\x15\x50\xbe\x97" + "\xf7\xab\x40\x66\x19\x3c\x4c\xaa" + "\x77\x3d\xad\x38\x01\x4b\xd2\x09" + "\x2f\xa7\x55\xc8\x24\xbb\x5e\x54" + "\xc4\xf3\x6f\xfd\xa9\xfc\xea\x70" + "\xb9\xc6\xe6\x93\xe1\x48\xc1\x51", + .ilen = 512, + .result = "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f" + "\x10\x11\x12\x13\x14\x15\x16\x17" + "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f" + "\x20\x21\x22\x23\x24\x25\x26\x27" + "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f" + "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f" + "\x40\x41\x42\x43\x44\x45\x46\x47" + "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f" + "\x50\x51\x52\x53\x54\x55\x56\x57" + "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f" + "\x60\x61\x62\x63\x64\x65\x66\x67" + "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f" + "\x70\x71\x72\x73\x74\x75\x76\x77" + "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f" + "\x80\x81\x82\x83\x84\x85\x86\x87" + "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f" + "\x90\x91\x92\x93\x94\x95\x96\x97" + "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f" + "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7" + "\xa8\xa9\xaa\xab\xac\xad\xae\xaf" + "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7" + "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf" + "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7" + 
"\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf" + "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7" + "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf" + "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7" + "\xe8\xe9\xea\xeb\xec\xed\xee\xef" + "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7" + "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff" + "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f" + "\x10\x11\x12\x13\x14\x15\x16\x17" + "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f" + "\x20\x21\x22\x23\x24\x25\x26\x27" + "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f" + "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f" + "\x40\x41\x42\x43\x44\x45\x46\x47" + "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f" + "\x50\x51\x52\x53\x54\x55\x56\x57" + "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f" + "\x60\x61\x62\x63\x64\x65\x66\x67" + "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f" + "\x70\x71\x72\x73\x74\x75\x76\x77" + "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f" + "\x80\x81\x82\x83\x84\x85\x86\x87" + "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f" + "\x90\x91\x92\x93\x94\x95\x96\x97" + "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f" + "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7" + "\xa8\xa9\xaa\xab\xac\xad\xae\xaf" + "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7" + "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf" + "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7" + "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf" + "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7" + "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf" + "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7" + "\xe8\xe9\xea\xeb\xec\xed\xee\xef" + "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7" + "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff", + .rlen = 512, + + } +}; + + +static struct cipher_testvec aes_ctr_enc_tv_template[] = { + { /* From NIST Special Publication 800-38A, Appendix F.5 */ + .key = "\x2b\x7e\x15\x16\x28\xae\xd2\xa6" + "\xab\xf7\x15\x88\x09\xcf\x4f\x3c", + .klen = 16, + .iv = "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7" + "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff", + .input = "\x6b\xc1\xbe\xe2\x2e\x40\x9f\x96" + "\xe9\x3d\x7e\x11\x73\x93\x17\x2a" + "\xae\x2d\x8a\x57\x1e\x03\xac\x9c" + "\x9e\xb7\x6f\xac\x45\xaf\x8e\x51" + "\x30\xc8\x1c\x46\xa3\x5c\xe4\x11" + "\xe5\xfb\xc1\x19\x1a\x0a\x52\xef" + "\xf6\x9f\x24\x45\xdf\x4f\x9b\x17" + "\xad\x2b\x41\x7b\xe6\x6c\x37\x10", + .ilen = 64, + .result = "\x87\x4d\x61\x91\xb6\x20\xe3\x26" + "\x1b\xef\x68\x64\x99\x0d\xb6\xce" + "\x98\x06\xf6\x6b\x79\x70\xfd\xff" + "\x86\x17\x18\x7b\xb9\xff\xfd\xff" + "\x5a\xe4\xdf\x3e\xdb\xd5\xd3\x5e" + "\x5b\x4f\x09\x02\x0d\xb0\x3e\xab" + "\x1e\x03\x1d\xda\x2f\xbe\x03\xd1" + "\x79\x21\x70\xa0\xf3\x00\x9c\xee", + .rlen = 64, + }, { + .key = "\x8e\x73\xb0\xf7\xda\x0e\x64\x52" + "\xc8\x10\xf3\x2b\x80\x90\x79\xe5" + "\x62\xf8\xea\xd2\x52\x2c\x6b\x7b", + .klen = 24, + .iv = "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7" + "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff", + .input = "\x6b\xc1\xbe\xe2\x2e\x40\x9f\x96" + "\xe9\x3d\x7e\x11\x73\x93\x17\x2a" + "\xae\x2d\x8a\x57\x1e\x03\xac\x9c" + "\x9e\xb7\x6f\xac\x45\xaf\x8e\x51" + "\x30\xc8\x1c\x46\xa3\x5c\xe4\x11" + "\xe5\xfb\xc1\x19\x1a\x0a\x52\xef" + "\xf6\x9f\x24\x45\xdf\x4f\x9b\x17" + "\xad\x2b\x41\x7b\xe6\x6c\x37\x10", + .ilen = 64, + .result = "\x1a\xbc\x93\x24\x17\x52\x1c\xa2" + "\x4f\x2b\x04\x59\xfe\x7e\x6e\x0b" + "\x09\x03\x39\xec\x0a\xa6\xfa\xef" + "\xd5\xcc\xc2\xc6\xf4\xce\x8e\x94" + "\x1e\x36\xb2\x6b\xd1\xeb\xc6\x70" + "\xd1\xbd\x1d\x66\x56\x20\xab\xf7" + "\x4f\x78\xa7\xf6\xd2\x98\x09\x58" + "\x5a\x97\xda\xec\x58\xc6\xb0\x50", + .rlen = 64, + }, { + .key = "\x60\x3d\xeb\x10\x15\xca\x71\xbe" + "\x2b\x73\xae\xf0\x85\x7d\x77\x81" + "\x1f\x35\x2c\x07\x3b\x61\x08\xd7" + "\x2d\x98\x10\xa3\x09\x14\xdf\xf4", + .klen = 32, + .iv = "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7" + "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff", + .input = 
"\x6b\xc1\xbe\xe2\x2e\x40\x9f\x96" + "\xe9\x3d\x7e\x11\x73\x93\x17\x2a" + "\xae\x2d\x8a\x57\x1e\x03\xac\x9c" + "\x9e\xb7\x6f\xac\x45\xaf\x8e\x51" + "\x30\xc8\x1c\x46\xa3\x5c\xe4\x11" + "\xe5\xfb\xc1\x19\x1a\x0a\x52\xef" + "\xf6\x9f\x24\x45\xdf\x4f\x9b\x17" + "\xad\x2b\x41\x7b\xe6\x6c\x37\x10", + .ilen = 64, + .result = "\x60\x1e\xc3\x13\x77\x57\x89\xa5" + "\xb7\xa7\xf5\x04\xbb\xf3\xd2\x28" + "\xf4\x43\xe3\xca\x4d\x62\xb5\x9a" + "\xca\x84\xe9\x90\xca\xca\xf5\xc5" + "\x2b\x09\x30\xda\xa2\x3d\xe9\x4c" + "\xe8\x70\x17\xba\x2d\x84\x98\x8d" + "\xdf\xc9\xc5\x8d\xb6\x7a\xad\xa6" + "\x13\xc2\xdd\x08\x45\x79\x41\xa6", + .rlen = 64, + } +}; + +static struct cipher_testvec aes_ctr_dec_tv_template[] = { + { /* From NIST Special Publication 800-38A, Appendix F.5 */ + .key = "\x2b\x7e\x15\x16\x28\xae\xd2\xa6" + "\xab\xf7\x15\x88\x09\xcf\x4f\x3c", + .klen = 16, + .iv = "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7" + "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff", + .input = "\x87\x4d\x61\x91\xb6\x20\xe3\x26" + "\x1b\xef\x68\x64\x99\x0d\xb6\xce" + "\x98\x06\xf6\x6b\x79\x70\xfd\xff" + "\x86\x17\x18\x7b\xb9\xff\xfd\xff" + "\x5a\xe4\xdf\x3e\xdb\xd5\xd3\x5e" + "\x5b\x4f\x09\x02\x0d\xb0\x3e\xab" + "\x1e\x03\x1d\xda\x2f\xbe\x03\xd1" + "\x79\x21\x70\xa0\xf3\x00\x9c\xee", + .ilen = 64, + .result = "\x6b\xc1\xbe\xe2\x2e\x40\x9f\x96" + "\xe9\x3d\x7e\x11\x73\x93\x17\x2a" + "\xae\x2d\x8a\x57\x1e\x03\xac\x9c" + "\x9e\xb7\x6f\xac\x45\xaf\x8e\x51" + "\x30\xc8\x1c\x46\xa3\x5c\xe4\x11" + "\xe5\xfb\xc1\x19\x1a\x0a\x52\xef" + "\xf6\x9f\x24\x45\xdf\x4f\x9b\x17" + "\xad\x2b\x41\x7b\xe6\x6c\x37\x10", + .rlen = 64, + }, { + .key = "\x8e\x73\xb0\xf7\xda\x0e\x64\x52" + "\xc8\x10\xf3\x2b\x80\x90\x79\xe5" + "\x62\xf8\xea\xd2\x52\x2c\x6b\x7b", + .klen = 24, + .iv = "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7" + "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff", + .input = "\x1a\xbc\x93\x24\x17\x52\x1c\xa2" + "\x4f\x2b\x04\x59\xfe\x7e\x6e\x0b" + "\x09\x03\x39\xec\x0a\xa6\xfa\xef" + "\xd5\xcc\xc2\xc6\xf4\xce\x8e\x94" + "\x1e\x36\xb2\x6b\xd1\xeb\xc6\x70" + "\xd1\xbd\x1d\x66\x56\x20\xab\xf7" + "\x4f\x78\xa7\xf6\xd2\x98\x09\x58" + "\x5a\x97\xda\xec\x58\xc6\xb0\x50", + .ilen = 64, + .result = "\x6b\xc1\xbe\xe2\x2e\x40\x9f\x96" + "\xe9\x3d\x7e\x11\x73\x93\x17\x2a" + "\xae\x2d\x8a\x57\x1e\x03\xac\x9c" + "\x9e\xb7\x6f\xac\x45\xaf\x8e\x51" + "\x30\xc8\x1c\x46\xa3\x5c\xe4\x11" + "\xe5\xfb\xc1\x19\x1a\x0a\x52\xef" + "\xf6\x9f\x24\x45\xdf\x4f\x9b\x17" + "\xad\x2b\x41\x7b\xe6\x6c\x37\x10", + .rlen = 64, + }, { + .key = "\x60\x3d\xeb\x10\x15\xca\x71\xbe" + "\x2b\x73\xae\xf0\x85\x7d\x77\x81" + "\x1f\x35\x2c\x07\x3b\x61\x08\xd7" + "\x2d\x98\x10\xa3\x09\x14\xdf\xf4", + .klen = 32, + .iv = "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7" + "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff", + .input = "\x60\x1e\xc3\x13\x77\x57\x89\xa5" + "\xb7\xa7\xf5\x04\xbb\xf3\xd2\x28" + "\xf4\x43\xe3\xca\x4d\x62\xb5\x9a" + "\xca\x84\xe9\x90\xca\xca\xf5\xc5" + "\x2b\x09\x30\xda\xa2\x3d\xe9\x4c" + "\xe8\x70\x17\xba\x2d\x84\x98\x8d" + "\xdf\xc9\xc5\x8d\xb6\x7a\xad\xa6" + "\x13\xc2\xdd\x08\x45\x79\x41\xa6", + .ilen = 64, + .result = "\x6b\xc1\xbe\xe2\x2e\x40\x9f\x96" + "\xe9\x3d\x7e\x11\x73\x93\x17\x2a" + "\xae\x2d\x8a\x57\x1e\x03\xac\x9c" + "\x9e\xb7\x6f\xac\x45\xaf\x8e\x51" + "\x30\xc8\x1c\x46\xa3\x5c\xe4\x11" + "\xe5\xfb\xc1\x19\x1a\x0a\x52\xef" + "\xf6\x9f\x24\x45\xdf\x4f\x9b\x17" + "\xad\x2b\x41\x7b\xe6\x6c\x37\x10", + .rlen = 64, + } +}; + +static struct cipher_testvec aes_ctr_rfc3686_enc_tv_template[] = { + { /* From RFC 3686 */ + .key = "\xae\x68\x52\xf8\x12\x10\x67\xcc" + "\x4b\xf7\xa5\x76\x55\x77\xf3\x9e" + "\x00\x00\x00\x30", + .klen 
= 20, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x00", + .input = "Single block msg", + .ilen = 16, + .result = "\xe4\x09\x5d\x4f\xb7\xa7\xb3\x79" + "\x2d\x61\x75\xa3\x26\x13\x11\xb8", + .rlen = 16, + }, { + .key = "\x7e\x24\x06\x78\x17\xfa\xe0\xd7" + "\x43\xd6\xce\x1f\x32\x53\x91\x63" + "\x00\x6c\xb6\xdb", + .klen = 20, + .iv = "\xc0\x54\x3b\x59\xda\x48\xd9\x0b", + .input = "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f" + "\x10\x11\x12\x13\x14\x15\x16\x17" + "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + .ilen = 32, + .result = "\x51\x04\xa1\x06\x16\x8a\x72\xd9" + "\x79\x0d\x41\xee\x8e\xda\xd3\x88" + "\xeb\x2e\x1e\xfc\x46\xda\x57\xc8" + "\xfc\xe6\x30\xdf\x91\x41\xbe\x28", + .rlen = 32, + }, { + .key = "\x16\xaf\x5b\x14\x5f\xc9\xf5\x79" + "\xc1\x75\xf9\x3e\x3b\xfb\x0e\xed" + "\x86\x3d\x06\xcc\xfd\xb7\x85\x15" + "\x00\x00\x00\x48", + .klen = 28, + .iv = "\x36\x73\x3c\x14\x7d\x6d\x93\xcb", + .input = "Single block msg", + .ilen = 16, + .result = "\x4b\x55\x38\x4f\xe2\x59\xc9\xc8" + "\x4e\x79\x35\xa0\x03\xcb\xe9\x28", + .rlen = 16, + }, { + .key = "\x7c\x5c\xb2\x40\x1b\x3d\xc3\x3c" + "\x19\xe7\x34\x08\x19\xe0\xf6\x9c" + "\x67\x8c\x3d\xb8\xe6\xf6\xa9\x1a" + "\x00\x96\xb0\x3b", + .klen = 28, + .iv = "\x02\x0c\x6e\xad\xc2\xcb\x50\x0d", + .input = "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f" + "\x10\x11\x12\x13\x14\x15\x16\x17" + "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + .ilen = 32, + .result = "\x45\x32\x43\xfc\x60\x9b\x23\x32" + "\x7e\xdf\xaa\xfa\x71\x31\xcd\x9f" + "\x84\x90\x70\x1c\x5a\xd4\xa7\x9c" + "\xfc\x1f\xe0\xff\x42\xf4\xfb\x00", + .rlen = 32, + }, { + .key = "\x77\x6b\xef\xf2\x85\x1d\xb0\x6f" + "\x4c\x8a\x05\x42\xc8\x69\x6f\x6c" + "\x6a\x81\xaf\x1e\xec\x96\xb4\xd3" + "\x7f\xc1\xd6\x89\xe6\xc1\xc1\x04" + "\x00\x00\x00\x60", + .klen = 36, + .iv = "\xdb\x56\x72\xc9\x7a\xa8\xf0\xb2", + .input = "Single block msg", + .ilen = 16, + .result = "\x14\x5a\xd0\x1d\xbf\x82\x4e\xc7" + "\x56\x08\x63\xdc\x71\xe3\xe0\xc0", + .rlen = 16, + }, { + .key = "\xf6\xd6\x6d\x6b\xd5\x2d\x59\xbb" + "\x07\x96\x36\x58\x79\xef\xf8\x86" + "\xc6\x6d\xd5\x1a\x5b\x6a\x99\x74" + "\x4b\x50\x59\x0c\x87\xa2\x38\x84" + "\x00\xfa\xac\x24", + .klen = 36, + .iv = "\xc1\x58\x5e\xf1\x5a\x43\xd8\x75", + .input = "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f" + "\x10\x11\x12\x13\x14\x15\x16\x17" + "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + .ilen = 32, + .result = "\xf0\x5e\x23\x1b\x38\x94\x61\x2c" + "\x49\xee\x00\x0b\x80\x4e\xb2\xa9" + "\xb8\x30\x6b\x50\x8f\x83\x9d\x6a" + "\x55\x30\x83\x1d\x93\x44\xaf\x1c", + .rlen = 32, + }, { + // generated using Crypto++ + .key = "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f" + "\x10\x11\x12\x13\x14\x15\x16\x17" + "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f" + "\x00\x00\x00\x00", + .klen = 32 + 4, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x00", + .input = + "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f" + "\x10\x11\x12\x13\x14\x15\x16\x17" + "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f" + "\x20\x21\x22\x23\x24\x25\x26\x27" + "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f" + "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f" + "\x40\x41\x42\x43\x44\x45\x46\x47" + "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f" + "\x50\x51\x52\x53\x54\x55\x56\x57" + "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f" + "\x60\x61\x62\x63\x64\x65\x66\x67" + "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f" + "\x70\x71\x72\x73\x74\x75\x76\x77" + "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f" + "\x80\x81\x82\x83\x84\x85\x86\x87" + "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f" + 
"\x90\x91\x92\x93\x94\x95\x96\x97" + "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f" + "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7" + "\xa8\xa9\xaa\xab\xac\xad\xae\xaf" + "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7" + "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf" + "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7" + "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf" + "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7" + "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf" + "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7" + "\xe8\xe9\xea\xeb\xec\xed\xee\xef" + "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7" + "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff" + "\x00\x03\x06\x09\x0c\x0f\x12\x15" + "\x18\x1b\x1e\x21\x24\x27\x2a\x2d" + "\x30\x33\x36\x39\x3c\x3f\x42\x45" + "\x48\x4b\x4e\x51\x54\x57\x5a\x5d" + "\x60\x63\x66\x69\x6c\x6f\x72\x75" + "\x78\x7b\x7e\x81\x84\x87\x8a\x8d" + "\x90\x93\x96\x99\x9c\x9f\xa2\xa5" + "\xa8\xab\xae\xb1\xb4\xb7\xba\xbd" + "\xc0\xc3\xc6\xc9\xcc\xcf\xd2\xd5" + "\xd8\xdb\xde\xe1\xe4\xe7\xea\xed" + "\xf0\xf3\xf6\xf9\xfc\xff\x02\x05" + "\x08\x0b\x0e\x11\x14\x17\x1a\x1d" + "\x20\x23\x26\x29\x2c\x2f\x32\x35" + "\x38\x3b\x3e\x41\x44\x47\x4a\x4d" + "\x50\x53\x56\x59\x5c\x5f\x62\x65" + "\x68\x6b\x6e\x71\x74\x77\x7a\x7d" + "\x80\x83\x86\x89\x8c\x8f\x92\x95" + "\x98\x9b\x9e\xa1\xa4\xa7\xaa\xad" + "\xb0\xb3\xb6\xb9\xbc\xbf\xc2\xc5" + "\xc8\xcb\xce\xd1\xd4\xd7\xda\xdd" + "\xe0\xe3\xe6\xe9\xec\xef\xf2\xf5" + "\xf8\xfb\xfe\x01\x04\x07\x0a\x0d" + "\x10\x13\x16\x19\x1c\x1f\x22\x25" + "\x28\x2b\x2e\x31\x34\x37\x3a\x3d" + "\x40\x43\x46\x49\x4c\x4f\x52\x55" + "\x58\x5b\x5e\x61\x64\x67\x6a\x6d" + "\x70\x73\x76\x79\x7c\x7f\x82\x85" + "\x88\x8b\x8e\x91\x94\x97\x9a\x9d" + "\xa0\xa3\xa6\xa9\xac\xaf\xb2\xb5" + "\xb8\xbb\xbe\xc1\xc4\xc7\xca\xcd" + "\xd0\xd3\xd6\xd9\xdc\xdf\xe2\xe5" + "\xe8\xeb\xee\xf1\xf4\xf7\xfa\xfd" + "\x00\x05\x0a\x0f\x14\x19\x1e\x23" + "\x28\x2d\x32\x37\x3c\x41\x46\x4b" + "\x50\x55\x5a\x5f\x64\x69\x6e\x73" + "\x78\x7d\x82\x87\x8c\x91\x96\x9b" + "\xa0\xa5\xaa\xaf\xb4\xb9\xbe\xc3" + "\xc8\xcd\xd2\xd7\xdc\xe1\xe6\xeb" + "\xf0\xf5\xfa\xff\x04\x09\x0e\x13" + "\x18\x1d\x22\x27\x2c\x31\x36\x3b" + "\x40\x45\x4a\x4f\x54\x59\x5e\x63" + "\x68\x6d\x72\x77\x7c\x81\x86\x8b" + "\x90\x95\x9a\x9f\xa4\xa9\xae\xb3" + "\xb8\xbd\xc2\xc7\xcc\xd1\xd6\xdb" + "\xe0\xe5\xea\xef\xf4\xf9\xfe\x03" + "\x08\x0d\x12\x17\x1c\x21\x26\x2b" + "\x30\x35\x3a\x3f\x44\x49\x4e\x53" + "\x58\x5d\x62\x67\x6c\x71\x76\x7b" + "\x80\x85\x8a\x8f\x94\x99\x9e\xa3" + "\xa8\xad\xb2\xb7\xbc\xc1\xc6\xcb" + "\xd0\xd5\xda\xdf\xe4\xe9\xee\xf3" + "\xf8\xfd\x02\x07\x0c\x11\x16\x1b" + "\x20\x25\x2a\x2f\x34\x39\x3e\x43" + "\x48\x4d\x52\x57\x5c\x61\x66\x6b" + "\x70\x75\x7a\x7f\x84\x89\x8e\x93" + "\x98\x9d\xa2\xa7\xac\xb1\xb6\xbb" + "\xc0\xc5\xca\xcf\xd4\xd9\xde\xe3" + "\xe8\xed\xf2\xf7\xfc\x01\x06\x0b" + "\x10\x15\x1a\x1f\x24\x29\x2e\x33" + "\x38\x3d\x42\x47\x4c\x51\x56\x5b" + "\x60\x65\x6a\x6f\x74\x79\x7e\x83" + "\x88\x8d\x92\x97\x9c\xa1\xa6\xab" + "\xb0\xb5\xba\xbf\xc4\xc9\xce\xd3" + "\xd8\xdd\xe2\xe7\xec\xf1\xf6\xfb" + "\x00\x07\x0e\x15\x1c\x23\x2a\x31" + "\x38\x3f\x46\x4d\x54\x5b\x62\x69" + "\x70\x77\x7e\x85\x8c\x93\x9a\xa1" + "\xa8\xaf\xb6\xbd\xc4\xcb\xd2\xd9" + "\xe0\xe7\xee\xf5\xfc\x03\x0a\x11" + "\x18\x1f\x26\x2d\x34\x3b\x42\x49" + "\x50\x57\x5e\x65\x6c\x73\x7a\x81" + "\x88\x8f\x96\x9d\xa4\xab\xb2\xb9" + "\xc0\xc7\xce\xd5\xdc\xe3\xea\xf1" + "\xf8\xff\x06\x0d\x14\x1b\x22\x29" + "\x30\x37\x3e\x45\x4c\x53\x5a\x61" + "\x68\x6f\x76\x7d\x84\x8b\x92\x99" + "\xa0\xa7\xae\xb5\xbc\xc3\xca\xd1" + "\xd8\xdf\xe6\xed\xf4\xfb\x02\x09" + "\x10\x17\x1e\x25\x2c\x33\x3a\x41" + "\x48\x4f\x56\x5d\x64\x6b\x72\x79" + "\x80\x87\x8e\x95\x9c\xa3\xaa\xb1" + "\xb8\xbf\xc6\xcd\xd4\xdb\xe2\xe9" + 
"\xf0\xf7\xfe\x05\x0c\x13\x1a\x21" + "\x28\x2f\x36\x3d\x44\x4b\x52\x59" + "\x60\x67\x6e\x75\x7c\x83\x8a\x91" + "\x98\x9f\xa6\xad\xb4\xbb\xc2\xc9" + "\xd0\xd7\xde\xe5\xec\xf3\xfa\x01" + "\x08\x0f\x16\x1d\x24\x2b\x32\x39" + "\x40\x47\x4e\x55\x5c\x63\x6a\x71" + "\x78\x7f\x86\x8d\x94\x9b\xa2\xa9" + "\xb0\xb7\xbe\xc5\xcc\xd3\xda\xe1" + "\xe8\xef\xf6\xfd\x04\x0b\x12\x19" + "\x20\x27\x2e\x35\x3c\x43\x4a\x51" + "\x58\x5f\x66\x6d\x74\x7b\x82\x89" + "\x90\x97\x9e\xa5\xac\xb3\xba\xc1" + "\xc8\xcf\xd6\xdd\xe4\xeb\xf2\xf9" + "\x00\x09\x12\x1b\x24\x2d\x36\x3f" + "\x48\x51\x5a\x63\x6c\x75\x7e\x87" + "\x90\x99\xa2\xab\xb4\xbd\xc6\xcf" + "\xd8\xe1\xea\xf3\xfc\x05\x0e\x17" + "\x20\x29\x32\x3b\x44\x4d\x56\x5f" + "\x68\x71\x7a\x83\x8c\x95\x9e\xa7" + "\xb0\xb9\xc2\xcb\xd4\xdd\xe6\xef" + "\xf8\x01\x0a\x13\x1c\x25\x2e\x37" + "\x40\x49\x52\x5b\x64\x6d\x76\x7f" + "\x88\x91\x9a\xa3\xac\xb5\xbe\xc7" + "\xd0\xd9\xe2\xeb\xf4\xfd\x06\x0f" + "\x18\x21\x2a\x33\x3c\x45\x4e\x57" + "\x60\x69\x72\x7b\x84\x8d\x96\x9f" + "\xa8\xb1\xba\xc3\xcc\xd5\xde\xe7" + "\xf0\xf9\x02\x0b\x14\x1d\x26\x2f" + "\x38\x41\x4a\x53\x5c\x65\x6e\x77" + "\x80\x89\x92\x9b\xa4\xad\xb6\xbf" + "\xc8\xd1\xda\xe3\xec\xf5\xfe\x07" + "\x10\x19\x22\x2b\x34\x3d\x46\x4f" + "\x58\x61\x6a\x73\x7c\x85\x8e\x97" + "\xa0\xa9\xb2\xbb\xc4\xcd\xd6\xdf" + "\xe8\xf1\xfa\x03\x0c\x15\x1e\x27" + "\x30\x39\x42\x4b\x54\x5d\x66\x6f" + "\x78\x81\x8a\x93\x9c\xa5\xae\xb7" + "\xc0\xc9\xd2\xdb\xe4\xed\xf6\xff" + "\x08\x11\x1a\x23\x2c\x35\x3e\x47" + "\x50\x59\x62\x6b\x74\x7d\x86\x8f" + "\x98\xa1\xaa\xb3\xbc\xc5\xce\xd7" + "\xe0\xe9\xf2\xfb\x04\x0d\x16\x1f" + "\x28\x31\x3a\x43\x4c\x55\x5e\x67" + "\x70\x79\x82\x8b\x94\x9d\xa6\xaf" + "\xb8\xc1\xca\xd3\xdc\xe5\xee\xf7" + "\x00\x0b\x16\x21\x2c\x37\x42\x4d" + "\x58\x63\x6e\x79\x84\x8f\x9a\xa5" + "\xb0\xbb\xc6\xd1\xdc\xe7\xf2\xfd" + "\x08\x13\x1e\x29\x34\x3f\x4a\x55" + "\x60\x6b\x76\x81\x8c\x97\xa2\xad" + "\xb8\xc3\xce\xd9\xe4\xef\xfa\x05" + "\x10\x1b\x26\x31\x3c\x47\x52\x5d" + "\x68\x73\x7e\x89\x94\x9f\xaa\xb5" + "\xc0\xcb\xd6\xe1\xec\xf7\x02\x0d" + "\x18\x23\x2e\x39\x44\x4f\x5a\x65" + "\x70\x7b\x86\x91\x9c\xa7\xb2\xbd" + "\xc8\xd3\xde\xe9\xf4\xff\x0a\x15" + "\x20\x2b\x36\x41\x4c\x57\x62\x6d" + "\x78\x83\x8e\x99\xa4\xaf\xba\xc5" + "\xd0\xdb\xe6\xf1\xfc\x07\x12\x1d" + "\x28\x33\x3e\x49\x54\x5f\x6a\x75" + "\x80\x8b\x96\xa1\xac\xb7\xc2\xcd" + "\xd8\xe3\xee\xf9\x04\x0f\x1a\x25" + "\x30\x3b\x46\x51\x5c\x67\x72\x7d" + "\x88\x93\x9e\xa9\xb4\xbf\xca\xd5" + "\xe0\xeb\xf6\x01\x0c\x17\x22\x2d" + "\x38\x43\x4e\x59\x64\x6f\x7a\x85" + "\x90\x9b\xa6\xb1\xbc\xc7\xd2\xdd" + "\xe8\xf3\xfe\x09\x14\x1f\x2a\x35" + "\x40\x4b\x56\x61\x6c\x77\x82\x8d" + "\x98\xa3\xae\xb9\xc4\xcf\xda\xe5" + "\xf0\xfb\x06\x11\x1c\x27\x32\x3d" + "\x48\x53\x5e\x69\x74\x7f\x8a\x95" + "\xa0\xab\xb6\xc1\xcc\xd7\xe2\xed" + "\xf8\x03\x0e\x19\x24\x2f\x3a\x45" + "\x50\x5b\x66\x71\x7c\x87\x92\x9d" + "\xa8\xb3\xbe\xc9\xd4\xdf\xea\xf5" + "\x00\x0d\x1a\x27\x34\x41\x4e\x5b" + "\x68\x75\x82\x8f\x9c\xa9\xb6\xc3" + "\xd0\xdd\xea\xf7\x04\x11\x1e\x2b" + "\x38\x45\x52\x5f\x6c\x79\x86\x93" + "\xa0\xad\xba\xc7\xd4\xe1\xee\xfb" + "\x08\x15\x22\x2f\x3c\x49\x56\x63" + "\x70\x7d\x8a\x97\xa4\xb1\xbe\xcb" + "\xd8\xe5\xf2\xff\x0c\x19\x26\x33" + "\x40\x4d\x5a\x67\x74\x81\x8e\x9b" + "\xa8\xb5\xc2\xcf\xdc\xe9\xf6\x03" + "\x10\x1d\x2a\x37\x44\x51\x5e\x6b" + "\x78\x85\x92\x9f\xac\xb9\xc6\xd3" + "\xe0\xed\xfa\x07\x14\x21\x2e\x3b" + "\x48\x55\x62\x6f\x7c\x89\x96\xa3" + "\xb0\xbd\xca\xd7\xe4\xf1\xfe\x0b" + "\x18\x25\x32\x3f\x4c\x59\x66\x73" + "\x80\x8d\x9a\xa7\xb4\xc1\xce\xdb" + "\xe8\xf5\x02\x0f\x1c\x29\x36\x43" + 
"\x50\x5d\x6a\x77\x84\x91\x9e\xab" + "\xb8\xc5\xd2\xdf\xec\xf9\x06\x13" + "\x20\x2d\x3a\x47\x54\x61\x6e\x7b" + "\x88\x95\xa2\xaf\xbc\xc9\xd6\xe3" + "\xf0\xfd\x0a\x17\x24\x31\x3e\x4b" + "\x58\x65\x72\x7f\x8c\x99\xa6\xb3" + "\xc0\xcd\xda\xe7\xf4\x01\x0e\x1b" + "\x28\x35\x42\x4f\x5c\x69\x76\x83" + "\x90\x9d\xaa\xb7\xc4\xd1\xde\xeb" + "\xf8\x05\x12\x1f\x2c\x39\x46\x53" + "\x60\x6d\x7a\x87\x94\xa1\xae\xbb" + "\xc8\xd5\xe2\xef\xfc\x09\x16\x23" + "\x30\x3d\x4a\x57\x64\x71\x7e\x8b" + "\x98\xa5\xb2\xbf\xcc\xd9\xe6\xf3" + "\x00\x0f\x1e\x2d\x3c\x4b\x5a\x69" + "\x78\x87\x96\xa5\xb4\xc3\xd2\xe1" + "\xf0\xff\x0e\x1d\x2c\x3b\x4a\x59" + "\x68\x77\x86\x95\xa4\xb3\xc2\xd1" + "\xe0\xef\xfe\x0d\x1c\x2b\x3a\x49" + "\x58\x67\x76\x85\x94\xa3\xb2\xc1" + "\xd0\xdf\xee\xfd\x0c\x1b\x2a\x39" + "\x48\x57\x66\x75\x84\x93\xa2\xb1" + "\xc0\xcf\xde\xed\xfc\x0b\x1a\x29" + "\x38\x47\x56\x65\x74\x83\x92\xa1" + "\xb0\xbf\xce\xdd\xec\xfb\x0a\x19" + "\x28\x37\x46\x55\x64\x73\x82\x91" + "\xa0\xaf\xbe\xcd\xdc\xeb\xfa\x09" + "\x18\x27\x36\x45\x54\x63\x72\x81" + "\x90\x9f\xae\xbd\xcc\xdb\xea\xf9" + "\x08\x17\x26\x35\x44\x53\x62\x71" + "\x80\x8f\x9e\xad\xbc\xcb\xda\xe9" + "\xf8\x07\x16\x25\x34\x43\x52\x61" + "\x70\x7f\x8e\x9d\xac\xbb\xca\xd9" + "\xe8\xf7\x06\x15\x24\x33\x42\x51" + "\x60\x6f\x7e\x8d\x9c\xab\xba\xc9" + "\xd8\xe7\xf6\x05\x14\x23\x32\x41" + "\x50\x5f\x6e\x7d\x8c\x9b\xaa\xb9" + "\xc8\xd7\xe6\xf5\x04\x13\x22\x31" + "\x40\x4f\x5e\x6d\x7c\x8b\x9a\xa9" + "\xb8\xc7\xd6\xe5\xf4\x03\x12\x21" + "\x30\x3f\x4e\x5d\x6c\x7b\x8a\x99" + "\xa8\xb7\xc6\xd5\xe4\xf3\x02\x11" + "\x20\x2f\x3e\x4d\x5c\x6b\x7a\x89" + "\x98\xa7\xb6\xc5\xd4\xe3\xf2\x01" + "\x10\x1f\x2e\x3d\x4c\x5b\x6a\x79" + "\x88\x97\xa6\xb5\xc4\xd3\xe2\xf1" + "\x00\x11\x22\x33\x44\x55\x66\x77" + "\x88\x99\xaa\xbb\xcc\xdd\xee\xff" + "\x10\x21\x32\x43\x54\x65\x76\x87" + "\x98\xa9\xba\xcb\xdc\xed\xfe\x0f" + "\x20\x31\x42\x53\x64\x75\x86\x97" + "\xa8\xb9\xca\xdb\xec\xfd\x0e\x1f" + "\x30\x41\x52\x63\x74\x85\x96\xa7" + "\xb8\xc9\xda\xeb\xfc\x0d\x1e\x2f" + "\x40\x51\x62\x73\x84\x95\xa6\xb7" + "\xc8\xd9\xea\xfb\x0c\x1d\x2e\x3f" + "\x50\x61\x72\x83\x94\xa5\xb6\xc7" + "\xd8\xe9\xfa\x0b\x1c\x2d\x3e\x4f" + "\x60\x71\x82\x93\xa4\xb5\xc6\xd7" + "\xe8\xf9\x0a\x1b\x2c\x3d\x4e\x5f" + "\x70\x81\x92\xa3\xb4\xc5\xd6\xe7" + "\xf8\x09\x1a\x2b\x3c\x4d\x5e\x6f" + "\x80\x91\xa2\xb3\xc4\xd5\xe6\xf7" + "\x08\x19\x2a\x3b\x4c\x5d\x6e\x7f" + "\x90\xa1\xb2\xc3\xd4\xe5\xf6\x07" + "\x18\x29\x3a\x4b\x5c\x6d\x7e\x8f" + "\xa0\xb1\xc2\xd3\xe4\xf5\x06\x17" + "\x28\x39\x4a\x5b\x6c\x7d\x8e\x9f" + "\xb0\xc1\xd2\xe3\xf4\x05\x16\x27" + "\x38\x49\x5a\x6b\x7c\x8d\x9e\xaf" + "\xc0\xd1\xe2\xf3\x04\x15\x26\x37" + "\x48\x59\x6a\x7b\x8c\x9d\xae\xbf" + "\xd0\xe1\xf2\x03\x14\x25\x36\x47" + "\x58\x69\x7a\x8b\x9c\xad\xbe\xcf" + "\xe0\xf1\x02\x13\x24\x35\x46\x57" + "\x68\x79\x8a\x9b\xac\xbd\xce\xdf" + "\xf0\x01\x12\x23\x34\x45\x56\x67" + "\x78\x89\x9a\xab\xbc\xcd\xde\xef" + "\x00\x13\x26\x39\x4c\x5f\x72\x85" + "\x98\xab\xbe\xd1\xe4\xf7\x0a\x1d" + "\x30\x43\x56\x69\x7c\x8f\xa2\xb5" + "\xc8\xdb\xee\x01\x14\x27\x3a\x4d" + "\x60\x73\x86\x99\xac\xbf\xd2\xe5" + "\xf8\x0b\x1e\x31\x44\x57\x6a\x7d" + "\x90\xa3\xb6\xc9\xdc\xef\x02\x15" + "\x28\x3b\x4e\x61\x74\x87\x9a\xad" + "\xc0\xd3\xe6\xf9\x0c\x1f\x32\x45" + "\x58\x6b\x7e\x91\xa4\xb7\xca\xdd" + "\xf0\x03\x16\x29\x3c\x4f\x62\x75" + "\x88\x9b\xae\xc1\xd4\xe7\xfa\x0d" + "\x20\x33\x46\x59\x6c\x7f\x92\xa5" + "\xb8\xcb\xde\xf1\x04\x17\x2a\x3d" + "\x50\x63\x76\x89\x9c\xaf\xc2\xd5" + "\xe8\xfb\x0e\x21\x34\x47\x5a\x6d" + "\x80\x93\xa6\xb9\xcc\xdf\xf2\x05" + "\x18\x2b\x3e\x51\x64\x77\x8a\x9d" + 
"\xb0\xc3\xd6\xe9\xfc\x0f\x22\x35" + "\x48\x5b\x6e\x81\x94\xa7\xba\xcd" + "\xe0\xf3\x06\x19\x2c\x3f\x52\x65" + "\x78\x8b\x9e\xb1\xc4\xd7\xea\xfd" + "\x10\x23\x36\x49\x5c\x6f\x82\x95" + "\xa8\xbb\xce\xe1\xf4\x07\x1a\x2d" + "\x40\x53\x66\x79\x8c\x9f\xb2\xc5" + "\xd8\xeb\xfe\x11\x24\x37\x4a\x5d" + "\x70\x83\x96\xa9\xbc\xcf\xe2\xf5" + "\x08\x1b\x2e\x41\x54\x67\x7a\x8d" + "\xa0\xb3\xc6\xd9\xec\xff\x12\x25" + "\x38\x4b\x5e\x71\x84\x97\xaa\xbd" + "\xd0\xe3\xf6\x09\x1c\x2f\x42\x55" + "\x68\x7b\x8e\xa1\xb4\xc7\xda\xed" + "\x00\x15\x2a\x3f\x54\x69\x7e\x93" + "\xa8\xbd\xd2\xe7\xfc\x11\x26\x3b" + "\x50\x65\x7a\x8f\xa4\xb9\xce\xe3" + "\xf8\x0d\x22\x37\x4c\x61\x76\x8b" + "\xa0\xb5\xca\xdf\xf4\x09\x1e\x33" + "\x48\x5d\x72\x87\x9c\xb1\xc6\xdb" + "\xf0\x05\x1a\x2f\x44\x59\x6e\x83" + "\x98\xad\xc2\xd7\xec\x01\x16\x2b" + "\x40\x55\x6a\x7f\x94\xa9\xbe\xd3" + "\xe8\xfd\x12\x27\x3c\x51\x66\x7b" + "\x90\xa5\xba\xcf\xe4\xf9\x0e\x23" + "\x38\x4d\x62\x77\x8c\xa1\xb6\xcb" + "\xe0\xf5\x0a\x1f\x34\x49\x5e\x73" + "\x88\x9d\xb2\xc7\xdc\xf1\x06\x1b" + "\x30\x45\x5a\x6f\x84\x99\xae\xc3" + "\xd8\xed\x02\x17\x2c\x41\x56\x6b" + "\x80\x95\xaa\xbf\xd4\xe9\xfe\x13" + "\x28\x3d\x52\x67\x7c\x91\xa6\xbb" + "\xd0\xe5\xfa\x0f\x24\x39\x4e\x63" + "\x78\x8d\xa2\xb7\xcc\xe1\xf6\x0b" + "\x20\x35\x4a\x5f\x74\x89\x9e\xb3" + "\xc8\xdd\xf2\x07\x1c\x31\x46\x5b" + "\x70\x85\x9a\xaf\xc4\xd9\xee\x03" + "\x18\x2d\x42\x57\x6c\x81\x96\xab" + "\xc0\xd5\xea\xff\x14\x29\x3e\x53" + "\x68\x7d\x92\xa7\xbc\xd1\xe6\xfb" + "\x10\x25\x3a\x4f\x64\x79\x8e\xa3" + "\xb8\xcd\xe2\xf7\x0c\x21\x36\x4b" + "\x60\x75\x8a\x9f\xb4\xc9\xde\xf3" + "\x08\x1d\x32\x47\x5c\x71\x86\x9b" + "\xb0\xc5\xda\xef\x04\x19\x2e\x43" + "\x58\x6d\x82\x97\xac\xc1\xd6\xeb" + "\x00\x17\x2e\x45\x5c\x73\x8a\xa1" + "\xb8\xcf\xe6\xfd\x14\x2b\x42\x59" + "\x70\x87\x9e\xb5\xcc\xe3\xfa\x11" + "\x28\x3f\x56\x6d\x84\x9b\xb2\xc9" + "\xe0\xf7\x0e\x25\x3c\x53\x6a\x81" + "\x98\xaf\xc6\xdd\xf4\x0b\x22\x39" + "\x50\x67\x7e\x95\xac\xc3\xda\xf1" + "\x08\x1f\x36\x4d\x64\x7b\x92\xa9" + "\xc0\xd7\xee\x05\x1c\x33\x4a\x61" + "\x78\x8f\xa6\xbd\xd4\xeb\x02\x19" + "\x30\x47\x5e\x75\x8c\xa3\xba\xd1" + "\xe8\xff\x16\x2d\x44\x5b\x72\x89" + "\xa0\xb7\xce\xe5\xfc\x13\x2a\x41" + "\x58\x6f\x86\x9d\xb4\xcb\xe2\xf9" + "\x10\x27\x3e\x55\x6c\x83\x9a\xb1" + "\xc8\xdf\xf6\x0d\x24\x3b\x52\x69" + "\x80\x97\xae\xc5\xdc\xf3\x0a\x21" + "\x38\x4f\x66\x7d\x94\xab\xc2\xd9" + "\xf0\x07\x1e\x35\x4c\x63\x7a\x91" + "\xa8\xbf\xd6\xed\x04\x1b\x32\x49" + "\x60\x77\x8e\xa5\xbc\xd3\xea\x01" + "\x18\x2f\x46\x5d\x74\x8b\xa2\xb9" + "\xd0\xe7\xfe\x15\x2c\x43\x5a\x71" + "\x88\x9f\xb6\xcd\xe4\xfb\x12\x29" + "\x40\x57\x6e\x85\x9c\xb3\xca\xe1" + "\xf8\x0f\x26\x3d\x54\x6b\x82\x99" + "\xb0\xc7\xde\xf5\x0c\x23\x3a\x51" + "\x68\x7f\x96\xad\xc4\xdb\xf2\x09" + "\x20\x37\x4e\x65\x7c\x93\xaa\xc1" + "\xd8\xef\x06\x1d\x34\x4b\x62\x79" + "\x90\xa7\xbe\xd5\xec\x03\x1a\x31" + "\x48\x5f\x76\x8d\xa4\xbb\xd2\xe9" + "\x00\x19\x32\x4b\x64\x7d\x96\xaf" + "\xc8\xe1\xfa\x13\x2c\x45\x5e\x77" + "\x90\xa9\xc2\xdb\xf4\x0d\x26\x3f" + "\x58\x71\x8a\xa3\xbc\xd5\xee\x07" + "\x20\x39\x52\x6b\x84\x9d\xb6\xcf" + "\xe8\x01\x1a\x33\x4c\x65\x7e\x97" + "\xb0\xc9\xe2\xfb\x14\x2d\x46\x5f" + "\x78\x91\xaa\xc3\xdc\xf5\x0e\x27" + "\x40\x59\x72\x8b\xa4\xbd\xd6\xef" + "\x08\x21\x3a\x53\x6c\x85\x9e\xb7" + "\xd0\xe9\x02\x1b\x34\x4d\x66\x7f" + "\x98\xb1\xca\xe3\xfc\x15\x2e\x47" + "\x60\x79\x92\xab\xc4\xdd\xf6\x0f" + "\x28\x41\x5a\x73\x8c\xa5\xbe\xd7" + "\xf0\x09\x22\x3b\x54\x6d\x86\x9f" + "\xb8\xd1\xea\x03\x1c\x35\x4e\x67" + "\x80\x99\xb2\xcb\xe4\xfd\x16\x2f" + "\x48\x61\x7a\x93\xac\xc5\xde\xf7" + 
"\x10\x29\x42\x5b\x74\x8d\xa6\xbf" + "\xd8\xf1\x0a\x23\x3c\x55\x6e\x87" + "\xa0\xb9\xd2\xeb\x04\x1d\x36\x4f" + "\x68\x81\x9a\xb3\xcc\xe5\xfe\x17" + "\x30\x49\x62\x7b\x94\xad\xc6\xdf" + "\xf8\x11\x2a\x43\x5c\x75\x8e\xa7" + "\xc0\xd9\xf2\x0b\x24\x3d\x56\x6f" + "\x88\xa1\xba\xd3\xec\x05\x1e\x37" + "\x50\x69\x82\x9b\xb4\xcd\xe6\xff" + "\x18\x31\x4a\x63\x7c\x95\xae\xc7" + "\xe0\xf9\x12\x2b\x44\x5d\x76\x8f" + "\xa8\xc1\xda\xf3\x0c\x25\x3e\x57" + "\x70\x89\xa2\xbb\xd4\xed\x06\x1f" + "\x38\x51\x6a\x83\x9c\xb5\xce\xe7" + "\x00\x1b\x36\x51\x6c\x87\xa2\xbd" + "\xd8\xf3\x0e\x29\x44\x5f\x7a\x95" + "\xb0\xcb\xe6\x01\x1c\x37\x52\x6d" + "\x88\xa3\xbe\xd9\xf4\x0f\x2a\x45" + "\x60\x7b\x96\xb1\xcc\xe7\x02\x1d" + "\x38\x53\x6e\x89\xa4\xbf\xda\xf5" + "\x10\x2b\x46\x61\x7c\x97\xb2\xcd" + "\xe8\x03\x1e\x39\x54\x6f\x8a\xa5" + "\xc0\xdb\xf6\x11\x2c\x47\x62\x7d" + "\x98\xb3\xce\xe9\x04\x1f\x3a\x55" + "\x70\x8b\xa6\xc1\xdc\xf7\x12\x2d" + "\x48\x63\x7e\x99\xb4\xcf\xea\x05" + "\x20\x3b\x56\x71\x8c\xa7\xc2\xdd" + "\xf8\x13\x2e\x49\x64\x7f\x9a\xb5" + "\xd0\xeb\x06\x21\x3c\x57\x72\x8d" + "\xa8\xc3\xde\xf9\x14\x2f\x4a\x65" + "\x80\x9b\xb6\xd1\xec\x07\x22\x3d" + "\x58\x73\x8e\xa9\xc4\xdf\xfa\x15" + "\x30\x4b\x66\x81\x9c\xb7\xd2\xed" + "\x08\x23\x3e\x59\x74\x8f\xaa\xc5" + "\xe0\xfb\x16\x31\x4c\x67\x82\x9d" + "\xb8\xd3\xee\x09\x24\x3f\x5a\x75" + "\x90\xab\xc6\xe1\xfc\x17\x32\x4d" + "\x68\x83\x9e\xb9\xd4\xef\x0a\x25" + "\x40\x5b\x76\x91\xac\xc7\xe2\xfd" + "\x18\x33\x4e\x69\x84\x9f\xba\xd5" + "\xf0\x0b\x26\x41\x5c\x77\x92\xad" + "\xc8\xe3\xfe\x19\x34\x4f\x6a\x85" + "\xa0\xbb\xd6\xf1\x0c\x27\x42\x5d" + "\x78\x93\xae\xc9\xe4\xff\x1a\x35" + "\x50\x6b\x86\xa1\xbc\xd7\xf2\x0d" + "\x28\x43\x5e\x79\x94\xaf\xca\xe5" + "\x00\x1d\x3a\x57\x74\x91\xae\xcb" + "\xe8\x05\x22\x3f\x5c\x79\x96\xb3" + "\xd0\xed\x0a\x27\x44\x61\x7e\x9b" + "\xb8\xd5\xf2\x0f\x2c\x49\x66\x83" + "\xa0\xbd\xda\xf7\x14\x31\x4e\x6b" + "\x88\xa5\xc2\xdf\xfc\x19\x36\x53" + "\x70\x8d\xaa\xc7\xe4\x01\x1e\x3b" + "\x58\x75\x92\xaf\xcc\xe9\x06\x23" + "\x40\x5d\x7a\x97\xb4\xd1\xee\x0b" + "\x28\x45\x62\x7f\x9c\xb9\xd6\xf3" + "\x10\x2d\x4a\x67\x84\xa1\xbe\xdb" + "\xf8\x15\x32\x4f\x6c\x89\xa6\xc3" + "\xe0\xfd\x1a\x37\x54\x71\x8e\xab" + "\xc8\xe5\x02\x1f\x3c\x59\x76\x93" + "\xb0\xcd\xea\x07\x24\x41\x5e\x7b" + "\x98\xb5\xd2\xef\x0c\x29\x46\x63" + "\x80\x9d\xba\xd7\xf4\x11\x2e\x4b" + "\x68\x85\xa2\xbf\xdc\xf9\x16\x33" + "\x50\x6d\x8a\xa7\xc4\xe1\xfe\x1b" + "\x38\x55\x72\x8f\xac\xc9\xe6\x03" + "\x20\x3d\x5a\x77\x94\xb1\xce\xeb" + "\x08\x25\x42\x5f\x7c\x99\xb6\xd3" + "\xf0\x0d\x2a\x47\x64\x81\x9e\xbb" + "\xd8\xf5\x12\x2f\x4c\x69\x86\xa3" + "\xc0\xdd\xfa\x17\x34\x51\x6e\x8b" + "\xa8\xc5\xe2\xff\x1c\x39\x56\x73" + "\x90\xad\xca\xe7\x04\x21\x3e\x5b" + "\x78\x95\xb2\xcf\xec\x09\x26\x43" + "\x60\x7d\x9a\xb7\xd4\xf1\x0e\x2b" + "\x48\x65\x82\x9f\xbc\xd9\xf6\x13" + "\x30\x4d\x6a\x87\xa4\xc1\xde\xfb" + "\x18\x35\x52\x6f\x8c\xa9\xc6\xe3" + "\x00\x1f\x3e\x5d\x7c\x9b\xba\xd9" + "\xf8\x17\x36\x55\x74\x93\xb2\xd1" + "\xf0\x0f\x2e\x4d\x6c\x8b\xaa\xc9" + "\xe8\x07\x26\x45\x64\x83\xa2\xc1" + "\xe0\xff\x1e\x3d\x5c\x7b\x9a\xb9" + "\xd8\xf7\x16\x35\x54\x73\x92\xb1" + "\xd0\xef\x0e\x2d\x4c\x6b\x8a\xa9" + "\xc8\xe7\x06\x25\x44\x63\x82\xa1" + "\xc0\xdf\xfe\x1d\x3c\x5b\x7a\x99" + "\xb8\xd7\xf6\x15\x34\x53\x72\x91" + "\xb0\xcf\xee\x0d\x2c\x4b\x6a\x89" + "\xa8\xc7\xe6\x05\x24\x43\x62\x81" + "\xa0\xbf\xde\xfd\x1c\x3b\x5a\x79" + "\x98\xb7\xd6\xf5\x14\x33\x52\x71" + "\x90\xaf\xce\xed\x0c\x2b\x4a\x69" + "\x88\xa7\xc6\xe5\x04\x23\x42\x61" + "\x80\x9f\xbe\xdd\xfc\x1b\x3a\x59" + "\x78\x97\xb6\xd5\xf4\x13\x32\x51" + 
"\x70\x8f\xae\xcd\xec\x0b\x2a\x49" + "\x68\x87\xa6\xc5\xe4\x03\x22\x41" + "\x60\x7f\x9e\xbd\xdc\xfb\x1a\x39" + "\x58\x77\x96\xb5\xd4\xf3\x12\x31" + "\x50\x6f\x8e\xad\xcc\xeb\x0a\x29" + "\x48\x67\x86\xa5\xc4\xe3\x02\x21" + "\x40\x5f\x7e\x9d\xbc\xdb\xfa\x19" + "\x38\x57\x76\x95\xb4\xd3\xf2\x11" + "\x30\x4f\x6e\x8d\xac\xcb\xea\x09" + "\x28\x47\x66\x85\xa4\xc3\xe2\x01" + "\x20\x3f\x5e\x7d\x9c\xbb\xda\xf9" + "\x18\x37\x56\x75\x94\xb3\xd2\xf1" + "\x10\x2f\x4e\x6d\x8c\xab\xca\xe9" + "\x08\x27\x46\x65\x84\xa3\xc2\xe1" + "\x00\x21\x42\x63", + .ilen = 4100, + .result = + "\xf0\x5c\x74\xad\x4e\xbc\x99\xe2" + "\xae\xff\x91\x3a\x44\xcf\x38\x32" + "\x1e\xad\xa7\xcd\xa1\x39\x95\xaa" + "\x10\xb1\xb3\x2e\x04\x31\x8f\x86" + "\xf2\x62\x74\x70\x0c\xa4\x46\x08" + "\xa8\xb7\x99\xa8\xe9\xd2\x73\x79" + "\x7e\x6e\xd4\x8f\x1e\xc7\x8e\x31" + "\x0b\xfa\x4b\xce\xfd\xf3\x57\x71" + "\xe9\x46\x03\xa5\x3d\x34\x00\xe2" + "\x18\xff\x75\x6d\x06\x2d\x00\xab" + "\xb9\x3e\x6c\x59\xc5\x84\x06\xb5" + "\x8b\xd0\x89\x9c\x4a\x79\x16\xc6" + "\x3d\x74\x54\xfa\x44\xcd\x23\x26" + "\x5c\xcf\x7e\x28\x92\x32\xbf\xdf" + "\xa7\x20\x3c\x74\x58\x2a\x9a\xde" + "\x61\x00\x1c\x4f\xff\x59\xc4\x22" + "\xac\x3c\xd0\xe8\x6c\xf9\x97\x1b" + "\x58\x9b\xad\x71\xe8\xa9\xb5\x0d" + "\xee\x2f\x04\x1f\x7f\xbc\x99\xee" + "\x84\xff\x42\x60\xdc\x3a\x18\xa5" + "\x81\xf9\xef\xdc\x7a\x0f\x65\x41" + "\x2f\xa3\xd3\xf9\xc2\xcb\xc0\x4d" + "\x8f\xd3\x76\x96\xad\x49\x6d\x38" + "\x3d\x39\x0b\x6c\x80\xb7\x54\x69" + "\xf0\x2c\x90\x02\x29\x0d\x1c\x12" + "\xad\x55\xc3\x8b\x68\xd9\xcc\xb3" + "\xb2\x64\x33\x90\x5e\xca\x4b\xe2" + "\xfb\x75\xdc\x63\xf7\x9f\x82\x74" + "\xf0\xc9\xaa\x7f\xe9\x2a\x9b\x33" + "\xbc\x88\x00\x7f\xca\xb2\x1f\x14" + "\xdb\xc5\x8e\x7b\x11\x3c\x3e\x08" + "\xf3\x83\xe8\xe0\x94\x86\x2e\x92" + "\x78\x6b\x01\xc9\xc7\x83\xba\x21" + "\x6a\x25\x15\x33\x4e\x45\x08\xec" + "\x35\xdb\xe0\x6e\x31\x51\x79\xa9" + "\x42\x44\x65\xc1\xa0\xf1\xf9\x2a" + "\x70\xd5\xb6\xc6\xc1\x8c\x39\xfc" + "\x25\xa6\x55\xd9\xdd\x2d\x4c\xec" + "\x49\xc6\xeb\x0e\xa8\x25\x2a\x16" + "\x1b\x66\x84\xda\xe2\x92\xe5\xc0" + "\xc8\x53\x07\xaf\x80\x84\xec\xfd" + "\xcd\xd1\x6e\xcd\x6f\x6a\xf5\x36" + "\xc5\x15\xe5\x25\x7d\x77\xd1\x1a" + "\x93\x36\xa9\xcf\x7c\xa4\x54\x4a" + "\x06\x51\x48\x4e\xf6\x59\x87\xd2" + "\x04\x02\xef\xd3\x44\xde\x76\x31" + "\xb3\x34\x17\x1b\x9d\x66\x11\x9f" + "\x1e\xcc\x17\xe9\xc7\x3c\x1b\xe7" + "\xcb\x50\x08\xfc\xdc\x2b\x24\xdb" + "\x65\x83\xd0\x3b\xe3\x30\xea\x94" + "\x6c\xe7\xe8\x35\x32\xc7\xdb\x64" + "\xb4\x01\xab\x36\x2c\x77\x13\xaf" + "\xf8\x2b\x88\x3f\x54\x39\xc4\x44" + "\xfe\xef\x6f\x68\x34\xbe\x0f\x05" + "\x16\x6d\xf6\x0a\x30\xe7\xe3\xed" + "\xc4\xde\x3c\x1b\x13\xd8\xdb\xfe" + "\x41\x62\xe5\x28\xd4\x8d\xa3\xc7" + "\x93\x97\xc6\x48\x45\x1d\x9f\x83" + "\xdf\x4b\x40\x3e\x42\x25\x87\x80" + "\x4c\x7d\xa8\xd4\x98\x23\x95\x75" + "\x41\x8c\xda\x41\x9b\xd4\xa7\x06" + "\xb5\xf1\x71\x09\x53\xbe\xca\xbf" + "\x32\x03\xed\xf0\x50\x1c\x56\x39" + "\x5b\xa4\x75\x18\xf7\x9b\x58\xef" + "\x53\xfc\x2a\x38\x23\x15\x75\xcd" + "\x45\xe5\x5a\x82\x55\xba\x21\xfa" + "\xd4\xbd\xc6\x94\x7c\xc5\x80\x12" + "\xf7\x4b\x32\xc4\x9a\x82\xd8\x28" + "\x8f\xd9\xc2\x0f\x60\x03\xbe\x5e" + "\x21\xd6\x5f\x58\xbf\x5c\xb1\x32" + "\x82\x8d\xa9\xe5\xf2\x66\x1a\xc0" + "\xa0\xbc\x58\x2f\x71\xf5\x2f\xed" + "\xd1\x26\xb9\xd8\x49\x5a\x07\x19" + "\x01\x7c\x59\xb0\xf8\xa4\xb7\xd3" + "\x7b\x1a\x8c\x38\xf4\x50\xa4\x59" + "\xb0\xcc\x41\x0b\x88\x7f\xe5\x31" + "\xb3\x42\xba\xa2\x7e\xd4\x32\x71" + "\x45\x87\x48\xa9\xc2\xf2\x89\xb3" + "\xe4\xa7\x7e\x52\x15\x61\xfa\xfe" + "\xc9\xdd\x81\xeb\x13\xab\xab\xc3" + 
"\x98\x59\xd8\x16\x3d\x14\x7a\x1c" + "\x3c\x41\x9a\x16\x16\x9b\xd2\xd2" + "\x69\x3a\x29\x23\xac\x86\x32\xa5" + "\x48\x9c\x9e\xf3\x47\x77\x81\x70" + "\x24\xe8\x85\xd2\xf5\xb5\xfa\xff" + "\x59\x6a\xd3\x50\x59\x43\x59\xde" + "\xd9\xf1\x55\xa5\x0c\xc3\x1a\x1a" + "\x18\x34\x0d\x1a\x63\x33\xed\x10" + "\xe0\x1d\x2a\x18\xd2\xc0\x54\xa8" + "\xca\xb5\x9a\xd3\xdd\xca\x45\x84" + "\x50\xe7\x0f\xfe\xa4\x99\x5a\xbe" + "\x43\x2d\x9a\xcb\x92\x3f\x5a\x1d" + "\x85\xd8\xc9\xdf\x68\xc9\x12\x80" + "\x56\x0c\xdc\x00\xdc\x3a\x7d\x9d" + "\xa3\xa2\xe8\x4d\xbf\xf9\x70\xa0" + "\xa4\x13\x4f\x6b\xaf\x0a\x89\x7f" + "\xda\xf0\xbf\x9b\xc8\x1d\xe5\xf8" + "\x2e\x8b\x07\xb5\x73\x1b\xcc\xa2" + "\xa6\xad\x30\xbc\x78\x3c\x5b\x10" + "\xfa\x5e\x62\x2d\x9e\x64\xb3\x33" + "\xce\xf9\x1f\x86\xe7\x8b\xa2\xb8" + "\xe8\x99\x57\x8c\x11\xed\x66\xd9" + "\x3c\x72\xb9\xc3\xe6\x4e\x17\x3a" + "\x6a\xcb\x42\x24\x06\xed\x3e\x4e" + "\xa3\xe8\x6a\x94\xda\x0d\x4e\xd5" + "\x14\x19\xcf\xb6\x26\xd8\x2e\xcc" + "\x64\x76\x38\x49\x4d\xfe\x30\x6d" + "\xe4\xc8\x8c\x7b\xc4\xe0\x35\xba" + "\x22\x6e\x76\xe1\x1a\xf2\x53\xc3" + "\x28\xa2\x82\x1f\x61\x69\xad\xc1" + "\x7b\x28\x4b\x1e\x6c\x85\x95\x9b" + "\x51\xb5\x17\x7f\x12\x69\x8c\x24" + "\xd5\xc7\x5a\x5a\x11\x54\xff\x5a" + "\xf7\x16\xc3\x91\xa6\xf0\xdc\x0a" + "\xb6\xa7\x4a\x0d\x7a\x58\xfe\xa5" + "\xf5\xcb\x8f\x7b\x0e\xea\x57\xe7" + "\xbd\x79\xd6\x1c\x88\x23\x6c\xf2" + "\x4d\x29\x77\x53\x35\x6a\x00\x8d" + "\xcd\xa3\x58\xbe\x77\x99\x18\xf8" + "\xe6\xe1\x8f\xe9\x37\x8f\xe3\xe2" + "\x5a\x8a\x93\x25\xaf\xf3\x78\x80" + "\xbe\xa6\x1b\xc6\xac\x8b\x1c\x91" + "\x58\xe1\x9f\x89\x35\x9d\x1d\x21" + "\x29\x9f\xf4\x99\x02\x27\x0f\xa8" + "\x4f\x79\x94\x2b\x33\x2c\xda\xa2" + "\x26\x39\x83\x94\xef\x27\xd8\x53" + "\x8f\x66\x0d\xe4\x41\x7d\x34\xcd" + "\x43\x7c\x95\x0a\x53\xef\x66\xda" + "\x7e\x9b\xf3\x93\xaf\xd0\x73\x71" + "\xba\x40\x9b\x74\xf8\xd7\xd7\x41" + "\x6d\xaf\x72\x9c\x8d\x21\x87\x3c" + "\xfd\x0a\x90\xa9\x47\x96\x9e\xd3" + "\x88\xee\x73\xcf\x66\x2f\x52\x56" + "\x6d\xa9\x80\x4c\xe2\x6f\x62\x88" + "\x3f\x0e\x54\x17\x48\x80\x5d\xd3" + "\xc3\xda\x25\x3d\xa1\xc8\xcb\x9f" + "\x9b\x70\xb3\xa1\xeb\x04\x52\xa1" + "\xf2\x22\x0f\xfc\xc8\x18\xfa\xf9" + "\x85\x9c\xf1\xac\xeb\x0c\x02\x46" + "\x75\xd2\xf5\x2c\xe3\xd2\x59\x94" + "\x12\xf3\x3c\xfc\xd7\x92\xfa\x36" + "\xba\x61\x34\x38\x7c\xda\x48\x3e" + "\x08\xc9\x39\x23\x5e\x02\x2c\x1a" + "\x18\x7e\xb4\xd9\xfd\x9e\x40\x02" + "\xb1\x33\x37\x32\xe7\xde\xd6\xd0" + "\x7c\x58\x65\x4b\xf8\x34\x27\x9c" + "\x44\xb4\xbd\xe9\xe9\x4c\x78\x7d" + "\x4b\x9f\xce\xb1\xcd\x47\xa5\x37" + "\xe5\x6d\xbd\xb9\x43\x94\x0a\xd4" + "\xd6\xf9\x04\x5f\xb5\x66\x6c\x1a" + "\x35\x12\xe3\x36\x28\x27\x36\x58" + "\x01\x2b\x79\xe4\xba\x6d\x10\x7d" + "\x65\xdf\x84\x95\xf4\xd5\xb6\x8f" + "\x2b\x9f\x96\x00\x86\x60\xf0\x21" + "\x76\xa8\x6a\x8c\x28\x1c\xb3\x6b" + "\x97\xd7\xb6\x53\x2a\xcc\xab\x40" + "\x9d\x62\x79\x58\x52\xe6\x65\xb7" + "\xab\x55\x67\x9c\x89\x7c\x03\xb0" + "\x73\x59\xc5\x81\xf5\x18\x17\x5c" + "\x89\xf3\x78\x35\x44\x62\x78\x72" + "\xd0\x96\xeb\x31\xe7\x87\x77\x14" + "\x99\x51\xf2\x59\x26\x9e\xb5\xa6" + "\x45\xfe\x6e\xbd\x07\x4c\x94\x5a" + "\xa5\x7d\xfc\xf1\x2b\x77\xe2\xfe" + "\x17\xd4\x84\xa0\xac\xb5\xc7\xda" + "\xa9\x1a\xb6\xf3\x74\x11\xb4\x9d" + "\xfb\x79\x2e\x04\x2d\x50\x28\x83" + "\xbf\xc6\x52\xd3\x34\xd6\xe8\x7a" + "\xb6\xea\xe7\xa8\x6c\x15\x1e\x2c" + "\x57\xbc\x48\x4e\x5f\x5c\xb6\x92" + "\xd2\x49\x77\x81\x6d\x90\x70\xae" + "\x98\xa1\x03\x0d\x6b\xb9\x77\x14" + "\xf1\x4e\x23\xd3\xf8\x68\xbd\xc2" + "\xfe\x04\xb7\x5c\xc5\x17\x60\x8f" + "\x65\x54\xa4\x7a\x42\xdc\x18\x0d" + "\xb5\xcf\x0f\xd3\xc7\x91\x66\x1b" + 
"\x45\x42\x27\x75\x50\xe5\xee\xb8" + "\x7f\x33\x2c\xba\x4a\x92\x4d\x2c" + "\x3c\xe3\x0d\x80\x01\xba\x0d\x29" + "\xd8\x3c\xe9\x13\x16\x57\xe6\xea" + "\x94\x52\xe7\x00\x4d\x30\xb0\x0f" + "\x35\xb8\xb8\xa7\xb1\xb5\x3b\x44" + "\xe1\x2f\xfd\x88\xed\x43\xe7\x52" + "\x10\x93\xb3\x8a\x30\x6b\x0a\xf7" + "\x23\xc6\x50\x9d\x4a\xb0\xde\xc3" + "\xdc\x9b\x2f\x01\x56\x36\x09\xc5" + "\x2f\x6b\xfe\xf1\xd8\x27\x45\x03" + "\x30\x5e\x5c\x5b\xb4\x62\x0e\x1a" + "\xa9\x21\x2b\x92\x94\x87\x62\x57" + "\x4c\x10\x74\x1a\xf1\x0a\xc5\x84" + "\x3b\x9e\x72\x02\xd7\xcc\x09\x56" + "\xbd\x54\xc1\xf0\xc3\xe3\xb3\xf8" + "\xd2\x0d\x61\xcb\xef\xce\x0d\x05" + "\xb0\x98\xd9\x8e\x4f\xf9\xbc\x93" + "\xa6\xea\xc8\xcf\x10\x53\x4b\xf1" + "\xec\xfc\x89\xf9\x64\xb0\x22\xbf" + "\x9e\x55\x46\x9f\x7c\x50\x8e\x84" + "\x54\x20\x98\xd7\x6c\x40\x1e\xdb" + "\x69\x34\x78\x61\x24\x21\x9c\x8a" + "\xb3\x62\x31\x8b\x6e\xf5\x2a\x35" + "\x86\x13\xb1\x6c\x64\x2e\x41\xa5" + "\x05\xf2\x42\xba\xd2\x3a\x0d\x8e" + "\x8a\x59\x94\x3c\xcf\x36\x27\x82" + "\xc2\x45\xee\x58\xcd\x88\xb4\xec" + "\xde\xb2\x96\x0a\xaf\x38\x6f\x88" + "\xd7\xd8\xe1\xdf\xb9\x96\xa9\x0a" + "\xb1\x95\x28\x86\x20\xe9\x17\x49" + "\xa2\x29\x38\xaa\xa5\xe9\x6e\xf1" + "\x19\x27\xc0\xd5\x2a\x22\xc3\x0b" + "\xdb\x7c\x73\x10\xb9\xba\x89\x76" + "\x54\xae\x7d\x71\xb3\x93\xf6\x32" + "\xe6\x47\x43\x55\xac\xa0\x0d\xc2" + "\x93\x27\x4a\x8e\x0e\x74\x15\xc7" + "\x0b\x85\xd9\x0c\xa9\x30\x7a\x3e" + "\xea\x8f\x85\x6d\x3a\x12\x4f\x72" + "\x69\x58\x7a\x80\xbb\xb5\x97\xf3" + "\xcf\x70\xd2\x5d\xdd\x4d\x21\x79" + "\x54\x4d\xe4\x05\xe8\xbd\xc2\x62" + "\xb1\x3b\x77\x1c\xd6\x5c\xf3\xa0" + "\x79\x00\xa8\x6c\x29\xd9\x18\x24" + "\x36\xa2\x46\xc0\x96\x65\x7f\xbd" + "\x2a\xed\x36\x16\x0c\xaa\x9f\xf4" + "\xc5\xb4\xe2\x12\xed\x69\xed\x4f" + "\x26\x2c\x39\x52\x89\x98\xe7\x2c" + "\x99\xa4\x9e\xa3\x9b\x99\x46\x7a" + "\x3a\xdc\xa8\x59\xa3\xdb\xc3\x3b" + "\x95\x0d\x3b\x09\x6e\xee\x83\x5d" + "\x32\x4d\xed\xab\xfa\x98\x14\x4e" + "\xc3\x15\x45\x53\x61\xc4\x93\xbd" + "\x90\xf4\x99\x95\x4c\xe6\x76\x92" + "\x29\x90\x46\x30\x92\x69\x7d\x13" + "\xf2\xa5\xcd\x69\x49\x44\xb2\x0f" + "\x63\x40\x36\x5f\x09\xe2\x78\xf8" + "\x91\xe3\xe2\xfa\x10\xf7\xc8\x24" + "\xa8\x89\x32\x5c\x37\x25\x1d\xb2" + "\xea\x17\x8a\x0a\xa9\x64\xc3\x7c" + "\x3c\x7c\xbd\xc6\x79\x34\xe7\xe2" + "\x85\x8e\xbf\xf8\xde\x92\xa0\xae" + "\x20\xc4\xf6\xbb\x1f\x38\x19\x0e" + "\xe8\x79\x9c\xa1\x23\xe9\x54\x7e" + "\x37\x2f\xe2\x94\x32\xaf\xa0\x23" + "\x49\xe4\xc0\xb3\xac\x00\x8f\x36" + "\x05\xc4\xa6\x96\xec\x05\x98\x4f" + "\x96\x67\x57\x1f\x20\x86\x1b\x2d" + "\x69\xe4\x29\x93\x66\x5f\xaf\x6b" + "\x88\x26\x2c\x67\x02\x4b\x52\xd0" + "\x83\x7a\x43\x1f\xc0\x71\x15\x25" + "\x77\x65\x08\x60\x11\x76\x4c\x8d" + "\xed\xa9\x27\xc6\xb1\x2a\x2c\x6a" + "\x4a\x97\xf5\xc6\xb7\x70\x42\xd3" + "\x03\xd1\x24\x95\xec\x6d\xab\x38" + "\x72\xce\xe2\x8b\x33\xd7\x51\x09" + "\xdc\x45\xe0\x09\x96\x32\xf3\xc4" + "\x84\xdc\x73\x73\x2d\x1b\x11\x98" + "\xc5\x0e\x69\x28\x94\xc7\xb5\x4d" + "\xc8\x8a\xd0\xaa\x13\x2e\x18\x74" + "\xdd\xd1\x1e\xf3\x90\xe8\xfc\x9a" + "\x72\x4a\x0e\xd1\xe4\xfb\x0d\x96" + "\xd1\x0c\x79\x85\x1b\x1c\xfe\xe1" + "\x62\x8f\x7a\x73\x32\xab\xc8\x18" + "\x69\xe3\x34\x30\xdf\x13\xa6\xe5" + "\xe8\x0e\x67\x7f\x81\x11\xb4\x60" + "\xc7\xbd\x79\x65\x50\xdc\xc4\x5b" + "\xde\x39\xa4\x01\x72\x63\xf3\xd1" + "\x64\x4e\xdf\xfc\x27\x92\x37\x0d" + "\x57\xcd\x11\x4f\x11\x04\x8e\x1d" + "\x16\xf7\xcd\x92\x9a\x99\x30\x14" + "\xf1\x7c\x67\x1b\x1f\x41\x0b\xe8" + "\x32\xe8\xb8\xc1\x4f\x54\x86\x4f" + "\xe5\x79\x81\x73\xcd\x43\x59\x68" + "\x73\x02\x3b\x78\x21\x72\x43\x00" + "\x49\x17\xf7\x00\xaf\x68\x24\x53" + 
"\x05\x0a\xc3\x33\xe0\x33\x3f\x69" + "\xd2\x84\x2f\x0b\xed\xde\x04\xf4" + "\x11\x94\x13\x69\x51\x09\x28\xde" + "\x57\x5c\xef\xdc\x9a\x49\x1c\x17" + "\x97\xf3\x96\xc1\x7f\x5d\x2e\x7d" + "\x55\xb8\xb3\x02\x09\xb3\x1f\xe7" + "\xc9\x8d\xa3\x36\x34\x8a\x77\x13" + "\x30\x63\x4c\xa5\xcd\xc3\xe0\x7e" + "\x05\xa1\x7b\x0c\xcb\x74\x47\x31" + "\x62\x03\x43\xf1\x87\xb4\xb0\x85" + "\x87\x8e\x4b\x25\xc7\xcf\xae\x4b" + "\x36\x46\x3e\x62\xbc\x6f\xeb\x5f" + "\x73\xac\xe6\x07\xee\xc1\xa1\xd6" + "\xc4\xab\xc9\xd6\x89\x45\xe1\xf1" + "\x04\x4e\x1a\x6f\xbb\x4f\x3a\xa3" + "\xa0\xcb\xa3\x0a\xd8\x71\x35\x55" + "\xe4\xbc\x2e\x04\x06\xe6\xff\x5b" + "\x1c\xc0\x11\x7c\xc5\x17\xf3\x38" + "\xcf\xe9\xba\x0f\x0e\xef\x02\xc2" + "\x8d\xc6\xbc\x4b\x67\x20\x95\xd7" + "\x2c\x45\x5b\x86\x44\x8c\x6f\x2e" + "\x7e\x9f\x1c\x77\xba\x6b\x0e\xa3" + "\x69\xdc\xab\x24\x57\x60\x47\xc1" + "\xd1\xa5\x9d\x23\xe6\xb1\x37\xfe" + "\x93\xd2\x4c\x46\xf9\x0c\xc6\xfb" + "\xd6\x9d\x99\x69\xab\x7a\x07\x0c" + "\x65\xe7\xc4\x08\x96\xe2\xa5\x01" + "\x3f\x46\x07\x05\x7e\xe8\x9a\x90" + "\x50\xdc\xe9\x7a\xea\xa1\x39\x6e" + "\x66\xe4\x6f\xa5\x5f\xb2\xd9\x5b" + "\xf5\xdb\x2a\x32\xf0\x11\x6f\x7c" + "\x26\x10\x8f\x3d\x80\xe9\x58\xf7" + "\xe0\xa8\x57\xf8\xdb\x0e\xce\x99" + "\x63\x19\x3d\xd5\xec\x1b\x77\x69" + "\x98\xf6\xe4\x5f\x67\x17\x4b\x09" + "\x85\x62\x82\x70\x18\xe2\x9a\x78" + "\xe2\x62\xbd\xb4\xf1\x42\xc6\xfb" + "\x08\xd0\xbd\xeb\x4e\x09\xf2\xc8" + "\x1e\xdc\x3d\x32\x21\x56\x9c\x4f" + "\x35\xf3\x61\x06\x72\x84\xc4\x32" + "\xf2\xf1\xfa\x0b\x2f\xc3\xdb\x02" + "\x04\xc2\xde\x57\x64\x60\x8d\xcf" + "\xcb\x86\x5d\x97\x3e\xb1\x9c\x01" + "\xd6\x28\x8f\x99\xbc\x46\xeb\x05" + "\xaf\x7e\xb8\x21\x2a\x56\x85\x1c" + "\xb3\x71\xa0\xde\xca\x96\xf1\x78" + "\x49\xa2\x99\x81\x80\x5c\x01\xf5" + "\xa0\xa2\x56\x63\xe2\x70\x07\xa5" + "\x95\xd6\x85\xeb\x36\x9e\xa9\x51" + "\x66\x56\x5f\x1d\x02\x19\xe2\xf6" + "\x4f\x73\x38\x09\x75\x64\x48\xe0" + "\xf1\x7e\x0e\xe8\x9d\xf9\xed\x94" + "\xfe\x16\x26\x62\x49\x74\xf4\xb0" + "\xd4\xa9\x6c\xb0\xfd\x53\xe9\x81" + "\xe0\x7a\xbf\xcf\xb5\xc4\x01\x81" + "\x79\x99\x77\x01\x3b\xe9\xa2\xb6" + "\xe6\x6a\x8a\x9e\x56\x1c\x8d\x1e" + "\x8f\x06\x55\x2c\x6c\xdc\x92\x87" + "\x64\x3b\x4b\x19\xa1\x13\x64\x1d" + "\x4a\xe9\xc0\x00\xb8\x95\xef\x6b" + "\x1a\x86\x6d\x37\x52\x02\xc2\xe0" + "\xc8\xbb\x42\x0c\x02\x21\x4a\xc9" + "\xef\xa0\x54\xe4\x5e\x16\x53\x81" + "\x70\x62\x10\xaf\xde\xb8\xb5\xd3" + "\xe8\x5e\x6c\xc3\x8a\x3e\x18\x07" + "\xf2\x2f\x7d\xa7\xe1\x3d\x4e\xb4" + "\x26\xa7\xa3\x93\x86\xb2\x04\x1e" + "\x53\x5d\x86\xd6\xde\x65\xca\xe3" + "\x4e\xc1\xcf\xef\xc8\x70\x1b\x83" + "\x13\xdd\x18\x8b\x0d\x76\xd2\xf6" + "\x37\x7a\x93\x7a\x50\x11\x9f\x96" + "\x86\x25\xfd\xac\xdc\xbe\x18\x93" + "\x19\x6b\xec\x58\x4f\xb9\x75\xa7" + "\xdd\x3f\x2f\xec\xc8\x5a\x84\xab" + "\xd5\xe4\x8a\x07\xf6\x4d\x23\xd6" + "\x03\xfb\x03\x6a\xea\x66\xbf\xd4" + "\xb1\x34\xfb\x78\xe9\x55\xdc\x7c" + "\x3d\x9c\xe5\x9a\xac\xc3\x7a\x80" + "\x24\x6d\xa0\xef\x25\x7c\xb7\xea" + "\xce\x4d\x5f\x18\x60\xce\x87\x22" + "\x66\x2f\xd5\xdd\xdd\x02\x21\x75" + "\x82\xa0\x1f\x58\xc6\xd3\x62\xf7" + "\x32\xd8\xaf\x1e\x07\x77\x51\x96" + "\xd5\x6b\x1e\x7e\x80\x02\xe8\x67" + "\xea\x17\x0b\x10\xd2\x3f\x28\x25" + "\x4f\x05\x77\x02\x14\x69\xf0\x2c" + "\xbe\x0c\xf1\x74\x30\xd1\xb9\x9b" + "\xfc\x8c\xbb\x04\x16\xd9\xba\xc3" + "\xbc\x91\x8a\xc4\x30\xa4\xb0\x12" + "\x4c\x21\x87\xcb\xc9\x1d\x16\x96" + "\x07\x6f\x23\x54\xb9\x6f\x79\xe5" + "\x64\xc0\x64\xda\xb1\xae\xdd\x60" + "\x6c\x1a\x9d\xd3\x04\x8e\x45\xb0" + "\x92\x61\xd0\x48\x81\xed\x5e\x1d" + "\xa0\xc9\xa4\x33\xc7\x13\x51\x5d" + "\x7f\x83\x73\xb6\x70\x18\x65\x3e" + 
"\x2f\x0e\x7a\x12\x39\x98\xab\xd8" + "\x7e\x6f\xa3\xd1\xba\x56\xad\xbd" + "\xf0\x03\x01\x1c\x85\x35\x9f\xeb" + "\x19\x63\xa1\xaf\xfe\x2d\x35\x50" + "\x39\xa0\x65\x7c\x95\x7e\x6b\xfe" + "\xc1\xac\x07\x7c\x98\x4f\xbe\x57" + "\xa7\x22\xec\xe2\x7e\x29\x09\x53" + "\xe8\xbf\xb4\x7e\x3f\x8f\xfc\x14" + "\xce\x54\xf9\x18\x58\xb5\xff\x44" + "\x05\x9d\xce\x1b\xb6\x82\x23\xc8" + "\x2e\xbc\x69\xbb\x4a\x29\x0f\x65" + "\x94\xf0\x63\x06\x0e\xef\x8c\xbd" + "\xff\xfd\xb0\x21\x6e\x57\x05\x75" + "\xda\xd5\xc4\xeb\x8d\x32\xf7\x50" + "\xd3\x6f\x22\xed\x5f\x8e\xa2\x5b" + "\x80\x8c\xc8\x78\x40\x24\x4b\x89" + "\x30\xce\x7a\x97\x0e\xc4\xaf\xef" + "\x9b\xb4\xcd\x66\x74\x14\x04\x2b" + "\xf7\xce\x0b\x1c\x6e\xc2\x78\x8c" + "\xca\xc5\xd0\x1c\x95\x4a\x91\x2d" + "\xa7\x20\xeb\x86\x52\xb7\x67\xd8" + "\x0c\xd6\x04\x14\xde\x51\x74\x75" + "\xe7\x11\xb4\x87\xa3\x3d\x2d\xad" + "\x4f\xef\xa0\x0f\x70\x00\x6d\x13" + "\x19\x1d\x41\x50\xe9\xd8\xf0\x32" + "\x71\xbc\xd3\x11\xf2\xac\xbe\xaf" + "\x75\x46\x65\x4e\x07\x34\x37\xa3" + "\x89\xfe\x75\xd4\x70\x4c\xc6\x3f" + "\x69\x24\x0e\x38\x67\x43\x8c\xde" + "\x06\xb5\xb8\xe7\xc4\xf0\x41\x8f" + "\xf0\xbd\x2f\x0b\xb9\x18\xf8\xde" + "\x64\xb1\xdb\xee\x00\x50\x77\xe1" + "\xc7\xff\xa6\xfa\xdd\x70\xf4\xe3" + "\x93\xe9\x77\x35\x3d\x4b\x2f\x2b" + "\x6d\x55\xf0\xfc\x88\x54\x4e\x89" + "\xc1\x8a\x23\x31\x2d\x14\x2a\xb8" + "\x1b\x15\xdd\x9e\x6e\x7b\xda\x05" + "\x91\x7d\x62\x64\x96\x72\xde\xfc" + "\xc1\xec\xf0\x23\x51\x6f\xdb\x5b" + "\x1d\x08\x57\xce\x09\xb8\xf6\xcd" + "\x8d\x95\xf2\x20\xbf\x0f\x20\x57" + "\x98\x81\x84\x4f\x15\x5c\x76\xe7" + "\x3e\x0a\x3a\x6c\xc4\x8a\xbe\x78" + "\x74\x77\xc3\x09\x4b\x5d\x48\xe4" + "\xc8\xcb\x0b\xea\x17\x28\xcf\xcf" + "\x31\x32\x44\xa4\xe5\x0e\x1a\x98" + "\x94\xc4\xf0\xff\xae\x3e\x44\xe8" + "\xa5\xb3\xb5\x37\x2f\xe8\xaf\x6f" + "\x28\xc1\x37\x5f\x31\xd2\xb9\x33" + "\xb1\xb2\x52\x94\x75\x2c\x29\x59" + "\x06\xc2\x25\xe8\x71\x65\x4e\xed" + "\xc0\x9c\xb1\xbb\x25\xdc\x6c\xe7" + "\x4b\xa5\x7a\x54\x7a\x60\xff\x7a" + "\xe0\x50\x40\x96\x35\x63\xe4\x0b" + "\x76\xbd\xa4\x65\x00\x1b\x57\x88" + "\xae\xed\x39\x88\x42\x11\x3c\xed" + "\x85\x67\x7d\xb9\x68\x82\xe9\x43" + "\x3c\x47\x53\xfa\xe8\xf8\x9f\x1f" + "\x9f\xef\x0f\xf7\x30\xd9\x30\x0e" + "\xb9\x9f\x69\x18\x2f\x7e\xf8\xf8" + "\xf8\x8c\x0f\xd4\x02\x4d\xea\xcd" + "\x0a\x9c\x6f\x71\x6d\x5a\x4c\x60" + "\xce\x20\x56\x32\xc6\xc5\x99\x1f" + "\x09\xe6\x4e\x18\x1a\x15\x13\xa8" + "\x7d\xb1\x6b\xc0\xb2\x6d\xf8\x26" + "\x66\xf8\x3d\x18\x74\x70\x66\x7a" + "\x34\x17\xde\xba\x47\xf1\x06\x18" + "\xcb\xaf\xeb\x4a\x1e\x8f\xa7\x77" + "\xe0\x3b\x78\x62\x66\xc9\x10\xea" + "\x1f\xb7\x29\x0a\x45\xa1\x1d\x1e" + "\x1d\xe2\x65\x61\x50\x9c\xd7\x05" + "\xf2\x0b\x5b\x12\x61\x02\xc8\xe5" + "\x63\x4f\x20\x0c\x07\x17\x33\x5e" + "\x03\x9a\x53\x0f\x2e\x55\xfe\x50" + "\x43\x7d\xd0\xb6\x7e\x5a\xda\xae" + "\x58\xef\x15\xa9\x83\xd9\x46\xb1" + "\x42\xaa\xf5\x02\x6c\xce\x92\x06" + "\x1b\xdb\x66\x45\x91\x79\xc2\x2d" + "\xe6\x53\xd3\x14\xfd\xbb\x44\x63" + "\xc6\xd7\x3d\x7a\x0c\x75\x78\x9d" + "\x5c\xa6\x39\xb3\xe5\x63\xca\x8b" + "\xfe\xd3\xef\x60\x83\xf6\x8e\x70" + "\xb6\x67\xc7\x77\xed\x23\xef\x4c" + "\xf0\xed\x2d\x07\x59\x6f\xc1\x01" + "\x34\x37\x08\xab\xd9\x1f\x09\xb1" + "\xce\x5b\x17\xff\x74\xf8\x9c\xd5" + "\x2c\x56\x39\x79\x0f\x69\x44\x75" + "\x58\x27\x01\xc4\xbf\xa7\xa1\x1d" + "\x90\x17\x77\x86\x5a\x3f\xd9\xd1" + "\x0e\xa0\x10\xf8\xec\x1e\xa5\x7f" + "\x5e\x36\xd1\xe3\x04\x2c\x70\xf7" + "\x8e\xc0\x98\x2f\x6c\x94\x2b\x41" + "\xb7\x60\x00\xb7\x2e\xb8\x02\x8d" + "\xb8\xb0\xd3\x86\xba\x1d\xd7\x90" + "\xd6\xb6\xe1\xfc\xd7\xd8\x28\x06" + "\x63\x9b\xce\x61\x24\x79\xc0\x70" + 
"\x52\xd0\xb6\xd4\x28\x95\x24\x87" + "\x03\x1f\xb7\x9a\xda\xa3\xfb\x52" + "\x5b\x68\xe7\x4c\x8c\x24\xe1\x42" + "\xf7\xd5\xfd\xad\x06\x32\x9f\xba" + "\xc1\xfc\xdd\xc6\xfc\xfc\xb3\x38" + "\x74\x56\x58\x40\x02\x37\x52\x2c" + "\x55\xcc\xb3\x9e\x7a\xe9\xd4\x38" + "\x41\x5e\x0c\x35\xe2\x11\xd1\x13" + "\xf8\xb7\x8d\x72\x6b\x22\x2a\xb0" + "\xdb\x08\xba\x35\xb9\x3f\xc8\xd3" + "\x24\x90\xec\x58\xd2\x09\xc7\x2d" + "\xed\x38\x80\x36\x72\x43\x27\x49" + "\x4a\x80\x8a\xa2\xe8\xd3\xda\x30" + "\x7d\xb6\x82\x37\x86\x92\x86\x3e" + "\x08\xb2\x28\x5a\x55\x44\x24\x7d" + "\x40\x48\x8a\xb6\x89\x58\x08\xa0" + "\xd6\x6d\x3a\x17\xbf\xf6\x54\xa2" + "\xf5\xd3\x8c\x0f\x78\x12\x57\x8b" + "\xd5\xc2\xfd\x58\x5b\x7f\x38\xe3" + "\xcc\xb7\x7c\x48\xb3\x20\xe8\x81" + "\x14\x32\x45\x05\xe0\xdb\x9f\x75" + "\x85\xb4\x6a\xfc\x95\xe3\x54\x22" + "\x12\xee\x30\xfe\xd8\x30\xef\x34" + "\x50\xab\x46\x30\x98\x2f\xb7\xc0" + "\x15\xa2\x83\xb6\xf2\x06\x21\xa2" + "\xc3\x26\x37\x14\xd1\x4d\xb5\x10" + "\x52\x76\x4d\x6a\xee\xb5\x2b\x15" + "\xb7\xf9\x51\xe8\x2a\xaf\xc7\xfa" + "\x77\xaf\xb0\x05\x4d\xd1\x68\x8e" + "\x74\x05\x9f\x9d\x93\xa5\x3e\x7f" + "\x4e\x5f\x9d\xcb\x09\xc7\x83\xe3" + "\x02\x9d\x27\x1f\xef\x85\x05\x8d" + "\xec\x55\x88\x0f\x0d\x7c\x4c\xe8" + "\xa1\x75\xa0\xd8\x06\x47\x14\xef" + "\xaa\x61\xcf\x26\x15\xad\xd8\xa3" + "\xaa\x75\xf2\x78\x4a\x5a\x61\xdf" + "\x8b\xc7\x04\xbc\xb2\x32\xd2\x7e" + "\x42\xee\xb4\x2f\x51\xff\x7b\x2e" + "\xd3\x02\xe8\xdc\x5d\x0d\x50\xdc" + "\xae\xb7\x46\xf9\xa8\xe6\xd0\x16" + "\xcc\xe6\x2c\x81\xc7\xad\xe9\xf0" + "\x05\x72\x6d\x3d\x0a\x7a\xa9\x02" + "\xac\x82\x93\x6e\xb6\x1c\x28\xfc" + "\x44\x12\xfb\x73\x77\xd4\x13\x39" + "\x29\x88\x8a\xf3\x5c\xa6\x36\xa0" + "\x2a\xed\x7e\xb1\x1d\xd6\x4c\x6b" + "\x41\x01\x18\x5d\x5d\x07\x97\xa6" + "\x4b\xef\x31\x18\xea\xac\xb1\x84" + "\x21\xed\xda\x86", + .rlen = 4100, + .np = 2, + .tap = { 4064, 36 }, + }, +}; + +static struct cipher_testvec aes_ctr_rfc3686_dec_tv_template[] = { + { /* From RFC 3686 */ + .key = "\xae\x68\x52\xf8\x12\x10\x67\xcc" + "\x4b\xf7\xa5\x76\x55\x77\xf3\x9e" + "\x00\x00\x00\x30", + .klen = 20, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x00", + .input = "\xe4\x09\x5d\x4f\xb7\xa7\xb3\x79" + "\x2d\x61\x75\xa3\x26\x13\x11\xb8", + .ilen = 16, + .result = "Single block msg", + .rlen = 16, + }, { + .key = "\x7e\x24\x06\x78\x17\xfa\xe0\xd7" + "\x43\xd6\xce\x1f\x32\x53\x91\x63" + "\x00\x6c\xb6\xdb", + .klen = 20, + .iv = "\xc0\x54\x3b\x59\xda\x48\xd9\x0b", + .input = "\x51\x04\xa1\x06\x16\x8a\x72\xd9" + "\x79\x0d\x41\xee\x8e\xda\xd3\x88" + "\xeb\x2e\x1e\xfc\x46\xda\x57\xc8" + "\xfc\xe6\x30\xdf\x91\x41\xbe\x28", + .ilen = 32, + .result = "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f" + "\x10\x11\x12\x13\x14\x15\x16\x17" + "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + .rlen = 32, + }, { + .key = "\x16\xaf\x5b\x14\x5f\xc9\xf5\x79" + "\xc1\x75\xf9\x3e\x3b\xfb\x0e\xed" + "\x86\x3d\x06\xcc\xfd\xb7\x85\x15" + "\x00\x00\x00\x48", + .klen = 28, + .iv = "\x36\x73\x3c\x14\x7d\x6d\x93\xcb", + .input = "\x4b\x55\x38\x4f\xe2\x59\xc9\xc8" + "\x4e\x79\x35\xa0\x03\xcb\xe9\x28", + .ilen = 16, + .result = "Single block msg", + .rlen = 16, + }, { + .key = "\x7c\x5c\xb2\x40\x1b\x3d\xc3\x3c" + "\x19\xe7\x34\x08\x19\xe0\xf6\x9c" + "\x67\x8c\x3d\xb8\xe6\xf6\xa9\x1a" + "\x00\x96\xb0\x3b", + .klen = 28, + .iv = "\x02\x0c\x6e\xad\xc2\xcb\x50\x0d", + .input = "\x45\x32\x43\xfc\x60\x9b\x23\x32" + "\x7e\xdf\xaa\xfa\x71\x31\xcd\x9f" + "\x84\x90\x70\x1c\x5a\xd4\xa7\x9c" + "\xfc\x1f\xe0\xff\x42\xf4\xfb\x00", + .ilen = 32, + .result = "\x00\x01\x02\x03\x04\x05\x06\x07" + 
"\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f" + "\x10\x11\x12\x13\x14\x15\x16\x17" + "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + .rlen = 32, + }, { + .key = "\x77\x6b\xef\xf2\x85\x1d\xb0\x6f" + "\x4c\x8a\x05\x42\xc8\x69\x6f\x6c" + "\x6a\x81\xaf\x1e\xec\x96\xb4\xd3" + "\x7f\xc1\xd6\x89\xe6\xc1\xc1\x04" + "\x00\x00\x00\x60", + .klen = 36, + .iv = "\xdb\x56\x72\xc9\x7a\xa8\xf0\xb2", + .input = "\x14\x5a\xd0\x1d\xbf\x82\x4e\xc7" + "\x56\x08\x63\xdc\x71\xe3\xe0\xc0", + .ilen = 16, + .result = "Single block msg", + .rlen = 16, + }, { + .key = "\xf6\xd6\x6d\x6b\xd5\x2d\x59\xbb" + "\x07\x96\x36\x58\x79\xef\xf8\x86" + "\xc6\x6d\xd5\x1a\x5b\x6a\x99\x74" + "\x4b\x50\x59\x0c\x87\xa2\x38\x84" + "\x00\xfa\xac\x24", + .klen = 36, + .iv = "\xc1\x58\x5e\xf1\x5a\x43\xd8\x75", + .input = "\xf0\x5e\x23\x1b\x38\x94\x61\x2c" + "\x49\xee\x00\x0b\x80\x4e\xb2\xa9" + "\xb8\x30\x6b\x50\x8f\x83\x9d\x6a" + "\x55\x30\x83\x1d\x93\x44\xaf\x1c", + .ilen = 32, + .result = "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f" + "\x10\x11\x12\x13\x14\x15\x16\x17" + "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + .rlen = 32, + }, +}; + +static struct cipher_testvec aes_ofb_enc_tv_template[] = { + /* From NIST Special Publication 800-38A, Appendix F.5 */ + { + .key = "\x2b\x7e\x15\x16\x28\xae\xd2\xa6" + "\xab\xf7\x15\x88\x09\xcf\x4f\x3c", + .klen = 16, + .iv = "\x00\x01\x02\x03\x04\x05\x06\x07\x08" + "\x09\x0a\x0b\x0c\x0d\x0e\x0f", + .input = "\x6b\xc1\xbe\xe2\x2e\x40\x9f\x96" + "\xe9\x3d\x7e\x11\x73\x93\x17\x2a" + "\xae\x2d\x8a\x57\x1e\x03\xac\x9c" + "\x9e\xb7\x6f\xac\x45\xaf\x8e\x51" + "\x30\xc8\x1c\x46\xa3\x5c\xe4\x11" + "\xe5\xfb\xc1\x19\x1a\x0a\x52\xef" + "\xf6\x9f\x24\x45\xdf\x4f\x9b\x17" + "\xad\x2b\x41\x7b\xe6\x6c\x37\x10", + .ilen = 64, + .result = "\x3b\x3f\xd9\x2e\xb7\x2d\xad\x20" + "\x33\x34\x49\xf8\xe8\x3c\xfb\x4a" + "\x77\x89\x50\x8d\x16\x91\x8f\x03\xf5" + "\x3c\x52\xda\xc5\x4e\xd8\x25" + "\x97\x40\x05\x1e\x9c\x5f\xec\xf6\x43" + "\x44\xf7\xa8\x22\x60\xed\xcc" + "\x30\x4c\x65\x28\xf6\x59\xc7\x78" + "\x66\xa5\x10\xd9\xc1\xd6\xae\x5e", + .rlen = 64, + } +}; + +static struct cipher_testvec aes_ofb_dec_tv_template[] = { + /* From NIST Special Publication 800-38A, Appendix F.5 */ + { + .key = "\x2b\x7e\x15\x16\x28\xae\xd2\xa6" + "\xab\xf7\x15\x88\x09\xcf\x4f\x3c", + .klen = 16, + .iv = "\x00\x01\x02\x03\x04\x05\x06\x07\x08" + "\x09\x0a\x0b\x0c\x0d\x0e\x0f", + .input = "\x3b\x3f\xd9\x2e\xb7\x2d\xad\x20" + "\x33\x34\x49\xf8\xe8\x3c\xfb\x4a" + "\x77\x89\x50\x8d\x16\x91\x8f\x03\xf5" + "\x3c\x52\xda\xc5\x4e\xd8\x25" + "\x97\x40\x05\x1e\x9c\x5f\xec\xf6\x43" + "\x44\xf7\xa8\x22\x60\xed\xcc" + "\x30\x4c\x65\x28\xf6\x59\xc7\x78" + "\x66\xa5\x10\xd9\xc1\xd6\xae\x5e", + .ilen = 64, + .result = "\x6b\xc1\xbe\xe2\x2e\x40\x9f\x96" + "\xe9\x3d\x7e\x11\x73\x93\x17\x2a" + "\xae\x2d\x8a\x57\x1e\x03\xac\x9c" + "\x9e\xb7\x6f\xac\x45\xaf\x8e\x51" + "\x30\xc8\x1c\x46\xa3\x5c\xe4\x11" + "\xe5\xfb\xc1\x19\x1a\x0a\x52\xef" + "\xf6\x9f\x24\x45\xdf\x4f\x9b\x17" + "\xad\x2b\x41\x7b\xe6\x6c\x37\x10", + .rlen = 64, + } +}; + +static struct aead_testvec aes_gcm_enc_tv_template[] = { + { /* From McGrew & Viega - http://citeseer.ist.psu.edu/656989.html */ + .key = zeroed_string, + .klen = 16, + .result = "\x58\xe2\xfc\xce\xfa\x7e\x30\x61" + "\x36\x7f\x1d\x57\xa4\xe7\x45\x5a", + .rlen = 16, + }, { + .key = zeroed_string, + .klen = 16, + .input = zeroed_string, + .ilen = 16, + .result = "\x03\x88\xda\xce\x60\xb6\xa3\x92" + "\xf3\x28\xc2\xb9\x71\xb2\xfe\x78" + "\xab\x6e\x47\xd4\x2c\xec\x13\xbd" + "\xf5\x3a\x67\xb2\x12\x57\xbd\xdf", + .rlen = 32, + }, { + .key = 
"\xfe\xff\xe9\x92\x86\x65\x73\x1c" + "\x6d\x6a\x8f\x94\x67\x30\x83\x08", + .klen = 16, + .iv = "\xca\xfe\xba\xbe\xfa\xce\xdb\xad" + "\xde\xca\xf8\x88", + .input = "\xd9\x31\x32\x25\xf8\x84\x06\xe5" + "\xa5\x59\x09\xc5\xaf\xf5\x26\x9a" + "\x86\xa7\xa9\x53\x15\x34\xf7\xda" + "\x2e\x4c\x30\x3d\x8a\x31\x8a\x72" + "\x1c\x3c\x0c\x95\x95\x68\x09\x53" + "\x2f\xcf\x0e\x24\x49\xa6\xb5\x25" + "\xb1\x6a\xed\xf5\xaa\x0d\xe6\x57" + "\xba\x63\x7b\x39\x1a\xaf\xd2\x55", + .ilen = 64, + .result = "\x42\x83\x1e\xc2\x21\x77\x74\x24" + "\x4b\x72\x21\xb7\x84\xd0\xd4\x9c" + "\xe3\xaa\x21\x2f\x2c\x02\xa4\xe0" + "\x35\xc1\x7e\x23\x29\xac\xa1\x2e" + "\x21\xd5\x14\xb2\x54\x66\x93\x1c" + "\x7d\x8f\x6a\x5a\xac\x84\xaa\x05" + "\x1b\xa3\x0b\x39\x6a\x0a\xac\x97" + "\x3d\x58\xe0\x91\x47\x3f\x59\x85" + "\x4d\x5c\x2a\xf3\x27\xcd\x64\xa6" + "\x2c\xf3\x5a\xbd\x2b\xa6\xfa\xb4", + .rlen = 80, + }, { + .key = "\xfe\xff\xe9\x92\x86\x65\x73\x1c" + "\x6d\x6a\x8f\x94\x67\x30\x83\x08", + .klen = 16, + .iv = "\xca\xfe\xba\xbe\xfa\xce\xdb\xad" + "\xde\xca\xf8\x88", + .input = "\xd9\x31\x32\x25\xf8\x84\x06\xe5" + "\xa5\x59\x09\xc5\xaf\xf5\x26\x9a" + "\x86\xa7\xa9\x53\x15\x34\xf7\xda" + "\x2e\x4c\x30\x3d\x8a\x31\x8a\x72" + "\x1c\x3c\x0c\x95\x95\x68\x09\x53" + "\x2f\xcf\x0e\x24\x49\xa6\xb5\x25" + "\xb1\x6a\xed\xf5\xaa\x0d\xe6\x57" + "\xba\x63\x7b\x39", + .ilen = 60, + .assoc = "\xfe\xed\xfa\xce\xde\xad\xbe\xef" + "\xfe\xed\xfa\xce\xde\xad\xbe\xef" + "\xab\xad\xda\xd2", + .alen = 20, + .result = "\x42\x83\x1e\xc2\x21\x77\x74\x24" + "\x4b\x72\x21\xb7\x84\xd0\xd4\x9c" + "\xe3\xaa\x21\x2f\x2c\x02\xa4\xe0" + "\x35\xc1\x7e\x23\x29\xac\xa1\x2e" + "\x21\xd5\x14\xb2\x54\x66\x93\x1c" + "\x7d\x8f\x6a\x5a\xac\x84\xaa\x05" + "\x1b\xa3\x0b\x39\x6a\x0a\xac\x97" + "\x3d\x58\xe0\x91" + "\x5b\xc9\x4f\xbc\x32\x21\xa5\xdb" + "\x94\xfa\xe9\x5a\xe7\x12\x1a\x47", + .rlen = 76, + }, { + .key = zeroed_string, + .klen = 24, + .result = "\xcd\x33\xb2\x8a\xc7\x73\xf7\x4b" + "\xa0\x0e\xd1\xf3\x12\x57\x24\x35", + .rlen = 16, + }, { + .key = zeroed_string, + .klen = 24, + .input = zeroed_string, + .ilen = 16, + .result = "\x98\xe7\x24\x7c\x07\xf0\xfe\x41" + "\x1c\x26\x7e\x43\x84\xb0\xf6\x00" + "\x2f\xf5\x8d\x80\x03\x39\x27\xab" + "\x8e\xf4\xd4\x58\x75\x14\xf0\xfb", + .rlen = 32, + }, { + .key = "\xfe\xff\xe9\x92\x86\x65\x73\x1c" + "\x6d\x6a\x8f\x94\x67\x30\x83\x08" + "\xfe\xff\xe9\x92\x86\x65\x73\x1c", + .klen = 24, + .iv = "\xca\xfe\xba\xbe\xfa\xce\xdb\xad" + "\xde\xca\xf8\x88", + .input = "\xd9\x31\x32\x25\xf8\x84\x06\xe5" + "\xa5\x59\x09\xc5\xaf\xf5\x26\x9a" + "\x86\xa7\xa9\x53\x15\x34\xf7\xda" + "\x2e\x4c\x30\x3d\x8a\x31\x8a\x72" + "\x1c\x3c\x0c\x95\x95\x68\x09\x53" + "\x2f\xcf\x0e\x24\x49\xa6\xb5\x25" + "\xb1\x6a\xed\xf5\xaa\x0d\xe6\x57" + "\xba\x63\x7b\x39\x1a\xaf\xd2\x55", + .ilen = 64, + .result = "\x39\x80\xca\x0b\x3c\x00\xe8\x41" + "\xeb\x06\xfa\xc4\x87\x2a\x27\x57" + "\x85\x9e\x1c\xea\xa6\xef\xd9\x84" + "\x62\x85\x93\xb4\x0c\xa1\xe1\x9c" + "\x7d\x77\x3d\x00\xc1\x44\xc5\x25" + "\xac\x61\x9d\x18\xc8\x4a\x3f\x47" + "\x18\xe2\x44\x8b\x2f\xe3\x24\xd9" + "\xcc\xda\x27\x10\xac\xad\xe2\x56" + "\x99\x24\xa7\xc8\x58\x73\x36\xbf" + "\xb1\x18\x02\x4d\xb8\x67\x4a\x14", + .rlen = 80, + }, { + .key = "\xfe\xff\xe9\x92\x86\x65\x73\x1c" + "\x6d\x6a\x8f\x94\x67\x30\x83\x08" + "\xfe\xff\xe9\x92\x86\x65\x73\x1c", + .klen = 24, + .iv = "\xca\xfe\xba\xbe\xfa\xce\xdb\xad" + "\xde\xca\xf8\x88", + .input = "\xd9\x31\x32\x25\xf8\x84\x06\xe5" + "\xa5\x59\x09\xc5\xaf\xf5\x26\x9a" + "\x86\xa7\xa9\x53\x15\x34\xf7\xda" + "\x2e\x4c\x30\x3d\x8a\x31\x8a\x72" + 
"\x1c\x3c\x0c\x95\x95\x68\x09\x53" + "\x2f\xcf\x0e\x24\x49\xa6\xb5\x25" + "\xb1\x6a\xed\xf5\xaa\x0d\xe6\x57" + "\xba\x63\x7b\x39", + .ilen = 60, + .assoc = "\xfe\xed\xfa\xce\xde\xad\xbe\xef" + "\xfe\xed\xfa\xce\xde\xad\xbe\xef" + "\xab\xad\xda\xd2", + .alen = 20, + .result = "\x39\x80\xca\x0b\x3c\x00\xe8\x41" + "\xeb\x06\xfa\xc4\x87\x2a\x27\x57" + "\x85\x9e\x1c\xea\xa6\xef\xd9\x84" + "\x62\x85\x93\xb4\x0c\xa1\xe1\x9c" + "\x7d\x77\x3d\x00\xc1\x44\xc5\x25" + "\xac\x61\x9d\x18\xc8\x4a\x3f\x47" + "\x18\xe2\x44\x8b\x2f\xe3\x24\xd9" + "\xcc\xda\x27\x10" + "\x25\x19\x49\x8e\x80\xf1\x47\x8f" + "\x37\xba\x55\xbd\x6d\x27\x61\x8c", + .rlen = 76, + .np = 2, + .tap = { 32, 28 }, + .anp = 2, + .atap = { 8, 12 } + }, { + .key = zeroed_string, + .klen = 32, + .result = "\x53\x0f\x8a\xfb\xc7\x45\x36\xb9" + "\xa9\x63\xb4\xf1\xc4\xcb\x73\x8b", + .rlen = 16, + } +}; + +static struct aead_testvec aes_gcm_dec_tv_template[] = { + { /* From McGrew & Viega - http://citeseer.ist.psu.edu/656989.html */ + .key = zeroed_string, + .klen = 32, + .input = "\xce\xa7\x40\x3d\x4d\x60\x6b\x6e" + "\x07\x4e\xc5\xd3\xba\xf3\x9d\x18" + "\xd0\xd1\xc8\xa7\x99\x99\x6b\xf0" + "\x26\x5b\x98\xb5\xd4\x8a\xb9\x19", + .ilen = 32, + .result = zeroed_string, + .rlen = 16, + }, { + .key = "\xfe\xff\xe9\x92\x86\x65\x73\x1c" + "\x6d\x6a\x8f\x94\x67\x30\x83\x08" + "\xfe\xff\xe9\x92\x86\x65\x73\x1c" + "\x6d\x6a\x8f\x94\x67\x30\x83\x08", + .klen = 32, + .iv = "\xca\xfe\xba\xbe\xfa\xce\xdb\xad" + "\xde\xca\xf8\x88", + .input = "\x52\x2d\xc1\xf0\x99\x56\x7d\x07" + "\xf4\x7f\x37\xa3\x2a\x84\x42\x7d" + "\x64\x3a\x8c\xdc\xbf\xe5\xc0\xc9" + "\x75\x98\xa2\xbd\x25\x55\xd1\xaa" + "\x8c\xb0\x8e\x48\x59\x0d\xbb\x3d" + "\xa7\xb0\x8b\x10\x56\x82\x88\x38" + "\xc5\xf6\x1e\x63\x93\xba\x7a\x0a" + "\xbc\xc9\xf6\x62\x89\x80\x15\xad" + "\xb0\x94\xda\xc5\xd9\x34\x71\xbd" + "\xec\x1a\x50\x22\x70\xe3\xcc\x6c", + .ilen = 80, + .result = "\xd9\x31\x32\x25\xf8\x84\x06\xe5" + "\xa5\x59\x09\xc5\xaf\xf5\x26\x9a" + "\x86\xa7\xa9\x53\x15\x34\xf7\xda" + "\x2e\x4c\x30\x3d\x8a\x31\x8a\x72" + "\x1c\x3c\x0c\x95\x95\x68\x09\x53" + "\x2f\xcf\x0e\x24\x49\xa6\xb5\x25" + "\xb1\x6a\xed\xf5\xaa\x0d\xe6\x57" + "\xba\x63\x7b\x39\x1a\xaf\xd2\x55", + .rlen = 64, + }, { + .key = "\xfe\xff\xe9\x92\x86\x65\x73\x1c" + "\x6d\x6a\x8f\x94\x67\x30\x83\x08" + "\xfe\xff\xe9\x92\x86\x65\x73\x1c" + "\x6d\x6a\x8f\x94\x67\x30\x83\x08", + .klen = 32, + .iv = "\xca\xfe\xba\xbe\xfa\xce\xdb\xad" + "\xde\xca\xf8\x88", + .input = "\x52\x2d\xc1\xf0\x99\x56\x7d\x07" + "\xf4\x7f\x37\xa3\x2a\x84\x42\x7d" + "\x64\x3a\x8c\xdc\xbf\xe5\xc0\xc9" + "\x75\x98\xa2\xbd\x25\x55\xd1\xaa" + "\x8c\xb0\x8e\x48\x59\x0d\xbb\x3d" + "\xa7\xb0\x8b\x10\x56\x82\x88\x38" + "\xc5\xf6\x1e\x63\x93\xba\x7a\x0a" + "\xbc\xc9\xf6\x62" + "\x76\xfc\x6e\xce\x0f\x4e\x17\x68" + "\xcd\xdf\x88\x53\xbb\x2d\x55\x1b", + .ilen = 76, + .assoc = "\xfe\xed\xfa\xce\xde\xad\xbe\xef" + "\xfe\xed\xfa\xce\xde\xad\xbe\xef" + "\xab\xad\xda\xd2", + .alen = 20, + .result = "\xd9\x31\x32\x25\xf8\x84\x06\xe5" + "\xa5\x59\x09\xc5\xaf\xf5\x26\x9a" + "\x86\xa7\xa9\x53\x15\x34\xf7\xda" + "\x2e\x4c\x30\x3d\x8a\x31\x8a\x72" + "\x1c\x3c\x0c\x95\x95\x68\x09\x53" + "\x2f\xcf\x0e\x24\x49\xa6\xb5\x25" + "\xb1\x6a\xed\xf5\xaa\x0d\xe6\x57" + "\xba\x63\x7b\x39", + .rlen = 60, + .np = 2, + .tap = { 48, 28 }, + .anp = 3, + .atap = { 8, 8, 4 } + }, { + .key = "\xfe\xff\xe9\x92\x86\x65\x73\x1c" + "\x6d\x6a\x8f\x94\x67\x30\x83\x08", + .klen = 16, + .iv = "\xca\xfe\xba\xbe\xfa\xce\xdb\xad" + "\xde\xca\xf8\x88", + .input = "\x42\x83\x1e\xc2\x21\x77\x74\x24" + 
"\x4b\x72\x21\xb7\x84\xd0\xd4\x9c" + "\xe3\xaa\x21\x2f\x2c\x02\xa4\xe0" + "\x35\xc1\x7e\x23\x29\xac\xa1\x2e" + "\x21\xd5\x14\xb2\x54\x66\x93\x1c" + "\x7d\x8f\x6a\x5a\xac\x84\xaa\x05" + "\x1b\xa3\x0b\x39\x6a\x0a\xac\x97" + "\x3d\x58\xe0\x91\x47\x3f\x59\x85" + "\x4d\x5c\x2a\xf3\x27\xcd\x64\xa6" + "\x2c\xf3\x5a\xbd\x2b\xa6\xfa\xb4", + .ilen = 80, + .result = "\xd9\x31\x32\x25\xf8\x84\x06\xe5" + "\xa5\x59\x09\xc5\xaf\xf5\x26\x9a" + "\x86\xa7\xa9\x53\x15\x34\xf7\xda" + "\x2e\x4c\x30\x3d\x8a\x31\x8a\x72" + "\x1c\x3c\x0c\x95\x95\x68\x09\x53" + "\x2f\xcf\x0e\x24\x49\xa6\xb5\x25" + "\xb1\x6a\xed\xf5\xaa\x0d\xe6\x57" + "\xba\x63\x7b\x39\x1a\xaf\xd2\x55", + .rlen = 64, + }, { + .key = "\xfe\xff\xe9\x92\x86\x65\x73\x1c" + "\x6d\x6a\x8f\x94\x67\x30\x83\x08", + .klen = 16, + .iv = "\xca\xfe\xba\xbe\xfa\xce\xdb\xad" + "\xde\xca\xf8\x88", + .input = "\x42\x83\x1e\xc2\x21\x77\x74\x24" + "\x4b\x72\x21\xb7\x84\xd0\xd4\x9c" + "\xe3\xaa\x21\x2f\x2c\x02\xa4\xe0" + "\x35\xc1\x7e\x23\x29\xac\xa1\x2e" + "\x21\xd5\x14\xb2\x54\x66\x93\x1c" + "\x7d\x8f\x6a\x5a\xac\x84\xaa\x05" + "\x1b\xa3\x0b\x39\x6a\x0a\xac\x97" + "\x3d\x58\xe0\x91" + "\x5b\xc9\x4f\xbc\x32\x21\xa5\xdb" + "\x94\xfa\xe9\x5a\xe7\x12\x1a\x47", + .ilen = 76, + .assoc = "\xfe\xed\xfa\xce\xde\xad\xbe\xef" + "\xfe\xed\xfa\xce\xde\xad\xbe\xef" + "\xab\xad\xda\xd2", + .alen = 20, + .result = "\xd9\x31\x32\x25\xf8\x84\x06\xe5" + "\xa5\x59\x09\xc5\xaf\xf5\x26\x9a" + "\x86\xa7\xa9\x53\x15\x34\xf7\xda" + "\x2e\x4c\x30\x3d\x8a\x31\x8a\x72" + "\x1c\x3c\x0c\x95\x95\x68\x09\x53" + "\x2f\xcf\x0e\x24\x49\xa6\xb5\x25" + "\xb1\x6a\xed\xf5\xaa\x0d\xe6\x57" + "\xba\x63\x7b\x39", + .rlen = 60, + }, { + .key = zeroed_string, + .klen = 24, + .input = "\x98\xe7\x24\x7c\x07\xf0\xfe\x41" + "\x1c\x26\x7e\x43\x84\xb0\xf6\x00" + "\x2f\xf5\x8d\x80\x03\x39\x27\xab" + "\x8e\xf4\xd4\x58\x75\x14\xf0\xfb", + .ilen = 32, + .result = zeroed_string, + .rlen = 16, + }, { + .key = "\xfe\xff\xe9\x92\x86\x65\x73\x1c" + "\x6d\x6a\x8f\x94\x67\x30\x83\x08" + "\xfe\xff\xe9\x92\x86\x65\x73\x1c", + .klen = 24, + .iv = "\xca\xfe\xba\xbe\xfa\xce\xdb\xad" + "\xde\xca\xf8\x88", + .input = "\x39\x80\xca\x0b\x3c\x00\xe8\x41" + "\xeb\x06\xfa\xc4\x87\x2a\x27\x57" + "\x85\x9e\x1c\xea\xa6\xef\xd9\x84" + "\x62\x85\x93\xb4\x0c\xa1\xe1\x9c" + "\x7d\x77\x3d\x00\xc1\x44\xc5\x25" + "\xac\x61\x9d\x18\xc8\x4a\x3f\x47" + "\x18\xe2\x44\x8b\x2f\xe3\x24\xd9" + "\xcc\xda\x27\x10\xac\xad\xe2\x56" + "\x99\x24\xa7\xc8\x58\x73\x36\xbf" + "\xb1\x18\x02\x4d\xb8\x67\x4a\x14", + .ilen = 80, + .result = "\xd9\x31\x32\x25\xf8\x84\x06\xe5" + "\xa5\x59\x09\xc5\xaf\xf5\x26\x9a" + "\x86\xa7\xa9\x53\x15\x34\xf7\xda" + "\x2e\x4c\x30\x3d\x8a\x31\x8a\x72" + "\x1c\x3c\x0c\x95\x95\x68\x09\x53" + "\x2f\xcf\x0e\x24\x49\xa6\xb5\x25" + "\xb1\x6a\xed\xf5\xaa\x0d\xe6\x57" + "\xba\x63\x7b\x39\x1a\xaf\xd2\x55", + .rlen = 64, + }, { + .key = "\xfe\xff\xe9\x92\x86\x65\x73\x1c" + "\x6d\x6a\x8f\x94\x67\x30\x83\x08" + "\xfe\xff\xe9\x92\x86\x65\x73\x1c", + .klen = 24, + .iv = "\xca\xfe\xba\xbe\xfa\xce\xdb\xad" + "\xde\xca\xf8\x88", + .input = "\x39\x80\xca\x0b\x3c\x00\xe8\x41" + "\xeb\x06\xfa\xc4\x87\x2a\x27\x57" + "\x85\x9e\x1c\xea\xa6\xef\xd9\x84" + "\x62\x85\x93\xb4\x0c\xa1\xe1\x9c" + "\x7d\x77\x3d\x00\xc1\x44\xc5\x25" + "\xac\x61\x9d\x18\xc8\x4a\x3f\x47" + "\x18\xe2\x44\x8b\x2f\xe3\x24\xd9" + "\xcc\xda\x27\x10" + "\x25\x19\x49\x8e\x80\xf1\x47\x8f" + "\x37\xba\x55\xbd\x6d\x27\x61\x8c", + .ilen = 76, + .assoc = "\xfe\xed\xfa\xce\xde\xad\xbe\xef" + "\xfe\xed\xfa\xce\xde\xad\xbe\xef" + "\xab\xad\xda\xd2", + .alen = 20, + .result = 
"\xd9\x31\x32\x25\xf8\x84\x06\xe5" + "\xa5\x59\x09\xc5\xaf\xf5\x26\x9a" + "\x86\xa7\xa9\x53\x15\x34\xf7\xda" + "\x2e\x4c\x30\x3d\x8a\x31\x8a\x72" + "\x1c\x3c\x0c\x95\x95\x68\x09\x53" + "\x2f\xcf\x0e\x24\x49\xa6\xb5\x25" + "\xb1\x6a\xed\xf5\xaa\x0d\xe6\x57" + "\xba\x63\x7b\x39", + .rlen = 60, + } +}; + +static struct aead_testvec aes_gcm_rfc4106_enc_tv_template[] = { + { /* Generated using Crypto++ */ + .key = zeroed_string, + .klen = 20, + .iv = zeroed_string, + .input = zeroed_string, + .ilen = 16, + .assoc = zeroed_string, + .alen = 8, + .result = "\x03\x88\xDA\xCE\x60\xB6\xA3\x92" + "\xF3\x28\xC2\xB9\x71\xB2\xFE\x78" + "\x97\xFE\x4C\x23\x37\x42\x01\xE0" + "\x81\x9F\x8D\xC5\xD7\x41\xA0\x1B", + .rlen = 32, + },{ + .key = "\xfe\xff\xe9\x92\x86\x65\x73\x1c" + "\x6d\x6a\x8f\x94\x67\x30\x83\x08" + "\x00\x00\x00\x00", + .klen = 20, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x01" + "\x00\x00\x00\x00", + .input = zeroed_string, + .ilen = 16, + .assoc = zeroed_string, + .alen = 8, + .result = "\xC0\x0D\x8B\x42\x0F\x8F\x34\x18" + "\x88\xB1\xC5\xBC\xC5\xB6\xD6\x28" + "\x6A\x9D\xDF\x11\x5E\xFE\x5E\x9D" + "\x2F\x70\x44\x92\xF7\xF2\xE3\xEF", + .rlen = 32, + + }, { + .key = "\xfe\xff\xe9\x92\x86\x65\x73\x1c" + "\x6d\x6a\x8f\x94\x67\x30\x83\x08" + "\x00\x00\x00\x00", + .klen = 20, + .iv = zeroed_string, + .input = "\x01\x01\x01\x01\x01\x01\x01\x01" + "\x01\x01\x01\x01\x01\x01\x01\x01", + .ilen = 16, + .assoc = zeroed_string, + .alen = 8, + .result = "\x4B\xB1\xB5\xE3\x25\x71\x70\xDE" + "\x7F\xC9\x9C\xA5\x14\x19\xF2\xAC" + "\x0B\x8F\x88\x69\x17\xE6\xB4\x3C" + "\xB1\x68\xFD\x14\x52\x64\x61\xB2", + .rlen = 32, + }, { + .key = "\xfe\xff\xe9\x92\x86\x65\x73\x1c" + "\x6d\x6a\x8f\x94\x67\x30\x83\x08" + "\x00\x00\x00\x00", + .klen = 20, + .iv = zeroed_string, + .input = "\x01\x01\x01\x01\x01\x01\x01\x01" + "\x01\x01\x01\x01\x01\x01\x01\x01", + .ilen = 16, + .assoc = "\x01\x01\x01\x01\x01\x01\x01\x01", + .alen = 8, + .result = "\x4B\xB1\xB5\xE3\x25\x71\x70\xDE" + "\x7F\xC9\x9C\xA5\x14\x19\xF2\xAC" + "\x90\x92\xB7\xE3\x5F\xA3\x9A\x63" + "\x7E\xD7\x1F\xD8\xD3\x7C\x4B\xF5", + .rlen = 32, + }, { + .key = "\xfe\xff\xe9\x92\x86\x65\x73\x1c" + "\x6d\x6a\x8f\x94\x67\x30\x83\x08" + "\x00\x00\x00\x00", + .klen = 20, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x01" + "\x00\x00\x00\x00", + .input = "\x01\x01\x01\x01\x01\x01\x01\x01" + "\x01\x01\x01\x01\x01\x01\x01\x01", + .ilen = 16, + .assoc = "\x01\x01\x01\x01\x01\x01\x01\x01", + .alen = 8, + .result = "\xC1\x0C\x8A\x43\x0E\x8E\x35\x19" + "\x89\xB0\xC4\xBD\xC4\xB7\xD7\x29" + "\x64\x50\xF9\x32\x13\xFB\x74\x61" + "\xF4\xED\x52\xD3\xC5\x10\x55\x3C", + .rlen = 32, + }, { + .key = "\xfe\xff\xe9\x92\x86\x65\x73\x1c" + "\x6d\x6a\x8f\x94\x67\x30\x83\x08" + "\x00\x00\x00\x00", + .klen = 20, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x01" + "\x00\x00\x00\x00", + .input = "\x01\x01\x01\x01\x01\x01\x01\x01" + "\x01\x01\x01\x01\x01\x01\x01\x01" + "\x01\x01\x01\x01\x01\x01\x01\x01" + "\x01\x01\x01\x01\x01\x01\x01\x01" + "\x01\x01\x01\x01\x01\x01\x01\x01" + "\x01\x01\x01\x01\x01\x01\x01\x01" + "\x01\x01\x01\x01\x01\x01\x01\x01" + "\x01\x01\x01\x01\x01\x01\x01\x01", + .ilen = 64, + .assoc = "\x01\x01\x01\x01\x01\x01\x01\x01", + .alen = 8, + .result = "\xC1\x0C\x8A\x43\x0E\x8E\x35\x19" + "\x89\xB0\xC4\xBD\xC4\xB7\xD7\x29" + "\x98\x14\xA1\x42\x37\x80\xFD\x90" + "\x68\x12\x01\xA8\x91\x89\xB9\x83" + "\x5B\x11\x77\x12\x9B\xFF\x24\x89" + "\x94\x5F\x18\x12\xBA\x27\x09\x39" + "\x99\x96\x76\x42\x15\x1C\xCD\xCB" + "\xDC\xD3\xDA\x65\x73\xAF\x80\xCD" + "\xD2\xB6\xC2\x4A\x76\xC2\x92\x85" + 
"\xBD\xCF\x62\x98\x58\x14\xE5\xBD", + .rlen = 80, + }, { + .key = "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f" + "\x00\x00\x00\x00", + .klen = 20, + .iv = "\x00\x00\x45\x67\x89\xab\xcd\xef" + "\x00\x00\x00\x00", + .input = "\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff", + .ilen = 192, + .assoc = "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa", + .alen = 12, + .result = "\xC1\x76\x33\x85\xE2\x9B\x5F\xDE" + "\xDE\x89\x3D\x42\xE7\xC9\x69\x8A" + "\x44\x6D\xC3\x88\x46\x2E\xC2\x01" + "\x5E\xF6\x0C\x39\xF0\xC4\xA5\x82" + "\xCD\xE8\x31\xCC\x0A\x4C\xE4\x44" + "\x41\xA9\x82\x6F\x22\xA1\x23\x1A" + "\xA8\xE3\x16\xFD\x31\x5C\x27\x31" + "\xF1\x7F\x01\x63\xA3\xAF\x70\xA1" + "\xCF\x07\x57\x41\x67\xD0\xC4\x42" + "\xDB\x18\xC6\x4C\x4C\xE0\x3D\x9F" + "\x05\x07\xFB\x13\x7D\x4A\xCA\x5B" + "\xF0\xBF\x64\x7E\x05\xB1\x72\xEE" + "\x7C\x3B\xD4\xCD\x14\x03\xB2\x2C" + "\xD3\xA9\xEE\xFA\x17\xFC\x9C\xDF" + "\xC7\x75\x40\xFF\xAE\xAD\x1E\x59" + "\x2F\x30\x24\xFB\xAD\x6B\x10\xFA" + "\x6C\x9F\x5B\xE7\x25\xD5\xD0\x25" + "\xAC\x4A\x4B\xDA\xFC\x7A\x85\x1B" + "\x7E\x13\x06\x82\x08\x17\xA4\x35" + "\xEC\xC5\x8D\x63\x96\x81\x0A\x8F" + "\xA3\x05\x38\x95\x20\x1A\x47\x04" + "\x6F\x6D\xDA\x8F\xEF\xC1\x76\x35" + "\x6B\xC7\x4D\x0F\x94\x12\xCA\x3E" + "\x2E\xD5\x03\x2E\x86\x7E\xAA\x3B" + "\x37\x08\x1C\xCF\xBA\x5D\x71\x46" + "\x80\x72\xB0\x4C\x82\x0D\x60\x3C", + .rlen = 208, + } +}; + +static struct aead_testvec aes_gcm_rfc4106_dec_tv_template[] = { + { /* Generated using Crypto++ */ + .key = zeroed_string, + .klen = 20, + .iv = zeroed_string, + .input = "\x03\x88\xDA\xCE\x60\xB6\xA3\x92" + "\xF3\x28\xC2\xB9\x71\xB2\xFE\x78" + "\x97\xFE\x4C\x23\x37\x42\x01\xE0" + "\x81\x9F\x8D\xC5\xD7\x41\xA0\x1B", + .ilen = 32, + .assoc = zeroed_string, + .alen = 8, + .result = zeroed_string, + .rlen = 16, + + },{ + .key = "\xfe\xff\xe9\x92\x86\x65\x73\x1c" + "\x6d\x6a\x8f\x94\x67\x30\x83\x08" + "\x00\x00\x00\x00", + .klen = 20, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x01" + "\x00\x00\x00\x00", + .input = "\xC0\x0D\x8B\x42\x0F\x8F\x34\x18" + "\x88\xB1\xC5\xBC\xC5\xB6\xD6\x28" + "\x6A\x9D\xDF\x11\x5E\xFE\x5E\x9D" + "\x2F\x70\x44\x92\xF7\xF2\xE3\xEF", + .ilen = 32, + .assoc = zeroed_string, + .alen = 8, + .result = zeroed_string, + .rlen = 16, + }, { + .key = "\xfe\xff\xe9\x92\x86\x65\x73\x1c" + "\x6d\x6a\x8f\x94\x67\x30\x83\x08" + "\x00\x00\x00\x00", + .klen = 20, + .iv = zeroed_string, + .input = "\x4B\xB1\xB5\xE3\x25\x71\x70\xDE" + "\x7F\xC9\x9C\xA5\x14\x19\xF2\xAC" + "\x0B\x8F\x88\x69\x17\xE6\xB4\x3C" + "\xB1\x68\xFD\x14\x52\x64\x61\xB2", + .ilen = 32, + .assoc = zeroed_string, + .alen = 8, + .result = "\x01\x01\x01\x01\x01\x01\x01\x01" + "\x01\x01\x01\x01\x01\x01\x01\x01", + .rlen = 16, + }, { + 
.key = "\xfe\xff\xe9\x92\x86\x65\x73\x1c" + "\x6d\x6a\x8f\x94\x67\x30\x83\x08" + "\x00\x00\x00\x00", + .klen = 20, + .iv = zeroed_string, + .input = "\x4B\xB1\xB5\xE3\x25\x71\x70\xDE" + "\x7F\xC9\x9C\xA5\x14\x19\xF2\xAC" + "\x90\x92\xB7\xE3\x5F\xA3\x9A\x63" + "\x7E\xD7\x1F\xD8\xD3\x7C\x4B\xF5", + .ilen = 32, + .assoc = "\x01\x01\x01\x01\x01\x01\x01\x01", + .alen = 8, + .result = "\x01\x01\x01\x01\x01\x01\x01\x01" + "\x01\x01\x01\x01\x01\x01\x01\x01", + .rlen = 16, + + }, { + .key = "\xfe\xff\xe9\x92\x86\x65\x73\x1c" + "\x6d\x6a\x8f\x94\x67\x30\x83\x08" + "\x00\x00\x00\x00", + .klen = 20, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x01" + "\x00\x00\x00\x00", + .input = "\xC1\x0C\x8A\x43\x0E\x8E\x35\x19" + "\x89\xB0\xC4\xBD\xC4\xB7\xD7\x29" + "\x64\x50\xF9\x32\x13\xFB\x74\x61" + "\xF4\xED\x52\xD3\xC5\x10\x55\x3C", + .ilen = 32, + .assoc = "\x01\x01\x01\x01\x01\x01\x01\x01", + .alen = 8, + .result = "\x01\x01\x01\x01\x01\x01\x01\x01" + "\x01\x01\x01\x01\x01\x01\x01\x01", + .rlen = 16, + }, { + .key = "\xfe\xff\xe9\x92\x86\x65\x73\x1c" + "\x6d\x6a\x8f\x94\x67\x30\x83\x08" + "\x00\x00\x00\x00", + .klen = 20, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x01" + "\x00\x00\x00\x00", + .input = "\xC1\x0C\x8A\x43\x0E\x8E\x35\x19" + "\x89\xB0\xC4\xBD\xC4\xB7\xD7\x29" + "\x98\x14\xA1\x42\x37\x80\xFD\x90" + "\x68\x12\x01\xA8\x91\x89\xB9\x83" + "\x5B\x11\x77\x12\x9B\xFF\x24\x89" + "\x94\x5F\x18\x12\xBA\x27\x09\x39" + "\x99\x96\x76\x42\x15\x1C\xCD\xCB" + "\xDC\xD3\xDA\x65\x73\xAF\x80\xCD" + "\xD2\xB6\xC2\x4A\x76\xC2\x92\x85" + "\xBD\xCF\x62\x98\x58\x14\xE5\xBD", + .ilen = 80, + .assoc = "\x01\x01\x01\x01\x01\x01\x01\x01", + .alen = 8, + .result = "\x01\x01\x01\x01\x01\x01\x01\x01" + "\x01\x01\x01\x01\x01\x01\x01\x01" + "\x01\x01\x01\x01\x01\x01\x01\x01" + "\x01\x01\x01\x01\x01\x01\x01\x01" + "\x01\x01\x01\x01\x01\x01\x01\x01" + "\x01\x01\x01\x01\x01\x01\x01\x01" + "\x01\x01\x01\x01\x01\x01\x01\x01" + "\x01\x01\x01\x01\x01\x01\x01\x01", + .rlen = 64, + }, { + .key = "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f" + "\x00\x00\x00\x00", + .klen = 20, + .iv = "\x00\x00\x45\x67\x89\xab\xcd\xef" + "\x00\x00\x00\x00", + .input = "\xC1\x76\x33\x85\xE2\x9B\x5F\xDE" + "\xDE\x89\x3D\x42\xE7\xC9\x69\x8A" + "\x44\x6D\xC3\x88\x46\x2E\xC2\x01" + "\x5E\xF6\x0C\x39\xF0\xC4\xA5\x82" + "\xCD\xE8\x31\xCC\x0A\x4C\xE4\x44" + "\x41\xA9\x82\x6F\x22\xA1\x23\x1A" + "\xA8\xE3\x16\xFD\x31\x5C\x27\x31" + "\xF1\x7F\x01\x63\xA3\xAF\x70\xA1" + "\xCF\x07\x57\x41\x67\xD0\xC4\x42" + "\xDB\x18\xC6\x4C\x4C\xE0\x3D\x9F" + "\x05\x07\xFB\x13\x7D\x4A\xCA\x5B" + "\xF0\xBF\x64\x7E\x05\xB1\x72\xEE" + "\x7C\x3B\xD4\xCD\x14\x03\xB2\x2C" + "\xD3\xA9\xEE\xFA\x17\xFC\x9C\xDF" + "\xC7\x75\x40\xFF\xAE\xAD\x1E\x59" + "\x2F\x30\x24\xFB\xAD\x6B\x10\xFA" + "\x6C\x9F\x5B\xE7\x25\xD5\xD0\x25" + "\xAC\x4A\x4B\xDA\xFC\x7A\x85\x1B" + "\x7E\x13\x06\x82\x08\x17\xA4\x35" + "\xEC\xC5\x8D\x63\x96\x81\x0A\x8F" + "\xA3\x05\x38\x95\x20\x1A\x47\x04" + "\x6F\x6D\xDA\x8F\xEF\xC1\x76\x35" + "\x6B\xC7\x4D\x0F\x94\x12\xCA\x3E" + "\x2E\xD5\x03\x2E\x86\x7E\xAA\x3B" + "\x37\x08\x1C\xCF\xBA\x5D\x71\x46" + "\x80\x72\xB0\x4C\x82\x0D\x60\x3C", + .ilen = 208, + .assoc = "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa" + "\xaa\xaa\xaa\xaa", + .alen = 12, + .result = "\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff" + 
"\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff", + .rlen = 192, + + } +}; + +static struct aead_testvec aes_ccm_enc_tv_template[] = { + { /* From RFC 3610 */ + .key = "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7" + "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf", + .klen = 16, + .iv = "\x01\x00\x00\x00\x03\x02\x01\x00" + "\xa0\xa1\xa2\xa3\xa4\xa5\x00\x00", + .assoc = "\x00\x01\x02\x03\x04\x05\x06\x07", + .alen = 8, + .input = "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f" + "\x10\x11\x12\x13\x14\x15\x16\x17" + "\x18\x19\x1a\x1b\x1c\x1d\x1e", + .ilen = 23, + .result = "\x58\x8c\x97\x9a\x61\xc6\x63\xd2" + "\xf0\x66\xd0\xc2\xc0\xf9\x89\x80" + "\x6d\x5f\x6b\x61\xda\xc3\x84\x17" + "\xe8\xd1\x2c\xfd\xf9\x26\xe0", + .rlen = 31, + }, { + .key = "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7" + "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf", + .klen = 16, + .iv = "\x01\x00\x00\x00\x07\x06\x05\x04" + "\xa0\xa1\xa2\xa3\xa4\xa5\x00\x00", + .assoc = "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b", + .alen = 12, + .input = "\x0c\x0d\x0e\x0f\x10\x11\x12\x13" + "\x14\x15\x16\x17\x18\x19\x1a\x1b" + "\x1c\x1d\x1e\x1f", + .ilen = 20, + .result = "\xdc\xf1\xfb\x7b\x5d\x9e\x23\xfb" + "\x9d\x4e\x13\x12\x53\x65\x8a\xd8" + "\x6e\xbd\xca\x3e\x51\xe8\x3f\x07" + "\x7d\x9c\x2d\x93", + .rlen = 28, + }, { + .key = "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7" + "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf", + .klen = 16, + .iv = "\x01\x00\x00\x00\x0b\x0a\x09\x08" + "\xa0\xa1\xa2\xa3\xa4\xa5\x00\x00", + .assoc = "\x00\x01\x02\x03\x04\x05\x06\x07", + .alen = 8, + .input = "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f" + "\x10\x11\x12\x13\x14\x15\x16\x17" + "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f" + "\x20", + .ilen = 25, + .result = "\x82\x53\x1a\x60\xcc\x24\x94\x5a" + "\x4b\x82\x79\x18\x1a\xb5\xc8\x4d" + "\xf2\x1c\xe7\xf9\xb7\x3f\x42\xe1" + "\x97\xea\x9c\x07\xe5\x6b\x5e\xb1" + "\x7e\x5f\x4e", + .rlen = 35, + }, { + .key = "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7" + "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf", + .klen = 16, + .iv = "\x01\x00\x00\x00\x0c\x0b\x0a\x09" + "\xa0\xa1\xa2\xa3\xa4\xa5\x00\x00", + .assoc = "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b", + .alen = 12, + .input = "\x0c\x0d\x0e\x0f\x10\x11\x12\x13" + "\x14\x15\x16\x17\x18\x19\x1a\x1b" + "\x1c\x1d\x1e", + .ilen = 19, + .result = "\x07\x34\x25\x94\x15\x77\x85\x15" + "\x2b\x07\x40\x98\x33\x0a\xbb\x14" + "\x1b\x94\x7b\x56\x6a\xa9\x40\x6b" + "\x4d\x99\x99\x88\xdd", + .rlen = 29, + }, { + .key = "\xd7\x82\x8d\x13\xb2\xb0\xbd\xc3" + "\x25\xa7\x62\x36\xdf\x93\xcc\x6b", + .klen = 16, + .iv = "\x01\x00\x33\x56\x8e\xf7\xb2\x63" + "\x3c\x96\x96\x76\x6c\xfa\x00\x00", + .assoc = "\x63\x01\x8f\x76\xdc\x8a\x1b\xcb", + .alen = 8, + .input = "\x90\x20\xea\x6f\x91\xbd\xd8\x5a" + "\xfa\x00\x39\xba\x4b\xaf\xf9\xbf" + "\xb7\x9c\x70\x28\x94\x9c\xd0\xec", + .ilen = 24, + .result = "\x4c\xcb\x1e\x7c\xa9\x81\xbe\xfa" + "\xa0\x72\x6c\x55\xd3\x78\x06\x12" + "\x98\xc8\x5c\x92\x81\x4a\xbc\x33" + "\xc5\x2e\xe8\x1d\x7d\x77\xc0\x8a", + .rlen = 32, + }, { + .key = "\xd7\x82\x8d\x13\xb2\xb0\xbd\xc3" + "\x25\xa7\x62\x36\xdf\x93\xcc\x6b", + .klen = 16, 
+ .iv = "\x01\x00\xd5\x60\x91\x2d\x3f\x70" + "\x3c\x96\x96\x76\x6c\xfa\x00\x00", + .assoc = "\xcd\x90\x44\xd2\xb7\x1f\xdb\x81" + "\x20\xea\x60\xc0", + .alen = 12, + .input = "\x64\x35\xac\xba\xfb\x11\xa8\x2e" + "\x2f\x07\x1d\x7c\xa4\xa5\xeb\xd9" + "\x3a\x80\x3b\xa8\x7f", + .ilen = 21, + .result = "\x00\x97\x69\xec\xab\xdf\x48\x62" + "\x55\x94\xc5\x92\x51\xe6\x03\x57" + "\x22\x67\x5e\x04\xc8\x47\x09\x9e" + "\x5a\xe0\x70\x45\x51", + .rlen = 29, + }, { + .key = "\xd7\x82\x8d\x13\xb2\xb0\xbd\xc3" + "\x25\xa7\x62\x36\xdf\x93\xcc\x6b", + .klen = 16, + .iv = "\x01\x00\x42\xff\xf8\xf1\x95\x1c" + "\x3c\x96\x96\x76\x6c\xfa\x00\x00", + .assoc = "\xd8\x5b\xc7\xe6\x9f\x94\x4f\xb8", + .alen = 8, + .input = "\x8a\x19\xb9\x50\xbc\xf7\x1a\x01" + "\x8e\x5e\x67\x01\xc9\x17\x87\x65" + "\x98\x09\xd6\x7d\xbe\xdd\x18", + .ilen = 23, + .result = "\xbc\x21\x8d\xaa\x94\x74\x27\xb6" + "\xdb\x38\x6a\x99\xac\x1a\xef\x23" + "\xad\xe0\xb5\x29\x39\xcb\x6a\x63" + "\x7c\xf9\xbe\xc2\x40\x88\x97\xc6" + "\xba", + .rlen = 33, + }, +}; + +static struct aead_testvec aes_ccm_dec_tv_template[] = { + { /* From RFC 3610 */ + .key = "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7" + "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf", + .klen = 16, + .iv = "\x01\x00\x00\x00\x03\x02\x01\x00" + "\xa0\xa1\xa2\xa3\xa4\xa5\x00\x00", + .assoc = "\x00\x01\x02\x03\x04\x05\x06\x07", + .alen = 8, + .input = "\x58\x8c\x97\x9a\x61\xc6\x63\xd2" + "\xf0\x66\xd0\xc2\xc0\xf9\x89\x80" + "\x6d\x5f\x6b\x61\xda\xc3\x84\x17" + "\xe8\xd1\x2c\xfd\xf9\x26\xe0", + .ilen = 31, + .result = "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f" + "\x10\x11\x12\x13\x14\x15\x16\x17" + "\x18\x19\x1a\x1b\x1c\x1d\x1e", + .rlen = 23, + }, { + .key = "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7" + "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf", + .klen = 16, + .iv = "\x01\x00\x00\x00\x07\x06\x05\x04" + "\xa0\xa1\xa2\xa3\xa4\xa5\x00\x00", + .assoc = "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b", + .alen = 12, + .input = "\xdc\xf1\xfb\x7b\x5d\x9e\x23\xfb" + "\x9d\x4e\x13\x12\x53\x65\x8a\xd8" + "\x6e\xbd\xca\x3e\x51\xe8\x3f\x07" + "\x7d\x9c\x2d\x93", + .ilen = 28, + .result = "\x0c\x0d\x0e\x0f\x10\x11\x12\x13" + "\x14\x15\x16\x17\x18\x19\x1a\x1b" + "\x1c\x1d\x1e\x1f", + .rlen = 20, + }, { + .key = "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7" + "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf", + .klen = 16, + .iv = "\x01\x00\x00\x00\x0b\x0a\x09\x08" + "\xa0\xa1\xa2\xa3\xa4\xa5\x00\x00", + .assoc = "\x00\x01\x02\x03\x04\x05\x06\x07", + .alen = 8, + .input = "\x82\x53\x1a\x60\xcc\x24\x94\x5a" + "\x4b\x82\x79\x18\x1a\xb5\xc8\x4d" + "\xf2\x1c\xe7\xf9\xb7\x3f\x42\xe1" + "\x97\xea\x9c\x07\xe5\x6b\x5e\xb1" + "\x7e\x5f\x4e", + .ilen = 35, + .result = "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f" + "\x10\x11\x12\x13\x14\x15\x16\x17" + "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f" + "\x20", + .rlen = 25, + }, { + .key = "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7" + "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf", + .klen = 16, + .iv = "\x01\x00\x00\x00\x0c\x0b\x0a\x09" + "\xa0\xa1\xa2\xa3\xa4\xa5\x00\x00", + .assoc = "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b", + .alen = 12, + .input = "\x07\x34\x25\x94\x15\x77\x85\x15" + "\x2b\x07\x40\x98\x33\x0a\xbb\x14" + "\x1b\x94\x7b\x56\x6a\xa9\x40\x6b" + "\x4d\x99\x99\x88\xdd", + .ilen = 29, + .result = "\x0c\x0d\x0e\x0f\x10\x11\x12\x13" + "\x14\x15\x16\x17\x18\x19\x1a\x1b" + "\x1c\x1d\x1e", + .rlen = 19, + }, { + .key = "\xd7\x82\x8d\x13\xb2\xb0\xbd\xc3" + "\x25\xa7\x62\x36\xdf\x93\xcc\x6b", + .klen = 16, + .iv = "\x01\x00\x33\x56\x8e\xf7\xb2\x63" + "\x3c\x96\x96\x76\x6c\xfa\x00\x00", + .assoc = "\x63\x01\x8f\x76\xdc\x8a\x1b\xcb", + .alen = 8, + .input = 
"\x4c\xcb\x1e\x7c\xa9\x81\xbe\xfa" + "\xa0\x72\x6c\x55\xd3\x78\x06\x12" + "\x98\xc8\x5c\x92\x81\x4a\xbc\x33" + "\xc5\x2e\xe8\x1d\x7d\x77\xc0\x8a", + .ilen = 32, + .result = "\x90\x20\xea\x6f\x91\xbd\xd8\x5a" + "\xfa\x00\x39\xba\x4b\xaf\xf9\xbf" + "\xb7\x9c\x70\x28\x94\x9c\xd0\xec", + .rlen = 24, + }, { + .key = "\xd7\x82\x8d\x13\xb2\xb0\xbd\xc3" + "\x25\xa7\x62\x36\xdf\x93\xcc\x6b", + .klen = 16, + .iv = "\x01\x00\xd5\x60\x91\x2d\x3f\x70" + "\x3c\x96\x96\x76\x6c\xfa\x00\x00", + .assoc = "\xcd\x90\x44\xd2\xb7\x1f\xdb\x81" + "\x20\xea\x60\xc0", + .alen = 12, + .input = "\x00\x97\x69\xec\xab\xdf\x48\x62" + "\x55\x94\xc5\x92\x51\xe6\x03\x57" + "\x22\x67\x5e\x04\xc8\x47\x09\x9e" + "\x5a\xe0\x70\x45\x51", + .ilen = 29, + .result = "\x64\x35\xac\xba\xfb\x11\xa8\x2e" + "\x2f\x07\x1d\x7c\xa4\xa5\xeb\xd9" + "\x3a\x80\x3b\xa8\x7f", + .rlen = 21, + }, { + .key = "\xd7\x82\x8d\x13\xb2\xb0\xbd\xc3" + "\x25\xa7\x62\x36\xdf\x93\xcc\x6b", + .klen = 16, + .iv = "\x01\x00\x42\xff\xf8\xf1\x95\x1c" + "\x3c\x96\x96\x76\x6c\xfa\x00\x00", + .assoc = "\xd8\x5b\xc7\xe6\x9f\x94\x4f\xb8", + .alen = 8, + .input = "\xbc\x21\x8d\xaa\x94\x74\x27\xb6" + "\xdb\x38\x6a\x99\xac\x1a\xef\x23" + "\xad\xe0\xb5\x29\x39\xcb\x6a\x63" + "\x7c\xf9\xbe\xc2\x40\x88\x97\xc6" + "\xba", + .ilen = 33, + .result = "\x8a\x19\xb9\x50\xbc\xf7\x1a\x01" + "\x8e\x5e\x67\x01\xc9\x17\x87\x65" + "\x98\x09\xd6\x7d\xbe\xdd\x18", + .rlen = 23, + }, +}; + +/* + * rfc4309 refers to section 8 of rfc3610 for test vectors, but they all + * use a 13-byte nonce, we only support an 11-byte nonce. Similarly, all of + * Special Publication 800-38C's test vectors also use nonce lengths our + * implementation doesn't support. The following are taken from fips cavs + * fax files on hand at Red Hat. + * + * nb: actual key lengths are (klen - 3), the last 3 bytes are actually + * part of the nonce which combine w/the iv, but need to be input this way. 
+ */ +static struct aead_testvec aes_ccm_rfc4309_enc_tv_template[] = { + { + .key = "\x83\xac\x54\x66\xc2\xeb\xe5\x05" + "\x2e\x01\xd1\xfc\x5d\x82\x66\x2e" + "\x96\xac\x59", + .klen = 19, + .iv = "\x30\x07\xa1\xe2\xa2\xc7\x55\x24", + .alen = 0, + .input = "\x19\xc8\x81\xf6\xe9\x86\xff\x93" + "\x0b\x78\x67\xe5\xbb\xb7\xfc\x6e" + "\x83\x77\xb3\xa6\x0c\x8c\x9f\x9c" + "\x35\x2e\xad\xe0\x62\xf9\x91\xa1", + .ilen = 32, + .result = "\xab\x6f\xe1\x69\x1d\x19\x99\xa8" + "\x92\xa0\xc4\x6f\x7e\xe2\x8b\xb1" + "\x70\xbb\x8c\xa6\x4c\x6e\x97\x8a" + "\x57\x2b\xbe\x5d\x98\xa6\xb1\x32" + "\xda\x24\xea\xd9\xa1\x39\x98\xfd" + "\xa4\xbe\xd9\xf2\x1a\x6d\x22\xa8", + .rlen = 48, + }, { + .key = "\x1e\x2c\x7e\x01\x41\x9a\xef\xc0" + "\x0d\x58\x96\x6e\x5c\xa2\x4b\xd3" + "\x4f\xa3\x19", + .klen = 19, + .iv = "\xd3\x01\x5a\xd8\x30\x60\x15\x56", + .assoc = "\xda\xe6\x28\x9c\x45\x2d\xfd\x63" + "\x5e\xda\x4c\xb6\xe6\xfc\xf9\xb7" + "\x0c\x56\xcb\xe4\xe0\x05\x7a\xe1" + "\x0a\x63\x09\x78\xbc\x2c\x55\xde", + .alen = 32, + .input = "\x87\xa3\x36\xfd\x96\xb3\x93\x78" + "\xa9\x28\x63\xba\x12\xa3\x14\x85" + "\x57\x1e\x06\xc9\x7b\x21\xef\x76" + "\x7f\x38\x7e\x8e\x29\xa4\x3e\x7e", + .ilen = 32, + .result = "\x8a\x1e\x11\xf0\x02\x6b\xe2\x19" + "\xfc\x70\xc4\x6d\x8e\xb7\x99\xab" + "\xc5\x4b\xa2\xac\xd3\xf3\x48\xff" + "\x3b\xb5\xce\x53\xef\xde\xbb\x02" + "\xa9\x86\x15\x6c\x13\xfe\xda\x0a" + "\x22\xb8\x29\x3d\xd8\x39\x9a\x23", + .rlen = 48, + }, { + .key = "\xf4\x6b\xc2\x75\x62\xfe\xb4\xe1" + "\xa3\xf0\xff\xdd\x4e\x4b\x12\x75" + "\x53\x14\x73\x66\x8d\x88\xf6\x80" + "\xa0\x20\x35", + .klen = 27, + .iv = "\x26\xf2\x21\x8d\x50\x20\xda\xe2", + .assoc = "\x5b\x9e\x13\x67\x02\x5e\xef\xc1" + "\x6c\xf9\xd7\x1e\x52\x8f\x7a\x47" + "\xe9\xd4\xcf\x20\x14\x6e\xf0\x2d" + "\xd8\x9e\x2b\x56\x10\x23\x56\xe7", + .alen = 32, + .ilen = 0, + .result = "\x36\xea\x7a\x70\x08\xdc\x6a\xbc" + "\xad\x0c\x7a\x63\xf6\x61\xfd\x9b", + .rlen = 16, + }, { + .key = "\x56\xdf\x5c\x8f\x26\x3f\x0e\x42" + "\xef\x7a\xd3\xce\xfc\x84\x60\x62" + "\xca\xb4\x40\xaf\x5f\xc9\xc9\x01" + "\xd6\x3c\x8c", + .klen = 27, + .iv = "\x86\x84\xb6\xcd\xef\x09\x2e\x94", + .assoc = "\x02\x65\x78\x3c\xe9\x21\x30\x91" + "\xb1\xb9\xda\x76\x9a\x78\x6d\x95" + "\xf2\x88\x32\xa3\xf2\x50\xcb\x4c" + "\xe3\x00\x73\x69\x84\x69\x87\x79", + .alen = 32, + .input = "\x9f\xd2\x02\x4b\x52\x49\x31\x3c" + "\x43\x69\x3a\x2d\x8e\x70\xad\x7e" + "\xe0\xe5\x46\x09\x80\x89\x13\xb2" + "\x8c\x8b\xd9\x3f\x86\xfb\xb5\x6b", + .ilen = 32, + .result = "\x39\xdf\x7c\x3c\x5a\x29\xb9\x62" + "\x5d\x51\xc2\x16\xd8\xbd\x06\x9f" + "\x9b\x6a\x09\x70\xc1\x51\x83\xc2" + "\x66\x88\x1d\x4f\x9a\xda\xe0\x1e" + "\xc7\x79\x11\x58\xe5\x6b\x20\x40" + "\x7a\xea\x46\x42\x8b\xe4\x6f\xe1", + .rlen = 48, + }, { + .key = "\xe0\x8d\x99\x71\x60\xd7\x97\x1a" + "\xbd\x01\x99\xd5\x8a\xdf\x71\x3a" + "\xd3\xdf\x24\x4b\x5e\x3d\x4b\x4e" + "\x30\x7a\xb9\xd8\x53\x0a\x5e\x2b" + "\x1e\x29\x91", + .klen = 35, + .iv = "\xad\x8e\xc1\x53\x0a\xcf\x2d\xbe", + .assoc = "\x19\xb6\x1f\x57\xc4\xf3\xf0\x8b" + "\x78\x2b\x94\x02\x29\x0f\x42\x27" + "\x6b\x75\xcb\x98\x34\x08\x7e\x79" + "\xe4\x3e\x49\x0d\x84\x8b\x22\x87", + .alen = 32, + .input = "\xe1\xd9\xd8\x13\xeb\x3a\x75\x3f" + "\x9d\xbd\x5f\x66\xbe\xdc\xbb\x66" + "\xbf\x17\x99\x62\x4a\x39\x27\x1f" + "\x1d\xdc\x24\xae\x19\x2f\x98\x4c", + .ilen = 32, + .result = "\x19\xb8\x61\x33\x45\x2b\x43\x96" + "\x6f\x51\xd0\x20\x30\x7d\x9b\xc6" + "\x26\x3d\xf8\xc9\x65\x16\xa8\x9f" + "\xf0\x62\x17\x34\xf2\x1e\x8d\x75" + "\x4e\x13\xcc\xc0\xc3\x2a\x54\x2d", + .rlen = 40, + }, { + .key = "\x7c\xc8\x18\x3b\x8d\x99\xe0\x7c" + 
"\x45\x41\xb8\xbd\x5c\xa7\xc2\x32" + "\x8a\xb8\x02\x59\xa4\xfe\xa9\x2c" + "\x09\x75\x9a\x9b\x3c\x9b\x27\x39" + "\xf9\xd9\x4e", + .klen = 35, + .iv = "\x63\xb5\x3d\x9d\x43\xf6\x1e\x50", + .assoc = "\x57\xf5\x6b\x8b\x57\x5c\x3d\x3b" + "\x13\x02\x01\x0c\x83\x4c\x96\x35" + "\x8e\xd6\x39\xcf\x7d\x14\x9b\x94" + "\xb0\x39\x36\xe6\x8f\x57\xe0\x13", + .alen = 32, + .input = "\x3b\x6c\x29\x36\xb6\xef\x07\xa6" + "\x83\x72\x07\x4f\xcf\xfa\x66\x89" + "\x5f\xca\xb1\xba\xd5\x8f\x2c\x27" + "\x30\xdb\x75\x09\x93\xd4\x65\xe4", + .ilen = 32, + .result = "\xb0\x88\x5a\x33\xaa\xe5\xc7\x1d" + "\x85\x23\xc7\xc6\x2f\xf4\x1e\x3d" + "\xcc\x63\x44\x25\x07\x78\x4f\x9e" + "\x96\xb8\x88\xeb\xbc\x48\x1f\x06" + "\x39\xaf\x39\xac\xd8\x4a\x80\x39" + "\x7b\x72\x8a\xf7", + .rlen = 44, + }, { + .key = "\xab\xd0\xe9\x33\x07\x26\xe5\x83" + "\x8c\x76\x95\xd4\xb6\xdc\xf3\x46" + "\xf9\x8f\xad\xe3\x02\x13\x83\x77" + "\x3f\xb0\xf1\xa1\xa1\x22\x0f\x2b" + "\x24\xa7\x8b", + .klen = 35, + .iv = "\x07\xcb\xcc\x0e\xe6\x33\xbf\xf5", + .assoc = "\xd4\xdb\x30\x1d\x03\xfe\xfd\x5f" + "\x87\xd4\x8c\xb6\xb6\xf1\x7a\x5d" + "\xab\x90\x65\x8d\x8e\xca\x4d\x4f" + "\x16\x0c\x40\x90\x4b\xc7\x36\x73", + .alen = 32, + .input = "\xf5\xc6\x7d\x48\xc1\xb7\xe6\x92" + "\x97\x5a\xca\xc4\xa9\x6d\xf9\x3d" + "\x6c\xde\xbc\xf1\x90\xea\x6a\xb2" + "\x35\x86\x36\xaf\x5c\xfe\x4b\x3a", + .ilen = 32, + .result = "\x83\x6f\x40\x87\x72\xcf\xc1\x13" + "\xef\xbb\x80\x21\x04\x6c\x58\x09" + "\x07\x1b\xfc\xdf\xc0\x3f\x5b\xc7" + "\xe0\x79\xa8\x6e\x71\x7c\x3f\xcf" + "\x5c\xda\xb2\x33\xe5\x13\xe2\x0d" + "\x74\xd1\xef\xb5\x0f\x3a\xb5\xf8", + .rlen = 48, + }, +}; + +static struct aead_testvec aes_ccm_rfc4309_dec_tv_template[] = { + { + .key = "\xab\x2f\x8a\x74\xb7\x1c\xd2\xb1" + "\xff\x80\x2e\x48\x7d\x82\xf8\xb9" + "\xc6\xfb\x7d", + .klen = 19, + .iv = "\x80\x0d\x13\xab\xd8\xa6\xb2\xd8", + .alen = 0, + .input = "\xd5\xe8\x93\x9f\xc7\x89\x2e\x2b", + .ilen = 8, + .result = "\x00", + .rlen = 0, + .novrfy = 1, + }, { + .key = "\xab\x2f\x8a\x74\xb7\x1c\xd2\xb1" + "\xff\x80\x2e\x48\x7d\x82\xf8\xb9" + "\xaf\x94\x87", + .klen = 19, + .iv = "\x78\x35\x82\x81\x7f\x88\x94\x68", + .alen = 0, + .input = "\x41\x3c\xb8\x87\x73\xcb\xf3\xf3", + .ilen = 8, + .result = "\x00", + .rlen = 0, + }, { + .key = "\x61\x0e\x8c\xae\xe3\x23\xb6\x38" + "\x76\x1c\xf6\x3a\x67\xa3\x9c\xd8" + "\xc6\xfb\x7d", + .klen = 19, + .iv = "\x80\x0d\x13\xab\xd8\xa6\xb2\xd8", + .assoc = "\xf3\x94\x87\x78\x35\x82\x81\x7f" + "\x88\x94\x68\xb1\x78\x6b\x2b\xd6" + "\x04\x1f\x4e\xed\x78\xd5\x33\x66" + "\xd8\x94\x99\x91\x81\x54\x62\x57", + .alen = 32, + .input = "\xf0\x7c\x29\x02\xae\x1c\x2f\x55" + "\xd0\xd1\x3d\x1a\xa3\x6d\xe4\x0a" + "\x86\xb0\x87\x6b\x62\x33\x8c\x34" + "\xce\xab\x57\xcc\x79\x0b\xe0\x6f" + "\x5c\x3e\x48\x1f\x6c\x46\xf7\x51" + "\x8b\x84\x83\x2a\xc1\x05\xb8\xc5", + .ilen = 48, + .result = "\x50\x82\x3e\x07\xe2\x1e\xb6\xfb" + "\x33\xe4\x73\xce\xd2\xfb\x95\x79" + "\xe8\xb4\xb5\x77\x11\x10\x62\x6f" + "\x6a\x82\xd1\x13\xec\xf5\xd0\x48", + .rlen = 32, + .novrfy = 1, + }, { + .key = "\x61\x0e\x8c\xae\xe3\x23\xb6\x38" + "\x76\x1c\xf6\x3a\x67\xa3\x9c\xd8" + "\x05\xe0\xc9", + .klen = 19, + .iv = "\x0f\xed\x34\xea\x97\xd4\x3b\xdf", + .assoc = "\x49\x5c\x50\x1f\x1d\x94\xcc\x81" + "\xba\xb7\xb6\x03\xaf\xa5\xc1\xa1" + "\xd8\x5c\x42\x68\xe0\x6c\xda\x89" + "\x05\xac\x56\xac\x1b\x2a\xd3\x86", + .alen = 32, + .input = "\x39\xbe\x7d\x15\x62\x77\xf3\x3c" + "\xad\x83\x52\x6d\x71\x03\x25\x1c" + "\xed\x81\x3a\x9a\x16\x7d\x19\x80" + "\x72\x04\x72\xd0\xf6\xff\x05\x0f" + "\xb7\x14\x30\x00\x32\x9e\xa0\xa6" + 
"\x9e\x5a\x18\xa1\xb8\xfe\xdb\xd3", + .ilen = 48, + .result = "\x75\x05\xbe\xc2\xd9\x1e\xde\x60" + "\x47\x3d\x8c\x7d\xbd\xb5\xd9\xb7" + "\xf2\xae\x61\x05\x8f\x82\x24\x3f" + "\x9c\x67\x91\xe1\x38\x4f\xe4\x0c", + .rlen = 32, + }, { + .key = "\x39\xbb\xa7\xbe\x59\x97\x9e\x73" + "\xa2\xbc\x6b\x98\xd7\x75\x7f\xe3" + "\xa4\x48\x93\x39\x26\x71\x4a\xc6" + "\xee\x49\x83", + .klen = 27, + .iv = "\xe9\xa9\xff\xe9\x57\xba\xfd\x9e", + .assoc = "\x44\xa6\x2c\x05\xe9\xe1\x43\xb1" + "\x58\x7c\xf2\x5c\x6d\x39\x0a\x64" + "\xa4\xf0\x13\x05\xd1\x77\x99\x67" + "\x11\xc4\xc6\xdb\x00\x56\x36\x61", + .alen = 32, + .input = "\x71\x99\xfa\xf4\x44\x12\x68\x9b", + .ilen = 8, + .result = "\x00", + .rlen = 0, + }, { + .key = "\x58\x5d\xa0\x96\x65\x1a\x04\xd7" + "\x96\xe5\xc5\x68\xaa\x95\x35\xe0" + "\x29\xa0\xba\x9e\x48\x78\xd1\xba" + "\xee\x49\x83", + .klen = 27, + .iv = "\xe9\xa9\xff\xe9\x57\xba\xfd\x9e", + .assoc = "\x44\xa6\x2c\x05\xe9\xe1\x43\xb1" + "\x58\x7c\xf2\x5c\x6d\x39\x0a\x64" + "\xa4\xf0\x13\x05\xd1\x77\x99\x67" + "\x11\xc4\xc6\xdb\x00\x56\x36\x61", + .alen = 32, + .input = "\xfb\xe5\x5d\x34\xbe\xe5\xe8\xe7" + "\x5a\xef\x2f\xbf\x1f\x7f\xd4\xb2" + "\x66\xca\x61\x1e\x96\x7a\x61\xb3" + "\x1c\x16\x45\x52\xba\x04\x9c\x9f" + "\xb1\xd2\x40\xbc\x52\x7c\x6f\xb1", + .ilen = 40, + .result = "\x85\x34\x66\x42\xc8\x92\x0f\x36" + "\x58\xe0\x6b\x91\x3c\x98\x5c\xbb" + "\x0a\x85\xcc\x02\xad\x7a\x96\xe9" + "\x65\x43\xa4\xc3\x0f\xdc\x55\x81", + .rlen = 32, + }, { + .key = "\x58\x5d\xa0\x96\x65\x1a\x04\xd7" + "\x96\xe5\xc5\x68\xaa\x95\x35\xe0" + "\x29\xa0\xba\x9e\x48\x78\xd1\xba" + "\xd1\xfc\x57", + .klen = 27, + .iv = "\x9c\xfe\xb8\x9c\xad\x71\xaa\x1f", + .assoc = "\x86\x67\xa5\xa9\x14\x5f\x0d\xc6" + "\xff\x14\xc7\x44\xbf\x6c\x3a\xc3" + "\xff\xb6\x81\xbd\xe2\xd5\x06\xc7" + "\x3c\xa1\x52\x13\x03\x8a\x23\x3a", + .alen = 32, + .input = "\x3f\x66\xb0\x9d\xe5\x4b\x38\x00" + "\xc6\x0e\x6e\xe5\xd6\x98\xa6\x37" + "\x8c\x26\x33\xc6\xb2\xa2\x17\xfa" + "\x64\x19\xc0\x30\xd7\xfc\x14\x6b" + "\xe3\x33\xc2\x04\xb0\x37\xbe\x3f" + "\xa9\xb4\x2d\x68\x03\xa3\x44\xef", + .ilen = 48, + .result = "\x02\x87\x4d\x28\x80\x6e\xb2\xed" + "\x99\x2a\xa8\xca\x04\x25\x45\x90" + "\x1d\xdd\x5a\xd9\xe4\xdb\x9c\x9c" + "\x49\xe9\x01\xfe\xa7\x80\x6d\x6b", + .rlen = 32, + .novrfy = 1, + }, { + .key = "\xa4\x4b\x54\x29\x0a\xb8\x6d\x01" + "\x5b\x80\x2a\xcf\x25\xc4\xb7\x5c" + "\x20\x2c\xad\x30\xc2\x2b\x41\xfb" + "\x0e\x85\xbc\x33\xad\x0f\x2b\xff" + "\xee\x49\x83", + .klen = 35, + .iv = "\xe9\xa9\xff\xe9\x57\xba\xfd\x9e", + .alen = 0, + .input = "\x1f\xb8\x8f\xa3\xdd\x54\x00\xf2", + .ilen = 8, + .result = "\x00", + .rlen = 0, + }, { + .key = "\x39\xbb\xa7\xbe\x59\x97\x9e\x73" + "\xa2\xbc\x6b\x98\xd7\x75\x7f\xe3" + "\xa4\x48\x93\x39\x26\x71\x4a\xc6" + "\xae\x8f\x11\x4c\xc2\x9c\x4a\xbb" + "\x85\x34\x66", + .klen = 35, + .iv = "\x42\xc8\x92\x0f\x36\x58\xe0\x6b", + .alen = 0, + .input = "\x48\x01\x5e\x02\x24\x04\x66\x47" + "\xa1\xea\x6f\xaf\xe8\xfc\xfb\xdd" + "\xa5\xa9\x87\x8d\x84\xee\x2e\x77" + "\xbb\x86\xb9\xf5\x5c\x6c\xff\xf6" + "\x72\xc3\x8e\xf7\x70\xb1\xb2\x07" + "\xbc\xa8\xa3\xbd\x83\x7c\x1d\x2a", + .ilen = 48, + .result = "\xdc\x56\xf2\x71\xb0\xb1\xa0\x6c" + "\xf0\x97\x3a\xfb\x6d\xe7\x32\x99" + "\x3e\xaf\x70\x5e\xb2\x4d\xea\x39" + "\x89\xd4\x75\x7a\x63\xb1\xda\x93", + .rlen = 32, + .novrfy = 1, + }, { + .key = "\x58\x5d\xa0\x96\x65\x1a\x04\xd7" + "\x96\xe5\xc5\x68\xaa\x95\x35\xe0" + "\x29\xa0\xba\x9e\x48\x78\xd1\xba" + "\x0d\x1a\x53\x3b\xb5\xe3\xf8\x8b" + "\xcf\x76\x3f", + .klen = 35, + .iv = "\xd9\x95\x75\x8f\x44\x89\x40\x7b", + .assoc = 
"\x8f\x86\x6c\x4d\x1d\xc5\x39\x88" + "\xc8\xf3\x5c\x52\x10\x63\x6f\x2b" + "\x8a\x2a\xc5\x6f\x30\x23\x58\x7b" + "\xfb\x36\x03\x11\xb4\xd9\xf2\xfe", + .alen = 32, + .input = "\x48\x58\xd6\xf3\xad\x63\x58\xbf" + "\xae\xc7\x5e\xae\x83\x8f\x7b\xe4" + "\x78\x5c\x4c\x67\x71\x89\x94\xbf" + "\x47\xf1\x63\x7e\x1c\x59\xbd\xc5" + "\x7f\x44\x0a\x0c\x01\x18\x07\x92" + "\xe1\xd3\x51\xce\x32\x6d\x0c\x5b", + .ilen = 48, + .result = "\xc2\x54\xc8\xde\x78\x87\x77\x40" + "\x49\x71\xe4\xb7\xe7\xcb\x76\x61" + "\x0a\x41\xb9\xe9\xc0\x76\x54\xab" + "\x04\x49\x3b\x19\x93\x57\x25\x5d", + .rlen = 32, + }, +}; + +/* + * ANSI X9.31 Continuous Pseudo-Random Number Generator (AES mode) + * test vectors, taken from Appendix B.2.9 and B.2.10: + * http://csrc.nist.gov/groups/STM/cavp/documents/rng/RNGVS.pdf + * Only AES-128 is supported at this time. + */ +#define ANSI_CPRNG_AES_TEST_VECTORS 6 + +static struct cprng_testvec ansi_cprng_aes_tv_template[] = { + { + .key = "\xf3\xb1\x66\x6d\x13\x60\x72\x42" + "\xed\x06\x1c\xab\xb8\xd4\x62\x02", + .klen = 16, + .dt = "\xe6\xb3\xbe\x78\x2a\x23\xfa\x62" + "\xd7\x1d\x4a\xfb\xb0\xe9\x22\xf9", + .dtlen = 16, + .v = "\x80\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00", + .vlen = 16, + .result = "\x59\x53\x1e\xd1\x3b\xb0\xc0\x55" + "\x84\x79\x66\x85\xc1\x2f\x76\x41", + .rlen = 16, + .loops = 1, + }, { + .key = "\xf3\xb1\x66\x6d\x13\x60\x72\x42" + "\xed\x06\x1c\xab\xb8\xd4\x62\x02", + .klen = 16, + .dt = "\xe6\xb3\xbe\x78\x2a\x23\xfa\x62" + "\xd7\x1d\x4a\xfb\xb0\xe9\x22\xfa", + .dtlen = 16, + .v = "\xc0\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00", + .vlen = 16, + .result = "\x7c\x22\x2c\xf4\xca\x8f\xa2\x4c" + "\x1c\x9c\xb6\x41\xa9\xf3\x22\x0d", + .rlen = 16, + .loops = 1, + }, { + .key = "\xf3\xb1\x66\x6d\x13\x60\x72\x42" + "\xed\x06\x1c\xab\xb8\xd4\x62\x02", + .klen = 16, + .dt = "\xe6\xb3\xbe\x78\x2a\x23\xfa\x62" + "\xd7\x1d\x4a\xfb\xb0\xe9\x22\xfb", + .dtlen = 16, + .v = "\xe0\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00", + .vlen = 16, + .result = "\x8a\xaa\x00\x39\x66\x67\x5b\xe5" + "\x29\x14\x28\x81\xa9\x4d\x4e\xc7", + .rlen = 16, + .loops = 1, + }, { + .key = "\xf3\xb1\x66\x6d\x13\x60\x72\x42" + "\xed\x06\x1c\xab\xb8\xd4\x62\x02", + .klen = 16, + .dt = "\xe6\xb3\xbe\x78\x2a\x23\xfa\x62" + "\xd7\x1d\x4a\xfb\xb0\xe9\x22\xfc", + .dtlen = 16, + .v = "\xf0\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00", + .vlen = 16, + .result = "\x88\xdd\xa4\x56\x30\x24\x23\xe5" + "\xf6\x9d\xa5\x7e\x7b\x95\xc7\x3a", + .rlen = 16, + .loops = 1, + }, { + .key = "\xf3\xb1\x66\x6d\x13\x60\x72\x42" + "\xed\x06\x1c\xab\xb8\xd4\x62\x02", + .klen = 16, + .dt = "\xe6\xb3\xbe\x78\x2a\x23\xfa\x62" + "\xd7\x1d\x4a\xfb\xb0\xe9\x22\xfd", + .dtlen = 16, + .v = "\xf8\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00", + .vlen = 16, + .result = "\x05\x25\x92\x46\x61\x79\xd2\xcb" + "\x78\xc4\x0b\x14\x0a\x5a\x9a\xc8", + .rlen = 16, + .loops = 1, + }, { /* Monte Carlo Test */ + .key = "\x9f\x5b\x51\x20\x0b\xf3\x34\xb5" + "\xd8\x2b\xe8\xc3\x72\x55\xc8\x48", + .klen = 16, + .dt = "\x63\x76\xbb\xe5\x29\x02\xba\x3b" + "\x67\xc9\x25\xfa\x70\x1f\x11\xac", + .dtlen = 16, + .v = "\x57\x2c\x8e\x76\x87\x26\x47\x97" + "\x7e\x74\xfb\xdd\xc4\x95\x01\xd1", + .vlen = 16, + .result = "\x48\xe9\xbd\x0d\x06\xee\x18\xfb" + "\xe4\x57\x90\xd5\xc3\xfc\x9b\x73", + .rlen = 16, + .loops = 10000, + }, +}; + +/* Cast5 test vectors from RFC 2144 */ +#define CAST5_ENC_TEST_VECTORS 3 +#define CAST5_DEC_TEST_VECTORS 3 + +static struct cipher_testvec 
cast5_enc_tv_template[] = { + { + .key = "\x01\x23\x45\x67\x12\x34\x56\x78" + "\x23\x45\x67\x89\x34\x56\x78\x9a", + .klen = 16, + .input = "\x01\x23\x45\x67\x89\xab\xcd\xef", + .ilen = 8, + .result = "\x23\x8b\x4f\xe5\x84\x7e\x44\xb2", + .rlen = 8, + }, { + .key = "\x01\x23\x45\x67\x12\x34\x56\x78" + "\x23\x45", + .klen = 10, + .input = "\x01\x23\x45\x67\x89\xab\xcd\xef", + .ilen = 8, + .result = "\xeb\x6a\x71\x1a\x2c\x02\x27\x1b", + .rlen = 8, + }, { + .key = "\x01\x23\x45\x67\x12", + .klen = 5, + .input = "\x01\x23\x45\x67\x89\xab\xcd\xef", + .ilen = 8, + .result = "\x7a\xc8\x16\xd1\x6e\x9b\x30\x2e", + .rlen = 8, + }, +}; + +static struct cipher_testvec cast5_dec_tv_template[] = { + { + .key = "\x01\x23\x45\x67\x12\x34\x56\x78" + "\x23\x45\x67\x89\x34\x56\x78\x9a", + .klen = 16, + .input = "\x23\x8b\x4f\xe5\x84\x7e\x44\xb2", + .ilen = 8, + .result = "\x01\x23\x45\x67\x89\xab\xcd\xef", + .rlen = 8, + }, { + .key = "\x01\x23\x45\x67\x12\x34\x56\x78" + "\x23\x45", + .klen = 10, + .input = "\xeb\x6a\x71\x1a\x2c\x02\x27\x1b", + .ilen = 8, + .result = "\x01\x23\x45\x67\x89\xab\xcd\xef", + .rlen = 8, + }, { + .key = "\x01\x23\x45\x67\x12", + .klen = 5, + .input = "\x7a\xc8\x16\xd1\x6e\x9b\x30\x2e", + .ilen = 8, + .result = "\x01\x23\x45\x67\x89\xab\xcd\xef", + .rlen = 8, + }, +}; + +/* + * ARC4 test vectors from OpenSSL + */ +#define ARC4_ENC_TEST_VECTORS 7 +#define ARC4_DEC_TEST_VECTORS 7 + +static struct cipher_testvec arc4_enc_tv_template[] = { + { + .key = "\x01\x23\x45\x67\x89\xab\xcd\xef", + .klen = 8, + .input = "\x01\x23\x45\x67\x89\xab\xcd\xef", + .ilen = 8, + .result = "\x75\xb7\x87\x80\x99\xe0\xc5\x96", + .rlen = 8, + }, { + .key = "\x01\x23\x45\x67\x89\xab\xcd\xef", + .klen = 8, + .input = "\x00\x00\x00\x00\x00\x00\x00\x00", + .ilen = 8, + .result = "\x74\x94\xc2\xe7\x10\x4b\x08\x79", + .rlen = 8, + }, { + .key = "\x00\x00\x00\x00\x00\x00\x00\x00", + .klen = 8, + .input = "\x00\x00\x00\x00\x00\x00\x00\x00", + .ilen = 8, + .result = "\xde\x18\x89\x41\xa3\x37\x5d\x3a", + .rlen = 8, + }, { + .key = "\xef\x01\x23\x45", + .klen = 4, + .input = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00", + .ilen = 20, + .result = "\xd6\xa1\x41\xa7\xec\x3c\x38\xdf" + "\xbd\x61\x5a\x11\x62\xe1\xc7\xba" + "\x36\xb6\x78\x58", + .rlen = 20, + }, { + .key = "\x01\x23\x45\x67\x89\xab\xcd\xef", + .klen = 8, + .input = "\x12\x34\x56\x78\x9A\xBC\xDE\xF0" + "\x12\x34\x56\x78\x9A\xBC\xDE\xF0" + "\x12\x34\x56\x78\x9A\xBC\xDE\xF0" + "\x12\x34\x56\x78", + .ilen = 28, + .result = "\x66\xa0\x94\x9f\x8a\xf7\xd6\x89" + "\x1f\x7f\x83\x2b\xa8\x33\xc0\x0c" + "\x89\x2e\xbe\x30\x14\x3c\xe2\x87" + "\x40\x01\x1e\xcf", + .rlen = 28, + }, { + .key = "\xef\x01\x23\x45", + .klen = 4, + .input = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00", + .ilen = 10, + .result = "\xd6\xa1\x41\xa7\xec\x3c\x38\xdf" + "\xbd\x61", + .rlen = 10, + }, { + .key = "\x01\x23\x45\x67\x89\xAB\xCD\xEF" + "\x00\x00\x00\x00\x00\x00\x00\x00", + .klen = 16, + .input = "\x01\x23\x45\x67\x89\xAB\xCD\xEF", + .ilen = 8, + .result = "\x69\x72\x36\x59\x1B\x52\x42\xB1", + .rlen = 8, + }, +}; + +static struct cipher_testvec arc4_dec_tv_template[] = { + { + .key = "\x01\x23\x45\x67\x89\xab\xcd\xef", + .klen = 8, + .input = "\x75\xb7\x87\x80\x99\xe0\xc5\x96", + .ilen = 8, + .result = "\x01\x23\x45\x67\x89\xab\xcd\xef", + .rlen = 8, + }, { + .key = "\x01\x23\x45\x67\x89\xab\xcd\xef", + .klen = 8, + .input = "\x74\x94\xc2\xe7\x10\x4b\x08\x79", + .ilen = 8, + .result = "\x00\x00\x00\x00\x00\x00\x00\x00", + .rlen = 8, + }, { 
+ .key = "\x00\x00\x00\x00\x00\x00\x00\x00", + .klen = 8, + .input = "\xde\x18\x89\x41\xa3\x37\x5d\x3a", + .ilen = 8, + .result = "\x00\x00\x00\x00\x00\x00\x00\x00", + .rlen = 8, + }, { + .key = "\xef\x01\x23\x45", + .klen = 4, + .input = "\xd6\xa1\x41\xa7\xec\x3c\x38\xdf" + "\xbd\x61\x5a\x11\x62\xe1\xc7\xba" + "\x36\xb6\x78\x58", + .ilen = 20, + .result = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00", + .rlen = 20, + }, { + .key = "\x01\x23\x45\x67\x89\xab\xcd\xef", + .klen = 8, + .input = "\x66\xa0\x94\x9f\x8a\xf7\xd6\x89" + "\x1f\x7f\x83\x2b\xa8\x33\xc0\x0c" + "\x89\x2e\xbe\x30\x14\x3c\xe2\x87" + "\x40\x01\x1e\xcf", + .ilen = 28, + .result = "\x12\x34\x56\x78\x9A\xBC\xDE\xF0" + "\x12\x34\x56\x78\x9A\xBC\xDE\xF0" + "\x12\x34\x56\x78\x9A\xBC\xDE\xF0" + "\x12\x34\x56\x78", + .rlen = 28, + }, { + .key = "\xef\x01\x23\x45", + .klen = 4, + .input = "\xd6\xa1\x41\xa7\xec\x3c\x38\xdf" + "\xbd\x61", + .ilen = 10, + .result = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00", + .rlen = 10, + }, { + .key = "\x01\x23\x45\x67\x89\xAB\xCD\xEF" + "\x00\x00\x00\x00\x00\x00\x00\x00", + .klen = 16, + .input = "\x69\x72\x36\x59\x1B\x52\x42\xB1", + .ilen = 8, + .result = "\x01\x23\x45\x67\x89\xAB\xCD\xEF", + .rlen = 8, + }, +}; + +/* + * TEA test vectors + */ +#define TEA_ENC_TEST_VECTORS 4 +#define TEA_DEC_TEST_VECTORS 4 + +static struct cipher_testvec tea_enc_tv_template[] = { + { + .key = zeroed_string, + .klen = 16, + .input = zeroed_string, + .ilen = 8, + .result = "\x0a\x3a\xea\x41\x40\xa9\xba\x94", + .rlen = 8, + }, { + .key = "\x2b\x02\x05\x68\x06\x14\x49\x76" + "\x77\x5d\x0e\x26\x6c\x28\x78\x43", + .klen = 16, + .input = "\x74\x65\x73\x74\x20\x6d\x65\x2e", + .ilen = 8, + .result = "\x77\x5d\x2a\x6a\xf6\xce\x92\x09", + .rlen = 8, + }, { + .key = "\x09\x65\x43\x11\x66\x44\x39\x25" + "\x51\x3a\x16\x10\x0a\x08\x12\x6e", + .klen = 16, + .input = "\x6c\x6f\x6e\x67\x65\x72\x5f\x74" + "\x65\x73\x74\x5f\x76\x65\x63\x74", + .ilen = 16, + .result = "\xbe\x7a\xbb\x81\x95\x2d\x1f\x1e" + "\xdd\x89\xa1\x25\x04\x21\xdf\x95", + .rlen = 16, + }, { + .key = "\x4d\x76\x32\x17\x05\x3f\x75\x2c" + "\x5d\x04\x16\x36\x15\x72\x63\x2f", + .klen = 16, + .input = "\x54\x65\x61\x20\x69\x73\x20\x67" + "\x6f\x6f\x64\x20\x66\x6f\x72\x20" + "\x79\x6f\x75\x21\x21\x21\x20\x72" + "\x65\x61\x6c\x6c\x79\x21\x21\x21", + .ilen = 32, + .result = "\xe0\x4d\x5d\x3c\xb7\x8c\x36\x47" + "\x94\x18\x95\x91\xa9\xfc\x49\xf8" + "\x44\xd1\x2d\xc2\x99\xb8\x08\x2a" + "\x07\x89\x73\xc2\x45\x92\xc6\x90", + .rlen = 32, + } +}; + +static struct cipher_testvec tea_dec_tv_template[] = { + { + .key = zeroed_string, + .klen = 16, + .input = "\x0a\x3a\xea\x41\x40\xa9\xba\x94", + .ilen = 8, + .result = zeroed_string, + .rlen = 8, + }, { + .key = "\x2b\x02\x05\x68\x06\x14\x49\x76" + "\x77\x5d\x0e\x26\x6c\x28\x78\x43", + .klen = 16, + .input = "\x77\x5d\x2a\x6a\xf6\xce\x92\x09", + .ilen = 8, + .result = "\x74\x65\x73\x74\x20\x6d\x65\x2e", + .rlen = 8, + }, { + .key = "\x09\x65\x43\x11\x66\x44\x39\x25" + "\x51\x3a\x16\x10\x0a\x08\x12\x6e", + .klen = 16, + .input = "\xbe\x7a\xbb\x81\x95\x2d\x1f\x1e" + "\xdd\x89\xa1\x25\x04\x21\xdf\x95", + .ilen = 16, + .result = "\x6c\x6f\x6e\x67\x65\x72\x5f\x74" + "\x65\x73\x74\x5f\x76\x65\x63\x74", + .rlen = 16, + }, { + .key = "\x4d\x76\x32\x17\x05\x3f\x75\x2c" + "\x5d\x04\x16\x36\x15\x72\x63\x2f", + .klen = 16, + .input = "\xe0\x4d\x5d\x3c\xb7\x8c\x36\x47" + "\x94\x18\x95\x91\xa9\xfc\x49\xf8" + "\x44\xd1\x2d\xc2\x99\xb8\x08\x2a" + "\x07\x89\x73\xc2\x45\x92\xc6\x90", + .ilen = 32, + 
.result = "\x54\x65\x61\x20\x69\x73\x20\x67" + "\x6f\x6f\x64\x20\x66\x6f\x72\x20" + "\x79\x6f\x75\x21\x21\x21\x20\x72" + "\x65\x61\x6c\x6c\x79\x21\x21\x21", + .rlen = 32, + } +}; + +/* + * XTEA test vectors + */ +#define XTEA_ENC_TEST_VECTORS 4 +#define XTEA_DEC_TEST_VECTORS 4 + +static struct cipher_testvec xtea_enc_tv_template[] = { + { + .key = zeroed_string, + .klen = 16, + .input = zeroed_string, + .ilen = 8, + .result = "\xd8\xd4\xe9\xde\xd9\x1e\x13\xf7", + .rlen = 8, + }, { + .key = "\x2b\x02\x05\x68\x06\x14\x49\x76" + "\x77\x5d\x0e\x26\x6c\x28\x78\x43", + .klen = 16, + .input = "\x74\x65\x73\x74\x20\x6d\x65\x2e", + .ilen = 8, + .result = "\x94\xeb\xc8\x96\x84\x6a\x49\xa8", + .rlen = 8, + }, { + .key = "\x09\x65\x43\x11\x66\x44\x39\x25" + "\x51\x3a\x16\x10\x0a\x08\x12\x6e", + .klen = 16, + .input = "\x6c\x6f\x6e\x67\x65\x72\x5f\x74" + "\x65\x73\x74\x5f\x76\x65\x63\x74", + .ilen = 16, + .result = "\x3e\xce\xae\x22\x60\x56\xa8\x9d" + "\x77\x4d\xd4\xb4\x87\x24\xe3\x9a", + .rlen = 16, + }, { + .key = "\x4d\x76\x32\x17\x05\x3f\x75\x2c" + "\x5d\x04\x16\x36\x15\x72\x63\x2f", + .klen = 16, + .input = "\x54\x65\x61\x20\x69\x73\x20\x67" + "\x6f\x6f\x64\x20\x66\x6f\x72\x20" + "\x79\x6f\x75\x21\x21\x21\x20\x72" + "\x65\x61\x6c\x6c\x79\x21\x21\x21", + .ilen = 32, + .result = "\x99\x81\x9f\x5d\x6f\x4b\x31\x3a" + "\x86\xff\x6f\xd0\xe3\x87\x70\x07" + "\x4d\xb8\xcf\xf3\x99\x50\xb3\xd4" + "\x73\xa2\xfa\xc9\x16\x59\x5d\x81", + .rlen = 32, + } +}; + +static struct cipher_testvec xtea_dec_tv_template[] = { + { + .key = zeroed_string, + .klen = 16, + .input = "\xd8\xd4\xe9\xde\xd9\x1e\x13\xf7", + .ilen = 8, + .result = zeroed_string, + .rlen = 8, + }, { + .key = "\x2b\x02\x05\x68\x06\x14\x49\x76" + "\x77\x5d\x0e\x26\x6c\x28\x78\x43", + .klen = 16, + .input = "\x94\xeb\xc8\x96\x84\x6a\x49\xa8", + .ilen = 8, + .result = "\x74\x65\x73\x74\x20\x6d\x65\x2e", + .rlen = 8, + }, { + .key = "\x09\x65\x43\x11\x66\x44\x39\x25" + "\x51\x3a\x16\x10\x0a\x08\x12\x6e", + .klen = 16, + .input = "\x3e\xce\xae\x22\x60\x56\xa8\x9d" + "\x77\x4d\xd4\xb4\x87\x24\xe3\x9a", + .ilen = 16, + .result = "\x6c\x6f\x6e\x67\x65\x72\x5f\x74" + "\x65\x73\x74\x5f\x76\x65\x63\x74", + .rlen = 16, + }, { + .key = "\x4d\x76\x32\x17\x05\x3f\x75\x2c" + "\x5d\x04\x16\x36\x15\x72\x63\x2f", + .klen = 16, + .input = "\x99\x81\x9f\x5d\x6f\x4b\x31\x3a" + "\x86\xff\x6f\xd0\xe3\x87\x70\x07" + "\x4d\xb8\xcf\xf3\x99\x50\xb3\xd4" + "\x73\xa2\xfa\xc9\x16\x59\x5d\x81", + .ilen = 32, + .result = "\x54\x65\x61\x20\x69\x73\x20\x67" + "\x6f\x6f\x64\x20\x66\x6f\x72\x20" + "\x79\x6f\x75\x21\x21\x21\x20\x72" + "\x65\x61\x6c\x6c\x79\x21\x21\x21", + .rlen = 32, + } +}; + +/* + * KHAZAD test vectors. 
+ */ +#define KHAZAD_ENC_TEST_VECTORS 5 +#define KHAZAD_DEC_TEST_VECTORS 5 + +static struct cipher_testvec khazad_enc_tv_template[] = { + { + .key = "\x80\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00", + .klen = 16, + .input = "\x00\x00\x00\x00\x00\x00\x00\x00", + .ilen = 8, + .result = "\x49\xa4\xce\x32\xac\x19\x0e\x3f", + .rlen = 8, + }, { + .key = "\x38\x38\x38\x38\x38\x38\x38\x38" + "\x38\x38\x38\x38\x38\x38\x38\x38", + .klen = 16, + .input = "\x38\x38\x38\x38\x38\x38\x38\x38", + .ilen = 8, + .result = "\x7e\x82\x12\xa1\xd9\x5b\xe4\xf9", + .rlen = 8, + }, { + .key = "\xa2\xa2\xa2\xa2\xa2\xa2\xa2\xa2" + "\xa2\xa2\xa2\xa2\xa2\xa2\xa2\xa2", + .klen = 16, + .input = "\xa2\xa2\xa2\xa2\xa2\xa2\xa2\xa2", + .ilen = 8, + .result = "\xaa\xbe\xc1\x95\xc5\x94\x1a\x9c", + .rlen = 8, + }, { + .key = "\x2f\x2f\x2f\x2f\x2f\x2f\x2f\x2f" + "\x2f\x2f\x2f\x2f\x2f\x2f\x2f\x2f", + .klen = 16, + .input = "\x2f\x2f\x2f\x2f\x2f\x2f\x2f\x2f", + .ilen = 8, + .result = "\x04\x74\xf5\x70\x50\x16\xd3\xb8", + .rlen = 8, + }, { + .key = "\x2f\x2f\x2f\x2f\x2f\x2f\x2f\x2f" + "\x2f\x2f\x2f\x2f\x2f\x2f\x2f\x2f", + .klen = 16, + .input = "\x2f\x2f\x2f\x2f\x2f\x2f\x2f\x2f" + "\x2f\x2f\x2f\x2f\x2f\x2f\x2f\x2f", + .ilen = 16, + .result = "\x04\x74\xf5\x70\x50\x16\xd3\xb8" + "\x04\x74\xf5\x70\x50\x16\xd3\xb8", + .rlen = 16, + }, +}; + +static struct cipher_testvec khazad_dec_tv_template[] = { + { + .key = "\x80\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00", + .klen = 16, + .input = "\x49\xa4\xce\x32\xac\x19\x0e\x3f", + .ilen = 8, + .result = "\x00\x00\x00\x00\x00\x00\x00\x00", + .rlen = 8, + }, { + .key = "\x38\x38\x38\x38\x38\x38\x38\x38" + "\x38\x38\x38\x38\x38\x38\x38\x38", + .klen = 16, + .input = "\x7e\x82\x12\xa1\xd9\x5b\xe4\xf9", + .ilen = 8, + .result = "\x38\x38\x38\x38\x38\x38\x38\x38", + .rlen = 8, + }, { + .key = "\xa2\xa2\xa2\xa2\xa2\xa2\xa2\xa2" + "\xa2\xa2\xa2\xa2\xa2\xa2\xa2\xa2", + .klen = 16, + .input = "\xaa\xbe\xc1\x95\xc5\x94\x1a\x9c", + .ilen = 8, + .result = "\xa2\xa2\xa2\xa2\xa2\xa2\xa2\xa2", + .rlen = 8, + }, { + .key = "\x2f\x2f\x2f\x2f\x2f\x2f\x2f\x2f" + "\x2f\x2f\x2f\x2f\x2f\x2f\x2f\x2f", + .klen = 16, + .input = "\x04\x74\xf5\x70\x50\x16\xd3\xb8", + .ilen = 8, + .result = "\x2f\x2f\x2f\x2f\x2f\x2f\x2f\x2f", + .rlen = 8, + }, { + .key = "\x2f\x2f\x2f\x2f\x2f\x2f\x2f\x2f" + "\x2f\x2f\x2f\x2f\x2f\x2f\x2f\x2f", + .klen = 16, + .input = "\x04\x74\xf5\x70\x50\x16\xd3\xb8" + "\x04\x74\xf5\x70\x50\x16\xd3\xb8", + .ilen = 16, + .result = "\x2f\x2f\x2f\x2f\x2f\x2f\x2f\x2f" + "\x2f\x2f\x2f\x2f\x2f\x2f\x2f\x2f", + .rlen = 16, + }, +}; + +/* + * Anubis test vectors. 
+ */ + +#define ANUBIS_ENC_TEST_VECTORS 5 +#define ANUBIS_DEC_TEST_VECTORS 5 +#define ANUBIS_CBC_ENC_TEST_VECTORS 2 +#define ANUBIS_CBC_DEC_TEST_VECTORS 2 + +static struct cipher_testvec anubis_enc_tv_template[] = { + { + .key = "\xfe\xfe\xfe\xfe\xfe\xfe\xfe\xfe" + "\xfe\xfe\xfe\xfe\xfe\xfe\xfe\xfe", + .klen = 16, + .input = "\xfe\xfe\xfe\xfe\xfe\xfe\xfe\xfe" + "\xfe\xfe\xfe\xfe\xfe\xfe\xfe\xfe", + .ilen = 16, + .result = "\x6d\xc5\xda\xa2\x26\x7d\x62\x6f" + "\x08\xb7\x52\x8e\x6e\x6e\x86\x90", + .rlen = 16, + }, { + + .key = "\x03\x03\x03\x03\x03\x03\x03\x03" + "\x03\x03\x03\x03\x03\x03\x03\x03" + "\x03\x03\x03\x03", + .klen = 20, + .input = "\x03\x03\x03\x03\x03\x03\x03\x03" + "\x03\x03\x03\x03\x03\x03\x03\x03", + .ilen = 16, + .result = "\xdb\xf1\x42\xf4\xd1\x8a\xc7\x49" + "\x87\x41\x6f\x82\x0a\x98\x64\xae", + .rlen = 16, + }, { + .key = "\x24\x24\x24\x24\x24\x24\x24\x24" + "\x24\x24\x24\x24\x24\x24\x24\x24" + "\x24\x24\x24\x24\x24\x24\x24\x24" + "\x24\x24\x24\x24", + .klen = 28, + .input = "\x24\x24\x24\x24\x24\x24\x24\x24" + "\x24\x24\x24\x24\x24\x24\x24\x24", + .ilen = 16, + .result = "\xfd\x1b\x4a\xe3\xbf\xf0\xad\x3d" + "\x06\xd3\x61\x27\xfd\x13\x9e\xde", + .rlen = 16, + }, { + .key = "\x25\x25\x25\x25\x25\x25\x25\x25" + "\x25\x25\x25\x25\x25\x25\x25\x25" + "\x25\x25\x25\x25\x25\x25\x25\x25" + "\x25\x25\x25\x25\x25\x25\x25\x25", + .klen = 32, + .input = "\x25\x25\x25\x25\x25\x25\x25\x25" + "\x25\x25\x25\x25\x25\x25\x25\x25", + .ilen = 16, + .result = "\x1a\x91\xfb\x2b\xb7\x78\x6b\xc4" + "\x17\xd9\xff\x40\x3b\x0e\xe5\xfe", + .rlen = 16, + }, { + .key = "\x35\x35\x35\x35\x35\x35\x35\x35" + "\x35\x35\x35\x35\x35\x35\x35\x35" + "\x35\x35\x35\x35\x35\x35\x35\x35" + "\x35\x35\x35\x35\x35\x35\x35\x35" + "\x35\x35\x35\x35\x35\x35\x35\x35", + .klen = 40, + .input = "\x35\x35\x35\x35\x35\x35\x35\x35" + "\x35\x35\x35\x35\x35\x35\x35\x35", + .ilen = 16, + .result = "\xa5\x2c\x85\x6f\x9c\xba\xa0\x97" + "\x9e\xc6\x84\x0f\x17\x21\x07\xee", + .rlen = 16, + }, +}; + +static struct cipher_testvec anubis_dec_tv_template[] = { + { + .key = "\xfe\xfe\xfe\xfe\xfe\xfe\xfe\xfe" + "\xfe\xfe\xfe\xfe\xfe\xfe\xfe\xfe", + .klen = 16, + .input = "\x6d\xc5\xda\xa2\x26\x7d\x62\x6f" + "\x08\xb7\x52\x8e\x6e\x6e\x86\x90", + .ilen = 16, + .result = "\xfe\xfe\xfe\xfe\xfe\xfe\xfe\xfe" + "\xfe\xfe\xfe\xfe\xfe\xfe\xfe\xfe", + .rlen = 16, + }, { + + .key = "\x03\x03\x03\x03\x03\x03\x03\x03" + "\x03\x03\x03\x03\x03\x03\x03\x03" + "\x03\x03\x03\x03", + .klen = 20, + .input = "\xdb\xf1\x42\xf4\xd1\x8a\xc7\x49" + "\x87\x41\x6f\x82\x0a\x98\x64\xae", + .ilen = 16, + .result = "\x03\x03\x03\x03\x03\x03\x03\x03" + "\x03\x03\x03\x03\x03\x03\x03\x03", + .rlen = 16, + }, { + .key = "\x24\x24\x24\x24\x24\x24\x24\x24" + "\x24\x24\x24\x24\x24\x24\x24\x24" + "\x24\x24\x24\x24\x24\x24\x24\x24" + "\x24\x24\x24\x24", + .klen = 28, + .input = "\xfd\x1b\x4a\xe3\xbf\xf0\xad\x3d" + "\x06\xd3\x61\x27\xfd\x13\x9e\xde", + .ilen = 16, + .result = "\x24\x24\x24\x24\x24\x24\x24\x24" + "\x24\x24\x24\x24\x24\x24\x24\x24", + .rlen = 16, + }, { + .key = "\x25\x25\x25\x25\x25\x25\x25\x25" + "\x25\x25\x25\x25\x25\x25\x25\x25" + "\x25\x25\x25\x25\x25\x25\x25\x25" + "\x25\x25\x25\x25\x25\x25\x25\x25", + .klen = 32, + .input = "\x1a\x91\xfb\x2b\xb7\x78\x6b\xc4" + "\x17\xd9\xff\x40\x3b\x0e\xe5\xfe", + .ilen = 16, + .result = "\x25\x25\x25\x25\x25\x25\x25\x25" + "\x25\x25\x25\x25\x25\x25\x25\x25", + .rlen = 16, + }, { + .key = "\x35\x35\x35\x35\x35\x35\x35\x35" + "\x35\x35\x35\x35\x35\x35\x35\x35" + "\x35\x35\x35\x35\x35\x35\x35\x35" + 
"\x35\x35\x35\x35\x35\x35\x35\x35" + "\x35\x35\x35\x35\x35\x35\x35\x35", + .input = "\xa5\x2c\x85\x6f\x9c\xba\xa0\x97" + "\x9e\xc6\x84\x0f\x17\x21\x07\xee", + .klen = 40, + .ilen = 16, + .result = "\x35\x35\x35\x35\x35\x35\x35\x35" + "\x35\x35\x35\x35\x35\x35\x35\x35", + .rlen = 16, + }, +}; + +static struct cipher_testvec anubis_cbc_enc_tv_template[] = { + { + .key = "\xfe\xfe\xfe\xfe\xfe\xfe\xfe\xfe" + "\xfe\xfe\xfe\xfe\xfe\xfe\xfe\xfe", + .klen = 16, + .input = "\xfe\xfe\xfe\xfe\xfe\xfe\xfe\xfe" + "\xfe\xfe\xfe\xfe\xfe\xfe\xfe\xfe" + "\xfe\xfe\xfe\xfe\xfe\xfe\xfe\xfe" + "\xfe\xfe\xfe\xfe\xfe\xfe\xfe\xfe", + .ilen = 32, + .result = "\x6d\xc5\xda\xa2\x26\x7d\x62\x6f" + "\x08\xb7\x52\x8e\x6e\x6e\x86\x90" + "\x86\xd8\xb5\x6f\x98\x5e\x8a\x66" + "\x4f\x1f\x78\xa1\xbb\x37\xf1\xbe", + .rlen = 32, + }, { + .key = "\x35\x35\x35\x35\x35\x35\x35\x35" + "\x35\x35\x35\x35\x35\x35\x35\x35" + "\x35\x35\x35\x35\x35\x35\x35\x35" + "\x35\x35\x35\x35\x35\x35\x35\x35" + "\x35\x35\x35\x35\x35\x35\x35\x35", + .klen = 40, + .input = "\x35\x35\x35\x35\x35\x35\x35\x35" + "\x35\x35\x35\x35\x35\x35\x35\x35" + "\x35\x35\x35\x35\x35\x35\x35\x35" + "\x35\x35\x35\x35\x35\x35\x35\x35", + .ilen = 32, + .result = "\xa5\x2c\x85\x6f\x9c\xba\xa0\x97" + "\x9e\xc6\x84\x0f\x17\x21\x07\xee" + "\xa2\xbc\x06\x98\xc6\x4b\xda\x75" + "\x2e\xaa\xbe\x58\xce\x01\x5b\xc7", + .rlen = 32, + }, +}; + +static struct cipher_testvec anubis_cbc_dec_tv_template[] = { + { + .key = "\xfe\xfe\xfe\xfe\xfe\xfe\xfe\xfe" + "\xfe\xfe\xfe\xfe\xfe\xfe\xfe\xfe", + .klen = 16, + .input = "\x6d\xc5\xda\xa2\x26\x7d\x62\x6f" + "\x08\xb7\x52\x8e\x6e\x6e\x86\x90" + "\x86\xd8\xb5\x6f\x98\x5e\x8a\x66" + "\x4f\x1f\x78\xa1\xbb\x37\xf1\xbe", + .ilen = 32, + .result = "\xfe\xfe\xfe\xfe\xfe\xfe\xfe\xfe" + "\xfe\xfe\xfe\xfe\xfe\xfe\xfe\xfe" + "\xfe\xfe\xfe\xfe\xfe\xfe\xfe\xfe" + "\xfe\xfe\xfe\xfe\xfe\xfe\xfe\xfe", + .rlen = 32, + }, { + .key = "\x35\x35\x35\x35\x35\x35\x35\x35" + "\x35\x35\x35\x35\x35\x35\x35\x35" + "\x35\x35\x35\x35\x35\x35\x35\x35" + "\x35\x35\x35\x35\x35\x35\x35\x35" + "\x35\x35\x35\x35\x35\x35\x35\x35", + .klen = 40, + .input = "\xa5\x2c\x85\x6f\x9c\xba\xa0\x97" + "\x9e\xc6\x84\x0f\x17\x21\x07\xee" + "\xa2\xbc\x06\x98\xc6\x4b\xda\x75" + "\x2e\xaa\xbe\x58\xce\x01\x5b\xc7", + .ilen = 32, + .result = "\x35\x35\x35\x35\x35\x35\x35\x35" + "\x35\x35\x35\x35\x35\x35\x35\x35" + "\x35\x35\x35\x35\x35\x35\x35\x35" + "\x35\x35\x35\x35\x35\x35\x35\x35", + .rlen = 32, + }, +}; + +/* + * XETA test vectors + */ +#define XETA_ENC_TEST_VECTORS 4 +#define XETA_DEC_TEST_VECTORS 4 + +static struct cipher_testvec xeta_enc_tv_template[] = { + { + .key = zeroed_string, + .klen = 16, + .input = zeroed_string, + .ilen = 8, + .result = "\xaa\x22\x96\xe5\x6c\x61\xf3\x45", + .rlen = 8, + }, { + .key = "\x2b\x02\x05\x68\x06\x14\x49\x76" + "\x77\x5d\x0e\x26\x6c\x28\x78\x43", + .klen = 16, + .input = "\x74\x65\x73\x74\x20\x6d\x65\x2e", + .ilen = 8, + .result = "\x82\x3e\xeb\x35\xdc\xdd\xd9\xc3", + .rlen = 8, + }, { + .key = "\x09\x65\x43\x11\x66\x44\x39\x25" + "\x51\x3a\x16\x10\x0a\x08\x12\x6e", + .klen = 16, + .input = "\x6c\x6f\x6e\x67\x65\x72\x5f\x74" + "\x65\x73\x74\x5f\x76\x65\x63\x74", + .ilen = 16, + .result = "\xe2\x04\xdb\xf2\x89\x85\x9e\xea" + "\x61\x35\xaa\xed\xb5\xcb\x71\x2c", + .rlen = 16, + }, { + .key = "\x4d\x76\x32\x17\x05\x3f\x75\x2c" + "\x5d\x04\x16\x36\x15\x72\x63\x2f", + .klen = 16, + .input = "\x54\x65\x61\x20\x69\x73\x20\x67" + "\x6f\x6f\x64\x20\x66\x6f\x72\x20" + "\x79\x6f\x75\x21\x21\x21\x20\x72" + "\x65\x61\x6c\x6c\x79\x21\x21\x21", + .ilen = 32, + 
.result = "\x0b\x03\xcd\x8a\xbe\x95\xfd\xb1" + "\xc1\x44\x91\x0b\xa5\xc9\x1b\xb4" + "\xa9\xda\x1e\x9e\xb1\x3e\x2a\x8f" + "\xea\xa5\x6a\x85\xd1\xf4\xa8\xa5", + .rlen = 32, + } +}; + +static struct cipher_testvec xeta_dec_tv_template[] = { + { + .key = zeroed_string, + .klen = 16, + .input = "\xaa\x22\x96\xe5\x6c\x61\xf3\x45", + .ilen = 8, + .result = zeroed_string, + .rlen = 8, + }, { + .key = "\x2b\x02\x05\x68\x06\x14\x49\x76" + "\x77\x5d\x0e\x26\x6c\x28\x78\x43", + .klen = 16, + .input = "\x82\x3e\xeb\x35\xdc\xdd\xd9\xc3", + .ilen = 8, + .result = "\x74\x65\x73\x74\x20\x6d\x65\x2e", + .rlen = 8, + }, { + .key = "\x09\x65\x43\x11\x66\x44\x39\x25" + "\x51\x3a\x16\x10\x0a\x08\x12\x6e", + .klen = 16, + .input = "\xe2\x04\xdb\xf2\x89\x85\x9e\xea" + "\x61\x35\xaa\xed\xb5\xcb\x71\x2c", + .ilen = 16, + .result = "\x6c\x6f\x6e\x67\x65\x72\x5f\x74" + "\x65\x73\x74\x5f\x76\x65\x63\x74", + .rlen = 16, + }, { + .key = "\x4d\x76\x32\x17\x05\x3f\x75\x2c" + "\x5d\x04\x16\x36\x15\x72\x63\x2f", + .klen = 16, + .input = "\x0b\x03\xcd\x8a\xbe\x95\xfd\xb1" + "\xc1\x44\x91\x0b\xa5\xc9\x1b\xb4" + "\xa9\xda\x1e\x9e\xb1\x3e\x2a\x8f" + "\xea\xa5\x6a\x85\xd1\xf4\xa8\xa5", + .ilen = 32, + .result = "\x54\x65\x61\x20\x69\x73\x20\x67" + "\x6f\x6f\x64\x20\x66\x6f\x72\x20" + "\x79\x6f\x75\x21\x21\x21\x20\x72" + "\x65\x61\x6c\x6c\x79\x21\x21\x21", + .rlen = 32, + } +}; + +/* + * FCrypt test vectors + */ +#define FCRYPT_ENC_TEST_VECTORS ARRAY_SIZE(fcrypt_pcbc_enc_tv_template) +#define FCRYPT_DEC_TEST_VECTORS ARRAY_SIZE(fcrypt_pcbc_dec_tv_template) + +static struct cipher_testvec fcrypt_pcbc_enc_tv_template[] = { + { /* http://www.openafs.org/pipermail/openafs-devel/2000-December/005320.html */ + .key = "\x00\x00\x00\x00\x00\x00\x00\x00", + .klen = 8, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x00", + .input = "\x00\x00\x00\x00\x00\x00\x00\x00", + .ilen = 8, + .result = "\x0E\x09\x00\xC7\x3E\xF7\xED\x41", + .rlen = 8, + }, { + .key = "\x11\x44\x77\xAA\xDD\x00\x33\x66", + .klen = 8, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x00", + .input = "\x12\x34\x56\x78\x9A\xBC\xDE\xF0", + .ilen = 8, + .result = "\xD8\xED\x78\x74\x77\xEC\x06\x80", + .rlen = 8, + }, { /* From Arla */ + .key = "\xf0\xe1\xd2\xc3\xb4\xa5\x96\x87", + .klen = 8, + .iv = "\xfe\xdc\xba\x98\x76\x54\x32\x10", + .input = "The quick brown fox jumps over the lazy dogs.\0\0", + .ilen = 48, + .result = "\x00\xf0\x0e\x11\x75\xe6\x23\x82" + "\xee\xac\x98\x62\x44\x51\xe4\x84" + "\xc3\x59\xd8\xaa\x64\x60\xae\xf7" + "\xd2\xd9\x13\x79\x72\xa3\x45\x03" + "\x23\xb5\x62\xd7\x0c\xf5\x27\xd1" + "\xf8\x91\x3c\xac\x44\x22\x92\xef", + .rlen = 48, + }, { + .key = "\xfe\xdc\xba\x98\x76\x54\x32\x10", + .klen = 8, + .iv = "\xf0\xe1\xd2\xc3\xb4\xa5\x96\x87", + .input = "The quick brown fox jumps over the lazy dogs.\0\0", + .ilen = 48, + .result = "\xca\x90\xf5\x9d\xcb\xd4\xd2\x3c" + "\x01\x88\x7f\x3e\x31\x6e\x62\x9d" + "\xd8\xe0\x57\xa3\x06\x3a\x42\x58" + "\x2a\x28\xfe\x72\x52\x2f\xdd\xe0" + "\x19\x89\x09\x1c\x2a\x8e\x8c\x94" + "\xfc\xc7\x68\xe4\x88\xaa\xde\x0f", + .rlen = 48, + }, { /* split-page version */ + .key = "\xfe\xdc\xba\x98\x76\x54\x32\x10", + .klen = 8, + .iv = "\xf0\xe1\xd2\xc3\xb4\xa5\x96\x87", + .input = "The quick brown fox jumps over the lazy dogs.\0\0", + .ilen = 48, + .result = "\xca\x90\xf5\x9d\xcb\xd4\xd2\x3c" + "\x01\x88\x7f\x3e\x31\x6e\x62\x9d" + "\xd8\xe0\x57\xa3\x06\x3a\x42\x58" + "\x2a\x28\xfe\x72\x52\x2f\xdd\xe0" + "\x19\x89\x09\x1c\x2a\x8e\x8c\x94" + "\xfc\xc7\x68\xe4\x88\xaa\xde\x0f", + .rlen = 48, + .np = 2, + .tap = { 20, 28 }, + } +}; + +static struct 
cipher_testvec fcrypt_pcbc_dec_tv_template[] = { + { /* http://www.openafs.org/pipermail/openafs-devel/2000-December/005320.html */ + .key = "\x00\x00\x00\x00\x00\x00\x00\x00", + .klen = 8, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x00", + .input = "\x0E\x09\x00\xC7\x3E\xF7\xED\x41", + .ilen = 8, + .result = "\x00\x00\x00\x00\x00\x00\x00\x00", + .rlen = 8, + }, { + .key = "\x11\x44\x77\xAA\xDD\x00\x33\x66", + .klen = 8, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x00", + .input = "\xD8\xED\x78\x74\x77\xEC\x06\x80", + .ilen = 8, + .result = "\x12\x34\x56\x78\x9A\xBC\xDE\xF0", + .rlen = 8, + }, { /* From Arla */ + .key = "\xf0\xe1\xd2\xc3\xb4\xa5\x96\x87", + .klen = 8, + .iv = "\xfe\xdc\xba\x98\x76\x54\x32\x10", + .input = "\x00\xf0\x0e\x11\x75\xe6\x23\x82" + "\xee\xac\x98\x62\x44\x51\xe4\x84" + "\xc3\x59\xd8\xaa\x64\x60\xae\xf7" + "\xd2\xd9\x13\x79\x72\xa3\x45\x03" + "\x23\xb5\x62\xd7\x0c\xf5\x27\xd1" + "\xf8\x91\x3c\xac\x44\x22\x92\xef", + .ilen = 48, + .result = "The quick brown fox jumps over the lazy dogs.\0\0", + .rlen = 48, + }, { + .key = "\xfe\xdc\xba\x98\x76\x54\x32\x10", + .klen = 8, + .iv = "\xf0\xe1\xd2\xc3\xb4\xa5\x96\x87", + .input = "\xca\x90\xf5\x9d\xcb\xd4\xd2\x3c" + "\x01\x88\x7f\x3e\x31\x6e\x62\x9d" + "\xd8\xe0\x57\xa3\x06\x3a\x42\x58" + "\x2a\x28\xfe\x72\x52\x2f\xdd\xe0" + "\x19\x89\x09\x1c\x2a\x8e\x8c\x94" + "\xfc\xc7\x68\xe4\x88\xaa\xde\x0f", + .ilen = 48, + .result = "The quick brown fox jumps over the lazy dogs.\0\0", + .rlen = 48, + }, { /* split-page version */ + .key = "\xfe\xdc\xba\x98\x76\x54\x32\x10", + .klen = 8, + .iv = "\xf0\xe1\xd2\xc3\xb4\xa5\x96\x87", + .input = "\xca\x90\xf5\x9d\xcb\xd4\xd2\x3c" + "\x01\x88\x7f\x3e\x31\x6e\x62\x9d" + "\xd8\xe0\x57\xa3\x06\x3a\x42\x58" + "\x2a\x28\xfe\x72\x52\x2f\xdd\xe0" + "\x19\x89\x09\x1c\x2a\x8e\x8c\x94" + "\xfc\xc7\x68\xe4\x88\xaa\xde\x0f", + .ilen = 48, + .result = "The quick brown fox jumps over the lazy dogs.\0\0", + .rlen = 48, + .np = 2, + .tap = { 20, 28 }, + } +}; + +/* + * CAMELLIA test vectors. 
+ */ +#define CAMELLIA_ENC_TEST_VECTORS 4 +#define CAMELLIA_DEC_TEST_VECTORS 4 +#define CAMELLIA_CBC_ENC_TEST_VECTORS 3 +#define CAMELLIA_CBC_DEC_TEST_VECTORS 3 +#define CAMELLIA_CTR_ENC_TEST_VECTORS 2 +#define CAMELLIA_CTR_DEC_TEST_VECTORS 2 +#define CAMELLIA_LRW_ENC_TEST_VECTORS 8 +#define CAMELLIA_LRW_DEC_TEST_VECTORS 8 +#define CAMELLIA_XTS_ENC_TEST_VECTORS 5 +#define CAMELLIA_XTS_DEC_TEST_VECTORS 5 + +static struct cipher_testvec camellia_enc_tv_template[] = { + { + .key = "\x01\x23\x45\x67\x89\xab\xcd\xef" + "\xfe\xdc\xba\x98\x76\x54\x32\x10", + .klen = 16, + .input = "\x01\x23\x45\x67\x89\xab\xcd\xef" + "\xfe\xdc\xba\x98\x76\x54\x32\x10", + .ilen = 16, + .result = "\x67\x67\x31\x38\x54\x96\x69\x73" + "\x08\x57\x06\x56\x48\xea\xbe\x43", + .rlen = 16, + }, { + .key = "\x01\x23\x45\x67\x89\xab\xcd\xef" + "\xfe\xdc\xba\x98\x76\x54\x32\x10" + "\x00\x11\x22\x33\x44\x55\x66\x77", + .klen = 24, + .input = "\x01\x23\x45\x67\x89\xab\xcd\xef" + "\xfe\xdc\xba\x98\x76\x54\x32\x10", + .ilen = 16, + .result = "\xb4\x99\x34\x01\xb3\xe9\x96\xf8" + "\x4e\xe5\xce\xe7\xd7\x9b\x09\xb9", + .rlen = 16, + }, { + .key = "\x01\x23\x45\x67\x89\xab\xcd\xef" + "\xfe\xdc\xba\x98\x76\x54\x32\x10" + "\x00\x11\x22\x33\x44\x55\x66\x77" + "\x88\x99\xaa\xbb\xcc\xdd\xee\xff", + .klen = 32, + .input = "\x01\x23\x45\x67\x89\xab\xcd\xef" + "\xfe\xdc\xba\x98\x76\x54\x32\x10", + .ilen = 16, + .result = "\x9a\xcc\x23\x7d\xff\x16\xd7\x6c" + "\x20\xef\x7c\x91\x9e\x3a\x75\x09", + .rlen = 16, + }, + { /* Generated with Crypto++ */ + .key = "\x3F\x85\x62\x3F\x1C\xF9\xD6\x1C" + "\xF9\xD6\xB3\x90\x6D\x4A\x90\x6D" + "\x4A\x27\x04\xE1\x27\x04\xE1\xBE" + "\x9B\x78\xBE\x9B\x78\x55\x32\x0F", + .klen = 32, + .input = "\x56\xED\x84\x1B\x8F\x26\xBD\x31" + "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3" + "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15" + "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87" + "\x1E\x92\x29\xC0\x34\xCB\x62\xF9" + "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48", + .ilen = 48, + .result = "\xED\xCD\xDB\xB8\x68\xCE\xBD\xEA" + "\x9D\x9D\xCD\x9F\x4F\xFC\x4D\xB7" + "\xA5\xFF\x6F\x43\x0F\xBA\x32\x04" + "\xB3\xC2\xB9\x03\xAA\x91\x56\x29" + "\x0D\xD0\xFD\xC4\x65\xA5\x69\xB9" + "\xF1\xF6\xB1\xA5\xB2\x75\x4F\x8A", + .rlen = 48, + }, +}; + +static struct cipher_testvec camellia_dec_tv_template[] = { + { + .key = "\x01\x23\x45\x67\x89\xab\xcd\xef" + "\xfe\xdc\xba\x98\x76\x54\x32\x10", + .klen = 16, + .input = "\x67\x67\x31\x38\x54\x96\x69\x73" + "\x08\x57\x06\x56\x48\xea\xbe\x43", + .ilen = 16, + .result = "\x01\x23\x45\x67\x89\xab\xcd\xef" + "\xfe\xdc\xba\x98\x76\x54\x32\x10", + .rlen = 16, + }, { + .key = "\x01\x23\x45\x67\x89\xab\xcd\xef" + "\xfe\xdc\xba\x98\x76\x54\x32\x10" + "\x00\x11\x22\x33\x44\x55\x66\x77", + .klen = 24, + .input = "\xb4\x99\x34\x01\xb3\xe9\x96\xf8" + "\x4e\xe5\xce\xe7\xd7\x9b\x09\xb9", + .ilen = 16, + .result = "\x01\x23\x45\x67\x89\xab\xcd\xef" + "\xfe\xdc\xba\x98\x76\x54\x32\x10", + .rlen = 16, + }, { + .key = "\x01\x23\x45\x67\x89\xab\xcd\xef" + "\xfe\xdc\xba\x98\x76\x54\x32\x10" + "\x00\x11\x22\x33\x44\x55\x66\x77" + "\x88\x99\xaa\xbb\xcc\xdd\xee\xff", + .klen = 32, + .input = "\x9a\xcc\x23\x7d\xff\x16\xd7\x6c" + "\x20\xef\x7c\x91\x9e\x3a\x75\x09", + .ilen = 16, + .result = "\x01\x23\x45\x67\x89\xab\xcd\xef" + "\xfe\xdc\xba\x98\x76\x54\x32\x10", + .rlen = 16, + }, + { /* Generated with Crypto++ */ + .key = "\x3F\x85\x62\x3F\x1C\xF9\xD6\x1C" + "\xF9\xD6\xB3\x90\x6D\x4A\x90\x6D" + "\x4A\x27\x04\xE1\x27\x04\xE1\xBE" + "\x9B\x78\xBE\x9B\x78\x55\x32\x0F", + .klen = 32, + .input = "\xED\xCD\xDB\xB8\x68\xCE\xBD\xEA" + "\x9D\x9D\xCD\x9F\x4F\xFC\x4D\xB7" + 
"\xA5\xFF\x6F\x43\x0F\xBA\x32\x04" + "\xB3\xC2\xB9\x03\xAA\x91\x56\x29" + "\x0D\xD0\xFD\xC4\x65\xA5\x69\xB9" + "\xF1\xF6\xB1\xA5\xB2\x75\x4F\x8A", + .ilen = 48, + .result = "\x56\xED\x84\x1B\x8F\x26\xBD\x31" + "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3" + "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15" + "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87" + "\x1E\x92\x29\xC0\x34\xCB\x62\xF9" + "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48", + .rlen = 48, + }, +}; + +static struct cipher_testvec camellia_cbc_enc_tv_template[] = { + { + .key = "\x06\xa9\x21\x40\x36\xb8\xa1\x5b" + "\x51\x2e\x03\xd5\x34\x12\x00\x06", + .klen = 16, + .iv = "\x3d\xaf\xba\x42\x9d\x9e\xb4\x30" + "\xb4\x22\xda\x80\x2c\x9f\xac\x41", + .input = "Single block msg", + .ilen = 16, + .result = "\xea\x32\x12\x76\x3b\x50\x10\xe7" + "\x18\xf6\xfd\x5d\xf6\x8f\x13\x51", + .rlen = 16, + }, { + .key = "\xc2\x86\x69\x6d\x88\x7c\x9a\xa0" + "\x61\x1b\xbb\x3e\x20\x25\xa4\x5a", + .klen = 16, + .iv = "\x56\x2e\x17\x99\x6d\x09\x3d\x28" + "\xdd\xb3\xba\x69\x5a\x2e\x6f\x58", + .input = "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f" + "\x10\x11\x12\x13\x14\x15\x16\x17" + "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + .ilen = 32, + .result = "\xa5\xdf\x6e\x50\xda\x70\x6c\x01" + "\x4a\xab\xf3\xf2\xd6\xfc\x6c\xfd" + "\x19\xb4\x3e\x57\x1c\x02\x5e\xa0" + "\x15\x78\xe0\x5e\xf2\xcb\x87\x16", + .rlen = 32, + }, + { /* Generated with Crypto++ */ + .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9" + "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A" + "\x27\x04\xE1\x27\x04\xE1\xBE\x9B" + "\x78\xBE\x9B\x78\x55\x32\x0F\x55", + .klen = 32, + .iv = "\xE2\x24\x89\xEE\x53\xB8\x1D\x5F" + "\xC4\x29\x8E\xF3\x35\x9A\xFF\x64", + .input = "\x56\xED\x84\x1B\x8F\x26\xBD\x31" + "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3" + "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15" + "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87" + "\x1E\x92\x29\xC0\x34\xCB\x62\xF9" + "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48", + .ilen = 48, + .result = "\xCD\x3E\x2A\x3B\x3E\x94\xC5\x77" + "\xBA\xBB\x5B\xB1\xDE\x7B\xA4\x40" + "\x88\x39\xE3\xFD\x94\x4B\x25\x58" + "\xE1\x4B\xC4\x18\x7A\xFD\x17\x2B" + "\xB9\xF9\xC2\x27\x6A\xB6\x31\x27" + "\xA6\xAD\xEF\xE5\x5D\xE4\x02\x01", + .rlen = 48, + }, +}; + +static struct cipher_testvec camellia_cbc_dec_tv_template[] = { + { + .key = "\x06\xa9\x21\x40\x36\xb8\xa1\x5b" + "\x51\x2e\x03\xd5\x34\x12\x00\x06", + .klen = 16, + .iv = "\x3d\xaf\xba\x42\x9d\x9e\xb4\x30" + "\xb4\x22\xda\x80\x2c\x9f\xac\x41", + .input = "\xea\x32\x12\x76\x3b\x50\x10\xe7" + "\x18\xf6\xfd\x5d\xf6\x8f\x13\x51", + .ilen = 16, + .result = "Single block msg", + .rlen = 16, + }, { + .key = "\xc2\x86\x69\x6d\x88\x7c\x9a\xa0" + "\x61\x1b\xbb\x3e\x20\x25\xa4\x5a", + .klen = 16, + .iv = "\x56\x2e\x17\x99\x6d\x09\x3d\x28" + "\xdd\xb3\xba\x69\x5a\x2e\x6f\x58", + .input = "\xa5\xdf\x6e\x50\xda\x70\x6c\x01" + "\x4a\xab\xf3\xf2\xd6\xfc\x6c\xfd" + "\x19\xb4\x3e\x57\x1c\x02\x5e\xa0" + "\x15\x78\xe0\x5e\xf2\xcb\x87\x16", + .ilen = 32, + .result = "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f" + "\x10\x11\x12\x13\x14\x15\x16\x17" + "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + .rlen = 32, + }, + { /* Generated with Crypto++ */ + .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9" + "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A" + "\x27\x04\xE1\x27\x04\xE1\xBE\x9B" + "\x78\xBE\x9B\x78\x55\x32\x0F\x55", + .klen = 32, + .iv = "\xE2\x24\x89\xEE\x53\xB8\x1D\x5F" + "\xC4\x29\x8E\xF3\x35\x9A\xFF\x64", + .input = "\xCD\x3E\x2A\x3B\x3E\x94\xC5\x77" + "\xBA\xBB\x5B\xB1\xDE\x7B\xA4\x40" + "\x88\x39\xE3\xFD\x94\x4B\x25\x58" + "\xE1\x4B\xC4\x18\x7A\xFD\x17\x2B" + "\xB9\xF9\xC2\x27\x6A\xB6\x31\x27" + 
"\xA6\xAD\xEF\xE5\x5D\xE4\x02\x01", + .ilen = 48, + .result = "\x56\xED\x84\x1B\x8F\x26\xBD\x31" + "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3" + "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15" + "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87" + "\x1E\x92\x29\xC0\x34\xCB\x62\xF9" + "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48", + .rlen = 48, + }, +}; + +static struct cipher_testvec camellia_ctr_enc_tv_template[] = { + { /* Generated with Crypto++ */ + .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9" + "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A" + "\x27\x04\xE1\x27\x04\xE1\xBE\x9B" + "\x78\xBE\x9B\x78\x55\x32\x0F\x55", + .klen = 32, + .iv = "\xE2\x24\x89\xEE\x53\xB8\x1D\x5F" + "\xC4\x29\x8E\xF3\x35\x9A\xFF\x64", + .input = "\x56\xED\x84\x1B\x8F\x26\xBD\x31" + "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3" + "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15" + "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87" + "\x1E\x92\x29\xC0\x34\xCB\x62\xF9" + "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48", + .ilen = 48, + .result = "\xF3\x06\x3A\x84\xCD\xBA\x8E\x11" + "\xB7\x74\x6F\x5C\x97\xFB\x36\xFE" + "\xDE\x71\x58\xD4\x15\xD1\xC1\xA4" + "\xC9\x28\x74\xA6\x6B\xC7\x95\xA6" + "\x6C\x77\xF7\x2F\xDF\xC7\xBB\x85" + "\x60\xFC\xE8\x94\xE8\xB5\x09\x2C", + .rlen = 48, + }, + { /* Generated with Crypto++ */ + .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9" + "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A" + "\x27\x04\xE1\x27\x04\xE1\xBE\x9B" + "\x78\xBE\x9B\x78\x55\x32\x0F\x55", + .klen = 32, + .iv = "\xE2\x24\x89\xEE\x53\xB8\x1D\x5F" + "\xC4\x29\x8E\xF3\x35\x9A\xFF\x64", + .input = "\x56\xED\x84\x1B\x8F\x26\xBD\x31" + "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3" + "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15" + "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87" + "\x1E\x92\x29\xC0\x34\xCB\x62\xF9" + "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48" + "\xDF\x76\x0D", + .ilen = 51, + .result = "\xF3\x06\x3A\x84\xCD\xBA\x8E\x11" + "\xB7\x74\x6F\x5C\x97\xFB\x36\xFE" + "\xDE\x71\x58\xD4\x15\xD1\xC1\xA4" + "\xC9\x28\x74\xA6\x6B\xC7\x95\xA6" + "\x6C\x77\xF7\x2F\xDF\xC7\xBB\x85" + "\x60\xFC\xE8\x94\xE8\xB5\x09\x2C" + "\x1E\x43\xEF", + .rlen = 51, + }, +}; + +static struct cipher_testvec camellia_ctr_dec_tv_template[] = { + { /* Generated with Crypto++ */ + .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9" + "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A" + "\x27\x04\xE1\x27\x04\xE1\xBE\x9B" + "\x78\xBE\x9B\x78\x55\x32\x0F\x55", + .klen = 32, + .iv = "\xE2\x24\x89\xEE\x53\xB8\x1D\x5F" + "\xC4\x29\x8E\xF3\x35\x9A\xFF\x64", + .input = "\xF3\x06\x3A\x84\xCD\xBA\x8E\x11" + "\xB7\x74\x6F\x5C\x97\xFB\x36\xFE" + "\xDE\x71\x58\xD4\x15\xD1\xC1\xA4" + "\xC9\x28\x74\xA6\x6B\xC7\x95\xA6" + "\x6C\x77\xF7\x2F\xDF\xC7\xBB\x85" + "\x60\xFC\xE8\x94\xE8\xB5\x09\x2C", + .ilen = 48, + .result = "\x56\xED\x84\x1B\x8F\x26\xBD\x31" + "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3" + "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15" + "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87" + "\x1E\x92\x29\xC0\x34\xCB\x62\xF9" + "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48", + .rlen = 48, + }, + { /* Generated with Crypto++ */ + .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9" + "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A" + "\x27\x04\xE1\x27\x04\xE1\xBE\x9B" + "\x78\xBE\x9B\x78\x55\x32\x0F\x55", + .klen = 32, + .iv = "\xE2\x24\x89\xEE\x53\xB8\x1D\x5F" + "\xC4\x29\x8E\xF3\x35\x9A\xFF\x64", + .input = "\xF3\x06\x3A\x84\xCD\xBA\x8E\x11" + "\xB7\x74\x6F\x5C\x97\xFB\x36\xFE" + "\xDE\x71\x58\xD4\x15\xD1\xC1\xA4" + "\xC9\x28\x74\xA6\x6B\xC7\x95\xA6" + "\x6C\x77\xF7\x2F\xDF\xC7\xBB\x85" + "\x60\xFC\xE8\x94\xE8\xB5\x09\x2C" + "\x1E\x43\xEF", + .ilen = 51, + .result = "\x56\xED\x84\x1B\x8F\x26\xBD\x31" + "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3" + "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15" + "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87" + 
"\x1E\x92\x29\xC0\x34\xCB\x62\xF9" + "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48" + "\xDF\x76\x0D", + .rlen = 51, + }, + +}; + +static struct cipher_testvec camellia_lrw_enc_tv_template[] = { + /* Generated from AES-LRW test vectors */ + { + .key = "\x45\x62\xac\x25\xf8\x28\x17\x6d" + "\x4c\x26\x84\x14\xb5\x68\x01\x85" + "\x25\x8e\x2a\x05\xe7\x3e\x9d\x03" + "\xee\x5a\x83\x0c\xcc\x09\x4c\x87", + .klen = 32, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x01", + .input = "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x41\x42\x43\x44\x45\x46", + .ilen = 16, + .result = "\x92\x68\x19\xd7\xb7\x5b\x0a\x31" + "\x97\xcc\x72\xbe\x99\x17\xeb\x3e", + .rlen = 16, + }, { + .key = "\x59\x70\x47\x14\xf5\x57\x47\x8c" + "\xd7\x79\xe8\x0f\x54\x88\x79\x44" + "\x0d\x48\xf0\xb7\xb1\x5a\x53\xea" + "\x1c\xaa\x6b\x29\xc2\xca\xfb\xaf", + .klen = 32, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x02", + .input = "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x41\x42\x43\x44\x45\x46", + .ilen = 16, + .result = "\x73\x09\xb7\x50\xb6\x77\x30\x50" + "\x5c\x8a\x9c\x26\x77\x9d\xfc\x4a", + .rlen = 16, + }, { + .key = "\xd8\x2a\x91\x34\xb2\x6a\x56\x50" + "\x30\xfe\x69\xe2\x37\x7f\x98\x47" + "\xcd\xf9\x0b\x16\x0c\x64\x8f\xb6" + "\xb0\x0d\x0d\x1b\xae\x85\x87\x1f", + .klen = 32, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x02\x00\x00\x00\x00", + .input = "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x41\x42\x43\x44\x45\x46", + .ilen = 16, + .result = "\x90\xae\x83\xe0\x22\xb9\x60\x91" + "\xfa\xa9\xb7\x98\xe3\xed\x87\x01", + .rlen = 16, + }, { + .key = "\x0f\x6a\xef\xf8\xd3\xd2\xbb\x15" + "\x25\x83\xf7\x3c\x1f\x01\x28\x74" + "\xca\xc6\xbc\x35\x4d\x4a\x65\x54" + "\x90\xae\x61\xcf\x7b\xae\xbd\xcc" + "\xad\xe4\x94\xc5\x4a\x29\xae\x70", + .klen = 40, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x01", + .input = "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x41\x42\x43\x44\x45\x46", + .ilen = 16, + .result = "\x99\xe9\x6e\xd4\xc9\x21\xa5\xf0" + "\xd8\x83\xef\xd9\x07\x16\x5f\x35", + .rlen = 16, + }, { + .key = "\x8a\xd4\xee\x10\x2f\xbd\x81\xff" + "\xf8\x86\xce\xac\x93\xc5\xad\xc6" + "\xa0\x19\x07\xc0\x9d\xf7\xbb\xdd" + "\x52\x13\xb2\xb7\xf0\xff\x11\xd8" + "\xd6\x08\xd0\xcd\x2e\xb1\x17\x6f", + .klen = 40, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x02\x00\x00\x00\x00", + .input = "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x41\x42\x43\x44\x45\x46", + .ilen = 16, + .result = "\x42\x88\xf4\xcb\x21\x11\x6d\x8e" + "\xde\x1a\xf2\x29\xf1\x4a\xe0\x15", + .rlen = 16, + }, { + .key = "\xf8\xd4\x76\xff\xd6\x46\xee\x6c" + "\x23\x84\xcb\x1c\x77\xd6\x19\x5d" + "\xfe\xf1\xa9\xf3\x7b\xbc\x8d\x21" + "\xa7\x9c\x21\xf8\xcb\x90\x02\x89" + "\xa8\x45\x34\x8e\xc8\xc5\xb5\xf1" + "\x26\xf5\x0e\x76\xfe\xfd\x1b\x1e", + .klen = 48, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x01", + .input = "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x41\x42\x43\x44\x45\x46", + .ilen = 16, + .result = "\x40\xaa\x34\x86\x4a\x8f\x78\xb9" + "\xdb\xdb\x0f\x3d\x48\x70\xbe\x8d", + .rlen = 16, + }, { + .key = "\xfb\x76\x15\xb2\x3d\x80\x89\x1d" + "\xd4\x70\x98\x0b\xc7\x95\x84\xc8" + "\xb2\xfb\x64\xce\x60\x97\x87\x8d" + "\x17\xfc\xe4\x5a\x49\xe8\x30\xb7" + "\x6e\x78\x17\xe7\x2d\x5e\x12\xd4" + "\x60\x64\x04\x7a\xf1\x2f\x9e\x0c", + .klen = 48, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x02\x00\x00\x00\x00", + .input = "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x41\x42\x43\x44\x45\x46", + .ilen = 16, + .result = 
"\x04\xab\x28\x37\x31\x7a\x26\xab" + "\xa1\x70\x1b\x9c\xe7\xdd\x83\xff", + .rlen = 16, + }, { + .key = "\xf8\xd4\x76\xff\xd6\x46\xee\x6c" + "\x23\x84\xcb\x1c\x77\xd6\x19\x5d" + "\xfe\xf1\xa9\xf3\x7b\xbc\x8d\x21" + "\xa7\x9c\x21\xf8\xcb\x90\x02\x89" + "\xa8\x45\x34\x8e\xc8\xc5\xb5\xf1" + "\x26\xf5\x0e\x76\xfe\xfd\x1b\x1e", + .klen = 48, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x01", + .input = "\x05\x11\xb7\x18\xab\xc6\x2d\xac" + "\x70\x5d\xf6\x22\x94\xcd\xe5\x6c" + "\x17\x6b\xf6\x1c\xf0\xf3\x6e\xf8" + "\x50\x38\x1f\x71\x49\xb6\x57\xd6" + "\x8f\xcb\x8d\x6b\xe3\xa6\x29\x90" + "\xfe\x2a\x62\x82\xae\x6d\x8b\xf6" + "\xad\x1e\x9e\x20\x5f\x38\xbe\x04" + "\xda\x10\x8e\xed\xa2\xa4\x87\xab" + "\xda\x6b\xb4\x0c\x75\xba\xd3\x7c" + "\xc9\xac\x42\x31\x95\x7c\xc9\x04" + "\xeb\xd5\x6e\x32\x69\x8a\xdb\xa6" + "\x15\xd7\x3f\x4f\x2f\x66\x69\x03" + "\x9c\x1f\x54\x0f\xde\x1f\xf3\x65" + "\x4c\x96\x12\xed\x7c\x92\x03\x01" + "\x6f\xbc\x35\x93\xac\xf1\x27\xf1" + "\xb4\x96\x82\x5a\x5f\xb0\xa0\x50" + "\x89\xa4\x8e\x66\x44\x85\xcc\xfd" + "\x33\x14\x70\xe3\x96\xb2\xc3\xd3" + "\xbb\x54\x5a\x1a\xf9\x74\xa2\xc5" + "\x2d\x64\x75\xdd\xb4\x54\xe6\x74" + "\x8c\xd3\x9d\x9e\x86\xab\x51\x53" + "\xb7\x93\x3e\x6f\xd0\x4e\x2c\x40" + "\xf6\xa8\x2e\x3e\x9d\xf4\x66\xa5" + "\x76\x12\x73\x44\x1a\x56\xd7\x72" + "\x88\xcd\x21\x8c\x4c\x0f\xfe\xda" + "\x95\xe0\x3a\xa6\xa5\x84\x46\xcd" + "\xd5\x3e\x9d\x3a\xe2\x67\xe6\x60" + "\x1a\xe2\x70\x85\x58\xc2\x1b\x09" + "\xe1\xd7\x2c\xca\xad\xa8\x8f\xf9" + "\xac\xb3\x0e\xdb\xca\x2e\xe2\xb8" + "\x51\x71\xd9\x3c\x6c\xf1\x56\xf8" + "\xea\x9c\xf1\xfb\x0c\xe6\xb7\x10" + "\x1c\xf8\xa9\x7c\xe8\x53\x35\xc1" + "\x90\x3e\x76\x4a\x74\xa4\x21\x2c" + "\xf6\x2c\x4e\x0f\x94\x3a\x88\x2e" + "\x41\x09\x6a\x33\x7d\xf6\xdd\x3f" + "\x8d\x23\x31\x74\x84\xeb\x88\x6e" + "\xcc\xb9\xbc\x22\x83\x19\x07\x22" + "\xa5\x2d\xdf\xa5\xf3\x80\x85\x78" + "\x84\x39\x6a\x6d\x6a\x99\x4f\xa5" + "\x15\xfe\x46\xb0\xe4\x6c\xa5\x41" + "\x3c\xce\x8f\x42\x60\x71\xa7\x75" + "\x08\x40\x65\x8a\x82\xbf\xf5\x43" + "\x71\x96\xa9\x4d\x44\x8a\x20\xbe" + "\xfa\x4d\xbb\xc0\x7d\x31\x96\x65" + "\xe7\x75\xe5\x3e\xfd\x92\x3b\xc9" + "\x55\xbb\x16\x7e\xf7\xc2\x8c\xa4" + "\x40\x1d\xe5\xef\x0e\xdf\xe4\x9a" + "\x62\x73\x65\xfd\x46\x63\x25\x3d" + "\x2b\xaf\xe5\x64\xfe\xa5\x5c\xcf" + "\x24\xf3\xb4\xac\x64\xba\xdf\x4b" + "\xc6\x96\x7d\x81\x2d\x8d\x97\xf7" + "\xc5\x68\x77\x84\x32\x2b\xcc\x85" + "\x74\x96\xf0\x12\x77\x61\xb9\xeb" + "\x71\xaa\x82\xcb\x1c\xdb\x89\xc8" + "\xc6\xb5\xe3\x5c\x7d\x39\x07\x24" + "\xda\x39\x87\x45\xc0\x2b\xbb\x01" + "\xac\xbc\x2a\x5c\x7f\xfc\xe8\xce" + "\x6d\x9c\x6f\xed\xd3\xc1\xa1\xd6" + "\xc5\x55\xa9\x66\x2f\xe1\xc8\x32" + "\xa6\x5d\xa4\x3a\x98\x73\xe8\x45" + "\xa4\xc7\xa8\xb4\xf6\x13\x03\xf6" + "\xe9\x2e\xc4\x29\x0f\x84\xdb\xc4" + "\x21\xc4\xc2\x75\x67\x89\x37\x0a", + .ilen = 512, + .result = "\x90\x69\x8e\xf2\x14\x86\x59\xf9" + "\xec\xe7\xfa\x3f\x48\x9d\x7f\x96" + "\x67\x76\xac\x2c\xd2\x63\x18\x93" + "\x13\xf8\xf1\xf6\x71\x77\xb3\xee" + "\x93\xb2\xcc\xf3\x26\xc1\x16\x4f" + "\xd4\xe8\x43\xc1\x68\xa3\x3e\x06" + "\x38\x51\xff\xa8\xb9\xa4\xeb\xb1" + "\x62\xdd\x78\x81\xea\x1d\xef\x04" + "\x1d\x07\xc1\x67\xc8\xd6\x77\xa1" + "\x84\x95\xf4\x9a\xd9\xbc\x2d\xe2" + "\xf6\x80\xfc\x91\x2a\xbc\x42\xa0" + "\x40\x41\x69\xaa\x71\xc0\x37\xec" + "\x39\xf3\xf2\xec\x82\xc3\x88\x79" + "\xbc\xc3\xaa\xb7\xcf\x6a\x72\x80" + "\x4c\xf4\x84\x8f\x13\x9e\x94\x5c" + "\xe5\xb2\x91\xbb\x92\x51\x4d\xf1" + "\xd6\x0d\x71\x6b\x7a\xc2\x2f\x12" + "\x6f\x75\xc7\x80\x99\x50\x84\xcf" + "\xa8\xeb\xd6\xe1\x1c\x59\x81\x7e" + 
"\xb9\xb3\xde\x7a\x93\x14\x12\xa2" + "\xf7\x43\xb3\x9d\x1a\x87\x65\x91" + "\x42\x08\x40\x82\x06\x1c\x2d\x55" + "\x6e\x48\xd5\x74\x07\x6e\x9d\x80" + "\xeb\xb4\x97\xa1\x36\xdf\xfa\x74" + "\x79\x7f\x5a\x75\xe7\x71\xc8\x8c" + "\x7e\xf8\x3a\x77\xcd\x32\x05\xf9" + "\x3d\xd4\xe9\xa2\xbb\xc4\x8b\x83" + "\x42\x5c\x82\xfa\xe9\x4b\x96\x3b" + "\x7f\x89\x8b\xf9\xf1\x87\xda\xf0" + "\x87\xef\x13\x5d\xf0\xe2\xc5\xc1" + "\xed\x14\xa9\x57\x19\x63\x40\x04" + "\x24\xeb\x6e\x19\xd1\x3d\x70\x78" + "\xeb\xda\x55\x70\x2c\x4f\x41\x5b" + "\x56\x9f\x1a\xd3\xac\xf1\xc0\xc3" + "\x21\xec\xd7\xd2\x55\x32\x7c\x2e" + "\x3c\x48\x8e\xb4\x85\x35\x47\xfe" + "\xe2\x88\x79\x98\x6a\xc9\x8d\xff" + "\xe9\x89\x6e\xb8\xe2\x97\x00\xbd" + "\xa4\x8f\xba\xd0\x8c\xcb\x79\x99" + "\xb3\xb2\xb2\x7a\xc3\xb7\xef\x75" + "\x23\x52\x76\xc3\x50\x6e\x66\xf8" + "\xa2\xe2\xce\xba\x40\x21\x3f\xc9" + "\x0a\x32\x7f\xf7\x08\x8c\x66\xcf" + "\xd3\xdf\x57\x59\x83\xb8\xe1\x85" + "\xd6\x8f\xfb\x48\x1f\x3a\xc4\x2f" + "\xb4\x2d\x58\xab\xd8\x7f\x5e\x3a" + "\xbc\x62\x3e\xe2\x6a\x52\x0d\x76" + "\x2f\x1c\x1a\x30\xed\x95\x2a\x44" + "\x35\xa5\x83\x04\x84\x01\x99\x56" + "\xb7\xe3\x10\x96\xfa\xdc\x19\xdd" + "\xe2\x7f\xcb\xa0\x49\x1b\xff\x4c" + "\x73\xf6\xbb\x94\x00\xe8\xa9\x3d" + "\xe2\x20\xe9\x3f\xfa\x07\x5d\x77" + "\x06\xd5\x4f\x4d\x02\xb8\x40\x1b" + "\x30\xed\x1a\x50\x19\xef\xc4\x2c" + "\x02\xd9\xc5\xd3\x11\x33\x37\xe5" + "\x2b\xa3\x95\xa6\xee\xd8\x74\x1d" + "\x68\xa0\xeb\xbf\xdd\x5e\x99\x96" + "\x91\xc3\x94\x24\xa5\x12\xa2\x37" + "\xb3\xac\xcf\x2a\xfd\x55\x34\xfe" + "\x79\x92\x3e\xe6\x1b\x49\x57\x5d" + "\x93\x6c\x01\xf7\xcc\x4e\x20\xd1" + "\xb2\x1a\xd8\x4c\xbd\x1d\x10\xe9" + "\x5a\xa8\x92\x7f\xba\xe6\x0c\x95", + .rlen = 512, + }, +}; + +static struct cipher_testvec camellia_lrw_dec_tv_template[] = { + /* Generated from AES-LRW test vectors */ + /* same as enc vectors with input and result reversed */ + { + .key = "\x45\x62\xac\x25\xf8\x28\x17\x6d" + "\x4c\x26\x84\x14\xb5\x68\x01\x85" + "\x25\x8e\x2a\x05\xe7\x3e\x9d\x03" + "\xee\x5a\x83\x0c\xcc\x09\x4c\x87", + .klen = 32, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x01", + .input = "\x92\x68\x19\xd7\xb7\x5b\x0a\x31" + "\x97\xcc\x72\xbe\x99\x17\xeb\x3e", + .ilen = 16, + .result = "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x41\x42\x43\x44\x45\x46", + .rlen = 16, + }, { + .key = "\x59\x70\x47\x14\xf5\x57\x47\x8c" + "\xd7\x79\xe8\x0f\x54\x88\x79\x44" + "\x0d\x48\xf0\xb7\xb1\x5a\x53\xea" + "\x1c\xaa\x6b\x29\xc2\xca\xfb\xaf", + .klen = 32, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x02", + .input = "\x73\x09\xb7\x50\xb6\x77\x30\x50" + "\x5c\x8a\x9c\x26\x77\x9d\xfc\x4a", + .ilen = 16, + .result = "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x41\x42\x43\x44\x45\x46", + .rlen = 16, + }, { + .key = "\xd8\x2a\x91\x34\xb2\x6a\x56\x50" + "\x30\xfe\x69\xe2\x37\x7f\x98\x47" + "\xcd\xf9\x0b\x16\x0c\x64\x8f\xb6" + "\xb0\x0d\x0d\x1b\xae\x85\x87\x1f", + .klen = 32, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x02\x00\x00\x00\x00", + .input = "\x90\xae\x83\xe0\x22\xb9\x60\x91" + "\xfa\xa9\xb7\x98\xe3\xed\x87\x01", + .ilen = 16, + .result = "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x41\x42\x43\x44\x45\x46", + .rlen = 16, + }, { + .key = "\x0f\x6a\xef\xf8\xd3\xd2\xbb\x15" + "\x25\x83\xf7\x3c\x1f\x01\x28\x74" + "\xca\xc6\xbc\x35\x4d\x4a\x65\x54" + "\x90\xae\x61\xcf\x7b\xae\xbd\xcc" + "\xad\xe4\x94\xc5\x4a\x29\xae\x70", + .klen = 40, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x01", + .input = 
"\x99\xe9\x6e\xd4\xc9\x21\xa5\xf0" + "\xd8\x83\xef\xd9\x07\x16\x5f\x35", + .ilen = 16, + .result = "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x41\x42\x43\x44\x45\x46", + .rlen = 16, + }, { + .key = "\x8a\xd4\xee\x10\x2f\xbd\x81\xff" + "\xf8\x86\xce\xac\x93\xc5\xad\xc6" + "\xa0\x19\x07\xc0\x9d\xf7\xbb\xdd" + "\x52\x13\xb2\xb7\xf0\xff\x11\xd8" + "\xd6\x08\xd0\xcd\x2e\xb1\x17\x6f", + .klen = 40, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x02\x00\x00\x00\x00", + .input = "\x42\x88\xf4\xcb\x21\x11\x6d\x8e" + "\xde\x1a\xf2\x29\xf1\x4a\xe0\x15", + .ilen = 16, + .result = "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x41\x42\x43\x44\x45\x46", + .rlen = 16, + }, { + .key = "\xf8\xd4\x76\xff\xd6\x46\xee\x6c" + "\x23\x84\xcb\x1c\x77\xd6\x19\x5d" + "\xfe\xf1\xa9\xf3\x7b\xbc\x8d\x21" + "\xa7\x9c\x21\xf8\xcb\x90\x02\x89" + "\xa8\x45\x34\x8e\xc8\xc5\xb5\xf1" + "\x26\xf5\x0e\x76\xfe\xfd\x1b\x1e", + .klen = 48, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x01", + .input = "\x40\xaa\x34\x86\x4a\x8f\x78\xb9" + "\xdb\xdb\x0f\x3d\x48\x70\xbe\x8d", + .ilen = 16, + .result = "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x41\x42\x43\x44\x45\x46", + .rlen = 16, + }, { + .key = "\xfb\x76\x15\xb2\x3d\x80\x89\x1d" + "\xd4\x70\x98\x0b\xc7\x95\x84\xc8" + "\xb2\xfb\x64\xce\x60\x97\x87\x8d" + "\x17\xfc\xe4\x5a\x49\xe8\x30\xb7" + "\x6e\x78\x17\xe7\x2d\x5e\x12\xd4" + "\x60\x64\x04\x7a\xf1\x2f\x9e\x0c", + .klen = 48, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x02\x00\x00\x00\x00", + .input = "\x04\xab\x28\x37\x31\x7a\x26\xab" + "\xa1\x70\x1b\x9c\xe7\xdd\x83\xff", + .ilen = 16, + .result = "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x41\x42\x43\x44\x45\x46", + .rlen = 16, + }, { + .key = "\xf8\xd4\x76\xff\xd6\x46\xee\x6c" + "\x23\x84\xcb\x1c\x77\xd6\x19\x5d" + "\xfe\xf1\xa9\xf3\x7b\xbc\x8d\x21" + "\xa7\x9c\x21\xf8\xcb\x90\x02\x89" + "\xa8\x45\x34\x8e\xc8\xc5\xb5\xf1" + "\x26\xf5\x0e\x76\xfe\xfd\x1b\x1e", + .klen = 48, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x01", + .input = "\x90\x69\x8e\xf2\x14\x86\x59\xf9" + "\xec\xe7\xfa\x3f\x48\x9d\x7f\x96" + "\x67\x76\xac\x2c\xd2\x63\x18\x93" + "\x13\xf8\xf1\xf6\x71\x77\xb3\xee" + "\x93\xb2\xcc\xf3\x26\xc1\x16\x4f" + "\xd4\xe8\x43\xc1\x68\xa3\x3e\x06" + "\x38\x51\xff\xa8\xb9\xa4\xeb\xb1" + "\x62\xdd\x78\x81\xea\x1d\xef\x04" + "\x1d\x07\xc1\x67\xc8\xd6\x77\xa1" + "\x84\x95\xf4\x9a\xd9\xbc\x2d\xe2" + "\xf6\x80\xfc\x91\x2a\xbc\x42\xa0" + "\x40\x41\x69\xaa\x71\xc0\x37\xec" + "\x39\xf3\xf2\xec\x82\xc3\x88\x79" + "\xbc\xc3\xaa\xb7\xcf\x6a\x72\x80" + "\x4c\xf4\x84\x8f\x13\x9e\x94\x5c" + "\xe5\xb2\x91\xbb\x92\x51\x4d\xf1" + "\xd6\x0d\x71\x6b\x7a\xc2\x2f\x12" + "\x6f\x75\xc7\x80\x99\x50\x84\xcf" + "\xa8\xeb\xd6\xe1\x1c\x59\x81\x7e" + "\xb9\xb3\xde\x7a\x93\x14\x12\xa2" + "\xf7\x43\xb3\x9d\x1a\x87\x65\x91" + "\x42\x08\x40\x82\x06\x1c\x2d\x55" + "\x6e\x48\xd5\x74\x07\x6e\x9d\x80" + "\xeb\xb4\x97\xa1\x36\xdf\xfa\x74" + "\x79\x7f\x5a\x75\xe7\x71\xc8\x8c" + "\x7e\xf8\x3a\x77\xcd\x32\x05\xf9" + "\x3d\xd4\xe9\xa2\xbb\xc4\x8b\x83" + "\x42\x5c\x82\xfa\xe9\x4b\x96\x3b" + "\x7f\x89\x8b\xf9\xf1\x87\xda\xf0" + "\x87\xef\x13\x5d\xf0\xe2\xc5\xc1" + "\xed\x14\xa9\x57\x19\x63\x40\x04" + "\x24\xeb\x6e\x19\xd1\x3d\x70\x78" + "\xeb\xda\x55\x70\x2c\x4f\x41\x5b" + "\x56\x9f\x1a\xd3\xac\xf1\xc0\xc3" + "\x21\xec\xd7\xd2\x55\x32\x7c\x2e" + "\x3c\x48\x8e\xb4\x85\x35\x47\xfe" + "\xe2\x88\x79\x98\x6a\xc9\x8d\xff" + "\xe9\x89\x6e\xb8\xe2\x97\x00\xbd" + "\xa4\x8f\xba\xd0\x8c\xcb\x79\x99" + 
"\xb3\xb2\xb2\x7a\xc3\xb7\xef\x75" + "\x23\x52\x76\xc3\x50\x6e\x66\xf8" + "\xa2\xe2\xce\xba\x40\x21\x3f\xc9" + "\x0a\x32\x7f\xf7\x08\x8c\x66\xcf" + "\xd3\xdf\x57\x59\x83\xb8\xe1\x85" + "\xd6\x8f\xfb\x48\x1f\x3a\xc4\x2f" + "\xb4\x2d\x58\xab\xd8\x7f\x5e\x3a" + "\xbc\x62\x3e\xe2\x6a\x52\x0d\x76" + "\x2f\x1c\x1a\x30\xed\x95\x2a\x44" + "\x35\xa5\x83\x04\x84\x01\x99\x56" + "\xb7\xe3\x10\x96\xfa\xdc\x19\xdd" + "\xe2\x7f\xcb\xa0\x49\x1b\xff\x4c" + "\x73\xf6\xbb\x94\x00\xe8\xa9\x3d" + "\xe2\x20\xe9\x3f\xfa\x07\x5d\x77" + "\x06\xd5\x4f\x4d\x02\xb8\x40\x1b" + "\x30\xed\x1a\x50\x19\xef\xc4\x2c" + "\x02\xd9\xc5\xd3\x11\x33\x37\xe5" + "\x2b\xa3\x95\xa6\xee\xd8\x74\x1d" + "\x68\xa0\xeb\xbf\xdd\x5e\x99\x96" + "\x91\xc3\x94\x24\xa5\x12\xa2\x37" + "\xb3\xac\xcf\x2a\xfd\x55\x34\xfe" + "\x79\x92\x3e\xe6\x1b\x49\x57\x5d" + "\x93\x6c\x01\xf7\xcc\x4e\x20\xd1" + "\xb2\x1a\xd8\x4c\xbd\x1d\x10\xe9" + "\x5a\xa8\x92\x7f\xba\xe6\x0c\x95", + .ilen = 512, + .result = "\x05\x11\xb7\x18\xab\xc6\x2d\xac" + "\x70\x5d\xf6\x22\x94\xcd\xe5\x6c" + "\x17\x6b\xf6\x1c\xf0\xf3\x6e\xf8" + "\x50\x38\x1f\x71\x49\xb6\x57\xd6" + "\x8f\xcb\x8d\x6b\xe3\xa6\x29\x90" + "\xfe\x2a\x62\x82\xae\x6d\x8b\xf6" + "\xad\x1e\x9e\x20\x5f\x38\xbe\x04" + "\xda\x10\x8e\xed\xa2\xa4\x87\xab" + "\xda\x6b\xb4\x0c\x75\xba\xd3\x7c" + "\xc9\xac\x42\x31\x95\x7c\xc9\x04" + "\xeb\xd5\x6e\x32\x69\x8a\xdb\xa6" + "\x15\xd7\x3f\x4f\x2f\x66\x69\x03" + "\x9c\x1f\x54\x0f\xde\x1f\xf3\x65" + "\x4c\x96\x12\xed\x7c\x92\x03\x01" + "\x6f\xbc\x35\x93\xac\xf1\x27\xf1" + "\xb4\x96\x82\x5a\x5f\xb0\xa0\x50" + "\x89\xa4\x8e\x66\x44\x85\xcc\xfd" + "\x33\x14\x70\xe3\x96\xb2\xc3\xd3" + "\xbb\x54\x5a\x1a\xf9\x74\xa2\xc5" + "\x2d\x64\x75\xdd\xb4\x54\xe6\x74" + "\x8c\xd3\x9d\x9e\x86\xab\x51\x53" + "\xb7\x93\x3e\x6f\xd0\x4e\x2c\x40" + "\xf6\xa8\x2e\x3e\x9d\xf4\x66\xa5" + "\x76\x12\x73\x44\x1a\x56\xd7\x72" + "\x88\xcd\x21\x8c\x4c\x0f\xfe\xda" + "\x95\xe0\x3a\xa6\xa5\x84\x46\xcd" + "\xd5\x3e\x9d\x3a\xe2\x67\xe6\x60" + "\x1a\xe2\x70\x85\x58\xc2\x1b\x09" + "\xe1\xd7\x2c\xca\xad\xa8\x8f\xf9" + "\xac\xb3\x0e\xdb\xca\x2e\xe2\xb8" + "\x51\x71\xd9\x3c\x6c\xf1\x56\xf8" + "\xea\x9c\xf1\xfb\x0c\xe6\xb7\x10" + "\x1c\xf8\xa9\x7c\xe8\x53\x35\xc1" + "\x90\x3e\x76\x4a\x74\xa4\x21\x2c" + "\xf6\x2c\x4e\x0f\x94\x3a\x88\x2e" + "\x41\x09\x6a\x33\x7d\xf6\xdd\x3f" + "\x8d\x23\x31\x74\x84\xeb\x88\x6e" + "\xcc\xb9\xbc\x22\x83\x19\x07\x22" + "\xa5\x2d\xdf\xa5\xf3\x80\x85\x78" + "\x84\x39\x6a\x6d\x6a\x99\x4f\xa5" + "\x15\xfe\x46\xb0\xe4\x6c\xa5\x41" + "\x3c\xce\x8f\x42\x60\x71\xa7\x75" + "\x08\x40\x65\x8a\x82\xbf\xf5\x43" + "\x71\x96\xa9\x4d\x44\x8a\x20\xbe" + "\xfa\x4d\xbb\xc0\x7d\x31\x96\x65" + "\xe7\x75\xe5\x3e\xfd\x92\x3b\xc9" + "\x55\xbb\x16\x7e\xf7\xc2\x8c\xa4" + "\x40\x1d\xe5\xef\x0e\xdf\xe4\x9a" + "\x62\x73\x65\xfd\x46\x63\x25\x3d" + "\x2b\xaf\xe5\x64\xfe\xa5\x5c\xcf" + "\x24\xf3\xb4\xac\x64\xba\xdf\x4b" + "\xc6\x96\x7d\x81\x2d\x8d\x97\xf7" + "\xc5\x68\x77\x84\x32\x2b\xcc\x85" + "\x74\x96\xf0\x12\x77\x61\xb9\xeb" + "\x71\xaa\x82\xcb\x1c\xdb\x89\xc8" + "\xc6\xb5\xe3\x5c\x7d\x39\x07\x24" + "\xda\x39\x87\x45\xc0\x2b\xbb\x01" + "\xac\xbc\x2a\x5c\x7f\xfc\xe8\xce" + "\x6d\x9c\x6f\xed\xd3\xc1\xa1\xd6" + "\xc5\x55\xa9\x66\x2f\xe1\xc8\x32" + "\xa6\x5d\xa4\x3a\x98\x73\xe8\x45" + "\xa4\xc7\xa8\xb4\xf6\x13\x03\xf6" + "\xe9\x2e\xc4\x29\x0f\x84\xdb\xc4" + "\x21\xc4\xc2\x75\x67\x89\x37\x0a", + .rlen = 512, + }, +}; + +static struct cipher_testvec camellia_xts_enc_tv_template[] = { + /* Generated from AES-XTS test vectors */ + { + .key = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00" + 
"\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00", + .klen = 32, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00", + .input = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00", + .ilen = 32, + .result = "\x06\xcb\xa5\xf1\x04\x63\xb2\x41" + "\xdc\xca\xfa\x09\xba\x74\xb9\x05" + "\x78\xba\xa4\xf8\x67\x4d\x7e\xad" + "\x20\x18\xf5\x0c\x41\x16\x2a\x61", + .rlen = 32, + }, { + .key = "\x11\x11\x11\x11\x11\x11\x11\x11" + "\x11\x11\x11\x11\x11\x11\x11\x11" + "\x22\x22\x22\x22\x22\x22\x22\x22" + "\x22\x22\x22\x22\x22\x22\x22\x22", + .klen = 32, + .iv = "\x33\x33\x33\x33\x33\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00", + .input = "\x44\x44\x44\x44\x44\x44\x44\x44" + "\x44\x44\x44\x44\x44\x44\x44\x44" + "\x44\x44\x44\x44\x44\x44\x44\x44" + "\x44\x44\x44\x44\x44\x44\x44\x44", + .ilen = 32, + .result = "\xc2\xb9\xdc\x44\x1d\xdf\xf2\x86" + "\x8d\x35\x42\x0a\xa5\x5e\x3d\x4f" + "\xb5\x37\x06\xff\xbd\xd4\x91\x70" + "\x80\x1f\xb2\x39\x10\x89\x44\xf5", + .rlen = 32, + }, { + .key = "\xff\xfe\xfd\xfc\xfb\xfa\xf9\xf8" + "\xf7\xf6\xf5\xf4\xf3\xf2\xf1\xf0" + "\x22\x22\x22\x22\x22\x22\x22\x22" + "\x22\x22\x22\x22\x22\x22\x22\x22", + .klen = 32, + .iv = "\x33\x33\x33\x33\x33\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00", + .input = "\x44\x44\x44\x44\x44\x44\x44\x44" + "\x44\x44\x44\x44\x44\x44\x44\x44" + "\x44\x44\x44\x44\x44\x44\x44\x44" + "\x44\x44\x44\x44\x44\x44\x44\x44", + .ilen = 32, + .result = "\x52\x1f\x9d\xf5\x5a\x58\x5a\x7e" + "\x9f\xd0\x8e\x02\x9c\x9a\x6a\xa7" + "\xb4\x3b\xce\xe7\x17\xaa\x89\x6a" + "\x35\x3c\x6b\xb5\x61\x1c\x79\x38", + .rlen = 32, + }, { + .key = "\x27\x18\x28\x18\x28\x45\x90\x45" + "\x23\x53\x60\x28\x74\x71\x35\x26" + "\x31\x41\x59\x26\x53\x58\x97\x93" + "\x23\x84\x62\x64\x33\x83\x27\x95", + .klen = 32, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00", + .input = "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f" + "\x10\x11\x12\x13\x14\x15\x16\x17" + "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f" + "\x20\x21\x22\x23\x24\x25\x26\x27" + "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f" + "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f" + "\x40\x41\x42\x43\x44\x45\x46\x47" + "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f" + "\x50\x51\x52\x53\x54\x55\x56\x57" + "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f" + "\x60\x61\x62\x63\x64\x65\x66\x67" + "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f" + "\x70\x71\x72\x73\x74\x75\x76\x77" + "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f" + "\x80\x81\x82\x83\x84\x85\x86\x87" + "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f" + "\x90\x91\x92\x93\x94\x95\x96\x97" + "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f" + "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7" + "\xa8\xa9\xaa\xab\xac\xad\xae\xaf" + "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7" + "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf" + "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7" + "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf" + "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7" + "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf" + "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7" + "\xe8\xe9\xea\xeb\xec\xed\xee\xef" + "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7" + "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff" + "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f" + "\x10\x11\x12\x13\x14\x15\x16\x17" + "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f" + "\x20\x21\x22\x23\x24\x25\x26\x27" + "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f" + "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f" + "\x40\x41\x42\x43\x44\x45\x46\x47" + "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f" + 
"\x50\x51\x52\x53\x54\x55\x56\x57" + "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f" + "\x60\x61\x62\x63\x64\x65\x66\x67" + "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f" + "\x70\x71\x72\x73\x74\x75\x76\x77" + "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f" + "\x80\x81\x82\x83\x84\x85\x86\x87" + "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f" + "\x90\x91\x92\x93\x94\x95\x96\x97" + "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f" + "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7" + "\xa8\xa9\xaa\xab\xac\xad\xae\xaf" + "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7" + "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf" + "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7" + "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf" + "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7" + "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf" + "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7" + "\xe8\xe9\xea\xeb\xec\xed\xee\xef" + "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7" + "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff", + .ilen = 512, + .result = "\xc7\xf9\x0a\xaa\xcb\xb5\x8f\x33" + "\x60\xc3\xe9\x47\x90\xb7\x50\x57" + "\xa3\xad\x81\x2f\xf5\x22\x96\x02" + "\xaa\x7f\xea\xac\x29\x78\xca\x2a" + "\x7c\xcd\x31\x1a\x3c\x40\x0a\x73" + "\x09\x66\xad\x72\x0e\x4d\x5d\x77" + "\xbc\xb8\x76\x80\x37\x59\xa9\x01" + "\x9e\xfb\xdb\x6c\x93\xef\xb6\x8d" + "\x1e\xc1\x94\xa8\xd4\xb5\xb0\x01" + "\xd5\x01\x97\x28\xcd\x7a\x1f\xe8" + "\x08\xda\x76\x00\x65\xcf\x7b\x31" + "\xc6\xfa\xf2\x3b\x00\xa7\x6a\x9e" + "\x6c\x43\x80\x87\xe0\xbb\x4e\xe5" + "\xdc\x8a\xdf\xc3\x1d\x1b\x41\x04" + "\xfb\x54\xdd\x29\x27\xc2\x65\x17" + "\x36\x88\xb0\x85\x8d\x73\x7e\x4b" + "\x1d\x16\x8a\x52\xbc\xa6\xbc\xa4" + "\x8c\xd1\x04\x16\xbf\x8c\x01\x0f" + "\x7e\x6b\x59\x15\x29\xd1\x9b\xd3" + "\x6c\xee\xac\xdc\x45\x58\xca\x5b" + "\x70\x0e\x6a\x12\x86\x82\x79\x9f" + "\x16\xd4\x9d\x67\xcd\x70\x65\x26" + "\x21\x72\x1e\xa1\x94\x8a\x83\x0c" + "\x92\x42\x58\x5e\xa2\xc5\x31\xf3" + "\x7b\xd1\x31\xd4\x15\x80\x31\x61" + "\x5c\x53\x10\xdd\xea\xc8\x83\x5c" + "\x7d\xa7\x05\x66\xcc\x1e\xbb\x05" + "\x47\xae\xb4\x0f\x84\xd8\xf6\xb5" + "\xa1\xc6\x52\x00\x52\xe8\xdc\xd9" + "\x16\x31\xb2\x47\x91\x67\xaa\x28" + "\x2c\x29\x85\xa3\xf7\xf2\x24\x93" + "\x23\x80\x1f\xa8\x1b\x82\x8d\xdc" + "\x9f\x0b\xcd\xb4\x3c\x20\xbc\xec" + "\x4f\xc7\xee\xf8\xfd\xd9\xfb\x7e" + "\x3f\x0d\x23\xfa\x3f\xa7\xcc\x66" + "\x1c\xfe\xa6\x86\xf6\xf7\x85\xc7" + "\x43\xc1\xd4\xfc\xe4\x79\xc9\x1d" + "\xf8\x89\xcd\x20\x27\x84\x5d\x5c" + "\x8e\x4f\x1f\xeb\x08\x21\x4f\xa3" + "\xe0\x7e\x0b\x9c\xe7\x42\xcf\xb7" + "\x3f\x43\xcc\x86\x71\x34\x6a\xd9" + "\x5e\xec\x8f\x36\xc9\x0a\x03\xfe" + "\x18\x41\xdc\x9e\x2e\x75\x20\x3e" + "\xcc\x77\xe0\x8f\xe8\x43\x37\x4c" + "\xed\x1a\x5a\xb3\xfa\x43\xc9\x71" + "\x9f\xc5\xce\xcf\xff\xe7\x77\x1e" + "\x35\x93\xde\x6b\xc0\x6a\x7e\xa9" + "\x34\xb8\x27\x74\x08\xda\xf2\x4a" + "\x23\x5b\x9f\x55\x3a\x57\x82\x52" + "\xea\x6d\xc3\xc7\xf2\xc8\xb5\xdc" + "\xc5\xb9\xbb\xaa\xf2\x29\x9f\x49" + "\x7a\xef\xfe\xdc\x9f\xc9\x28\xe2" + "\x96\x0b\x35\x84\x05\x0d\xd6\x2a" + "\xea\x5a\xbf\x69\xde\xee\x4f\x8f" + "\x84\xb9\xcf\xa7\x57\xea\xe0\xe8" + "\x96\xef\x0f\x0e\xec\xc7\xa6\x74" + "\xb1\xfe\x7a\x6d\x11\xdd\x0e\x15" + "\x4a\x1e\x73\x7f\x55\xea\xf6\xe1" + "\x5b\xb6\x71\xda\xb0\x0c\xba\x26" + "\x5c\x48\x38\x6d\x1c\x32\xb2\x7d" + "\x05\x87\xc2\x1e\x7e\x2d\xd4\x33" + "\xcc\x06\xdb\xe7\x82\x29\x63\xd1" + "\x52\x84\x4f\xee\x27\xe8\x02\xd4" + "\x34\x3c\x69\xc2\xbd\x20\xe6\x7a", + .rlen = 512, + }, { + .key = "\x27\x18\x28\x18\x28\x45\x90\x45" + "\x23\x53\x60\x28\x74\x71\x35\x26" + "\x62\x49\x77\x57\x24\x70\x93\x69" + "\x99\x59\x57\x49\x66\x96\x76\x27" + "\x31\x41\x59\x26\x53\x58\x97\x93" + "\x23\x84\x62\x64\x33\x83\x27\x95" + "\x02\x88\x41\x97\x16\x93\x99\x37" + "\x51\x05\x82\x09\x74\x94\x45\x92", + .klen = 64, + .iv = 
"\xff\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00", + .input = "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f" + "\x10\x11\x12\x13\x14\x15\x16\x17" + "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f" + "\x20\x21\x22\x23\x24\x25\x26\x27" + "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f" + "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f" + "\x40\x41\x42\x43\x44\x45\x46\x47" + "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f" + "\x50\x51\x52\x53\x54\x55\x56\x57" + "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f" + "\x60\x61\x62\x63\x64\x65\x66\x67" + "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f" + "\x70\x71\x72\x73\x74\x75\x76\x77" + "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f" + "\x80\x81\x82\x83\x84\x85\x86\x87" + "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f" + "\x90\x91\x92\x93\x94\x95\x96\x97" + "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f" + "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7" + "\xa8\xa9\xaa\xab\xac\xad\xae\xaf" + "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7" + "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf" + "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7" + "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf" + "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7" + "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf" + "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7" + "\xe8\xe9\xea\xeb\xec\xed\xee\xef" + "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7" + "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff" + "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f" + "\x10\x11\x12\x13\x14\x15\x16\x17" + "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f" + "\x20\x21\x22\x23\x24\x25\x26\x27" + "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f" + "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f" + "\x40\x41\x42\x43\x44\x45\x46\x47" + "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f" + "\x50\x51\x52\x53\x54\x55\x56\x57" + "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f" + "\x60\x61\x62\x63\x64\x65\x66\x67" + "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f" + "\x70\x71\x72\x73\x74\x75\x76\x77" + "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f" + "\x80\x81\x82\x83\x84\x85\x86\x87" + "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f" + "\x90\x91\x92\x93\x94\x95\x96\x97" + "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f" + "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7" + "\xa8\xa9\xaa\xab\xac\xad\xae\xaf" + "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7" + "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf" + "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7" + "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf" + "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7" + "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf" + "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7" + "\xe8\xe9\xea\xeb\xec\xed\xee\xef" + "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7" + "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff", + .ilen = 512, + .result = "\x49\xcd\xb8\xbf\x2f\x73\x37\x28" + "\x9a\x7f\x6e\x57\x55\xb8\x07\x88" + "\x4a\x0d\x8b\x55\x60\xed\xb6\x7b" + "\xf1\x74\xac\x96\x05\x7b\x32\xca" + "\xd1\x4e\xf1\x58\x29\x16\x24\x6c" + "\xf2\xb3\xe4\x88\x84\xac\x4d\xee" + "\x97\x07\x82\xf0\x07\x12\x38\x0a" + "\x67\x62\xaf\xfd\x85\x9f\x0a\x55" + "\xa5\x20\xc5\x60\xe4\x68\x53\xa4" + "\x0e\x2e\x65\xe3\xe4\x0c\x30\x7c" + "\x1c\x01\x4f\x55\xa9\x13\xeb\x25" + "\x21\x87\xbc\xd3\xe7\x67\x4f\x38" + "\xa8\x14\x25\x71\xe9\x2e\x4c\x21" + "\x41\x82\x0c\x45\x39\x35\xa8\x75" + "\x03\x29\x01\x84\x8c\xab\x48\xbe" + "\x11\x56\x22\x67\xb7\x67\x1a\x09" + "\xa1\x72\x25\x41\x3c\x39\x65\x80" + "\x7d\x2f\xf8\x2c\x73\x04\x58\x9d" + "\xdd\x16\x8b\x63\x70\x4e\xc5\x17" + "\x21\xe0\x84\x51\x4b\x6f\x05\x52" + "\xe3\x63\x34\xfa\xa4\xaf\x33\x20" + "\xc1\xae\x32\xc4\xb8\x2b\xdb\x76" + "\xd9\x02\x31\x2f\xa3\xc6\xd0\x7b" + "\xaf\x1b\x84\xe3\x9b\xbf\xa6\xe0" + "\xb8\x8a\x13\x88\x71\xf4\x11\xa5" + "\xe9\xa9\x10\x33\xe0\xbe\x49\x89" + "\x41\x22\xf5\x9d\x80\x3e\x3b\x76" + "\x01\x16\x50\x6e\x7c\x6a\x81\xe9" + "\x13\x2c\xde\xb2\x5f\x79\xba\xb2" + 
"\xb1\x75\xae\xd2\x07\x98\x4b\x69" + "\xae\x7d\x5b\x90\xc2\x6c\xe6\x98" + "\xd3\x4c\xa1\xa3\x9c\xc9\x33\x6a" + "\x0d\x23\xb1\x79\x25\x13\x4b\xe5" + "\xaf\x93\x20\x5c\x7f\x06\x7a\x34" + "\x0b\x78\xe3\x67\x26\xe0\xad\x95" + "\xc5\x4e\x26\x22\xcf\x73\x77\x62" + "\x3e\x10\xd7\x90\x4b\x52\x1c\xc9" + "\xef\x38\x52\x18\x0e\x29\x7e\xef" + "\x34\xfe\x31\x95\xc5\xbc\xa8\xe2" + "\xa8\x4e\x9f\xea\xa6\xf0\xfe\x5d" + "\xc5\x39\x86\xed\x2f\x6d\xa0\xfe" + "\x96\xcd\x41\x10\x78\x4e\x0c\xc9" + "\xc3\x6d\x0f\xb7\xe8\xe0\x62\xab" + "\x8b\xf1\x21\x89\xa1\x12\xaa\xfa" + "\x9d\x70\xbe\x4c\xa8\x98\x89\x01" + "\xb9\xe2\x61\xde\x0c\x4a\x0b\xaa" + "\x89\xf5\x14\x79\x18\x8f\x3b\x0d" + "\x21\x17\xf8\x59\x15\x24\x64\x22" + "\x57\x48\x80\xd5\x3d\x92\x30\x07" + "\xd9\xa1\x4a\x23\x16\x43\x48\x0e" + "\x2b\x2d\x1b\x87\xef\x7e\xbd\xfa" + "\x49\xbc\x7e\x68\x6e\xa8\x46\x95" + "\xad\x5e\xfe\x0a\xa8\xd3\x1a\x5d" + "\x6b\x84\xf3\x00\xba\x52\x05\x02" + "\xe3\x96\x4e\xb6\x79\x3f\x43\xd3" + "\x4d\x3f\xd6\xab\x0a\xc4\x75\x2d" + "\xd1\x08\xc3\x6a\xc8\x37\x29\xa0" + "\xcc\x9a\x05\xdd\x5c\xe1\xff\x66" + "\xf2\x7a\x1d\xf2\xaf\xa9\x48\x89" + "\xf5\x21\x0f\x02\x48\x83\x74\xbf" + "\x2e\xe6\x93\x7b\xa0\xf4\xb1\x2b" + "\xb1\x02\x0a\x5c\x79\x19\x3b\x75" + "\xb7\x16\xd8\x12\x5c\xcd\x7d\x4e" + "\xd5\xc6\x99\xcc\x4e\x6c\x94\x95", + .rlen = 512, + }, +}; + +static struct cipher_testvec camellia_xts_dec_tv_template[] = { + /* Generated from AES-XTS test vectors */ + /* same as enc vectors with input and result reversed */ + { + .key = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00", + .klen = 32, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00", + .input = "\x06\xcb\xa5\xf1\x04\x63\xb2\x41" + "\xdc\xca\xfa\x09\xba\x74\xb9\x05" + "\x78\xba\xa4\xf8\x67\x4d\x7e\xad" + "\x20\x18\xf5\x0c\x41\x16\x2a\x61", + .ilen = 32, + .result = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00", + .rlen = 32, + }, { + .key = "\x11\x11\x11\x11\x11\x11\x11\x11" + "\x11\x11\x11\x11\x11\x11\x11\x11" + "\x22\x22\x22\x22\x22\x22\x22\x22" + "\x22\x22\x22\x22\x22\x22\x22\x22", + .klen = 32, + .iv = "\x33\x33\x33\x33\x33\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00", + .input = "\xc2\xb9\xdc\x44\x1d\xdf\xf2\x86" + "\x8d\x35\x42\x0a\xa5\x5e\x3d\x4f" + "\xb5\x37\x06\xff\xbd\xd4\x91\x70" + "\x80\x1f\xb2\x39\x10\x89\x44\xf5", + .ilen = 32, + .result = "\x44\x44\x44\x44\x44\x44\x44\x44" + "\x44\x44\x44\x44\x44\x44\x44\x44" + "\x44\x44\x44\x44\x44\x44\x44\x44" + "\x44\x44\x44\x44\x44\x44\x44\x44", + .rlen = 32, + }, { + .key = "\xff\xfe\xfd\xfc\xfb\xfa\xf9\xf8" + "\xf7\xf6\xf5\xf4\xf3\xf2\xf1\xf0" + "\x22\x22\x22\x22\x22\x22\x22\x22" + "\x22\x22\x22\x22\x22\x22\x22\x22", + .klen = 32, + .iv = "\x33\x33\x33\x33\x33\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00", + .input = "\x52\x1f\x9d\xf5\x5a\x58\x5a\x7e" + "\x9f\xd0\x8e\x02\x9c\x9a\x6a\xa7" + "\xb4\x3b\xce\xe7\x17\xaa\x89\x6a" + "\x35\x3c\x6b\xb5\x61\x1c\x79\x38", + .ilen = 32, + .result = "\x44\x44\x44\x44\x44\x44\x44\x44" + "\x44\x44\x44\x44\x44\x44\x44\x44" + "\x44\x44\x44\x44\x44\x44\x44\x44" + "\x44\x44\x44\x44\x44\x44\x44\x44", + .rlen = 32, + }, { + .key = "\x27\x18\x28\x18\x28\x45\x90\x45" + "\x23\x53\x60\x28\x74\x71\x35\x26" + "\x31\x41\x59\x26\x53\x58\x97\x93" + "\x23\x84\x62\x64\x33\x83\x27\x95", + .klen = 32, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00", + 
.input = "\xc7\xf9\x0a\xaa\xcb\xb5\x8f\x33" + "\x60\xc3\xe9\x47\x90\xb7\x50\x57" + "\xa3\xad\x81\x2f\xf5\x22\x96\x02" + "\xaa\x7f\xea\xac\x29\x78\xca\x2a" + "\x7c\xcd\x31\x1a\x3c\x40\x0a\x73" + "\x09\x66\xad\x72\x0e\x4d\x5d\x77" + "\xbc\xb8\x76\x80\x37\x59\xa9\x01" + "\x9e\xfb\xdb\x6c\x93\xef\xb6\x8d" + "\x1e\xc1\x94\xa8\xd4\xb5\xb0\x01" + "\xd5\x01\x97\x28\xcd\x7a\x1f\xe8" + "\x08\xda\x76\x00\x65\xcf\x7b\x31" + "\xc6\xfa\xf2\x3b\x00\xa7\x6a\x9e" + "\x6c\x43\x80\x87\xe0\xbb\x4e\xe5" + "\xdc\x8a\xdf\xc3\x1d\x1b\x41\x04" + "\xfb\x54\xdd\x29\x27\xc2\x65\x17" + "\x36\x88\xb0\x85\x8d\x73\x7e\x4b" + "\x1d\x16\x8a\x52\xbc\xa6\xbc\xa4" + "\x8c\xd1\x04\x16\xbf\x8c\x01\x0f" + "\x7e\x6b\x59\x15\x29\xd1\x9b\xd3" + "\x6c\xee\xac\xdc\x45\x58\xca\x5b" + "\x70\x0e\x6a\x12\x86\x82\x79\x9f" + "\x16\xd4\x9d\x67\xcd\x70\x65\x26" + "\x21\x72\x1e\xa1\x94\x8a\x83\x0c" + "\x92\x42\x58\x5e\xa2\xc5\x31\xf3" + "\x7b\xd1\x31\xd4\x15\x80\x31\x61" + "\x5c\x53\x10\xdd\xea\xc8\x83\x5c" + "\x7d\xa7\x05\x66\xcc\x1e\xbb\x05" + "\x47\xae\xb4\x0f\x84\xd8\xf6\xb5" + "\xa1\xc6\x52\x00\x52\xe8\xdc\xd9" + "\x16\x31\xb2\x47\x91\x67\xaa\x28" + "\x2c\x29\x85\xa3\xf7\xf2\x24\x93" + "\x23\x80\x1f\xa8\x1b\x82\x8d\xdc" + "\x9f\x0b\xcd\xb4\x3c\x20\xbc\xec" + "\x4f\xc7\xee\xf8\xfd\xd9\xfb\x7e" + "\x3f\x0d\x23\xfa\x3f\xa7\xcc\x66" + "\x1c\xfe\xa6\x86\xf6\xf7\x85\xc7" + "\x43\xc1\xd4\xfc\xe4\x79\xc9\x1d" + "\xf8\x89\xcd\x20\x27\x84\x5d\x5c" + "\x8e\x4f\x1f\xeb\x08\x21\x4f\xa3" + "\xe0\x7e\x0b\x9c\xe7\x42\xcf\xb7" + "\x3f\x43\xcc\x86\x71\x34\x6a\xd9" + "\x5e\xec\x8f\x36\xc9\x0a\x03\xfe" + "\x18\x41\xdc\x9e\x2e\x75\x20\x3e" + "\xcc\x77\xe0\x8f\xe8\x43\x37\x4c" + "\xed\x1a\x5a\xb3\xfa\x43\xc9\x71" + "\x9f\xc5\xce\xcf\xff\xe7\x77\x1e" + "\x35\x93\xde\x6b\xc0\x6a\x7e\xa9" + "\x34\xb8\x27\x74\x08\xda\xf2\x4a" + "\x23\x5b\x9f\x55\x3a\x57\x82\x52" + "\xea\x6d\xc3\xc7\xf2\xc8\xb5\xdc" + "\xc5\xb9\xbb\xaa\xf2\x29\x9f\x49" + "\x7a\xef\xfe\xdc\x9f\xc9\x28\xe2" + "\x96\x0b\x35\x84\x05\x0d\xd6\x2a" + "\xea\x5a\xbf\x69\xde\xee\x4f\x8f" + "\x84\xb9\xcf\xa7\x57\xea\xe0\xe8" + "\x96\xef\x0f\x0e\xec\xc7\xa6\x74" + "\xb1\xfe\x7a\x6d\x11\xdd\x0e\x15" + "\x4a\x1e\x73\x7f\x55\xea\xf6\xe1" + "\x5b\xb6\x71\xda\xb0\x0c\xba\x26" + "\x5c\x48\x38\x6d\x1c\x32\xb2\x7d" + "\x05\x87\xc2\x1e\x7e\x2d\xd4\x33" + "\xcc\x06\xdb\xe7\x82\x29\x63\xd1" + "\x52\x84\x4f\xee\x27\xe8\x02\xd4" + "\x34\x3c\x69\xc2\xbd\x20\xe6\x7a", + .ilen = 512, + .result = "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f" + "\x10\x11\x12\x13\x14\x15\x16\x17" + "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f" + "\x20\x21\x22\x23\x24\x25\x26\x27" + "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f" + "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f" + "\x40\x41\x42\x43\x44\x45\x46\x47" + "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f" + "\x50\x51\x52\x53\x54\x55\x56\x57" + "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f" + "\x60\x61\x62\x63\x64\x65\x66\x67" + "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f" + "\x70\x71\x72\x73\x74\x75\x76\x77" + "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f" + "\x80\x81\x82\x83\x84\x85\x86\x87" + "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f" + "\x90\x91\x92\x93\x94\x95\x96\x97" + "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f" + "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7" + "\xa8\xa9\xaa\xab\xac\xad\xae\xaf" + "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7" + "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf" + "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7" + "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf" + "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7" + "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf" + "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7" + "\xe8\xe9\xea\xeb\xec\xed\xee\xef" + "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7" + 
"\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff" + "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f" + "\x10\x11\x12\x13\x14\x15\x16\x17" + "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f" + "\x20\x21\x22\x23\x24\x25\x26\x27" + "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f" + "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f" + "\x40\x41\x42\x43\x44\x45\x46\x47" + "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f" + "\x50\x51\x52\x53\x54\x55\x56\x57" + "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f" + "\x60\x61\x62\x63\x64\x65\x66\x67" + "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f" + "\x70\x71\x72\x73\x74\x75\x76\x77" + "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f" + "\x80\x81\x82\x83\x84\x85\x86\x87" + "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f" + "\x90\x91\x92\x93\x94\x95\x96\x97" + "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f" + "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7" + "\xa8\xa9\xaa\xab\xac\xad\xae\xaf" + "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7" + "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf" + "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7" + "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf" + "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7" + "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf" + "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7" + "\xe8\xe9\xea\xeb\xec\xed\xee\xef" + "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7" + "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff", + .rlen = 512, + }, { + .key = "\x27\x18\x28\x18\x28\x45\x90\x45" + "\x23\x53\x60\x28\x74\x71\x35\x26" + "\x62\x49\x77\x57\x24\x70\x93\x69" + "\x99\x59\x57\x49\x66\x96\x76\x27" + "\x31\x41\x59\x26\x53\x58\x97\x93" + "\x23\x84\x62\x64\x33\x83\x27\x95" + "\x02\x88\x41\x97\x16\x93\x99\x37" + "\x51\x05\x82\x09\x74\x94\x45\x92", + .klen = 64, + .iv = "\xff\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00", + .input = "\x49\xcd\xb8\xbf\x2f\x73\x37\x28" + "\x9a\x7f\x6e\x57\x55\xb8\x07\x88" + "\x4a\x0d\x8b\x55\x60\xed\xb6\x7b" + "\xf1\x74\xac\x96\x05\x7b\x32\xca" + "\xd1\x4e\xf1\x58\x29\x16\x24\x6c" + "\xf2\xb3\xe4\x88\x84\xac\x4d\xee" + "\x97\x07\x82\xf0\x07\x12\x38\x0a" + "\x67\x62\xaf\xfd\x85\x9f\x0a\x55" + "\xa5\x20\xc5\x60\xe4\x68\x53\xa4" + "\x0e\x2e\x65\xe3\xe4\x0c\x30\x7c" + "\x1c\x01\x4f\x55\xa9\x13\xeb\x25" + "\x21\x87\xbc\xd3\xe7\x67\x4f\x38" + "\xa8\x14\x25\x71\xe9\x2e\x4c\x21" + "\x41\x82\x0c\x45\x39\x35\xa8\x75" + "\x03\x29\x01\x84\x8c\xab\x48\xbe" + "\x11\x56\x22\x67\xb7\x67\x1a\x09" + "\xa1\x72\x25\x41\x3c\x39\x65\x80" + "\x7d\x2f\xf8\x2c\x73\x04\x58\x9d" + "\xdd\x16\x8b\x63\x70\x4e\xc5\x17" + "\x21\xe0\x84\x51\x4b\x6f\x05\x52" + "\xe3\x63\x34\xfa\xa4\xaf\x33\x20" + "\xc1\xae\x32\xc4\xb8\x2b\xdb\x76" + "\xd9\x02\x31\x2f\xa3\xc6\xd0\x7b" + "\xaf\x1b\x84\xe3\x9b\xbf\xa6\xe0" + "\xb8\x8a\x13\x88\x71\xf4\x11\xa5" + "\xe9\xa9\x10\x33\xe0\xbe\x49\x89" + "\x41\x22\xf5\x9d\x80\x3e\x3b\x76" + "\x01\x16\x50\x6e\x7c\x6a\x81\xe9" + "\x13\x2c\xde\xb2\x5f\x79\xba\xb2" + "\xb1\x75\xae\xd2\x07\x98\x4b\x69" + "\xae\x7d\x5b\x90\xc2\x6c\xe6\x98" + "\xd3\x4c\xa1\xa3\x9c\xc9\x33\x6a" + "\x0d\x23\xb1\x79\x25\x13\x4b\xe5" + "\xaf\x93\x20\x5c\x7f\x06\x7a\x34" + "\x0b\x78\xe3\x67\x26\xe0\xad\x95" + "\xc5\x4e\x26\x22\xcf\x73\x77\x62" + "\x3e\x10\xd7\x90\x4b\x52\x1c\xc9" + "\xef\x38\x52\x18\x0e\x29\x7e\xef" + "\x34\xfe\x31\x95\xc5\xbc\xa8\xe2" + "\xa8\x4e\x9f\xea\xa6\xf0\xfe\x5d" + "\xc5\x39\x86\xed\x2f\x6d\xa0\xfe" + "\x96\xcd\x41\x10\x78\x4e\x0c\xc9" + "\xc3\x6d\x0f\xb7\xe8\xe0\x62\xab" + "\x8b\xf1\x21\x89\xa1\x12\xaa\xfa" + "\x9d\x70\xbe\x4c\xa8\x98\x89\x01" + "\xb9\xe2\x61\xde\x0c\x4a\x0b\xaa" + "\x89\xf5\x14\x79\x18\x8f\x3b\x0d" + "\x21\x17\xf8\x59\x15\x24\x64\x22" + "\x57\x48\x80\xd5\x3d\x92\x30\x07" + "\xd9\xa1\x4a\x23\x16\x43\x48\x0e" + "\x2b\x2d\x1b\x87\xef\x7e\xbd\xfa" + 
"\x49\xbc\x7e\x68\x6e\xa8\x46\x95" + "\xad\x5e\xfe\x0a\xa8\xd3\x1a\x5d" + "\x6b\x84\xf3\x00\xba\x52\x05\x02" + "\xe3\x96\x4e\xb6\x79\x3f\x43\xd3" + "\x4d\x3f\xd6\xab\x0a\xc4\x75\x2d" + "\xd1\x08\xc3\x6a\xc8\x37\x29\xa0" + "\xcc\x9a\x05\xdd\x5c\xe1\xff\x66" + "\xf2\x7a\x1d\xf2\xaf\xa9\x48\x89" + "\xf5\x21\x0f\x02\x48\x83\x74\xbf" + "\x2e\xe6\x93\x7b\xa0\xf4\xb1\x2b" + "\xb1\x02\x0a\x5c\x79\x19\x3b\x75" + "\xb7\x16\xd8\x12\x5c\xcd\x7d\x4e" + "\xd5\xc6\x99\xcc\x4e\x6c\x94\x95", + .ilen = 512, + .result = "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f" + "\x10\x11\x12\x13\x14\x15\x16\x17" + "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f" + "\x20\x21\x22\x23\x24\x25\x26\x27" + "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f" + "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f" + "\x40\x41\x42\x43\x44\x45\x46\x47" + "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f" + "\x50\x51\x52\x53\x54\x55\x56\x57" + "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f" + "\x60\x61\x62\x63\x64\x65\x66\x67" + "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f" + "\x70\x71\x72\x73\x74\x75\x76\x77" + "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f" + "\x80\x81\x82\x83\x84\x85\x86\x87" + "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f" + "\x90\x91\x92\x93\x94\x95\x96\x97" + "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f" + "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7" + "\xa8\xa9\xaa\xab\xac\xad\xae\xaf" + "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7" + "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf" + "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7" + "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf" + "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7" + "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf" + "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7" + "\xe8\xe9\xea\xeb\xec\xed\xee\xef" + "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7" + "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff" + "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f" + "\x10\x11\x12\x13\x14\x15\x16\x17" + "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f" + "\x20\x21\x22\x23\x24\x25\x26\x27" + "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f" + "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f" + "\x40\x41\x42\x43\x44\x45\x46\x47" + "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f" + "\x50\x51\x52\x53\x54\x55\x56\x57" + "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f" + "\x60\x61\x62\x63\x64\x65\x66\x67" + "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f" + "\x70\x71\x72\x73\x74\x75\x76\x77" + "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f" + "\x80\x81\x82\x83\x84\x85\x86\x87" + "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f" + "\x90\x91\x92\x93\x94\x95\x96\x97" + "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f" + "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7" + "\xa8\xa9\xaa\xab\xac\xad\xae\xaf" + "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7" + "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf" + "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7" + "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf" + "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7" + "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf" + "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7" + "\xe8\xe9\xea\xeb\xec\xed\xee\xef" + "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7" + "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff", + .rlen = 512, + }, +}; + +/* + * SEED test vectors + */ +#define SEED_ENC_TEST_VECTORS 4 +#define SEED_DEC_TEST_VECTORS 4 + +static struct cipher_testvec seed_enc_tv_template[] = { + { + .key = zeroed_string, + .klen = 16, + .input = "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + .ilen = 16, + .result = "\x5e\xba\xc6\xe0\x05\x4e\x16\x68" + "\x19\xaf\xf1\xcc\x6d\x34\x6c\xdb", + .rlen = 16, + }, { + .key = "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + .klen = 16, + .input = zeroed_string, + .ilen = 16, + .result = "\xc1\x1f\x22\xf2\x01\x40\x50\x50" + "\x84\x48\x35\x97\xe4\x37\x0f\x43", + .rlen = 16, + }, { + .key = 
"\x47\x06\x48\x08\x51\xe6\x1b\xe8" + "\x5d\x74\xbf\xb3\xfd\x95\x61\x85", + .klen = 16, + .input = "\x83\xa2\xf8\xa2\x88\x64\x1f\xb9" + "\xa4\xe9\xa5\xcc\x2f\x13\x1c\x7d", + .ilen = 16, + .result = "\xee\x54\xd1\x3e\xbc\xae\x70\x6d" + "\x22\x6b\xc3\x14\x2c\xd4\x0d\x4a", + .rlen = 16, + }, { + .key = "\x28\xdb\xc3\xbc\x49\xff\xd8\x7d" + "\xcf\xa5\x09\xb1\x1d\x42\x2b\xe7", + .klen = 16, + .input = "\xb4\x1e\x6b\xe2\xeb\xa8\x4a\x14" + "\x8e\x2e\xed\x84\x59\x3c\x5e\xc7", + .ilen = 16, + .result = "\x9b\x9b\x7b\xfc\xd1\x81\x3c\xb9" + "\x5d\x0b\x36\x18\xf4\x0f\x51\x22", + .rlen = 16, + } +}; + +static struct cipher_testvec seed_dec_tv_template[] = { + { + .key = zeroed_string, + .klen = 16, + .input = "\x5e\xba\xc6\xe0\x05\x4e\x16\x68" + "\x19\xaf\xf1\xcc\x6d\x34\x6c\xdb", + .ilen = 16, + .result = "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + .rlen = 16, + }, { + .key = "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + .klen = 16, + .input = "\xc1\x1f\x22\xf2\x01\x40\x50\x50" + "\x84\x48\x35\x97\xe4\x37\x0f\x43", + .ilen = 16, + .result = zeroed_string, + .rlen = 16, + }, { + .key = "\x47\x06\x48\x08\x51\xe6\x1b\xe8" + "\x5d\x74\xbf\xb3\xfd\x95\x61\x85", + .klen = 16, + .input = "\xee\x54\xd1\x3e\xbc\xae\x70\x6d" + "\x22\x6b\xc3\x14\x2c\xd4\x0d\x4a", + .ilen = 16, + .result = "\x83\xa2\xf8\xa2\x88\x64\x1f\xb9" + "\xa4\xe9\xa5\xcc\x2f\x13\x1c\x7d", + .rlen = 16, + }, { + .key = "\x28\xdb\xc3\xbc\x49\xff\xd8\x7d" + "\xcf\xa5\x09\xb1\x1d\x42\x2b\xe7", + .klen = 16, + .input = "\x9b\x9b\x7b\xfc\xd1\x81\x3c\xb9" + "\x5d\x0b\x36\x18\xf4\x0f\x51\x22", + .ilen = 16, + .result = "\xb4\x1e\x6b\xe2\xeb\xa8\x4a\x14" + "\x8e\x2e\xed\x84\x59\x3c\x5e\xc7", + .rlen = 16, + } +}; + +#define SALSA20_STREAM_ENC_TEST_VECTORS 5 +static struct cipher_testvec salsa20_stream_enc_tv_template[] = { + /* + * Testvectors from verified.test-vectors submitted to ECRYPT. + * They are truncated to size 39, 64, 111, 129 to test a variety + * of input length. 
+ */ + { /* Set 3, vector 0 */ + .key = "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0A\x0B\x0C\x0D\x0E\x0F", + .klen = 16, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x00", + .input = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00", + .ilen = 39, + .result = "\x2D\xD5\xC3\xF7\xBA\x2B\x20\xF7" + "\x68\x02\x41\x0C\x68\x86\x88\x89" + "\x5A\xD8\xC1\xBD\x4E\xA6\xC9\xB1" + "\x40\xFB\x9B\x90\xE2\x10\x49\xBF" + "\x58\x3F\x52\x79\x70\xEB\xC1", + .rlen = 39, + }, { /* Set 5, vector 0 */ + .key = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00", + .klen = 16, + .iv = "\x80\x00\x00\x00\x00\x00\x00\x00", + .input = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00", + .ilen = 64, + .result = "\xB6\x6C\x1E\x44\x46\xDD\x95\x57" + "\xE5\x78\xE2\x23\xB0\xB7\x68\x01" + "\x7B\x23\xB2\x67\xBB\x02\x34\xAE" + "\x46\x26\xBF\x44\x3F\x21\x97\x76" + "\x43\x6F\xB1\x9F\xD0\xE8\x86\x6F" + "\xCD\x0D\xE9\xA9\x53\x8F\x4A\x09" + "\xCA\x9A\xC0\x73\x2E\x30\xBC\xF9" + "\x8E\x4F\x13\xE4\xB9\xE2\x01\xD9", + .rlen = 64, + }, { /* Set 3, vector 27 */ + .key = "\x1B\x1C\x1D\x1E\x1F\x20\x21\x22" + "\x23\x24\x25\x26\x27\x28\x29\x2A" + "\x2B\x2C\x2D\x2E\x2F\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3A", + .klen = 32, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x00", + .input = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00", + .ilen = 111, + .result = "\xAE\x39\x50\x8E\xAC\x9A\xEC\xE7" + "\xBF\x97\xBB\x20\xB9\xDE\xE4\x1F" + "\x87\xD9\x47\xF8\x28\x91\x35\x98" + "\xDB\x72\xCC\x23\x29\x48\x56\x5E" + "\x83\x7E\x0B\xF3\x7D\x5D\x38\x7B" + "\x2D\x71\x02\xB4\x3B\xB5\xD8\x23" + "\xB0\x4A\xDF\x3C\xEC\xB6\xD9\x3B" + "\x9B\xA7\x52\xBE\xC5\xD4\x50\x59" + "\x15\x14\xB4\x0E\x40\xE6\x53\xD1" + "\x83\x9C\x5B\xA0\x92\x29\x6B\x5E" + "\x96\x5B\x1E\x2F\xD3\xAC\xC1\x92" + "\xB1\x41\x3F\x19\x2F\xC4\x3B\xC6" + "\x95\x46\x45\x54\xE9\x75\x03\x08" + "\x44\xAF\xE5\x8A\x81\x12\x09", + .rlen = 111, + }, { /* Set 5, vector 27 */ + .key = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00", + .klen = 32, + .iv = "\x00\x00\x00\x10\x00\x00\x00\x00", + .input = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00" + 
"\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00", + .ilen = 129, + .result = "\xD2\xDB\x1A\x5C\xF1\xC1\xAC\xDB" + "\xE8\x1A\x7A\x43\x40\xEF\x53\x43" + "\x5E\x7F\x4B\x1A\x50\x52\x3F\x8D" + "\x28\x3D\xCF\x85\x1D\x69\x6E\x60" + "\xF2\xDE\x74\x56\x18\x1B\x84\x10" + "\xD4\x62\xBA\x60\x50\xF0\x61\xF2" + "\x1C\x78\x7F\xC1\x24\x34\xAF\x58" + "\xBF\x2C\x59\xCA\x90\x77\xF3\xB0" + "\x5B\x4A\xDF\x89\xCE\x2C\x2F\xFC" + "\x67\xF0\xE3\x45\xE8\xB3\xB3\x75" + "\xA0\x95\x71\xA1\x29\x39\x94\xCA" + "\x45\x2F\xBD\xCB\x10\xB6\xBE\x9F" + "\x8E\xF9\xB2\x01\x0A\x5A\x0A\xB7" + "\x6B\x9D\x70\x8E\x4B\xD6\x2F\xCD" + "\x2E\x40\x48\x75\xE9\xE2\x21\x45" + "\x0B\xC9\xB6\xB5\x66\xBC\x9A\x59" + "\x5A", + .rlen = 129, + }, { /* large test vector generated using Crypto++ */ + .key = "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f" + "\x10\x11\x12\x13\x14\x15\x16\x17" + "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + .klen = 32, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00", + .input = + "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f" + "\x10\x11\x12\x13\x14\x15\x16\x17" + "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f" + "\x20\x21\x22\x23\x24\x25\x26\x27" + "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f" + "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f" + "\x40\x41\x42\x43\x44\x45\x46\x47" + "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f" + "\x50\x51\x52\x53\x54\x55\x56\x57" + "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f" + "\x60\x61\x62\x63\x64\x65\x66\x67" + "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f" + "\x70\x71\x72\x73\x74\x75\x76\x77" + "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f" + "\x80\x81\x82\x83\x84\x85\x86\x87" + "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f" + "\x90\x91\x92\x93\x94\x95\x96\x97" + "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f" + "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7" + "\xa8\xa9\xaa\xab\xac\xad\xae\xaf" + "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7" + "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf" + "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7" + "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf" + "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7" + "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf" + "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7" + "\xe8\xe9\xea\xeb\xec\xed\xee\xef" + "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7" + "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff" + "\x00\x03\x06\x09\x0c\x0f\x12\x15" + "\x18\x1b\x1e\x21\x24\x27\x2a\x2d" + "\x30\x33\x36\x39\x3c\x3f\x42\x45" + "\x48\x4b\x4e\x51\x54\x57\x5a\x5d" + "\x60\x63\x66\x69\x6c\x6f\x72\x75" + "\x78\x7b\x7e\x81\x84\x87\x8a\x8d" + "\x90\x93\x96\x99\x9c\x9f\xa2\xa5" + "\xa8\xab\xae\xb1\xb4\xb7\xba\xbd" + "\xc0\xc3\xc6\xc9\xcc\xcf\xd2\xd5" + "\xd8\xdb\xde\xe1\xe4\xe7\xea\xed" + "\xf0\xf3\xf6\xf9\xfc\xff\x02\x05" + "\x08\x0b\x0e\x11\x14\x17\x1a\x1d" + "\x20\x23\x26\x29\x2c\x2f\x32\x35" + "\x38\x3b\x3e\x41\x44\x47\x4a\x4d" + "\x50\x53\x56\x59\x5c\x5f\x62\x65" + "\x68\x6b\x6e\x71\x74\x77\x7a\x7d" + "\x80\x83\x86\x89\x8c\x8f\x92\x95" + "\x98\x9b\x9e\xa1\xa4\xa7\xaa\xad" + "\xb0\xb3\xb6\xb9\xbc\xbf\xc2\xc5" + "\xc8\xcb\xce\xd1\xd4\xd7\xda\xdd" + "\xe0\xe3\xe6\xe9\xec\xef\xf2\xf5" + "\xf8\xfb\xfe\x01\x04\x07\x0a\x0d" + "\x10\x13\x16\x19\x1c\x1f\x22\x25" + "\x28\x2b\x2e\x31\x34\x37\x3a\x3d" + "\x40\x43\x46\x49\x4c\x4f\x52\x55" + "\x58\x5b\x5e\x61\x64\x67\x6a\x6d" + "\x70\x73\x76\x79\x7c\x7f\x82\x85" + "\x88\x8b\x8e\x91\x94\x97\x9a\x9d" + "\xa0\xa3\xa6\xa9\xac\xaf\xb2\xb5" + "\xb8\xbb\xbe\xc1\xc4\xc7\xca\xcd" + "\xd0\xd3\xd6\xd9\xdc\xdf\xe2\xe5" + "\xe8\xeb\xee\xf1\xf4\xf7\xfa\xfd" + "\x00\x05\x0a\x0f\x14\x19\x1e\x23" + "\x28\x2d\x32\x37\x3c\x41\x46\x4b" + "\x50\x55\x5a\x5f\x64\x69\x6e\x73" + "\x78\x7d\x82\x87\x8c\x91\x96\x9b" + 
"\xa0\xa5\xaa\xaf\xb4\xb9\xbe\xc3" + "\xc8\xcd\xd2\xd7\xdc\xe1\xe6\xeb" + "\xf0\xf5\xfa\xff\x04\x09\x0e\x13" + "\x18\x1d\x22\x27\x2c\x31\x36\x3b" + "\x40\x45\x4a\x4f\x54\x59\x5e\x63" + "\x68\x6d\x72\x77\x7c\x81\x86\x8b" + "\x90\x95\x9a\x9f\xa4\xa9\xae\xb3" + "\xb8\xbd\xc2\xc7\xcc\xd1\xd6\xdb" + "\xe0\xe5\xea\xef\xf4\xf9\xfe\x03" + "\x08\x0d\x12\x17\x1c\x21\x26\x2b" + "\x30\x35\x3a\x3f\x44\x49\x4e\x53" + "\x58\x5d\x62\x67\x6c\x71\x76\x7b" + "\x80\x85\x8a\x8f\x94\x99\x9e\xa3" + "\xa8\xad\xb2\xb7\xbc\xc1\xc6\xcb" + "\xd0\xd5\xda\xdf\xe4\xe9\xee\xf3" + "\xf8\xfd\x02\x07\x0c\x11\x16\x1b" + "\x20\x25\x2a\x2f\x34\x39\x3e\x43" + "\x48\x4d\x52\x57\x5c\x61\x66\x6b" + "\x70\x75\x7a\x7f\x84\x89\x8e\x93" + "\x98\x9d\xa2\xa7\xac\xb1\xb6\xbb" + "\xc0\xc5\xca\xcf\xd4\xd9\xde\xe3" + "\xe8\xed\xf2\xf7\xfc\x01\x06\x0b" + "\x10\x15\x1a\x1f\x24\x29\x2e\x33" + "\x38\x3d\x42\x47\x4c\x51\x56\x5b" + "\x60\x65\x6a\x6f\x74\x79\x7e\x83" + "\x88\x8d\x92\x97\x9c\xa1\xa6\xab" + "\xb0\xb5\xba\xbf\xc4\xc9\xce\xd3" + "\xd8\xdd\xe2\xe7\xec\xf1\xf6\xfb" + "\x00\x07\x0e\x15\x1c\x23\x2a\x31" + "\x38\x3f\x46\x4d\x54\x5b\x62\x69" + "\x70\x77\x7e\x85\x8c\x93\x9a\xa1" + "\xa8\xaf\xb6\xbd\xc4\xcb\xd2\xd9" + "\xe0\xe7\xee\xf5\xfc\x03\x0a\x11" + "\x18\x1f\x26\x2d\x34\x3b\x42\x49" + "\x50\x57\x5e\x65\x6c\x73\x7a\x81" + "\x88\x8f\x96\x9d\xa4\xab\xb2\xb9" + "\xc0\xc7\xce\xd5\xdc\xe3\xea\xf1" + "\xf8\xff\x06\x0d\x14\x1b\x22\x29" + "\x30\x37\x3e\x45\x4c\x53\x5a\x61" + "\x68\x6f\x76\x7d\x84\x8b\x92\x99" + "\xa0\xa7\xae\xb5\xbc\xc3\xca\xd1" + "\xd8\xdf\xe6\xed\xf4\xfb\x02\x09" + "\x10\x17\x1e\x25\x2c\x33\x3a\x41" + "\x48\x4f\x56\x5d\x64\x6b\x72\x79" + "\x80\x87\x8e\x95\x9c\xa3\xaa\xb1" + "\xb8\xbf\xc6\xcd\xd4\xdb\xe2\xe9" + "\xf0\xf7\xfe\x05\x0c\x13\x1a\x21" + "\x28\x2f\x36\x3d\x44\x4b\x52\x59" + "\x60\x67\x6e\x75\x7c\x83\x8a\x91" + "\x98\x9f\xa6\xad\xb4\xbb\xc2\xc9" + "\xd0\xd7\xde\xe5\xec\xf3\xfa\x01" + "\x08\x0f\x16\x1d\x24\x2b\x32\x39" + "\x40\x47\x4e\x55\x5c\x63\x6a\x71" + "\x78\x7f\x86\x8d\x94\x9b\xa2\xa9" + "\xb0\xb7\xbe\xc5\xcc\xd3\xda\xe1" + "\xe8\xef\xf6\xfd\x04\x0b\x12\x19" + "\x20\x27\x2e\x35\x3c\x43\x4a\x51" + "\x58\x5f\x66\x6d\x74\x7b\x82\x89" + "\x90\x97\x9e\xa5\xac\xb3\xba\xc1" + "\xc8\xcf\xd6\xdd\xe4\xeb\xf2\xf9" + "\x00\x09\x12\x1b\x24\x2d\x36\x3f" + "\x48\x51\x5a\x63\x6c\x75\x7e\x87" + "\x90\x99\xa2\xab\xb4\xbd\xc6\xcf" + "\xd8\xe1\xea\xf3\xfc\x05\x0e\x17" + "\x20\x29\x32\x3b\x44\x4d\x56\x5f" + "\x68\x71\x7a\x83\x8c\x95\x9e\xa7" + "\xb0\xb9\xc2\xcb\xd4\xdd\xe6\xef" + "\xf8\x01\x0a\x13\x1c\x25\x2e\x37" + "\x40\x49\x52\x5b\x64\x6d\x76\x7f" + "\x88\x91\x9a\xa3\xac\xb5\xbe\xc7" + "\xd0\xd9\xe2\xeb\xf4\xfd\x06\x0f" + "\x18\x21\x2a\x33\x3c\x45\x4e\x57" + "\x60\x69\x72\x7b\x84\x8d\x96\x9f" + "\xa8\xb1\xba\xc3\xcc\xd5\xde\xe7" + "\xf0\xf9\x02\x0b\x14\x1d\x26\x2f" + "\x38\x41\x4a\x53\x5c\x65\x6e\x77" + "\x80\x89\x92\x9b\xa4\xad\xb6\xbf" + "\xc8\xd1\xda\xe3\xec\xf5\xfe\x07" + "\x10\x19\x22\x2b\x34\x3d\x46\x4f" + "\x58\x61\x6a\x73\x7c\x85\x8e\x97" + "\xa0\xa9\xb2\xbb\xc4\xcd\xd6\xdf" + "\xe8\xf1\xfa\x03\x0c\x15\x1e\x27" + "\x30\x39\x42\x4b\x54\x5d\x66\x6f" + "\x78\x81\x8a\x93\x9c\xa5\xae\xb7" + "\xc0\xc9\xd2\xdb\xe4\xed\xf6\xff" + "\x08\x11\x1a\x23\x2c\x35\x3e\x47" + "\x50\x59\x62\x6b\x74\x7d\x86\x8f" + "\x98\xa1\xaa\xb3\xbc\xc5\xce\xd7" + "\xe0\xe9\xf2\xfb\x04\x0d\x16\x1f" + "\x28\x31\x3a\x43\x4c\x55\x5e\x67" + "\x70\x79\x82\x8b\x94\x9d\xa6\xaf" + "\xb8\xc1\xca\xd3\xdc\xe5\xee\xf7" + "\x00\x0b\x16\x21\x2c\x37\x42\x4d" + "\x58\x63\x6e\x79\x84\x8f\x9a\xa5" + "\xb0\xbb\xc6\xd1\xdc\xe7\xf2\xfd" + "\x08\x13\x1e\x29\x34\x3f\x4a\x55" + 
"\x60\x6b\x76\x81\x8c\x97\xa2\xad" + "\xb8\xc3\xce\xd9\xe4\xef\xfa\x05" + "\x10\x1b\x26\x31\x3c\x47\x52\x5d" + "\x68\x73\x7e\x89\x94\x9f\xaa\xb5" + "\xc0\xcb\xd6\xe1\xec\xf7\x02\x0d" + "\x18\x23\x2e\x39\x44\x4f\x5a\x65" + "\x70\x7b\x86\x91\x9c\xa7\xb2\xbd" + "\xc8\xd3\xde\xe9\xf4\xff\x0a\x15" + "\x20\x2b\x36\x41\x4c\x57\x62\x6d" + "\x78\x83\x8e\x99\xa4\xaf\xba\xc5" + "\xd0\xdb\xe6\xf1\xfc\x07\x12\x1d" + "\x28\x33\x3e\x49\x54\x5f\x6a\x75" + "\x80\x8b\x96\xa1\xac\xb7\xc2\xcd" + "\xd8\xe3\xee\xf9\x04\x0f\x1a\x25" + "\x30\x3b\x46\x51\x5c\x67\x72\x7d" + "\x88\x93\x9e\xa9\xb4\xbf\xca\xd5" + "\xe0\xeb\xf6\x01\x0c\x17\x22\x2d" + "\x38\x43\x4e\x59\x64\x6f\x7a\x85" + "\x90\x9b\xa6\xb1\xbc\xc7\xd2\xdd" + "\xe8\xf3\xfe\x09\x14\x1f\x2a\x35" + "\x40\x4b\x56\x61\x6c\x77\x82\x8d" + "\x98\xa3\xae\xb9\xc4\xcf\xda\xe5" + "\xf0\xfb\x06\x11\x1c\x27\x32\x3d" + "\x48\x53\x5e\x69\x74\x7f\x8a\x95" + "\xa0\xab\xb6\xc1\xcc\xd7\xe2\xed" + "\xf8\x03\x0e\x19\x24\x2f\x3a\x45" + "\x50\x5b\x66\x71\x7c\x87\x92\x9d" + "\xa8\xb3\xbe\xc9\xd4\xdf\xea\xf5" + "\x00\x0d\x1a\x27\x34\x41\x4e\x5b" + "\x68\x75\x82\x8f\x9c\xa9\xb6\xc3" + "\xd0\xdd\xea\xf7\x04\x11\x1e\x2b" + "\x38\x45\x52\x5f\x6c\x79\x86\x93" + "\xa0\xad\xba\xc7\xd4\xe1\xee\xfb" + "\x08\x15\x22\x2f\x3c\x49\x56\x63" + "\x70\x7d\x8a\x97\xa4\xb1\xbe\xcb" + "\xd8\xe5\xf2\xff\x0c\x19\x26\x33" + "\x40\x4d\x5a\x67\x74\x81\x8e\x9b" + "\xa8\xb5\xc2\xcf\xdc\xe9\xf6\x03" + "\x10\x1d\x2a\x37\x44\x51\x5e\x6b" + "\x78\x85\x92\x9f\xac\xb9\xc6\xd3" + "\xe0\xed\xfa\x07\x14\x21\x2e\x3b" + "\x48\x55\x62\x6f\x7c\x89\x96\xa3" + "\xb0\xbd\xca\xd7\xe4\xf1\xfe\x0b" + "\x18\x25\x32\x3f\x4c\x59\x66\x73" + "\x80\x8d\x9a\xa7\xb4\xc1\xce\xdb" + "\xe8\xf5\x02\x0f\x1c\x29\x36\x43" + "\x50\x5d\x6a\x77\x84\x91\x9e\xab" + "\xb8\xc5\xd2\xdf\xec\xf9\x06\x13" + "\x20\x2d\x3a\x47\x54\x61\x6e\x7b" + "\x88\x95\xa2\xaf\xbc\xc9\xd6\xe3" + "\xf0\xfd\x0a\x17\x24\x31\x3e\x4b" + "\x58\x65\x72\x7f\x8c\x99\xa6\xb3" + "\xc0\xcd\xda\xe7\xf4\x01\x0e\x1b" + "\x28\x35\x42\x4f\x5c\x69\x76\x83" + "\x90\x9d\xaa\xb7\xc4\xd1\xde\xeb" + "\xf8\x05\x12\x1f\x2c\x39\x46\x53" + "\x60\x6d\x7a\x87\x94\xa1\xae\xbb" + "\xc8\xd5\xe2\xef\xfc\x09\x16\x23" + "\x30\x3d\x4a\x57\x64\x71\x7e\x8b" + "\x98\xa5\xb2\xbf\xcc\xd9\xe6\xf3" + "\x00\x0f\x1e\x2d\x3c\x4b\x5a\x69" + "\x78\x87\x96\xa5\xb4\xc3\xd2\xe1" + "\xf0\xff\x0e\x1d\x2c\x3b\x4a\x59" + "\x68\x77\x86\x95\xa4\xb3\xc2\xd1" + "\xe0\xef\xfe\x0d\x1c\x2b\x3a\x49" + "\x58\x67\x76\x85\x94\xa3\xb2\xc1" + "\xd0\xdf\xee\xfd\x0c\x1b\x2a\x39" + "\x48\x57\x66\x75\x84\x93\xa2\xb1" + "\xc0\xcf\xde\xed\xfc\x0b\x1a\x29" + "\x38\x47\x56\x65\x74\x83\x92\xa1" + "\xb0\xbf\xce\xdd\xec\xfb\x0a\x19" + "\x28\x37\x46\x55\x64\x73\x82\x91" + "\xa0\xaf\xbe\xcd\xdc\xeb\xfa\x09" + "\x18\x27\x36\x45\x54\x63\x72\x81" + "\x90\x9f\xae\xbd\xcc\xdb\xea\xf9" + "\x08\x17\x26\x35\x44\x53\x62\x71" + "\x80\x8f\x9e\xad\xbc\xcb\xda\xe9" + "\xf8\x07\x16\x25\x34\x43\x52\x61" + "\x70\x7f\x8e\x9d\xac\xbb\xca\xd9" + "\xe8\xf7\x06\x15\x24\x33\x42\x51" + "\x60\x6f\x7e\x8d\x9c\xab\xba\xc9" + "\xd8\xe7\xf6\x05\x14\x23\x32\x41" + "\x50\x5f\x6e\x7d\x8c\x9b\xaa\xb9" + "\xc8\xd7\xe6\xf5\x04\x13\x22\x31" + "\x40\x4f\x5e\x6d\x7c\x8b\x9a\xa9" + "\xb8\xc7\xd6\xe5\xf4\x03\x12\x21" + "\x30\x3f\x4e\x5d\x6c\x7b\x8a\x99" + "\xa8\xb7\xc6\xd5\xe4\xf3\x02\x11" + "\x20\x2f\x3e\x4d\x5c\x6b\x7a\x89" + "\x98\xa7\xb6\xc5\xd4\xe3\xf2\x01" + "\x10\x1f\x2e\x3d\x4c\x5b\x6a\x79" + "\x88\x97\xa6\xb5\xc4\xd3\xe2\xf1" + "\x00\x11\x22\x33\x44\x55\x66\x77" + "\x88\x99\xaa\xbb\xcc\xdd\xee\xff" + "\x10\x21\x32\x43\x54\x65\x76\x87" + "\x98\xa9\xba\xcb\xdc\xed\xfe\x0f" + 
"\x20\x31\x42\x53\x64\x75\x86\x97" + "\xa8\xb9\xca\xdb\xec\xfd\x0e\x1f" + "\x30\x41\x52\x63\x74\x85\x96\xa7" + "\xb8\xc9\xda\xeb\xfc\x0d\x1e\x2f" + "\x40\x51\x62\x73\x84\x95\xa6\xb7" + "\xc8\xd9\xea\xfb\x0c\x1d\x2e\x3f" + "\x50\x61\x72\x83\x94\xa5\xb6\xc7" + "\xd8\xe9\xfa\x0b\x1c\x2d\x3e\x4f" + "\x60\x71\x82\x93\xa4\xb5\xc6\xd7" + "\xe8\xf9\x0a\x1b\x2c\x3d\x4e\x5f" + "\x70\x81\x92\xa3\xb4\xc5\xd6\xe7" + "\xf8\x09\x1a\x2b\x3c\x4d\x5e\x6f" + "\x80\x91\xa2\xb3\xc4\xd5\xe6\xf7" + "\x08\x19\x2a\x3b\x4c\x5d\x6e\x7f" + "\x90\xa1\xb2\xc3\xd4\xe5\xf6\x07" + "\x18\x29\x3a\x4b\x5c\x6d\x7e\x8f" + "\xa0\xb1\xc2\xd3\xe4\xf5\x06\x17" + "\x28\x39\x4a\x5b\x6c\x7d\x8e\x9f" + "\xb0\xc1\xd2\xe3\xf4\x05\x16\x27" + "\x38\x49\x5a\x6b\x7c\x8d\x9e\xaf" + "\xc0\xd1\xe2\xf3\x04\x15\x26\x37" + "\x48\x59\x6a\x7b\x8c\x9d\xae\xbf" + "\xd0\xe1\xf2\x03\x14\x25\x36\x47" + "\x58\x69\x7a\x8b\x9c\xad\xbe\xcf" + "\xe0\xf1\x02\x13\x24\x35\x46\x57" + "\x68\x79\x8a\x9b\xac\xbd\xce\xdf" + "\xf0\x01\x12\x23\x34\x45\x56\x67" + "\x78\x89\x9a\xab\xbc\xcd\xde\xef" + "\x00\x13\x26\x39\x4c\x5f\x72\x85" + "\x98\xab\xbe\xd1\xe4\xf7\x0a\x1d" + "\x30\x43\x56\x69\x7c\x8f\xa2\xb5" + "\xc8\xdb\xee\x01\x14\x27\x3a\x4d" + "\x60\x73\x86\x99\xac\xbf\xd2\xe5" + "\xf8\x0b\x1e\x31\x44\x57\x6a\x7d" + "\x90\xa3\xb6\xc9\xdc\xef\x02\x15" + "\x28\x3b\x4e\x61\x74\x87\x9a\xad" + "\xc0\xd3\xe6\xf9\x0c\x1f\x32\x45" + "\x58\x6b\x7e\x91\xa4\xb7\xca\xdd" + "\xf0\x03\x16\x29\x3c\x4f\x62\x75" + "\x88\x9b\xae\xc1\xd4\xe7\xfa\x0d" + "\x20\x33\x46\x59\x6c\x7f\x92\xa5" + "\xb8\xcb\xde\xf1\x04\x17\x2a\x3d" + "\x50\x63\x76\x89\x9c\xaf\xc2\xd5" + "\xe8\xfb\x0e\x21\x34\x47\x5a\x6d" + "\x80\x93\xa6\xb9\xcc\xdf\xf2\x05" + "\x18\x2b\x3e\x51\x64\x77\x8a\x9d" + "\xb0\xc3\xd6\xe9\xfc\x0f\x22\x35" + "\x48\x5b\x6e\x81\x94\xa7\xba\xcd" + "\xe0\xf3\x06\x19\x2c\x3f\x52\x65" + "\x78\x8b\x9e\xb1\xc4\xd7\xea\xfd" + "\x10\x23\x36\x49\x5c\x6f\x82\x95" + "\xa8\xbb\xce\xe1\xf4\x07\x1a\x2d" + "\x40\x53\x66\x79\x8c\x9f\xb2\xc5" + "\xd8\xeb\xfe\x11\x24\x37\x4a\x5d" + "\x70\x83\x96\xa9\xbc\xcf\xe2\xf5" + "\x08\x1b\x2e\x41\x54\x67\x7a\x8d" + "\xa0\xb3\xc6\xd9\xec\xff\x12\x25" + "\x38\x4b\x5e\x71\x84\x97\xaa\xbd" + "\xd0\xe3\xf6\x09\x1c\x2f\x42\x55" + "\x68\x7b\x8e\xa1\xb4\xc7\xda\xed" + "\x00\x15\x2a\x3f\x54\x69\x7e\x93" + "\xa8\xbd\xd2\xe7\xfc\x11\x26\x3b" + "\x50\x65\x7a\x8f\xa4\xb9\xce\xe3" + "\xf8\x0d\x22\x37\x4c\x61\x76\x8b" + "\xa0\xb5\xca\xdf\xf4\x09\x1e\x33" + "\x48\x5d\x72\x87\x9c\xb1\xc6\xdb" + "\xf0\x05\x1a\x2f\x44\x59\x6e\x83" + "\x98\xad\xc2\xd7\xec\x01\x16\x2b" + "\x40\x55\x6a\x7f\x94\xa9\xbe\xd3" + "\xe8\xfd\x12\x27\x3c\x51\x66\x7b" + "\x90\xa5\xba\xcf\xe4\xf9\x0e\x23" + "\x38\x4d\x62\x77\x8c\xa1\xb6\xcb" + "\xe0\xf5\x0a\x1f\x34\x49\x5e\x73" + "\x88\x9d\xb2\xc7\xdc\xf1\x06\x1b" + "\x30\x45\x5a\x6f\x84\x99\xae\xc3" + "\xd8\xed\x02\x17\x2c\x41\x56\x6b" + "\x80\x95\xaa\xbf\xd4\xe9\xfe\x13" + "\x28\x3d\x52\x67\x7c\x91\xa6\xbb" + "\xd0\xe5\xfa\x0f\x24\x39\x4e\x63" + "\x78\x8d\xa2\xb7\xcc\xe1\xf6\x0b" + "\x20\x35\x4a\x5f\x74\x89\x9e\xb3" + "\xc8\xdd\xf2\x07\x1c\x31\x46\x5b" + "\x70\x85\x9a\xaf\xc4\xd9\xee\x03" + "\x18\x2d\x42\x57\x6c\x81\x96\xab" + "\xc0\xd5\xea\xff\x14\x29\x3e\x53" + "\x68\x7d\x92\xa7\xbc\xd1\xe6\xfb" + "\x10\x25\x3a\x4f\x64\x79\x8e\xa3" + "\xb8\xcd\xe2\xf7\x0c\x21\x36\x4b" + "\x60\x75\x8a\x9f\xb4\xc9\xde\xf3" + "\x08\x1d\x32\x47\x5c\x71\x86\x9b" + "\xb0\xc5\xda\xef\x04\x19\x2e\x43" + "\x58\x6d\x82\x97\xac\xc1\xd6\xeb" + "\x00\x17\x2e\x45\x5c\x73\x8a\xa1" + "\xb8\xcf\xe6\xfd\x14\x2b\x42\x59" + "\x70\x87\x9e\xb5\xcc\xe3\xfa\x11" + "\x28\x3f\x56\x6d\x84\x9b\xb2\xc9" + 
"\xe0\xf7\x0e\x25\x3c\x53\x6a\x81" + "\x98\xaf\xc6\xdd\xf4\x0b\x22\x39" + "\x50\x67\x7e\x95\xac\xc3\xda\xf1" + "\x08\x1f\x36\x4d\x64\x7b\x92\xa9" + "\xc0\xd7\xee\x05\x1c\x33\x4a\x61" + "\x78\x8f\xa6\xbd\xd4\xeb\x02\x19" + "\x30\x47\x5e\x75\x8c\xa3\xba\xd1" + "\xe8\xff\x16\x2d\x44\x5b\x72\x89" + "\xa0\xb7\xce\xe5\xfc\x13\x2a\x41" + "\x58\x6f\x86\x9d\xb4\xcb\xe2\xf9" + "\x10\x27\x3e\x55\x6c\x83\x9a\xb1" + "\xc8\xdf\xf6\x0d\x24\x3b\x52\x69" + "\x80\x97\xae\xc5\xdc\xf3\x0a\x21" + "\x38\x4f\x66\x7d\x94\xab\xc2\xd9" + "\xf0\x07\x1e\x35\x4c\x63\x7a\x91" + "\xa8\xbf\xd6\xed\x04\x1b\x32\x49" + "\x60\x77\x8e\xa5\xbc\xd3\xea\x01" + "\x18\x2f\x46\x5d\x74\x8b\xa2\xb9" + "\xd0\xe7\xfe\x15\x2c\x43\x5a\x71" + "\x88\x9f\xb6\xcd\xe4\xfb\x12\x29" + "\x40\x57\x6e\x85\x9c\xb3\xca\xe1" + "\xf8\x0f\x26\x3d\x54\x6b\x82\x99" + "\xb0\xc7\xde\xf5\x0c\x23\x3a\x51" + "\x68\x7f\x96\xad\xc4\xdb\xf2\x09" + "\x20\x37\x4e\x65\x7c\x93\xaa\xc1" + "\xd8\xef\x06\x1d\x34\x4b\x62\x79" + "\x90\xa7\xbe\xd5\xec\x03\x1a\x31" + "\x48\x5f\x76\x8d\xa4\xbb\xd2\xe9" + "\x00\x19\x32\x4b\x64\x7d\x96\xaf" + "\xc8\xe1\xfa\x13\x2c\x45\x5e\x77" + "\x90\xa9\xc2\xdb\xf4\x0d\x26\x3f" + "\x58\x71\x8a\xa3\xbc\xd5\xee\x07" + "\x20\x39\x52\x6b\x84\x9d\xb6\xcf" + "\xe8\x01\x1a\x33\x4c\x65\x7e\x97" + "\xb0\xc9\xe2\xfb\x14\x2d\x46\x5f" + "\x78\x91\xaa\xc3\xdc\xf5\x0e\x27" + "\x40\x59\x72\x8b\xa4\xbd\xd6\xef" + "\x08\x21\x3a\x53\x6c\x85\x9e\xb7" + "\xd0\xe9\x02\x1b\x34\x4d\x66\x7f" + "\x98\xb1\xca\xe3\xfc\x15\x2e\x47" + "\x60\x79\x92\xab\xc4\xdd\xf6\x0f" + "\x28\x41\x5a\x73\x8c\xa5\xbe\xd7" + "\xf0\x09\x22\x3b\x54\x6d\x86\x9f" + "\xb8\xd1\xea\x03\x1c\x35\x4e\x67" + "\x80\x99\xb2\xcb\xe4\xfd\x16\x2f" + "\x48\x61\x7a\x93\xac\xc5\xde\xf7" + "\x10\x29\x42\x5b\x74\x8d\xa6\xbf" + "\xd8\xf1\x0a\x23\x3c\x55\x6e\x87" + "\xa0\xb9\xd2\xeb\x04\x1d\x36\x4f" + "\x68\x81\x9a\xb3\xcc\xe5\xfe\x17" + "\x30\x49\x62\x7b\x94\xad\xc6\xdf" + "\xf8\x11\x2a\x43\x5c\x75\x8e\xa7" + "\xc0\xd9\xf2\x0b\x24\x3d\x56\x6f" + "\x88\xa1\xba\xd3\xec\x05\x1e\x37" + "\x50\x69\x82\x9b\xb4\xcd\xe6\xff" + "\x18\x31\x4a\x63\x7c\x95\xae\xc7" + "\xe0\xf9\x12\x2b\x44\x5d\x76\x8f" + "\xa8\xc1\xda\xf3\x0c\x25\x3e\x57" + "\x70\x89\xa2\xbb\xd4\xed\x06\x1f" + "\x38\x51\x6a\x83\x9c\xb5\xce\xe7" + "\x00\x1b\x36\x51\x6c\x87\xa2\xbd" + "\xd8\xf3\x0e\x29\x44\x5f\x7a\x95" + "\xb0\xcb\xe6\x01\x1c\x37\x52\x6d" + "\x88\xa3\xbe\xd9\xf4\x0f\x2a\x45" + "\x60\x7b\x96\xb1\xcc\xe7\x02\x1d" + "\x38\x53\x6e\x89\xa4\xbf\xda\xf5" + "\x10\x2b\x46\x61\x7c\x97\xb2\xcd" + "\xe8\x03\x1e\x39\x54\x6f\x8a\xa5" + "\xc0\xdb\xf6\x11\x2c\x47\x62\x7d" + "\x98\xb3\xce\xe9\x04\x1f\x3a\x55" + "\x70\x8b\xa6\xc1\xdc\xf7\x12\x2d" + "\x48\x63\x7e\x99\xb4\xcf\xea\x05" + "\x20\x3b\x56\x71\x8c\xa7\xc2\xdd" + "\xf8\x13\x2e\x49\x64\x7f\x9a\xb5" + "\xd0\xeb\x06\x21\x3c\x57\x72\x8d" + "\xa8\xc3\xde\xf9\x14\x2f\x4a\x65" + "\x80\x9b\xb6\xd1\xec\x07\x22\x3d" + "\x58\x73\x8e\xa9\xc4\xdf\xfa\x15" + "\x30\x4b\x66\x81\x9c\xb7\xd2\xed" + "\x08\x23\x3e\x59\x74\x8f\xaa\xc5" + "\xe0\xfb\x16\x31\x4c\x67\x82\x9d" + "\xb8\xd3\xee\x09\x24\x3f\x5a\x75" + "\x90\xab\xc6\xe1\xfc\x17\x32\x4d" + "\x68\x83\x9e\xb9\xd4\xef\x0a\x25" + "\x40\x5b\x76\x91\xac\xc7\xe2\xfd" + "\x18\x33\x4e\x69\x84\x9f\xba\xd5" + "\xf0\x0b\x26\x41\x5c\x77\x92\xad" + "\xc8\xe3\xfe\x19\x34\x4f\x6a\x85" + "\xa0\xbb\xd6\xf1\x0c\x27\x42\x5d" + "\x78\x93\xae\xc9\xe4\xff\x1a\x35" + "\x50\x6b\x86\xa1\xbc\xd7\xf2\x0d" + "\x28\x43\x5e\x79\x94\xaf\xca\xe5" + "\x00\x1d\x3a\x57\x74\x91\xae\xcb" + "\xe8\x05\x22\x3f\x5c\x79\x96\xb3" + "\xd0\xed\x0a\x27\x44\x61\x7e\x9b" + "\xb8\xd5\xf2\x0f\x2c\x49\x66\x83" + 
"\xa0\xbd\xda\xf7\x14\x31\x4e\x6b" + "\x88\xa5\xc2\xdf\xfc\x19\x36\x53" + "\x70\x8d\xaa\xc7\xe4\x01\x1e\x3b" + "\x58\x75\x92\xaf\xcc\xe9\x06\x23" + "\x40\x5d\x7a\x97\xb4\xd1\xee\x0b" + "\x28\x45\x62\x7f\x9c\xb9\xd6\xf3" + "\x10\x2d\x4a\x67\x84\xa1\xbe\xdb" + "\xf8\x15\x32\x4f\x6c\x89\xa6\xc3" + "\xe0\xfd\x1a\x37\x54\x71\x8e\xab" + "\xc8\xe5\x02\x1f\x3c\x59\x76\x93" + "\xb0\xcd\xea\x07\x24\x41\x5e\x7b" + "\x98\xb5\xd2\xef\x0c\x29\x46\x63" + "\x80\x9d\xba\xd7\xf4\x11\x2e\x4b" + "\x68\x85\xa2\xbf\xdc\xf9\x16\x33" + "\x50\x6d\x8a\xa7\xc4\xe1\xfe\x1b" + "\x38\x55\x72\x8f\xac\xc9\xe6\x03" + "\x20\x3d\x5a\x77\x94\xb1\xce\xeb" + "\x08\x25\x42\x5f\x7c\x99\xb6\xd3" + "\xf0\x0d\x2a\x47\x64\x81\x9e\xbb" + "\xd8\xf5\x12\x2f\x4c\x69\x86\xa3" + "\xc0\xdd\xfa\x17\x34\x51\x6e\x8b" + "\xa8\xc5\xe2\xff\x1c\x39\x56\x73" + "\x90\xad\xca\xe7\x04\x21\x3e\x5b" + "\x78\x95\xb2\xcf\xec\x09\x26\x43" + "\x60\x7d\x9a\xb7\xd4\xf1\x0e\x2b" + "\x48\x65\x82\x9f\xbc\xd9\xf6\x13" + "\x30\x4d\x6a\x87\xa4\xc1\xde\xfb" + "\x18\x35\x52\x6f\x8c\xa9\xc6\xe3" + "\x00\x1f\x3e\x5d\x7c\x9b\xba\xd9" + "\xf8\x17\x36\x55\x74\x93\xb2\xd1" + "\xf0\x0f\x2e\x4d\x6c\x8b\xaa\xc9" + "\xe8\x07\x26\x45\x64\x83\xa2\xc1" + "\xe0\xff\x1e\x3d\x5c\x7b\x9a\xb9" + "\xd8\xf7\x16\x35\x54\x73\x92\xb1" + "\xd0\xef\x0e\x2d\x4c\x6b\x8a\xa9" + "\xc8\xe7\x06\x25\x44\x63\x82\xa1" + "\xc0\xdf\xfe\x1d\x3c\x5b\x7a\x99" + "\xb8\xd7\xf6\x15\x34\x53\x72\x91" + "\xb0\xcf\xee\x0d\x2c\x4b\x6a\x89" + "\xa8\xc7\xe6\x05\x24\x43\x62\x81" + "\xa0\xbf\xde\xfd\x1c\x3b\x5a\x79" + "\x98\xb7\xd6\xf5\x14\x33\x52\x71" + "\x90\xaf\xce\xed\x0c\x2b\x4a\x69" + "\x88\xa7\xc6\xe5\x04\x23\x42\x61" + "\x80\x9f\xbe\xdd\xfc\x1b\x3a\x59" + "\x78\x97\xb6\xd5\xf4\x13\x32\x51" + "\x70\x8f\xae\xcd\xec\x0b\x2a\x49" + "\x68\x87\xa6\xc5\xe4\x03\x22\x41" + "\x60\x7f\x9e\xbd\xdc\xfb\x1a\x39" + "\x58\x77\x96\xb5\xd4\xf3\x12\x31" + "\x50\x6f\x8e\xad\xcc\xeb\x0a\x29" + "\x48\x67\x86\xa5\xc4\xe3\x02\x21" + "\x40\x5f\x7e\x9d\xbc\xdb\xfa\x19" + "\x38\x57\x76\x95\xb4\xd3\xf2\x11" + "\x30\x4f\x6e\x8d\xac\xcb\xea\x09" + "\x28\x47\x66\x85\xa4\xc3\xe2\x01" + "\x20\x3f\x5e\x7d\x9c\xbb\xda\xf9" + "\x18\x37\x56\x75\x94\xb3\xd2\xf1" + "\x10\x2f\x4e\x6d\x8c\xab\xca\xe9" + "\x08\x27\x46\x65\x84\xa3\xc2\xe1" + "\x00\x21\x42\x63", + .ilen = 4100, + .result = + "\xb5\x81\xf5\x64\x18\x73\xe3\xf0" + "\x4c\x13\xf2\x77\x18\x60\x65\x5e" + "\x29\x01\xce\x98\x55\x53\xf9\x0c" + "\x2a\x08\xd5\x09\xb3\x57\x55\x56" + "\xc5\xe9\x56\x90\xcb\x6a\xa3\xc0" + "\xff\xc4\x79\xb4\xd2\x97\x5d\xc4" + "\x43\xd1\xfe\x94\x7b\x88\x06\x5a" + "\xb2\x9e\x2c\xfc\x44\x03\xb7\x90" + "\xa0\xc1\xba\x6a\x33\xb8\xc7\xb2" + "\x9d\xe1\x12\x4f\xc0\x64\xd4\x01" + "\xfe\x8c\x7a\x66\xf7\xe6\x5a\x91" + "\xbb\xde\x56\x86\xab\x65\x21\x30" + "\x00\x84\x65\x24\xa5\x7d\x85\xb4" + "\xe3\x17\xed\x3a\xb7\x6f\xb4\x0b" + "\x0b\xaf\x15\xae\x5a\x8f\xf2\x0c" + "\x2f\x27\xf4\x09\xd8\xd2\x96\xb7" + "\x71\xf2\xc5\x99\x4d\x7e\x7f\x75" + "\x77\x89\x30\x8b\x59\xdb\xa2\xb2" + "\xa0\xf3\x19\x39\x2b\xc5\x7e\x3f" + "\x4f\xd9\xd3\x56\x28\x97\x44\xdc" + "\xc0\x8b\x77\x24\xd9\x52\xe7\xc5" + "\xaf\xf6\x7d\x59\xb2\x44\x05\x1d" + "\xb1\xb0\x11\xa5\x0f\xec\x33\xe1" + "\x6d\x1b\x4e\x1f\xff\x57\x91\xb4" + "\x5b\x9a\x96\xc5\x53\xbc\xae\x20" + "\x3c\xbb\x14\xe2\xe8\x22\x33\xc1" + "\x5e\x76\x9e\x46\x99\xf6\x2a\x15" + "\xc6\x97\x02\xa0\x66\x43\xd1\xa6" + "\x31\xa6\x9f\xfb\xf4\xd3\x69\xe5" + "\xcd\x76\x95\xb8\x7a\x82\x7f\x21" + "\x45\xff\x3f\xce\x55\xf6\x95\x10" + "\x08\x77\x10\x43\xc6\xf3\x09\xe5" + "\x68\xe7\x3c\xad\x00\x52\x45\x0d" + "\xfe\x2d\xc6\xc2\x94\x8c\x12\x1d" + 
"\xe6\x25\xae\x98\x12\x8e\x19\x9c" + "\x81\x68\xb1\x11\xf6\x69\xda\xe3" + "\x62\x08\x18\x7a\x25\x49\x28\xac" + "\xba\x71\x12\x0b\xe4\xa2\xe5\xc7" + "\x5d\x8e\xec\x49\x40\x21\xbf\x5a" + "\x98\xf3\x02\x68\x55\x03\x7f\x8a" + "\xe5\x94\x0c\x32\x5c\x07\x82\x63" + "\xaf\x6f\x91\x40\x84\x8e\x52\x25" + "\xd0\xb0\x29\x53\x05\xe2\x50\x7a" + "\x34\xeb\xc9\x46\x20\xa8\x3d\xde" + "\x7f\x16\x5f\x36\xc5\x2e\xdc\xd1" + "\x15\x47\xc7\x50\x40\x6d\x91\xc5" + "\xe7\x93\x95\x1a\xd3\x57\xbc\x52" + "\x33\xee\x14\x19\x22\x52\x89\xa7" + "\x4a\x25\x56\x77\x4b\xca\xcf\x0a" + "\xe1\xf5\x35\x85\x30\x7e\x59\x4a" + "\xbd\x14\x5b\xdf\xe3\x46\xcb\xac" + "\x1f\x6c\x96\x0e\xf4\x81\xd1\x99" + "\xca\x88\x63\x3d\x02\x58\x6b\xa9" + "\xe5\x9f\xb3\x00\xb2\x54\xc6\x74" + "\x1c\xbf\x46\xab\x97\xcc\xf8\x54" + "\x04\x07\x08\x52\xe6\xc0\xda\x93" + "\x74\x7d\x93\x99\x5d\x78\x68\xa6" + "\x2e\x6b\xd3\x6a\x69\xcc\x12\x6b" + "\xd4\xc7\xa5\xc6\xe7\xf6\x03\x04" + "\x5d\xcd\x61\x5e\x17\x40\xdc\xd1" + "\x5c\xf5\x08\xdf\x5c\x90\x85\xa4" + "\xaf\xf6\x78\xbb\x0d\xf1\xf4\xa4" + "\x54\x26\x72\x9e\x61\xfa\x86\xcf" + "\xe8\x9e\xa1\xe0\xc7\x48\x23\xae" + "\x5a\x90\xae\x75\x0a\x74\x18\x89" + "\x05\xb1\x92\xb2\x7f\xd0\x1b\xa6" + "\x62\x07\x25\x01\xc7\xc2\x4f\xf9" + "\xe8\xfe\x63\x95\x80\x07\xb4\x26" + "\xcc\xd1\x26\xb6\xc4\x3f\x9e\xcb" + "\x8e\x3b\x2e\x44\x16\xd3\x10\x9a" + "\x95\x08\xeb\xc8\xcb\xeb\xbf\x6f" + "\x0b\xcd\x1f\xc8\xca\x86\xaa\xec" + "\x33\xe6\x69\xf4\x45\x25\x86\x3a" + "\x22\x94\x4f\x00\x23\x6a\x44\xc2" + "\x49\x97\x33\xab\x36\x14\x0a\x70" + "\x24\xc3\xbe\x04\x3b\x79\xa0\xf9" + "\xb8\xe7\x76\x29\x22\x83\xd7\xf2" + "\x94\xf4\x41\x49\xba\x5f\x7b\x07" + "\xb5\xfb\xdb\x03\x1a\x9f\xb6\x4c" + "\xc2\x2e\x37\x40\x49\xc3\x38\x16" + "\xe2\x4f\x77\x82\xb0\x68\x4c\x71" + "\x1d\x57\x61\x9c\xd9\x4e\x54\x99" + "\x47\x13\x28\x73\x3c\xbb\x00\x90" + "\xf3\x4d\xc9\x0e\xfd\xe7\xb1\x71" + "\xd3\x15\x79\xbf\xcc\x26\x2f\xbd" + "\xad\x6c\x50\x69\x6c\x3e\x6d\x80" + "\x9a\xea\x78\xaf\x19\xb2\x0d\x4d" + "\xad\x04\x07\xae\x22\x90\x4a\x93" + "\x32\x0e\x36\x9b\x1b\x46\xba\x3b" + "\xb4\xac\xc6\xd1\xa2\x31\x53\x3b" + "\x2a\x3d\x45\xfe\x03\x61\x10\x85" + "\x17\x69\xa6\x78\xcc\x6c\x87\x49" + "\x53\xf9\x80\x10\xde\x80\xa2\x41" + "\x6a\xc3\x32\x02\xad\x6d\x3c\x56" + "\x00\x71\x51\x06\xa7\xbd\xfb\xef" + "\x3c\xb5\x9f\xfc\x48\x7d\x53\x7c" + "\x66\xb0\x49\x23\xc4\x47\x10\x0e" + "\xe5\x6c\x74\x13\xe6\xc5\x3f\xaa" + "\xde\xff\x07\x44\xdd\x56\x1b\xad" + "\x09\x77\xfb\x5b\x12\xb8\x0d\x38" + "\x17\x37\x35\x7b\x9b\xbc\xfe\xd4" + "\x7e\x8b\xda\x7e\x5b\x04\xa7\x22" + "\xa7\x31\xa1\x20\x86\xc7\x1b\x99" + "\xdb\xd1\x89\xf4\x94\xa3\x53\x69" + "\x8d\xe7\xe8\x74\x11\x8d\x74\xd6" + "\x07\x37\x91\x9f\xfd\x67\x50\x3a" + "\xc9\xe1\xf4\x36\xd5\xa0\x47\xd1" + "\xf9\xe5\x39\xa3\x31\xac\x07\x36" + "\x23\xf8\x66\x18\x14\x28\x34\x0f" + "\xb8\xd0\xe7\x29\xb3\x04\x4b\x55" + "\x01\x41\xb2\x75\x8d\xcb\x96\x85" + "\x3a\xfb\xab\x2b\x9e\xfa\x58\x20" + "\x44\x1f\xc0\x14\x22\x75\x61\xe8" + "\xaa\x19\xcf\xf1\x82\x56\xf4\xd7" + "\x78\x7b\x3d\x5f\xb3\x9e\x0b\x8a" + "\x57\x50\xdb\x17\x41\x65\x4d\xa3" + "\x02\xc9\x9c\x9c\x53\xfb\x39\x39" + "\x9b\x1d\x72\x24\xda\xb7\x39\xbe" + "\x13\x3b\xfa\x29\xda\x9e\x54\x64" + "\x6e\xba\xd8\xa1\xcb\xb3\x36\xfa" + "\xcb\x47\x85\xe9\x61\x38\xbc\xbe" + "\xc5\x00\x38\x2a\x54\xf7\xc4\xb9" + "\xb3\xd3\x7b\xa0\xa0\xf8\x72\x7f" + "\x8c\x8e\x82\x0e\xc6\x1c\x75\x9d" + "\xca\x8e\x61\x87\xde\xad\x80\xd2" + "\xf5\xf9\x80\xef\x15\x75\xaf\xf5" + "\x80\xfb\xff\x6d\x1e\x25\xb7\x40" + "\x61\x6a\x39\x5a\x6a\xb5\x31\xab" + "\x97\x8a\x19\x89\x44\x40\xc0\xa6" + "\xb4\x4e\x30\x32\x7b\x13\xe7\x67" + 
"\xa9\x8b\x57\x04\xc2\x01\xa6\xf4" + "\x28\x99\xad\x2c\x76\xa3\x78\xc2" + "\x4a\xe6\xca\x5c\x50\x6a\xc1\xb0" + "\x62\x4b\x10\x8e\x7c\x17\x43\xb3" + "\x17\x66\x1c\x3e\x8d\x69\xf0\x5a" + "\x71\xf5\x97\xdc\xd1\x45\xdd\x28" + "\xf3\x5d\xdf\x53\x7b\x11\xe5\xbc" + "\x4c\xdb\x1b\x51\x6b\xe9\xfb\x3d" + "\xc1\xc3\x2c\xb9\x71\xf5\xb6\xb2" + "\x13\x36\x79\x80\x53\xe8\xd3\xa6" + "\x0a\xaf\xfd\x56\x97\xf7\x40\x8e" + "\x45\xce\xf8\xb0\x9e\x5c\x33\x82" + "\xb0\x44\x56\xfc\x05\x09\xe9\x2a" + "\xac\x26\x80\x14\x1d\xc8\x3a\x35" + "\x4c\x82\x97\xfd\x76\xb7\xa9\x0a" + "\x35\x58\x79\x8e\x0f\x66\xea\xaf" + "\x51\x6c\x09\xa9\x6e\x9b\xcb\x9a" + "\x31\x47\xa0\x2f\x7c\x71\xb4\x4a" + "\x11\xaa\x8c\x66\xc5\x64\xe6\x3a" + "\x54\xda\x24\x6a\xc4\x41\x65\x46" + "\x82\xa0\x0a\x0f\x5f\xfb\x25\xd0" + "\x2c\x91\xa7\xee\xc4\x81\x07\x86" + "\x75\x5e\x33\x69\x97\xe4\x2c\xa8" + "\x9d\x9f\x0b\x6a\xbe\xad\x98\xda" + "\x6d\x94\x41\xda\x2c\x1e\x89\xc4" + "\xc2\xaf\x1e\x00\x05\x0b\x83\x60" + "\xbd\x43\xea\x15\x23\x7f\xb9\xac" + "\xee\x4f\x2c\xaf\x2a\xf3\xdf\xd0" + "\xf3\x19\x31\xbb\x4a\x74\x84\x17" + "\x52\x32\x2c\x7d\x61\xe4\xcb\xeb" + "\x80\x38\x15\x52\xcb\x6f\xea\xe5" + "\x73\x9c\xd9\x24\x69\xc6\x95\x32" + "\x21\xc8\x11\xe4\xdc\x36\xd7\x93" + "\x38\x66\xfb\xb2\x7f\x3a\xb9\xaf" + "\x31\xdd\x93\x75\x78\x8a\x2c\x94" + "\x87\x1a\x58\xec\x9e\x7d\x4d\xba" + "\xe1\xe5\x4d\xfc\xbc\xa4\x2a\x14" + "\xef\xcc\xa7\xec\xab\x43\x09\x18" + "\xd3\xab\x68\xd1\x07\x99\x44\x47" + "\xd6\x83\x85\x3b\x30\xea\xa9\x6b" + "\x63\xea\xc4\x07\xfb\x43\x2f\xa4" + "\xaa\xb0\xab\x03\x89\xce\x3f\x8c" + "\x02\x7c\x86\x54\xbc\x88\xaf\x75" + "\xd2\xdc\x63\x17\xd3\x26\xf6\x96" + "\xa9\x3c\xf1\x61\x8c\x11\x18\xcc" + "\xd6\xea\x5b\xe2\xcd\xf0\xf1\xb2" + "\xe5\x35\x90\x1f\x85\x4c\x76\x5b" + "\x66\xce\x44\xa4\x32\x9f\xe6\x7b" + "\x71\x6e\x9f\x58\x15\x67\x72\x87" + "\x64\x8e\x3a\x44\x45\xd4\x76\xfa" + "\xc2\xf6\xef\x85\x05\x18\x7a\x9b" + "\xba\x41\x54\xac\xf0\xfc\x59\x12" + "\x3f\xdf\xa0\xe5\x8a\x65\xfd\x3a" + "\x62\x8d\x83\x2c\x03\xbe\x05\x76" + "\x2e\x53\x49\x97\x94\x33\xae\x40" + "\x81\x15\xdb\x6e\xad\xaa\xf5\x4b" + "\xe3\x98\x70\xdf\xe0\x7c\xcd\xdb" + "\x02\xd4\x7d\x2f\xc1\xe6\xb4\xf3" + "\xd7\x0d\x7a\xd9\x23\x9e\x87\x2d" + "\xce\x87\xad\xcc\x72\x05\x00\x29" + "\xdc\x73\x7f\x64\xc1\x15\x0e\xc2" + "\xdf\xa7\x5f\xeb\x41\xa1\xcd\xef" + "\x5c\x50\x79\x2a\x56\x56\x71\x8c" + "\xac\xc0\x79\x50\x69\xca\x59\x32" + "\x65\xf2\x54\xe4\x52\x38\x76\xd1" + "\x5e\xde\x26\x9e\xfb\x75\x2e\x11" + "\xb5\x10\xf4\x17\x73\xf5\x89\xc7" + "\x4f\x43\x5c\x8e\x7c\xb9\x05\x52" + "\x24\x40\x99\xfe\x9b\x85\x0b\x6c" + "\x22\x3e\x8b\xae\x86\xa1\xd2\x79" + "\x05\x68\x6b\xab\xe3\x41\x49\xed" + "\x15\xa1\x8d\x40\x2d\x61\xdf\x1a" + "\x59\xc9\x26\x8b\xef\x30\x4c\x88" + "\x4b\x10\xf8\x8d\xa6\x92\x9f\x4b" + "\xf3\xc4\x53\x0b\x89\x5d\x28\x92" + "\xcf\x78\xb2\xc0\x5d\xed\x7e\xfc" + "\xc0\x12\x23\x5f\x5a\x78\x86\x43" + "\x6e\x27\xf7\x5a\xa7\x6a\xed\x19" + "\x04\xf0\xb3\x12\xd1\xbd\x0e\x89" + "\x6e\xbc\x96\xa8\xd8\x49\x39\x9f" + "\x7e\x67\xf0\x2e\x3e\x01\xa9\xba" + "\xec\x8b\x62\x8e\xcb\x4a\x70\x43" + "\xc7\xc2\xc4\xca\x82\x03\x73\xe9" + "\x11\xdf\xcf\x54\xea\xc9\xb0\x95" + "\x51\xc0\x13\x3d\x92\x05\xfa\xf4" + "\xa9\x34\xc8\xce\x6c\x3d\x54\xcc" + "\xc4\xaf\xf1\xdc\x11\x44\x26\xa2" + "\xaf\xf1\x85\x75\x7d\x03\x61\x68" + "\x4e\x78\xc6\x92\x7d\x86\x7d\x77" + "\xdc\x71\x72\xdb\xc6\xae\xa1\xcb" + "\x70\x9a\x0b\x19\xbe\x4a\x6c\x2a" + "\xe2\xba\x6c\x64\x9a\x13\x28\xdf" + "\x85\x75\xe6\x43\xf6\x87\x08\x68" + "\x6e\xba\x6e\x79\x9f\x04\xbc\x23" + "\x50\xf6\x33\x5c\x1f\x24\x25\xbe" + "\x33\x47\x80\x45\x56\xa3\xa7\xd7" + 
"\x7a\xb1\x34\x0b\x90\x3c\x9c\xad" + "\x44\x5f\x9e\x0e\x9d\xd4\xbd\x93" + "\x5e\xfa\x3c\xe0\xb0\xd9\xed\xf3" + "\xd6\x2e\xff\x24\xd8\x71\x6c\xed" + "\xaf\x55\xeb\x22\xac\x93\x68\x32" + "\x05\x5b\x47\xdd\xc6\x4a\xcb\xc7" + "\x10\xe1\x3c\x92\x1a\xf3\x23\x78" + "\x2b\xa1\xd2\x80\xf4\x12\xb1\x20" + "\x8f\xff\x26\x35\xdd\xfb\xc7\x4e" + "\x78\xf1\x2d\x50\x12\x77\xa8\x60" + "\x7c\x0f\xf5\x16\x2f\x63\x70\x2a" + "\xc0\x96\x80\x4e\x0a\xb4\x93\x35" + "\x5d\x1d\x3f\x56\xf7\x2f\xbb\x90" + "\x11\x16\x8f\xa2\xec\x47\xbe\xac" + "\x56\x01\x26\x56\xb1\x8c\xb2\x10" + "\xf9\x1a\xca\xf5\xd1\xb7\x39\x20" + "\x63\xf1\x69\x20\x4f\x13\x12\x1f" + "\x5b\x65\xfc\x98\xf7\xc4\x7a\xbe" + "\xf7\x26\x4d\x2b\x84\x7b\x42\xad" + "\xd8\x7a\x0a\xb4\xd8\x74\xbf\xc1" + "\xf0\x6e\xb4\x29\xa3\xbb\xca\x46" + "\x67\x70\x6a\x2d\xce\x0e\xa2\x8a" + "\xa9\x87\xbf\x05\xc4\xc1\x04\xa3" + "\xab\xd4\x45\x43\x8c\xb6\x02\xb0" + "\x41\xc8\xfc\x44\x3d\x59\xaa\x2e" + "\x44\x21\x2a\x8d\x88\x9d\x57\xf4" + "\xa0\x02\x77\xb8\xa6\xa0\xe6\x75" + "\x5c\x82\x65\x3e\x03\x5c\x29\x8f" + "\x38\x55\xab\x33\x26\xef\x9f\x43" + "\x52\xfd\x68\xaf\x36\xb4\xbb\x9a" + "\x58\x09\x09\x1b\xc3\x65\x46\x46" + "\x1d\xa7\x94\x18\x23\x50\x2c\xca" + "\x2c\x55\x19\x97\x01\x9d\x93\x3b" + "\x63\x86\xf2\x03\x67\x45\xd2\x72" + "\x28\x52\x6c\xf4\xe3\x1c\xb5\x11" + "\x13\xf1\xeb\x21\xc7\xd9\x56\x82" + "\x2b\x82\x39\xbd\x69\x54\xed\x62" + "\xc3\xe2\xde\x73\xd4\x6a\x12\xae" + "\x13\x21\x7f\x4b\x5b\xfc\xbf\xe8" + "\x2b\xbe\x56\xba\x68\x8b\x9a\xb1" + "\x6e\xfa\xbf\x7e\x5a\x4b\xf1\xac" + "\x98\x65\x85\xd1\x93\x53\xd3\x7b" + "\x09\xdd\x4b\x10\x6d\x84\xb0\x13" + "\x65\xbd\xcf\x52\x09\xc4\x85\xe2" + "\x84\x74\x15\x65\xb7\xf7\x51\xaf" + "\x55\xad\xa4\xd1\x22\x54\x70\x94" + "\xa0\x1c\x90\x41\xfd\x99\xd7\x5a" + "\x31\xef\xaa\x25\xd0\x7f\x4f\xea" + "\x1d\x55\x42\xe5\x49\xb0\xd0\x46" + "\x62\x36\x43\xb2\x82\x15\x75\x50" + "\xa4\x72\xeb\x54\x27\x1f\x8a\xe4" + "\x7d\xe9\x66\xc5\xf1\x53\xa4\xd1" + "\x0c\xeb\xb8\xf8\xbc\xd4\xe2\xe7" + "\xe1\xf8\x4b\xcb\xa9\xa1\xaf\x15" + "\x83\xcb\x72\xd0\x33\x79\x00\x2d" + "\x9f\xd7\xf1\x2e\x1e\x10\xe4\x45" + "\xc0\x75\x3a\x39\xea\x68\xf7\x5d" + "\x1b\x73\x8f\xe9\x8e\x0f\x72\x47" + "\xae\x35\x0a\x31\x7a\x14\x4d\x4a" + "\x6f\x47\xf7\x7e\x91\x6e\x74\x8b" + "\x26\x47\xf9\xc3\xf9\xde\x70\xf5" + "\x61\xab\xa9\x27\x9f\x82\xe4\x9c" + "\x89\x91\x3f\x2e\x6a\xfd\xb5\x49" + "\xe9\xfd\x59\x14\x36\x49\x40\x6d" + "\x32\xd8\x85\x42\xf3\xa5\xdf\x0c" + "\xa8\x27\xd7\x54\xe2\x63\x2f\xf2" + "\x7e\x8b\x8b\xe7\xf1\x9a\x95\x35" + "\x43\xdc\x3a\xe4\xb6\xf4\xd0\xdf" + "\x9c\xcb\x94\xf3\x21\xa0\x77\x50" + "\xe2\xc6\xc4\xc6\x5f\x09\x64\x5b" + "\x92\x90\xd8\xe1\xd1\xed\x4b\x42" + "\xd7\x37\xaf\x65\x3d\x11\x39\xb6" + "\x24\x8a\x60\xae\xd6\x1e\xbf\x0e" + "\x0d\xd7\xdc\x96\x0e\x65\x75\x4e" + "\x29\x06\x9d\xa4\x51\x3a\x10\x63" + "\x8f\x17\x07\xd5\x8e\x3c\xf4\x28" + "\x00\x5a\x5b\x05\x19\xd8\xc0\x6c" + "\xe5\x15\xe4\x9c\x9d\x71\x9d\x5e" + "\x94\x29\x1a\xa7\x80\xfa\x0e\x33" + "\x03\xdd\xb7\x3e\x9a\xa9\x26\x18" + "\x37\xa9\x64\x08\x4d\x94\x5a\x88" + "\xca\x35\xce\x81\x02\xe3\x1f\x1b" + "\x89\x1a\x77\x85\xe3\x41\x6d\x32" + "\x42\x19\x23\x7d\xc8\x73\xee\x25" + "\x85\x0d\xf8\x31\x25\x79\x1b\x6f" + "\x79\x25\xd2\xd8\xd4\x23\xfd\xf7" + "\x82\x36\x6a\x0c\x46\x22\x15\xe9" + "\xff\x72\x41\x91\x91\x7d\x3a\xb7" + "\xdd\x65\x99\x70\xf6\x8d\x84\xf8" + "\x67\x15\x20\x11\xd6\xb2\x55\x7b" + "\xdb\x87\xee\xef\x55\x89\x2a\x59" + "\x2b\x07\x8f\x43\x8a\x59\x3c\x01" + "\x8b\x65\x54\xa1\x66\xd5\x38\xbd" + "\xc6\x30\xa9\xcc\x49\xb6\xa8\x1b" + "\xb8\xc0\x0e\xe3\x45\x28\xe2\xff" + "\x41\x9f\x7e\x7c\xd1\xae\x9e\x25" + 
"\x3f\x4c\x7c\x7c\xf4\xa8\x26\x4d" + "\x5c\xfd\x4b\x27\x18\xf9\x61\x76" + "\x48\xba\x0c\x6b\xa9\x4d\xfc\xf5" + "\x3b\x35\x7e\x2f\x4a\xa9\xc2\x9a" + "\xae\xab\x86\x09\x89\xc9\xc2\x40" + "\x39\x2c\x81\xb3\xb8\x17\x67\xc2" + "\x0d\x32\x4a\x3a\x67\x81\xd7\x1a" + "\x34\x52\xc5\xdb\x0a\xf5\x63\x39" + "\xea\x1f\xe1\x7c\xa1\x9e\xc1\x35" + "\xe3\xb1\x18\x45\x67\xf9\x22\x38" + "\x95\xd9\x34\x34\x86\xc6\x41\x94" + "\x15\xf9\x5b\x41\xa6\x87\x8b\xf8" + "\xd5\xe1\x1b\xe2\x5b\xf3\x86\x10" + "\xff\xe6\xae\x69\x76\xbc\x0d\xb4" + "\x09\x90\x0c\xa2\x65\x0c\xad\x74" + "\xf5\xd7\xff\xda\xc1\xce\x85\xbe" + "\x00\xa7\xff\x4d\x2f\x65\xd3\x8c" + "\x86\x2d\x05\xe8\xed\x3e\x6b\x8b" + "\x0f\x3d\x83\x8c\xf1\x1d\x5b\x96" + "\x2e\xb1\x9c\xc2\x98\xe1\x70\xb9" + "\xba\x5c\x8a\x43\xd6\x34\xa7\x2d" + "\xc9\x92\xae\xf2\xa5\x7b\x05\x49" + "\xa7\x33\x34\x86\xca\xe4\x96\x23" + "\x76\x5b\xf2\xc6\xf1\x51\x28\x42" + "\x7b\xcc\x76\x8f\xfa\xa2\xad\x31" + "\xd4\xd6\x7a\x6d\x25\x25\x54\xe4" + "\x3f\x50\x59\xe1\x5c\x05\xb7\x27" + "\x48\xbf\x07\xec\x1b\x13\xbe\x2b" + "\xa1\x57\x2b\xd5\xab\xd7\xd0\x4c" + "\x1e\xcb\x71\x9b\xc5\x90\x85\xd3" + "\xde\x59\xec\x71\xeb\x89\xbb\xd0" + "\x09\x50\xe1\x16\x3f\xfd\x1c\x34" + "\xc3\x1c\xa1\x10\x77\x53\x98\xef" + "\xf2\xfd\xa5\x01\x59\xc2\x9b\x26" + "\xc7\x42\xd9\x49\xda\x58\x2b\x6e" + "\x9f\x53\x19\x76\x7e\xd9\xc9\x0e" + "\x68\xc8\x7f\x51\x22\x42\xef\x49" + "\xa4\x55\xb6\x36\xac\x09\xc7\x31" + "\x88\x15\x4b\x2e\x8f\x3a\x08\xf7" + "\xd8\xf7\xa8\xc5\xa9\x33\xa6\x45" + "\xe4\xc4\x94\x76\xf3\x0d\x8f\x7e" + "\xc8\xf6\xbc\x23\x0a\xb6\x4c\xd3" + "\x6a\xcd\x36\xc2\x90\x5c\x5c\x3c" + "\x65\x7b\xc2\xd6\xcc\xe6\x0d\x87" + "\x73\x2e\x71\x79\x16\x06\x63\x28" + "\x09\x15\xd8\x89\x38\x38\x3d\xb5" + "\x42\x1c\x08\x24\xf7\x2a\xd2\x9d" + "\xc8\xca\xef\xf9\x27\xd8\x07\x86" + "\xf7\x43\x0b\x55\x15\x3f\x9f\x83" + "\xef\xdc\x49\x9d\x2a\xc1\x54\x62" + "\xbd\x9b\x66\x55\x9f\xb7\x12\xf3" + "\x1b\x4d\x9d\x2a\x5c\xed\x87\x75" + "\x87\x26\xec\x61\x2c\xb4\x0f\x89" + "\xb0\xfb\x2e\x68\x5d\x15\xc7\x8d" + "\x2e\xc0\xd9\xec\xaf\x4f\xd2\x25" + "\x29\xe8\xd2\x26\x2b\x67\xe9\xfc" + "\x2b\xa8\x67\x96\x12\x1f\x5b\x96" + "\xc6\x14\x53\xaf\x44\xea\xd6\xe2" + "\x94\x98\xe4\x12\x93\x4c\x92\xe0" + "\x18\xa5\x8d\x2d\xe4\x71\x3c\x47" + "\x4c\xf7\xe6\x47\x9e\xc0\x68\xdf" + "\xd4\xf5\x5a\x74\xb1\x2b\x29\x03" + "\x19\x07\xaf\x90\x62\x5c\x68\x98" + "\x48\x16\x11\x02\x9d\xee\xb4\x9b" + "\xe5\x42\x7f\x08\xfd\x16\x32\x0b" + "\xd0\xb3\xfa\x2b\xb7\x99\xf9\x29" + "\xcd\x20\x45\x9f\xb3\x1a\x5d\xa2" + "\xaf\x4d\xe0\xbd\x42\x0d\xbc\x74" + "\x99\x9c\x8e\x53\x1a\xb4\x3e\xbd" + "\xa2\x9a\x2d\xf7\xf8\x39\x0f\x67" + "\x63\xfc\x6b\xc0\xaf\xb3\x4b\x4f" + "\x55\xc4\xcf\xa7\xc8\x04\x11\x3e" + "\x14\x32\xbb\x1b\x38\x77\xd6\x7f" + "\x54\x4c\xdf\x75\xf3\x07\x2d\x33" + "\x9b\xa8\x20\xe1\x7b\x12\xb5\xf3" + "\xef\x2f\xce\x72\xe5\x24\x60\xc1" + "\x30\xe2\xab\xa1\x8e\x11\x09\xa8" + "\x21\x33\x44\xfe\x7f\x35\x32\x93" + "\x39\xa7\xad\x8b\x79\x06\xb2\xcb" + "\x4e\xa9\x5f\xc7\xba\x74\x29\xec" + "\x93\xa0\x4e\x54\x93\xc0\xbc\x55" + "\x64\xf0\x48\xe5\x57\x99\xee\x75" + "\xd6\x79\x0f\x66\xb7\xc6\x57\x76" + "\xf7\xb7\xf3\x9c\xc5\x60\xe8\x7f" + "\x83\x76\xd6\x0e\xaa\xe6\x90\x39" + "\x1d\xa6\x32\x6a\x34\xe3\x55\xf8" + "\x58\xa0\x58\x7d\x33\xe0\x22\x39" + "\x44\x64\x87\x86\x5a\x2f\xa7\x7e" + "\x0f\x38\xea\xb0\x30\xcc\x61\xa5" + "\x6a\x32\xae\x1e\xf7\xe9\xd0\xa9" + "\x0c\x32\x4b\xb5\x49\x28\xab\x85" + "\x2f\x8e\x01\x36\x38\x52\xd0\xba" + "\xd6\x02\x78\xf8\x0e\x3e\x9c\x8b" + "\x6b\x45\x99\x3f\x5c\xfe\x58\xf1" + "\x5c\x94\x04\xe1\xf5\x18\x6d\x51" + "\xb2\x5d\x18\x20\xb6\xc2\x9a\x42" + 
"\x1d\xb3\xab\x3c\xb6\x3a\x13\x03" + "\xb2\x46\x82\x4f\xfc\x64\xbc\x4f" + "\xca\xfa\x9c\xc0\xd5\xa7\xbd\x11" + "\xb7\xe4\x5a\xf6\x6f\x4d\x4d\x54" + "\xea\xa4\x98\x66\xd4\x22\x3b\xd3" + "\x8f\x34\x47\xd9\x7c\xf4\x72\x3b" + "\x4d\x02\x77\xf6\xd6\xdd\x08\x0a" + "\x81\xe1\x86\x89\x3e\x56\x10\x3c" + "\xba\xd7\x81\x8c\x08\xbc\x8b\xe2" + "\x53\xec\xa7\x89\xee\xc8\x56\xb5" + "\x36\x2c\xb2\x03\xba\x99\xdd\x7c" + "\x48\xa0\xb0\xbc\x91\x33\xe9\xa8" + "\xcb\xcd\xcf\x59\x5f\x1f\x15\xe2" + "\x56\xf5\x4e\x01\x35\x27\x45\x77" + "\x47\xc8\xbc\xcb\x7e\x39\xc1\x97" + "\x28\xd3\x84\xfc\x2c\x3e\xc8\xad" + "\x9c\xf8\x8a\x61\x9c\x28\xaa\xc5" + "\x99\x20\x43\x85\x9d\xa5\xe2\x8b" + "\xb8\xae\xeb\xd0\x32\x0d\x52\x78" + "\x09\x56\x3f\xc7\xd8\x7e\x26\xfc" + "\x37\xfb\x6f\x04\xfc\xfa\x92\x10" + "\xac\xf8\x3e\x21\xdc\x8c\x21\x16" + "\x7d\x67\x6e\xf6\xcd\xda\xb6\x98" + "\x23\xab\x23\x3c\xb2\x10\xa0\x53" + "\x5a\x56\x9f\xc5\xd0\xff\xbb\xe4" + "\x98\x3c\x69\x1e\xdb\x38\x8f\x7e" + "\x0f\xd2\x98\x88\x81\x8b\x45\x67" + "\xea\x33\xf1\xeb\xe9\x97\x55\x2e" + "\xd9\xaa\xeb\x5a\xec\xda\xe1\x68" + "\xa8\x9d\x3c\x84\x7c\x05\x3d\x62" + "\x87\x8f\x03\x21\x28\x95\x0c\x89" + "\x25\x22\x4a\xb0\x93\xa9\x50\xa2" + "\x2f\x57\x6e\x18\x42\x19\x54\x0c" + "\x55\x67\xc6\x11\x49\xf4\x5c\xd2" + "\xe9\x3d\xdd\x8b\x48\x71\x21\x00" + "\xc3\x9a\x6c\x85\x74\x28\x83\x4a" + "\x1b\x31\x05\xe1\x06\x92\xe7\xda" + "\x85\x73\x78\x45\x20\x7f\xae\x13" + "\x7c\x33\x06\x22\xf4\x83\xf9\x35" + "\x3f\x6c\x71\xa8\x4e\x48\xbe\x9b" + "\xce\x8a\xba\xda\xbe\x28\x08\xf7" + "\xe2\x14\x8c\x71\xea\x72\xf9\x33" + "\xf2\x88\x3f\xd7\xbb\x69\x6c\x29" + "\x19\xdc\x84\xce\x1f\x12\x4f\xc8" + "\xaf\xa5\x04\xba\x5a\xab\xb0\xd9" + "\x14\x1f\x6c\x68\x98\x39\x89\x7a" + "\xd9\xd8\x2f\xdf\xa8\x47\x4a\x25" + "\xe2\xfb\x33\xf4\x59\x78\xe1\x68" + "\x85\xcf\xfe\x59\x20\xd4\x05\x1d" + "\x80\x99\xae\xbc\xca\xae\x0f\x2f" + "\x65\x43\x34\x8e\x7e\xac\xd3\x93" + "\x2f\xac\x6d\x14\x3d\x02\x07\x70" + "\x9d\xa4\xf3\x1b\x5c\x36\xfc\x01" + "\x73\x34\x85\x0c\x6c\xd6\xf1\xbd" + "\x3f\xdf\xee\xf5\xd9\xba\x56\xef" + "\xf4\x9b\x6b\xee\x9f\x5a\x78\x6d" + "\x32\x19\xf4\xf7\xf8\x4c\x69\x0b" + "\x4b\xbc\xbb\xb7\xf2\x85\xaf\x70" + "\x75\x24\x6c\x54\xa7\x0e\x4d\x1d" + "\x01\xbf\x08\xac\xcf\x7f\x2c\xe3" + "\x14\x89\x5e\x70\x5a\x99\x92\xcd" + "\x01\x84\xc8\xd2\xab\xe5\x4f\x58" + "\xe7\x0f\x2f\x0e\xff\x68\xea\xfd" + "\x15\xb3\x17\xe6\xb0\xe7\x85\xd8" + "\x23\x2e\x05\xc7\xc9\xc4\x46\x1f" + "\xe1\x9e\x49\x20\x23\x24\x4d\x7e" + "\x29\x65\xff\xf4\xb6\xfd\x1a\x85" + "\xc4\x16\xec\xfc\xea\x7b\xd6\x2c" + "\x43\xf8\xb7\xbf\x79\xc0\x85\xcd" + "\xef\xe1\x98\xd3\xa5\xf7\x90\x8c" + "\xe9\x7f\x80\x6b\xd2\xac\x4c\x30" + "\xa7\xc6\x61\x6c\xd2\xf9\x2c\xff" + "\x30\xbc\x22\x81\x7d\x93\x12\xe4" + "\x0a\xcd\xaf\xdd\xe8\xab\x0a\x1e" + "\x13\xa4\x27\xc3\x5f\xf7\x4b\xbb" + "\x37\x09\x4b\x91\x6f\x92\x4f\xaf" + "\x52\xee\xdf\xef\x09\x6f\xf7\x5c" + "\x6e\x12\x17\x72\x63\x57\xc7\xba" + "\x3b\x6b\x38\x32\x73\x1b\x9c\x80" + "\xc1\x7a\xc6\xcf\xcd\x35\xc0\x6b" + "\x31\x1a\x6b\xe9\xd8\x2c\x29\x3f" + "\x96\xfb\xb6\xcd\x13\x91\x3b\xc2" + "\xd2\xa3\x31\x8d\xa4\xcd\x57\xcd" + "\x13\x3d\x64\xfd\x06\xce\xe6\xdc" + "\x0c\x24\x43\x31\x40\x57\xf1\x72" + "\x17\xe3\x3a\x63\x6d\x35\xcf\x5d" + "\x97\x40\x59\xdd\xf7\x3c\x02\xf7" + "\x1c\x7e\x05\xbb\xa9\x0d\x01\xb1" + "\x8e\xc0\x30\xa9\x53\x24\xc9\x89" + "\x84\x6d\xaa\xd0\xcd\x91\xc2\x4d" + "\x91\xb0\x89\xe2\xbf\x83\x44\xaa" + "\x28\x72\x23\xa0\xc2\xad\xad\x1c" + "\xfc\x3f\x09\x7a\x0b\xdc\xc5\x1b" + "\x87\x13\xc6\x5b\x59\x8d\xf2\xc8" + "\xaf\xdf\x11\x95", + .rlen = 4100, + .np = 2, + .tap = { 4064, 36 }, + }, 
+}; + +/* + * CTS (Cipher Text Stealing) mode tests + */ +#define CTS_MODE_ENC_TEST_VECTORS 6 +#define CTS_MODE_DEC_TEST_VECTORS 6 +static struct cipher_testvec cts_mode_enc_tv_template[] = { + { /* from rfc3962 */ + .klen = 16, + .key = "\x63\x68\x69\x63\x6b\x65\x6e\x20" + "\x74\x65\x72\x69\x79\x61\x6b\x69", + .ilen = 17, + .input = "\x49\x20\x77\x6f\x75\x6c\x64\x20" + "\x6c\x69\x6b\x65\x20\x74\x68\x65" + "\x20", + .rlen = 17, + .result = "\xc6\x35\x35\x68\xf2\xbf\x8c\xb4" + "\xd8\xa5\x80\x36\x2d\xa7\xff\x7f" + "\x97", + }, { + .klen = 16, + .key = "\x63\x68\x69\x63\x6b\x65\x6e\x20" + "\x74\x65\x72\x69\x79\x61\x6b\x69", + .ilen = 31, + .input = "\x49\x20\x77\x6f\x75\x6c\x64\x20" + "\x6c\x69\x6b\x65\x20\x74\x68\x65" + "\x20\x47\x65\x6e\x65\x72\x61\x6c" + "\x20\x47\x61\x75\x27\x73\x20", + .rlen = 31, + .result = "\xfc\x00\x78\x3e\x0e\xfd\xb2\xc1" + "\xd4\x45\xd4\xc8\xef\xf7\xed\x22" + "\x97\x68\x72\x68\xd6\xec\xcc\xc0" + "\xc0\x7b\x25\xe2\x5e\xcf\xe5", + }, { + .klen = 16, + .key = "\x63\x68\x69\x63\x6b\x65\x6e\x20" + "\x74\x65\x72\x69\x79\x61\x6b\x69", + .ilen = 32, + .input = "\x49\x20\x77\x6f\x75\x6c\x64\x20" + "\x6c\x69\x6b\x65\x20\x74\x68\x65" + "\x20\x47\x65\x6e\x65\x72\x61\x6c" + "\x20\x47\x61\x75\x27\x73\x20\x43", + .rlen = 32, + .result = "\x39\x31\x25\x23\xa7\x86\x62\xd5" + "\xbe\x7f\xcb\xcc\x98\xeb\xf5\xa8" + "\x97\x68\x72\x68\xd6\xec\xcc\xc0" + "\xc0\x7b\x25\xe2\x5e\xcf\xe5\x84", + }, { + .klen = 16, + .key = "\x63\x68\x69\x63\x6b\x65\x6e\x20" + "\x74\x65\x72\x69\x79\x61\x6b\x69", + .ilen = 47, + .input = "\x49\x20\x77\x6f\x75\x6c\x64\x20" + "\x6c\x69\x6b\x65\x20\x74\x68\x65" + "\x20\x47\x65\x6e\x65\x72\x61\x6c" + "\x20\x47\x61\x75\x27\x73\x20\x43" + "\x68\x69\x63\x6b\x65\x6e\x2c\x20" + "\x70\x6c\x65\x61\x73\x65\x2c", + .rlen = 47, + .result = "\x97\x68\x72\x68\xd6\xec\xcc\xc0" + "\xc0\x7b\x25\xe2\x5e\xcf\xe5\x84" + "\xb3\xff\xfd\x94\x0c\x16\xa1\x8c" + "\x1b\x55\x49\xd2\xf8\x38\x02\x9e" + "\x39\x31\x25\x23\xa7\x86\x62\xd5" + "\xbe\x7f\xcb\xcc\x98\xeb\xf5", + }, { + .klen = 16, + .key = "\x63\x68\x69\x63\x6b\x65\x6e\x20" + "\x74\x65\x72\x69\x79\x61\x6b\x69", + .ilen = 48, + .input = "\x49\x20\x77\x6f\x75\x6c\x64\x20" + "\x6c\x69\x6b\x65\x20\x74\x68\x65" + "\x20\x47\x65\x6e\x65\x72\x61\x6c" + "\x20\x47\x61\x75\x27\x73\x20\x43" + "\x68\x69\x63\x6b\x65\x6e\x2c\x20" + "\x70\x6c\x65\x61\x73\x65\x2c\x20", + .rlen = 48, + .result = "\x97\x68\x72\x68\xd6\xec\xcc\xc0" + "\xc0\x7b\x25\xe2\x5e\xcf\xe5\x84" + "\x9d\xad\x8b\xbb\x96\xc4\xcd\xc0" + "\x3b\xc1\x03\xe1\xa1\x94\xbb\xd8" + "\x39\x31\x25\x23\xa7\x86\x62\xd5" + "\xbe\x7f\xcb\xcc\x98\xeb\xf5\xa8", + }, { + .klen = 16, + .key = "\x63\x68\x69\x63\x6b\x65\x6e\x20" + "\x74\x65\x72\x69\x79\x61\x6b\x69", + .ilen = 64, + .input = "\x49\x20\x77\x6f\x75\x6c\x64\x20" + "\x6c\x69\x6b\x65\x20\x74\x68\x65" + "\x20\x47\x65\x6e\x65\x72\x61\x6c" + "\x20\x47\x61\x75\x27\x73\x20\x43" + "\x68\x69\x63\x6b\x65\x6e\x2c\x20" + "\x70\x6c\x65\x61\x73\x65\x2c\x20" + "\x61\x6e\x64\x20\x77\x6f\x6e\x74" + "\x6f\x6e\x20\x73\x6f\x75\x70\x2e", + .rlen = 64, + .result = "\x97\x68\x72\x68\xd6\xec\xcc\xc0" + "\xc0\x7b\x25\xe2\x5e\xcf\xe5\x84" + "\x39\x31\x25\x23\xa7\x86\x62\xd5" + "\xbe\x7f\xcb\xcc\x98\xeb\xf5\xa8" + "\x48\x07\xef\xe8\x36\xee\x89\xa5" + "\x26\x73\x0d\xbc\x2f\x7b\xc8\x40" + "\x9d\xad\x8b\xbb\x96\xc4\xcd\xc0" + "\x3b\xc1\x03\xe1\xa1\x94\xbb\xd8", + } +}; + +static struct cipher_testvec cts_mode_dec_tv_template[] = { + { /* from rfc3962 */ + .klen = 16, + .key = "\x63\x68\x69\x63\x6b\x65\x6e\x20" + "\x74\x65\x72\x69\x79\x61\x6b\x69", + .rlen = 17, + 
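/*
 * For reference: the six RFC 3962 encryption vectors above follow CBC
 * ciphertext stealing with the last two blocks swapped (the CS-3 ordering)
 * and an all-zero IV, so the final, possibly partial block is produced by
 * zero-padding the tail and truncating the next-to-last CBC block. A
 * minimal standalone sketch of that two-block case, assuming a
 * hypothetical block_encrypt() helper that performs one raw 16-byte AES
 * encryption with the given key:
 */
#include <stdint.h>
#include <string.h>

#define BLK 16

/* assumed helper: one raw AES block encryption, not defined here */
void block_encrypt(const uint8_t key[BLK], const uint8_t in[BLK],
		   uint8_t out[BLK]);

static void cts_encrypt_two_blocks(const uint8_t key[BLK], const uint8_t *pt,
				   size_t len, uint8_t *ct)
{
	uint8_t c1[BLK], p2[BLK] = { 0 };
	size_t d = len - BLK;		/* bytes in the final, partial block */
	size_t i;

	block_encrypt(key, pt, c1);	/* C1 = E(P1 ^ IV), IV = 0 here */

	memcpy(p2, pt + BLK, d);	/* P2 zero-padded to a full block */
	for (i = 0; i < BLK; i++)
		p2[i] ^= c1[i];
	block_encrypt(key, p2, ct);	/* E(C1 ^ P2*) is emitted first ... */

	memcpy(ct + BLK, c1, d);	/* ... then the first d bytes of C1 */
}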
.result = "\x49\x20\x77\x6f\x75\x6c\x64\x20" + "\x6c\x69\x6b\x65\x20\x74\x68\x65" + "\x20", + .ilen = 17, + .input = "\xc6\x35\x35\x68\xf2\xbf\x8c\xb4" + "\xd8\xa5\x80\x36\x2d\xa7\xff\x7f" + "\x97", + }, { + .klen = 16, + .key = "\x63\x68\x69\x63\x6b\x65\x6e\x20" + "\x74\x65\x72\x69\x79\x61\x6b\x69", + .rlen = 31, + .result = "\x49\x20\x77\x6f\x75\x6c\x64\x20" + "\x6c\x69\x6b\x65\x20\x74\x68\x65" + "\x20\x47\x65\x6e\x65\x72\x61\x6c" + "\x20\x47\x61\x75\x27\x73\x20", + .ilen = 31, + .input = "\xfc\x00\x78\x3e\x0e\xfd\xb2\xc1" + "\xd4\x45\xd4\xc8\xef\xf7\xed\x22" + "\x97\x68\x72\x68\xd6\xec\xcc\xc0" + "\xc0\x7b\x25\xe2\x5e\xcf\xe5", + }, { + .klen = 16, + .key = "\x63\x68\x69\x63\x6b\x65\x6e\x20" + "\x74\x65\x72\x69\x79\x61\x6b\x69", + .rlen = 32, + .result = "\x49\x20\x77\x6f\x75\x6c\x64\x20" + "\x6c\x69\x6b\x65\x20\x74\x68\x65" + "\x20\x47\x65\x6e\x65\x72\x61\x6c" + "\x20\x47\x61\x75\x27\x73\x20\x43", + .ilen = 32, + .input = "\x39\x31\x25\x23\xa7\x86\x62\xd5" + "\xbe\x7f\xcb\xcc\x98\xeb\xf5\xa8" + "\x97\x68\x72\x68\xd6\xec\xcc\xc0" + "\xc0\x7b\x25\xe2\x5e\xcf\xe5\x84", + }, { + .klen = 16, + .key = "\x63\x68\x69\x63\x6b\x65\x6e\x20" + "\x74\x65\x72\x69\x79\x61\x6b\x69", + .rlen = 47, + .result = "\x49\x20\x77\x6f\x75\x6c\x64\x20" + "\x6c\x69\x6b\x65\x20\x74\x68\x65" + "\x20\x47\x65\x6e\x65\x72\x61\x6c" + "\x20\x47\x61\x75\x27\x73\x20\x43" + "\x68\x69\x63\x6b\x65\x6e\x2c\x20" + "\x70\x6c\x65\x61\x73\x65\x2c", + .ilen = 47, + .input = "\x97\x68\x72\x68\xd6\xec\xcc\xc0" + "\xc0\x7b\x25\xe2\x5e\xcf\xe5\x84" + "\xb3\xff\xfd\x94\x0c\x16\xa1\x8c" + "\x1b\x55\x49\xd2\xf8\x38\x02\x9e" + "\x39\x31\x25\x23\xa7\x86\x62\xd5" + "\xbe\x7f\xcb\xcc\x98\xeb\xf5", + }, { + .klen = 16, + .key = "\x63\x68\x69\x63\x6b\x65\x6e\x20" + "\x74\x65\x72\x69\x79\x61\x6b\x69", + .rlen = 48, + .result = "\x49\x20\x77\x6f\x75\x6c\x64\x20" + "\x6c\x69\x6b\x65\x20\x74\x68\x65" + "\x20\x47\x65\x6e\x65\x72\x61\x6c" + "\x20\x47\x61\x75\x27\x73\x20\x43" + "\x68\x69\x63\x6b\x65\x6e\x2c\x20" + "\x70\x6c\x65\x61\x73\x65\x2c\x20", + .ilen = 48, + .input = "\x97\x68\x72\x68\xd6\xec\xcc\xc0" + "\xc0\x7b\x25\xe2\x5e\xcf\xe5\x84" + "\x9d\xad\x8b\xbb\x96\xc4\xcd\xc0" + "\x3b\xc1\x03\xe1\xa1\x94\xbb\xd8" + "\x39\x31\x25\x23\xa7\x86\x62\xd5" + "\xbe\x7f\xcb\xcc\x98\xeb\xf5\xa8", + }, { + .klen = 16, + .key = "\x63\x68\x69\x63\x6b\x65\x6e\x20" + "\x74\x65\x72\x69\x79\x61\x6b\x69", + .rlen = 64, + .result = "\x49\x20\x77\x6f\x75\x6c\x64\x20" + "\x6c\x69\x6b\x65\x20\x74\x68\x65" + "\x20\x47\x65\x6e\x65\x72\x61\x6c" + "\x20\x47\x61\x75\x27\x73\x20\x43" + "\x68\x69\x63\x6b\x65\x6e\x2c\x20" + "\x70\x6c\x65\x61\x73\x65\x2c\x20" + "\x61\x6e\x64\x20\x77\x6f\x6e\x74" + "\x6f\x6e\x20\x73\x6f\x75\x70\x2e", + .ilen = 64, + .input = "\x97\x68\x72\x68\xd6\xec\xcc\xc0" + "\xc0\x7b\x25\xe2\x5e\xcf\xe5\x84" + "\x39\x31\x25\x23\xa7\x86\x62\xd5" + "\xbe\x7f\xcb\xcc\x98\xeb\xf5\xa8" + "\x48\x07\xef\xe8\x36\xee\x89\xa5" + "\x26\x73\x0d\xbc\x2f\x7b\xc8\x40" + "\x9d\xad\x8b\xbb\x96\xc4\xcd\xc0" + "\x3b\xc1\x03\xe1\xa1\x94\xbb\xd8", + } +}; + +/* + * Compression stuff. + */ +#define COMP_BUF_SIZE 512 + +struct comp_testvec { + int inlen, outlen; + char input[COMP_BUF_SIZE]; + char output[COMP_BUF_SIZE]; +}; + +struct pcomp_testvec { + void *params; + unsigned int paramsize; + int inlen, outlen; + char input[COMP_BUF_SIZE]; + char output[COMP_BUF_SIZE]; +}; + +/* + * Deflate test vectors (null-terminated strings). + * Params: winbits=-11, Z_DEFAULT_COMPRESSION, MAX_MEM_LEVEL. 
+ */ + +#define DEFLATE_COMP_TEST_VECTORS 2 +#define DEFLATE_DECOMP_TEST_VECTORS 2 + +static struct comp_testvec deflate_comp_tv_template[] = { + { + .inlen = 70, + .outlen = 38, + .input = "Join us now and share the software " + "Join us now and share the software ", + .output = "\xf3\xca\xcf\xcc\x53\x28\x2d\x56" + "\xc8\xcb\x2f\x57\x48\xcc\x4b\x51" + "\x28\xce\x48\x2c\x4a\x55\x28\xc9" + "\x48\x55\x28\xce\x4f\x2b\x29\x07" + "\x71\xbc\x08\x2b\x01\x00", + }, { + .inlen = 191, + .outlen = 122, + .input = "This document describes a compression method based on the DEFLATE" + "compression algorithm. This document defines the application of " + "the DEFLATE algorithm to the IP Payload Compression Protocol.", + .output = "\x5d\x8d\x31\x0e\xc2\x30\x10\x04" + "\xbf\xb2\x2f\xc8\x1f\x10\x04\x09" + "\x89\xc2\x85\x3f\x70\xb1\x2f\xf8" + "\x24\xdb\x67\xd9\x47\xc1\xef\x49" + "\x68\x12\x51\xae\x76\x67\xd6\x27" + "\x19\x88\x1a\xde\x85\xab\x21\xf2" + "\x08\x5d\x16\x1e\x20\x04\x2d\xad" + "\xf3\x18\xa2\x15\x85\x2d\x69\xc4" + "\x42\x83\x23\xb6\x6c\x89\x71\x9b" + "\xef\xcf\x8b\x9f\xcf\x33\xca\x2f" + "\xed\x62\xa9\x4c\x80\xff\x13\xaf" + "\x52\x37\xed\x0e\x52\x6b\x59\x02" + "\xd9\x4e\xe8\x7a\x76\x1d\x02\x98" + "\xfe\x8a\x87\x83\xa3\x4f\x56\x8a" + "\xb8\x9e\x8e\x5c\x57\xd3\xa0\x79" + "\xfa\x02", + }, +}; + +static struct comp_testvec deflate_decomp_tv_template[] = { + { + .inlen = 122, + .outlen = 191, + .input = "\x5d\x8d\x31\x0e\xc2\x30\x10\x04" + "\xbf\xb2\x2f\xc8\x1f\x10\x04\x09" + "\x89\xc2\x85\x3f\x70\xb1\x2f\xf8" + "\x24\xdb\x67\xd9\x47\xc1\xef\x49" + "\x68\x12\x51\xae\x76\x67\xd6\x27" + "\x19\x88\x1a\xde\x85\xab\x21\xf2" + "\x08\x5d\x16\x1e\x20\x04\x2d\xad" + "\xf3\x18\xa2\x15\x85\x2d\x69\xc4" + "\x42\x83\x23\xb6\x6c\x89\x71\x9b" + "\xef\xcf\x8b\x9f\xcf\x33\xca\x2f" + "\xed\x62\xa9\x4c\x80\xff\x13\xaf" + "\x52\x37\xed\x0e\x52\x6b\x59\x02" + "\xd9\x4e\xe8\x7a\x76\x1d\x02\x98" + "\xfe\x8a\x87\x83\xa3\x4f\x56\x8a" + "\xb8\x9e\x8e\x5c\x57\xd3\xa0\x79" + "\xfa\x02", + .output = "This document describes a compression method based on the DEFLATE" + "compression algorithm. 
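/*
 * The deflate vectors above describe a raw DEFLATE stream: the negative
 * windowBits value means there is no zlib header or checksum trailer, at
 * the default compression level. For reference, a minimal userspace
 * sketch that sets up the same stream parameters with the ordinary zlib
 * library (error handling trimmed); exact compressed bytes can differ
 * between deflate implementations, so only decompression is a strict
 * round-trip check against these vectors.
 */
#include <string.h>
#include <zlib.h>

static int raw_deflate(const unsigned char *src, unsigned int slen,
		       unsigned char *dst, unsigned long *dlen)
{
	z_stream strm;
	int ret;

	memset(&strm, 0, sizeof(strm));
	ret = deflateInit2(&strm, Z_DEFAULT_COMPRESSION, Z_DEFLATED,
			   -11, MAX_MEM_LEVEL, Z_DEFAULT_STRATEGY);
	if (ret != Z_OK)
		return ret;

	strm.next_in = (unsigned char *)src;
	strm.avail_in = slen;
	strm.next_out = dst;
	strm.avail_out = *dlen;

	ret = deflate(&strm, Z_FINISH);	/* single shot over the whole input */
	*dlen = strm.total_out;
	deflateEnd(&strm);

	return ret == Z_STREAM_END ? Z_OK : ret;
}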
This document defines the application of " + "the DEFLATE algorithm to the IP Payload Compression Protocol.", + }, { + .inlen = 38, + .outlen = 70, + .input = "\xf3\xca\xcf\xcc\x53\x28\x2d\x56" + "\xc8\xcb\x2f\x57\x48\xcc\x4b\x51" + "\x28\xce\x48\x2c\x4a\x55\x28\xc9" + "\x48\x55\x28\xce\x4f\x2b\x29\x07" + "\x71\xbc\x08\x2b\x01\x00", + .output = "Join us now and share the software " + "Join us now and share the software ", + }, +}; + +#define ZLIB_COMP_TEST_VECTORS 2 +#define ZLIB_DECOMP_TEST_VECTORS 2 + +static const struct { + struct nlattr nla; + int val; +} deflate_comp_params[] = { + { + .nla = { + .nla_len = NLA_HDRLEN + sizeof(int), + .nla_type = ZLIB_COMP_LEVEL, + }, + .val = Z_DEFAULT_COMPRESSION, + }, { + .nla = { + .nla_len = NLA_HDRLEN + sizeof(int), + .nla_type = ZLIB_COMP_METHOD, + }, + .val = Z_DEFLATED, + }, { + .nla = { + .nla_len = NLA_HDRLEN + sizeof(int), + .nla_type = ZLIB_COMP_WINDOWBITS, + }, + .val = -11, + }, { + .nla = { + .nla_len = NLA_HDRLEN + sizeof(int), + .nla_type = ZLIB_COMP_MEMLEVEL, + }, + .val = MAX_MEM_LEVEL, + }, { + .nla = { + .nla_len = NLA_HDRLEN + sizeof(int), + .nla_type = ZLIB_COMP_STRATEGY, + }, + .val = Z_DEFAULT_STRATEGY, + } +}; + +static const struct { + struct nlattr nla; + int val; +} deflate_decomp_params[] = { + { + .nla = { + .nla_len = NLA_HDRLEN + sizeof(int), + .nla_type = ZLIB_DECOMP_WINDOWBITS, + }, + .val = -11, + } +}; + +static struct pcomp_testvec zlib_comp_tv_template[] = { + { + .params = &deflate_comp_params, + .paramsize = sizeof(deflate_comp_params), + .inlen = 70, + .outlen = 38, + .input = "Join us now and share the software " + "Join us now and share the software ", + .output = "\xf3\xca\xcf\xcc\x53\x28\x2d\x56" + "\xc8\xcb\x2f\x57\x48\xcc\x4b\x51" + "\x28\xce\x48\x2c\x4a\x55\x28\xc9" + "\x48\x55\x28\xce\x4f\x2b\x29\x07" + "\x71\xbc\x08\x2b\x01\x00", + }, { + .params = &deflate_comp_params, + .paramsize = sizeof(deflate_comp_params), + .inlen = 191, + .outlen = 122, + .input = "This document describes a compression method based on the DEFLATE" + "compression algorithm. 
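/*
 * The pcomp parameter blobs above are packed as netlink attributes: each
 * entry is an nlattr header followed by a single int payload, for example
 * ZLIB_COMP_WINDOWBITS with the value -11. A minimal sketch of how such a
 * blob can be walked with the kernel's netlink helpers, assuming the
 * caller passes the params/paramsize pair from a pcomp_testvec:
 */
#include <linux/errno.h>
#include <net/netlink.h>

/* Return the int payload of the first attribute of the wanted type. */
static int sketch_get_int_param(const void *params, unsigned int paramsize,
				int wanted_type, int *val)
{
	const struct nlattr *nla;
	int rem;

	nla_for_each_attr(nla, params, paramsize, rem) {
		if (nla_type(nla) != wanted_type)
			continue;
		if (nla_len(nla) < sizeof(int))
			return -EINVAL;
		*val = *(const int *)nla_data(nla);
		return 0;
	}
	return -ENOENT;
}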
This document defines the application of " + "the DEFLATE algorithm to the IP Payload Compression Protocol.", + .output = "\x5d\x8d\x31\x0e\xc2\x30\x10\x04" + "\xbf\xb2\x2f\xc8\x1f\x10\x04\x09" + "\x89\xc2\x85\x3f\x70\xb1\x2f\xf8" + "\x24\xdb\x67\xd9\x47\xc1\xef\x49" + "\x68\x12\x51\xae\x76\x67\xd6\x27" + "\x19\x88\x1a\xde\x85\xab\x21\xf2" + "\x08\x5d\x16\x1e\x20\x04\x2d\xad" + "\xf3\x18\xa2\x15\x85\x2d\x69\xc4" + "\x42\x83\x23\xb6\x6c\x89\x71\x9b" + "\xef\xcf\x8b\x9f\xcf\x33\xca\x2f" + "\xed\x62\xa9\x4c\x80\xff\x13\xaf" + "\x52\x37\xed\x0e\x52\x6b\x59\x02" + "\xd9\x4e\xe8\x7a\x76\x1d\x02\x98" + "\xfe\x8a\x87\x83\xa3\x4f\x56\x8a" + "\xb8\x9e\x8e\x5c\x57\xd3\xa0\x79" + "\xfa\x02", + }, +}; + +static struct pcomp_testvec zlib_decomp_tv_template[] = { + { + .params = &deflate_decomp_params, + .paramsize = sizeof(deflate_decomp_params), + .inlen = 122, + .outlen = 191, + .input = "\x5d\x8d\x31\x0e\xc2\x30\x10\x04" + "\xbf\xb2\x2f\xc8\x1f\x10\x04\x09" + "\x89\xc2\x85\x3f\x70\xb1\x2f\xf8" + "\x24\xdb\x67\xd9\x47\xc1\xef\x49" + "\x68\x12\x51\xae\x76\x67\xd6\x27" + "\x19\x88\x1a\xde\x85\xab\x21\xf2" + "\x08\x5d\x16\x1e\x20\x04\x2d\xad" + "\xf3\x18\xa2\x15\x85\x2d\x69\xc4" + "\x42\x83\x23\xb6\x6c\x89\x71\x9b" + "\xef\xcf\x8b\x9f\xcf\x33\xca\x2f" + "\xed\x62\xa9\x4c\x80\xff\x13\xaf" + "\x52\x37\xed\x0e\x52\x6b\x59\x02" + "\xd9\x4e\xe8\x7a\x76\x1d\x02\x98" + "\xfe\x8a\x87\x83\xa3\x4f\x56\x8a" + "\xb8\x9e\x8e\x5c\x57\xd3\xa0\x79" + "\xfa\x02", + .output = "This document describes a compression method based on the DEFLATE" + "compression algorithm. This document defines the application of " + "the DEFLATE algorithm to the IP Payload Compression Protocol.", + }, { + .params = &deflate_decomp_params, + .paramsize = sizeof(deflate_decomp_params), + .inlen = 38, + .outlen = 70, + .input = "\xf3\xca\xcf\xcc\x53\x28\x2d\x56" + "\xc8\xcb\x2f\x57\x48\xcc\x4b\x51" + "\x28\xce\x48\x2c\x4a\x55\x28\xc9" + "\x48\x55\x28\xce\x4f\x2b\x29\x07" + "\x71\xbc\x08\x2b\x01\x00", + .output = "Join us now and share the software " + "Join us now and share the software ", + }, +}; + +/* + * LZO test vectors (null-terminated strings). + */ +#define LZO_COMP_TEST_VECTORS 2 +#define LZO_DECOMP_TEST_VECTORS 2 + +static struct comp_testvec lzo_comp_tv_template[] = { + { + .inlen = 70, + .outlen = 46, + .input = "Join us now and share the software " + "Join us now and share the software ", + .output = "\x00\x0d\x4a\x6f\x69\x6e\x20\x75" + "\x73\x20\x6e\x6f\x77\x20\x61\x6e" + "\x64\x20\x73\x68\x61\x72\x65\x20" + "\x74\x68\x65\x20\x73\x6f\x66\x74" + "\x77\x70\x01\x01\x4a\x6f\x69\x6e" + "\x3d\x88\x00\x11\x00\x00", + }, { + .inlen = 159, + .outlen = 133, + .input = "This document describes a compression method based on the LZO " + "compression algorithm. 
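/*
 * A minimal in-kernel sketch of a compress/decompress round trip with the
 * lzo1x_* helpers from <linux/lzo.h>, assuming the vectors above are
 * plain LZO1X streams (the LZO variant the test strings mention for
 * UBIFS); buffer sizing follows the usual LZO worst-case bound:
 */
#include <linux/errno.h>
#include <linux/lzo.h>
#include <linux/slab.h>
#include <linux/types.h>

static int sketch_lzo_roundtrip(const u8 *in, size_t in_len,
				u8 *out, size_t out_max)
{
	size_t comp_len = 0, out_len = out_max;
	u8 *comp;
	void *wrkmem;
	int ret = -ENOMEM;

	/* LZO1X worst-case output bound, plus the compressor's scratch area */
	comp = kmalloc(in_len + in_len / 16 + 64 + 3, GFP_KERNEL);
	wrkmem = kmalloc(LZO1X_1_MEM_COMPRESS, GFP_KERNEL);
	if (!comp || !wrkmem)
		goto out;

	ret = lzo1x_1_compress(in, in_len, comp, &comp_len, wrkmem);
	if (ret == LZO_E_OK)
		ret = lzo1x_decompress_safe(comp, comp_len, out, &out_len);
	ret = (ret == LZO_E_OK && out_len == in_len) ? 0 : -EINVAL;
out:
	kfree(wrkmem);
	kfree(comp);
	return ret;
}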
This document defines the application of " + "the LZO algorithm used in UBIFS.", + .output = "\x00\x2b\x54\x68\x69\x73\x20\x64" + "\x6f\x63\x75\x6d\x65\x6e\x74\x20" + "\x64\x65\x73\x63\x72\x69\x62\x65" + "\x73\x20\x61\x20\x63\x6f\x6d\x70" + "\x72\x65\x73\x73\x69\x6f\x6e\x20" + "\x6d\x65\x74\x68\x6f\x64\x20\x62" + "\x61\x73\x65\x64\x20\x6f\x6e\x20" + "\x74\x68\x65\x20\x4c\x5a\x4f\x2b" + "\x8c\x00\x0d\x61\x6c\x67\x6f\x72" + "\x69\x74\x68\x6d\x2e\x20\x20\x54" + "\x68\x69\x73\x2a\x54\x01\x02\x66" + "\x69\x6e\x65\x73\x94\x06\x05\x61" + "\x70\x70\x6c\x69\x63\x61\x74\x76" + "\x0a\x6f\x66\x88\x02\x60\x09\x27" + "\xf0\x00\x0c\x20\x75\x73\x65\x64" + "\x20\x69\x6e\x20\x55\x42\x49\x46" + "\x53\x2e\x11\x00\x00", + }, +}; + +static struct comp_testvec lzo_decomp_tv_template[] = { + { + .inlen = 133, + .outlen = 159, + .input = "\x00\x2b\x54\x68\x69\x73\x20\x64" + "\x6f\x63\x75\x6d\x65\x6e\x74\x20" + "\x64\x65\x73\x63\x72\x69\x62\x65" + "\x73\x20\x61\x20\x63\x6f\x6d\x70" + "\x72\x65\x73\x73\x69\x6f\x6e\x20" + "\x6d\x65\x74\x68\x6f\x64\x20\x62" + "\x61\x73\x65\x64\x20\x6f\x6e\x20" + "\x74\x68\x65\x20\x4c\x5a\x4f\x2b" + "\x8c\x00\x0d\x61\x6c\x67\x6f\x72" + "\x69\x74\x68\x6d\x2e\x20\x20\x54" + "\x68\x69\x73\x2a\x54\x01\x02\x66" + "\x69\x6e\x65\x73\x94\x06\x05\x61" + "\x70\x70\x6c\x69\x63\x61\x74\x76" + "\x0a\x6f\x66\x88\x02\x60\x09\x27" + "\xf0\x00\x0c\x20\x75\x73\x65\x64" + "\x20\x69\x6e\x20\x55\x42\x49\x46" + "\x53\x2e\x11\x00\x00", + .output = "This document describes a compression method based on the LZO " + "compression algorithm. This document defines the application of " + "the LZO algorithm used in UBIFS.", + }, { + .inlen = 46, + .outlen = 70, + .input = "\x00\x0d\x4a\x6f\x69\x6e\x20\x75" + "\x73\x20\x6e\x6f\x77\x20\x61\x6e" + "\x64\x20\x73\x68\x61\x72\x65\x20" + "\x74\x68\x65\x20\x73\x6f\x66\x74" + "\x77\x70\x01\x01\x4a\x6f\x69\x6e" + "\x3d\x88\x00\x11\x00\x00", + .output = "Join us now and share the software " + "Join us now and share the software ", + }, +}; + +/* + * Michael MIC test vectors from IEEE 802.11i + */ +#define MICHAEL_MIC_TEST_VECTORS 6 + +static struct hash_testvec michael_mic_tv_template[] = { + { + .key = "\x00\x00\x00\x00\x00\x00\x00\x00", + .ksize = 8, + .plaintext = zeroed_string, + .psize = 0, + .digest = "\x82\x92\x5c\x1c\xa1\xd1\x30\xb8", + }, + { + .key = "\x82\x92\x5c\x1c\xa1\xd1\x30\xb8", + .ksize = 8, + .plaintext = "M", + .psize = 1, + .digest = "\x43\x47\x21\xca\x40\x63\x9b\x3f", + }, + { + .key = "\x43\x47\x21\xca\x40\x63\x9b\x3f", + .ksize = 8, + .plaintext = "Mi", + .psize = 2, + .digest = "\xe8\xf9\xbe\xca\xe9\x7e\x5d\x29", + }, + { + .key = "\xe8\xf9\xbe\xca\xe9\x7e\x5d\x29", + .ksize = 8, + .plaintext = "Mic", + .psize = 3, + .digest = "\x90\x03\x8f\xc6\xcf\x13\xc1\xdb", + }, + { + .key = "\x90\x03\x8f\xc6\xcf\x13\xc1\xdb", + .ksize = 8, + .plaintext = "Mich", + .psize = 4, + .digest = "\xd5\x5e\x10\x05\x10\x12\x89\x86", + }, + { + .key = "\xd5\x5e\x10\x05\x10\x12\x89\x86", + .ksize = 8, + .plaintext = "Michael", + .psize = 7, + .digest = "\x0a\x94\x2b\x12\x4e\xca\xa5\x46", + } +}; + +/* + * CRC32C test vectors + */ +#define CRC32C_TEST_VECTORS 14 + +static struct hash_testvec crc32c_tv_template[] = { + { + .psize = 0, + .digest = "\x00\x00\x00\x00", + }, + { + .key = "\x87\xa9\xcb\xed", + .ksize = 4, + .psize = 0, + .digest = "\x78\x56\x34\x12", + }, + { + .key = "\xff\xff\xff\xff", + .ksize = 4, + .plaintext = "\x01\x02\x03\x04\x05\x06\x07\x08" + "\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18" + "\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20" + 
"\x21\x22\x23\x24\x25\x26\x27\x28", + .psize = 40, + .digest = "\x7f\x15\x2c\x0e", + }, + { + .key = "\xff\xff\xff\xff", + .ksize = 4, + .plaintext = "\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30" + "\x31\x32\x33\x34\x35\x36\x37\x38" + "\x39\x3a\x3b\x3c\x3d\x3e\x3f\x40" + "\x41\x42\x43\x44\x45\x46\x47\x48" + "\x49\x4a\x4b\x4c\x4d\x4e\x4f\x50", + .psize = 40, + .digest = "\xf6\xeb\x80\xe9", + }, + { + .key = "\xff\xff\xff\xff", + .ksize = 4, + .plaintext = "\x51\x52\x53\x54\x55\x56\x57\x58" + "\x59\x5a\x5b\x5c\x5d\x5e\x5f\x60" + "\x61\x62\x63\x64\x65\x66\x67\x68" + "\x69\x6a\x6b\x6c\x6d\x6e\x6f\x70" + "\x71\x72\x73\x74\x75\x76\x77\x78", + .psize = 40, + .digest = "\xed\xbd\x74\xde", + }, + { + .key = "\xff\xff\xff\xff", + .ksize = 4, + .plaintext = "\x79\x7a\x7b\x7c\x7d\x7e\x7f\x80" + "\x81\x82\x83\x84\x85\x86\x87\x88" + "\x89\x8a\x8b\x8c\x8d\x8e\x8f\x90" + "\x91\x92\x93\x94\x95\x96\x97\x98" + "\x99\x9a\x9b\x9c\x9d\x9e\x9f\xa0", + .psize = 40, + .digest = "\x62\xc8\x79\xd5", + }, + { + .key = "\xff\xff\xff\xff", + .ksize = 4, + .plaintext = "\xa1\xa2\xa3\xa4\xa5\xa6\xa7\xa8" + "\xa9\xaa\xab\xac\xad\xae\xaf\xb0" + "\xb1\xb2\xb3\xb4\xb5\xb6\xb7\xb8" + "\xb9\xba\xbb\xbc\xbd\xbe\xbf\xc0" + "\xc1\xc2\xc3\xc4\xc5\xc6\xc7\xc8", + .psize = 40, + .digest = "\xd0\x9a\x97\xba", + }, + { + .key = "\xff\xff\xff\xff", + .ksize = 4, + .plaintext = "\xc9\xca\xcb\xcc\xcd\xce\xcf\xd0" + "\xd1\xd2\xd3\xd4\xd5\xd6\xd7\xd8" + "\xd9\xda\xdb\xdc\xdd\xde\xdf\xe0" + "\xe1\xe2\xe3\xe4\xe5\xe6\xe7\xe8" + "\xe9\xea\xeb\xec\xed\xee\xef\xf0", + .psize = 40, + .digest = "\x13\xd9\x29\x2b", + }, + { + .key = "\x80\xea\xd3\xf1", + .ksize = 4, + .plaintext = "\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30" + "\x31\x32\x33\x34\x35\x36\x37\x38" + "\x39\x3a\x3b\x3c\x3d\x3e\x3f\x40" + "\x41\x42\x43\x44\x45\x46\x47\x48" + "\x49\x4a\x4b\x4c\x4d\x4e\x4f\x50", + .psize = 40, + .digest = "\x0c\xb5\xe2\xa2", + }, + { + .key = "\xf3\x4a\x1d\x5d", + .ksize = 4, + .plaintext = "\x51\x52\x53\x54\x55\x56\x57\x58" + "\x59\x5a\x5b\x5c\x5d\x5e\x5f\x60" + "\x61\x62\x63\x64\x65\x66\x67\x68" + "\x69\x6a\x6b\x6c\x6d\x6e\x6f\x70" + "\x71\x72\x73\x74\x75\x76\x77\x78", + .psize = 40, + .digest = "\xd1\x7f\xfb\xa6", + }, + { + .key = "\x2e\x80\x04\x59", + .ksize = 4, + .plaintext = "\x79\x7a\x7b\x7c\x7d\x7e\x7f\x80" + "\x81\x82\x83\x84\x85\x86\x87\x88" + "\x89\x8a\x8b\x8c\x8d\x8e\x8f\x90" + "\x91\x92\x93\x94\x95\x96\x97\x98" + "\x99\x9a\x9b\x9c\x9d\x9e\x9f\xa0", + .psize = 40, + .digest = "\x59\x33\xe6\x7a", + }, + { + .key = "\xa6\xcc\x19\x85", + .ksize = 4, + .plaintext = "\xa1\xa2\xa3\xa4\xa5\xa6\xa7\xa8" + "\xa9\xaa\xab\xac\xad\xae\xaf\xb0" + "\xb1\xb2\xb3\xb4\xb5\xb6\xb7\xb8" + "\xb9\xba\xbb\xbc\xbd\xbe\xbf\xc0" + "\xc1\xc2\xc3\xc4\xc5\xc6\xc7\xc8", + .psize = 40, + .digest = "\xbe\x03\x01\xd2", + }, + { + .key = "\x41\xfc\xfe\x2d", + .ksize = 4, + .plaintext = "\xc9\xca\xcb\xcc\xcd\xce\xcf\xd0" + "\xd1\xd2\xd3\xd4\xd5\xd6\xd7\xd8" + "\xd9\xda\xdb\xdc\xdd\xde\xdf\xe0" + "\xe1\xe2\xe3\xe4\xe5\xe6\xe7\xe8" + "\xe9\xea\xeb\xec\xed\xee\xef\xf0", + .psize = 40, + .digest = "\x75\xd3\xc5\x24", + }, + { + .key = "\xff\xff\xff\xff", + .ksize = 4, + .plaintext = "\x01\x02\x03\x04\x05\x06\x07\x08" + "\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18" + "\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20" + "\x21\x22\x23\x24\x25\x26\x27\x28" + "\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30" + "\x31\x32\x33\x34\x35\x36\x37\x38" + "\x39\x3a\x3b\x3c\x3d\x3e\x3f\x40" + "\x41\x42\x43\x44\x45\x46\x47\x48" + "\x49\x4a\x4b\x4c\x4d\x4e\x4f\x50" + "\x51\x52\x53\x54\x55\x56\x57\x58" + 
"\x59\x5a\x5b\x5c\x5d\x5e\x5f\x60" + "\x61\x62\x63\x64\x65\x66\x67\x68" + "\x69\x6a\x6b\x6c\x6d\x6e\x6f\x70" + "\x71\x72\x73\x74\x75\x76\x77\x78" + "\x79\x7a\x7b\x7c\x7d\x7e\x7f\x80" + "\x81\x82\x83\x84\x85\x86\x87\x88" + "\x89\x8a\x8b\x8c\x8d\x8e\x8f\x90" + "\x91\x92\x93\x94\x95\x96\x97\x98" + "\x99\x9a\x9b\x9c\x9d\x9e\x9f\xa0" + "\xa1\xa2\xa3\xa4\xa5\xa6\xa7\xa8" + "\xa9\xaa\xab\xac\xad\xae\xaf\xb0" + "\xb1\xb2\xb3\xb4\xb5\xb6\xb7\xb8" + "\xb9\xba\xbb\xbc\xbd\xbe\xbf\xc0" + "\xc1\xc2\xc3\xc4\xc5\xc6\xc7\xc8" + "\xc9\xca\xcb\xcc\xcd\xce\xcf\xd0" + "\xd1\xd2\xd3\xd4\xd5\xd6\xd7\xd8" + "\xd9\xda\xdb\xdc\xdd\xde\xdf\xe0" + "\xe1\xe2\xe3\xe4\xe5\xe6\xe7\xe8" + "\xe9\xea\xeb\xec\xed\xee\xef\xf0", + .psize = 240, + .digest = "\x75\xd3\xc5\x24", + .np = 2, + .tap = { 31, 209 } + }, +}; + +#endif /* _CRYPTO_TESTMGR_H */ diff --git a/crypto/tgr192.c b/crypto/tgr192.c new file mode 100644 index 00000000..cbca4f20 --- /dev/null +++ b/crypto/tgr192.c @@ -0,0 +1,712 @@ +/* + * Cryptographic API. + * + * Tiger hashing Algorithm + * + * Copyright (C) 1998 Free Software Foundation, Inc. + * + * The Tiger algorithm was developed by Ross Anderson and Eli Biham. + * It was optimized for 64-bit processors while still delievering + * decent performance on 32 and 16-bit processors. + * + * This version is derived from the GnuPG implementation and the + * Tiger-Perl interface written by Rafael Sevilla + * + * Adapted for Linux Kernel Crypto by Aaron Grothe + * ajgrothe@yahoo.com, February 22, 2005 + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 2 of the License, or + * (at your option) any later version. + * + */ +#include <crypto/internal/hash.h> +#include <linux/init.h> +#include <linux/module.h> +#include <linux/mm.h> +#include <asm/byteorder.h> +#include <linux/types.h> + +#define TGR192_DIGEST_SIZE 24 +#define TGR160_DIGEST_SIZE 20 +#define TGR128_DIGEST_SIZE 16 + +#define TGR192_BLOCK_SIZE 64 + +struct tgr192_ctx { + u64 a, b, c; + u8 hash[64]; + int count; + u32 nblocks; +}; + +static const u64 sbox1[256] = { + 0x02aab17cf7e90c5eULL, 0xac424b03e243a8ecULL, 0x72cd5be30dd5fcd3ULL, + 0x6d019b93f6f97f3aULL, 0xcd9978ffd21f9193ULL, 0x7573a1c9708029e2ULL, + 0xb164326b922a83c3ULL, 0x46883eee04915870ULL, 0xeaace3057103ece6ULL, + 0xc54169b808a3535cULL, 0x4ce754918ddec47cULL, 0x0aa2f4dfdc0df40cULL, + 0x10b76f18a74dbefaULL, 0xc6ccb6235ad1ab6aULL, 0x13726121572fe2ffULL, + 0x1a488c6f199d921eULL, 0x4bc9f9f4da0007caULL, 0x26f5e6f6e85241c7ULL, + 0x859079dbea5947b6ULL, 0x4f1885c5c99e8c92ULL, 0xd78e761ea96f864bULL, + 0x8e36428c52b5c17dULL, 0x69cf6827373063c1ULL, 0xb607c93d9bb4c56eULL, + 0x7d820e760e76b5eaULL, 0x645c9cc6f07fdc42ULL, 0xbf38a078243342e0ULL, + 0x5f6b343c9d2e7d04ULL, 0xf2c28aeb600b0ec6ULL, 0x6c0ed85f7254bcacULL, + 0x71592281a4db4fe5ULL, 0x1967fa69ce0fed9fULL, 0xfd5293f8b96545dbULL, + 0xc879e9d7f2a7600bULL, 0x860248920193194eULL, 0xa4f9533b2d9cc0b3ULL, + 0x9053836c15957613ULL, 0xdb6dcf8afc357bf1ULL, 0x18beea7a7a370f57ULL, + 0x037117ca50b99066ULL, 0x6ab30a9774424a35ULL, 0xf4e92f02e325249bULL, + 0x7739db07061ccae1ULL, 0xd8f3b49ceca42a05ULL, 0xbd56be3f51382f73ULL, + 0x45faed5843b0bb28ULL, 0x1c813d5c11bf1f83ULL, 0x8af0e4b6d75fa169ULL, + 0x33ee18a487ad9999ULL, 0x3c26e8eab1c94410ULL, 0xb510102bc0a822f9ULL, + 0x141eef310ce6123bULL, 0xfc65b90059ddb154ULL, 0xe0158640c5e0e607ULL, + 0x884e079826c3a3cfULL, 0x930d0d9523c535fdULL, 0x35638d754e9a2b00ULL, + 0x4085fccf40469dd5ULL, 
0xc4b17ad28be23a4cULL, 0xcab2f0fc6a3e6a2eULL, + 0x2860971a6b943fcdULL, 0x3dde6ee212e30446ULL, 0x6222f32ae01765aeULL, + 0x5d550bb5478308feULL, 0xa9efa98da0eda22aULL, 0xc351a71686c40da7ULL, + 0x1105586d9c867c84ULL, 0xdcffee85fda22853ULL, 0xccfbd0262c5eef76ULL, + 0xbaf294cb8990d201ULL, 0xe69464f52afad975ULL, 0x94b013afdf133e14ULL, + 0x06a7d1a32823c958ULL, 0x6f95fe5130f61119ULL, 0xd92ab34e462c06c0ULL, + 0xed7bde33887c71d2ULL, 0x79746d6e6518393eULL, 0x5ba419385d713329ULL, + 0x7c1ba6b948a97564ULL, 0x31987c197bfdac67ULL, 0xde6c23c44b053d02ULL, + 0x581c49fed002d64dULL, 0xdd474d6338261571ULL, 0xaa4546c3e473d062ULL, + 0x928fce349455f860ULL, 0x48161bbacaab94d9ULL, 0x63912430770e6f68ULL, + 0x6ec8a5e602c6641cULL, 0x87282515337ddd2bULL, 0x2cda6b42034b701bULL, + 0xb03d37c181cb096dULL, 0xe108438266c71c6fULL, 0x2b3180c7eb51b255ULL, + 0xdf92b82f96c08bbcULL, 0x5c68c8c0a632f3baULL, 0x5504cc861c3d0556ULL, + 0xabbfa4e55fb26b8fULL, 0x41848b0ab3baceb4ULL, 0xb334a273aa445d32ULL, + 0xbca696f0a85ad881ULL, 0x24f6ec65b528d56cULL, 0x0ce1512e90f4524aULL, + 0x4e9dd79d5506d35aULL, 0x258905fac6ce9779ULL, 0x2019295b3e109b33ULL, + 0xf8a9478b73a054ccULL, 0x2924f2f934417eb0ULL, 0x3993357d536d1bc4ULL, + 0x38a81ac21db6ff8bULL, 0x47c4fbf17d6016bfULL, 0x1e0faadd7667e3f5ULL, + 0x7abcff62938beb96ULL, 0xa78dad948fc179c9ULL, 0x8f1f98b72911e50dULL, + 0x61e48eae27121a91ULL, 0x4d62f7ad31859808ULL, 0xeceba345ef5ceaebULL, + 0xf5ceb25ebc9684ceULL, 0xf633e20cb7f76221ULL, 0xa32cdf06ab8293e4ULL, + 0x985a202ca5ee2ca4ULL, 0xcf0b8447cc8a8fb1ULL, 0x9f765244979859a3ULL, + 0xa8d516b1a1240017ULL, 0x0bd7ba3ebb5dc726ULL, 0xe54bca55b86adb39ULL, + 0x1d7a3afd6c478063ULL, 0x519ec608e7669eddULL, 0x0e5715a2d149aa23ULL, + 0x177d4571848ff194ULL, 0xeeb55f3241014c22ULL, 0x0f5e5ca13a6e2ec2ULL, + 0x8029927b75f5c361ULL, 0xad139fabc3d6e436ULL, 0x0d5df1a94ccf402fULL, + 0x3e8bd948bea5dfc8ULL, 0xa5a0d357bd3ff77eULL, 0xa2d12e251f74f645ULL, + 0x66fd9e525e81a082ULL, 0x2e0c90ce7f687a49ULL, 0xc2e8bcbeba973bc5ULL, + 0x000001bce509745fULL, 0x423777bbe6dab3d6ULL, 0xd1661c7eaef06eb5ULL, + 0xa1781f354daacfd8ULL, 0x2d11284a2b16affcULL, 0xf1fc4f67fa891d1fULL, + 0x73ecc25dcb920adaULL, 0xae610c22c2a12651ULL, 0x96e0a810d356b78aULL, + 0x5a9a381f2fe7870fULL, 0xd5ad62ede94e5530ULL, 0xd225e5e8368d1427ULL, + 0x65977b70c7af4631ULL, 0x99f889b2de39d74fULL, 0x233f30bf54e1d143ULL, + 0x9a9675d3d9a63c97ULL, 0x5470554ff334f9a8ULL, 0x166acb744a4f5688ULL, + 0x70c74caab2e4aeadULL, 0xf0d091646f294d12ULL, 0x57b82a89684031d1ULL, + 0xefd95a5a61be0b6bULL, 0x2fbd12e969f2f29aULL, 0x9bd37013feff9fe8ULL, + 0x3f9b0404d6085a06ULL, 0x4940c1f3166cfe15ULL, 0x09542c4dcdf3defbULL, + 0xb4c5218385cd5ce3ULL, 0xc935b7dc4462a641ULL, 0x3417f8a68ed3b63fULL, + 0xb80959295b215b40ULL, 0xf99cdaef3b8c8572ULL, 0x018c0614f8fcb95dULL, + 0x1b14accd1a3acdf3ULL, 0x84d471f200bb732dULL, 0xc1a3110e95e8da16ULL, + 0x430a7220bf1a82b8ULL, 0xb77e090d39df210eULL, 0x5ef4bd9f3cd05e9dULL, + 0x9d4ff6da7e57a444ULL, 0xda1d60e183d4a5f8ULL, 0xb287c38417998e47ULL, + 0xfe3edc121bb31886ULL, 0xc7fe3ccc980ccbefULL, 0xe46fb590189bfd03ULL, + 0x3732fd469a4c57dcULL, 0x7ef700a07cf1ad65ULL, 0x59c64468a31d8859ULL, + 0x762fb0b4d45b61f6ULL, 0x155baed099047718ULL, 0x68755e4c3d50baa6ULL, + 0xe9214e7f22d8b4dfULL, 0x2addbf532eac95f4ULL, 0x32ae3909b4bd0109ULL, + 0x834df537b08e3450ULL, 0xfa209da84220728dULL, 0x9e691d9b9efe23f7ULL, + 0x0446d288c4ae8d7fULL, 0x7b4cc524e169785bULL, 0x21d87f0135ca1385ULL, + 0xcebb400f137b8aa5ULL, 0x272e2b66580796beULL, 0x3612264125c2b0deULL, + 0x057702bdad1efbb2ULL, 0xd4babb8eacf84be9ULL, 0x91583139641bc67bULL, + 0x8bdc2de08036e024ULL, 
0x603c8156f49f68edULL, 0xf7d236f7dbef5111ULL, + 0x9727c4598ad21e80ULL, 0xa08a0896670a5fd7ULL, 0xcb4a8f4309eba9cbULL, + 0x81af564b0f7036a1ULL, 0xc0b99aa778199abdULL, 0x959f1ec83fc8e952ULL, + 0x8c505077794a81b9ULL, 0x3acaaf8f056338f0ULL, 0x07b43f50627a6778ULL, + 0x4a44ab49f5eccc77ULL, 0x3bc3d6e4b679ee98ULL, 0x9cc0d4d1cf14108cULL, + 0x4406c00b206bc8a0ULL, 0x82a18854c8d72d89ULL, 0x67e366b35c3c432cULL, + 0xb923dd61102b37f2ULL, 0x56ab2779d884271dULL, 0xbe83e1b0ff1525afULL, + 0xfb7c65d4217e49a9ULL, 0x6bdbe0e76d48e7d4ULL, 0x08df828745d9179eULL, + 0x22ea6a9add53bd34ULL, 0xe36e141c5622200aULL, 0x7f805d1b8cb750eeULL, + 0xafe5c7a59f58e837ULL, 0xe27f996a4fb1c23cULL, 0xd3867dfb0775f0d0ULL, + 0xd0e673de6e88891aULL, 0x123aeb9eafb86c25ULL, 0x30f1d5d5c145b895ULL, + 0xbb434a2dee7269e7ULL, 0x78cb67ecf931fa38ULL, 0xf33b0372323bbf9cULL, + 0x52d66336fb279c74ULL, 0x505f33ac0afb4eaaULL, 0xe8a5cd99a2cce187ULL, + 0x534974801e2d30bbULL, 0x8d2d5711d5876d90ULL, 0x1f1a412891bc038eULL, + 0xd6e2e71d82e56648ULL, 0x74036c3a497732b7ULL, 0x89b67ed96361f5abULL, + 0xffed95d8f1ea02a2ULL, 0xe72b3bd61464d43dULL, 0xa6300f170bdc4820ULL, + 0xebc18760ed78a77aULL +}; + +static const u64 sbox2[256] = { + 0xe6a6be5a05a12138ULL, 0xb5a122a5b4f87c98ULL, 0x563c6089140b6990ULL, + 0x4c46cb2e391f5dd5ULL, 0xd932addbc9b79434ULL, 0x08ea70e42015aff5ULL, + 0xd765a6673e478cf1ULL, 0xc4fb757eab278d99ULL, 0xdf11c6862d6e0692ULL, + 0xddeb84f10d7f3b16ULL, 0x6f2ef604a665ea04ULL, 0x4a8e0f0ff0e0dfb3ULL, + 0xa5edeef83dbcba51ULL, 0xfc4f0a2a0ea4371eULL, 0xe83e1da85cb38429ULL, + 0xdc8ff882ba1b1ce2ULL, 0xcd45505e8353e80dULL, 0x18d19a00d4db0717ULL, + 0x34a0cfeda5f38101ULL, 0x0be77e518887caf2ULL, 0x1e341438b3c45136ULL, + 0xe05797f49089ccf9ULL, 0xffd23f9df2591d14ULL, 0x543dda228595c5cdULL, + 0x661f81fd99052a33ULL, 0x8736e641db0f7b76ULL, 0x15227725418e5307ULL, + 0xe25f7f46162eb2faULL, 0x48a8b2126c13d9feULL, 0xafdc541792e76eeaULL, + 0x03d912bfc6d1898fULL, 0x31b1aafa1b83f51bULL, 0xf1ac2796e42ab7d9ULL, + 0x40a3a7d7fcd2ebacULL, 0x1056136d0afbbcc5ULL, 0x7889e1dd9a6d0c85ULL, + 0xd33525782a7974aaULL, 0xa7e25d09078ac09bULL, 0xbd4138b3eac6edd0ULL, + 0x920abfbe71eb9e70ULL, 0xa2a5d0f54fc2625cULL, 0xc054e36b0b1290a3ULL, + 0xf6dd59ff62fe932bULL, 0x3537354511a8ac7dULL, 0xca845e9172fadcd4ULL, + 0x84f82b60329d20dcULL, 0x79c62ce1cd672f18ULL, 0x8b09a2add124642cULL, + 0xd0c1e96a19d9e726ULL, 0x5a786a9b4ba9500cULL, 0x0e020336634c43f3ULL, + 0xc17b474aeb66d822ULL, 0x6a731ae3ec9baac2ULL, 0x8226667ae0840258ULL, + 0x67d4567691caeca5ULL, 0x1d94155c4875adb5ULL, 0x6d00fd985b813fdfULL, + 0x51286efcb774cd06ULL, 0x5e8834471fa744afULL, 0xf72ca0aee761ae2eULL, + 0xbe40e4cdaee8e09aULL, 0xe9970bbb5118f665ULL, 0x726e4beb33df1964ULL, + 0x703b000729199762ULL, 0x4631d816f5ef30a7ULL, 0xb880b5b51504a6beULL, + 0x641793c37ed84b6cULL, 0x7b21ed77f6e97d96ULL, 0x776306312ef96b73ULL, + 0xae528948e86ff3f4ULL, 0x53dbd7f286a3f8f8ULL, 0x16cadce74cfc1063ULL, + 0x005c19bdfa52c6ddULL, 0x68868f5d64d46ad3ULL, 0x3a9d512ccf1e186aULL, + 0x367e62c2385660aeULL, 0xe359e7ea77dcb1d7ULL, 0x526c0773749abe6eULL, + 0x735ae5f9d09f734bULL, 0x493fc7cc8a558ba8ULL, 0xb0b9c1533041ab45ULL, + 0x321958ba470a59bdULL, 0x852db00b5f46c393ULL, 0x91209b2bd336b0e5ULL, + 0x6e604f7d659ef19fULL, 0xb99a8ae2782ccb24ULL, 0xccf52ab6c814c4c7ULL, + 0x4727d9afbe11727bULL, 0x7e950d0c0121b34dULL, 0x756f435670ad471fULL, + 0xf5add442615a6849ULL, 0x4e87e09980b9957aULL, 0x2acfa1df50aee355ULL, + 0xd898263afd2fd556ULL, 0xc8f4924dd80c8fd6ULL, 0xcf99ca3d754a173aULL, + 0xfe477bacaf91bf3cULL, 0xed5371f6d690c12dULL, 0x831a5c285e687094ULL, + 0xc5d3c90a3708a0a4ULL, 
0x0f7f903717d06580ULL, 0x19f9bb13b8fdf27fULL, + 0xb1bd6f1b4d502843ULL, 0x1c761ba38fff4012ULL, 0x0d1530c4e2e21f3bULL, + 0x8943ce69a7372c8aULL, 0xe5184e11feb5ce66ULL, 0x618bdb80bd736621ULL, + 0x7d29bad68b574d0bULL, 0x81bb613e25e6fe5bULL, 0x071c9c10bc07913fULL, + 0xc7beeb7909ac2d97ULL, 0xc3e58d353bc5d757ULL, 0xeb017892f38f61e8ULL, + 0xd4effb9c9b1cc21aULL, 0x99727d26f494f7abULL, 0xa3e063a2956b3e03ULL, + 0x9d4a8b9a4aa09c30ULL, 0x3f6ab7d500090fb4ULL, 0x9cc0f2a057268ac0ULL, + 0x3dee9d2dedbf42d1ULL, 0x330f49c87960a972ULL, 0xc6b2720287421b41ULL, + 0x0ac59ec07c00369cULL, 0xef4eac49cb353425ULL, 0xf450244eef0129d8ULL, + 0x8acc46e5caf4deb6ULL, 0x2ffeab63989263f7ULL, 0x8f7cb9fe5d7a4578ULL, + 0x5bd8f7644e634635ULL, 0x427a7315bf2dc900ULL, 0x17d0c4aa2125261cULL, + 0x3992486c93518e50ULL, 0xb4cbfee0a2d7d4c3ULL, 0x7c75d6202c5ddd8dULL, + 0xdbc295d8e35b6c61ULL, 0x60b369d302032b19ULL, 0xce42685fdce44132ULL, + 0x06f3ddb9ddf65610ULL, 0x8ea4d21db5e148f0ULL, 0x20b0fce62fcd496fULL, + 0x2c1b912358b0ee31ULL, 0xb28317b818f5a308ULL, 0xa89c1e189ca6d2cfULL, + 0x0c6b18576aaadbc8ULL, 0xb65deaa91299fae3ULL, 0xfb2b794b7f1027e7ULL, + 0x04e4317f443b5bebULL, 0x4b852d325939d0a6ULL, 0xd5ae6beefb207ffcULL, + 0x309682b281c7d374ULL, 0xbae309a194c3b475ULL, 0x8cc3f97b13b49f05ULL, + 0x98a9422ff8293967ULL, 0x244b16b01076ff7cULL, 0xf8bf571c663d67eeULL, + 0x1f0d6758eee30da1ULL, 0xc9b611d97adeb9b7ULL, 0xb7afd5887b6c57a2ULL, + 0x6290ae846b984fe1ULL, 0x94df4cdeacc1a5fdULL, 0x058a5bd1c5483affULL, + 0x63166cc142ba3c37ULL, 0x8db8526eb2f76f40ULL, 0xe10880036f0d6d4eULL, + 0x9e0523c9971d311dULL, 0x45ec2824cc7cd691ULL, 0x575b8359e62382c9ULL, + 0xfa9e400dc4889995ULL, 0xd1823ecb45721568ULL, 0xdafd983b8206082fULL, + 0xaa7d29082386a8cbULL, 0x269fcd4403b87588ULL, 0x1b91f5f728bdd1e0ULL, + 0xe4669f39040201f6ULL, 0x7a1d7c218cf04adeULL, 0x65623c29d79ce5ceULL, + 0x2368449096c00bb1ULL, 0xab9bf1879da503baULL, 0xbc23ecb1a458058eULL, + 0x9a58df01bb401eccULL, 0xa070e868a85f143dULL, 0x4ff188307df2239eULL, + 0x14d565b41a641183ULL, 0xee13337452701602ULL, 0x950e3dcf3f285e09ULL, + 0x59930254b9c80953ULL, 0x3bf299408930da6dULL, 0xa955943f53691387ULL, + 0xa15edecaa9cb8784ULL, 0x29142127352be9a0ULL, 0x76f0371fff4e7afbULL, + 0x0239f450274f2228ULL, 0xbb073af01d5e868bULL, 0xbfc80571c10e96c1ULL, + 0xd267088568222e23ULL, 0x9671a3d48e80b5b0ULL, 0x55b5d38ae193bb81ULL, + 0x693ae2d0a18b04b8ULL, 0x5c48b4ecadd5335fULL, 0xfd743b194916a1caULL, + 0x2577018134be98c4ULL, 0xe77987e83c54a4adULL, 0x28e11014da33e1b9ULL, + 0x270cc59e226aa213ULL, 0x71495f756d1a5f60ULL, 0x9be853fb60afef77ULL, + 0xadc786a7f7443dbfULL, 0x0904456173b29a82ULL, 0x58bc7a66c232bd5eULL, + 0xf306558c673ac8b2ULL, 0x41f639c6b6c9772aULL, 0x216defe99fda35daULL, + 0x11640cc71c7be615ULL, 0x93c43694565c5527ULL, 0xea038e6246777839ULL, + 0xf9abf3ce5a3e2469ULL, 0x741e768d0fd312d2ULL, 0x0144b883ced652c6ULL, + 0xc20b5a5ba33f8552ULL, 0x1ae69633c3435a9dULL, 0x97a28ca4088cfdecULL, + 0x8824a43c1e96f420ULL, 0x37612fa66eeea746ULL, 0x6b4cb165f9cf0e5aULL, + 0x43aa1c06a0abfb4aULL, 0x7f4dc26ff162796bULL, 0x6cbacc8e54ed9b0fULL, + 0xa6b7ffefd2bb253eULL, 0x2e25bc95b0a29d4fULL, 0x86d6a58bdef1388cULL, + 0xded74ac576b6f054ULL, 0x8030bdbc2b45805dULL, 0x3c81af70e94d9289ULL, + 0x3eff6dda9e3100dbULL, 0xb38dc39fdfcc8847ULL, 0x123885528d17b87eULL, + 0xf2da0ed240b1b642ULL, 0x44cefadcd54bf9a9ULL, 0x1312200e433c7ee6ULL, + 0x9ffcc84f3a78c748ULL, 0xf0cd1f72248576bbULL, 0xec6974053638cfe4ULL, + 0x2ba7b67c0cec4e4cULL, 0xac2f4df3e5ce32edULL, 0xcb33d14326ea4c11ULL, + 0xa4e9044cc77e58bcULL, 0x5f513293d934fcefULL, 0x5dc9645506e55444ULL, + 0x50de418f317de40aULL, 
0x388cb31a69dde259ULL, 0x2db4a83455820a86ULL, + 0x9010a91e84711ae9ULL, 0x4df7f0b7b1498371ULL, 0xd62a2eabc0977179ULL, + 0x22fac097aa8d5c0eULL +}; + +static const u64 sbox3[256] = { + 0xf49fcc2ff1daf39bULL, 0x487fd5c66ff29281ULL, 0xe8a30667fcdca83fULL, + 0x2c9b4be3d2fcce63ULL, 0xda3ff74b93fbbbc2ULL, 0x2fa165d2fe70ba66ULL, + 0xa103e279970e93d4ULL, 0xbecdec77b0e45e71ULL, 0xcfb41e723985e497ULL, + 0xb70aaa025ef75017ULL, 0xd42309f03840b8e0ULL, 0x8efc1ad035898579ULL, + 0x96c6920be2b2abc5ULL, 0x66af4163375a9172ULL, 0x2174abdcca7127fbULL, + 0xb33ccea64a72ff41ULL, 0xf04a4933083066a5ULL, 0x8d970acdd7289af5ULL, + 0x8f96e8e031c8c25eULL, 0xf3fec02276875d47ULL, 0xec7bf310056190ddULL, + 0xf5adb0aebb0f1491ULL, 0x9b50f8850fd58892ULL, 0x4975488358b74de8ULL, + 0xa3354ff691531c61ULL, 0x0702bbe481d2c6eeULL, 0x89fb24057deded98ULL, + 0xac3075138596e902ULL, 0x1d2d3580172772edULL, 0xeb738fc28e6bc30dULL, + 0x5854ef8f63044326ULL, 0x9e5c52325add3bbeULL, 0x90aa53cf325c4623ULL, + 0xc1d24d51349dd067ULL, 0x2051cfeea69ea624ULL, 0x13220f0a862e7e4fULL, + 0xce39399404e04864ULL, 0xd9c42ca47086fcb7ULL, 0x685ad2238a03e7ccULL, + 0x066484b2ab2ff1dbULL, 0xfe9d5d70efbf79ecULL, 0x5b13b9dd9c481854ULL, + 0x15f0d475ed1509adULL, 0x0bebcd060ec79851ULL, 0xd58c6791183ab7f8ULL, + 0xd1187c5052f3eee4ULL, 0xc95d1192e54e82ffULL, 0x86eea14cb9ac6ca2ULL, + 0x3485beb153677d5dULL, 0xdd191d781f8c492aULL, 0xf60866baa784ebf9ULL, + 0x518f643ba2d08c74ULL, 0x8852e956e1087c22ULL, 0xa768cb8dc410ae8dULL, + 0x38047726bfec8e1aULL, 0xa67738b4cd3b45aaULL, 0xad16691cec0dde19ULL, + 0xc6d4319380462e07ULL, 0xc5a5876d0ba61938ULL, 0x16b9fa1fa58fd840ULL, + 0x188ab1173ca74f18ULL, 0xabda2f98c99c021fULL, 0x3e0580ab134ae816ULL, + 0x5f3b05b773645abbULL, 0x2501a2be5575f2f6ULL, 0x1b2f74004e7e8ba9ULL, + 0x1cd7580371e8d953ULL, 0x7f6ed89562764e30ULL, 0xb15926ff596f003dULL, + 0x9f65293da8c5d6b9ULL, 0x6ecef04dd690f84cULL, 0x4782275fff33af88ULL, + 0xe41433083f820801ULL, 0xfd0dfe409a1af9b5ULL, 0x4325a3342cdb396bULL, + 0x8ae77e62b301b252ULL, 0xc36f9e9f6655615aULL, 0x85455a2d92d32c09ULL, + 0xf2c7dea949477485ULL, 0x63cfb4c133a39ebaULL, 0x83b040cc6ebc5462ULL, + 0x3b9454c8fdb326b0ULL, 0x56f56a9e87ffd78cULL, 0x2dc2940d99f42bc6ULL, + 0x98f7df096b096e2dULL, 0x19a6e01e3ad852bfULL, 0x42a99ccbdbd4b40bULL, + 0xa59998af45e9c559ULL, 0x366295e807d93186ULL, 0x6b48181bfaa1f773ULL, + 0x1fec57e2157a0a1dULL, 0x4667446af6201ad5ULL, 0xe615ebcacfb0f075ULL, + 0xb8f31f4f68290778ULL, 0x22713ed6ce22d11eULL, 0x3057c1a72ec3c93bULL, + 0xcb46acc37c3f1f2fULL, 0xdbb893fd02aaf50eULL, 0x331fd92e600b9fcfULL, + 0xa498f96148ea3ad6ULL, 0xa8d8426e8b6a83eaULL, 0xa089b274b7735cdcULL, + 0x87f6b3731e524a11ULL, 0x118808e5cbc96749ULL, 0x9906e4c7b19bd394ULL, + 0xafed7f7e9b24a20cULL, 0x6509eadeeb3644a7ULL, 0x6c1ef1d3e8ef0edeULL, + 0xb9c97d43e9798fb4ULL, 0xa2f2d784740c28a3ULL, 0x7b8496476197566fULL, + 0x7a5be3e6b65f069dULL, 0xf96330ed78be6f10ULL, 0xeee60de77a076a15ULL, + 0x2b4bee4aa08b9bd0ULL, 0x6a56a63ec7b8894eULL, 0x02121359ba34fef4ULL, + 0x4cbf99f8283703fcULL, 0x398071350caf30c8ULL, 0xd0a77a89f017687aULL, + 0xf1c1a9eb9e423569ULL, 0x8c7976282dee8199ULL, 0x5d1737a5dd1f7abdULL, + 0x4f53433c09a9fa80ULL, 0xfa8b0c53df7ca1d9ULL, 0x3fd9dcbc886ccb77ULL, + 0xc040917ca91b4720ULL, 0x7dd00142f9d1dcdfULL, 0x8476fc1d4f387b58ULL, + 0x23f8e7c5f3316503ULL, 0x032a2244e7e37339ULL, 0x5c87a5d750f5a74bULL, + 0x082b4cc43698992eULL, 0xdf917becb858f63cULL, 0x3270b8fc5bf86ddaULL, + 0x10ae72bb29b5dd76ULL, 0x576ac94e7700362bULL, 0x1ad112dac61efb8fULL, + 0x691bc30ec5faa427ULL, 0xff246311cc327143ULL, 0x3142368e30e53206ULL, + 0x71380e31e02ca396ULL, 
0x958d5c960aad76f1ULL, 0xf8d6f430c16da536ULL, + 0xc8ffd13f1be7e1d2ULL, 0x7578ae66004ddbe1ULL, 0x05833f01067be646ULL, + 0xbb34b5ad3bfe586dULL, 0x095f34c9a12b97f0ULL, 0x247ab64525d60ca8ULL, + 0xdcdbc6f3017477d1ULL, 0x4a2e14d4decad24dULL, 0xbdb5e6d9be0a1eebULL, + 0x2a7e70f7794301abULL, 0xdef42d8a270540fdULL, 0x01078ec0a34c22c1ULL, + 0xe5de511af4c16387ULL, 0x7ebb3a52bd9a330aULL, 0x77697857aa7d6435ULL, + 0x004e831603ae4c32ULL, 0xe7a21020ad78e312ULL, 0x9d41a70c6ab420f2ULL, + 0x28e06c18ea1141e6ULL, 0xd2b28cbd984f6b28ULL, 0x26b75f6c446e9d83ULL, + 0xba47568c4d418d7fULL, 0xd80badbfe6183d8eULL, 0x0e206d7f5f166044ULL, + 0xe258a43911cbca3eULL, 0x723a1746b21dc0bcULL, 0xc7caa854f5d7cdd3ULL, + 0x7cac32883d261d9cULL, 0x7690c26423ba942cULL, 0x17e55524478042b8ULL, + 0xe0be477656a2389fULL, 0x4d289b5e67ab2da0ULL, 0x44862b9c8fbbfd31ULL, + 0xb47cc8049d141365ULL, 0x822c1b362b91c793ULL, 0x4eb14655fb13dfd8ULL, + 0x1ecbba0714e2a97bULL, 0x6143459d5cde5f14ULL, 0x53a8fbf1d5f0ac89ULL, + 0x97ea04d81c5e5b00ULL, 0x622181a8d4fdb3f3ULL, 0xe9bcd341572a1208ULL, + 0x1411258643cce58aULL, 0x9144c5fea4c6e0a4ULL, 0x0d33d06565cf620fULL, + 0x54a48d489f219ca1ULL, 0xc43e5eac6d63c821ULL, 0xa9728b3a72770dafULL, + 0xd7934e7b20df87efULL, 0xe35503b61a3e86e5ULL, 0xcae321fbc819d504ULL, + 0x129a50b3ac60bfa6ULL, 0xcd5e68ea7e9fb6c3ULL, 0xb01c90199483b1c7ULL, + 0x3de93cd5c295376cULL, 0xaed52edf2ab9ad13ULL, 0x2e60f512c0a07884ULL, + 0xbc3d86a3e36210c9ULL, 0x35269d9b163951ceULL, 0x0c7d6e2ad0cdb5faULL, + 0x59e86297d87f5733ULL, 0x298ef221898db0e7ULL, 0x55000029d1a5aa7eULL, + 0x8bc08ae1b5061b45ULL, 0xc2c31c2b6c92703aULL, 0x94cc596baf25ef42ULL, + 0x0a1d73db22540456ULL, 0x04b6a0f9d9c4179aULL, 0xeffdafa2ae3d3c60ULL, + 0xf7c8075bb49496c4ULL, 0x9cc5c7141d1cd4e3ULL, 0x78bd1638218e5534ULL, + 0xb2f11568f850246aULL, 0xedfabcfa9502bc29ULL, 0x796ce5f2da23051bULL, + 0xaae128b0dc93537cULL, 0x3a493da0ee4b29aeULL, 0xb5df6b2c416895d7ULL, + 0xfcabbd25122d7f37ULL, 0x70810b58105dc4b1ULL, 0xe10fdd37f7882a90ULL, + 0x524dcab5518a3f5cULL, 0x3c9e85878451255bULL, 0x4029828119bd34e2ULL, + 0x74a05b6f5d3ceccbULL, 0xb610021542e13ecaULL, 0x0ff979d12f59e2acULL, + 0x6037da27e4f9cc50ULL, 0x5e92975a0df1847dULL, 0xd66de190d3e623feULL, + 0x5032d6b87b568048ULL, 0x9a36b7ce8235216eULL, 0x80272a7a24f64b4aULL, + 0x93efed8b8c6916f7ULL, 0x37ddbff44cce1555ULL, 0x4b95db5d4b99bd25ULL, + 0x92d3fda169812fc0ULL, 0xfb1a4a9a90660bb6ULL, 0x730c196946a4b9b2ULL, + 0x81e289aa7f49da68ULL, 0x64669a0f83b1a05fULL, 0x27b3ff7d9644f48bULL, + 0xcc6b615c8db675b3ULL, 0x674f20b9bcebbe95ULL, 0x6f31238275655982ULL, + 0x5ae488713e45cf05ULL, 0xbf619f9954c21157ULL, 0xeabac46040a8eae9ULL, + 0x454c6fe9f2c0c1cdULL, 0x419cf6496412691cULL, 0xd3dc3bef265b0f70ULL, + 0x6d0e60f5c3578a9eULL +}; + +static const u64 sbox4[256] = { + 0x5b0e608526323c55ULL, 0x1a46c1a9fa1b59f5ULL, 0xa9e245a17c4c8ffaULL, + 0x65ca5159db2955d7ULL, 0x05db0a76ce35afc2ULL, 0x81eac77ea9113d45ULL, + 0x528ef88ab6ac0a0dULL, 0xa09ea253597be3ffULL, 0x430ddfb3ac48cd56ULL, + 0xc4b3a67af45ce46fULL, 0x4ececfd8fbe2d05eULL, 0x3ef56f10b39935f0ULL, + 0x0b22d6829cd619c6ULL, 0x17fd460a74df2069ULL, 0x6cf8cc8e8510ed40ULL, + 0xd6c824bf3a6ecaa7ULL, 0x61243d581a817049ULL, 0x048bacb6bbc163a2ULL, + 0xd9a38ac27d44cc32ULL, 0x7fddff5baaf410abULL, 0xad6d495aa804824bULL, + 0xe1a6a74f2d8c9f94ULL, 0xd4f7851235dee8e3ULL, 0xfd4b7f886540d893ULL, + 0x247c20042aa4bfdaULL, 0x096ea1c517d1327cULL, 0xd56966b4361a6685ULL, + 0x277da5c31221057dULL, 0x94d59893a43acff7ULL, 0x64f0c51ccdc02281ULL, + 0x3d33bcc4ff6189dbULL, 0xe005cb184ce66af1ULL, 0xff5ccd1d1db99beaULL, + 0xb0b854a7fe42980fULL, 
0x7bd46a6a718d4b9fULL, 0xd10fa8cc22a5fd8cULL, + 0xd31484952be4bd31ULL, 0xc7fa975fcb243847ULL, 0x4886ed1e5846c407ULL, + 0x28cddb791eb70b04ULL, 0xc2b00be2f573417fULL, 0x5c9590452180f877ULL, + 0x7a6bddfff370eb00ULL, 0xce509e38d6d9d6a4ULL, 0xebeb0f00647fa702ULL, + 0x1dcc06cf76606f06ULL, 0xe4d9f28ba286ff0aULL, 0xd85a305dc918c262ULL, + 0x475b1d8732225f54ULL, 0x2d4fb51668ccb5feULL, 0xa679b9d9d72bba20ULL, + 0x53841c0d912d43a5ULL, 0x3b7eaa48bf12a4e8ULL, 0x781e0e47f22f1ddfULL, + 0xeff20ce60ab50973ULL, 0x20d261d19dffb742ULL, 0x16a12b03062a2e39ULL, + 0x1960eb2239650495ULL, 0x251c16fed50eb8b8ULL, 0x9ac0c330f826016eULL, + 0xed152665953e7671ULL, 0x02d63194a6369570ULL, 0x5074f08394b1c987ULL, + 0x70ba598c90b25ce1ULL, 0x794a15810b9742f6ULL, 0x0d5925e9fcaf8c6cULL, + 0x3067716cd868744eULL, 0x910ab077e8d7731bULL, 0x6a61bbdb5ac42f61ULL, + 0x93513efbf0851567ULL, 0xf494724b9e83e9d5ULL, 0xe887e1985c09648dULL, + 0x34b1d3c675370cfdULL, 0xdc35e433bc0d255dULL, 0xd0aab84234131be0ULL, + 0x08042a50b48b7eafULL, 0x9997c4ee44a3ab35ULL, 0x829a7b49201799d0ULL, + 0x263b8307b7c54441ULL, 0x752f95f4fd6a6ca6ULL, 0x927217402c08c6e5ULL, + 0x2a8ab754a795d9eeULL, 0xa442f7552f72943dULL, 0x2c31334e19781208ULL, + 0x4fa98d7ceaee6291ULL, 0x55c3862f665db309ULL, 0xbd0610175d53b1f3ULL, + 0x46fe6cb840413f27ULL, 0x3fe03792df0cfa59ULL, 0xcfe700372eb85e8fULL, + 0xa7be29e7adbce118ULL, 0xe544ee5cde8431ddULL, 0x8a781b1b41f1873eULL, + 0xa5c94c78a0d2f0e7ULL, 0x39412e2877b60728ULL, 0xa1265ef3afc9a62cULL, + 0xbcc2770c6a2506c5ULL, 0x3ab66dd5dce1ce12ULL, 0xe65499d04a675b37ULL, + 0x7d8f523481bfd216ULL, 0x0f6f64fcec15f389ULL, 0x74efbe618b5b13c8ULL, + 0xacdc82b714273e1dULL, 0xdd40bfe003199d17ULL, 0x37e99257e7e061f8ULL, + 0xfa52626904775aaaULL, 0x8bbbf63a463d56f9ULL, 0xf0013f1543a26e64ULL, + 0xa8307e9f879ec898ULL, 0xcc4c27a4150177ccULL, 0x1b432f2cca1d3348ULL, + 0xde1d1f8f9f6fa013ULL, 0x606602a047a7ddd6ULL, 0xd237ab64cc1cb2c7ULL, + 0x9b938e7225fcd1d3ULL, 0xec4e03708e0ff476ULL, 0xfeb2fbda3d03c12dULL, + 0xae0bced2ee43889aULL, 0x22cb8923ebfb4f43ULL, 0x69360d013cf7396dULL, + 0x855e3602d2d4e022ULL, 0x073805bad01f784cULL, 0x33e17a133852f546ULL, + 0xdf4874058ac7b638ULL, 0xba92b29c678aa14aULL, 0x0ce89fc76cfaadcdULL, + 0x5f9d4e0908339e34ULL, 0xf1afe9291f5923b9ULL, 0x6e3480f60f4a265fULL, + 0xeebf3a2ab29b841cULL, 0xe21938a88f91b4adULL, 0x57dfeff845c6d3c3ULL, + 0x2f006b0bf62caaf2ULL, 0x62f479ef6f75ee78ULL, 0x11a55ad41c8916a9ULL, + 0xf229d29084fed453ULL, 0x42f1c27b16b000e6ULL, 0x2b1f76749823c074ULL, + 0x4b76eca3c2745360ULL, 0x8c98f463b91691bdULL, 0x14bcc93cf1ade66aULL, + 0x8885213e6d458397ULL, 0x8e177df0274d4711ULL, 0xb49b73b5503f2951ULL, + 0x10168168c3f96b6bULL, 0x0e3d963b63cab0aeULL, 0x8dfc4b5655a1db14ULL, + 0xf789f1356e14de5cULL, 0x683e68af4e51dac1ULL, 0xc9a84f9d8d4b0fd9ULL, + 0x3691e03f52a0f9d1ULL, 0x5ed86e46e1878e80ULL, 0x3c711a0e99d07150ULL, + 0x5a0865b20c4e9310ULL, 0x56fbfc1fe4f0682eULL, 0xea8d5de3105edf9bULL, + 0x71abfdb12379187aULL, 0x2eb99de1bee77b9cULL, 0x21ecc0ea33cf4523ULL, + 0x59a4d7521805c7a1ULL, 0x3896f5eb56ae7c72ULL, 0xaa638f3db18f75dcULL, + 0x9f39358dabe9808eULL, 0xb7defa91c00b72acULL, 0x6b5541fd62492d92ULL, + 0x6dc6dee8f92e4d5bULL, 0x353f57abc4beea7eULL, 0x735769d6da5690ceULL, + 0x0a234aa642391484ULL, 0xf6f9508028f80d9dULL, 0xb8e319a27ab3f215ULL, + 0x31ad9c1151341a4dULL, 0x773c22a57bef5805ULL, 0x45c7561a07968633ULL, + 0xf913da9e249dbe36ULL, 0xda652d9b78a64c68ULL, 0x4c27a97f3bc334efULL, + 0x76621220e66b17f4ULL, 0x967743899acd7d0bULL, 0xf3ee5bcae0ed6782ULL, + 0x409f753600c879fcULL, 0x06d09a39b5926db6ULL, 0x6f83aeb0317ac588ULL, + 0x01e6ca4a86381f21ULL, 
0x66ff3462d19f3025ULL, 0x72207c24ddfd3bfbULL, + 0x4af6b6d3e2ece2ebULL, 0x9c994dbec7ea08deULL, 0x49ace597b09a8bc4ULL, + 0xb38c4766cf0797baULL, 0x131b9373c57c2a75ULL, 0xb1822cce61931e58ULL, + 0x9d7555b909ba1c0cULL, 0x127fafdd937d11d2ULL, 0x29da3badc66d92e4ULL, + 0xa2c1d57154c2ecbcULL, 0x58c5134d82f6fe24ULL, 0x1c3ae3515b62274fULL, + 0xe907c82e01cb8126ULL, 0xf8ed091913e37fcbULL, 0x3249d8f9c80046c9ULL, + 0x80cf9bede388fb63ULL, 0x1881539a116cf19eULL, 0x5103f3f76bd52457ULL, + 0x15b7e6f5ae47f7a8ULL, 0xdbd7c6ded47e9ccfULL, 0x44e55c410228bb1aULL, + 0xb647d4255edb4e99ULL, 0x5d11882bb8aafc30ULL, 0xf5098bbb29d3212aULL, + 0x8fb5ea14e90296b3ULL, 0x677b942157dd025aULL, 0xfb58e7c0a390acb5ULL, + 0x89d3674c83bd4a01ULL, 0x9e2da4df4bf3b93bULL, 0xfcc41e328cab4829ULL, + 0x03f38c96ba582c52ULL, 0xcad1bdbd7fd85db2ULL, 0xbbb442c16082ae83ULL, + 0xb95fe86ba5da9ab0ULL, 0xb22e04673771a93fULL, 0x845358c9493152d8ULL, + 0xbe2a488697b4541eULL, 0x95a2dc2dd38e6966ULL, 0xc02c11ac923c852bULL, + 0x2388b1990df2a87bULL, 0x7c8008fa1b4f37beULL, 0x1f70d0c84d54e503ULL, + 0x5490adec7ece57d4ULL, 0x002b3c27d9063a3aULL, 0x7eaea3848030a2bfULL, + 0xc602326ded2003c0ULL, 0x83a7287d69a94086ULL, 0xc57a5fcb30f57a8aULL, + 0xb56844e479ebe779ULL, 0xa373b40f05dcbce9ULL, 0xd71a786e88570ee2ULL, + 0x879cbacdbde8f6a0ULL, 0x976ad1bcc164a32fULL, 0xab21e25e9666d78bULL, + 0x901063aae5e5c33cULL, 0x9818b34448698d90ULL, 0xe36487ae3e1e8abbULL, + 0xafbdf931893bdcb4ULL, 0x6345a0dc5fbbd519ULL, 0x8628fe269b9465caULL, + 0x1e5d01603f9c51ecULL, 0x4de44006a15049b7ULL, 0xbf6c70e5f776cbb1ULL, + 0x411218f2ef552bedULL, 0xcb0c0708705a36a3ULL, 0xe74d14754f986044ULL, + 0xcd56d9430ea8280eULL, 0xc12591d7535f5065ULL, 0xc83223f1720aef96ULL, + 0xc3a0396f7363a51fULL +}; + + +static void tgr192_round(u64 * ra, u64 * rb, u64 * rc, u64 x, int mul) +{ + u64 a = *ra; + u64 b = *rb; + u64 c = *rc; + + c ^= x; + a -= sbox1[c & 0xff] ^ sbox2[(c >> 16) & 0xff] + ^ sbox3[(c >> 32) & 0xff] ^ sbox4[(c >> 48) & 0xff]; + b += sbox4[(c >> 8) & 0xff] ^ sbox3[(c >> 24) & 0xff] + ^ sbox2[(c >> 40) & 0xff] ^ sbox1[(c >> 56) & 0xff]; + b *= mul; + + *ra = a; + *rb = b; + *rc = c; +} + + +static void tgr192_pass(u64 * ra, u64 * rb, u64 * rc, u64 * x, int mul) +{ + u64 a = *ra; + u64 b = *rb; + u64 c = *rc; + + tgr192_round(&a, &b, &c, x[0], mul); + tgr192_round(&b, &c, &a, x[1], mul); + tgr192_round(&c, &a, &b, x[2], mul); + tgr192_round(&a, &b, &c, x[3], mul); + tgr192_round(&b, &c, &a, x[4], mul); + tgr192_round(&c, &a, &b, x[5], mul); + tgr192_round(&a, &b, &c, x[6], mul); + tgr192_round(&b, &c, &a, x[7], mul); + + *ra = a; + *rb = b; + *rc = c; +} + + +static void tgr192_key_schedule(u64 * x) +{ + x[0] -= x[7] ^ 0xa5a5a5a5a5a5a5a5ULL; + x[1] ^= x[0]; + x[2] += x[1]; + x[3] -= x[2] ^ ((~x[1]) << 19); + x[4] ^= x[3]; + x[5] += x[4]; + x[6] -= x[5] ^ ((~x[4]) >> 23); + x[7] ^= x[6]; + x[0] += x[7]; + x[1] -= x[0] ^ ((~x[7]) << 19); + x[2] ^= x[1]; + x[3] += x[2]; + x[4] -= x[3] ^ ((~x[2]) >> 23); + x[5] ^= x[4]; + x[6] += x[5]; + x[7] -= x[6] ^ 0x0123456789abcdefULL; +} + + +/**************** + * Transform the message DATA which consists of 512 bytes (8 words) + */ + +static void tgr192_transform(struct tgr192_ctx *tctx, const u8 * data) +{ + u64 a, b, c, aa, bb, cc; + u64 x[8]; + int i; + const __le64 *ptr = (const __le64 *)data; + + for (i = 0; i < 8; i++) + x[i] = le64_to_cpu(ptr[i]); + + /* save */ + a = aa = tctx->a; + b = bb = tctx->b; + c = cc = tctx->c; + + tgr192_pass(&a, &b, &c, x, 5); + tgr192_key_schedule(x); + tgr192_pass(&c, &a, &b, x, 7); + tgr192_key_schedule(x); + tgr192_pass(&b, &c, 
&a, x, 9); + + + /* feedforward */ + a ^= aa; + b -= bb; + c += cc; + /* store */ + tctx->a = a; + tctx->b = b; + tctx->c = c; +} + +static int tgr192_init(struct shash_desc *desc) +{ + struct tgr192_ctx *tctx = shash_desc_ctx(desc); + + tctx->a = 0x0123456789abcdefULL; + tctx->b = 0xfedcba9876543210ULL; + tctx->c = 0xf096a5b4c3b2e187ULL; + tctx->nblocks = 0; + tctx->count = 0; + + return 0; +} + + +/* Update the message digest with the contents + * of INBUF with length INLEN. */ +static int tgr192_update(struct shash_desc *desc, const u8 *inbuf, + unsigned int len) +{ + struct tgr192_ctx *tctx = shash_desc_ctx(desc); + + if (tctx->count == 64) { /* flush the buffer */ + tgr192_transform(tctx, tctx->hash); + tctx->count = 0; + tctx->nblocks++; + } + if (!inbuf) { + return 0; + } + if (tctx->count) { + for (; len && tctx->count < 64; len--) { + tctx->hash[tctx->count++] = *inbuf++; + } + tgr192_update(desc, NULL, 0); + if (!len) { + return 0; + } + + } + + while (len >= 64) { + tgr192_transform(tctx, inbuf); + tctx->count = 0; + tctx->nblocks++; + len -= 64; + inbuf += 64; + } + for (; len && tctx->count < 64; len--) { + tctx->hash[tctx->count++] = *inbuf++; + } + + return 0; +} + + + +/* The routine terminates the computation */ +static int tgr192_final(struct shash_desc *desc, u8 * out) +{ + struct tgr192_ctx *tctx = shash_desc_ctx(desc); + __be64 *dst = (__be64 *)out; + __be64 *be64p; + __le32 *le32p; + u32 t, msb, lsb; + + tgr192_update(desc, NULL, 0); /* flush */ ; + + msb = 0; + t = tctx->nblocks; + if ((lsb = t << 6) < t) { /* multiply by 64 to make a byte count */ + msb++; + } + msb += t >> 26; + t = lsb; + if ((lsb = t + tctx->count) < t) { /* add the count */ + msb++; + } + t = lsb; + if ((lsb = t << 3) < t) { /* multiply by 8 to make a bit count */ + msb++; + } + msb += t >> 29; + + if (tctx->count < 56) { /* enough room */ + tctx->hash[tctx->count++] = 0x01; /* pad */ + while (tctx->count < 56) { + tctx->hash[tctx->count++] = 0; /* pad */ + } + } else { /* need one extra block */ + tctx->hash[tctx->count++] = 0x01; /* pad character */ + while (tctx->count < 64) { + tctx->hash[tctx->count++] = 0; + } + tgr192_update(desc, NULL, 0); /* flush */ ; + memset(tctx->hash, 0, 56); /* fill next block with zeroes */ + } + /* append the 64 bit count */ + le32p = (__le32 *)&tctx->hash[56]; + le32p[0] = cpu_to_le32(lsb); + le32p[1] = cpu_to_le32(msb); + + tgr192_transform(tctx, tctx->hash); + + be64p = (__be64 *)tctx->hash; + dst[0] = be64p[0] = cpu_to_be64(tctx->a); + dst[1] = be64p[1] = cpu_to_be64(tctx->b); + dst[2] = be64p[2] = cpu_to_be64(tctx->c); + + return 0; +} + +static int tgr160_final(struct shash_desc *desc, u8 * out) +{ + u8 D[64]; + + tgr192_final(desc, D); + memcpy(out, D, TGR160_DIGEST_SIZE); + memset(D, 0, TGR192_DIGEST_SIZE); + + return 0; +} + +static int tgr128_final(struct shash_desc *desc, u8 * out) +{ + u8 D[64]; + + tgr192_final(desc, D); + memcpy(out, D, TGR128_DIGEST_SIZE); + memset(D, 0, TGR192_DIGEST_SIZE); + + return 0; +} + +static struct shash_alg tgr192 = { + .digestsize = TGR192_DIGEST_SIZE, + .init = tgr192_init, + .update = tgr192_update, + .final = tgr192_final, + .descsize = sizeof(struct tgr192_ctx), + .base = { + .cra_name = "tgr192", + .cra_flags = CRYPTO_ALG_TYPE_SHASH, + .cra_blocksize = TGR192_BLOCK_SIZE, + .cra_module = THIS_MODULE, + } +}; + +static struct shash_alg tgr160 = { + .digestsize = TGR160_DIGEST_SIZE, + .init = tgr192_init, + .update = tgr192_update, + .final = tgr160_final, + .descsize = sizeof(struct tgr192_ctx), + .base = { + 
.cra_name = "tgr160", + .cra_flags = CRYPTO_ALG_TYPE_SHASH, + .cra_blocksize = TGR192_BLOCK_SIZE, + .cra_module = THIS_MODULE, + } +}; + +static struct shash_alg tgr128 = { + .digestsize = TGR128_DIGEST_SIZE, + .init = tgr192_init, + .update = tgr192_update, + .final = tgr128_final, + .descsize = sizeof(struct tgr192_ctx), + .base = { + .cra_name = "tgr128", + .cra_flags = CRYPTO_ALG_TYPE_SHASH, + .cra_blocksize = TGR192_BLOCK_SIZE, + .cra_module = THIS_MODULE, + } +}; + +static int __init tgr192_mod_init(void) +{ + int ret = 0; + + ret = crypto_register_shash(&tgr192); + + if (ret < 0) { + goto out; + } + + ret = crypto_register_shash(&tgr160); + if (ret < 0) { + crypto_unregister_shash(&tgr192); + goto out; + } + + ret = crypto_register_shash(&tgr128); + if (ret < 0) { + crypto_unregister_shash(&tgr192); + crypto_unregister_shash(&tgr160); + } + out: + return ret; +} + +static void __exit tgr192_mod_fini(void) +{ + crypto_unregister_shash(&tgr192); + crypto_unregister_shash(&tgr160); + crypto_unregister_shash(&tgr128); +} + +MODULE_ALIAS("tgr160"); +MODULE_ALIAS("tgr128"); + +module_init(tgr192_mod_init); +module_exit(tgr192_mod_fini); + +MODULE_LICENSE("GPL"); +MODULE_DESCRIPTION("Tiger Message Digest Algorithm"); diff --git a/crypto/twofish_common.c b/crypto/twofish_common.c new file mode 100644 index 00000000..5f62c4f9 --- /dev/null +++ b/crypto/twofish_common.c @@ -0,0 +1,711 @@ +/* + * Common Twofish algorithm parts shared between the c and assembler + * implementations + * + * Originally Twofish for GPG + * By Matthew Skala <mskala@ansuz.sooke.bc.ca>, July 26, 1998 + * 256-bit key length added March 20, 1999 + * Some modifications to reduce the text size by Werner Koch, April, 1998 + * Ported to the kerneli patch by Marc Mutz <Marc@Mutz.com> + * Ported to CryptoAPI by Colin Slater <hoho@tacomeat.net> + * + * The original author has disclaimed all copyright interest in this + * code and thus put it in the public domain. The subsequent authors + * have put this under the GNU General Public License. + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 2 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 + * USA + * + * This code is a "clean room" implementation, written from the paper + * _Twofish: A 128-Bit Block Cipher_ by Bruce Schneier, John Kelsey, + * Doug Whiting, David Wagner, Chris Hall, and Niels Ferguson, available + * through http://www.counterpane.com/twofish.html + * + * For background information on multiplication in finite fields, used for + * the matrix operations in the key schedule, see the book _Contemporary + * Abstract Algebra_ by Joseph A. Gallian, especially chapter 22 in the + * Third Edition. 
+ */ + +#include <crypto/twofish.h> +#include <linux/bitops.h> +#include <linux/crypto.h> +#include <linux/errno.h> +#include <linux/init.h> +#include <linux/kernel.h> +#include <linux/module.h> +#include <linux/types.h> + + +/* The large precomputed tables for the Twofish cipher (twofish.c) + * Taken from the same source as twofish.c + * Marc Mutz <Marc@Mutz.com> + */ + +/* These two tables are the q0 and q1 permutations, exactly as described in + * the Twofish paper. */ + +static const u8 q0[256] = { + 0xA9, 0x67, 0xB3, 0xE8, 0x04, 0xFD, 0xA3, 0x76, 0x9A, 0x92, 0x80, 0x78, + 0xE4, 0xDD, 0xD1, 0x38, 0x0D, 0xC6, 0x35, 0x98, 0x18, 0xF7, 0xEC, 0x6C, + 0x43, 0x75, 0x37, 0x26, 0xFA, 0x13, 0x94, 0x48, 0xF2, 0xD0, 0x8B, 0x30, + 0x84, 0x54, 0xDF, 0x23, 0x19, 0x5B, 0x3D, 0x59, 0xF3, 0xAE, 0xA2, 0x82, + 0x63, 0x01, 0x83, 0x2E, 0xD9, 0x51, 0x9B, 0x7C, 0xA6, 0xEB, 0xA5, 0xBE, + 0x16, 0x0C, 0xE3, 0x61, 0xC0, 0x8C, 0x3A, 0xF5, 0x73, 0x2C, 0x25, 0x0B, + 0xBB, 0x4E, 0x89, 0x6B, 0x53, 0x6A, 0xB4, 0xF1, 0xE1, 0xE6, 0xBD, 0x45, + 0xE2, 0xF4, 0xB6, 0x66, 0xCC, 0x95, 0x03, 0x56, 0xD4, 0x1C, 0x1E, 0xD7, + 0xFB, 0xC3, 0x8E, 0xB5, 0xE9, 0xCF, 0xBF, 0xBA, 0xEA, 0x77, 0x39, 0xAF, + 0x33, 0xC9, 0x62, 0x71, 0x81, 0x79, 0x09, 0xAD, 0x24, 0xCD, 0xF9, 0xD8, + 0xE5, 0xC5, 0xB9, 0x4D, 0x44, 0x08, 0x86, 0xE7, 0xA1, 0x1D, 0xAA, 0xED, + 0x06, 0x70, 0xB2, 0xD2, 0x41, 0x7B, 0xA0, 0x11, 0x31, 0xC2, 0x27, 0x90, + 0x20, 0xF6, 0x60, 0xFF, 0x96, 0x5C, 0xB1, 0xAB, 0x9E, 0x9C, 0x52, 0x1B, + 0x5F, 0x93, 0x0A, 0xEF, 0x91, 0x85, 0x49, 0xEE, 0x2D, 0x4F, 0x8F, 0x3B, + 0x47, 0x87, 0x6D, 0x46, 0xD6, 0x3E, 0x69, 0x64, 0x2A, 0xCE, 0xCB, 0x2F, + 0xFC, 0x97, 0x05, 0x7A, 0xAC, 0x7F, 0xD5, 0x1A, 0x4B, 0x0E, 0xA7, 0x5A, + 0x28, 0x14, 0x3F, 0x29, 0x88, 0x3C, 0x4C, 0x02, 0xB8, 0xDA, 0xB0, 0x17, + 0x55, 0x1F, 0x8A, 0x7D, 0x57, 0xC7, 0x8D, 0x74, 0xB7, 0xC4, 0x9F, 0x72, + 0x7E, 0x15, 0x22, 0x12, 0x58, 0x07, 0x99, 0x34, 0x6E, 0x50, 0xDE, 0x68, + 0x65, 0xBC, 0xDB, 0xF8, 0xC8, 0xA8, 0x2B, 0x40, 0xDC, 0xFE, 0x32, 0xA4, + 0xCA, 0x10, 0x21, 0xF0, 0xD3, 0x5D, 0x0F, 0x00, 0x6F, 0x9D, 0x36, 0x42, + 0x4A, 0x5E, 0xC1, 0xE0 +}; + +static const u8 q1[256] = { + 0x75, 0xF3, 0xC6, 0xF4, 0xDB, 0x7B, 0xFB, 0xC8, 0x4A, 0xD3, 0xE6, 0x6B, + 0x45, 0x7D, 0xE8, 0x4B, 0xD6, 0x32, 0xD8, 0xFD, 0x37, 0x71, 0xF1, 0xE1, + 0x30, 0x0F, 0xF8, 0x1B, 0x87, 0xFA, 0x06, 0x3F, 0x5E, 0xBA, 0xAE, 0x5B, + 0x8A, 0x00, 0xBC, 0x9D, 0x6D, 0xC1, 0xB1, 0x0E, 0x80, 0x5D, 0xD2, 0xD5, + 0xA0, 0x84, 0x07, 0x14, 0xB5, 0x90, 0x2C, 0xA3, 0xB2, 0x73, 0x4C, 0x54, + 0x92, 0x74, 0x36, 0x51, 0x38, 0xB0, 0xBD, 0x5A, 0xFC, 0x60, 0x62, 0x96, + 0x6C, 0x42, 0xF7, 0x10, 0x7C, 0x28, 0x27, 0x8C, 0x13, 0x95, 0x9C, 0xC7, + 0x24, 0x46, 0x3B, 0x70, 0xCA, 0xE3, 0x85, 0xCB, 0x11, 0xD0, 0x93, 0xB8, + 0xA6, 0x83, 0x20, 0xFF, 0x9F, 0x77, 0xC3, 0xCC, 0x03, 0x6F, 0x08, 0xBF, + 0x40, 0xE7, 0x2B, 0xE2, 0x79, 0x0C, 0xAA, 0x82, 0x41, 0x3A, 0xEA, 0xB9, + 0xE4, 0x9A, 0xA4, 0x97, 0x7E, 0xDA, 0x7A, 0x17, 0x66, 0x94, 0xA1, 0x1D, + 0x3D, 0xF0, 0xDE, 0xB3, 0x0B, 0x72, 0xA7, 0x1C, 0xEF, 0xD1, 0x53, 0x3E, + 0x8F, 0x33, 0x26, 0x5F, 0xEC, 0x76, 0x2A, 0x49, 0x81, 0x88, 0xEE, 0x21, + 0xC4, 0x1A, 0xEB, 0xD9, 0xC5, 0x39, 0x99, 0xCD, 0xAD, 0x31, 0x8B, 0x01, + 0x18, 0x23, 0xDD, 0x1F, 0x4E, 0x2D, 0xF9, 0x48, 0x4F, 0xF2, 0x65, 0x8E, + 0x78, 0x5C, 0x58, 0x19, 0x8D, 0xE5, 0x98, 0x57, 0x67, 0x7F, 0x05, 0x64, + 0xAF, 0x63, 0xB6, 0xFE, 0xF5, 0xB7, 0x3C, 0xA5, 0xCE, 0xE9, 0x68, 0x44, + 0xE0, 0x4D, 0x43, 0x69, 0x29, 0x2E, 0xAC, 0x15, 0x59, 0xA8, 0x0A, 0x9E, + 0x6E, 0x47, 0xDF, 0x34, 0x35, 0x6A, 0xCF, 0xDC, 0x22, 0xC9, 0xC0, 0x9B, + 0x89, 0xD4, 0xED, 0xAB, 
0x12, 0xA2, 0x0D, 0x52, 0xBB, 0x02, 0x2F, 0xA9, + 0xD7, 0x61, 0x1E, 0xB4, 0x50, 0x04, 0xF6, 0xC2, 0x16, 0x25, 0x86, 0x56, + 0x55, 0x09, 0xBE, 0x91 +}; + +/* These MDS tables are actually tables of MDS composed with q0 and q1, + * because it is only ever used that way and we can save some time by + * precomputing. Of course the main saving comes from precomputing the + * GF(2^8) multiplication involved in the MDS matrix multiply; by looking + * things up in these tables we reduce the matrix multiply to four lookups + * and three XORs. Semi-formally, the definition of these tables is: + * mds[0][i] = MDS (q1[i] 0 0 0)^T mds[1][i] = MDS (0 q0[i] 0 0)^T + * mds[2][i] = MDS (0 0 q1[i] 0)^T mds[3][i] = MDS (0 0 0 q0[i])^T + * where ^T means "transpose", the matrix multiply is performed in GF(2^8) + * represented as GF(2)[x]/v(x) where v(x)=x^8+x^6+x^5+x^3+1 as described + * by Schneier et al, and I'm casually glossing over the byte/word + * conversion issues. */ + +static const u32 mds[4][256] = { + { + 0xBCBC3275, 0xECEC21F3, 0x202043C6, 0xB3B3C9F4, 0xDADA03DB, 0x02028B7B, + 0xE2E22BFB, 0x9E9EFAC8, 0xC9C9EC4A, 0xD4D409D3, 0x18186BE6, 0x1E1E9F6B, + 0x98980E45, 0xB2B2387D, 0xA6A6D2E8, 0x2626B74B, 0x3C3C57D6, 0x93938A32, + 0x8282EED8, 0x525298FD, 0x7B7BD437, 0xBBBB3771, 0x5B5B97F1, 0x474783E1, + 0x24243C30, 0x5151E20F, 0xBABAC6F8, 0x4A4AF31B, 0xBFBF4887, 0x0D0D70FA, + 0xB0B0B306, 0x7575DE3F, 0xD2D2FD5E, 0x7D7D20BA, 0x666631AE, 0x3A3AA35B, + 0x59591C8A, 0x00000000, 0xCDCD93BC, 0x1A1AE09D, 0xAEAE2C6D, 0x7F7FABC1, + 0x2B2BC7B1, 0xBEBEB90E, 0xE0E0A080, 0x8A8A105D, 0x3B3B52D2, 0x6464BAD5, + 0xD8D888A0, 0xE7E7A584, 0x5F5FE807, 0x1B1B1114, 0x2C2CC2B5, 0xFCFCB490, + 0x3131272C, 0x808065A3, 0x73732AB2, 0x0C0C8173, 0x79795F4C, 0x6B6B4154, + 0x4B4B0292, 0x53536974, 0x94948F36, 0x83831F51, 0x2A2A3638, 0xC4C49CB0, + 0x2222C8BD, 0xD5D5F85A, 0xBDBDC3FC, 0x48487860, 0xFFFFCE62, 0x4C4C0796, + 0x4141776C, 0xC7C7E642, 0xEBEB24F7, 0x1C1C1410, 0x5D5D637C, 0x36362228, + 0x6767C027, 0xE9E9AF8C, 0x4444F913, 0x1414EA95, 0xF5F5BB9C, 0xCFCF18C7, + 0x3F3F2D24, 0xC0C0E346, 0x7272DB3B, 0x54546C70, 0x29294CCA, 0xF0F035E3, + 0x0808FE85, 0xC6C617CB, 0xF3F34F11, 0x8C8CE4D0, 0xA4A45993, 0xCACA96B8, + 0x68683BA6, 0xB8B84D83, 0x38382820, 0xE5E52EFF, 0xADAD569F, 0x0B0B8477, + 0xC8C81DC3, 0x9999FFCC, 0x5858ED03, 0x19199A6F, 0x0E0E0A08, 0x95957EBF, + 0x70705040, 0xF7F730E7, 0x6E6ECF2B, 0x1F1F6EE2, 0xB5B53D79, 0x09090F0C, + 0x616134AA, 0x57571682, 0x9F9F0B41, 0x9D9D803A, 0x111164EA, 0x2525CDB9, + 0xAFAFDDE4, 0x4545089A, 0xDFDF8DA4, 0xA3A35C97, 0xEAEAD57E, 0x353558DA, + 0xEDEDD07A, 0x4343FC17, 0xF8F8CB66, 0xFBFBB194, 0x3737D3A1, 0xFAFA401D, + 0xC2C2683D, 0xB4B4CCF0, 0x32325DDE, 0x9C9C71B3, 0x5656E70B, 0xE3E3DA72, + 0x878760A7, 0x15151B1C, 0xF9F93AEF, 0x6363BFD1, 0x3434A953, 0x9A9A853E, + 0xB1B1428F, 0x7C7CD133, 0x88889B26, 0x3D3DA65F, 0xA1A1D7EC, 0xE4E4DF76, + 0x8181942A, 0x91910149, 0x0F0FFB81, 0xEEEEAA88, 0x161661EE, 0xD7D77321, + 0x9797F5C4, 0xA5A5A81A, 0xFEFE3FEB, 0x6D6DB5D9, 0x7878AEC5, 0xC5C56D39, + 0x1D1DE599, 0x7676A4CD, 0x3E3EDCAD, 0xCBCB6731, 0xB6B6478B, 0xEFEF5B01, + 0x12121E18, 0x6060C523, 0x6A6AB0DD, 0x4D4DF61F, 0xCECEE94E, 0xDEDE7C2D, + 0x55559DF9, 0x7E7E5A48, 0x2121B24F, 0x03037AF2, 0xA0A02665, 0x5E5E198E, + 0x5A5A6678, 0x65654B5C, 0x62624E58, 0xFDFD4519, 0x0606F48D, 0x404086E5, + 0xF2F2BE98, 0x3333AC57, 0x17179067, 0x05058E7F, 0xE8E85E05, 0x4F4F7D64, + 0x89896AAF, 0x10109563, 0x74742FB6, 0x0A0A75FE, 0x5C5C92F5, 0x9B9B74B7, + 0x2D2D333C, 0x3030D6A5, 0x2E2E49CE, 0x494989E9, 0x46467268, 0x77775544, + 0xA8A8D8E0, 0x9696044D, 
0x2828BD43, 0xA9A92969, 0xD9D97929, 0x8686912E, + 0xD1D187AC, 0xF4F44A15, 0x8D8D1559, 0xD6D682A8, 0xB9B9BC0A, 0x42420D9E, + 0xF6F6C16E, 0x2F2FB847, 0xDDDD06DF, 0x23233934, 0xCCCC6235, 0xF1F1C46A, + 0xC1C112CF, 0x8585EBDC, 0x8F8F9E22, 0x7171A1C9, 0x9090F0C0, 0xAAAA539B, + 0x0101F189, 0x8B8BE1D4, 0x4E4E8CED, 0x8E8E6FAB, 0xABABA212, 0x6F6F3EA2, + 0xE6E6540D, 0xDBDBF252, 0x92927BBB, 0xB7B7B602, 0x6969CA2F, 0x3939D9A9, + 0xD3D30CD7, 0xA7A72361, 0xA2A2AD1E, 0xC3C399B4, 0x6C6C4450, 0x07070504, + 0x04047FF6, 0x272746C2, 0xACACA716, 0xD0D07625, 0x50501386, 0xDCDCF756, + 0x84841A55, 0xE1E15109, 0x7A7A25BE, 0x1313EF91}, + + { + 0xA9D93939, 0x67901717, 0xB3719C9C, 0xE8D2A6A6, 0x04050707, 0xFD985252, + 0xA3658080, 0x76DFE4E4, 0x9A084545, 0x92024B4B, 0x80A0E0E0, 0x78665A5A, + 0xE4DDAFAF, 0xDDB06A6A, 0xD1BF6363, 0x38362A2A, 0x0D54E6E6, 0xC6432020, + 0x3562CCCC, 0x98BEF2F2, 0x181E1212, 0xF724EBEB, 0xECD7A1A1, 0x6C774141, + 0x43BD2828, 0x7532BCBC, 0x37D47B7B, 0x269B8888, 0xFA700D0D, 0x13F94444, + 0x94B1FBFB, 0x485A7E7E, 0xF27A0303, 0xD0E48C8C, 0x8B47B6B6, 0x303C2424, + 0x84A5E7E7, 0x54416B6B, 0xDF06DDDD, 0x23C56060, 0x1945FDFD, 0x5BA33A3A, + 0x3D68C2C2, 0x59158D8D, 0xF321ECEC, 0xAE316666, 0xA23E6F6F, 0x82165757, + 0x63951010, 0x015BEFEF, 0x834DB8B8, 0x2E918686, 0xD9B56D6D, 0x511F8383, + 0x9B53AAAA, 0x7C635D5D, 0xA63B6868, 0xEB3FFEFE, 0xA5D63030, 0xBE257A7A, + 0x16A7ACAC, 0x0C0F0909, 0xE335F0F0, 0x6123A7A7, 0xC0F09090, 0x8CAFE9E9, + 0x3A809D9D, 0xF5925C5C, 0x73810C0C, 0x2C273131, 0x2576D0D0, 0x0BE75656, + 0xBB7B9292, 0x4EE9CECE, 0x89F10101, 0x6B9F1E1E, 0x53A93434, 0x6AC4F1F1, + 0xB499C3C3, 0xF1975B5B, 0xE1834747, 0xE66B1818, 0xBDC82222, 0x450E9898, + 0xE26E1F1F, 0xF4C9B3B3, 0xB62F7474, 0x66CBF8F8, 0xCCFF9999, 0x95EA1414, + 0x03ED5858, 0x56F7DCDC, 0xD4E18B8B, 0x1C1B1515, 0x1EADA2A2, 0xD70CD3D3, + 0xFB2BE2E2, 0xC31DC8C8, 0x8E195E5E, 0xB5C22C2C, 0xE9894949, 0xCF12C1C1, + 0xBF7E9595, 0xBA207D7D, 0xEA641111, 0x77840B0B, 0x396DC5C5, 0xAF6A8989, + 0x33D17C7C, 0xC9A17171, 0x62CEFFFF, 0x7137BBBB, 0x81FB0F0F, 0x793DB5B5, + 0x0951E1E1, 0xADDC3E3E, 0x242D3F3F, 0xCDA47676, 0xF99D5555, 0xD8EE8282, + 0xE5864040, 0xC5AE7878, 0xB9CD2525, 0x4D049696, 0x44557777, 0x080A0E0E, + 0x86135050, 0xE730F7F7, 0xA1D33737, 0x1D40FAFA, 0xAA346161, 0xED8C4E4E, + 0x06B3B0B0, 0x706C5454, 0xB22A7373, 0xD2523B3B, 0x410B9F9F, 0x7B8B0202, + 0xA088D8D8, 0x114FF3F3, 0x3167CBCB, 0xC2462727, 0x27C06767, 0x90B4FCFC, + 0x20283838, 0xF67F0404, 0x60784848, 0xFF2EE5E5, 0x96074C4C, 0x5C4B6565, + 0xB1C72B2B, 0xAB6F8E8E, 0x9E0D4242, 0x9CBBF5F5, 0x52F2DBDB, 0x1BF34A4A, + 0x5FA63D3D, 0x9359A4A4, 0x0ABCB9B9, 0xEF3AF9F9, 0x91EF1313, 0x85FE0808, + 0x49019191, 0xEE611616, 0x2D7CDEDE, 0x4FB22121, 0x8F42B1B1, 0x3BDB7272, + 0x47B82F2F, 0x8748BFBF, 0x6D2CAEAE, 0x46E3C0C0, 0xD6573C3C, 0x3E859A9A, + 0x6929A9A9, 0x647D4F4F, 0x2A948181, 0xCE492E2E, 0xCB17C6C6, 0x2FCA6969, + 0xFCC3BDBD, 0x975CA3A3, 0x055EE8E8, 0x7AD0EDED, 0xAC87D1D1, 0x7F8E0505, + 0xD5BA6464, 0x1AA8A5A5, 0x4BB72626, 0x0EB9BEBE, 0xA7608787, 0x5AF8D5D5, + 0x28223636, 0x14111B1B, 0x3FDE7575, 0x2979D9D9, 0x88AAEEEE, 0x3C332D2D, + 0x4C5F7979, 0x02B6B7B7, 0xB896CACA, 0xDA583535, 0xB09CC4C4, 0x17FC4343, + 0x551A8484, 0x1FF64D4D, 0x8A1C5959, 0x7D38B2B2, 0x57AC3333, 0xC718CFCF, + 0x8DF40606, 0x74695353, 0xB7749B9B, 0xC4F59797, 0x9F56ADAD, 0x72DAE3E3, + 0x7ED5EAEA, 0x154AF4F4, 0x229E8F8F, 0x12A2ABAB, 0x584E6262, 0x07E85F5F, + 0x99E51D1D, 0x34392323, 0x6EC1F6F6, 0x50446C6C, 0xDE5D3232, 0x68724646, + 0x6526A0A0, 0xBC93CDCD, 0xDB03DADA, 0xF8C6BABA, 0xC8FA9E9E, 0xA882D6D6, + 0x2BCF6E6E, 0x40507070, 0xDCEB8585, 
0xFE750A0A, 0x328A9393, 0xA48DDFDF, + 0xCA4C2929, 0x10141C1C, 0x2173D7D7, 0xF0CCB4B4, 0xD309D4D4, 0x5D108A8A, + 0x0FE25151, 0x00000000, 0x6F9A1919, 0x9DE01A1A, 0x368F9494, 0x42E6C7C7, + 0x4AECC9C9, 0x5EFDD2D2, 0xC1AB7F7F, 0xE0D8A8A8}, + + { + 0xBC75BC32, 0xECF3EC21, 0x20C62043, 0xB3F4B3C9, 0xDADBDA03, 0x027B028B, + 0xE2FBE22B, 0x9EC89EFA, 0xC94AC9EC, 0xD4D3D409, 0x18E6186B, 0x1E6B1E9F, + 0x9845980E, 0xB27DB238, 0xA6E8A6D2, 0x264B26B7, 0x3CD63C57, 0x9332938A, + 0x82D882EE, 0x52FD5298, 0x7B377BD4, 0xBB71BB37, 0x5BF15B97, 0x47E14783, + 0x2430243C, 0x510F51E2, 0xBAF8BAC6, 0x4A1B4AF3, 0xBF87BF48, 0x0DFA0D70, + 0xB006B0B3, 0x753F75DE, 0xD25ED2FD, 0x7DBA7D20, 0x66AE6631, 0x3A5B3AA3, + 0x598A591C, 0x00000000, 0xCDBCCD93, 0x1A9D1AE0, 0xAE6DAE2C, 0x7FC17FAB, + 0x2BB12BC7, 0xBE0EBEB9, 0xE080E0A0, 0x8A5D8A10, 0x3BD23B52, 0x64D564BA, + 0xD8A0D888, 0xE784E7A5, 0x5F075FE8, 0x1B141B11, 0x2CB52CC2, 0xFC90FCB4, + 0x312C3127, 0x80A38065, 0x73B2732A, 0x0C730C81, 0x794C795F, 0x6B546B41, + 0x4B924B02, 0x53745369, 0x9436948F, 0x8351831F, 0x2A382A36, 0xC4B0C49C, + 0x22BD22C8, 0xD55AD5F8, 0xBDFCBDC3, 0x48604878, 0xFF62FFCE, 0x4C964C07, + 0x416C4177, 0xC742C7E6, 0xEBF7EB24, 0x1C101C14, 0x5D7C5D63, 0x36283622, + 0x672767C0, 0xE98CE9AF, 0x441344F9, 0x149514EA, 0xF59CF5BB, 0xCFC7CF18, + 0x3F243F2D, 0xC046C0E3, 0x723B72DB, 0x5470546C, 0x29CA294C, 0xF0E3F035, + 0x088508FE, 0xC6CBC617, 0xF311F34F, 0x8CD08CE4, 0xA493A459, 0xCAB8CA96, + 0x68A6683B, 0xB883B84D, 0x38203828, 0xE5FFE52E, 0xAD9FAD56, 0x0B770B84, + 0xC8C3C81D, 0x99CC99FF, 0x580358ED, 0x196F199A, 0x0E080E0A, 0x95BF957E, + 0x70407050, 0xF7E7F730, 0x6E2B6ECF, 0x1FE21F6E, 0xB579B53D, 0x090C090F, + 0x61AA6134, 0x57825716, 0x9F419F0B, 0x9D3A9D80, 0x11EA1164, 0x25B925CD, + 0xAFE4AFDD, 0x459A4508, 0xDFA4DF8D, 0xA397A35C, 0xEA7EEAD5, 0x35DA3558, + 0xED7AEDD0, 0x431743FC, 0xF866F8CB, 0xFB94FBB1, 0x37A137D3, 0xFA1DFA40, + 0xC23DC268, 0xB4F0B4CC, 0x32DE325D, 0x9CB39C71, 0x560B56E7, 0xE372E3DA, + 0x87A78760, 0x151C151B, 0xF9EFF93A, 0x63D163BF, 0x345334A9, 0x9A3E9A85, + 0xB18FB142, 0x7C337CD1, 0x8826889B, 0x3D5F3DA6, 0xA1ECA1D7, 0xE476E4DF, + 0x812A8194, 0x91499101, 0x0F810FFB, 0xEE88EEAA, 0x16EE1661, 0xD721D773, + 0x97C497F5, 0xA51AA5A8, 0xFEEBFE3F, 0x6DD96DB5, 0x78C578AE, 0xC539C56D, + 0x1D991DE5, 0x76CD76A4, 0x3EAD3EDC, 0xCB31CB67, 0xB68BB647, 0xEF01EF5B, + 0x1218121E, 0x602360C5, 0x6ADD6AB0, 0x4D1F4DF6, 0xCE4ECEE9, 0xDE2DDE7C, + 0x55F9559D, 0x7E487E5A, 0x214F21B2, 0x03F2037A, 0xA065A026, 0x5E8E5E19, + 0x5A785A66, 0x655C654B, 0x6258624E, 0xFD19FD45, 0x068D06F4, 0x40E54086, + 0xF298F2BE, 0x335733AC, 0x17671790, 0x057F058E, 0xE805E85E, 0x4F644F7D, + 0x89AF896A, 0x10631095, 0x74B6742F, 0x0AFE0A75, 0x5CF55C92, 0x9BB79B74, + 0x2D3C2D33, 0x30A530D6, 0x2ECE2E49, 0x49E94989, 0x46684672, 0x77447755, + 0xA8E0A8D8, 0x964D9604, 0x284328BD, 0xA969A929, 0xD929D979, 0x862E8691, + 0xD1ACD187, 0xF415F44A, 0x8D598D15, 0xD6A8D682, 0xB90AB9BC, 0x429E420D, + 0xF66EF6C1, 0x2F472FB8, 0xDDDFDD06, 0x23342339, 0xCC35CC62, 0xF16AF1C4, + 0xC1CFC112, 0x85DC85EB, 0x8F228F9E, 0x71C971A1, 0x90C090F0, 0xAA9BAA53, + 0x018901F1, 0x8BD48BE1, 0x4EED4E8C, 0x8EAB8E6F, 0xAB12ABA2, 0x6FA26F3E, + 0xE60DE654, 0xDB52DBF2, 0x92BB927B, 0xB702B7B6, 0x692F69CA, 0x39A939D9, + 0xD3D7D30C, 0xA761A723, 0xA21EA2AD, 0xC3B4C399, 0x6C506C44, 0x07040705, + 0x04F6047F, 0x27C22746, 0xAC16ACA7, 0xD025D076, 0x50865013, 0xDC56DCF7, + 0x8455841A, 0xE109E151, 0x7ABE7A25, 0x139113EF}, + + { + 0xD939A9D9, 0x90176790, 0x719CB371, 0xD2A6E8D2, 0x05070405, 0x9852FD98, + 0x6580A365, 0xDFE476DF, 0x08459A08, 0x024B9202, 0xA0E080A0, 0x665A7866, 
+ 0xDDAFE4DD, 0xB06ADDB0, 0xBF63D1BF, 0x362A3836, 0x54E60D54, 0x4320C643, + 0x62CC3562, 0xBEF298BE, 0x1E12181E, 0x24EBF724, 0xD7A1ECD7, 0x77416C77, + 0xBD2843BD, 0x32BC7532, 0xD47B37D4, 0x9B88269B, 0x700DFA70, 0xF94413F9, + 0xB1FB94B1, 0x5A7E485A, 0x7A03F27A, 0xE48CD0E4, 0x47B68B47, 0x3C24303C, + 0xA5E784A5, 0x416B5441, 0x06DDDF06, 0xC56023C5, 0x45FD1945, 0xA33A5BA3, + 0x68C23D68, 0x158D5915, 0x21ECF321, 0x3166AE31, 0x3E6FA23E, 0x16578216, + 0x95106395, 0x5BEF015B, 0x4DB8834D, 0x91862E91, 0xB56DD9B5, 0x1F83511F, + 0x53AA9B53, 0x635D7C63, 0x3B68A63B, 0x3FFEEB3F, 0xD630A5D6, 0x257ABE25, + 0xA7AC16A7, 0x0F090C0F, 0x35F0E335, 0x23A76123, 0xF090C0F0, 0xAFE98CAF, + 0x809D3A80, 0x925CF592, 0x810C7381, 0x27312C27, 0x76D02576, 0xE7560BE7, + 0x7B92BB7B, 0xE9CE4EE9, 0xF10189F1, 0x9F1E6B9F, 0xA93453A9, 0xC4F16AC4, + 0x99C3B499, 0x975BF197, 0x8347E183, 0x6B18E66B, 0xC822BDC8, 0x0E98450E, + 0x6E1FE26E, 0xC9B3F4C9, 0x2F74B62F, 0xCBF866CB, 0xFF99CCFF, 0xEA1495EA, + 0xED5803ED, 0xF7DC56F7, 0xE18BD4E1, 0x1B151C1B, 0xADA21EAD, 0x0CD3D70C, + 0x2BE2FB2B, 0x1DC8C31D, 0x195E8E19, 0xC22CB5C2, 0x8949E989, 0x12C1CF12, + 0x7E95BF7E, 0x207DBA20, 0x6411EA64, 0x840B7784, 0x6DC5396D, 0x6A89AF6A, + 0xD17C33D1, 0xA171C9A1, 0xCEFF62CE, 0x37BB7137, 0xFB0F81FB, 0x3DB5793D, + 0x51E10951, 0xDC3EADDC, 0x2D3F242D, 0xA476CDA4, 0x9D55F99D, 0xEE82D8EE, + 0x8640E586, 0xAE78C5AE, 0xCD25B9CD, 0x04964D04, 0x55774455, 0x0A0E080A, + 0x13508613, 0x30F7E730, 0xD337A1D3, 0x40FA1D40, 0x3461AA34, 0x8C4EED8C, + 0xB3B006B3, 0x6C54706C, 0x2A73B22A, 0x523BD252, 0x0B9F410B, 0x8B027B8B, + 0x88D8A088, 0x4FF3114F, 0x67CB3167, 0x4627C246, 0xC06727C0, 0xB4FC90B4, + 0x28382028, 0x7F04F67F, 0x78486078, 0x2EE5FF2E, 0x074C9607, 0x4B655C4B, + 0xC72BB1C7, 0x6F8EAB6F, 0x0D429E0D, 0xBBF59CBB, 0xF2DB52F2, 0xF34A1BF3, + 0xA63D5FA6, 0x59A49359, 0xBCB90ABC, 0x3AF9EF3A, 0xEF1391EF, 0xFE0885FE, + 0x01914901, 0x6116EE61, 0x7CDE2D7C, 0xB2214FB2, 0x42B18F42, 0xDB723BDB, + 0xB82F47B8, 0x48BF8748, 0x2CAE6D2C, 0xE3C046E3, 0x573CD657, 0x859A3E85, + 0x29A96929, 0x7D4F647D, 0x94812A94, 0x492ECE49, 0x17C6CB17, 0xCA692FCA, + 0xC3BDFCC3, 0x5CA3975C, 0x5EE8055E, 0xD0ED7AD0, 0x87D1AC87, 0x8E057F8E, + 0xBA64D5BA, 0xA8A51AA8, 0xB7264BB7, 0xB9BE0EB9, 0x6087A760, 0xF8D55AF8, + 0x22362822, 0x111B1411, 0xDE753FDE, 0x79D92979, 0xAAEE88AA, 0x332D3C33, + 0x5F794C5F, 0xB6B702B6, 0x96CAB896, 0x5835DA58, 0x9CC4B09C, 0xFC4317FC, + 0x1A84551A, 0xF64D1FF6, 0x1C598A1C, 0x38B27D38, 0xAC3357AC, 0x18CFC718, + 0xF4068DF4, 0x69537469, 0x749BB774, 0xF597C4F5, 0x56AD9F56, 0xDAE372DA, + 0xD5EA7ED5, 0x4AF4154A, 0x9E8F229E, 0xA2AB12A2, 0x4E62584E, 0xE85F07E8, + 0xE51D99E5, 0x39233439, 0xC1F66EC1, 0x446C5044, 0x5D32DE5D, 0x72466872, + 0x26A06526, 0x93CDBC93, 0x03DADB03, 0xC6BAF8C6, 0xFA9EC8FA, 0x82D6A882, + 0xCF6E2BCF, 0x50704050, 0xEB85DCEB, 0x750AFE75, 0x8A93328A, 0x8DDFA48D, + 0x4C29CA4C, 0x141C1014, 0x73D72173, 0xCCB4F0CC, 0x09D4D309, 0x108A5D10, + 0xE2510FE2, 0x00000000, 0x9A196F9A, 0xE01A9DE0, 0x8F94368F, 0xE6C742E6, + 0xECC94AEC, 0xFDD25EFD, 0xAB7FC1AB, 0xD8A8E0D8} +}; + +/* The exp_to_poly and poly_to_exp tables are used to perform efficient + * operations in GF(2^8) represented as GF(2)[x]/w(x) where + * w(x)=x^8+x^6+x^3+x^2+1. We care about doing that because it's part of the + * definition of the RS matrix in the key schedule. Elements of that field + * are polynomials of degree not greater than 7 and all coefficients 0 or 1, + * which can be represented naturally by bytes (just substitute x=2). 
In that + * form, GF(2^8) addition is the same as bitwise XOR, but GF(2^8) + * multiplication is inefficient without hardware support. To multiply + * faster, I make use of the fact x is a generator for the nonzero elements, + * so that every element p of GF(2)[x]/w(x) is either 0 or equal to (x)^n for + * some n in 0..254. Note that that caret is exponentiation in GF(2^8), + * *not* polynomial notation. So if I want to compute pq where p and q are + * in GF(2^8), I can just say: + * 1. if p=0 or q=0 then pq=0 + * 2. otherwise, find m and n such that p=x^m and q=x^n + * 3. pq=(x^m)(x^n)=x^(m+n), so add m and n and find pq + * The translations in steps 2 and 3 are looked up in the tables + * poly_to_exp (for step 2) and exp_to_poly (for step 3). To see this + * in action, look at the CALC_S macro. As additional wrinkles, note that + * one of my operands is always a constant, so the poly_to_exp lookup on it + * is done in advance; I included the original values in the comments so + * readers can have some chance of recognizing that this *is* the RS matrix + * from the Twofish paper. I've only included the table entries I actually + * need; I never do a lookup on a variable input of zero and the biggest + * exponents I'll ever see are 254 (variable) and 237 (constant), so they'll + * never sum to more than 491. I'm repeating part of the exp_to_poly table + * so that I don't have to do mod-255 reduction in the exponent arithmetic. + * Since I know my constant operands are never zero, I only have to worry + * about zero values in the variable operand, and I do it with a simple + * conditional branch. I know conditionals are expensive, but I couldn't + * see a non-horrible way of avoiding them, and I did manage to group the + * statements so that each if covers four group multiplications. 
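+ *
+ * As a purely illustrative sketch (not part of this file's code), a general
+ * multiply over this field via the two tables could look like:
+ *
+ *	static u8 gf_mul(u8 p, u8 q)
+ *	{
+ *		if (p == 0 || q == 0)
+ *			return 0;
+ *		return exp_to_poly[(poly_to_exp[p - 1] +
+ *				    poly_to_exp[q - 1]) % 255];
+ *	}
+ *
+ * CALC_S below can drop the % 255 precisely because of the repeated tail of
+ * exp_to_poly described above: its bounded exponents never push a sum past
+ * the end of the table.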
*/ + +static const u8 poly_to_exp[255] = { + 0x00, 0x01, 0x17, 0x02, 0x2E, 0x18, 0x53, 0x03, 0x6A, 0x2F, 0x93, 0x19, + 0x34, 0x54, 0x45, 0x04, 0x5C, 0x6B, 0xB6, 0x30, 0xA6, 0x94, 0x4B, 0x1A, + 0x8C, 0x35, 0x81, 0x55, 0xAA, 0x46, 0x0D, 0x05, 0x24, 0x5D, 0x87, 0x6C, + 0x9B, 0xB7, 0xC1, 0x31, 0x2B, 0xA7, 0xA3, 0x95, 0x98, 0x4C, 0xCA, 0x1B, + 0xE6, 0x8D, 0x73, 0x36, 0xCD, 0x82, 0x12, 0x56, 0x62, 0xAB, 0xF0, 0x47, + 0x4F, 0x0E, 0xBD, 0x06, 0xD4, 0x25, 0xD2, 0x5E, 0x27, 0x88, 0x66, 0x6D, + 0xD6, 0x9C, 0x79, 0xB8, 0x08, 0xC2, 0xDF, 0x32, 0x68, 0x2C, 0xFD, 0xA8, + 0x8A, 0xA4, 0x5A, 0x96, 0x29, 0x99, 0x22, 0x4D, 0x60, 0xCB, 0xE4, 0x1C, + 0x7B, 0xE7, 0x3B, 0x8E, 0x9E, 0x74, 0xF4, 0x37, 0xD8, 0xCE, 0xF9, 0x83, + 0x6F, 0x13, 0xB2, 0x57, 0xE1, 0x63, 0xDC, 0xAC, 0xC4, 0xF1, 0xAF, 0x48, + 0x0A, 0x50, 0x42, 0x0F, 0xBA, 0xBE, 0xC7, 0x07, 0xDE, 0xD5, 0x78, 0x26, + 0x65, 0xD3, 0xD1, 0x5F, 0xE3, 0x28, 0x21, 0x89, 0x59, 0x67, 0xFC, 0x6E, + 0xB1, 0xD7, 0xF8, 0x9D, 0xF3, 0x7A, 0x3A, 0xB9, 0xC6, 0x09, 0x41, 0xC3, + 0xAE, 0xE0, 0xDB, 0x33, 0x44, 0x69, 0x92, 0x2D, 0x52, 0xFE, 0x16, 0xA9, + 0x0C, 0x8B, 0x80, 0xA5, 0x4A, 0x5B, 0xB5, 0x97, 0xC9, 0x2A, 0xA2, 0x9A, + 0xC0, 0x23, 0x86, 0x4E, 0xBC, 0x61, 0xEF, 0xCC, 0x11, 0xE5, 0x72, 0x1D, + 0x3D, 0x7C, 0xEB, 0xE8, 0xE9, 0x3C, 0xEA, 0x8F, 0x7D, 0x9F, 0xEC, 0x75, + 0x1E, 0xF5, 0x3E, 0x38, 0xF6, 0xD9, 0x3F, 0xCF, 0x76, 0xFA, 0x1F, 0x84, + 0xA0, 0x70, 0xED, 0x14, 0x90, 0xB3, 0x7E, 0x58, 0xFB, 0xE2, 0x20, 0x64, + 0xD0, 0xDD, 0x77, 0xAD, 0xDA, 0xC5, 0x40, 0xF2, 0x39, 0xB0, 0xF7, 0x49, + 0xB4, 0x0B, 0x7F, 0x51, 0x15, 0x43, 0x91, 0x10, 0x71, 0xBB, 0xEE, 0xBF, + 0x85, 0xC8, 0xA1 +}; + +static const u8 exp_to_poly[492] = { + 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x4D, 0x9A, 0x79, 0xF2, + 0xA9, 0x1F, 0x3E, 0x7C, 0xF8, 0xBD, 0x37, 0x6E, 0xDC, 0xF5, 0xA7, 0x03, + 0x06, 0x0C, 0x18, 0x30, 0x60, 0xC0, 0xCD, 0xD7, 0xE3, 0x8B, 0x5B, 0xB6, + 0x21, 0x42, 0x84, 0x45, 0x8A, 0x59, 0xB2, 0x29, 0x52, 0xA4, 0x05, 0x0A, + 0x14, 0x28, 0x50, 0xA0, 0x0D, 0x1A, 0x34, 0x68, 0xD0, 0xED, 0x97, 0x63, + 0xC6, 0xC1, 0xCF, 0xD3, 0xEB, 0x9B, 0x7B, 0xF6, 0xA1, 0x0F, 0x1E, 0x3C, + 0x78, 0xF0, 0xAD, 0x17, 0x2E, 0x5C, 0xB8, 0x3D, 0x7A, 0xF4, 0xA5, 0x07, + 0x0E, 0x1C, 0x38, 0x70, 0xE0, 0x8D, 0x57, 0xAE, 0x11, 0x22, 0x44, 0x88, + 0x5D, 0xBA, 0x39, 0x72, 0xE4, 0x85, 0x47, 0x8E, 0x51, 0xA2, 0x09, 0x12, + 0x24, 0x48, 0x90, 0x6D, 0xDA, 0xF9, 0xBF, 0x33, 0x66, 0xCC, 0xD5, 0xE7, + 0x83, 0x4B, 0x96, 0x61, 0xC2, 0xC9, 0xDF, 0xF3, 0xAB, 0x1B, 0x36, 0x6C, + 0xD8, 0xFD, 0xB7, 0x23, 0x46, 0x8C, 0x55, 0xAA, 0x19, 0x32, 0x64, 0xC8, + 0xDD, 0xF7, 0xA3, 0x0B, 0x16, 0x2C, 0x58, 0xB0, 0x2D, 0x5A, 0xB4, 0x25, + 0x4A, 0x94, 0x65, 0xCA, 0xD9, 0xFF, 0xB3, 0x2B, 0x56, 0xAC, 0x15, 0x2A, + 0x54, 0xA8, 0x1D, 0x3A, 0x74, 0xE8, 0x9D, 0x77, 0xEE, 0x91, 0x6F, 0xDE, + 0xF1, 0xAF, 0x13, 0x26, 0x4C, 0x98, 0x7D, 0xFA, 0xB9, 0x3F, 0x7E, 0xFC, + 0xB5, 0x27, 0x4E, 0x9C, 0x75, 0xEA, 0x99, 0x7F, 0xFE, 0xB1, 0x2F, 0x5E, + 0xBC, 0x35, 0x6A, 0xD4, 0xE5, 0x87, 0x43, 0x86, 0x41, 0x82, 0x49, 0x92, + 0x69, 0xD2, 0xE9, 0x9F, 0x73, 0xE6, 0x81, 0x4F, 0x9E, 0x71, 0xE2, 0x89, + 0x5F, 0xBE, 0x31, 0x62, 0xC4, 0xC5, 0xC7, 0xC3, 0xCB, 0xDB, 0xFB, 0xBB, + 0x3B, 0x76, 0xEC, 0x95, 0x67, 0xCE, 0xD1, 0xEF, 0x93, 0x6B, 0xD6, 0xE1, + 0x8F, 0x53, 0xA6, 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x4D, + 0x9A, 0x79, 0xF2, 0xA9, 0x1F, 0x3E, 0x7C, 0xF8, 0xBD, 0x37, 0x6E, 0xDC, + 0xF5, 0xA7, 0x03, 0x06, 0x0C, 0x18, 0x30, 0x60, 0xC0, 0xCD, 0xD7, 0xE3, + 0x8B, 0x5B, 0xB6, 0x21, 0x42, 0x84, 0x45, 0x8A, 0x59, 0xB2, 0x29, 0x52, + 0xA4, 0x05, 0x0A, 0x14, 0x28, 0x50, 0xA0, 
0x0D, 0x1A, 0x34, 0x68, 0xD0, + 0xED, 0x97, 0x63, 0xC6, 0xC1, 0xCF, 0xD3, 0xEB, 0x9B, 0x7B, 0xF6, 0xA1, + 0x0F, 0x1E, 0x3C, 0x78, 0xF0, 0xAD, 0x17, 0x2E, 0x5C, 0xB8, 0x3D, 0x7A, + 0xF4, 0xA5, 0x07, 0x0E, 0x1C, 0x38, 0x70, 0xE0, 0x8D, 0x57, 0xAE, 0x11, + 0x22, 0x44, 0x88, 0x5D, 0xBA, 0x39, 0x72, 0xE4, 0x85, 0x47, 0x8E, 0x51, + 0xA2, 0x09, 0x12, 0x24, 0x48, 0x90, 0x6D, 0xDA, 0xF9, 0xBF, 0x33, 0x66, + 0xCC, 0xD5, 0xE7, 0x83, 0x4B, 0x96, 0x61, 0xC2, 0xC9, 0xDF, 0xF3, 0xAB, + 0x1B, 0x36, 0x6C, 0xD8, 0xFD, 0xB7, 0x23, 0x46, 0x8C, 0x55, 0xAA, 0x19, + 0x32, 0x64, 0xC8, 0xDD, 0xF7, 0xA3, 0x0B, 0x16, 0x2C, 0x58, 0xB0, 0x2D, + 0x5A, 0xB4, 0x25, 0x4A, 0x94, 0x65, 0xCA, 0xD9, 0xFF, 0xB3, 0x2B, 0x56, + 0xAC, 0x15, 0x2A, 0x54, 0xA8, 0x1D, 0x3A, 0x74, 0xE8, 0x9D, 0x77, 0xEE, + 0x91, 0x6F, 0xDE, 0xF1, 0xAF, 0x13, 0x26, 0x4C, 0x98, 0x7D, 0xFA, 0xB9, + 0x3F, 0x7E, 0xFC, 0xB5, 0x27, 0x4E, 0x9C, 0x75, 0xEA, 0x99, 0x7F, 0xFE, + 0xB1, 0x2F, 0x5E, 0xBC, 0x35, 0x6A, 0xD4, 0xE5, 0x87, 0x43, 0x86, 0x41, + 0x82, 0x49, 0x92, 0x69, 0xD2, 0xE9, 0x9F, 0x73, 0xE6, 0x81, 0x4F, 0x9E, + 0x71, 0xE2, 0x89, 0x5F, 0xBE, 0x31, 0x62, 0xC4, 0xC5, 0xC7, 0xC3, 0xCB +}; + + +/* The table constants are indices of + * S-box entries, preprocessed through q0 and q1. */ +static const u8 calc_sb_tbl[512] = { + 0xA9, 0x75, 0x67, 0xF3, 0xB3, 0xC6, 0xE8, 0xF4, + 0x04, 0xDB, 0xFD, 0x7B, 0xA3, 0xFB, 0x76, 0xC8, + 0x9A, 0x4A, 0x92, 0xD3, 0x80, 0xE6, 0x78, 0x6B, + 0xE4, 0x45, 0xDD, 0x7D, 0xD1, 0xE8, 0x38, 0x4B, + 0x0D, 0xD6, 0xC6, 0x32, 0x35, 0xD8, 0x98, 0xFD, + 0x18, 0x37, 0xF7, 0x71, 0xEC, 0xF1, 0x6C, 0xE1, + 0x43, 0x30, 0x75, 0x0F, 0x37, 0xF8, 0x26, 0x1B, + 0xFA, 0x87, 0x13, 0xFA, 0x94, 0x06, 0x48, 0x3F, + 0xF2, 0x5E, 0xD0, 0xBA, 0x8B, 0xAE, 0x30, 0x5B, + 0x84, 0x8A, 0x54, 0x00, 0xDF, 0xBC, 0x23, 0x9D, + 0x19, 0x6D, 0x5B, 0xC1, 0x3D, 0xB1, 0x59, 0x0E, + 0xF3, 0x80, 0xAE, 0x5D, 0xA2, 0xD2, 0x82, 0xD5, + 0x63, 0xA0, 0x01, 0x84, 0x83, 0x07, 0x2E, 0x14, + 0xD9, 0xB5, 0x51, 0x90, 0x9B, 0x2C, 0x7C, 0xA3, + 0xA6, 0xB2, 0xEB, 0x73, 0xA5, 0x4C, 0xBE, 0x54, + 0x16, 0x92, 0x0C, 0x74, 0xE3, 0x36, 0x61, 0x51, + 0xC0, 0x38, 0x8C, 0xB0, 0x3A, 0xBD, 0xF5, 0x5A, + 0x73, 0xFC, 0x2C, 0x60, 0x25, 0x62, 0x0B, 0x96, + 0xBB, 0x6C, 0x4E, 0x42, 0x89, 0xF7, 0x6B, 0x10, + 0x53, 0x7C, 0x6A, 0x28, 0xB4, 0x27, 0xF1, 0x8C, + 0xE1, 0x13, 0xE6, 0x95, 0xBD, 0x9C, 0x45, 0xC7, + 0xE2, 0x24, 0xF4, 0x46, 0xB6, 0x3B, 0x66, 0x70, + 0xCC, 0xCA, 0x95, 0xE3, 0x03, 0x85, 0x56, 0xCB, + 0xD4, 0x11, 0x1C, 0xD0, 0x1E, 0x93, 0xD7, 0xB8, + 0xFB, 0xA6, 0xC3, 0x83, 0x8E, 0x20, 0xB5, 0xFF, + 0xE9, 0x9F, 0xCF, 0x77, 0xBF, 0xC3, 0xBA, 0xCC, + 0xEA, 0x03, 0x77, 0x6F, 0x39, 0x08, 0xAF, 0xBF, + 0x33, 0x40, 0xC9, 0xE7, 0x62, 0x2B, 0x71, 0xE2, + 0x81, 0x79, 0x79, 0x0C, 0x09, 0xAA, 0xAD, 0x82, + 0x24, 0x41, 0xCD, 0x3A, 0xF9, 0xEA, 0xD8, 0xB9, + 0xE5, 0xE4, 0xC5, 0x9A, 0xB9, 0xA4, 0x4D, 0x97, + 0x44, 0x7E, 0x08, 0xDA, 0x86, 0x7A, 0xE7, 0x17, + 0xA1, 0x66, 0x1D, 0x94, 0xAA, 0xA1, 0xED, 0x1D, + 0x06, 0x3D, 0x70, 0xF0, 0xB2, 0xDE, 0xD2, 0xB3, + 0x41, 0x0B, 0x7B, 0x72, 0xA0, 0xA7, 0x11, 0x1C, + 0x31, 0xEF, 0xC2, 0xD1, 0x27, 0x53, 0x90, 0x3E, + 0x20, 0x8F, 0xF6, 0x33, 0x60, 0x26, 0xFF, 0x5F, + 0x96, 0xEC, 0x5C, 0x76, 0xB1, 0x2A, 0xAB, 0x49, + 0x9E, 0x81, 0x9C, 0x88, 0x52, 0xEE, 0x1B, 0x21, + 0x5F, 0xC4, 0x93, 0x1A, 0x0A, 0xEB, 0xEF, 0xD9, + 0x91, 0xC5, 0x85, 0x39, 0x49, 0x99, 0xEE, 0xCD, + 0x2D, 0xAD, 0x4F, 0x31, 0x8F, 0x8B, 0x3B, 0x01, + 0x47, 0x18, 0x87, 0x23, 0x6D, 0xDD, 0x46, 0x1F, + 0xD6, 0x4E, 0x3E, 0x2D, 0x69, 0xF9, 0x64, 0x48, + 0x2A, 0x4F, 0xCE, 0xF2, 0xCB, 0x65, 0x2F, 0x8E, + 0xFC, 0x78, 0x97, 0x5C, 
0x05, 0x58, 0x7A, 0x19, + 0xAC, 0x8D, 0x7F, 0xE5, 0xD5, 0x98, 0x1A, 0x57, + 0x4B, 0x67, 0x0E, 0x7F, 0xA7, 0x05, 0x5A, 0x64, + 0x28, 0xAF, 0x14, 0x63, 0x3F, 0xB6, 0x29, 0xFE, + 0x88, 0xF5, 0x3C, 0xB7, 0x4C, 0x3C, 0x02, 0xA5, + 0xB8, 0xCE, 0xDA, 0xE9, 0xB0, 0x68, 0x17, 0x44, + 0x55, 0xE0, 0x1F, 0x4D, 0x8A, 0x43, 0x7D, 0x69, + 0x57, 0x29, 0xC7, 0x2E, 0x8D, 0xAC, 0x74, 0x15, + 0xB7, 0x59, 0xC4, 0xA8, 0x9F, 0x0A, 0x72, 0x9E, + 0x7E, 0x6E, 0x15, 0x47, 0x22, 0xDF, 0x12, 0x34, + 0x58, 0x35, 0x07, 0x6A, 0x99, 0xCF, 0x34, 0xDC, + 0x6E, 0x22, 0x50, 0xC9, 0xDE, 0xC0, 0x68, 0x9B, + 0x65, 0x89, 0xBC, 0xD4, 0xDB, 0xED, 0xF8, 0xAB, + 0xC8, 0x12, 0xA8, 0xA2, 0x2B, 0x0D, 0x40, 0x52, + 0xDC, 0xBB, 0xFE, 0x02, 0x32, 0x2F, 0xA4, 0xA9, + 0xCA, 0xD7, 0x10, 0x61, 0x21, 0x1E, 0xF0, 0xB4, + 0xD3, 0x50, 0x5D, 0x04, 0x0F, 0xF6, 0x00, 0xC2, + 0x6F, 0x16, 0x9D, 0x25, 0x36, 0x86, 0x42, 0x56, + 0x4A, 0x55, 0x5E, 0x09, 0xC1, 0xBE, 0xE0, 0x91 +}; + +/* Macro to perform one column of the RS matrix multiplication. The + * parameters a, b, c, and d are the four bytes of output; i is the index + * of the key bytes, and w, x, y, and z, are the column of constants from + * the RS matrix, preprocessed through the poly_to_exp table. */ + +#define CALC_S(a, b, c, d, i, w, x, y, z) \ + if (key[i]) { \ + tmp = poly_to_exp[key[i] - 1]; \ + (a) ^= exp_to_poly[tmp + (w)]; \ + (b) ^= exp_to_poly[tmp + (x)]; \ + (c) ^= exp_to_poly[tmp + (y)]; \ + (d) ^= exp_to_poly[tmp + (z)]; \ + } + +/* Macros to calculate the key-dependent S-boxes for a 128-bit key using + * the S vector from CALC_S. CALC_SB_2 computes a single entry in all + * four S-boxes, where i is the index of the entry to compute, and a and b + * are the index numbers preprocessed through the q0 and q1 tables + * respectively. */ + +#define CALC_SB_2(i, a, b) \ + ctx->s[0][i] = mds[0][q0[(a) ^ sa] ^ se]; \ + ctx->s[1][i] = mds[1][q0[(b) ^ sb] ^ sf]; \ + ctx->s[2][i] = mds[2][q1[(a) ^ sc] ^ sg]; \ + ctx->s[3][i] = mds[3][q1[(b) ^ sd] ^ sh] + +/* Macro exactly like CALC_SB_2, but for 192-bit keys. */ + +#define CALC_SB192_2(i, a, b) \ + ctx->s[0][i] = mds[0][q0[q0[(b) ^ sa] ^ se] ^ si]; \ + ctx->s[1][i] = mds[1][q0[q1[(b) ^ sb] ^ sf] ^ sj]; \ + ctx->s[2][i] = mds[2][q1[q0[(a) ^ sc] ^ sg] ^ sk]; \ + ctx->s[3][i] = mds[3][q1[q1[(a) ^ sd] ^ sh] ^ sl]; + +/* Macro exactly like CALC_SB_2, but for 256-bit keys. */ + +#define CALC_SB256_2(i, a, b) \ + ctx->s[0][i] = mds[0][q0[q0[q1[(b) ^ sa] ^ se] ^ si] ^ sm]; \ + ctx->s[1][i] = mds[1][q0[q1[q1[(a) ^ sb] ^ sf] ^ sj] ^ sn]; \ + ctx->s[2][i] = mds[2][q1[q0[q0[(a) ^ sc] ^ sg] ^ sk] ^ so]; \ + ctx->s[3][i] = mds[3][q1[q1[q0[(b) ^ sd] ^ sh] ^ sl] ^ sp]; + +/* Macros to calculate the whitening and round subkeys. CALC_K_2 computes the + * last two stages of the h() function for a given index (either 2i or 2i+1). + * a, b, c, and d are the four bytes going into the last two stages. For + * 128-bit keys, this is the entire h() function and a and c are the index + * preprocessed through q0 and q1 respectively; for longer keys they are the + * output of previous stages. j is the index of the first key byte to use. + * CALC_K computes a pair of subkeys for 128-bit Twofish, by calling CALC_K_2 + * twice, doing the Pseudo-Hadamard Transform, and doing the necessary + * rotations. Its parameters are: a, the array to write the results into, + * j, the index of the first output entry, k and l, the preprocessed indices + * for index 2i, and m and n, the preprocessed indices for index 2i+1. 
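+ * (Schematically, CALC_K computes x = h(2i) and y = h(2i+1), rotates y left
+ * by 8 bits, applies the PHT x += y; y += x, and stores ctx->a[j] = x and
+ * ctx->a[j + 1] = rol32(y, 9); this is simply the macro below spelled out
+ * for reference.)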
+ * CALC_K192_2 expands CALC_K_2 to handle 192-bit keys, by doing an + * additional lookup-and-XOR stage. The parameters a, b, c and d are the + * four bytes going into the last three stages. For 192-bit keys, c = d + * are the index preprocessed through q0, and a = b are the index + * preprocessed through q1; j is the index of the first key byte to use. + * CALC_K192 is identical to CALC_K but for using the CALC_K192_2 macro + * instead of CALC_K_2. + * CALC_K256_2 expands CALC_K192_2 to handle 256-bit keys, by doing an + * additional lookup-and-XOR stage. The parameters a and b are the index + * preprocessed through q0 and q1 respectively; j is the index of the first + * key byte to use. CALC_K256 is identical to CALC_K but for using the + * CALC_K256_2 macro instead of CALC_K_2. */ + +#define CALC_K_2(a, b, c, d, j) \ + mds[0][q0[a ^ key[(j) + 8]] ^ key[j]] \ + ^ mds[1][q0[b ^ key[(j) + 9]] ^ key[(j) + 1]] \ + ^ mds[2][q1[c ^ key[(j) + 10]] ^ key[(j) + 2]] \ + ^ mds[3][q1[d ^ key[(j) + 11]] ^ key[(j) + 3]] + +#define CALC_K(a, j, k, l, m, n) \ + x = CALC_K_2 (k, l, k, l, 0); \ + y = CALC_K_2 (m, n, m, n, 4); \ + y = rol32(y, 8); \ + x += y; y += x; ctx->a[j] = x; \ + ctx->a[(j) + 1] = rol32(y, 9) + +#define CALC_K192_2(a, b, c, d, j) \ + CALC_K_2 (q0[a ^ key[(j) + 16]], \ + q1[b ^ key[(j) + 17]], \ + q0[c ^ key[(j) + 18]], \ + q1[d ^ key[(j) + 19]], j) + +#define CALC_K192(a, j, k, l, m, n) \ + x = CALC_K192_2 (l, l, k, k, 0); \ + y = CALC_K192_2 (n, n, m, m, 4); \ + y = rol32(y, 8); \ + x += y; y += x; ctx->a[j] = x; \ + ctx->a[(j) + 1] = rol32(y, 9) + +#define CALC_K256_2(a, b, j) \ + CALC_K192_2 (q1[b ^ key[(j) + 24]], \ + q1[a ^ key[(j) + 25]], \ + q0[a ^ key[(j) + 26]], \ + q0[b ^ key[(j) + 27]], j) + +#define CALC_K256(a, j, k, l, m, n) \ + x = CALC_K256_2 (k, l, 0); \ + y = CALC_K256_2 (m, n, 4); \ + y = rol32(y, 8); \ + x += y; y += x; ctx->a[j] = x; \ + ctx->a[(j) + 1] = rol32(y, 9) + +/* Perform the key setup. */ +int __twofish_setkey(struct twofish_ctx *ctx, const u8 *key, + unsigned int key_len, u32 *flags) +{ + int i, j, k; + + /* Temporaries for CALC_K. */ + u32 x, y; + + /* The S vector used to key the S-boxes, split up into individual bytes. + * 128-bit keys use only sa through sh; 256-bit use all of them. */ + u8 sa = 0, sb = 0, sc = 0, sd = 0, se = 0, sf = 0, sg = 0, sh = 0; + u8 si = 0, sj = 0, sk = 0, sl = 0, sm = 0, sn = 0, so = 0, sp = 0; + + /* Temporary for CALC_S. */ + u8 tmp; + + /* Check key length. */ + if (key_len % 8) + { + *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN; + return -EINVAL; /* unsupported key length */ + } + + /* Compute the first two words of the S vector. The magic numbers are + * the entries of the RS matrix, preprocessed through poly_to_exp. The + * numbers in the comments are the original (polynomial form) matrix + * entries. 
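+ *
+ * Put differently (an illustrative reading, not extra code), each CALC_S
+ * line below adds one key byte's contribution to the matrix-vector product
+ * S = RS * key over GF(2^8):
+ *
+ *	sa ^= gf_mul(key[i], rs0);  sb ^= gf_mul(key[i], rs1);
+ *	sc ^= gf_mul(key[i], rs2);  sd ^= gf_mul(key[i], rs3);
+ *
+ * where rs0..rs3 stand for the polynomial-form RS entries shown in that
+ * line's trailing comment and gf_mul stands for a GF(2^8) multiply done
+ * through the exp/log tables described above.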
*/ + CALC_S (sa, sb, sc, sd, 0, 0x00, 0x2D, 0x01, 0x2D); /* 01 A4 02 A4 */ + CALC_S (sa, sb, sc, sd, 1, 0x2D, 0xA4, 0x44, 0x8A); /* A4 56 A1 55 */ + CALC_S (sa, sb, sc, sd, 2, 0x8A, 0xD5, 0xBF, 0xD1); /* 55 82 FC 87 */ + CALC_S (sa, sb, sc, sd, 3, 0xD1, 0x7F, 0x3D, 0x99); /* 87 F3 C1 5A */ + CALC_S (sa, sb, sc, sd, 4, 0x99, 0x46, 0x66, 0x96); /* 5A 1E 47 58 */ + CALC_S (sa, sb, sc, sd, 5, 0x96, 0x3C, 0x5B, 0xED); /* 58 C6 AE DB */ + CALC_S (sa, sb, sc, sd, 6, 0xED, 0x37, 0x4F, 0xE0); /* DB 68 3D 9E */ + CALC_S (sa, sb, sc, sd, 7, 0xE0, 0xD0, 0x8C, 0x17); /* 9E E5 19 03 */ + CALC_S (se, sf, sg, sh, 8, 0x00, 0x2D, 0x01, 0x2D); /* 01 A4 02 A4 */ + CALC_S (se, sf, sg, sh, 9, 0x2D, 0xA4, 0x44, 0x8A); /* A4 56 A1 55 */ + CALC_S (se, sf, sg, sh, 10, 0x8A, 0xD5, 0xBF, 0xD1); /* 55 82 FC 87 */ + CALC_S (se, sf, sg, sh, 11, 0xD1, 0x7F, 0x3D, 0x99); /* 87 F3 C1 5A */ + CALC_S (se, sf, sg, sh, 12, 0x99, 0x46, 0x66, 0x96); /* 5A 1E 47 58 */ + CALC_S (se, sf, sg, sh, 13, 0x96, 0x3C, 0x5B, 0xED); /* 58 C6 AE DB */ + CALC_S (se, sf, sg, sh, 14, 0xED, 0x37, 0x4F, 0xE0); /* DB 68 3D 9E */ + CALC_S (se, sf, sg, sh, 15, 0xE0, 0xD0, 0x8C, 0x17); /* 9E E5 19 03 */ + + if (key_len == 24 || key_len == 32) { /* 192- or 256-bit key */ + /* Calculate the third word of the S vector */ + CALC_S (si, sj, sk, sl, 16, 0x00, 0x2D, 0x01, 0x2D); /* 01 A4 02 A4 */ + CALC_S (si, sj, sk, sl, 17, 0x2D, 0xA4, 0x44, 0x8A); /* A4 56 A1 55 */ + CALC_S (si, sj, sk, sl, 18, 0x8A, 0xD5, 0xBF, 0xD1); /* 55 82 FC 87 */ + CALC_S (si, sj, sk, sl, 19, 0xD1, 0x7F, 0x3D, 0x99); /* 87 F3 C1 5A */ + CALC_S (si, sj, sk, sl, 20, 0x99, 0x46, 0x66, 0x96); /* 5A 1E 47 58 */ + CALC_S (si, sj, sk, sl, 21, 0x96, 0x3C, 0x5B, 0xED); /* 58 C6 AE DB */ + CALC_S (si, sj, sk, sl, 22, 0xED, 0x37, 0x4F, 0xE0); /* DB 68 3D 9E */ + CALC_S (si, sj, sk, sl, 23, 0xE0, 0xD0, 0x8C, 0x17); /* 9E E5 19 03 */ + } + + if (key_len == 32) { /* 256-bit key */ + /* Calculate the fourth word of the S vector */ + CALC_S (sm, sn, so, sp, 24, 0x00, 0x2D, 0x01, 0x2D); /* 01 A4 02 A4 */ + CALC_S (sm, sn, so, sp, 25, 0x2D, 0xA4, 0x44, 0x8A); /* A4 56 A1 55 */ + CALC_S (sm, sn, so, sp, 26, 0x8A, 0xD5, 0xBF, 0xD1); /* 55 82 FC 87 */ + CALC_S (sm, sn, so, sp, 27, 0xD1, 0x7F, 0x3D, 0x99); /* 87 F3 C1 5A */ + CALC_S (sm, sn, so, sp, 28, 0x99, 0x46, 0x66, 0x96); /* 5A 1E 47 58 */ + CALC_S (sm, sn, so, sp, 29, 0x96, 0x3C, 0x5B, 0xED); /* 58 C6 AE DB */ + CALC_S (sm, sn, so, sp, 30, 0xED, 0x37, 0x4F, 0xE0); /* DB 68 3D 9E */ + CALC_S (sm, sn, so, sp, 31, 0xE0, 0xD0, 0x8C, 0x17); /* 9E E5 19 03 */ + + /* Compute the S-boxes. */ + for ( i = j = 0, k = 1; i < 256; i++, j += 2, k += 2 ) { + CALC_SB256_2( i, calc_sb_tbl[j], calc_sb_tbl[k] ); + } + + /* CALC_K256/CALC_K192/CALC_K loops were unrolled. + * Unrolling produced x2.5 more code (+18k on i386), + * and speeded up key setup by 7%: + * unrolled: twofish_setkey/sec: 41128 + * loop: twofish_setkey/sec: 38148 + * CALC_K256: ~100 insns each + * CALC_K192: ~90 insns + * CALC_K: ~70 insns + */ + /* Calculate whitening and round subkeys */ + for ( i = 0; i < 8; i += 2 ) { + CALC_K256 (w, i, q0[i], q1[i], q0[i+1], q1[i+1]); + } + for ( i = 0; i < 32; i += 2 ) { + CALC_K256 (k, i, q0[i+8], q1[i+8], q0[i+9], q1[i+9]); + } + } else if (key_len == 24) { /* 192-bit key */ + /* Compute the S-boxes. 
*/ + for ( i = j = 0, k = 1; i < 256; i++, j += 2, k += 2 ) { + CALC_SB192_2( i, calc_sb_tbl[j], calc_sb_tbl[k] ); + } + + /* Calculate whitening and round subkeys */ + for ( i = 0; i < 8; i += 2 ) { + CALC_K192 (w, i, q0[i], q1[i], q0[i+1], q1[i+1]); + } + for ( i = 0; i < 32; i += 2 ) { + CALC_K192 (k, i, q0[i+8], q1[i+8], q0[i+9], q1[i+9]); + } + } else { /* 128-bit key */ + /* Compute the S-boxes. */ + for ( i = j = 0, k = 1; i < 256; i++, j += 2, k += 2 ) { + CALC_SB_2( i, calc_sb_tbl[j], calc_sb_tbl[k] ); + } + + /* Calculate whitening and round subkeys */ + for ( i = 0; i < 8; i += 2 ) { + CALC_K (w, i, q0[i], q1[i], q0[i+1], q1[i+1]); + } + for ( i = 0; i < 32; i += 2 ) { + CALC_K (k, i, q0[i+8], q1[i+8], q0[i+9], q1[i+9]); + } + } + + return 0; +} +EXPORT_SYMBOL_GPL(__twofish_setkey); + +int twofish_setkey(struct crypto_tfm *tfm, const u8 *key, unsigned int key_len) +{ + return __twofish_setkey(crypto_tfm_ctx(tfm), key, key_len, + &tfm->crt_flags); +} +EXPORT_SYMBOL_GPL(twofish_setkey); + +MODULE_LICENSE("GPL"); +MODULE_DESCRIPTION("Twofish cipher common functions"); diff --git a/crypto/twofish_generic.c b/crypto/twofish_generic.c new file mode 100644 index 00000000..1f07b843 --- /dev/null +++ b/crypto/twofish_generic.c @@ -0,0 +1,215 @@ +/* + * Twofish for CryptoAPI + * + * Originally Twofish for GPG + * By Matthew Skala <mskala@ansuz.sooke.bc.ca>, July 26, 1998 + * 256-bit key length added March 20, 1999 + * Some modifications to reduce the text size by Werner Koch, April, 1998 + * Ported to the kerneli patch by Marc Mutz <Marc@Mutz.com> + * Ported to CryptoAPI by Colin Slater <hoho@tacomeat.net> + * + * The original author has disclaimed all copyright interest in this + * code and thus put it in the public domain. The subsequent authors + * have put this under the GNU General Public License. + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 2 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 + * USA + * + * This code is a "clean room" implementation, written from the paper + * _Twofish: A 128-Bit Block Cipher_ by Bruce Schneier, John Kelsey, + * Doug Whiting, David Wagner, Chris Hall, and Niels Ferguson, available + * through http://www.counterpane.com/twofish.html + * + * For background information on multiplication in finite fields, used for + * the matrix operations in the key schedule, see the book _Contemporary + * Abstract Algebra_ by Joseph A. Gallian, especially chapter 22 in the + * Third Edition. + */ + +#include <asm/byteorder.h> +#include <crypto/twofish.h> +#include <linux/module.h> +#include <linux/init.h> +#include <linux/types.h> +#include <linux/errno.h> +#include <linux/crypto.h> +#include <linux/bitops.h> + +/* Macros to compute the g() function in the encryption and decryption + * rounds. G1 is the straight g() function; G2 includes the 8-bit + * rotation for the high 32-bit word. 
+ */
+
+#define G1(a) \
+	(ctx->s[0][(a) & 0xFF]) ^ (ctx->s[1][((a) >> 8) & 0xFF]) \
+	^ (ctx->s[2][((a) >> 16) & 0xFF]) ^ (ctx->s[3][(a) >> 24])
+
+#define G2(b) \
+	(ctx->s[1][(b) & 0xFF]) ^ (ctx->s[2][((b) >> 8) & 0xFF]) \
+	^ (ctx->s[3][((b) >> 16) & 0xFF]) ^ (ctx->s[0][(b) >> 24])
+
+/* Encryption and decryption Feistel rounds. Each one calls the two g()
+ * macros, does the PHT, and performs the XOR and the appropriate bit
+ * rotations. The parameters are the round number (used to select subkeys),
+ * and the four 32-bit chunks of the text. */
+
+#define ENCROUND(n, a, b, c, d) \
+	x = G1 (a); y = G2 (b); \
+	x += y; y += x + ctx->k[2 * (n) + 1]; \
+	(c) ^= x + ctx->k[2 * (n)]; \
+	(c) = ror32((c), 1); \
+	(d) = rol32((d), 1) ^ y
+
+#define DECROUND(n, a, b, c, d) \
+	x = G1 (a); y = G2 (b); \
+	x += y; y += x; \
+	(d) ^= y + ctx->k[2 * (n) + 1]; \
+	(d) = ror32((d), 1); \
+	(c) = rol32((c), 1); \
+	(c) ^= (x + ctx->k[2 * (n)])
+
+/* Encryption and decryption cycles; each one is simply two Feistel rounds
+ * with the 32-bit chunks re-ordered to simulate the "swap" */
+
+#define ENCCYCLE(n) \
+	ENCROUND (2 * (n), a, b, c, d); \
+	ENCROUND (2 * (n) + 1, c, d, a, b)
+
+#define DECCYCLE(n) \
+	DECROUND (2 * (n) + 1, c, d, a, b); \
+	DECROUND (2 * (n), a, b, c, d)
+
+/* Macros to convert the input and output bytes into 32-bit words,
+ * and simultaneously perform the whitening step. INPACK packs word
+ * number n into the variable named by x, using whitening subkey number m.
+ * OUTUNPACK unpacks word number n from the variable named by x, using
+ * whitening subkey number m. */
+
+#define INPACK(n, x, m) \
+	x = le32_to_cpu(src[n]) ^ ctx->w[m]
+
+#define OUTUNPACK(n, x, m) \
+	x ^= ctx->w[m]; \
+	dst[n] = cpu_to_le32(x)
+
+
+/* Encrypt one block. in and out may be the same. */
+static void twofish_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
+{
+	struct twofish_ctx *ctx = crypto_tfm_ctx(tfm);
+	const __le32 *src = (const __le32 *)in;
+	__le32 *dst = (__le32 *)out;
+
+	/* The four 32-bit chunks of the text. */
+	u32 a, b, c, d;
+
+	/* Temporaries used by the round function. */
+	u32 x, y;
+
+	/* Input whitening and packing. */
+	INPACK (0, a, 0);
+	INPACK (1, b, 1);
+	INPACK (2, c, 2);
+	INPACK (3, d, 3);
+
+	/* Encryption Feistel cycles. */
+	ENCCYCLE (0);
+	ENCCYCLE (1);
+	ENCCYCLE (2);
+	ENCCYCLE (3);
+	ENCCYCLE (4);
+	ENCCYCLE (5);
+	ENCCYCLE (6);
+	ENCCYCLE (7);
+
+	/* Output whitening and unpacking. */
+	OUTUNPACK (0, c, 4);
+	OUTUNPACK (1, d, 5);
+	OUTUNPACK (2, a, 6);
+	OUTUNPACK (3, b, 7);
+
+}
+
+/* Decrypt one block. in and out may be the same. */
+static void twofish_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
+{
+	struct twofish_ctx *ctx = crypto_tfm_ctx(tfm);
+	const __le32 *src = (const __le32 *)in;
+	__le32 *dst = (__le32 *)out;
+
+	/* The four 32-bit chunks of the text. */
+	u32 a, b, c, d;
+
+	/* Temporaries used by the round function. */
+	u32 x, y;
+
+	/* Input whitening and packing. */
+	INPACK (0, c, 4);
+	INPACK (1, d, 5);
+	INPACK (2, a, 6);
+	INPACK (3, b, 7);
+
+	/* Decryption Feistel cycles. */
+	DECCYCLE (7);
+	DECCYCLE (6);
+	DECCYCLE (5);
+	DECCYCLE (4);
+	DECCYCLE (3);
+	DECCYCLE (2);
+	DECCYCLE (1);
+	DECCYCLE (0);
+
+	/* Output whitening and unpacking. */
*/ + OUTUNPACK (0, a, 0); + OUTUNPACK (1, b, 1); + OUTUNPACK (2, c, 2); + OUTUNPACK (3, d, 3); + +} + +static struct crypto_alg alg = { + .cra_name = "twofish", + .cra_driver_name = "twofish-generic", + .cra_priority = 100, + .cra_flags = CRYPTO_ALG_TYPE_CIPHER, + .cra_blocksize = TF_BLOCK_SIZE, + .cra_ctxsize = sizeof(struct twofish_ctx), + .cra_alignmask = 3, + .cra_module = THIS_MODULE, + .cra_list = LIST_HEAD_INIT(alg.cra_list), + .cra_u = { .cipher = { + .cia_min_keysize = TF_MIN_KEY_SIZE, + .cia_max_keysize = TF_MAX_KEY_SIZE, + .cia_setkey = twofish_setkey, + .cia_encrypt = twofish_encrypt, + .cia_decrypt = twofish_decrypt } } +}; + +static int __init twofish_mod_init(void) +{ + return crypto_register_alg(&alg); +} + +static void __exit twofish_mod_fini(void) +{ + crypto_unregister_alg(&alg); +} + +module_init(twofish_mod_init); +module_exit(twofish_mod_fini); + +MODULE_LICENSE("GPL"); +MODULE_DESCRIPTION ("Twofish Cipher Algorithm"); +MODULE_ALIAS("twofish"); diff --git a/crypto/vmac.c b/crypto/vmac.c new file mode 100644 index 00000000..4243905b --- /dev/null +++ b/crypto/vmac.c @@ -0,0 +1,676 @@ +/* + * Modified to interface to the Linux kernel + * Copyright (c) 2009, Intel Corporation. + * + * This program is free software; you can redistribute it and/or modify it + * under the terms and conditions of the GNU General Public License, + * version 2, as published by the Free Software Foundation. + * + * This program is distributed in the hope it will be useful, but WITHOUT + * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or + * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for + * more details. + * + * You should have received a copy of the GNU General Public License along with + * this program; if not, write to the Free Software Foundation, Inc., 59 Temple + * Place - Suite 330, Boston, MA 02111-1307 USA. + */ + +/* -------------------------------------------------------------------------- + * VMAC and VHASH Implementation by Ted Krovetz (tdk@acm.org) and Wei Dai. + * This implementation is herby placed in the public domain. + * The authors offers no warranty. Use at your own risk. + * Please send bug reports to the authors. + * Last modified: 17 APR 08, 1700 PDT + * ----------------------------------------------------------------------- */ + +#include <linux/init.h> +#include <linux/types.h> +#include <linux/crypto.h> +#include <linux/module.h> +#include <linux/scatterlist.h> +#include <asm/byteorder.h> +#include <crypto/scatterwalk.h> +#include <crypto/vmac.h> +#include <crypto/internal/hash.h> + +/* + * Constants and masks + */ +#define UINT64_C(x) x##ULL +const u64 p64 = UINT64_C(0xfffffffffffffeff); /* 2^64 - 257 prime */ +const u64 m62 = UINT64_C(0x3fffffffffffffff); /* 62-bit mask */ +const u64 m63 = UINT64_C(0x7fffffffffffffff); /* 63-bit mask */ +const u64 m64 = UINT64_C(0xffffffffffffffff); /* 64-bit mask */ +const u64 mpoly = UINT64_C(0x1fffffff1fffffff); /* Poly key mask */ + +#define pe64_to_cpup le64_to_cpup /* Prefer little endian */ + +#ifdef __LITTLE_ENDIAN +#define INDEX_HIGH 1 +#define INDEX_LOW 0 +#else +#define INDEX_HIGH 0 +#define INDEX_LOW 1 +#endif + +/* + * The following routines are used in this implementation. They are + * written via macros to simulate zero-overhead call-by-reference. 
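+ * All of them operate on (high, low) pairs of u64 values passed as the
+ * first two macro arguments; e.g. MUL64(hi, lo, x, y) leaves the full
+ * 128-bit product of x and y in hi:lo (hi, lo, x and y are illustrative
+ * names only, not variables used below).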
+ * + * MUL64: 64x64->128-bit multiplication + * PMUL64: assumes top bits cleared on inputs + * ADD128: 128x128->128-bit addition + */ + +#define ADD128(rh, rl, ih, il) \ + do { \ + u64 _il = (il); \ + (rl) += (_il); \ + if ((rl) < (_il)) \ + (rh)++; \ + (rh) += (ih); \ + } while (0) + +#define MUL32(i1, i2) ((u64)(u32)(i1)*(u32)(i2)) + +#define PMUL64(rh, rl, i1, i2) /* Assumes m doesn't overflow */ \ + do { \ + u64 _i1 = (i1), _i2 = (i2); \ + u64 m = MUL32(_i1, _i2>>32) + MUL32(_i1>>32, _i2); \ + rh = MUL32(_i1>>32, _i2>>32); \ + rl = MUL32(_i1, _i2); \ + ADD128(rh, rl, (m >> 32), (m << 32)); \ + } while (0) + +#define MUL64(rh, rl, i1, i2) \ + do { \ + u64 _i1 = (i1), _i2 = (i2); \ + u64 m1 = MUL32(_i1, _i2>>32); \ + u64 m2 = MUL32(_i1>>32, _i2); \ + rh = MUL32(_i1>>32, _i2>>32); \ + rl = MUL32(_i1, _i2); \ + ADD128(rh, rl, (m1 >> 32), (m1 << 32)); \ + ADD128(rh, rl, (m2 >> 32), (m2 << 32)); \ + } while (0) + +/* + * For highest performance the L1 NH and L2 polynomial hashes should be + * carefully implemented to take advantage of one's target architecture. + * Here these two hash functions are defined multiple time; once for + * 64-bit architectures, once for 32-bit SSE2 architectures, and once + * for the rest (32-bit) architectures. + * For each, nh_16 *must* be defined (works on multiples of 16 bytes). + * Optionally, nh_vmac_nhbytes can be defined (for multiples of + * VMAC_NHBYTES), and nh_16_2 and nh_vmac_nhbytes_2 (versions that do two + * NH computations at once). + */ + +#ifdef CONFIG_64BIT + +#define nh_16(mp, kp, nw, rh, rl) \ + do { \ + int i; u64 th, tl; \ + rh = rl = 0; \ + for (i = 0; i < nw; i += 2) { \ + MUL64(th, tl, pe64_to_cpup((mp)+i)+(kp)[i], \ + pe64_to_cpup((mp)+i+1)+(kp)[i+1]); \ + ADD128(rh, rl, th, tl); \ + } \ + } while (0) + +#define nh_16_2(mp, kp, nw, rh, rl, rh1, rl1) \ + do { \ + int i; u64 th, tl; \ + rh1 = rl1 = rh = rl = 0; \ + for (i = 0; i < nw; i += 2) { \ + MUL64(th, tl, pe64_to_cpup((mp)+i)+(kp)[i], \ + pe64_to_cpup((mp)+i+1)+(kp)[i+1]); \ + ADD128(rh, rl, th, tl); \ + MUL64(th, tl, pe64_to_cpup((mp)+i)+(kp)[i+2], \ + pe64_to_cpup((mp)+i+1)+(kp)[i+3]); \ + ADD128(rh1, rl1, th, tl); \ + } \ + } while (0) + +#if (VMAC_NHBYTES >= 64) /* These versions do 64-bytes of message at a time */ +#define nh_vmac_nhbytes(mp, kp, nw, rh, rl) \ + do { \ + int i; u64 th, tl; \ + rh = rl = 0; \ + for (i = 0; i < nw; i += 8) { \ + MUL64(th, tl, pe64_to_cpup((mp)+i)+(kp)[i], \ + pe64_to_cpup((mp)+i+1)+(kp)[i+1]); \ + ADD128(rh, rl, th, tl); \ + MUL64(th, tl, pe64_to_cpup((mp)+i+2)+(kp)[i+2], \ + pe64_to_cpup((mp)+i+3)+(kp)[i+3]); \ + ADD128(rh, rl, th, tl); \ + MUL64(th, tl, pe64_to_cpup((mp)+i+4)+(kp)[i+4], \ + pe64_to_cpup((mp)+i+5)+(kp)[i+5]); \ + ADD128(rh, rl, th, tl); \ + MUL64(th, tl, pe64_to_cpup((mp)+i+6)+(kp)[i+6], \ + pe64_to_cpup((mp)+i+7)+(kp)[i+7]); \ + ADD128(rh, rl, th, tl); \ + } \ + } while (0) + +#define nh_vmac_nhbytes_2(mp, kp, nw, rh, rl, rh1, rl1) \ + do { \ + int i; u64 th, tl; \ + rh1 = rl1 = rh = rl = 0; \ + for (i = 0; i < nw; i += 8) { \ + MUL64(th, tl, pe64_to_cpup((mp)+i)+(kp)[i], \ + pe64_to_cpup((mp)+i+1)+(kp)[i+1]); \ + ADD128(rh, rl, th, tl); \ + MUL64(th, tl, pe64_to_cpup((mp)+i)+(kp)[i+2], \ + pe64_to_cpup((mp)+i+1)+(kp)[i+3]); \ + ADD128(rh1, rl1, th, tl); \ + MUL64(th, tl, pe64_to_cpup((mp)+i+2)+(kp)[i+2], \ + pe64_to_cpup((mp)+i+3)+(kp)[i+3]); \ + ADD128(rh, rl, th, tl); \ + MUL64(th, tl, pe64_to_cpup((mp)+i+2)+(kp)[i+4], \ + pe64_to_cpup((mp)+i+3)+(kp)[i+5]); \ + ADD128(rh1, rl1, th, tl); \ + MUL64(th, tl, 
pe64_to_cpup((mp)+i+4)+(kp)[i+4], \ + pe64_to_cpup((mp)+i+5)+(kp)[i+5]); \ + ADD128(rh, rl, th, tl); \ + MUL64(th, tl, pe64_to_cpup((mp)+i+4)+(kp)[i+6], \ + pe64_to_cpup((mp)+i+5)+(kp)[i+7]); \ + ADD128(rh1, rl1, th, tl); \ + MUL64(th, tl, pe64_to_cpup((mp)+i+6)+(kp)[i+6], \ + pe64_to_cpup((mp)+i+7)+(kp)[i+7]); \ + ADD128(rh, rl, th, tl); \ + MUL64(th, tl, pe64_to_cpup((mp)+i+6)+(kp)[i+8], \ + pe64_to_cpup((mp)+i+7)+(kp)[i+9]); \ + ADD128(rh1, rl1, th, tl); \ + } \ + } while (0) +#endif + +#define poly_step(ah, al, kh, kl, mh, ml) \ + do { \ + u64 t1h, t1l, t2h, t2l, t3h, t3l, z = 0; \ + /* compute ab*cd, put bd into result registers */ \ + PMUL64(t3h, t3l, al, kh); \ + PMUL64(t2h, t2l, ah, kl); \ + PMUL64(t1h, t1l, ah, 2*kh); \ + PMUL64(ah, al, al, kl); \ + /* add 2 * ac to result */ \ + ADD128(ah, al, t1h, t1l); \ + /* add together ad + bc */ \ + ADD128(t2h, t2l, t3h, t3l); \ + /* now (ah,al), (t2l,2*t2h) need summing */ \ + /* first add the high registers, carrying into t2h */ \ + ADD128(t2h, ah, z, t2l); \ + /* double t2h and add top bit of ah */ \ + t2h = 2 * t2h + (ah >> 63); \ + ah &= m63; \ + /* now add the low registers */ \ + ADD128(ah, al, mh, ml); \ + ADD128(ah, al, z, t2h); \ + } while (0) + +#else /* ! CONFIG_64BIT */ + +#ifndef nh_16 +#define nh_16(mp, kp, nw, rh, rl) \ + do { \ + u64 t1, t2, m1, m2, t; \ + int i; \ + rh = rl = t = 0; \ + for (i = 0; i < nw; i += 2) { \ + t1 = pe64_to_cpup(mp+i) + kp[i]; \ + t2 = pe64_to_cpup(mp+i+1) + kp[i+1]; \ + m2 = MUL32(t1 >> 32, t2); \ + m1 = MUL32(t1, t2 >> 32); \ + ADD128(rh, rl, MUL32(t1 >> 32, t2 >> 32), \ + MUL32(t1, t2)); \ + rh += (u64)(u32)(m1 >> 32) \ + + (u32)(m2 >> 32); \ + t += (u64)(u32)m1 + (u32)m2; \ + } \ + ADD128(rh, rl, (t >> 32), (t << 32)); \ + } while (0) +#endif + +static void poly_step_func(u64 *ahi, u64 *alo, + const u64 *kh, const u64 *kl, + const u64 *mh, const u64 *ml) +{ +#define a0 (*(((u32 *)alo)+INDEX_LOW)) +#define a1 (*(((u32 *)alo)+INDEX_HIGH)) +#define a2 (*(((u32 *)ahi)+INDEX_LOW)) +#define a3 (*(((u32 *)ahi)+INDEX_HIGH)) +#define k0 (*(((u32 *)kl)+INDEX_LOW)) +#define k1 (*(((u32 *)kl)+INDEX_HIGH)) +#define k2 (*(((u32 *)kh)+INDEX_LOW)) +#define k3 (*(((u32 *)kh)+INDEX_HIGH)) + + u64 p, q, t; + u32 t2; + + p = MUL32(a3, k3); + p += p; + p += *(u64 *)mh; + p += MUL32(a0, k2); + p += MUL32(a1, k1); + p += MUL32(a2, k0); + t = (u32)(p); + p >>= 32; + p += MUL32(a0, k3); + p += MUL32(a1, k2); + p += MUL32(a2, k1); + p += MUL32(a3, k0); + t |= ((u64)((u32)p & 0x7fffffff)) << 32; + p >>= 31; + p += (u64)(((u32 *)ml)[INDEX_LOW]); + p += MUL32(a0, k0); + q = MUL32(a1, k3); + q += MUL32(a2, k2); + q += MUL32(a3, k1); + q += q; + p += q; + t2 = (u32)(p); + p >>= 32; + p += (u64)(((u32 *)ml)[INDEX_HIGH]); + p += MUL32(a0, k1); + p += MUL32(a1, k0); + q = MUL32(a2, k3); + q += MUL32(a3, k2); + q += q; + p += q; + *(u64 *)(alo) = (p << 32) | t2; + p >>= 32; + *(u64 *)(ahi) = p + t; + +#undef a0 +#undef a1 +#undef a2 +#undef a3 +#undef k0 +#undef k1 +#undef k2 +#undef k3 +} + +#define poly_step(ah, al, kh, kl, mh, ml) \ + poly_step_func(&(ah), &(al), &(kh), &(kl), &(mh), &(ml)) + +#endif /* end of specialized NH and poly definitions */ + +/* At least nh_16 is defined. 
Defined others as needed here */ +#ifndef nh_16_2 +#define nh_16_2(mp, kp, nw, rh, rl, rh2, rl2) \ + do { \ + nh_16(mp, kp, nw, rh, rl); \ + nh_16(mp, ((kp)+2), nw, rh2, rl2); \ + } while (0) +#endif +#ifndef nh_vmac_nhbytes +#define nh_vmac_nhbytes(mp, kp, nw, rh, rl) \ + nh_16(mp, kp, nw, rh, rl) +#endif +#ifndef nh_vmac_nhbytes_2 +#define nh_vmac_nhbytes_2(mp, kp, nw, rh, rl, rh2, rl2) \ + do { \ + nh_vmac_nhbytes(mp, kp, nw, rh, rl); \ + nh_vmac_nhbytes(mp, ((kp)+2), nw, rh2, rl2); \ + } while (0) +#endif + +static void vhash_abort(struct vmac_ctx *ctx) +{ + ctx->polytmp[0] = ctx->polykey[0] ; + ctx->polytmp[1] = ctx->polykey[1] ; + ctx->first_block_processed = 0; +} + +static u64 l3hash(u64 p1, u64 p2, u64 k1, u64 k2, u64 len) +{ + u64 rh, rl, t, z = 0; + + /* fully reduce (p1,p2)+(len,0) mod p127 */ + t = p1 >> 63; + p1 &= m63; + ADD128(p1, p2, len, t); + /* At this point, (p1,p2) is at most 2^127+(len<<64) */ + t = (p1 > m63) + ((p1 == m63) && (p2 == m64)); + ADD128(p1, p2, z, t); + p1 &= m63; + + /* compute (p1,p2)/(2^64-2^32) and (p1,p2)%(2^64-2^32) */ + t = p1 + (p2 >> 32); + t += (t >> 32); + t += (u32)t > 0xfffffffeu; + p1 += (t >> 32); + p2 += (p1 << 32); + + /* compute (p1+k1)%p64 and (p2+k2)%p64 */ + p1 += k1; + p1 += (0 - (p1 < k1)) & 257; + p2 += k2; + p2 += (0 - (p2 < k2)) & 257; + + /* compute (p1+k1)*(p2+k2)%p64 */ + MUL64(rh, rl, p1, p2); + t = rh >> 56; + ADD128(t, rl, z, rh); + rh <<= 8; + ADD128(t, rl, z, rh); + t += t << 8; + rl += t; + rl += (0 - (rl < t)) & 257; + rl += (0 - (rl > p64-1)) & 257; + return rl; +} + +static void vhash_update(const unsigned char *m, + unsigned int mbytes, /* Pos multiple of VMAC_NHBYTES */ + struct vmac_ctx *ctx) +{ + u64 rh, rl, *mptr; + const u64 *kptr = (u64 *)ctx->nhkey; + int i; + u64 ch, cl; + u64 pkh = ctx->polykey[0]; + u64 pkl = ctx->polykey[1]; + + mptr = (u64 *)m; + i = mbytes / VMAC_NHBYTES; /* Must be non-zero */ + + ch = ctx->polytmp[0]; + cl = ctx->polytmp[1]; + + if (!ctx->first_block_processed) { + ctx->first_block_processed = 1; + nh_vmac_nhbytes(mptr, kptr, VMAC_NHBYTES/8, rh, rl); + rh &= m62; + ADD128(ch, cl, rh, rl); + mptr += (VMAC_NHBYTES/sizeof(u64)); + i--; + } + + while (i--) { + nh_vmac_nhbytes(mptr, kptr, VMAC_NHBYTES/8, rh, rl); + rh &= m62; + poly_step(ch, cl, pkh, pkl, rh, rl); + mptr += (VMAC_NHBYTES/sizeof(u64)); + } + + ctx->polytmp[0] = ch; + ctx->polytmp[1] = cl; +} + +static u64 vhash(unsigned char m[], unsigned int mbytes, + u64 *tagl, struct vmac_ctx *ctx) +{ + u64 rh, rl, *mptr; + const u64 *kptr = (u64 *)ctx->nhkey; + int i, remaining; + u64 ch, cl; + u64 pkh = ctx->polykey[0]; + u64 pkl = ctx->polykey[1]; + + mptr = (u64 *)m; + i = mbytes / VMAC_NHBYTES; + remaining = mbytes % VMAC_NHBYTES; + + if (ctx->first_block_processed) { + ch = ctx->polytmp[0]; + cl = ctx->polytmp[1]; + } else if (i) { + nh_vmac_nhbytes(mptr, kptr, VMAC_NHBYTES/8, ch, cl); + ch &= m62; + ADD128(ch, cl, pkh, pkl); + mptr += (VMAC_NHBYTES/sizeof(u64)); + i--; + } else if (remaining) { + nh_16(mptr, kptr, 2*((remaining+15)/16), ch, cl); + ch &= m62; + ADD128(ch, cl, pkh, pkl); + mptr += (VMAC_NHBYTES/sizeof(u64)); + goto do_l3; + } else {/* Empty String */ + ch = pkh; cl = pkl; + goto do_l3; + } + + while (i--) { + nh_vmac_nhbytes(mptr, kptr, VMAC_NHBYTES/8, rh, rl); + rh &= m62; + poly_step(ch, cl, pkh, pkl, rh, rl); + mptr += (VMAC_NHBYTES/sizeof(u64)); + } + if (remaining) { + nh_16(mptr, kptr, 2*((remaining+15)/16), rh, rl); + rh &= m62; + poly_step(ch, cl, pkh, pkl, rh, rl); + } + +do_l3: + vhash_abort(ctx); + 
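+	/* l3hash() folds in the length of the trailing partial block in
+	 * bits, so the leftover byte count is converted to bits first. */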
remaining *= 8; + return l3hash(ch, cl, ctx->l3key[0], ctx->l3key[1], remaining); +} + +static u64 vmac(unsigned char m[], unsigned int mbytes, + unsigned char n[16], u64 *tagl, + struct vmac_ctx_t *ctx) +{ + u64 *in_n, *out_p; + u64 p, h; + int i; + + in_n = ctx->__vmac_ctx.cached_nonce; + out_p = ctx->__vmac_ctx.cached_aes; + + i = n[15] & 1; + if ((*(u64 *)(n+8) != in_n[1]) || (*(u64 *)(n) != in_n[0])) { + in_n[0] = *(u64 *)(n); + in_n[1] = *(u64 *)(n+8); + ((unsigned char *)in_n)[15] &= 0xFE; + crypto_cipher_encrypt_one(ctx->child, + (unsigned char *)out_p, (unsigned char *)in_n); + + ((unsigned char *)in_n)[15] |= (unsigned char)(1-i); + } + p = be64_to_cpup(out_p + i); + h = vhash(m, mbytes, (u64 *)0, &ctx->__vmac_ctx); + return le64_to_cpu(p + h); +} + +static int vmac_set_key(unsigned char user_key[], struct vmac_ctx_t *ctx) +{ + u64 in[2] = {0}, out[2]; + unsigned i; + int err = 0; + + err = crypto_cipher_setkey(ctx->child, user_key, VMAC_KEY_LEN); + if (err) + return err; + + /* Fill nh key */ + ((unsigned char *)in)[0] = 0x80; + for (i = 0; i < sizeof(ctx->__vmac_ctx.nhkey)/8; i += 2) { + crypto_cipher_encrypt_one(ctx->child, + (unsigned char *)out, (unsigned char *)in); + ctx->__vmac_ctx.nhkey[i] = be64_to_cpup(out); + ctx->__vmac_ctx.nhkey[i+1] = be64_to_cpup(out+1); + ((unsigned char *)in)[15] += 1; + } + + /* Fill poly key */ + ((unsigned char *)in)[0] = 0xC0; + in[1] = 0; + for (i = 0; i < sizeof(ctx->__vmac_ctx.polykey)/8; i += 2) { + crypto_cipher_encrypt_one(ctx->child, + (unsigned char *)out, (unsigned char *)in); + ctx->__vmac_ctx.polytmp[i] = + ctx->__vmac_ctx.polykey[i] = + be64_to_cpup(out) & mpoly; + ctx->__vmac_ctx.polytmp[i+1] = + ctx->__vmac_ctx.polykey[i+1] = + be64_to_cpup(out+1) & mpoly; + ((unsigned char *)in)[15] += 1; + } + + /* Fill ip key */ + ((unsigned char *)in)[0] = 0xE0; + in[1] = 0; + for (i = 0; i < sizeof(ctx->__vmac_ctx.l3key)/8; i += 2) { + do { + crypto_cipher_encrypt_one(ctx->child, + (unsigned char *)out, (unsigned char *)in); + ctx->__vmac_ctx.l3key[i] = be64_to_cpup(out); + ctx->__vmac_ctx.l3key[i+1] = be64_to_cpup(out+1); + ((unsigned char *)in)[15] += 1; + } while (ctx->__vmac_ctx.l3key[i] >= p64 + || ctx->__vmac_ctx.l3key[i+1] >= p64); + } + + /* Invalidate nonce/aes cache and reset other elements */ + ctx->__vmac_ctx.cached_nonce[0] = (u64)-1; /* Ensure illegal nonce */ + ctx->__vmac_ctx.cached_nonce[1] = (u64)0; /* Ensure illegal nonce */ + ctx->__vmac_ctx.first_block_processed = 0; + + return err; +} + +static int vmac_setkey(struct crypto_shash *parent, + const u8 *key, unsigned int keylen) +{ + struct vmac_ctx_t *ctx = crypto_shash_ctx(parent); + + if (keylen != VMAC_KEY_LEN) { + crypto_shash_set_flags(parent, CRYPTO_TFM_RES_BAD_KEY_LEN); + return -EINVAL; + } + + return vmac_set_key((u8 *)key, ctx); +} + +static int vmac_init(struct shash_desc *pdesc) +{ + return 0; +} + +static int vmac_update(struct shash_desc *pdesc, const u8 *p, + unsigned int len) +{ + struct crypto_shash *parent = pdesc->tfm; + struct vmac_ctx_t *ctx = crypto_shash_ctx(parent); + + vhash_update(p, len, &ctx->__vmac_ctx); + + return 0; +} + +static int vmac_final(struct shash_desc *pdesc, u8 *out) +{ + struct crypto_shash *parent = pdesc->tfm; + struct vmac_ctx_t *ctx = crypto_shash_ctx(parent); + vmac_t mac; + u8 nonce[16] = {}; + + mac = vmac(NULL, 0, nonce, NULL, ctx); + memcpy(out, &mac, sizeof(vmac_t)); + memset(&mac, 0, sizeof(vmac_t)); + memset(&ctx->__vmac_ctx, 0, sizeof(struct vmac_ctx)); + return 0; +} + +static int vmac_init_tfm(struct crypto_tfm 
*tfm) +{ + struct crypto_cipher *cipher; + struct crypto_instance *inst = (void *)tfm->__crt_alg; + struct crypto_spawn *spawn = crypto_instance_ctx(inst); + struct vmac_ctx_t *ctx = crypto_tfm_ctx(tfm); + + cipher = crypto_spawn_cipher(spawn); + if (IS_ERR(cipher)) + return PTR_ERR(cipher); + + ctx->child = cipher; + return 0; +} + +static void vmac_exit_tfm(struct crypto_tfm *tfm) +{ + struct vmac_ctx_t *ctx = crypto_tfm_ctx(tfm); + crypto_free_cipher(ctx->child); +} + +static int vmac_create(struct crypto_template *tmpl, struct rtattr **tb) +{ + struct shash_instance *inst; + struct crypto_alg *alg; + int err; + + err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SHASH); + if (err) + return err; + + alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_CIPHER, + CRYPTO_ALG_TYPE_MASK); + if (IS_ERR(alg)) + return PTR_ERR(alg); + + inst = shash_alloc_instance("vmac", alg); + err = PTR_ERR(inst); + if (IS_ERR(inst)) + goto out_put_alg; + + err = crypto_init_spawn(shash_instance_ctx(inst), alg, + shash_crypto_instance(inst), + CRYPTO_ALG_TYPE_MASK); + if (err) + goto out_free_inst; + + inst->alg.base.cra_priority = alg->cra_priority; + inst->alg.base.cra_blocksize = alg->cra_blocksize; + inst->alg.base.cra_alignmask = alg->cra_alignmask; + + inst->alg.digestsize = sizeof(vmac_t); + inst->alg.base.cra_ctxsize = sizeof(struct vmac_ctx_t); + inst->alg.base.cra_init = vmac_init_tfm; + inst->alg.base.cra_exit = vmac_exit_tfm; + + inst->alg.init = vmac_init; + inst->alg.update = vmac_update; + inst->alg.final = vmac_final; + inst->alg.setkey = vmac_setkey; + + err = shash_register_instance(tmpl, inst); + if (err) { +out_free_inst: + shash_free_instance(shash_crypto_instance(inst)); + } + +out_put_alg: + crypto_mod_put(alg); + return err; +} + +static struct crypto_template vmac_tmpl = { + .name = "vmac", + .create = vmac_create, + .free = shash_free_instance, + .module = THIS_MODULE, +}; + +static int __init vmac_module_init(void) +{ + return crypto_register_template(&vmac_tmpl); +} + +static void __exit vmac_module_exit(void) +{ + crypto_unregister_template(&vmac_tmpl); +} + +module_init(vmac_module_init); +module_exit(vmac_module_exit); + +MODULE_LICENSE("GPL"); +MODULE_DESCRIPTION("VMAC hash algorithm"); + diff --git a/crypto/wp512.c b/crypto/wp512.c new file mode 100644 index 00000000..71719a2b --- /dev/null +++ b/crypto/wp512.c @@ -0,0 +1,1204 @@ +/* + * Cryptographic API. + * + * Whirlpool hashing Algorithm + * + * The Whirlpool algorithm was developed by Paulo S. L. M. Barreto and + * Vincent Rijmen. It has been selected as one of cryptographic + * primitives by the NESSIE project http://www.cryptonessie.org/ + * + * The original authors have disclaimed all copyright interest in this + * code and thus put it in the public domain. The subsequent authors + * have put this under the GNU General Public License. + * + * By Aaron Grothe ajgrothe@yahoo.com, August 23, 2004 + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 2 of the License, or + * (at your option) any later version. 
+ * + */ +#include <crypto/internal/hash.h> +#include <linux/init.h> +#include <linux/module.h> +#include <linux/mm.h> +#include <asm/byteorder.h> +#include <linux/types.h> + +#define WP512_DIGEST_SIZE 64 +#define WP384_DIGEST_SIZE 48 +#define WP256_DIGEST_SIZE 32 + +#define WP512_BLOCK_SIZE 64 +#define WP512_LENGTHBYTES 32 + +#define WHIRLPOOL_ROUNDS 10 + +struct wp512_ctx { + u8 bitLength[WP512_LENGTHBYTES]; + u8 buffer[WP512_BLOCK_SIZE]; + int bufferBits; + int bufferPos; + u64 hash[WP512_DIGEST_SIZE/8]; +}; + +/* + * Though Whirlpool is endianness-neutral, the encryption tables are listed + * in BIG-ENDIAN format, which is adopted throughout this implementation + * (but little-endian notation would be equally suitable if consistently + * employed). + */ + +static const u64 C0[256] = { + 0x18186018c07830d8ULL, 0x23238c2305af4626ULL, 0xc6c63fc67ef991b8ULL, + 0xe8e887e8136fcdfbULL, 0x878726874ca113cbULL, 0xb8b8dab8a9626d11ULL, + 0x0101040108050209ULL, 0x4f4f214f426e9e0dULL, 0x3636d836adee6c9bULL, + 0xa6a6a2a6590451ffULL, 0xd2d26fd2debdb90cULL, 0xf5f5f3f5fb06f70eULL, + 0x7979f979ef80f296ULL, 0x6f6fa16f5fcede30ULL, 0x91917e91fcef3f6dULL, + 0x52525552aa07a4f8ULL, 0x60609d6027fdc047ULL, 0xbcbccabc89766535ULL, + 0x9b9b569baccd2b37ULL, 0x8e8e028e048c018aULL, 0xa3a3b6a371155bd2ULL, + 0x0c0c300c603c186cULL, 0x7b7bf17bff8af684ULL, 0x3535d435b5e16a80ULL, + 0x1d1d741de8693af5ULL, 0xe0e0a7e05347ddb3ULL, 0xd7d77bd7f6acb321ULL, + 0xc2c22fc25eed999cULL, 0x2e2eb82e6d965c43ULL, 0x4b4b314b627a9629ULL, + 0xfefedffea321e15dULL, 0x575741578216aed5ULL, 0x15155415a8412abdULL, + 0x7777c1779fb6eee8ULL, 0x3737dc37a5eb6e92ULL, 0xe5e5b3e57b56d79eULL, + 0x9f9f469f8cd92313ULL, 0xf0f0e7f0d317fd23ULL, 0x4a4a354a6a7f9420ULL, + 0xdada4fda9e95a944ULL, 0x58587d58fa25b0a2ULL, 0xc9c903c906ca8fcfULL, + 0x2929a429558d527cULL, 0x0a0a280a5022145aULL, 0xb1b1feb1e14f7f50ULL, + 0xa0a0baa0691a5dc9ULL, 0x6b6bb16b7fdad614ULL, 0x85852e855cab17d9ULL, + 0xbdbdcebd8173673cULL, 0x5d5d695dd234ba8fULL, 0x1010401080502090ULL, + 0xf4f4f7f4f303f507ULL, 0xcbcb0bcb16c08bddULL, 0x3e3ef83eedc67cd3ULL, + 0x0505140528110a2dULL, 0x676781671fe6ce78ULL, 0xe4e4b7e47353d597ULL, + 0x27279c2725bb4e02ULL, 0x4141194132588273ULL, 0x8b8b168b2c9d0ba7ULL, + 0xa7a7a6a7510153f6ULL, 0x7d7de97dcf94fab2ULL, 0x95956e95dcfb3749ULL, + 0xd8d847d88e9fad56ULL, 0xfbfbcbfb8b30eb70ULL, 0xeeee9fee2371c1cdULL, + 0x7c7ced7cc791f8bbULL, 0x6666856617e3cc71ULL, 0xdddd53dda68ea77bULL, + 0x17175c17b84b2eafULL, 0x4747014702468e45ULL, 0x9e9e429e84dc211aULL, + 0xcaca0fca1ec589d4ULL, 0x2d2db42d75995a58ULL, 0xbfbfc6bf9179632eULL, + 0x07071c07381b0e3fULL, 0xadad8ead012347acULL, 0x5a5a755aea2fb4b0ULL, + 0x838336836cb51befULL, 0x3333cc3385ff66b6ULL, 0x636391633ff2c65cULL, + 0x02020802100a0412ULL, 0xaaaa92aa39384993ULL, 0x7171d971afa8e2deULL, + 0xc8c807c80ecf8dc6ULL, 0x19196419c87d32d1ULL, 0x494939497270923bULL, + 0xd9d943d9869aaf5fULL, 0xf2f2eff2c31df931ULL, 0xe3e3abe34b48dba8ULL, + 0x5b5b715be22ab6b9ULL, 0x88881a8834920dbcULL, 0x9a9a529aa4c8293eULL, + 0x262698262dbe4c0bULL, 0x3232c8328dfa64bfULL, 0xb0b0fab0e94a7d59ULL, + 0xe9e983e91b6acff2ULL, 0x0f0f3c0f78331e77ULL, 0xd5d573d5e6a6b733ULL, + 0x80803a8074ba1df4ULL, 0xbebec2be997c6127ULL, 0xcdcd13cd26de87ebULL, + 0x3434d034bde46889ULL, 0x48483d487a759032ULL, 0xffffdbffab24e354ULL, + 0x7a7af57af78ff48dULL, 0x90907a90f4ea3d64ULL, 0x5f5f615fc23ebe9dULL, + 0x202080201da0403dULL, 0x6868bd6867d5d00fULL, 0x1a1a681ad07234caULL, + 0xaeae82ae192c41b7ULL, 0xb4b4eab4c95e757dULL, 0x54544d549a19a8ceULL, + 0x93937693ece53b7fULL, 0x222288220daa442fULL, 
0x64648d6407e9c863ULL, + 0xf1f1e3f1db12ff2aULL, 0x7373d173bfa2e6ccULL, 0x12124812905a2482ULL, + 0x40401d403a5d807aULL, 0x0808200840281048ULL, 0xc3c32bc356e89b95ULL, + 0xecec97ec337bc5dfULL, 0xdbdb4bdb9690ab4dULL, 0xa1a1bea1611f5fc0ULL, + 0x8d8d0e8d1c830791ULL, 0x3d3df43df5c97ac8ULL, 0x97976697ccf1335bULL, + 0x0000000000000000ULL, 0xcfcf1bcf36d483f9ULL, 0x2b2bac2b4587566eULL, + 0x7676c57697b3ece1ULL, 0x8282328264b019e6ULL, 0xd6d67fd6fea9b128ULL, + 0x1b1b6c1bd87736c3ULL, 0xb5b5eeb5c15b7774ULL, 0xafaf86af112943beULL, + 0x6a6ab56a77dfd41dULL, 0x50505d50ba0da0eaULL, 0x45450945124c8a57ULL, + 0xf3f3ebf3cb18fb38ULL, 0x3030c0309df060adULL, 0xefef9bef2b74c3c4ULL, + 0x3f3ffc3fe5c37edaULL, 0x55554955921caac7ULL, 0xa2a2b2a2791059dbULL, + 0xeaea8fea0365c9e9ULL, 0x656589650fecca6aULL, 0xbabad2bab9686903ULL, + 0x2f2fbc2f65935e4aULL, 0xc0c027c04ee79d8eULL, 0xdede5fdebe81a160ULL, + 0x1c1c701ce06c38fcULL, 0xfdfdd3fdbb2ee746ULL, 0x4d4d294d52649a1fULL, + 0x92927292e4e03976ULL, 0x7575c9758fbceafaULL, 0x06061806301e0c36ULL, + 0x8a8a128a249809aeULL, 0xb2b2f2b2f940794bULL, 0xe6e6bfe66359d185ULL, + 0x0e0e380e70361c7eULL, 0x1f1f7c1ff8633ee7ULL, 0x6262956237f7c455ULL, + 0xd4d477d4eea3b53aULL, 0xa8a89aa829324d81ULL, 0x96966296c4f43152ULL, + 0xf9f9c3f99b3aef62ULL, 0xc5c533c566f697a3ULL, 0x2525942535b14a10ULL, + 0x59597959f220b2abULL, 0x84842a8454ae15d0ULL, 0x7272d572b7a7e4c5ULL, + 0x3939e439d5dd72ecULL, 0x4c4c2d4c5a619816ULL, 0x5e5e655eca3bbc94ULL, + 0x7878fd78e785f09fULL, 0x3838e038ddd870e5ULL, 0x8c8c0a8c14860598ULL, + 0xd1d163d1c6b2bf17ULL, 0xa5a5aea5410b57e4ULL, 0xe2e2afe2434dd9a1ULL, + 0x616199612ff8c24eULL, 0xb3b3f6b3f1457b42ULL, 0x2121842115a54234ULL, + 0x9c9c4a9c94d62508ULL, 0x1e1e781ef0663ceeULL, 0x4343114322528661ULL, + 0xc7c73bc776fc93b1ULL, 0xfcfcd7fcb32be54fULL, 0x0404100420140824ULL, + 0x51515951b208a2e3ULL, 0x99995e99bcc72f25ULL, 0x6d6da96d4fc4da22ULL, + 0x0d0d340d68391a65ULL, 0xfafacffa8335e979ULL, 0xdfdf5bdfb684a369ULL, + 0x7e7ee57ed79bfca9ULL, 0x242490243db44819ULL, 0x3b3bec3bc5d776feULL, + 0xabab96ab313d4b9aULL, 0xcece1fce3ed181f0ULL, 0x1111441188552299ULL, + 0x8f8f068f0c890383ULL, 0x4e4e254e4a6b9c04ULL, 0xb7b7e6b7d1517366ULL, + 0xebeb8beb0b60cbe0ULL, 0x3c3cf03cfdcc78c1ULL, 0x81813e817cbf1ffdULL, + 0x94946a94d4fe3540ULL, 0xf7f7fbf7eb0cf31cULL, 0xb9b9deb9a1676f18ULL, + 0x13134c13985f268bULL, 0x2c2cb02c7d9c5851ULL, 0xd3d36bd3d6b8bb05ULL, + 0xe7e7bbe76b5cd38cULL, 0x6e6ea56e57cbdc39ULL, 0xc4c437c46ef395aaULL, + 0x03030c03180f061bULL, 0x565645568a13acdcULL, 0x44440d441a49885eULL, + 0x7f7fe17fdf9efea0ULL, 0xa9a99ea921374f88ULL, 0x2a2aa82a4d825467ULL, + 0xbbbbd6bbb16d6b0aULL, 0xc1c123c146e29f87ULL, 0x53535153a202a6f1ULL, + 0xdcdc57dcae8ba572ULL, 0x0b0b2c0b58271653ULL, 0x9d9d4e9d9cd32701ULL, + 0x6c6cad6c47c1d82bULL, 0x3131c43195f562a4ULL, 0x7474cd7487b9e8f3ULL, + 0xf6f6fff6e309f115ULL, 0x464605460a438c4cULL, 0xacac8aac092645a5ULL, + 0x89891e893c970fb5ULL, 0x14145014a04428b4ULL, 0xe1e1a3e15b42dfbaULL, + 0x16165816b04e2ca6ULL, 0x3a3ae83acdd274f7ULL, 0x6969b9696fd0d206ULL, + 0x09092409482d1241ULL, 0x7070dd70a7ade0d7ULL, 0xb6b6e2b6d954716fULL, + 0xd0d067d0ceb7bd1eULL, 0xeded93ed3b7ec7d6ULL, 0xcccc17cc2edb85e2ULL, + 0x424215422a578468ULL, 0x98985a98b4c22d2cULL, 0xa4a4aaa4490e55edULL, + 0x2828a0285d885075ULL, 0x5c5c6d5cda31b886ULL, 0xf8f8c7f8933fed6bULL, + 0x8686228644a411c2ULL, +}; + +static const u64 C1[256] = { + 0xd818186018c07830ULL, 0x2623238c2305af46ULL, 0xb8c6c63fc67ef991ULL, + 0xfbe8e887e8136fcdULL, 0xcb878726874ca113ULL, 0x11b8b8dab8a9626dULL, + 0x0901010401080502ULL, 0x0d4f4f214f426e9eULL, 
0x9b3636d836adee6cULL, + 0xffa6a6a2a6590451ULL, 0x0cd2d26fd2debdb9ULL, 0x0ef5f5f3f5fb06f7ULL, + 0x967979f979ef80f2ULL, 0x306f6fa16f5fcedeULL, 0x6d91917e91fcef3fULL, + 0xf852525552aa07a4ULL, 0x4760609d6027fdc0ULL, 0x35bcbccabc897665ULL, + 0x379b9b569baccd2bULL, 0x8a8e8e028e048c01ULL, 0xd2a3a3b6a371155bULL, + 0x6c0c0c300c603c18ULL, 0x847b7bf17bff8af6ULL, 0x803535d435b5e16aULL, + 0xf51d1d741de8693aULL, 0xb3e0e0a7e05347ddULL, 0x21d7d77bd7f6acb3ULL, + 0x9cc2c22fc25eed99ULL, 0x432e2eb82e6d965cULL, 0x294b4b314b627a96ULL, + 0x5dfefedffea321e1ULL, 0xd5575741578216aeULL, 0xbd15155415a8412aULL, + 0xe87777c1779fb6eeULL, 0x923737dc37a5eb6eULL, 0x9ee5e5b3e57b56d7ULL, + 0x139f9f469f8cd923ULL, 0x23f0f0e7f0d317fdULL, 0x204a4a354a6a7f94ULL, + 0x44dada4fda9e95a9ULL, 0xa258587d58fa25b0ULL, 0xcfc9c903c906ca8fULL, + 0x7c2929a429558d52ULL, 0x5a0a0a280a502214ULL, 0x50b1b1feb1e14f7fULL, + 0xc9a0a0baa0691a5dULL, 0x146b6bb16b7fdad6ULL, 0xd985852e855cab17ULL, + 0x3cbdbdcebd817367ULL, 0x8f5d5d695dd234baULL, 0x9010104010805020ULL, + 0x07f4f4f7f4f303f5ULL, 0xddcbcb0bcb16c08bULL, 0xd33e3ef83eedc67cULL, + 0x2d0505140528110aULL, 0x78676781671fe6ceULL, 0x97e4e4b7e47353d5ULL, + 0x0227279c2725bb4eULL, 0x7341411941325882ULL, 0xa78b8b168b2c9d0bULL, + 0xf6a7a7a6a7510153ULL, 0xb27d7de97dcf94faULL, 0x4995956e95dcfb37ULL, + 0x56d8d847d88e9fadULL, 0x70fbfbcbfb8b30ebULL, 0xcdeeee9fee2371c1ULL, + 0xbb7c7ced7cc791f8ULL, 0x716666856617e3ccULL, 0x7bdddd53dda68ea7ULL, + 0xaf17175c17b84b2eULL, 0x454747014702468eULL, 0x1a9e9e429e84dc21ULL, + 0xd4caca0fca1ec589ULL, 0x582d2db42d75995aULL, 0x2ebfbfc6bf917963ULL, + 0x3f07071c07381b0eULL, 0xacadad8ead012347ULL, 0xb05a5a755aea2fb4ULL, + 0xef838336836cb51bULL, 0xb63333cc3385ff66ULL, 0x5c636391633ff2c6ULL, + 0x1202020802100a04ULL, 0x93aaaa92aa393849ULL, 0xde7171d971afa8e2ULL, + 0xc6c8c807c80ecf8dULL, 0xd119196419c87d32ULL, 0x3b49493949727092ULL, + 0x5fd9d943d9869aafULL, 0x31f2f2eff2c31df9ULL, 0xa8e3e3abe34b48dbULL, + 0xb95b5b715be22ab6ULL, 0xbc88881a8834920dULL, 0x3e9a9a529aa4c829ULL, + 0x0b262698262dbe4cULL, 0xbf3232c8328dfa64ULL, 0x59b0b0fab0e94a7dULL, + 0xf2e9e983e91b6acfULL, 0x770f0f3c0f78331eULL, 0x33d5d573d5e6a6b7ULL, + 0xf480803a8074ba1dULL, 0x27bebec2be997c61ULL, 0xebcdcd13cd26de87ULL, + 0x893434d034bde468ULL, 0x3248483d487a7590ULL, 0x54ffffdbffab24e3ULL, + 0x8d7a7af57af78ff4ULL, 0x6490907a90f4ea3dULL, 0x9d5f5f615fc23ebeULL, + 0x3d202080201da040ULL, 0x0f6868bd6867d5d0ULL, 0xca1a1a681ad07234ULL, + 0xb7aeae82ae192c41ULL, 0x7db4b4eab4c95e75ULL, 0xce54544d549a19a8ULL, + 0x7f93937693ece53bULL, 0x2f222288220daa44ULL, 0x6364648d6407e9c8ULL, + 0x2af1f1e3f1db12ffULL, 0xcc7373d173bfa2e6ULL, 0x8212124812905a24ULL, + 0x7a40401d403a5d80ULL, 0x4808082008402810ULL, 0x95c3c32bc356e89bULL, + 0xdfecec97ec337bc5ULL, 0x4ddbdb4bdb9690abULL, 0xc0a1a1bea1611f5fULL, + 0x918d8d0e8d1c8307ULL, 0xc83d3df43df5c97aULL, 0x5b97976697ccf133ULL, + 0x0000000000000000ULL, 0xf9cfcf1bcf36d483ULL, 0x6e2b2bac2b458756ULL, + 0xe17676c57697b3ecULL, 0xe68282328264b019ULL, 0x28d6d67fd6fea9b1ULL, + 0xc31b1b6c1bd87736ULL, 0x74b5b5eeb5c15b77ULL, 0xbeafaf86af112943ULL, + 0x1d6a6ab56a77dfd4ULL, 0xea50505d50ba0da0ULL, 0x5745450945124c8aULL, + 0x38f3f3ebf3cb18fbULL, 0xad3030c0309df060ULL, 0xc4efef9bef2b74c3ULL, + 0xda3f3ffc3fe5c37eULL, 0xc755554955921caaULL, 0xdba2a2b2a2791059ULL, + 0xe9eaea8fea0365c9ULL, 0x6a656589650feccaULL, 0x03babad2bab96869ULL, + 0x4a2f2fbc2f65935eULL, 0x8ec0c027c04ee79dULL, 0x60dede5fdebe81a1ULL, + 0xfc1c1c701ce06c38ULL, 0x46fdfdd3fdbb2ee7ULL, 0x1f4d4d294d52649aULL, + 0x7692927292e4e039ULL, 0xfa7575c9758fbceaULL, 
0x3606061806301e0cULL, + 0xae8a8a128a249809ULL, 0x4bb2b2f2b2f94079ULL, 0x85e6e6bfe66359d1ULL, + 0x7e0e0e380e70361cULL, 0xe71f1f7c1ff8633eULL, 0x556262956237f7c4ULL, + 0x3ad4d477d4eea3b5ULL, 0x81a8a89aa829324dULL, 0x5296966296c4f431ULL, + 0x62f9f9c3f99b3aefULL, 0xa3c5c533c566f697ULL, 0x102525942535b14aULL, + 0xab59597959f220b2ULL, 0xd084842a8454ae15ULL, 0xc57272d572b7a7e4ULL, + 0xec3939e439d5dd72ULL, 0x164c4c2d4c5a6198ULL, 0x945e5e655eca3bbcULL, + 0x9f7878fd78e785f0ULL, 0xe53838e038ddd870ULL, 0x988c8c0a8c148605ULL, + 0x17d1d163d1c6b2bfULL, 0xe4a5a5aea5410b57ULL, 0xa1e2e2afe2434dd9ULL, + 0x4e616199612ff8c2ULL, 0x42b3b3f6b3f1457bULL, 0x342121842115a542ULL, + 0x089c9c4a9c94d625ULL, 0xee1e1e781ef0663cULL, 0x6143431143225286ULL, + 0xb1c7c73bc776fc93ULL, 0x4ffcfcd7fcb32be5ULL, 0x2404041004201408ULL, + 0xe351515951b208a2ULL, 0x2599995e99bcc72fULL, 0x226d6da96d4fc4daULL, + 0x650d0d340d68391aULL, 0x79fafacffa8335e9ULL, 0x69dfdf5bdfb684a3ULL, + 0xa97e7ee57ed79bfcULL, 0x19242490243db448ULL, 0xfe3b3bec3bc5d776ULL, + 0x9aabab96ab313d4bULL, 0xf0cece1fce3ed181ULL, 0x9911114411885522ULL, + 0x838f8f068f0c8903ULL, 0x044e4e254e4a6b9cULL, 0x66b7b7e6b7d15173ULL, + 0xe0ebeb8beb0b60cbULL, 0xc13c3cf03cfdcc78ULL, 0xfd81813e817cbf1fULL, + 0x4094946a94d4fe35ULL, 0x1cf7f7fbf7eb0cf3ULL, 0x18b9b9deb9a1676fULL, + 0x8b13134c13985f26ULL, 0x512c2cb02c7d9c58ULL, 0x05d3d36bd3d6b8bbULL, + 0x8ce7e7bbe76b5cd3ULL, 0x396e6ea56e57cbdcULL, 0xaac4c437c46ef395ULL, + 0x1b03030c03180f06ULL, 0xdc565645568a13acULL, 0x5e44440d441a4988ULL, + 0xa07f7fe17fdf9efeULL, 0x88a9a99ea921374fULL, 0x672a2aa82a4d8254ULL, + 0x0abbbbd6bbb16d6bULL, 0x87c1c123c146e29fULL, 0xf153535153a202a6ULL, + 0x72dcdc57dcae8ba5ULL, 0x530b0b2c0b582716ULL, 0x019d9d4e9d9cd327ULL, + 0x2b6c6cad6c47c1d8ULL, 0xa43131c43195f562ULL, 0xf37474cd7487b9e8ULL, + 0x15f6f6fff6e309f1ULL, 0x4c464605460a438cULL, 0xa5acac8aac092645ULL, + 0xb589891e893c970fULL, 0xb414145014a04428ULL, 0xbae1e1a3e15b42dfULL, + 0xa616165816b04e2cULL, 0xf73a3ae83acdd274ULL, 0x066969b9696fd0d2ULL, + 0x4109092409482d12ULL, 0xd77070dd70a7ade0ULL, 0x6fb6b6e2b6d95471ULL, + 0x1ed0d067d0ceb7bdULL, 0xd6eded93ed3b7ec7ULL, 0xe2cccc17cc2edb85ULL, + 0x68424215422a5784ULL, 0x2c98985a98b4c22dULL, 0xeda4a4aaa4490e55ULL, + 0x752828a0285d8850ULL, 0x865c5c6d5cda31b8ULL, 0x6bf8f8c7f8933fedULL, + 0xc28686228644a411ULL, +}; + +static const u64 C2[256] = { + 0x30d818186018c078ULL, 0x462623238c2305afULL, 0x91b8c6c63fc67ef9ULL, + 0xcdfbe8e887e8136fULL, 0x13cb878726874ca1ULL, 0x6d11b8b8dab8a962ULL, + 0x0209010104010805ULL, 0x9e0d4f4f214f426eULL, 0x6c9b3636d836adeeULL, + 0x51ffa6a6a2a65904ULL, 0xb90cd2d26fd2debdULL, 0xf70ef5f5f3f5fb06ULL, + 0xf2967979f979ef80ULL, 0xde306f6fa16f5fceULL, 0x3f6d91917e91fcefULL, + 0xa4f852525552aa07ULL, 0xc04760609d6027fdULL, 0x6535bcbccabc8976ULL, + 0x2b379b9b569baccdULL, 0x018a8e8e028e048cULL, 0x5bd2a3a3b6a37115ULL, + 0x186c0c0c300c603cULL, 0xf6847b7bf17bff8aULL, 0x6a803535d435b5e1ULL, + 0x3af51d1d741de869ULL, 0xddb3e0e0a7e05347ULL, 0xb321d7d77bd7f6acULL, + 0x999cc2c22fc25eedULL, 0x5c432e2eb82e6d96ULL, 0x96294b4b314b627aULL, + 0xe15dfefedffea321ULL, 0xaed5575741578216ULL, 0x2abd15155415a841ULL, + 0xeee87777c1779fb6ULL, 0x6e923737dc37a5ebULL, 0xd79ee5e5b3e57b56ULL, + 0x23139f9f469f8cd9ULL, 0xfd23f0f0e7f0d317ULL, 0x94204a4a354a6a7fULL, + 0xa944dada4fda9e95ULL, 0xb0a258587d58fa25ULL, 0x8fcfc9c903c906caULL, + 0x527c2929a429558dULL, 0x145a0a0a280a5022ULL, 0x7f50b1b1feb1e14fULL, + 0x5dc9a0a0baa0691aULL, 0xd6146b6bb16b7fdaULL, 0x17d985852e855cabULL, + 0x673cbdbdcebd8173ULL, 0xba8f5d5d695dd234ULL, 
0x2090101040108050ULL, + 0xf507f4f4f7f4f303ULL, 0x8bddcbcb0bcb16c0ULL, 0x7cd33e3ef83eedc6ULL, + 0x0a2d050514052811ULL, 0xce78676781671fe6ULL, 0xd597e4e4b7e47353ULL, + 0x4e0227279c2725bbULL, 0x8273414119413258ULL, 0x0ba78b8b168b2c9dULL, + 0x53f6a7a7a6a75101ULL, 0xfab27d7de97dcf94ULL, 0x374995956e95dcfbULL, + 0xad56d8d847d88e9fULL, 0xeb70fbfbcbfb8b30ULL, 0xc1cdeeee9fee2371ULL, + 0xf8bb7c7ced7cc791ULL, 0xcc716666856617e3ULL, 0xa77bdddd53dda68eULL, + 0x2eaf17175c17b84bULL, 0x8e45474701470246ULL, 0x211a9e9e429e84dcULL, + 0x89d4caca0fca1ec5ULL, 0x5a582d2db42d7599ULL, 0x632ebfbfc6bf9179ULL, + 0x0e3f07071c07381bULL, 0x47acadad8ead0123ULL, 0xb4b05a5a755aea2fULL, + 0x1bef838336836cb5ULL, 0x66b63333cc3385ffULL, 0xc65c636391633ff2ULL, + 0x041202020802100aULL, 0x4993aaaa92aa3938ULL, 0xe2de7171d971afa8ULL, + 0x8dc6c8c807c80ecfULL, 0x32d119196419c87dULL, 0x923b494939497270ULL, + 0xaf5fd9d943d9869aULL, 0xf931f2f2eff2c31dULL, 0xdba8e3e3abe34b48ULL, + 0xb6b95b5b715be22aULL, 0x0dbc88881a883492ULL, 0x293e9a9a529aa4c8ULL, + 0x4c0b262698262dbeULL, 0x64bf3232c8328dfaULL, 0x7d59b0b0fab0e94aULL, + 0xcff2e9e983e91b6aULL, 0x1e770f0f3c0f7833ULL, 0xb733d5d573d5e6a6ULL, + 0x1df480803a8074baULL, 0x6127bebec2be997cULL, 0x87ebcdcd13cd26deULL, + 0x68893434d034bde4ULL, 0x903248483d487a75ULL, 0xe354ffffdbffab24ULL, + 0xf48d7a7af57af78fULL, 0x3d6490907a90f4eaULL, 0xbe9d5f5f615fc23eULL, + 0x403d202080201da0ULL, 0xd00f6868bd6867d5ULL, 0x34ca1a1a681ad072ULL, + 0x41b7aeae82ae192cULL, 0x757db4b4eab4c95eULL, 0xa8ce54544d549a19ULL, + 0x3b7f93937693ece5ULL, 0x442f222288220daaULL, 0xc86364648d6407e9ULL, + 0xff2af1f1e3f1db12ULL, 0xe6cc7373d173bfa2ULL, 0x248212124812905aULL, + 0x807a40401d403a5dULL, 0x1048080820084028ULL, 0x9b95c3c32bc356e8ULL, + 0xc5dfecec97ec337bULL, 0xab4ddbdb4bdb9690ULL, 0x5fc0a1a1bea1611fULL, + 0x07918d8d0e8d1c83ULL, 0x7ac83d3df43df5c9ULL, 0x335b97976697ccf1ULL, + 0x0000000000000000ULL, 0x83f9cfcf1bcf36d4ULL, 0x566e2b2bac2b4587ULL, + 0xece17676c57697b3ULL, 0x19e68282328264b0ULL, 0xb128d6d67fd6fea9ULL, + 0x36c31b1b6c1bd877ULL, 0x7774b5b5eeb5c15bULL, 0x43beafaf86af1129ULL, + 0xd41d6a6ab56a77dfULL, 0xa0ea50505d50ba0dULL, 0x8a5745450945124cULL, + 0xfb38f3f3ebf3cb18ULL, 0x60ad3030c0309df0ULL, 0xc3c4efef9bef2b74ULL, + 0x7eda3f3ffc3fe5c3ULL, 0xaac755554955921cULL, 0x59dba2a2b2a27910ULL, + 0xc9e9eaea8fea0365ULL, 0xca6a656589650fecULL, 0x6903babad2bab968ULL, + 0x5e4a2f2fbc2f6593ULL, 0x9d8ec0c027c04ee7ULL, 0xa160dede5fdebe81ULL, + 0x38fc1c1c701ce06cULL, 0xe746fdfdd3fdbb2eULL, 0x9a1f4d4d294d5264ULL, + 0x397692927292e4e0ULL, 0xeafa7575c9758fbcULL, 0x0c3606061806301eULL, + 0x09ae8a8a128a2498ULL, 0x794bb2b2f2b2f940ULL, 0xd185e6e6bfe66359ULL, + 0x1c7e0e0e380e7036ULL, 0x3ee71f1f7c1ff863ULL, 0xc4556262956237f7ULL, + 0xb53ad4d477d4eea3ULL, 0x4d81a8a89aa82932ULL, 0x315296966296c4f4ULL, + 0xef62f9f9c3f99b3aULL, 0x97a3c5c533c566f6ULL, 0x4a102525942535b1ULL, + 0xb2ab59597959f220ULL, 0x15d084842a8454aeULL, 0xe4c57272d572b7a7ULL, + 0x72ec3939e439d5ddULL, 0x98164c4c2d4c5a61ULL, 0xbc945e5e655eca3bULL, + 0xf09f7878fd78e785ULL, 0x70e53838e038ddd8ULL, 0x05988c8c0a8c1486ULL, + 0xbf17d1d163d1c6b2ULL, 0x57e4a5a5aea5410bULL, 0xd9a1e2e2afe2434dULL, + 0xc24e616199612ff8ULL, 0x7b42b3b3f6b3f145ULL, 0x42342121842115a5ULL, + 0x25089c9c4a9c94d6ULL, 0x3cee1e1e781ef066ULL, 0x8661434311432252ULL, + 0x93b1c7c73bc776fcULL, 0xe54ffcfcd7fcb32bULL, 0x0824040410042014ULL, + 0xa2e351515951b208ULL, 0x2f2599995e99bcc7ULL, 0xda226d6da96d4fc4ULL, + 0x1a650d0d340d6839ULL, 0xe979fafacffa8335ULL, 0xa369dfdf5bdfb684ULL, + 0xfca97e7ee57ed79bULL, 0x4819242490243db4ULL, 
0x76fe3b3bec3bc5d7ULL, + 0x4b9aabab96ab313dULL, 0x81f0cece1fce3ed1ULL, 0x2299111144118855ULL, + 0x03838f8f068f0c89ULL, 0x9c044e4e254e4a6bULL, 0x7366b7b7e6b7d151ULL, + 0xcbe0ebeb8beb0b60ULL, 0x78c13c3cf03cfdccULL, 0x1ffd81813e817cbfULL, + 0x354094946a94d4feULL, 0xf31cf7f7fbf7eb0cULL, 0x6f18b9b9deb9a167ULL, + 0x268b13134c13985fULL, 0x58512c2cb02c7d9cULL, 0xbb05d3d36bd3d6b8ULL, + 0xd38ce7e7bbe76b5cULL, 0xdc396e6ea56e57cbULL, 0x95aac4c437c46ef3ULL, + 0x061b03030c03180fULL, 0xacdc565645568a13ULL, 0x885e44440d441a49ULL, + 0xfea07f7fe17fdf9eULL, 0x4f88a9a99ea92137ULL, 0x54672a2aa82a4d82ULL, + 0x6b0abbbbd6bbb16dULL, 0x9f87c1c123c146e2ULL, 0xa6f153535153a202ULL, + 0xa572dcdc57dcae8bULL, 0x16530b0b2c0b5827ULL, 0x27019d9d4e9d9cd3ULL, + 0xd82b6c6cad6c47c1ULL, 0x62a43131c43195f5ULL, 0xe8f37474cd7487b9ULL, + 0xf115f6f6fff6e309ULL, 0x8c4c464605460a43ULL, 0x45a5acac8aac0926ULL, + 0x0fb589891e893c97ULL, 0x28b414145014a044ULL, 0xdfbae1e1a3e15b42ULL, + 0x2ca616165816b04eULL, 0x74f73a3ae83acdd2ULL, 0xd2066969b9696fd0ULL, + 0x124109092409482dULL, 0xe0d77070dd70a7adULL, 0x716fb6b6e2b6d954ULL, + 0xbd1ed0d067d0ceb7ULL, 0xc7d6eded93ed3b7eULL, 0x85e2cccc17cc2edbULL, + 0x8468424215422a57ULL, 0x2d2c98985a98b4c2ULL, 0x55eda4a4aaa4490eULL, + 0x50752828a0285d88ULL, 0xb8865c5c6d5cda31ULL, 0xed6bf8f8c7f8933fULL, + 0x11c28686228644a4ULL, +}; + +static const u64 C3[256] = { + 0x7830d818186018c0ULL, 0xaf462623238c2305ULL, 0xf991b8c6c63fc67eULL, + 0x6fcdfbe8e887e813ULL, 0xa113cb878726874cULL, 0x626d11b8b8dab8a9ULL, + 0x0502090101040108ULL, 0x6e9e0d4f4f214f42ULL, 0xee6c9b3636d836adULL, + 0x0451ffa6a6a2a659ULL, 0xbdb90cd2d26fd2deULL, 0x06f70ef5f5f3f5fbULL, + 0x80f2967979f979efULL, 0xcede306f6fa16f5fULL, 0xef3f6d91917e91fcULL, + 0x07a4f852525552aaULL, 0xfdc04760609d6027ULL, 0x766535bcbccabc89ULL, + 0xcd2b379b9b569bacULL, 0x8c018a8e8e028e04ULL, 0x155bd2a3a3b6a371ULL, + 0x3c186c0c0c300c60ULL, 0x8af6847b7bf17bffULL, 0xe16a803535d435b5ULL, + 0x693af51d1d741de8ULL, 0x47ddb3e0e0a7e053ULL, 0xacb321d7d77bd7f6ULL, + 0xed999cc2c22fc25eULL, 0x965c432e2eb82e6dULL, 0x7a96294b4b314b62ULL, + 0x21e15dfefedffea3ULL, 0x16aed55757415782ULL, 0x412abd15155415a8ULL, + 0xb6eee87777c1779fULL, 0xeb6e923737dc37a5ULL, 0x56d79ee5e5b3e57bULL, + 0xd923139f9f469f8cULL, 0x17fd23f0f0e7f0d3ULL, 0x7f94204a4a354a6aULL, + 0x95a944dada4fda9eULL, 0x25b0a258587d58faULL, 0xca8fcfc9c903c906ULL, + 0x8d527c2929a42955ULL, 0x22145a0a0a280a50ULL, 0x4f7f50b1b1feb1e1ULL, + 0x1a5dc9a0a0baa069ULL, 0xdad6146b6bb16b7fULL, 0xab17d985852e855cULL, + 0x73673cbdbdcebd81ULL, 0x34ba8f5d5d695dd2ULL, 0x5020901010401080ULL, + 0x03f507f4f4f7f4f3ULL, 0xc08bddcbcb0bcb16ULL, 0xc67cd33e3ef83eedULL, + 0x110a2d0505140528ULL, 0xe6ce78676781671fULL, 0x53d597e4e4b7e473ULL, + 0xbb4e0227279c2725ULL, 0x5882734141194132ULL, 0x9d0ba78b8b168b2cULL, + 0x0153f6a7a7a6a751ULL, 0x94fab27d7de97dcfULL, 0xfb374995956e95dcULL, + 0x9fad56d8d847d88eULL, 0x30eb70fbfbcbfb8bULL, 0x71c1cdeeee9fee23ULL, + 0x91f8bb7c7ced7cc7ULL, 0xe3cc716666856617ULL, 0x8ea77bdddd53dda6ULL, + 0x4b2eaf17175c17b8ULL, 0x468e454747014702ULL, 0xdc211a9e9e429e84ULL, + 0xc589d4caca0fca1eULL, 0x995a582d2db42d75ULL, 0x79632ebfbfc6bf91ULL, + 0x1b0e3f07071c0738ULL, 0x2347acadad8ead01ULL, 0x2fb4b05a5a755aeaULL, + 0xb51bef838336836cULL, 0xff66b63333cc3385ULL, 0xf2c65c636391633fULL, + 0x0a04120202080210ULL, 0x384993aaaa92aa39ULL, 0xa8e2de7171d971afULL, + 0xcf8dc6c8c807c80eULL, 0x7d32d119196419c8ULL, 0x70923b4949394972ULL, + 0x9aaf5fd9d943d986ULL, 0x1df931f2f2eff2c3ULL, 0x48dba8e3e3abe34bULL, + 0x2ab6b95b5b715be2ULL, 0x920dbc88881a8834ULL, 
0xc8293e9a9a529aa4ULL, + 0xbe4c0b262698262dULL, 0xfa64bf3232c8328dULL, 0x4a7d59b0b0fab0e9ULL, + 0x6acff2e9e983e91bULL, 0x331e770f0f3c0f78ULL, 0xa6b733d5d573d5e6ULL, + 0xba1df480803a8074ULL, 0x7c6127bebec2be99ULL, 0xde87ebcdcd13cd26ULL, + 0xe468893434d034bdULL, 0x75903248483d487aULL, 0x24e354ffffdbffabULL, + 0x8ff48d7a7af57af7ULL, 0xea3d6490907a90f4ULL, 0x3ebe9d5f5f615fc2ULL, + 0xa0403d202080201dULL, 0xd5d00f6868bd6867ULL, 0x7234ca1a1a681ad0ULL, + 0x2c41b7aeae82ae19ULL, 0x5e757db4b4eab4c9ULL, 0x19a8ce54544d549aULL, + 0xe53b7f93937693ecULL, 0xaa442f222288220dULL, 0xe9c86364648d6407ULL, + 0x12ff2af1f1e3f1dbULL, 0xa2e6cc7373d173bfULL, 0x5a24821212481290ULL, + 0x5d807a40401d403aULL, 0x2810480808200840ULL, 0xe89b95c3c32bc356ULL, + 0x7bc5dfecec97ec33ULL, 0x90ab4ddbdb4bdb96ULL, 0x1f5fc0a1a1bea161ULL, + 0x8307918d8d0e8d1cULL, 0xc97ac83d3df43df5ULL, 0xf1335b97976697ccULL, + 0x0000000000000000ULL, 0xd483f9cfcf1bcf36ULL, 0x87566e2b2bac2b45ULL, + 0xb3ece17676c57697ULL, 0xb019e68282328264ULL, 0xa9b128d6d67fd6feULL, + 0x7736c31b1b6c1bd8ULL, 0x5b7774b5b5eeb5c1ULL, 0x2943beafaf86af11ULL, + 0xdfd41d6a6ab56a77ULL, 0x0da0ea50505d50baULL, 0x4c8a574545094512ULL, + 0x18fb38f3f3ebf3cbULL, 0xf060ad3030c0309dULL, 0x74c3c4efef9bef2bULL, + 0xc37eda3f3ffc3fe5ULL, 0x1caac75555495592ULL, 0x1059dba2a2b2a279ULL, + 0x65c9e9eaea8fea03ULL, 0xecca6a656589650fULL, 0x686903babad2bab9ULL, + 0x935e4a2f2fbc2f65ULL, 0xe79d8ec0c027c04eULL, 0x81a160dede5fdebeULL, + 0x6c38fc1c1c701ce0ULL, 0x2ee746fdfdd3fdbbULL, 0x649a1f4d4d294d52ULL, + 0xe0397692927292e4ULL, 0xbceafa7575c9758fULL, 0x1e0c360606180630ULL, + 0x9809ae8a8a128a24ULL, 0x40794bb2b2f2b2f9ULL, 0x59d185e6e6bfe663ULL, + 0x361c7e0e0e380e70ULL, 0x633ee71f1f7c1ff8ULL, 0xf7c4556262956237ULL, + 0xa3b53ad4d477d4eeULL, 0x324d81a8a89aa829ULL, 0xf4315296966296c4ULL, + 0x3aef62f9f9c3f99bULL, 0xf697a3c5c533c566ULL, 0xb14a102525942535ULL, + 0x20b2ab59597959f2ULL, 0xae15d084842a8454ULL, 0xa7e4c57272d572b7ULL, + 0xdd72ec3939e439d5ULL, 0x6198164c4c2d4c5aULL, 0x3bbc945e5e655ecaULL, + 0x85f09f7878fd78e7ULL, 0xd870e53838e038ddULL, 0x8605988c8c0a8c14ULL, + 0xb2bf17d1d163d1c6ULL, 0x0b57e4a5a5aea541ULL, 0x4dd9a1e2e2afe243ULL, + 0xf8c24e616199612fULL, 0x457b42b3b3f6b3f1ULL, 0xa542342121842115ULL, + 0xd625089c9c4a9c94ULL, 0x663cee1e1e781ef0ULL, 0x5286614343114322ULL, + 0xfc93b1c7c73bc776ULL, 0x2be54ffcfcd7fcb3ULL, 0x1408240404100420ULL, + 0x08a2e351515951b2ULL, 0xc72f2599995e99bcULL, 0xc4da226d6da96d4fULL, + 0x391a650d0d340d68ULL, 0x35e979fafacffa83ULL, 0x84a369dfdf5bdfb6ULL, + 0x9bfca97e7ee57ed7ULL, 0xb44819242490243dULL, 0xd776fe3b3bec3bc5ULL, + 0x3d4b9aabab96ab31ULL, 0xd181f0cece1fce3eULL, 0x5522991111441188ULL, + 0x8903838f8f068f0cULL, 0x6b9c044e4e254e4aULL, 0x517366b7b7e6b7d1ULL, + 0x60cbe0ebeb8beb0bULL, 0xcc78c13c3cf03cfdULL, 0xbf1ffd81813e817cULL, + 0xfe354094946a94d4ULL, 0x0cf31cf7f7fbf7ebULL, 0x676f18b9b9deb9a1ULL, + 0x5f268b13134c1398ULL, 0x9c58512c2cb02c7dULL, 0xb8bb05d3d36bd3d6ULL, + 0x5cd38ce7e7bbe76bULL, 0xcbdc396e6ea56e57ULL, 0xf395aac4c437c46eULL, + 0x0f061b03030c0318ULL, 0x13acdc565645568aULL, 0x49885e44440d441aULL, + 0x9efea07f7fe17fdfULL, 0x374f88a9a99ea921ULL, 0x8254672a2aa82a4dULL, + 0x6d6b0abbbbd6bbb1ULL, 0xe29f87c1c123c146ULL, 0x02a6f153535153a2ULL, + 0x8ba572dcdc57dcaeULL, 0x2716530b0b2c0b58ULL, 0xd327019d9d4e9d9cULL, + 0xc1d82b6c6cad6c47ULL, 0xf562a43131c43195ULL, 0xb9e8f37474cd7487ULL, + 0x09f115f6f6fff6e3ULL, 0x438c4c464605460aULL, 0x2645a5acac8aac09ULL, + 0x970fb589891e893cULL, 0x4428b414145014a0ULL, 0x42dfbae1e1a3e15bULL, + 0x4e2ca616165816b0ULL, 0xd274f73a3ae83acdULL, 
0xd0d2066969b9696fULL, + 0x2d12410909240948ULL, 0xade0d77070dd70a7ULL, 0x54716fb6b6e2b6d9ULL, + 0xb7bd1ed0d067d0ceULL, 0x7ec7d6eded93ed3bULL, 0xdb85e2cccc17cc2eULL, + 0x578468424215422aULL, 0xc22d2c98985a98b4ULL, 0x0e55eda4a4aaa449ULL, + 0x8850752828a0285dULL, 0x31b8865c5c6d5cdaULL, 0x3fed6bf8f8c7f893ULL, + 0xa411c28686228644ULL, +}; + +static const u64 C4[256] = { + 0xc07830d818186018ULL, 0x05af462623238c23ULL, 0x7ef991b8c6c63fc6ULL, + 0x136fcdfbe8e887e8ULL, 0x4ca113cb87872687ULL, 0xa9626d11b8b8dab8ULL, + 0x0805020901010401ULL, 0x426e9e0d4f4f214fULL, 0xadee6c9b3636d836ULL, + 0x590451ffa6a6a2a6ULL, 0xdebdb90cd2d26fd2ULL, 0xfb06f70ef5f5f3f5ULL, + 0xef80f2967979f979ULL, 0x5fcede306f6fa16fULL, 0xfcef3f6d91917e91ULL, + 0xaa07a4f852525552ULL, 0x27fdc04760609d60ULL, 0x89766535bcbccabcULL, + 0xaccd2b379b9b569bULL, 0x048c018a8e8e028eULL, 0x71155bd2a3a3b6a3ULL, + 0x603c186c0c0c300cULL, 0xff8af6847b7bf17bULL, 0xb5e16a803535d435ULL, + 0xe8693af51d1d741dULL, 0x5347ddb3e0e0a7e0ULL, 0xf6acb321d7d77bd7ULL, + 0x5eed999cc2c22fc2ULL, 0x6d965c432e2eb82eULL, 0x627a96294b4b314bULL, + 0xa321e15dfefedffeULL, 0x8216aed557574157ULL, 0xa8412abd15155415ULL, + 0x9fb6eee87777c177ULL, 0xa5eb6e923737dc37ULL, 0x7b56d79ee5e5b3e5ULL, + 0x8cd923139f9f469fULL, 0xd317fd23f0f0e7f0ULL, 0x6a7f94204a4a354aULL, + 0x9e95a944dada4fdaULL, 0xfa25b0a258587d58ULL, 0x06ca8fcfc9c903c9ULL, + 0x558d527c2929a429ULL, 0x5022145a0a0a280aULL, 0xe14f7f50b1b1feb1ULL, + 0x691a5dc9a0a0baa0ULL, 0x7fdad6146b6bb16bULL, 0x5cab17d985852e85ULL, + 0x8173673cbdbdcebdULL, 0xd234ba8f5d5d695dULL, 0x8050209010104010ULL, + 0xf303f507f4f4f7f4ULL, 0x16c08bddcbcb0bcbULL, 0xedc67cd33e3ef83eULL, + 0x28110a2d05051405ULL, 0x1fe6ce7867678167ULL, 0x7353d597e4e4b7e4ULL, + 0x25bb4e0227279c27ULL, 0x3258827341411941ULL, 0x2c9d0ba78b8b168bULL, + 0x510153f6a7a7a6a7ULL, 0xcf94fab27d7de97dULL, 0xdcfb374995956e95ULL, + 0x8e9fad56d8d847d8ULL, 0x8b30eb70fbfbcbfbULL, 0x2371c1cdeeee9feeULL, + 0xc791f8bb7c7ced7cULL, 0x17e3cc7166668566ULL, 0xa68ea77bdddd53ddULL, + 0xb84b2eaf17175c17ULL, 0x02468e4547470147ULL, 0x84dc211a9e9e429eULL, + 0x1ec589d4caca0fcaULL, 0x75995a582d2db42dULL, 0x9179632ebfbfc6bfULL, + 0x381b0e3f07071c07ULL, 0x012347acadad8eadULL, 0xea2fb4b05a5a755aULL, + 0x6cb51bef83833683ULL, 0x85ff66b63333cc33ULL, 0x3ff2c65c63639163ULL, + 0x100a041202020802ULL, 0x39384993aaaa92aaULL, 0xafa8e2de7171d971ULL, + 0x0ecf8dc6c8c807c8ULL, 0xc87d32d119196419ULL, 0x7270923b49493949ULL, + 0x869aaf5fd9d943d9ULL, 0xc31df931f2f2eff2ULL, 0x4b48dba8e3e3abe3ULL, + 0xe22ab6b95b5b715bULL, 0x34920dbc88881a88ULL, 0xa4c8293e9a9a529aULL, + 0x2dbe4c0b26269826ULL, 0x8dfa64bf3232c832ULL, 0xe94a7d59b0b0fab0ULL, + 0x1b6acff2e9e983e9ULL, 0x78331e770f0f3c0fULL, 0xe6a6b733d5d573d5ULL, + 0x74ba1df480803a80ULL, 0x997c6127bebec2beULL, 0x26de87ebcdcd13cdULL, + 0xbde468893434d034ULL, 0x7a75903248483d48ULL, 0xab24e354ffffdbffULL, + 0xf78ff48d7a7af57aULL, 0xf4ea3d6490907a90ULL, 0xc23ebe9d5f5f615fULL, + 0x1da0403d20208020ULL, 0x67d5d00f6868bd68ULL, 0xd07234ca1a1a681aULL, + 0x192c41b7aeae82aeULL, 0xc95e757db4b4eab4ULL, 0x9a19a8ce54544d54ULL, + 0xece53b7f93937693ULL, 0x0daa442f22228822ULL, 0x07e9c86364648d64ULL, + 0xdb12ff2af1f1e3f1ULL, 0xbfa2e6cc7373d173ULL, 0x905a248212124812ULL, + 0x3a5d807a40401d40ULL, 0x4028104808082008ULL, 0x56e89b95c3c32bc3ULL, + 0x337bc5dfecec97ecULL, 0x9690ab4ddbdb4bdbULL, 0x611f5fc0a1a1bea1ULL, + 0x1c8307918d8d0e8dULL, 0xf5c97ac83d3df43dULL, 0xccf1335b97976697ULL, + 0x0000000000000000ULL, 0x36d483f9cfcf1bcfULL, 0x4587566e2b2bac2bULL, + 0x97b3ece17676c576ULL, 0x64b019e682823282ULL, 
0xfea9b128d6d67fd6ULL, + 0xd87736c31b1b6c1bULL, 0xc15b7774b5b5eeb5ULL, 0x112943beafaf86afULL, + 0x77dfd41d6a6ab56aULL, 0xba0da0ea50505d50ULL, 0x124c8a5745450945ULL, + 0xcb18fb38f3f3ebf3ULL, 0x9df060ad3030c030ULL, 0x2b74c3c4efef9befULL, + 0xe5c37eda3f3ffc3fULL, 0x921caac755554955ULL, 0x791059dba2a2b2a2ULL, + 0x0365c9e9eaea8feaULL, 0x0fecca6a65658965ULL, 0xb9686903babad2baULL, + 0x65935e4a2f2fbc2fULL, 0x4ee79d8ec0c027c0ULL, 0xbe81a160dede5fdeULL, + 0xe06c38fc1c1c701cULL, 0xbb2ee746fdfdd3fdULL, 0x52649a1f4d4d294dULL, + 0xe4e0397692927292ULL, 0x8fbceafa7575c975ULL, 0x301e0c3606061806ULL, + 0x249809ae8a8a128aULL, 0xf940794bb2b2f2b2ULL, 0x6359d185e6e6bfe6ULL, + 0x70361c7e0e0e380eULL, 0xf8633ee71f1f7c1fULL, 0x37f7c45562629562ULL, + 0xeea3b53ad4d477d4ULL, 0x29324d81a8a89aa8ULL, 0xc4f4315296966296ULL, + 0x9b3aef62f9f9c3f9ULL, 0x66f697a3c5c533c5ULL, 0x35b14a1025259425ULL, + 0xf220b2ab59597959ULL, 0x54ae15d084842a84ULL, 0xb7a7e4c57272d572ULL, + 0xd5dd72ec3939e439ULL, 0x5a6198164c4c2d4cULL, 0xca3bbc945e5e655eULL, + 0xe785f09f7878fd78ULL, 0xddd870e53838e038ULL, 0x148605988c8c0a8cULL, + 0xc6b2bf17d1d163d1ULL, 0x410b57e4a5a5aea5ULL, 0x434dd9a1e2e2afe2ULL, + 0x2ff8c24e61619961ULL, 0xf1457b42b3b3f6b3ULL, 0x15a5423421218421ULL, + 0x94d625089c9c4a9cULL, 0xf0663cee1e1e781eULL, 0x2252866143431143ULL, + 0x76fc93b1c7c73bc7ULL, 0xb32be54ffcfcd7fcULL, 0x2014082404041004ULL, + 0xb208a2e351515951ULL, 0xbcc72f2599995e99ULL, 0x4fc4da226d6da96dULL, + 0x68391a650d0d340dULL, 0x8335e979fafacffaULL, 0xb684a369dfdf5bdfULL, + 0xd79bfca97e7ee57eULL, 0x3db4481924249024ULL, 0xc5d776fe3b3bec3bULL, + 0x313d4b9aabab96abULL, 0x3ed181f0cece1fceULL, 0x8855229911114411ULL, + 0x0c8903838f8f068fULL, 0x4a6b9c044e4e254eULL, 0xd1517366b7b7e6b7ULL, + 0x0b60cbe0ebeb8bebULL, 0xfdcc78c13c3cf03cULL, 0x7cbf1ffd81813e81ULL, + 0xd4fe354094946a94ULL, 0xeb0cf31cf7f7fbf7ULL, 0xa1676f18b9b9deb9ULL, + 0x985f268b13134c13ULL, 0x7d9c58512c2cb02cULL, 0xd6b8bb05d3d36bd3ULL, + 0x6b5cd38ce7e7bbe7ULL, 0x57cbdc396e6ea56eULL, 0x6ef395aac4c437c4ULL, + 0x180f061b03030c03ULL, 0x8a13acdc56564556ULL, 0x1a49885e44440d44ULL, + 0xdf9efea07f7fe17fULL, 0x21374f88a9a99ea9ULL, 0x4d8254672a2aa82aULL, + 0xb16d6b0abbbbd6bbULL, 0x46e29f87c1c123c1ULL, 0xa202a6f153535153ULL, + 0xae8ba572dcdc57dcULL, 0x582716530b0b2c0bULL, 0x9cd327019d9d4e9dULL, + 0x47c1d82b6c6cad6cULL, 0x95f562a43131c431ULL, 0x87b9e8f37474cd74ULL, + 0xe309f115f6f6fff6ULL, 0x0a438c4c46460546ULL, 0x092645a5acac8aacULL, + 0x3c970fb589891e89ULL, 0xa04428b414145014ULL, 0x5b42dfbae1e1a3e1ULL, + 0xb04e2ca616165816ULL, 0xcdd274f73a3ae83aULL, 0x6fd0d2066969b969ULL, + 0x482d124109092409ULL, 0xa7ade0d77070dd70ULL, 0xd954716fb6b6e2b6ULL, + 0xceb7bd1ed0d067d0ULL, 0x3b7ec7d6eded93edULL, 0x2edb85e2cccc17ccULL, + 0x2a57846842421542ULL, 0xb4c22d2c98985a98ULL, 0x490e55eda4a4aaa4ULL, + 0x5d8850752828a028ULL, 0xda31b8865c5c6d5cULL, 0x933fed6bf8f8c7f8ULL, + 0x44a411c286862286ULL, +}; + +static const u64 C5[256] = { + 0x18c07830d8181860ULL, 0x2305af462623238cULL, 0xc67ef991b8c6c63fULL, + 0xe8136fcdfbe8e887ULL, 0x874ca113cb878726ULL, 0xb8a9626d11b8b8daULL, + 0x0108050209010104ULL, 0x4f426e9e0d4f4f21ULL, 0x36adee6c9b3636d8ULL, + 0xa6590451ffa6a6a2ULL, 0xd2debdb90cd2d26fULL, 0xf5fb06f70ef5f5f3ULL, + 0x79ef80f2967979f9ULL, 0x6f5fcede306f6fa1ULL, 0x91fcef3f6d91917eULL, + 0x52aa07a4f8525255ULL, 0x6027fdc04760609dULL, 0xbc89766535bcbccaULL, + 0x9baccd2b379b9b56ULL, 0x8e048c018a8e8e02ULL, 0xa371155bd2a3a3b6ULL, + 0x0c603c186c0c0c30ULL, 0x7bff8af6847b7bf1ULL, 0x35b5e16a803535d4ULL, + 0x1de8693af51d1d74ULL, 0xe05347ddb3e0e0a7ULL, 
0xd7f6acb321d7d77bULL, + 0xc25eed999cc2c22fULL, 0x2e6d965c432e2eb8ULL, 0x4b627a96294b4b31ULL, + 0xfea321e15dfefedfULL, 0x578216aed5575741ULL, 0x15a8412abd151554ULL, + 0x779fb6eee87777c1ULL, 0x37a5eb6e923737dcULL, 0xe57b56d79ee5e5b3ULL, + 0x9f8cd923139f9f46ULL, 0xf0d317fd23f0f0e7ULL, 0x4a6a7f94204a4a35ULL, + 0xda9e95a944dada4fULL, 0x58fa25b0a258587dULL, 0xc906ca8fcfc9c903ULL, + 0x29558d527c2929a4ULL, 0x0a5022145a0a0a28ULL, 0xb1e14f7f50b1b1feULL, + 0xa0691a5dc9a0a0baULL, 0x6b7fdad6146b6bb1ULL, 0x855cab17d985852eULL, + 0xbd8173673cbdbdceULL, 0x5dd234ba8f5d5d69ULL, 0x1080502090101040ULL, + 0xf4f303f507f4f4f7ULL, 0xcb16c08bddcbcb0bULL, 0x3eedc67cd33e3ef8ULL, + 0x0528110a2d050514ULL, 0x671fe6ce78676781ULL, 0xe47353d597e4e4b7ULL, + 0x2725bb4e0227279cULL, 0x4132588273414119ULL, 0x8b2c9d0ba78b8b16ULL, + 0xa7510153f6a7a7a6ULL, 0x7dcf94fab27d7de9ULL, 0x95dcfb374995956eULL, + 0xd88e9fad56d8d847ULL, 0xfb8b30eb70fbfbcbULL, 0xee2371c1cdeeee9fULL, + 0x7cc791f8bb7c7cedULL, 0x6617e3cc71666685ULL, 0xdda68ea77bdddd53ULL, + 0x17b84b2eaf17175cULL, 0x4702468e45474701ULL, 0x9e84dc211a9e9e42ULL, + 0xca1ec589d4caca0fULL, 0x2d75995a582d2db4ULL, 0xbf9179632ebfbfc6ULL, + 0x07381b0e3f07071cULL, 0xad012347acadad8eULL, 0x5aea2fb4b05a5a75ULL, + 0x836cb51bef838336ULL, 0x3385ff66b63333ccULL, 0x633ff2c65c636391ULL, + 0x02100a0412020208ULL, 0xaa39384993aaaa92ULL, 0x71afa8e2de7171d9ULL, + 0xc80ecf8dc6c8c807ULL, 0x19c87d32d1191964ULL, 0x497270923b494939ULL, + 0xd9869aaf5fd9d943ULL, 0xf2c31df931f2f2efULL, 0xe34b48dba8e3e3abULL, + 0x5be22ab6b95b5b71ULL, 0x8834920dbc88881aULL, 0x9aa4c8293e9a9a52ULL, + 0x262dbe4c0b262698ULL, 0x328dfa64bf3232c8ULL, 0xb0e94a7d59b0b0faULL, + 0xe91b6acff2e9e983ULL, 0x0f78331e770f0f3cULL, 0xd5e6a6b733d5d573ULL, + 0x8074ba1df480803aULL, 0xbe997c6127bebec2ULL, 0xcd26de87ebcdcd13ULL, + 0x34bde468893434d0ULL, 0x487a75903248483dULL, 0xffab24e354ffffdbULL, + 0x7af78ff48d7a7af5ULL, 0x90f4ea3d6490907aULL, 0x5fc23ebe9d5f5f61ULL, + 0x201da0403d202080ULL, 0x6867d5d00f6868bdULL, 0x1ad07234ca1a1a68ULL, + 0xae192c41b7aeae82ULL, 0xb4c95e757db4b4eaULL, 0x549a19a8ce54544dULL, + 0x93ece53b7f939376ULL, 0x220daa442f222288ULL, 0x6407e9c86364648dULL, + 0xf1db12ff2af1f1e3ULL, 0x73bfa2e6cc7373d1ULL, 0x12905a2482121248ULL, + 0x403a5d807a40401dULL, 0x0840281048080820ULL, 0xc356e89b95c3c32bULL, + 0xec337bc5dfecec97ULL, 0xdb9690ab4ddbdb4bULL, 0xa1611f5fc0a1a1beULL, + 0x8d1c8307918d8d0eULL, 0x3df5c97ac83d3df4ULL, 0x97ccf1335b979766ULL, + 0x0000000000000000ULL, 0xcf36d483f9cfcf1bULL, 0x2b4587566e2b2bacULL, + 0x7697b3ece17676c5ULL, 0x8264b019e6828232ULL, 0xd6fea9b128d6d67fULL, + 0x1bd87736c31b1b6cULL, 0xb5c15b7774b5b5eeULL, 0xaf112943beafaf86ULL, + 0x6a77dfd41d6a6ab5ULL, 0x50ba0da0ea50505dULL, 0x45124c8a57454509ULL, + 0xf3cb18fb38f3f3ebULL, 0x309df060ad3030c0ULL, 0xef2b74c3c4efef9bULL, + 0x3fe5c37eda3f3ffcULL, 0x55921caac7555549ULL, 0xa2791059dba2a2b2ULL, + 0xea0365c9e9eaea8fULL, 0x650fecca6a656589ULL, 0xbab9686903babad2ULL, + 0x2f65935e4a2f2fbcULL, 0xc04ee79d8ec0c027ULL, 0xdebe81a160dede5fULL, + 0x1ce06c38fc1c1c70ULL, 0xfdbb2ee746fdfdd3ULL, 0x4d52649a1f4d4d29ULL, + 0x92e4e03976929272ULL, 0x758fbceafa7575c9ULL, 0x06301e0c36060618ULL, + 0x8a249809ae8a8a12ULL, 0xb2f940794bb2b2f2ULL, 0xe66359d185e6e6bfULL, + 0x0e70361c7e0e0e38ULL, 0x1ff8633ee71f1f7cULL, 0x6237f7c455626295ULL, + 0xd4eea3b53ad4d477ULL, 0xa829324d81a8a89aULL, 0x96c4f43152969662ULL, + 0xf99b3aef62f9f9c3ULL, 0xc566f697a3c5c533ULL, 0x2535b14a10252594ULL, + 0x59f220b2ab595979ULL, 0x8454ae15d084842aULL, 0x72b7a7e4c57272d5ULL, + 0x39d5dd72ec3939e4ULL, 0x4c5a6198164c4c2dULL, 
0x5eca3bbc945e5e65ULL, + 0x78e785f09f7878fdULL, 0x38ddd870e53838e0ULL, 0x8c148605988c8c0aULL, + 0xd1c6b2bf17d1d163ULL, 0xa5410b57e4a5a5aeULL, 0xe2434dd9a1e2e2afULL, + 0x612ff8c24e616199ULL, 0xb3f1457b42b3b3f6ULL, 0x2115a54234212184ULL, + 0x9c94d625089c9c4aULL, 0x1ef0663cee1e1e78ULL, 0x4322528661434311ULL, + 0xc776fc93b1c7c73bULL, 0xfcb32be54ffcfcd7ULL, 0x0420140824040410ULL, + 0x51b208a2e3515159ULL, 0x99bcc72f2599995eULL, 0x6d4fc4da226d6da9ULL, + 0x0d68391a650d0d34ULL, 0xfa8335e979fafacfULL, 0xdfb684a369dfdf5bULL, + 0x7ed79bfca97e7ee5ULL, 0x243db44819242490ULL, 0x3bc5d776fe3b3becULL, + 0xab313d4b9aabab96ULL, 0xce3ed181f0cece1fULL, 0x1188552299111144ULL, + 0x8f0c8903838f8f06ULL, 0x4e4a6b9c044e4e25ULL, 0xb7d1517366b7b7e6ULL, + 0xeb0b60cbe0ebeb8bULL, 0x3cfdcc78c13c3cf0ULL, 0x817cbf1ffd81813eULL, + 0x94d4fe354094946aULL, 0xf7eb0cf31cf7f7fbULL, 0xb9a1676f18b9b9deULL, + 0x13985f268b13134cULL, 0x2c7d9c58512c2cb0ULL, 0xd3d6b8bb05d3d36bULL, + 0xe76b5cd38ce7e7bbULL, 0x6e57cbdc396e6ea5ULL, 0xc46ef395aac4c437ULL, + 0x03180f061b03030cULL, 0x568a13acdc565645ULL, 0x441a49885e44440dULL, + 0x7fdf9efea07f7fe1ULL, 0xa921374f88a9a99eULL, 0x2a4d8254672a2aa8ULL, + 0xbbb16d6b0abbbbd6ULL, 0xc146e29f87c1c123ULL, 0x53a202a6f1535351ULL, + 0xdcae8ba572dcdc57ULL, 0x0b582716530b0b2cULL, 0x9d9cd327019d9d4eULL, + 0x6c47c1d82b6c6cadULL, 0x3195f562a43131c4ULL, 0x7487b9e8f37474cdULL, + 0xf6e309f115f6f6ffULL, 0x460a438c4c464605ULL, 0xac092645a5acac8aULL, + 0x893c970fb589891eULL, 0x14a04428b4141450ULL, 0xe15b42dfbae1e1a3ULL, + 0x16b04e2ca6161658ULL, 0x3acdd274f73a3ae8ULL, 0x696fd0d2066969b9ULL, + 0x09482d1241090924ULL, 0x70a7ade0d77070ddULL, 0xb6d954716fb6b6e2ULL, + 0xd0ceb7bd1ed0d067ULL, 0xed3b7ec7d6eded93ULL, 0xcc2edb85e2cccc17ULL, + 0x422a578468424215ULL, 0x98b4c22d2c98985aULL, 0xa4490e55eda4a4aaULL, + 0x285d8850752828a0ULL, 0x5cda31b8865c5c6dULL, 0xf8933fed6bf8f8c7ULL, + 0x8644a411c2868622ULL, +}; + +static const u64 C6[256] = { + 0x6018c07830d81818ULL, 0x8c2305af46262323ULL, 0x3fc67ef991b8c6c6ULL, + 0x87e8136fcdfbe8e8ULL, 0x26874ca113cb8787ULL, 0xdab8a9626d11b8b8ULL, + 0x0401080502090101ULL, 0x214f426e9e0d4f4fULL, 0xd836adee6c9b3636ULL, + 0xa2a6590451ffa6a6ULL, 0x6fd2debdb90cd2d2ULL, 0xf3f5fb06f70ef5f5ULL, + 0xf979ef80f2967979ULL, 0xa16f5fcede306f6fULL, 0x7e91fcef3f6d9191ULL, + 0x5552aa07a4f85252ULL, 0x9d6027fdc0476060ULL, 0xcabc89766535bcbcULL, + 0x569baccd2b379b9bULL, 0x028e048c018a8e8eULL, 0xb6a371155bd2a3a3ULL, + 0x300c603c186c0c0cULL, 0xf17bff8af6847b7bULL, 0xd435b5e16a803535ULL, + 0x741de8693af51d1dULL, 0xa7e05347ddb3e0e0ULL, 0x7bd7f6acb321d7d7ULL, + 0x2fc25eed999cc2c2ULL, 0xb82e6d965c432e2eULL, 0x314b627a96294b4bULL, + 0xdffea321e15dfefeULL, 0x41578216aed55757ULL, 0x5415a8412abd1515ULL, + 0xc1779fb6eee87777ULL, 0xdc37a5eb6e923737ULL, 0xb3e57b56d79ee5e5ULL, + 0x469f8cd923139f9fULL, 0xe7f0d317fd23f0f0ULL, 0x354a6a7f94204a4aULL, + 0x4fda9e95a944dadaULL, 0x7d58fa25b0a25858ULL, 0x03c906ca8fcfc9c9ULL, + 0xa429558d527c2929ULL, 0x280a5022145a0a0aULL, 0xfeb1e14f7f50b1b1ULL, + 0xbaa0691a5dc9a0a0ULL, 0xb16b7fdad6146b6bULL, 0x2e855cab17d98585ULL, + 0xcebd8173673cbdbdULL, 0x695dd234ba8f5d5dULL, 0x4010805020901010ULL, + 0xf7f4f303f507f4f4ULL, 0x0bcb16c08bddcbcbULL, 0xf83eedc67cd33e3eULL, + 0x140528110a2d0505ULL, 0x81671fe6ce786767ULL, 0xb7e47353d597e4e4ULL, + 0x9c2725bb4e022727ULL, 0x1941325882734141ULL, 0x168b2c9d0ba78b8bULL, + 0xa6a7510153f6a7a7ULL, 0xe97dcf94fab27d7dULL, 0x6e95dcfb37499595ULL, + 0x47d88e9fad56d8d8ULL, 0xcbfb8b30eb70fbfbULL, 0x9fee2371c1cdeeeeULL, + 0xed7cc791f8bb7c7cULL, 0x856617e3cc716666ULL, 
0x53dda68ea77bddddULL, + 0x5c17b84b2eaf1717ULL, 0x014702468e454747ULL, 0x429e84dc211a9e9eULL, + 0x0fca1ec589d4cacaULL, 0xb42d75995a582d2dULL, 0xc6bf9179632ebfbfULL, + 0x1c07381b0e3f0707ULL, 0x8ead012347acadadULL, 0x755aea2fb4b05a5aULL, + 0x36836cb51bef8383ULL, 0xcc3385ff66b63333ULL, 0x91633ff2c65c6363ULL, + 0x0802100a04120202ULL, 0x92aa39384993aaaaULL, 0xd971afa8e2de7171ULL, + 0x07c80ecf8dc6c8c8ULL, 0x6419c87d32d11919ULL, 0x39497270923b4949ULL, + 0x43d9869aaf5fd9d9ULL, 0xeff2c31df931f2f2ULL, 0xabe34b48dba8e3e3ULL, + 0x715be22ab6b95b5bULL, 0x1a8834920dbc8888ULL, 0x529aa4c8293e9a9aULL, + 0x98262dbe4c0b2626ULL, 0xc8328dfa64bf3232ULL, 0xfab0e94a7d59b0b0ULL, + 0x83e91b6acff2e9e9ULL, 0x3c0f78331e770f0fULL, 0x73d5e6a6b733d5d5ULL, + 0x3a8074ba1df48080ULL, 0xc2be997c6127bebeULL, 0x13cd26de87ebcdcdULL, + 0xd034bde468893434ULL, 0x3d487a7590324848ULL, 0xdbffab24e354ffffULL, + 0xf57af78ff48d7a7aULL, 0x7a90f4ea3d649090ULL, 0x615fc23ebe9d5f5fULL, + 0x80201da0403d2020ULL, 0xbd6867d5d00f6868ULL, 0x681ad07234ca1a1aULL, + 0x82ae192c41b7aeaeULL, 0xeab4c95e757db4b4ULL, 0x4d549a19a8ce5454ULL, + 0x7693ece53b7f9393ULL, 0x88220daa442f2222ULL, 0x8d6407e9c8636464ULL, + 0xe3f1db12ff2af1f1ULL, 0xd173bfa2e6cc7373ULL, 0x4812905a24821212ULL, + 0x1d403a5d807a4040ULL, 0x2008402810480808ULL, 0x2bc356e89b95c3c3ULL, + 0x97ec337bc5dfececULL, 0x4bdb9690ab4ddbdbULL, 0xbea1611f5fc0a1a1ULL, + 0x0e8d1c8307918d8dULL, 0xf43df5c97ac83d3dULL, 0x6697ccf1335b9797ULL, + 0x0000000000000000ULL, 0x1bcf36d483f9cfcfULL, 0xac2b4587566e2b2bULL, + 0xc57697b3ece17676ULL, 0x328264b019e68282ULL, 0x7fd6fea9b128d6d6ULL, + 0x6c1bd87736c31b1bULL, 0xeeb5c15b7774b5b5ULL, 0x86af112943beafafULL, + 0xb56a77dfd41d6a6aULL, 0x5d50ba0da0ea5050ULL, 0x0945124c8a574545ULL, + 0xebf3cb18fb38f3f3ULL, 0xc0309df060ad3030ULL, 0x9bef2b74c3c4efefULL, + 0xfc3fe5c37eda3f3fULL, 0x4955921caac75555ULL, 0xb2a2791059dba2a2ULL, + 0x8fea0365c9e9eaeaULL, 0x89650fecca6a6565ULL, 0xd2bab9686903babaULL, + 0xbc2f65935e4a2f2fULL, 0x27c04ee79d8ec0c0ULL, 0x5fdebe81a160dedeULL, + 0x701ce06c38fc1c1cULL, 0xd3fdbb2ee746fdfdULL, 0x294d52649a1f4d4dULL, + 0x7292e4e039769292ULL, 0xc9758fbceafa7575ULL, 0x1806301e0c360606ULL, + 0x128a249809ae8a8aULL, 0xf2b2f940794bb2b2ULL, 0xbfe66359d185e6e6ULL, + 0x380e70361c7e0e0eULL, 0x7c1ff8633ee71f1fULL, 0x956237f7c4556262ULL, + 0x77d4eea3b53ad4d4ULL, 0x9aa829324d81a8a8ULL, 0x6296c4f431529696ULL, + 0xc3f99b3aef62f9f9ULL, 0x33c566f697a3c5c5ULL, 0x942535b14a102525ULL, + 0x7959f220b2ab5959ULL, 0x2a8454ae15d08484ULL, 0xd572b7a7e4c57272ULL, + 0xe439d5dd72ec3939ULL, 0x2d4c5a6198164c4cULL, 0x655eca3bbc945e5eULL, + 0xfd78e785f09f7878ULL, 0xe038ddd870e53838ULL, 0x0a8c148605988c8cULL, + 0x63d1c6b2bf17d1d1ULL, 0xaea5410b57e4a5a5ULL, 0xafe2434dd9a1e2e2ULL, + 0x99612ff8c24e6161ULL, 0xf6b3f1457b42b3b3ULL, 0x842115a542342121ULL, + 0x4a9c94d625089c9cULL, 0x781ef0663cee1e1eULL, 0x1143225286614343ULL, + 0x3bc776fc93b1c7c7ULL, 0xd7fcb32be54ffcfcULL, 0x1004201408240404ULL, + 0x5951b208a2e35151ULL, 0x5e99bcc72f259999ULL, 0xa96d4fc4da226d6dULL, + 0x340d68391a650d0dULL, 0xcffa8335e979fafaULL, 0x5bdfb684a369dfdfULL, + 0xe57ed79bfca97e7eULL, 0x90243db448192424ULL, 0xec3bc5d776fe3b3bULL, + 0x96ab313d4b9aababULL, 0x1fce3ed181f0ceceULL, 0x4411885522991111ULL, + 0x068f0c8903838f8fULL, 0x254e4a6b9c044e4eULL, 0xe6b7d1517366b7b7ULL, + 0x8beb0b60cbe0ebebULL, 0xf03cfdcc78c13c3cULL, 0x3e817cbf1ffd8181ULL, + 0x6a94d4fe35409494ULL, 0xfbf7eb0cf31cf7f7ULL, 0xdeb9a1676f18b9b9ULL, + 0x4c13985f268b1313ULL, 0xb02c7d9c58512c2cULL, 0x6bd3d6b8bb05d3d3ULL, + 0xbbe76b5cd38ce7e7ULL, 0xa56e57cbdc396e6eULL, 
0x37c46ef395aac4c4ULL, + 0x0c03180f061b0303ULL, 0x45568a13acdc5656ULL, 0x0d441a49885e4444ULL, + 0xe17fdf9efea07f7fULL, 0x9ea921374f88a9a9ULL, 0xa82a4d8254672a2aULL, + 0xd6bbb16d6b0abbbbULL, 0x23c146e29f87c1c1ULL, 0x5153a202a6f15353ULL, + 0x57dcae8ba572dcdcULL, 0x2c0b582716530b0bULL, 0x4e9d9cd327019d9dULL, + 0xad6c47c1d82b6c6cULL, 0xc43195f562a43131ULL, 0xcd7487b9e8f37474ULL, + 0xfff6e309f115f6f6ULL, 0x05460a438c4c4646ULL, 0x8aac092645a5acacULL, + 0x1e893c970fb58989ULL, 0x5014a04428b41414ULL, 0xa3e15b42dfbae1e1ULL, + 0x5816b04e2ca61616ULL, 0xe83acdd274f73a3aULL, 0xb9696fd0d2066969ULL, + 0x2409482d12410909ULL, 0xdd70a7ade0d77070ULL, 0xe2b6d954716fb6b6ULL, + 0x67d0ceb7bd1ed0d0ULL, 0x93ed3b7ec7d6ededULL, 0x17cc2edb85e2ccccULL, + 0x15422a5784684242ULL, 0x5a98b4c22d2c9898ULL, 0xaaa4490e55eda4a4ULL, + 0xa0285d8850752828ULL, 0x6d5cda31b8865c5cULL, 0xc7f8933fed6bf8f8ULL, + 0x228644a411c28686ULL, +}; + +static const u64 C7[256] = { + 0x186018c07830d818ULL, 0x238c2305af462623ULL, 0xc63fc67ef991b8c6ULL, + 0xe887e8136fcdfbe8ULL, 0x8726874ca113cb87ULL, 0xb8dab8a9626d11b8ULL, + 0x0104010805020901ULL, 0x4f214f426e9e0d4fULL, 0x36d836adee6c9b36ULL, + 0xa6a2a6590451ffa6ULL, 0xd26fd2debdb90cd2ULL, 0xf5f3f5fb06f70ef5ULL, + 0x79f979ef80f29679ULL, 0x6fa16f5fcede306fULL, 0x917e91fcef3f6d91ULL, + 0x525552aa07a4f852ULL, 0x609d6027fdc04760ULL, 0xbccabc89766535bcULL, + 0x9b569baccd2b379bULL, 0x8e028e048c018a8eULL, 0xa3b6a371155bd2a3ULL, + 0x0c300c603c186c0cULL, 0x7bf17bff8af6847bULL, 0x35d435b5e16a8035ULL, + 0x1d741de8693af51dULL, 0xe0a7e05347ddb3e0ULL, 0xd77bd7f6acb321d7ULL, + 0xc22fc25eed999cc2ULL, 0x2eb82e6d965c432eULL, 0x4b314b627a96294bULL, + 0xfedffea321e15dfeULL, 0x5741578216aed557ULL, 0x155415a8412abd15ULL, + 0x77c1779fb6eee877ULL, 0x37dc37a5eb6e9237ULL, 0xe5b3e57b56d79ee5ULL, + 0x9f469f8cd923139fULL, 0xf0e7f0d317fd23f0ULL, 0x4a354a6a7f94204aULL, + 0xda4fda9e95a944daULL, 0x587d58fa25b0a258ULL, 0xc903c906ca8fcfc9ULL, + 0x29a429558d527c29ULL, 0x0a280a5022145a0aULL, 0xb1feb1e14f7f50b1ULL, + 0xa0baa0691a5dc9a0ULL, 0x6bb16b7fdad6146bULL, 0x852e855cab17d985ULL, + 0xbdcebd8173673cbdULL, 0x5d695dd234ba8f5dULL, 0x1040108050209010ULL, + 0xf4f7f4f303f507f4ULL, 0xcb0bcb16c08bddcbULL, 0x3ef83eedc67cd33eULL, + 0x05140528110a2d05ULL, 0x6781671fe6ce7867ULL, 0xe4b7e47353d597e4ULL, + 0x279c2725bb4e0227ULL, 0x4119413258827341ULL, 0x8b168b2c9d0ba78bULL, + 0xa7a6a7510153f6a7ULL, 0x7de97dcf94fab27dULL, 0x956e95dcfb374995ULL, + 0xd847d88e9fad56d8ULL, 0xfbcbfb8b30eb70fbULL, 0xee9fee2371c1cdeeULL, + 0x7ced7cc791f8bb7cULL, 0x66856617e3cc7166ULL, 0xdd53dda68ea77bddULL, + 0x175c17b84b2eaf17ULL, 0x47014702468e4547ULL, 0x9e429e84dc211a9eULL, + 0xca0fca1ec589d4caULL, 0x2db42d75995a582dULL, 0xbfc6bf9179632ebfULL, + 0x071c07381b0e3f07ULL, 0xad8ead012347acadULL, 0x5a755aea2fb4b05aULL, + 0x8336836cb51bef83ULL, 0x33cc3385ff66b633ULL, 0x6391633ff2c65c63ULL, + 0x020802100a041202ULL, 0xaa92aa39384993aaULL, 0x71d971afa8e2de71ULL, + 0xc807c80ecf8dc6c8ULL, 0x196419c87d32d119ULL, 0x4939497270923b49ULL, + 0xd943d9869aaf5fd9ULL, 0xf2eff2c31df931f2ULL, 0xe3abe34b48dba8e3ULL, + 0x5b715be22ab6b95bULL, 0x881a8834920dbc88ULL, 0x9a529aa4c8293e9aULL, + 0x2698262dbe4c0b26ULL, 0x32c8328dfa64bf32ULL, 0xb0fab0e94a7d59b0ULL, + 0xe983e91b6acff2e9ULL, 0x0f3c0f78331e770fULL, 0xd573d5e6a6b733d5ULL, + 0x803a8074ba1df480ULL, 0xbec2be997c6127beULL, 0xcd13cd26de87ebcdULL, + 0x34d034bde4688934ULL, 0x483d487a75903248ULL, 0xffdbffab24e354ffULL, + 0x7af57af78ff48d7aULL, 0x907a90f4ea3d6490ULL, 0x5f615fc23ebe9d5fULL, + 0x2080201da0403d20ULL, 0x68bd6867d5d00f68ULL, 
0x1a681ad07234ca1aULL, + 0xae82ae192c41b7aeULL, 0xb4eab4c95e757db4ULL, 0x544d549a19a8ce54ULL, + 0x937693ece53b7f93ULL, 0x2288220daa442f22ULL, 0x648d6407e9c86364ULL, + 0xf1e3f1db12ff2af1ULL, 0x73d173bfa2e6cc73ULL, 0x124812905a248212ULL, + 0x401d403a5d807a40ULL, 0x0820084028104808ULL, 0xc32bc356e89b95c3ULL, + 0xec97ec337bc5dfecULL, 0xdb4bdb9690ab4ddbULL, 0xa1bea1611f5fc0a1ULL, + 0x8d0e8d1c8307918dULL, 0x3df43df5c97ac83dULL, 0x976697ccf1335b97ULL, + 0x0000000000000000ULL, 0xcf1bcf36d483f9cfULL, 0x2bac2b4587566e2bULL, + 0x76c57697b3ece176ULL, 0x82328264b019e682ULL, 0xd67fd6fea9b128d6ULL, + 0x1b6c1bd87736c31bULL, 0xb5eeb5c15b7774b5ULL, 0xaf86af112943beafULL, + 0x6ab56a77dfd41d6aULL, 0x505d50ba0da0ea50ULL, 0x450945124c8a5745ULL, + 0xf3ebf3cb18fb38f3ULL, 0x30c0309df060ad30ULL, 0xef9bef2b74c3c4efULL, + 0x3ffc3fe5c37eda3fULL, 0x554955921caac755ULL, 0xa2b2a2791059dba2ULL, + 0xea8fea0365c9e9eaULL, 0x6589650fecca6a65ULL, 0xbad2bab9686903baULL, + 0x2fbc2f65935e4a2fULL, 0xc027c04ee79d8ec0ULL, 0xde5fdebe81a160deULL, + 0x1c701ce06c38fc1cULL, 0xfdd3fdbb2ee746fdULL, 0x4d294d52649a1f4dULL, + 0x927292e4e0397692ULL, 0x75c9758fbceafa75ULL, 0x061806301e0c3606ULL, + 0x8a128a249809ae8aULL, 0xb2f2b2f940794bb2ULL, 0xe6bfe66359d185e6ULL, + 0x0e380e70361c7e0eULL, 0x1f7c1ff8633ee71fULL, 0x62956237f7c45562ULL, + 0xd477d4eea3b53ad4ULL, 0xa89aa829324d81a8ULL, 0x966296c4f4315296ULL, + 0xf9c3f99b3aef62f9ULL, 0xc533c566f697a3c5ULL, 0x25942535b14a1025ULL, + 0x597959f220b2ab59ULL, 0x842a8454ae15d084ULL, 0x72d572b7a7e4c572ULL, + 0x39e439d5dd72ec39ULL, 0x4c2d4c5a6198164cULL, 0x5e655eca3bbc945eULL, + 0x78fd78e785f09f78ULL, 0x38e038ddd870e538ULL, 0x8c0a8c148605988cULL, + 0xd163d1c6b2bf17d1ULL, 0xa5aea5410b57e4a5ULL, 0xe2afe2434dd9a1e2ULL, + 0x6199612ff8c24e61ULL, 0xb3f6b3f1457b42b3ULL, 0x21842115a5423421ULL, + 0x9c4a9c94d625089cULL, 0x1e781ef0663cee1eULL, 0x4311432252866143ULL, + 0xc73bc776fc93b1c7ULL, 0xfcd7fcb32be54ffcULL, 0x0410042014082404ULL, + 0x515951b208a2e351ULL, 0x995e99bcc72f2599ULL, 0x6da96d4fc4da226dULL, + 0x0d340d68391a650dULL, 0xfacffa8335e979faULL, 0xdf5bdfb684a369dfULL, + 0x7ee57ed79bfca97eULL, 0x2490243db4481924ULL, 0x3bec3bc5d776fe3bULL, + 0xab96ab313d4b9aabULL, 0xce1fce3ed181f0ceULL, 0x1144118855229911ULL, + 0x8f068f0c8903838fULL, 0x4e254e4a6b9c044eULL, 0xb7e6b7d1517366b7ULL, + 0xeb8beb0b60cbe0ebULL, 0x3cf03cfdcc78c13cULL, 0x813e817cbf1ffd81ULL, + 0x946a94d4fe354094ULL, 0xf7fbf7eb0cf31cf7ULL, 0xb9deb9a1676f18b9ULL, + 0x134c13985f268b13ULL, 0x2cb02c7d9c58512cULL, 0xd36bd3d6b8bb05d3ULL, + 0xe7bbe76b5cd38ce7ULL, 0x6ea56e57cbdc396eULL, 0xc437c46ef395aac4ULL, + 0x030c03180f061b03ULL, 0x5645568a13acdc56ULL, 0x440d441a49885e44ULL, + 0x7fe17fdf9efea07fULL, 0xa99ea921374f88a9ULL, 0x2aa82a4d8254672aULL, + 0xbbd6bbb16d6b0abbULL, 0xc123c146e29f87c1ULL, 0x535153a202a6f153ULL, + 0xdc57dcae8ba572dcULL, 0x0b2c0b582716530bULL, 0x9d4e9d9cd327019dULL, + 0x6cad6c47c1d82b6cULL, 0x31c43195f562a431ULL, 0x74cd7487b9e8f374ULL, + 0xf6fff6e309f115f6ULL, 0x4605460a438c4c46ULL, 0xac8aac092645a5acULL, + 0x891e893c970fb589ULL, 0x145014a04428b414ULL, 0xe1a3e15b42dfbae1ULL, + 0x165816b04e2ca616ULL, 0x3ae83acdd274f73aULL, 0x69b9696fd0d20669ULL, + 0x092409482d124109ULL, 0x70dd70a7ade0d770ULL, 0xb6e2b6d954716fb6ULL, + 0xd067d0ceb7bd1ed0ULL, 0xed93ed3b7ec7d6edULL, 0xcc17cc2edb85e2ccULL, + 0x4215422a57846842ULL, 0x985a98b4c22d2c98ULL, 0xa4aaa4490e55eda4ULL, + 0x28a0285d88507528ULL, 0x5c6d5cda31b8865cULL, 0xf8c7f8933fed6bf8ULL, + 0x86228644a411c286ULL, +}; + +static const u64 rc[WHIRLPOOL_ROUNDS] = { + 0x1823c6e887b8014fULL, + 0x36a6d2f5796f9152ULL, 
+ 0x60bc9b8ea30c7b35ULL, + 0x1de0d7c22e4bfe57ULL, + 0x157737e59ff04adaULL, + 0x58c9290ab1a06b85ULL, + 0xbd5d10f4cb3e0567ULL, + 0xe427418ba77d95d8ULL, + 0xfbee7c66dd17479eULL, + 0xca2dbf07ad5a8333ULL, +}; + +/** + * The core Whirlpool transform. + */ + +static void wp512_process_buffer(struct wp512_ctx *wctx) { + int i, r; + u64 K[8]; /* the round key */ + u64 block[8]; /* mu(buffer) */ + u64 state[8]; /* the cipher state */ + u64 L[8]; + const __be64 *buffer = (const __be64 *)wctx->buffer; + + for (i = 0; i < 8; i++) + block[i] = be64_to_cpu(buffer[i]); + + state[0] = block[0] ^ (K[0] = wctx->hash[0]); + state[1] = block[1] ^ (K[1] = wctx->hash[1]); + state[2] = block[2] ^ (K[2] = wctx->hash[2]); + state[3] = block[3] ^ (K[3] = wctx->hash[3]); + state[4] = block[4] ^ (K[4] = wctx->hash[4]); + state[5] = block[5] ^ (K[5] = wctx->hash[5]); + state[6] = block[6] ^ (K[6] = wctx->hash[6]); + state[7] = block[7] ^ (K[7] = wctx->hash[7]); + + for (r = 0; r < WHIRLPOOL_ROUNDS; r++) { + + L[0] = C0[(int)(K[0] >> 56) ] ^ + C1[(int)(K[7] >> 48) & 0xff] ^ + C2[(int)(K[6] >> 40) & 0xff] ^ + C3[(int)(K[5] >> 32) & 0xff] ^ + C4[(int)(K[4] >> 24) & 0xff] ^ + C5[(int)(K[3] >> 16) & 0xff] ^ + C6[(int)(K[2] >> 8) & 0xff] ^ + C7[(int)(K[1] ) & 0xff] ^ + rc[r]; + + L[1] = C0[(int)(K[1] >> 56) ] ^ + C1[(int)(K[0] >> 48) & 0xff] ^ + C2[(int)(K[7] >> 40) & 0xff] ^ + C3[(int)(K[6] >> 32) & 0xff] ^ + C4[(int)(K[5] >> 24) & 0xff] ^ + C5[(int)(K[4] >> 16) & 0xff] ^ + C6[(int)(K[3] >> 8) & 0xff] ^ + C7[(int)(K[2] ) & 0xff]; + + L[2] = C0[(int)(K[2] >> 56) ] ^ + C1[(int)(K[1] >> 48) & 0xff] ^ + C2[(int)(K[0] >> 40) & 0xff] ^ + C3[(int)(K[7] >> 32) & 0xff] ^ + C4[(int)(K[6] >> 24) & 0xff] ^ + C5[(int)(K[5] >> 16) & 0xff] ^ + C6[(int)(K[4] >> 8) & 0xff] ^ + C7[(int)(K[3] ) & 0xff]; + + L[3] = C0[(int)(K[3] >> 56) ] ^ + C1[(int)(K[2] >> 48) & 0xff] ^ + C2[(int)(K[1] >> 40) & 0xff] ^ + C3[(int)(K[0] >> 32) & 0xff] ^ + C4[(int)(K[7] >> 24) & 0xff] ^ + C5[(int)(K[6] >> 16) & 0xff] ^ + C6[(int)(K[5] >> 8) & 0xff] ^ + C7[(int)(K[4] ) & 0xff]; + + L[4] = C0[(int)(K[4] >> 56) ] ^ + C1[(int)(K[3] >> 48) & 0xff] ^ + C2[(int)(K[2] >> 40) & 0xff] ^ + C3[(int)(K[1] >> 32) & 0xff] ^ + C4[(int)(K[0] >> 24) & 0xff] ^ + C5[(int)(K[7] >> 16) & 0xff] ^ + C6[(int)(K[6] >> 8) & 0xff] ^ + C7[(int)(K[5] ) & 0xff]; + + L[5] = C0[(int)(K[5] >> 56) ] ^ + C1[(int)(K[4] >> 48) & 0xff] ^ + C2[(int)(K[3] >> 40) & 0xff] ^ + C3[(int)(K[2] >> 32) & 0xff] ^ + C4[(int)(K[1] >> 24) & 0xff] ^ + C5[(int)(K[0] >> 16) & 0xff] ^ + C6[(int)(K[7] >> 8) & 0xff] ^ + C7[(int)(K[6] ) & 0xff]; + + L[6] = C0[(int)(K[6] >> 56) ] ^ + C1[(int)(K[5] >> 48) & 0xff] ^ + C2[(int)(K[4] >> 40) & 0xff] ^ + C3[(int)(K[3] >> 32) & 0xff] ^ + C4[(int)(K[2] >> 24) & 0xff] ^ + C5[(int)(K[1] >> 16) & 0xff] ^ + C6[(int)(K[0] >> 8) & 0xff] ^ + C7[(int)(K[7] ) & 0xff]; + + L[7] = C0[(int)(K[7] >> 56) ] ^ + C1[(int)(K[6] >> 48) & 0xff] ^ + C2[(int)(K[5] >> 40) & 0xff] ^ + C3[(int)(K[4] >> 32) & 0xff] ^ + C4[(int)(K[3] >> 24) & 0xff] ^ + C5[(int)(K[2] >> 16) & 0xff] ^ + C6[(int)(K[1] >> 8) & 0xff] ^ + C7[(int)(K[0] ) & 0xff]; + + K[0] = L[0]; + K[1] = L[1]; + K[2] = L[2]; + K[3] = L[3]; + K[4] = L[4]; + K[5] = L[5]; + K[6] = L[6]; + K[7] = L[7]; + + L[0] = C0[(int)(state[0] >> 56) ] ^ + C1[(int)(state[7] >> 48) & 0xff] ^ + C2[(int)(state[6] >> 40) & 0xff] ^ + C3[(int)(state[5] >> 32) & 0xff] ^ + C4[(int)(state[4] >> 24) & 0xff] ^ + C5[(int)(state[3] >> 16) & 0xff] ^ + C6[(int)(state[2] >> 8) & 0xff] ^ + C7[(int)(state[1] ) & 0xff] ^ + K[0]; + + L[1] = C0[(int)(state[1] >> 56) ] ^ + 
C1[(int)(state[0] >> 48) & 0xff] ^ + C2[(int)(state[7] >> 40) & 0xff] ^ + C3[(int)(state[6] >> 32) & 0xff] ^ + C4[(int)(state[5] >> 24) & 0xff] ^ + C5[(int)(state[4] >> 16) & 0xff] ^ + C6[(int)(state[3] >> 8) & 0xff] ^ + C7[(int)(state[2] ) & 0xff] ^ + K[1]; + + L[2] = C0[(int)(state[2] >> 56) ] ^ + C1[(int)(state[1] >> 48) & 0xff] ^ + C2[(int)(state[0] >> 40) & 0xff] ^ + C3[(int)(state[7] >> 32) & 0xff] ^ + C4[(int)(state[6] >> 24) & 0xff] ^ + C5[(int)(state[5] >> 16) & 0xff] ^ + C6[(int)(state[4] >> 8) & 0xff] ^ + C7[(int)(state[3] ) & 0xff] ^ + K[2]; + + L[3] = C0[(int)(state[3] >> 56) ] ^ + C1[(int)(state[2] >> 48) & 0xff] ^ + C2[(int)(state[1] >> 40) & 0xff] ^ + C3[(int)(state[0] >> 32) & 0xff] ^ + C4[(int)(state[7] >> 24) & 0xff] ^ + C5[(int)(state[6] >> 16) & 0xff] ^ + C6[(int)(state[5] >> 8) & 0xff] ^ + C7[(int)(state[4] ) & 0xff] ^ + K[3]; + + L[4] = C0[(int)(state[4] >> 56) ] ^ + C1[(int)(state[3] >> 48) & 0xff] ^ + C2[(int)(state[2] >> 40) & 0xff] ^ + C3[(int)(state[1] >> 32) & 0xff] ^ + C4[(int)(state[0] >> 24) & 0xff] ^ + C5[(int)(state[7] >> 16) & 0xff] ^ + C6[(int)(state[6] >> 8) & 0xff] ^ + C7[(int)(state[5] ) & 0xff] ^ + K[4]; + + L[5] = C0[(int)(state[5] >> 56) ] ^ + C1[(int)(state[4] >> 48) & 0xff] ^ + C2[(int)(state[3] >> 40) & 0xff] ^ + C3[(int)(state[2] >> 32) & 0xff] ^ + C4[(int)(state[1] >> 24) & 0xff] ^ + C5[(int)(state[0] >> 16) & 0xff] ^ + C6[(int)(state[7] >> 8) & 0xff] ^ + C7[(int)(state[6] ) & 0xff] ^ + K[5]; + + L[6] = C0[(int)(state[6] >> 56) ] ^ + C1[(int)(state[5] >> 48) & 0xff] ^ + C2[(int)(state[4] >> 40) & 0xff] ^ + C3[(int)(state[3] >> 32) & 0xff] ^ + C4[(int)(state[2] >> 24) & 0xff] ^ + C5[(int)(state[1] >> 16) & 0xff] ^ + C6[(int)(state[0] >> 8) & 0xff] ^ + C7[(int)(state[7] ) & 0xff] ^ + K[6]; + + L[7] = C0[(int)(state[7] >> 56) ] ^ + C1[(int)(state[6] >> 48) & 0xff] ^ + C2[(int)(state[5] >> 40) & 0xff] ^ + C3[(int)(state[4] >> 32) & 0xff] ^ + C4[(int)(state[3] >> 24) & 0xff] ^ + C5[(int)(state[2] >> 16) & 0xff] ^ + C6[(int)(state[1] >> 8) & 0xff] ^ + C7[(int)(state[0] ) & 0xff] ^ + K[7]; + + state[0] = L[0]; + state[1] = L[1]; + state[2] = L[2]; + state[3] = L[3]; + state[4] = L[4]; + state[5] = L[5]; + state[6] = L[6]; + state[7] = L[7]; + } + /* + * apply the Miyaguchi-Preneel compression function: + */ + wctx->hash[0] ^= state[0] ^ block[0]; + wctx->hash[1] ^= state[1] ^ block[1]; + wctx->hash[2] ^= state[2] ^ block[2]; + wctx->hash[3] ^= state[3] ^ block[3]; + wctx->hash[4] ^= state[4] ^ block[4]; + wctx->hash[5] ^= state[5] ^ block[5]; + wctx->hash[6] ^= state[6] ^ block[6]; + wctx->hash[7] ^= state[7] ^ block[7]; + +} + +static int wp512_init(struct shash_desc *desc) { + struct wp512_ctx *wctx = shash_desc_ctx(desc); + int i; + + memset(wctx->bitLength, 0, 32); + wctx->bufferBits = wctx->bufferPos = 0; + wctx->buffer[0] = 0; + for (i = 0; i < 8; i++) { + wctx->hash[i] = 0L; + } + + return 0; +} + +static int wp512_update(struct shash_desc *desc, const u8 *source, + unsigned int len) +{ + struct wp512_ctx *wctx = shash_desc_ctx(desc); + int sourcePos = 0; + unsigned int bits_len = len * 8; // convert to number of bits + int sourceGap = (8 - ((int)bits_len & 7)) & 7; + int bufferRem = wctx->bufferBits & 7; + int i; + u32 b, carry; + u8 *buffer = wctx->buffer; + u8 *bitLength = wctx->bitLength; + int bufferBits = wctx->bufferBits; + int bufferPos = wctx->bufferPos; + + u64 value = bits_len; + for (i = 31, carry = 0; i >= 0 && (carry != 0 || value != 0ULL); i--) { + carry += bitLength[i] + ((u32)value & 0xff); + bitLength[i] = (u8)carry; + carry 
>>= 8; + value >>= 8; + } + while (bits_len > 8) { + b = ((source[sourcePos] << sourceGap) & 0xff) | + ((source[sourcePos + 1] & 0xff) >> (8 - sourceGap)); + buffer[bufferPos++] |= (u8)(b >> bufferRem); + bufferBits += 8 - bufferRem; + if (bufferBits == WP512_BLOCK_SIZE * 8) { + wp512_process_buffer(wctx); + bufferBits = bufferPos = 0; + } + buffer[bufferPos] = b << (8 - bufferRem); + bufferBits += bufferRem; + bits_len -= 8; + sourcePos++; + } + if (bits_len > 0) { + b = (source[sourcePos] << sourceGap) & 0xff; + buffer[bufferPos] |= b >> bufferRem; + } else { + b = 0; + } + if (bufferRem + bits_len < 8) { + bufferBits += bits_len; + } else { + bufferPos++; + bufferBits += 8 - bufferRem; + bits_len -= 8 - bufferRem; + if (bufferBits == WP512_BLOCK_SIZE * 8) { + wp512_process_buffer(wctx); + bufferBits = bufferPos = 0; + } + buffer[bufferPos] = b << (8 - bufferRem); + bufferBits += (int)bits_len; + } + + wctx->bufferBits = bufferBits; + wctx->bufferPos = bufferPos; + + return 0; +} + +static int wp512_final(struct shash_desc *desc, u8 *out) +{ + struct wp512_ctx *wctx = shash_desc_ctx(desc); + int i; + u8 *buffer = wctx->buffer; + u8 *bitLength = wctx->bitLength; + int bufferBits = wctx->bufferBits; + int bufferPos = wctx->bufferPos; + __be64 *digest = (__be64 *)out; + + buffer[bufferPos] |= 0x80U >> (bufferBits & 7); + bufferPos++; + if (bufferPos > WP512_BLOCK_SIZE - WP512_LENGTHBYTES) { + if (bufferPos < WP512_BLOCK_SIZE) { + memset(&buffer[bufferPos], 0, WP512_BLOCK_SIZE - bufferPos); + } + wp512_process_buffer(wctx); + bufferPos = 0; + } + if (bufferPos < WP512_BLOCK_SIZE - WP512_LENGTHBYTES) { + memset(&buffer[bufferPos], 0, + (WP512_BLOCK_SIZE - WP512_LENGTHBYTES) - bufferPos); + } + bufferPos = WP512_BLOCK_SIZE - WP512_LENGTHBYTES; + memcpy(&buffer[WP512_BLOCK_SIZE - WP512_LENGTHBYTES], + bitLength, WP512_LENGTHBYTES); + wp512_process_buffer(wctx); + for (i = 0; i < WP512_DIGEST_SIZE/8; i++) + digest[i] = cpu_to_be64(wctx->hash[i]); + wctx->bufferBits = bufferBits; + wctx->bufferPos = bufferPos; + + return 0; +} + +static int wp384_final(struct shash_desc *desc, u8 *out) +{ + u8 D[64]; + + wp512_final(desc, D); + memcpy (out, D, WP384_DIGEST_SIZE); + memset (D, 0, WP512_DIGEST_SIZE); + + return 0; +} + +static int wp256_final(struct shash_desc *desc, u8 *out) +{ + u8 D[64]; + + wp512_final(desc, D); + memcpy (out, D, WP256_DIGEST_SIZE); + memset (D, 0, WP512_DIGEST_SIZE); + + return 0; +} + +static struct shash_alg wp512 = { + .digestsize = WP512_DIGEST_SIZE, + .init = wp512_init, + .update = wp512_update, + .final = wp512_final, + .descsize = sizeof(struct wp512_ctx), + .base = { + .cra_name = "wp512", + .cra_flags = CRYPTO_ALG_TYPE_SHASH, + .cra_blocksize = WP512_BLOCK_SIZE, + .cra_module = THIS_MODULE, + } +}; + +static struct shash_alg wp384 = { + .digestsize = WP384_DIGEST_SIZE, + .init = wp512_init, + .update = wp512_update, + .final = wp384_final, + .descsize = sizeof(struct wp512_ctx), + .base = { + .cra_name = "wp384", + .cra_flags = CRYPTO_ALG_TYPE_SHASH, + .cra_blocksize = WP512_BLOCK_SIZE, + .cra_module = THIS_MODULE, + } +}; + +static struct shash_alg wp256 = { + .digestsize = WP256_DIGEST_SIZE, + .init = wp512_init, + .update = wp512_update, + .final = wp256_final, + .descsize = sizeof(struct wp512_ctx), + .base = { + .cra_name = "wp256", + .cra_flags = CRYPTO_ALG_TYPE_SHASH, + .cra_blocksize = WP512_BLOCK_SIZE, + .cra_module = THIS_MODULE, + } +}; + +static int __init wp512_mod_init(void) +{ + int ret = 0; + + ret = crypto_register_shash(&wp512); + + if (ret < 0) + 
goto out; + + ret = crypto_register_shash(&wp384); + if (ret < 0) + { + crypto_unregister_shash(&wp512); + goto out; + } + + ret = crypto_register_shash(&wp256); + if (ret < 0) + { + crypto_unregister_shash(&wp512); + crypto_unregister_shash(&wp384); + } +out: + return ret; +} + +static void __exit wp512_mod_fini(void) +{ + crypto_unregister_shash(&wp512); + crypto_unregister_shash(&wp384); + crypto_unregister_shash(&wp256); +} + +MODULE_ALIAS("wp384"); +MODULE_ALIAS("wp256"); + +module_init(wp512_mod_init); +module_exit(wp512_mod_fini); + +MODULE_LICENSE("GPL"); +MODULE_DESCRIPTION("Whirlpool Message Digest Algorithm"); diff --git a/crypto/xcbc.c b/crypto/xcbc.c new file mode 100644 index 00000000..a5fbdf37 --- /dev/null +++ b/crypto/xcbc.c @@ -0,0 +1,288 @@ +/* + * Copyright (C)2006 USAGI/WIDE Project + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 2 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA + * + * Author: + * Kazunori Miyazawa <miyazawa@linux-ipv6.org> + */ + +#include <crypto/internal/hash.h> +#include <linux/err.h> +#include <linux/kernel.h> +#include <linux/module.h> + +static u_int32_t ks[12] = {0x01010101, 0x01010101, 0x01010101, 0x01010101, + 0x02020202, 0x02020202, 0x02020202, 0x02020202, + 0x03030303, 0x03030303, 0x03030303, 0x03030303}; + +/* + * +------------------------ + * | <parent tfm> + * +------------------------ + * | xcbc_tfm_ctx + * +------------------------ + * | consts (block size * 2) + * +------------------------ + */ +struct xcbc_tfm_ctx { + struct crypto_cipher *child; + u8 ctx[]; +}; + +/* + * +------------------------ + * | <shash desc> + * +------------------------ + * | xcbc_desc_ctx + * +------------------------ + * | odds (block size) + * +------------------------ + * | prev (block size) + * +------------------------ + */ +struct xcbc_desc_ctx { + unsigned int len; + u8 ctx[]; +}; + +static int crypto_xcbc_digest_setkey(struct crypto_shash *parent, + const u8 *inkey, unsigned int keylen) +{ + unsigned long alignmask = crypto_shash_alignmask(parent); + struct xcbc_tfm_ctx *ctx = crypto_shash_ctx(parent); + int bs = crypto_shash_blocksize(parent); + u8 *consts = PTR_ALIGN(&ctx->ctx[0], alignmask + 1); + int err = 0; + u8 key1[bs]; + + if ((err = crypto_cipher_setkey(ctx->child, inkey, keylen))) + return err; + + crypto_cipher_encrypt_one(ctx->child, consts, (u8 *)ks + bs); + crypto_cipher_encrypt_one(ctx->child, consts + bs, (u8 *)ks + bs * 2); + crypto_cipher_encrypt_one(ctx->child, key1, (u8 *)ks); + + return crypto_cipher_setkey(ctx->child, key1, bs); + +} + +static int crypto_xcbc_digest_init(struct shash_desc *pdesc) +{ + unsigned long alignmask = crypto_shash_alignmask(pdesc->tfm); + struct xcbc_desc_ctx *ctx = shash_desc_ctx(pdesc); + int bs = crypto_shash_blocksize(pdesc->tfm); + u8 *prev = PTR_ALIGN(&ctx->ctx[0], alignmask + 1) + bs; + + ctx->len = 0; + memset(prev, 0, bs); + + return 0; +} + +static int 
crypto_xcbc_digest_update(struct shash_desc *pdesc, const u8 *p, + unsigned int len) +{ + struct crypto_shash *parent = pdesc->tfm; + unsigned long alignmask = crypto_shash_alignmask(parent); + struct xcbc_tfm_ctx *tctx = crypto_shash_ctx(parent); + struct xcbc_desc_ctx *ctx = shash_desc_ctx(pdesc); + struct crypto_cipher *tfm = tctx->child; + int bs = crypto_shash_blocksize(parent); + u8 *odds = PTR_ALIGN(&ctx->ctx[0], alignmask + 1); + u8 *prev = odds + bs; + + /* checking the data can fill the block */ + if ((ctx->len + len) <= bs) { + memcpy(odds + ctx->len, p, len); + ctx->len += len; + return 0; + } + + /* filling odds with new data and encrypting it */ + memcpy(odds + ctx->len, p, bs - ctx->len); + len -= bs - ctx->len; + p += bs - ctx->len; + + crypto_xor(prev, odds, bs); + crypto_cipher_encrypt_one(tfm, prev, prev); + + /* clearing the length */ + ctx->len = 0; + + /* encrypting the rest of data */ + while (len > bs) { + crypto_xor(prev, p, bs); + crypto_cipher_encrypt_one(tfm, prev, prev); + p += bs; + len -= bs; + } + + /* keeping the surplus of blocksize */ + if (len) { + memcpy(odds, p, len); + ctx->len = len; + } + + return 0; +} + +static int crypto_xcbc_digest_final(struct shash_desc *pdesc, u8 *out) +{ + struct crypto_shash *parent = pdesc->tfm; + unsigned long alignmask = crypto_shash_alignmask(parent); + struct xcbc_tfm_ctx *tctx = crypto_shash_ctx(parent); + struct xcbc_desc_ctx *ctx = shash_desc_ctx(pdesc); + struct crypto_cipher *tfm = tctx->child; + int bs = crypto_shash_blocksize(parent); + u8 *consts = PTR_ALIGN(&tctx->ctx[0], alignmask + 1); + u8 *odds = PTR_ALIGN(&ctx->ctx[0], alignmask + 1); + u8 *prev = odds + bs; + unsigned int offset = 0; + + if (ctx->len != bs) { + unsigned int rlen; + u8 *p = odds + ctx->len; + + *p = 0x80; + p++; + + rlen = bs - ctx->len -1; + if (rlen) + memset(p, 0, rlen); + + offset += bs; + } + + crypto_xor(prev, odds, bs); + crypto_xor(prev, consts + offset, bs); + + crypto_cipher_encrypt_one(tfm, out, prev); + + return 0; +} + +static int xcbc_init_tfm(struct crypto_tfm *tfm) +{ + struct crypto_cipher *cipher; + struct crypto_instance *inst = (void *)tfm->__crt_alg; + struct crypto_spawn *spawn = crypto_instance_ctx(inst); + struct xcbc_tfm_ctx *ctx = crypto_tfm_ctx(tfm); + + cipher = crypto_spawn_cipher(spawn); + if (IS_ERR(cipher)) + return PTR_ERR(cipher); + + ctx->child = cipher; + + return 0; +}; + +static void xcbc_exit_tfm(struct crypto_tfm *tfm) +{ + struct xcbc_tfm_ctx *ctx = crypto_tfm_ctx(tfm); + crypto_free_cipher(ctx->child); +} + +static int xcbc_create(struct crypto_template *tmpl, struct rtattr **tb) +{ + struct shash_instance *inst; + struct crypto_alg *alg; + unsigned long alignmask; + int err; + + err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SHASH); + if (err) + return err; + + alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_CIPHER, + CRYPTO_ALG_TYPE_MASK); + if (IS_ERR(alg)) + return PTR_ERR(alg); + + switch(alg->cra_blocksize) { + case 16: + break; + default: + goto out_put_alg; + } + + inst = shash_alloc_instance("xcbc", alg); + err = PTR_ERR(inst); + if (IS_ERR(inst)) + goto out_put_alg; + + err = crypto_init_spawn(shash_instance_ctx(inst), alg, + shash_crypto_instance(inst), + CRYPTO_ALG_TYPE_MASK); + if (err) + goto out_free_inst; + + alignmask = alg->cra_alignmask | 3; + inst->alg.base.cra_alignmask = alignmask; + inst->alg.base.cra_priority = alg->cra_priority; + inst->alg.base.cra_blocksize = alg->cra_blocksize; + + inst->alg.digestsize = alg->cra_blocksize; + inst->alg.descsize = ALIGN(sizeof(struct 
xcbc_desc_ctx), + crypto_tfm_ctx_alignment()) + + (alignmask & + ~(crypto_tfm_ctx_alignment() - 1)) + + alg->cra_blocksize * 2; + + inst->alg.base.cra_ctxsize = ALIGN(sizeof(struct xcbc_tfm_ctx), + alignmask + 1) + + alg->cra_blocksize * 2; + inst->alg.base.cra_init = xcbc_init_tfm; + inst->alg.base.cra_exit = xcbc_exit_tfm; + + inst->alg.init = crypto_xcbc_digest_init; + inst->alg.update = crypto_xcbc_digest_update; + inst->alg.final = crypto_xcbc_digest_final; + inst->alg.setkey = crypto_xcbc_digest_setkey; + + err = shash_register_instance(tmpl, inst); + if (err) { +out_free_inst: + shash_free_instance(shash_crypto_instance(inst)); + } + +out_put_alg: + crypto_mod_put(alg); + return err; +} + +static struct crypto_template crypto_xcbc_tmpl = { + .name = "xcbc", + .create = xcbc_create, + .free = shash_free_instance, + .module = THIS_MODULE, +}; + +static int __init crypto_xcbc_module_init(void) +{ + return crypto_register_template(&crypto_xcbc_tmpl); +} + +static void __exit crypto_xcbc_module_exit(void) +{ + crypto_unregister_template(&crypto_xcbc_tmpl); +} + +module_init(crypto_xcbc_module_init); +module_exit(crypto_xcbc_module_exit); + +MODULE_LICENSE("GPL"); +MODULE_DESCRIPTION("XCBC keyed hash algorithm"); diff --git a/crypto/xor.c b/crypto/xor.c new file mode 100644 index 00000000..b75182d8 --- /dev/null +++ b/crypto/xor.c @@ -0,0 +1,161 @@ +/* + * xor.c : Multiple Devices driver for Linux + * + * Copyright (C) 1996, 1997, 1998, 1999, 2000, + * Ingo Molnar, Matti Aarnio, Jakub Jelinek, Richard Henderson. + * + * Dispatch optimized RAID-5 checksumming functions. + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 2, or (at your option) + * any later version. + * + * You should have received a copy of the GNU General Public License + * (for example /usr/src/linux/COPYING); if not, write to the Free + * Software Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA. + */ + +#define BH_TRACE 0 +#include <linux/module.h> +#include <linux/gfp.h> +#include <linux/raid/xor.h> +#include <linux/jiffies.h> +#include <asm/xor.h> + +/* The xor routines to use. */ +static struct xor_block_template *active_template; + +void +xor_blocks(unsigned int src_count, unsigned int bytes, void *dest, void **srcs) +{ + unsigned long *p1, *p2, *p3, *p4; + + p1 = (unsigned long *) srcs[0]; + if (src_count == 1) { + active_template->do_2(bytes, dest, p1); + return; + } + + p2 = (unsigned long *) srcs[1]; + if (src_count == 2) { + active_template->do_3(bytes, dest, p1, p2); + return; + } + + p3 = (unsigned long *) srcs[2]; + if (src_count == 3) { + active_template->do_4(bytes, dest, p1, p2, p3); + return; + } + + p4 = (unsigned long *) srcs[3]; + active_template->do_5(bytes, dest, p1, p2, p3, p4); +} +EXPORT_SYMBOL(xor_blocks); + +/* Set of all registered templates. */ +static struct xor_block_template *template_list; + +#define BENCH_SIZE (PAGE_SIZE) + +static void +do_xor_speed(struct xor_block_template *tmpl, void *b1, void *b2) +{ + int speed; + unsigned long now; + int i, count, max; + + tmpl->next = template_list; + template_list = tmpl; + + /* + * Count the number of XORs done during a whole jiffy, and use + * this to calculate the speed of checksumming. We use a 2-page + * allocation to have guaranteed color L1-cache layout. 
+ */ + max = 0; + for (i = 0; i < 5; i++) { + now = jiffies; + count = 0; + while (jiffies == now) { + mb(); /* prevent loop optimzation */ + tmpl->do_2(BENCH_SIZE, b1, b2); + mb(); + count++; + mb(); + } + if (count > max) + max = count; + } + + speed = max * (HZ * BENCH_SIZE / 1024); + tmpl->speed = speed; + + printk(KERN_INFO " %-10s: %5d.%03d MB/sec\n", tmpl->name, + speed / 1000, speed % 1000); +} + +static int __init +calibrate_xor_blocks(void) +{ + void *b1, *b2; + struct xor_block_template *f, *fastest; + + /* + * Note: Since the memory is not actually used for _anything_ but to + * test the XOR speed, we don't really want kmemcheck to warn about + * reading uninitialized bytes here. + */ + b1 = (void *) __get_free_pages(GFP_KERNEL | __GFP_NOTRACK, 2); + if (!b1) { + printk(KERN_WARNING "xor: Yikes! No memory available.\n"); + return -ENOMEM; + } + b2 = b1 + 2*PAGE_SIZE + BENCH_SIZE; + + /* + * If this arch/cpu has a short-circuited selection, don't loop through + * all the possible functions, just test the best one + */ + + fastest = NULL; + +#ifdef XOR_SELECT_TEMPLATE + fastest = XOR_SELECT_TEMPLATE(fastest); +#endif + +#define xor_speed(templ) do_xor_speed((templ), b1, b2) + + if (fastest) { + printk(KERN_INFO "xor: automatically using best " + "checksumming function: %s\n", + fastest->name); + xor_speed(fastest); + } else { + printk(KERN_INFO "xor: measuring software checksum speed\n"); + XOR_TRY_TEMPLATES; + fastest = template_list; + for (f = fastest; f; f = f->next) + if (f->speed > fastest->speed) + fastest = f; + } + + printk(KERN_INFO "xor: using function: %s (%d.%03d MB/sec)\n", + fastest->name, fastest->speed / 1000, fastest->speed % 1000); + +#undef xor_speed + + free_pages((unsigned long)b1, 2); + + active_template = fastest; + return 0; +} + +static __exit void xor_exit(void) { } + +MODULE_LICENSE("GPL"); + +/* when built-in xor.o must initialize before drivers/md/md.o */ +core_initcall(calibrate_xor_blocks); +module_exit(xor_exit); diff --git a/crypto/xts.c b/crypto/xts.c new file mode 100644 index 00000000..ca1608f4 --- /dev/null +++ b/crypto/xts.c @@ -0,0 +1,364 @@ +/* XTS: as defined in IEEE1619/D16 + * http://grouper.ieee.org/groups/1619/email/pdf00086.pdf + * (sector sizes which are not a multiple of 16 bytes are, + * however currently unsupported) + * + * Copyright (c) 2007 Rik Snel <rsnel@cube.dyndns.org> + * + * Based om ecb.c + * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au> + * + * This program is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License as published by the Free + * Software Foundation; either version 2 of the License, or (at your option) + * any later version. 
+ */ +#include <crypto/algapi.h> +#include <linux/err.h> +#include <linux/init.h> +#include <linux/kernel.h> +#include <linux/module.h> +#include <linux/scatterlist.h> +#include <linux/slab.h> + +#include <crypto/xts.h> +#include <crypto/b128ops.h> +#include <crypto/gf128mul.h> + +struct priv { + struct crypto_cipher *child; + struct crypto_cipher *tweak; +}; + +static int setkey(struct crypto_tfm *parent, const u8 *key, + unsigned int keylen) +{ + struct priv *ctx = crypto_tfm_ctx(parent); + struct crypto_cipher *child = ctx->tweak; + u32 *flags = &parent->crt_flags; + int err; + + /* key consists of keys of equal size concatenated, therefore + * the length must be even */ + if (keylen % 2) { + /* tell the user why there was an error */ + *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN; + return -EINVAL; + } + + /* we need two cipher instances: one to compute the initial 'tweak' + * by encrypting the IV (usually the 'plain' iv) and the other + * one to encrypt and decrypt the data */ + + /* tweak cipher, uses Key2 i.e. the second half of *key */ + crypto_cipher_clear_flags(child, CRYPTO_TFM_REQ_MASK); + crypto_cipher_set_flags(child, crypto_tfm_get_flags(parent) & + CRYPTO_TFM_REQ_MASK); + err = crypto_cipher_setkey(child, key + keylen/2, keylen/2); + if (err) + return err; + + crypto_tfm_set_flags(parent, crypto_cipher_get_flags(child) & + CRYPTO_TFM_RES_MASK); + + child = ctx->child; + + /* data cipher, uses Key1 i.e. the first half of *key */ + crypto_cipher_clear_flags(child, CRYPTO_TFM_REQ_MASK); + crypto_cipher_set_flags(child, crypto_tfm_get_flags(parent) & + CRYPTO_TFM_REQ_MASK); + err = crypto_cipher_setkey(child, key, keylen/2); + if (err) + return err; + + crypto_tfm_set_flags(parent, crypto_cipher_get_flags(child) & + CRYPTO_TFM_RES_MASK); + + return 0; +} + +struct sinfo { + be128 *t; + struct crypto_tfm *tfm; + void (*fn)(struct crypto_tfm *, u8 *, const u8 *); +}; + +static inline void xts_round(struct sinfo *s, void *dst, const void *src) +{ + be128_xor(dst, s->t, src); /* PP <- T xor P */ + s->fn(s->tfm, dst, dst); /* CC <- E(Key1,PP) */ + be128_xor(dst, dst, s->t); /* C <- T xor CC */ +} + +static int crypt(struct blkcipher_desc *d, + struct blkcipher_walk *w, struct priv *ctx, + void (*tw)(struct crypto_tfm *, u8 *, const u8 *), + void (*fn)(struct crypto_tfm *, u8 *, const u8 *)) +{ + int err; + unsigned int avail; + const int bs = XTS_BLOCK_SIZE; + struct sinfo s = { + .tfm = crypto_cipher_tfm(ctx->child), + .fn = fn + }; + u8 *wsrc; + u8 *wdst; + + err = blkcipher_walk_virt(d, w); + if (!w->nbytes) + return err; + + s.t = (be128 *)w->iv; + avail = w->nbytes; + + wsrc = w->src.virt.addr; + wdst = w->dst.virt.addr; + + /* calculate first value of T */ + tw(crypto_cipher_tfm(ctx->tweak), w->iv, w->iv); + + goto first; + + for (;;) { + do { + gf128mul_x_ble(s.t, s.t); + +first: + xts_round(&s, wdst, wsrc); + + wsrc += bs; + wdst += bs; + } while ((avail -= bs) >= bs); + + err = blkcipher_walk_done(d, w, avail); + if (!w->nbytes) + break; + + avail = w->nbytes; + + wsrc = w->src.virt.addr; + wdst = w->dst.virt.addr; + } + + return err; +} + +static int encrypt(struct blkcipher_desc *desc, struct scatterlist *dst, + struct scatterlist *src, unsigned int nbytes) +{ + struct priv *ctx = crypto_blkcipher_ctx(desc->tfm); + struct blkcipher_walk w; + + blkcipher_walk_init(&w, dst, src, nbytes); + return crypt(desc, &w, ctx, crypto_cipher_alg(ctx->tweak)->cia_encrypt, + crypto_cipher_alg(ctx->child)->cia_encrypt); +} + +static int decrypt(struct blkcipher_desc *desc, struct scatterlist 
*dst, + struct scatterlist *src, unsigned int nbytes) +{ + struct priv *ctx = crypto_blkcipher_ctx(desc->tfm); + struct blkcipher_walk w; + + blkcipher_walk_init(&w, dst, src, nbytes); + return crypt(desc, &w, ctx, crypto_cipher_alg(ctx->tweak)->cia_encrypt, + crypto_cipher_alg(ctx->child)->cia_decrypt); +} + +int xts_crypt(struct blkcipher_desc *desc, struct scatterlist *sdst, + struct scatterlist *ssrc, unsigned int nbytes, + struct xts_crypt_req *req) +{ + const unsigned int bsize = XTS_BLOCK_SIZE; + const unsigned int max_blks = req->tbuflen / bsize; + struct blkcipher_walk walk; + unsigned int nblocks; + be128 *src, *dst, *t; + be128 *t_buf = req->tbuf; + int err, i; + + BUG_ON(max_blks < 1); + + blkcipher_walk_init(&walk, sdst, ssrc, nbytes); + + err = blkcipher_walk_virt(desc, &walk); + nbytes = walk.nbytes; + if (!nbytes) + return err; + + nblocks = min(nbytes / bsize, max_blks); + src = (be128 *)walk.src.virt.addr; + dst = (be128 *)walk.dst.virt.addr; + + /* calculate first value of T */ + req->tweak_fn(req->tweak_ctx, (u8 *)&t_buf[0], walk.iv); + + i = 0; + goto first; + + for (;;) { + do { + for (i = 0; i < nblocks; i++) { + gf128mul_x_ble(&t_buf[i], t); +first: + t = &t_buf[i]; + + /* PP <- T xor P */ + be128_xor(dst + i, t, src + i); + } + + /* CC <- E(Key2,PP) */ + req->crypt_fn(req->crypt_ctx, (u8 *)dst, + nblocks * bsize); + + /* C <- T xor CC */ + for (i = 0; i < nblocks; i++) + be128_xor(dst + i, dst + i, &t_buf[i]); + + src += nblocks; + dst += nblocks; + nbytes -= nblocks * bsize; + nblocks = min(nbytes / bsize, max_blks); + } while (nblocks > 0); + + *(be128 *)walk.iv = *t; + + err = blkcipher_walk_done(desc, &walk, nbytes); + nbytes = walk.nbytes; + if (!nbytes) + break; + + nblocks = min(nbytes / bsize, max_blks); + src = (be128 *)walk.src.virt.addr; + dst = (be128 *)walk.dst.virt.addr; + } + + return err; +} +EXPORT_SYMBOL_GPL(xts_crypt); + +static int init_tfm(struct crypto_tfm *tfm) +{ + struct crypto_cipher *cipher; + struct crypto_instance *inst = (void *)tfm->__crt_alg; + struct crypto_spawn *spawn = crypto_instance_ctx(inst); + struct priv *ctx = crypto_tfm_ctx(tfm); + u32 *flags = &tfm->crt_flags; + + cipher = crypto_spawn_cipher(spawn); + if (IS_ERR(cipher)) + return PTR_ERR(cipher); + + if (crypto_cipher_blocksize(cipher) != XTS_BLOCK_SIZE) { + *flags |= CRYPTO_TFM_RES_BAD_BLOCK_LEN; + crypto_free_cipher(cipher); + return -EINVAL; + } + + ctx->child = cipher; + + cipher = crypto_spawn_cipher(spawn); + if (IS_ERR(cipher)) { + crypto_free_cipher(ctx->child); + return PTR_ERR(cipher); + } + + /* this check isn't really needed, leave it here just in case */ + if (crypto_cipher_blocksize(cipher) != XTS_BLOCK_SIZE) { + crypto_free_cipher(cipher); + crypto_free_cipher(ctx->child); + *flags |= CRYPTO_TFM_RES_BAD_BLOCK_LEN; + return -EINVAL; + } + + ctx->tweak = cipher; + + return 0; +} + +static void exit_tfm(struct crypto_tfm *tfm) +{ + struct priv *ctx = crypto_tfm_ctx(tfm); + crypto_free_cipher(ctx->child); + crypto_free_cipher(ctx->tweak); +} + +static struct crypto_instance *alloc(struct rtattr **tb) +{ + struct crypto_instance *inst; + struct crypto_alg *alg; + int err; + + err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_BLKCIPHER); + if (err) + return ERR_PTR(err); + + alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_CIPHER, + CRYPTO_ALG_TYPE_MASK); + if (IS_ERR(alg)) + return ERR_CAST(alg); + + inst = crypto_alloc_instance("xts", alg); + if (IS_ERR(inst)) + goto out_put_alg; + + inst->alg.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER; + inst->alg.cra_priority = 
alg->cra_priority; + inst->alg.cra_blocksize = alg->cra_blocksize; + + if (alg->cra_alignmask < 7) + inst->alg.cra_alignmask = 7; + else + inst->alg.cra_alignmask = alg->cra_alignmask; + + inst->alg.cra_type = &crypto_blkcipher_type; + + inst->alg.cra_blkcipher.ivsize = alg->cra_blocksize; + inst->alg.cra_blkcipher.min_keysize = + 2 * alg->cra_cipher.cia_min_keysize; + inst->alg.cra_blkcipher.max_keysize = + 2 * alg->cra_cipher.cia_max_keysize; + + inst->alg.cra_ctxsize = sizeof(struct priv); + + inst->alg.cra_init = init_tfm; + inst->alg.cra_exit = exit_tfm; + + inst->alg.cra_blkcipher.setkey = setkey; + inst->alg.cra_blkcipher.encrypt = encrypt; + inst->alg.cra_blkcipher.decrypt = decrypt; + +out_put_alg: + crypto_mod_put(alg); + return inst; +} + +static void free(struct crypto_instance *inst) +{ + crypto_drop_spawn(crypto_instance_ctx(inst)); + kfree(inst); +} + +static struct crypto_template crypto_tmpl = { + .name = "xts", + .alloc = alloc, + .free = free, + .module = THIS_MODULE, +}; + +static int __init crypto_module_init(void) +{ + return crypto_register_template(&crypto_tmpl); +} + +static void __exit crypto_module_exit(void) +{ + crypto_unregister_template(&crypto_tmpl); +} + +module_init(crypto_module_init); +module_exit(crypto_module_exit); + +MODULE_LICENSE("GPL"); +MODULE_DESCRIPTION("XTS block cipher mode"); diff --git a/crypto/zlib.c b/crypto/zlib.c new file mode 100644 index 00000000..06b62e5c --- /dev/null +++ b/crypto/zlib.c @@ -0,0 +1,380 @@ +/* + * Cryptographic API. + * + * Zlib algorithm + * + * Copyright 2008 Sony Corporation + * + * Based on deflate.c, which is + * Copyright (c) 2003 James Morris <jmorris@intercode.com.au> + * + * This program is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License as published by the Free + * Software Foundation; either version 2 of the License, or (at your option) + * any later version. + * + * FIXME: deflate transforms will require up to a total of about 436k of kernel + * memory on i386 (390k for compression, the rest for decompression), as the + * current zlib kernel code uses a worst case pre-allocation system by default. + * This needs to be fixed so that the amount of memory required is properly + * related to the winbits and memlevel parameters. 
+ */ + +#define pr_fmt(fmt) "%s: " fmt, __func__ + +#include <linux/init.h> +#include <linux/module.h> +#include <linux/zlib.h> +#include <linux/vmalloc.h> +#include <linux/interrupt.h> +#include <linux/mm.h> +#include <linux/net.h> + +#include <crypto/internal/compress.h> + +#include <net/netlink.h> + + +struct zlib_ctx { + struct z_stream_s comp_stream; + struct z_stream_s decomp_stream; + int decomp_windowBits; +}; + + +static void zlib_comp_exit(struct zlib_ctx *ctx) +{ + struct z_stream_s *stream = &ctx->comp_stream; + + if (stream->workspace) { + zlib_deflateEnd(stream); + vfree(stream->workspace); + stream->workspace = NULL; + } +} + +static void zlib_decomp_exit(struct zlib_ctx *ctx) +{ + struct z_stream_s *stream = &ctx->decomp_stream; + + if (stream->workspace) { + zlib_inflateEnd(stream); + vfree(stream->workspace); + stream->workspace = NULL; + } +} + +static int zlib_init(struct crypto_tfm *tfm) +{ + return 0; +} + +static void zlib_exit(struct crypto_tfm *tfm) +{ + struct zlib_ctx *ctx = crypto_tfm_ctx(tfm); + + zlib_comp_exit(ctx); + zlib_decomp_exit(ctx); +} + + +static int zlib_compress_setup(struct crypto_pcomp *tfm, void *params, + unsigned int len) +{ + struct zlib_ctx *ctx = crypto_tfm_ctx(crypto_pcomp_tfm(tfm)); + struct z_stream_s *stream = &ctx->comp_stream; + struct nlattr *tb[ZLIB_COMP_MAX + 1]; + int window_bits, mem_level; + size_t workspacesize; + int ret; + + ret = nla_parse(tb, ZLIB_COMP_MAX, params, len, NULL); + if (ret) + return ret; + + zlib_comp_exit(ctx); + + window_bits = tb[ZLIB_COMP_WINDOWBITS] + ? nla_get_u32(tb[ZLIB_COMP_WINDOWBITS]) + : MAX_WBITS; + mem_level = tb[ZLIB_COMP_MEMLEVEL] + ? nla_get_u32(tb[ZLIB_COMP_MEMLEVEL]) + : DEF_MEM_LEVEL; + + workspacesize = zlib_deflate_workspacesize(window_bits, mem_level); + stream->workspace = vzalloc(workspacesize); + if (!stream->workspace) + return -ENOMEM; + + ret = zlib_deflateInit2(stream, + tb[ZLIB_COMP_LEVEL] + ? nla_get_u32(tb[ZLIB_COMP_LEVEL]) + : Z_DEFAULT_COMPRESSION, + tb[ZLIB_COMP_METHOD] + ? nla_get_u32(tb[ZLIB_COMP_METHOD]) + : Z_DEFLATED, + window_bits, + mem_level, + tb[ZLIB_COMP_STRATEGY] + ? 
nla_get_u32(tb[ZLIB_COMP_STRATEGY]) + : Z_DEFAULT_STRATEGY); + if (ret != Z_OK) { + vfree(stream->workspace); + stream->workspace = NULL; + return -EINVAL; + } + + return 0; +} + +static int zlib_compress_init(struct crypto_pcomp *tfm) +{ + int ret; + struct zlib_ctx *dctx = crypto_tfm_ctx(crypto_pcomp_tfm(tfm)); + struct z_stream_s *stream = &dctx->comp_stream; + + ret = zlib_deflateReset(stream); + if (ret != Z_OK) + return -EINVAL; + + return 0; +} + +static int zlib_compress_update(struct crypto_pcomp *tfm, + struct comp_request *req) +{ + int ret; + struct zlib_ctx *dctx = crypto_tfm_ctx(crypto_pcomp_tfm(tfm)); + struct z_stream_s *stream = &dctx->comp_stream; + + pr_debug("avail_in %u, avail_out %u\n", req->avail_in, req->avail_out); + stream->next_in = req->next_in; + stream->avail_in = req->avail_in; + stream->next_out = req->next_out; + stream->avail_out = req->avail_out; + + ret = zlib_deflate(stream, Z_NO_FLUSH); + switch (ret) { + case Z_OK: + break; + + case Z_BUF_ERROR: + pr_debug("zlib_deflate could not make progress\n"); + return -EAGAIN; + + default: + pr_debug("zlib_deflate failed %d\n", ret); + return -EINVAL; + } + + ret = req->avail_out - stream->avail_out; + pr_debug("avail_in %u, avail_out %u (consumed %u, produced %u)\n", + stream->avail_in, stream->avail_out, + req->avail_in - stream->avail_in, ret); + req->next_in = stream->next_in; + req->avail_in = stream->avail_in; + req->next_out = stream->next_out; + req->avail_out = stream->avail_out; + return ret; +} + +static int zlib_compress_final(struct crypto_pcomp *tfm, + struct comp_request *req) +{ + int ret; + struct zlib_ctx *dctx = crypto_tfm_ctx(crypto_pcomp_tfm(tfm)); + struct z_stream_s *stream = &dctx->comp_stream; + + pr_debug("avail_in %u, avail_out %u\n", req->avail_in, req->avail_out); + stream->next_in = req->next_in; + stream->avail_in = req->avail_in; + stream->next_out = req->next_out; + stream->avail_out = req->avail_out; + + ret = zlib_deflate(stream, Z_FINISH); + if (ret != Z_STREAM_END) { + pr_debug("zlib_deflate failed %d\n", ret); + return -EINVAL; + } + + ret = req->avail_out - stream->avail_out; + pr_debug("avail_in %u, avail_out %u (consumed %u, produced %u)\n", + stream->avail_in, stream->avail_out, + req->avail_in - stream->avail_in, ret); + req->next_in = stream->next_in; + req->avail_in = stream->avail_in; + req->next_out = stream->next_out; + req->avail_out = stream->avail_out; + return ret; +} + + +static int zlib_decompress_setup(struct crypto_pcomp *tfm, void *params, + unsigned int len) +{ + struct zlib_ctx *ctx = crypto_tfm_ctx(crypto_pcomp_tfm(tfm)); + struct z_stream_s *stream = &ctx->decomp_stream; + struct nlattr *tb[ZLIB_DECOMP_MAX + 1]; + int ret = 0; + + ret = nla_parse(tb, ZLIB_DECOMP_MAX, params, len, NULL); + if (ret) + return ret; + + zlib_decomp_exit(ctx); + + ctx->decomp_windowBits = tb[ZLIB_DECOMP_WINDOWBITS] + ? 
nla_get_u32(tb[ZLIB_DECOMP_WINDOWBITS]) + : DEF_WBITS; + + stream->workspace = vzalloc(zlib_inflate_workspacesize()); + if (!stream->workspace) + return -ENOMEM; + + ret = zlib_inflateInit2(stream, ctx->decomp_windowBits); + if (ret != Z_OK) { + vfree(stream->workspace); + stream->workspace = NULL; + return -EINVAL; + } + + return 0; +} + +static int zlib_decompress_init(struct crypto_pcomp *tfm) +{ + int ret; + struct zlib_ctx *dctx = crypto_tfm_ctx(crypto_pcomp_tfm(tfm)); + struct z_stream_s *stream = &dctx->decomp_stream; + + ret = zlib_inflateReset(stream); + if (ret != Z_OK) + return -EINVAL; + + return 0; +} + +static int zlib_decompress_update(struct crypto_pcomp *tfm, + struct comp_request *req) +{ + int ret; + struct zlib_ctx *dctx = crypto_tfm_ctx(crypto_pcomp_tfm(tfm)); + struct z_stream_s *stream = &dctx->decomp_stream; + + pr_debug("avail_in %u, avail_out %u\n", req->avail_in, req->avail_out); + stream->next_in = req->next_in; + stream->avail_in = req->avail_in; + stream->next_out = req->next_out; + stream->avail_out = req->avail_out; + + ret = zlib_inflate(stream, Z_SYNC_FLUSH); + switch (ret) { + case Z_OK: + case Z_STREAM_END: + break; + + case Z_BUF_ERROR: + pr_debug("zlib_inflate could not make progress\n"); + return -EAGAIN; + + default: + pr_debug("zlib_inflate failed %d\n", ret); + return -EINVAL; + } + + ret = req->avail_out - stream->avail_out; + pr_debug("avail_in %u, avail_out %u (consumed %u, produced %u)\n", + stream->avail_in, stream->avail_out, + req->avail_in - stream->avail_in, ret); + req->next_in = stream->next_in; + req->avail_in = stream->avail_in; + req->next_out = stream->next_out; + req->avail_out = stream->avail_out; + return ret; +} + +static int zlib_decompress_final(struct crypto_pcomp *tfm, + struct comp_request *req) +{ + int ret; + struct zlib_ctx *dctx = crypto_tfm_ctx(crypto_pcomp_tfm(tfm)); + struct z_stream_s *stream = &dctx->decomp_stream; + + pr_debug("avail_in %u, avail_out %u\n", req->avail_in, req->avail_out); + stream->next_in = req->next_in; + stream->avail_in = req->avail_in; + stream->next_out = req->next_out; + stream->avail_out = req->avail_out; + + if (dctx->decomp_windowBits < 0) { + ret = zlib_inflate(stream, Z_SYNC_FLUSH); + /* + * Work around a bug in zlib, which sometimes wants to taste an + * extra byte when being used in the (undocumented) raw deflate + * mode. (From USAGI). 
+ */ + if (ret == Z_OK && !stream->avail_in && stream->avail_out) { + const void *saved_next_in = stream->next_in; + u8 zerostuff = 0; + + stream->next_in = &zerostuff; + stream->avail_in = 1; + ret = zlib_inflate(stream, Z_FINISH); + stream->next_in = saved_next_in; + stream->avail_in = 0; + } + } else + ret = zlib_inflate(stream, Z_FINISH); + if (ret != Z_STREAM_END) { + pr_debug("zlib_inflate failed %d\n", ret); + return -EINVAL; + } + + ret = req->avail_out - stream->avail_out; + pr_debug("avail_in %u, avail_out %u (consumed %u, produced %u)\n", + stream->avail_in, stream->avail_out, + req->avail_in - stream->avail_in, ret); + req->next_in = stream->next_in; + req->avail_in = stream->avail_in; + req->next_out = stream->next_out; + req->avail_out = stream->avail_out; + return ret; +} + + +static struct pcomp_alg zlib_alg = { + .compress_setup = zlib_compress_setup, + .compress_init = zlib_compress_init, + .compress_update = zlib_compress_update, + .compress_final = zlib_compress_final, + .decompress_setup = zlib_decompress_setup, + .decompress_init = zlib_decompress_init, + .decompress_update = zlib_decompress_update, + .decompress_final = zlib_decompress_final, + + .base = { + .cra_name = "zlib", + .cra_flags = CRYPTO_ALG_TYPE_PCOMPRESS, + .cra_ctxsize = sizeof(struct zlib_ctx), + .cra_module = THIS_MODULE, + .cra_init = zlib_init, + .cra_exit = zlib_exit, + } +}; + +static int __init zlib_mod_init(void) +{ + return crypto_register_pcomp(&zlib_alg); +} + +static void __exit zlib_mod_fini(void) +{ + crypto_unregister_pcomp(&zlib_alg); +} + +module_init(zlib_mod_init); +module_exit(zlib_mod_fini); + +MODULE_LICENSE("GPL"); +MODULE_DESCRIPTION("Zlib Compression Algorithm"); +MODULE_AUTHOR("Sony Corporation"); |
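
The eight lookup tables C0..C7 used by wp512_process_buffer() are byte-wise rotations of one another; for example, every C7 entry above equals the corresponding C6 entry rotated right by 8 bits. A minimal sketch of that relationship, using a hypothetical ror64() helper that is not part of the file above:

#include <stdint.h>

/* hypothetical helper, shown only to state the table relationship; n in 1..63 */
static inline uint64_t ror64(uint64_t x, unsigned int n)
{
	return (x >> n) | (x << (64 - n));
}

/*
 * For every byte value b and table index i in 1..7:
 *	Ci[b] == ror64(C0[b], 8 * i)
 * e.g. C7[0] == ror64(C6[0], 8):
 *	0x186018c07830d818ULL == ror64(0x6018c07830d81818ULL, 8)
 * The seven extra tables trade 14 KiB of memory for the rotates a
 * single-table implementation would otherwise perform in every round.
 */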
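
The carry loop at the top of wp512_update() maintains Whirlpool's 256-bit, big-endian count of the message length in bits. A standalone sketch of the same update, assuming a 32-byte counter laid out exactly like wctx->bitLength (most significant byte first):

#include <stdint.h>

/* Add a 64-bit bit count into a 32-byte big-endian counter, mirroring
 * the carry loop in wp512_update(). */
static void wp_add_bits(uint8_t counter[32], uint64_t bits)
{
	uint32_t carry = 0;
	int i;

	for (i = 31; i >= 0 && (carry != 0 || bits != 0); i--) {
		carry += counter[i] + (uint32_t)(bits & 0xff);
		counter[i] = (uint8_t)carry;
		carry >>= 8;
		bits >>= 8;
	}
}

The loop stops as soon as both the carry and the remaining length bits are zero, so the high-order bytes of the counter are only touched when a carry actually reaches them.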
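
crypto_xcbc_digest_final() applies the RFC 3566 rule for the last block: a complete final block is XORed with the constant derived from 0x02..02, while an incomplete one is padded with a single 0x80 byte plus zeros and XORed with the constant derived from 0x03..03 (both constants come from crypto_xcbc_digest_setkey()). A condensed sketch of that decision with hypothetical names; the kernel code above works in place on its aligned odds/prev/consts buffers instead:

#include <stdint.h>
#include <string.h>

#define BS 16	/* block size; xcbc_create() only accepts ciphers with a 16-byte block */

/*
 * last/len:  the (possibly partial) final block, 0 <= len <= BS
 * prev:      the running CBC value after all full blocks
 * k2, k3:    E(key, 0x02..02) and E(key, 0x03..03), as computed in setkey
 * encrypt:   hypothetical one-block encryption under the derived key K1
 */
static void xcbc_finish(uint8_t mac[BS], const uint8_t prev[BS],
			const uint8_t *last, size_t len,
			const uint8_t k2[BS], const uint8_t k3[BS],
			void (*encrypt)(uint8_t *dst, const uint8_t *src))
{
	uint8_t block[BS];
	const uint8_t *k = k2;			/* complete final block: use K2 */
	size_t i;

	memcpy(block, last, len);
	if (len != BS) {			/* partial final block: pad 10*, use K3 */
		block[len] = 0x80;
		memset(block + len + 1, 0, BS - len - 1);
		k = k3;
	}
	for (i = 0; i < BS; i++)
		block[i] ^= prev[i] ^ k[i];
	encrypt(mac, block);
}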
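
xor_blocks() accumulates one to four source buffers into dest, using whichever template calibrate_xor_blocks() measured as fastest. A hypothetical caller computing the XOR (RAID-5-style parity) of two page-sized buffers; p, d0 and d1 are assumed to be page-sized and suitably aligned for unsigned long accesses:

	void *srcs[2] = { d0, d1 };

	memset(p, 0, PAGE_SIZE);		/* dest is accumulated into, so start from zero */
	xor_blocks(2, PAGE_SIZE, p, srcs);	/* p now holds d0 ^ d1 */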
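
In both crypt() and xts_crypt() above, each successive tweak is the previous one multiplied by x (alpha) in GF(2^128) via gf128mul_x_ble(), and every block is handled as C = E(Key1, P xor T) xor T, with the first tweak obtained by encrypting the IV under Key2. A self-contained sketch of the tweak doubling on a 16-byte tweak, assuming the IEEE 1619 convention of a least-significant-byte-first tweak and the reduction polynomial x^128 + x^7 + x^2 + x + 1:

#include <stdint.h>

/* Multiply the 128-bit tweak T by x in GF(2^128); T is stored least
 * significant byte first. */
static void xts_mul_alpha(uint8_t t[16])
{
	uint8_t carry = 0;
	int i;

	for (i = 0; i < 16; i++) {
		uint8_t msb = t[i] >> 7;

		t[i] = (uint8_t)((t[i] << 1) | carry);
		carry = msb;
	}
	if (carry)	/* bit 127 fell off: reduce by x^128 + x^7 + x^2 + x + 1 */
		t[0] ^= 0x87;
}

The key handling in setkey() mirrors the same standard: the supplied key must consist of two equal halves Key1 || Key2, with the second half keying the tweak cipher and the first half keying the data cipher.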