/*
 * s390 implementation of the AES Cipher Algorithm.
 *
 * Copyright IBM Corp. 2005,2007
 * Author(s): Jan Glauber (jang@de.ibm.com)
 *
 * Derived from "crypto/aes_generic.c"
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 */

#include <crypto/aes.h>
#include <crypto/algapi.h>
#include <linux/module.h>
#include <linux/init.h>
#include "crypt_s390.h"

#define AES_KEYLEN_128	1
#define AES_KEYLEN_192	2
#define AES_KEYLEN_256	4

static char keylen_flag = 0;

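/*
 * Per-transform context.  The layout matters: for CBC the driver hands
 * &sctx->iv to the KMC instruction as its parameter block, which expects
 * the chaining value immediately followed by the key, so iv[] and key[]
 * are kept adjacent here.
 */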
struct s390_aes_ctx {
	u8 iv[AES_BLOCK_SIZE];
	u8 key[AES_MAX_KEY_SIZE];
	long enc;
	long dec;
	int key_len;
};

static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	u32 *flags = &tfm->crt_flags;

	/* only accept key lengths the machine's CPACF facility supports */
	switch (key_len) {
	case 16:
		if (!(keylen_flag & AES_KEYLEN_128))
			goto fail;
		break;
	case 24:
		if (!(keylen_flag & AES_KEYLEN_192))
			goto fail;
		break;
	case 32:
		if (!(keylen_flag & AES_KEYLEN_256))
			goto fail;
		break;
	default:
		goto fail;
	}

	sctx->key_len = key_len;
	memcpy(sctx->key, in_key, key_len);
	return 0;

fail:
	*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
	return -EINVAL;
}

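/*
 * Single-block cipher operations.  These go through the CPACF KM (cipher
 * message) instruction via crypt_s390_km(), processing exactly one
 * AES_BLOCK_SIZE chunk per call; the key-length switch selects the
 * matching KM function code.
 */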
static void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
	const struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	switch (sctx->key_len) {
	case 16:
		crypt_s390_km(KM_AES_128_ENCRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	case 24:
		crypt_s390_km(KM_AES_192_ENCRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	case 32:
		crypt_s390_km(KM_AES_256_ENCRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	}
}

static void aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
	const struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	switch (sctx->key_len) {
	case 16:
		crypt_s390_km(KM_AES_128_DECRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	case 24:
		crypt_s390_km(KM_AES_192_DECRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	case 32:
		crypt_s390_km(KM_AES_256_DECRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	}
}

static struct crypto_alg aes_alg = {
	.cra_name = "aes",
	.cra_driver_name = "aes-s390",
	.cra_priority = CRYPT_S390_PRIORITY,
	.cra_flags = CRYPTO_ALG_TYPE_CIPHER |
		     CRYPTO_ALG_NEED_FALLBACK,
	.cra_blocksize = AES_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct s390_aes_ctx),
	.cra_module = THIS_MODULE,
	.cra_list = LIST_HEAD_INIT(aes_alg.cra_list),
	.cra_u = {
		.cipher = {
			.cia_min_keysize = AES_MIN_KEY_SIZE,
			.cia_max_keysize = AES_MAX_KEY_SIZE,
			.cia_setkey = aes_set_key,
			.cia_encrypt = aes_encrypt,
			.cia_decrypt = aes_decrypt,
		}
	}
};

static int ecb_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			   unsigned int key_len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	switch (key_len) {
	case 16:
		sctx->enc = KM_AES_128_ENCRYPT;
		sctx->dec = KM_AES_128_DECRYPT;
		break;
	case 24:
		sctx->enc = KM_AES_192_ENCRYPT;
		sctx->dec = KM_AES_192_DECRYPT;
		break;
	case 32:
		sctx->enc = KM_AES_256_ENCRYPT;
		sctx->dec = KM_AES_256_DECRYPT;
		break;
	}

	return aes_set_key(tfm, in_key, key_len);
}

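/*
 * Bulk ECB path: walk the scatterlists, round each mapped chunk down to a
 * whole number of AES blocks and hand it to the KM instruction in a single
 * call; blkcipher_walk_done() is told how many bytes of the chunk were
 * left unprocessed.
 */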
static int ecb_aes_crypt(struct blkcipher_desc *desc, long func, void *param,
			 struct blkcipher_walk *walk)
{
	int ret = blkcipher_walk_virt(desc, walk);
	unsigned int nbytes;

	while ((nbytes = walk->nbytes)) {
		/* only use complete blocks */
		unsigned int n = nbytes & ~(AES_BLOCK_SIZE - 1);
		u8 *out = walk->dst.virt.addr;
		u8 *in = walk->src.virt.addr;

		ret = crypt_s390_km(func, param, out, in, n);
		BUG_ON((ret < 0) || (ret != n));

		nbytes &= AES_BLOCK_SIZE - 1;
		ret = blkcipher_walk_done(desc, walk, nbytes);
	}

	return ret;
}

static int ecb_aes_encrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ecb_aes_crypt(desc, sctx->enc, sctx->key, &walk);
}

static int ecb_aes_decrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ecb_aes_crypt(desc, sctx->dec, sctx->key, &walk);
}

static struct crypto_alg ecb_aes_alg = {
	.cra_name = "ecb(aes)",
	.cra_driver_name = "ecb-aes-s390",
	.cra_priority = CRYPT_S390_COMPOSITE_PRIORITY,
	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER |
		     CRYPTO_ALG_NEED_FALLBACK,
	.cra_blocksize = AES_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct s390_aes_ctx),
	.cra_type = &crypto_blkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_list = LIST_HEAD_INIT(ecb_aes_alg.cra_list),
	.cra_u = {
		.blkcipher = {
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.setkey = ecb_aes_set_key,
			.encrypt = ecb_aes_encrypt,
			.decrypt = ecb_aes_decrypt,
		}
	}
};

static int cbc_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			   unsigned int key_len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	switch (key_len) {
	case 16:
		sctx->enc = KMC_AES_128_ENCRYPT;
		sctx->dec = KMC_AES_128_DECRYPT;
		break;
	case 24:
		sctx->enc = KMC_AES_192_ENCRYPT;
		sctx->dec = KMC_AES_192_DECRYPT;
		break;
	case 32:
		sctx->enc = KMC_AES_256_ENCRYPT;
		sctx->dec = KMC_AES_256_DECRYPT;
		break;
	}

	return aes_set_key(tfm, in_key, key_len);
}

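/*
 * Bulk CBC path.  KMC (cipher message with chaining) keeps the chaining
 * value in its parameter block, so the request IV is copied into the
 * parameter block once before the walk and the final chaining value is
 * copied back to walk->iv after all data has been processed.
 */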
static int cbc_aes_crypt(struct blkcipher_desc *desc, long func, void *param,
			 struct blkcipher_walk *walk)
{
	int ret = blkcipher_walk_virt(desc, walk);
	unsigned int nbytes = walk->nbytes;

	if (!nbytes)
		goto out;

	memcpy(param, walk->iv, AES_BLOCK_SIZE);
	do {
		/* only use complete blocks */
		unsigned int n = nbytes & ~(AES_BLOCK_SIZE - 1);
		u8 *out = walk->dst.virt.addr;
		u8 *in = walk->src.virt.addr;

		ret = crypt_s390_kmc(func, param, out, in, n);
		BUG_ON((ret < 0) || (ret != n));

		nbytes &= AES_BLOCK_SIZE - 1;
		ret = blkcipher_walk_done(desc, walk, nbytes);
	} while ((nbytes = walk->nbytes));
	memcpy(walk->iv, param, AES_BLOCK_SIZE);

out:
	return ret;
}

static int cbc_aes_encrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return cbc_aes_crypt(desc, sctx->enc, sctx->iv, &walk);
}

static int cbc_aes_decrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return cbc_aes_crypt(desc, sctx->dec, sctx->iv, &walk);
}

static struct crypto_alg cbc_aes_alg = {
	.cra_name = "cbc(aes)",
	.cra_driver_name = "cbc-aes-s390",
	.cra_priority = CRYPT_S390_COMPOSITE_PRIORITY,
	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER |
		     CRYPTO_ALG_NEED_FALLBACK,
	.cra_blocksize = AES_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct s390_aes_ctx),
	.cra_type = &crypto_blkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_list = LIST_HEAD_INIT(cbc_aes_alg.cra_list),
	.cra_u = {
		.blkcipher = {
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			.setkey = cbc_aes_set_key,
			.encrypt = cbc_aes_encrypt,
			.decrypt = cbc_aes_decrypt,
		}
	}
};

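/*
 * Module init/exit.  aes_init() probes which AES key sizes the CPACF
 * facility of this machine provides before registering the plain cipher
 * and the ECB/CBC blkcipher algorithms; registrations are unwound in
 * reverse order if a later one fails.
 */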
static int __init aes_init(void)
{
	int ret;

	if (crypt_s390_func_available(KM_AES_128_ENCRYPT))
		keylen_flag |= AES_KEYLEN_128;
	if (crypt_s390_func_available(KM_AES_192_ENCRYPT))
		keylen_flag |= AES_KEYLEN_192;
	if (crypt_s390_func_available(KM_AES_256_ENCRYPT))
		keylen_flag |= AES_KEYLEN_256;

	if (!keylen_flag)
		return -EOPNOTSUPP;

	/* z9 109 and z9 BC/EC only support 128 bit key length */
	if (keylen_flag == AES_KEYLEN_128) {
		aes_alg.cra_u.cipher.cia_max_keysize = AES_MIN_KEY_SIZE;
		ecb_aes_alg.cra_u.blkcipher.max_keysize = AES_MIN_KEY_SIZE;
		cbc_aes_alg.cra_u.blkcipher.max_keysize = AES_MIN_KEY_SIZE;
		printk(KERN_INFO
		       "aes_s390: hardware acceleration only available for"
		       " 128 bit keys\n");
	}

	ret = crypto_register_alg(&aes_alg);
	if (ret)
		goto aes_err;

	ret = crypto_register_alg(&ecb_aes_alg);
	if (ret)
		goto ecb_aes_err;

	ret = crypto_register_alg(&cbc_aes_alg);
	if (ret)
		goto cbc_aes_err;

out:
	return ret;

cbc_aes_err:
	crypto_unregister_alg(&ecb_aes_alg);
ecb_aes_err:
	crypto_unregister_alg(&aes_alg);
aes_err:
	goto out;
}

static void __exit aes_fini(void)
{
	crypto_unregister_alg(&cbc_aes_alg);
	crypto_unregister_alg(&ecb_aes_alg);
	crypto_unregister_alg(&aes_alg);
}

module_init(aes_init);
module_exit(aes_fini);

MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm");
MODULE_LICENSE("GPL");