/*
 * PCBC: Propagating Cipher Block Chaining mode
 *
 * Copyright (C) 2006 Red Hat, Inc. All Rights Reserved.
 * Written by David Howells (dhowells@redhat.com)
 *
 * Derived from cbc.c
 * - Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 */

#include <crypto/algapi.h>
#include <crypto/internal/skcipher.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/compiler.h>

struct crypto_pcbc_ctx {
        struct crypto_cipher *child;
};

static int crypto_pcbc_setkey(struct crypto_skcipher *parent, const u8 *key,
                              unsigned int keylen)
{
        struct crypto_pcbc_ctx *ctx = crypto_skcipher_ctx(parent);
        struct crypto_cipher *child = ctx->child;
        int err;

        crypto_cipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
        crypto_cipher_set_flags(child, crypto_skcipher_get_flags(parent) &
                                       CRYPTO_TFM_REQ_MASK);
        err = crypto_cipher_setkey(child, key, keylen);
        crypto_skcipher_set_flags(parent, crypto_cipher_get_flags(child) &
                                          CRYPTO_TFM_RES_MASK);
        return err;
}

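/*
 * PCBC chains both plaintext and ciphertext: C[i] = E(P[i] ^ P[i-1] ^ C[i-1]).
 * walk->iv carries the running P[i-1] ^ C[i-1] value, seeded with the IV for
 * the first block.  This variant handles distinct source and destination
 * buffers.
 */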
static int crypto_pcbc_encrypt_segment(struct skcipher_request *req,
                                       struct skcipher_walk *walk,
                                       struct crypto_cipher *tfm)
{
        int bsize = crypto_cipher_blocksize(tfm);
        unsigned int nbytes = walk->nbytes;
        u8 *src = walk->src.virt.addr;
        u8 *dst = walk->dst.virt.addr;
        u8 * const iv = walk->iv;

        do {
                crypto_xor(iv, src, bsize);
                crypto_cipher_encrypt_one(tfm, dst, iv);
                crypto_xor_cpy(iv, dst, src, bsize);

                src += bsize;
                dst += bsize;
        } while ((nbytes -= bsize) >= bsize);

        return nbytes;
}

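/*
 * In-place variant: the plaintext block is overwritten by the ciphertext,
 * but both are needed for the next chaining value, so the plaintext is
 * saved in tmpbuf first.
 */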
static int crypto_pcbc_encrypt_inplace(struct skcipher_request *req,
                                       struct skcipher_walk *walk,
                                       struct crypto_cipher *tfm)
{
        int bsize = crypto_cipher_blocksize(tfm);
        unsigned int nbytes = walk->nbytes;
        u8 *src = walk->src.virt.addr;
        u8 * const iv = walk->iv;
        u8 tmpbuf[MAX_CIPHER_BLOCKSIZE];

        do {
                memcpy(tmpbuf, src, bsize);
                crypto_xor(iv, src, bsize);
                crypto_cipher_encrypt_one(tfm, src, iv);
                crypto_xor_cpy(iv, tmpbuf, src, bsize);

                src += bsize;
        } while ((nbytes -= bsize) >= bsize);

        return nbytes;
}

static int crypto_pcbc_encrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct crypto_pcbc_ctx *ctx = crypto_skcipher_ctx(tfm);
        struct crypto_cipher *child = ctx->child;
        struct skcipher_walk walk;
        unsigned int nbytes;
        int err;

        err = skcipher_walk_virt(&walk, req, false);

        while ((nbytes = walk.nbytes)) {
                if (walk.src.virt.addr == walk.dst.virt.addr)
                        nbytes = crypto_pcbc_encrypt_inplace(req, &walk,
                                                             child);
                else
                        nbytes = crypto_pcbc_encrypt_segment(req, &walk,
                                                             child);
                err = skcipher_walk_done(&walk, nbytes);
        }

        return err;
}

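/*
 * PCBC decryption reverses the chaining: P[i] = D(C[i]) ^ P[i-1] ^ C[i-1],
 * after which the chaining value is updated to P[i] ^ C[i] for the next block.
 */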
static int crypto_pcbc_decrypt_segment(struct skcipher_request *req,
                                       struct skcipher_walk *walk,
                                       struct crypto_cipher *tfm)
{
        int bsize = crypto_cipher_blocksize(tfm);
        unsigned int nbytes = walk->nbytes;
        u8 *src = walk->src.virt.addr;
        u8 *dst = walk->dst.virt.addr;
        u8 * const iv = walk->iv;

        do {
                crypto_cipher_decrypt_one(tfm, dst, src);
                crypto_xor(dst, iv, bsize);
                crypto_xor_cpy(iv, dst, src, bsize);

                src += bsize;
                dst += bsize;
        } while ((nbytes -= bsize) >= bsize);

        return nbytes;
}

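/*
 * In-place decryption saves the ciphertext block in tmpbuf before it is
 * overwritten, since it is still needed to update the chaining value.
 */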
static int crypto_pcbc_decrypt_inplace(struct skcipher_request *req,
                                       struct skcipher_walk *walk,
                                       struct crypto_cipher *tfm)
{
        int bsize = crypto_cipher_blocksize(tfm);
        unsigned int nbytes = walk->nbytes;
        u8 *src = walk->src.virt.addr;
        u8 * const iv = walk->iv;
        u8 tmpbuf[MAX_CIPHER_BLOCKSIZE] __aligned(__alignof__(u32));

        do {
                memcpy(tmpbuf, src, bsize);
                crypto_cipher_decrypt_one(tfm, src, src);
                crypto_xor(src, iv, bsize);
                crypto_xor_cpy(iv, src, tmpbuf, bsize);

                src += bsize;
        } while ((nbytes -= bsize) >= bsize);

        return nbytes;
}

static int crypto_pcbc_decrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct crypto_pcbc_ctx *ctx = crypto_skcipher_ctx(tfm);
        struct crypto_cipher *child = ctx->child;
        struct skcipher_walk walk;
        unsigned int nbytes;
        int err;

        err = skcipher_walk_virt(&walk, req, false);

        while ((nbytes = walk.nbytes)) {
                if (walk.src.virt.addr == walk.dst.virt.addr)
                        nbytes = crypto_pcbc_decrypt_inplace(req, &walk,
                                                             child);
                else
                        nbytes = crypto_pcbc_decrypt_segment(req, &walk,
                                                             child);
                err = skcipher_walk_done(&walk, nbytes);
        }

        return err;
}

static int crypto_pcbc_init_tfm(struct crypto_skcipher *tfm)
{
        struct skcipher_instance *inst = skcipher_alg_instance(tfm);
        struct crypto_spawn *spawn = skcipher_instance_ctx(inst);
        struct crypto_pcbc_ctx *ctx = crypto_skcipher_ctx(tfm);
        struct crypto_cipher *cipher;

        cipher = crypto_spawn_cipher(spawn);
        if (IS_ERR(cipher))
                return PTR_ERR(cipher);

        ctx->child = cipher;
        return 0;
}

static void crypto_pcbc_exit_tfm(struct crypto_skcipher *tfm)
{
        struct crypto_pcbc_ctx *ctx = crypto_skcipher_ctx(tfm);

        crypto_free_cipher(ctx->child);
}

static void crypto_pcbc_free(struct skcipher_instance *inst)
{
        crypto_drop_skcipher(skcipher_instance_ctx(inst));
        kfree(inst);
}

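/*
 * Template instantiation: "pcbc(<cipher>)" wraps a single-block cipher
 * algorithm in an skcipher instance whose IV size equals the cipher
 * block size.
 */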
static int crypto_pcbc_create(struct crypto_template *tmpl, struct rtattr **tb)
{
        struct skcipher_instance *inst;
        struct crypto_attr_type *algt;
        struct crypto_spawn *spawn;
        struct crypto_alg *alg;
        int err;

        algt = crypto_get_attr_type(tb);
        if (IS_ERR(algt))
                return PTR_ERR(algt);

        if (((algt->type ^ CRYPTO_ALG_TYPE_SKCIPHER) & algt->mask) &
            ~CRYPTO_ALG_INTERNAL)
                return -EINVAL;

        inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
        if (!inst)
                return -ENOMEM;

        alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_CIPHER |
                                      (algt->type & CRYPTO_ALG_INTERNAL),
                                  CRYPTO_ALG_TYPE_MASK |
                                  (algt->mask & CRYPTO_ALG_INTERNAL));
        err = PTR_ERR(alg);
        if (IS_ERR(alg))
                goto err_free_inst;

        spawn = skcipher_instance_ctx(inst);
        err = crypto_init_spawn(spawn, alg, skcipher_crypto_instance(inst),
                                CRYPTO_ALG_TYPE_MASK);
        if (err)
                goto err_put_alg;

        err = crypto_inst_setname(skcipher_crypto_instance(inst), "pcbc", alg);
        if (err)
                goto err_drop_spawn;

        inst->alg.base.cra_flags = alg->cra_flags & CRYPTO_ALG_INTERNAL;
        inst->alg.base.cra_priority = alg->cra_priority;
        inst->alg.base.cra_blocksize = alg->cra_blocksize;
        inst->alg.base.cra_alignmask = alg->cra_alignmask;

        inst->alg.ivsize = alg->cra_blocksize;
        inst->alg.min_keysize = alg->cra_cipher.cia_min_keysize;
        inst->alg.max_keysize = alg->cra_cipher.cia_max_keysize;

        inst->alg.base.cra_ctxsize = sizeof(struct crypto_pcbc_ctx);

        inst->alg.init = crypto_pcbc_init_tfm;
        inst->alg.exit = crypto_pcbc_exit_tfm;

        inst->alg.setkey = crypto_pcbc_setkey;
        inst->alg.encrypt = crypto_pcbc_encrypt;
        inst->alg.decrypt = crypto_pcbc_decrypt;

        inst->free = crypto_pcbc_free;

        err = skcipher_register_instance(tmpl, inst);
        if (err)
                goto err_drop_spawn;
        crypto_mod_put(alg);

out:
        return err;

err_drop_spawn:
        crypto_drop_spawn(spawn);
err_put_alg:
        crypto_mod_put(alg);
err_free_inst:
        kfree(inst);
        goto out;
}

static struct crypto_template crypto_pcbc_tmpl = {
        .name = "pcbc",
        .create = crypto_pcbc_create,
        .module = THIS_MODULE,
};

static int __init crypto_pcbc_module_init(void)
{
        return crypto_register_template(&crypto_pcbc_tmpl);
}

static void __exit crypto_pcbc_module_exit(void)
{
        crypto_unregister_template(&crypto_pcbc_tmpl);
}

module_init(crypto_pcbc_module_init);
module_exit(crypto_pcbc_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("PCBC block cipher algorithm");
MODULE_ALIAS_CRYPTO("pcbc");