/*
 * Copyright (c) 2012 Apple Computer, Inc. All rights reserved.
 *
 * @APPLE_OSREFERENCE_LICENSE_HEADER_START@
 *
 * This file contains Original Code and/or Modifications of Original Code
 * as defined in and that are subject to the Apple Public Source License
 * Version 2.0 (the 'License'). You may not use this file except in
 * compliance with the License. The rights granted to you under the License
 * may not be used to create, or enable the creation or redistribution of,
 * unlawful or unlicensed copies of an Apple operating system, or to
 * circumvent, violate, or enable the circumvention or violation of, any
 * terms of an Apple operating system software license agreement.
 *
 * Please obtain a copy of the License at
 * http://www.opensource.apple.com/apsl/ and read it before using this file.
 *
 * The Original Code and all software distributed under the License are
 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
 * Please see the License for the specific language governing rights and
 * limitations under the License.
 *
 * @APPLE_OSREFERENCE_LICENSE_HEADER_END@
 */

#include <libkern/crypto/crypto_internal.h>
#include <libkern/crypto/aes.h>
#include <corecrypto/ccmode.h>
#include <corecrypto/ccaes.h>
#include <kern/debug.h>

aes_rval aes_encrypt_key(const unsigned char *key, int key_len, aes_encrypt_ctx cx[1])
{
    const struct ccmode_cbc *cbc = g_crypto_funcs->ccaes_cbc_encrypt;

    /* Make sure the context size for the mode fits in the one we have */
    if (cbc->size > sizeof(aes_encrypt_ctx))
        panic("%s: inconsistent size for AES encrypt context", __FUNCTION__);

    cccbc_init(cbc, cx[0].ctx, key_len, key);

    return aes_good;
}

aes_rval aes_encrypt_cbc(const unsigned char *in_blk, const unsigned char *in_iv, unsigned int num_blk,
                         unsigned char *out_blk, aes_encrypt_ctx cx[1])
{
    const struct ccmode_cbc *cbc = g_crypto_funcs->ccaes_cbc_encrypt;
    cccbc_iv_decl(cbc->block_size, ctx_iv);

    cccbc_set_iv(cbc, ctx_iv, in_iv);
    cccbc_update(cbc, cx[0].ctx, ctx_iv, num_blk, in_blk, out_blk); /* the actual CBC encryption */

    return aes_good;
}

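/*
 * Usage sketch (illustrative only): how a caller might drive the two routines
 * above to CBC-encrypt a small buffer. The key, IV and plaintext values are
 * placeholders introduced for this example, not values used anywhere in xnu.
 */
static void __attribute__((unused))
aes_cbc_encrypt_example(void)
{
    const unsigned char key[16] = { 0 };            /* placeholder 128-bit key */
    const unsigned char iv[16] = { 0 };             /* placeholder IV, one AES block */
    unsigned char plain[32] = { 0 };                /* two 16-byte blocks of plaintext */
    unsigned char cipher[32];
    aes_encrypt_ctx ctx[1];

    aes_encrypt_key128(key, ctx);                   /* expand the key schedule */
    aes_encrypt_cbc(plain, iv, 2, cipher, ctx);     /* num_blk counts 16-byte blocks */
}
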
#if defined (__i386__) || defined (__x86_64__) || defined (__arm64__)
/* This does one block of ECB, using the CBC implementation - this allows the same context to be used for both CBC and ECB. */
aes_rval aes_encrypt(const unsigned char *in_blk, unsigned char *out_blk, aes_encrypt_ctx cx[1])
{
    return aes_encrypt_cbc(in_blk, NULL, 1, out_blk, cx);
}
#endif

aes_rval aes_decrypt_key(const unsigned char *key, int key_len, aes_decrypt_ctx cx[1])
{
    const struct ccmode_cbc *cbc = g_crypto_funcs->ccaes_cbc_decrypt;

    /* Make sure the context size for the mode fits in the one we have */
    if (cbc->size > sizeof(aes_decrypt_ctx))
        panic("%s: inconsistent size for AES decrypt context", __FUNCTION__);

    cccbc_init(cbc, cx[0].ctx, key_len, key);

    return aes_good;
}

aes_rval aes_decrypt_cbc(const unsigned char *in_blk, const unsigned char *in_iv, unsigned int num_blk,
                         unsigned char *out_blk, aes_decrypt_ctx cx[1])
{
    const struct ccmode_cbc *cbc = g_crypto_funcs->ccaes_cbc_decrypt;
    cccbc_iv_decl(cbc->block_size, ctx_iv);

    cccbc_set_iv(cbc, ctx_iv, in_iv);
    cccbc_update(cbc, cx[0].ctx, ctx_iv, num_blk, in_blk, out_blk); /* the actual CBC decryption */

    return aes_good;
}

#if defined (__i386__) || defined (__x86_64__) || defined (__arm64__)
/* This does one block of ECB, using the CBC implementation - this allows the same context to be used for both CBC and ECB. */
aes_rval aes_decrypt(const unsigned char *in_blk, unsigned char *out_blk, aes_decrypt_ctx cx[1])
{
    return aes_decrypt_cbc(in_blk, NULL, 1, out_blk, cx);
}
#endif

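#if defined (__i386__) || defined (__x86_64__) || defined (__arm64__)
/*
 * Usage sketch (illustrative only): a single-block round trip through the
 * ECB-style wrappers above. Because both wrappers reuse the CBC code path
 * with a NULL (all-zero) IV, the same key schedules serve one-block ECB and
 * multi-block CBC. All values below are placeholders for this example.
 */
static void __attribute__((unused))
aes_single_block_example(void)
{
    const unsigned char key[16] = { 0 };            /* placeholder 128-bit key */
    unsigned char plain[16] = { 0 };                /* one AES block */
    unsigned char cipher[16], recovered[16];
    aes_encrypt_ctx ectx[1];
    aes_decrypt_ctx dctx[1];

    aes_encrypt_key128(key, ectx);
    aes_decrypt_key128(key, dctx);

    aes_encrypt(plain, cipher, ectx);               /* one-block "ECB" via CBC, NULL IV */
    aes_decrypt(cipher, recovered, dctx);           /* recovered should equal plain */
}
#endif
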
aes_rval aes_encrypt_key128(const unsigned char *key, aes_encrypt_ctx cx[1])
{
    return aes_encrypt_key(key, 16, cx);
}

aes_rval aes_decrypt_key128(const unsigned char *key, aes_decrypt_ctx cx[1])
{
    return aes_decrypt_key(key, 16, cx);
}

aes_rval aes_encrypt_key256(const unsigned char *key, aes_encrypt_ctx cx[1])
{
    return aes_encrypt_key(key, 32, cx);
}

aes_rval aes_decrypt_key256(const unsigned char *key, aes_decrypt_ctx cx[1])
{
    return aes_decrypt_key(key, 32, cx);
}

aes_rval aes_encrypt_key_gcm(const unsigned char *key, int key_len, ccgcm_ctx *ctx)
{
    const struct ccmode_gcm *gcm = g_crypto_funcs->ccaes_gcm_encrypt;
    if (!gcm) {
        return aes_error;
    }

    return ccgcm_init(gcm, ctx, key_len, key);
}

aes_rval aes_encrypt_key_with_iv_gcm(const unsigned char *key, int key_len, const unsigned char *in_iv, ccgcm_ctx *ctx)
{
    const struct ccmode_gcm *gcm = g_crypto_funcs->ccaes_gcm_encrypt;
    if (!gcm) {
        return aes_error;
    }

    return g_crypto_funcs->ccgcm_init_with_iv_fn(gcm, ctx, key_len, key, in_iv);
}

aes_rval aes_encrypt_set_iv_gcm(const unsigned char *in_iv, unsigned int len, ccgcm_ctx *ctx)
{
    const struct ccmode_gcm *gcm = g_crypto_funcs->ccaes_gcm_encrypt;
    if (!gcm) {
        return aes_error;
    }

    return ccgcm_set_iv(gcm, ctx, len, in_iv);
}

aes_rval aes_encrypt_reset_gcm(ccgcm_ctx *ctx)
{
    const struct ccmode_gcm *gcm = g_crypto_funcs->ccaes_gcm_encrypt;
    if (!gcm) {
        return aes_error;
    }

    return ccgcm_reset(gcm, ctx);
}

aes_rval aes_encrypt_inc_iv_gcm(unsigned char *out_iv, ccgcm_ctx *ctx)
{
    const struct ccmode_gcm *gcm = g_crypto_funcs->ccaes_gcm_encrypt;
    if (!gcm) {
        return aes_error;
    }

    return g_crypto_funcs->ccgcm_inc_iv_fn(gcm, ctx, out_iv);
}

aes_rval aes_encrypt_aad_gcm(const unsigned char *aad, unsigned int aad_bytes, ccgcm_ctx *ctx)
{
    const struct ccmode_gcm *gcm = g_crypto_funcs->ccaes_gcm_encrypt;
    if (!gcm) {
        return aes_error;
    }

    return ccgcm_gmac(gcm, ctx, aad_bytes, aad);
}

aes_rval aes_encrypt_gcm(const unsigned char *in_blk, unsigned int num_bytes,
                         unsigned char *out_blk, ccgcm_ctx *ctx)
{
    const struct ccmode_gcm *gcm = g_crypto_funcs->ccaes_gcm_encrypt;
    if (!gcm) {
        return aes_error;
    }

    return ccgcm_update(gcm, ctx, num_bytes, in_blk, out_blk); /* the actual GCM encryption */
}

aes_rval aes_encrypt_finalize_gcm(unsigned char *tag, unsigned int tag_bytes, ccgcm_ctx *ctx)
{
    int rc;
    const struct ccmode_gcm *gcm = g_crypto_funcs->ccaes_gcm_encrypt;
    if (!gcm) {
        return aes_error;
    }

    rc = ccgcm_finalize(gcm, ctx, tag_bytes, tag);
    rc |= ccgcm_reset(gcm, ctx);
    return rc;
}

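/*
 * Usage sketch (illustrative only): one full GCM encryption pass through the
 * functions above (key setup, IV, AAD, payload, tag). The fixed-size,
 * 16-byte-aligned context buffer and all key/IV/data values are placeholder
 * assumptions for this example; a real caller sizes the context with
 * aes_encrypt_get_ctx_size_gcm(), defined at the end of this file.
 */
static aes_rval __attribute__((unused))
aes_gcm_encrypt_example(void)
{
    static unsigned char ctx_buf[1024] __attribute__((aligned(16)));   /* assumed large enough */
    ccgcm_ctx *ctx = (ccgcm_ctx *)(void *)ctx_buf;
    const unsigned char key[16] = { 0 };            /* placeholder 128-bit key */
    const unsigned char iv[12] = { 0 };             /* placeholder 96-bit IV */
    const unsigned char aad[8] = { 0 };             /* placeholder additional data */
    unsigned char msg[32] = { 0 };                  /* placeholder plaintext */
    unsigned char out[32];
    unsigned char tag[16];
    int rc;

    if (aes_encrypt_get_ctx_size_gcm() > sizeof(ctx_buf)) {
        return aes_error;
    }

    rc = aes_encrypt_key_gcm(key, 16, ctx);
    rc |= aes_encrypt_set_iv_gcm(iv, 12, ctx);
    rc |= aes_encrypt_aad_gcm(aad, 8, ctx);         /* authenticated but not encrypted */
    rc |= aes_encrypt_gcm(msg, 32, out, ctx);       /* encrypt the payload */
    rc |= aes_encrypt_finalize_gcm(tag, 16, ctx);   /* emit the 16-byte tag, then reset */
    return rc;
}
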
aes_rval aes_decrypt_key_gcm(const unsigned char *key, int key_len, ccgcm_ctx *ctx)
{
    const struct ccmode_gcm *gcm = g_crypto_funcs->ccaes_gcm_decrypt;
    if (!gcm) {
        return aes_error;
    }

    return ccgcm_init(gcm, ctx, key_len, key);
}

aes_rval aes_decrypt_key_with_iv_gcm(const unsigned char *key, int key_len, const unsigned char *in_iv, ccgcm_ctx *ctx)
{
    const struct ccmode_gcm *gcm = g_crypto_funcs->ccaes_gcm_decrypt;
    if (!gcm) {
        return aes_error;
    }

    return g_crypto_funcs->ccgcm_init_with_iv_fn(gcm, ctx, key_len, key, in_iv);
}

aes_rval aes_decrypt_set_iv_gcm(const unsigned char *in_iv, unsigned int len, ccgcm_ctx *ctx)
{
    int rc;

    const struct ccmode_gcm *gcm = g_crypto_funcs->ccaes_gcm_decrypt;
    if (!gcm) {
        return aes_error;
    }

    rc = ccgcm_reset(gcm, ctx);
    rc |= ccgcm_set_iv(gcm, ctx, len, in_iv);
    return rc;
}

aes_rval aes_decrypt_reset_gcm(ccgcm_ctx *ctx)
{
    const struct ccmode_gcm *gcm = g_crypto_funcs->ccaes_gcm_decrypt;
    if (!gcm) {
        return aes_error;
    }

    return ccgcm_reset(gcm, ctx);
}

aes_rval aes_decrypt_inc_iv_gcm(unsigned char *out_iv, ccgcm_ctx *ctx)
{
    const struct ccmode_gcm *gcm = g_crypto_funcs->ccaes_gcm_decrypt;
    if (!gcm) {
        return aes_error;
    }

    return g_crypto_funcs->ccgcm_inc_iv_fn(gcm, ctx, out_iv);
}

aes_rval aes_decrypt_aad_gcm(const unsigned char *aad, unsigned int aad_bytes, ccgcm_ctx *ctx)
{
    const struct ccmode_gcm *gcm = g_crypto_funcs->ccaes_gcm_decrypt;
    if (!gcm) {
        return aes_error;
    }

    return ccgcm_gmac(gcm, ctx, aad_bytes, aad);
}

aes_rval aes_decrypt_gcm(const unsigned char *in_blk, unsigned int num_bytes,
                         unsigned char *out_blk, ccgcm_ctx *ctx)
{
    const struct ccmode_gcm *gcm = g_crypto_funcs->ccaes_gcm_decrypt;
    if (!gcm) {
        return aes_error;
    }

    return ccgcm_update(gcm, ctx, num_bytes, in_blk, out_blk); /* the actual GCM decryption */
}

aes_rval aes_decrypt_finalize_gcm(unsigned char *tag, unsigned int tag_bytes, ccgcm_ctx *ctx)
{
    int rc;
    const struct ccmode_gcm *gcm = g_crypto_funcs->ccaes_gcm_decrypt;
    if (!gcm) {
        return aes_error;
    }

    rc = ccgcm_finalize(gcm, ctx, tag_bytes, tag);
    rc |= ccgcm_reset(gcm, ctx);
    return rc;
}

unsigned aes_encrypt_get_ctx_size_gcm(void)
{
    const struct ccmode_gcm *gcm = g_crypto_funcs->ccaes_gcm_encrypt;
    if (!gcm) {
        return 0;
    }
    return (cc_ctx_sizeof(ccgcm_ctx, gcm->size));
}

unsigned aes_decrypt_get_ctx_size_gcm(void)
{
    const struct ccmode_gcm *gcm = g_crypto_funcs->ccaes_gcm_decrypt;
    if (!gcm) {
        return 0;
    }
    return (cc_ctx_sizeof(ccgcm_ctx, gcm->size));
}

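/*
 * Usage sketch (illustrative only): the matching GCM decryption pass, sized
 * with aes_decrypt_get_ctx_size_gcm() above. All buffer contents are
 * placeholders. How the final tag is checked depends on the underlying
 * ccgcm_finalize behaviour (older corecrypto writes the computed tag for the
 * caller to compare), so the tag handling shown here is an assumption made
 * for this example.
 */
static aes_rval __attribute__((unused))
aes_gcm_decrypt_example(void)
{
    static unsigned char ctx_buf[1024] __attribute__((aligned(16)));   /* assumed large enough */
    ccgcm_ctx *ctx = (ccgcm_ctx *)(void *)ctx_buf;
    const unsigned char key[16] = { 0 };            /* placeholder 128-bit key */
    const unsigned char iv[12] = { 0 };             /* placeholder 96-bit IV */
    const unsigned char aad[8] = { 0 };             /* placeholder additional data */
    unsigned char cipher[32] = { 0 };               /* placeholder ciphertext */
    unsigned char plain[32];
    unsigned char tag[16] = { 0 };                  /* tag produced at encryption time */
    int rc;

    if (aes_decrypt_get_ctx_size_gcm() > sizeof(ctx_buf)) {
        return aes_error;
    }

    rc = aes_decrypt_key_gcm(key, 16, ctx);
    rc |= aes_decrypt_set_iv_gcm(iv, 12, ctx);
    rc |= aes_decrypt_aad_gcm(aad, 8, ctx);
    rc |= aes_decrypt_gcm(cipher, 32, plain, ctx);
    rc |= aes_decrypt_finalize_gcm(tag, 16, ctx);
    return rc;
}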