/* Copyright (C) 1995-1998 Eric Young (eay@cryptsoft.com)
 * All rights reserved.
 *
 * This package is an SSL implementation written
 * by Eric Young (eay@cryptsoft.com).
 * The implementation was written so as to conform with Netscapes SSL.
 *
 * This library is free for commercial and non-commercial use as long as
 * the following conditions are aheared to.  The following conditions
 * apply to all code found in this distribution, be it the RC4, RSA,
 * lhash, DES, etc., code; not just the SSL code.  The SSL documentation
 * included with this distribution is covered by the same copyright terms
 * except that the holder is Tim Hudson (tjh@cryptsoft.com).
 *
 * Copyright remains Eric Young's, and as such any Copyright notices in
 * the code are not to be removed.
 * If this package is used in a product, Eric Young should be given attribution
 * as the author of the parts of the library used.
 * This can be in the form of a textual message at program startup or
 * in documentation (online or textual) provided with the package.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. All advertising materials mentioning features or use of this software
 *    must display the following acknowledgement:
 *    "This product includes cryptographic software written by
 *     Eric Young (eay@cryptsoft.com)"
 *    The word 'cryptographic' can be left out if the rouines from the library
 *    being used are not cryptographic related :-).
 * 4. If you include any Windows specific code (or a derivative thereof) from
 *    the apps directory (application code) you must include an acknowledgement:
 *    "This product includes software written by Tim Hudson (tjh@cryptsoft.com)"
 *
 * THIS SOFTWARE IS PROVIDED BY ERIC YOUNG ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 * The licence and distribution terms for any publically available version or
 * derivative of this code cannot be changed.  i.e. this code cannot simply be
 * copied and put under another distribution licence
 * [including the GNU Public Licence.]
 */
/* ====================================================================
 * Copyright (c) 1998-2007 The OpenSSL Project.  All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 *
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in
 *    the documentation and/or other materials provided with the
 *    distribution.
 *
 * 3. All advertising materials mentioning features or use of this
 *    software must display the following acknowledgment:
 *    "This product includes software developed by the OpenSSL Project
 *    for use in the OpenSSL Toolkit. (http://www.openssl.org/)"
 *
 * 4. The names "OpenSSL Toolkit" and "OpenSSL Project" must not be used to
 *    endorse or promote products derived from this software without
 *    prior written permission. For written permission, please contact
 *    openssl-core@openssl.org.
 *
 * 5. Products derived from this software may not be called "OpenSSL"
 *    nor may "OpenSSL" appear in their names without prior written
 *    permission of the OpenSSL Project.
 *
 * 6. Redistributions of any form whatsoever must retain the following
 *    acknowledgment:
 *    "This product includes software developed by the OpenSSL Project
 *    for use in the OpenSSL Toolkit (http://www.openssl.org/)"
 *
 * THIS SOFTWARE IS PROVIDED BY THE OpenSSL PROJECT ``AS IS'' AND ANY
 * EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE OpenSSL PROJECT OR
 * ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
 * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
 * OF THE POSSIBILITY OF SUCH DAMAGE.
 * ====================================================================
 *
 * This product includes cryptographic software written by Eric Young
 * (eay@cryptsoft.com).  This product includes software written by Tim
 * Hudson (tjh@cryptsoft.com).
 *
 */
/* ====================================================================
 * Copyright 2002 Sun Microsystems, Inc. ALL RIGHTS RESERVED.
 * ECC cipher suite support in OpenSSL originally developed by
 * SUN MICROSYSTEMS, INC., and contributed to the OpenSSL project.
 */
/* ====================================================================
 * Copyright 2005 Nokia. All rights reserved.
 *
 * The portions of the attached software ("Contribution") is developed by
 * Nokia Corporation and is licensed pursuant to the OpenSSL open source
 * license.
 *
 * The Contribution, originally written by Mika Kousa and Pasi Eronen of
 * Nokia Corporation, consists of the "PSK" (Pre-Shared Key) ciphersuites
 * support (see RFC 4279) to OpenSSL.
 *
 * No patent licenses or other rights except those expressly stated in
 * the OpenSSL open source license shall be deemed granted or received
 * expressly, by implication, estoppel, or otherwise.
 *
 * No assurances are provided by Nokia that the Contribution does not
 * infringe the patent or other intellectual property rights of any third
 * party or that the license provides you with all the necessary rights
 * to make use of the Contribution.
 *
 * THE SOFTWARE IS PROVIDED "AS IS" WITHOUT WARRANTY OF ANY KIND. IN
 * ADDITION TO THE DISCLAIMERS INCLUDED IN THE LICENSE, NOKIA
 * SPECIFICALLY DISCLAIMS ANY LIABILITY FOR CLAIMS BROUGHT BY YOU OR ANY
 * OTHER ENTITY BASED ON INFRINGEMENT OF INTELLECTUAL PROPERTY RIGHTS OR
 * OTHERWISE. */
140
David Benjamin9e4e01e2015-09-15 01:48:04 -0400141#include <openssl/ssl.h>
142
Adam Langley95c29f32014-06-20 12:00:00 -0700143#include <assert.h>
David Benjaminf0ae1702015-04-07 23:05:04 -0400144#include <string.h>
Adam Langley95c29f32014-06-20 12:00:00 -0700145
David Benjamin71f07942015-04-08 02:36:59 -0400146#include <openssl/buf.h>
David Benjaminf0ae1702015-04-07 23:05:04 -0400147#include <openssl/err.h>
David Benjaminea72bd02014-12-21 21:27:41 -0500148#include <openssl/md5.h>
Adam Langley95c29f32014-06-20 12:00:00 -0700149#include <openssl/mem.h>
David Benjaminea72bd02014-12-21 21:27:41 -0500150#include <openssl/sha.h>
David Benjamin71f07942015-04-08 02:36:59 -0400151#include <openssl/stack.h>
Adam Langley95c29f32014-06-20 12:00:00 -0700152
David Benjamin2ee94aa2015-04-07 22:38:30 -0400153#include "internal.h"
Steven Valdezcb966542016-08-17 16:56:14 -0400154#include "../crypto/internal.h"
Adam Langley95c29f32014-06-20 12:00:00 -0700155
Adam Langley95c29f32014-06-20 12:00:00 -0700156
/* kCiphers is an array of all supported ciphers, sorted by id. */
static const SSL_CIPHER kCiphers[] = {
    /* The RSA ciphers */
    /* Cipher 02 */
    {
     SSL3_TXT_RSA_NULL_SHA,
     SSL3_CK_RSA_NULL_SHA,
     SSL_kRSA,
     SSL_aRSA,
     SSL_eNULL,
     SSL_SHA1,
     SSL_HANDSHAKE_MAC_DEFAULT,
    },

    /* Cipher 0A */
    {
     SSL3_TXT_RSA_DES_192_CBC3_SHA,
     SSL3_CK_RSA_DES_192_CBC3_SHA,
     SSL_kRSA,
     SSL_aRSA,
     SSL_3DES,
     SSL_SHA1,
     SSL_HANDSHAKE_MAC_DEFAULT,
    },


    /* New AES ciphersuites */

    /* Cipher 2F */
    {
     TLS1_TXT_RSA_WITH_AES_128_SHA,
     TLS1_CK_RSA_WITH_AES_128_SHA,
     SSL_kRSA,
     SSL_aRSA,
     SSL_AES128,
     SSL_SHA1,
     SSL_HANDSHAKE_MAC_DEFAULT,
    },

    /* Cipher 33 */
    {
     TLS1_TXT_DHE_RSA_WITH_AES_128_SHA,
     TLS1_CK_DHE_RSA_WITH_AES_128_SHA,
     SSL_kDHE,
     SSL_aRSA,
     SSL_AES128,
     SSL_SHA1,
     SSL_HANDSHAKE_MAC_DEFAULT,
    },

    /* Cipher 35 */
    {
     TLS1_TXT_RSA_WITH_AES_256_SHA,
     TLS1_CK_RSA_WITH_AES_256_SHA,
     SSL_kRSA,
     SSL_aRSA,
     SSL_AES256,
     SSL_SHA1,
     SSL_HANDSHAKE_MAC_DEFAULT,
    },

    /* Cipher 39 */
    {
     TLS1_TXT_DHE_RSA_WITH_AES_256_SHA,
     TLS1_CK_DHE_RSA_WITH_AES_256_SHA,
     SSL_kDHE,
     SSL_aRSA,
     SSL_AES256,
     SSL_SHA1,
     SSL_HANDSHAKE_MAC_DEFAULT,
    },


    /* TLS v1.2 ciphersuites */

    /* Cipher 3C */
    {
     TLS1_TXT_RSA_WITH_AES_128_SHA256,
     TLS1_CK_RSA_WITH_AES_128_SHA256,
     SSL_kRSA,
     SSL_aRSA,
     SSL_AES128,
     SSL_SHA256,
     SSL_HANDSHAKE_MAC_SHA256,
    },

    /* Cipher 3D */
    {
     TLS1_TXT_RSA_WITH_AES_256_SHA256,
     TLS1_CK_RSA_WITH_AES_256_SHA256,
     SSL_kRSA,
     SSL_aRSA,
     SSL_AES256,
     SSL_SHA256,
     SSL_HANDSHAKE_MAC_SHA256,
    },

    /* Cipher 67 */
    {
     TLS1_TXT_DHE_RSA_WITH_AES_128_SHA256,
     TLS1_CK_DHE_RSA_WITH_AES_128_SHA256,
     SSL_kDHE,
     SSL_aRSA,
     SSL_AES128,
     SSL_SHA256,
     SSL_HANDSHAKE_MAC_SHA256,
    },

    /* Cipher 6B */
    {
     TLS1_TXT_DHE_RSA_WITH_AES_256_SHA256,
     TLS1_CK_DHE_RSA_WITH_AES_256_SHA256,
     SSL_kDHE,
     SSL_aRSA,
     SSL_AES256,
     SSL_SHA256,
     SSL_HANDSHAKE_MAC_SHA256,
    },

    /* PSK cipher suites. */

    /* Cipher 8C */
    {
     TLS1_TXT_PSK_WITH_AES_128_CBC_SHA,
     TLS1_CK_PSK_WITH_AES_128_CBC_SHA,
     SSL_kPSK,
     SSL_aPSK,
     SSL_AES128,
     SSL_SHA1,
     SSL_HANDSHAKE_MAC_DEFAULT,
    },

    /* Cipher 8D */
    {
     TLS1_TXT_PSK_WITH_AES_256_CBC_SHA,
     TLS1_CK_PSK_WITH_AES_256_CBC_SHA,
     SSL_kPSK,
     SSL_aPSK,
     SSL_AES256,
     SSL_SHA1,
     SSL_HANDSHAKE_MAC_DEFAULT,
    },

    /* GCM ciphersuites from RFC5288 */

    /* Cipher 9C */
    {
     TLS1_TXT_RSA_WITH_AES_128_GCM_SHA256,
     TLS1_CK_RSA_WITH_AES_128_GCM_SHA256,
     SSL_kRSA,
     SSL_aRSA,
     SSL_AES128GCM,
     SSL_AEAD,
     SSL_HANDSHAKE_MAC_SHA256,
    },

    /* Cipher 9D */
    {
     TLS1_TXT_RSA_WITH_AES_256_GCM_SHA384,
     TLS1_CK_RSA_WITH_AES_256_GCM_SHA384,
     SSL_kRSA,
     SSL_aRSA,
     SSL_AES256GCM,
     SSL_AEAD,
     SSL_HANDSHAKE_MAC_SHA384,
    },

    /* Cipher 9E */
    {
     TLS1_TXT_DHE_RSA_WITH_AES_128_GCM_SHA256,
     TLS1_CK_DHE_RSA_WITH_AES_128_GCM_SHA256,
     SSL_kDHE,
     SSL_aRSA,
     SSL_AES128GCM,
     SSL_AEAD,
     SSL_HANDSHAKE_MAC_SHA256,
    },

    /* Cipher 9F */
    {
     TLS1_TXT_DHE_RSA_WITH_AES_256_GCM_SHA384,
     TLS1_CK_DHE_RSA_WITH_AES_256_GCM_SHA384,
     SSL_kDHE,
     SSL_aRSA,
     SSL_AES256GCM,
     SSL_AEAD,
     SSL_HANDSHAKE_MAC_SHA384,
    },

    /* TLS 1.3 suites. */

    /* Cipher 1301 */
    {
     TLS1_TXT_AES_128_GCM_SHA256,
     TLS1_CK_AES_128_GCM_SHA256,
     SSL_kGENERIC,
     SSL_aGENERIC,
     SSL_AES128GCM,
     SSL_AEAD,
     SSL_HANDSHAKE_MAC_SHA256,
    },

    /* Cipher 1302 */
    {
     TLS1_TXT_AES_256_GCM_SHA384,
     TLS1_CK_AES_256_GCM_SHA384,
     SSL_kGENERIC,
     SSL_aGENERIC,
     SSL_AES256GCM,
     SSL_AEAD,
     SSL_HANDSHAKE_MAC_SHA384,
    },

    /* Cipher 1303 */
    {
     TLS1_TXT_CHACHA20_POLY1305_SHA256,
     TLS1_CK_CHACHA20_POLY1305_SHA256,
     SSL_kGENERIC,
     SSL_aGENERIC,
     SSL_CHACHA20POLY1305,
     SSL_AEAD,
     SSL_HANDSHAKE_MAC_SHA256,
    },

    /* CECPQ1 (combined elliptic curve + post-quantum) suites. */

    /* Cipher 16B7 */
    {
     TLS1_TXT_CECPQ1_RSA_WITH_CHACHA20_POLY1305_SHA256,
     TLS1_CK_CECPQ1_RSA_WITH_CHACHA20_POLY1305_SHA256,
     SSL_kCECPQ1,
     SSL_aRSA,
     SSL_CHACHA20POLY1305,
     SSL_AEAD,
     SSL_HANDSHAKE_MAC_SHA256,
    },

    /* Cipher 16B8 */
    {
     TLS1_TXT_CECPQ1_ECDSA_WITH_CHACHA20_POLY1305_SHA256,
     TLS1_CK_CECPQ1_ECDSA_WITH_CHACHA20_POLY1305_SHA256,
     SSL_kCECPQ1,
     SSL_aECDSA,
     SSL_CHACHA20POLY1305,
     SSL_AEAD,
     SSL_HANDSHAKE_MAC_SHA256,
    },

    /* Cipher 16B9 */
    {
     TLS1_TXT_CECPQ1_RSA_WITH_AES_256_GCM_SHA384,
     TLS1_CK_CECPQ1_RSA_WITH_AES_256_GCM_SHA384,
     SSL_kCECPQ1,
     SSL_aRSA,
     SSL_AES256GCM,
     SSL_AEAD,
     SSL_HANDSHAKE_MAC_SHA384,
    },

    /* Cipher 16BA */
    {
     TLS1_TXT_CECPQ1_ECDSA_WITH_AES_256_GCM_SHA384,
     TLS1_CK_CECPQ1_ECDSA_WITH_AES_256_GCM_SHA384,
     SSL_kCECPQ1,
     SSL_aECDSA,
     SSL_AES256GCM,
     SSL_AEAD,
     SSL_HANDSHAKE_MAC_SHA384,
    },

    /* Cipher C009 */
    {
     TLS1_TXT_ECDHE_ECDSA_WITH_AES_128_CBC_SHA,
     TLS1_CK_ECDHE_ECDSA_WITH_AES_128_CBC_SHA,
     SSL_kECDHE,
     SSL_aECDSA,
     SSL_AES128,
     SSL_SHA1,
     SSL_HANDSHAKE_MAC_DEFAULT,
    },

    /* Cipher C00A */
    {
     TLS1_TXT_ECDHE_ECDSA_WITH_AES_256_CBC_SHA,
     TLS1_CK_ECDHE_ECDSA_WITH_AES_256_CBC_SHA,
     SSL_kECDHE,
     SSL_aECDSA,
     SSL_AES256,
     SSL_SHA1,
     SSL_HANDSHAKE_MAC_DEFAULT,
    },

    /* Cipher C013 */
    {
     TLS1_TXT_ECDHE_RSA_WITH_AES_128_CBC_SHA,
     TLS1_CK_ECDHE_RSA_WITH_AES_128_CBC_SHA,
     SSL_kECDHE,
     SSL_aRSA,
     SSL_AES128,
     SSL_SHA1,
     SSL_HANDSHAKE_MAC_DEFAULT,
    },

    /* Cipher C014 */
    {
     TLS1_TXT_ECDHE_RSA_WITH_AES_256_CBC_SHA,
     TLS1_CK_ECDHE_RSA_WITH_AES_256_CBC_SHA,
     SSL_kECDHE,
     SSL_aRSA,
     SSL_AES256,
     SSL_SHA1,
     SSL_HANDSHAKE_MAC_DEFAULT,
    },


    /* HMAC based TLS v1.2 ciphersuites from RFC5289 */

    /* Cipher C023 */
    {
     TLS1_TXT_ECDHE_ECDSA_WITH_AES_128_SHA256,
     TLS1_CK_ECDHE_ECDSA_WITH_AES_128_SHA256,
     SSL_kECDHE,
     SSL_aECDSA,
     SSL_AES128,
     SSL_SHA256,
     SSL_HANDSHAKE_MAC_SHA256,
    },

    /* Cipher C024 */
    {
     TLS1_TXT_ECDHE_ECDSA_WITH_AES_256_SHA384,
     TLS1_CK_ECDHE_ECDSA_WITH_AES_256_SHA384,
     SSL_kECDHE,
     SSL_aECDSA,
     SSL_AES256,
     SSL_SHA384,
     SSL_HANDSHAKE_MAC_SHA384,
    },

    /* Cipher C027 */
    {
     TLS1_TXT_ECDHE_RSA_WITH_AES_128_SHA256,
     TLS1_CK_ECDHE_RSA_WITH_AES_128_SHA256,
     SSL_kECDHE,
     SSL_aRSA,
     SSL_AES128,
     SSL_SHA256,
     SSL_HANDSHAKE_MAC_SHA256,
    },

    /* Cipher C028 */
    {
     TLS1_TXT_ECDHE_RSA_WITH_AES_256_SHA384,
     TLS1_CK_ECDHE_RSA_WITH_AES_256_SHA384,
     SSL_kECDHE,
     SSL_aRSA,
     SSL_AES256,
     SSL_SHA384,
     SSL_HANDSHAKE_MAC_SHA384,
    },


    /* GCM based TLS v1.2 ciphersuites from RFC5289 */

    /* Cipher C02B */
    {
     TLS1_TXT_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256,
     TLS1_CK_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256,
     SSL_kECDHE,
     SSL_aECDSA,
     SSL_AES128GCM,
     SSL_AEAD,
     SSL_HANDSHAKE_MAC_SHA256,
    },

    /* Cipher C02C */
    {
     TLS1_TXT_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384,
     TLS1_CK_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384,
     SSL_kECDHE,
     SSL_aECDSA,
     SSL_AES256GCM,
     SSL_AEAD,
     SSL_HANDSHAKE_MAC_SHA384,
    },

    /* Cipher C02F */
    {
     TLS1_TXT_ECDHE_RSA_WITH_AES_128_GCM_SHA256,
     TLS1_CK_ECDHE_RSA_WITH_AES_128_GCM_SHA256,
     SSL_kECDHE,
     SSL_aRSA,
     SSL_AES128GCM,
     SSL_AEAD,
     SSL_HANDSHAKE_MAC_SHA256,
    },

    /* Cipher C030 */
    {
     TLS1_TXT_ECDHE_RSA_WITH_AES_256_GCM_SHA384,
     TLS1_CK_ECDHE_RSA_WITH_AES_256_GCM_SHA384,
     SSL_kECDHE,
     SSL_aRSA,
     SSL_AES256GCM,
     SSL_AEAD,
     SSL_HANDSHAKE_MAC_SHA384,
    },

    /* ECDHE-PSK cipher suites. */

    /* Cipher C035 */
    {
     TLS1_TXT_ECDHE_PSK_WITH_AES_128_CBC_SHA,
     TLS1_CK_ECDHE_PSK_WITH_AES_128_CBC_SHA,
     SSL_kECDHE,
     SSL_aPSK,
     SSL_AES128,
     SSL_SHA1,
     SSL_HANDSHAKE_MAC_DEFAULT,
    },

    /* Cipher C036 */
    {
     TLS1_TXT_ECDHE_PSK_WITH_AES_256_CBC_SHA,
     TLS1_CK_ECDHE_PSK_WITH_AES_256_CBC_SHA,
     SSL_kECDHE,
     SSL_aPSK,
     SSL_AES256,
     SSL_SHA1,
     SSL_HANDSHAKE_MAC_DEFAULT,
    },

    /* ChaCha20-Poly1305 cipher suites. */

#if !defined(BORINGSSL_ANDROID_SYSTEM)
    {
     TLS1_TXT_ECDHE_RSA_WITH_CHACHA20_POLY1305_OLD,
     TLS1_CK_ECDHE_RSA_CHACHA20_POLY1305_OLD,
     SSL_kECDHE,
     SSL_aRSA,
     SSL_CHACHA20POLY1305_OLD,
     SSL_AEAD,
     SSL_HANDSHAKE_MAC_SHA256,
    },

    {
     TLS1_TXT_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_OLD,
     TLS1_CK_ECDHE_ECDSA_CHACHA20_POLY1305_OLD,
     SSL_kECDHE,
     SSL_aECDSA,
     SSL_CHACHA20POLY1305_OLD,
     SSL_AEAD,
     SSL_HANDSHAKE_MAC_SHA256,
    },
#endif

    /* Cipher CCA8 */
    {
     TLS1_TXT_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256,
     TLS1_CK_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256,
     SSL_kECDHE,
     SSL_aRSA,
     SSL_CHACHA20POLY1305,
     SSL_AEAD,
     SSL_HANDSHAKE_MAC_SHA256,
    },

    /* Cipher CCA9 */
    {
     TLS1_TXT_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256,
     TLS1_CK_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256,
     SSL_kECDHE,
     SSL_aECDSA,
     SSL_CHACHA20POLY1305,
     SSL_AEAD,
     SSL_HANDSHAKE_MAC_SHA256,
    },

    /* Cipher CCAB */
    {
     TLS1_TXT_ECDHE_PSK_WITH_CHACHA20_POLY1305_SHA256,
     TLS1_CK_ECDHE_PSK_WITH_CHACHA20_POLY1305_SHA256,
     SSL_kECDHE,
     SSL_aPSK,
     SSL_CHACHA20POLY1305,
     SSL_AEAD,
     SSL_HANDSHAKE_MAC_SHA256,
    },

};

static const size_t kCiphersLen = OPENSSL_ARRAY_SIZE(kCiphers);

#define CIPHER_ADD 1
#define CIPHER_KILL 2
#define CIPHER_DEL 3
#define CIPHER_ORD 4
#define CIPHER_SPECIAL 5

typedef struct cipher_order_st {
  const SSL_CIPHER *cipher;
  int active;
  int in_group;
  struct cipher_order_st *next, *prev;
} CIPHER_ORDER;

typedef struct cipher_alias_st {
  /* name is the name of the cipher alias. */
  const char *name;

  /* The following fields are bitmasks for the corresponding fields on
   * |SSL_CIPHER|. A cipher matches a cipher alias iff, for each bitmask, the
   * bit corresponding to the cipher's value is set to 1. If any bitmask is
   * all zeroes, the alias matches nothing. Use |~0u| for the default value. */
  uint32_t algorithm_mkey;
  uint32_t algorithm_auth;
  uint32_t algorithm_enc;
  uint32_t algorithm_mac;

  /* min_version, if non-zero, matches all ciphers which were added in that
   * particular protocol version. */
  uint16_t min_version;
} CIPHER_ALIAS;

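/* Illustrative example (a sketch, not part of the library): with the bitmask
 * matching described above, the "AESGCM" entry in |kCipherAliases| below,
 *
 *   {"AESGCM", ~SSL_kCECPQ1, ~0u, SSL_AES128GCM | SSL_AES256GCM, ~0u, 0},
 *
 * matches TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256 because each of that cipher's
 * algorithm_* bits is set in the corresponding mask:
 *
 *   (SSL_kECDHE & ~SSL_kCECPQ1) != 0,
 *   (SSL_aRSA & ~0u) != 0,
 *   (SSL_AES128GCM & (SSL_AES128GCM | SSL_AES256GCM)) != 0,
 *   (SSL_AEAD & ~0u) != 0,
 *
 * and min_version of 0 places no version constraint. An alias with any
 * all-zero mask would match nothing. */
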
static const CIPHER_ALIAS kCipherAliases[] = {
    /* "ALL" doesn't include eNULL nor kCECPQ1. These must be explicitly
     * enabled. */
    {"ALL", ~SSL_kCECPQ1, ~0u, ~SSL_eNULL, ~0u, 0},

    /* The "COMPLEMENTOFDEFAULT" rule is omitted. It matches nothing. */

    /* key exchange aliases
     * (some of those using only a single bit here combine
     * multiple key exchange algs according to the RFCs,
     * e.g. kEDH combines DHE_DSS and DHE_RSA) */
    {"kRSA", SSL_kRSA, ~0u, ~0u, ~0u, 0},

    {"kDHE", SSL_kDHE, ~0u, ~0u, ~0u, 0},
    {"kEDH", SSL_kDHE, ~0u, ~0u, ~0u, 0},
    {"DH", SSL_kDHE, ~0u, ~0u, ~0u, 0},

    {"kECDHE", SSL_kECDHE, ~0u, ~0u, ~0u, 0},
    {"kCECPQ1", SSL_kCECPQ1, ~0u, ~0u, ~0u, 0},
    {"kEECDH", SSL_kECDHE, ~0u, ~0u, ~0u, 0},
    {"ECDH", SSL_kECDHE, ~0u, ~0u, ~0u, 0},

    {"kPSK", SSL_kPSK, ~0u, ~0u, ~0u, 0},

    /* server authentication aliases */
    {"aRSA", ~SSL_kCECPQ1, SSL_aRSA, ~SSL_eNULL, ~0u, 0},
    {"aECDSA", ~SSL_kCECPQ1, SSL_aECDSA, ~0u, ~0u, 0},
    {"ECDSA", ~SSL_kCECPQ1, SSL_aECDSA, ~0u, ~0u, 0},
    {"aPSK", ~0u, SSL_aPSK, ~0u, ~0u, 0},

    /* aliases combining key exchange and server authentication */
    {"DHE", SSL_kDHE, ~0u, ~0u, ~0u, 0},
    {"EDH", SSL_kDHE, ~0u, ~0u, ~0u, 0},
    {"ECDHE", SSL_kECDHE, ~0u, ~0u, ~0u, 0},
    {"EECDH", SSL_kECDHE, ~0u, ~0u, ~0u, 0},
    {"RSA", SSL_kRSA, SSL_aRSA, ~SSL_eNULL, ~0u, 0},
    {"PSK", SSL_kPSK, SSL_aPSK, ~0u, ~0u, 0},

    /* symmetric encryption aliases */
    {"3DES", ~0u, ~0u, SSL_3DES, ~0u, 0},
    {"AES128", ~0u, ~0u, SSL_AES128 | SSL_AES128GCM, ~0u, 0},
    {"AES256", ~SSL_kCECPQ1, ~0u, SSL_AES256 | SSL_AES256GCM, ~0u, 0},
    {"AES", ~SSL_kCECPQ1, ~0u, SSL_AES, ~0u, 0},
    {"AESGCM", ~SSL_kCECPQ1, ~0u, SSL_AES128GCM | SSL_AES256GCM, ~0u, 0},
    {"CHACHA20", ~SSL_kCECPQ1, ~0u,
     SSL_CHACHA20POLY1305 | SSL_CHACHA20POLY1305_OLD, ~0u, 0},

    /* MAC aliases */
    {"MD5", ~0u, ~0u, ~0u, SSL_MD5, 0},
    {"SHA1", ~0u, ~0u, ~SSL_eNULL, SSL_SHA1, 0},
    {"SHA", ~0u, ~0u, ~SSL_eNULL, SSL_SHA1, 0},
    {"SHA256", ~SSL_kCECPQ1, ~0u, ~0u, SSL_SHA256, 0},
    {"SHA384", ~SSL_kCECPQ1, ~0u, ~0u, SSL_SHA384, 0},

    /* Legacy protocol minimum version aliases. "TLSv1" is intentionally the
     * same as "SSLv3". */
    {"SSLv3", ~SSL_kCECPQ1, ~0u, ~SSL_eNULL, ~0u, SSL3_VERSION},
    {"TLSv1", ~SSL_kCECPQ1, ~0u, ~SSL_eNULL, ~0u, SSL3_VERSION},
    {"TLSv1.2", ~SSL_kCECPQ1, ~0u, ~SSL_eNULL, ~0u, TLS1_2_VERSION},

    /* Legacy strength classes. */
    {"HIGH", ~SSL_kCECPQ1, ~0u, ~SSL_eNULL, ~0u, 0},
    {"FIPS", ~SSL_kCECPQ1, ~0u, ~SSL_eNULL, ~0u, 0},
};

static const size_t kCipherAliasesLen = OPENSSL_ARRAY_SIZE(kCipherAliases);

static int ssl_cipher_id_cmp(const void *in_a, const void *in_b) {
  const SSL_CIPHER *a = in_a;
  const SSL_CIPHER *b = in_b;

  if (a->id > b->id) {
    return 1;
  } else if (a->id < b->id) {
    return -1;
  } else {
    return 0;
  }
}

static int ssl_cipher_ptr_id_cmp(const SSL_CIPHER **a, const SSL_CIPHER **b) {
  return ssl_cipher_id_cmp(*a, *b);
}

const SSL_CIPHER *SSL_get_cipher_by_value(uint16_t value) {
  SSL_CIPHER c;

  c.id = 0x03000000L | value;
  return bsearch(&c, kCiphers, kCiphersLen, sizeof(SSL_CIPHER),
                 ssl_cipher_id_cmp);
}

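/* Illustrative usage (a sketch, not part of the library): callers look a
 * cipher up by its two-byte TLS value, e.g. 0xc02b for
 * TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256:
 *
 *   const SSL_CIPHER *cipher = SSL_get_cipher_by_value(0xc02b);
 *   if (cipher != NULL) {
 *     printf("%s\n", SSL_CIPHER_get_name(cipher));
 *   }
 *
 * The 0x03000000 prefix above maps the two-byte value into the 0x0300xxxx id
 * space used by |kCiphers|, which is what makes the bsearch by id work. */
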
int ssl_cipher_get_evp_aead(const EVP_AEAD **out_aead,
                            size_t *out_mac_secret_len,
                            size_t *out_fixed_iv_len,
                            const SSL_CIPHER *cipher, uint16_t version) {
  *out_aead = NULL;
  *out_mac_secret_len = 0;
  *out_fixed_iv_len = 0;

  switch (cipher->algorithm_enc) {
    case SSL_AES128GCM:
      *out_aead = EVP_aead_aes_128_gcm();
      *out_fixed_iv_len = 4;
      break;

    case SSL_AES256GCM:
      *out_aead = EVP_aead_aes_256_gcm();
      *out_fixed_iv_len = 4;
      break;

#if !defined(BORINGSSL_ANDROID_SYSTEM)
    case SSL_CHACHA20POLY1305_OLD:
      *out_aead = EVP_aead_chacha20_poly1305_old();
      *out_fixed_iv_len = 0;
      break;
#endif

    case SSL_CHACHA20POLY1305:
      *out_aead = EVP_aead_chacha20_poly1305();
      *out_fixed_iv_len = 12;
      break;

    case SSL_AES128:
      switch (cipher->algorithm_mac) {
        case SSL_SHA1:
          if (version == SSL3_VERSION) {
            *out_aead = EVP_aead_aes_128_cbc_sha1_ssl3();
            *out_fixed_iv_len = 16;
          } else if (version == TLS1_VERSION) {
            *out_aead = EVP_aead_aes_128_cbc_sha1_tls_implicit_iv();
            *out_fixed_iv_len = 16;
          } else {
            *out_aead = EVP_aead_aes_128_cbc_sha1_tls();
          }
          *out_mac_secret_len = SHA_DIGEST_LENGTH;
          break;
        case SSL_SHA256:
          *out_aead = EVP_aead_aes_128_cbc_sha256_tls();
          *out_mac_secret_len = SHA256_DIGEST_LENGTH;
          break;
        default:
          return 0;
      }
      break;

    case SSL_AES256:
      switch (cipher->algorithm_mac) {
        case SSL_SHA1:
          if (version == SSL3_VERSION) {
            *out_aead = EVP_aead_aes_256_cbc_sha1_ssl3();
            *out_fixed_iv_len = 16;
          } else if (version == TLS1_VERSION) {
            *out_aead = EVP_aead_aes_256_cbc_sha1_tls_implicit_iv();
            *out_fixed_iv_len = 16;
          } else {
            *out_aead = EVP_aead_aes_256_cbc_sha1_tls();
          }
          *out_mac_secret_len = SHA_DIGEST_LENGTH;
          break;
        case SSL_SHA256:
          *out_aead = EVP_aead_aes_256_cbc_sha256_tls();
          *out_mac_secret_len = SHA256_DIGEST_LENGTH;
          break;
        case SSL_SHA384:
          *out_aead = EVP_aead_aes_256_cbc_sha384_tls();
          *out_mac_secret_len = SHA384_DIGEST_LENGTH;
          break;
        default:
          return 0;
      }
      break;

    case SSL_3DES:
      switch (cipher->algorithm_mac) {
        case SSL_SHA1:
          if (version == SSL3_VERSION) {
            *out_aead = EVP_aead_des_ede3_cbc_sha1_ssl3();
            *out_fixed_iv_len = 8;
          } else if (version == TLS1_VERSION) {
            *out_aead = EVP_aead_des_ede3_cbc_sha1_tls_implicit_iv();
            *out_fixed_iv_len = 8;
          } else {
            *out_aead = EVP_aead_des_ede3_cbc_sha1_tls();
          }
          *out_mac_secret_len = SHA_DIGEST_LENGTH;
          break;
        default:
          return 0;
      }
      break;

    case SSL_eNULL:
      switch (cipher->algorithm_mac) {
        case SSL_SHA1:
          if (version == SSL3_VERSION) {
            *out_aead = EVP_aead_null_sha1_ssl3();
          } else {
            *out_aead = EVP_aead_null_sha1_tls();
          }
          *out_mac_secret_len = SHA_DIGEST_LENGTH;
          break;
        default:
          return 0;
      }
      break;

    default:
      return 0;
  }

  /* In TLS 1.3, the iv_len is equal to the AEAD nonce length whereas the code
   * above computes the TLS 1.2 construction.
   *
   * TODO(davidben,svaldez): Avoid computing the wrong value and fixing it. */
  if (version >= TLS1_3_VERSION) {
    *out_fixed_iv_len = EVP_AEAD_nonce_length(*out_aead);
    assert(*out_fixed_iv_len >= 8);
  }
  return 1;
}

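/* Illustrative usage (a sketch, not part of the library): record-layer setup
 * can query the AEAD and key layout for a negotiated cipher. For a TLS 1.2
 * AES-128-GCM suite the outputs would be:
 *
 *   const EVP_AEAD *aead;
 *   size_t mac_secret_len, fixed_iv_len;
 *   if (ssl_cipher_get_evp_aead(&aead, &mac_secret_len, &fixed_iv_len,
 *                               cipher, TLS1_2_VERSION)) {
 *     // aead == EVP_aead_aes_128_gcm(), mac_secret_len == 0, and
 *     // fixed_iv_len == 4 (the implicit portion of the GCM nonce).
 *   }
 */
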
const EVP_MD *ssl_get_handshake_digest(uint32_t algorithm_prf) {
  switch (algorithm_prf) {
    case SSL_HANDSHAKE_MAC_DEFAULT:
      return EVP_sha1();
    case SSL_HANDSHAKE_MAC_SHA256:
      return EVP_sha256();
    case SSL_HANDSHAKE_MAC_SHA384:
      return EVP_sha384();
    default:
      return NULL;
  }
}

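/* Illustrative usage (a sketch, not part of the library):
 *
 *   const EVP_MD *md = ssl_get_handshake_digest(SSL_HANDSHAKE_MAC_SHA384);
 *   // md == EVP_sha384(); SSL_HANDSHAKE_MAC_DEFAULT maps to EVP_sha1() and
 *   // an unrecognized value maps to NULL.
 */
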
#define ITEM_SEP(a) \
  (((a) == ':') || ((a) == ' ') || ((a) == ';') || ((a) == ','))

/* rule_equals returns one iff the NUL-terminated string |rule| is equal to the
 * |buf_len| bytes at |buf|. */
static int rule_equals(const char *rule, const char *buf, size_t buf_len) {
  /* |strncmp| alone only checks that |buf| is a prefix of |rule|. */
  return strncmp(rule, buf, buf_len) == 0 && rule[buf_len] == '\0';
}

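/* Illustrative example (not part of the library): given the rule string
 * "AES128:AES", the tokenizer below passes |buf| pointing at "AES128:AES" with
 * |buf_len| of 6, so rule_equals("AES128", buf, 6) is 1, while both
 * rule_equals("AES", buf, 6) and rule_equals("AES128GCM", buf, 6) are 0. */
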
static void ll_append_tail(CIPHER_ORDER **head, CIPHER_ORDER *curr,
                           CIPHER_ORDER **tail) {
  if (curr == *tail) {
    return;
  }
  if (curr == *head) {
    *head = curr->next;
  }
  if (curr->prev != NULL) {
    curr->prev->next = curr->next;
  }
  if (curr->next != NULL) {
    curr->next->prev = curr->prev;
  }
  (*tail)->next = curr;
  curr->prev = *tail;
  curr->next = NULL;
  *tail = curr;
}

static void ll_append_head(CIPHER_ORDER **head, CIPHER_ORDER *curr,
                           CIPHER_ORDER **tail) {
  if (curr == *head) {
    return;
  }
  if (curr == *tail) {
    *tail = curr->prev;
  }
  if (curr->next != NULL) {
    curr->next->prev = curr->prev;
  }
  if (curr->prev != NULL) {
    curr->prev->next = curr->next;
  }
  (*head)->prev = curr;
  curr->next = *head;
  curr->prev = NULL;
  *head = curr;
}

static void ssl_cipher_collect_ciphers(const SSL_PROTOCOL_METHOD *ssl_method,
                                       CIPHER_ORDER *co_list,
                                       CIPHER_ORDER **head_p,
                                       CIPHER_ORDER **tail_p) {
  /* The set of ciphers is static, but some subset may be unsupported by
   * |ssl_method|, so the list may be smaller. */
  size_t co_list_num = 0;
  for (size_t i = 0; i < kCiphersLen; i++) {
    const SSL_CIPHER *cipher = &kCiphers[i];
    if (ssl_method->supports_cipher(cipher)) {
      co_list[co_list_num].cipher = cipher;
      co_list[co_list_num].next = NULL;
      co_list[co_list_num].prev = NULL;
      co_list[co_list_num].active = 0;
      co_list[co_list_num].in_group = 0;
      co_list_num++;
    }
  }

  /* Prepare linked list from list entries. */
  if (co_list_num > 0) {
    co_list[0].prev = NULL;

    if (co_list_num > 1) {
      co_list[0].next = &co_list[1];

      for (size_t i = 1; i < co_list_num - 1; i++) {
        co_list[i].prev = &co_list[i - 1];
        co_list[i].next = &co_list[i + 1];
      }

      co_list[co_list_num - 1].prev = &co_list[co_list_num - 2];
    }

    co_list[co_list_num - 1].next = NULL;

    *head_p = &co_list[0];
    *tail_p = &co_list[co_list_num - 1];
  }
}

/* ssl_cipher_apply_rule applies the rule type |rule| to ciphers matching its
 * parameters in the linked list from |*head_p| to |*tail_p|. It writes the new
 * head and tail of the list to |*head_p| and |*tail_p|, respectively.
 *
 * - If |cipher_id| is non-zero, only that cipher is selected.
 * - Otherwise, if |strength_bits| is non-negative, it selects ciphers
 *   of that strength.
 * - Otherwise, it selects ciphers that match each bitmask in |alg_*| and
 *   |min_version|. */
static void ssl_cipher_apply_rule(
    uint32_t cipher_id, uint32_t alg_mkey, uint32_t alg_auth,
    uint32_t alg_enc, uint32_t alg_mac, uint16_t min_version, int rule,
    int strength_bits, int in_group, CIPHER_ORDER **head_p,
    CIPHER_ORDER **tail_p) {
  CIPHER_ORDER *head, *tail, *curr, *next, *last;
  const SSL_CIPHER *cp;
  int reverse = 0;

  if (cipher_id == 0 && strength_bits == -1 && min_version == 0 &&
      (alg_mkey == 0 || alg_auth == 0 || alg_enc == 0 || alg_mac == 0)) {
    /* The rule matches nothing, so bail early. */
    return;
  }

  if (rule == CIPHER_DEL) {
    /* needed to maintain sorting between currently deleted ciphers */
    reverse = 1;
  }

  head = *head_p;
  tail = *tail_p;

  if (reverse) {
    next = tail;
    last = head;
  } else {
    next = head;
    last = tail;
  }

  curr = NULL;
  for (;;) {
    if (curr == last) {
      break;
    }

    curr = next;
    if (curr == NULL) {
      break;
    }

    next = reverse ? curr->prev : curr->next;
    cp = curr->cipher;

    /* The selection criterion is either a specific cipher, the value of
     * |strength_bits|, or the algorithms used. */
    if (cipher_id != 0) {
      if (cipher_id != cp->id) {
        continue;
      }
    } else if (strength_bits >= 0) {
      if (strength_bits != SSL_CIPHER_get_bits(cp, NULL)) {
        continue;
      }
    } else {
      if (!(alg_mkey & cp->algorithm_mkey) ||
          !(alg_auth & cp->algorithm_auth) ||
          !(alg_enc & cp->algorithm_enc) ||
          !(alg_mac & cp->algorithm_mac) ||
          (min_version != 0 && SSL_CIPHER_get_min_version(cp) != min_version)) {
        continue;
      }
    }

    /* add the cipher if it has not been added yet. */
    if (rule == CIPHER_ADD) {
      /* reverse == 0 */
      if (!curr->active) {
        ll_append_tail(&head, curr, &tail);
        curr->active = 1;
        curr->in_group = in_group;
      }
    }

    /* Move the added cipher to this location */
    else if (rule == CIPHER_ORD) {
      /* reverse == 0 */
      if (curr->active) {
        ll_append_tail(&head, curr, &tail);
        curr->in_group = 0;
      }
    } else if (rule == CIPHER_DEL) {
      /* reverse == 1 */
      if (curr->active) {
        /* most recently deleted ciphersuites get best positions
         * for any future CIPHER_ADD (note that the CIPHER_DEL loop
         * works in reverse to maintain the order) */
        ll_append_head(&head, curr, &tail);
        curr->active = 0;
        curr->in_group = 0;
      }
    } else if (rule == CIPHER_KILL) {
      /* reverse == 0 */
      if (head == curr) {
        head = curr->next;
      } else {
        curr->prev->next = curr->next;
      }

      if (tail == curr) {
        tail = curr->prev;
      }
      curr->active = 0;
      if (curr->next != NULL) {
        curr->next->prev = curr->prev;
      }
      if (curr->prev != NULL) {
        curr->prev->next = curr->next;
      }
      curr->next = NULL;
      curr->prev = NULL;
    }
  }

  *head_p = head;
  *tail_p = tail;
}

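/* Illustrative example (a sketch, not part of the library): the rule parser
 * below turns the configuration fragment "ECDHE+AES128" into roughly
 *
 *   ssl_cipher_apply_rule(0, SSL_kECDHE, ~0u, SSL_AES128 | SSL_AES128GCM, ~0u,
 *                         0, CIPHER_ADD, -1, 0, &head, &tail);
 *
 * i.e. cipher_id 0, min_version 0, strength_bits -1 and in_group 0, which
 * appends each not-yet-active ECDHE cipher using AES-128 to the tail of the
 * list and marks it active. */
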
static int ssl_cipher_strength_sort(CIPHER_ORDER **head_p,
                                    CIPHER_ORDER **tail_p) {
  int max_strength_bits, i, *number_uses;
  CIPHER_ORDER *curr;

  /* This routine sorts the ciphers with descending strength. The sorting must
   * keep the pre-sorted sequence, so we apply the normal sorting routine as
   * '+' movement to the end of the list. */
  max_strength_bits = 0;
  curr = *head_p;
  while (curr != NULL) {
    if (curr->active &&
        SSL_CIPHER_get_bits(curr->cipher, NULL) > max_strength_bits) {
      max_strength_bits = SSL_CIPHER_get_bits(curr->cipher, NULL);
    }
    curr = curr->next;
  }

  number_uses = OPENSSL_malloc((max_strength_bits + 1) * sizeof(int));
  if (!number_uses) {
    OPENSSL_PUT_ERROR(SSL, ERR_R_MALLOC_FAILURE);
    return 0;
  }
  memset(number_uses, 0, (max_strength_bits + 1) * sizeof(int));

  /* Now find the strength_bits values actually used. */
  curr = *head_p;
  while (curr != NULL) {
    if (curr->active) {
      number_uses[SSL_CIPHER_get_bits(curr->cipher, NULL)]++;
    }
    curr = curr->next;
  }

  /* Go through the list of used strength_bits values in descending order. */
  for (i = max_strength_bits; i >= 0; i--) {
    if (number_uses[i] > 0) {
      ssl_cipher_apply_rule(0, 0, 0, 0, 0, 0, CIPHER_ORD, i, 0, head_p, tail_p);
    }
  }

  OPENSSL_free(number_uses);
  return 1;
}

David Benjamin0344daf2015-04-08 02:08:01 -04001180static int ssl_cipher_process_rulestr(const SSL_PROTOCOL_METHOD *ssl_method,
1181 const char *rule_str,
Adam Langleyfcf25832014-12-18 17:42:32 -08001182 CIPHER_ORDER **head_p,
David Benjamin0344daf2015-04-08 02:08:01 -04001183 CIPHER_ORDER **tail_p) {
David Benjamind6e9eec2015-11-18 09:48:55 -05001184 uint32_t alg_mkey, alg_auth, alg_enc, alg_mac;
David Benjamindcb6ef02015-11-06 15:35:54 -05001185 uint16_t min_version;
Adam Langleyfcf25832014-12-18 17:42:32 -08001186 const char *l, *buf;
Adam Langleyf139c992016-10-02 09:56:09 -07001187 int multi, skip_rule, rule, ok, in_group = 0, has_group = 0;
David Benjamin0344daf2015-04-08 02:08:01 -04001188 size_t j, buf_len;
1189 uint32_t cipher_id;
Adam Langleyfcf25832014-12-18 17:42:32 -08001190 char ch;
Adam Langley95c29f32014-06-20 12:00:00 -07001191
Adam Langleyfcf25832014-12-18 17:42:32 -08001192 l = rule_str;
1193 for (;;) {
1194 ch = *l;
Adam Langley95c29f32014-06-20 12:00:00 -07001195
Adam Langleyfcf25832014-12-18 17:42:32 -08001196 if (ch == '\0') {
1197 break; /* done */
1198 }
Adam Langley95c29f32014-06-20 12:00:00 -07001199
Adam Langleyfcf25832014-12-18 17:42:32 -08001200 if (in_group) {
1201 if (ch == ']') {
Adam Langleyfcf25832014-12-18 17:42:32 -08001202 if (*tail_p) {
1203 (*tail_p)->in_group = 0;
1204 }
1205 in_group = 0;
1206 l++;
1207 continue;
1208 }
David Benjamin37d92462014-09-20 17:54:24 -04001209
Adam Langleyfcf25832014-12-18 17:42:32 -08001210 if (ch == '|') {
1211 rule = CIPHER_ADD;
1212 l++;
1213 continue;
1214 } else if (!(ch >= 'a' && ch <= 'z') && !(ch >= 'A' && ch <= 'Z') &&
1215 !(ch >= '0' && ch <= '9')) {
David Benjamin3570d732015-06-29 00:28:17 -04001216 OPENSSL_PUT_ERROR(SSL, SSL_R_UNEXPECTED_OPERATOR_IN_GROUP);
Adam Langleyf139c992016-10-02 09:56:09 -07001217 return 0;
Adam Langleyfcf25832014-12-18 17:42:32 -08001218 } else {
1219 rule = CIPHER_ADD;
1220 }
1221 } else if (ch == '-') {
1222 rule = CIPHER_DEL;
1223 l++;
1224 } else if (ch == '+') {
1225 rule = CIPHER_ORD;
1226 l++;
1227 } else if (ch == '!') {
1228 rule = CIPHER_KILL;
1229 l++;
1230 } else if (ch == '@') {
1231 rule = CIPHER_SPECIAL;
1232 l++;
1233 } else if (ch == '[') {
1234 if (in_group) {
David Benjamin3570d732015-06-29 00:28:17 -04001235 OPENSSL_PUT_ERROR(SSL, SSL_R_NESTED_GROUP);
Adam Langleyf139c992016-10-02 09:56:09 -07001236 return 0;
Adam Langleyfcf25832014-12-18 17:42:32 -08001237 }
1238 in_group = 1;
1239 has_group = 1;
1240 l++;
1241 continue;
1242 } else {
1243 rule = CIPHER_ADD;
1244 }
Adam Langley95c29f32014-06-20 12:00:00 -07001245
Adam Langleyfcf25832014-12-18 17:42:32 -08001246 /* If preference groups are enabled, the only legal operator is +.
1247 * Otherwise the in_group bits will get mixed up. */
1248 if (has_group && rule != CIPHER_ADD) {
David Benjamin3570d732015-06-29 00:28:17 -04001249 OPENSSL_PUT_ERROR(SSL, SSL_R_MIXED_SPECIAL_OPERATOR_WITH_GROUPS);
Adam Langleyf139c992016-10-02 09:56:09 -07001250 return 0;
Adam Langleyfcf25832014-12-18 17:42:32 -08001251 }
Adam Langley95c29f32014-06-20 12:00:00 -07001252
Adam Langleyfcf25832014-12-18 17:42:32 -08001253 if (ITEM_SEP(ch)) {
1254 l++;
1255 continue;
1256 }
Adam Langley95c29f32014-06-20 12:00:00 -07001257
David Benjamin0344daf2015-04-08 02:08:01 -04001258 multi = 0;
1259 cipher_id = 0;
1260 alg_mkey = ~0u;
1261 alg_auth = ~0u;
1262 alg_enc = ~0u;
1263 alg_mac = ~0u;
David Benjamindcb6ef02015-11-06 15:35:54 -05001264 min_version = 0;
1265 skip_rule = 0;
Adam Langley95c29f32014-06-20 12:00:00 -07001266
Adam Langleyfcf25832014-12-18 17:42:32 -08001267 for (;;) {
1268 ch = *l;
1269 buf = l;
David Benjamin0344daf2015-04-08 02:08:01 -04001270 buf_len = 0;
Adam Langleyfcf25832014-12-18 17:42:32 -08001271 while (((ch >= 'A') && (ch <= 'Z')) || ((ch >= '0') && (ch <= '9')) ||
1272 ((ch >= 'a') && (ch <= 'z')) || (ch == '-') || (ch == '.')) {
1273 ch = *(++l);
David Benjamin0344daf2015-04-08 02:08:01 -04001274 buf_len++;
Adam Langleyfcf25832014-12-18 17:42:32 -08001275 }
Adam Langley95c29f32014-06-20 12:00:00 -07001276
David Benjamin0344daf2015-04-08 02:08:01 -04001277 if (buf_len == 0) {
Adam Langleyfcf25832014-12-18 17:42:32 -08001278 /* We hit something we cannot deal with, it is no command or separator
1279 * nor alphanumeric, so we call this an error. */
David Benjamin3570d732015-06-29 00:28:17 -04001280 OPENSSL_PUT_ERROR(SSL, SSL_R_INVALID_COMMAND);
Adam Langleyf99f2442016-10-02 09:53:38 -07001281 return 0;
Adam Langleyfcf25832014-12-18 17:42:32 -08001282 }
Adam Langley95c29f32014-06-20 12:00:00 -07001283
Adam Langleyfcf25832014-12-18 17:42:32 -08001284 if (rule == CIPHER_SPECIAL) {
Adam Langleyfcf25832014-12-18 17:42:32 -08001285 break;
1286 }
David Benjamin0344daf2015-04-08 02:08:01 -04001287
1288 /* Look for a matching exact cipher. These aren't allowed in multipart
1289 * rules. */
1290 if (!multi && ch != '+') {
David Benjamina1c90a52015-05-30 17:03:14 -04001291 for (j = 0; j < kCiphersLen; j++) {
1292 const SSL_CIPHER *cipher = &kCiphers[j];
1293 if (rule_equals(cipher->name, buf, buf_len)) {
David Benjamin0344daf2015-04-08 02:08:01 -04001294 cipher_id = cipher->id;
1295 break;
1296 }
1297 }
1298 }
1299 if (cipher_id == 0) {
1300 /* If not an exact cipher, look for a matching cipher alias. */
David Benjamina1c90a52015-05-30 17:03:14 -04001301 for (j = 0; j < kCipherAliasesLen; j++) {
David Benjamin0344daf2015-04-08 02:08:01 -04001302 if (rule_equals(kCipherAliases[j].name, buf, buf_len)) {
1303 alg_mkey &= kCipherAliases[j].algorithm_mkey;
1304 alg_auth &= kCipherAliases[j].algorithm_auth;
1305 alg_enc &= kCipherAliases[j].algorithm_enc;
1306 alg_mac &= kCipherAliases[j].algorithm_mac;
David Benjamindcb6ef02015-11-06 15:35:54 -05001307
1308 if (min_version != 0 &&
1309 min_version != kCipherAliases[j].min_version) {
1310 skip_rule = 1;
1311 } else {
1312 min_version = kCipherAliases[j].min_version;
1313 }
David Benjamin0344daf2015-04-08 02:08:01 -04001314 break;
1315 }
1316 }
David Benjamina1c90a52015-05-30 17:03:14 -04001317 if (j == kCipherAliasesLen) {
David Benjamindcb6ef02015-11-06 15:35:54 -05001318 skip_rule = 1;
David Benjamin0344daf2015-04-08 02:08:01 -04001319 }
1320 }
1321
1322 /* Check for a multipart rule. */
1323 if (ch != '+') {
1324 break;
1325 }
1326 l++;
1327 multi = 1;
Adam Langleyfcf25832014-12-18 17:42:32 -08001328 }
Adam Langley95c29f32014-06-20 12:00:00 -07001329
David Benjamin13414b32015-12-09 23:02:39 -05001330 /* If one of the CHACHA20_POLY1305 variants is selected, include the other
1331 * as well. They have the same name to avoid requiring changes in
1332 * configuration. Apply this transformation late so that the cipher name
1333 * still behaves as an exact name and not an alias in multipart rules.
1334 *
1335 * This is temporary and will be removed when the pre-standard construction
1336 * is removed. */
1337 if (cipher_id == TLS1_CK_ECDHE_RSA_CHACHA20_POLY1305_OLD ||
1338 cipher_id == TLS1_CK_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256) {
1339 cipher_id = 0;
1340 alg_mkey = SSL_kECDHE;
1341 alg_auth = SSL_aRSA;
1342 alg_enc = SSL_CHACHA20POLY1305|SSL_CHACHA20POLY1305_OLD;
1343 alg_mac = SSL_AEAD;
1344 } else if (cipher_id == TLS1_CK_ECDHE_ECDSA_CHACHA20_POLY1305_OLD ||
1345 cipher_id == TLS1_CK_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256) {
1346 cipher_id = 0;
1347 alg_mkey = SSL_kECDHE;
1348 alg_auth = SSL_aECDSA;
1349 alg_enc = SSL_CHACHA20POLY1305|SSL_CHACHA20POLY1305_OLD;
1350 alg_mac = SSL_AEAD;
1351 }
1352
Adam Langleyfcf25832014-12-18 17:42:32 -08001353 /* Ok, we have the rule, now apply it. */
1354 if (rule == CIPHER_SPECIAL) {
1355 /* special command */
1356 ok = 0;
David Benjamin0344daf2015-04-08 02:08:01 -04001357 if (buf_len == 8 && !strncmp(buf, "STRENGTH", 8)) {
Adam Langleyfcf25832014-12-18 17:42:32 -08001358 ok = ssl_cipher_strength_sort(head_p, tail_p);
1359 } else {
David Benjamin3570d732015-06-29 00:28:17 -04001360 OPENSSL_PUT_ERROR(SSL, SSL_R_INVALID_COMMAND);
Adam Langleyfcf25832014-12-18 17:42:32 -08001361 }
Adam Langley95c29f32014-06-20 12:00:00 -07001362
Adam Langleyfcf25832014-12-18 17:42:32 -08001363 if (ok == 0) {
Adam Langleyf139c992016-10-02 09:56:09 -07001364 return 0;
Adam Langleyfcf25832014-12-18 17:42:32 -08001365 }
Adam Langley95c29f32014-06-20 12:00:00 -07001366
Adam Langleyfcf25832014-12-18 17:42:32 -08001367 /* We do not support any "multi" options together with "@", so throw away
1368 * the rest of the command, if any left, until end or ':' is found. */
1369 while (*l != '\0' && !ITEM_SEP(*l)) {
1370 l++;
1371 }
David Benjamindcb6ef02015-11-06 15:35:54 -05001372 } else if (!skip_rule) {
Adam Langleyfcf25832014-12-18 17:42:32 -08001373 ssl_cipher_apply_rule(cipher_id, alg_mkey, alg_auth, alg_enc, alg_mac,
David Benjamind6e9eec2015-11-18 09:48:55 -05001374 min_version, rule, -1, in_group, head_p, tail_p);
Adam Langleyfcf25832014-12-18 17:42:32 -08001375 }
1376 }
Adam Langley95c29f32014-06-20 12:00:00 -07001377
Adam Langleyfcf25832014-12-18 17:42:32 -08001378 if (in_group) {
David Benjamin3570d732015-06-29 00:28:17 -04001379 OPENSSL_PUT_ERROR(SSL, SSL_R_INVALID_COMMAND);
Adam Langleyf139c992016-10-02 09:56:09 -07001380 return 0;
Adam Langleyfcf25832014-12-18 17:42:32 -08001381 }
Adam Langley95c29f32014-06-20 12:00:00 -07001382
Adam Langleyf139c992016-10-02 09:56:09 -07001383 return 1;
Adam Langleyfcf25832014-12-18 17:42:32 -08001384}
Adam Langley95c29f32014-06-20 12:00:00 -07001385
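/* Illustrative sketch, not part of the library: rule strings of the kind parsed
 * above are normally supplied through the public API. The cipher names, the
 * equal-preference group "[...|...]" and the "@STRENGTH" command below are
 * examples only. */
static int example_set_rule_string(SSL_CTX *ctx) {
  return SSL_CTX_set_cipher_list(
      ctx,
      "[ECDHE-ECDSA-CHACHA20-POLY1305|ECDHE-ECDSA-AES128-GCM-SHA256]:"
      "ECDHE-RSA-AES128-GCM-SHA256:@STRENGTH");
}
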
Adam Langleyfcf25832014-12-18 17:42:32 -08001386STACK_OF(SSL_CIPHER) *
1387ssl_create_cipher_list(const SSL_PROTOCOL_METHOD *ssl_method,
David Benjamin71f07942015-04-08 02:36:59 -04001388 struct ssl_cipher_preference_list_st **out_cipher_list,
1389 STACK_OF(SSL_CIPHER) **out_cipher_list_by_id,
1390 const char *rule_str) {
David Benjamin0344daf2015-04-08 02:08:01 -04001391 int ok;
Adam Langleyfcf25832014-12-18 17:42:32 -08001392 STACK_OF(SSL_CIPHER) *cipherstack = NULL, *tmp_cipher_list = NULL;
1393 const char *rule_p;
1394 CIPHER_ORDER *co_list = NULL, *head = NULL, *tail = NULL, *curr;
Adam Langleyfcf25832014-12-18 17:42:32 -08001395 uint8_t *in_group_flags = NULL;
1396 unsigned int num_in_group_flags = 0;
1397 struct ssl_cipher_preference_list_st *pref_list = NULL;
Adam Langley95c29f32014-06-20 12:00:00 -07001398
Adam Langleyfcf25832014-12-18 17:42:32 -08001399 /* Return with error if nothing to do. */
David Benjamin71f07942015-04-08 02:36:59 -04001400 if (rule_str == NULL || out_cipher_list == NULL) {
Adam Langleyfcf25832014-12-18 17:42:32 -08001401 return NULL;
1402 }
David Benjamin5213df42014-08-20 14:19:54 -04001403
Adam Langleyfcf25832014-12-18 17:42:32 -08001404  /* Now we have to collect the available ciphers from the compiled-in ciphers.
1405   * We cannot get more than the number compiled in, so that count is used for
1406   * the allocation. */
Brian Smith5ba06892016-02-07 09:36:04 -10001407 co_list = OPENSSL_malloc(sizeof(CIPHER_ORDER) * kCiphersLen);
Adam Langleyfcf25832014-12-18 17:42:32 -08001408 if (co_list == NULL) {
David Benjamin3570d732015-06-29 00:28:17 -04001409 OPENSSL_PUT_ERROR(SSL, ERR_R_MALLOC_FAILURE);
Adam Langleyfcf25832014-12-18 17:42:32 -08001410 return NULL;
1411 }
Adam Langley95c29f32014-06-20 12:00:00 -07001412
David Benjamina1c90a52015-05-30 17:03:14 -04001413 ssl_cipher_collect_ciphers(ssl_method, co_list, &head, &tail);
Adam Langley95c29f32014-06-20 12:00:00 -07001414
Adam Langleyfcf25832014-12-18 17:42:32 -08001415 /* Now arrange all ciphers by preference:
1416 * TODO(davidben): Compute this order once and copy it. */
Adam Langley95c29f32014-06-20 12:00:00 -07001417
Steven Valdez803c77a2016-09-06 14:13:43 -04001418  /* Everything else being equal, prefer TLS 1.3 ciphers, then ECDHE_ECDSA, then
1419   * ECDHE_RSA over other key exchange mechanisms. */
Matt Braithwaite053931e2016-05-25 12:06:05 -07001420
Steven Valdez803c77a2016-09-06 14:13:43 -04001421 ssl_cipher_apply_rule(0, SSL_kGENERIC, SSL_aGENERIC, ~0u, ~0u, 0, CIPHER_ADD,
1422 -1, 0, &head, &tail);
David Benjamind6e9eec2015-11-18 09:48:55 -05001423 ssl_cipher_apply_rule(0, SSL_kECDHE, SSL_aECDSA, ~0u, ~0u, 0, CIPHER_ADD, -1,
Adam Langleyfcf25832014-12-18 17:42:32 -08001424 0, &head, &tail);
David Benjamind6e9eec2015-11-18 09:48:55 -05001425 ssl_cipher_apply_rule(0, SSL_kECDHE, ~0u, ~0u, ~0u, 0, CIPHER_ADD, -1, 0,
1426 &head, &tail);
Steven Valdez803c77a2016-09-06 14:13:43 -04001427 ssl_cipher_apply_rule(0, ~0u, ~0u, ~0u, ~0u, 0, CIPHER_DEL, -1, 0, &head,
1428 &tail);
Adam Langley95c29f32014-06-20 12:00:00 -07001429
Adam Langleyfcf25832014-12-18 17:42:32 -08001430 /* Order the bulk ciphers. First the preferred AEAD ciphers. We prefer
1431 * CHACHA20 unless there is hardware support for fast and constant-time
David Benjamin13414b32015-12-09 23:02:39 -05001432 * AES_GCM. Of the two CHACHA20 variants, the new one is preferred over the
1433 * old one. */
Adam Langleyfcf25832014-12-18 17:42:32 -08001434 if (EVP_has_aes_hardware()) {
David Benjamind6e9eec2015-11-18 09:48:55 -05001435 ssl_cipher_apply_rule(0, ~0u, ~0u, SSL_AES128GCM, ~0u, 0, CIPHER_ADD, -1, 0,
1436 &head, &tail);
David Benjamin43336652016-03-03 15:32:29 -05001437 ssl_cipher_apply_rule(0, ~0u, ~0u, SSL_AES256GCM, ~0u, 0, CIPHER_ADD, -1, 0,
1438 &head, &tail);
David Benjamin13414b32015-12-09 23:02:39 -05001439 ssl_cipher_apply_rule(0, ~0u, ~0u, SSL_CHACHA20POLY1305, ~0u, 0, CIPHER_ADD,
1440 -1, 0, &head, &tail);
David Benjamind6e9eec2015-11-18 09:48:55 -05001441 ssl_cipher_apply_rule(0, ~0u, ~0u, SSL_CHACHA20POLY1305_OLD, ~0u, 0,
David Benjamin0344daf2015-04-08 02:08:01 -04001442 CIPHER_ADD, -1, 0, &head, &tail);
Adam Langleyfcf25832014-12-18 17:42:32 -08001443 } else {
David Benjamin13414b32015-12-09 23:02:39 -05001444 ssl_cipher_apply_rule(0, ~0u, ~0u, SSL_CHACHA20POLY1305, ~0u, 0, CIPHER_ADD,
1445 -1, 0, &head, &tail);
David Benjamind6e9eec2015-11-18 09:48:55 -05001446 ssl_cipher_apply_rule(0, ~0u, ~0u, SSL_CHACHA20POLY1305_OLD, ~0u, 0,
David Benjamin0344daf2015-04-08 02:08:01 -04001447 CIPHER_ADD, -1, 0, &head, &tail);
David Benjamind6e9eec2015-11-18 09:48:55 -05001448 ssl_cipher_apply_rule(0, ~0u, ~0u, SSL_AES128GCM, ~0u, 0, CIPHER_ADD, -1, 0,
1449 &head, &tail);
David Benjamin43336652016-03-03 15:32:29 -05001450 ssl_cipher_apply_rule(0, ~0u, ~0u, SSL_AES256GCM, ~0u, 0, CIPHER_ADD, -1, 0,
1451 &head, &tail);
Adam Langleyfcf25832014-12-18 17:42:32 -08001452 }
Adam Langley95c29f32014-06-20 12:00:00 -07001453
David Benjamin43336652016-03-03 15:32:29 -05001454 /* Then the legacy non-AEAD ciphers: AES_128_CBC, AES_256_CBC,
Matthew Braithwaite8aaa9e12016-09-07 15:09:58 -07001455 * 3DES_EDE_CBC_SHA. */
David Benjamind6e9eec2015-11-18 09:48:55 -05001456 ssl_cipher_apply_rule(0, ~0u, ~0u, SSL_AES128, ~0u, 0, CIPHER_ADD, -1, 0,
1457 &head, &tail);
David Benjamin43336652016-03-03 15:32:29 -05001458 ssl_cipher_apply_rule(0, ~0u, ~0u, SSL_AES256, ~0u, 0, CIPHER_ADD, -1, 0,
1459 &head, &tail);
1460 ssl_cipher_apply_rule(0, ~0u, ~0u, SSL_3DES, ~0u, 0, CIPHER_ADD, -1, 0, &head,
1461 &tail);
Adam Langley95c29f32014-06-20 12:00:00 -07001462
Adam Langleyfcf25832014-12-18 17:42:32 -08001463 /* Temporarily enable everything else for sorting */
David Benjamind6e9eec2015-11-18 09:48:55 -05001464 ssl_cipher_apply_rule(0, ~0u, ~0u, ~0u, ~0u, 0, CIPHER_ADD, -1, 0, &head,
1465 &tail);
Adam Langley95c29f32014-06-20 12:00:00 -07001466
Adam Langleyfcf25832014-12-18 17:42:32 -08001467 /* Move ciphers without forward secrecy to the end. */
Steven Valdez803c77a2016-09-06 14:13:43 -04001468 ssl_cipher_apply_rule(0, (SSL_kRSA | SSL_kPSK), ~0u, ~0u, ~0u, 0,
David Benjamin0344daf2015-04-08 02:08:01 -04001469 CIPHER_ORD, -1, 0, &head, &tail);
Adam Langley95c29f32014-06-20 12:00:00 -07001470
Adam Langleyfcf25832014-12-18 17:42:32 -08001471 /* Now disable everything (maintaining the ordering!) */
David Benjamind6e9eec2015-11-18 09:48:55 -05001472 ssl_cipher_apply_rule(0, ~0u, ~0u, ~0u, ~0u, 0, CIPHER_DEL, -1, 0, &head,
1473 &tail);
Adam Langley95c29f32014-06-20 12:00:00 -07001474
Adam Langleyfcf25832014-12-18 17:42:32 -08001475  /* If rule_str begins with DEFAULT, apply the default rule before processing
1476   * any additional rules that follow. */
1477 ok = 1;
1478 rule_p = rule_str;
1479 if (strncmp(rule_str, "DEFAULT", 7) == 0) {
David Benjamin0344daf2015-04-08 02:08:01 -04001480 ok = ssl_cipher_process_rulestr(ssl_method, SSL_DEFAULT_CIPHER_LIST, &head,
1481 &tail);
Adam Langleyfcf25832014-12-18 17:42:32 -08001482 rule_p += 7;
1483 if (*rule_p == ':') {
1484 rule_p++;
1485 }
1486 }
Adam Langley858a88d2014-06-20 12:00:00 -07001487
Adam Langleyfcf25832014-12-18 17:42:32 -08001488 if (ok && strlen(rule_p) > 0) {
David Benjamin0344daf2015-04-08 02:08:01 -04001489 ok = ssl_cipher_process_rulestr(ssl_method, rule_p, &head, &tail);
Adam Langleyfcf25832014-12-18 17:42:32 -08001490 }
Adam Langley95c29f32014-06-20 12:00:00 -07001491
Adam Langleyfcf25832014-12-18 17:42:32 -08001492 if (!ok) {
1493 goto err;
1494 }
1495
1496  /* Allocate a new "cipherstack" for the result and return with an error
1497   * if we cannot get one. */
1498 cipherstack = sk_SSL_CIPHER_new_null();
1499 if (cipherstack == NULL) {
1500 goto err;
1501 }
1502
David Benjamina1c90a52015-05-30 17:03:14 -04001503 in_group_flags = OPENSSL_malloc(kCiphersLen);
Adam Langleyfcf25832014-12-18 17:42:32 -08001504 if (!in_group_flags) {
1505 goto err;
1506 }
1507
1508  /* The cipher selection for the list is done. The selected ciphers are added,
1509   * in order of preference, to the resulting STACK_OF(SSL_CIPHER). */
1510 for (curr = head; curr != NULL; curr = curr->next) {
1511 if (curr->active) {
David Benjamin2adb7ec2015-01-11 19:59:06 -05001512 if (!sk_SSL_CIPHER_push(cipherstack, curr->cipher)) {
1513 goto err;
1514 }
Adam Langleyfcf25832014-12-18 17:42:32 -08001515 in_group_flags[num_in_group_flags++] = curr->in_group;
1516 }
1517 }
1518 OPENSSL_free(co_list); /* Not needed any longer */
1519 co_list = NULL;
1520
1521 tmp_cipher_list = sk_SSL_CIPHER_dup(cipherstack);
1522 if (tmp_cipher_list == NULL) {
1523 goto err;
1524 }
1525 pref_list = OPENSSL_malloc(sizeof(struct ssl_cipher_preference_list_st));
1526 if (!pref_list) {
1527 goto err;
1528 }
1529 pref_list->ciphers = cipherstack;
1530 pref_list->in_group_flags = OPENSSL_malloc(num_in_group_flags);
1531 if (!pref_list->in_group_flags) {
1532 goto err;
1533 }
1534 memcpy(pref_list->in_group_flags, in_group_flags, num_in_group_flags);
1535 OPENSSL_free(in_group_flags);
1536 in_group_flags = NULL;
David Benjamin71f07942015-04-08 02:36:59 -04001537 if (*out_cipher_list != NULL) {
1538 ssl_cipher_preference_list_free(*out_cipher_list);
Adam Langleyfcf25832014-12-18 17:42:32 -08001539 }
David Benjamin71f07942015-04-08 02:36:59 -04001540 *out_cipher_list = pref_list;
Adam Langleyfcf25832014-12-18 17:42:32 -08001541 pref_list = NULL;
1542
David Benjamin71f07942015-04-08 02:36:59 -04001543 if (out_cipher_list_by_id != NULL) {
David Benjamin2755a3e2015-04-22 16:17:58 -04001544 sk_SSL_CIPHER_free(*out_cipher_list_by_id);
David Benjamin71f07942015-04-08 02:36:59 -04001545 *out_cipher_list_by_id = tmp_cipher_list;
Adam Langleyfcf25832014-12-18 17:42:32 -08001546 tmp_cipher_list = NULL;
David Benjamin71f07942015-04-08 02:36:59 -04001547 (void) sk_SSL_CIPHER_set_cmp_func(*out_cipher_list_by_id,
1548 ssl_cipher_ptr_id_cmp);
Adam Langleyfcf25832014-12-18 17:42:32 -08001549
David Benjamin71f07942015-04-08 02:36:59 -04001550 sk_SSL_CIPHER_sort(*out_cipher_list_by_id);
Adam Langleyfcf25832014-12-18 17:42:32 -08001551 } else {
1552 sk_SSL_CIPHER_free(tmp_cipher_list);
1553 tmp_cipher_list = NULL;
1554 }
1555
1556 return cipherstack;
Adam Langley858a88d2014-06-20 12:00:00 -07001557
1558err:
David Benjamin2755a3e2015-04-22 16:17:58 -04001559 OPENSSL_free(co_list);
1560 OPENSSL_free(in_group_flags);
1561 sk_SSL_CIPHER_free(cipherstack);
1562 sk_SSL_CIPHER_free(tmp_cipher_list);
1563 if (pref_list) {
Adam Langleyfcf25832014-12-18 17:42:32 -08001564 OPENSSL_free(pref_list->in_group_flags);
1565 }
David Benjamin2755a3e2015-04-22 16:17:58 -04001566 OPENSSL_free(pref_list);
Adam Langleyfcf25832014-12-18 17:42:32 -08001567 return NULL;
1568}
Adam Langley95c29f32014-06-20 12:00:00 -07001569
David Benjamin71f07942015-04-08 02:36:59 -04001570uint32_t SSL_CIPHER_get_id(const SSL_CIPHER *cipher) { return cipher->id; }
1571
David Benjamina1c90a52015-05-30 17:03:14 -04001572uint16_t ssl_cipher_get_value(const SSL_CIPHER *cipher) {
1573 uint32_t id = cipher->id;
1574  /* All cipher IDs are in the SSLv3/TLS space, i.e. carry the 0x0300 prefix. */
1575 assert((id & 0xff000000) == 0x03000000);
1576 return id & 0xffff;
1577}
1578
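/* Minimal sketch (illustrative): the 16-bit value returned above is what goes
 * on the wire, e.g. in the ClientHello cipher suite list. Assumes CBB from
 * <openssl/bytestring.h> is available in this translation unit. */
static int example_add_cipher_suite(CBB *child, const SSL_CIPHER *cipher) {
  return CBB_add_u16(child, ssl_cipher_get_value(cipher));
}
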
David Benjamin71f07942015-04-08 02:36:59 -04001579int SSL_CIPHER_is_AES(const SSL_CIPHER *cipher) {
1580 return (cipher->algorithm_enc & SSL_AES) != 0;
1581}
1582
1583int SSL_CIPHER_has_MD5_HMAC(const SSL_CIPHER *cipher) {
1584 return (cipher->algorithm_mac & SSL_MD5) != 0;
1585}
1586
David Benjaminef793f42015-11-05 18:16:27 -05001587int SSL_CIPHER_has_SHA1_HMAC(const SSL_CIPHER *cipher) {
1588 return (cipher->algorithm_mac & SSL_SHA1) != 0;
1589}
1590
David Benjamina211aee2016-02-24 17:18:44 -05001591int SSL_CIPHER_has_SHA256_HMAC(const SSL_CIPHER *cipher) {
1592 return (cipher->algorithm_mac & SSL_SHA256) != 0;
1593}
1594
David Benjamin71f07942015-04-08 02:36:59 -04001595int SSL_CIPHER_is_AESGCM(const SSL_CIPHER *cipher) {
David Benjaminc0125ef2015-09-09 09:11:07 -04001596 return (cipher->algorithm_enc & (SSL_AES128GCM | SSL_AES256GCM)) != 0;
David Benjamin71f07942015-04-08 02:36:59 -04001597}
1598
David Benjaminef793f42015-11-05 18:16:27 -05001599int SSL_CIPHER_is_AES128GCM(const SSL_CIPHER *cipher) {
1600 return (cipher->algorithm_enc & SSL_AES128GCM) != 0;
1601}
1602
Adam Langleyb00061c2015-11-16 17:44:52 -08001603int SSL_CIPHER_is_AES128CBC(const SSL_CIPHER *cipher) {
1604 return (cipher->algorithm_enc & SSL_AES128) != 0;
1605}
1606
1607int SSL_CIPHER_is_AES256CBC(const SSL_CIPHER *cipher) {
1608 return (cipher->algorithm_enc & SSL_AES256) != 0;
1609}
1610
David Benjamin51a01a52015-10-29 13:19:56 -04001611int SSL_CIPHER_is_CHACHA20POLY1305(const SSL_CIPHER *cipher) {
David Benjamin13414b32015-12-09 23:02:39 -05001612 return (cipher->algorithm_enc &
1613 (SSL_CHACHA20POLY1305 | SSL_CHACHA20POLY1305_OLD)) != 0;
David Benjamin71f07942015-04-08 02:36:59 -04001614}
1615
Matt Braithwaiteaf096752015-09-02 19:48:16 -07001616int SSL_CIPHER_is_NULL(const SSL_CIPHER *cipher) {
1617 return (cipher->algorithm_enc & SSL_eNULL) != 0;
1618}
1619
1620int SSL_CIPHER_is_block_cipher(const SSL_CIPHER *cipher) {
Matthew Braithwaite8aaa9e12016-09-07 15:09:58 -07001621 return (cipher->algorithm_enc & SSL_eNULL) == 0 &&
Matt Braithwaiteaf096752015-09-02 19:48:16 -07001622 cipher->algorithm_mac != SSL_AEAD;
1623}
1624
David Benjaminef793f42015-11-05 18:16:27 -05001625int SSL_CIPHER_is_ECDSA(const SSL_CIPHER *cipher) {
1626 return (cipher->algorithm_auth & SSL_aECDSA) != 0;
1627}
1628
David Benjamin0fc7df52016-06-02 18:36:33 -04001629int SSL_CIPHER_is_DHE(const SSL_CIPHER *cipher) {
1630 return (cipher->algorithm_mkey & SSL_kDHE) != 0;
1631}
1632
David Benjamin4cc36ad2015-12-19 14:23:26 -05001633int SSL_CIPHER_is_ECDHE(const SSL_CIPHER *cipher) {
1634 return (cipher->algorithm_mkey & SSL_kECDHE) != 0;
1635}
1636
Matt Braithwaite053931e2016-05-25 12:06:05 -07001637int SSL_CIPHER_is_CECPQ1(const SSL_CIPHER *cipher) {
1638 return (cipher->algorithm_mkey & SSL_kCECPQ1) != 0;
1639}
1640
David Benjaminef793f42015-11-05 18:16:27 -05001641uint16_t SSL_CIPHER_get_min_version(const SSL_CIPHER *cipher) {
Steven Valdez803c77a2016-09-06 14:13:43 -04001642 if (cipher->algorithm_mkey == SSL_kGENERIC ||
1643 cipher->algorithm_auth == SSL_aGENERIC) {
1644 return TLS1_3_VERSION;
1645 }
1646
David Benjamindcb6ef02015-11-06 15:35:54 -05001647 if (cipher->algorithm_prf != SSL_HANDSHAKE_MAC_DEFAULT) {
1648 /* Cipher suites before TLS 1.2 use the default PRF, while all those added
1649 * afterwards specify a particular hash. */
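    /* For example (illustrative), ECDHE-RSA-AES256-GCM-SHA384 names SHA-384
     * explicitly, so its minimum version is TLS 1.2, while ECDHE-RSA-AES128-SHA
     * uses the default PRF and falls through to SSL3_VERSION below. */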
David Benjaminef793f42015-11-05 18:16:27 -05001650 return TLS1_2_VERSION;
1651 }
1652 return SSL3_VERSION;
1653}
1654
Nick Harper1fd39d82016-06-14 18:14:35 -07001655uint16_t SSL_CIPHER_get_max_version(const SSL_CIPHER *cipher) {
Steven Valdez803c77a2016-09-06 14:13:43 -04001656 if (cipher->algorithm_mkey == SSL_kGENERIC ||
1657 cipher->algorithm_auth == SSL_aGENERIC) {
Nick Harper1fd39d82016-06-14 18:14:35 -07001658 return TLS1_3_VERSION;
1659 }
1660 return TLS1_2_VERSION;
1661}
1662
David Benjamin71f07942015-04-08 02:36:59 -04001663/* Return the name of the cipher in use, or "(NONE)" if |cipher| is NULL. */
1664const char *SSL_CIPHER_get_name(const SSL_CIPHER *cipher) {
1665 if (cipher != NULL) {
1666 return cipher->name;
1667 }
1668
1669 return "(NONE)";
1670}
1671
1672const char *SSL_CIPHER_get_kx_name(const SSL_CIPHER *cipher) {
1673 if (cipher == NULL) {
1674 return "";
1675 }
1676
1677 switch (cipher->algorithm_mkey) {
1678 case SSL_kRSA:
1679 return "RSA";
1680
1681 case SSL_kDHE:
1682 switch (cipher->algorithm_auth) {
1683 case SSL_aRSA:
1684 return "DHE_RSA";
1685 default:
1686 assert(0);
1687 return "UNKNOWN";
1688 }
1689
1690 case SSL_kECDHE:
1691 switch (cipher->algorithm_auth) {
1692 case SSL_aECDSA:
1693 return "ECDHE_ECDSA";
1694 case SSL_aRSA:
1695 return "ECDHE_RSA";
1696 case SSL_aPSK:
1697 return "ECDHE_PSK";
1698 default:
1699 assert(0);
1700 return "UNKNOWN";
1701 }
1702
Matt Braithwaite053931e2016-05-25 12:06:05 -07001703 case SSL_kCECPQ1:
1704 switch (cipher->algorithm_auth) {
1705 case SSL_aECDSA:
1706 return "CECPQ1_ECDSA";
1707 case SSL_aRSA:
1708 return "CECPQ1_RSA";
1709 default:
1710 assert(0);
1711 return "UNKNOWN";
1712 }
1713
David Benjamin71f07942015-04-08 02:36:59 -04001714 case SSL_kPSK:
1715 assert(cipher->algorithm_auth == SSL_aPSK);
1716 return "PSK";
1717
Steven Valdez803c77a2016-09-06 14:13:43 -04001718 case SSL_kGENERIC:
1719 assert(cipher->algorithm_auth == SSL_aGENERIC);
1720 return "GENERIC";
1721
David Benjamin71f07942015-04-08 02:36:59 -04001722 default:
1723 assert(0);
1724 return "UNKNOWN";
1725 }
1726}
1727
1728static const char *ssl_cipher_get_enc_name(const SSL_CIPHER *cipher) {
1729 switch (cipher->algorithm_enc) {
1730 case SSL_3DES:
1731 return "3DES_EDE_CBC";
David Benjamin71f07942015-04-08 02:36:59 -04001732 case SSL_AES128:
1733 return "AES_128_CBC";
1734 case SSL_AES256:
1735 return "AES_256_CBC";
1736 case SSL_AES128GCM:
1737 return "AES_128_GCM";
1738 case SSL_AES256GCM:
1739 return "AES_256_GCM";
David Benjamin13414b32015-12-09 23:02:39 -05001740 case SSL_CHACHA20POLY1305:
Brian Smith271777f2015-10-03 13:53:33 -10001741 case SSL_CHACHA20POLY1305_OLD:
David Benjamin71f07942015-04-08 02:36:59 -04001742 return "CHACHA20_POLY1305";
1744 default:
1745 assert(0);
1746 return "UNKNOWN";
1747 }
1748}
1749
1750static const char *ssl_cipher_get_prf_name(const SSL_CIPHER *cipher) {
David Benjaminb0883312015-08-06 09:54:13 -04001751 switch (cipher->algorithm_prf) {
1752 case SSL_HANDSHAKE_MAC_DEFAULT:
1753 /* Before TLS 1.2, the PRF component is the hash used in the HMAC, which is
1754 * only ever MD5 or SHA-1. */
1755 switch (cipher->algorithm_mac) {
1756 case SSL_MD5:
1757 return "MD5";
1758 case SSL_SHA1:
1759 return "SHA";
1760 }
1761 break;
1762 case SSL_HANDSHAKE_MAC_SHA256:
1763 return "SHA256";
1764 case SSL_HANDSHAKE_MAC_SHA384:
1765 return "SHA384";
David Benjamin71f07942015-04-08 02:36:59 -04001766 }
David Benjaminb0883312015-08-06 09:54:13 -04001767 assert(0);
1768 return "UNKNOWN";
David Benjamin71f07942015-04-08 02:36:59 -04001769}
1770
1771char *SSL_CIPHER_get_rfc_name(const SSL_CIPHER *cipher) {
1772 if (cipher == NULL) {
1773 return NULL;
1774 }
1775
1776 const char *kx_name = SSL_CIPHER_get_kx_name(cipher);
1777 const char *enc_name = ssl_cipher_get_enc_name(cipher);
1778 const char *prf_name = ssl_cipher_get_prf_name(cipher);
1779
Steven Valdez803c77a2016-09-06 14:13:43 -04001780 /* The final name is TLS_{kx_name}_WITH_{enc_name}_{prf_name} or
1781 * TLS_{enc_name}_{prf_name} depending on whether the cipher is AEAD-only. */
1782 size_t len = 4 + strlen(enc_name) + 1 + strlen(prf_name) + 1;
1783
1784 if (cipher->algorithm_mkey != SSL_kGENERIC) {
1785 len += strlen(kx_name) + 6;
1786 }
1787
David Benjamin71f07942015-04-08 02:36:59 -04001788 char *ret = OPENSSL_malloc(len);
1789 if (ret == NULL) {
1790 return NULL;
1791 }
Steven Valdez803c77a2016-09-06 14:13:43 -04001792
David Benjamin71f07942015-04-08 02:36:59 -04001793 if (BUF_strlcpy(ret, "TLS_", len) >= len ||
Steven Valdez803c77a2016-09-06 14:13:43 -04001794 (cipher->algorithm_mkey != SSL_kGENERIC &&
1795 (BUF_strlcat(ret, kx_name, len) >= len ||
1796 BUF_strlcat(ret, "_WITH_", len) >= len)) ||
David Benjamin71f07942015-04-08 02:36:59 -04001797 BUF_strlcat(ret, enc_name, len) >= len ||
1798 BUF_strlcat(ret, "_", len) >= len ||
1799 BUF_strlcat(ret, prf_name, len) >= len) {
1800 assert(0);
1801 OPENSSL_free(ret);
1802 return NULL;
1803 }
Steven Valdez803c77a2016-09-06 14:13:43 -04001804
David Benjamin71f07942015-04-08 02:36:59 -04001805 assert(strlen(ret) + 1 == len);
1806 return ret;
1807}
1808
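/* Minimal usage sketch (illustrative): the returned string is owned by the
 * caller and must be released with OPENSSL_free. For example, the suite named
 * "ECDHE-RSA-AES128-GCM-SHA256" comes back as
 * "TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256". Assumes <openssl/bio.h>. */
static void example_print_rfc_name(const SSL_CIPHER *cipher, BIO *out) {
  char *rfc_name = SSL_CIPHER_get_rfc_name(cipher);
  if (rfc_name != NULL) {
    BIO_printf(out, "%s\n", rfc_name);
    OPENSSL_free(rfc_name);
  }
}
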
1809int SSL_CIPHER_get_bits(const SSL_CIPHER *cipher, int *out_alg_bits) {
1810 if (cipher == NULL) {
1811 return 0;
1812 }
1813
David Benjamin9f2e2772015-11-18 09:59:43 -05001814 int alg_bits, strength_bits;
1815 switch (cipher->algorithm_enc) {
1816 case SSL_AES128:
1817 case SSL_AES128GCM:
David Benjamin9f2e2772015-11-18 09:59:43 -05001818 alg_bits = 128;
1819 strength_bits = 128;
1820 break;
1821
1822 case SSL_AES256:
1823 case SSL_AES256GCM:
1824#if !defined(BORINGSSL_ANDROID_SYSTEM)
1825 case SSL_CHACHA20POLY1305_OLD:
1826#endif
David Benjamin13414b32015-12-09 23:02:39 -05001827 case SSL_CHACHA20POLY1305:
David Benjamin9f2e2772015-11-18 09:59:43 -05001828 alg_bits = 256;
1829 strength_bits = 256;
1830 break;
1831
1832 case SSL_3DES:
1833 alg_bits = 168;
1834 strength_bits = 112;
1835 break;
1836
1837 case SSL_eNULL:
1838 alg_bits = 0;
1839 strength_bits = 0;
1840 break;
1841
1842 default:
1843 assert(0);
1844 alg_bits = 0;
1845 strength_bits = 0;
David Benjamin71f07942015-04-08 02:36:59 -04001846 }
David Benjamin9f2e2772015-11-18 09:59:43 -05001847
1848 if (out_alg_bits != NULL) {
1849 *out_alg_bits = alg_bits;
1850 }
1851 return strength_bits;
David Benjamin71f07942015-04-08 02:36:59 -04001852}
1853
Adam Langleyfcf25832014-12-18 17:42:32 -08001854const char *SSL_CIPHER_description(const SSL_CIPHER *cipher, char *buf,
1855 int len) {
Adam Langleyfcf25832014-12-18 17:42:32 -08001856 const char *kx, *au, *enc, *mac;
David Benjamindcb6ef02015-11-06 15:35:54 -05001857 uint32_t alg_mkey, alg_auth, alg_enc, alg_mac;
Adam Langley95c29f32014-06-20 12:00:00 -07001858
Adam Langleyfcf25832014-12-18 17:42:32 -08001859 alg_mkey = cipher->algorithm_mkey;
1860 alg_auth = cipher->algorithm_auth;
1861 alg_enc = cipher->algorithm_enc;
1862 alg_mac = cipher->algorithm_mac;
Adam Langley95c29f32014-06-20 12:00:00 -07001863
Adam Langleyfcf25832014-12-18 17:42:32 -08001864 switch (alg_mkey) {
1865 case SSL_kRSA:
1866 kx = "RSA";
1867 break;
Adam Langley95c29f32014-06-20 12:00:00 -07001868
David Benjamin7061e282015-03-19 11:10:48 -04001869 case SSL_kDHE:
Adam Langleyfcf25832014-12-18 17:42:32 -08001870 kx = "DH";
1871 break;
Adam Langley95c29f32014-06-20 12:00:00 -07001872
David Benjamin7061e282015-03-19 11:10:48 -04001873 case SSL_kECDHE:
Adam Langleyfcf25832014-12-18 17:42:32 -08001874 kx = "ECDH";
1875 break;
Adam Langley95c29f32014-06-20 12:00:00 -07001876
Matt Braithwaite053931e2016-05-25 12:06:05 -07001877 case SSL_kCECPQ1:
1878 kx = "CECPQ1";
1879 break;
1880
Adam Langleyfcf25832014-12-18 17:42:32 -08001881 case SSL_kPSK:
1882 kx = "PSK";
1883 break;
Adam Langley95c29f32014-06-20 12:00:00 -07001884
Steven Valdez803c77a2016-09-06 14:13:43 -04001885 case SSL_kGENERIC:
1886 kx = "GENERIC";
1887 break;
1888
Adam Langleyfcf25832014-12-18 17:42:32 -08001889 default:
1890 kx = "unknown";
1891 }
Adam Langley95c29f32014-06-20 12:00:00 -07001892
Adam Langleyfcf25832014-12-18 17:42:32 -08001893 switch (alg_auth) {
1894 case SSL_aRSA:
1895 au = "RSA";
1896 break;
Adam Langley95c29f32014-06-20 12:00:00 -07001897
Adam Langleyfcf25832014-12-18 17:42:32 -08001898 case SSL_aECDSA:
1899 au = "ECDSA";
1900 break;
Adam Langley4d4bff82014-06-20 12:00:00 -07001901
Adam Langleyfcf25832014-12-18 17:42:32 -08001902 case SSL_aPSK:
1903 au = "PSK";
1904 break;
Adam Langley4d4bff82014-06-20 12:00:00 -07001905
Steven Valdez803c77a2016-09-06 14:13:43 -04001906 case SSL_aGENERIC:
1907 au = "GENERIC";
1908 break;
1909
Adam Langleyfcf25832014-12-18 17:42:32 -08001910 default:
1911 au = "unknown";
1912 break;
1913 }
Adam Langleyde0b2022014-06-20 12:00:00 -07001914
Adam Langleyfcf25832014-12-18 17:42:32 -08001915 switch (alg_enc) {
1916 case SSL_3DES:
1917 enc = "3DES(168)";
1918 break;
Adam Langley95c29f32014-06-20 12:00:00 -07001919
Adam Langleyfcf25832014-12-18 17:42:32 -08001920 case SSL_AES128:
1921 enc = "AES(128)";
1922 break;
1923
1924 case SSL_AES256:
1925 enc = "AES(256)";
1926 break;
1927
1928 case SSL_AES128GCM:
1929 enc = "AESGCM(128)";
1930 break;
1931
1932 case SSL_AES256GCM:
1933 enc = "AESGCM(256)";
1934 break;
1935
Brian Smith271777f2015-10-03 13:53:33 -10001936 case SSL_CHACHA20POLY1305_OLD:
David Benjamin13414b32015-12-09 23:02:39 -05001937 enc = "ChaCha20-Poly1305-Old";
1938 break;
1939
1940 case SSL_CHACHA20POLY1305:
Adam Langleyfcf25832014-12-18 17:42:32 -08001941 enc = "ChaCha20-Poly1305";
1942 break;
1943
Matt Braithwaiteaf096752015-09-02 19:48:16 -07001944 case SSL_eNULL:
1945      enc = "None";
1946 break;
1947
Adam Langleyfcf25832014-12-18 17:42:32 -08001948 default:
1949 enc = "unknown";
1950 break;
1951 }
1952
1953 switch (alg_mac) {
1954 case SSL_MD5:
1955 mac = "MD5";
1956 break;
1957
1958 case SSL_SHA1:
1959 mac = "SHA1";
1960 break;
1961
1962 case SSL_SHA256:
1963 mac = "SHA256";
1964 break;
1965
1966 case SSL_SHA384:
1967 mac = "SHA384";
1968 break;
1969
1970 case SSL_AEAD:
1971 mac = "AEAD";
1972 break;
1973
1974 default:
1975 mac = "unknown";
1976 break;
1977 }
1978
1979 if (buf == NULL) {
1980 len = 128;
1981 buf = OPENSSL_malloc(len);
David Benjamin1eed2c02015-02-08 23:20:06 -05001982 if (buf == NULL) {
1983 return NULL;
1984 }
Adam Langleyfcf25832014-12-18 17:42:32 -08001985 } else if (len < 128) {
1986 return "Buffer too small";
1987 }
1988
Brian Smith0687bdf2016-01-17 09:18:26 -10001989 BIO_snprintf(buf, len, "%-23s Kx=%-8s Au=%-4s Enc=%-9s Mac=%-4s\n",
1990 cipher->name, kx, au, enc, mac);
Adam Langleyfcf25832014-12-18 17:42:32 -08001991 return buf;
1992}
1993
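/* Minimal usage sketch (illustrative): the caller-provided buffer must be at
 * least 128 bytes or the function returns "Buffer too small". Assumes
 * <openssl/bio.h> is available. */
static void example_describe_cipher(const SSL_CIPHER *cipher, BIO *out) {
  char buf[128];
  BIO_puts(out, SSL_CIPHER_description(cipher, buf, (int)sizeof(buf)));
}
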
David Benjamin71f07942015-04-08 02:36:59 -04001994const char *SSL_CIPHER_get_version(const SSL_CIPHER *cipher) {
1995 return "TLSv1/SSLv3";
Adam Langleyfcf25832014-12-18 17:42:32 -08001996}
1997
Matt Braithwaite6a1275b2015-06-26 12:09:10 -07001998COMP_METHOD *SSL_COMP_get_compression_methods(void) { return NULL; }
Adam Langleyfcf25832014-12-18 17:42:32 -08001999
Matt Braithwaite6a1275b2015-06-26 12:09:10 -07002000int SSL_COMP_add_compression_method(int id, COMP_METHOD *cm) { return 1; }
Adam Langleyfcf25832014-12-18 17:42:32 -08002001
Matt Braithwaite6a1275b2015-06-26 12:09:10 -07002002const char *SSL_COMP_get_name(const COMP_METHOD *comp) { return NULL; }
Adam Langley95c29f32014-06-20 12:00:00 -07002003
Adam Langley3e9e0432016-10-03 15:58:07 -07002004void SSL_COMP_free_compression_methods(void) {}
2005
David Benjamind1d80782015-07-05 11:54:09 -04002006int ssl_cipher_get_key_type(const SSL_CIPHER *cipher) {
David Benjamin71f07942015-04-08 02:36:59 -04002007 uint32_t alg_a = cipher->algorithm_auth;
Adam Langley95c29f32014-06-20 12:00:00 -07002008
Adam Langleyfcf25832014-12-18 17:42:32 -08002009 if (alg_a & SSL_aECDSA) {
David Benjamind1d80782015-07-05 11:54:09 -04002010 return EVP_PKEY_EC;
Adam Langleyfcf25832014-12-18 17:42:32 -08002011 } else if (alg_a & SSL_aRSA) {
David Benjamind1d80782015-07-05 11:54:09 -04002012 return EVP_PKEY_RSA;
Adam Langleyfcf25832014-12-18 17:42:32 -08002013 }
Adam Langley95c29f32014-06-20 12:00:00 -07002014
David Benjamind1d80782015-07-05 11:54:09 -04002015 return EVP_PKEY_NONE;
Adam Langleyfcf25832014-12-18 17:42:32 -08002016}
David Benjamin9c651c92014-07-12 13:27:45 -04002017
David Benjaminc032dfa2016-05-12 14:54:57 -04002018int ssl_cipher_uses_certificate_auth(const SSL_CIPHER *cipher) {
2019 return (cipher->algorithm_auth & SSL_aCERT) != 0;
Adam Langleyfcf25832014-12-18 17:42:32 -08002020}
2021
Adam Langleyfcf25832014-12-18 17:42:32 -08002022int ssl_cipher_requires_server_key_exchange(const SSL_CIPHER *cipher) {
2023  /* Ephemeral key exchanges (DHE, ECDHE, and CECPQ1) require a ServerKeyExchange. */
Matt Braithwaite053931e2016-05-25 12:06:05 -07002024 if (cipher->algorithm_mkey & SSL_kDHE ||
2025 cipher->algorithm_mkey & SSL_kECDHE ||
2026 cipher->algorithm_mkey & SSL_kCECPQ1) {
Adam Langleyfcf25832014-12-18 17:42:32 -08002027 return 1;
2028 }
2029
2030 /* It is optional in all others. */
2031 return 0;
2032}
David Benjaminb8d28cf2015-07-28 21:34:45 -04002033
2034size_t ssl_cipher_get_record_split_len(const SSL_CIPHER *cipher) {
2035 size_t block_size;
2036 switch (cipher->algorithm_enc) {
2037 case SSL_3DES:
2038 block_size = 8;
2039 break;
2040 case SSL_AES128:
2041 case SSL_AES256:
2042 block_size = 16;
2043 break;
2044 default:
2045 return 0;
2046 }
2047
2048 size_t mac_len;
2049 switch (cipher->algorithm_mac) {
2050 case SSL_MD5:
2051 mac_len = MD5_DIGEST_LENGTH;
2052 break;
2053 case SSL_SHA1:
2054 mac_len = SHA_DIGEST_LENGTH;
2055 break;
2056 default:
2057 return 0;
2058 }
2059
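  /* The split record carries one byte of plaintext plus the MAC, rounded up to
   * a whole number of cipher blocks. Worked example (illustrative): for
   * AES_128_CBC with HMAC-SHA1, block_size is 16 and mac_len is 20, so ret
   * starts at 1 + 20 = 21 and rounds up to 21 + (16 - 21 % 16) = 32. */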
2060 size_t ret = 1 + mac_len;
2061 ret += block_size - (ret % block_size);
2062 return ret;
2063}