/* Copyright (C) 1995-1998 Eric Young (eay@cryptsoft.com)
 * All rights reserved.
 *
 * This package is an SSL implementation written
 * by Eric Young (eay@cryptsoft.com).
 * The implementation was written so as to conform with Netscapes SSL.
 *
 * This library is free for commercial and non-commercial use as long as
 * the following conditions are aheared to.  The following conditions
 * apply to all code found in this distribution, be it the RC4, RSA,
 * lhash, DES, etc., code; not just the SSL code.  The SSL documentation
 * included with this distribution is covered by the same copyright terms
 * except that the holder is Tim Hudson (tjh@cryptsoft.com).
 *
 * Copyright remains Eric Young's, and as such any Copyright notices in
 * the code are not to be removed.
 * If this package is used in a product, Eric Young should be given attribution
 * as the author of the parts of the library used.
 * This can be in the form of a textual message at program startup or
 * in documentation (online or textual) provided with the package.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. All advertising materials mentioning features or use of this software
 *    must display the following acknowledgement:
 *    "This product includes cryptographic software written by
 *     Eric Young (eay@cryptsoft.com)"
 *    The word 'cryptographic' can be left out if the rouines from the library
 *    being used are not cryptographic related :-).
 * 4. If you include any Windows specific code (or a derivative thereof) from
 *    the apps directory (application code) you must include an acknowledgement:
 *    "This product includes software written by Tim Hudson (tjh@cryptsoft.com)"
 *
 * THIS SOFTWARE IS PROVIDED BY ERIC YOUNG ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 * The licence and distribution terms for any publically available version or
 * derivative of this code cannot be changed.  i.e. this code cannot simply be
 * copied and put under another distribution licence
 * [including the GNU Public Licence.]
 */
/* ====================================================================
 * Copyright (c) 1998-2007 The OpenSSL Project.  All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 *
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in
 *    the documentation and/or other materials provided with the
 *    distribution.
 *
 * 3. All advertising materials mentioning features or use of this
 *    software must display the following acknowledgment:
 *    "This product includes software developed by the OpenSSL Project
 *    for use in the OpenSSL Toolkit. (http://www.openssl.org/)"
 *
 * 4. The names "OpenSSL Toolkit" and "OpenSSL Project" must not be used to
 *    endorse or promote products derived from this software without
 *    prior written permission. For written permission, please contact
 *    openssl-core@openssl.org.
 *
 * 5. Products derived from this software may not be called "OpenSSL"
 *    nor may "OpenSSL" appear in their names without prior written
 *    permission of the OpenSSL Project.
 *
 * 6. Redistributions of any form whatsoever must retain the following
 *    acknowledgment:
 *    "This product includes software developed by the OpenSSL Project
 *    for use in the OpenSSL Toolkit (http://www.openssl.org/)"
 *
 * THIS SOFTWARE IS PROVIDED BY THE OpenSSL PROJECT ``AS IS'' AND ANY
 * EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE OpenSSL PROJECT OR
 * ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
 * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
 * OF THE POSSIBILITY OF SUCH DAMAGE.
 * ====================================================================
 *
 * This product includes cryptographic software written by Eric Young
 * (eay@cryptsoft.com).  This product includes software written by Tim
 * Hudson (tjh@cryptsoft.com).
 *
 */
/* ====================================================================
 * Copyright 2002 Sun Microsystems, Inc. ALL RIGHTS RESERVED.
 * ECC cipher suite support in OpenSSL originally developed by
 * SUN MICROSYSTEMS, INC., and contributed to the OpenSSL project.
 */
/* ====================================================================
 * Copyright 2005 Nokia. All rights reserved.
 *
 * The portions of the attached software ("Contribution") is developed by
 * Nokia Corporation and is licensed pursuant to the OpenSSL open source
 * license.
 *
 * The Contribution, originally written by Mika Kousa and Pasi Eronen of
 * Nokia Corporation, consists of the "PSK" (Pre-Shared Key) ciphersuites
 * support (see RFC 4279) to OpenSSL.
 *
 * No patent licenses or other rights except those expressly stated in
 * the OpenSSL open source license shall be deemed granted or received
 * expressly, by implication, estoppel, or otherwise.
 *
 * No assurances are provided by Nokia that the Contribution does not
 * infringe the patent or other intellectual property rights of any third
 * party or that the license provides you with all the necessary rights
 * to make use of the Contribution.
 *
 * THE SOFTWARE IS PROVIDED "AS IS" WITHOUT WARRANTY OF ANY KIND. IN
 * ADDITION TO THE DISCLAIMERS INCLUDED IN THE LICENSE, NOKIA
 * SPECIFICALLY DISCLAIMS ANY LIABILITY FOR CLAIMS BROUGHT BY YOU OR ANY
 * OTHER ENTITY BASED ON INFRINGEMENT OF INTELLECTUAL PROPERTY RIGHTS OR
 * OTHERWISE. */

#include <openssl/ssl.h>

#include <assert.h>
#include <string.h>

#include <openssl/buf.h>
#include <openssl/err.h>
#include <openssl/md5.h>
#include <openssl/mem.h>
#include <openssl/sha.h>
#include <openssl/stack.h>

#include "internal.h"
#include "../crypto/internal.h"


/* kCiphers is an array of all supported ciphers, sorted by id. */
static const SSL_CIPHER kCiphers[] = {
    /* The RSA ciphers */
    /* Cipher 02 */
    {
     SSL3_TXT_RSA_NULL_SHA,
     SSL3_CK_RSA_NULL_SHA,
     SSL_kRSA,
     SSL_aRSA,
     SSL_eNULL,
     SSL_SHA1,
     SSL_HANDSHAKE_MAC_DEFAULT,
    },

    /* Cipher 04 */
    {
     SSL3_TXT_RSA_RC4_128_MD5,
     SSL3_CK_RSA_RC4_128_MD5,
     SSL_kRSA,
     SSL_aRSA,
     SSL_RC4,
     SSL_MD5,
     SSL_HANDSHAKE_MAC_DEFAULT,
    },

    /* Cipher 05 */
    {
     SSL3_TXT_RSA_RC4_128_SHA,
     SSL3_CK_RSA_RC4_128_SHA,
     SSL_kRSA,
     SSL_aRSA,
     SSL_RC4,
     SSL_SHA1,
     SSL_HANDSHAKE_MAC_DEFAULT,
    },

    /* Cipher 0A */
    {
     SSL3_TXT_RSA_DES_192_CBC3_SHA,
     SSL3_CK_RSA_DES_192_CBC3_SHA,
     SSL_kRSA,
     SSL_aRSA,
     SSL_3DES,
     SSL_SHA1,
     SSL_HANDSHAKE_MAC_DEFAULT,
    },


    /* New AES ciphersuites */

    /* Cipher 2F */
    {
     TLS1_TXT_RSA_WITH_AES_128_SHA,
     TLS1_CK_RSA_WITH_AES_128_SHA,
     SSL_kRSA,
     SSL_aRSA,
     SSL_AES128,
     SSL_SHA1,
     SSL_HANDSHAKE_MAC_DEFAULT,
    },

    /* Cipher 33 */
    {
     TLS1_TXT_DHE_RSA_WITH_AES_128_SHA,
     TLS1_CK_DHE_RSA_WITH_AES_128_SHA,
     SSL_kDHE,
     SSL_aRSA,
     SSL_AES128,
     SSL_SHA1,
     SSL_HANDSHAKE_MAC_DEFAULT,
    },

    /* Cipher 35 */
    {
     TLS1_TXT_RSA_WITH_AES_256_SHA,
     TLS1_CK_RSA_WITH_AES_256_SHA,
     SSL_kRSA,
     SSL_aRSA,
     SSL_AES256,
     SSL_SHA1,
     SSL_HANDSHAKE_MAC_DEFAULT,
    },

    /* Cipher 39 */
    {
     TLS1_TXT_DHE_RSA_WITH_AES_256_SHA,
     TLS1_CK_DHE_RSA_WITH_AES_256_SHA,
     SSL_kDHE,
     SSL_aRSA,
     SSL_AES256,
     SSL_SHA1,
     SSL_HANDSHAKE_MAC_DEFAULT,
    },


    /* TLS v1.2 ciphersuites */

    /* Cipher 3C */
    {
     TLS1_TXT_RSA_WITH_AES_128_SHA256,
     TLS1_CK_RSA_WITH_AES_128_SHA256,
     SSL_kRSA,
     SSL_aRSA,
     SSL_AES128,
     SSL_SHA256,
     SSL_HANDSHAKE_MAC_SHA256,
    },

    /* Cipher 3D */
    {
     TLS1_TXT_RSA_WITH_AES_256_SHA256,
     TLS1_CK_RSA_WITH_AES_256_SHA256,
     SSL_kRSA,
     SSL_aRSA,
     SSL_AES256,
     SSL_SHA256,
     SSL_HANDSHAKE_MAC_SHA256,
    },

    /* Cipher 67 */
    {
     TLS1_TXT_DHE_RSA_WITH_AES_128_SHA256,
     TLS1_CK_DHE_RSA_WITH_AES_128_SHA256,
     SSL_kDHE,
     SSL_aRSA,
     SSL_AES128,
     SSL_SHA256,
     SSL_HANDSHAKE_MAC_SHA256,
    },

    /* Cipher 6B */
    {
     TLS1_TXT_DHE_RSA_WITH_AES_256_SHA256,
     TLS1_CK_DHE_RSA_WITH_AES_256_SHA256,
     SSL_kDHE,
     SSL_aRSA,
     SSL_AES256,
     SSL_SHA256,
     SSL_HANDSHAKE_MAC_SHA256,
    },

    /* PSK cipher suites. */

    /* Cipher 8A */
    {
     TLS1_TXT_PSK_WITH_RC4_128_SHA,
     TLS1_CK_PSK_WITH_RC4_128_SHA,
     SSL_kPSK,
     SSL_aPSK,
     SSL_RC4,
     SSL_SHA1,
     SSL_HANDSHAKE_MAC_DEFAULT,
    },

    /* Cipher 8C */
    {
     TLS1_TXT_PSK_WITH_AES_128_CBC_SHA,
     TLS1_CK_PSK_WITH_AES_128_CBC_SHA,
     SSL_kPSK,
     SSL_aPSK,
     SSL_AES128,
     SSL_SHA1,
     SSL_HANDSHAKE_MAC_DEFAULT,
    },

    /* Cipher 8D */
    {
     TLS1_TXT_PSK_WITH_AES_256_CBC_SHA,
     TLS1_CK_PSK_WITH_AES_256_CBC_SHA,
     SSL_kPSK,
     SSL_aPSK,
     SSL_AES256,
     SSL_SHA1,
     SSL_HANDSHAKE_MAC_DEFAULT,
    },

    /* GCM ciphersuites from RFC5288 */

    /* Cipher 9C */
    {
     TLS1_TXT_RSA_WITH_AES_128_GCM_SHA256,
     TLS1_CK_RSA_WITH_AES_128_GCM_SHA256,
     SSL_kRSA,
     SSL_aRSA,
     SSL_AES128GCM,
     SSL_AEAD,
     SSL_HANDSHAKE_MAC_SHA256,
    },

    /* Cipher 9D */
    {
     TLS1_TXT_RSA_WITH_AES_256_GCM_SHA384,
     TLS1_CK_RSA_WITH_AES_256_GCM_SHA384,
     SSL_kRSA,
     SSL_aRSA,
     SSL_AES256GCM,
     SSL_AEAD,
     SSL_HANDSHAKE_MAC_SHA384,
    },

    /* Cipher 9E */
    {
     TLS1_TXT_DHE_RSA_WITH_AES_128_GCM_SHA256,
     TLS1_CK_DHE_RSA_WITH_AES_128_GCM_SHA256,
     SSL_kDHE,
     SSL_aRSA,
     SSL_AES128GCM,
     SSL_AEAD,
     SSL_HANDSHAKE_MAC_SHA256,
    },

    /* Cipher 9F */
    {
     TLS1_TXT_DHE_RSA_WITH_AES_256_GCM_SHA384,
     TLS1_CK_DHE_RSA_WITH_AES_256_GCM_SHA384,
     SSL_kDHE,
     SSL_aRSA,
     SSL_AES256GCM,
     SSL_AEAD,
     SSL_HANDSHAKE_MAC_SHA384,
    },

    /* CECPQ1 (combined elliptic curve + post-quantum) suites. */

    /* Cipher 16B7 */
    {
     TLS1_TXT_CECPQ1_RSA_WITH_CHACHA20_POLY1305_SHA256,
     TLS1_CK_CECPQ1_RSA_WITH_CHACHA20_POLY1305_SHA256,
     SSL_kCECPQ1,
     SSL_aRSA,
     SSL_CHACHA20POLY1305,
     SSL_AEAD,
     SSL_HANDSHAKE_MAC_SHA256,
    },

    /* Cipher 16B8 */
    {
     TLS1_TXT_CECPQ1_ECDSA_WITH_CHACHA20_POLY1305_SHA256,
     TLS1_CK_CECPQ1_ECDSA_WITH_CHACHA20_POLY1305_SHA256,
     SSL_kCECPQ1,
     SSL_aECDSA,
     SSL_CHACHA20POLY1305,
     SSL_AEAD,
     SSL_HANDSHAKE_MAC_SHA256,
    },

    /* Cipher 16B9 */
    {
     TLS1_TXT_CECPQ1_RSA_WITH_AES_256_GCM_SHA384,
     TLS1_CK_CECPQ1_RSA_WITH_AES_256_GCM_SHA384,
     SSL_kCECPQ1,
     SSL_aRSA,
     SSL_AES256GCM,
     SSL_AEAD,
     SSL_HANDSHAKE_MAC_SHA384,
    },

    /* Cipher 16BA */
    {
     TLS1_TXT_CECPQ1_ECDSA_WITH_AES_256_GCM_SHA384,
     TLS1_CK_CECPQ1_ECDSA_WITH_AES_256_GCM_SHA384,
     SSL_kCECPQ1,
     SSL_aECDSA,
     SSL_AES256GCM,
     SSL_AEAD,
     SSL_HANDSHAKE_MAC_SHA384,
    },

    /* Cipher C007 */
    {
     TLS1_TXT_ECDHE_ECDSA_WITH_RC4_128_SHA,
     TLS1_CK_ECDHE_ECDSA_WITH_RC4_128_SHA,
     SSL_kECDHE,
     SSL_aECDSA,
     SSL_RC4,
     SSL_SHA1,
     SSL_HANDSHAKE_MAC_DEFAULT,
    },

    /* Cipher C009 */
    {
     TLS1_TXT_ECDHE_ECDSA_WITH_AES_128_CBC_SHA,
     TLS1_CK_ECDHE_ECDSA_WITH_AES_128_CBC_SHA,
     SSL_kECDHE,
     SSL_aECDSA,
     SSL_AES128,
     SSL_SHA1,
     SSL_HANDSHAKE_MAC_DEFAULT,
    },

    /* Cipher C00A */
    {
     TLS1_TXT_ECDHE_ECDSA_WITH_AES_256_CBC_SHA,
     TLS1_CK_ECDHE_ECDSA_WITH_AES_256_CBC_SHA,
     SSL_kECDHE,
     SSL_aECDSA,
     SSL_AES256,
     SSL_SHA1,
     SSL_HANDSHAKE_MAC_DEFAULT,
    },

    /* Cipher C011 */
    {
     TLS1_TXT_ECDHE_RSA_WITH_RC4_128_SHA,
     TLS1_CK_ECDHE_RSA_WITH_RC4_128_SHA,
     SSL_kECDHE,
     SSL_aRSA,
     SSL_RC4,
     SSL_SHA1,
     SSL_HANDSHAKE_MAC_DEFAULT,
    },

    /* Cipher C013 */
    {
     TLS1_TXT_ECDHE_RSA_WITH_AES_128_CBC_SHA,
     TLS1_CK_ECDHE_RSA_WITH_AES_128_CBC_SHA,
     SSL_kECDHE,
     SSL_aRSA,
     SSL_AES128,
     SSL_SHA1,
     SSL_HANDSHAKE_MAC_DEFAULT,
    },

    /* Cipher C014 */
    {
     TLS1_TXT_ECDHE_RSA_WITH_AES_256_CBC_SHA,
     TLS1_CK_ECDHE_RSA_WITH_AES_256_CBC_SHA,
     SSL_kECDHE,
     SSL_aRSA,
     SSL_AES256,
     SSL_SHA1,
     SSL_HANDSHAKE_MAC_DEFAULT,
    },


    /* HMAC based TLS v1.2 ciphersuites from RFC5289 */

    /* Cipher C023 */
    {
     TLS1_TXT_ECDHE_ECDSA_WITH_AES_128_SHA256,
     TLS1_CK_ECDHE_ECDSA_WITH_AES_128_SHA256,
     SSL_kECDHE,
     SSL_aECDSA,
     SSL_AES128,
     SSL_SHA256,
     SSL_HANDSHAKE_MAC_SHA256,
    },

    /* Cipher C024 */
    {
     TLS1_TXT_ECDHE_ECDSA_WITH_AES_256_SHA384,
     TLS1_CK_ECDHE_ECDSA_WITH_AES_256_SHA384,
     SSL_kECDHE,
     SSL_aECDSA,
     SSL_AES256,
     SSL_SHA384,
     SSL_HANDSHAKE_MAC_SHA384,
    },

    /* Cipher C027 */
    {
     TLS1_TXT_ECDHE_RSA_WITH_AES_128_SHA256,
     TLS1_CK_ECDHE_RSA_WITH_AES_128_SHA256,
     SSL_kECDHE,
     SSL_aRSA,
     SSL_AES128,
     SSL_SHA256,
     SSL_HANDSHAKE_MAC_SHA256,
    },

    /* Cipher C028 */
    {
     TLS1_TXT_ECDHE_RSA_WITH_AES_256_SHA384,
     TLS1_CK_ECDHE_RSA_WITH_AES_256_SHA384,
     SSL_kECDHE,
     SSL_aRSA,
     SSL_AES256,
     SSL_SHA384,
     SSL_HANDSHAKE_MAC_SHA384,
    },


    /* GCM based TLS v1.2 ciphersuites from RFC5289 */

    /* Cipher C02B */
    {
     TLS1_TXT_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256,
     TLS1_CK_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256,
     SSL_kECDHE,
     SSL_aECDSA,
     SSL_AES128GCM,
     SSL_AEAD,
     SSL_HANDSHAKE_MAC_SHA256,
    },

    /* Cipher C02C */
    {
     TLS1_TXT_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384,
     TLS1_CK_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384,
     SSL_kECDHE,
     SSL_aECDSA,
     SSL_AES256GCM,
     SSL_AEAD,
     SSL_HANDSHAKE_MAC_SHA384,
    },

    /* Cipher C02F */
    {
     TLS1_TXT_ECDHE_RSA_WITH_AES_128_GCM_SHA256,
     TLS1_CK_ECDHE_RSA_WITH_AES_128_GCM_SHA256,
     SSL_kECDHE,
     SSL_aRSA,
     SSL_AES128GCM,
     SSL_AEAD,
     SSL_HANDSHAKE_MAC_SHA256,
    },

    /* Cipher C030 */
    {
     TLS1_TXT_ECDHE_RSA_WITH_AES_256_GCM_SHA384,
     TLS1_CK_ECDHE_RSA_WITH_AES_256_GCM_SHA384,
     SSL_kECDHE,
     SSL_aRSA,
     SSL_AES256GCM,
     SSL_AEAD,
     SSL_HANDSHAKE_MAC_SHA384,
    },

    /* ECDHE-PSK cipher suites. */

    /* Cipher C035 */
    {
     TLS1_TXT_ECDHE_PSK_WITH_AES_128_CBC_SHA,
     TLS1_CK_ECDHE_PSK_WITH_AES_128_CBC_SHA,
     SSL_kECDHE,
     SSL_aPSK,
     SSL_AES128,
     SSL_SHA1,
     SSL_HANDSHAKE_MAC_DEFAULT,
    },

    /* Cipher C036 */
    {
     TLS1_TXT_ECDHE_PSK_WITH_AES_256_CBC_SHA,
     TLS1_CK_ECDHE_PSK_WITH_AES_256_CBC_SHA,
     SSL_kECDHE,
     SSL_aPSK,
     SSL_AES256,
     SSL_SHA1,
     SSL_HANDSHAKE_MAC_DEFAULT,
    },

    /* ChaCha20-Poly1305 cipher suites. */

#if !defined(BORINGSSL_ANDROID_SYSTEM)
    {
     TLS1_TXT_ECDHE_RSA_WITH_CHACHA20_POLY1305_OLD,
     TLS1_CK_ECDHE_RSA_CHACHA20_POLY1305_OLD,
     SSL_kECDHE,
     SSL_aRSA,
     SSL_CHACHA20POLY1305_OLD,
     SSL_AEAD,
     SSL_HANDSHAKE_MAC_SHA256,
    },

    {
     TLS1_TXT_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_OLD,
     TLS1_CK_ECDHE_ECDSA_CHACHA20_POLY1305_OLD,
     SSL_kECDHE,
     SSL_aECDSA,
     SSL_CHACHA20POLY1305_OLD,
     SSL_AEAD,
     SSL_HANDSHAKE_MAC_SHA256,
    },
#endif

    /* Cipher CCA8 */
    {
     TLS1_TXT_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256,
     TLS1_CK_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256,
     SSL_kECDHE,
     SSL_aRSA,
     SSL_CHACHA20POLY1305,
     SSL_AEAD,
     SSL_HANDSHAKE_MAC_SHA256,
    },

    /* Cipher CCA9 */
    {
     TLS1_TXT_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256,
     TLS1_CK_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256,
     SSL_kECDHE,
     SSL_aECDSA,
     SSL_CHACHA20POLY1305,
     SSL_AEAD,
     SSL_HANDSHAKE_MAC_SHA256,
    },

    /* Cipher CCAB */
    {
     TLS1_TXT_ECDHE_PSK_WITH_CHACHA20_POLY1305_SHA256,
     TLS1_CK_ECDHE_PSK_WITH_CHACHA20_POLY1305_SHA256,
     SSL_kECDHE,
     SSL_aPSK,
     SSL_CHACHA20POLY1305,
     SSL_AEAD,
     SSL_HANDSHAKE_MAC_SHA256,
    },

    /* Cipher D001 */
    {
     TLS1_TXT_ECDHE_PSK_WITH_AES_128_GCM_SHA256,
     TLS1_CK_ECDHE_PSK_WITH_AES_128_GCM_SHA256,
     SSL_kECDHE,
     SSL_aPSK,
     SSL_AES128GCM,
     SSL_AEAD,
     SSL_HANDSHAKE_MAC_SHA256,
    },

    /* Cipher D002 */
    {
     TLS1_TXT_ECDHE_PSK_WITH_AES_256_GCM_SHA384,
     TLS1_CK_ECDHE_PSK_WITH_AES_256_GCM_SHA384,
     SSL_kECDHE,
     SSL_aPSK,
     SSL_AES256GCM,
     SSL_AEAD,
     SSL_HANDSHAKE_MAC_SHA384,
    },

};

static const size_t kCiphersLen = OPENSSL_ARRAY_SIZE(kCiphers);

#define CIPHER_ADD 1
#define CIPHER_KILL 2
#define CIPHER_DEL 3
#define CIPHER_ORD 4
#define CIPHER_SPECIAL 5

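/* These rule actions correspond to the operators accepted by
 * |ssl_cipher_process_rulestr| below: a bare cipher or alias name (and '|'
 * inside a bracketed group) selects CIPHER_ADD, '-' selects CIPHER_DEL, '+'
 * selects CIPHER_ORD, '!' selects CIPHER_KILL, and '@' introduces a
 * CIPHER_SPECIAL command such as "@STRENGTH". */
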
typedef struct cipher_order_st {
  const SSL_CIPHER *cipher;
  int active;
  int in_group;
  struct cipher_order_st *next, *prev;
} CIPHER_ORDER;

typedef struct cipher_alias_st {
  /* name is the name of the cipher alias. */
  const char *name;

  /* The following fields are bitmasks for the corresponding fields on
   * |SSL_CIPHER|. A cipher matches a cipher alias iff, for each bitmask, the
   * bit corresponding to the cipher's value is set to 1. If any bitmask is
   * all zeroes, the alias matches nothing. Use |~0u| for the default value. */
  uint32_t algorithm_mkey;
  uint32_t algorithm_auth;
  uint32_t algorithm_enc;
  uint32_t algorithm_mac;

  /* min_version, if non-zero, matches all ciphers which were added in that
   * particular protocol version. */
  uint16_t min_version;
} CIPHER_ALIAS;

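/* For example, the "AESGCM" entry in the table below is
 * {~SSL_kCECPQ1, ~0u, SSL_AES128GCM | SSL_AES256GCM, ~0u, 0}: a cipher matches
 * it when its key-exchange bit is anything but SSL_kCECPQ1, its encryption bit
 * is one of the two AES-GCM bits, and the authentication, MAC and version
 * fields are left unconstrained. */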
static const CIPHER_ALIAS kCipherAliases[] = {
    /* "ALL" doesn't include eNULL nor kCECPQ1. These must be explicitly
     * enabled. */
    {"ALL", ~SSL_kCECPQ1, ~0u, ~SSL_eNULL, ~0u, 0},

    /* The "COMPLEMENTOFDEFAULT" rule is omitted. It matches nothing. */

    /* key exchange aliases
     * (some of those using only a single bit here combine
     * multiple key exchange algs according to the RFCs,
     * e.g. kEDH combines DHE_DSS and DHE_RSA) */
    {"kRSA", SSL_kRSA, ~0u, ~0u, ~0u, 0},

    {"kDHE", SSL_kDHE, ~0u, ~0u, ~0u, 0},
    {"kEDH", SSL_kDHE, ~0u, ~0u, ~0u, 0},
    {"DH", SSL_kDHE, ~0u, ~0u, ~0u, 0},

    {"kECDHE", SSL_kECDHE, ~0u, ~0u, ~0u, 0},
    {"kCECPQ1", SSL_kCECPQ1, ~0u, ~0u, ~0u, 0},
    {"kEECDH", SSL_kECDHE, ~0u, ~0u, ~0u, 0},
    {"ECDH", SSL_kECDHE, ~0u, ~0u, ~0u, 0},

    {"kPSK", SSL_kPSK, ~0u, ~0u, ~0u, 0},

    /* server authentication aliases */
    {"aRSA", ~SSL_kCECPQ1, SSL_aRSA, ~SSL_eNULL, ~0u, 0},
    {"aECDSA", ~SSL_kCECPQ1, SSL_aECDSA, ~0u, ~0u, 0},
    {"ECDSA", ~SSL_kCECPQ1, SSL_aECDSA, ~0u, ~0u, 0},
    {"aPSK", ~0u, SSL_aPSK, ~0u, ~0u, 0},

    /* aliases combining key exchange and server authentication */
    {"DHE", SSL_kDHE, ~0u, ~0u, ~0u, 0},
    {"EDH", SSL_kDHE, ~0u, ~0u, ~0u, 0},
    {"ECDHE", SSL_kECDHE, ~0u, ~0u, ~0u, 0},
    {"EECDH", SSL_kECDHE, ~0u, ~0u, ~0u, 0},
    {"RSA", SSL_kRSA, SSL_aRSA, ~SSL_eNULL, ~0u, 0},
    {"PSK", SSL_kPSK, SSL_aPSK, ~0u, ~0u, 0},

    /* symmetric encryption aliases */
    {"3DES", ~0u, ~0u, SSL_3DES, ~0u, 0},
    {"RC4", ~0u, ~0u, SSL_RC4, ~0u, 0},
    {"AES128", ~0u, ~0u, SSL_AES128 | SSL_AES128GCM, ~0u, 0},
    {"AES256", ~SSL_kCECPQ1, ~0u, SSL_AES256 | SSL_AES256GCM, ~0u, 0},
    {"AES", ~SSL_kCECPQ1, ~0u, SSL_AES, ~0u, 0},
    {"AESGCM", ~SSL_kCECPQ1, ~0u, SSL_AES128GCM | SSL_AES256GCM, ~0u, 0},
    {"CHACHA20", ~SSL_kCECPQ1, ~0u,
     SSL_CHACHA20POLY1305 | SSL_CHACHA20POLY1305_OLD, ~0u, 0},

    /* MAC aliases */
    {"MD5", ~0u, ~0u, ~0u, SSL_MD5, 0},
    {"SHA1", ~0u, ~0u, ~SSL_eNULL, SSL_SHA1, 0},
    {"SHA", ~0u, ~0u, ~SSL_eNULL, SSL_SHA1, 0},
    {"SHA256", ~SSL_kCECPQ1, ~0u, ~0u, SSL_SHA256, 0},
    {"SHA384", ~SSL_kCECPQ1, ~0u, ~0u, SSL_SHA384, 0},

    /* Legacy protocol minimum version aliases. "TLSv1" is intentionally the
     * same as "SSLv3". */
    {"SSLv3", ~SSL_kCECPQ1, ~0u, ~SSL_eNULL, ~0u, SSL3_VERSION},
    {"TLSv1", ~SSL_kCECPQ1, ~0u, ~SSL_eNULL, ~0u, SSL3_VERSION},
    {"TLSv1.2", ~SSL_kCECPQ1, ~0u, ~SSL_eNULL, ~0u, TLS1_2_VERSION},

    /* Legacy strength classes. */
    {"MEDIUM", ~0u, ~0u, SSL_RC4, ~0u, 0},
    {"HIGH", ~SSL_kCECPQ1, ~0u, ~(SSL_eNULL | SSL_RC4), ~0u, 0},
    {"FIPS", ~SSL_kCECPQ1, ~0u, ~(SSL_eNULL | SSL_RC4), ~0u, 0},
};

static const size_t kCipherAliasesLen = OPENSSL_ARRAY_SIZE(kCipherAliases);

static int ssl_cipher_id_cmp(const void *in_a, const void *in_b) {
  const SSL_CIPHER *a = in_a;
  const SSL_CIPHER *b = in_b;

  if (a->id > b->id) {
    return 1;
  } else if (a->id < b->id) {
    return -1;
  } else {
    return 0;
  }
}

static int ssl_cipher_ptr_id_cmp(const SSL_CIPHER **a, const SSL_CIPHER **b) {
  return ssl_cipher_id_cmp(*a, *b);
}

const SSL_CIPHER *SSL_get_cipher_by_value(uint16_t value) {
  SSL_CIPHER c;

  c.id = 0x03000000L | value;
  return bsearch(&c, kCiphers, kCiphersLen, sizeof(SSL_CIPHER),
                 ssl_cipher_id_cmp);
}

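/* Illustrative use (not part of the original source): a caller might look up
 * the ECDHE-RSA AES-128-GCM suite by its two-byte TLS value:
 *
 *   const SSL_CIPHER *cipher = SSL_get_cipher_by_value(0xc02f);
 *   assert(cipher != NULL &&
 *          SSL_CIPHER_get_id(cipher) ==
 *              TLS1_CK_ECDHE_RSA_WITH_AES_128_GCM_SHA256);
 *
 * The |0x03000000L| prefix above maps the 16-bit wire value into the 32-bit
 * cipher id space used by |kCiphers|, which must remain sorted by id for the
 * |bsearch| to be valid. */
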
int ssl_cipher_get_evp_aead(const EVP_AEAD **out_aead,
                            size_t *out_mac_secret_len,
                            size_t *out_fixed_iv_len,
                            const SSL_CIPHER *cipher, uint16_t version) {
  *out_aead = NULL;
  *out_mac_secret_len = 0;
  *out_fixed_iv_len = 0;

  switch (cipher->algorithm_enc) {
    case SSL_AES128GCM:
      *out_aead = EVP_aead_aes_128_gcm();
      *out_fixed_iv_len = 4;
      break;

    case SSL_AES256GCM:
      *out_aead = EVP_aead_aes_256_gcm();
      *out_fixed_iv_len = 4;
      break;

#if !defined(BORINGSSL_ANDROID_SYSTEM)
    case SSL_CHACHA20POLY1305_OLD:
      *out_aead = EVP_aead_chacha20_poly1305_old();
      *out_fixed_iv_len = 0;
      break;
#endif

    case SSL_CHACHA20POLY1305:
      *out_aead = EVP_aead_chacha20_poly1305();
      *out_fixed_iv_len = 12;
      break;

    case SSL_RC4:
      switch (cipher->algorithm_mac) {
        case SSL_MD5:
          if (version == SSL3_VERSION) {
            *out_aead = EVP_aead_rc4_md5_ssl3();
          } else {
            *out_aead = EVP_aead_rc4_md5_tls();
          }
          *out_mac_secret_len = MD5_DIGEST_LENGTH;
          break;
        case SSL_SHA1:
          if (version == SSL3_VERSION) {
            *out_aead = EVP_aead_rc4_sha1_ssl3();
          } else {
            *out_aead = EVP_aead_rc4_sha1_tls();
          }
          *out_mac_secret_len = SHA_DIGEST_LENGTH;
          break;
        default:
          return 0;
      }
      break;

    case SSL_AES128:
      switch (cipher->algorithm_mac) {
        case SSL_SHA1:
          if (version == SSL3_VERSION) {
            *out_aead = EVP_aead_aes_128_cbc_sha1_ssl3();
            *out_fixed_iv_len = 16;
          } else if (version == TLS1_VERSION) {
            *out_aead = EVP_aead_aes_128_cbc_sha1_tls_implicit_iv();
            *out_fixed_iv_len = 16;
          } else {
            *out_aead = EVP_aead_aes_128_cbc_sha1_tls();
          }
          *out_mac_secret_len = SHA_DIGEST_LENGTH;
          break;
        case SSL_SHA256:
          *out_aead = EVP_aead_aes_128_cbc_sha256_tls();
          *out_mac_secret_len = SHA256_DIGEST_LENGTH;
          break;
        default:
          return 0;
      }
      break;

    case SSL_AES256:
      switch (cipher->algorithm_mac) {
        case SSL_SHA1:
          if (version == SSL3_VERSION) {
            *out_aead = EVP_aead_aes_256_cbc_sha1_ssl3();
            *out_fixed_iv_len = 16;
          } else if (version == TLS1_VERSION) {
            *out_aead = EVP_aead_aes_256_cbc_sha1_tls_implicit_iv();
            *out_fixed_iv_len = 16;
          } else {
            *out_aead = EVP_aead_aes_256_cbc_sha1_tls();
          }
          *out_mac_secret_len = SHA_DIGEST_LENGTH;
          break;
        case SSL_SHA256:
          *out_aead = EVP_aead_aes_256_cbc_sha256_tls();
          *out_mac_secret_len = SHA256_DIGEST_LENGTH;
          break;
        case SSL_SHA384:
          *out_aead = EVP_aead_aes_256_cbc_sha384_tls();
          *out_mac_secret_len = SHA384_DIGEST_LENGTH;
          break;
        default:
          return 0;
      }
      break;

    case SSL_3DES:
      switch (cipher->algorithm_mac) {
        case SSL_SHA1:
          if (version == SSL3_VERSION) {
            *out_aead = EVP_aead_des_ede3_cbc_sha1_ssl3();
            *out_fixed_iv_len = 8;
          } else if (version == TLS1_VERSION) {
            *out_aead = EVP_aead_des_ede3_cbc_sha1_tls_implicit_iv();
            *out_fixed_iv_len = 8;
          } else {
            *out_aead = EVP_aead_des_ede3_cbc_sha1_tls();
          }
          *out_mac_secret_len = SHA_DIGEST_LENGTH;
          break;
        default:
          return 0;
      }
      break;

    case SSL_eNULL:
      switch (cipher->algorithm_mac) {
        case SSL_SHA1:
          if (version == SSL3_VERSION) {
            *out_aead = EVP_aead_null_sha1_ssl3();
          } else {
            *out_aead = EVP_aead_null_sha1_tls();
          }
          *out_mac_secret_len = SHA_DIGEST_LENGTH;
          break;
        default:
          return 0;
      }
      break;

    default:
      return 0;
  }

  /* In TLS 1.3, the iv_len is equal to the AEAD nonce length whereas the code
   * above computes the TLS 1.2 construction.
   *
   * TODO(davidben,svaldez): Avoid computing the wrong value and fixing it. */
  if (version >= TLS1_3_VERSION) {
    *out_fixed_iv_len = EVP_AEAD_nonce_length(*out_aead);
    assert(*out_fixed_iv_len >= 8);
  }
  return 1;
}

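/* For example, for TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256 at TLS 1.2 the switch
 * above yields |EVP_aead_aes_128_gcm()| with a zero-length MAC secret and a
 * 4-byte fixed IV (the record nonce is this 4-byte implicit part plus an
 * 8-byte explicit part), while the CBC-mode suites return "stitched"
 * encrypt-then-mac AEADs that report a non-zero |*out_mac_secret_len|. */
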
const EVP_MD *ssl_get_handshake_digest(uint32_t algorithm_prf) {
  switch (algorithm_prf) {
    case SSL_HANDSHAKE_MAC_DEFAULT:
      return EVP_sha1();
    case SSL_HANDSHAKE_MAC_SHA256:
      return EVP_sha256();
    case SSL_HANDSHAKE_MAC_SHA384:
      return EVP_sha384();
    default:
      return NULL;
  }
}

#define ITEM_SEP(a) \
  (((a) == ':') || ((a) == ' ') || ((a) == ';') || ((a) == ','))

/* rule_equals returns one iff the NUL-terminated string |rule| is equal to the
 * |buf_len| bytes at |buf|. */
static int rule_equals(const char *rule, const char *buf, size_t buf_len) {
  /* |strncmp| alone only checks that |buf| is a prefix of |rule|. */
  return strncmp(rule, buf, buf_len) == 0 && rule[buf_len] == '\0';
}

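/* For example, with the parsed token "AES" (|buf_len| == 3),
 * rule_equals("AES128", buf, 3) is zero: |strncmp| alone would accept the
 * prefix match, but rule[3] is '1' rather than '\0', so the token only matches
 * the name that is literally "AES". */
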
static void ll_append_tail(CIPHER_ORDER **head, CIPHER_ORDER *curr,
                           CIPHER_ORDER **tail) {
  if (curr == *tail) {
    return;
  }
  if (curr == *head) {
    *head = curr->next;
  }
  if (curr->prev != NULL) {
    curr->prev->next = curr->next;
  }
  if (curr->next != NULL) {
    curr->next->prev = curr->prev;
  }
  (*tail)->next = curr;
  curr->prev = *tail;
  curr->next = NULL;
  *tail = curr;
}

static void ll_append_head(CIPHER_ORDER **head, CIPHER_ORDER *curr,
                           CIPHER_ORDER **tail) {
  if (curr == *head) {
    return;
  }
  if (curr == *tail) {
    *tail = curr->prev;
  }
  if (curr->next != NULL) {
    curr->next->prev = curr->prev;
  }
  if (curr->prev != NULL) {
    curr->prev->next = curr->next;
  }
  (*head)->prev = curr;
  curr->next = *head;
  curr->prev = NULL;
  *head = curr;
}

static void ssl_cipher_collect_ciphers(const SSL_PROTOCOL_METHOD *ssl_method,
                                       CIPHER_ORDER *co_list,
                                       CIPHER_ORDER **head_p,
                                       CIPHER_ORDER **tail_p) {
  /* The set of ciphers is static, but some subset may be unsupported by
   * |ssl_method|, so the list may be smaller. */
  size_t co_list_num = 0;
  size_t i;
  for (i = 0; i < kCiphersLen; i++) {
    const SSL_CIPHER *cipher = &kCiphers[i];
    if (ssl_method->supports_cipher(cipher)) {
      co_list[co_list_num].cipher = cipher;
      co_list[co_list_num].next = NULL;
      co_list[co_list_num].prev = NULL;
      co_list[co_list_num].active = 0;
      co_list[co_list_num].in_group = 0;
      co_list_num++;
    }
  }

  /* Prepare linked list from list entries. */
  if (co_list_num > 0) {
    co_list[0].prev = NULL;

    if (co_list_num > 1) {
      co_list[0].next = &co_list[1];

      for (i = 1; i < co_list_num - 1; i++) {
        co_list[i].prev = &co_list[i - 1];
        co_list[i].next = &co_list[i + 1];
      }

      co_list[co_list_num - 1].prev = &co_list[co_list_num - 2];
    }

    co_list[co_list_num - 1].next = NULL;

    *head_p = &co_list[0];
    *tail_p = &co_list[co_list_num - 1];
  }
}

/* ssl_cipher_apply_rule applies the rule type |rule| to ciphers matching its
 * parameters in the linked list from |*head_p| to |*tail_p|. It writes the new
 * head and tail of the list to |*head_p| and |*tail_p|, respectively.
 *
 * - If |cipher_id| is non-zero, only that cipher is selected.
 * - Otherwise, if |strength_bits| is non-negative, it selects ciphers
 *   of that strength.
 * - Otherwise, it selects ciphers that match each bitmasks in |alg_*| and
 *   |min_version|. */
static void ssl_cipher_apply_rule(
    uint32_t cipher_id, uint32_t alg_mkey, uint32_t alg_auth,
    uint32_t alg_enc, uint32_t alg_mac, uint16_t min_version, int rule,
    int strength_bits, int in_group, CIPHER_ORDER **head_p,
    CIPHER_ORDER **tail_p) {
  CIPHER_ORDER *head, *tail, *curr, *next, *last;
  const SSL_CIPHER *cp;
  int reverse = 0;

  if (cipher_id == 0 && strength_bits == -1 && min_version == 0 &&
      (alg_mkey == 0 || alg_auth == 0 || alg_enc == 0 || alg_mac == 0)) {
    /* The rule matches nothing, so bail early. */
    return;
  }

  if (rule == CIPHER_DEL) {
    /* needed to maintain sorting between currently deleted ciphers */
    reverse = 1;
  }

  head = *head_p;
  tail = *tail_p;

  if (reverse) {
    next = tail;
    last = head;
  } else {
    next = head;
    last = tail;
  }

  curr = NULL;
  for (;;) {
    if (curr == last) {
      break;
    }

    curr = next;
    if (curr == NULL) {
      break;
    }

    next = reverse ? curr->prev : curr->next;
    cp = curr->cipher;

    /* Selection criteria is either a specific cipher, the value of
     * |strength_bits|, or the algorithms used. */
    if (cipher_id != 0) {
      if (cipher_id != cp->id) {
        continue;
      }
    } else if (strength_bits >= 0) {
      if (strength_bits != SSL_CIPHER_get_bits(cp, NULL)) {
        continue;
      }
    } else {
      if (!(alg_mkey & cp->algorithm_mkey) ||
          !(alg_auth & cp->algorithm_auth) ||
          !(alg_enc & cp->algorithm_enc) ||
          !(alg_mac & cp->algorithm_mac) ||
          (min_version != 0 && SSL_CIPHER_get_min_version(cp) != min_version)) {
        continue;
      }

      /* The following ciphers are internal implementation details of TLS 1.3
       * resumption but are not yet finalized. Disable them by default until
       * then. */
      if (cp->id == TLS1_CK_ECDHE_PSK_WITH_AES_128_GCM_SHA256 ||
          cp->id == TLS1_CK_ECDHE_PSK_WITH_AES_256_GCM_SHA384) {
        continue;
      }
    }

    /* add the cipher if it has not been added yet. */
    if (rule == CIPHER_ADD) {
      /* reverse == 0 */
      if (!curr->active) {
        ll_append_tail(&head, curr, &tail);
        curr->active = 1;
        curr->in_group = in_group;
      }
    }

    /* Move the added cipher to this location */
    else if (rule == CIPHER_ORD) {
      /* reverse == 0 */
      if (curr->active) {
        ll_append_tail(&head, curr, &tail);
        curr->in_group = 0;
      }
    } else if (rule == CIPHER_DEL) {
      /* reverse == 1 */
      if (curr->active) {
        /* most recently deleted ciphersuites get best positions
         * for any future CIPHER_ADD (note that the CIPHER_DEL loop
         * works in reverse to maintain the order) */
        ll_append_head(&head, curr, &tail);
        curr->active = 0;
        curr->in_group = 0;
      }
    } else if (rule == CIPHER_KILL) {
      /* reverse == 0 */
      if (head == curr) {
        head = curr->next;
      } else {
        curr->prev->next = curr->next;
      }

      if (tail == curr) {
        tail = curr->prev;
      }
      curr->active = 0;
      if (curr->next != NULL) {
        curr->next->prev = curr->prev;
      }
      if (curr->prev != NULL) {
        curr->prev->next = curr->next;
      }
      curr->next = NULL;
      curr->prev = NULL;
    }
  }

  *head_p = head;
  *tail_p = tail;
}

static int ssl_cipher_strength_sort(CIPHER_ORDER **head_p,
                                    CIPHER_ORDER **tail_p) {
  int max_strength_bits, i, *number_uses;
  CIPHER_ORDER *curr;

  /* This routine sorts the ciphers with descending strength. The sorting must
   * keep the pre-sorted sequence, so we apply the normal sorting routine as
   * '+' movement to the end of the list. */
  max_strength_bits = 0;
  curr = *head_p;
  while (curr != NULL) {
    if (curr->active &&
        SSL_CIPHER_get_bits(curr->cipher, NULL) > max_strength_bits) {
      max_strength_bits = SSL_CIPHER_get_bits(curr->cipher, NULL);
    }
    curr = curr->next;
  }

  number_uses = OPENSSL_malloc((max_strength_bits + 1) * sizeof(int));
  if (!number_uses) {
    OPENSSL_PUT_ERROR(SSL, ERR_R_MALLOC_FAILURE);
    return 0;
  }
  memset(number_uses, 0, (max_strength_bits + 1) * sizeof(int));

  /* Now find the strength_bits values actually used. */
  curr = *head_p;
  while (curr != NULL) {
    if (curr->active) {
      number_uses[SSL_CIPHER_get_bits(curr->cipher, NULL)]++;
    }
    curr = curr->next;
  }

  /* Go through the list of used strength_bits values in descending order. */
  for (i = max_strength_bits; i >= 0; i--) {
    if (number_uses[i] > 0) {
      ssl_cipher_apply_rule(0, 0, 0, 0, 0, 0, CIPHER_ORD, i, 0, head_p, tail_p);
    }
  }

  OPENSSL_free(number_uses);
  return 1;
}

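/* A worked example of the grammar handled below, using only names defined in
 * this file: the rule string
 *
 *   "kECDHE+AESGCM:kPSK:!RC4:@STRENGTH"
 *
 * first CIPHER_ADDs every cipher matching both the kECDHE and AESGCM aliases,
 * then adds the kPSK ciphers after them, CIPHER_KILLs all RC4 ciphers so they
 * cannot be re-added later, and finally applies the "@STRENGTH" special
 * command, which re-sorts the selected ciphers by strength via
 * |ssl_cipher_strength_sort|. Bracketed groups such as "[A|B]" mark their
 * members as a single equal-preference group via |in_group|. */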
static int ssl_cipher_process_rulestr(const SSL_PROTOCOL_METHOD *ssl_method,
                                      const char *rule_str,
                                      CIPHER_ORDER **head_p,
                                      CIPHER_ORDER **tail_p) {
  uint32_t alg_mkey, alg_auth, alg_enc, alg_mac;
  uint16_t min_version;
  const char *l, *buf;
  int multi, skip_rule, rule, retval, ok, in_group = 0, has_group = 0;
  size_t j, buf_len;
  uint32_t cipher_id;
  char ch;

  retval = 1;
  l = rule_str;
  for (;;) {
    ch = *l;

    if (ch == '\0') {
      break; /* done */
    }

    if (in_group) {
      if (ch == ']') {
        if (*tail_p) {
          (*tail_p)->in_group = 0;
        }
        in_group = 0;
        l++;
        continue;
      }

      if (ch == '|') {
        rule = CIPHER_ADD;
        l++;
        continue;
      } else if (!(ch >= 'a' && ch <= 'z') && !(ch >= 'A' && ch <= 'Z') &&
                 !(ch >= '0' && ch <= '9')) {
        OPENSSL_PUT_ERROR(SSL, SSL_R_UNEXPECTED_OPERATOR_IN_GROUP);
        retval = in_group = 0;
        break;
      } else {
        rule = CIPHER_ADD;
      }
    } else if (ch == '-') {
      rule = CIPHER_DEL;
      l++;
    } else if (ch == '+') {
      rule = CIPHER_ORD;
      l++;
    } else if (ch == '!') {
      rule = CIPHER_KILL;
      l++;
    } else if (ch == '@') {
      rule = CIPHER_SPECIAL;
      l++;
    } else if (ch == '[') {
      if (in_group) {
        OPENSSL_PUT_ERROR(SSL, SSL_R_NESTED_GROUP);
        retval = in_group = 0;
        break;
      }
      in_group = 1;
      has_group = 1;
      l++;
      continue;
    } else {
      rule = CIPHER_ADD;
    }

    /* If preference groups are enabled, the only legal operator is +.
     * Otherwise the in_group bits will get mixed up. */
    if (has_group && rule != CIPHER_ADD) {
      OPENSSL_PUT_ERROR(SSL, SSL_R_MIXED_SPECIAL_OPERATOR_WITH_GROUPS);
      retval = in_group = 0;
      break;
    }

    if (ITEM_SEP(ch)) {
      l++;
      continue;
    }

    multi = 0;
    cipher_id = 0;
    alg_mkey = ~0u;
    alg_auth = ~0u;
    alg_enc = ~0u;
    alg_mac = ~0u;
    min_version = 0;
    skip_rule = 0;

    for (;;) {
      ch = *l;
      buf = l;
      buf_len = 0;
      while (((ch >= 'A') && (ch <= 'Z')) || ((ch >= '0') && (ch <= '9')) ||
             ((ch >= 'a') && (ch <= 'z')) || (ch == '-') || (ch == '.')) {
        ch = *(++l);
        buf_len++;
      }

      if (buf_len == 0) {
        /* We hit something we cannot deal with, it is no command or separator
         * nor alphanumeric, so we call this an error. */
        OPENSSL_PUT_ERROR(SSL, SSL_R_INVALID_COMMAND);
        retval = in_group = 0;
        l++;
        break;
      }

      if (rule == CIPHER_SPECIAL) {
        break;
      }

      /* Look for a matching exact cipher. These aren't allowed in multipart
       * rules. */
      if (!multi && ch != '+') {
        for (j = 0; j < kCiphersLen; j++) {
          const SSL_CIPHER *cipher = &kCiphers[j];
          if (rule_equals(cipher->name, buf, buf_len)) {
            cipher_id = cipher->id;
            break;
          }
        }
      }
      if (cipher_id == 0) {
        /* If not an exact cipher, look for a matching cipher alias. */
        for (j = 0; j < kCipherAliasesLen; j++) {
          if (rule_equals(kCipherAliases[j].name, buf, buf_len)) {
            alg_mkey &= kCipherAliases[j].algorithm_mkey;
            alg_auth &= kCipherAliases[j].algorithm_auth;
            alg_enc &= kCipherAliases[j].algorithm_enc;
            alg_mac &= kCipherAliases[j].algorithm_mac;

            if (min_version != 0 &&
                min_version != kCipherAliases[j].min_version) {
              skip_rule = 1;
            } else {
              min_version = kCipherAliases[j].min_version;
            }
            break;
          }
        }
        if (j == kCipherAliasesLen) {
          skip_rule = 1;
        }
      }

      /* Check for a multipart rule. */
      if (ch != '+') {
        break;
      }
      l++;
      multi = 1;
    }

    /* If one of the CHACHA20_POLY1305 variants is selected, include the other
     * as well. They have the same name to avoid requiring changes in
     * configuration. Apply this transformation late so that the cipher name
     * still behaves as an exact name and not an alias in multipart rules.
     *
     * This is temporary and will be removed when the pre-standard construction
     * is removed. */
    if (cipher_id == TLS1_CK_ECDHE_RSA_CHACHA20_POLY1305_OLD ||
        cipher_id == TLS1_CK_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256) {
      cipher_id = 0;
      alg_mkey = SSL_kECDHE;
      alg_auth = SSL_aRSA;
      alg_enc = SSL_CHACHA20POLY1305 | SSL_CHACHA20POLY1305_OLD;
      alg_mac = SSL_AEAD;
    } else if (cipher_id == TLS1_CK_ECDHE_ECDSA_CHACHA20_POLY1305_OLD ||
               cipher_id ==
                   TLS1_CK_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256) {
      cipher_id = 0;
      alg_mkey = SSL_kECDHE;
      alg_auth = SSL_aECDSA;
      alg_enc = SSL_CHACHA20POLY1305 | SSL_CHACHA20POLY1305_OLD;
      alg_mac = SSL_AEAD;
    }

    /* Ok, we have the rule, now apply it. */
    if (rule == CIPHER_SPECIAL) {
      /* special command */
      ok = 0;
      if (buf_len == 8 && !strncmp(buf, "STRENGTH", 8)) {
        ok = ssl_cipher_strength_sort(head_p, tail_p);
      } else {
        OPENSSL_PUT_ERROR(SSL, SSL_R_INVALID_COMMAND);
      }

      if (ok == 0) {
        retval = 0;
      }

      /* We do not support any "multi" options together with "@", so throw away
       * the rest of the command, if any left, until end or ':' is found. */
      while (*l != '\0' && !ITEM_SEP(*l)) {
        l++;
      }
    } else if (!skip_rule) {
      ssl_cipher_apply_rule(cipher_id, alg_mkey, alg_auth, alg_enc, alg_mac,
                            min_version, rule, -1, in_group, head_p, tail_p);
    }
  }

  if (in_group) {
    OPENSSL_PUT_ERROR(SSL, SSL_R_INVALID_COMMAND);
    retval = 0;
1463 }
Adam Langley95c29f32014-06-20 12:00:00 -07001464
Adam Langleyfcf25832014-12-18 17:42:32 -08001465 return retval;
1466}
Adam Langley95c29f32014-06-20 12:00:00 -07001467
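/* ssl_create_cipher_list evaluates |rule_str| against the compiled-in cipher
 * table and stores the resulting preference list in |*out_cipher_list| (and,
 * optionally, an id-sorted copy in |*out_cipher_list_by_id|). A rule string is
 * a list of rules separated by ITEM_SEP characters (typically ':'); each rule
 * is a cipher name or alias (aliases may be joined with '+' to intersect
 * them), optionally prefixed with '-' (disable), '+' (move to the end), '!'
 * (permanently remove) or '@' (special command), and '[' ... ']' forms an
 * equal-preference group, all handled by ssl_cipher_process_rulestr above. An
 * illustrative rule string, assuming the usual OpenSSL-style aliases are
 * present in kCipherAliases, is
 *
 *   "ALL:!RC4:+PSK:@STRENGTH"
 *
 * which enables everything, permanently removes RC4, demotes PSK suites to the
 * end, and finally sorts the enabled ciphers by strength. */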
STACK_OF(SSL_CIPHER) *
ssl_create_cipher_list(const SSL_PROTOCOL_METHOD *ssl_method,
                       struct ssl_cipher_preference_list_st **out_cipher_list,
                       STACK_OF(SSL_CIPHER) **out_cipher_list_by_id,
                       const char *rule_str) {
  int ok;
  STACK_OF(SSL_CIPHER) *cipherstack = NULL, *tmp_cipher_list = NULL;
  const char *rule_p;
  CIPHER_ORDER *co_list = NULL, *head = NULL, *tail = NULL, *curr;
  uint8_t *in_group_flags = NULL;
  unsigned int num_in_group_flags = 0;
  struct ssl_cipher_preference_list_st *pref_list = NULL;

  /* Return with error if nothing to do. */
  if (rule_str == NULL || out_cipher_list == NULL) {
    return NULL;
  }

  /* Now we have to collect the available ciphers from the compiled-in ciphers.
   * We cannot get more than the number compiled in, so that count is used for
   * the allocation. */
  co_list = OPENSSL_malloc(sizeof(CIPHER_ORDER) * kCiphersLen);
  if (co_list == NULL) {
    OPENSSL_PUT_ERROR(SSL, ERR_R_MALLOC_FAILURE);
    return NULL;
  }

  ssl_cipher_collect_ciphers(ssl_method, co_list, &head, &tail);

  /* Now arrange all ciphers by preference.
   * TODO(davidben): Compute this order once and copy it. */

  /* Everything else being equal, prefer ECDHE_ECDSA then ECDHE_RSA over other
   * key exchange mechanisms. */
  ssl_cipher_apply_rule(0, SSL_kECDHE, SSL_aECDSA, ~0u, ~0u, 0, CIPHER_ADD, -1,
                        0, &head, &tail);
  ssl_cipher_apply_rule(0, SSL_kECDHE, ~0u, ~0u, ~0u, 0, CIPHER_ADD, -1, 0,
                        &head, &tail);
  ssl_cipher_apply_rule(0, SSL_kECDHE, ~0u, ~0u, ~0u, 0, CIPHER_DEL, -1, 0,
                        &head, &tail);

  /* Order the bulk ciphers. First the preferred AEAD ciphers. We prefer
   * CHACHA20 unless there is hardware support for fast and constant-time
   * AES_GCM. Of the two CHACHA20 variants, the new one is preferred over the
   * old one. */
  if (EVP_has_aes_hardware()) {
    ssl_cipher_apply_rule(0, ~0u, ~0u, SSL_AES128GCM, ~0u, 0, CIPHER_ADD, -1, 0,
                          &head, &tail);
    ssl_cipher_apply_rule(0, ~0u, ~0u, SSL_AES256GCM, ~0u, 0, CIPHER_ADD, -1, 0,
                          &head, &tail);
    ssl_cipher_apply_rule(0, ~0u, ~0u, SSL_CHACHA20POLY1305, ~0u, 0, CIPHER_ADD,
                          -1, 0, &head, &tail);
    ssl_cipher_apply_rule(0, ~0u, ~0u, SSL_CHACHA20POLY1305_OLD, ~0u, 0,
                          CIPHER_ADD, -1, 0, &head, &tail);
  } else {
    ssl_cipher_apply_rule(0, ~0u, ~0u, SSL_CHACHA20POLY1305, ~0u, 0, CIPHER_ADD,
                          -1, 0, &head, &tail);
    ssl_cipher_apply_rule(0, ~0u, ~0u, SSL_CHACHA20POLY1305_OLD, ~0u, 0,
                          CIPHER_ADD, -1, 0, &head, &tail);
    ssl_cipher_apply_rule(0, ~0u, ~0u, SSL_AES128GCM, ~0u, 0, CIPHER_ADD, -1, 0,
                          &head, &tail);
    ssl_cipher_apply_rule(0, ~0u, ~0u, SSL_AES256GCM, ~0u, 0, CIPHER_ADD, -1, 0,
                          &head, &tail);
  }

  /* Then the legacy non-AEAD ciphers: AES_128_CBC, AES_256_CBC,
   * 3DES_EDE_CBC_SHA, RC4_128_SHA, RC4_128_MD5. */
  ssl_cipher_apply_rule(0, ~0u, ~0u, SSL_AES128, ~0u, 0, CIPHER_ADD, -1, 0,
                        &head, &tail);
  ssl_cipher_apply_rule(0, ~0u, ~0u, SSL_AES256, ~0u, 0, CIPHER_ADD, -1, 0,
                        &head, &tail);
  ssl_cipher_apply_rule(0, ~0u, ~0u, SSL_3DES, ~0u, 0, CIPHER_ADD, -1, 0, &head,
                        &tail);
  ssl_cipher_apply_rule(0, ~0u, ~0u, SSL_RC4, ~SSL_MD5, 0, CIPHER_ADD, -1, 0,
                        &head, &tail);
  ssl_cipher_apply_rule(0, ~0u, ~0u, SSL_RC4, SSL_MD5, 0, CIPHER_ADD, -1, 0,
                        &head, &tail);

  /* Temporarily enable everything else for sorting. */
  ssl_cipher_apply_rule(0, ~0u, ~0u, ~0u, ~0u, 0, CIPHER_ADD, -1, 0, &head,
                        &tail);

  /* Move ciphers without forward secrecy to the end. */
  ssl_cipher_apply_rule(0, ~(SSL_kDHE | SSL_kECDHE), ~0u, ~0u, ~0u, 0,
                        CIPHER_ORD, -1, 0, &head, &tail);

  /* Now disable everything (maintaining the ordering!). */
  ssl_cipher_apply_rule(0, ~0u, ~0u, ~0u, ~0u, 0, CIPHER_DEL, -1, 0, &head,
                        &tail);

  /* If |rule_str| begins with DEFAULT, apply the default rule before using the
   * (possibly available) additional rules. */
  ok = 1;
  rule_p = rule_str;
  if (strncmp(rule_str, "DEFAULT", 7) == 0) {
    ok = ssl_cipher_process_rulestr(ssl_method, SSL_DEFAULT_CIPHER_LIST, &head,
                                    &tail);
    rule_p += 7;
    if (*rule_p == ':') {
      rule_p++;
    }
  }

  if (ok && strlen(rule_p) > 0) {
    ok = ssl_cipher_process_rulestr(ssl_method, rule_p, &head, &tail);
  }

  if (!ok) {
    goto err;
  }

  /* Allocate a new "cipherstack" for the result; return with error if we
   * cannot get one. */
  cipherstack = sk_SSL_CIPHER_new_null();
  if (cipherstack == NULL) {
    goto err;
  }

  in_group_flags = OPENSSL_malloc(kCiphersLen);
  if (!in_group_flags) {
    goto err;
  }

  /* The cipher selection for the list is done. The ciphers are added, in order
   * of preference, to the resulting STACK_OF(SSL_CIPHER). */
  for (curr = head; curr != NULL; curr = curr->next) {
    if (curr->active) {
      if (!sk_SSL_CIPHER_push(cipherstack, curr->cipher)) {
        goto err;
      }
      in_group_flags[num_in_group_flags++] = curr->in_group;
    }
  }
  OPENSSL_free(co_list); /* Not needed any longer. */
  co_list = NULL;

  tmp_cipher_list = sk_SSL_CIPHER_dup(cipherstack);
  if (tmp_cipher_list == NULL) {
    goto err;
  }
  pref_list = OPENSSL_malloc(sizeof(struct ssl_cipher_preference_list_st));
  if (!pref_list) {
    goto err;
  }
  pref_list->ciphers = cipherstack;
  pref_list->in_group_flags = OPENSSL_malloc(num_in_group_flags);
  if (!pref_list->in_group_flags) {
    goto err;
  }
  memcpy(pref_list->in_group_flags, in_group_flags, num_in_group_flags);
  OPENSSL_free(in_group_flags);
  in_group_flags = NULL;
  if (*out_cipher_list != NULL) {
    ssl_cipher_preference_list_free(*out_cipher_list);
  }
  *out_cipher_list = pref_list;
  pref_list = NULL;

  if (out_cipher_list_by_id != NULL) {
    sk_SSL_CIPHER_free(*out_cipher_list_by_id);
    *out_cipher_list_by_id = tmp_cipher_list;
    tmp_cipher_list = NULL;
    (void)sk_SSL_CIPHER_set_cmp_func(*out_cipher_list_by_id,
                                     ssl_cipher_ptr_id_cmp);

    sk_SSL_CIPHER_sort(*out_cipher_list_by_id);
  } else {
    sk_SSL_CIPHER_free(tmp_cipher_list);
    tmp_cipher_list = NULL;
  }

  return cipherstack;

err:
  OPENSSL_free(co_list);
  OPENSSL_free(in_group_flags);
  sk_SSL_CIPHER_free(cipherstack);
  sk_SSL_CIPHER_free(tmp_cipher_list);
  if (pref_list) {
    OPENSSL_free(pref_list->in_group_flags);
  }
  OPENSSL_free(pref_list);
  return NULL;
}
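/* A minimal usage sketch (illustrative; it assumes the cipher name below is in
 * kCiphers and that RC4 is an alias in kCipherAliases): callers normally reach
 * this function through the public configuration API rather than calling it
 * directly, e.g.
 *
 *   SSL_CTX *ctx = SSL_CTX_new(TLS_method());
 *   if (ctx == NULL ||
 *       !SSL_CTX_set_cipher_list(ctx, "ECDHE-RSA-AES128-GCM-SHA256:!RC4")) {
 *     return 0;
 *   }
 */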

uint32_t SSL_CIPHER_get_id(const SSL_CIPHER *cipher) { return cipher->id; }

uint16_t ssl_cipher_get_value(const SSL_CIPHER *cipher) {
  uint32_t id = cipher->id;
  /* All ciphers are SSLv3. */
  assert((id & 0xff000000) == 0x03000000);
  return id & 0xffff;
}

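/* ssl_cipher_get_ecdhe_psk_cipher sets |*out_cipher| to the cipher suite value
 * of the ECDHE_PSK cipher that uses the same AEAD as |cipher| and returns one.
 * If |cipher| has no ECDHE_PSK equivalent, it returns zero. */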
int ssl_cipher_get_ecdhe_psk_cipher(const SSL_CIPHER *cipher,
                                    uint16_t *out_cipher) {
  switch (cipher->id) {
    case TLS1_CK_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256:
    case TLS1_CK_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256:
    case TLS1_CK_ECDHE_PSK_WITH_CHACHA20_POLY1305_SHA256:
      *out_cipher = TLS1_CK_ECDHE_PSK_WITH_CHACHA20_POLY1305_SHA256 & 0xffff;
      return 1;

    case TLS1_CK_ECDHE_RSA_WITH_AES_128_GCM_SHA256:
    case TLS1_CK_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256:
    case TLS1_CK_ECDHE_PSK_WITH_AES_128_GCM_SHA256:
      *out_cipher = TLS1_CK_ECDHE_PSK_WITH_AES_128_GCM_SHA256 & 0xffff;
      return 1;

    case TLS1_CK_ECDHE_RSA_WITH_AES_256_GCM_SHA384:
    case TLS1_CK_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384:
    case TLS1_CK_ECDHE_PSK_WITH_AES_256_GCM_SHA384:
      *out_cipher = TLS1_CK_ECDHE_PSK_WITH_AES_256_GCM_SHA384 & 0xffff;
      return 1;
  }
  return 0;
}

int SSL_CIPHER_is_AES(const SSL_CIPHER *cipher) {
  return (cipher->algorithm_enc & SSL_AES) != 0;
}

int SSL_CIPHER_has_MD5_HMAC(const SSL_CIPHER *cipher) {
  return (cipher->algorithm_mac & SSL_MD5) != 0;
}

int SSL_CIPHER_has_SHA1_HMAC(const SSL_CIPHER *cipher) {
  return (cipher->algorithm_mac & SSL_SHA1) != 0;
}

int SSL_CIPHER_has_SHA256_HMAC(const SSL_CIPHER *cipher) {
  return (cipher->algorithm_mac & SSL_SHA256) != 0;
}

int SSL_CIPHER_is_AESGCM(const SSL_CIPHER *cipher) {
  return (cipher->algorithm_enc & (SSL_AES128GCM | SSL_AES256GCM)) != 0;
}

int SSL_CIPHER_is_AES128GCM(const SSL_CIPHER *cipher) {
  return (cipher->algorithm_enc & SSL_AES128GCM) != 0;
}

int SSL_CIPHER_is_AES128CBC(const SSL_CIPHER *cipher) {
  return (cipher->algorithm_enc & SSL_AES128) != 0;
}

int SSL_CIPHER_is_AES256CBC(const SSL_CIPHER *cipher) {
  return (cipher->algorithm_enc & SSL_AES256) != 0;
}

int SSL_CIPHER_is_CHACHA20POLY1305(const SSL_CIPHER *cipher) {
  return (cipher->algorithm_enc &
          (SSL_CHACHA20POLY1305 | SSL_CHACHA20POLY1305_OLD)) != 0;
}

int SSL_CIPHER_is_NULL(const SSL_CIPHER *cipher) {
  return (cipher->algorithm_enc & SSL_eNULL) != 0;
}

int SSL_CIPHER_is_RC4(const SSL_CIPHER *cipher) {
  return (cipher->algorithm_enc & SSL_RC4) != 0;
}

int SSL_CIPHER_is_block_cipher(const SSL_CIPHER *cipher) {
  /* Neither stream cipher nor AEAD. */
  return (cipher->algorithm_enc & (SSL_RC4 | SSL_eNULL)) == 0 &&
         cipher->algorithm_mac != SSL_AEAD;
}

int SSL_CIPHER_is_ECDSA(const SSL_CIPHER *cipher) {
  return (cipher->algorithm_auth & SSL_aECDSA) != 0;
}

int SSL_CIPHER_is_DHE(const SSL_CIPHER *cipher) {
  return (cipher->algorithm_mkey & SSL_kDHE) != 0;
}

int SSL_CIPHER_is_ECDHE(const SSL_CIPHER *cipher) {
  return (cipher->algorithm_mkey & SSL_kECDHE) != 0;
}

int SSL_CIPHER_is_CECPQ1(const SSL_CIPHER *cipher) {
  return (cipher->algorithm_mkey & SSL_kCECPQ1) != 0;
}

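/* SSL_CIPHER_get_min_version returns the earliest protocol version at which
 * |cipher| may be used. For example, a suite whose handshake PRF is SHA-256 or
 * SHA-384 reports TLS1_2_VERSION, while a suite on the default MD5/SHA-1 PRF
 * reports SSL3_VERSION. */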
uint16_t SSL_CIPHER_get_min_version(const SSL_CIPHER *cipher) {
  if (cipher->algorithm_prf != SSL_HANDSHAKE_MAC_DEFAULT) {
    /* Cipher suites before TLS 1.2 use the default PRF, while all those added
     * afterwards specify a particular hash. */
    return TLS1_2_VERSION;
  }
  return SSL3_VERSION;
}

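/* SSL_CIPHER_get_max_version returns the latest protocol version at which
 * |cipher| may be used: TLS1_3_VERSION for certificate-based ECDHE AEAD suites
 * (excluding the pre-standard ChaCha20-Poly1305 construction) and
 * TLS1_2_VERSION for everything else. */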
uint16_t SSL_CIPHER_get_max_version(const SSL_CIPHER *cipher) {
  if (cipher->algorithm_mac == SSL_AEAD &&
      (cipher->algorithm_enc & SSL_CHACHA20POLY1305_OLD) == 0 &&
      (cipher->algorithm_mkey & SSL_kECDHE) != 0 &&
      /* TODO(davidben,svaldez): Support PSK-based ciphers in TLS 1.3. */
      (cipher->algorithm_auth & SSL_aCERT) != 0) {
    return TLS1_3_VERSION;
  }
  return TLS1_2_VERSION;
}

/* SSL_CIPHER_get_name returns the name of |cipher| or, if |cipher| is NULL,
 * "(NONE)". */
const char *SSL_CIPHER_get_name(const SSL_CIPHER *cipher) {
  if (cipher != NULL) {
    return cipher->name;
  }

  return "(NONE)";
}

const char *SSL_CIPHER_get_kx_name(const SSL_CIPHER *cipher) {
  if (cipher == NULL) {
    return "";
  }

  switch (cipher->algorithm_mkey) {
    case SSL_kRSA:
      return "RSA";

    case SSL_kDHE:
      switch (cipher->algorithm_auth) {
        case SSL_aRSA:
          return "DHE_RSA";
        default:
          assert(0);
          return "UNKNOWN";
      }

    case SSL_kECDHE:
      switch (cipher->algorithm_auth) {
        case SSL_aECDSA:
          return "ECDHE_ECDSA";
        case SSL_aRSA:
          return "ECDHE_RSA";
        case SSL_aPSK:
          return "ECDHE_PSK";
        default:
          assert(0);
          return "UNKNOWN";
      }

    case SSL_kCECPQ1:
      switch (cipher->algorithm_auth) {
        case SSL_aECDSA:
          return "CECPQ1_ECDSA";
        case SSL_aRSA:
          return "CECPQ1_RSA";
        default:
          assert(0);
          return "UNKNOWN";
      }

    case SSL_kPSK:
      assert(cipher->algorithm_auth == SSL_aPSK);
      return "PSK";

    default:
      assert(0);
      return "UNKNOWN";
  }
}

static const char *ssl_cipher_get_enc_name(const SSL_CIPHER *cipher) {
  switch (cipher->algorithm_enc) {
    case SSL_3DES:
      return "3DES_EDE_CBC";
    case SSL_RC4:
      return "RC4";
    case SSL_AES128:
      return "AES_128_CBC";
    case SSL_AES256:
      return "AES_256_CBC";
    case SSL_AES128GCM:
      return "AES_128_GCM";
    case SSL_AES256GCM:
      return "AES_256_GCM";
    case SSL_CHACHA20POLY1305:
    case SSL_CHACHA20POLY1305_OLD:
      return "CHACHA20_POLY1305";
    default:
      assert(0);
      return "UNKNOWN";
  }
}

static const char *ssl_cipher_get_prf_name(const SSL_CIPHER *cipher) {
  switch (cipher->algorithm_prf) {
    case SSL_HANDSHAKE_MAC_DEFAULT:
      /* Before TLS 1.2, the PRF component is the hash used in the HMAC, which
       * is only ever MD5 or SHA-1. */
      switch (cipher->algorithm_mac) {
        case SSL_MD5:
          return "MD5";
        case SSL_SHA1:
          return "SHA";
      }
      break;
    case SSL_HANDSHAKE_MAC_SHA256:
      return "SHA256";
    case SSL_HANDSHAKE_MAC_SHA384:
      return "SHA384";
  }
  assert(0);
  return "UNKNOWN";
}

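/* SSL_CIPHER_get_rfc_name returns a newly-allocated string with the RFC-style
 * name of |cipher|, e.g. "TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256" for
 * ECDHE-RSA-AES128-GCM-SHA256, or NULL on error. The caller must release the
 * result with OPENSSL_free. */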
char *SSL_CIPHER_get_rfc_name(const SSL_CIPHER *cipher) {
  if (cipher == NULL) {
    return NULL;
  }

  const char *kx_name = SSL_CIPHER_get_kx_name(cipher);
  const char *enc_name = ssl_cipher_get_enc_name(cipher);
  const char *prf_name = ssl_cipher_get_prf_name(cipher);

  /* The final name is TLS_{kx_name}_WITH_{enc_name}_{prf_name}. */
  size_t len = 4 + strlen(kx_name) + 6 + strlen(enc_name) + 1 +
               strlen(prf_name) + 1;
  char *ret = OPENSSL_malloc(len);
  if (ret == NULL) {
    return NULL;
  }
  if (BUF_strlcpy(ret, "TLS_", len) >= len ||
      BUF_strlcat(ret, kx_name, len) >= len ||
      BUF_strlcat(ret, "_WITH_", len) >= len ||
      BUF_strlcat(ret, enc_name, len) >= len ||
      BUF_strlcat(ret, "_", len) >= len ||
      BUF_strlcat(ret, prf_name, len) >= len) {
    assert(0);
    OPENSSL_free(ret);
    return NULL;
  }
  assert(strlen(ret) + 1 == len);
  return ret;
}

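/* SSL_CIPHER_get_bits returns the strength of |cipher| in bits and, if
 * |out_alg_bits| is not NULL, stores the number of key-material bits there.
 * For example, 3DES reports 168 algorithm bits but only 112 bits of
 * strength. */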
int SSL_CIPHER_get_bits(const SSL_CIPHER *cipher, int *out_alg_bits) {
  if (cipher == NULL) {
    return 0;
  }

  int alg_bits, strength_bits;
  switch (cipher->algorithm_enc) {
    case SSL_AES128:
    case SSL_AES128GCM:
    case SSL_RC4:
      alg_bits = 128;
      strength_bits = 128;
      break;

    case SSL_AES256:
    case SSL_AES256GCM:
#if !defined(BORINGSSL_ANDROID_SYSTEM)
    case SSL_CHACHA20POLY1305_OLD:
#endif
    case SSL_CHACHA20POLY1305:
      alg_bits = 256;
      strength_bits = 256;
      break;

    case SSL_3DES:
      alg_bits = 168;
      strength_bits = 112;
      break;

    case SSL_eNULL:
      alg_bits = 0;
      strength_bits = 0;
      break;

    default:
      assert(0);
      alg_bits = 0;
      strength_bits = 0;
  }

  if (out_alg_bits != NULL) {
    *out_alg_bits = alg_bits;
  }
  return strength_bits;
}

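/* SSL_CIPHER_description writes a one-line summary of |cipher| to |buf|, or to
 * a freshly allocated 128-byte buffer if |buf| is NULL. An illustrative output
 * line for ECDHE-RSA-AES128-GCM-SHA256 is
 *
 *   ECDHE-RSA-AES128-GCM-SHA256 Kx=ECDH     Au=RSA  Enc=AESGCM(128) Mac=AEAD
 *
 * with the column widths coming from the BIO_snprintf format string below. */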
const char *SSL_CIPHER_description(const SSL_CIPHER *cipher, char *buf,
                                   int len) {
  const char *kx, *au, *enc, *mac;
  uint32_t alg_mkey, alg_auth, alg_enc, alg_mac;

  alg_mkey = cipher->algorithm_mkey;
  alg_auth = cipher->algorithm_auth;
  alg_enc = cipher->algorithm_enc;
  alg_mac = cipher->algorithm_mac;

  switch (alg_mkey) {
    case SSL_kRSA:
      kx = "RSA";
      break;

    case SSL_kDHE:
      kx = "DH";
      break;

    case SSL_kECDHE:
      kx = "ECDH";
      break;

    case SSL_kCECPQ1:
      kx = "CECPQ1";
      break;

    case SSL_kPSK:
      kx = "PSK";
      break;

    default:
      kx = "unknown";
  }

  switch (alg_auth) {
    case SSL_aRSA:
      au = "RSA";
      break;

    case SSL_aECDSA:
      au = "ECDSA";
      break;

    case SSL_aPSK:
      au = "PSK";
      break;

    default:
      au = "unknown";
      break;
  }

  switch (alg_enc) {
    case SSL_3DES:
      enc = "3DES(168)";
      break;

    case SSL_RC4:
      enc = "RC4(128)";
      break;

    case SSL_AES128:
      enc = "AES(128)";
      break;

    case SSL_AES256:
      enc = "AES(256)";
      break;

    case SSL_AES128GCM:
      enc = "AESGCM(128)";
      break;

    case SSL_AES256GCM:
      enc = "AESGCM(256)";
      break;

    case SSL_CHACHA20POLY1305_OLD:
      enc = "ChaCha20-Poly1305-Old";
      break;

    case SSL_CHACHA20POLY1305:
      enc = "ChaCha20-Poly1305";
      break;

    case SSL_eNULL:
      enc = "None";
      break;

    default:
      enc = "unknown";
      break;
  }

  switch (alg_mac) {
    case SSL_MD5:
      mac = "MD5";
      break;

    case SSL_SHA1:
      mac = "SHA1";
      break;

    case SSL_SHA256:
      mac = "SHA256";
      break;

    case SSL_SHA384:
      mac = "SHA384";
      break;

    case SSL_AEAD:
      mac = "AEAD";
      break;

    default:
      mac = "unknown";
      break;
  }

  if (buf == NULL) {
    len = 128;
    buf = OPENSSL_malloc(len);
    if (buf == NULL) {
      return NULL;
    }
  } else if (len < 128) {
    return "Buffer too small";
  }

  BIO_snprintf(buf, len, "%-23s Kx=%-8s Au=%-4s Enc=%-9s Mac=%-4s\n",
               cipher->name, kx, au, enc, mac);
  return buf;
}

const char *SSL_CIPHER_get_version(const SSL_CIPHER *cipher) {
  return "TLSv1/SSLv3";
}

COMP_METHOD *SSL_COMP_get_compression_methods(void) { return NULL; }

int SSL_COMP_add_compression_method(int id, COMP_METHOD *cm) { return 1; }

const char *SSL_COMP_get_name(const COMP_METHOD *comp) { return NULL; }

int ssl_cipher_get_key_type(const SSL_CIPHER *cipher) {
  uint32_t alg_a = cipher->algorithm_auth;

  if (alg_a & SSL_aECDSA) {
    return EVP_PKEY_EC;
  } else if (alg_a & SSL_aRSA) {
    return EVP_PKEY_RSA;
  }

  return EVP_PKEY_NONE;
}

int ssl_cipher_uses_certificate_auth(const SSL_CIPHER *cipher) {
  return (cipher->algorithm_auth & SSL_aCERT) != 0;
}

int ssl_cipher_requires_server_key_exchange(const SSL_CIPHER *cipher) {
  /* Ephemeral Diffie-Hellman key exchanges require a ServerKeyExchange. */
  if (cipher->algorithm_mkey & SSL_kDHE ||
      cipher->algorithm_mkey & SSL_kECDHE ||
      cipher->algorithm_mkey & SSL_kCECPQ1) {
    return 1;
  }

  /* It is optional in all others. */
  return 0;
}

size_t ssl_cipher_get_record_split_len(const SSL_CIPHER *cipher) {
  size_t block_size;
  switch (cipher->algorithm_enc) {
    case SSL_3DES:
      block_size = 8;
      break;
    case SSL_AES128:
    case SSL_AES256:
      block_size = 16;
      break;
    default:
      return 0;
  }

  size_t mac_len;
  switch (cipher->algorithm_mac) {
    case SSL_MD5:
      mac_len = MD5_DIGEST_LENGTH;
      break;
    case SSL_SHA1:
      mac_len = SHA_DIGEST_LENGTH;
      break;
    default:
      return 0;
  }

  size_t ret = 1 + mac_len;
  ret += block_size - (ret % block_size);
  return ret;
}
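
/* Worked example (illustrative): for an AES_128_CBC/SHA-1 cipher, block_size
 * is 16 and mac_len is SHA_DIGEST_LENGTH (20), so ret starts at 21 and is then
 * rounded up to the next block boundary, giving a record-splitting length of
 * 32 bytes. AEAD and RC4 suites return 0 because record splitting does not
 * apply to them. */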