/* Copyright (C) 1995-1998 Eric Young (eay@cryptsoft.com)
 * All rights reserved.
 *
 * This package is an SSL implementation written
 * by Eric Young (eay@cryptsoft.com).
 * The implementation was written so as to conform with Netscapes SSL.
 *
 * This library is free for commercial and non-commercial use as long as
 * the following conditions are aheared to.  The following conditions
 * apply to all code found in this distribution, be it the RC4, RSA,
 * lhash, DES, etc., code; not just the SSL code.  The SSL documentation
 * included with this distribution is covered by the same copyright terms
 * except that the holder is Tim Hudson (tjh@cryptsoft.com).
 *
 * Copyright remains Eric Young's, and as such any Copyright notices in
 * the code are not to be removed.
 * If this package is used in a product, Eric Young should be given attribution
 * as the author of the parts of the library used.
 * This can be in the form of a textual message at program startup or
 * in documentation (online or textual) provided with the package.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. All advertising materials mentioning features or use of this software
 *    must display the following acknowledgement:
 *    "This product includes cryptographic software written by
 *     Eric Young (eay@cryptsoft.com)"
 *    The word 'cryptographic' can be left out if the rouines from the library
 *    being used are not cryptographic related :-).
 * 4. If you include any Windows specific code (or a derivative thereof) from
 *    the apps directory (application code) you must include an acknowledgement:
 *    "This product includes software written by Tim Hudson (tjh@cryptsoft.com)"
 *
 * THIS SOFTWARE IS PROVIDED BY ERIC YOUNG ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 * The licence and distribution terms for any publically available version or
 * derivative of this code cannot be changed.  i.e. this code cannot simply be
 * copied and put under another distribution licence
 * [including the GNU Public Licence.]
 */
/* ====================================================================
 * Copyright (c) 1998-2007 The OpenSSL Project.  All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 *
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in
 *    the documentation and/or other materials provided with the
 *    distribution.
 *
 * 3. All advertising materials mentioning features or use of this
 *    software must display the following acknowledgment:
 *    "This product includes software developed by the OpenSSL Project
 *    for use in the OpenSSL Toolkit. (http://www.openssl.org/)"
 *
 * 4. The names "OpenSSL Toolkit" and "OpenSSL Project" must not be used to
 *    endorse or promote products derived from this software without
 *    prior written permission. For written permission, please contact
 *    openssl-core@openssl.org.
 *
 * 5. Products derived from this software may not be called "OpenSSL"
 *    nor may "OpenSSL" appear in their names without prior written
 *    permission of the OpenSSL Project.
 *
 * 6. Redistributions of any form whatsoever must retain the following
 *    acknowledgment:
 *    "This product includes software developed by the OpenSSL Project
 *    for use in the OpenSSL Toolkit (http://www.openssl.org/)"
 *
 * THIS SOFTWARE IS PROVIDED BY THE OpenSSL PROJECT ``AS IS'' AND ANY
 * EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE OpenSSL PROJECT OR
 * ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
 * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
 * OF THE POSSIBILITY OF SUCH DAMAGE.
 * ====================================================================
 *
 * This product includes cryptographic software written by Eric Young
 * (eay@cryptsoft.com).  This product includes software written by Tim
 * Hudson (tjh@cryptsoft.com).
 *
 */
/* ====================================================================
 * Copyright 2002 Sun Microsystems, Inc. ALL RIGHTS RESERVED.
 * ECC cipher suite support in OpenSSL originally developed by
 * SUN MICROSYSTEMS, INC., and contributed to the OpenSSL project.
 */
/* ====================================================================
 * Copyright 2005 Nokia. All rights reserved.
 *
 * The portions of the attached software ("Contribution") is developed by
 * Nokia Corporation and is licensed pursuant to the OpenSSL open source
 * license.
 *
 * The Contribution, originally written by Mika Kousa and Pasi Eronen of
 * Nokia Corporation, consists of the "PSK" (Pre-Shared Key) ciphersuites
 * support (see RFC 4279) to OpenSSL.
 *
 * No patent licenses or other rights except those expressly stated in
 * the OpenSSL open source license shall be deemed granted or received
 * expressly, by implication, estoppel, or otherwise.
 *
 * No assurances are provided by Nokia that the Contribution does not
 * infringe the patent or other intellectual property rights of any third
 * party or that the license provides you with all the necessary rights
 * to make use of the Contribution.
 *
 * THE SOFTWARE IS PROVIDED "AS IS" WITHOUT WARRANTY OF ANY KIND. IN
 * ADDITION TO THE DISCLAIMERS INCLUDED IN THE LICENSE, NOKIA
 * SPECIFICALLY DISCLAIMS ANY LIABILITY FOR CLAIMS BROUGHT BY YOU OR ANY
 * OTHER ENTITY BASED ON INFRINGEMENT OF INTELLECTUAL PROPERTY RIGHTS OR
 * OTHERWISE. */

#include <openssl/ssl.h>

#include <assert.h>
#include <string.h>

#include <openssl/buf.h>
#include <openssl/err.h>
#include <openssl/md5.h>
#include <openssl/mem.h>
#include <openssl/sha.h>
#include <openssl/stack.h>

#include "internal.h"
#include "../crypto/internal.h"


/* kCiphers is an array of all supported ciphers, sorted by id. */
static const SSL_CIPHER kCiphers[] = {
    /* The RSA ciphers */
    /* Cipher 02 */
    {
        SSL3_TXT_RSA_NULL_SHA,
        SSL3_CK_RSA_NULL_SHA,
        SSL_kRSA,
        SSL_aRSA,
        SSL_eNULL,
        SSL_SHA1,
        SSL_HANDSHAKE_MAC_DEFAULT,
    },

    /* Cipher 0A */
    {
        SSL3_TXT_RSA_DES_192_CBC3_SHA,
        SSL3_CK_RSA_DES_192_CBC3_SHA,
        SSL_kRSA,
        SSL_aRSA,
        SSL_3DES,
        SSL_SHA1,
        SSL_HANDSHAKE_MAC_DEFAULT,
    },


    /* New AES ciphersuites */

    /* Cipher 2F */
    {
        TLS1_TXT_RSA_WITH_AES_128_SHA,
        TLS1_CK_RSA_WITH_AES_128_SHA,
        SSL_kRSA,
        SSL_aRSA,
        SSL_AES128,
        SSL_SHA1,
        SSL_HANDSHAKE_MAC_DEFAULT,
    },

    /* Cipher 33 */
    {
        TLS1_TXT_DHE_RSA_WITH_AES_128_SHA,
        TLS1_CK_DHE_RSA_WITH_AES_128_SHA,
        SSL_kDHE,
        SSL_aRSA,
        SSL_AES128,
        SSL_SHA1,
        SSL_HANDSHAKE_MAC_DEFAULT,
    },

    /* Cipher 35 */
    {
        TLS1_TXT_RSA_WITH_AES_256_SHA,
        TLS1_CK_RSA_WITH_AES_256_SHA,
        SSL_kRSA,
        SSL_aRSA,
        SSL_AES256,
        SSL_SHA1,
        SSL_HANDSHAKE_MAC_DEFAULT,
    },

    /* Cipher 39 */
    {
        TLS1_TXT_DHE_RSA_WITH_AES_256_SHA,
        TLS1_CK_DHE_RSA_WITH_AES_256_SHA,
        SSL_kDHE,
        SSL_aRSA,
        SSL_AES256,
        SSL_SHA1,
        SSL_HANDSHAKE_MAC_DEFAULT,
    },


    /* TLS v1.2 ciphersuites */

    /* Cipher 3C */
    {
        TLS1_TXT_RSA_WITH_AES_128_SHA256,
        TLS1_CK_RSA_WITH_AES_128_SHA256,
        SSL_kRSA,
        SSL_aRSA,
        SSL_AES128,
        SSL_SHA256,
        SSL_HANDSHAKE_MAC_SHA256,
    },

    /* Cipher 3D */
    {
        TLS1_TXT_RSA_WITH_AES_256_SHA256,
        TLS1_CK_RSA_WITH_AES_256_SHA256,
        SSL_kRSA,
        SSL_aRSA,
        SSL_AES256,
        SSL_SHA256,
        SSL_HANDSHAKE_MAC_SHA256,
    },

    /* Cipher 67 */
    {
        TLS1_TXT_DHE_RSA_WITH_AES_128_SHA256,
        TLS1_CK_DHE_RSA_WITH_AES_128_SHA256,
        SSL_kDHE,
        SSL_aRSA,
        SSL_AES128,
        SSL_SHA256,
        SSL_HANDSHAKE_MAC_SHA256,
    },

    /* Cipher 6B */
    {
        TLS1_TXT_DHE_RSA_WITH_AES_256_SHA256,
        TLS1_CK_DHE_RSA_WITH_AES_256_SHA256,
        SSL_kDHE,
        SSL_aRSA,
        SSL_AES256,
        SSL_SHA256,
        SSL_HANDSHAKE_MAC_SHA256,
    },

    /* PSK cipher suites. */

    /* Cipher 8C */
    {
        TLS1_TXT_PSK_WITH_AES_128_CBC_SHA,
        TLS1_CK_PSK_WITH_AES_128_CBC_SHA,
        SSL_kPSK,
        SSL_aPSK,
        SSL_AES128,
        SSL_SHA1,
        SSL_HANDSHAKE_MAC_DEFAULT,
    },

    /* Cipher 8D */
    {
        TLS1_TXT_PSK_WITH_AES_256_CBC_SHA,
        TLS1_CK_PSK_WITH_AES_256_CBC_SHA,
        SSL_kPSK,
        SSL_aPSK,
        SSL_AES256,
        SSL_SHA1,
        SSL_HANDSHAKE_MAC_DEFAULT,
    },

    /* GCM ciphersuites from RFC5288 */

    /* Cipher 9C */
    {
        TLS1_TXT_RSA_WITH_AES_128_GCM_SHA256,
        TLS1_CK_RSA_WITH_AES_128_GCM_SHA256,
        SSL_kRSA,
        SSL_aRSA,
        SSL_AES128GCM,
        SSL_AEAD,
        SSL_HANDSHAKE_MAC_SHA256,
    },

    /* Cipher 9D */
    {
        TLS1_TXT_RSA_WITH_AES_256_GCM_SHA384,
        TLS1_CK_RSA_WITH_AES_256_GCM_SHA384,
        SSL_kRSA,
        SSL_aRSA,
        SSL_AES256GCM,
        SSL_AEAD,
        SSL_HANDSHAKE_MAC_SHA384,
    },

    /* Cipher 9E */
    {
        TLS1_TXT_DHE_RSA_WITH_AES_128_GCM_SHA256,
        TLS1_CK_DHE_RSA_WITH_AES_128_GCM_SHA256,
        SSL_kDHE,
        SSL_aRSA,
        SSL_AES128GCM,
        SSL_AEAD,
        SSL_HANDSHAKE_MAC_SHA256,
    },

    /* Cipher 9F */
    {
        TLS1_TXT_DHE_RSA_WITH_AES_256_GCM_SHA384,
        TLS1_CK_DHE_RSA_WITH_AES_256_GCM_SHA384,
        SSL_kDHE,
        SSL_aRSA,
        SSL_AES256GCM,
        SSL_AEAD,
        SSL_HANDSHAKE_MAC_SHA384,
    },

    /* CECPQ1 (combined elliptic curve + post-quantum) suites. */

    /* Cipher 16B7 */
    {
        TLS1_TXT_CECPQ1_RSA_WITH_CHACHA20_POLY1305_SHA256,
        TLS1_CK_CECPQ1_RSA_WITH_CHACHA20_POLY1305_SHA256,
        SSL_kCECPQ1,
        SSL_aRSA,
        SSL_CHACHA20POLY1305,
        SSL_AEAD,
        SSL_HANDSHAKE_MAC_SHA256,
    },

    /* Cipher 16B8 */
    {
        TLS1_TXT_CECPQ1_ECDSA_WITH_CHACHA20_POLY1305_SHA256,
        TLS1_CK_CECPQ1_ECDSA_WITH_CHACHA20_POLY1305_SHA256,
        SSL_kCECPQ1,
        SSL_aECDSA,
        SSL_CHACHA20POLY1305,
        SSL_AEAD,
        SSL_HANDSHAKE_MAC_SHA256,
    },

    /* Cipher 16B9 */
    {
        TLS1_TXT_CECPQ1_RSA_WITH_AES_256_GCM_SHA384,
        TLS1_CK_CECPQ1_RSA_WITH_AES_256_GCM_SHA384,
        SSL_kCECPQ1,
        SSL_aRSA,
        SSL_AES256GCM,
        SSL_AEAD,
        SSL_HANDSHAKE_MAC_SHA384,
    },

    /* Cipher 16BA */
    {
        TLS1_TXT_CECPQ1_ECDSA_WITH_AES_256_GCM_SHA384,
        TLS1_CK_CECPQ1_ECDSA_WITH_AES_256_GCM_SHA384,
        SSL_kCECPQ1,
        SSL_aECDSA,
        SSL_AES256GCM,
        SSL_AEAD,
        SSL_HANDSHAKE_MAC_SHA384,
    },

    /* Cipher C009 */
    {
        TLS1_TXT_ECDHE_ECDSA_WITH_AES_128_CBC_SHA,
        TLS1_CK_ECDHE_ECDSA_WITH_AES_128_CBC_SHA,
        SSL_kECDHE,
        SSL_aECDSA,
        SSL_AES128,
        SSL_SHA1,
        SSL_HANDSHAKE_MAC_DEFAULT,
    },

    /* Cipher C00A */
    {
        TLS1_TXT_ECDHE_ECDSA_WITH_AES_256_CBC_SHA,
        TLS1_CK_ECDHE_ECDSA_WITH_AES_256_CBC_SHA,
        SSL_kECDHE,
        SSL_aECDSA,
        SSL_AES256,
        SSL_SHA1,
        SSL_HANDSHAKE_MAC_DEFAULT,
    },

    /* Cipher C013 */
    {
        TLS1_TXT_ECDHE_RSA_WITH_AES_128_CBC_SHA,
        TLS1_CK_ECDHE_RSA_WITH_AES_128_CBC_SHA,
        SSL_kECDHE,
        SSL_aRSA,
        SSL_AES128,
        SSL_SHA1,
        SSL_HANDSHAKE_MAC_DEFAULT,
    },

    /* Cipher C014 */
    {
        TLS1_TXT_ECDHE_RSA_WITH_AES_256_CBC_SHA,
        TLS1_CK_ECDHE_RSA_WITH_AES_256_CBC_SHA,
        SSL_kECDHE,
        SSL_aRSA,
        SSL_AES256,
        SSL_SHA1,
        SSL_HANDSHAKE_MAC_DEFAULT,
    },


    /* HMAC based TLS v1.2 ciphersuites from RFC5289 */

    /* Cipher C023 */
    {
        TLS1_TXT_ECDHE_ECDSA_WITH_AES_128_SHA256,
        TLS1_CK_ECDHE_ECDSA_WITH_AES_128_SHA256,
        SSL_kECDHE,
        SSL_aECDSA,
        SSL_AES128,
        SSL_SHA256,
        SSL_HANDSHAKE_MAC_SHA256,
    },

    /* Cipher C024 */
    {
        TLS1_TXT_ECDHE_ECDSA_WITH_AES_256_SHA384,
        TLS1_CK_ECDHE_ECDSA_WITH_AES_256_SHA384,
        SSL_kECDHE,
        SSL_aECDSA,
        SSL_AES256,
        SSL_SHA384,
        SSL_HANDSHAKE_MAC_SHA384,
    },

    /* Cipher C027 */
    {
        TLS1_TXT_ECDHE_RSA_WITH_AES_128_SHA256,
        TLS1_CK_ECDHE_RSA_WITH_AES_128_SHA256,
        SSL_kECDHE,
        SSL_aRSA,
        SSL_AES128,
        SSL_SHA256,
        SSL_HANDSHAKE_MAC_SHA256,
    },

    /* Cipher C028 */
    {
        TLS1_TXT_ECDHE_RSA_WITH_AES_256_SHA384,
        TLS1_CK_ECDHE_RSA_WITH_AES_256_SHA384,
        SSL_kECDHE,
        SSL_aRSA,
        SSL_AES256,
        SSL_SHA384,
        SSL_HANDSHAKE_MAC_SHA384,
    },


    /* GCM based TLS v1.2 ciphersuites from RFC5289 */

    /* Cipher C02B */
    {
        TLS1_TXT_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256,
        TLS1_CK_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256,
        SSL_kECDHE,
        SSL_aECDSA,
        SSL_AES128GCM,
        SSL_AEAD,
        SSL_HANDSHAKE_MAC_SHA256,
    },

    /* Cipher C02C */
    {
        TLS1_TXT_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384,
        TLS1_CK_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384,
        SSL_kECDHE,
        SSL_aECDSA,
        SSL_AES256GCM,
        SSL_AEAD,
        SSL_HANDSHAKE_MAC_SHA384,
    },

    /* Cipher C02F */
    {
        TLS1_TXT_ECDHE_RSA_WITH_AES_128_GCM_SHA256,
        TLS1_CK_ECDHE_RSA_WITH_AES_128_GCM_SHA256,
        SSL_kECDHE,
        SSL_aRSA,
        SSL_AES128GCM,
        SSL_AEAD,
        SSL_HANDSHAKE_MAC_SHA256,
    },

    /* Cipher C030 */
    {
        TLS1_TXT_ECDHE_RSA_WITH_AES_256_GCM_SHA384,
        TLS1_CK_ECDHE_RSA_WITH_AES_256_GCM_SHA384,
        SSL_kECDHE,
        SSL_aRSA,
        SSL_AES256GCM,
        SSL_AEAD,
        SSL_HANDSHAKE_MAC_SHA384,
    },

    /* ECDHE-PSK cipher suites. */

    /* Cipher C035 */
    {
        TLS1_TXT_ECDHE_PSK_WITH_AES_128_CBC_SHA,
        TLS1_CK_ECDHE_PSK_WITH_AES_128_CBC_SHA,
        SSL_kECDHE,
        SSL_aPSK,
        SSL_AES128,
        SSL_SHA1,
        SSL_HANDSHAKE_MAC_DEFAULT,
    },

    /* Cipher C036 */
    {
        TLS1_TXT_ECDHE_PSK_WITH_AES_256_CBC_SHA,
        TLS1_CK_ECDHE_PSK_WITH_AES_256_CBC_SHA,
        SSL_kECDHE,
        SSL_aPSK,
        SSL_AES256,
        SSL_SHA1,
        SSL_HANDSHAKE_MAC_DEFAULT,
    },

    /* ChaCha20-Poly1305 cipher suites. */

#if !defined(BORINGSSL_ANDROID_SYSTEM)
    {
        TLS1_TXT_ECDHE_RSA_WITH_CHACHA20_POLY1305_OLD,
        TLS1_CK_ECDHE_RSA_CHACHA20_POLY1305_OLD,
        SSL_kECDHE,
        SSL_aRSA,
        SSL_CHACHA20POLY1305_OLD,
        SSL_AEAD,
        SSL_HANDSHAKE_MAC_SHA256,
    },

    {
        TLS1_TXT_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_OLD,
        TLS1_CK_ECDHE_ECDSA_CHACHA20_POLY1305_OLD,
        SSL_kECDHE,
        SSL_aECDSA,
        SSL_CHACHA20POLY1305_OLD,
        SSL_AEAD,
        SSL_HANDSHAKE_MAC_SHA256,
    },
#endif

    /* Cipher CCA8 */
    {
        TLS1_TXT_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256,
        TLS1_CK_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256,
        SSL_kECDHE,
        SSL_aRSA,
        SSL_CHACHA20POLY1305,
        SSL_AEAD,
        SSL_HANDSHAKE_MAC_SHA256,
    },

    /* Cipher CCA9 */
    {
        TLS1_TXT_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256,
        TLS1_CK_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256,
        SSL_kECDHE,
        SSL_aECDSA,
        SSL_CHACHA20POLY1305,
        SSL_AEAD,
        SSL_HANDSHAKE_MAC_SHA256,
    },

    /* Cipher CCAB */
    {
        TLS1_TXT_ECDHE_PSK_WITH_CHACHA20_POLY1305_SHA256,
        TLS1_CK_ECDHE_PSK_WITH_CHACHA20_POLY1305_SHA256,
        SSL_kECDHE,
        SSL_aPSK,
        SSL_CHACHA20POLY1305,
        SSL_AEAD,
        SSL_HANDSHAKE_MAC_SHA256,
    },

    /* Cipher D001 */
    {
        TLS1_TXT_ECDHE_PSK_WITH_AES_128_GCM_SHA256,
        TLS1_CK_ECDHE_PSK_WITH_AES_128_GCM_SHA256,
        SSL_kECDHE,
        SSL_aPSK,
        SSL_AES128GCM,
        SSL_AEAD,
        SSL_HANDSHAKE_MAC_SHA256,
    },

    /* Cipher D002 */
    {
        TLS1_TXT_ECDHE_PSK_WITH_AES_256_GCM_SHA384,
        TLS1_CK_ECDHE_PSK_WITH_AES_256_GCM_SHA384,
        SSL_kECDHE,
        SSL_aPSK,
        SSL_AES256GCM,
        SSL_AEAD,
        SSL_HANDSHAKE_MAC_SHA384,
    },

};

static const size_t kCiphersLen = OPENSSL_ARRAY_SIZE(kCiphers);

#define CIPHER_ADD 1
#define CIPHER_KILL 2
#define CIPHER_DEL 3
#define CIPHER_ORD 4
#define CIPHER_SPECIAL 5

typedef struct cipher_order_st {
  const SSL_CIPHER *cipher;
  int active;
  int in_group;
  struct cipher_order_st *next, *prev;
} CIPHER_ORDER;

typedef struct cipher_alias_st {
  /* name is the name of the cipher alias. */
  const char *name;

  /* The following fields are bitmasks for the corresponding fields on
   * |SSL_CIPHER|. A cipher matches a cipher alias iff, for each bitmask, the
   * bit corresponding to the cipher's value is set to 1. If any bitmask is
   * all zeroes, the alias matches nothing. Use |~0u| for the default value. */
  uint32_t algorithm_mkey;
  uint32_t algorithm_auth;
  uint32_t algorithm_enc;
  uint32_t algorithm_mac;

  /* min_version, if non-zero, matches all ciphers which were added in that
   * particular protocol version. */
  uint16_t min_version;
} CIPHER_ALIAS;

static const CIPHER_ALIAS kCipherAliases[] = {
    /* "ALL" doesn't include eNULL nor kCECPQ1. These must be explicitly
     * enabled. */
    {"ALL", ~SSL_kCECPQ1, ~0u, ~SSL_eNULL, ~0u, 0},

    /* The "COMPLEMENTOFDEFAULT" rule is omitted. It matches nothing. */

    /* key exchange aliases
     * (some of those using only a single bit here combine
     * multiple key exchange algs according to the RFCs,
     * e.g. kEDH combines DHE_DSS and DHE_RSA) */
    {"kRSA", SSL_kRSA, ~0u, ~0u, ~0u, 0},

    {"kDHE", SSL_kDHE, ~0u, ~0u, ~0u, 0},
    {"kEDH", SSL_kDHE, ~0u, ~0u, ~0u, 0},
    {"DH", SSL_kDHE, ~0u, ~0u, ~0u, 0},

    {"kECDHE", SSL_kECDHE, ~0u, ~0u, ~0u, 0},
    {"kCECPQ1", SSL_kCECPQ1, ~0u, ~0u, ~0u, 0},
    {"kEECDH", SSL_kECDHE, ~0u, ~0u, ~0u, 0},
    {"ECDH", SSL_kECDHE, ~0u, ~0u, ~0u, 0},

    {"kPSK", SSL_kPSK, ~0u, ~0u, ~0u, 0},

    /* server authentication aliases */
    {"aRSA", ~SSL_kCECPQ1, SSL_aRSA, ~SSL_eNULL, ~0u, 0},
    {"aECDSA", ~SSL_kCECPQ1, SSL_aECDSA, ~0u, ~0u, 0},
    {"ECDSA", ~SSL_kCECPQ1, SSL_aECDSA, ~0u, ~0u, 0},
    {"aPSK", ~0u, SSL_aPSK, ~0u, ~0u, 0},

    /* aliases combining key exchange and server authentication */
    {"DHE", SSL_kDHE, ~0u, ~0u, ~0u, 0},
    {"EDH", SSL_kDHE, ~0u, ~0u, ~0u, 0},
    {"ECDHE", SSL_kECDHE, ~0u, ~0u, ~0u, 0},
    {"EECDH", SSL_kECDHE, ~0u, ~0u, ~0u, 0},
    {"RSA", SSL_kRSA, SSL_aRSA, ~SSL_eNULL, ~0u, 0},
    {"PSK", SSL_kPSK, SSL_aPSK, ~0u, ~0u, 0},

    /* symmetric encryption aliases */
    {"3DES", ~0u, ~0u, SSL_3DES, ~0u, 0},
    {"AES128", ~0u, ~0u, SSL_AES128 | SSL_AES128GCM, ~0u, 0},
    {"AES256", ~SSL_kCECPQ1, ~0u, SSL_AES256 | SSL_AES256GCM, ~0u, 0},
    {"AES", ~SSL_kCECPQ1, ~0u, SSL_AES, ~0u, 0},
    {"AESGCM", ~SSL_kCECPQ1, ~0u, SSL_AES128GCM | SSL_AES256GCM, ~0u, 0},
    {"CHACHA20", ~SSL_kCECPQ1, ~0u,
     SSL_CHACHA20POLY1305 | SSL_CHACHA20POLY1305_OLD, ~0u, 0},

    /* MAC aliases */
    {"MD5", ~0u, ~0u, ~0u, SSL_MD5, 0},
    {"SHA1", ~0u, ~0u, ~SSL_eNULL, SSL_SHA1, 0},
    {"SHA", ~0u, ~0u, ~SSL_eNULL, SSL_SHA1, 0},
    {"SHA256", ~SSL_kCECPQ1, ~0u, ~0u, SSL_SHA256, 0},
    {"SHA384", ~SSL_kCECPQ1, ~0u, ~0u, SSL_SHA384, 0},

    /* Legacy protocol minimum version aliases. "TLSv1" is intentionally the
     * same as "SSLv3". */
    {"SSLv3", ~SSL_kCECPQ1, ~0u, ~SSL_eNULL, ~0u, SSL3_VERSION},
    {"TLSv1", ~SSL_kCECPQ1, ~0u, ~SSL_eNULL, ~0u, SSL3_VERSION},
    {"TLSv1.2", ~SSL_kCECPQ1, ~0u, ~SSL_eNULL, ~0u, TLS1_2_VERSION},

    /* Legacy strength classes. */
    {"HIGH", ~SSL_kCECPQ1, ~0u, ~SSL_eNULL, ~0u, 0},
    {"FIPS", ~SSL_kCECPQ1, ~0u, ~SSL_eNULL, ~0u, 0},
};

static const size_t kCipherAliasesLen = OPENSSL_ARRAY_SIZE(kCipherAliases);
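
/* Illustrative note, not part of the upstream file: a cipher-string rule is
 * matched first against |kCiphers| by exact name and then against these
 * aliases by intersecting bitmasks. A minimal sketch of how a caller might
 * use the aliases above (the |ctx| object is assumed to exist):
 *
 *   // "ECDHE+AESGCM" intersects the "ECDHE" and "AESGCM" aliases, so it
 *   // selects only ECDHE key exchange with AES-128-GCM or AES-256-GCM.
 *   SSL_CTX_set_cipher_list(ctx, "ECDHE+AESGCM:ECDHE+CHACHA20");
 */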

static int ssl_cipher_id_cmp(const void *in_a, const void *in_b) {
  const SSL_CIPHER *a = in_a;
  const SSL_CIPHER *b = in_b;

  if (a->id > b->id) {
    return 1;
  } else if (a->id < b->id) {
    return -1;
  } else {
    return 0;
  }
}

static int ssl_cipher_ptr_id_cmp(const SSL_CIPHER **a, const SSL_CIPHER **b) {
  return ssl_cipher_id_cmp(*a, *b);
}

const SSL_CIPHER *SSL_get_cipher_by_value(uint16_t value) {
  SSL_CIPHER c;

  c.id = 0x03000000L | value;
  return bsearch(&c, kCiphers, kCiphersLen, sizeof(SSL_CIPHER),
                 ssl_cipher_id_cmp);
}
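
/* Example, added for illustration and not in the upstream file: cipher IDs in
 * |kCiphers| are the two-byte IANA value prefixed with 0x0300, so the lookup
 * for TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256 (0xC02F) is:
 *
 *   const SSL_CIPHER *cipher = SSL_get_cipher_by_value(0xc02f);
 *   assert(SSL_CIPHER_get_id(cipher) ==
 *          TLS1_CK_ECDHE_RSA_WITH_AES_128_GCM_SHA256);
 *
 * The bsearch above depends on |kCiphers| being sorted by id. */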

int ssl_cipher_get_evp_aead(const EVP_AEAD **out_aead,
                            size_t *out_mac_secret_len,
                            size_t *out_fixed_iv_len,
                            const SSL_CIPHER *cipher, uint16_t version) {
  *out_aead = NULL;
  *out_mac_secret_len = 0;
  *out_fixed_iv_len = 0;

  switch (cipher->algorithm_enc) {
    case SSL_AES128GCM:
      *out_aead = EVP_aead_aes_128_gcm();
      *out_fixed_iv_len = 4;
      break;

    case SSL_AES256GCM:
      *out_aead = EVP_aead_aes_256_gcm();
      *out_fixed_iv_len = 4;
      break;

#if !defined(BORINGSSL_ANDROID_SYSTEM)
    case SSL_CHACHA20POLY1305_OLD:
      *out_aead = EVP_aead_chacha20_poly1305_old();
      *out_fixed_iv_len = 0;
      break;
#endif

    case SSL_CHACHA20POLY1305:
      *out_aead = EVP_aead_chacha20_poly1305();
      *out_fixed_iv_len = 12;
      break;

    case SSL_AES128:
      switch (cipher->algorithm_mac) {
        case SSL_SHA1:
          if (version == SSL3_VERSION) {
            *out_aead = EVP_aead_aes_128_cbc_sha1_ssl3();
            *out_fixed_iv_len = 16;
          } else if (version == TLS1_VERSION) {
            *out_aead = EVP_aead_aes_128_cbc_sha1_tls_implicit_iv();
            *out_fixed_iv_len = 16;
          } else {
            *out_aead = EVP_aead_aes_128_cbc_sha1_tls();
          }
          *out_mac_secret_len = SHA_DIGEST_LENGTH;
          break;
        case SSL_SHA256:
          *out_aead = EVP_aead_aes_128_cbc_sha256_tls();
          *out_mac_secret_len = SHA256_DIGEST_LENGTH;
          break;
        default:
          return 0;
      }
      break;

    case SSL_AES256:
      switch (cipher->algorithm_mac) {
        case SSL_SHA1:
          if (version == SSL3_VERSION) {
            *out_aead = EVP_aead_aes_256_cbc_sha1_ssl3();
            *out_fixed_iv_len = 16;
          } else if (version == TLS1_VERSION) {
            *out_aead = EVP_aead_aes_256_cbc_sha1_tls_implicit_iv();
            *out_fixed_iv_len = 16;
          } else {
            *out_aead = EVP_aead_aes_256_cbc_sha1_tls();
          }
          *out_mac_secret_len = SHA_DIGEST_LENGTH;
          break;
        case SSL_SHA256:
          *out_aead = EVP_aead_aes_256_cbc_sha256_tls();
          *out_mac_secret_len = SHA256_DIGEST_LENGTH;
          break;
        case SSL_SHA384:
          *out_aead = EVP_aead_aes_256_cbc_sha384_tls();
          *out_mac_secret_len = SHA384_DIGEST_LENGTH;
          break;
        default:
          return 0;
      }
      break;

    case SSL_3DES:
      switch (cipher->algorithm_mac) {
        case SSL_SHA1:
          if (version == SSL3_VERSION) {
            *out_aead = EVP_aead_des_ede3_cbc_sha1_ssl3();
            *out_fixed_iv_len = 8;
          } else if (version == TLS1_VERSION) {
            *out_aead = EVP_aead_des_ede3_cbc_sha1_tls_implicit_iv();
            *out_fixed_iv_len = 8;
          } else {
            *out_aead = EVP_aead_des_ede3_cbc_sha1_tls();
          }
          *out_mac_secret_len = SHA_DIGEST_LENGTH;
          break;
        default:
          return 0;
      }
      break;

    case SSL_eNULL:
      switch (cipher->algorithm_mac) {
        case SSL_SHA1:
          if (version == SSL3_VERSION) {
            *out_aead = EVP_aead_null_sha1_ssl3();
          } else {
            *out_aead = EVP_aead_null_sha1_tls();
          }
          *out_mac_secret_len = SHA_DIGEST_LENGTH;
          break;
        default:
          return 0;
      }
      break;

    default:
      return 0;
  }

  /* In TLS 1.3, the iv_len is equal to the AEAD nonce length whereas the code
   * above computes the TLS 1.2 construction.
   *
   * TODO(davidben,svaldez): Avoid computing the wrong value and fixing it. */
  if (version >= TLS1_3_VERSION) {
    *out_fixed_iv_len = EVP_AEAD_nonce_length(*out_aead);
    assert(*out_fixed_iv_len >= 8);
  }
  return 1;
}
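
/* Worked example (illustrative; not part of the original source): for the
 * AES-128-GCM suite 0xC02F at TLS 1.2, the switch above yields the AEAD
 * EVP_aead_aes_128_gcm() with a 4-byte fixed IV and no separate MAC key:
 *
 *   const EVP_AEAD *aead;
 *   size_t mac_secret_len, fixed_iv_len;
 *   ssl_cipher_get_evp_aead(&aead, &mac_secret_len, &fixed_iv_len,
 *                           SSL_get_cipher_by_value(0xc02f), TLS1_2_VERSION);
 *   // aead == EVP_aead_aes_128_gcm(), mac_secret_len == 0, fixed_iv_len == 4
 *
 * A CBC suite at TLS 1.0 instead selects one of the *_implicit_iv AEADs and a
 * non-zero |*out_mac_secret_len|. */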

const EVP_MD *ssl_get_handshake_digest(uint32_t algorithm_prf) {
  switch (algorithm_prf) {
    case SSL_HANDSHAKE_MAC_DEFAULT:
      return EVP_sha1();
    case SSL_HANDSHAKE_MAC_SHA256:
      return EVP_sha256();
    case SSL_HANDSHAKE_MAC_SHA384:
      return EVP_sha384();
    default:
      return NULL;
  }
}

#define ITEM_SEP(a) \
  (((a) == ':') || ((a) == ' ') || ((a) == ';') || ((a) == ','))

/* rule_equals returns one iff the NUL-terminated string |rule| is equal to the
 * |buf_len| bytes at |buf|. */
static int rule_equals(const char *rule, const char *buf, size_t buf_len) {
  /* |strncmp| alone only checks that |buf| is a prefix of |rule|. */
  return strncmp(rule, buf, buf_len) == 0 && rule[buf_len] == '\0';
}

static void ll_append_tail(CIPHER_ORDER **head, CIPHER_ORDER *curr,
                           CIPHER_ORDER **tail) {
  if (curr == *tail) {
    return;
  }
  if (curr == *head) {
    *head = curr->next;
  }
  if (curr->prev != NULL) {
    curr->prev->next = curr->next;
  }
  if (curr->next != NULL) {
    curr->next->prev = curr->prev;
  }
  (*tail)->next = curr;
  curr->prev = *tail;
  curr->next = NULL;
  *tail = curr;
}

static void ll_append_head(CIPHER_ORDER **head, CIPHER_ORDER *curr,
                           CIPHER_ORDER **tail) {
  if (curr == *head) {
    return;
  }
  if (curr == *tail) {
    *tail = curr->prev;
  }
  if (curr->next != NULL) {
    curr->next->prev = curr->prev;
  }
  if (curr->prev != NULL) {
    curr->prev->next = curr->next;
  }
  (*head)->prev = curr;
  curr->next = *head;
  curr->prev = NULL;
  *head = curr;
}

static void ssl_cipher_collect_ciphers(const SSL_PROTOCOL_METHOD *ssl_method,
                                       CIPHER_ORDER *co_list,
                                       CIPHER_ORDER **head_p,
                                       CIPHER_ORDER **tail_p) {
  /* The set of ciphers is static, but some subset may be unsupported by
   * |ssl_method|, so the list may be smaller. */
  size_t co_list_num = 0;
  for (size_t i = 0; i < kCiphersLen; i++) {
    const SSL_CIPHER *cipher = &kCiphers[i];
    if (ssl_method->supports_cipher(cipher)) {
      co_list[co_list_num].cipher = cipher;
      co_list[co_list_num].next = NULL;
      co_list[co_list_num].prev = NULL;
      co_list[co_list_num].active = 0;
      co_list[co_list_num].in_group = 0;
      co_list_num++;
    }
  }

  /* Prepare linked list from list entries. */
  if (co_list_num > 0) {
    co_list[0].prev = NULL;

    if (co_list_num > 1) {
      co_list[0].next = &co_list[1];

      for (size_t i = 1; i < co_list_num - 1; i++) {
        co_list[i].prev = &co_list[i - 1];
        co_list[i].next = &co_list[i + 1];
      }

      co_list[co_list_num - 1].prev = &co_list[co_list_num - 2];
    }

    co_list[co_list_num - 1].next = NULL;

    *head_p = &co_list[0];
    *tail_p = &co_list[co_list_num - 1];
  }
}

/* ssl_cipher_apply_rule applies the rule type |rule| to ciphers matching its
 * parameters in the linked list from |*head_p| to |*tail_p|. It writes the new
 * head and tail of the list to |*head_p| and |*tail_p|, respectively.
 *
 * - If |cipher_id| is non-zero, only that cipher is selected.
 * - Otherwise, if |strength_bits| is non-negative, it selects ciphers
 *   of that strength.
 * - Otherwise, it selects ciphers that match each bitmask in |alg_*| and
 *   |min_version|. */
static void ssl_cipher_apply_rule(
    uint32_t cipher_id, uint32_t alg_mkey, uint32_t alg_auth,
    uint32_t alg_enc, uint32_t alg_mac, uint16_t min_version, int rule,
    int strength_bits, int in_group, CIPHER_ORDER **head_p,
    CIPHER_ORDER **tail_p) {
  CIPHER_ORDER *head, *tail, *curr, *next, *last;
  const SSL_CIPHER *cp;
  int reverse = 0;

  if (cipher_id == 0 && strength_bits == -1 && min_version == 0 &&
      (alg_mkey == 0 || alg_auth == 0 || alg_enc == 0 || alg_mac == 0)) {
    /* The rule matches nothing, so bail early. */
    return;
  }

  if (rule == CIPHER_DEL) {
    /* needed to maintain sorting between currently deleted ciphers */
    reverse = 1;
  }

  head = *head_p;
  tail = *tail_p;

  if (reverse) {
    next = tail;
    last = head;
  } else {
    next = head;
    last = tail;
  }

  curr = NULL;
  for (;;) {
    if (curr == last) {
      break;
    }

    curr = next;
    if (curr == NULL) {
      break;
    }

    next = reverse ? curr->prev : curr->next;
    cp = curr->cipher;

    /* The selection criteria are either a specific cipher, the value of
     * |strength_bits|, or the algorithms used. */
    if (cipher_id != 0) {
      if (cipher_id != cp->id) {
        continue;
      }
    } else if (strength_bits >= 0) {
      if (strength_bits != SSL_CIPHER_get_bits(cp, NULL)) {
        continue;
      }
    } else {
      if (!(alg_mkey & cp->algorithm_mkey) ||
          !(alg_auth & cp->algorithm_auth) ||
          !(alg_enc & cp->algorithm_enc) ||
          !(alg_mac & cp->algorithm_mac) ||
          (min_version != 0 && SSL_CIPHER_get_min_version(cp) != min_version)) {
        continue;
      }

      /* The following ciphers are internal implementation details of TLS 1.3
       * resumption but are not yet finalized. Disable them by default until
       * then. */
      if (cp->id == TLS1_CK_ECDHE_PSK_WITH_AES_128_GCM_SHA256 ||
          cp->id == TLS1_CK_ECDHE_PSK_WITH_AES_256_GCM_SHA384) {
        continue;
      }
    }

    /* Add the cipher if it has not been added yet. */
    if (rule == CIPHER_ADD) {
      /* reverse == 0 */
      if (!curr->active) {
        ll_append_tail(&head, curr, &tail);
        curr->active = 1;
        curr->in_group = in_group;
      }
    }

    /* Move the added cipher to this location. */
    else if (rule == CIPHER_ORD) {
      /* reverse == 0 */
      if (curr->active) {
        ll_append_tail(&head, curr, &tail);
        curr->in_group = 0;
      }
    } else if (rule == CIPHER_DEL) {
      /* reverse == 1 */
      if (curr->active) {
        /* The most recently deleted ciphersuites get the best positions for
         * any future CIPHER_ADD (note that the CIPHER_DEL loop works in
         * reverse to maintain the order). */
        ll_append_head(&head, curr, &tail);
        curr->active = 0;
        curr->in_group = 0;
      }
    } else if (rule == CIPHER_KILL) {
      /* reverse == 0 */
      if (head == curr) {
        head = curr->next;
      } else {
        curr->prev->next = curr->next;
      }

      if (tail == curr) {
        tail = curr->prev;
      }
      curr->active = 0;
      if (curr->next != NULL) {
        curr->next->prev = curr->prev;
      }
      if (curr->prev != NULL) {
        curr->prev->next = curr->next;
      }
      curr->next = NULL;
      curr->prev = NULL;
    }
  }

  *head_p = head;
  *tail_p = tail;
}
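
/* Illustrative note (not in the upstream file): ssl_cipher_process_rulestr
 * below turns each textual rule into one call to this function. For example,
 * under the alias table above:
 *
 *   "!3DES" -> ssl_cipher_apply_rule(0, ~0u, ~0u, SSL_3DES, ~0u, 0,
 *                                    CIPHER_KILL, -1, 0, head_p, tail_p);
 *   "+SHA1" -> ssl_cipher_apply_rule(0, ~0u, ~0u, ~SSL_eNULL, SSL_SHA1, 0,
 *                                    CIPHER_ORD, -1, 0, head_p, tail_p);
 *
 * "!" removes matching ciphers outright, while "+" moves already-active
 * matches to the end of the preference list. */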
Adam Langley95c29f32014-06-20 12:00:00 -07001129
1130static int ssl_cipher_strength_sort(CIPHER_ORDER **head_p,
Adam Langleyfcf25832014-12-18 17:42:32 -08001131 CIPHER_ORDER **tail_p) {
1132 int max_strength_bits, i, *number_uses;
1133 CIPHER_ORDER *curr;
Adam Langley95c29f32014-06-20 12:00:00 -07001134
Adam Langleyfcf25832014-12-18 17:42:32 -08001135 /* This routine sorts the ciphers with descending strength. The sorting must
1136 * keep the pre-sorted sequence, so we apply the normal sorting routine as
1137 * '+' movement to the end of the list. */
1138 max_strength_bits = 0;
1139 curr = *head_p;
1140 while (curr != NULL) {
David Benjamin9f2e2772015-11-18 09:59:43 -05001141 if (curr->active &&
1142 SSL_CIPHER_get_bits(curr->cipher, NULL) > max_strength_bits) {
1143 max_strength_bits = SSL_CIPHER_get_bits(curr->cipher, NULL);
Adam Langleyfcf25832014-12-18 17:42:32 -08001144 }
1145 curr = curr->next;
1146 }
Adam Langley95c29f32014-06-20 12:00:00 -07001147
Adam Langleyfcf25832014-12-18 17:42:32 -08001148 number_uses = OPENSSL_malloc((max_strength_bits + 1) * sizeof(int));
1149 if (!number_uses) {
David Benjamin3570d732015-06-29 00:28:17 -04001150 OPENSSL_PUT_ERROR(SSL, ERR_R_MALLOC_FAILURE);
Adam Langleyfcf25832014-12-18 17:42:32 -08001151 return 0;
1152 }
1153 memset(number_uses, 0, (max_strength_bits + 1) * sizeof(int));
Adam Langley95c29f32014-06-20 12:00:00 -07001154
Adam Langleyfcf25832014-12-18 17:42:32 -08001155 /* Now find the strength_bits values actually used. */
1156 curr = *head_p;
1157 while (curr != NULL) {
1158 if (curr->active) {
David Benjamin9f2e2772015-11-18 09:59:43 -05001159 number_uses[SSL_CIPHER_get_bits(curr->cipher, NULL)]++;
Adam Langleyfcf25832014-12-18 17:42:32 -08001160 }
1161 curr = curr->next;
1162 }
Adam Langley95c29f32014-06-20 12:00:00 -07001163
Adam Langleyfcf25832014-12-18 17:42:32 -08001164 /* Go through the list of used strength_bits values in descending order. */
1165 for (i = max_strength_bits; i >= 0; i--) {
1166 if (number_uses[i] > 0) {
David Benjamind6e9eec2015-11-18 09:48:55 -05001167 ssl_cipher_apply_rule(0, 0, 0, 0, 0, 0, CIPHER_ORD, i, 0, head_p, tail_p);
Adam Langleyfcf25832014-12-18 17:42:32 -08001168 }
1169 }
1170
1171 OPENSSL_free(number_uses);
1172 return 1;
1173}
Adam Langley95c29f32014-06-20 12:00:00 -07001174
David Benjamin0344daf2015-04-08 02:08:01 -04001175static int ssl_cipher_process_rulestr(const SSL_PROTOCOL_METHOD *ssl_method,
1176 const char *rule_str,
Adam Langleyfcf25832014-12-18 17:42:32 -08001177 CIPHER_ORDER **head_p,
David Benjamin0344daf2015-04-08 02:08:01 -04001178 CIPHER_ORDER **tail_p) {
David Benjamind6e9eec2015-11-18 09:48:55 -05001179 uint32_t alg_mkey, alg_auth, alg_enc, alg_mac;
David Benjamindcb6ef02015-11-06 15:35:54 -05001180 uint16_t min_version;
Adam Langleyfcf25832014-12-18 17:42:32 -08001181 const char *l, *buf;
Adam Langleyf139c992016-10-02 09:56:09 -07001182 int multi, skip_rule, rule, ok, in_group = 0, has_group = 0;
David Benjamin0344daf2015-04-08 02:08:01 -04001183 size_t j, buf_len;
1184 uint32_t cipher_id;
Adam Langleyfcf25832014-12-18 17:42:32 -08001185 char ch;
Adam Langley95c29f32014-06-20 12:00:00 -07001186
Adam Langleyfcf25832014-12-18 17:42:32 -08001187 l = rule_str;
1188 for (;;) {
1189 ch = *l;
Adam Langley95c29f32014-06-20 12:00:00 -07001190
Adam Langleyfcf25832014-12-18 17:42:32 -08001191 if (ch == '\0') {
1192 break; /* done */
1193 }
Adam Langley95c29f32014-06-20 12:00:00 -07001194
Adam Langleyfcf25832014-12-18 17:42:32 -08001195 if (in_group) {
1196 if (ch == ']') {
Adam Langleyfcf25832014-12-18 17:42:32 -08001197 if (*tail_p) {
1198 (*tail_p)->in_group = 0;
1199 }
1200 in_group = 0;
1201 l++;
1202 continue;
1203 }
David Benjamin37d92462014-09-20 17:54:24 -04001204
Adam Langleyfcf25832014-12-18 17:42:32 -08001205 if (ch == '|') {
1206 rule = CIPHER_ADD;
1207 l++;
1208 continue;
1209 } else if (!(ch >= 'a' && ch <= 'z') && !(ch >= 'A' && ch <= 'Z') &&
1210 !(ch >= '0' && ch <= '9')) {
David Benjamin3570d732015-06-29 00:28:17 -04001211 OPENSSL_PUT_ERROR(SSL, SSL_R_UNEXPECTED_OPERATOR_IN_GROUP);
Adam Langleyf139c992016-10-02 09:56:09 -07001212 return 0;
Adam Langleyfcf25832014-12-18 17:42:32 -08001213 } else {
1214 rule = CIPHER_ADD;
1215 }
1216 } else if (ch == '-') {
1217 rule = CIPHER_DEL;
1218 l++;
1219 } else if (ch == '+') {
1220 rule = CIPHER_ORD;
1221 l++;
1222 } else if (ch == '!') {
1223 rule = CIPHER_KILL;
1224 l++;
1225 } else if (ch == '@') {
1226 rule = CIPHER_SPECIAL;
1227 l++;
1228 } else if (ch == '[') {
1229 if (in_group) {
David Benjamin3570d732015-06-29 00:28:17 -04001230 OPENSSL_PUT_ERROR(SSL, SSL_R_NESTED_GROUP);
Adam Langleyf139c992016-10-02 09:56:09 -07001231 return 0;
Adam Langleyfcf25832014-12-18 17:42:32 -08001232 }
1233 in_group = 1;
1234 has_group = 1;
1235 l++;
1236 continue;
1237 } else {
1238 rule = CIPHER_ADD;
1239 }
Adam Langley95c29f32014-06-20 12:00:00 -07001240
Adam Langleyfcf25832014-12-18 17:42:32 -08001241 /* If preference groups are enabled, the only legal operator is +.
1242 * Otherwise the in_group bits will get mixed up. */
1243 if (has_group && rule != CIPHER_ADD) {
David Benjamin3570d732015-06-29 00:28:17 -04001244 OPENSSL_PUT_ERROR(SSL, SSL_R_MIXED_SPECIAL_OPERATOR_WITH_GROUPS);
Adam Langleyf139c992016-10-02 09:56:09 -07001245 return 0;
Adam Langleyfcf25832014-12-18 17:42:32 -08001246 }
Adam Langley95c29f32014-06-20 12:00:00 -07001247
Adam Langleyfcf25832014-12-18 17:42:32 -08001248 if (ITEM_SEP(ch)) {
1249 l++;
1250 continue;
1251 }
Adam Langley95c29f32014-06-20 12:00:00 -07001252
David Benjamin0344daf2015-04-08 02:08:01 -04001253 multi = 0;
1254 cipher_id = 0;
1255 alg_mkey = ~0u;
1256 alg_auth = ~0u;
1257 alg_enc = ~0u;
1258 alg_mac = ~0u;
David Benjamindcb6ef02015-11-06 15:35:54 -05001259 min_version = 0;
1260 skip_rule = 0;
Adam Langley95c29f32014-06-20 12:00:00 -07001261
Adam Langleyfcf25832014-12-18 17:42:32 -08001262 for (;;) {
1263 ch = *l;
1264 buf = l;
David Benjamin0344daf2015-04-08 02:08:01 -04001265 buf_len = 0;
Adam Langleyfcf25832014-12-18 17:42:32 -08001266 while (((ch >= 'A') && (ch <= 'Z')) || ((ch >= '0') && (ch <= '9')) ||
1267 ((ch >= 'a') && (ch <= 'z')) || (ch == '-') || (ch == '.')) {
1268 ch = *(++l);
David Benjamin0344daf2015-04-08 02:08:01 -04001269 buf_len++;
Adam Langleyfcf25832014-12-18 17:42:32 -08001270 }
Adam Langley95c29f32014-06-20 12:00:00 -07001271
David Benjamin0344daf2015-04-08 02:08:01 -04001272 if (buf_len == 0) {
Adam Langleyfcf25832014-12-18 17:42:32 -08001273 /* We hit something we cannot deal with, it is no command or separator
1274 * nor alphanumeric, so we call this an error. */
David Benjamin3570d732015-06-29 00:28:17 -04001275 OPENSSL_PUT_ERROR(SSL, SSL_R_INVALID_COMMAND);
Adam Langleyf99f2442016-10-02 09:53:38 -07001276 return 0;
Adam Langleyfcf25832014-12-18 17:42:32 -08001277 }
Adam Langley95c29f32014-06-20 12:00:00 -07001278
Adam Langleyfcf25832014-12-18 17:42:32 -08001279 if (rule == CIPHER_SPECIAL) {
Adam Langleyfcf25832014-12-18 17:42:32 -08001280 break;
1281 }
David Benjamin0344daf2015-04-08 02:08:01 -04001282
1283 /* Look for a matching exact cipher. These aren't allowed in multipart
1284 * rules. */
1285 if (!multi && ch != '+') {
David Benjamina1c90a52015-05-30 17:03:14 -04001286 for (j = 0; j < kCiphersLen; j++) {
1287 const SSL_CIPHER *cipher = &kCiphers[j];
1288 if (rule_equals(cipher->name, buf, buf_len)) {
David Benjamin0344daf2015-04-08 02:08:01 -04001289 cipher_id = cipher->id;
1290 break;
1291 }
1292 }
1293 }
1294 if (cipher_id == 0) {
1295 /* If not an exact cipher, look for a matching cipher alias. */
David Benjamina1c90a52015-05-30 17:03:14 -04001296 for (j = 0; j < kCipherAliasesLen; j++) {
David Benjamin0344daf2015-04-08 02:08:01 -04001297 if (rule_equals(kCipherAliases[j].name, buf, buf_len)) {
1298 alg_mkey &= kCipherAliases[j].algorithm_mkey;
1299 alg_auth &= kCipherAliases[j].algorithm_auth;
1300 alg_enc &= kCipherAliases[j].algorithm_enc;
1301 alg_mac &= kCipherAliases[j].algorithm_mac;
David Benjamindcb6ef02015-11-06 15:35:54 -05001302
1303 if (min_version != 0 &&
1304 min_version != kCipherAliases[j].min_version) {
1305 skip_rule = 1;
1306 } else {
1307 min_version = kCipherAliases[j].min_version;
1308 }
David Benjamin0344daf2015-04-08 02:08:01 -04001309 break;
1310 }
1311 }
David Benjamina1c90a52015-05-30 17:03:14 -04001312 if (j == kCipherAliasesLen) {
David Benjamindcb6ef02015-11-06 15:35:54 -05001313 skip_rule = 1;
David Benjamin0344daf2015-04-08 02:08:01 -04001314 }
1315 }
1316
1317 /* Check for a multipart rule. */
1318 if (ch != '+') {
1319 break;
1320 }
1321 l++;
1322 multi = 1;
Adam Langleyfcf25832014-12-18 17:42:32 -08001323 }
Adam Langley95c29f32014-06-20 12:00:00 -07001324
David Benjamin13414b32015-12-09 23:02:39 -05001325 /* If one of the CHACHA20_POLY1305 variants is selected, include the other
1326 * as well. They have the same name to avoid requiring changes in
1327 * configuration. Apply this transformation late so that the cipher name
1328 * still behaves as an exact name and not an alias in multipart rules.
1329 *
1330 * This is temporary and will be removed when the pre-standard construction
1331 * is removed. */
1332 if (cipher_id == TLS1_CK_ECDHE_RSA_CHACHA20_POLY1305_OLD ||
1333 cipher_id == TLS1_CK_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256) {
1334 cipher_id = 0;
1335 alg_mkey = SSL_kECDHE;
1336 alg_auth = SSL_aRSA;
1337 alg_enc = SSL_CHACHA20POLY1305|SSL_CHACHA20POLY1305_OLD;
1338 alg_mac = SSL_AEAD;
1339 } else if (cipher_id == TLS1_CK_ECDHE_ECDSA_CHACHA20_POLY1305_OLD ||
1340 cipher_id == TLS1_CK_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256) {
1341 cipher_id = 0;
1342 alg_mkey = SSL_kECDHE;
1343 alg_auth = SSL_aECDSA;
1344 alg_enc = SSL_CHACHA20POLY1305|SSL_CHACHA20POLY1305_OLD;
1345 alg_mac = SSL_AEAD;
1346 }
1347
Adam Langleyfcf25832014-12-18 17:42:32 -08001348 /* Ok, we have the rule, now apply it. */
1349 if (rule == CIPHER_SPECIAL) {
1350 /* special command */
1351 ok = 0;
David Benjamin0344daf2015-04-08 02:08:01 -04001352 if (buf_len == 8 && !strncmp(buf, "STRENGTH", 8)) {
Adam Langleyfcf25832014-12-18 17:42:32 -08001353 ok = ssl_cipher_strength_sort(head_p, tail_p);
1354 } else {
David Benjamin3570d732015-06-29 00:28:17 -04001355 OPENSSL_PUT_ERROR(SSL, SSL_R_INVALID_COMMAND);
Adam Langleyfcf25832014-12-18 17:42:32 -08001356 }
Adam Langley95c29f32014-06-20 12:00:00 -07001357
Adam Langleyfcf25832014-12-18 17:42:32 -08001358 if (ok == 0) {
Adam Langleyf139c992016-10-02 09:56:09 -07001359 return 0;
Adam Langleyfcf25832014-12-18 17:42:32 -08001360 }
Adam Langley95c29f32014-06-20 12:00:00 -07001361
Adam Langleyfcf25832014-12-18 17:42:32 -08001362 /* We do not support any "multi" options together with "@", so throw away
1363   * the rest of the command, if any is left, until the end or ':' is found. */
1364 while (*l != '\0' && !ITEM_SEP(*l)) {
1365 l++;
1366 }
David Benjamindcb6ef02015-11-06 15:35:54 -05001367 } else if (!skip_rule) {
Adam Langleyfcf25832014-12-18 17:42:32 -08001368 ssl_cipher_apply_rule(cipher_id, alg_mkey, alg_auth, alg_enc, alg_mac,
David Benjamind6e9eec2015-11-18 09:48:55 -05001369 min_version, rule, -1, in_group, head_p, tail_p);
Adam Langleyfcf25832014-12-18 17:42:32 -08001370 }
1371 }
Adam Langley95c29f32014-06-20 12:00:00 -07001372
Adam Langleyfcf25832014-12-18 17:42:32 -08001373 if (in_group) {
David Benjamin3570d732015-06-29 00:28:17 -04001374 OPENSSL_PUT_ERROR(SSL, SSL_R_INVALID_COMMAND);
Adam Langleyf139c992016-10-02 09:56:09 -07001375 return 0;
Adam Langleyfcf25832014-12-18 17:42:32 -08001376 }
Adam Langley95c29f32014-06-20 12:00:00 -07001377
Adam Langleyf139c992016-10-02 09:56:09 -07001378 return 1;
Adam Langleyfcf25832014-12-18 17:42:32 -08001379}
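/* Illustrative sketch of the rule grammar handled above, summarizing the code
 * rather than adding behavior: rules are separated by ':' (or another
 * ITEM_SEP character), each rule is an exact cipher name, a cipher alias, or
 * the special "@STRENGTH" command, and '+' joins aliases into a multipart
 * rule whose masks are intersected. For example, assuming the usual alias
 * names,
 *
 *   "AES128+kECDHE:@STRENGTH"
 *
 * first applies a rule matching ciphers that are both AES-128 and ECDHE-keyed
 * and then sorts the enabled list by strength via ssl_cipher_strength_sort. */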
Adam Langley95c29f32014-06-20 12:00:00 -07001380
Adam Langleyfcf25832014-12-18 17:42:32 -08001381STACK_OF(SSL_CIPHER) *
1382ssl_create_cipher_list(const SSL_PROTOCOL_METHOD *ssl_method,
David Benjamin71f07942015-04-08 02:36:59 -04001383 struct ssl_cipher_preference_list_st **out_cipher_list,
1384 STACK_OF(SSL_CIPHER) **out_cipher_list_by_id,
1385 const char *rule_str) {
David Benjamin0344daf2015-04-08 02:08:01 -04001386 int ok;
Adam Langleyfcf25832014-12-18 17:42:32 -08001387 STACK_OF(SSL_CIPHER) *cipherstack = NULL, *tmp_cipher_list = NULL;
1388 const char *rule_p;
1389 CIPHER_ORDER *co_list = NULL, *head = NULL, *tail = NULL, *curr;
Adam Langleyfcf25832014-12-18 17:42:32 -08001390 uint8_t *in_group_flags = NULL;
1391 unsigned int num_in_group_flags = 0;
1392 struct ssl_cipher_preference_list_st *pref_list = NULL;
Adam Langley95c29f32014-06-20 12:00:00 -07001393
Adam Langleyfcf25832014-12-18 17:42:32 -08001394 /* Return with error if nothing to do. */
David Benjamin71f07942015-04-08 02:36:59 -04001395 if (rule_str == NULL || out_cipher_list == NULL) {
Adam Langleyfcf25832014-12-18 17:42:32 -08001396 return NULL;
1397 }
David Benjamin5213df42014-08-20 14:19:54 -04001398
Adam Langleyfcf25832014-12-18 17:42:32 -08001399  /* Now collect the available ciphers from the compiled-in table. The result
1400   * cannot contain more than kCiphersLen entries, so that bound is used for
1401   * the allocation. */
Brian Smith5ba06892016-02-07 09:36:04 -10001402 co_list = OPENSSL_malloc(sizeof(CIPHER_ORDER) * kCiphersLen);
Adam Langleyfcf25832014-12-18 17:42:32 -08001403 if (co_list == NULL) {
David Benjamin3570d732015-06-29 00:28:17 -04001404 OPENSSL_PUT_ERROR(SSL, ERR_R_MALLOC_FAILURE);
Adam Langleyfcf25832014-12-18 17:42:32 -08001405 return NULL;
1406 }
Adam Langley95c29f32014-06-20 12:00:00 -07001407
David Benjamina1c90a52015-05-30 17:03:14 -04001408 ssl_cipher_collect_ciphers(ssl_method, co_list, &head, &tail);
Adam Langley95c29f32014-06-20 12:00:00 -07001409
Adam Langleyfcf25832014-12-18 17:42:32 -08001410 /* Now arrange all ciphers by preference:
1411 * TODO(davidben): Compute this order once and copy it. */
Adam Langley95c29f32014-06-20 12:00:00 -07001412
Adam Langleyfcf25832014-12-18 17:42:32 -08001413  /* Everything else being equal, prefer ECDHE_ECDSA, then ECDHE_RSA, over
1414   * other key exchange mechanisms. */
Matt Braithwaite053931e2016-05-25 12:06:05 -07001415
David Benjamind6e9eec2015-11-18 09:48:55 -05001416 ssl_cipher_apply_rule(0, SSL_kECDHE, SSL_aECDSA, ~0u, ~0u, 0, CIPHER_ADD, -1,
Adam Langleyfcf25832014-12-18 17:42:32 -08001417 0, &head, &tail);
David Benjamind6e9eec2015-11-18 09:48:55 -05001418 ssl_cipher_apply_rule(0, SSL_kECDHE, ~0u, ~0u, ~0u, 0, CIPHER_ADD, -1, 0,
1419 &head, &tail);
1420 ssl_cipher_apply_rule(0, SSL_kECDHE, ~0u, ~0u, ~0u, 0, CIPHER_DEL, -1, 0,
1421 &head, &tail);
Adam Langley95c29f32014-06-20 12:00:00 -07001422
Adam Langleyfcf25832014-12-18 17:42:32 -08001423 /* Order the bulk ciphers. First the preferred AEAD ciphers. We prefer
1424 * CHACHA20 unless there is hardware support for fast and constant-time
David Benjamin13414b32015-12-09 23:02:39 -05001425 * AES_GCM. Of the two CHACHA20 variants, the new one is preferred over the
1426 * old one. */
Adam Langleyfcf25832014-12-18 17:42:32 -08001427 if (EVP_has_aes_hardware()) {
David Benjamind6e9eec2015-11-18 09:48:55 -05001428 ssl_cipher_apply_rule(0, ~0u, ~0u, SSL_AES128GCM, ~0u, 0, CIPHER_ADD, -1, 0,
1429 &head, &tail);
David Benjamin43336652016-03-03 15:32:29 -05001430 ssl_cipher_apply_rule(0, ~0u, ~0u, SSL_AES256GCM, ~0u, 0, CIPHER_ADD, -1, 0,
1431 &head, &tail);
David Benjamin13414b32015-12-09 23:02:39 -05001432 ssl_cipher_apply_rule(0, ~0u, ~0u, SSL_CHACHA20POLY1305, ~0u, 0, CIPHER_ADD,
1433 -1, 0, &head, &tail);
David Benjamind6e9eec2015-11-18 09:48:55 -05001434 ssl_cipher_apply_rule(0, ~0u, ~0u, SSL_CHACHA20POLY1305_OLD, ~0u, 0,
David Benjamin0344daf2015-04-08 02:08:01 -04001435 CIPHER_ADD, -1, 0, &head, &tail);
Adam Langleyfcf25832014-12-18 17:42:32 -08001436 } else {
David Benjamin13414b32015-12-09 23:02:39 -05001437 ssl_cipher_apply_rule(0, ~0u, ~0u, SSL_CHACHA20POLY1305, ~0u, 0, CIPHER_ADD,
1438 -1, 0, &head, &tail);
David Benjamind6e9eec2015-11-18 09:48:55 -05001439 ssl_cipher_apply_rule(0, ~0u, ~0u, SSL_CHACHA20POLY1305_OLD, ~0u, 0,
David Benjamin0344daf2015-04-08 02:08:01 -04001440 CIPHER_ADD, -1, 0, &head, &tail);
David Benjamind6e9eec2015-11-18 09:48:55 -05001441 ssl_cipher_apply_rule(0, ~0u, ~0u, SSL_AES128GCM, ~0u, 0, CIPHER_ADD, -1, 0,
1442 &head, &tail);
David Benjamin43336652016-03-03 15:32:29 -05001443 ssl_cipher_apply_rule(0, ~0u, ~0u, SSL_AES256GCM, ~0u, 0, CIPHER_ADD, -1, 0,
1444 &head, &tail);
Adam Langleyfcf25832014-12-18 17:42:32 -08001445 }
Adam Langley95c29f32014-06-20 12:00:00 -07001446
David Benjamin43336652016-03-03 15:32:29 -05001447 /* Then the legacy non-AEAD ciphers: AES_128_CBC, AES_256_CBC,
Matthew Braithwaite8aaa9e12016-09-07 15:09:58 -07001448 * 3DES_EDE_CBC_SHA. */
David Benjamind6e9eec2015-11-18 09:48:55 -05001449 ssl_cipher_apply_rule(0, ~0u, ~0u, SSL_AES128, ~0u, 0, CIPHER_ADD, -1, 0,
1450 &head, &tail);
David Benjamin43336652016-03-03 15:32:29 -05001451 ssl_cipher_apply_rule(0, ~0u, ~0u, SSL_AES256, ~0u, 0, CIPHER_ADD, -1, 0,
1452 &head, &tail);
1453 ssl_cipher_apply_rule(0, ~0u, ~0u, SSL_3DES, ~0u, 0, CIPHER_ADD, -1, 0, &head,
1454 &tail);
Adam Langley95c29f32014-06-20 12:00:00 -07001455
Adam Langleyfcf25832014-12-18 17:42:32 -08001456 /* Temporarily enable everything else for sorting */
David Benjamind6e9eec2015-11-18 09:48:55 -05001457 ssl_cipher_apply_rule(0, ~0u, ~0u, ~0u, ~0u, 0, CIPHER_ADD, -1, 0, &head,
1458 &tail);
Adam Langley95c29f32014-06-20 12:00:00 -07001459
Adam Langleyfcf25832014-12-18 17:42:32 -08001460 /* Move ciphers without forward secrecy to the end. */
David Benjamind6e9eec2015-11-18 09:48:55 -05001461 ssl_cipher_apply_rule(0, ~(SSL_kDHE | SSL_kECDHE), ~0u, ~0u, ~0u, 0,
David Benjamin0344daf2015-04-08 02:08:01 -04001462 CIPHER_ORD, -1, 0, &head, &tail);
Adam Langley95c29f32014-06-20 12:00:00 -07001463
Adam Langleyfcf25832014-12-18 17:42:32 -08001464 /* Now disable everything (maintaining the ordering!) */
David Benjamind6e9eec2015-11-18 09:48:55 -05001465 ssl_cipher_apply_rule(0, ~0u, ~0u, ~0u, ~0u, 0, CIPHER_DEL, -1, 0, &head,
1466 &tail);
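  /* At this point the list is only ordered, not enabled: ECDHE_ECDSA and then
   * ECDHE_RSA key exchanges come first, AEAD ciphers precede the CBC ciphers
   * and 3DES (with AES-GCM ahead of ChaCha20-Poly1305 only when AES hardware
   * is available), ciphers without forward secrecy sit at the end, and every
   * cipher stays disabled until the rules below enable it. */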
Adam Langley95c29f32014-06-20 12:00:00 -07001467
Adam Langleyfcf25832014-12-18 17:42:32 -08001468  /* If |rule_str| begins with DEFAULT, apply the default rule list before
1469   * using any additional rules that follow. */
1470 ok = 1;
1471 rule_p = rule_str;
1472 if (strncmp(rule_str, "DEFAULT", 7) == 0) {
David Benjamin0344daf2015-04-08 02:08:01 -04001473 ok = ssl_cipher_process_rulestr(ssl_method, SSL_DEFAULT_CIPHER_LIST, &head,
1474 &tail);
Adam Langleyfcf25832014-12-18 17:42:32 -08001475 rule_p += 7;
1476 if (*rule_p == ':') {
1477 rule_p++;
1478 }
1479 }
Adam Langley858a88d2014-06-20 12:00:00 -07001480
Adam Langleyfcf25832014-12-18 17:42:32 -08001481 if (ok && strlen(rule_p) > 0) {
David Benjamin0344daf2015-04-08 02:08:01 -04001482 ok = ssl_cipher_process_rulestr(ssl_method, rule_p, &head, &tail);
Adam Langleyfcf25832014-12-18 17:42:32 -08001483 }
Adam Langley95c29f32014-06-20 12:00:00 -07001484
Adam Langleyfcf25832014-12-18 17:42:32 -08001485 if (!ok) {
1486 goto err;
1487 }
1488
1489  /* Allocate a new "cipherstack" for the result; return with an error
1490   * if we cannot get one. */
1491 cipherstack = sk_SSL_CIPHER_new_null();
1492 if (cipherstack == NULL) {
1493 goto err;
1494 }
1495
David Benjamina1c90a52015-05-30 17:03:14 -04001496 in_group_flags = OPENSSL_malloc(kCiphersLen);
Adam Langleyfcf25832014-12-18 17:42:32 -08001497 if (!in_group_flags) {
1498 goto err;
1499 }
1500
1501  /* The cipher selection for the list is done. Add the selected ciphers, in
1502   * order of precedence, to the resulting STACK_OF(SSL_CIPHER). */
1503 for (curr = head; curr != NULL; curr = curr->next) {
1504 if (curr->active) {
David Benjamin2adb7ec2015-01-11 19:59:06 -05001505 if (!sk_SSL_CIPHER_push(cipherstack, curr->cipher)) {
1506 goto err;
1507 }
Adam Langleyfcf25832014-12-18 17:42:32 -08001508 in_group_flags[num_in_group_flags++] = curr->in_group;
1509 }
1510 }
1511 OPENSSL_free(co_list); /* Not needed any longer */
1512 co_list = NULL;
1513
1514 tmp_cipher_list = sk_SSL_CIPHER_dup(cipherstack);
1515 if (tmp_cipher_list == NULL) {
1516 goto err;
1517 }
1518 pref_list = OPENSSL_malloc(sizeof(struct ssl_cipher_preference_list_st));
1519 if (!pref_list) {
1520 goto err;
1521 }
1522 pref_list->ciphers = cipherstack;
1523 pref_list->in_group_flags = OPENSSL_malloc(num_in_group_flags);
1524 if (!pref_list->in_group_flags) {
1525 goto err;
1526 }
1527 memcpy(pref_list->in_group_flags, in_group_flags, num_in_group_flags);
1528 OPENSSL_free(in_group_flags);
1529 in_group_flags = NULL;
David Benjamin71f07942015-04-08 02:36:59 -04001530 if (*out_cipher_list != NULL) {
1531 ssl_cipher_preference_list_free(*out_cipher_list);
Adam Langleyfcf25832014-12-18 17:42:32 -08001532 }
David Benjamin71f07942015-04-08 02:36:59 -04001533 *out_cipher_list = pref_list;
Adam Langleyfcf25832014-12-18 17:42:32 -08001534 pref_list = NULL;
1535
David Benjamin71f07942015-04-08 02:36:59 -04001536 if (out_cipher_list_by_id != NULL) {
David Benjamin2755a3e2015-04-22 16:17:58 -04001537 sk_SSL_CIPHER_free(*out_cipher_list_by_id);
David Benjamin71f07942015-04-08 02:36:59 -04001538 *out_cipher_list_by_id = tmp_cipher_list;
Adam Langleyfcf25832014-12-18 17:42:32 -08001539 tmp_cipher_list = NULL;
David Benjamin71f07942015-04-08 02:36:59 -04001540 (void) sk_SSL_CIPHER_set_cmp_func(*out_cipher_list_by_id,
1541 ssl_cipher_ptr_id_cmp);
Adam Langleyfcf25832014-12-18 17:42:32 -08001542
David Benjamin71f07942015-04-08 02:36:59 -04001543 sk_SSL_CIPHER_sort(*out_cipher_list_by_id);
Adam Langleyfcf25832014-12-18 17:42:32 -08001544 } else {
1545 sk_SSL_CIPHER_free(tmp_cipher_list);
1546 tmp_cipher_list = NULL;
1547 }
1548
1549 return cipherstack;
Adam Langley858a88d2014-06-20 12:00:00 -07001550
1551err:
David Benjamin2755a3e2015-04-22 16:17:58 -04001552 OPENSSL_free(co_list);
1553 OPENSSL_free(in_group_flags);
1554 sk_SSL_CIPHER_free(cipherstack);
1555 sk_SSL_CIPHER_free(tmp_cipher_list);
1556 if (pref_list) {
Adam Langleyfcf25832014-12-18 17:42:32 -08001557 OPENSSL_free(pref_list->in_group_flags);
1558 }
David Benjamin2755a3e2015-04-22 16:17:58 -04001559 OPENSSL_free(pref_list);
Adam Langleyfcf25832014-12-18 17:42:32 -08001560 return NULL;
1561}
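/* Illustrative usage sketch; the local variable names below are hypothetical
 * and not part of the library:
 *
 *   struct ssl_cipher_preference_list_st *prefs = NULL;
 *   STACK_OF(SSL_CIPHER) *by_id = NULL;
 *   STACK_OF(SSL_CIPHER) *ciphers =
 *       ssl_create_cipher_list(method, &prefs, &by_id, "DEFAULT:@STRENGTH");
 *   if (ciphers == NULL) {
 *     // Bad rule string or allocation failure.
 *   }
 *
 * On success the returned stack is also owned by |prefs|, and |by_id|, when
 * requested, receives a separate copy sorted for lookup by cipher id. */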
Adam Langley95c29f32014-06-20 12:00:00 -07001562
David Benjamin71f07942015-04-08 02:36:59 -04001563uint32_t SSL_CIPHER_get_id(const SSL_CIPHER *cipher) { return cipher->id; }
1564
David Benjamina1c90a52015-05-30 17:03:14 -04001565uint16_t ssl_cipher_get_value(const SSL_CIPHER *cipher) {
1566 uint32_t id = cipher->id;
1567  /* All cipher IDs carry the 0x0300 SSLv3/TLS prefix. */
1568 assert((id & 0xff000000) == 0x03000000);
1569 return id & 0xffff;
1570}
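/* For example, TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256 has the internal id
 * 0x0300C02F, so ssl_cipher_get_value returns the wire value 0xC02F. */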
1571
Steven Valdez4aa154e2016-07-29 14:32:55 -04001572int ssl_cipher_get_ecdhe_psk_cipher(const SSL_CIPHER *cipher,
1573 uint16_t *out_cipher) {
1574 switch (cipher->id) {
1575 case TLS1_CK_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256:
1576 case TLS1_CK_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256:
1577 case TLS1_CK_ECDHE_PSK_WITH_CHACHA20_POLY1305_SHA256:
1578 *out_cipher = TLS1_CK_ECDHE_PSK_WITH_CHACHA20_POLY1305_SHA256 & 0xffff;
1579 return 1;
1580
1581 case TLS1_CK_ECDHE_RSA_WITH_AES_128_GCM_SHA256:
1582 case TLS1_CK_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256:
1583 case TLS1_CK_ECDHE_PSK_WITH_AES_128_GCM_SHA256:
1584 *out_cipher = TLS1_CK_ECDHE_PSK_WITH_AES_128_GCM_SHA256 & 0xffff;
1585 return 1;
1586
1587 case TLS1_CK_ECDHE_RSA_WITH_AES_256_GCM_SHA384:
1588 case TLS1_CK_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384:
1589 case TLS1_CK_ECDHE_PSK_WITH_AES_256_GCM_SHA384:
1590 *out_cipher = TLS1_CK_ECDHE_PSK_WITH_AES_256_GCM_SHA384 & 0xffff;
1591 return 1;
1592 }
1593 return 0;
1594}
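/* A minimal usage sketch: for the certificate-based ECDHE AEAD ciphers listed
 * above, this yields the wire value of the ECDHE_PSK cipher with the same
 * AEAD, e.g.
 *
 *   uint16_t psk_value;
 *   if (ssl_cipher_get_ecdhe_psk_cipher(cipher, &psk_value)) {
 *     // With an ECDHE_RSA or ECDHE_ECDSA AES-128-GCM |cipher|, psk_value is
 *     // TLS1_CK_ECDHE_PSK_WITH_AES_128_GCM_SHA256 & 0xffff.
 *   }
 */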
1595
David Benjamin71f07942015-04-08 02:36:59 -04001596int SSL_CIPHER_is_AES(const SSL_CIPHER *cipher) {
1597 return (cipher->algorithm_enc & SSL_AES) != 0;
1598}
1599
1600int SSL_CIPHER_has_MD5_HMAC(const SSL_CIPHER *cipher) {
1601 return (cipher->algorithm_mac & SSL_MD5) != 0;
1602}
1603
David Benjaminef793f42015-11-05 18:16:27 -05001604int SSL_CIPHER_has_SHA1_HMAC(const SSL_CIPHER *cipher) {
1605 return (cipher->algorithm_mac & SSL_SHA1) != 0;
1606}
1607
David Benjamina211aee2016-02-24 17:18:44 -05001608int SSL_CIPHER_has_SHA256_HMAC(const SSL_CIPHER *cipher) {
1609 return (cipher->algorithm_mac & SSL_SHA256) != 0;
1610}
1611
David Benjamin71f07942015-04-08 02:36:59 -04001612int SSL_CIPHER_is_AESGCM(const SSL_CIPHER *cipher) {
David Benjaminc0125ef2015-09-09 09:11:07 -04001613 return (cipher->algorithm_enc & (SSL_AES128GCM | SSL_AES256GCM)) != 0;
David Benjamin71f07942015-04-08 02:36:59 -04001614}
1615
David Benjaminef793f42015-11-05 18:16:27 -05001616int SSL_CIPHER_is_AES128GCM(const SSL_CIPHER *cipher) {
1617 return (cipher->algorithm_enc & SSL_AES128GCM) != 0;
1618}
1619
Adam Langleyb00061c2015-11-16 17:44:52 -08001620int SSL_CIPHER_is_AES128CBC(const SSL_CIPHER *cipher) {
1621 return (cipher->algorithm_enc & SSL_AES128) != 0;
1622}
1623
1624int SSL_CIPHER_is_AES256CBC(const SSL_CIPHER *cipher) {
1625 return (cipher->algorithm_enc & SSL_AES256) != 0;
1626}
1627
David Benjamin51a01a52015-10-29 13:19:56 -04001628int SSL_CIPHER_is_CHACHA20POLY1305(const SSL_CIPHER *cipher) {
David Benjamin13414b32015-12-09 23:02:39 -05001629 return (cipher->algorithm_enc &
1630 (SSL_CHACHA20POLY1305 | SSL_CHACHA20POLY1305_OLD)) != 0;
David Benjamin71f07942015-04-08 02:36:59 -04001631}
1632
Matt Braithwaiteaf096752015-09-02 19:48:16 -07001633int SSL_CIPHER_is_NULL(const SSL_CIPHER *cipher) {
1634 return (cipher->algorithm_enc & SSL_eNULL) != 0;
1635}
1636
1637int SSL_CIPHER_is_block_cipher(const SSL_CIPHER *cipher) {
Matthew Braithwaite8aaa9e12016-09-07 15:09:58 -07001638 return (cipher->algorithm_enc & SSL_eNULL) == 0 &&
Matt Braithwaiteaf096752015-09-02 19:48:16 -07001639 cipher->algorithm_mac != SSL_AEAD;
1640}
1641
David Benjaminef793f42015-11-05 18:16:27 -05001642int SSL_CIPHER_is_ECDSA(const SSL_CIPHER *cipher) {
1643 return (cipher->algorithm_auth & SSL_aECDSA) != 0;
1644}
1645
David Benjamin0fc7df52016-06-02 18:36:33 -04001646int SSL_CIPHER_is_DHE(const SSL_CIPHER *cipher) {
1647 return (cipher->algorithm_mkey & SSL_kDHE) != 0;
1648}
1649
David Benjamin4cc36ad2015-12-19 14:23:26 -05001650int SSL_CIPHER_is_ECDHE(const SSL_CIPHER *cipher) {
1651 return (cipher->algorithm_mkey & SSL_kECDHE) != 0;
1652}
1653
Matt Braithwaite053931e2016-05-25 12:06:05 -07001654int SSL_CIPHER_is_CECPQ1(const SSL_CIPHER *cipher) {
1655 return (cipher->algorithm_mkey & SSL_kCECPQ1) != 0;
1656}
1657
David Benjaminef793f42015-11-05 18:16:27 -05001658uint16_t SSL_CIPHER_get_min_version(const SSL_CIPHER *cipher) {
David Benjamindcb6ef02015-11-06 15:35:54 -05001659 if (cipher->algorithm_prf != SSL_HANDSHAKE_MAC_DEFAULT) {
1660 /* Cipher suites before TLS 1.2 use the default PRF, while all those added
1661 * afterwards specify a particular hash. */
David Benjaminef793f42015-11-05 18:16:27 -05001662 return TLS1_2_VERSION;
1663 }
1664 return SSL3_VERSION;
1665}
1666
Nick Harper1fd39d82016-06-14 18:14:35 -07001667uint16_t SSL_CIPHER_get_max_version(const SSL_CIPHER *cipher) {
1668 if (cipher->algorithm_mac == SSL_AEAD &&
1669 (cipher->algorithm_enc & SSL_CHACHA20POLY1305_OLD) == 0 &&
David Benjamin54c217c2016-07-13 12:35:25 -04001670 (cipher->algorithm_mkey & SSL_kECDHE) != 0 &&
1671 /* TODO(davidben,svaldez): Support PSK-based ciphers in TLS 1.3. */
1672 (cipher->algorithm_auth & SSL_aCERT) != 0) {
Nick Harper1fd39d82016-06-14 18:14:35 -07001673 return TLS1_3_VERSION;
1674 }
1675 return TLS1_2_VERSION;
1676}
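/* Two version ranges read off the rules above: a CBC suite using the default
 * PRF, such as AES_128_CBC_SHA, spans SSL3_VERSION through TLS1_2_VERSION,
 * while a certificate-based ECDHE AEAD suite, such as
 * ECDHE_RSA_WITH_AES_128_GCM_SHA256, spans TLS1_2_VERSION through
 * TLS1_3_VERSION. */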
1677
David Benjamin71f07942015-04-08 02:36:59 -04001678/* Return the name of the cipher, or "(NONE)" if |cipher| is NULL. */
1679const char *SSL_CIPHER_get_name(const SSL_CIPHER *cipher) {
1680 if (cipher != NULL) {
1681 return cipher->name;
1682 }
1683
1684 return "(NONE)";
1685}
1686
1687const char *SSL_CIPHER_get_kx_name(const SSL_CIPHER *cipher) {
1688 if (cipher == NULL) {
1689 return "";
1690 }
1691
1692 switch (cipher->algorithm_mkey) {
1693 case SSL_kRSA:
1694 return "RSA";
1695
1696 case SSL_kDHE:
1697 switch (cipher->algorithm_auth) {
1698 case SSL_aRSA:
1699 return "DHE_RSA";
1700 default:
1701 assert(0);
1702 return "UNKNOWN";
1703 }
1704
1705 case SSL_kECDHE:
1706 switch (cipher->algorithm_auth) {
1707 case SSL_aECDSA:
1708 return "ECDHE_ECDSA";
1709 case SSL_aRSA:
1710 return "ECDHE_RSA";
1711 case SSL_aPSK:
1712 return "ECDHE_PSK";
1713 default:
1714 assert(0);
1715 return "UNKNOWN";
1716 }
1717
Matt Braithwaite053931e2016-05-25 12:06:05 -07001718 case SSL_kCECPQ1:
1719 switch (cipher->algorithm_auth) {
1720 case SSL_aECDSA:
1721 return "CECPQ1_ECDSA";
1722 case SSL_aRSA:
1723 return "CECPQ1_RSA";
1724 default:
1725 assert(0);
1726 return "UNKNOWN";
1727 }
1728
David Benjamin71f07942015-04-08 02:36:59 -04001729 case SSL_kPSK:
1730 assert(cipher->algorithm_auth == SSL_aPSK);
1731 return "PSK";
1732
1733 default:
1734 assert(0);
1735 return "UNKNOWN";
1736 }
1737}
1738
1739static const char *ssl_cipher_get_enc_name(const SSL_CIPHER *cipher) {
1740 switch (cipher->algorithm_enc) {
1741 case SSL_3DES:
1742 return "3DES_EDE_CBC";
David Benjamin71f07942015-04-08 02:36:59 -04001743 case SSL_AES128:
1744 return "AES_128_CBC";
1745 case SSL_AES256:
1746 return "AES_256_CBC";
1747 case SSL_AES128GCM:
1748 return "AES_128_GCM";
1749 case SSL_AES256GCM:
1750 return "AES_256_GCM";
David Benjamin13414b32015-12-09 23:02:39 -05001751 case SSL_CHACHA20POLY1305:
Brian Smith271777f2015-10-03 13:53:33 -10001752 case SSL_CHACHA20POLY1305_OLD:
David Benjamin71f07942015-04-08 02:36:59 -04001753 return "CHACHA20_POLY1305";
1755 default:
1756 assert(0);
1757 return "UNKNOWN";
1758 }
1759}
1760
1761static const char *ssl_cipher_get_prf_name(const SSL_CIPHER *cipher) {
David Benjaminb0883312015-08-06 09:54:13 -04001762 switch (cipher->algorithm_prf) {
1763 case SSL_HANDSHAKE_MAC_DEFAULT:
1764 /* Before TLS 1.2, the PRF component is the hash used in the HMAC, which is
1765 * only ever MD5 or SHA-1. */
1766 switch (cipher->algorithm_mac) {
1767 case SSL_MD5:
1768 return "MD5";
1769 case SSL_SHA1:
1770 return "SHA";
1771 }
1772 break;
1773 case SSL_HANDSHAKE_MAC_SHA256:
1774 return "SHA256";
1775 case SSL_HANDSHAKE_MAC_SHA384:
1776 return "SHA384";
David Benjamin71f07942015-04-08 02:36:59 -04001777 }
David Benjaminb0883312015-08-06 09:54:13 -04001778 assert(0);
1779 return "UNKNOWN";
David Benjamin71f07942015-04-08 02:36:59 -04001780}
1781
1782char *SSL_CIPHER_get_rfc_name(const SSL_CIPHER *cipher) {
1783 if (cipher == NULL) {
1784 return NULL;
1785 }
1786
1787 const char *kx_name = SSL_CIPHER_get_kx_name(cipher);
1788 const char *enc_name = ssl_cipher_get_enc_name(cipher);
1789 const char *prf_name = ssl_cipher_get_prf_name(cipher);
1790
1791 /* The final name is TLS_{kx_name}_WITH_{enc_name}_{prf_name}. */
1792 size_t len = 4 + strlen(kx_name) + 6 + strlen(enc_name) + 1 +
1793 strlen(prf_name) + 1;
1794 char *ret = OPENSSL_malloc(len);
1795 if (ret == NULL) {
1796 return NULL;
1797 }
1798 if (BUF_strlcpy(ret, "TLS_", len) >= len ||
1799 BUF_strlcat(ret, kx_name, len) >= len ||
1800 BUF_strlcat(ret, "_WITH_", len) >= len ||
1801 BUF_strlcat(ret, enc_name, len) >= len ||
1802 BUF_strlcat(ret, "_", len) >= len ||
1803 BUF_strlcat(ret, prf_name, len) >= len) {
1804 assert(0);
1805 OPENSSL_free(ret);
1806 return NULL;
1807 }
1808 assert(strlen(ret) + 1 == len);
1809 return ret;
1810}
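/* A short usage sketch: the returned string is freshly allocated, so the
 * caller releases it with OPENSSL_free. For the ECDHE_RSA AES-128-GCM cipher
 * the result is "TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256".
 *
 *   char *rfc_name = SSL_CIPHER_get_rfc_name(cipher);
 *   if (rfc_name != NULL) {
 *     // ... use rfc_name ...
 *     OPENSSL_free(rfc_name);
 *   }
 */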
1811
1812int SSL_CIPHER_get_bits(const SSL_CIPHER *cipher, int *out_alg_bits) {
1813 if (cipher == NULL) {
1814 return 0;
1815 }
1816
David Benjamin9f2e2772015-11-18 09:59:43 -05001817 int alg_bits, strength_bits;
1818 switch (cipher->algorithm_enc) {
1819 case SSL_AES128:
1820 case SSL_AES128GCM:
David Benjamin9f2e2772015-11-18 09:59:43 -05001821 alg_bits = 128;
1822 strength_bits = 128;
1823 break;
1824
1825 case SSL_AES256:
1826 case SSL_AES256GCM:
1827#if !defined(BORINGSSL_ANDROID_SYSTEM)
1828 case SSL_CHACHA20POLY1305_OLD:
1829#endif
David Benjamin13414b32015-12-09 23:02:39 -05001830 case SSL_CHACHA20POLY1305:
David Benjamin9f2e2772015-11-18 09:59:43 -05001831 alg_bits = 256;
1832 strength_bits = 256;
1833 break;
1834
1835 case SSL_3DES:
1836 alg_bits = 168;
1837 strength_bits = 112;
1838 break;
1839
1840 case SSL_eNULL:
1841 alg_bits = 0;
1842 strength_bits = 0;
1843 break;
1844
1845 default:
1846 assert(0);
1847 alg_bits = 0;
1848 strength_bits = 0;
David Benjamin71f07942015-04-08 02:36:59 -04001849 }
David Benjamin9f2e2772015-11-18 09:59:43 -05001850
1851 if (out_alg_bits != NULL) {
1852 *out_alg_bits = alg_bits;
1853 }
1854 return strength_bits;
David Benjamin71f07942015-04-08 02:36:59 -04001855}
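/* For example,
 *
 *   int alg_bits;
 *   int strength_bits = SSL_CIPHER_get_bits(cipher, &alg_bits);
 *
 * yields strength_bits == 112 and alg_bits == 168 for a 3DES suite, and
 * 128 for both values for an AES-128 suite. */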
1856
Adam Langleyfcf25832014-12-18 17:42:32 -08001857const char *SSL_CIPHER_description(const SSL_CIPHER *cipher, char *buf,
1858 int len) {
Adam Langleyfcf25832014-12-18 17:42:32 -08001859 const char *kx, *au, *enc, *mac;
David Benjamindcb6ef02015-11-06 15:35:54 -05001860 uint32_t alg_mkey, alg_auth, alg_enc, alg_mac;
Adam Langley95c29f32014-06-20 12:00:00 -07001861
Adam Langleyfcf25832014-12-18 17:42:32 -08001862 alg_mkey = cipher->algorithm_mkey;
1863 alg_auth = cipher->algorithm_auth;
1864 alg_enc = cipher->algorithm_enc;
1865 alg_mac = cipher->algorithm_mac;
Adam Langley95c29f32014-06-20 12:00:00 -07001866
Adam Langleyfcf25832014-12-18 17:42:32 -08001867 switch (alg_mkey) {
1868 case SSL_kRSA:
1869 kx = "RSA";
1870 break;
Adam Langley95c29f32014-06-20 12:00:00 -07001871
David Benjamin7061e282015-03-19 11:10:48 -04001872 case SSL_kDHE:
Adam Langleyfcf25832014-12-18 17:42:32 -08001873 kx = "DH";
1874 break;
Adam Langley95c29f32014-06-20 12:00:00 -07001875
David Benjamin7061e282015-03-19 11:10:48 -04001876 case SSL_kECDHE:
Adam Langleyfcf25832014-12-18 17:42:32 -08001877 kx = "ECDH";
1878 break;
Adam Langley95c29f32014-06-20 12:00:00 -07001879
Matt Braithwaite053931e2016-05-25 12:06:05 -07001880 case SSL_kCECPQ1:
1881 kx = "CECPQ1";
1882 break;
1883
Adam Langleyfcf25832014-12-18 17:42:32 -08001884 case SSL_kPSK:
1885 kx = "PSK";
1886 break;
Adam Langley95c29f32014-06-20 12:00:00 -07001887
Adam Langleyfcf25832014-12-18 17:42:32 -08001888 default:
1889 kx = "unknown";
1890 }
Adam Langley95c29f32014-06-20 12:00:00 -07001891
Adam Langleyfcf25832014-12-18 17:42:32 -08001892 switch (alg_auth) {
1893 case SSL_aRSA:
1894 au = "RSA";
1895 break;
Adam Langley95c29f32014-06-20 12:00:00 -07001896
Adam Langleyfcf25832014-12-18 17:42:32 -08001897 case SSL_aECDSA:
1898 au = "ECDSA";
1899 break;
Adam Langley4d4bff82014-06-20 12:00:00 -07001900
Adam Langleyfcf25832014-12-18 17:42:32 -08001901 case SSL_aPSK:
1902 au = "PSK";
1903 break;
Adam Langley4d4bff82014-06-20 12:00:00 -07001904
Adam Langleyfcf25832014-12-18 17:42:32 -08001905 default:
1906 au = "unknown";
1907 break;
1908 }
Adam Langleyde0b2022014-06-20 12:00:00 -07001909
Adam Langleyfcf25832014-12-18 17:42:32 -08001910 switch (alg_enc) {
1911 case SSL_3DES:
1912 enc = "3DES(168)";
1913 break;
Adam Langley95c29f32014-06-20 12:00:00 -07001914
Adam Langleyfcf25832014-12-18 17:42:32 -08001915 case SSL_AES128:
1916 enc = "AES(128)";
1917 break;
1918
1919 case SSL_AES256:
1920 enc = "AES(256)";
1921 break;
1922
1923 case SSL_AES128GCM:
1924 enc = "AESGCM(128)";
1925 break;
1926
1927 case SSL_AES256GCM:
1928 enc = "AESGCM(256)";
1929 break;
1930
Brian Smith271777f2015-10-03 13:53:33 -10001931 case SSL_CHACHA20POLY1305_OLD:
David Benjamin13414b32015-12-09 23:02:39 -05001932 enc = "ChaCha20-Poly1305-Old";
1933 break;
1934
1935 case SSL_CHACHA20POLY1305:
Adam Langleyfcf25832014-12-18 17:42:32 -08001936 enc = "ChaCha20-Poly1305";
1937 break;
1938
Matt Braithwaiteaf096752015-09-02 19:48:16 -07001939 case SSL_eNULL:
1940      enc = "None";
1941 break;
1942
Adam Langleyfcf25832014-12-18 17:42:32 -08001943 default:
1944 enc = "unknown";
1945 break;
1946 }
1947
1948 switch (alg_mac) {
1949 case SSL_MD5:
1950 mac = "MD5";
1951 break;
1952
1953 case SSL_SHA1:
1954 mac = "SHA1";
1955 break;
1956
1957 case SSL_SHA256:
1958 mac = "SHA256";
1959 break;
1960
1961 case SSL_SHA384:
1962 mac = "SHA384";
1963 break;
1964
1965 case SSL_AEAD:
1966 mac = "AEAD";
1967 break;
1968
1969 default:
1970 mac = "unknown";
1971 break;
1972 }
1973
1974 if (buf == NULL) {
1975 len = 128;
1976 buf = OPENSSL_malloc(len);
David Benjamin1eed2c02015-02-08 23:20:06 -05001977 if (buf == NULL) {
1978 return NULL;
1979 }
Adam Langleyfcf25832014-12-18 17:42:32 -08001980 } else if (len < 128) {
1981 return "Buffer too small";
1982 }
1983
Brian Smith0687bdf2016-01-17 09:18:26 -10001984 BIO_snprintf(buf, len, "%-23s Kx=%-8s Au=%-4s Enc=%-9s Mac=%-4s\n",
1985 cipher->name, kx, au, enc, mac);
Adam Langleyfcf25832014-12-18 17:42:32 -08001986 return buf;
1987}
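/* A minimal usage sketch: pass a buffer of at least 128 bytes, or NULL to
 * have one allocated (which the caller then releases with OPENSSL_free):
 *
 *   char buf[128];
 *   const char *desc = SSL_CIPHER_description(cipher, buf, sizeof(buf));
 *
 * The description is a single line of the form
 * "<name> Kx=<kx> Au=<au> Enc=<enc> Mac=<mac>\n". */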
1988
David Benjamin71f07942015-04-08 02:36:59 -04001989const char *SSL_CIPHER_get_version(const SSL_CIPHER *cipher) {
1990 return "TLSv1/SSLv3";
Adam Langleyfcf25832014-12-18 17:42:32 -08001991}
1992
Matt Braithwaite6a1275b2015-06-26 12:09:10 -07001993COMP_METHOD *SSL_COMP_get_compression_methods(void) { return NULL; }
Adam Langleyfcf25832014-12-18 17:42:32 -08001994
Matt Braithwaite6a1275b2015-06-26 12:09:10 -07001995int SSL_COMP_add_compression_method(int id, COMP_METHOD *cm) { return 1; }
Adam Langleyfcf25832014-12-18 17:42:32 -08001996
Matt Braithwaite6a1275b2015-06-26 12:09:10 -07001997const char *SSL_COMP_get_name(const COMP_METHOD *comp) { return NULL; }
Adam Langley95c29f32014-06-20 12:00:00 -07001998
David Benjamind1d80782015-07-05 11:54:09 -04001999int ssl_cipher_get_key_type(const SSL_CIPHER *cipher) {
David Benjamin71f07942015-04-08 02:36:59 -04002000 uint32_t alg_a = cipher->algorithm_auth;
Adam Langley95c29f32014-06-20 12:00:00 -07002001
Adam Langleyfcf25832014-12-18 17:42:32 -08002002 if (alg_a & SSL_aECDSA) {
David Benjamind1d80782015-07-05 11:54:09 -04002003 return EVP_PKEY_EC;
Adam Langleyfcf25832014-12-18 17:42:32 -08002004 } else if (alg_a & SSL_aRSA) {
David Benjamind1d80782015-07-05 11:54:09 -04002005 return EVP_PKEY_RSA;
Adam Langleyfcf25832014-12-18 17:42:32 -08002006 }
Adam Langley95c29f32014-06-20 12:00:00 -07002007
David Benjamind1d80782015-07-05 11:54:09 -04002008 return EVP_PKEY_NONE;
Adam Langleyfcf25832014-12-18 17:42:32 -08002009}
David Benjamin9c651c92014-07-12 13:27:45 -04002010
David Benjaminc032dfa2016-05-12 14:54:57 -04002011int ssl_cipher_uses_certificate_auth(const SSL_CIPHER *cipher) {
2012 return (cipher->algorithm_auth & SSL_aCERT) != 0;
Adam Langleyfcf25832014-12-18 17:42:32 -08002013}
2014
Adam Langleyfcf25832014-12-18 17:42:32 -08002015int ssl_cipher_requires_server_key_exchange(const SSL_CIPHER *cipher) {
2016  /* Ephemeral key exchanges (DHE, ECDHE, CECPQ1) require a ServerKeyExchange. */
Matt Braithwaite053931e2016-05-25 12:06:05 -07002017 if (cipher->algorithm_mkey & SSL_kDHE ||
2018 cipher->algorithm_mkey & SSL_kECDHE ||
2019 cipher->algorithm_mkey & SSL_kCECPQ1) {
Adam Langleyfcf25832014-12-18 17:42:32 -08002020 return 1;
2021 }
2022
2023 /* It is optional in all others. */
2024 return 0;
2025}
David Benjaminb8d28cf2015-07-28 21:34:45 -04002026
2027size_t ssl_cipher_get_record_split_len(const SSL_CIPHER *cipher) {
2028 size_t block_size;
2029 switch (cipher->algorithm_enc) {
2030 case SSL_3DES:
2031 block_size = 8;
2032 break;
2033 case SSL_AES128:
2034 case SSL_AES256:
2035 block_size = 16;
2036 break;
2037 default:
2038 return 0;
2039 }
2040
2041 size_t mac_len;
2042 switch (cipher->algorithm_mac) {
2043 case SSL_MD5:
2044 mac_len = MD5_DIGEST_LENGTH;
2045 break;
2046 case SSL_SHA1:
2047 mac_len = SHA_DIGEST_LENGTH;
2048 break;
2049 default:
2050 return 0;
2051 }
2052
2053 size_t ret = 1 + mac_len;
2054 ret += block_size - (ret % block_size);
2055 return ret;
2056}
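/* Worked example of the computation above: for an AES-128-CBC suite with a
 * SHA-1 MAC, block_size is 16 and mac_len is 20, so ret starts at 1 + 20 = 21
 * and is rounded up to the next multiple of the block size:
 * 21 + (16 - 21 % 16) = 32 bytes. */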