/* Copyright (C) 1995-1998 Eric Young (eay@cryptsoft.com)
 * All rights reserved.
 *
 * This package is an SSL implementation written
 * by Eric Young (eay@cryptsoft.com).
 * The implementation was written so as to conform with Netscapes SSL.
 *
 * This library is free for commercial and non-commercial use as long as
 * the following conditions are aheared to.  The following conditions
 * apply to all code found in this distribution, be it the RC4, RSA,
 * lhash, DES, etc., code; not just the SSL code.  The SSL documentation
 * included with this distribution is covered by the same copyright terms
 * except that the holder is Tim Hudson (tjh@cryptsoft.com).
 *
 * Copyright remains Eric Young's, and as such any Copyright notices in
 * the code are not to be removed.
 * If this package is used in a product, Eric Young should be given attribution
 * as the author of the parts of the library used.
 * This can be in the form of a textual message at program startup or
 * in documentation (online or textual) provided with the package.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. All advertising materials mentioning features or use of this software
 *    must display the following acknowledgement:
 *    "This product includes cryptographic software written by
 *     Eric Young (eay@cryptsoft.com)"
 *    The word 'cryptographic' can be left out if the rouines from the library
 *    being used are not cryptographic related :-).
 * 4. If you include any Windows specific code (or a derivative thereof) from
 *    the apps directory (application code) you must include an acknowledgement:
 *    "This product includes software written by Tim Hudson (tjh@cryptsoft.com)"
 *
 * THIS SOFTWARE IS PROVIDED BY ERIC YOUNG ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 * The licence and distribution terms for any publically available version or
 * derivative of this code cannot be changed.  i.e. this code cannot simply be
 * copied and put under another distribution licence
 * [including the GNU Public Licence.]
 */
/* ====================================================================
 * Copyright (c) 1998-2007 The OpenSSL Project.  All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 *
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in
 *    the documentation and/or other materials provided with the
 *    distribution.
 *
 * 3. All advertising materials mentioning features or use of this
 *    software must display the following acknowledgment:
 *    "This product includes software developed by the OpenSSL Project
 *    for use in the OpenSSL Toolkit. (http://www.openssl.org/)"
 *
 * 4. The names "OpenSSL Toolkit" and "OpenSSL Project" must not be used to
 *    endorse or promote products derived from this software without
 *    prior written permission. For written permission, please contact
 *    openssl-core@openssl.org.
 *
 * 5. Products derived from this software may not be called "OpenSSL"
 *    nor may "OpenSSL" appear in their names without prior written
 *    permission of the OpenSSL Project.
 *
 * 6. Redistributions of any form whatsoever must retain the following
 *    acknowledgment:
 *    "This product includes software developed by the OpenSSL Project
 *    for use in the OpenSSL Toolkit (http://www.openssl.org/)"
 *
 * THIS SOFTWARE IS PROVIDED BY THE OpenSSL PROJECT ``AS IS'' AND ANY
 * EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE OpenSSL PROJECT OR
 * ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
 * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
 * OF THE POSSIBILITY OF SUCH DAMAGE.
 * ====================================================================
 *
 * This product includes cryptographic software written by Eric Young
 * (eay@cryptsoft.com).  This product includes software written by Tim
 * Hudson (tjh@cryptsoft.com).
 *
 */
/* ====================================================================
 * Copyright 2002 Sun Microsystems, Inc. ALL RIGHTS RESERVED.
 * ECC cipher suite support in OpenSSL originally developed by
 * SUN MICROSYSTEMS, INC., and contributed to the OpenSSL project.
 */
/* ====================================================================
 * Copyright 2005 Nokia. All rights reserved.
 *
 * The portions of the attached software ("Contribution") is developed by
 * Nokia Corporation and is licensed pursuant to the OpenSSL open source
 * license.
 *
 * The Contribution, originally written by Mika Kousa and Pasi Eronen of
 * Nokia Corporation, consists of the "PSK" (Pre-Shared Key) ciphersuites
 * support (see RFC 4279) to OpenSSL.
 *
 * No patent licenses or other rights except those expressly stated in
 * the OpenSSL open source license shall be deemed granted or received
 * expressly, by implication, estoppel, or otherwise.
 *
 * No assurances are provided by Nokia that the Contribution does not
 * infringe the patent or other intellectual property rights of any third
 * party or that the license provides you with all the necessary rights
 * to make use of the Contribution.
 *
 * THE SOFTWARE IS PROVIDED "AS IS" WITHOUT WARRANTY OF ANY KIND. IN
 * ADDITION TO THE DISCLAIMERS INCLUDED IN THE LICENSE, NOKIA
 * SPECIFICALLY DISCLAIMS ANY LIABILITY FOR CLAIMS BROUGHT BY YOU OR ANY
 * OTHER ENTITY BASED ON INFRINGEMENT OF INTELLECTUAL PROPERTY RIGHTS OR
 * OTHERWISE. */

#include <openssl/ssl.h>

#include <assert.h>
#include <string.h>

#include <openssl/buf.h>
#include <openssl/err.h>
#include <openssl/md5.h>
#include <openssl/mem.h>
#include <openssl/sha.h>
#include <openssl/stack.h>

#include "internal.h"
#include "../crypto/internal.h"

/* kCiphers is an array of all supported ciphers, sorted by id. */
static const SSL_CIPHER kCiphers[] = {
    /* The RSA ciphers */
    /* Cipher 02 */
    {
      SSL3_TXT_RSA_NULL_SHA,
      SSL3_CK_RSA_NULL_SHA,
      SSL_kRSA,
      SSL_aRSA,
      SSL_eNULL,
      SSL_SHA1,
      SSL_HANDSHAKE_MAC_DEFAULT,
    },

    /* Cipher 0A */
    {
      SSL3_TXT_RSA_DES_192_CBC3_SHA,
      SSL3_CK_RSA_DES_192_CBC3_SHA,
      SSL_kRSA,
      SSL_aRSA,
      SSL_3DES,
      SSL_SHA1,
      SSL_HANDSHAKE_MAC_DEFAULT,
    },


    /* New AES ciphersuites */

    /* Cipher 2F */
    {
      TLS1_TXT_RSA_WITH_AES_128_SHA,
      TLS1_CK_RSA_WITH_AES_128_SHA,
      SSL_kRSA,
      SSL_aRSA,
      SSL_AES128,
      SSL_SHA1,
      SSL_HANDSHAKE_MAC_DEFAULT,
    },

    /* Cipher 33 */
    {
      TLS1_TXT_DHE_RSA_WITH_AES_128_SHA,
      TLS1_CK_DHE_RSA_WITH_AES_128_SHA,
      SSL_kDHE,
      SSL_aRSA,
      SSL_AES128,
      SSL_SHA1,
      SSL_HANDSHAKE_MAC_DEFAULT,
    },

    /* Cipher 35 */
    {
      TLS1_TXT_RSA_WITH_AES_256_SHA,
      TLS1_CK_RSA_WITH_AES_256_SHA,
      SSL_kRSA,
      SSL_aRSA,
      SSL_AES256,
      SSL_SHA1,
      SSL_HANDSHAKE_MAC_DEFAULT,
    },

    /* Cipher 39 */
    {
      TLS1_TXT_DHE_RSA_WITH_AES_256_SHA,
      TLS1_CK_DHE_RSA_WITH_AES_256_SHA,
      SSL_kDHE,
      SSL_aRSA,
      SSL_AES256,
      SSL_SHA1,
      SSL_HANDSHAKE_MAC_DEFAULT,
    },


    /* TLS v1.2 ciphersuites */

    /* Cipher 3C */
    {
      TLS1_TXT_RSA_WITH_AES_128_SHA256,
      TLS1_CK_RSA_WITH_AES_128_SHA256,
      SSL_kRSA,
      SSL_aRSA,
      SSL_AES128,
      SSL_SHA256,
      SSL_HANDSHAKE_MAC_SHA256,
    },

    /* Cipher 3D */
    {
      TLS1_TXT_RSA_WITH_AES_256_SHA256,
      TLS1_CK_RSA_WITH_AES_256_SHA256,
      SSL_kRSA,
      SSL_aRSA,
      SSL_AES256,
      SSL_SHA256,
      SSL_HANDSHAKE_MAC_SHA256,
    },

    /* Cipher 67 */
    {
      TLS1_TXT_DHE_RSA_WITH_AES_128_SHA256,
      TLS1_CK_DHE_RSA_WITH_AES_128_SHA256,
      SSL_kDHE,
      SSL_aRSA,
      SSL_AES128,
      SSL_SHA256,
      SSL_HANDSHAKE_MAC_SHA256,
    },

    /* Cipher 6B */
    {
      TLS1_TXT_DHE_RSA_WITH_AES_256_SHA256,
      TLS1_CK_DHE_RSA_WITH_AES_256_SHA256,
      SSL_kDHE,
      SSL_aRSA,
      SSL_AES256,
      SSL_SHA256,
      SSL_HANDSHAKE_MAC_SHA256,
    },

    /* PSK cipher suites. */

    /* Cipher 8C */
    {
      TLS1_TXT_PSK_WITH_AES_128_CBC_SHA,
      TLS1_CK_PSK_WITH_AES_128_CBC_SHA,
      SSL_kPSK,
      SSL_aPSK,
      SSL_AES128,
      SSL_SHA1,
      SSL_HANDSHAKE_MAC_DEFAULT,
    },

    /* Cipher 8D */
    {
      TLS1_TXT_PSK_WITH_AES_256_CBC_SHA,
      TLS1_CK_PSK_WITH_AES_256_CBC_SHA,
      SSL_kPSK,
      SSL_aPSK,
      SSL_AES256,
      SSL_SHA1,
      SSL_HANDSHAKE_MAC_DEFAULT,
    },

    /* GCM ciphersuites from RFC5288 */

    /* Cipher 9C */
    {
      TLS1_TXT_RSA_WITH_AES_128_GCM_SHA256,
      TLS1_CK_RSA_WITH_AES_128_GCM_SHA256,
      SSL_kRSA,
      SSL_aRSA,
      SSL_AES128GCM,
      SSL_AEAD,
      SSL_HANDSHAKE_MAC_SHA256,
    },

    /* Cipher 9D */
    {
      TLS1_TXT_RSA_WITH_AES_256_GCM_SHA384,
      TLS1_CK_RSA_WITH_AES_256_GCM_SHA384,
      SSL_kRSA,
      SSL_aRSA,
      SSL_AES256GCM,
      SSL_AEAD,
      SSL_HANDSHAKE_MAC_SHA384,
    },

    /* Cipher 9E */
    {
      TLS1_TXT_DHE_RSA_WITH_AES_128_GCM_SHA256,
      TLS1_CK_DHE_RSA_WITH_AES_128_GCM_SHA256,
      SSL_kDHE,
      SSL_aRSA,
      SSL_AES128GCM,
      SSL_AEAD,
      SSL_HANDSHAKE_MAC_SHA256,
    },

    /* Cipher 9F */
    {
      TLS1_TXT_DHE_RSA_WITH_AES_256_GCM_SHA384,
      TLS1_CK_DHE_RSA_WITH_AES_256_GCM_SHA384,
      SSL_kDHE,
      SSL_aRSA,
      SSL_AES256GCM,
      SSL_AEAD,
      SSL_HANDSHAKE_MAC_SHA384,
    },

    /* TLS 1.3 suites. */

    /* Cipher 1301 */
    {
      TLS1_TXT_AES_128_GCM_SHA256,
      TLS1_CK_AES_128_GCM_SHA256,
      SSL_kGENERIC,
      SSL_aGENERIC,
      SSL_AES128GCM,
      SSL_AEAD,
      SSL_HANDSHAKE_MAC_SHA256,
    },

    /* Cipher 1302 */
    {
      TLS1_TXT_AES_256_GCM_SHA384,
      TLS1_CK_AES_256_GCM_SHA384,
      SSL_kGENERIC,
      SSL_aGENERIC,
      SSL_AES256GCM,
      SSL_AEAD,
      SSL_HANDSHAKE_MAC_SHA384,
    },

    /* Cipher 1303 */
    {
      TLS1_TXT_CHACHA20_POLY1305_SHA256,
      TLS1_CK_CHACHA20_POLY1305_SHA256,
      SSL_kGENERIC,
      SSL_aGENERIC,
      SSL_CHACHA20POLY1305,
      SSL_AEAD,
      SSL_HANDSHAKE_MAC_SHA256,
    },

    /* Cipher C009 */
    {
      TLS1_TXT_ECDHE_ECDSA_WITH_AES_128_CBC_SHA,
      TLS1_CK_ECDHE_ECDSA_WITH_AES_128_CBC_SHA,
      SSL_kECDHE,
      SSL_aECDSA,
      SSL_AES128,
      SSL_SHA1,
      SSL_HANDSHAKE_MAC_DEFAULT,
    },

    /* Cipher C00A */
    {
      TLS1_TXT_ECDHE_ECDSA_WITH_AES_256_CBC_SHA,
      TLS1_CK_ECDHE_ECDSA_WITH_AES_256_CBC_SHA,
      SSL_kECDHE,
      SSL_aECDSA,
      SSL_AES256,
      SSL_SHA1,
      SSL_HANDSHAKE_MAC_DEFAULT,
    },

    /* Cipher C013 */
    {
      TLS1_TXT_ECDHE_RSA_WITH_AES_128_CBC_SHA,
      TLS1_CK_ECDHE_RSA_WITH_AES_128_CBC_SHA,
      SSL_kECDHE,
      SSL_aRSA,
      SSL_AES128,
      SSL_SHA1,
      SSL_HANDSHAKE_MAC_DEFAULT,
    },

    /* Cipher C014 */
    {
      TLS1_TXT_ECDHE_RSA_WITH_AES_256_CBC_SHA,
      TLS1_CK_ECDHE_RSA_WITH_AES_256_CBC_SHA,
      SSL_kECDHE,
      SSL_aRSA,
      SSL_AES256,
      SSL_SHA1,
      SSL_HANDSHAKE_MAC_DEFAULT,
    },


    /* HMAC based TLS v1.2 ciphersuites from RFC5289 */

    /* Cipher C023 */
    {
      TLS1_TXT_ECDHE_ECDSA_WITH_AES_128_SHA256,
      TLS1_CK_ECDHE_ECDSA_WITH_AES_128_SHA256,
      SSL_kECDHE,
      SSL_aECDSA,
      SSL_AES128,
      SSL_SHA256,
      SSL_HANDSHAKE_MAC_SHA256,
    },

    /* Cipher C024 */
    {
      TLS1_TXT_ECDHE_ECDSA_WITH_AES_256_SHA384,
      TLS1_CK_ECDHE_ECDSA_WITH_AES_256_SHA384,
      SSL_kECDHE,
      SSL_aECDSA,
      SSL_AES256,
      SSL_SHA384,
      SSL_HANDSHAKE_MAC_SHA384,
    },

    /* Cipher C027 */
    {
      TLS1_TXT_ECDHE_RSA_WITH_AES_128_SHA256,
      TLS1_CK_ECDHE_RSA_WITH_AES_128_SHA256,
      SSL_kECDHE,
      SSL_aRSA,
      SSL_AES128,
      SSL_SHA256,
      SSL_HANDSHAKE_MAC_SHA256,
    },

    /* Cipher C028 */
    {
      TLS1_TXT_ECDHE_RSA_WITH_AES_256_SHA384,
      TLS1_CK_ECDHE_RSA_WITH_AES_256_SHA384,
      SSL_kECDHE,
      SSL_aRSA,
      SSL_AES256,
      SSL_SHA384,
      SSL_HANDSHAKE_MAC_SHA384,
    },


    /* GCM based TLS v1.2 ciphersuites from RFC5289 */

    /* Cipher C02B */
    {
      TLS1_TXT_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256,
      TLS1_CK_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256,
      SSL_kECDHE,
      SSL_aECDSA,
      SSL_AES128GCM,
      SSL_AEAD,
      SSL_HANDSHAKE_MAC_SHA256,
    },

    /* Cipher C02C */
    {
      TLS1_TXT_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384,
      TLS1_CK_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384,
      SSL_kECDHE,
      SSL_aECDSA,
      SSL_AES256GCM,
      SSL_AEAD,
      SSL_HANDSHAKE_MAC_SHA384,
    },

    /* Cipher C02F */
    {
      TLS1_TXT_ECDHE_RSA_WITH_AES_128_GCM_SHA256,
      TLS1_CK_ECDHE_RSA_WITH_AES_128_GCM_SHA256,
      SSL_kECDHE,
      SSL_aRSA,
      SSL_AES128GCM,
      SSL_AEAD,
      SSL_HANDSHAKE_MAC_SHA256,
    },

    /* Cipher C030 */
    {
      TLS1_TXT_ECDHE_RSA_WITH_AES_256_GCM_SHA384,
      TLS1_CK_ECDHE_RSA_WITH_AES_256_GCM_SHA384,
      SSL_kECDHE,
      SSL_aRSA,
      SSL_AES256GCM,
      SSL_AEAD,
      SSL_HANDSHAKE_MAC_SHA384,
    },

    /* ECDHE-PSK cipher suites. */

    /* Cipher C035 */
    {
      TLS1_TXT_ECDHE_PSK_WITH_AES_128_CBC_SHA,
      TLS1_CK_ECDHE_PSK_WITH_AES_128_CBC_SHA,
      SSL_kECDHE,
      SSL_aPSK,
      SSL_AES128,
      SSL_SHA1,
      SSL_HANDSHAKE_MAC_DEFAULT,
    },

    /* Cipher C036 */
    {
      TLS1_TXT_ECDHE_PSK_WITH_AES_256_CBC_SHA,
      TLS1_CK_ECDHE_PSK_WITH_AES_256_CBC_SHA,
      SSL_kECDHE,
      SSL_aPSK,
      SSL_AES256,
      SSL_SHA1,
      SSL_HANDSHAKE_MAC_DEFAULT,
    },

    /* ChaCha20-Poly1305 cipher suites. */

#if !defined(BORINGSSL_ANDROID_SYSTEM)
    {
      TLS1_TXT_ECDHE_RSA_WITH_CHACHA20_POLY1305_OLD,
      TLS1_CK_ECDHE_RSA_CHACHA20_POLY1305_OLD,
      SSL_kECDHE,
      SSL_aRSA,
      SSL_CHACHA20POLY1305_OLD,
      SSL_AEAD,
      SSL_HANDSHAKE_MAC_SHA256,
    },

    {
      TLS1_TXT_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_OLD,
      TLS1_CK_ECDHE_ECDSA_CHACHA20_POLY1305_OLD,
      SSL_kECDHE,
      SSL_aECDSA,
      SSL_CHACHA20POLY1305_OLD,
      SSL_AEAD,
      SSL_HANDSHAKE_MAC_SHA256,
    },
#endif

    /* Cipher CCA8 */
    {
      TLS1_TXT_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256,
      TLS1_CK_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256,
      SSL_kECDHE,
      SSL_aRSA,
      SSL_CHACHA20POLY1305,
      SSL_AEAD,
      SSL_HANDSHAKE_MAC_SHA256,
    },

    /* Cipher CCA9 */
    {
      TLS1_TXT_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256,
      TLS1_CK_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256,
      SSL_kECDHE,
      SSL_aECDSA,
      SSL_CHACHA20POLY1305,
      SSL_AEAD,
      SSL_HANDSHAKE_MAC_SHA256,
    },

    /* Cipher CCAB */
    {
      TLS1_TXT_ECDHE_PSK_WITH_CHACHA20_POLY1305_SHA256,
      TLS1_CK_ECDHE_PSK_WITH_CHACHA20_POLY1305_SHA256,
      SSL_kECDHE,
      SSL_aPSK,
      SSL_CHACHA20POLY1305,
      SSL_AEAD,
      SSL_HANDSHAKE_MAC_SHA256,
    },

};

static const size_t kCiphersLen = OPENSSL_ARRAY_SIZE(kCiphers);

#define CIPHER_ADD 1
#define CIPHER_KILL 2
#define CIPHER_DEL 3
#define CIPHER_ORD 4
#define CIPHER_SPECIAL 5

typedef struct cipher_order_st {
  const SSL_CIPHER *cipher;
  int active;
  int in_group;
  struct cipher_order_st *next, *prev;
} CIPHER_ORDER;

typedef struct cipher_alias_st {
  /* name is the name of the cipher alias. */
  const char *name;

  /* The following fields are bitmasks for the corresponding fields on
   * |SSL_CIPHER|. A cipher matches a cipher alias iff, for each bitmask, the
   * bit corresponding to the cipher's value is set to 1. If any bitmask is
   * all zeroes, the alias matches nothing. Use |~0u| for the default value. */
  uint32_t algorithm_mkey;
  uint32_t algorithm_auth;
  uint32_t algorithm_enc;
  uint32_t algorithm_mac;

  /* min_version, if non-zero, matches all ciphers which were added in that
   * particular protocol version. */
  uint16_t min_version;
} CIPHER_ALIAS;

static const CIPHER_ALIAS kCipherAliases[] = {
    /* "ALL" doesn't include eNULL. It must be explicitly enabled. */
    {"ALL", ~0u, ~0u, ~SSL_eNULL, ~0u, 0},

    /* The "COMPLEMENTOFDEFAULT" rule is omitted. It matches nothing. */

    /* key exchange aliases
     * (some of those using only a single bit here combine
     * multiple key exchange algs according to the RFCs,
     * e.g. kEDH combines DHE_DSS and DHE_RSA) */
    {"kRSA", SSL_kRSA, ~0u, ~0u, ~0u, 0},

    {"kDHE", SSL_kDHE, ~0u, ~0u, ~0u, 0},
    {"kEDH", SSL_kDHE, ~0u, ~0u, ~0u, 0},
    {"DH", SSL_kDHE, ~0u, ~0u, ~0u, 0},

    {"kECDHE", SSL_kECDHE, ~0u, ~0u, ~0u, 0},
    {"kEECDH", SSL_kECDHE, ~0u, ~0u, ~0u, 0},
    {"ECDH", SSL_kECDHE, ~0u, ~0u, ~0u, 0},

    {"kPSK", SSL_kPSK, ~0u, ~0u, ~0u, 0},

    /* server authentication aliases */
    {"aRSA", ~0u, SSL_aRSA, ~SSL_eNULL, ~0u, 0},
    {"aECDSA", ~0u, SSL_aECDSA, ~0u, ~0u, 0},
    {"ECDSA", ~0u, SSL_aECDSA, ~0u, ~0u, 0},
    {"aPSK", ~0u, SSL_aPSK, ~0u, ~0u, 0},

    /* aliases combining key exchange and server authentication */
    {"DHE", SSL_kDHE, ~0u, ~0u, ~0u, 0},
    {"EDH", SSL_kDHE, ~0u, ~0u, ~0u, 0},
    {"ECDHE", SSL_kECDHE, ~0u, ~0u, ~0u, 0},
    {"EECDH", SSL_kECDHE, ~0u, ~0u, ~0u, 0},
    {"RSA", SSL_kRSA, SSL_aRSA, ~SSL_eNULL, ~0u, 0},
    {"PSK", SSL_kPSK, SSL_aPSK, ~0u, ~0u, 0},

    /* symmetric encryption aliases */
    {"3DES", ~0u, ~0u, SSL_3DES, ~0u, 0},
    {"AES128", ~0u, ~0u, SSL_AES128 | SSL_AES128GCM, ~0u, 0},
    {"AES256", ~0u, ~0u, SSL_AES256 | SSL_AES256GCM, ~0u, 0},
    {"AES", ~0u, ~0u, SSL_AES, ~0u, 0},
    {"AESGCM", ~0u, ~0u, SSL_AES128GCM | SSL_AES256GCM, ~0u, 0},
    {"CHACHA20", ~0u, ~0u, SSL_CHACHA20POLY1305 | SSL_CHACHA20POLY1305_OLD, ~0u,
     0},

    /* MAC aliases */
    {"MD5", ~0u, ~0u, ~0u, SSL_MD5, 0},
    {"SHA1", ~0u, ~0u, ~SSL_eNULL, SSL_SHA1, 0},
    {"SHA", ~0u, ~0u, ~SSL_eNULL, SSL_SHA1, 0},
    {"SHA256", ~0u, ~0u, ~0u, SSL_SHA256, 0},
    {"SHA384", ~0u, ~0u, ~0u, SSL_SHA384, 0},

    /* Legacy protocol minimum version aliases. "TLSv1" is intentionally the
     * same as "SSLv3". */
    {"SSLv3", ~0u, ~0u, ~SSL_eNULL, ~0u, SSL3_VERSION},
    {"TLSv1", ~0u, ~0u, ~SSL_eNULL, ~0u, SSL3_VERSION},
    {"TLSv1.2", ~0u, ~0u, ~SSL_eNULL, ~0u, TLS1_2_VERSION},

    /* Legacy strength classes. */
    {"HIGH", ~0u, ~0u, ~SSL_eNULL, ~0u, 0},
    {"FIPS", ~0u, ~0u, ~SSL_eNULL, ~0u, 0},
};

static const size_t kCipherAliasesLen = OPENSSL_ARRAY_SIZE(kCipherAliases);

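/* Illustrative sketch (not part of the library): a multipart rule such as
 * "ECDHE+AES256" is resolved by intersecting the bitmasks of every alias it
 * names. Assuming the table above, the net effect is roughly:
 *
 *   uint32_t alg_mkey = ~0u & SSL_kECDHE;                   // from "ECDHE"
 *   uint32_t alg_enc = ~0u & (SSL_AES256 | SSL_AES256GCM);  // from "AES256"
 *
 * A cipher then matches only if each mask has the bit for the cipher's
 * corresponding algorithm set, so ECDHE_RSA_WITH_AES_256_GCM_SHA384 matches
 * while DHE_RSA_WITH_AES_256_SHA does not. */
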
static int ssl_cipher_id_cmp(const void *in_a, const void *in_b) {
  const SSL_CIPHER *a = in_a;
  const SSL_CIPHER *b = in_b;

  if (a->id > b->id) {
    return 1;
  } else if (a->id < b->id) {
    return -1;
  } else {
    return 0;
  }
}

const SSL_CIPHER *SSL_get_cipher_by_value(uint16_t value) {
  SSL_CIPHER c;

  c.id = 0x03000000L | value;
  return bsearch(&c, kCiphers, kCiphersLen, sizeof(SSL_CIPHER),
                 ssl_cipher_id_cmp);
}

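/* Example use (illustrative sketch, not part of the library, assuming
 * <stdio.h>): look up a cipher by the two-byte value that appears on the wire.
 * 0xc02b is ECDHE_ECDSA_WITH_AES_128_GCM_SHA256 in the table above.
 *
 *   const SSL_CIPHER *cipher = SSL_get_cipher_by_value(0xc02b);
 *   if (cipher != NULL) {
 *     printf("%s\n", SSL_CIPHER_get_name(cipher));
 *   }
 */
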
int ssl_cipher_get_evp_aead(const EVP_AEAD **out_aead,
                            size_t *out_mac_secret_len,
                            size_t *out_fixed_iv_len,
                            const SSL_CIPHER *cipher, uint16_t version) {
  *out_aead = NULL;
  *out_mac_secret_len = 0;
  *out_fixed_iv_len = 0;

  if (cipher->algorithm_mac == SSL_AEAD) {
    if (cipher->algorithm_enc == SSL_AES128GCM) {
      *out_aead = EVP_aead_aes_128_gcm();
      *out_fixed_iv_len = 4;
    } else if (cipher->algorithm_enc == SSL_AES256GCM) {
      *out_aead = EVP_aead_aes_256_gcm();
      *out_fixed_iv_len = 4;
#if !defined(BORINGSSL_ANDROID_SYSTEM)
    } else if (cipher->algorithm_enc == SSL_CHACHA20POLY1305_OLD) {
      *out_aead = EVP_aead_chacha20_poly1305_old();
      *out_fixed_iv_len = 0;
#endif
    } else if (cipher->algorithm_enc == SSL_CHACHA20POLY1305) {
      *out_aead = EVP_aead_chacha20_poly1305();
      *out_fixed_iv_len = 12;
    } else {
      return 0;
    }

    /* In TLS 1.3, the iv_len is equal to the AEAD nonce length whereas the
     * code above computes the TLS 1.2 construction. */
    if (version >= TLS1_3_VERSION) {
      *out_fixed_iv_len = EVP_AEAD_nonce_length(*out_aead);
    }
  } else if (cipher->algorithm_mac == SSL_SHA1) {
    if (cipher->algorithm_enc == SSL_eNULL) {
      if (version == SSL3_VERSION) {
        *out_aead = EVP_aead_null_sha1_ssl3();
      } else {
        *out_aead = EVP_aead_null_sha1_tls();
      }
    } else if (cipher->algorithm_enc == SSL_3DES) {
      if (version == SSL3_VERSION) {
        *out_aead = EVP_aead_des_ede3_cbc_sha1_ssl3();
        *out_fixed_iv_len = 8;
      } else if (version == TLS1_VERSION) {
        *out_aead = EVP_aead_des_ede3_cbc_sha1_tls_implicit_iv();
        *out_fixed_iv_len = 8;
      } else {
        *out_aead = EVP_aead_des_ede3_cbc_sha1_tls();
      }
    } else if (cipher->algorithm_enc == SSL_AES128) {
      if (version == SSL3_VERSION) {
        *out_aead = EVP_aead_aes_128_cbc_sha1_ssl3();
        *out_fixed_iv_len = 16;
      } else if (version == TLS1_VERSION) {
        *out_aead = EVP_aead_aes_128_cbc_sha1_tls_implicit_iv();
        *out_fixed_iv_len = 16;
      } else {
        *out_aead = EVP_aead_aes_128_cbc_sha1_tls();
      }
    } else if (cipher->algorithm_enc == SSL_AES256) {
      if (version == SSL3_VERSION) {
        *out_aead = EVP_aead_aes_256_cbc_sha1_ssl3();
        *out_fixed_iv_len = 16;
      } else if (version == TLS1_VERSION) {
        *out_aead = EVP_aead_aes_256_cbc_sha1_tls_implicit_iv();
        *out_fixed_iv_len = 16;
      } else {
        *out_aead = EVP_aead_aes_256_cbc_sha1_tls();
      }
    } else {
      return 0;
    }

    *out_mac_secret_len = SHA_DIGEST_LENGTH;
  } else if (cipher->algorithm_mac == SSL_SHA256) {
    if (cipher->algorithm_enc == SSL_AES128) {
      *out_aead = EVP_aead_aes_128_cbc_sha256_tls();
    } else if (cipher->algorithm_enc == SSL_AES256) {
      *out_aead = EVP_aead_aes_256_cbc_sha256_tls();
    } else {
      return 0;
    }

    *out_mac_secret_len = SHA256_DIGEST_LENGTH;
  } else if (cipher->algorithm_mac == SSL_SHA384) {
    if (cipher->algorithm_enc != SSL_AES256) {
      return 0;
    }

    *out_aead = EVP_aead_aes_256_cbc_sha384_tls();
    *out_mac_secret_len = SHA384_DIGEST_LENGTH;
  } else {
    return 0;
  }

  return 1;
}

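/* Example use (illustrative sketch, not part of the library): fetch the AEAD
 * that implements record protection for an AES-128-GCM suite under TLS 1.2.
 *
 *   const EVP_AEAD *aead;
 *   size_t mac_secret_len, fixed_iv_len;
 *   const SSL_CIPHER *cipher = SSL_get_cipher_by_value(0xc02f);
 *   if (cipher != NULL &&
 *       ssl_cipher_get_evp_aead(&aead, &mac_secret_len, &fixed_iv_len, cipher,
 *                               TLS1_2_VERSION)) {
 *     // For this AEAD suite, mac_secret_len is 0 and fixed_iv_len is 4.
 *   }
 */
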
const EVP_MD *ssl_get_handshake_digest(uint32_t algorithm_prf) {
  switch (algorithm_prf) {
    case SSL_HANDSHAKE_MAC_DEFAULT:
      return EVP_sha1();
    case SSL_HANDSHAKE_MAC_SHA256:
      return EVP_sha256();
    case SSL_HANDSHAKE_MAC_SHA384:
      return EVP_sha384();
    default:
      return NULL;
  }
}

#define ITEM_SEP(a) \
  (((a) == ':') || ((a) == ' ') || ((a) == ';') || ((a) == ','))

/* rule_equals returns one iff the NUL-terminated string |rule| is equal to the
 * |buf_len| bytes at |buf|. */
static int rule_equals(const char *rule, const char *buf, size_t buf_len) {
  /* |strncmp| alone only checks that |buf| is a prefix of |rule|. */
  return strncmp(rule, buf, buf_len) == 0 && rule[buf_len] == '\0';
}

841
Adam Langley95c29f32014-06-20 12:00:00 -0700842static void ll_append_tail(CIPHER_ORDER **head, CIPHER_ORDER *curr,
Adam Langleyfcf25832014-12-18 17:42:32 -0800843 CIPHER_ORDER **tail) {
844 if (curr == *tail) {
845 return;
846 }
847 if (curr == *head) {
848 *head = curr->next;
849 }
850 if (curr->prev != NULL) {
851 curr->prev->next = curr->next;
852 }
853 if (curr->next != NULL) {
854 curr->next->prev = curr->prev;
855 }
856 (*tail)->next = curr;
857 curr->prev = *tail;
858 curr->next = NULL;
859 *tail = curr;
860}
Adam Langley95c29f32014-06-20 12:00:00 -0700861
862static void ll_append_head(CIPHER_ORDER **head, CIPHER_ORDER *curr,
Adam Langleyfcf25832014-12-18 17:42:32 -0800863 CIPHER_ORDER **tail) {
864 if (curr == *head) {
865 return;
866 }
867 if (curr == *tail) {
868 *tail = curr->prev;
869 }
870 if (curr->next != NULL) {
871 curr->next->prev = curr->prev;
872 }
873 if (curr->prev != NULL) {
874 curr->prev->next = curr->next;
875 }
876 (*head)->prev = curr;
877 curr->next = *head;
878 curr->prev = NULL;
879 *head = curr;
880}
Adam Langley95c29f32014-06-20 12:00:00 -0700881
static void ssl_cipher_collect_ciphers(const SSL_PROTOCOL_METHOD *ssl_method,
                                       CIPHER_ORDER *co_list,
                                       CIPHER_ORDER **head_p,
                                       CIPHER_ORDER **tail_p) {
  /* The set of ciphers is static, but some subset may be unsupported by
   * |ssl_method|, so the list may be smaller. */
  size_t co_list_num = 0;
  for (size_t i = 0; i < kCiphersLen; i++) {
    const SSL_CIPHER *cipher = &kCiphers[i];
    if (ssl_method->supports_cipher(cipher) &&
        /* TLS 1.3 ciphers do not participate in this mechanism. */
        cipher->algorithm_mkey != SSL_kGENERIC) {
      co_list[co_list_num].cipher = cipher;
      co_list[co_list_num].next = NULL;
      co_list[co_list_num].prev = NULL;
      co_list[co_list_num].active = 0;
      co_list[co_list_num].in_group = 0;
      co_list_num++;
    }
  }

  /* Prepare linked list from list entries. */
  if (co_list_num > 0) {
    co_list[0].prev = NULL;

    if (co_list_num > 1) {
      co_list[0].next = &co_list[1];

      for (size_t i = 1; i < co_list_num - 1; i++) {
        co_list[i].prev = &co_list[i - 1];
        co_list[i].next = &co_list[i + 1];
      }

      co_list[co_list_num - 1].prev = &co_list[co_list_num - 2];
    }

    co_list[co_list_num - 1].next = NULL;

    *head_p = &co_list[0];
    *tail_p = &co_list[co_list_num - 1];
  }
}

/* ssl_cipher_apply_rule applies the rule type |rule| to ciphers matching its
 * parameters in the linked list from |*head_p| to |*tail_p|. It writes the new
 * head and tail of the list to |*head_p| and |*tail_p|, respectively.
 *
 * - If |cipher_id| is non-zero, only that cipher is selected.
 * - Otherwise, if |strength_bits| is non-negative, it selects ciphers
 *   of that strength.
 * - Otherwise, it selects ciphers that match each bitmask in |alg_*| and
 *   |min_version|. */
static void ssl_cipher_apply_rule(
    uint32_t cipher_id, uint32_t alg_mkey, uint32_t alg_auth,
    uint32_t alg_enc, uint32_t alg_mac, uint16_t min_version, int rule,
    int strength_bits, int in_group, CIPHER_ORDER **head_p,
    CIPHER_ORDER **tail_p) {
  CIPHER_ORDER *head, *tail, *curr, *next, *last;
  const SSL_CIPHER *cp;
  int reverse = 0;

  if (cipher_id == 0 && strength_bits == -1 && min_version == 0 &&
      (alg_mkey == 0 || alg_auth == 0 || alg_enc == 0 || alg_mac == 0)) {
    /* The rule matches nothing, so bail early. */
    return;
  }

  if (rule == CIPHER_DEL) {
    /* needed to maintain sorting between currently deleted ciphers */
    reverse = 1;
  }

  head = *head_p;
  tail = *tail_p;

  if (reverse) {
    next = tail;
    last = head;
  } else {
    next = head;
    last = tail;
  }

  curr = NULL;
  for (;;) {
    if (curr == last) {
      break;
    }

    curr = next;
    if (curr == NULL) {
      break;
    }

    next = reverse ? curr->prev : curr->next;
    cp = curr->cipher;

    /* The selection criteria are either a specific cipher, the value of
     * |strength_bits|, or the algorithms used. */
    if (cipher_id != 0) {
      if (cipher_id != cp->id) {
        continue;
      }
    } else if (strength_bits >= 0) {
      if (strength_bits != SSL_CIPHER_get_bits(cp, NULL)) {
        continue;
      }
    } else {
      if (!(alg_mkey & cp->algorithm_mkey) ||
          !(alg_auth & cp->algorithm_auth) ||
          !(alg_enc & cp->algorithm_enc) ||
          !(alg_mac & cp->algorithm_mac) ||
          (min_version != 0 && SSL_CIPHER_get_min_version(cp) != min_version)) {
        continue;
      }
    }

    /* add the cipher if it has not been added yet. */
    if (rule == CIPHER_ADD) {
      /* reverse == 0 */
      if (!curr->active) {
        ll_append_tail(&head, curr, &tail);
        curr->active = 1;
        curr->in_group = in_group;
      }
    }

    /* Move the added cipher to this location */
    else if (rule == CIPHER_ORD) {
      /* reverse == 0 */
      if (curr->active) {
        ll_append_tail(&head, curr, &tail);
        curr->in_group = 0;
      }
    } else if (rule == CIPHER_DEL) {
      /* reverse == 1 */
      if (curr->active) {
        /* most recently deleted ciphersuites get best positions
         * for any future CIPHER_ADD (note that the CIPHER_DEL loop
         * works in reverse to maintain the order) */
        ll_append_head(&head, curr, &tail);
        curr->active = 0;
        curr->in_group = 0;
      }
    } else if (rule == CIPHER_KILL) {
      /* reverse == 0 */
      if (head == curr) {
        head = curr->next;
      } else {
        curr->prev->next = curr->next;
      }

      if (tail == curr) {
        tail = curr->prev;
      }
      curr->active = 0;
      if (curr->next != NULL) {
        curr->next->prev = curr->prev;
      }
      if (curr->prev != NULL) {
        curr->prev->next = curr->next;
      }
      curr->next = NULL;
      curr->prev = NULL;
    }
  }

  *head_p = head;
  *tail_p = tail;
}

static int ssl_cipher_strength_sort(CIPHER_ORDER **head_p,
                                    CIPHER_ORDER **tail_p) {
  int max_strength_bits, i, *number_uses;
  CIPHER_ORDER *curr;

  /* This routine sorts the ciphers with descending strength. The sorting must
   * keep the pre-sorted sequence, so we apply the normal sorting routine as
   * '+' movement to the end of the list. */
  max_strength_bits = 0;
  curr = *head_p;
  while (curr != NULL) {
    if (curr->active &&
        SSL_CIPHER_get_bits(curr->cipher, NULL) > max_strength_bits) {
      max_strength_bits = SSL_CIPHER_get_bits(curr->cipher, NULL);
    }
    curr = curr->next;
  }

  number_uses = OPENSSL_malloc((max_strength_bits + 1) * sizeof(int));
  if (!number_uses) {
    OPENSSL_PUT_ERROR(SSL, ERR_R_MALLOC_FAILURE);
    return 0;
  }
  memset(number_uses, 0, (max_strength_bits + 1) * sizeof(int));

  /* Now find the strength_bits values actually used. */
  curr = *head_p;
  while (curr != NULL) {
    if (curr->active) {
      number_uses[SSL_CIPHER_get_bits(curr->cipher, NULL)]++;
    }
    curr = curr->next;
  }

  /* Go through the list of used strength_bits values in descending order. */
  for (i = max_strength_bits; i >= 0; i--) {
    if (number_uses[i] > 0) {
      ssl_cipher_apply_rule(0, 0, 0, 0, 0, 0, CIPHER_ORD, i, 0, head_p, tail_p);
    }
  }

  OPENSSL_free(number_uses);
  return 1;
}

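/* Illustrative sketch (not part of the library): this is the routine behind
 * the "@STRENGTH" directive. It re-sorts the currently enabled suites by
 * descending strength while keeping the existing order among equally strong
 * ones. For example, the rule string
 *
 *   "ECDHE-RSA-AES128-GCM-SHA256:ECDHE-RSA-AES256-GCM-SHA384:@STRENGTH"
 *
 * ends up with the 256-bit suite ahead of the 128-bit one. */
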
static int ssl_cipher_process_rulestr(const SSL_PROTOCOL_METHOD *ssl_method,
                                      const char *rule_str,
                                      CIPHER_ORDER **head_p,
                                      CIPHER_ORDER **tail_p) {
  uint32_t alg_mkey, alg_auth, alg_enc, alg_mac;
  uint16_t min_version;
  const char *l, *buf;
  int multi, skip_rule, rule, ok, in_group = 0, has_group = 0;
  size_t j, buf_len;
  uint32_t cipher_id;
  char ch;

  l = rule_str;
  for (;;) {
    ch = *l;

    if (ch == '\0') {
      break; /* done */
    }

    if (in_group) {
      if (ch == ']') {
        if (*tail_p) {
          (*tail_p)->in_group = 0;
        }
        in_group = 0;
        l++;
        continue;
      }

      if (ch == '|') {
        rule = CIPHER_ADD;
        l++;
        continue;
      } else if (!(ch >= 'a' && ch <= 'z') && !(ch >= 'A' && ch <= 'Z') &&
                 !(ch >= '0' && ch <= '9')) {
        OPENSSL_PUT_ERROR(SSL, SSL_R_UNEXPECTED_OPERATOR_IN_GROUP);
        return 0;
      } else {
        rule = CIPHER_ADD;
      }
    } else if (ch == '-') {
      rule = CIPHER_DEL;
      l++;
    } else if (ch == '+') {
      rule = CIPHER_ORD;
      l++;
    } else if (ch == '!') {
      rule = CIPHER_KILL;
      l++;
    } else if (ch == '@') {
      rule = CIPHER_SPECIAL;
      l++;
    } else if (ch == '[') {
      if (in_group) {
        OPENSSL_PUT_ERROR(SSL, SSL_R_NESTED_GROUP);
        return 0;
      }
      in_group = 1;
      has_group = 1;
      l++;
      continue;
    } else {
      rule = CIPHER_ADD;
    }

    /* If preference groups are enabled, the only legal operator is +.
     * Otherwise the in_group bits will get mixed up. */
    if (has_group && rule != CIPHER_ADD) {
      OPENSSL_PUT_ERROR(SSL, SSL_R_MIXED_SPECIAL_OPERATOR_WITH_GROUPS);
      return 0;
    }

    if (ITEM_SEP(ch)) {
      l++;
      continue;
    }

    multi = 0;
    cipher_id = 0;
    alg_mkey = ~0u;
    alg_auth = ~0u;
    alg_enc = ~0u;
    alg_mac = ~0u;
    min_version = 0;
    skip_rule = 0;

    for (;;) {
      ch = *l;
      buf = l;
      buf_len = 0;
      while (((ch >= 'A') && (ch <= 'Z')) || ((ch >= '0') && (ch <= '9')) ||
             ((ch >= 'a') && (ch <= 'z')) || (ch == '-') || (ch == '.')) {
        ch = *(++l);
        buf_len++;
      }

      if (buf_len == 0) {
        /* We hit something we cannot deal with: it is not a command, a
         * separator, or an alphanumeric character, so we call this an
         * error. */
        OPENSSL_PUT_ERROR(SSL, SSL_R_INVALID_COMMAND);
        return 0;
      }

      if (rule == CIPHER_SPECIAL) {
        break;
      }

      /* Look for a matching exact cipher. These aren't allowed in multipart
       * rules. */
      if (!multi && ch != '+') {
        for (j = 0; j < kCiphersLen; j++) {
          const SSL_CIPHER *cipher = &kCiphers[j];
          if (rule_equals(cipher->name, buf, buf_len)) {
            cipher_id = cipher->id;
            break;
          }
        }
      }
      if (cipher_id == 0) {
        /* If not an exact cipher, look for a matching cipher alias. */
        for (j = 0; j < kCipherAliasesLen; j++) {
          if (rule_equals(kCipherAliases[j].name, buf, buf_len)) {
            alg_mkey &= kCipherAliases[j].algorithm_mkey;
            alg_auth &= kCipherAliases[j].algorithm_auth;
            alg_enc &= kCipherAliases[j].algorithm_enc;
            alg_mac &= kCipherAliases[j].algorithm_mac;

            if (min_version != 0 &&
                min_version != kCipherAliases[j].min_version) {
              skip_rule = 1;
            } else {
              min_version = kCipherAliases[j].min_version;
            }
            break;
          }
        }
        if (j == kCipherAliasesLen) {
          skip_rule = 1;
        }
      }

      /* Check for a multipart rule. */
      if (ch != '+') {
        break;
      }
      l++;
      multi = 1;
    }

    /* If one of the CHACHA20_POLY1305 variants is selected, include the other
     * as well. They have the same name to avoid requiring changes in
     * configuration. Apply this transformation late so that the cipher name
     * still behaves as an exact name and not an alias in multipart rules.
     *
     * This is temporary and will be removed when the pre-standard construction
     * is removed. */
    if (cipher_id == TLS1_CK_ECDHE_RSA_CHACHA20_POLY1305_OLD ||
        cipher_id == TLS1_CK_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256) {
      cipher_id = 0;
      alg_mkey = SSL_kECDHE;
      alg_auth = SSL_aRSA;
      alg_enc = SSL_CHACHA20POLY1305 | SSL_CHACHA20POLY1305_OLD;
      alg_mac = SSL_AEAD;
    } else if (cipher_id == TLS1_CK_ECDHE_ECDSA_CHACHA20_POLY1305_OLD ||
               cipher_id == TLS1_CK_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256) {
      cipher_id = 0;
      alg_mkey = SSL_kECDHE;
      alg_auth = SSL_aECDSA;
      alg_enc = SSL_CHACHA20POLY1305 | SSL_CHACHA20POLY1305_OLD;
      alg_mac = SSL_AEAD;
    }

    /* Ok, we have the rule, now apply it. */
    if (rule == CIPHER_SPECIAL) {
      /* special command */
      ok = 0;
      if (buf_len == 8 && !strncmp(buf, "STRENGTH", 8)) {
        ok = ssl_cipher_strength_sort(head_p, tail_p);
      } else {
        OPENSSL_PUT_ERROR(SSL, SSL_R_INVALID_COMMAND);
      }

      if (ok == 0) {
        return 0;
      }

      /* We do not support any "multi" options together with "@", so throw away
       * the rest of the command, if any left, until end or ':' is found. */
      while (*l != '\0' && !ITEM_SEP(*l)) {
        l++;
      }
    } else if (!skip_rule) {
      ssl_cipher_apply_rule(cipher_id, alg_mkey, alg_auth, alg_enc, alg_mac,
                            min_version, rule, -1, in_group, head_p, tail_p);
    }
  }

  if (in_group) {
    OPENSSL_PUT_ERROR(SSL, SSL_R_INVALID_COMMAND);
    return 0;
  }

  return 1;
}

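/* Illustrative examples of the rule-string grammar parsed above (not part of
 * the library):
 *
 *   "ALL:!aPSK:-3DES"  enables everything except eNULL, permanently removes
 *                      PSK-authenticated suites, and deactivates 3DES while
 *                      leaving it available for a later rule to re-add.
 *   "@STRENGTH"        re-sorts the enabled suites by descending strength.
 *   "[ECDHE-ECDSA-AES128-GCM-SHA256|ECDHE-RSA-AES128-GCM-SHA256]"
 *                      forms an equal-preference group; inside the brackets
 *                      only '|' separates entries and only additions are
 *                      legal. */
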
STACK_OF(SSL_CIPHER) *
ssl_create_cipher_list(const SSL_PROTOCOL_METHOD *ssl_method,
                       struct ssl_cipher_preference_list_st **out_cipher_list,
                       const char *rule_str) {
  STACK_OF(SSL_CIPHER) *cipherstack = NULL;
  CIPHER_ORDER *co_list = NULL, *head = NULL, *tail = NULL, *curr;
  uint8_t *in_group_flags = NULL;
  unsigned int num_in_group_flags = 0;
  struct ssl_cipher_preference_list_st *pref_list = NULL;

  /* Return with error if nothing to do. */
  if (rule_str == NULL || out_cipher_list == NULL) {
    return NULL;
  }

  /* Now we have to collect the available ciphers from the compiled in ciphers.
   * We cannot get more than the number compiled in, so it is used for
   * allocation. */
  co_list = OPENSSL_malloc(sizeof(CIPHER_ORDER) * kCiphersLen);
  if (co_list == NULL) {
    OPENSSL_PUT_ERROR(SSL, ERR_R_MALLOC_FAILURE);
    return NULL;
  }

  ssl_cipher_collect_ciphers(ssl_method, co_list, &head, &tail);

  /* Now arrange all ciphers by preference:
   * TODO(davidben): Compute this order once and copy it. */

  /* Everything else being equal, prefer ECDHE_ECDSA and ECDHE_RSA over other
   * key exchange mechanisms */
  ssl_cipher_apply_rule(0, SSL_kECDHE, SSL_aECDSA, ~0u, ~0u, 0, CIPHER_ADD, -1,
                        0, &head, &tail);
  ssl_cipher_apply_rule(0, SSL_kECDHE, ~0u, ~0u, ~0u, 0, CIPHER_ADD, -1, 0,
                        &head, &tail);
  ssl_cipher_apply_rule(0, ~0u, ~0u, ~0u, ~0u, 0, CIPHER_DEL, -1, 0, &head,
                        &tail);

  /* Order the bulk ciphers. First the preferred AEAD ciphers. We prefer
   * CHACHA20 unless there is hardware support for fast and constant-time
   * AES_GCM. Of the two CHACHA20 variants, the new one is preferred over the
   * old one. */
  if (EVP_has_aes_hardware()) {
    ssl_cipher_apply_rule(0, ~0u, ~0u, SSL_AES128GCM, ~0u, 0, CIPHER_ADD, -1, 0,
                          &head, &tail);
    ssl_cipher_apply_rule(0, ~0u, ~0u, SSL_AES256GCM, ~0u, 0, CIPHER_ADD, -1, 0,
                          &head, &tail);
    ssl_cipher_apply_rule(0, ~0u, ~0u, SSL_CHACHA20POLY1305, ~0u, 0, CIPHER_ADD,
                          -1, 0, &head, &tail);
    ssl_cipher_apply_rule(0, ~0u, ~0u, SSL_CHACHA20POLY1305_OLD, ~0u, 0,
                          CIPHER_ADD, -1, 0, &head, &tail);
  } else {
    ssl_cipher_apply_rule(0, ~0u, ~0u, SSL_CHACHA20POLY1305, ~0u, 0, CIPHER_ADD,
                          -1, 0, &head, &tail);
    ssl_cipher_apply_rule(0, ~0u, ~0u, SSL_CHACHA20POLY1305_OLD, ~0u, 0,
                          CIPHER_ADD, -1, 0, &head, &tail);
    ssl_cipher_apply_rule(0, ~0u, ~0u, SSL_AES128GCM, ~0u, 0, CIPHER_ADD, -1, 0,
                          &head, &tail);
    ssl_cipher_apply_rule(0, ~0u, ~0u, SSL_AES256GCM, ~0u, 0, CIPHER_ADD, -1, 0,
                          &head, &tail);
  }

  /* Then the legacy non-AEAD ciphers: AES_128_CBC, AES_256_CBC,
   * 3DES_EDE_CBC_SHA. */
  ssl_cipher_apply_rule(0, ~0u, ~0u, SSL_AES128, ~0u, 0, CIPHER_ADD, -1, 0,
                        &head, &tail);
  ssl_cipher_apply_rule(0, ~0u, ~0u, SSL_AES256, ~0u, 0, CIPHER_ADD, -1, 0,
                        &head, &tail);
  ssl_cipher_apply_rule(0, ~0u, ~0u, SSL_3DES, ~0u, 0, CIPHER_ADD, -1, 0, &head,
                        &tail);

  /* Temporarily enable everything else for sorting */
  ssl_cipher_apply_rule(0, ~0u, ~0u, ~0u, ~0u, 0, CIPHER_ADD, -1, 0, &head,
                        &tail);

  /* Move ciphers without forward secrecy to the end. */
  ssl_cipher_apply_rule(0, (SSL_kRSA | SSL_kPSK), ~0u, ~0u, ~0u, 0,
David Benjamin0344daf2015-04-08 02:08:01 -04001381 CIPHER_ORD, -1, 0, &head, &tail);
Adam Langley95c29f32014-06-20 12:00:00 -07001382
Adam Langleyfcf25832014-12-18 17:42:32 -08001383 /* Now disable everything (maintaining the ordering!) */
David Benjamind6e9eec2015-11-18 09:48:55 -05001384 ssl_cipher_apply_rule(0, ~0u, ~0u, ~0u, ~0u, 0, CIPHER_DEL, -1, 0, &head,
1385 &tail);
Adam Langley95c29f32014-06-20 12:00:00 -07001386
Adam Langleyfcf25832014-12-18 17:42:32 -08001387  /* If |rule_str| begins with DEFAULT, apply the default rule before
1388 * using the (possibly available) additional rules. */
David Benjamin11a7b3c2016-11-03 17:03:48 -04001389 const char *rule_p = rule_str;
Adam Langleyfcf25832014-12-18 17:42:32 -08001390 if (strncmp(rule_str, "DEFAULT", 7) == 0) {
David Benjamin11a7b3c2016-11-03 17:03:48 -04001391 if (!ssl_cipher_process_rulestr(ssl_method, SSL_DEFAULT_CIPHER_LIST, &head,
1392 &tail)) {
1393 goto err;
1394 }
Adam Langleyfcf25832014-12-18 17:42:32 -08001395 rule_p += 7;
1396 if (*rule_p == ':') {
1397 rule_p++;
1398 }
1399 }
Adam Langley858a88d2014-06-20 12:00:00 -07001400
David Benjamin11a7b3c2016-11-03 17:03:48 -04001401 if (*rule_p != '\0' &&
1402 !ssl_cipher_process_rulestr(ssl_method, rule_p, &head, &tail)) {
Adam Langleyfcf25832014-12-18 17:42:32 -08001403 goto err;
1404 }
1405
 1406  /* Allocate a new "cipherstack" for the result; return with an error
 1407   * if we cannot get one. */
1408 cipherstack = sk_SSL_CIPHER_new_null();
1409 if (cipherstack == NULL) {
1410 goto err;
1411 }
1412
David Benjamina1c90a52015-05-30 17:03:14 -04001413 in_group_flags = OPENSSL_malloc(kCiphersLen);
Adam Langleyfcf25832014-12-18 17:42:32 -08001414 if (!in_group_flags) {
1415 goto err;
1416 }
1417
 1418  /* The cipher selection for the list is done. The selected ciphers are
 1419   * added, in order of preference, to the resulting STACK_OF(SSL_CIPHER). */
1420 for (curr = head; curr != NULL; curr = curr->next) {
1421 if (curr->active) {
David Benjamin2adb7ec2015-01-11 19:59:06 -05001422 if (!sk_SSL_CIPHER_push(cipherstack, curr->cipher)) {
1423 goto err;
1424 }
Adam Langleyfcf25832014-12-18 17:42:32 -08001425 in_group_flags[num_in_group_flags++] = curr->in_group;
1426 }
1427 }
1428 OPENSSL_free(co_list); /* Not needed any longer */
1429 co_list = NULL;
1430
Adam Langleyfcf25832014-12-18 17:42:32 -08001431 pref_list = OPENSSL_malloc(sizeof(struct ssl_cipher_preference_list_st));
1432 if (!pref_list) {
1433 goto err;
1434 }
1435 pref_list->ciphers = cipherstack;
1436 pref_list->in_group_flags = OPENSSL_malloc(num_in_group_flags);
1437 if (!pref_list->in_group_flags) {
1438 goto err;
1439 }
1440 memcpy(pref_list->in_group_flags, in_group_flags, num_in_group_flags);
1441 OPENSSL_free(in_group_flags);
1442 in_group_flags = NULL;
David Benjamin71f07942015-04-08 02:36:59 -04001443 if (*out_cipher_list != NULL) {
1444 ssl_cipher_preference_list_free(*out_cipher_list);
Adam Langleyfcf25832014-12-18 17:42:32 -08001445 }
David Benjamin71f07942015-04-08 02:36:59 -04001446 *out_cipher_list = pref_list;
Adam Langleyfcf25832014-12-18 17:42:32 -08001447 pref_list = NULL;
1448
Adam Langleyfcf25832014-12-18 17:42:32 -08001449 return cipherstack;
Adam Langley858a88d2014-06-20 12:00:00 -07001450
1451err:
David Benjamin2755a3e2015-04-22 16:17:58 -04001452 OPENSSL_free(co_list);
1453 OPENSSL_free(in_group_flags);
1454 sk_SSL_CIPHER_free(cipherstack);
David Benjamin2755a3e2015-04-22 16:17:58 -04001455 if (pref_list) {
Adam Langleyfcf25832014-12-18 17:42:32 -08001456 OPENSSL_free(pref_list->in_group_flags);
1457 }
David Benjamin2755a3e2015-04-22 16:17:58 -04001458 OPENSSL_free(pref_list);
Adam Langleyfcf25832014-12-18 17:42:32 -08001459 return NULL;
1460}
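
/* Minimal sketch, not part of the library: walking the structure built by
 * ssl_create_cipher_list above. |ciphers| and |in_group_flags| are parallel
 * arrays; in_group_flags[i] is the in_group marker recorded for cipher i.
 * The visitor callback is hypothetical. */
void example_walk_preference_list(
    const struct ssl_cipher_preference_list_st *prefs,
    void (*visit)(const SSL_CIPHER *cipher, int in_group, void *arg),
    void *arg) {
  for (size_t i = 0; i < sk_SSL_CIPHER_num(prefs->ciphers); i++) {
    visit(sk_SSL_CIPHER_value(prefs->ciphers, i), prefs->in_group_flags[i],
          arg);
  }
}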
Adam Langley95c29f32014-06-20 12:00:00 -07001461
David Benjamin71f07942015-04-08 02:36:59 -04001462uint32_t SSL_CIPHER_get_id(const SSL_CIPHER *cipher) { return cipher->id; }
1463
David Benjamina1c90a52015-05-30 17:03:14 -04001464uint16_t ssl_cipher_get_value(const SSL_CIPHER *cipher) {
1465 uint32_t id = cipher->id;
1466 /* All ciphers are SSLv3. */
1467 assert((id & 0xff000000) == 0x03000000);
1468 return id & 0xffff;
1469}
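
/* Illustrative sketch, not part of the library: the 32-bit cipher |id| packs
 * the 0x0300 SSLv3/TLS prefix above the 16-bit cipher suite value from the
 * wire, so TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256 (0xc02f on the wire) has id
 * 0x0300c02f. The round trip below assumes the public lookup function
 * SSL_get_cipher_by_value is available. */
int example_id_round_trips(uint16_t wire_value) {
  const SSL_CIPHER *cipher = SSL_get_cipher_by_value(wire_value);
  if (cipher == NULL) {
    return 0;  /* Unknown cipher suite. */
  }
  return SSL_CIPHER_get_id(cipher) == (0x03000000u | wire_value) &&
         ssl_cipher_get_value(cipher) == wire_value;
}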
1470
David Benjamin71f07942015-04-08 02:36:59 -04001471int SSL_CIPHER_is_AES(const SSL_CIPHER *cipher) {
1472 return (cipher->algorithm_enc & SSL_AES) != 0;
1473}
1474
1475int SSL_CIPHER_has_MD5_HMAC(const SSL_CIPHER *cipher) {
1476 return (cipher->algorithm_mac & SSL_MD5) != 0;
1477}
1478
David Benjaminef793f42015-11-05 18:16:27 -05001479int SSL_CIPHER_has_SHA1_HMAC(const SSL_CIPHER *cipher) {
1480 return (cipher->algorithm_mac & SSL_SHA1) != 0;
1481}
1482
David Benjamina211aee2016-02-24 17:18:44 -05001483int SSL_CIPHER_has_SHA256_HMAC(const SSL_CIPHER *cipher) {
1484 return (cipher->algorithm_mac & SSL_SHA256) != 0;
1485}
1486
David Benjamin71f07942015-04-08 02:36:59 -04001487int SSL_CIPHER_is_AESGCM(const SSL_CIPHER *cipher) {
David Benjaminc0125ef2015-09-09 09:11:07 -04001488 return (cipher->algorithm_enc & (SSL_AES128GCM | SSL_AES256GCM)) != 0;
David Benjamin71f07942015-04-08 02:36:59 -04001489}
1490
David Benjaminef793f42015-11-05 18:16:27 -05001491int SSL_CIPHER_is_AES128GCM(const SSL_CIPHER *cipher) {
1492 return (cipher->algorithm_enc & SSL_AES128GCM) != 0;
1493}
1494
Adam Langleyb00061c2015-11-16 17:44:52 -08001495int SSL_CIPHER_is_AES128CBC(const SSL_CIPHER *cipher) {
1496 return (cipher->algorithm_enc & SSL_AES128) != 0;
1497}
1498
1499int SSL_CIPHER_is_AES256CBC(const SSL_CIPHER *cipher) {
1500 return (cipher->algorithm_enc & SSL_AES256) != 0;
1501}
1502
David Benjamin51a01a52015-10-29 13:19:56 -04001503int SSL_CIPHER_is_CHACHA20POLY1305(const SSL_CIPHER *cipher) {
David Benjamin13414b32015-12-09 23:02:39 -05001504 return (cipher->algorithm_enc &
1505 (SSL_CHACHA20POLY1305 | SSL_CHACHA20POLY1305_OLD)) != 0;
David Benjamin71f07942015-04-08 02:36:59 -04001506}
1507
Matt Braithwaiteaf096752015-09-02 19:48:16 -07001508int SSL_CIPHER_is_NULL(const SSL_CIPHER *cipher) {
1509 return (cipher->algorithm_enc & SSL_eNULL) != 0;
1510}
1511
1512int SSL_CIPHER_is_block_cipher(const SSL_CIPHER *cipher) {
Matthew Braithwaite8aaa9e12016-09-07 15:09:58 -07001513 return (cipher->algorithm_enc & SSL_eNULL) == 0 &&
Matt Braithwaiteaf096752015-09-02 19:48:16 -07001514 cipher->algorithm_mac != SSL_AEAD;
1515}
1516
David Benjaminef793f42015-11-05 18:16:27 -05001517int SSL_CIPHER_is_ECDSA(const SSL_CIPHER *cipher) {
1518 return (cipher->algorithm_auth & SSL_aECDSA) != 0;
1519}
1520
David Benjamin0fc7df52016-06-02 18:36:33 -04001521int SSL_CIPHER_is_DHE(const SSL_CIPHER *cipher) {
1522 return (cipher->algorithm_mkey & SSL_kDHE) != 0;
1523}
1524
David Benjamin4cc36ad2015-12-19 14:23:26 -05001525int SSL_CIPHER_is_ECDHE(const SSL_CIPHER *cipher) {
1526 return (cipher->algorithm_mkey & SSL_kECDHE) != 0;
1527}
1528
David Benjaminef793f42015-11-05 18:16:27 -05001529uint16_t SSL_CIPHER_get_min_version(const SSL_CIPHER *cipher) {
Steven Valdez803c77a2016-09-06 14:13:43 -04001530 if (cipher->algorithm_mkey == SSL_kGENERIC ||
1531 cipher->algorithm_auth == SSL_aGENERIC) {
1532 return TLS1_3_VERSION;
1533 }
1534
David Benjamindcb6ef02015-11-06 15:35:54 -05001535 if (cipher->algorithm_prf != SSL_HANDSHAKE_MAC_DEFAULT) {
1536 /* Cipher suites before TLS 1.2 use the default PRF, while all those added
1537 * afterwards specify a particular hash. */
David Benjaminef793f42015-11-05 18:16:27 -05001538 return TLS1_2_VERSION;
1539 }
1540 return SSL3_VERSION;
1541}
1542
Nick Harper1fd39d82016-06-14 18:14:35 -07001543uint16_t SSL_CIPHER_get_max_version(const SSL_CIPHER *cipher) {
Steven Valdez803c77a2016-09-06 14:13:43 -04001544 if (cipher->algorithm_mkey == SSL_kGENERIC ||
1545 cipher->algorithm_auth == SSL_aGENERIC) {
Nick Harper1fd39d82016-06-14 18:14:35 -07001546 return TLS1_3_VERSION;
1547 }
1548 return TLS1_2_VERSION;
1549}
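
/* Minimal sketch, not part of the library: combining the two accessors above
 * to test whether a cipher may be negotiated at a given protocol version,
 * e.g. TLS1_2_VERSION. The helper name is hypothetical. */
int example_cipher_allowed_at_version(const SSL_CIPHER *cipher,
                                      uint16_t version) {
  return SSL_CIPHER_get_min_version(cipher) <= version &&
         version <= SSL_CIPHER_get_max_version(cipher);
}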
1550
David Benjamin71f07942015-04-08 02:36:59 -04001551/* Return the name of the cipher, or "(NONE)" if |cipher| is NULL. */
1552const char *SSL_CIPHER_get_name(const SSL_CIPHER *cipher) {
1553 if (cipher != NULL) {
1554 return cipher->name;
1555 }
1556
1557 return "(NONE)";
1558}
1559
1560const char *SSL_CIPHER_get_kx_name(const SSL_CIPHER *cipher) {
1561 if (cipher == NULL) {
1562 return "";
1563 }
1564
1565 switch (cipher->algorithm_mkey) {
1566 case SSL_kRSA:
1567 return "RSA";
1568
1569 case SSL_kDHE:
1570 switch (cipher->algorithm_auth) {
1571 case SSL_aRSA:
1572 return "DHE_RSA";
1573 default:
1574 assert(0);
1575 return "UNKNOWN";
1576 }
1577
1578 case SSL_kECDHE:
1579 switch (cipher->algorithm_auth) {
1580 case SSL_aECDSA:
1581 return "ECDHE_ECDSA";
1582 case SSL_aRSA:
1583 return "ECDHE_RSA";
1584 case SSL_aPSK:
1585 return "ECDHE_PSK";
1586 default:
1587 assert(0);
1588 return "UNKNOWN";
1589 }
1590
1591 case SSL_kPSK:
1592 assert(cipher->algorithm_auth == SSL_aPSK);
1593 return "PSK";
1594
Steven Valdez803c77a2016-09-06 14:13:43 -04001595 case SSL_kGENERIC:
1596 assert(cipher->algorithm_auth == SSL_aGENERIC);
1597 return "GENERIC";
1598
David Benjamin71f07942015-04-08 02:36:59 -04001599 default:
1600 assert(0);
1601 return "UNKNOWN";
1602 }
1603}
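
/* Illustrative sketch, not part of the library: reporting the key exchange of
 * the cipher negotiated on a connection. SSL_get_current_cipher returns NULL
 * before a cipher has been negotiated, which the accessor above maps to "". */
const char *example_current_kx_name(const SSL *ssl) {
  return SSL_CIPHER_get_kx_name(SSL_get_current_cipher(ssl));
}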
1604
1605static const char *ssl_cipher_get_enc_name(const SSL_CIPHER *cipher) {
1606 switch (cipher->algorithm_enc) {
1607 case SSL_3DES:
1608 return "3DES_EDE_CBC";
David Benjamin71f07942015-04-08 02:36:59 -04001609 case SSL_AES128:
1610 return "AES_128_CBC";
1611 case SSL_AES256:
1612 return "AES_256_CBC";
1613 case SSL_AES128GCM:
1614 return "AES_128_GCM";
1615 case SSL_AES256GCM:
1616 return "AES_256_GCM";
David Benjamin13414b32015-12-09 23:02:39 -05001617 case SSL_CHACHA20POLY1305:
Brian Smith271777f2015-10-03 13:53:33 -10001618 case SSL_CHACHA20POLY1305_OLD:
David Benjamin71f07942015-04-08 02:36:59 -04001619 return "CHACHA20_POLY1305";
1621 default:
1622 assert(0);
1623 return "UNKNOWN";
1624 }
1625}
1626
1627static const char *ssl_cipher_get_prf_name(const SSL_CIPHER *cipher) {
David Benjaminb0883312015-08-06 09:54:13 -04001628 switch (cipher->algorithm_prf) {
1629 case SSL_HANDSHAKE_MAC_DEFAULT:
1630 /* Before TLS 1.2, the PRF component is the hash used in the HMAC, which is
1631 * only ever MD5 or SHA-1. */
1632 switch (cipher->algorithm_mac) {
1633 case SSL_MD5:
1634 return "MD5";
1635 case SSL_SHA1:
1636 return "SHA";
1637 }
1638 break;
1639 case SSL_HANDSHAKE_MAC_SHA256:
1640 return "SHA256";
1641 case SSL_HANDSHAKE_MAC_SHA384:
1642 return "SHA384";
David Benjamin71f07942015-04-08 02:36:59 -04001643 }
David Benjaminb0883312015-08-06 09:54:13 -04001644 assert(0);
1645 return "UNKNOWN";
David Benjamin71f07942015-04-08 02:36:59 -04001646}
1647
1648char *SSL_CIPHER_get_rfc_name(const SSL_CIPHER *cipher) {
1649 if (cipher == NULL) {
1650 return NULL;
1651 }
1652
1653 const char *kx_name = SSL_CIPHER_get_kx_name(cipher);
1654 const char *enc_name = ssl_cipher_get_enc_name(cipher);
1655 const char *prf_name = ssl_cipher_get_prf_name(cipher);
1656
Steven Valdez803c77a2016-09-06 14:13:43 -04001657 /* The final name is TLS_{kx_name}_WITH_{enc_name}_{prf_name} or
1658 * TLS_{enc_name}_{prf_name} depending on whether the cipher is AEAD-only. */
1659 size_t len = 4 + strlen(enc_name) + 1 + strlen(prf_name) + 1;
1660
1661 if (cipher->algorithm_mkey != SSL_kGENERIC) {
1662 len += strlen(kx_name) + 6;
1663 }
1664
David Benjamin71f07942015-04-08 02:36:59 -04001665 char *ret = OPENSSL_malloc(len);
1666 if (ret == NULL) {
1667 return NULL;
1668 }
Steven Valdez803c77a2016-09-06 14:13:43 -04001669
David Benjamin71f07942015-04-08 02:36:59 -04001670 if (BUF_strlcpy(ret, "TLS_", len) >= len ||
Steven Valdez803c77a2016-09-06 14:13:43 -04001671 (cipher->algorithm_mkey != SSL_kGENERIC &&
1672 (BUF_strlcat(ret, kx_name, len) >= len ||
1673 BUF_strlcat(ret, "_WITH_", len) >= len)) ||
David Benjamin71f07942015-04-08 02:36:59 -04001674 BUF_strlcat(ret, enc_name, len) >= len ||
1675 BUF_strlcat(ret, "_", len) >= len ||
1676 BUF_strlcat(ret, prf_name, len) >= len) {
1677 assert(0);
1678 OPENSSL_free(ret);
1679 return NULL;
1680 }
Steven Valdez803c77a2016-09-06 14:13:43 -04001681
David Benjamin71f07942015-04-08 02:36:59 -04001682 assert(strlen(ret) + 1 == len);
1683 return ret;
1684}
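
/* Minimal sketch, not part of the library: SSL_CIPHER_get_rfc_name allocates
 * its result, so the caller owns the returned string and must release it with
 * OPENSSL_free. The helper below copies it into a caller-provided buffer. */
int example_copy_rfc_name(const SSL_CIPHER *cipher, char *out, size_t out_len) {
  char *rfc_name = SSL_CIPHER_get_rfc_name(cipher);
  if (rfc_name == NULL) {
    return 0;
  }
  int ok = BUF_strlcpy(out, rfc_name, out_len) < out_len;
  OPENSSL_free(rfc_name);
  return ok;
}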
1685
1686int SSL_CIPHER_get_bits(const SSL_CIPHER *cipher, int *out_alg_bits) {
1687 if (cipher == NULL) {
1688 return 0;
1689 }
1690
David Benjamin9f2e2772015-11-18 09:59:43 -05001691 int alg_bits, strength_bits;
1692 switch (cipher->algorithm_enc) {
1693 case SSL_AES128:
1694 case SSL_AES128GCM:
David Benjamin9f2e2772015-11-18 09:59:43 -05001695 alg_bits = 128;
1696 strength_bits = 128;
1697 break;
1698
1699 case SSL_AES256:
1700 case SSL_AES256GCM:
1701#if !defined(BORINGSSL_ANDROID_SYSTEM)
1702 case SSL_CHACHA20POLY1305_OLD:
1703#endif
David Benjamin13414b32015-12-09 23:02:39 -05001704 case SSL_CHACHA20POLY1305:
David Benjamin9f2e2772015-11-18 09:59:43 -05001705 alg_bits = 256;
1706 strength_bits = 256;
1707 break;
1708
1709 case SSL_3DES:
1710 alg_bits = 168;
1711 strength_bits = 112;
1712 break;
1713
1714 case SSL_eNULL:
1715 alg_bits = 0;
1716 strength_bits = 0;
1717 break;
1718
1719 default:
1720 assert(0);
1721 alg_bits = 0;
1722 strength_bits = 0;
David Benjamin71f07942015-04-08 02:36:59 -04001723 }
David Benjamin9f2e2772015-11-18 09:59:43 -05001724
1725 if (out_alg_bits != NULL) {
1726 *out_alg_bits = alg_bits;
1727 }
1728 return strength_bits;
David Benjamin71f07942015-04-08 02:36:59 -04001729}
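
/* Illustrative sketch, not part of the library: the return value is the
 * effective strength in bits, while |*out_alg_bits| receives the nominal key
 * size, so 3DES reports 168 algorithm bits but only 112 bits of strength. */
int example_cipher_is_strong_enough(const SSL_CIPHER *cipher,
                                    int minimum_strength_bits) {
  int alg_bits;
  int strength_bits = SSL_CIPHER_get_bits(cipher, &alg_bits);
  (void)alg_bits;  /* Nominal key length; unused in this check. */
  return strength_bits >= minimum_strength_bits;
}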
1730
Adam Langleyfcf25832014-12-18 17:42:32 -08001731const char *SSL_CIPHER_description(const SSL_CIPHER *cipher, char *buf,
1732 int len) {
Adam Langleyfcf25832014-12-18 17:42:32 -08001733 const char *kx, *au, *enc, *mac;
David Benjamindcb6ef02015-11-06 15:35:54 -05001734 uint32_t alg_mkey, alg_auth, alg_enc, alg_mac;
Adam Langley95c29f32014-06-20 12:00:00 -07001735
Adam Langleyfcf25832014-12-18 17:42:32 -08001736 alg_mkey = cipher->algorithm_mkey;
1737 alg_auth = cipher->algorithm_auth;
1738 alg_enc = cipher->algorithm_enc;
1739 alg_mac = cipher->algorithm_mac;
Adam Langley95c29f32014-06-20 12:00:00 -07001740
Adam Langleyfcf25832014-12-18 17:42:32 -08001741 switch (alg_mkey) {
1742 case SSL_kRSA:
1743 kx = "RSA";
1744 break;
Adam Langley95c29f32014-06-20 12:00:00 -07001745
David Benjamin7061e282015-03-19 11:10:48 -04001746 case SSL_kDHE:
Adam Langleyfcf25832014-12-18 17:42:32 -08001747 kx = "DH";
1748 break;
Adam Langley95c29f32014-06-20 12:00:00 -07001749
David Benjamin7061e282015-03-19 11:10:48 -04001750 case SSL_kECDHE:
Adam Langleyfcf25832014-12-18 17:42:32 -08001751 kx = "ECDH";
1752 break;
Adam Langley95c29f32014-06-20 12:00:00 -07001753
Adam Langleyfcf25832014-12-18 17:42:32 -08001754 case SSL_kPSK:
1755 kx = "PSK";
1756 break;
Adam Langley95c29f32014-06-20 12:00:00 -07001757
Steven Valdez803c77a2016-09-06 14:13:43 -04001758 case SSL_kGENERIC:
1759 kx = "GENERIC";
1760 break;
1761
Adam Langleyfcf25832014-12-18 17:42:32 -08001762 default:
1763 kx = "unknown";
1764 }
Adam Langley95c29f32014-06-20 12:00:00 -07001765
Adam Langleyfcf25832014-12-18 17:42:32 -08001766 switch (alg_auth) {
1767 case SSL_aRSA:
1768 au = "RSA";
1769 break;
Adam Langley95c29f32014-06-20 12:00:00 -07001770
Adam Langleyfcf25832014-12-18 17:42:32 -08001771 case SSL_aECDSA:
1772 au = "ECDSA";
1773 break;
Adam Langley4d4bff82014-06-20 12:00:00 -07001774
Adam Langleyfcf25832014-12-18 17:42:32 -08001775 case SSL_aPSK:
1776 au = "PSK";
1777 break;
Adam Langley4d4bff82014-06-20 12:00:00 -07001778
Steven Valdez803c77a2016-09-06 14:13:43 -04001779 case SSL_aGENERIC:
1780 au = "GENERIC";
1781 break;
1782
Adam Langleyfcf25832014-12-18 17:42:32 -08001783 default:
1784 au = "unknown";
1785 break;
1786 }
Adam Langleyde0b2022014-06-20 12:00:00 -07001787
Adam Langleyfcf25832014-12-18 17:42:32 -08001788 switch (alg_enc) {
1789 case SSL_3DES:
1790 enc = "3DES(168)";
1791 break;
Adam Langley95c29f32014-06-20 12:00:00 -07001792
Adam Langleyfcf25832014-12-18 17:42:32 -08001793 case SSL_AES128:
1794 enc = "AES(128)";
1795 break;
1796
1797 case SSL_AES256:
1798 enc = "AES(256)";
1799 break;
1800
1801 case SSL_AES128GCM:
1802 enc = "AESGCM(128)";
1803 break;
1804
1805 case SSL_AES256GCM:
1806 enc = "AESGCM(256)";
1807 break;
1808
Brian Smith271777f2015-10-03 13:53:33 -10001809 case SSL_CHACHA20POLY1305_OLD:
David Benjamin13414b32015-12-09 23:02:39 -05001810 enc = "ChaCha20-Poly1305-Old";
1811 break;
1812
1813 case SSL_CHACHA20POLY1305:
Adam Langleyfcf25832014-12-18 17:42:32 -08001814 enc = "ChaCha20-Poly1305";
1815 break;
1816
Matt Braithwaiteaf096752015-09-02 19:48:16 -07001817 case SSL_eNULL:
 1818      enc = "None";
1819 break;
1820
Adam Langleyfcf25832014-12-18 17:42:32 -08001821 default:
1822 enc = "unknown";
1823 break;
1824 }
1825
1826 switch (alg_mac) {
1827 case SSL_MD5:
1828 mac = "MD5";
1829 break;
1830
1831 case SSL_SHA1:
1832 mac = "SHA1";
1833 break;
1834
1835 case SSL_SHA256:
1836 mac = "SHA256";
1837 break;
1838
1839 case SSL_SHA384:
1840 mac = "SHA384";
1841 break;
1842
1843 case SSL_AEAD:
1844 mac = "AEAD";
1845 break;
1846
1847 default:
1848 mac = "unknown";
1849 break;
1850 }
1851
1852 if (buf == NULL) {
1853 len = 128;
1854 buf = OPENSSL_malloc(len);
David Benjamin1eed2c02015-02-08 23:20:06 -05001855 if (buf == NULL) {
1856 return NULL;
1857 }
Adam Langleyfcf25832014-12-18 17:42:32 -08001858 } else if (len < 128) {
1859 return "Buffer too small";
1860 }
1861
Brian Smith0687bdf2016-01-17 09:18:26 -10001862 BIO_snprintf(buf, len, "%-23s Kx=%-8s Au=%-4s Enc=%-9s Mac=%-4s\n",
1863 cipher->name, kx, au, enc, mac);
Adam Langleyfcf25832014-12-18 17:42:32 -08001864 return buf;
1865}
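
/* Minimal sketch, not part of the library: SSL_CIPHER_description requires a
 * caller-supplied buffer of at least 128 bytes, or NULL to have one allocated
 * with OPENSSL_malloc (which the caller must then free). */
void example_describe_cipher(const SSL_CIPHER *cipher) {
  char buf[128];
  const char *description = SSL_CIPHER_description(cipher, buf, sizeof(buf));
  /* |description| now points at |buf| and holds a line such as
   * "ECDHE-RSA-AES128-GCM-SHA256 Kx=ECDH Au=RSA Enc=AESGCM(128) Mac=AEAD". */
  (void)description;
}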
1866
David Benjamin71f07942015-04-08 02:36:59 -04001867const char *SSL_CIPHER_get_version(const SSL_CIPHER *cipher) {
1868 return "TLSv1/SSLv3";
Adam Langleyfcf25832014-12-18 17:42:32 -08001869}
1870
Matt Braithwaite6a1275b2015-06-26 12:09:10 -07001871COMP_METHOD *SSL_COMP_get_compression_methods(void) { return NULL; }
Adam Langleyfcf25832014-12-18 17:42:32 -08001872
Matt Braithwaite6a1275b2015-06-26 12:09:10 -07001873int SSL_COMP_add_compression_method(int id, COMP_METHOD *cm) { return 1; }
Adam Langleyfcf25832014-12-18 17:42:32 -08001874
Matt Braithwaite6a1275b2015-06-26 12:09:10 -07001875const char *SSL_COMP_get_name(const COMP_METHOD *comp) { return NULL; }
Adam Langley95c29f32014-06-20 12:00:00 -07001876
Adam Langley3e9e0432016-10-03 15:58:07 -07001877void SSL_COMP_free_compression_methods(void) {}
1878
David Benjamind1d80782015-07-05 11:54:09 -04001879int ssl_cipher_get_key_type(const SSL_CIPHER *cipher) {
David Benjamin71f07942015-04-08 02:36:59 -04001880 uint32_t alg_a = cipher->algorithm_auth;
Adam Langley95c29f32014-06-20 12:00:00 -07001881
Adam Langleyfcf25832014-12-18 17:42:32 -08001882 if (alg_a & SSL_aECDSA) {
David Benjamind1d80782015-07-05 11:54:09 -04001883 return EVP_PKEY_EC;
Adam Langleyfcf25832014-12-18 17:42:32 -08001884 } else if (alg_a & SSL_aRSA) {
David Benjamind1d80782015-07-05 11:54:09 -04001885 return EVP_PKEY_RSA;
Adam Langleyfcf25832014-12-18 17:42:32 -08001886 }
Adam Langley95c29f32014-06-20 12:00:00 -07001887
David Benjamind1d80782015-07-05 11:54:09 -04001888 return EVP_PKEY_NONE;
Adam Langleyfcf25832014-12-18 17:42:32 -08001889}
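
/* Illustrative sketch, not part of the library: matching a candidate
 * certificate key against the key type a cipher's authentication algorithm
 * requires. EVP_PKEY_id reports the key's type, e.g. EVP_PKEY_RSA or
 * EVP_PKEY_EC. The helper name is hypothetical. */
int example_key_matches_cipher(const EVP_PKEY *pkey, const SSL_CIPHER *cipher) {
  int required_type = ssl_cipher_get_key_type(cipher);
  return required_type != EVP_PKEY_NONE && EVP_PKEY_id(pkey) == required_type;
}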
David Benjamin9c651c92014-07-12 13:27:45 -04001890
David Benjaminc032dfa2016-05-12 14:54:57 -04001891int ssl_cipher_uses_certificate_auth(const SSL_CIPHER *cipher) {
1892 return (cipher->algorithm_auth & SSL_aCERT) != 0;
Adam Langleyfcf25832014-12-18 17:42:32 -08001893}
1894
Adam Langleyfcf25832014-12-18 17:42:32 -08001895int ssl_cipher_requires_server_key_exchange(const SSL_CIPHER *cipher) {
1896 /* Ephemeral Diffie-Hellman key exchanges require a ServerKeyExchange. */
Matt Braithwaite053931e2016-05-25 12:06:05 -07001897 if (cipher->algorithm_mkey & SSL_kDHE ||
Matthew Braithwaite651aaef2016-12-08 16:14:36 -08001898 cipher->algorithm_mkey & SSL_kECDHE) {
Adam Langleyfcf25832014-12-18 17:42:32 -08001899 return 1;
1900 }
1901
1902 /* It is optional in all others. */
1903 return 0;
1904}
David Benjaminb8d28cf2015-07-28 21:34:45 -04001905
1906size_t ssl_cipher_get_record_split_len(const SSL_CIPHER *cipher) {
1907 size_t block_size;
1908 switch (cipher->algorithm_enc) {
1909 case SSL_3DES:
1910 block_size = 8;
1911 break;
1912 case SSL_AES128:
1913 case SSL_AES256:
1914 block_size = 16;
1915 break;
1916 default:
1917 return 0;
1918 }
1919
1920 size_t mac_len;
1921 switch (cipher->algorithm_mac) {
1922 case SSL_MD5:
1923 mac_len = MD5_DIGEST_LENGTH;
1924 break;
1925 case SSL_SHA1:
1926 mac_len = SHA_DIGEST_LENGTH;
1927 break;
1928 default:
1929 return 0;
1930 }
1931
1932 size_t ret = 1 + mac_len;
1933 ret += block_size - (ret % block_size);
1934 return ret;
1935}
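
/* Minimal sketch, not part of the library: a worked instance of the
 * computation above. For AES-128-CBC with HMAC-SHA1, block_size is 16 and
 * mac_len is 20, so the single byte plus MAC occupy 21 bytes and are padded
 * up to 32. The wire value 0x002f (TLS_RSA_WITH_AES_128_CBC_SHA) and the
 * public SSL_get_cipher_by_value lookup are used only for illustration. */
int example_record_split_len_is_32(void) {
  const SSL_CIPHER *cipher = SSL_get_cipher_by_value(0x002f);
  return cipher != NULL && ssl_cipher_get_record_split_len(cipher) == 32;
}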