/* Copyright (C) 1995-1998 Eric Young (eay@cryptsoft.com)
 * All rights reserved.
 *
 * This package is an SSL implementation written
 * by Eric Young (eay@cryptsoft.com).
 * The implementation was written so as to conform with Netscapes SSL.
 *
 * This library is free for commercial and non-commercial use as long as
 * the following conditions are aheared to.  The following conditions
 * apply to all code found in this distribution, be it the RC4, RSA,
 * lhash, DES, etc., code; not just the SSL code.  The SSL documentation
 * included with this distribution is covered by the same copyright terms
 * except that the holder is Tim Hudson (tjh@cryptsoft.com).
 *
 * Copyright remains Eric Young's, and as such any Copyright notices in
 * the code are not to be removed.
 * If this package is used in a product, Eric Young should be given attribution
 * as the author of the parts of the library used.
 * This can be in the form of a textual message at program startup or
 * in documentation (online or textual) provided with the package.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. All advertising materials mentioning features or use of this software
 *    must display the following acknowledgement:
 *    "This product includes cryptographic software written by
 *     Eric Young (eay@cryptsoft.com)"
 *    The word 'cryptographic' can be left out if the rouines from the library
 *    being used are not cryptographic related :-).
 * 4. If you include any Windows specific code (or a derivative thereof) from
 *    the apps directory (application code) you must include an acknowledgement:
 *    "This product includes software written by Tim Hudson (tjh@cryptsoft.com)"
 *
 * THIS SOFTWARE IS PROVIDED BY ERIC YOUNG ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 * The licence and distribution terms for any publically available version or
 * derivative of this code cannot be changed.  i.e. this code cannot simply be
 * copied and put under another distribution licence
 * [including the GNU Public Licence.]
 */
/* ====================================================================
 * Copyright (c) 1998-2007 The OpenSSL Project.  All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 *
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in
 *    the documentation and/or other materials provided with the
 *    distribution.
 *
 * 3. All advertising materials mentioning features or use of this
 *    software must display the following acknowledgment:
 *    "This product includes software developed by the OpenSSL Project
 *    for use in the OpenSSL Toolkit. (http://www.openssl.org/)"
 *
 * 4. The names "OpenSSL Toolkit" and "OpenSSL Project" must not be used to
 *    endorse or promote products derived from this software without
 *    prior written permission. For written permission, please contact
 *    openssl-core@openssl.org.
 *
 * 5. Products derived from this software may not be called "OpenSSL"
 *    nor may "OpenSSL" appear in their names without prior written
 *    permission of the OpenSSL Project.
 *
 * 6. Redistributions of any form whatsoever must retain the following
 *    acknowledgment:
 *    "This product includes software developed by the OpenSSL Project
 *    for use in the OpenSSL Toolkit (http://www.openssl.org/)"
 *
 * THIS SOFTWARE IS PROVIDED BY THE OpenSSL PROJECT ``AS IS'' AND ANY
 * EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE OpenSSL PROJECT OR
 * ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
 * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
 * OF THE POSSIBILITY OF SUCH DAMAGE.
 * ====================================================================
 *
 * This product includes cryptographic software written by Eric Young
 * (eay@cryptsoft.com).  This product includes software written by Tim
 * Hudson (tjh@cryptsoft.com).
 *
 */
/* ====================================================================
 * Copyright 2002 Sun Microsystems, Inc. ALL RIGHTS RESERVED.
 * ECC cipher suite support in OpenSSL originally developed by
 * SUN MICROSYSTEMS, INC., and contributed to the OpenSSL project.
 */
/* ====================================================================
 * Copyright 2005 Nokia. All rights reserved.
 *
 * The portions of the attached software ("Contribution") is developed by
 * Nokia Corporation and is licensed pursuant to the OpenSSL open source
 * license.
 *
 * The Contribution, originally written by Mika Kousa and Pasi Eronen of
 * Nokia Corporation, consists of the "PSK" (Pre-Shared Key) ciphersuites
 * support (see RFC 4279) to OpenSSL.
 *
 * No patent licenses or other rights except those expressly stated in
 * the OpenSSL open source license shall be deemed granted or received
 * expressly, by implication, estoppel, or otherwise.
 *
 * No assurances are provided by Nokia that the Contribution does not
 * infringe the patent or other intellectual property rights of any third
 * party or that the license provides you with all the necessary rights
 * to make use of the Contribution.
 *
 * THE SOFTWARE IS PROVIDED "AS IS" WITHOUT WARRANTY OF ANY KIND. IN
 * ADDITION TO THE DISCLAIMERS INCLUDED IN THE LICENSE, NOKIA
 * SPECIFICALLY DISCLAIMS ANY LIABILITY FOR CLAIMS BROUGHT BY YOU OR ANY
 * OTHER ENTITY BASED ON INFRINGEMENT OF INTELLECTUAL PROPERTY RIGHTS OR
 * OTHERWISE. */

#include <openssl/ssl.h>

#include <assert.h>
#include <string.h>

#include <openssl/buf.h>
#include <openssl/err.h>
#include <openssl/md5.h>
#include <openssl/mem.h>
#include <openssl/sha.h>
#include <openssl/stack.h>

#include "internal.h"
#include "../crypto/internal.h"

/* kCiphers is an array of all supported ciphers, sorted by id. */
static const SSL_CIPHER kCiphers[] = {
    /* The RSA ciphers */
    /* Cipher 02 */
    {
     SSL3_TXT_RSA_NULL_SHA,
     SSL3_CK_RSA_NULL_SHA,
     SSL_kRSA,
     SSL_aRSA,
     SSL_eNULL,
     SSL_SHA1,
     SSL_HANDSHAKE_MAC_DEFAULT,
    },

    /* Cipher 0A */
    {
     SSL3_TXT_RSA_DES_192_CBC3_SHA,
     SSL3_CK_RSA_DES_192_CBC3_SHA,
     SSL_kRSA,
     SSL_aRSA,
     SSL_3DES,
     SSL_SHA1,
     SSL_HANDSHAKE_MAC_DEFAULT,
    },


    /* New AES ciphersuites */

    /* Cipher 2F */
    {
     TLS1_TXT_RSA_WITH_AES_128_SHA,
     TLS1_CK_RSA_WITH_AES_128_SHA,
     SSL_kRSA,
     SSL_aRSA,
     SSL_AES128,
     SSL_SHA1,
     SSL_HANDSHAKE_MAC_DEFAULT,
    },

    /* Cipher 33 */
    {
     TLS1_TXT_DHE_RSA_WITH_AES_128_SHA,
     TLS1_CK_DHE_RSA_WITH_AES_128_SHA,
     SSL_kDHE,
     SSL_aRSA,
     SSL_AES128,
     SSL_SHA1,
     SSL_HANDSHAKE_MAC_DEFAULT,
    },

    /* Cipher 35 */
    {
     TLS1_TXT_RSA_WITH_AES_256_SHA,
     TLS1_CK_RSA_WITH_AES_256_SHA,
     SSL_kRSA,
     SSL_aRSA,
     SSL_AES256,
     SSL_SHA1,
     SSL_HANDSHAKE_MAC_DEFAULT,
    },

    /* Cipher 39 */
    {
     TLS1_TXT_DHE_RSA_WITH_AES_256_SHA,
     TLS1_CK_DHE_RSA_WITH_AES_256_SHA,
     SSL_kDHE,
     SSL_aRSA,
     SSL_AES256,
     SSL_SHA1,
     SSL_HANDSHAKE_MAC_DEFAULT,
    },


    /* TLS v1.2 ciphersuites */

    /* Cipher 3C */
    {
     TLS1_TXT_RSA_WITH_AES_128_SHA256,
     TLS1_CK_RSA_WITH_AES_128_SHA256,
     SSL_kRSA,
     SSL_aRSA,
     SSL_AES128,
     SSL_SHA256,
     SSL_HANDSHAKE_MAC_SHA256,
    },

    /* Cipher 3D */
    {
     TLS1_TXT_RSA_WITH_AES_256_SHA256,
     TLS1_CK_RSA_WITH_AES_256_SHA256,
     SSL_kRSA,
     SSL_aRSA,
     SSL_AES256,
     SSL_SHA256,
     SSL_HANDSHAKE_MAC_SHA256,
    },

    /* Cipher 67 */
    {
     TLS1_TXT_DHE_RSA_WITH_AES_128_SHA256,
     TLS1_CK_DHE_RSA_WITH_AES_128_SHA256,
     SSL_kDHE,
     SSL_aRSA,
     SSL_AES128,
     SSL_SHA256,
     SSL_HANDSHAKE_MAC_SHA256,
    },

    /* Cipher 6B */
    {
     TLS1_TXT_DHE_RSA_WITH_AES_256_SHA256,
     TLS1_CK_DHE_RSA_WITH_AES_256_SHA256,
     SSL_kDHE,
     SSL_aRSA,
     SSL_AES256,
     SSL_SHA256,
     SSL_HANDSHAKE_MAC_SHA256,
    },

    /* PSK cipher suites. */

    /* Cipher 8C */
    {
     TLS1_TXT_PSK_WITH_AES_128_CBC_SHA,
     TLS1_CK_PSK_WITH_AES_128_CBC_SHA,
     SSL_kPSK,
     SSL_aPSK,
     SSL_AES128,
     SSL_SHA1,
     SSL_HANDSHAKE_MAC_DEFAULT,
    },

    /* Cipher 8D */
    {
     TLS1_TXT_PSK_WITH_AES_256_CBC_SHA,
     TLS1_CK_PSK_WITH_AES_256_CBC_SHA,
     SSL_kPSK,
     SSL_aPSK,
     SSL_AES256,
     SSL_SHA1,
     SSL_HANDSHAKE_MAC_DEFAULT,
    },

    /* GCM ciphersuites from RFC5288 */

    /* Cipher 9C */
    {
     TLS1_TXT_RSA_WITH_AES_128_GCM_SHA256,
     TLS1_CK_RSA_WITH_AES_128_GCM_SHA256,
     SSL_kRSA,
     SSL_aRSA,
     SSL_AES128GCM,
     SSL_AEAD,
     SSL_HANDSHAKE_MAC_SHA256,
    },

    /* Cipher 9D */
    {
     TLS1_TXT_RSA_WITH_AES_256_GCM_SHA384,
     TLS1_CK_RSA_WITH_AES_256_GCM_SHA384,
     SSL_kRSA,
     SSL_aRSA,
     SSL_AES256GCM,
     SSL_AEAD,
     SSL_HANDSHAKE_MAC_SHA384,
    },

    /* Cipher 9E */
    {
     TLS1_TXT_DHE_RSA_WITH_AES_128_GCM_SHA256,
     TLS1_CK_DHE_RSA_WITH_AES_128_GCM_SHA256,
     SSL_kDHE,
     SSL_aRSA,
     SSL_AES128GCM,
     SSL_AEAD,
     SSL_HANDSHAKE_MAC_SHA256,
    },

    /* Cipher 9F */
    {
     TLS1_TXT_DHE_RSA_WITH_AES_256_GCM_SHA384,
     TLS1_CK_DHE_RSA_WITH_AES_256_GCM_SHA384,
     SSL_kDHE,
     SSL_aRSA,
     SSL_AES256GCM,
     SSL_AEAD,
     SSL_HANDSHAKE_MAC_SHA384,
    },

    /* TLS 1.3 suites. */

    /* Cipher 1301 */
    {
     TLS1_TXT_AES_128_GCM_SHA256,
     TLS1_CK_AES_128_GCM_SHA256,
     SSL_kGENERIC,
     SSL_aGENERIC,
     SSL_AES128GCM,
     SSL_AEAD,
     SSL_HANDSHAKE_MAC_SHA256,
    },

    /* Cipher 1302 */
    {
     TLS1_TXT_AES_256_GCM_SHA384,
     TLS1_CK_AES_256_GCM_SHA384,
     SSL_kGENERIC,
     SSL_aGENERIC,
     SSL_AES256GCM,
     SSL_AEAD,
     SSL_HANDSHAKE_MAC_SHA384,
    },

    /* Cipher 1303 */
    {
     TLS1_TXT_CHACHA20_POLY1305_SHA256,
     TLS1_CK_CHACHA20_POLY1305_SHA256,
     SSL_kGENERIC,
     SSL_aGENERIC,
     SSL_CHACHA20POLY1305,
     SSL_AEAD,
     SSL_HANDSHAKE_MAC_SHA256,
    },

    /* Cipher C009 */
    {
     TLS1_TXT_ECDHE_ECDSA_WITH_AES_128_CBC_SHA,
     TLS1_CK_ECDHE_ECDSA_WITH_AES_128_CBC_SHA,
     SSL_kECDHE,
     SSL_aECDSA,
     SSL_AES128,
     SSL_SHA1,
     SSL_HANDSHAKE_MAC_DEFAULT,
    },

    /* Cipher C00A */
    {
     TLS1_TXT_ECDHE_ECDSA_WITH_AES_256_CBC_SHA,
     TLS1_CK_ECDHE_ECDSA_WITH_AES_256_CBC_SHA,
     SSL_kECDHE,
     SSL_aECDSA,
     SSL_AES256,
     SSL_SHA1,
     SSL_HANDSHAKE_MAC_DEFAULT,
    },

    /* Cipher C013 */
    {
     TLS1_TXT_ECDHE_RSA_WITH_AES_128_CBC_SHA,
     TLS1_CK_ECDHE_RSA_WITH_AES_128_CBC_SHA,
     SSL_kECDHE,
     SSL_aRSA,
     SSL_AES128,
     SSL_SHA1,
     SSL_HANDSHAKE_MAC_DEFAULT,
    },

    /* Cipher C014 */
    {
     TLS1_TXT_ECDHE_RSA_WITH_AES_256_CBC_SHA,
     TLS1_CK_ECDHE_RSA_WITH_AES_256_CBC_SHA,
     SSL_kECDHE,
     SSL_aRSA,
     SSL_AES256,
     SSL_SHA1,
     SSL_HANDSHAKE_MAC_DEFAULT,
    },


    /* HMAC based TLS v1.2 ciphersuites from RFC5289 */

    /* Cipher C023 */
    {
     TLS1_TXT_ECDHE_ECDSA_WITH_AES_128_SHA256,
     TLS1_CK_ECDHE_ECDSA_WITH_AES_128_SHA256,
     SSL_kECDHE,
     SSL_aECDSA,
     SSL_AES128,
     SSL_SHA256,
     SSL_HANDSHAKE_MAC_SHA256,
    },

    /* Cipher C024 */
    {
     TLS1_TXT_ECDHE_ECDSA_WITH_AES_256_SHA384,
     TLS1_CK_ECDHE_ECDSA_WITH_AES_256_SHA384,
     SSL_kECDHE,
     SSL_aECDSA,
     SSL_AES256,
     SSL_SHA384,
     SSL_HANDSHAKE_MAC_SHA384,
    },

    /* Cipher C027 */
    {
     TLS1_TXT_ECDHE_RSA_WITH_AES_128_SHA256,
     TLS1_CK_ECDHE_RSA_WITH_AES_128_SHA256,
     SSL_kECDHE,
     SSL_aRSA,
     SSL_AES128,
     SSL_SHA256,
     SSL_HANDSHAKE_MAC_SHA256,
    },

    /* Cipher C028 */
    {
     TLS1_TXT_ECDHE_RSA_WITH_AES_256_SHA384,
     TLS1_CK_ECDHE_RSA_WITH_AES_256_SHA384,
     SSL_kECDHE,
     SSL_aRSA,
     SSL_AES256,
     SSL_SHA384,
     SSL_HANDSHAKE_MAC_SHA384,
    },


    /* GCM based TLS v1.2 ciphersuites from RFC5289 */

    /* Cipher C02B */
    {
     TLS1_TXT_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256,
     TLS1_CK_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256,
     SSL_kECDHE,
     SSL_aECDSA,
     SSL_AES128GCM,
     SSL_AEAD,
     SSL_HANDSHAKE_MAC_SHA256,
    },

    /* Cipher C02C */
    {
     TLS1_TXT_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384,
     TLS1_CK_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384,
     SSL_kECDHE,
     SSL_aECDSA,
     SSL_AES256GCM,
     SSL_AEAD,
     SSL_HANDSHAKE_MAC_SHA384,
    },

    /* Cipher C02F */
    {
     TLS1_TXT_ECDHE_RSA_WITH_AES_128_GCM_SHA256,
     TLS1_CK_ECDHE_RSA_WITH_AES_128_GCM_SHA256,
     SSL_kECDHE,
     SSL_aRSA,
     SSL_AES128GCM,
     SSL_AEAD,
     SSL_HANDSHAKE_MAC_SHA256,
    },

    /* Cipher C030 */
    {
     TLS1_TXT_ECDHE_RSA_WITH_AES_256_GCM_SHA384,
     TLS1_CK_ECDHE_RSA_WITH_AES_256_GCM_SHA384,
     SSL_kECDHE,
     SSL_aRSA,
     SSL_AES256GCM,
     SSL_AEAD,
     SSL_HANDSHAKE_MAC_SHA384,
    },

    /* ECDHE-PSK cipher suites. */

    /* Cipher C035 */
    {
     TLS1_TXT_ECDHE_PSK_WITH_AES_128_CBC_SHA,
     TLS1_CK_ECDHE_PSK_WITH_AES_128_CBC_SHA,
     SSL_kECDHE,
     SSL_aPSK,
     SSL_AES128,
     SSL_SHA1,
     SSL_HANDSHAKE_MAC_DEFAULT,
    },

    /* Cipher C036 */
    {
     TLS1_TXT_ECDHE_PSK_WITH_AES_256_CBC_SHA,
     TLS1_CK_ECDHE_PSK_WITH_AES_256_CBC_SHA,
     SSL_kECDHE,
     SSL_aPSK,
     SSL_AES256,
     SSL_SHA1,
     SSL_HANDSHAKE_MAC_DEFAULT,
    },

    /* ChaCha20-Poly1305 cipher suites. */

#if !defined(BORINGSSL_ANDROID_SYSTEM)
    {
     TLS1_TXT_ECDHE_RSA_WITH_CHACHA20_POLY1305_OLD,
     TLS1_CK_ECDHE_RSA_CHACHA20_POLY1305_OLD,
     SSL_kECDHE,
     SSL_aRSA,
     SSL_CHACHA20POLY1305_OLD,
     SSL_AEAD,
     SSL_HANDSHAKE_MAC_SHA256,
    },

    {
     TLS1_TXT_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_OLD,
     TLS1_CK_ECDHE_ECDSA_CHACHA20_POLY1305_OLD,
     SSL_kECDHE,
     SSL_aECDSA,
     SSL_CHACHA20POLY1305_OLD,
     SSL_AEAD,
     SSL_HANDSHAKE_MAC_SHA256,
    },
#endif

    /* Cipher CCA8 */
    {
     TLS1_TXT_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256,
     TLS1_CK_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256,
     SSL_kECDHE,
     SSL_aRSA,
     SSL_CHACHA20POLY1305,
     SSL_AEAD,
     SSL_HANDSHAKE_MAC_SHA256,
    },

    /* Cipher CCA9 */
    {
     TLS1_TXT_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256,
     TLS1_CK_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256,
     SSL_kECDHE,
     SSL_aECDSA,
     SSL_CHACHA20POLY1305,
     SSL_AEAD,
     SSL_HANDSHAKE_MAC_SHA256,
    },

    /* Cipher CCAB */
    {
     TLS1_TXT_ECDHE_PSK_WITH_CHACHA20_POLY1305_SHA256,
     TLS1_CK_ECDHE_PSK_WITH_CHACHA20_POLY1305_SHA256,
     SSL_kECDHE,
     SSL_aPSK,
     SSL_CHACHA20POLY1305,
     SSL_AEAD,
     SSL_HANDSHAKE_MAC_SHA256,
    },

};

static const size_t kCiphersLen = OPENSSL_ARRAY_SIZE(kCiphers);

#define CIPHER_ADD 1
#define CIPHER_KILL 2
#define CIPHER_DEL 3
#define CIPHER_ORD 4
#define CIPHER_SPECIAL 5

typedef struct cipher_order_st {
  const SSL_CIPHER *cipher;
  int active;
  int in_group;
  struct cipher_order_st *next, *prev;
} CIPHER_ORDER;

typedef struct cipher_alias_st {
  /* name is the name of the cipher alias. */
  const char *name;

  /* The following fields are bitmasks for the corresponding fields on
   * |SSL_CIPHER|. A cipher matches a cipher alias iff, for each bitmask, the
   * bit corresponding to the cipher's value is set to 1. If any bitmask is
   * all zeroes, the alias matches nothing. Use |~0u| for the default value. */
  uint32_t algorithm_mkey;
  uint32_t algorithm_auth;
  uint32_t algorithm_enc;
  uint32_t algorithm_mac;

  /* min_version, if non-zero, matches all ciphers which were added in that
   * particular protocol version. */
  uint16_t min_version;
} CIPHER_ALIAS;

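/* Sketch of the matching test (added here for illustration, not part of the
 * original file): the "AESGCM" alias below sets |algorithm_enc| to
 * SSL_AES128GCM | SSL_AES256GCM and leaves the other masks at ~0u, so a
 * candidate cipher matches an alias exactly when each of its fields
 * intersects the corresponding mask:
 *
 *   int matches = (alias->algorithm_mkey & cipher->algorithm_mkey) &&
 *                 (alias->algorithm_auth & cipher->algorithm_auth) &&
 *                 (alias->algorithm_enc & cipher->algorithm_enc) &&
 *                 (alias->algorithm_mac & cipher->algorithm_mac);
 *
 * This mirrors how |ssl_cipher_process_rulestr| intersects alias masks and
 * how |ssl_cipher_apply_rule| tests them against each cipher. */
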
static const CIPHER_ALIAS kCipherAliases[] = {
    /* "ALL" doesn't include eNULL. It must be explicitly enabled. */
    {"ALL", ~0u, ~0u, ~SSL_eNULL, ~0u, 0},

    /* The "COMPLEMENTOFDEFAULT" rule is omitted. It matches nothing. */

    /* key exchange aliases
     * (some of those using only a single bit here combine
     * multiple key exchange algs according to the RFCs,
     * e.g. kEDH combines DHE_DSS and DHE_RSA) */
    {"kRSA", SSL_kRSA, ~0u, ~0u, ~0u, 0},

    {"kDHE", SSL_kDHE, ~0u, ~0u, ~0u, 0},
    {"kEDH", SSL_kDHE, ~0u, ~0u, ~0u, 0},
    {"DH", SSL_kDHE, ~0u, ~0u, ~0u, 0},

    {"kECDHE", SSL_kECDHE, ~0u, ~0u, ~0u, 0},
    {"kEECDH", SSL_kECDHE, ~0u, ~0u, ~0u, 0},
    {"ECDH", SSL_kECDHE, ~0u, ~0u, ~0u, 0},

    {"kPSK", SSL_kPSK, ~0u, ~0u, ~0u, 0},

    /* server authentication aliases */
    {"aRSA", ~0u, SSL_aRSA, ~SSL_eNULL, ~0u, 0},
    {"aECDSA", ~0u, SSL_aECDSA, ~0u, ~0u, 0},
    {"ECDSA", ~0u, SSL_aECDSA, ~0u, ~0u, 0},
    {"aPSK", ~0u, SSL_aPSK, ~0u, ~0u, 0},

    /* aliases combining key exchange and server authentication */
    {"DHE", SSL_kDHE, ~0u, ~0u, ~0u, 0},
    {"EDH", SSL_kDHE, ~0u, ~0u, ~0u, 0},
    {"ECDHE", SSL_kECDHE, ~0u, ~0u, ~0u, 0},
    {"EECDH", SSL_kECDHE, ~0u, ~0u, ~0u, 0},
    {"RSA", SSL_kRSA, SSL_aRSA, ~SSL_eNULL, ~0u, 0},
    {"PSK", SSL_kPSK, SSL_aPSK, ~0u, ~0u, 0},

    /* symmetric encryption aliases */
    {"3DES", ~0u, ~0u, SSL_3DES, ~0u, 0},
    {"AES128", ~0u, ~0u, SSL_AES128 | SSL_AES128GCM, ~0u, 0},
    {"AES256", ~0u, ~0u, SSL_AES256 | SSL_AES256GCM, ~0u, 0},
    {"AES", ~0u, ~0u, SSL_AES, ~0u, 0},
    {"AESGCM", ~0u, ~0u, SSL_AES128GCM | SSL_AES256GCM, ~0u, 0},
    {"CHACHA20", ~0u, ~0u, SSL_CHACHA20POLY1305 | SSL_CHACHA20POLY1305_OLD, ~0u,
     0},

    /* MAC aliases */
    {"SHA1", ~0u, ~0u, ~SSL_eNULL, SSL_SHA1, 0},
    {"SHA", ~0u, ~0u, ~SSL_eNULL, SSL_SHA1, 0},
    {"SHA256", ~0u, ~0u, ~0u, SSL_SHA256, 0},
    {"SHA384", ~0u, ~0u, ~0u, SSL_SHA384, 0},

    /* Legacy protocol minimum version aliases. "TLSv1" is intentionally the
     * same as "SSLv3". */
    {"SSLv3", ~0u, ~0u, ~SSL_eNULL, ~0u, SSL3_VERSION},
    {"TLSv1", ~0u, ~0u, ~SSL_eNULL, ~0u, SSL3_VERSION},
    {"TLSv1.2", ~0u, ~0u, ~SSL_eNULL, ~0u, TLS1_2_VERSION},

    /* Legacy strength classes. */
    {"HIGH", ~0u, ~0u, ~SSL_eNULL, ~0u, 0},
    {"FIPS", ~0u, ~0u, ~SSL_eNULL, ~0u, 0},
};

static const size_t kCipherAliasesLen = OPENSSL_ARRAY_SIZE(kCipherAliases);

static int ssl_cipher_id_cmp(const void *in_a, const void *in_b) {
  const SSL_CIPHER *a = in_a;
  const SSL_CIPHER *b = in_b;

  if (a->id > b->id) {
    return 1;
  } else if (a->id < b->id) {
    return -1;
  } else {
    return 0;
  }
}

const SSL_CIPHER *SSL_get_cipher_by_value(uint16_t value) {
  SSL_CIPHER c;

  c.id = 0x03000000L | value;
  return bsearch(&c, kCiphers, kCiphersLen, sizeof(SSL_CIPHER),
                 ssl_cipher_id_cmp);
}

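/* Usage sketch (added for illustration, not part of the original file): TLS
 * cipher suite values on the wire are 16-bit, so looking up
 * TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256 (0xc02f) and printing its name might
 * look like the following, assuming <stdio.h> is available:
 *
 *   const SSL_CIPHER *cipher = SSL_get_cipher_by_value(0xc02f);
 *   if (cipher != NULL) {
 *     printf("%s\n", SSL_CIPHER_get_name(cipher));
 *   }
 *
 * The 0x03000000 prefix above places the 16-bit value in the TLS range of the
 * four-byte |id| space, matching how |kCiphers| stores its ids, and the
 * bsearch relies on |kCiphers| being sorted by id. */
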
int ssl_cipher_get_evp_aead(const EVP_AEAD **out_aead,
                            size_t *out_mac_secret_len,
                            size_t *out_fixed_iv_len,
                            const SSL_CIPHER *cipher, uint16_t version) {
  *out_aead = NULL;
  *out_mac_secret_len = 0;
  *out_fixed_iv_len = 0;

  if (cipher->algorithm_mac == SSL_AEAD) {
    if (cipher->algorithm_enc == SSL_AES128GCM) {
      *out_aead = EVP_aead_aes_128_gcm();
      *out_fixed_iv_len = 4;
    } else if (cipher->algorithm_enc == SSL_AES256GCM) {
      *out_aead = EVP_aead_aes_256_gcm();
      *out_fixed_iv_len = 4;
#if !defined(BORINGSSL_ANDROID_SYSTEM)
    } else if (cipher->algorithm_enc == SSL_CHACHA20POLY1305_OLD) {
      *out_aead = EVP_aead_chacha20_poly1305_old();
      *out_fixed_iv_len = 0;
#endif
    } else if (cipher->algorithm_enc == SSL_CHACHA20POLY1305) {
      *out_aead = EVP_aead_chacha20_poly1305();
      *out_fixed_iv_len = 12;
    } else {
      return 0;
    }

    /* In TLS 1.3, the iv_len is equal to the AEAD nonce length whereas the
     * code above computes the TLS 1.2 construction. */
    if (version >= TLS1_3_VERSION) {
      *out_fixed_iv_len = EVP_AEAD_nonce_length(*out_aead);
    }
  } else if (cipher->algorithm_mac == SSL_SHA1) {
    if (cipher->algorithm_enc == SSL_eNULL) {
      if (version == SSL3_VERSION) {
        *out_aead = EVP_aead_null_sha1_ssl3();
      } else {
        *out_aead = EVP_aead_null_sha1_tls();
      }
    } else if (cipher->algorithm_enc == SSL_3DES) {
      if (version == SSL3_VERSION) {
        *out_aead = EVP_aead_des_ede3_cbc_sha1_ssl3();
        *out_fixed_iv_len = 8;
      } else if (version == TLS1_VERSION) {
        *out_aead = EVP_aead_des_ede3_cbc_sha1_tls_implicit_iv();
        *out_fixed_iv_len = 8;
      } else {
        *out_aead = EVP_aead_des_ede3_cbc_sha1_tls();
      }
    } else if (cipher->algorithm_enc == SSL_AES128) {
      if (version == SSL3_VERSION) {
        *out_aead = EVP_aead_aes_128_cbc_sha1_ssl3();
        *out_fixed_iv_len = 16;
      } else if (version == TLS1_VERSION) {
        *out_aead = EVP_aead_aes_128_cbc_sha1_tls_implicit_iv();
        *out_fixed_iv_len = 16;
      } else {
        *out_aead = EVP_aead_aes_128_cbc_sha1_tls();
      }
    } else if (cipher->algorithm_enc == SSL_AES256) {
      if (version == SSL3_VERSION) {
        *out_aead = EVP_aead_aes_256_cbc_sha1_ssl3();
        *out_fixed_iv_len = 16;
      } else if (version == TLS1_VERSION) {
        *out_aead = EVP_aead_aes_256_cbc_sha1_tls_implicit_iv();
        *out_fixed_iv_len = 16;
      } else {
        *out_aead = EVP_aead_aes_256_cbc_sha1_tls();
      }
    } else {
      return 0;
    }

    *out_mac_secret_len = SHA_DIGEST_LENGTH;
  } else if (cipher->algorithm_mac == SSL_SHA256) {
    if (cipher->algorithm_enc == SSL_AES128) {
      *out_aead = EVP_aead_aes_128_cbc_sha256_tls();
    } else if (cipher->algorithm_enc == SSL_AES256) {
      *out_aead = EVP_aead_aes_256_cbc_sha256_tls();
    } else {
      return 0;
    }

    *out_mac_secret_len = SHA256_DIGEST_LENGTH;
  } else if (cipher->algorithm_mac == SSL_SHA384) {
    if (cipher->algorithm_enc != SSL_AES256) {
      return 0;
    }

    *out_aead = EVP_aead_aes_256_cbc_sha384_tls();
    *out_mac_secret_len = SHA384_DIGEST_LENGTH;
  } else {
    return 0;
  }

  return 1;
}

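/* Sketch of a call site (added for illustration; the real callers live
 * elsewhere in ssl/): resolving the record-protection AEAD for a negotiated
 * cipher at TLS 1.2 might look like:
 *
 *   const EVP_AEAD *aead;
 *   size_t mac_secret_len, fixed_iv_len;
 *   if (!ssl_cipher_get_evp_aead(&aead, &mac_secret_len, &fixed_iv_len,
 *                                cipher, TLS1_2_VERSION)) {
 *     // Unsupported cipher/version combination.
 *   }
 *
 * For AEAD-only suites |mac_secret_len| stays zero; for the legacy CBC suites
 * it is the HMAC key length and the returned AEAD is one of the CBC+HMAC
 * constructions wrapped as an |EVP_AEAD| above. */
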
const EVP_MD *ssl_get_handshake_digest(uint32_t algorithm_prf) {
  switch (algorithm_prf) {
    case SSL_HANDSHAKE_MAC_DEFAULT:
      return EVP_sha1();
    case SSL_HANDSHAKE_MAC_SHA256:
      return EVP_sha256();
    case SSL_HANDSHAKE_MAC_SHA384:
      return EVP_sha384();
    default:
      return NULL;
  }
}

#define ITEM_SEP(a) \
  (((a) == ':') || ((a) == ' ') || ((a) == ';') || ((a) == ','))

/* rule_equals returns one iff the NUL-terminated string |rule| is equal to the
 * |buf_len| bytes at |buf|. */
static int rule_equals(const char *rule, const char *buf, size_t buf_len) {
  /* |strncmp| alone only checks that |buf| is a prefix of |rule|. */
  return strncmp(rule, buf, buf_len) == 0 && rule[buf_len] == '\0';
}

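/* Illustration (added): while parsing the token "AES" inside the larger
 * string "AES:SHA1", |buf| points at "AES:SHA1" and |buf_len| is 3.
 * rule_equals("AES", buf, 3) returns 1, but rule_equals("AES128", buf, 3)
 * returns 0 because the trailing '\0' check rejects a mere prefix match. */
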
static void ll_append_tail(CIPHER_ORDER **head, CIPHER_ORDER *curr,
                           CIPHER_ORDER **tail) {
  if (curr == *tail) {
    return;
  }
  if (curr == *head) {
    *head = curr->next;
  }
  if (curr->prev != NULL) {
    curr->prev->next = curr->next;
  }
  if (curr->next != NULL) {
    curr->next->prev = curr->prev;
  }
  (*tail)->next = curr;
  curr->prev = *tail;
  curr->next = NULL;
  *tail = curr;
}

static void ll_append_head(CIPHER_ORDER **head, CIPHER_ORDER *curr,
                           CIPHER_ORDER **tail) {
  if (curr == *head) {
    return;
  }
  if (curr == *tail) {
    *tail = curr->prev;
  }
  if (curr->next != NULL) {
    curr->next->prev = curr->prev;
  }
  if (curr->prev != NULL) {
    curr->prev->next = curr->next;
  }
  (*head)->prev = curr;
  curr->next = *head;
  curr->prev = NULL;
  *head = curr;
}

static void ssl_cipher_collect_ciphers(const SSL_PROTOCOL_METHOD *ssl_method,
                                       CIPHER_ORDER *co_list,
                                       CIPHER_ORDER **head_p,
                                       CIPHER_ORDER **tail_p) {
  /* The set of ciphers is static, but some subset may be unsupported by
   * |ssl_method|, so the list may be smaller. */
  size_t co_list_num = 0;
  for (size_t i = 0; i < kCiphersLen; i++) {
    const SSL_CIPHER *cipher = &kCiphers[i];
    if (ssl_method->supports_cipher(cipher) &&
        /* TLS 1.3 ciphers do not participate in this mechanism. */
        cipher->algorithm_mkey != SSL_kGENERIC) {
      co_list[co_list_num].cipher = cipher;
      co_list[co_list_num].next = NULL;
      co_list[co_list_num].prev = NULL;
      co_list[co_list_num].active = 0;
      co_list[co_list_num].in_group = 0;
      co_list_num++;
    }
  }

  /* Prepare linked list from list entries. */
  if (co_list_num > 0) {
    co_list[0].prev = NULL;

    if (co_list_num > 1) {
      co_list[0].next = &co_list[1];

      for (size_t i = 1; i < co_list_num - 1; i++) {
        co_list[i].prev = &co_list[i - 1];
        co_list[i].next = &co_list[i + 1];
      }

      co_list[co_list_num - 1].prev = &co_list[co_list_num - 2];
    }

    co_list[co_list_num - 1].next = NULL;

    *head_p = &co_list[0];
    *tail_p = &co_list[co_list_num - 1];
  }
}

/* ssl_cipher_apply_rule applies the rule type |rule| to ciphers matching its
 * parameters in the linked list from |*head_p| to |*tail_p|. It writes the new
 * head and tail of the list to |*head_p| and |*tail_p|, respectively.
 *
 * - If |cipher_id| is non-zero, only that cipher is selected.
 * - Otherwise, if |strength_bits| is non-negative, it selects ciphers
 *   of that strength.
 * - Otherwise, it selects ciphers that match each bitmask in |alg_*| and
 *   |min_version|. */
static void ssl_cipher_apply_rule(
    uint32_t cipher_id, uint32_t alg_mkey, uint32_t alg_auth,
    uint32_t alg_enc, uint32_t alg_mac, uint16_t min_version, int rule,
    int strength_bits, int in_group, CIPHER_ORDER **head_p,
    CIPHER_ORDER **tail_p) {
  CIPHER_ORDER *head, *tail, *curr, *next, *last;
  const SSL_CIPHER *cp;
  int reverse = 0;

  if (cipher_id == 0 && strength_bits == -1 && min_version == 0 &&
      (alg_mkey == 0 || alg_auth == 0 || alg_enc == 0 || alg_mac == 0)) {
    /* The rule matches nothing, so bail early. */
    return;
  }

  if (rule == CIPHER_DEL) {
    /* needed to maintain sorting between currently deleted ciphers */
    reverse = 1;
  }

  head = *head_p;
  tail = *tail_p;

  if (reverse) {
    next = tail;
    last = head;
  } else {
    next = head;
    last = tail;
  }

  curr = NULL;
  for (;;) {
    if (curr == last) {
      break;
    }

    curr = next;
    if (curr == NULL) {
      break;
    }

    next = reverse ? curr->prev : curr->next;
    cp = curr->cipher;

    /* The selection criterion is either a specific cipher, the value of
     * |strength_bits|, or the algorithms used. */
    if (cipher_id != 0) {
      if (cipher_id != cp->id) {
        continue;
      }
    } else if (strength_bits >= 0) {
      if (strength_bits != SSL_CIPHER_get_bits(cp, NULL)) {
        continue;
      }
    } else {
      if (!(alg_mkey & cp->algorithm_mkey) ||
          !(alg_auth & cp->algorithm_auth) ||
          !(alg_enc & cp->algorithm_enc) ||
          !(alg_mac & cp->algorithm_mac) ||
          (min_version != 0 && SSL_CIPHER_get_min_version(cp) != min_version)) {
        continue;
      }
    }

    /* add the cipher if it has not been added yet. */
    if (rule == CIPHER_ADD) {
      /* reverse == 0 */
      if (!curr->active) {
        ll_append_tail(&head, curr, &tail);
        curr->active = 1;
        curr->in_group = in_group;
      }
    }

    /* Move the added cipher to this location */
    else if (rule == CIPHER_ORD) {
      /* reverse == 0 */
      if (curr->active) {
        ll_append_tail(&head, curr, &tail);
        curr->in_group = 0;
      }
    } else if (rule == CIPHER_DEL) {
      /* reverse == 1 */
      if (curr->active) {
        /* most recently deleted ciphersuites get best positions
         * for any future CIPHER_ADD (note that the CIPHER_DEL loop
         * works in reverse to maintain the order) */
        ll_append_head(&head, curr, &tail);
        curr->active = 0;
        curr->in_group = 0;
      }
    } else if (rule == CIPHER_KILL) {
      /* reverse == 0 */
      if (head == curr) {
        head = curr->next;
      } else {
        curr->prev->next = curr->next;
      }

      if (tail == curr) {
        tail = curr->prev;
      }
      curr->active = 0;
      if (curr->next != NULL) {
        curr->next->prev = curr->prev;
      }
      if (curr->prev != NULL) {
        curr->prev->next = curr->next;
      }
      curr->next = NULL;
      curr->prev = NULL;
    }
  }

  *head_p = head;
  *tail_p = tail;
}

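/* Worked example (added for illustration): the configuration token "AES128"
 * resolves to the kCipherAliases entry
 * {"AES128", ~0u, ~0u, SSL_AES128 | SSL_AES128GCM, ~0u, 0}, so
 * ssl_cipher_process_rulestr below ends up calling, in effect:
 *
 *   ssl_cipher_apply_rule(0, ~0u, ~0u, SSL_AES128 | SSL_AES128GCM, ~0u, 0,
 *                         CIPHER_ADD, -1, 0, &head, &tail);
 *
 * which appends every not-yet-active AES-128 CBC and GCM suite to the tail of
 * the preference list, in the order they currently appear. */
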
static int ssl_cipher_strength_sort(CIPHER_ORDER **head_p,
                                    CIPHER_ORDER **tail_p) {
  int max_strength_bits, i, *number_uses;
  CIPHER_ORDER *curr;

  /* This routine sorts the ciphers with descending strength. The sorting must
   * keep the pre-sorted sequence, so we apply the normal sorting routine as
   * '+' movement to the end of the list. */
  max_strength_bits = 0;
  curr = *head_p;
  while (curr != NULL) {
    if (curr->active &&
        SSL_CIPHER_get_bits(curr->cipher, NULL) > max_strength_bits) {
      max_strength_bits = SSL_CIPHER_get_bits(curr->cipher, NULL);
    }
    curr = curr->next;
  }

  number_uses = OPENSSL_malloc((max_strength_bits + 1) * sizeof(int));
  if (!number_uses) {
    OPENSSL_PUT_ERROR(SSL, ERR_R_MALLOC_FAILURE);
    return 0;
  }
  OPENSSL_memset(number_uses, 0, (max_strength_bits + 1) * sizeof(int));

  /* Now find the strength_bits values actually used. */
  curr = *head_p;
  while (curr != NULL) {
    if (curr->active) {
      number_uses[SSL_CIPHER_get_bits(curr->cipher, NULL)]++;
    }
    curr = curr->next;
  }

  /* Go through the list of used strength_bits values in descending order. */
  for (i = max_strength_bits; i >= 0; i--) {
    if (number_uses[i] > 0) {
      ssl_cipher_apply_rule(0, 0, 0, 0, 0, 0, CIPHER_ORD, i, 0, head_p, tail_p);
    }
  }

  OPENSSL_free(number_uses);
  return 1;
}

static int ssl_cipher_process_rulestr(const SSL_PROTOCOL_METHOD *ssl_method,
                                      const char *rule_str,
                                      CIPHER_ORDER **head_p,
                                      CIPHER_ORDER **tail_p) {
  uint32_t alg_mkey, alg_auth, alg_enc, alg_mac;
  uint16_t min_version;
  const char *l, *buf;
  int multi, skip_rule, rule, ok, in_group = 0, has_group = 0;
  size_t j, buf_len;
  uint32_t cipher_id;
  char ch;

  l = rule_str;
  for (;;) {
    ch = *l;

    if (ch == '\0') {
      break; /* done */
    }

    if (in_group) {
      if (ch == ']') {
        if (*tail_p) {
          (*tail_p)->in_group = 0;
        }
        in_group = 0;
        l++;
        continue;
      }

      if (ch == '|') {
        rule = CIPHER_ADD;
        l++;
        continue;
      } else if (!(ch >= 'a' && ch <= 'z') && !(ch >= 'A' && ch <= 'Z') &&
                 !(ch >= '0' && ch <= '9')) {
        OPENSSL_PUT_ERROR(SSL, SSL_R_UNEXPECTED_OPERATOR_IN_GROUP);
        return 0;
      } else {
        rule = CIPHER_ADD;
      }
    } else if (ch == '-') {
      rule = CIPHER_DEL;
      l++;
    } else if (ch == '+') {
      rule = CIPHER_ORD;
      l++;
    } else if (ch == '!') {
      rule = CIPHER_KILL;
      l++;
    } else if (ch == '@') {
      rule = CIPHER_SPECIAL;
      l++;
    } else if (ch == '[') {
      if (in_group) {
        OPENSSL_PUT_ERROR(SSL, SSL_R_NESTED_GROUP);
        return 0;
      }
      in_group = 1;
      has_group = 1;
      l++;
      continue;
    } else {
      rule = CIPHER_ADD;
    }

    /* If preference groups are enabled, the only legal operator is +.
     * Otherwise the in_group bits will get mixed up. */
    if (has_group && rule != CIPHER_ADD) {
      OPENSSL_PUT_ERROR(SSL, SSL_R_MIXED_SPECIAL_OPERATOR_WITH_GROUPS);
      return 0;
    }

    if (ITEM_SEP(ch)) {
      l++;
      continue;
    }

    multi = 0;
    cipher_id = 0;
    alg_mkey = ~0u;
    alg_auth = ~0u;
    alg_enc = ~0u;
    alg_mac = ~0u;
    min_version = 0;
    skip_rule = 0;

    for (;;) {
      ch = *l;
      buf = l;
      buf_len = 0;
      while (((ch >= 'A') && (ch <= 'Z')) || ((ch >= '0') && (ch <= '9')) ||
             ((ch >= 'a') && (ch <= 'z')) || (ch == '-') || (ch == '.')) {
        ch = *(++l);
        buf_len++;
      }

      if (buf_len == 0) {
        /* We hit something we cannot deal with: it is neither a command nor a
         * separator nor alphanumeric, so we call this an error. */
        OPENSSL_PUT_ERROR(SSL, SSL_R_INVALID_COMMAND);
        return 0;
      }

      if (rule == CIPHER_SPECIAL) {
        break;
      }

      /* Look for a matching exact cipher. These aren't allowed in multipart
       * rules. */
      if (!multi && ch != '+') {
        for (j = 0; j < kCiphersLen; j++) {
          const SSL_CIPHER *cipher = &kCiphers[j];
          if (rule_equals(cipher->name, buf, buf_len)) {
            cipher_id = cipher->id;
            break;
          }
        }
      }
      if (cipher_id == 0) {
        /* If not an exact cipher, look for a matching cipher alias. */
        for (j = 0; j < kCipherAliasesLen; j++) {
          if (rule_equals(kCipherAliases[j].name, buf, buf_len)) {
            alg_mkey &= kCipherAliases[j].algorithm_mkey;
            alg_auth &= kCipherAliases[j].algorithm_auth;
            alg_enc &= kCipherAliases[j].algorithm_enc;
            alg_mac &= kCipherAliases[j].algorithm_mac;

            if (min_version != 0 &&
                min_version != kCipherAliases[j].min_version) {
              skip_rule = 1;
            } else {
              min_version = kCipherAliases[j].min_version;
            }
            break;
          }
        }
        if (j == kCipherAliasesLen) {
          skip_rule = 1;
        }
      }

      /* Check for a multipart rule. */
      if (ch != '+') {
        break;
      }
      l++;
      multi = 1;
    }

    /* If one of the CHACHA20_POLY1305 variants is selected, include the other
     * as well. They have the same name to avoid requiring changes in
     * configuration. Apply this transformation late so that the cipher name
     * still behaves as an exact name and not an alias in multipart rules.
     *
     * This is temporary and will be removed when the pre-standard construction
     * is removed. */
    if (cipher_id == TLS1_CK_ECDHE_RSA_CHACHA20_POLY1305_OLD ||
        cipher_id == TLS1_CK_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256) {
      cipher_id = 0;
      alg_mkey = SSL_kECDHE;
      alg_auth = SSL_aRSA;
      alg_enc = SSL_CHACHA20POLY1305 | SSL_CHACHA20POLY1305_OLD;
      alg_mac = SSL_AEAD;
    } else if (cipher_id == TLS1_CK_ECDHE_ECDSA_CHACHA20_POLY1305_OLD ||
               cipher_id == TLS1_CK_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256) {
      cipher_id = 0;
      alg_mkey = SSL_kECDHE;
      alg_auth = SSL_aECDSA;
      alg_enc = SSL_CHACHA20POLY1305 | SSL_CHACHA20POLY1305_OLD;
      alg_mac = SSL_AEAD;
    }

    /* Ok, we have the rule, now apply it. */
    if (rule == CIPHER_SPECIAL) {
      /* special command */
      ok = 0;
      if (buf_len == 8 && !strncmp(buf, "STRENGTH", 8)) {
        ok = ssl_cipher_strength_sort(head_p, tail_p);
      } else {
        OPENSSL_PUT_ERROR(SSL, SSL_R_INVALID_COMMAND);
      }

      if (ok == 0) {
        return 0;
      }

      /* We do not support any "multi" options together with "@", so throw away
       * the rest of the command, if any left, until end or ':' is found. */
      while (*l != '\0' && !ITEM_SEP(*l)) {
        l++;
      }
    } else if (!skip_rule) {
      ssl_cipher_apply_rule(cipher_id, alg_mkey, alg_auth, alg_enc, alg_mac,
                            min_version, rule, -1, in_group, head_p, tail_p);
    }
  }

  if (in_group) {
    OPENSSL_PUT_ERROR(SSL, SSL_R_INVALID_COMMAND);
    return 0;
  }

  return 1;
}

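/* Example rule strings (added for illustration; the full mini-language is
 * documented with SSL_CTX_set_cipher_list in ssl.h):
 *
 *   "ALL:!aPSK"    enable everything except eNULL, then remove suites using
 *                  PSK authentication (CIPHER_ADD followed by CIPHER_KILL).
 *   "@STRENGTH"    re-sort the currently enabled list by cipher strength via
 *                  ssl_cipher_strength_sort.
 *   "[ECDHE-ECDSA-AES128-GCM-SHA256|ECDHE-RSA-AES128-GCM-SHA256]"
 *                  an equal-preference group: both suites share one
 *                  preference slot, tracked through the in_group flags. */
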
STACK_OF(SSL_CIPHER) *
ssl_create_cipher_list(const SSL_PROTOCOL_METHOD *ssl_method,
                       struct ssl_cipher_preference_list_st **out_cipher_list,
                       const char *rule_str) {
  STACK_OF(SSL_CIPHER) *cipherstack = NULL;
  CIPHER_ORDER *co_list = NULL, *head = NULL, *tail = NULL, *curr;
  uint8_t *in_group_flags = NULL;
  unsigned int num_in_group_flags = 0;
  struct ssl_cipher_preference_list_st *pref_list = NULL;

  /* Return with error if nothing to do. */
  if (rule_str == NULL || out_cipher_list == NULL) {
    return NULL;
  }

  /* Now we have to collect the available ciphers from the compiled in ciphers.
   * We cannot get more than the number compiled in, so it is used for
   * allocation. */
  co_list = OPENSSL_malloc(sizeof(CIPHER_ORDER) * kCiphersLen);
  if (co_list == NULL) {
    OPENSSL_PUT_ERROR(SSL, ERR_R_MALLOC_FAILURE);
    return NULL;
  }

  ssl_cipher_collect_ciphers(ssl_method, co_list, &head, &tail);

  /* Now arrange all ciphers by preference:
   * TODO(davidben): Compute this order once and copy it. */

  /* Everything else being equal, prefer ECDHE_ECDSA and ECDHE_RSA over other
   * key exchange mechanisms */
  ssl_cipher_apply_rule(0, SSL_kECDHE, SSL_aECDSA, ~0u, ~0u, 0, CIPHER_ADD, -1,
                        0, &head, &tail);
  ssl_cipher_apply_rule(0, SSL_kECDHE, ~0u, ~0u, ~0u, 0, CIPHER_ADD, -1, 0,
                        &head, &tail);
  ssl_cipher_apply_rule(0, ~0u, ~0u, ~0u, ~0u, 0, CIPHER_DEL, -1, 0, &head,
                        &tail);

  /* Order the bulk ciphers. First the preferred AEAD ciphers. We prefer
   * CHACHA20 unless there is hardware support for fast and constant-time
   * AES_GCM. Of the two CHACHA20 variants, the new one is preferred over the
   * old one. */
  if (EVP_has_aes_hardware()) {
    ssl_cipher_apply_rule(0, ~0u, ~0u, SSL_AES128GCM, ~0u, 0, CIPHER_ADD, -1, 0,
                          &head, &tail);
    ssl_cipher_apply_rule(0, ~0u, ~0u, SSL_AES256GCM, ~0u, 0, CIPHER_ADD, -1, 0,
                          &head, &tail);
    ssl_cipher_apply_rule(0, ~0u, ~0u, SSL_CHACHA20POLY1305, ~0u, 0, CIPHER_ADD,
                          -1, 0, &head, &tail);
    ssl_cipher_apply_rule(0, ~0u, ~0u, SSL_CHACHA20POLY1305_OLD, ~0u, 0,
                          CIPHER_ADD, -1, 0, &head, &tail);
  } else {
    ssl_cipher_apply_rule(0, ~0u, ~0u, SSL_CHACHA20POLY1305, ~0u, 0, CIPHER_ADD,
                          -1, 0, &head, &tail);
    ssl_cipher_apply_rule(0, ~0u, ~0u, SSL_CHACHA20POLY1305_OLD, ~0u, 0,
                          CIPHER_ADD, -1, 0, &head, &tail);
    ssl_cipher_apply_rule(0, ~0u, ~0u, SSL_AES128GCM, ~0u, 0, CIPHER_ADD, -1, 0,
                          &head, &tail);
    ssl_cipher_apply_rule(0, ~0u, ~0u, SSL_AES256GCM, ~0u, 0, CIPHER_ADD, -1, 0,
                          &head, &tail);
  }

  /* Then the legacy non-AEAD ciphers: AES_128_CBC, AES_256_CBC,
   * 3DES_EDE_CBC_SHA. */
  ssl_cipher_apply_rule(0, ~0u, ~0u, SSL_AES128, ~0u, 0, CIPHER_ADD, -1, 0,
                        &head, &tail);
  ssl_cipher_apply_rule(0, ~0u, ~0u, SSL_AES256, ~0u, 0, CIPHER_ADD, -1, 0,
                        &head, &tail);
  ssl_cipher_apply_rule(0, ~0u, ~0u, SSL_3DES, ~0u, 0, CIPHER_ADD, -1, 0, &head,
                        &tail);

  /* Temporarily enable everything else for sorting */
  ssl_cipher_apply_rule(0, ~0u, ~0u, ~0u, ~0u, 0, CIPHER_ADD, -1, 0, &head,
                        &tail);

  /* Move ciphers without forward secrecy to the end. */
  ssl_cipher_apply_rule(0, (SSL_kRSA | SSL_kPSK), ~0u, ~0u, ~0u, 0,
David Benjamin0344daf2015-04-08 02:08:01 -04001380 CIPHER_ORD, -1, 0, &head, &tail);
Adam Langley95c29f32014-06-20 12:00:00 -07001381
Adam Langleyfcf25832014-12-18 17:42:32 -08001382 /* Now disable everything (maintaining the ordering!) */
David Benjamind6e9eec2015-11-18 09:48:55 -05001383 ssl_cipher_apply_rule(0, ~0u, ~0u, ~0u, ~0u, 0, CIPHER_DEL, -1, 0, &head,
1384 &tail);
Adam Langley95c29f32014-06-20 12:00:00 -07001385
Adam Langleyfcf25832014-12-18 17:42:32 -08001386 /* If the rule_string begins with DEFAULT, apply the default rule before
1387 * using the (possibly available) additional rules. */
David Benjamin11a7b3c2016-11-03 17:03:48 -04001388 const char *rule_p = rule_str;
Adam Langleyfcf25832014-12-18 17:42:32 -08001389 if (strncmp(rule_str, "DEFAULT", 7) == 0) {
David Benjamin11a7b3c2016-11-03 17:03:48 -04001390 if (!ssl_cipher_process_rulestr(ssl_method, SSL_DEFAULT_CIPHER_LIST, &head,
1391 &tail)) {
1392 goto err;
1393 }
Adam Langleyfcf25832014-12-18 17:42:32 -08001394 rule_p += 7;
1395 if (*rule_p == ':') {
1396 rule_p++;
1397 }
1398 }
Adam Langley858a88d2014-06-20 12:00:00 -07001399
David Benjamin11a7b3c2016-11-03 17:03:48 -04001400 if (*rule_p != '\0' &&
1401 !ssl_cipher_process_rulestr(ssl_method, rule_p, &head, &tail)) {
Adam Langleyfcf25832014-12-18 17:42:32 -08001402 goto err;
1403 }
1404
1405 /* Allocate new "cipherstack" for the result, return with error
1406 * if we cannot get one. */
1407 cipherstack = sk_SSL_CIPHER_new_null();
1408 if (cipherstack == NULL) {
1409 goto err;
1410 }
1411
David Benjamina1c90a52015-05-30 17:03:14 -04001412 in_group_flags = OPENSSL_malloc(kCiphersLen);
Adam Langleyfcf25832014-12-18 17:42:32 -08001413 if (!in_group_flags) {
1414 goto err;
1415 }
1416
1417 /* The cipher selection for the list is done. The ciphers are added
1418 * to the resulting precedence to the STACK_OF(SSL_CIPHER). */
1419 for (curr = head; curr != NULL; curr = curr->next) {
1420 if (curr->active) {
David Benjamin2adb7ec2015-01-11 19:59:06 -05001421 if (!sk_SSL_CIPHER_push(cipherstack, curr->cipher)) {
1422 goto err;
1423 }
Adam Langleyfcf25832014-12-18 17:42:32 -08001424 in_group_flags[num_in_group_flags++] = curr->in_group;
1425 }
1426 }
1427 OPENSSL_free(co_list); /* Not needed any longer */
1428 co_list = NULL;
1429
Adam Langleyfcf25832014-12-18 17:42:32 -08001430 pref_list = OPENSSL_malloc(sizeof(struct ssl_cipher_preference_list_st));
1431 if (!pref_list) {
1432 goto err;
1433 }
1434 pref_list->ciphers = cipherstack;
1435 pref_list->in_group_flags = OPENSSL_malloc(num_in_group_flags);
1436 if (!pref_list->in_group_flags) {
1437 goto err;
1438 }
David Benjamin17cf2cb2016-12-13 01:07:13 -05001439 OPENSSL_memcpy(pref_list->in_group_flags, in_group_flags, num_in_group_flags);
Adam Langleyfcf25832014-12-18 17:42:32 -08001440 OPENSSL_free(in_group_flags);
1441 in_group_flags = NULL;
David Benjamin71f07942015-04-08 02:36:59 -04001442 if (*out_cipher_list != NULL) {
1443 ssl_cipher_preference_list_free(*out_cipher_list);
Adam Langleyfcf25832014-12-18 17:42:32 -08001444 }
David Benjamin71f07942015-04-08 02:36:59 -04001445 *out_cipher_list = pref_list;
Adam Langleyfcf25832014-12-18 17:42:32 -08001446 pref_list = NULL;
1447
Adam Langleyfcf25832014-12-18 17:42:32 -08001448 return cipherstack;
Adam Langley858a88d2014-06-20 12:00:00 -07001449
1450err:
David Benjamin2755a3e2015-04-22 16:17:58 -04001451 OPENSSL_free(co_list);
1452 OPENSSL_free(in_group_flags);
1453 sk_SSL_CIPHER_free(cipherstack);
David Benjamin2755a3e2015-04-22 16:17:58 -04001454 if (pref_list) {
Adam Langleyfcf25832014-12-18 17:42:32 -08001455 OPENSSL_free(pref_list->in_group_flags);
1456 }
David Benjamin2755a3e2015-04-22 16:17:58 -04001457 OPENSSL_free(pref_list);
Adam Langleyfcf25832014-12-18 17:42:32 -08001458 return NULL;
1459}
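
/* Compiled-out usage sketch (not part of the original file): applications do
 * not call ssl_create_cipher_list() directly; they configure a rule string
 * via SSL_CTX_set_cipher_list() and can then walk the resulting stack. The
 * helper name below is hypothetical and printing assumes <stdio.h>. */
#if 0
static void example_dump_cipher_list(const SSL *ssl) {
  STACK_OF(SSL_CIPHER) *ciphers = SSL_get_ciphers(ssl);
  for (size_t i = 0; i < sk_SSL_CIPHER_num(ciphers); i++) {
    const SSL_CIPHER *cipher = sk_SSL_CIPHER_value(ciphers, i);
    printf("%s\n", SSL_CIPHER_get_name(cipher));
  }
}
#endif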

uint32_t SSL_CIPHER_get_id(const SSL_CIPHER *cipher) { return cipher->id; }

uint16_t ssl_cipher_get_value(const SSL_CIPHER *cipher) {
  uint32_t id = cipher->id;
  /* All cipher IDs live in the 0x03xxxxxx (SSLv3/TLS) namespace. */
  assert((id & 0xff000000) == 0x03000000);
  return id & 0xffff;
}
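
/* Compiled-out sketch (illustrative only): the 32-bit id carries the 0x0300
 * prefix on top of the 16-bit value that appears on the wire. This uses
 * TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256 (0xc02b) as the example and assumes
 * the public SSL_get_cipher_by_value() lookup. */
#if 0
static void example_cipher_ids(void) {
  const SSL_CIPHER *cipher = SSL_get_cipher_by_value(0xc02b);
  assert(cipher != NULL);
  assert(SSL_CIPHER_get_id(cipher) == 0x0300c02b);
  assert(ssl_cipher_get_value(cipher) == 0xc02b);
}
#endif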

int SSL_CIPHER_is_AES(const SSL_CIPHER *cipher) {
  return (cipher->algorithm_enc & SSL_AES) != 0;
}

int SSL_CIPHER_has_MD5_HMAC(const SSL_CIPHER *cipher) {
  return 0;
}

int SSL_CIPHER_has_SHA1_HMAC(const SSL_CIPHER *cipher) {
  return (cipher->algorithm_mac & SSL_SHA1) != 0;
}

int SSL_CIPHER_has_SHA256_HMAC(const SSL_CIPHER *cipher) {
  return (cipher->algorithm_mac & SSL_SHA256) != 0;
}

int SSL_CIPHER_is_AEAD(const SSL_CIPHER *cipher) {
  return (cipher->algorithm_mac & SSL_AEAD) != 0;
}

int SSL_CIPHER_is_AESGCM(const SSL_CIPHER *cipher) {
  return (cipher->algorithm_enc & (SSL_AES128GCM | SSL_AES256GCM)) != 0;
}

int SSL_CIPHER_is_AES128GCM(const SSL_CIPHER *cipher) {
  return (cipher->algorithm_enc & SSL_AES128GCM) != 0;
}

int SSL_CIPHER_is_AES128CBC(const SSL_CIPHER *cipher) {
  return (cipher->algorithm_enc & SSL_AES128) != 0;
}

int SSL_CIPHER_is_AES256CBC(const SSL_CIPHER *cipher) {
  return (cipher->algorithm_enc & SSL_AES256) != 0;
}

int SSL_CIPHER_is_CHACHA20POLY1305(const SSL_CIPHER *cipher) {
  return (cipher->algorithm_enc &
          (SSL_CHACHA20POLY1305 | SSL_CHACHA20POLY1305_OLD)) != 0;
}

int SSL_CIPHER_is_NULL(const SSL_CIPHER *cipher) {
  return (cipher->algorithm_enc & SSL_eNULL) != 0;
}

int SSL_CIPHER_is_block_cipher(const SSL_CIPHER *cipher) {
  return (cipher->algorithm_enc & SSL_eNULL) == 0 &&
         cipher->algorithm_mac != SSL_AEAD;
}

int SSL_CIPHER_is_ECDSA(const SSL_CIPHER *cipher) {
  return (cipher->algorithm_auth & SSL_aECDSA) != 0;
}

int SSL_CIPHER_is_DHE(const SSL_CIPHER *cipher) {
  return (cipher->algorithm_mkey & SSL_kDHE) != 0;
}

int SSL_CIPHER_is_ECDHE(const SSL_CIPHER *cipher) {
  return (cipher->algorithm_mkey & SSL_kECDHE) != 0;
}

int SSL_CIPHER_is_static_RSA(const SSL_CIPHER *cipher) {
  return (cipher->algorithm_mkey & SSL_kRSA) != 0;
}
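
/* Compiled-out sketch (not part of the original file): the predicates above
 * compose naturally, e.g. to require a forward-secret AEAD suite. The helper
 * name is hypothetical. */
#if 0
static int example_is_forward_secret_aead(const SSL_CIPHER *cipher) {
  return SSL_CIPHER_is_AEAD(cipher) &&
         (SSL_CIPHER_is_ECDHE(cipher) || SSL_CIPHER_is_DHE(cipher)) &&
         !SSL_CIPHER_is_static_RSA(cipher);
}
#endif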

uint16_t SSL_CIPHER_get_min_version(const SSL_CIPHER *cipher) {
  if (cipher->algorithm_mkey == SSL_kGENERIC ||
      cipher->algorithm_auth == SSL_aGENERIC) {
    return TLS1_3_VERSION;
  }

  if (cipher->algorithm_prf != SSL_HANDSHAKE_MAC_DEFAULT) {
    /* Cipher suites before TLS 1.2 use the default PRF, while all those added
     * afterwards specify a particular hash. */
    return TLS1_2_VERSION;
  }
  return SSL3_VERSION;
}

uint16_t SSL_CIPHER_get_max_version(const SSL_CIPHER *cipher) {
  if (cipher->algorithm_mkey == SSL_kGENERIC ||
      cipher->algorithm_auth == SSL_aGENERIC) {
    return TLS1_3_VERSION;
  }
  return TLS1_2_VERSION;
}
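
/* Compiled-out sketch (illustrative only): a cipher is usable at a protocol
 * version exactly when the version lies within [min_version, max_version].
 * TLS 1.3 suites report TLS1_3_VERSION for both bounds; older suites span up
 * to TLS1_2_VERSION. The helper name is hypothetical. */
#if 0
static int example_cipher_allowed_at(const SSL_CIPHER *cipher,
                                     uint16_t version) {
  return SSL_CIPHER_get_min_version(cipher) <= version &&
         version <= SSL_CIPHER_get_max_version(cipher);
}
#endif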

/* Return the name of the actual cipher being used. */
const char *SSL_CIPHER_get_name(const SSL_CIPHER *cipher) {
  if (cipher != NULL) {
    return cipher->name;
  }

  return "(NONE)";
}

const char *SSL_CIPHER_get_kx_name(const SSL_CIPHER *cipher) {
  if (cipher == NULL) {
    return "";
  }

  switch (cipher->algorithm_mkey) {
    case SSL_kRSA:
      return "RSA";

    case SSL_kDHE:
      switch (cipher->algorithm_auth) {
        case SSL_aRSA:
          return "DHE_RSA";
        default:
          assert(0);
          return "UNKNOWN";
      }

    case SSL_kECDHE:
      switch (cipher->algorithm_auth) {
        case SSL_aECDSA:
          return "ECDHE_ECDSA";
        case SSL_aRSA:
          return "ECDHE_RSA";
        case SSL_aPSK:
          return "ECDHE_PSK";
        default:
          assert(0);
          return "UNKNOWN";
      }

    case SSL_kPSK:
      assert(cipher->algorithm_auth == SSL_aPSK);
      return "PSK";

    case SSL_kGENERIC:
      assert(cipher->algorithm_auth == SSL_aGENERIC);
      return "GENERIC";

    default:
      assert(0);
      return "UNKNOWN";
  }
}

static const char *ssl_cipher_get_enc_name(const SSL_CIPHER *cipher) {
  switch (cipher->algorithm_enc) {
    case SSL_3DES:
      return "3DES_EDE_CBC";
    case SSL_AES128:
      return "AES_128_CBC";
    case SSL_AES256:
      return "AES_256_CBC";
    case SSL_AES128GCM:
      return "AES_128_GCM";
    case SSL_AES256GCM:
      return "AES_256_GCM";
    case SSL_CHACHA20POLY1305:
    case SSL_CHACHA20POLY1305_OLD:
      return "CHACHA20_POLY1305";
    default:
      assert(0);
      return "UNKNOWN";
  }
}

static const char *ssl_cipher_get_prf_name(const SSL_CIPHER *cipher) {
  switch (cipher->algorithm_prf) {
    case SSL_HANDSHAKE_MAC_DEFAULT:
      /* Before TLS 1.2, the PRF component is the hash used in the HMAC, which
       * is SHA-1 for all supported ciphers. */
      assert(cipher->algorithm_mac == SSL_SHA1);
      return "SHA";
    case SSL_HANDSHAKE_MAC_SHA256:
      return "SHA256";
    case SSL_HANDSHAKE_MAC_SHA384:
      return "SHA384";
  }
  assert(0);
  return "UNKNOWN";
}

char *SSL_CIPHER_get_rfc_name(const SSL_CIPHER *cipher) {
  if (cipher == NULL) {
    return NULL;
  }

  const char *kx_name = SSL_CIPHER_get_kx_name(cipher);
  const char *enc_name = ssl_cipher_get_enc_name(cipher);
  const char *prf_name = ssl_cipher_get_prf_name(cipher);

  /* The final name is TLS_{kx_name}_WITH_{enc_name}_{prf_name} or
   * TLS_{enc_name}_{prf_name} depending on whether the cipher is AEAD-only. */
  size_t len = 4 + strlen(enc_name) + 1 + strlen(prf_name) + 1;

  if (cipher->algorithm_mkey != SSL_kGENERIC) {
    len += strlen(kx_name) + 6;
  }

  char *ret = OPENSSL_malloc(len);
  if (ret == NULL) {
    return NULL;
  }

  if (BUF_strlcpy(ret, "TLS_", len) >= len ||
      (cipher->algorithm_mkey != SSL_kGENERIC &&
       (BUF_strlcat(ret, kx_name, len) >= len ||
        BUF_strlcat(ret, "_WITH_", len) >= len)) ||
      BUF_strlcat(ret, enc_name, len) >= len ||
      BUF_strlcat(ret, "_", len) >= len ||
      BUF_strlcat(ret, prf_name, len) >= len) {
    assert(0);
    OPENSSL_free(ret);
    return NULL;
  }

  assert(strlen(ret) + 1 == len);
  return ret;
}
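
/* Compiled-out sketch (not part of the original file): the RFC name is
 * heap-allocated, so the caller must release it with OPENSSL_free(). For the
 * OpenSSL-named cipher ECDHE-RSA-AES128-GCM-SHA256 the result is
 * "TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256". Printing assumes <stdio.h>. */
#if 0
static void example_print_rfc_name(const SSL_CIPHER *cipher) {
  char *rfc_name = SSL_CIPHER_get_rfc_name(cipher);
  if (rfc_name != NULL) {
    printf("%s\n", rfc_name);
    OPENSSL_free(rfc_name);
  }
}
#endif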

int SSL_CIPHER_get_bits(const SSL_CIPHER *cipher, int *out_alg_bits) {
  if (cipher == NULL) {
    return 0;
  }

  int alg_bits, strength_bits;
  switch (cipher->algorithm_enc) {
    case SSL_AES128:
    case SSL_AES128GCM:
      alg_bits = 128;
      strength_bits = 128;
      break;

    case SSL_AES256:
    case SSL_AES256GCM:
#if !defined(BORINGSSL_ANDROID_SYSTEM)
    case SSL_CHACHA20POLY1305_OLD:
#endif
    case SSL_CHACHA20POLY1305:
      alg_bits = 256;
      strength_bits = 256;
      break;

    case SSL_3DES:
      alg_bits = 168;
      strength_bits = 112;
      break;

    case SSL_eNULL:
      alg_bits = 0;
      strength_bits = 0;
      break;

    default:
      assert(0);
      alg_bits = 0;
      strength_bits = 0;
  }

  if (out_alg_bits != NULL) {
    *out_alg_bits = alg_bits;
  }
  return strength_bits;
}
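
/* Compiled-out sketch (illustrative only): SSL_CIPHER_get_bits() returns the
 * effective strength and, optionally, the nominal key size; the two differ
 * for 3DES (112 vs. 168). Printing assumes <stdio.h>. */
#if 0
static void example_cipher_bits(const SSL_CIPHER *cipher) {
  int alg_bits;
  int strength_bits = SSL_CIPHER_get_bits(cipher, &alg_bits);
  printf("strength=%d nominal=%d\n", strength_bits, alg_bits);
}
#endif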

const char *SSL_CIPHER_description(const SSL_CIPHER *cipher, char *buf,
                                   int len) {
  const char *kx, *au, *enc, *mac;
  uint32_t alg_mkey, alg_auth, alg_enc, alg_mac;

  alg_mkey = cipher->algorithm_mkey;
  alg_auth = cipher->algorithm_auth;
  alg_enc = cipher->algorithm_enc;
  alg_mac = cipher->algorithm_mac;

  switch (alg_mkey) {
    case SSL_kRSA:
      kx = "RSA";
      break;

    case SSL_kDHE:
      kx = "DH";
      break;

    case SSL_kECDHE:
      kx = "ECDH";
      break;

    case SSL_kPSK:
      kx = "PSK";
      break;

    case SSL_kGENERIC:
      kx = "GENERIC";
      break;

    default:
      kx = "unknown";
  }

  switch (alg_auth) {
    case SSL_aRSA:
      au = "RSA";
      break;

    case SSL_aECDSA:
      au = "ECDSA";
      break;

    case SSL_aPSK:
      au = "PSK";
      break;

    case SSL_aGENERIC:
      au = "GENERIC";
      break;

    default:
      au = "unknown";
      break;
  }

  switch (alg_enc) {
    case SSL_3DES:
      enc = "3DES(168)";
      break;

    case SSL_AES128:
      enc = "AES(128)";
      break;

    case SSL_AES256:
      enc = "AES(256)";
      break;

    case SSL_AES128GCM:
      enc = "AESGCM(128)";
      break;

    case SSL_AES256GCM:
      enc = "AESGCM(256)";
      break;

    case SSL_CHACHA20POLY1305_OLD:
      enc = "ChaCha20-Poly1305-Old";
      break;

    case SSL_CHACHA20POLY1305:
      enc = "ChaCha20-Poly1305";
      break;

    case SSL_eNULL:
      enc = "None";
      break;

    default:
      enc = "unknown";
      break;
  }

  switch (alg_mac) {
    case SSL_SHA1:
      mac = "SHA1";
      break;

    case SSL_SHA256:
      mac = "SHA256";
      break;

    case SSL_SHA384:
      mac = "SHA384";
      break;

    case SSL_AEAD:
      mac = "AEAD";
      break;

    default:
      mac = "unknown";
      break;
  }

  if (buf == NULL) {
    len = 128;
    buf = OPENSSL_malloc(len);
    if (buf == NULL) {
      return NULL;
    }
  } else if (len < 128) {
    return "Buffer too small";
  }

  BIO_snprintf(buf, len, "%-23s Kx=%-8s Au=%-4s Enc=%-9s Mac=%-4s\n",
               cipher->name, kx, au, enc, mac);
  return buf;
}
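
/* Compiled-out sketch (not part of the original file): callers must supply a
 * buffer of at least 128 bytes, or pass NULL and later OPENSSL_free() the
 * returned allocation. Printing assumes <stdio.h>. */
#if 0
static void example_describe_cipher(const SSL_CIPHER *cipher) {
  char buf[128];
  printf("%s", SSL_CIPHER_description(cipher, buf, sizeof(buf)));
}
#endif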

const char *SSL_CIPHER_get_version(const SSL_CIPHER *cipher) {
  return "TLSv1/SSLv3";
}

COMP_METHOD *SSL_COMP_get_compression_methods(void) { return NULL; }

int SSL_COMP_add_compression_method(int id, COMP_METHOD *cm) { return 1; }

const char *SSL_COMP_get_name(const COMP_METHOD *comp) { return NULL; }

void SSL_COMP_free_compression_methods(void) {}

int ssl_cipher_get_key_type(const SSL_CIPHER *cipher) {
  uint32_t alg_a = cipher->algorithm_auth;

  if (alg_a & SSL_aECDSA) {
    return EVP_PKEY_EC;
  } else if (alg_a & SSL_aRSA) {
    return EVP_PKEY_RSA;
  }

  return EVP_PKEY_NONE;
}
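
/* Compiled-out sketch (illustrative only): the EVP_PKEY type reported above
 * is what certificate-matching logic compares against the configured private
 * key. The check below is a hypothetical simplification; it ignores ciphers
 * that need no certificate (EVP_PKEY_NONE). */
#if 0
static int example_key_matches_cipher(const EVP_PKEY *pkey,
                                      const SSL_CIPHER *cipher) {
  return EVP_PKEY_id(pkey) == ssl_cipher_get_key_type(cipher);
}
#endif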

int ssl_cipher_uses_certificate_auth(const SSL_CIPHER *cipher) {
  return (cipher->algorithm_auth & SSL_aCERT) != 0;
}

int ssl_cipher_requires_server_key_exchange(const SSL_CIPHER *cipher) {
  /* Ephemeral Diffie-Hellman key exchanges require a ServerKeyExchange. */
  if (cipher->algorithm_mkey & SSL_kDHE ||
      cipher->algorithm_mkey & SSL_kECDHE) {
    return 1;
  }

  /* It is optional in all others. */
  return 0;
}

size_t ssl_cipher_get_record_split_len(const SSL_CIPHER *cipher) {
  size_t block_size;
  switch (cipher->algorithm_enc) {
    case SSL_3DES:
      block_size = 8;
      break;
    case SSL_AES128:
    case SSL_AES256:
      block_size = 16;
      break;
    default:
      return 0;
  }

  /* All supported TLS 1.0 ciphers use SHA-1. */
  assert(cipher->algorithm_mac == SSL_SHA1);
  size_t ret = 1 + SHA_DIGEST_LENGTH;
  ret += block_size - (ret % block_size);
  return ret;
}
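
/* Compiled-out sketch (not part of the original file): worked example of the
 * record-split length above. One record-type byte plus a 20-byte SHA-1 MAC is
 * 21 bytes, which pads up to 32 for a 16-byte AES block and to 24 for 3DES. */
#if 0
static void example_record_split_len(void) {
  size_t ret = 1 + SHA_DIGEST_LENGTH;      /* 1 + 20 = 21 */
  size_t block_size = 16;                  /* AES-CBC block size */
  ret += block_size - (ret % block_size);  /* 21 + 11 = 32 */
  assert(ret == 32);
}
#endif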