blob: e58d8893fc64e5080df183a087afea842c099cbd [file] [log] [blame]
Adam Langley95c29f32014-06-20 12:00:00 -07001/* Copyright (C) 1995-1998 Eric Young (eay@cryptsoft.com)
2 * All rights reserved.
3 *
4 * This package is an SSL implementation written
5 * by Eric Young (eay@cryptsoft.com).
6 * The implementation was written so as to conform with Netscapes SSL.
7 *
8 * This library is free for commercial and non-commercial use as long as
9 * the following conditions are aheared to. The following conditions
10 * apply to all code found in this distribution, be it the RC4, RSA,
11 * lhash, DES, etc., code; not just the SSL code. The SSL documentation
12 * included with this distribution is covered by the same copyright terms
13 * except that the holder is Tim Hudson (tjh@cryptsoft.com).
14 *
15 * Copyright remains Eric Young's, and as such any Copyright notices in
16 * the code are not to be removed.
17 * If this package is used in a product, Eric Young should be given attribution
18 * as the author of the parts of the library used.
19 * This can be in the form of a textual message at program startup or
20 * in documentation (online or textual) provided with the package.
21 *
22 * Redistribution and use in source and binary forms, with or without
23 * modification, are permitted provided that the following conditions
24 * are met:
25 * 1. Redistributions of source code must retain the copyright
26 * notice, this list of conditions and the following disclaimer.
27 * 2. Redistributions in binary form must reproduce the above copyright
28 * notice, this list of conditions and the following disclaimer in the
29 * documentation and/or other materials provided with the distribution.
30 * 3. All advertising materials mentioning features or use of this software
31 * must display the following acknowledgement:
32 * "This product includes cryptographic software written by
33 * Eric Young (eay@cryptsoft.com)"
34 * The word 'cryptographic' can be left out if the rouines from the library
35 * being used are not cryptographic related :-).
36 * 4. If you include any Windows specific code (or a derivative thereof) from
37 * the apps directory (application code) you must include an acknowledgement:
38 * "This product includes software written by Tim Hudson (tjh@cryptsoft.com)"
39 *
40 * THIS SOFTWARE IS PROVIDED BY ERIC YOUNG ``AS IS'' AND
41 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
42 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
43 * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
44 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
45 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
46 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
47 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
48 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
49 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
50 * SUCH DAMAGE.
51 *
52 * The licence and distribution terms for any publically available version or
53 * derivative of this code cannot be changed. i.e. this code cannot simply be
54 * copied and put under another distribution licence
55 * [including the GNU Public Licence.]
56 */
57/* ====================================================================
58 * Copyright (c) 1998-2007 The OpenSSL Project. All rights reserved.
59 *
60 * Redistribution and use in source and binary forms, with or without
61 * modification, are permitted provided that the following conditions
62 * are met:
63 *
64 * 1. Redistributions of source code must retain the above copyright
65 * notice, this list of conditions and the following disclaimer.
66 *
67 * 2. Redistributions in binary form must reproduce the above copyright
68 * notice, this list of conditions and the following disclaimer in
69 * the documentation and/or other materials provided with the
70 * distribution.
71 *
72 * 3. All advertising materials mentioning features or use of this
73 * software must display the following acknowledgment:
74 * "This product includes software developed by the OpenSSL Project
75 * for use in the OpenSSL Toolkit. (http://www.openssl.org/)"
76 *
77 * 4. The names "OpenSSL Toolkit" and "OpenSSL Project" must not be used to
78 * endorse or promote products derived from this software without
79 * prior written permission. For written permission, please contact
80 * openssl-core@openssl.org.
81 *
82 * 5. Products derived from this software may not be called "OpenSSL"
83 * nor may "OpenSSL" appear in their names without prior written
84 * permission of the OpenSSL Project.
85 *
86 * 6. Redistributions of any form whatsoever must retain the following
87 * acknowledgment:
88 * "This product includes software developed by the OpenSSL Project
89 * for use in the OpenSSL Toolkit (http://www.openssl.org/)"
90 *
91 * THIS SOFTWARE IS PROVIDED BY THE OpenSSL PROJECT ``AS IS'' AND ANY
92 * EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
93 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
94 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE OpenSSL PROJECT OR
95 * ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
96 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
97 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
98 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
99 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
100 * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
101 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
102 * OF THE POSSIBILITY OF SUCH DAMAGE.
103 * ====================================================================
104 *
105 * This product includes cryptographic software written by Eric Young
106 * (eay@cryptsoft.com). This product includes software written by Tim
107 * Hudson (tjh@cryptsoft.com).
108 *
109 */
110/* ====================================================================
111 * Copyright 2002 Sun Microsystems, Inc. ALL RIGHTS RESERVED.
112 * ECC cipher suite support in OpenSSL originally developed by
113 * SUN MICROSYSTEMS, INC., and contributed to the OpenSSL project.
114 */
115/* ====================================================================
116 * Copyright 2005 Nokia. All rights reserved.
117 *
118 * The portions of the attached software ("Contribution") is developed by
119 * Nokia Corporation and is licensed pursuant to the OpenSSL open source
120 * license.
121 *
122 * The Contribution, originally written by Mika Kousa and Pasi Eronen of
123 * Nokia Corporation, consists of the "PSK" (Pre-Shared Key) ciphersuites
124 * support (see RFC 4279) to OpenSSL.
125 *
126 * No patent licenses or other rights except those expressly stated in
127 * the OpenSSL open source license shall be deemed granted or received
128 * expressly, by implication, estoppel, or otherwise.
129 *
130 * No assurances are provided by Nokia that the Contribution does not
131 * infringe the patent or other intellectual property rights of any third
132 * party or that the license provides you with all the necessary rights
133 * to make use of the Contribution.
134 *
135 * THE SOFTWARE IS PROVIDED "AS IS" WITHOUT WARRANTY OF ANY KIND. IN
136 * ADDITION TO THE DISCLAIMERS INCLUDED IN THE LICENSE, NOKIA
137 * SPECIFICALLY DISCLAIMS ANY LIABILITY FOR CLAIMS BROUGHT BY YOU OR ANY
138 * OTHER ENTITY BASED ON INFRINGEMENT OF INTELLECTUAL PROPERTY RIGHTS OR
139 * OTHERWISE. */
140
David Benjamin9e4e01e2015-09-15 01:48:04 -0400141#include <openssl/ssl.h>
142
Adam Langley95c29f32014-06-20 12:00:00 -0700143#include <assert.h>
David Benjaminf0ae1702015-04-07 23:05:04 -0400144#include <string.h>
Adam Langley95c29f32014-06-20 12:00:00 -0700145
David Benjamin71f07942015-04-08 02:36:59 -0400146#include <openssl/buf.h>
David Benjaminf0ae1702015-04-07 23:05:04 -0400147#include <openssl/err.h>
David Benjaminea72bd02014-12-21 21:27:41 -0500148#include <openssl/md5.h>
Adam Langley95c29f32014-06-20 12:00:00 -0700149#include <openssl/mem.h>
David Benjaminea72bd02014-12-21 21:27:41 -0500150#include <openssl/sha.h>
David Benjamin71f07942015-04-08 02:36:59 -0400151#include <openssl/stack.h>
Adam Langley95c29f32014-06-20 12:00:00 -0700152
David Benjamin2ee94aa2015-04-07 22:38:30 -0400153#include "internal.h"
Steven Valdezcb966542016-08-17 16:56:14 -0400154#include "../crypto/internal.h"
Adam Langley95c29f32014-06-20 12:00:00 -0700155
Adam Langley95c29f32014-06-20 12:00:00 -0700156
David Benjamina1c90a52015-05-30 17:03:14 -0400157/* kCiphers is an array of all supported ciphers, sorted by id. */
David Benjamin20c37312015-11-11 21:33:18 -0800158static const SSL_CIPHER kCiphers[] = {
David Benjamina1c90a52015-05-30 17:03:14 -0400159 /* The RSA ciphers */
Matt Braithwaiteaf096752015-09-02 19:48:16 -0700160 /* Cipher 02 */
161 {
David Benjaminff2df332015-11-18 10:01:16 -0500162 SSL3_TXT_RSA_NULL_SHA,
163 SSL3_CK_RSA_NULL_SHA,
164 SSL_kRSA,
165 SSL_aRSA,
166 SSL_eNULL,
167 SSL_SHA1,
168 SSL_HANDSHAKE_MAC_DEFAULT,
Matt Braithwaiteaf096752015-09-02 19:48:16 -0700169 },
170
Matt Braithwaite9c8c4182016-08-24 14:36:54 -0700171#ifdef BORINGSSL_ENABLE_RC4_TLS
David Benjamina1c90a52015-05-30 17:03:14 -0400172 /* Cipher 04 */
173 {
David Benjaminff2df332015-11-18 10:01:16 -0500174 SSL3_TXT_RSA_RC4_128_MD5,
175 SSL3_CK_RSA_RC4_128_MD5,
176 SSL_kRSA,
177 SSL_aRSA,
178 SSL_RC4,
179 SSL_MD5,
180 SSL_HANDSHAKE_MAC_DEFAULT,
David Benjamina1c90a52015-05-30 17:03:14 -0400181 },
182
183 /* Cipher 05 */
184 {
David Benjaminff2df332015-11-18 10:01:16 -0500185 SSL3_TXT_RSA_RC4_128_SHA,
186 SSL3_CK_RSA_RC4_128_SHA,
187 SSL_kRSA,
188 SSL_aRSA,
189 SSL_RC4,
190 SSL_SHA1,
191 SSL_HANDSHAKE_MAC_DEFAULT,
David Benjamina1c90a52015-05-30 17:03:14 -0400192 },
Matt Braithwaite9c8c4182016-08-24 14:36:54 -0700193#endif
David Benjamina1c90a52015-05-30 17:03:14 -0400194
195 /* Cipher 0A */
196 {
David Benjaminff2df332015-11-18 10:01:16 -0500197 SSL3_TXT_RSA_DES_192_CBC3_SHA,
198 SSL3_CK_RSA_DES_192_CBC3_SHA,
199 SSL_kRSA,
200 SSL_aRSA,
201 SSL_3DES,
202 SSL_SHA1,
David Benjamin9f2e2772015-11-18 09:59:43 -0500203 SSL_HANDSHAKE_MAC_DEFAULT,
David Benjamina1c90a52015-05-30 17:03:14 -0400204 },
205
206
207 /* New AES ciphersuites */
208
209 /* Cipher 2F */
210 {
David Benjaminff2df332015-11-18 10:01:16 -0500211 TLS1_TXT_RSA_WITH_AES_128_SHA,
212 TLS1_CK_RSA_WITH_AES_128_SHA,
213 SSL_kRSA,
214 SSL_aRSA,
215 SSL_AES128,
216 SSL_SHA1,
David Benjamin9f2e2772015-11-18 09:59:43 -0500217 SSL_HANDSHAKE_MAC_DEFAULT,
David Benjamina1c90a52015-05-30 17:03:14 -0400218 },
219
220 /* Cipher 33 */
221 {
David Benjaminff2df332015-11-18 10:01:16 -0500222 TLS1_TXT_DHE_RSA_WITH_AES_128_SHA,
223 TLS1_CK_DHE_RSA_WITH_AES_128_SHA,
224 SSL_kDHE,
225 SSL_aRSA,
226 SSL_AES128,
227 SSL_SHA1,
David Benjamin9f2e2772015-11-18 09:59:43 -0500228 SSL_HANDSHAKE_MAC_DEFAULT,
David Benjamina1c90a52015-05-30 17:03:14 -0400229 },
230
231 /* Cipher 35 */
232 {
David Benjaminff2df332015-11-18 10:01:16 -0500233 TLS1_TXT_RSA_WITH_AES_256_SHA,
234 TLS1_CK_RSA_WITH_AES_256_SHA,
235 SSL_kRSA,
236 SSL_aRSA,
237 SSL_AES256,
238 SSL_SHA1,
David Benjamin9f2e2772015-11-18 09:59:43 -0500239 SSL_HANDSHAKE_MAC_DEFAULT,
David Benjamina1c90a52015-05-30 17:03:14 -0400240 },
241
242 /* Cipher 39 */
243 {
David Benjaminff2df332015-11-18 10:01:16 -0500244 TLS1_TXT_DHE_RSA_WITH_AES_256_SHA,
245 TLS1_CK_DHE_RSA_WITH_AES_256_SHA,
246 SSL_kDHE,
247 SSL_aRSA,
248 SSL_AES256,
249 SSL_SHA1,
David Benjamin9f2e2772015-11-18 09:59:43 -0500250 SSL_HANDSHAKE_MAC_DEFAULT,
David Benjamina1c90a52015-05-30 17:03:14 -0400251 },
252
253
254 /* TLS v1.2 ciphersuites */
255
256 /* Cipher 3C */
257 {
David Benjaminff2df332015-11-18 10:01:16 -0500258 TLS1_TXT_RSA_WITH_AES_128_SHA256,
259 TLS1_CK_RSA_WITH_AES_128_SHA256,
260 SSL_kRSA,
261 SSL_aRSA,
262 SSL_AES128,
263 SSL_SHA256,
David Benjamin9f2e2772015-11-18 09:59:43 -0500264 SSL_HANDSHAKE_MAC_SHA256,
David Benjamina1c90a52015-05-30 17:03:14 -0400265 },
266
267 /* Cipher 3D */
268 {
David Benjaminff2df332015-11-18 10:01:16 -0500269 TLS1_TXT_RSA_WITH_AES_256_SHA256,
270 TLS1_CK_RSA_WITH_AES_256_SHA256,
271 SSL_kRSA,
272 SSL_aRSA,
273 SSL_AES256,
274 SSL_SHA256,
David Benjamin9f2e2772015-11-18 09:59:43 -0500275 SSL_HANDSHAKE_MAC_SHA256,
David Benjamina1c90a52015-05-30 17:03:14 -0400276 },
277
278 /* Cipher 67 */
279 {
280 TLS1_TXT_DHE_RSA_WITH_AES_128_SHA256,
David Benjaminff2df332015-11-18 10:01:16 -0500281 TLS1_CK_DHE_RSA_WITH_AES_128_SHA256,
282 SSL_kDHE,
283 SSL_aRSA,
284 SSL_AES128,
David Benjamind6e9eec2015-11-18 09:48:55 -0500285 SSL_SHA256,
David Benjamin9f2e2772015-11-18 09:59:43 -0500286 SSL_HANDSHAKE_MAC_SHA256,
David Benjamina1c90a52015-05-30 17:03:14 -0400287 },
288
289 /* Cipher 6B */
290 {
291 TLS1_TXT_DHE_RSA_WITH_AES_256_SHA256,
David Benjaminff2df332015-11-18 10:01:16 -0500292 TLS1_CK_DHE_RSA_WITH_AES_256_SHA256,
293 SSL_kDHE,
294 SSL_aRSA,
295 SSL_AES256,
David Benjamind6e9eec2015-11-18 09:48:55 -0500296 SSL_SHA256,
David Benjamin9f2e2772015-11-18 09:59:43 -0500297 SSL_HANDSHAKE_MAC_SHA256,
David Benjamina1c90a52015-05-30 17:03:14 -0400298 },
299
Adam Langley85bc5602015-06-09 09:54:04 -0700300 /* PSK cipher suites. */
301
Matt Braithwaite9c8c4182016-08-24 14:36:54 -0700302#ifdef BORINGSSL_ENABLE_RC4_TLS
David Benjamina1c90a52015-05-30 17:03:14 -0400303 /* Cipher 8A */
304 {
David Benjaminff2df332015-11-18 10:01:16 -0500305 TLS1_TXT_PSK_WITH_RC4_128_SHA,
306 TLS1_CK_PSK_WITH_RC4_128_SHA,
307 SSL_kPSK,
308 SSL_aPSK,
309 SSL_RC4,
310 SSL_SHA1,
David Benjamin9f2e2772015-11-18 09:59:43 -0500311 SSL_HANDSHAKE_MAC_DEFAULT,
David Benjamina1c90a52015-05-30 17:03:14 -0400312 },
Matt Braithwaite9c8c4182016-08-24 14:36:54 -0700313#endif
David Benjamina1c90a52015-05-30 17:03:14 -0400314
315 /* Cipher 8C */
316 {
David Benjaminff2df332015-11-18 10:01:16 -0500317 TLS1_TXT_PSK_WITH_AES_128_CBC_SHA,
318 TLS1_CK_PSK_WITH_AES_128_CBC_SHA,
319 SSL_kPSK,
320 SSL_aPSK,
321 SSL_AES128,
322 SSL_SHA1,
David Benjamin9f2e2772015-11-18 09:59:43 -0500323 SSL_HANDSHAKE_MAC_DEFAULT,
David Benjamina1c90a52015-05-30 17:03:14 -0400324 },
325
326 /* Cipher 8D */
327 {
David Benjaminff2df332015-11-18 10:01:16 -0500328 TLS1_TXT_PSK_WITH_AES_256_CBC_SHA,
329 TLS1_CK_PSK_WITH_AES_256_CBC_SHA,
330 SSL_kPSK,
331 SSL_aPSK,
332 SSL_AES256,
333 SSL_SHA1,
David Benjamin9f2e2772015-11-18 09:59:43 -0500334 SSL_HANDSHAKE_MAC_DEFAULT,
David Benjamina1c90a52015-05-30 17:03:14 -0400335 },
336
David Benjamina1c90a52015-05-30 17:03:14 -0400337 /* GCM ciphersuites from RFC5288 */
338
339 /* Cipher 9C */
340 {
341 TLS1_TXT_RSA_WITH_AES_128_GCM_SHA256,
David Benjaminff2df332015-11-18 10:01:16 -0500342 TLS1_CK_RSA_WITH_AES_128_GCM_SHA256,
343 SSL_kRSA,
344 SSL_aRSA,
345 SSL_AES128GCM,
David Benjamind6e9eec2015-11-18 09:48:55 -0500346 SSL_AEAD,
David Benjaminb2a985b2015-06-21 15:13:57 -0400347 SSL_HANDSHAKE_MAC_SHA256,
David Benjamina1c90a52015-05-30 17:03:14 -0400348 },
349
350 /* Cipher 9D */
351 {
352 TLS1_TXT_RSA_WITH_AES_256_GCM_SHA384,
David Benjaminff2df332015-11-18 10:01:16 -0500353 TLS1_CK_RSA_WITH_AES_256_GCM_SHA384,
354 SSL_kRSA,
355 SSL_aRSA,
356 SSL_AES256GCM,
David Benjamind6e9eec2015-11-18 09:48:55 -0500357 SSL_AEAD,
David Benjaminb2a985b2015-06-21 15:13:57 -0400358 SSL_HANDSHAKE_MAC_SHA384,
David Benjamina1c90a52015-05-30 17:03:14 -0400359 },
360
361 /* Cipher 9E */
362 {
363 TLS1_TXT_DHE_RSA_WITH_AES_128_GCM_SHA256,
David Benjaminff2df332015-11-18 10:01:16 -0500364 TLS1_CK_DHE_RSA_WITH_AES_128_GCM_SHA256,
365 SSL_kDHE,
366 SSL_aRSA,
367 SSL_AES128GCM,
David Benjamind6e9eec2015-11-18 09:48:55 -0500368 SSL_AEAD,
David Benjaminb2a985b2015-06-21 15:13:57 -0400369 SSL_HANDSHAKE_MAC_SHA256,
David Benjamina1c90a52015-05-30 17:03:14 -0400370 },
371
372 /* Cipher 9F */
373 {
374 TLS1_TXT_DHE_RSA_WITH_AES_256_GCM_SHA384,
David Benjaminff2df332015-11-18 10:01:16 -0500375 TLS1_CK_DHE_RSA_WITH_AES_256_GCM_SHA384,
376 SSL_kDHE,
377 SSL_aRSA,
378 SSL_AES256GCM,
David Benjamind6e9eec2015-11-18 09:48:55 -0500379 SSL_AEAD,
David Benjaminb2a985b2015-06-21 15:13:57 -0400380 SSL_HANDSHAKE_MAC_SHA384,
David Benjamina1c90a52015-05-30 17:03:14 -0400381 },
382
Matt Braithwaite053931e2016-05-25 12:06:05 -0700383 /* CECPQ1 (combined elliptic curve + post-quantum) suites. */
384
385 /* Cipher 16B7 */
386 {
387 TLS1_TXT_CECPQ1_RSA_WITH_CHACHA20_POLY1305_SHA256,
388 TLS1_CK_CECPQ1_RSA_WITH_CHACHA20_POLY1305_SHA256,
389 SSL_kCECPQ1,
390 SSL_aRSA,
391 SSL_CHACHA20POLY1305,
392 SSL_AEAD,
393 SSL_HANDSHAKE_MAC_SHA256,
394 },
395
396 /* Cipher 16B8 */
397 {
398 TLS1_TXT_CECPQ1_ECDSA_WITH_CHACHA20_POLY1305_SHA256,
399 TLS1_CK_CECPQ1_ECDSA_WITH_CHACHA20_POLY1305_SHA256,
400 SSL_kCECPQ1,
401 SSL_aECDSA,
402 SSL_CHACHA20POLY1305,
403 SSL_AEAD,
404 SSL_HANDSHAKE_MAC_SHA256,
405 },
406
407 /* Cipher 16B9 */
408 {
409 TLS1_TXT_CECPQ1_RSA_WITH_AES_256_GCM_SHA384,
410 TLS1_CK_CECPQ1_RSA_WITH_AES_256_GCM_SHA384,
411 SSL_kCECPQ1,
412 SSL_aRSA,
413 SSL_AES256GCM,
414 SSL_AEAD,
415 SSL_HANDSHAKE_MAC_SHA384,
416 },
417
418 /* Cipher 16BA */
419 {
420 TLS1_TXT_CECPQ1_ECDSA_WITH_AES_256_GCM_SHA384,
421 TLS1_CK_CECPQ1_ECDSA_WITH_AES_256_GCM_SHA384,
422 SSL_kCECPQ1,
423 SSL_aECDSA,
424 SSL_AES256GCM,
425 SSL_AEAD,
426 SSL_HANDSHAKE_MAC_SHA384,
427 },
428
Matt Braithwaite9c8c4182016-08-24 14:36:54 -0700429#ifdef BORINGSSL_ENABLE_RC4_TLS
David Benjamina1c90a52015-05-30 17:03:14 -0400430 /* Cipher C007 */
431 {
432 TLS1_TXT_ECDHE_ECDSA_WITH_RC4_128_SHA,
David Benjaminff2df332015-11-18 10:01:16 -0500433 TLS1_CK_ECDHE_ECDSA_WITH_RC4_128_SHA,
434 SSL_kECDHE,
435 SSL_aECDSA,
436 SSL_RC4,
437 SSL_SHA1,
438 SSL_HANDSHAKE_MAC_DEFAULT,
David Benjamina1c90a52015-05-30 17:03:14 -0400439 },
Matt Braithwaite9c8c4182016-08-24 14:36:54 -0700440#endif
David Benjamina1c90a52015-05-30 17:03:14 -0400441
442 /* Cipher C009 */
443 {
444 TLS1_TXT_ECDHE_ECDSA_WITH_AES_128_CBC_SHA,
David Benjaminff2df332015-11-18 10:01:16 -0500445 TLS1_CK_ECDHE_ECDSA_WITH_AES_128_CBC_SHA,
446 SSL_kECDHE,
447 SSL_aECDSA,
448 SSL_AES128,
449 SSL_SHA1,
David Benjamin9f2e2772015-11-18 09:59:43 -0500450 SSL_HANDSHAKE_MAC_DEFAULT,
David Benjamina1c90a52015-05-30 17:03:14 -0400451 },
452
453 /* Cipher C00A */
454 {
455 TLS1_TXT_ECDHE_ECDSA_WITH_AES_256_CBC_SHA,
David Benjaminff2df332015-11-18 10:01:16 -0500456 TLS1_CK_ECDHE_ECDSA_WITH_AES_256_CBC_SHA,
457 SSL_kECDHE,
458 SSL_aECDSA,
459 SSL_AES256,
460 SSL_SHA1,
David Benjamin9f2e2772015-11-18 09:59:43 -0500461 SSL_HANDSHAKE_MAC_DEFAULT,
David Benjamina1c90a52015-05-30 17:03:14 -0400462 },
463
Matt Braithwaite9c8c4182016-08-24 14:36:54 -0700464#ifdef BORINGSSL_ENABLE_RC4_TLS
David Benjamina1c90a52015-05-30 17:03:14 -0400465 /* Cipher C011 */
466 {
David Benjaminff2df332015-11-18 10:01:16 -0500467 TLS1_TXT_ECDHE_RSA_WITH_RC4_128_SHA,
468 TLS1_CK_ECDHE_RSA_WITH_RC4_128_SHA,
469 SSL_kECDHE,
470 SSL_aRSA,
471 SSL_RC4,
472 SSL_SHA1,
David Benjamin9f2e2772015-11-18 09:59:43 -0500473 SSL_HANDSHAKE_MAC_DEFAULT,
David Benjamina1c90a52015-05-30 17:03:14 -0400474 },
Matt Braithwaite9c8c4182016-08-24 14:36:54 -0700475#endif
David Benjamina1c90a52015-05-30 17:03:14 -0400476
477 /* Cipher C013 */
478 {
479 TLS1_TXT_ECDHE_RSA_WITH_AES_128_CBC_SHA,
David Benjaminff2df332015-11-18 10:01:16 -0500480 TLS1_CK_ECDHE_RSA_WITH_AES_128_CBC_SHA,
481 SSL_kECDHE,
482 SSL_aRSA,
483 SSL_AES128,
David Benjamind6e9eec2015-11-18 09:48:55 -0500484 SSL_SHA1,
David Benjamin9f2e2772015-11-18 09:59:43 -0500485 SSL_HANDSHAKE_MAC_DEFAULT,
David Benjamina1c90a52015-05-30 17:03:14 -0400486 },
487
488 /* Cipher C014 */
489 {
490 TLS1_TXT_ECDHE_RSA_WITH_AES_256_CBC_SHA,
David Benjaminff2df332015-11-18 10:01:16 -0500491 TLS1_CK_ECDHE_RSA_WITH_AES_256_CBC_SHA,
492 SSL_kECDHE,
493 SSL_aRSA,
494 SSL_AES256,
David Benjamind6e9eec2015-11-18 09:48:55 -0500495 SSL_SHA1,
David Benjamin9f2e2772015-11-18 09:59:43 -0500496 SSL_HANDSHAKE_MAC_DEFAULT,
David Benjamina1c90a52015-05-30 17:03:14 -0400497 },
498
499
500 /* HMAC based TLS v1.2 ciphersuites from RFC5289 */
501
502 /* Cipher C023 */
503 {
504 TLS1_TXT_ECDHE_ECDSA_WITH_AES_128_SHA256,
David Benjaminff2df332015-11-18 10:01:16 -0500505 TLS1_CK_ECDHE_ECDSA_WITH_AES_128_SHA256,
506 SSL_kECDHE,
507 SSL_aECDSA,
508 SSL_AES128,
509 SSL_SHA256,
David Benjamin9f2e2772015-11-18 09:59:43 -0500510 SSL_HANDSHAKE_MAC_SHA256,
David Benjamina1c90a52015-05-30 17:03:14 -0400511 },
512
513 /* Cipher C024 */
514 {
515 TLS1_TXT_ECDHE_ECDSA_WITH_AES_256_SHA384,
David Benjaminff2df332015-11-18 10:01:16 -0500516 TLS1_CK_ECDHE_ECDSA_WITH_AES_256_SHA384,
517 SSL_kECDHE,
518 SSL_aECDSA,
519 SSL_AES256,
520 SSL_SHA384,
David Benjamin9f2e2772015-11-18 09:59:43 -0500521 SSL_HANDSHAKE_MAC_SHA384,
David Benjamina1c90a52015-05-30 17:03:14 -0400522 },
523
524 /* Cipher C027 */
525 {
526 TLS1_TXT_ECDHE_RSA_WITH_AES_128_SHA256,
David Benjaminff2df332015-11-18 10:01:16 -0500527 TLS1_CK_ECDHE_RSA_WITH_AES_128_SHA256,
528 SSL_kECDHE,
529 SSL_aRSA,
530 SSL_AES128,
David Benjamind6e9eec2015-11-18 09:48:55 -0500531 SSL_SHA256,
David Benjamin9f2e2772015-11-18 09:59:43 -0500532 SSL_HANDSHAKE_MAC_SHA256,
David Benjamina1c90a52015-05-30 17:03:14 -0400533 },
534
535 /* Cipher C028 */
536 {
537 TLS1_TXT_ECDHE_RSA_WITH_AES_256_SHA384,
David Benjaminff2df332015-11-18 10:01:16 -0500538 TLS1_CK_ECDHE_RSA_WITH_AES_256_SHA384,
539 SSL_kECDHE,
540 SSL_aRSA,
541 SSL_AES256,
David Benjamind6e9eec2015-11-18 09:48:55 -0500542 SSL_SHA384,
David Benjamin9f2e2772015-11-18 09:59:43 -0500543 SSL_HANDSHAKE_MAC_SHA384,
David Benjamina1c90a52015-05-30 17:03:14 -0400544 },
545
546
547 /* GCM based TLS v1.2 ciphersuites from RFC5289 */
548
549 /* Cipher C02B */
550 {
551 TLS1_TXT_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256,
David Benjaminff2df332015-11-18 10:01:16 -0500552 TLS1_CK_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256,
553 SSL_kECDHE,
554 SSL_aECDSA,
555 SSL_AES128GCM,
556 SSL_AEAD,
David Benjaminb2a985b2015-06-21 15:13:57 -0400557 SSL_HANDSHAKE_MAC_SHA256,
David Benjamina1c90a52015-05-30 17:03:14 -0400558 },
559
560 /* Cipher C02C */
561 {
562 TLS1_TXT_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384,
David Benjaminff2df332015-11-18 10:01:16 -0500563 TLS1_CK_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384,
564 SSL_kECDHE,
565 SSL_aECDSA,
566 SSL_AES256GCM,
567 SSL_AEAD,
David Benjaminb2a985b2015-06-21 15:13:57 -0400568 SSL_HANDSHAKE_MAC_SHA384,
David Benjamina1c90a52015-05-30 17:03:14 -0400569 },
570
571 /* Cipher C02F */
572 {
573 TLS1_TXT_ECDHE_RSA_WITH_AES_128_GCM_SHA256,
David Benjaminff2df332015-11-18 10:01:16 -0500574 TLS1_CK_ECDHE_RSA_WITH_AES_128_GCM_SHA256,
575 SSL_kECDHE,
576 SSL_aRSA,
577 SSL_AES128GCM,
578 SSL_AEAD,
David Benjaminb2a985b2015-06-21 15:13:57 -0400579 SSL_HANDSHAKE_MAC_SHA256,
David Benjamina1c90a52015-05-30 17:03:14 -0400580 },
581
582 /* Cipher C030 */
583 {
584 TLS1_TXT_ECDHE_RSA_WITH_AES_256_GCM_SHA384,
David Benjaminff2df332015-11-18 10:01:16 -0500585 TLS1_CK_ECDHE_RSA_WITH_AES_256_GCM_SHA384,
586 SSL_kECDHE,
587 SSL_aRSA,
588 SSL_AES256GCM,
589 SSL_AEAD,
David Benjaminb2a985b2015-06-21 15:13:57 -0400590 SSL_HANDSHAKE_MAC_SHA384,
David Benjamina1c90a52015-05-30 17:03:14 -0400591 },
592
Adam Langley85bc5602015-06-09 09:54:04 -0700593 /* ECDHE-PSK cipher suites. */
594
595 /* Cipher C035 */
596 {
597 TLS1_TXT_ECDHE_PSK_WITH_AES_128_CBC_SHA,
598 TLS1_CK_ECDHE_PSK_WITH_AES_128_CBC_SHA,
David Benjaminff2df332015-11-18 10:01:16 -0500599 SSL_kECDHE,
600 SSL_aPSK,
601 SSL_AES128,
602 SSL_SHA1,
David Benjamin9f2e2772015-11-18 09:59:43 -0500603 SSL_HANDSHAKE_MAC_DEFAULT,
Adam Langley85bc5602015-06-09 09:54:04 -0700604 },
605
606 /* Cipher C036 */
607 {
608 TLS1_TXT_ECDHE_PSK_WITH_AES_256_CBC_SHA,
609 TLS1_CK_ECDHE_PSK_WITH_AES_256_CBC_SHA,
David Benjaminff2df332015-11-18 10:01:16 -0500610 SSL_kECDHE,
611 SSL_aPSK,
612 SSL_AES256,
613 SSL_SHA1,
David Benjamin9f2e2772015-11-18 09:59:43 -0500614 SSL_HANDSHAKE_MAC_DEFAULT,
Adam Langley85bc5602015-06-09 09:54:04 -0700615 },
616
617 /* ChaCha20-Poly1305 cipher suites. */
618
David Benjamin13414b32015-12-09 23:02:39 -0500619#if !defined(BORINGSSL_ANDROID_SYSTEM)
David Benjamina1c90a52015-05-30 17:03:14 -0400620 {
Brian Smith271777f2015-10-03 13:53:33 -1000621 TLS1_TXT_ECDHE_RSA_WITH_CHACHA20_POLY1305_OLD,
David Benjaminff2df332015-11-18 10:01:16 -0500622 TLS1_CK_ECDHE_RSA_CHACHA20_POLY1305_OLD,
623 SSL_kECDHE,
624 SSL_aRSA,
625 SSL_CHACHA20POLY1305_OLD,
626 SSL_AEAD,
David Benjamina1c90a52015-05-30 17:03:14 -0400627 SSL_HANDSHAKE_MAC_SHA256,
David Benjamina1c90a52015-05-30 17:03:14 -0400628 },
629
630 {
Brian Smith271777f2015-10-03 13:53:33 -1000631 TLS1_TXT_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_OLD,
David Benjaminff2df332015-11-18 10:01:16 -0500632 TLS1_CK_ECDHE_ECDSA_CHACHA20_POLY1305_OLD,
633 SSL_kECDHE,
634 SSL_aECDSA,
635 SSL_CHACHA20POLY1305_OLD,
636 SSL_AEAD,
David Benjamina1c90a52015-05-30 17:03:14 -0400637 SSL_HANDSHAKE_MAC_SHA256,
David Benjamina1c90a52015-05-30 17:03:14 -0400638 },
Adam Langleyd98dc132015-09-23 16:41:33 -0700639#endif
David Benjamin13414b32015-12-09 23:02:39 -0500640
641 /* Cipher CCA8 */
642 {
643 TLS1_TXT_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256,
644 TLS1_CK_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256,
645 SSL_kECDHE,
646 SSL_aRSA,
647 SSL_CHACHA20POLY1305,
648 SSL_AEAD,
649 SSL_HANDSHAKE_MAC_SHA256,
650 },
651
652 /* Cipher CCA9 */
653 {
654 TLS1_TXT_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256,
655 TLS1_CK_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256,
656 SSL_kECDHE,
657 SSL_aECDSA,
658 SSL_CHACHA20POLY1305,
659 SSL_AEAD,
660 SSL_HANDSHAKE_MAC_SHA256,
661 },
662
663 /* Cipher CCAB */
664 {
665 TLS1_TXT_ECDHE_PSK_WITH_CHACHA20_POLY1305_SHA256,
666 TLS1_CK_ECDHE_PSK_WITH_CHACHA20_POLY1305_SHA256,
667 SSL_kECDHE,
668 SSL_aPSK,
669 SSL_CHACHA20POLY1305,
670 SSL_AEAD,
671 SSL_HANDSHAKE_MAC_SHA256,
672 },
Matt Braithwaite053931e2016-05-25 12:06:05 -0700673
Steven Valdez3084e7b2016-06-02 12:07:20 -0400674 /* Cipher D001 */
675 {
676 TLS1_TXT_ECDHE_PSK_WITH_AES_128_GCM_SHA256,
677 TLS1_CK_ECDHE_PSK_WITH_AES_128_GCM_SHA256,
678 SSL_kECDHE,
679 SSL_aPSK,
680 SSL_AES128GCM,
David Benjaminc9a43682016-06-21 17:30:54 -0400681 SSL_AEAD,
Steven Valdez3084e7b2016-06-02 12:07:20 -0400682 SSL_HANDSHAKE_MAC_SHA256,
683 },
684
685 /* Cipher D002 */
686 {
687 TLS1_TXT_ECDHE_PSK_WITH_AES_256_GCM_SHA384,
688 TLS1_CK_ECDHE_PSK_WITH_AES_256_GCM_SHA384,
689 SSL_kECDHE,
690 SSL_aPSK,
691 SSL_AES256GCM,
David Benjaminc9a43682016-06-21 17:30:54 -0400692 SSL_AEAD,
Steven Valdez3084e7b2016-06-02 12:07:20 -0400693 SSL_HANDSHAKE_MAC_SHA384,
694 },
695
David Benjamina1c90a52015-05-30 17:03:14 -0400696};
697
Steven Valdezcb966542016-08-17 16:56:14 -0400698static const size_t kCiphersLen = OPENSSL_ARRAY_SIZE(kCiphers);
David Benjamina1c90a52015-05-30 17:03:14 -0400699
Adam Langleyfcf25832014-12-18 17:42:32 -0800700#define CIPHER_ADD 1
701#define CIPHER_KILL 2
702#define CIPHER_DEL 3
703#define CIPHER_ORD 4
704#define CIPHER_SPECIAL 5
Adam Langley95c29f32014-06-20 12:00:00 -0700705
Adam Langleyfcf25832014-12-18 17:42:32 -0800706typedef struct cipher_order_st {
707 const SSL_CIPHER *cipher;
708 int active;
Adam Langleyfcf25832014-12-18 17:42:32 -0800709 int in_group;
710 struct cipher_order_st *next, *prev;
711} CIPHER_ORDER;
Adam Langley95c29f32014-06-20 12:00:00 -0700712
typedef struct cipher_alias_st {
  /* name is the name of the cipher alias. */
  const char *name;

  /* The following fields are bitmasks for the corresponding fields on
   * |SSL_CIPHER|. A cipher matches a cipher alias iff, for each bitmask, the
   * bit corresponding to the cipher's value is set to 1. If any bitmask is
   * all zeroes, the alias matches nothing. Use |~0u| for the default value. */
  uint32_t algorithm_mkey;
  uint32_t algorithm_auth;
  uint32_t algorithm_enc;
  uint32_t algorithm_mac;

  /* min_version, if non-zero, matches all ciphers which were added in that
   * particular protocol version. */
  uint16_t min_version;
} CIPHER_ALIAS;
730
David Benjamina1c90a52015-05-30 17:03:14 -0400731static const CIPHER_ALIAS kCipherAliases[] = {
Matt Braithwaite053931e2016-05-25 12:06:05 -0700732 /* "ALL" doesn't include eNULL nor kCECPQ1. These must be explicitly
733 * enabled. */
734 {"ALL", ~SSL_kCECPQ1, ~0u, ~SSL_eNULL, ~0u, 0},
Adam Langley95c29f32014-06-20 12:00:00 -0700735
David Benjamina1c90a52015-05-30 17:03:14 -0400736 /* The "COMPLEMENTOFDEFAULT" rule is omitted. It matches nothing. */
Adam Langley95c29f32014-06-20 12:00:00 -0700737
David Benjamina1c90a52015-05-30 17:03:14 -0400738 /* key exchange aliases
739 * (some of those using only a single bit here combine
740 * multiple key exchange algs according to the RFCs,
741 * e.g. kEDH combines DHE_DSS and DHE_RSA) */
David Benjamind6e9eec2015-11-18 09:48:55 -0500742 {"kRSA", SSL_kRSA, ~0u, ~0u, ~0u, 0},
Adam Langley95c29f32014-06-20 12:00:00 -0700743
David Benjamind6e9eec2015-11-18 09:48:55 -0500744 {"kDHE", SSL_kDHE, ~0u, ~0u, ~0u, 0},
745 {"kEDH", SSL_kDHE, ~0u, ~0u, ~0u, 0},
746 {"DH", SSL_kDHE, ~0u, ~0u, ~0u, 0},
Adam Langley95c29f32014-06-20 12:00:00 -0700747
David Benjamind6e9eec2015-11-18 09:48:55 -0500748 {"kECDHE", SSL_kECDHE, ~0u, ~0u, ~0u, 0},
Matt Braithwaite053931e2016-05-25 12:06:05 -0700749 {"kCECPQ1", SSL_kCECPQ1, ~0u, ~0u, ~0u, 0},
David Benjamind6e9eec2015-11-18 09:48:55 -0500750 {"kEECDH", SSL_kECDHE, ~0u, ~0u, ~0u, 0},
751 {"ECDH", SSL_kECDHE, ~0u, ~0u, ~0u, 0},
Adam Langley95c29f32014-06-20 12:00:00 -0700752
David Benjamind6e9eec2015-11-18 09:48:55 -0500753 {"kPSK", SSL_kPSK, ~0u, ~0u, ~0u, 0},
Adam Langley95c29f32014-06-20 12:00:00 -0700754
David Benjamina1c90a52015-05-30 17:03:14 -0400755 /* server authentication aliases */
Matt Braithwaite053931e2016-05-25 12:06:05 -0700756 {"aRSA", ~SSL_kCECPQ1, SSL_aRSA, ~SSL_eNULL, ~0u, 0},
757 {"aECDSA", ~SSL_kCECPQ1, SSL_aECDSA, ~0u, ~0u, 0},
758 {"ECDSA", ~SSL_kCECPQ1, SSL_aECDSA, ~0u, ~0u, 0},
David Benjamind6e9eec2015-11-18 09:48:55 -0500759 {"aPSK", ~0u, SSL_aPSK, ~0u, ~0u, 0},
Adam Langley95c29f32014-06-20 12:00:00 -0700760
David Benjamina1c90a52015-05-30 17:03:14 -0400761 /* aliases combining key exchange and server authentication */
David Benjamind6e9eec2015-11-18 09:48:55 -0500762 {"DHE", SSL_kDHE, ~0u, ~0u, ~0u, 0},
763 {"EDH", SSL_kDHE, ~0u, ~0u, ~0u, 0},
764 {"ECDHE", SSL_kECDHE, ~0u, ~0u, ~0u, 0},
765 {"EECDH", SSL_kECDHE, ~0u, ~0u, ~0u, 0},
766 {"RSA", SSL_kRSA, SSL_aRSA, ~SSL_eNULL, ~0u, 0},
767 {"PSK", SSL_kPSK, SSL_aPSK, ~0u, ~0u, 0},
Adam Langley95c29f32014-06-20 12:00:00 -0700768
David Benjamina1c90a52015-05-30 17:03:14 -0400769 /* symmetric encryption aliases */
David Benjamind6e9eec2015-11-18 09:48:55 -0500770 {"3DES", ~0u, ~0u, SSL_3DES, ~0u, 0},
771 {"RC4", ~0u, ~0u, SSL_RC4, ~0u, 0},
772 {"AES128", ~0u, ~0u, SSL_AES128 | SSL_AES128GCM, ~0u, 0},
Matt Braithwaite053931e2016-05-25 12:06:05 -0700773 {"AES256", ~SSL_kCECPQ1, ~0u, SSL_AES256 | SSL_AES256GCM, ~0u, 0},
774 {"AES", ~SSL_kCECPQ1, ~0u, SSL_AES, ~0u, 0},
775 {"AESGCM", ~SSL_kCECPQ1, ~0u, SSL_AES128GCM | SSL_AES256GCM, ~0u, 0},
776 {"CHACHA20", ~SSL_kCECPQ1, ~0u, SSL_CHACHA20POLY1305 | SSL_CHACHA20POLY1305_OLD, ~0u,
David Benjamin13414b32015-12-09 23:02:39 -0500777 0},
Adam Langley95c29f32014-06-20 12:00:00 -0700778
David Benjamina1c90a52015-05-30 17:03:14 -0400779 /* MAC aliases */
David Benjamind6e9eec2015-11-18 09:48:55 -0500780 {"MD5", ~0u, ~0u, ~0u, SSL_MD5, 0},
781 {"SHA1", ~0u, ~0u, ~SSL_eNULL, SSL_SHA1, 0},
782 {"SHA", ~0u, ~0u, ~SSL_eNULL, SSL_SHA1, 0},
Matt Braithwaite053931e2016-05-25 12:06:05 -0700783 {"SHA256", ~SSL_kCECPQ1, ~0u, ~0u, SSL_SHA256, 0},
784 {"SHA384", ~SSL_kCECPQ1, ~0u, ~0u, SSL_SHA384, 0},
Adam Langley95c29f32014-06-20 12:00:00 -0700785
David Benjamindcb6ef02015-11-06 15:35:54 -0500786 /* Legacy protocol minimum version aliases. "TLSv1" is intentionally the
787 * same as "SSLv3". */
Matt Braithwaite053931e2016-05-25 12:06:05 -0700788 {"SSLv3", ~SSL_kCECPQ1, ~0u, ~SSL_eNULL, ~0u, SSL3_VERSION},
789 {"TLSv1", ~SSL_kCECPQ1, ~0u, ~SSL_eNULL, ~0u, SSL3_VERSION},
790 {"TLSv1.2", ~SSL_kCECPQ1, ~0u, ~SSL_eNULL, ~0u, TLS1_2_VERSION},
Adam Langley95c29f32014-06-20 12:00:00 -0700791
David Benjamind6e9eec2015-11-18 09:48:55 -0500792 /* Legacy strength classes. */
793 {"MEDIUM", ~0u, ~0u, SSL_RC4, ~0u, 0},
Matt Braithwaite053931e2016-05-25 12:06:05 -0700794 {"HIGH", ~SSL_kCECPQ1, ~0u, ~(SSL_eNULL|SSL_RC4), ~0u, 0},
795 {"FIPS", ~SSL_kCECPQ1, ~0u, ~(SSL_eNULL|SSL_RC4), ~0u, 0},
Adam Langleyfcf25832014-12-18 17:42:32 -0800796};
Adam Langley95c29f32014-06-20 12:00:00 -0700797
/* kCipherAliasesLen is the number of entries in |kCipherAliases|. */
static const size_t kCipherAliasesLen = OPENSSL_ARRAY_SIZE(kCipherAliases);
David Benjamina1c90a52015-05-30 17:03:14 -0400799
800static int ssl_cipher_id_cmp(const void *in_a, const void *in_b) {
801 const SSL_CIPHER *a = in_a;
802 const SSL_CIPHER *b = in_b;
803
804 if (a->id > b->id) {
805 return 1;
806 } else if (a->id < b->id) {
807 return -1;
808 } else {
809 return 0;
810 }
811}
812
813static int ssl_cipher_ptr_id_cmp(const SSL_CIPHER **a, const SSL_CIPHER **b) {
814 return ssl_cipher_id_cmp(*a, *b);
815}
816
817const SSL_CIPHER *SSL_get_cipher_by_value(uint16_t value) {
818 SSL_CIPHER c;
819
820 c.id = 0x03000000L | value;
821 return bsearch(&c, kCiphers, kCiphersLen, sizeof(SSL_CIPHER),
822 ssl_cipher_id_cmp);
823}
David Benjamin0344daf2015-04-08 02:08:01 -0400824
/* ssl_cipher_get_evp_aead looks up the AEAD implementing |cipher| at protocol
 * |version|. On success it returns one, sets |*out_aead| to the AEAD, and
 * writes the MAC key length and fixed-IV (implicit nonce) length to
 * |*out_mac_secret_len| and |*out_fixed_iv_len|, respectively. It returns
 * zero if the cipher's encryption/MAC combination is unsupported.
 * |*out_mac_secret_len| is only non-zero for the legacy stateful
 * MAC-then-encrypt constructions (CBC, RC4, and NULL ciphers). */
int ssl_cipher_get_evp_aead(const EVP_AEAD **out_aead,
                            size_t *out_mac_secret_len,
                            size_t *out_fixed_iv_len,
                            const SSL_CIPHER *cipher, uint16_t version) {
  /* Initialize all outputs so failure paths leave them in a known state. */
  *out_aead = NULL;
  *out_mac_secret_len = 0;
  *out_fixed_iv_len = 0;

  switch (cipher->algorithm_enc) {
    case SSL_AES128GCM:
      *out_aead = EVP_aead_aes_128_gcm();
      /* GCM in TLS 1.2 uses a 4-byte implicit salt plus an explicit nonce. */
      *out_fixed_iv_len = 4;
      break;

    case SSL_AES256GCM:
      *out_aead = EVP_aead_aes_256_gcm();
      *out_fixed_iv_len = 4;
      break;

#if !defined(BORINGSSL_ANDROID_SYSTEM)
    /* The pre-standard ChaCha20-Poly1305 construction is compiled out on
     * Android system builds. */
    case SSL_CHACHA20POLY1305_OLD:
      *out_aead = EVP_aead_chacha20_poly1305_old();
      *out_fixed_iv_len = 0;
      break;
#endif

    case SSL_CHACHA20POLY1305:
      *out_aead = EVP_aead_chacha20_poly1305();
      /* The RFC 7905 construction uses the full 12-byte nonce as fixed IV. */
      *out_fixed_iv_len = 12;
      break;

#ifdef BORINGSSL_ENABLE_RC4_TLS
    case SSL_RC4:
      /* RC4 is a stateful legacy cipher; the AEAD wrapper also carries the
       * MAC, selected by |algorithm_mac|. SSL 3.0 uses a distinct MAC
       * construction from TLS. */
      switch (cipher->algorithm_mac) {
        case SSL_MD5:
          if (version == SSL3_VERSION) {
            *out_aead = EVP_aead_rc4_md5_ssl3();
          } else {
            *out_aead = EVP_aead_rc4_md5_tls();
          }
          *out_mac_secret_len = MD5_DIGEST_LENGTH;
          break;
        case SSL_SHA1:
          if (version == SSL3_VERSION) {
            *out_aead = EVP_aead_rc4_sha1_ssl3();
          } else {
            *out_aead = EVP_aead_rc4_sha1_tls();
          }
          *out_mac_secret_len = SHA_DIGEST_LENGTH;
          break;
        default:
          return 0;
      }
      break;
#endif

    case SSL_AES128:
      switch (cipher->algorithm_mac) {
        case SSL_SHA1:
          /* SSL 3.0 and TLS 1.0 use an implicit (chained) CBC IV, so the
           * record state carries a 16-byte fixed IV; TLS 1.1+ uses an
           * explicit per-record IV. */
          if (version == SSL3_VERSION) {
            *out_aead = EVP_aead_aes_128_cbc_sha1_ssl3();
            *out_fixed_iv_len = 16;
          } else if (version == TLS1_VERSION) {
            *out_aead = EVP_aead_aes_128_cbc_sha1_tls_implicit_iv();
            *out_fixed_iv_len = 16;
          } else {
            *out_aead = EVP_aead_aes_128_cbc_sha1_tls();
          }
          *out_mac_secret_len = SHA_DIGEST_LENGTH;
          break;
        case SSL_SHA256:
          *out_aead = EVP_aead_aes_128_cbc_sha256_tls();
          *out_mac_secret_len = SHA256_DIGEST_LENGTH;
          break;
        default:
          return 0;
      }
      break;

    case SSL_AES256:
      switch (cipher->algorithm_mac) {
        case SSL_SHA1:
          if (version == SSL3_VERSION) {
            *out_aead = EVP_aead_aes_256_cbc_sha1_ssl3();
            *out_fixed_iv_len = 16;
          } else if (version == TLS1_VERSION) {
            *out_aead = EVP_aead_aes_256_cbc_sha1_tls_implicit_iv();
            *out_fixed_iv_len = 16;
          } else {
            *out_aead = EVP_aead_aes_256_cbc_sha1_tls();
          }
          *out_mac_secret_len = SHA_DIGEST_LENGTH;
          break;
        case SSL_SHA256:
          *out_aead = EVP_aead_aes_256_cbc_sha256_tls();
          *out_mac_secret_len = SHA256_DIGEST_LENGTH;
          break;
        case SSL_SHA384:
          *out_aead = EVP_aead_aes_256_cbc_sha384_tls();
          *out_mac_secret_len = SHA384_DIGEST_LENGTH;
          break;
        default:
          return 0;
      }
      break;

    case SSL_3DES:
      switch (cipher->algorithm_mac) {
        case SSL_SHA1:
          /* 3DES has an 8-byte block, hence the 8-byte implicit IV in the
           * pre-TLS-1.1 modes. */
          if (version == SSL3_VERSION) {
            *out_aead = EVP_aead_des_ede3_cbc_sha1_ssl3();
            *out_fixed_iv_len = 8;
          } else if (version == TLS1_VERSION) {
            *out_aead = EVP_aead_des_ede3_cbc_sha1_tls_implicit_iv();
            *out_fixed_iv_len = 8;
          } else {
            *out_aead = EVP_aead_des_ede3_cbc_sha1_tls();
          }
          *out_mac_secret_len = SHA_DIGEST_LENGTH;
          break;
        default:
          return 0;
      }
      break;

    case SSL_eNULL:
      /* NULL encryption still authenticates records with the MAC. */
      switch (cipher->algorithm_mac) {
        case SSL_SHA1:
          if (version == SSL3_VERSION) {
            *out_aead = EVP_aead_null_sha1_ssl3();
          } else {
            *out_aead = EVP_aead_null_sha1_tls();
          }
          *out_mac_secret_len = SHA_DIGEST_LENGTH;
          break;
        default:
          return 0;
      }
      break;

    default:
      return 0;
  }

  /* In TLS 1.3, the iv_len is equal to the AEAD nonce length whereas the code
   * above computes the TLS 1.2 construction.
   *
   * TODO(davidben,svaldez): Avoid computing the wrong value and fixing it. */
  if (version >= TLS1_3_VERSION) {
    *out_fixed_iv_len = EVP_AEAD_nonce_length(*out_aead);
    assert(*out_fixed_iv_len >= 8);
  }
  return 1;
}
Adam Langleyc9fb3752014-06-20 12:00:00 -0700979
David Benjaminb0883312015-08-06 09:54:13 -0400980const EVP_MD *ssl_get_handshake_digest(uint32_t algorithm_prf) {
981 switch (algorithm_prf) {
982 case SSL_HANDSHAKE_MAC_DEFAULT:
983 return EVP_sha1();
984 case SSL_HANDSHAKE_MAC_SHA256:
985 return EVP_sha256();
986 case SSL_HANDSHAKE_MAC_SHA384:
987 return EVP_sha384();
988 default:
989 return NULL;
Adam Langleyfcf25832014-12-18 17:42:32 -0800990 }
Adam Langley95c29f32014-06-20 12:00:00 -0700991}
992
/* ITEM_SEP evaluates to true when |a| is one of the separator characters
 * (':', ' ', ';', ',') permitted between items in a cipher rule string. */
#define ITEM_SEP(a) \
  (((a) == ':') || ((a) == ' ') || ((a) == ';') || ((a) == ','))
Adam Langley95c29f32014-06-20 12:00:00 -0700995
/* rule_equals returns one iff the NUL-terminated string |rule| is equal to the
 * |buf_len| bytes at |buf|. */
static int rule_equals(const char *rule, const char *buf, size_t buf_len) {
  /* |strncmp| only establishes that |buf| is a prefix of |rule| ... */
  if (strncmp(rule, buf, buf_len) != 0) {
    return 0;
  }
  /* ... so additionally require that |rule| ends exactly at |buf_len|. */
  return rule[buf_len] == '\0';
}
1002
Adam Langley95c29f32014-06-20 12:00:00 -07001003static void ll_append_tail(CIPHER_ORDER **head, CIPHER_ORDER *curr,
Adam Langleyfcf25832014-12-18 17:42:32 -08001004 CIPHER_ORDER **tail) {
1005 if (curr == *tail) {
1006 return;
1007 }
1008 if (curr == *head) {
1009 *head = curr->next;
1010 }
1011 if (curr->prev != NULL) {
1012 curr->prev->next = curr->next;
1013 }
1014 if (curr->next != NULL) {
1015 curr->next->prev = curr->prev;
1016 }
1017 (*tail)->next = curr;
1018 curr->prev = *tail;
1019 curr->next = NULL;
1020 *tail = curr;
1021}
Adam Langley95c29f32014-06-20 12:00:00 -07001022
1023static void ll_append_head(CIPHER_ORDER **head, CIPHER_ORDER *curr,
Adam Langleyfcf25832014-12-18 17:42:32 -08001024 CIPHER_ORDER **tail) {
1025 if (curr == *head) {
1026 return;
1027 }
1028 if (curr == *tail) {
1029 *tail = curr->prev;
1030 }
1031 if (curr->next != NULL) {
1032 curr->next->prev = curr->prev;
1033 }
1034 if (curr->prev != NULL) {
1035 curr->prev->next = curr->next;
1036 }
1037 (*head)->prev = curr;
1038 curr->next = *head;
1039 curr->prev = NULL;
1040 *head = curr;
1041}
Adam Langley95c29f32014-06-20 12:00:00 -07001042
David Benjamin82c9e902014-12-12 15:55:27 -05001043static void ssl_cipher_collect_ciphers(const SSL_PROTOCOL_METHOD *ssl_method,
Adam Langleyfcf25832014-12-18 17:42:32 -08001044 CIPHER_ORDER *co_list,
1045 CIPHER_ORDER **head_p,
1046 CIPHER_ORDER **tail_p) {
David Benjamina1c90a52015-05-30 17:03:14 -04001047 /* The set of ciphers is static, but some subset may be unsupported by
1048 * |ssl_method|, so the list may be smaller. */
1049 size_t co_list_num = 0;
1050 size_t i;
1051 for (i = 0; i < kCiphersLen; i++) {
1052 const SSL_CIPHER *cipher = &kCiphers[i];
1053 if (ssl_method->supports_cipher(cipher)) {
1054 co_list[co_list_num].cipher = cipher;
Adam Langleyfcf25832014-12-18 17:42:32 -08001055 co_list[co_list_num].next = NULL;
1056 co_list[co_list_num].prev = NULL;
1057 co_list[co_list_num].active = 0;
1058 co_list[co_list_num].in_group = 0;
1059 co_list_num++;
1060 }
1061 }
Adam Langley95c29f32014-06-20 12:00:00 -07001062
Adam Langleyfcf25832014-12-18 17:42:32 -08001063 /* Prepare linked list from list entries. */
1064 if (co_list_num > 0) {
1065 co_list[0].prev = NULL;
Adam Langley95c29f32014-06-20 12:00:00 -07001066
Adam Langleyfcf25832014-12-18 17:42:32 -08001067 if (co_list_num > 1) {
1068 co_list[0].next = &co_list[1];
Adam Langley95c29f32014-06-20 12:00:00 -07001069
Adam Langleyfcf25832014-12-18 17:42:32 -08001070 for (i = 1; i < co_list_num - 1; i++) {
1071 co_list[i].prev = &co_list[i - 1];
1072 co_list[i].next = &co_list[i + 1];
1073 }
Adam Langley95c29f32014-06-20 12:00:00 -07001074
Adam Langleyfcf25832014-12-18 17:42:32 -08001075 co_list[co_list_num - 1].prev = &co_list[co_list_num - 2];
1076 }
1077
1078 co_list[co_list_num - 1].next = NULL;
1079
1080 *head_p = &co_list[0];
1081 *tail_p = &co_list[co_list_num - 1];
1082 }
1083}
Adam Langley95c29f32014-06-20 12:00:00 -07001084
/* ssl_cipher_apply_rule applies the rule type |rule| to ciphers matching its
 * parameters in the linked list from |*head_p| to |*tail_p|. It writes the new
 * head and tail of the list to |*head_p| and |*tail_p|, respectively.
 *
 * - If |cipher_id| is non-zero, only that cipher is selected.
 * - Otherwise, if |strength_bits| is non-negative, it selects ciphers
 *   of that strength.
 * - Otherwise, it selects ciphers that match each bitmasks in |alg_*| and
 *   |min_version|.
 *
 * |rule| is one of CIPHER_ADD (append to active list), CIPHER_ORD (reorder an
 * already-active cipher to the tail), CIPHER_DEL (deactivate, moving to the
 * head), or CIPHER_KILL (unlink entirely). |in_group| marks ciphers added
 * inside a [..] preference group. */
static void ssl_cipher_apply_rule(
    uint32_t cipher_id, uint32_t alg_mkey, uint32_t alg_auth,
    uint32_t alg_enc, uint32_t alg_mac, uint16_t min_version, int rule,
    int strength_bits, int in_group, CIPHER_ORDER **head_p,
    CIPHER_ORDER **tail_p) {
  CIPHER_ORDER *head, *tail, *curr, *next, *last;
  const SSL_CIPHER *cp;
  int reverse = 0;

  if (cipher_id == 0 && strength_bits == -1 && min_version == 0 &&
      (alg_mkey == 0 || alg_auth == 0 || alg_enc == 0 || alg_mac == 0)) {
    /* The rule matches nothing, so bail early. */
    return;
  }

  if (rule == CIPHER_DEL) {
    /* needed to maintain sorting between currently deleted ciphers */
    reverse = 1;
  }

  head = *head_p;
  tail = *tail_p;

  /* Walk tail-to-head for CIPHER_DEL, head-to-tail otherwise. */
  if (reverse) {
    next = tail;
    last = head;
  } else {
    next = head;
    last = tail;
  }

  curr = NULL;
  for (;;) {
    if (curr == last) {
      break;
    }

    curr = next;
    if (curr == NULL) {
      break;
    }

    /* Compute the successor before any splice below relinks |curr|. */
    next = reverse ? curr->prev : curr->next;
    cp = curr->cipher;

    /* Selection criteria is either a specific cipher, the value of
     * |strength_bits|, or the algorithms used. */
    if (cipher_id != 0) {
      if (cipher_id != cp->id) {
        continue;
      }
    } else if (strength_bits >= 0) {
      if (strength_bits != SSL_CIPHER_get_bits(cp, NULL)) {
        continue;
      }
    } else {
      if (!(alg_mkey & cp->algorithm_mkey) ||
          !(alg_auth & cp->algorithm_auth) ||
          !(alg_enc & cp->algorithm_enc) ||
          !(alg_mac & cp->algorithm_mac) ||
          (min_version != 0 && SSL_CIPHER_get_min_version(cp) != min_version)) {
        continue;
      }

      /* The following ciphers are internal implementation details of TLS 1.3
       * resumption but are not yet finalized. Disable them by default until
       * then. */
      if (cp->id == TLS1_CK_ECDHE_PSK_WITH_AES_128_GCM_SHA256 ||
          cp->id == TLS1_CK_ECDHE_PSK_WITH_AES_256_GCM_SHA384) {
        continue;
      }
    }

    /* add the cipher if it has not been added yet. */
    if (rule == CIPHER_ADD) {
      /* reverse == 0 */
      if (!curr->active) {
        ll_append_tail(&head, curr, &tail);
        curr->active = 1;
        curr->in_group = in_group;
      }
    }

    /* Move the added cipher to this location */
    else if (rule == CIPHER_ORD) {
      /* reverse == 0 */
      if (curr->active) {
        ll_append_tail(&head, curr, &tail);
        curr->in_group = 0;
      }
    } else if (rule == CIPHER_DEL) {
      /* reverse == 1 */
      if (curr->active) {
        /* most recently deleted ciphersuites get best positions
         * for any future CIPHER_ADD (note that the CIPHER_DEL loop
         * works in reverse to maintain the order) */
        ll_append_head(&head, curr, &tail);
        curr->active = 0;
        curr->in_group = 0;
      }
    } else if (rule == CIPHER_KILL) {
      /* reverse == 0 */
      /* Unlink |curr| from the list entirely; it can never be re-added. */
      if (head == curr) {
        head = curr->next;
      } else {
        curr->prev->next = curr->next;
      }

      if (tail == curr) {
        tail = curr->prev;
      }
      curr->active = 0;
      if (curr->next != NULL) {
        curr->next->prev = curr->prev;
      }
      if (curr->prev != NULL) {
        curr->prev->next = curr->next;
      }
      curr->next = NULL;
      curr->prev = NULL;
    }
  }

  *head_p = head;
  *tail_p = tail;
}
Adam Langley95c29f32014-06-20 12:00:00 -07001220
1221static int ssl_cipher_strength_sort(CIPHER_ORDER **head_p,
Adam Langleyfcf25832014-12-18 17:42:32 -08001222 CIPHER_ORDER **tail_p) {
1223 int max_strength_bits, i, *number_uses;
1224 CIPHER_ORDER *curr;
Adam Langley95c29f32014-06-20 12:00:00 -07001225
Adam Langleyfcf25832014-12-18 17:42:32 -08001226 /* This routine sorts the ciphers with descending strength. The sorting must
1227 * keep the pre-sorted sequence, so we apply the normal sorting routine as
1228 * '+' movement to the end of the list. */
1229 max_strength_bits = 0;
1230 curr = *head_p;
1231 while (curr != NULL) {
David Benjamin9f2e2772015-11-18 09:59:43 -05001232 if (curr->active &&
1233 SSL_CIPHER_get_bits(curr->cipher, NULL) > max_strength_bits) {
1234 max_strength_bits = SSL_CIPHER_get_bits(curr->cipher, NULL);
Adam Langleyfcf25832014-12-18 17:42:32 -08001235 }
1236 curr = curr->next;
1237 }
Adam Langley95c29f32014-06-20 12:00:00 -07001238
Adam Langleyfcf25832014-12-18 17:42:32 -08001239 number_uses = OPENSSL_malloc((max_strength_bits + 1) * sizeof(int));
1240 if (!number_uses) {
David Benjamin3570d732015-06-29 00:28:17 -04001241 OPENSSL_PUT_ERROR(SSL, ERR_R_MALLOC_FAILURE);
Adam Langleyfcf25832014-12-18 17:42:32 -08001242 return 0;
1243 }
1244 memset(number_uses, 0, (max_strength_bits + 1) * sizeof(int));
Adam Langley95c29f32014-06-20 12:00:00 -07001245
Adam Langleyfcf25832014-12-18 17:42:32 -08001246 /* Now find the strength_bits values actually used. */
1247 curr = *head_p;
1248 while (curr != NULL) {
1249 if (curr->active) {
David Benjamin9f2e2772015-11-18 09:59:43 -05001250 number_uses[SSL_CIPHER_get_bits(curr->cipher, NULL)]++;
Adam Langleyfcf25832014-12-18 17:42:32 -08001251 }
1252 curr = curr->next;
1253 }
Adam Langley95c29f32014-06-20 12:00:00 -07001254
Adam Langleyfcf25832014-12-18 17:42:32 -08001255 /* Go through the list of used strength_bits values in descending order. */
1256 for (i = max_strength_bits; i >= 0; i--) {
1257 if (number_uses[i] > 0) {
David Benjamind6e9eec2015-11-18 09:48:55 -05001258 ssl_cipher_apply_rule(0, 0, 0, 0, 0, 0, CIPHER_ORD, i, 0, head_p, tail_p);
Adam Langleyfcf25832014-12-18 17:42:32 -08001259 }
1260 }
1261
1262 OPENSSL_free(number_uses);
1263 return 1;
1264}
Adam Langley95c29f32014-06-20 12:00:00 -07001265
/* ssl_cipher_process_rulestr parses the cipher rule string |rule_str| and
 * applies each rule in turn to the cipher list bounded by |*head_p| and
 * |*tail_p|. Supported operators are '-' (delete), '+' (reorder),
 * '!' (kill), '@' (special command, e.g. "@STRENGTH"), and '[...|...]'
 * equal-preference groups; bare tokens add ciphers. It returns one on
 * success and zero on a parse error or failed special command.
 * NOTE(review): |ssl_method| is accepted but not referenced in this body —
 * presumably kept for signature symmetry with the collection step. */
static int ssl_cipher_process_rulestr(const SSL_PROTOCOL_METHOD *ssl_method,
                                      const char *rule_str,
                                      CIPHER_ORDER **head_p,
                                      CIPHER_ORDER **tail_p) {
  uint32_t alg_mkey, alg_auth, alg_enc, alg_mac;
  uint16_t min_version;
  const char *l, *buf;
  int multi, skip_rule, rule, retval, ok, in_group = 0, has_group = 0;
  size_t j, buf_len;
  uint32_t cipher_id;
  char ch;

  retval = 1;
  l = rule_str;
  for (;;) {
    ch = *l;

    if (ch == '\0') {
      break; /* done */
    }

    /* Inside a [..] group, only '|', ']', and cipher tokens are legal. */
    if (in_group) {
      if (ch == ']') {
        /* Closing a group: the last cipher in it ends the group run. */
        if (*tail_p) {
          (*tail_p)->in_group = 0;
        }
        in_group = 0;
        l++;
        continue;
      }

      if (ch == '|') {
        rule = CIPHER_ADD;
        l++;
        continue;
      } else if (!(ch >= 'a' && ch <= 'z') && !(ch >= 'A' && ch <= 'Z') &&
                 !(ch >= '0' && ch <= '9')) {
        OPENSSL_PUT_ERROR(SSL, SSL_R_UNEXPECTED_OPERATOR_IN_GROUP);
        retval = in_group = 0;
        break;
      } else {
        rule = CIPHER_ADD;
      }
    } else if (ch == '-') {
      rule = CIPHER_DEL;
      l++;
    } else if (ch == '+') {
      rule = CIPHER_ORD;
      l++;
    } else if (ch == '!') {
      rule = CIPHER_KILL;
      l++;
    } else if (ch == '@') {
      rule = CIPHER_SPECIAL;
      l++;
    } else if (ch == '[') {
      if (in_group) {
        OPENSSL_PUT_ERROR(SSL, SSL_R_NESTED_GROUP);
        retval = in_group = 0;
        break;
      }
      in_group = 1;
      has_group = 1;
      l++;
      continue;
    } else {
      rule = CIPHER_ADD;
    }

    /* If preference groups are enabled, the only legal operator is +.
     * Otherwise the in_group bits will get mixed up. */
    if (has_group && rule != CIPHER_ADD) {
      OPENSSL_PUT_ERROR(SSL, SSL_R_MIXED_SPECIAL_OPERATOR_WITH_GROUPS);
      retval = in_group = 0;
      break;
    }

    if (ITEM_SEP(ch)) {
      l++;
      continue;
    }

    /* Reset per-rule state: masks start fully permissive and are narrowed by
     * each alias in a multipart (A+B) rule. */
    multi = 0;
    cipher_id = 0;
    alg_mkey = ~0u;
    alg_auth = ~0u;
    alg_enc = ~0u;
    alg_mac = ~0u;
    min_version = 0;
    skip_rule = 0;

    for (;;) {
      ch = *l;
      buf = l;
      buf_len = 0;
      /* Scan one token: alphanumerics plus '-' and '.'. */
      while (((ch >= 'A') && (ch <= 'Z')) || ((ch >= '0') && (ch <= '9')) ||
             ((ch >= 'a') && (ch <= 'z')) || (ch == '-') || (ch == '.')) {
        ch = *(++l);
        buf_len++;
      }

      if (buf_len == 0) {
        /* We hit something we cannot deal with, it is no command or separator
         * nor alphanumeric, so we call this an error. */
        OPENSSL_PUT_ERROR(SSL, SSL_R_INVALID_COMMAND);
        retval = in_group = 0;
        l++;
        break;
      }

      if (rule == CIPHER_SPECIAL) {
        break;
      }

      /* Look for a matching exact cipher. These aren't allowed in multipart
       * rules. */
      if (!multi && ch != '+') {
        for (j = 0; j < kCiphersLen; j++) {
          const SSL_CIPHER *cipher = &kCiphers[j];
          if (rule_equals(cipher->name, buf, buf_len)) {
            cipher_id = cipher->id;
            break;
          }
        }
      }
      if (cipher_id == 0) {
        /* If not an exact cipher, look for a matching cipher alias. */
        for (j = 0; j < kCipherAliasesLen; j++) {
          if (rule_equals(kCipherAliases[j].name, buf, buf_len)) {
            alg_mkey &= kCipherAliases[j].algorithm_mkey;
            alg_auth &= kCipherAliases[j].algorithm_auth;
            alg_enc &= kCipherAliases[j].algorithm_enc;
            alg_mac &= kCipherAliases[j].algorithm_mac;

            /* Two different version aliases in one rule can never both
             * match, so the whole rule is skipped. */
            if (min_version != 0 &&
                min_version != kCipherAliases[j].min_version) {
              skip_rule = 1;
            } else {
              min_version = kCipherAliases[j].min_version;
            }
            break;
          }
        }
        /* An unrecognized token makes the rule match nothing. */
        if (j == kCipherAliasesLen) {
          skip_rule = 1;
        }
      }

      /* Check for a multipart rule. */
      if (ch != '+') {
        break;
      }
      l++;
      multi = 1;
    }

    /* If one of the CHACHA20_POLY1305 variants is selected, include the other
     * as well. They have the same name to avoid requiring changes in
     * configuration. Apply this transformation late so that the cipher name
     * still behaves as an exact name and not an alias in multipart rules.
     *
     * This is temporary and will be removed when the pre-standard construction
     * is removed. */
    if (cipher_id == TLS1_CK_ECDHE_RSA_CHACHA20_POLY1305_OLD ||
        cipher_id == TLS1_CK_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256) {
      cipher_id = 0;
      alg_mkey = SSL_kECDHE;
      alg_auth = SSL_aRSA;
      alg_enc = SSL_CHACHA20POLY1305|SSL_CHACHA20POLY1305_OLD;
      alg_mac = SSL_AEAD;
    } else if (cipher_id == TLS1_CK_ECDHE_ECDSA_CHACHA20_POLY1305_OLD ||
               cipher_id == TLS1_CK_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256) {
      cipher_id = 0;
      alg_mkey = SSL_kECDHE;
      alg_auth = SSL_aECDSA;
      alg_enc = SSL_CHACHA20POLY1305|SSL_CHACHA20POLY1305_OLD;
      alg_mac = SSL_AEAD;
    }

    /* Ok, we have the rule, now apply it. */
    if (rule == CIPHER_SPECIAL) {
      /* special command */
      ok = 0;
      if (buf_len == 8 && !strncmp(buf, "STRENGTH", 8)) {
        ok = ssl_cipher_strength_sort(head_p, tail_p);
      } else {
        OPENSSL_PUT_ERROR(SSL, SSL_R_INVALID_COMMAND);
      }

      if (ok == 0) {
        retval = 0;
      }

      /* We do not support any "multi" options together with "@", so throw away
       * the rest of the command, if any left, until end or ':' is found. */
      while (*l != '\0' && !ITEM_SEP(*l)) {
        l++;
      }
    } else if (!skip_rule) {
      ssl_cipher_apply_rule(cipher_id, alg_mkey, alg_auth, alg_enc, alg_mac,
                            min_version, rule, -1, in_group, head_p, tail_p);
    }
  }

  /* An unterminated [..] group is a syntax error. */
  if (in_group) {
    OPENSSL_PUT_ERROR(SSL, SSL_R_INVALID_COMMAND);
    retval = 0;
  }

  return retval;
}
Adam Langley95c29f32014-06-20 12:00:00 -07001477
Adam Langleyfcf25832014-12-18 17:42:32 -08001478STACK_OF(SSL_CIPHER) *
1479ssl_create_cipher_list(const SSL_PROTOCOL_METHOD *ssl_method,
David Benjamin71f07942015-04-08 02:36:59 -04001480 struct ssl_cipher_preference_list_st **out_cipher_list,
1481 STACK_OF(SSL_CIPHER) **out_cipher_list_by_id,
1482 const char *rule_str) {
David Benjamin0344daf2015-04-08 02:08:01 -04001483 int ok;
Adam Langleyfcf25832014-12-18 17:42:32 -08001484 STACK_OF(SSL_CIPHER) *cipherstack = NULL, *tmp_cipher_list = NULL;
1485 const char *rule_p;
1486 CIPHER_ORDER *co_list = NULL, *head = NULL, *tail = NULL, *curr;
Adam Langleyfcf25832014-12-18 17:42:32 -08001487 uint8_t *in_group_flags = NULL;
1488 unsigned int num_in_group_flags = 0;
1489 struct ssl_cipher_preference_list_st *pref_list = NULL;
Adam Langley95c29f32014-06-20 12:00:00 -07001490
Adam Langleyfcf25832014-12-18 17:42:32 -08001491 /* Return with error if nothing to do. */
David Benjamin71f07942015-04-08 02:36:59 -04001492 if (rule_str == NULL || out_cipher_list == NULL) {
Adam Langleyfcf25832014-12-18 17:42:32 -08001493 return NULL;
1494 }
David Benjamin5213df42014-08-20 14:19:54 -04001495
Adam Langleyfcf25832014-12-18 17:42:32 -08001496 /* Now we have to collect the available ciphers from the compiled in ciphers.
1497 * We cannot get more than the number compiled in, so it is used for
1498 * allocation. */
Brian Smith5ba06892016-02-07 09:36:04 -10001499 co_list = OPENSSL_malloc(sizeof(CIPHER_ORDER) * kCiphersLen);
Adam Langleyfcf25832014-12-18 17:42:32 -08001500 if (co_list == NULL) {
David Benjamin3570d732015-06-29 00:28:17 -04001501 OPENSSL_PUT_ERROR(SSL, ERR_R_MALLOC_FAILURE);
Adam Langleyfcf25832014-12-18 17:42:32 -08001502 return NULL;
1503 }
Adam Langley95c29f32014-06-20 12:00:00 -07001504
David Benjamina1c90a52015-05-30 17:03:14 -04001505 ssl_cipher_collect_ciphers(ssl_method, co_list, &head, &tail);
Adam Langley95c29f32014-06-20 12:00:00 -07001506
Adam Langleyfcf25832014-12-18 17:42:32 -08001507 /* Now arrange all ciphers by preference:
1508 * TODO(davidben): Compute this order once and copy it. */
Adam Langley95c29f32014-06-20 12:00:00 -07001509
Adam Langleyfcf25832014-12-18 17:42:32 -08001510 /* Everything else being equal, prefer ECDHE_ECDSA then ECDHE_RSA over other
1511 * key exchange mechanisms */
Matt Braithwaite053931e2016-05-25 12:06:05 -07001512
David Benjamind6e9eec2015-11-18 09:48:55 -05001513 ssl_cipher_apply_rule(0, SSL_kECDHE, SSL_aECDSA, ~0u, ~0u, 0, CIPHER_ADD, -1,
Adam Langleyfcf25832014-12-18 17:42:32 -08001514 0, &head, &tail);
David Benjamind6e9eec2015-11-18 09:48:55 -05001515 ssl_cipher_apply_rule(0, SSL_kECDHE, ~0u, ~0u, ~0u, 0, CIPHER_ADD, -1, 0,
1516 &head, &tail);
1517 ssl_cipher_apply_rule(0, SSL_kECDHE, ~0u, ~0u, ~0u, 0, CIPHER_DEL, -1, 0,
1518 &head, &tail);
Adam Langley95c29f32014-06-20 12:00:00 -07001519
Adam Langleyfcf25832014-12-18 17:42:32 -08001520 /* Order the bulk ciphers. First the preferred AEAD ciphers. We prefer
1521 * CHACHA20 unless there is hardware support for fast and constant-time
David Benjamin13414b32015-12-09 23:02:39 -05001522 * AES_GCM. Of the two CHACHA20 variants, the new one is preferred over the
1523 * old one. */
Adam Langleyfcf25832014-12-18 17:42:32 -08001524 if (EVP_has_aes_hardware()) {
David Benjamind6e9eec2015-11-18 09:48:55 -05001525 ssl_cipher_apply_rule(0, ~0u, ~0u, SSL_AES128GCM, ~0u, 0, CIPHER_ADD, -1, 0,
1526 &head, &tail);
David Benjamin43336652016-03-03 15:32:29 -05001527 ssl_cipher_apply_rule(0, ~0u, ~0u, SSL_AES256GCM, ~0u, 0, CIPHER_ADD, -1, 0,
1528 &head, &tail);
David Benjamin13414b32015-12-09 23:02:39 -05001529 ssl_cipher_apply_rule(0, ~0u, ~0u, SSL_CHACHA20POLY1305, ~0u, 0, CIPHER_ADD,
1530 -1, 0, &head, &tail);
David Benjamind6e9eec2015-11-18 09:48:55 -05001531 ssl_cipher_apply_rule(0, ~0u, ~0u, SSL_CHACHA20POLY1305_OLD, ~0u, 0,
David Benjamin0344daf2015-04-08 02:08:01 -04001532 CIPHER_ADD, -1, 0, &head, &tail);
Adam Langleyfcf25832014-12-18 17:42:32 -08001533 } else {
David Benjamin13414b32015-12-09 23:02:39 -05001534 ssl_cipher_apply_rule(0, ~0u, ~0u, SSL_CHACHA20POLY1305, ~0u, 0, CIPHER_ADD,
1535 -1, 0, &head, &tail);
David Benjamind6e9eec2015-11-18 09:48:55 -05001536 ssl_cipher_apply_rule(0, ~0u, ~0u, SSL_CHACHA20POLY1305_OLD, ~0u, 0,
David Benjamin0344daf2015-04-08 02:08:01 -04001537 CIPHER_ADD, -1, 0, &head, &tail);
David Benjamind6e9eec2015-11-18 09:48:55 -05001538 ssl_cipher_apply_rule(0, ~0u, ~0u, SSL_AES128GCM, ~0u, 0, CIPHER_ADD, -1, 0,
1539 &head, &tail);
David Benjamin43336652016-03-03 15:32:29 -05001540 ssl_cipher_apply_rule(0, ~0u, ~0u, SSL_AES256GCM, ~0u, 0, CIPHER_ADD, -1, 0,
1541 &head, &tail);
Adam Langleyfcf25832014-12-18 17:42:32 -08001542 }
Adam Langley95c29f32014-06-20 12:00:00 -07001543
David Benjamin43336652016-03-03 15:32:29 -05001544 /* Then the legacy non-AEAD ciphers: AES_128_CBC, AES_256_CBC,
1545 * 3DES_EDE_CBC_SHA, RC4_128_SHA, RC4_128_MD5. */
David Benjamind6e9eec2015-11-18 09:48:55 -05001546 ssl_cipher_apply_rule(0, ~0u, ~0u, SSL_AES128, ~0u, 0, CIPHER_ADD, -1, 0,
1547 &head, &tail);
David Benjamin43336652016-03-03 15:32:29 -05001548 ssl_cipher_apply_rule(0, ~0u, ~0u, SSL_AES256, ~0u, 0, CIPHER_ADD, -1, 0,
1549 &head, &tail);
1550 ssl_cipher_apply_rule(0, ~0u, ~0u, SSL_3DES, ~0u, 0, CIPHER_ADD, -1, 0, &head,
1551 &tail);
David Benjamind6e9eec2015-11-18 09:48:55 -05001552 ssl_cipher_apply_rule(0, ~0u, ~0u, SSL_RC4, ~SSL_MD5, 0, CIPHER_ADD, -1, 0,
1553 &head, &tail);
1554 ssl_cipher_apply_rule(0, ~0u, ~0u, SSL_RC4, SSL_MD5, 0, CIPHER_ADD, -1, 0,
1555 &head, &tail);
Adam Langley95c29f32014-06-20 12:00:00 -07001556
Adam Langleyfcf25832014-12-18 17:42:32 -08001557 /* Temporarily enable everything else for sorting */
David Benjamind6e9eec2015-11-18 09:48:55 -05001558 ssl_cipher_apply_rule(0, ~0u, ~0u, ~0u, ~0u, 0, CIPHER_ADD, -1, 0, &head,
1559 &tail);
Adam Langley95c29f32014-06-20 12:00:00 -07001560
Adam Langleyfcf25832014-12-18 17:42:32 -08001561 /* Move ciphers without forward secrecy to the end. */
David Benjamind6e9eec2015-11-18 09:48:55 -05001562 ssl_cipher_apply_rule(0, ~(SSL_kDHE | SSL_kECDHE), ~0u, ~0u, ~0u, 0,
David Benjamin0344daf2015-04-08 02:08:01 -04001563 CIPHER_ORD, -1, 0, &head, &tail);
Adam Langley95c29f32014-06-20 12:00:00 -07001564
Adam Langleyfcf25832014-12-18 17:42:32 -08001565 /* Now disable everything (maintaining the ordering!) */
David Benjamind6e9eec2015-11-18 09:48:55 -05001566 ssl_cipher_apply_rule(0, ~0u, ~0u, ~0u, ~0u, 0, CIPHER_DEL, -1, 0, &head,
1567 &tail);
Adam Langley95c29f32014-06-20 12:00:00 -07001568
Adam Langleyfcf25832014-12-18 17:42:32 -08001569 /* If the rule_string begins with DEFAULT, apply the default rule before
1570 * using the (possibly available) additional rules. */
1571 ok = 1;
1572 rule_p = rule_str;
1573 if (strncmp(rule_str, "DEFAULT", 7) == 0) {
David Benjamin0344daf2015-04-08 02:08:01 -04001574 ok = ssl_cipher_process_rulestr(ssl_method, SSL_DEFAULT_CIPHER_LIST, &head,
1575 &tail);
Adam Langleyfcf25832014-12-18 17:42:32 -08001576 rule_p += 7;
1577 if (*rule_p == ':') {
1578 rule_p++;
1579 }
1580 }
Adam Langley858a88d2014-06-20 12:00:00 -07001581
Adam Langleyfcf25832014-12-18 17:42:32 -08001582 if (ok && strlen(rule_p) > 0) {
David Benjamin0344daf2015-04-08 02:08:01 -04001583 ok = ssl_cipher_process_rulestr(ssl_method, rule_p, &head, &tail);
Adam Langleyfcf25832014-12-18 17:42:32 -08001584 }
Adam Langley95c29f32014-06-20 12:00:00 -07001585
Adam Langleyfcf25832014-12-18 17:42:32 -08001586 if (!ok) {
1587 goto err;
1588 }
1589
1590 /* Allocate new "cipherstack" for the result, return with error
1591 * if we cannot get one. */
1592 cipherstack = sk_SSL_CIPHER_new_null();
1593 if (cipherstack == NULL) {
1594 goto err;
1595 }
1596
David Benjamina1c90a52015-05-30 17:03:14 -04001597 in_group_flags = OPENSSL_malloc(kCiphersLen);
Adam Langleyfcf25832014-12-18 17:42:32 -08001598 if (!in_group_flags) {
1599 goto err;
1600 }
1601
1602 /* The cipher selection for the list is done. The ciphers are added
1603 * to the resulting precedence to the STACK_OF(SSL_CIPHER). */
1604 for (curr = head; curr != NULL; curr = curr->next) {
1605 if (curr->active) {
David Benjamin2adb7ec2015-01-11 19:59:06 -05001606 if (!sk_SSL_CIPHER_push(cipherstack, curr->cipher)) {
1607 goto err;
1608 }
Adam Langleyfcf25832014-12-18 17:42:32 -08001609 in_group_flags[num_in_group_flags++] = curr->in_group;
1610 }
1611 }
1612 OPENSSL_free(co_list); /* Not needed any longer */
1613 co_list = NULL;
1614
1615 tmp_cipher_list = sk_SSL_CIPHER_dup(cipherstack);
1616 if (tmp_cipher_list == NULL) {
1617 goto err;
1618 }
1619 pref_list = OPENSSL_malloc(sizeof(struct ssl_cipher_preference_list_st));
1620 if (!pref_list) {
1621 goto err;
1622 }
1623 pref_list->ciphers = cipherstack;
1624 pref_list->in_group_flags = OPENSSL_malloc(num_in_group_flags);
1625 if (!pref_list->in_group_flags) {
1626 goto err;
1627 }
1628 memcpy(pref_list->in_group_flags, in_group_flags, num_in_group_flags);
1629 OPENSSL_free(in_group_flags);
1630 in_group_flags = NULL;
David Benjamin71f07942015-04-08 02:36:59 -04001631 if (*out_cipher_list != NULL) {
1632 ssl_cipher_preference_list_free(*out_cipher_list);
Adam Langleyfcf25832014-12-18 17:42:32 -08001633 }
David Benjamin71f07942015-04-08 02:36:59 -04001634 *out_cipher_list = pref_list;
Adam Langleyfcf25832014-12-18 17:42:32 -08001635 pref_list = NULL;
1636
David Benjamin71f07942015-04-08 02:36:59 -04001637 if (out_cipher_list_by_id != NULL) {
David Benjamin2755a3e2015-04-22 16:17:58 -04001638 sk_SSL_CIPHER_free(*out_cipher_list_by_id);
David Benjamin71f07942015-04-08 02:36:59 -04001639 *out_cipher_list_by_id = tmp_cipher_list;
Adam Langleyfcf25832014-12-18 17:42:32 -08001640 tmp_cipher_list = NULL;
David Benjamin71f07942015-04-08 02:36:59 -04001641 (void) sk_SSL_CIPHER_set_cmp_func(*out_cipher_list_by_id,
1642 ssl_cipher_ptr_id_cmp);
Adam Langleyfcf25832014-12-18 17:42:32 -08001643
David Benjamin71f07942015-04-08 02:36:59 -04001644 sk_SSL_CIPHER_sort(*out_cipher_list_by_id);
Adam Langleyfcf25832014-12-18 17:42:32 -08001645 } else {
1646 sk_SSL_CIPHER_free(tmp_cipher_list);
1647 tmp_cipher_list = NULL;
1648 }
1649
1650 return cipherstack;
Adam Langley858a88d2014-06-20 12:00:00 -07001651
1652err:
David Benjamin2755a3e2015-04-22 16:17:58 -04001653 OPENSSL_free(co_list);
1654 OPENSSL_free(in_group_flags);
1655 sk_SSL_CIPHER_free(cipherstack);
1656 sk_SSL_CIPHER_free(tmp_cipher_list);
1657 if (pref_list) {
Adam Langleyfcf25832014-12-18 17:42:32 -08001658 OPENSSL_free(pref_list->in_group_flags);
1659 }
David Benjamin2755a3e2015-04-22 16:17:58 -04001660 OPENSSL_free(pref_list);
Adam Langleyfcf25832014-12-18 17:42:32 -08001661 return NULL;
1662}
Adam Langley95c29f32014-06-20 12:00:00 -07001663
David Benjamin71f07942015-04-08 02:36:59 -04001664uint32_t SSL_CIPHER_get_id(const SSL_CIPHER *cipher) { return cipher->id; }
1665
David Benjamina1c90a52015-05-30 17:03:14 -04001666uint16_t ssl_cipher_get_value(const SSL_CIPHER *cipher) {
1667 uint32_t id = cipher->id;
1668 /* All ciphers are SSLv3. */
1669 assert((id & 0xff000000) == 0x03000000);
1670 return id & 0xffff;
1671}
1672
Steven Valdez4aa154e2016-07-29 14:32:55 -04001673int ssl_cipher_get_ecdhe_psk_cipher(const SSL_CIPHER *cipher,
1674 uint16_t *out_cipher) {
1675 switch (cipher->id) {
1676 case TLS1_CK_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256:
1677 case TLS1_CK_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256:
1678 case TLS1_CK_ECDHE_PSK_WITH_CHACHA20_POLY1305_SHA256:
1679 *out_cipher = TLS1_CK_ECDHE_PSK_WITH_CHACHA20_POLY1305_SHA256 & 0xffff;
1680 return 1;
1681
1682 case TLS1_CK_ECDHE_RSA_WITH_AES_128_GCM_SHA256:
1683 case TLS1_CK_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256:
1684 case TLS1_CK_ECDHE_PSK_WITH_AES_128_GCM_SHA256:
1685 *out_cipher = TLS1_CK_ECDHE_PSK_WITH_AES_128_GCM_SHA256 & 0xffff;
1686 return 1;
1687
1688 case TLS1_CK_ECDHE_RSA_WITH_AES_256_GCM_SHA384:
1689 case TLS1_CK_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384:
1690 case TLS1_CK_ECDHE_PSK_WITH_AES_256_GCM_SHA384:
1691 *out_cipher = TLS1_CK_ECDHE_PSK_WITH_AES_256_GCM_SHA384 & 0xffff;
1692 return 1;
1693 }
1694 return 0;
1695}
1696
David Benjamin71f07942015-04-08 02:36:59 -04001697int SSL_CIPHER_is_AES(const SSL_CIPHER *cipher) {
1698 return (cipher->algorithm_enc & SSL_AES) != 0;
1699}
1700
1701int SSL_CIPHER_has_MD5_HMAC(const SSL_CIPHER *cipher) {
1702 return (cipher->algorithm_mac & SSL_MD5) != 0;
1703}
1704
David Benjaminef793f42015-11-05 18:16:27 -05001705int SSL_CIPHER_has_SHA1_HMAC(const SSL_CIPHER *cipher) {
1706 return (cipher->algorithm_mac & SSL_SHA1) != 0;
1707}
1708
David Benjamina211aee2016-02-24 17:18:44 -05001709int SSL_CIPHER_has_SHA256_HMAC(const SSL_CIPHER *cipher) {
1710 return (cipher->algorithm_mac & SSL_SHA256) != 0;
1711}
1712
David Benjamin71f07942015-04-08 02:36:59 -04001713int SSL_CIPHER_is_AESGCM(const SSL_CIPHER *cipher) {
David Benjaminc0125ef2015-09-09 09:11:07 -04001714 return (cipher->algorithm_enc & (SSL_AES128GCM | SSL_AES256GCM)) != 0;
David Benjamin71f07942015-04-08 02:36:59 -04001715}
1716
David Benjaminef793f42015-11-05 18:16:27 -05001717int SSL_CIPHER_is_AES128GCM(const SSL_CIPHER *cipher) {
1718 return (cipher->algorithm_enc & SSL_AES128GCM) != 0;
1719}
1720
Adam Langleyb00061c2015-11-16 17:44:52 -08001721int SSL_CIPHER_is_AES128CBC(const SSL_CIPHER *cipher) {
1722 return (cipher->algorithm_enc & SSL_AES128) != 0;
1723}
1724
1725int SSL_CIPHER_is_AES256CBC(const SSL_CIPHER *cipher) {
1726 return (cipher->algorithm_enc & SSL_AES256) != 0;
1727}
1728
David Benjamin51a01a52015-10-29 13:19:56 -04001729int SSL_CIPHER_is_CHACHA20POLY1305(const SSL_CIPHER *cipher) {
David Benjamin13414b32015-12-09 23:02:39 -05001730 return (cipher->algorithm_enc &
1731 (SSL_CHACHA20POLY1305 | SSL_CHACHA20POLY1305_OLD)) != 0;
David Benjamin71f07942015-04-08 02:36:59 -04001732}
1733
Matt Braithwaiteaf096752015-09-02 19:48:16 -07001734int SSL_CIPHER_is_NULL(const SSL_CIPHER *cipher) {
1735 return (cipher->algorithm_enc & SSL_eNULL) != 0;
1736}
1737
Adam Langleyd7fe75c2015-09-18 15:40:48 -07001738int SSL_CIPHER_is_RC4(const SSL_CIPHER *cipher) {
1739 return (cipher->algorithm_enc & SSL_RC4) != 0;
1740}
1741
Matt Braithwaiteaf096752015-09-02 19:48:16 -07001742int SSL_CIPHER_is_block_cipher(const SSL_CIPHER *cipher) {
1743 /* Neither stream cipher nor AEAD. */
1744 return (cipher->algorithm_enc & (SSL_RC4 | SSL_eNULL)) == 0 &&
1745 cipher->algorithm_mac != SSL_AEAD;
1746}
1747
David Benjaminef793f42015-11-05 18:16:27 -05001748int SSL_CIPHER_is_ECDSA(const SSL_CIPHER *cipher) {
1749 return (cipher->algorithm_auth & SSL_aECDSA) != 0;
1750}
1751
David Benjamin0fc7df52016-06-02 18:36:33 -04001752int SSL_CIPHER_is_DHE(const SSL_CIPHER *cipher) {
1753 return (cipher->algorithm_mkey & SSL_kDHE) != 0;
1754}
1755
David Benjamin4cc36ad2015-12-19 14:23:26 -05001756int SSL_CIPHER_is_ECDHE(const SSL_CIPHER *cipher) {
1757 return (cipher->algorithm_mkey & SSL_kECDHE) != 0;
1758}
1759
Matt Braithwaite053931e2016-05-25 12:06:05 -07001760int SSL_CIPHER_is_CECPQ1(const SSL_CIPHER *cipher) {
1761 return (cipher->algorithm_mkey & SSL_kCECPQ1) != 0;
1762}
1763
David Benjaminef793f42015-11-05 18:16:27 -05001764uint16_t SSL_CIPHER_get_min_version(const SSL_CIPHER *cipher) {
David Benjamindcb6ef02015-11-06 15:35:54 -05001765 if (cipher->algorithm_prf != SSL_HANDSHAKE_MAC_DEFAULT) {
1766 /* Cipher suites before TLS 1.2 use the default PRF, while all those added
1767 * afterwards specify a particular hash. */
David Benjaminef793f42015-11-05 18:16:27 -05001768 return TLS1_2_VERSION;
1769 }
1770 return SSL3_VERSION;
1771}
1772
Nick Harper1fd39d82016-06-14 18:14:35 -07001773uint16_t SSL_CIPHER_get_max_version(const SSL_CIPHER *cipher) {
1774 if (cipher->algorithm_mac == SSL_AEAD &&
1775 (cipher->algorithm_enc & SSL_CHACHA20POLY1305_OLD) == 0 &&
David Benjamin54c217c2016-07-13 12:35:25 -04001776 (cipher->algorithm_mkey & SSL_kECDHE) != 0 &&
1777 /* TODO(davidben,svaldez): Support PSK-based ciphers in TLS 1.3. */
1778 (cipher->algorithm_auth & SSL_aCERT) != 0) {
Nick Harper1fd39d82016-06-14 18:14:35 -07001779 return TLS1_3_VERSION;
1780 }
1781 return TLS1_2_VERSION;
1782}
1783
David Benjamin71f07942015-04-08 02:36:59 -04001784/* return the actual cipher being used */
1785const char *SSL_CIPHER_get_name(const SSL_CIPHER *cipher) {
1786 if (cipher != NULL) {
1787 return cipher->name;
1788 }
1789
1790 return "(NONE)";
1791}
1792
1793const char *SSL_CIPHER_get_kx_name(const SSL_CIPHER *cipher) {
1794 if (cipher == NULL) {
1795 return "";
1796 }
1797
1798 switch (cipher->algorithm_mkey) {
1799 case SSL_kRSA:
1800 return "RSA";
1801
1802 case SSL_kDHE:
1803 switch (cipher->algorithm_auth) {
1804 case SSL_aRSA:
1805 return "DHE_RSA";
1806 default:
1807 assert(0);
1808 return "UNKNOWN";
1809 }
1810
1811 case SSL_kECDHE:
1812 switch (cipher->algorithm_auth) {
1813 case SSL_aECDSA:
1814 return "ECDHE_ECDSA";
1815 case SSL_aRSA:
1816 return "ECDHE_RSA";
1817 case SSL_aPSK:
1818 return "ECDHE_PSK";
1819 default:
1820 assert(0);
1821 return "UNKNOWN";
1822 }
1823
Matt Braithwaite053931e2016-05-25 12:06:05 -07001824 case SSL_kCECPQ1:
1825 switch (cipher->algorithm_auth) {
1826 case SSL_aECDSA:
1827 return "CECPQ1_ECDSA";
1828 case SSL_aRSA:
1829 return "CECPQ1_RSA";
1830 default:
1831 assert(0);
1832 return "UNKNOWN";
1833 }
1834
David Benjamin71f07942015-04-08 02:36:59 -04001835 case SSL_kPSK:
1836 assert(cipher->algorithm_auth == SSL_aPSK);
1837 return "PSK";
1838
1839 default:
1840 assert(0);
1841 return "UNKNOWN";
1842 }
1843}
1844
1845static const char *ssl_cipher_get_enc_name(const SSL_CIPHER *cipher) {
1846 switch (cipher->algorithm_enc) {
1847 case SSL_3DES:
1848 return "3DES_EDE_CBC";
1849 case SSL_RC4:
1850 return "RC4";
1851 case SSL_AES128:
1852 return "AES_128_CBC";
1853 case SSL_AES256:
1854 return "AES_256_CBC";
1855 case SSL_AES128GCM:
1856 return "AES_128_GCM";
1857 case SSL_AES256GCM:
1858 return "AES_256_GCM";
David Benjamin13414b32015-12-09 23:02:39 -05001859 case SSL_CHACHA20POLY1305:
Brian Smith271777f2015-10-03 13:53:33 -10001860 case SSL_CHACHA20POLY1305_OLD:
David Benjamin71f07942015-04-08 02:36:59 -04001861 return "CHACHA20_POLY1305";
1862 break;
1863 default:
1864 assert(0);
1865 return "UNKNOWN";
1866 }
1867}
1868
1869static const char *ssl_cipher_get_prf_name(const SSL_CIPHER *cipher) {
David Benjaminb0883312015-08-06 09:54:13 -04001870 switch (cipher->algorithm_prf) {
1871 case SSL_HANDSHAKE_MAC_DEFAULT:
1872 /* Before TLS 1.2, the PRF component is the hash used in the HMAC, which is
1873 * only ever MD5 or SHA-1. */
1874 switch (cipher->algorithm_mac) {
1875 case SSL_MD5:
1876 return "MD5";
1877 case SSL_SHA1:
1878 return "SHA";
1879 }
1880 break;
1881 case SSL_HANDSHAKE_MAC_SHA256:
1882 return "SHA256";
1883 case SSL_HANDSHAKE_MAC_SHA384:
1884 return "SHA384";
David Benjamin71f07942015-04-08 02:36:59 -04001885 }
David Benjaminb0883312015-08-06 09:54:13 -04001886 assert(0);
1887 return "UNKNOWN";
David Benjamin71f07942015-04-08 02:36:59 -04001888}
1889
/* SSL_CIPHER_get_rfc_name returns a newly-allocated string with the RFC-style
 * name of |cipher| (TLS_{kx}_WITH_{enc}_{prf}), or NULL if |cipher| is NULL or
 * on allocation failure. The caller owns the result and must release it with
 * OPENSSL_free. */
char *SSL_CIPHER_get_rfc_name(const SSL_CIPHER *cipher) {
  if (cipher == NULL) {
    return NULL;
  }

  const char *kx_name = SSL_CIPHER_get_kx_name(cipher);
  const char *enc_name = ssl_cipher_get_enc_name(cipher);
  const char *prf_name = ssl_cipher_get_prf_name(cipher);

  /* The final name is TLS_{kx_name}_WITH_{enc_name}_{prf_name}. */
  /* 4 = strlen("TLS_"), 6 = strlen("_WITH_"), 1 = strlen("_"), and the final
   * +1 is the NUL terminator; this must match the concatenation below. */
  size_t len = 4 + strlen(kx_name) + 6 + strlen(enc_name) + 1 +
      strlen(prf_name) + 1;
  char *ret = OPENSSL_malloc(len);
  if (ret == NULL) {
    return NULL;
  }
  /* Each bounded copy/append reports the would-be length; >= len means
   * truncation, which the length computation above should make impossible. */
  if (BUF_strlcpy(ret, "TLS_", len) >= len ||
      BUF_strlcat(ret, kx_name, len) >= len ||
      BUF_strlcat(ret, "_WITH_", len) >= len ||
      BUF_strlcat(ret, enc_name, len) >= len ||
      BUF_strlcat(ret, "_", len) >= len ||
      BUF_strlcat(ret, prf_name, len) >= len) {
    assert(0);
    OPENSSL_free(ret);
    return NULL;
  }
  /* The buffer must be filled exactly, with no slack. */
  assert(strlen(ret) + 1 == len);
  return ret;
}
1919
/* SSL_CIPHER_get_bits returns the effective security strength, in bits, of
 * |cipher|'s bulk cipher, or zero if |cipher| is NULL. If |out_alg_bits| is
 * not NULL, it is set to the nominal key length in bits. */
int SSL_CIPHER_get_bits(const SSL_CIPHER *cipher, int *out_alg_bits) {
  if (cipher == NULL) {
    return 0;
  }

  int alg_bits, strength_bits;
  switch (cipher->algorithm_enc) {
    case SSL_AES128:
    case SSL_AES128GCM:
    case SSL_RC4:
      alg_bits = 128;
      strength_bits = 128;
      break;

    case SSL_AES256:
    case SSL_AES256GCM:
#if !defined(BORINGSSL_ANDROID_SYSTEM)
    case SSL_CHACHA20POLY1305_OLD:
#endif
    case SSL_CHACHA20POLY1305:
      alg_bits = 256;
      strength_bits = 256;
      break;

    case SSL_3DES:
      /* 3DES has a 168-bit key but is credited with only 112 bits of
       * strength here. */
      alg_bits = 168;
      strength_bits = 112;
      break;

    case SSL_eNULL:
      alg_bits = 0;
      strength_bits = 0;
      break;

    default:
      assert(0);
      alg_bits = 0;
      strength_bits = 0;
  }

  if (out_alg_bits != NULL) {
    *out_alg_bits = alg_bits;
  }
  return strength_bits;
}
1965
/* SSL_CIPHER_description writes a one-line, human-readable summary of
 * |cipher| (name, key exchange, authentication, encryption, MAC) into |buf|
 * and returns |buf|. If |buf| is NULL, a 128-byte buffer is allocated (the
 * caller owns it); if |buf| is non-NULL but |len| < 128, the static string
 * "Buffer too small" is returned instead. */
const char *SSL_CIPHER_description(const SSL_CIPHER *cipher, char *buf,
                                   int len) {
  const char *kx, *au, *enc, *mac;
  uint32_t alg_mkey, alg_auth, alg_enc, alg_mac;

  alg_mkey = cipher->algorithm_mkey;
  alg_auth = cipher->algorithm_auth;
  alg_enc = cipher->algorithm_enc;
  alg_mac = cipher->algorithm_mac;

  /* Key exchange. */
  switch (alg_mkey) {
    case SSL_kRSA:
      kx = "RSA";
      break;

    case SSL_kDHE:
      kx = "DH";
      break;

    case SSL_kECDHE:
      kx = "ECDH";
      break;

    case SSL_kCECPQ1:
      kx = "CECPQ1";
      break;

    case SSL_kPSK:
      kx = "PSK";
      break;

    default:
      kx = "unknown";
  }

  /* Authentication. */
  switch (alg_auth) {
    case SSL_aRSA:
      au = "RSA";
      break;

    case SSL_aECDSA:
      au = "ECDSA";
      break;

    case SSL_aPSK:
      au = "PSK";
      break;

    default:
      au = "unknown";
      break;
  }

  /* Bulk encryption. */
  switch (alg_enc) {
    case SSL_3DES:
      enc = "3DES(168)";
      break;

    case SSL_RC4:
      enc = "RC4(128)";
      break;

    case SSL_AES128:
      enc = "AES(128)";
      break;

    case SSL_AES256:
      enc = "AES(256)";
      break;

    case SSL_AES128GCM:
      enc = "AESGCM(128)";
      break;

    case SSL_AES256GCM:
      enc = "AESGCM(256)";
      break;

    case SSL_CHACHA20POLY1305_OLD:
      enc = "ChaCha20-Poly1305-Old";
      break;

    case SSL_CHACHA20POLY1305:
      enc = "ChaCha20-Poly1305";
      break;

    case SSL_eNULL:
      enc="None";
      break;

    default:
      enc = "unknown";
      break;
  }

  /* MAC (SSL_AEAD for AEAD suites, which carry no separate MAC). */
  switch (alg_mac) {
    case SSL_MD5:
      mac = "MD5";
      break;

    case SSL_SHA1:
      mac = "SHA1";
      break;

    case SSL_SHA256:
      mac = "SHA256";
      break;

    case SSL_SHA384:
      mac = "SHA384";
      break;

    case SSL_AEAD:
      mac = "AEAD";
      break;

    default:
      mac = "unknown";
      break;
  }

  /* OpenSSL-compatible buffer protocol: allocate on NULL, reject buffers
   * smaller than 128 bytes with a static error string. */
  if (buf == NULL) {
    len = 128;
    buf = OPENSSL_malloc(len);
    if (buf == NULL) {
      return NULL;
    }
  } else if (len < 128) {
    return "Buffer too small";
  }

  BIO_snprintf(buf, len, "%-23s Kx=%-8s Au=%-4s Enc=%-9s Mac=%-4s\n",
               cipher->name, kx, au, enc, mac);
  return buf;
}
2101
David Benjamin71f07942015-04-08 02:36:59 -04002102const char *SSL_CIPHER_get_version(const SSL_CIPHER *cipher) {
2103 return "TLSv1/SSLv3";
Adam Langleyfcf25832014-12-18 17:42:32 -08002104}
2105
Matt Braithwaite6a1275b2015-06-26 12:09:10 -07002106COMP_METHOD *SSL_COMP_get_compression_methods(void) { return NULL; }
Adam Langleyfcf25832014-12-18 17:42:32 -08002107
Matt Braithwaite6a1275b2015-06-26 12:09:10 -07002108int SSL_COMP_add_compression_method(int id, COMP_METHOD *cm) { return 1; }
Adam Langleyfcf25832014-12-18 17:42:32 -08002109
Matt Braithwaite6a1275b2015-06-26 12:09:10 -07002110const char *SSL_COMP_get_name(const COMP_METHOD *comp) { return NULL; }
Adam Langley95c29f32014-06-20 12:00:00 -07002111
David Benjamind1d80782015-07-05 11:54:09 -04002112int ssl_cipher_get_key_type(const SSL_CIPHER *cipher) {
David Benjamin71f07942015-04-08 02:36:59 -04002113 uint32_t alg_a = cipher->algorithm_auth;
Adam Langley95c29f32014-06-20 12:00:00 -07002114
Adam Langleyfcf25832014-12-18 17:42:32 -08002115 if (alg_a & SSL_aECDSA) {
David Benjamind1d80782015-07-05 11:54:09 -04002116 return EVP_PKEY_EC;
Adam Langleyfcf25832014-12-18 17:42:32 -08002117 } else if (alg_a & SSL_aRSA) {
David Benjamind1d80782015-07-05 11:54:09 -04002118 return EVP_PKEY_RSA;
Adam Langleyfcf25832014-12-18 17:42:32 -08002119 }
Adam Langley95c29f32014-06-20 12:00:00 -07002120
David Benjamind1d80782015-07-05 11:54:09 -04002121 return EVP_PKEY_NONE;
Adam Langleyfcf25832014-12-18 17:42:32 -08002122}
David Benjamin9c651c92014-07-12 13:27:45 -04002123
David Benjaminc032dfa2016-05-12 14:54:57 -04002124int ssl_cipher_uses_certificate_auth(const SSL_CIPHER *cipher) {
2125 return (cipher->algorithm_auth & SSL_aCERT) != 0;
Adam Langleyfcf25832014-12-18 17:42:32 -08002126}
2127
Adam Langleyfcf25832014-12-18 17:42:32 -08002128int ssl_cipher_requires_server_key_exchange(const SSL_CIPHER *cipher) {
2129 /* Ephemeral Diffie-Hellman key exchanges require a ServerKeyExchange. */
Matt Braithwaite053931e2016-05-25 12:06:05 -07002130 if (cipher->algorithm_mkey & SSL_kDHE ||
2131 cipher->algorithm_mkey & SSL_kECDHE ||
2132 cipher->algorithm_mkey & SSL_kCECPQ1) {
Adam Langleyfcf25832014-12-18 17:42:32 -08002133 return 1;
2134 }
2135
2136 /* It is optional in all others. */
2137 return 0;
2138}
David Benjaminb8d28cf2015-07-28 21:34:45 -04002139
2140size_t ssl_cipher_get_record_split_len(const SSL_CIPHER *cipher) {
2141 size_t block_size;
2142 switch (cipher->algorithm_enc) {
2143 case SSL_3DES:
2144 block_size = 8;
2145 break;
2146 case SSL_AES128:
2147 case SSL_AES256:
2148 block_size = 16;
2149 break;
2150 default:
2151 return 0;
2152 }
2153
2154 size_t mac_len;
2155 switch (cipher->algorithm_mac) {
2156 case SSL_MD5:
2157 mac_len = MD5_DIGEST_LENGTH;
2158 break;
2159 case SSL_SHA1:
2160 mac_len = SHA_DIGEST_LENGTH;
2161 break;
2162 default:
2163 return 0;
2164 }
2165
2166 size_t ret = 1 + mac_len;
2167 ret += block_size - (ret % block_size);
2168 return ret;
2169}