/* Copyright (C) 1995-1998 Eric Young (eay@cryptsoft.com)
 * All rights reserved.
 *
 * This package is an SSL implementation written
 * by Eric Young (eay@cryptsoft.com).
 * The implementation was written so as to conform with Netscapes SSL.
 *
 * This library is free for commercial and non-commercial use as long as
 * the following conditions are aheared to. The following conditions
 * apply to all code found in this distribution, be it the RC4, RSA,
 * lhash, DES, etc., code; not just the SSL code. The SSL documentation
 * included with this distribution is covered by the same copyright terms
 * except that the holder is Tim Hudson (tjh@cryptsoft.com).
 *
 * Copyright remains Eric Young's, and as such any Copyright notices in
 * the code are not to be removed.
 * If this package is used in a product, Eric Young should be given attribution
 * as the author of the parts of the library used.
 * This can be in the form of a textual message at program startup or
 * in documentation (online or textual) provided with the package.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. All advertising materials mentioning features or use of this software
 *    must display the following acknowledgement:
 *    "This product includes cryptographic software written by
 *     Eric Young (eay@cryptsoft.com)"
 *    The word 'cryptographic' can be left out if the rouines from the library
 *    being used are not cryptographic related :-).
 * 4. If you include any Windows specific code (or a derivative thereof) from
 *    the apps directory (application code) you must include an acknowledgement:
 *    "This product includes software written by Tim Hudson (tjh@cryptsoft.com)"
 *
 * THIS SOFTWARE IS PROVIDED BY ERIC YOUNG ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 * The licence and distribution terms for any publically available version or
 * derivative of this code cannot be changed. i.e. this code cannot simply be
 * copied and put under another distribution licence
 * [including the GNU Public Licence.]
 */
/* ====================================================================
 * Copyright (c) 1998-2007 The OpenSSL Project. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 *
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in
 *    the documentation and/or other materials provided with the
 *    distribution.
 *
 * 3. All advertising materials mentioning features or use of this
 *    software must display the following acknowledgment:
 *    "This product includes software developed by the OpenSSL Project
 *    for use in the OpenSSL Toolkit. (http://www.openssl.org/)"
 *
 * 4. The names "OpenSSL Toolkit" and "OpenSSL Project" must not be used to
 *    endorse or promote products derived from this software without
 *    prior written permission. For written permission, please contact
 *    openssl-core@openssl.org.
 *
 * 5. Products derived from this software may not be called "OpenSSL"
 *    nor may "OpenSSL" appear in their names without prior written
 *    permission of the OpenSSL Project.
 *
 * 6. Redistributions of any form whatsoever must retain the following
 *    acknowledgment:
 *    "This product includes software developed by the OpenSSL Project
 *    for use in the OpenSSL Toolkit (http://www.openssl.org/)"
 *
 * THIS SOFTWARE IS PROVIDED BY THE OpenSSL PROJECT ``AS IS'' AND ANY
 * EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE OpenSSL PROJECT OR
 * ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
 * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
 * OF THE POSSIBILITY OF SUCH DAMAGE.
 * ====================================================================
 *
 * This product includes cryptographic software written by Eric Young
 * (eay@cryptsoft.com). This product includes software written by Tim
 * Hudson (tjh@cryptsoft.com).
 *
 */
/* ====================================================================
 * Copyright 2002 Sun Microsystems, Inc. ALL RIGHTS RESERVED.
 * ECC cipher suite support in OpenSSL originally developed by
 * SUN MICROSYSTEMS, INC., and contributed to the OpenSSL project.
 */
/* ====================================================================
 * Copyright 2005 Nokia. All rights reserved.
 *
 * The portions of the attached software ("Contribution") is developed by
 * Nokia Corporation and is licensed pursuant to the OpenSSL open source
 * license.
 *
 * The Contribution, originally written by Mika Kousa and Pasi Eronen of
 * Nokia Corporation, consists of the "PSK" (Pre-Shared Key) ciphersuites
 * support (see RFC 4279) to OpenSSL.
 *
 * No patent licenses or other rights except those expressly stated in
 * the OpenSSL open source license shall be deemed granted or received
 * expressly, by implication, estoppel, or otherwise.
 *
 * No assurances are provided by Nokia that the Contribution does not
 * infringe the patent or other intellectual property rights of any third
 * party or that the license provides you with all the necessary rights
 * to make use of the Contribution.
 *
 * THE SOFTWARE IS PROVIDED "AS IS" WITHOUT WARRANTY OF ANY KIND. IN
 * ADDITION TO THE DISCLAIMERS INCLUDED IN THE LICENSE, NOKIA
 * SPECIFICALLY DISCLAIMS ANY LIABILITY FOR CLAIMS BROUGHT BY YOU OR ANY
 * OTHER ENTITY BASED ON INFRINGEMENT OF INTELLECTUAL PROPERTY RIGHTS OR
 * OTHERWISE. */

#include <openssl/ssl.h>

#include <assert.h>
#include <string.h>

#include <openssl/buf.h>
#include <openssl/err.h>
#include <openssl/md5.h>
#include <openssl/mem.h>
#include <openssl/sha.h>
#include <openssl/stack.h>

#include "internal.h"


/* kCiphers is an array of all supported ciphers, sorted by id. */
static const SSL_CIPHER kCiphers[] = {
    /* The RSA ciphers */
    /* Cipher 02 */
    {
        SSL3_TXT_RSA_NULL_SHA,
        SSL3_CK_RSA_NULL_SHA,
        SSL_kRSA,
        SSL_aRSA,
        SSL_eNULL,
        SSL_SHA1,
        SSL_HANDSHAKE_MAC_DEFAULT,
    },

    /* Cipher 04 */
    {
        SSL3_TXT_RSA_RC4_128_MD5,
        SSL3_CK_RSA_RC4_128_MD5,
        SSL_kRSA,
        SSL_aRSA,
        SSL_RC4,
        SSL_MD5,
        SSL_HANDSHAKE_MAC_DEFAULT,
    },

    /* Cipher 05 */
    {
        SSL3_TXT_RSA_RC4_128_SHA,
        SSL3_CK_RSA_RC4_128_SHA,
        SSL_kRSA,
        SSL_aRSA,
        SSL_RC4,
        SSL_SHA1,
        SSL_HANDSHAKE_MAC_DEFAULT,
    },

    /* Cipher 0A */
    {
        SSL3_TXT_RSA_DES_192_CBC3_SHA,
        SSL3_CK_RSA_DES_192_CBC3_SHA,
        SSL_kRSA,
        SSL_aRSA,
        SSL_3DES,
        SSL_SHA1,
        SSL_HANDSHAKE_MAC_DEFAULT,
    },


    /* New AES ciphersuites */

    /* Cipher 2F */
    {
        TLS1_TXT_RSA_WITH_AES_128_SHA,
        TLS1_CK_RSA_WITH_AES_128_SHA,
        SSL_kRSA,
        SSL_aRSA,
        SSL_AES128,
        SSL_SHA1,
        SSL_HANDSHAKE_MAC_DEFAULT,
    },

    /* Cipher 33 */
    {
        TLS1_TXT_DHE_RSA_WITH_AES_128_SHA,
        TLS1_CK_DHE_RSA_WITH_AES_128_SHA,
        SSL_kDHE,
        SSL_aRSA,
        SSL_AES128,
        SSL_SHA1,
        SSL_HANDSHAKE_MAC_DEFAULT,
    },

    /* Cipher 35 */
    {
        TLS1_TXT_RSA_WITH_AES_256_SHA,
        TLS1_CK_RSA_WITH_AES_256_SHA,
        SSL_kRSA,
        SSL_aRSA,
        SSL_AES256,
        SSL_SHA1,
        SSL_HANDSHAKE_MAC_DEFAULT,
    },

    /* Cipher 39 */
    {
        TLS1_TXT_DHE_RSA_WITH_AES_256_SHA,
        TLS1_CK_DHE_RSA_WITH_AES_256_SHA,
        SSL_kDHE,
        SSL_aRSA,
        SSL_AES256,
        SSL_SHA1,
        SSL_HANDSHAKE_MAC_DEFAULT,
    },


    /* TLS v1.2 ciphersuites */

    /* Cipher 3C */
    {
        TLS1_TXT_RSA_WITH_AES_128_SHA256,
        TLS1_CK_RSA_WITH_AES_128_SHA256,
        SSL_kRSA,
        SSL_aRSA,
        SSL_AES128,
        SSL_SHA256,
        SSL_HANDSHAKE_MAC_SHA256,
    },

    /* Cipher 3D */
    {
        TLS1_TXT_RSA_WITH_AES_256_SHA256,
        TLS1_CK_RSA_WITH_AES_256_SHA256,
        SSL_kRSA,
        SSL_aRSA,
        SSL_AES256,
        SSL_SHA256,
        SSL_HANDSHAKE_MAC_SHA256,
    },

    /* Cipher 67 */
    {
        TLS1_TXT_DHE_RSA_WITH_AES_128_SHA256,
        TLS1_CK_DHE_RSA_WITH_AES_128_SHA256,
        SSL_kDHE,
        SSL_aRSA,
        SSL_AES128,
        SSL_SHA256,
        SSL_HANDSHAKE_MAC_SHA256,
    },

    /* Cipher 6B */
    {
        TLS1_TXT_DHE_RSA_WITH_AES_256_SHA256,
        TLS1_CK_DHE_RSA_WITH_AES_256_SHA256,
        SSL_kDHE,
        SSL_aRSA,
        SSL_AES256,
        SSL_SHA256,
        SSL_HANDSHAKE_MAC_SHA256,
    },

    /* PSK cipher suites. */

    /* Cipher 8A */
    {
        TLS1_TXT_PSK_WITH_RC4_128_SHA,
        TLS1_CK_PSK_WITH_RC4_128_SHA,
        SSL_kPSK,
        SSL_aPSK,
        SSL_RC4,
        SSL_SHA1,
        SSL_HANDSHAKE_MAC_DEFAULT,
    },

    /* Cipher 8C */
    {
        TLS1_TXT_PSK_WITH_AES_128_CBC_SHA,
        TLS1_CK_PSK_WITH_AES_128_CBC_SHA,
        SSL_kPSK,
        SSL_aPSK,
        SSL_AES128,
        SSL_SHA1,
        SSL_HANDSHAKE_MAC_DEFAULT,
    },

    /* Cipher 8D */
    {
        TLS1_TXT_PSK_WITH_AES_256_CBC_SHA,
        TLS1_CK_PSK_WITH_AES_256_CBC_SHA,
        SSL_kPSK,
        SSL_aPSK,
        SSL_AES256,
        SSL_SHA1,
        SSL_HANDSHAKE_MAC_DEFAULT,
    },

    /* GCM ciphersuites from RFC5288 */

    /* Cipher 9C */
    {
        TLS1_TXT_RSA_WITH_AES_128_GCM_SHA256,
        TLS1_CK_RSA_WITH_AES_128_GCM_SHA256,
        SSL_kRSA,
        SSL_aRSA,
        SSL_AES128GCM,
        SSL_AEAD,
        SSL_HANDSHAKE_MAC_SHA256,
    },

    /* Cipher 9D */
    {
        TLS1_TXT_RSA_WITH_AES_256_GCM_SHA384,
        TLS1_CK_RSA_WITH_AES_256_GCM_SHA384,
        SSL_kRSA,
        SSL_aRSA,
        SSL_AES256GCM,
        SSL_AEAD,
        SSL_HANDSHAKE_MAC_SHA384,
    },

    /* Cipher 9E */
    {
        TLS1_TXT_DHE_RSA_WITH_AES_128_GCM_SHA256,
        TLS1_CK_DHE_RSA_WITH_AES_128_GCM_SHA256,
        SSL_kDHE,
        SSL_aRSA,
        SSL_AES128GCM,
        SSL_AEAD,
        SSL_HANDSHAKE_MAC_SHA256,
    },

    /* Cipher 9F */
    {
        TLS1_TXT_DHE_RSA_WITH_AES_256_GCM_SHA384,
        TLS1_CK_DHE_RSA_WITH_AES_256_GCM_SHA384,
        SSL_kDHE,
        SSL_aRSA,
        SSL_AES256GCM,
        SSL_AEAD,
        SSL_HANDSHAKE_MAC_SHA384,
    },

    /* CECPQ1 (combined elliptic curve + post-quantum) suites. */

    /* Cipher 16B7 */
    {
        TLS1_TXT_CECPQ1_RSA_WITH_CHACHA20_POLY1305_SHA256,
        TLS1_CK_CECPQ1_RSA_WITH_CHACHA20_POLY1305_SHA256,
        SSL_kCECPQ1,
        SSL_aRSA,
        SSL_CHACHA20POLY1305,
        SSL_AEAD,
        SSL_HANDSHAKE_MAC_SHA256,
    },

    /* Cipher 16B8 */
    {
        TLS1_TXT_CECPQ1_ECDSA_WITH_CHACHA20_POLY1305_SHA256,
        TLS1_CK_CECPQ1_ECDSA_WITH_CHACHA20_POLY1305_SHA256,
        SSL_kCECPQ1,
        SSL_aECDSA,
        SSL_CHACHA20POLY1305,
        SSL_AEAD,
        SSL_HANDSHAKE_MAC_SHA256,
    },

    /* Cipher 16B9 */
    {
        TLS1_TXT_CECPQ1_RSA_WITH_AES_256_GCM_SHA384,
        TLS1_CK_CECPQ1_RSA_WITH_AES_256_GCM_SHA384,
        SSL_kCECPQ1,
        SSL_aRSA,
        SSL_AES256GCM,
        SSL_AEAD,
        SSL_HANDSHAKE_MAC_SHA384,
    },

    /* Cipher 16BA */
    {
        TLS1_TXT_CECPQ1_ECDSA_WITH_AES_256_GCM_SHA384,
        TLS1_CK_CECPQ1_ECDSA_WITH_AES_256_GCM_SHA384,
        SSL_kCECPQ1,
        SSL_aECDSA,
        SSL_AES256GCM,
        SSL_AEAD,
        SSL_HANDSHAKE_MAC_SHA384,
    },

    /* Cipher C007 */
    {
        TLS1_TXT_ECDHE_ECDSA_WITH_RC4_128_SHA,
        TLS1_CK_ECDHE_ECDSA_WITH_RC4_128_SHA,
        SSL_kECDHE,
        SSL_aECDSA,
        SSL_RC4,
        SSL_SHA1,
        SSL_HANDSHAKE_MAC_DEFAULT,
    },

    /* Cipher C009 */
    {
        TLS1_TXT_ECDHE_ECDSA_WITH_AES_128_CBC_SHA,
        TLS1_CK_ECDHE_ECDSA_WITH_AES_128_CBC_SHA,
        SSL_kECDHE,
        SSL_aECDSA,
        SSL_AES128,
        SSL_SHA1,
        SSL_HANDSHAKE_MAC_DEFAULT,
    },

    /* Cipher C00A */
    {
        TLS1_TXT_ECDHE_ECDSA_WITH_AES_256_CBC_SHA,
        TLS1_CK_ECDHE_ECDSA_WITH_AES_256_CBC_SHA,
        SSL_kECDHE,
        SSL_aECDSA,
        SSL_AES256,
        SSL_SHA1,
        SSL_HANDSHAKE_MAC_DEFAULT,
    },

    /* Cipher C011 */
    {
        TLS1_TXT_ECDHE_RSA_WITH_RC4_128_SHA,
        TLS1_CK_ECDHE_RSA_WITH_RC4_128_SHA,
        SSL_kECDHE,
        SSL_aRSA,
        SSL_RC4,
        SSL_SHA1,
        SSL_HANDSHAKE_MAC_DEFAULT,
    },

    /* Cipher C013 */
    {
        TLS1_TXT_ECDHE_RSA_WITH_AES_128_CBC_SHA,
        TLS1_CK_ECDHE_RSA_WITH_AES_128_CBC_SHA,
        SSL_kECDHE,
        SSL_aRSA,
        SSL_AES128,
        SSL_SHA1,
        SSL_HANDSHAKE_MAC_DEFAULT,
    },

    /* Cipher C014 */
    {
        TLS1_TXT_ECDHE_RSA_WITH_AES_256_CBC_SHA,
        TLS1_CK_ECDHE_RSA_WITH_AES_256_CBC_SHA,
        SSL_kECDHE,
        SSL_aRSA,
        SSL_AES256,
        SSL_SHA1,
        SSL_HANDSHAKE_MAC_DEFAULT,
    },


    /* HMAC based TLS v1.2 ciphersuites from RFC5289 */

    /* Cipher C023 */
    {
        TLS1_TXT_ECDHE_ECDSA_WITH_AES_128_SHA256,
        TLS1_CK_ECDHE_ECDSA_WITH_AES_128_SHA256,
        SSL_kECDHE,
        SSL_aECDSA,
        SSL_AES128,
        SSL_SHA256,
        SSL_HANDSHAKE_MAC_SHA256,
    },

    /* Cipher C024 */
    {
        TLS1_TXT_ECDHE_ECDSA_WITH_AES_256_SHA384,
        TLS1_CK_ECDHE_ECDSA_WITH_AES_256_SHA384,
        SSL_kECDHE,
        SSL_aECDSA,
        SSL_AES256,
        SSL_SHA384,
        SSL_HANDSHAKE_MAC_SHA384,
    },

    /* Cipher C027 */
    {
        TLS1_TXT_ECDHE_RSA_WITH_AES_128_SHA256,
        TLS1_CK_ECDHE_RSA_WITH_AES_128_SHA256,
        SSL_kECDHE,
        SSL_aRSA,
        SSL_AES128,
        SSL_SHA256,
        SSL_HANDSHAKE_MAC_SHA256,
    },

    /* Cipher C028 */
    {
        TLS1_TXT_ECDHE_RSA_WITH_AES_256_SHA384,
        TLS1_CK_ECDHE_RSA_WITH_AES_256_SHA384,
        SSL_kECDHE,
        SSL_aRSA,
        SSL_AES256,
        SSL_SHA384,
        SSL_HANDSHAKE_MAC_SHA384,
    },


    /* GCM based TLS v1.2 ciphersuites from RFC5289 */

    /* Cipher C02B */
    {
        TLS1_TXT_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256,
        TLS1_CK_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256,
        SSL_kECDHE,
        SSL_aECDSA,
        SSL_AES128GCM,
        SSL_AEAD,
        SSL_HANDSHAKE_MAC_SHA256,
    },

    /* Cipher C02C */
    {
        TLS1_TXT_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384,
        TLS1_CK_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384,
        SSL_kECDHE,
        SSL_aECDSA,
        SSL_AES256GCM,
        SSL_AEAD,
        SSL_HANDSHAKE_MAC_SHA384,
    },

    /* Cipher C02F */
    {
        TLS1_TXT_ECDHE_RSA_WITH_AES_128_GCM_SHA256,
        TLS1_CK_ECDHE_RSA_WITH_AES_128_GCM_SHA256,
        SSL_kECDHE,
        SSL_aRSA,
        SSL_AES128GCM,
        SSL_AEAD,
        SSL_HANDSHAKE_MAC_SHA256,
    },

    /* Cipher C030 */
    {
        TLS1_TXT_ECDHE_RSA_WITH_AES_256_GCM_SHA384,
        TLS1_CK_ECDHE_RSA_WITH_AES_256_GCM_SHA384,
        SSL_kECDHE,
        SSL_aRSA,
        SSL_AES256GCM,
        SSL_AEAD,
        SSL_HANDSHAKE_MAC_SHA384,
    },

    /* ECDHE-PSK cipher suites. */

    /* Cipher C035 */
    {
        TLS1_TXT_ECDHE_PSK_WITH_AES_128_CBC_SHA,
        TLS1_CK_ECDHE_PSK_WITH_AES_128_CBC_SHA,
        SSL_kECDHE,
        SSL_aPSK,
        SSL_AES128,
        SSL_SHA1,
        SSL_HANDSHAKE_MAC_DEFAULT,
    },

    /* Cipher C036 */
    {
        TLS1_TXT_ECDHE_PSK_WITH_AES_256_CBC_SHA,
        TLS1_CK_ECDHE_PSK_WITH_AES_256_CBC_SHA,
        SSL_kECDHE,
        SSL_aPSK,
        SSL_AES256,
        SSL_SHA1,
        SSL_HANDSHAKE_MAC_DEFAULT,
    },

    /* ChaCha20-Poly1305 cipher suites. */

#if !defined(BORINGSSL_ANDROID_SYSTEM)
    {
        TLS1_TXT_ECDHE_RSA_WITH_CHACHA20_POLY1305_OLD,
        TLS1_CK_ECDHE_RSA_CHACHA20_POLY1305_OLD,
        SSL_kECDHE,
        SSL_aRSA,
        SSL_CHACHA20POLY1305_OLD,
        SSL_AEAD,
        SSL_HANDSHAKE_MAC_SHA256,
    },

    {
        TLS1_TXT_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_OLD,
        TLS1_CK_ECDHE_ECDSA_CHACHA20_POLY1305_OLD,
        SSL_kECDHE,
        SSL_aECDSA,
        SSL_CHACHA20POLY1305_OLD,
        SSL_AEAD,
        SSL_HANDSHAKE_MAC_SHA256,
    },
#endif

    /* Cipher CCA8 */
    {
        TLS1_TXT_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256,
        TLS1_CK_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256,
        SSL_kECDHE,
        SSL_aRSA,
        SSL_CHACHA20POLY1305,
        SSL_AEAD,
        SSL_HANDSHAKE_MAC_SHA256,
    },

    /* Cipher CCA9 */
    {
        TLS1_TXT_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256,
        TLS1_CK_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256,
        SSL_kECDHE,
        SSL_aECDSA,
        SSL_CHACHA20POLY1305,
        SSL_AEAD,
        SSL_HANDSHAKE_MAC_SHA256,
    },

    /* Cipher CCAB */
    {
        TLS1_TXT_ECDHE_PSK_WITH_CHACHA20_POLY1305_SHA256,
        TLS1_CK_ECDHE_PSK_WITH_CHACHA20_POLY1305_SHA256,
        SSL_kECDHE,
        SSL_aPSK,
        SSL_CHACHA20POLY1305,
        SSL_AEAD,
        SSL_HANDSHAKE_MAC_SHA256,
    },

};

static const size_t kCiphersLen = sizeof(kCiphers) / sizeof(kCiphers[0]);

#define CIPHER_ADD 1
#define CIPHER_KILL 2
#define CIPHER_DEL 3
#define CIPHER_ORD 4
#define CIPHER_SPECIAL 5

typedef struct cipher_order_st {
  const SSL_CIPHER *cipher;
  int active;
  int in_group;
  struct cipher_order_st *next, *prev;
} CIPHER_ORDER;

typedef struct cipher_alias_st {
  /* name is the name of the cipher alias. */
  const char *name;

  /* The following fields are bitmasks for the corresponding fields on
   * |SSL_CIPHER|. A cipher matches a cipher alias iff, for each bitmask, the
   * bit corresponding to the cipher's value is set to 1. If any bitmask is
   * all zeroes, the alias matches nothing. Use |~0u| for the default value. */
  uint32_t algorithm_mkey;
  uint32_t algorithm_auth;
  uint32_t algorithm_enc;
  uint32_t algorithm_mac;

  /* min_version, if non-zero, matches all ciphers which were added in that
   * particular protocol version. */
  uint16_t min_version;
} CIPHER_ALIAS;

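/* As an illustrative sketch (mirroring the checks in ssl_cipher_apply_rule
 * below, not an additional code path), an alias matches a cipher |cp| exactly
 * when every bitmask intersects the corresponding |SSL_CIPHER| field:
 *
 *   (alias.algorithm_mkey & cp->algorithm_mkey) != 0 &&
 *   (alias.algorithm_auth & cp->algorithm_auth) != 0 &&
 *   (alias.algorithm_enc & cp->algorithm_enc) != 0 &&
 *   (alias.algorithm_mac & cp->algorithm_mac) != 0
 *
 * and, when |min_version| is non-zero, the cipher's minimum protocol version
 * must match it as well. */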
static const CIPHER_ALIAS kCipherAliases[] = {
    /* "ALL" doesn't include eNULL nor kCECPQ1. These must be explicitly
     * enabled. */
    {"ALL", ~SSL_kCECPQ1, ~0u, ~SSL_eNULL, ~0u, 0},

    /* The "COMPLEMENTOFDEFAULT" rule is omitted. It matches nothing. */

    /* key exchange aliases
     * (some of those using only a single bit here combine
     * multiple key exchange algs according to the RFCs,
     * e.g. kEDH combines DHE_DSS and DHE_RSA) */
    {"kRSA", SSL_kRSA, ~0u, ~0u, ~0u, 0},

    {"kDHE", SSL_kDHE, ~0u, ~0u, ~0u, 0},
    {"kEDH", SSL_kDHE, ~0u, ~0u, ~0u, 0},
    {"DH", SSL_kDHE, ~0u, ~0u, ~0u, 0},

    {"kECDHE", SSL_kECDHE, ~0u, ~0u, ~0u, 0},
    {"kCECPQ1", SSL_kCECPQ1, ~0u, ~0u, ~0u, 0},
    {"kEECDH", SSL_kECDHE, ~0u, ~0u, ~0u, 0},
    {"ECDH", SSL_kECDHE, ~0u, ~0u, ~0u, 0},

    {"kPSK", SSL_kPSK, ~0u, ~0u, ~0u, 0},

    /* server authentication aliases */
    {"aRSA", ~SSL_kCECPQ1, SSL_aRSA, ~SSL_eNULL, ~0u, 0},
    {"aECDSA", ~SSL_kCECPQ1, SSL_aECDSA, ~0u, ~0u, 0},
    {"ECDSA", ~SSL_kCECPQ1, SSL_aECDSA, ~0u, ~0u, 0},
    {"aPSK", ~0u, SSL_aPSK, ~0u, ~0u, 0},

    /* aliases combining key exchange and server authentication */
    {"DHE", SSL_kDHE, ~0u, ~0u, ~0u, 0},
    {"EDH", SSL_kDHE, ~0u, ~0u, ~0u, 0},
    {"ECDHE", SSL_kECDHE, ~0u, ~0u, ~0u, 0},
    {"EECDH", SSL_kECDHE, ~0u, ~0u, ~0u, 0},
    {"RSA", SSL_kRSA, SSL_aRSA, ~SSL_eNULL, ~0u, 0},
    {"PSK", SSL_kPSK, SSL_aPSK, ~0u, ~0u, 0},

    /* symmetric encryption aliases */
    {"3DES", ~0u, ~0u, SSL_3DES, ~0u, 0},
    {"RC4", ~0u, ~0u, SSL_RC4, ~0u, 0},
    {"AES128", ~0u, ~0u, SSL_AES128 | SSL_AES128GCM, ~0u, 0},
    {"AES256", ~SSL_kCECPQ1, ~0u, SSL_AES256 | SSL_AES256GCM, ~0u, 0},
    {"AES", ~SSL_kCECPQ1, ~0u, SSL_AES, ~0u, 0},
    {"AESGCM", ~SSL_kCECPQ1, ~0u, SSL_AES128GCM | SSL_AES256GCM, ~0u, 0},
    {"CHACHA20", ~SSL_kCECPQ1, ~0u,
     SSL_CHACHA20POLY1305 | SSL_CHACHA20POLY1305_OLD, ~0u, 0},

    /* MAC aliases */
    {"MD5", ~0u, ~0u, ~0u, SSL_MD5, 0},
    {"SHA1", ~0u, ~0u, ~SSL_eNULL, SSL_SHA1, 0},
    {"SHA", ~0u, ~0u, ~SSL_eNULL, SSL_SHA1, 0},
    {"SHA256", ~SSL_kCECPQ1, ~0u, ~0u, SSL_SHA256, 0},
    {"SHA384", ~SSL_kCECPQ1, ~0u, ~0u, SSL_SHA384, 0},

    /* Legacy protocol minimum version aliases. "TLSv1" is intentionally the
     * same as "SSLv3". */
    {"SSLv3", ~SSL_kCECPQ1, ~0u, ~SSL_eNULL, ~0u, SSL3_VERSION},
    {"TLSv1", ~SSL_kCECPQ1, ~0u, ~SSL_eNULL, ~0u, SSL3_VERSION},
    {"TLSv1.2", ~SSL_kCECPQ1, ~0u, ~SSL_eNULL, ~0u, TLS1_2_VERSION},

    /* Legacy strength classes. */
    {"MEDIUM", ~0u, ~0u, SSL_RC4, ~0u, 0},
    {"HIGH", ~SSL_kCECPQ1, ~0u, ~(SSL_eNULL|SSL_RC4), ~0u, 0},
    {"FIPS", ~SSL_kCECPQ1, ~0u, ~(SSL_eNULL|SSL_RC4), ~0u, 0},
};
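/* A minimal worked example (assumed reading of the parser in
 * ssl_cipher_process_rulestr below): in a multipart rule such as
 * "ECDHE+AESGCM", each name ANDs its masks into the running rule, so the
 * combined rule selects ciphers with an ECDHE key exchange and AES-128-GCM or
 * AES-256-GCM encryption. */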

static const size_t kCipherAliasesLen =
    sizeof(kCipherAliases) / sizeof(kCipherAliases[0]);

static int ssl_cipher_id_cmp(const void *in_a, const void *in_b) {
  const SSL_CIPHER *a = in_a;
  const SSL_CIPHER *b = in_b;

  if (a->id > b->id) {
    return 1;
  } else if (a->id < b->id) {
    return -1;
  } else {
    return 0;
  }
}

static int ssl_cipher_ptr_id_cmp(const SSL_CIPHER **a, const SSL_CIPHER **b) {
  return ssl_cipher_id_cmp(*a, *b);
}

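/* Illustrative usage (a sketch based on the lookup below): cipher ids in
 * |kCiphers| carry a 0x0300 prefix, so the standard two-byte value is enough,
 * e.g.
 *
 *   const SSL_CIPHER *c = SSL_get_cipher_by_value(0x002f);
 *
 * returns the TLS_RSA_WITH_AES_128_CBC_SHA entry, and NULL is returned for an
 * unknown value. */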
const SSL_CIPHER *SSL_get_cipher_by_value(uint16_t value) {
  SSL_CIPHER c;

  c.id = 0x03000000L | value;
  return bsearch(&c, kCiphers, kCiphersLen, sizeof(SSL_CIPHER),
                 ssl_cipher_id_cmp);
}

David Benjaminea72bd02014-12-21 21:27:41 -0500795int ssl_cipher_get_evp_aead(const EVP_AEAD **out_aead,
796 size_t *out_mac_secret_len,
797 size_t *out_fixed_iv_len,
798 const SSL_CIPHER *cipher, uint16_t version) {
799 *out_aead = NULL;
800 *out_mac_secret_len = 0;
801 *out_fixed_iv_len = 0;
Adam Langleyc9fb3752014-06-20 12:00:00 -0700802
David Benjaminea72bd02014-12-21 21:27:41 -0500803 switch (cipher->algorithm_enc) {
Adam Langleyfcf25832014-12-18 17:42:32 -0800804 case SSL_AES128GCM:
David Benjaminea72bd02014-12-21 21:27:41 -0500805 *out_aead = EVP_aead_aes_128_gcm();
806 *out_fixed_iv_len = 4;
Adam Langleyfcf25832014-12-18 17:42:32 -0800807 return 1;
808
809 case SSL_AES256GCM:
David Benjaminea72bd02014-12-21 21:27:41 -0500810 *out_aead = EVP_aead_aes_256_gcm();
811 *out_fixed_iv_len = 4;
Adam Langleyfcf25832014-12-18 17:42:32 -0800812 return 1;
813
Adam Langleyd98dc132015-09-23 16:41:33 -0700814#if !defined(BORINGSSL_ANDROID_SYSTEM)
Brian Smith271777f2015-10-03 13:53:33 -1000815 case SSL_CHACHA20POLY1305_OLD:
Brian Smith3e23e4c2015-10-03 11:38:58 -1000816 *out_aead = EVP_aead_chacha20_poly1305_old();
David Benjaminea72bd02014-12-21 21:27:41 -0500817 *out_fixed_iv_len = 0;
Adam Langleyfcf25832014-12-18 17:42:32 -0800818 return 1;
Adam Langleyd98dc132015-09-23 16:41:33 -0700819#endif
Adam Langleyfcf25832014-12-18 17:42:32 -0800820
David Benjamin13414b32015-12-09 23:02:39 -0500821 case SSL_CHACHA20POLY1305:
822 *out_aead = EVP_aead_chacha20_poly1305();
823 *out_fixed_iv_len = 12;
824 return 1;
825
Adam Langleyfcf25832014-12-18 17:42:32 -0800826 case SSL_RC4:
David Benjaminea72bd02014-12-21 21:27:41 -0500827 switch (cipher->algorithm_mac) {
828 case SSL_MD5:
David Benjamin044abb02014-12-23 10:57:17 -0500829 if (version == SSL3_VERSION) {
830 *out_aead = EVP_aead_rc4_md5_ssl3();
831 } else {
832 *out_aead = EVP_aead_rc4_md5_tls();
833 }
David Benjaminea72bd02014-12-21 21:27:41 -0500834 *out_mac_secret_len = MD5_DIGEST_LENGTH;
835 return 1;
836 case SSL_SHA1:
David Benjamin044abb02014-12-23 10:57:17 -0500837 if (version == SSL3_VERSION) {
838 *out_aead = EVP_aead_rc4_sha1_ssl3();
839 } else {
840 *out_aead = EVP_aead_rc4_sha1_tls();
841 }
David Benjaminea72bd02014-12-21 21:27:41 -0500842 *out_mac_secret_len = SHA_DIGEST_LENGTH;
843 return 1;
844 default:
845 return 0;
Adam Langleyfcf25832014-12-18 17:42:32 -0800846 }
Adam Langleyfcf25832014-12-18 17:42:32 -0800847
David Benjaminea72bd02014-12-21 21:27:41 -0500848 case SSL_AES128:
849 switch (cipher->algorithm_mac) {
850 case SSL_SHA1:
David Benjamin044abb02014-12-23 10:57:17 -0500851 if (version == SSL3_VERSION) {
852 *out_aead = EVP_aead_aes_128_cbc_sha1_ssl3();
853 *out_fixed_iv_len = 16;
854 } else if (version == TLS1_VERSION) {
David Benjaminea72bd02014-12-21 21:27:41 -0500855 *out_aead = EVP_aead_aes_128_cbc_sha1_tls_implicit_iv();
856 *out_fixed_iv_len = 16;
857 } else {
858 *out_aead = EVP_aead_aes_128_cbc_sha1_tls();
859 }
860 *out_mac_secret_len = SHA_DIGEST_LENGTH;
861 return 1;
862 case SSL_SHA256:
863 *out_aead = EVP_aead_aes_128_cbc_sha256_tls();
864 *out_mac_secret_len = SHA256_DIGEST_LENGTH;
865 return 1;
866 default:
867 return 0;
868 }
869
870 case SSL_AES256:
871 switch (cipher->algorithm_mac) {
872 case SSL_SHA1:
David Benjamin044abb02014-12-23 10:57:17 -0500873 if (version == SSL3_VERSION) {
874 *out_aead = EVP_aead_aes_256_cbc_sha1_ssl3();
875 *out_fixed_iv_len = 16;
876 } else if (version == TLS1_VERSION) {
David Benjaminea72bd02014-12-21 21:27:41 -0500877 *out_aead = EVP_aead_aes_256_cbc_sha1_tls_implicit_iv();
878 *out_fixed_iv_len = 16;
879 } else {
880 *out_aead = EVP_aead_aes_256_cbc_sha1_tls();
881 }
882 *out_mac_secret_len = SHA_DIGEST_LENGTH;
883 return 1;
884 case SSL_SHA256:
885 *out_aead = EVP_aead_aes_256_cbc_sha256_tls();
886 *out_mac_secret_len = SHA256_DIGEST_LENGTH;
887 return 1;
888 case SSL_SHA384:
889 *out_aead = EVP_aead_aes_256_cbc_sha384_tls();
890 *out_mac_secret_len = SHA384_DIGEST_LENGTH;
891 return 1;
892 default:
893 return 0;
894 }
895
896 case SSL_3DES:
897 switch (cipher->algorithm_mac) {
898 case SSL_SHA1:
David Benjamin044abb02014-12-23 10:57:17 -0500899 if (version == SSL3_VERSION) {
900 *out_aead = EVP_aead_des_ede3_cbc_sha1_ssl3();
901 *out_fixed_iv_len = 8;
902 } else if (version == TLS1_VERSION) {
David Benjaminea72bd02014-12-21 21:27:41 -0500903 *out_aead = EVP_aead_des_ede3_cbc_sha1_tls_implicit_iv();
904 *out_fixed_iv_len = 8;
905 } else {
906 *out_aead = EVP_aead_des_ede3_cbc_sha1_tls();
907 }
908 *out_mac_secret_len = SHA_DIGEST_LENGTH;
909 return 1;
910 default:
911 return 0;
912 }
913
Matt Braithwaiteaf096752015-09-02 19:48:16 -0700914 case SSL_eNULL:
915 switch (cipher->algorithm_mac) {
916 case SSL_SHA1:
917 if (version == SSL3_VERSION) {
918 *out_aead = EVP_aead_null_sha1_ssl3();
919 } else {
920 *out_aead = EVP_aead_null_sha1_tls();
921 }
922 *out_mac_secret_len = SHA_DIGEST_LENGTH;
923 return 1;
924 default:
925 return 0;
926 }
927
David Benjaminea72bd02014-12-21 21:27:41 -0500928 default:
929 return 0;
930 }
Adam Langleyfcf25832014-12-18 17:42:32 -0800931}
Adam Langleyc9fb3752014-06-20 12:00:00 -0700932
David Benjaminb0883312015-08-06 09:54:13 -0400933const EVP_MD *ssl_get_handshake_digest(uint32_t algorithm_prf) {
934 switch (algorithm_prf) {
935 case SSL_HANDSHAKE_MAC_DEFAULT:
936 return EVP_sha1();
937 case SSL_HANDSHAKE_MAC_SHA256:
938 return EVP_sha256();
939 case SSL_HANDSHAKE_MAC_SHA384:
940 return EVP_sha384();
941 default:
942 return NULL;
Adam Langleyfcf25832014-12-18 17:42:32 -0800943 }
Adam Langley95c29f32014-06-20 12:00:00 -0700944}
945
946#define ITEM_SEP(a) \
Adam Langleyfcf25832014-12-18 17:42:32 -0800947 (((a) == ':') || ((a) == ' ') || ((a) == ';') || ((a) == ','))
Adam Langley95c29f32014-06-20 12:00:00 -0700948
David Benjamin0344daf2015-04-08 02:08:01 -0400949/* rule_equals returns one iff the NUL-terminated string |rule| is equal to the
950 * |buf_len| bytes at |buf|. */
951static int rule_equals(const char *rule, const char *buf, size_t buf_len) {
952 /* |strncmp| alone only checks that |buf| is a prefix of |rule|. */
953 return strncmp(rule, buf, buf_len) == 0 && rule[buf_len] == '\0';
954}
955
Adam Langley95c29f32014-06-20 12:00:00 -0700956static void ll_append_tail(CIPHER_ORDER **head, CIPHER_ORDER *curr,
Adam Langleyfcf25832014-12-18 17:42:32 -0800957 CIPHER_ORDER **tail) {
958 if (curr == *tail) {
959 return;
960 }
961 if (curr == *head) {
962 *head = curr->next;
963 }
964 if (curr->prev != NULL) {
965 curr->prev->next = curr->next;
966 }
967 if (curr->next != NULL) {
968 curr->next->prev = curr->prev;
969 }
970 (*tail)->next = curr;
971 curr->prev = *tail;
972 curr->next = NULL;
973 *tail = curr;
974}
Adam Langley95c29f32014-06-20 12:00:00 -0700975
976static void ll_append_head(CIPHER_ORDER **head, CIPHER_ORDER *curr,
Adam Langleyfcf25832014-12-18 17:42:32 -0800977 CIPHER_ORDER **tail) {
978 if (curr == *head) {
979 return;
980 }
981 if (curr == *tail) {
982 *tail = curr->prev;
983 }
984 if (curr->next != NULL) {
985 curr->next->prev = curr->prev;
986 }
987 if (curr->prev != NULL) {
988 curr->prev->next = curr->next;
989 }
990 (*head)->prev = curr;
991 curr->next = *head;
992 curr->prev = NULL;
993 *head = curr;
994}
Adam Langley95c29f32014-06-20 12:00:00 -0700995
David Benjamin82c9e902014-12-12 15:55:27 -0500996static void ssl_cipher_collect_ciphers(const SSL_PROTOCOL_METHOD *ssl_method,
Adam Langleyfcf25832014-12-18 17:42:32 -0800997 CIPHER_ORDER *co_list,
998 CIPHER_ORDER **head_p,
999 CIPHER_ORDER **tail_p) {
David Benjamina1c90a52015-05-30 17:03:14 -04001000 /* The set of ciphers is static, but some subset may be unsupported by
1001 * |ssl_method|, so the list may be smaller. */
1002 size_t co_list_num = 0;
1003 size_t i;
1004 for (i = 0; i < kCiphersLen; i++) {
1005 const SSL_CIPHER *cipher = &kCiphers[i];
1006 if (ssl_method->supports_cipher(cipher)) {
1007 co_list[co_list_num].cipher = cipher;
Adam Langleyfcf25832014-12-18 17:42:32 -08001008 co_list[co_list_num].next = NULL;
1009 co_list[co_list_num].prev = NULL;
1010 co_list[co_list_num].active = 0;
1011 co_list[co_list_num].in_group = 0;
1012 co_list_num++;
1013 }
1014 }
Adam Langley95c29f32014-06-20 12:00:00 -07001015
Adam Langleyfcf25832014-12-18 17:42:32 -08001016 /* Prepare linked list from list entries. */
1017 if (co_list_num > 0) {
1018 co_list[0].prev = NULL;
Adam Langley95c29f32014-06-20 12:00:00 -07001019
Adam Langleyfcf25832014-12-18 17:42:32 -08001020 if (co_list_num > 1) {
1021 co_list[0].next = &co_list[1];
Adam Langley95c29f32014-06-20 12:00:00 -07001022
Adam Langleyfcf25832014-12-18 17:42:32 -08001023 for (i = 1; i < co_list_num - 1; i++) {
1024 co_list[i].prev = &co_list[i - 1];
1025 co_list[i].next = &co_list[i + 1];
1026 }
Adam Langley95c29f32014-06-20 12:00:00 -07001027
Adam Langleyfcf25832014-12-18 17:42:32 -08001028 co_list[co_list_num - 1].prev = &co_list[co_list_num - 2];
1029 }
1030
1031 co_list[co_list_num - 1].next = NULL;
1032
1033 *head_p = &co_list[0];
1034 *tail_p = &co_list[co_list_num - 1];
1035 }
1036}
Adam Langley95c29f32014-06-20 12:00:00 -07001037
David Benjamin0344daf2015-04-08 02:08:01 -04001038/* ssl_cipher_apply_rule applies the rule type |rule| to ciphers matching its
1039 * parameters in the linked list from |*head_p| to |*tail_p|. It writes the new
1040 * head and tail of the list to |*head_p| and |*tail_p|, respectively.
1041 *
1042 * - If |cipher_id| is non-zero, only that cipher is selected.
1043 * - Otherwise, if |strength_bits| is non-negative, it selects ciphers
1044 * of that strength.
David Benjamind6e9eec2015-11-18 09:48:55 -05001045 * - Otherwise, it selects ciphers that match each bitmasks in |alg_*| and
David Benjamindcb6ef02015-11-06 15:35:54 -05001046 * |min_version|. */
Adam Langleyfcf25832014-12-18 17:42:32 -08001047static void ssl_cipher_apply_rule(
David Benjamin107db582015-04-08 00:41:59 -04001048 uint32_t cipher_id, uint32_t alg_mkey, uint32_t alg_auth,
David Benjamind6e9eec2015-11-18 09:48:55 -05001049 uint32_t alg_enc, uint32_t alg_mac, uint16_t min_version, int rule,
1050 int strength_bits, int in_group, CIPHER_ORDER **head_p,
1051 CIPHER_ORDER **tail_p) {
Adam Langleyfcf25832014-12-18 17:42:32 -08001052 CIPHER_ORDER *head, *tail, *curr, *next, *last;
1053 const SSL_CIPHER *cp;
1054 int reverse = 0;
Adam Langley95c29f32014-06-20 12:00:00 -07001055
David Benjamindcb6ef02015-11-06 15:35:54 -05001056 if (cipher_id == 0 && strength_bits == -1 && min_version == 0 &&
David Benjamind6e9eec2015-11-18 09:48:55 -05001057 (alg_mkey == 0 || alg_auth == 0 || alg_enc == 0 || alg_mac == 0)) {
David Benjamin0344daf2015-04-08 02:08:01 -04001058 /* The rule matches nothing, so bail early. */
1059 return;
1060 }
1061
Adam Langleyfcf25832014-12-18 17:42:32 -08001062 if (rule == CIPHER_DEL) {
1063 /* needed to maintain sorting between currently deleted ciphers */
1064 reverse = 1;
1065 }
Adam Langley95c29f32014-06-20 12:00:00 -07001066
Adam Langleyfcf25832014-12-18 17:42:32 -08001067 head = *head_p;
1068 tail = *tail_p;
Adam Langley95c29f32014-06-20 12:00:00 -07001069
Adam Langleyfcf25832014-12-18 17:42:32 -08001070 if (reverse) {
1071 next = tail;
1072 last = head;
1073 } else {
1074 next = head;
1075 last = tail;
1076 }
Adam Langley95c29f32014-06-20 12:00:00 -07001077
Adam Langleyfcf25832014-12-18 17:42:32 -08001078 curr = NULL;
1079 for (;;) {
1080 if (curr == last) {
1081 break;
1082 }
Adam Langley95c29f32014-06-20 12:00:00 -07001083
Adam Langleyfcf25832014-12-18 17:42:32 -08001084 curr = next;
1085 if (curr == NULL) {
1086 break;
1087 }
Adam Langleye3142a72014-07-24 17:56:48 -07001088
Adam Langleyfcf25832014-12-18 17:42:32 -08001089 next = reverse ? curr->prev : curr->next;
1090 cp = curr->cipher;
Adam Langleye3142a72014-07-24 17:56:48 -07001091
David Benjamin0344daf2015-04-08 02:08:01 -04001092 /* Selection criteria is either a specific cipher, the value of
1093 * |strength_bits|, or the algorithms used. */
1094 if (cipher_id != 0) {
1095 if (cipher_id != cp->id) {
1096 continue;
1097 }
1098 } else if (strength_bits >= 0) {
David Benjamin9f2e2772015-11-18 09:59:43 -05001099 if (strength_bits != SSL_CIPHER_get_bits(cp, NULL)) {
Adam Langleyfcf25832014-12-18 17:42:32 -08001100 continue;
1101 }
David Benjamin0344daf2015-04-08 02:08:01 -04001102 } else if (!(alg_mkey & cp->algorithm_mkey) ||
1103 !(alg_auth & cp->algorithm_auth) ||
1104 !(alg_enc & cp->algorithm_enc) ||
1105 !(alg_mac & cp->algorithm_mac) ||
David Benjamindcb6ef02015-11-06 15:35:54 -05001106 (min_version != 0 &&
1107 SSL_CIPHER_get_min_version(cp) != min_version)) {
David Benjamin0344daf2015-04-08 02:08:01 -04001108 continue;
Adam Langleyfcf25832014-12-18 17:42:32 -08001109 }
Adam Langleye3142a72014-07-24 17:56:48 -07001110
Adam Langleyfcf25832014-12-18 17:42:32 -08001111 /* add the cipher if it has not been added yet. */
1112 if (rule == CIPHER_ADD) {
1113 /* reverse == 0 */
1114 if (!curr->active) {
1115 ll_append_tail(&head, curr, &tail);
1116 curr->active = 1;
1117 curr->in_group = in_group;
1118 }
1119 }
Adam Langley95c29f32014-06-20 12:00:00 -07001120
Adam Langleyfcf25832014-12-18 17:42:32 -08001121 /* Move the added cipher to this location */
1122 else if (rule == CIPHER_ORD) {
1123 /* reverse == 0 */
1124 if (curr->active) {
1125 ll_append_tail(&head, curr, &tail);
1126 curr->in_group = 0;
1127 }
1128 } else if (rule == CIPHER_DEL) {
1129 /* reverse == 1 */
1130 if (curr->active) {
1131 /* most recently deleted ciphersuites get best positions
1132 * for any future CIPHER_ADD (note that the CIPHER_DEL loop
1133 * works in reverse to maintain the order) */
1134 ll_append_head(&head, curr, &tail);
1135 curr->active = 0;
1136 curr->in_group = 0;
1137 }
1138 } else if (rule == CIPHER_KILL) {
1139 /* reverse == 0 */
1140 if (head == curr) {
1141 head = curr->next;
1142 } else {
1143 curr->prev->next = curr->next;
1144 }
Adam Langley95c29f32014-06-20 12:00:00 -07001145
Adam Langleyfcf25832014-12-18 17:42:32 -08001146 if (tail == curr) {
1147 tail = curr->prev;
1148 }
1149 curr->active = 0;
1150 if (curr->next != NULL) {
1151 curr->next->prev = curr->prev;
1152 }
1153 if (curr->prev != NULL) {
1154 curr->prev->next = curr->next;
1155 }
1156 curr->next = NULL;
1157 curr->prev = NULL;
1158 }
1159 }
Adam Langley95c29f32014-06-20 12:00:00 -07001160
Adam Langleyfcf25832014-12-18 17:42:32 -08001161 *head_p = head;
1162 *tail_p = tail;
1163}
Adam Langley95c29f32014-06-20 12:00:00 -07001164
1165static int ssl_cipher_strength_sort(CIPHER_ORDER **head_p,
Adam Langleyfcf25832014-12-18 17:42:32 -08001166 CIPHER_ORDER **tail_p) {
1167 int max_strength_bits, i, *number_uses;
1168 CIPHER_ORDER *curr;
Adam Langley95c29f32014-06-20 12:00:00 -07001169
Adam Langleyfcf25832014-12-18 17:42:32 -08001170 /* This routine sorts the ciphers with descending strength. The sorting must
1171 * keep the pre-sorted sequence, so we apply the normal sorting routine as
1172 * '+' movement to the end of the list. */
1173 max_strength_bits = 0;
1174 curr = *head_p;
1175 while (curr != NULL) {
David Benjamin9f2e2772015-11-18 09:59:43 -05001176 if (curr->active &&
1177 SSL_CIPHER_get_bits(curr->cipher, NULL) > max_strength_bits) {
1178 max_strength_bits = SSL_CIPHER_get_bits(curr->cipher, NULL);
Adam Langleyfcf25832014-12-18 17:42:32 -08001179 }
1180 curr = curr->next;
1181 }
Adam Langley95c29f32014-06-20 12:00:00 -07001182
Adam Langleyfcf25832014-12-18 17:42:32 -08001183 number_uses = OPENSSL_malloc((max_strength_bits + 1) * sizeof(int));
1184 if (!number_uses) {
David Benjamin3570d732015-06-29 00:28:17 -04001185 OPENSSL_PUT_ERROR(SSL, ERR_R_MALLOC_FAILURE);
Adam Langleyfcf25832014-12-18 17:42:32 -08001186 return 0;
1187 }
1188 memset(number_uses, 0, (max_strength_bits + 1) * sizeof(int));
Adam Langley95c29f32014-06-20 12:00:00 -07001189
Adam Langleyfcf25832014-12-18 17:42:32 -08001190 /* Now find the strength_bits values actually used. */
1191 curr = *head_p;
1192 while (curr != NULL) {
1193 if (curr->active) {
David Benjamin9f2e2772015-11-18 09:59:43 -05001194 number_uses[SSL_CIPHER_get_bits(curr->cipher, NULL)]++;
Adam Langleyfcf25832014-12-18 17:42:32 -08001195 }
1196 curr = curr->next;
1197 }
Adam Langley95c29f32014-06-20 12:00:00 -07001198
Adam Langleyfcf25832014-12-18 17:42:32 -08001199 /* Go through the list of used strength_bits values in descending order. */
1200 for (i = max_strength_bits; i >= 0; i--) {
1201 if (number_uses[i] > 0) {
David Benjamind6e9eec2015-11-18 09:48:55 -05001202 ssl_cipher_apply_rule(0, 0, 0, 0, 0, 0, CIPHER_ORD, i, 0, head_p, tail_p);
Adam Langleyfcf25832014-12-18 17:42:32 -08001203 }
1204 }
1205
1206 OPENSSL_free(number_uses);
1207 return 1;
1208}
Adam Langley95c29f32014-06-20 12:00:00 -07001209
David Benjamin0344daf2015-04-08 02:08:01 -04001210static int ssl_cipher_process_rulestr(const SSL_PROTOCOL_METHOD *ssl_method,
1211 const char *rule_str,
Adam Langleyfcf25832014-12-18 17:42:32 -08001212 CIPHER_ORDER **head_p,
David Benjamin0344daf2015-04-08 02:08:01 -04001213 CIPHER_ORDER **tail_p) {
David Benjamind6e9eec2015-11-18 09:48:55 -05001214 uint32_t alg_mkey, alg_auth, alg_enc, alg_mac;
David Benjamindcb6ef02015-11-06 15:35:54 -05001215 uint16_t min_version;
Adam Langleyfcf25832014-12-18 17:42:32 -08001216 const char *l, *buf;
David Benjamindcb6ef02015-11-06 15:35:54 -05001217 int multi, skip_rule, rule, retval, ok, in_group = 0, has_group = 0;
David Benjamin0344daf2015-04-08 02:08:01 -04001218 size_t j, buf_len;
1219 uint32_t cipher_id;
Adam Langleyfcf25832014-12-18 17:42:32 -08001220 char ch;
Adam Langley95c29f32014-06-20 12:00:00 -07001221
Adam Langleyfcf25832014-12-18 17:42:32 -08001222 retval = 1;
1223 l = rule_str;
1224 for (;;) {
1225 ch = *l;
Adam Langley95c29f32014-06-20 12:00:00 -07001226
Adam Langleyfcf25832014-12-18 17:42:32 -08001227 if (ch == '\0') {
1228 break; /* done */
1229 }
Adam Langley95c29f32014-06-20 12:00:00 -07001230
Adam Langleyfcf25832014-12-18 17:42:32 -08001231 if (in_group) {
1232 if (ch == ']') {
Adam Langleyfcf25832014-12-18 17:42:32 -08001233 if (*tail_p) {
1234 (*tail_p)->in_group = 0;
1235 }
1236 in_group = 0;
1237 l++;
1238 continue;
1239 }
David Benjamin37d92462014-09-20 17:54:24 -04001240
Adam Langleyfcf25832014-12-18 17:42:32 -08001241 if (ch == '|') {
1242 rule = CIPHER_ADD;
1243 l++;
1244 continue;
1245 } else if (!(ch >= 'a' && ch <= 'z') && !(ch >= 'A' && ch <= 'Z') &&
1246 !(ch >= '0' && ch <= '9')) {
David Benjamin3570d732015-06-29 00:28:17 -04001247 OPENSSL_PUT_ERROR(SSL, SSL_R_UNEXPECTED_OPERATOR_IN_GROUP);
David Benjamin0344daf2015-04-08 02:08:01 -04001248 retval = in_group = 0;
Adam Langleyfcf25832014-12-18 17:42:32 -08001249 break;
1250 } else {
1251 rule = CIPHER_ADD;
1252 }
1253 } else if (ch == '-') {
1254 rule = CIPHER_DEL;
1255 l++;
1256 } else if (ch == '+') {
1257 rule = CIPHER_ORD;
1258 l++;
1259 } else if (ch == '!') {
1260 rule = CIPHER_KILL;
1261 l++;
1262 } else if (ch == '@') {
1263 rule = CIPHER_SPECIAL;
1264 l++;
1265 } else if (ch == '[') {
1266 if (in_group) {
David Benjamin3570d732015-06-29 00:28:17 -04001267 OPENSSL_PUT_ERROR(SSL, SSL_R_NESTED_GROUP);
David Benjamin0344daf2015-04-08 02:08:01 -04001268 retval = in_group = 0;
Adam Langleyfcf25832014-12-18 17:42:32 -08001269 break;
1270 }
1271 in_group = 1;
1272 has_group = 1;
1273 l++;
1274 continue;
1275 } else {
1276 rule = CIPHER_ADD;
1277 }
Adam Langley95c29f32014-06-20 12:00:00 -07001278
Adam Langleyfcf25832014-12-18 17:42:32 -08001279 /* If preference groups are enabled, the only legal operator is +.
1280 * Otherwise the in_group bits will get mixed up. */
1281 if (has_group && rule != CIPHER_ADD) {
David Benjamin3570d732015-06-29 00:28:17 -04001282 OPENSSL_PUT_ERROR(SSL, SSL_R_MIXED_SPECIAL_OPERATOR_WITH_GROUPS);
David Benjamin0344daf2015-04-08 02:08:01 -04001283 retval = in_group = 0;
Adam Langleyfcf25832014-12-18 17:42:32 -08001284 break;
1285 }
Adam Langley95c29f32014-06-20 12:00:00 -07001286
Adam Langleyfcf25832014-12-18 17:42:32 -08001287 if (ITEM_SEP(ch)) {
1288 l++;
1289 continue;
1290 }
Adam Langley95c29f32014-06-20 12:00:00 -07001291
David Benjamin0344daf2015-04-08 02:08:01 -04001292 multi = 0;
1293 cipher_id = 0;
1294 alg_mkey = ~0u;
1295 alg_auth = ~0u;
1296 alg_enc = ~0u;
1297 alg_mac = ~0u;
David Benjamindcb6ef02015-11-06 15:35:54 -05001298 min_version = 0;
1299 skip_rule = 0;
Adam Langley95c29f32014-06-20 12:00:00 -07001300
Adam Langleyfcf25832014-12-18 17:42:32 -08001301 for (;;) {
1302 ch = *l;
1303 buf = l;
David Benjamin0344daf2015-04-08 02:08:01 -04001304 buf_len = 0;
Adam Langleyfcf25832014-12-18 17:42:32 -08001305 while (((ch >= 'A') && (ch <= 'Z')) || ((ch >= '0') && (ch <= '9')) ||
1306 ((ch >= 'a') && (ch <= 'z')) || (ch == '-') || (ch == '.')) {
1307 ch = *(++l);
David Benjamin0344daf2015-04-08 02:08:01 -04001308 buf_len++;
Adam Langleyfcf25832014-12-18 17:42:32 -08001309 }
Adam Langley95c29f32014-06-20 12:00:00 -07001310
David Benjamin0344daf2015-04-08 02:08:01 -04001311 if (buf_len == 0) {
Adam Langleyfcf25832014-12-18 17:42:32 -08001312 /* We hit something we cannot deal with, it is no command or separator
1313 * nor alphanumeric, so we call this an error. */
David Benjamin3570d732015-06-29 00:28:17 -04001314 OPENSSL_PUT_ERROR(SSL, SSL_R_INVALID_COMMAND);
David Benjamin0344daf2015-04-08 02:08:01 -04001315 retval = in_group = 0;
Adam Langleyfcf25832014-12-18 17:42:32 -08001316 l++;
1317 break;
1318 }
Adam Langley95c29f32014-06-20 12:00:00 -07001319
Adam Langleyfcf25832014-12-18 17:42:32 -08001320 if (rule == CIPHER_SPECIAL) {
Adam Langleyfcf25832014-12-18 17:42:32 -08001321 break;
1322 }
David Benjamin0344daf2015-04-08 02:08:01 -04001323
1324 /* Look for a matching exact cipher. These aren't allowed in multipart
1325 * rules. */
      if (!multi && ch != '+') {
        for (j = 0; j < kCiphersLen; j++) {
          const SSL_CIPHER *cipher = &kCiphers[j];
          if (rule_equals(cipher->name, buf, buf_len)) {
            cipher_id = cipher->id;
            break;
          }
        }
      }
      if (cipher_id == 0) {
        /* If not an exact cipher, look for a matching cipher alias. */
        for (j = 0; j < kCipherAliasesLen; j++) {
          if (rule_equals(kCipherAliases[j].name, buf, buf_len)) {
            alg_mkey &= kCipherAliases[j].algorithm_mkey;
            alg_auth &= kCipherAliases[j].algorithm_auth;
            alg_enc &= kCipherAliases[j].algorithm_enc;
            alg_mac &= kCipherAliases[j].algorithm_mac;

            if (min_version != 0 &&
                min_version != kCipherAliases[j].min_version) {
              skip_rule = 1;
            } else {
              min_version = kCipherAliases[j].min_version;
            }
            break;
          }
        }
        if (j == kCipherAliasesLen) {
          skip_rule = 1;
        }
      }

      /* Check for a multipart rule. */
      if (ch != '+') {
        break;
      }
      l++;
      multi = 1;
    }

    /* If one of the CHACHA20_POLY1305 variants is selected, include the other
     * as well. They have the same name to avoid requiring changes in
     * configuration. Apply this transformation late so that the cipher name
     * still behaves as an exact name and not an alias in multipart rules.
     *
     * This is temporary and will be removed when the pre-standard construction
     * is removed. */
    if (cipher_id == TLS1_CK_ECDHE_RSA_CHACHA20_POLY1305_OLD ||
        cipher_id == TLS1_CK_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256) {
      cipher_id = 0;
      alg_mkey = SSL_kECDHE;
      alg_auth = SSL_aRSA;
      alg_enc = SSL_CHACHA20POLY1305 | SSL_CHACHA20POLY1305_OLD;
      alg_mac = SSL_AEAD;
    } else if (cipher_id == TLS1_CK_ECDHE_ECDSA_CHACHA20_POLY1305_OLD ||
               cipher_id == TLS1_CK_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256) {
      cipher_id = 0;
      alg_mkey = SSL_kECDHE;
      alg_auth = SSL_aECDSA;
      alg_enc = SSL_CHACHA20POLY1305 | SSL_CHACHA20POLY1305_OLD;
      alg_mac = SSL_AEAD;
    }

    /* Ok, we have the rule, now apply it. */
    if (rule == CIPHER_SPECIAL) {
      /* special command */
      ok = 0;
      if (buf_len == 8 && !strncmp(buf, "STRENGTH", 8)) {
        ok = ssl_cipher_strength_sort(head_p, tail_p);
      } else {
        OPENSSL_PUT_ERROR(SSL, SSL_R_INVALID_COMMAND);
      }

      if (ok == 0) {
        retval = 0;
      }

      /* We do not support any "multi" options together with "@", so throw away
       * the rest of the command, if any left, until end or ':' is found. */
      while (*l != '\0' && !ITEM_SEP(*l)) {
        l++;
      }
    } else if (!skip_rule) {
      ssl_cipher_apply_rule(cipher_id, alg_mkey, alg_auth, alg_enc, alg_mac,
                            min_version, rule, -1, in_group, head_p, tail_p);
    }
  }

  if (in_group) {
    OPENSSL_PUT_ERROR(SSL, SSL_R_INVALID_COMMAND);
    retval = 0;
  }

  return retval;
}

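/* ssl_create_cipher_list applies |rule_str| (e.g. "DEFAULT:!RC4", or a
 * multipart rule such as "ECDHE+AESGCM", whose alias masks are ANDed together
 * by ssl_cipher_process_rulestr above) on top of the built-in preference
 * order assembled in this function.
 *
 * Illustrative usage sketch; the |ctx| fields named below are assumptions
 * standing in for whatever object owns the preference lists:
 *
 *   STACK_OF(SSL_CIPHER) *ciphers = ssl_create_cipher_list(
 *       ctx->method, &ctx->cipher_list, &ctx->cipher_list_by_id, rule_str);
 *   if (ciphers == NULL) {
 *     return 0;
 *   }
 *
 * NULL is returned when the rule string is invalid or allocation fails. */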
STACK_OF(SSL_CIPHER) *
ssl_create_cipher_list(const SSL_PROTOCOL_METHOD *ssl_method,
                       struct ssl_cipher_preference_list_st **out_cipher_list,
                       STACK_OF(SSL_CIPHER) **out_cipher_list_by_id,
                       const char *rule_str) {
  int ok;
  STACK_OF(SSL_CIPHER) *cipherstack = NULL, *tmp_cipher_list = NULL;
  const char *rule_p;
  CIPHER_ORDER *co_list = NULL, *head = NULL, *tail = NULL, *curr;
  uint8_t *in_group_flags = NULL;
  unsigned int num_in_group_flags = 0;
  struct ssl_cipher_preference_list_st *pref_list = NULL;

  /* Return with error if nothing to do. */
  if (rule_str == NULL || out_cipher_list == NULL) {
    return NULL;
  }

  /* Now we have to collect the available ciphers from the compiled-in
   * ciphers. We cannot get more than the number compiled in, so that is used
   * for allocation. */
  co_list = OPENSSL_malloc(sizeof(CIPHER_ORDER) * kCiphersLen);
  if (co_list == NULL) {
    OPENSSL_PUT_ERROR(SSL, ERR_R_MALLOC_FAILURE);
    return NULL;
  }

  ssl_cipher_collect_ciphers(ssl_method, co_list, &head, &tail);

  /* Now arrange all ciphers by preference.
   * TODO(davidben): Compute this order once and copy it. */

  /* Everything else being equal, prefer ECDHE_ECDSA and then ECDHE_RSA over
   * other key exchange mechanisms. */

  ssl_cipher_apply_rule(0, SSL_kECDHE, SSL_aECDSA, ~0u, ~0u, 0, CIPHER_ADD, -1,
                        0, &head, &tail);
  ssl_cipher_apply_rule(0, SSL_kECDHE, ~0u, ~0u, ~0u, 0, CIPHER_ADD, -1, 0,
                        &head, &tail);
  ssl_cipher_apply_rule(0, SSL_kECDHE, ~0u, ~0u, ~0u, 0, CIPHER_DEL, -1, 0,
                        &head, &tail);

  /* Order the bulk ciphers. First the preferred AEAD ciphers. We prefer
   * CHACHA20 unless there is hardware support for fast and constant-time
   * AES_GCM. Of the two CHACHA20 variants, the new one is preferred over the
   * old one. */
  if (EVP_has_aes_hardware()) {
    ssl_cipher_apply_rule(0, ~0u, ~0u, SSL_AES128GCM, ~0u, 0, CIPHER_ADD, -1, 0,
                          &head, &tail);
    ssl_cipher_apply_rule(0, ~0u, ~0u, SSL_AES256GCM, ~0u, 0, CIPHER_ADD, -1, 0,
                          &head, &tail);
    ssl_cipher_apply_rule(0, ~0u, ~0u, SSL_CHACHA20POLY1305, ~0u, 0, CIPHER_ADD,
                          -1, 0, &head, &tail);
    ssl_cipher_apply_rule(0, ~0u, ~0u, SSL_CHACHA20POLY1305_OLD, ~0u, 0,
                          CIPHER_ADD, -1, 0, &head, &tail);
  } else {
    ssl_cipher_apply_rule(0, ~0u, ~0u, SSL_CHACHA20POLY1305, ~0u, 0, CIPHER_ADD,
                          -1, 0, &head, &tail);
    ssl_cipher_apply_rule(0, ~0u, ~0u, SSL_CHACHA20POLY1305_OLD, ~0u, 0,
                          CIPHER_ADD, -1, 0, &head, &tail);
    ssl_cipher_apply_rule(0, ~0u, ~0u, SSL_AES128GCM, ~0u, 0, CIPHER_ADD, -1, 0,
                          &head, &tail);
    ssl_cipher_apply_rule(0, ~0u, ~0u, SSL_AES256GCM, ~0u, 0, CIPHER_ADD, -1, 0,
                          &head, &tail);
  }

  /* Then the legacy non-AEAD ciphers: AES_128_CBC, AES_256_CBC,
   * 3DES_EDE_CBC_SHA, RC4_128_SHA, RC4_128_MD5. */
  ssl_cipher_apply_rule(0, ~0u, ~0u, SSL_AES128, ~0u, 0, CIPHER_ADD, -1, 0,
                        &head, &tail);
  ssl_cipher_apply_rule(0, ~0u, ~0u, SSL_AES256, ~0u, 0, CIPHER_ADD, -1, 0,
                        &head, &tail);
  ssl_cipher_apply_rule(0, ~0u, ~0u, SSL_3DES, ~0u, 0, CIPHER_ADD, -1, 0, &head,
                        &tail);
  ssl_cipher_apply_rule(0, ~0u, ~0u, SSL_RC4, ~SSL_MD5, 0, CIPHER_ADD, -1, 0,
                        &head, &tail);
  ssl_cipher_apply_rule(0, ~0u, ~0u, SSL_RC4, SSL_MD5, 0, CIPHER_ADD, -1, 0,
                        &head, &tail);

  /* Temporarily enable everything else for sorting. */
  ssl_cipher_apply_rule(0, ~0u, ~0u, ~0u, ~0u, 0, CIPHER_ADD, -1, 0, &head,
                        &tail);

  /* Move ciphers without forward secrecy to the end. */
  ssl_cipher_apply_rule(0, ~(SSL_kDHE | SSL_kECDHE), ~0u, ~0u, ~0u, 0,
                        CIPHER_ORD, -1, 0, &head, &tail);

  /* Now disable everything (maintaining the ordering!). */
  ssl_cipher_apply_rule(0, ~0u, ~0u, ~0u, ~0u, 0, CIPHER_DEL, -1, 0, &head,
                        &tail);

  /* If |rule_str| begins with DEFAULT, apply the default rule before using the
   * (possibly available) additional rules. */
  ok = 1;
  rule_p = rule_str;
  if (strncmp(rule_str, "DEFAULT", 7) == 0) {
    ok = ssl_cipher_process_rulestr(ssl_method, SSL_DEFAULT_CIPHER_LIST, &head,
                                    &tail);
    rule_p += 7;
    if (*rule_p == ':') {
      rule_p++;
    }
  }

  if (ok && strlen(rule_p) > 0) {
    ok = ssl_cipher_process_rulestr(ssl_method, rule_p, &head, &tail);
  }

  if (!ok) {
    goto err;
  }

  /* Allocate a new "cipherstack" for the result; return with an error if we
   * cannot get one. */
  cipherstack = sk_SSL_CIPHER_new_null();
  if (cipherstack == NULL) {
    goto err;
  }

  in_group_flags = OPENSSL_malloc(kCiphersLen);
  if (!in_group_flags) {
    goto err;
  }

  /* The cipher selection for the list is done. The ciphers are added, in
   * order of preference, to the resulting STACK_OF(SSL_CIPHER). */
  for (curr = head; curr != NULL; curr = curr->next) {
    if (curr->active) {
      if (!sk_SSL_CIPHER_push(cipherstack, curr->cipher)) {
        goto err;
      }
      in_group_flags[num_in_group_flags++] = curr->in_group;
    }
  }
  OPENSSL_free(co_list); /* Not needed any longer. */
  co_list = NULL;

  tmp_cipher_list = sk_SSL_CIPHER_dup(cipherstack);
  if (tmp_cipher_list == NULL) {
    goto err;
  }
  pref_list = OPENSSL_malloc(sizeof(struct ssl_cipher_preference_list_st));
  if (!pref_list) {
    goto err;
  }
  pref_list->ciphers = cipherstack;
  pref_list->in_group_flags = OPENSSL_malloc(num_in_group_flags);
  if (!pref_list->in_group_flags) {
    goto err;
  }
  memcpy(pref_list->in_group_flags, in_group_flags, num_in_group_flags);
  OPENSSL_free(in_group_flags);
  in_group_flags = NULL;
  if (*out_cipher_list != NULL) {
    ssl_cipher_preference_list_free(*out_cipher_list);
  }
  *out_cipher_list = pref_list;
  pref_list = NULL;

  if (out_cipher_list_by_id != NULL) {
    sk_SSL_CIPHER_free(*out_cipher_list_by_id);
    *out_cipher_list_by_id = tmp_cipher_list;
    tmp_cipher_list = NULL;
    (void)sk_SSL_CIPHER_set_cmp_func(*out_cipher_list_by_id,
                                     ssl_cipher_ptr_id_cmp);

    sk_SSL_CIPHER_sort(*out_cipher_list_by_id);
  } else {
    sk_SSL_CIPHER_free(tmp_cipher_list);
    tmp_cipher_list = NULL;
  }

  return cipherstack;

err:
  OPENSSL_free(co_list);
  OPENSSL_free(in_group_flags);
  sk_SSL_CIPHER_free(cipherstack);
  sk_SSL_CIPHER_free(tmp_cipher_list);
  if (pref_list) {
    OPENSSL_free(pref_list->in_group_flags);
  }
  OPENSSL_free(pref_list);
  return NULL;
}

uint32_t SSL_CIPHER_get_id(const SSL_CIPHER *cipher) { return cipher->id; }

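/* Illustrative note: the high two bytes of |id| are always 0x0300 (checked by
 * the assert below), so this returns the standard two-byte cipher suite
 * value. For example, assuming the usual registry value,
 * TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256 has id 0x0300C02F and
 * ssl_cipher_get_value returns 0xC02F. */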
uint16_t ssl_cipher_get_value(const SSL_CIPHER *cipher) {
  uint32_t id = cipher->id;
  /* All ciphers are SSLv3. */
  assert((id & 0xff000000) == 0x03000000);
  return id & 0xffff;
}

int SSL_CIPHER_is_AES(const SSL_CIPHER *cipher) {
  return (cipher->algorithm_enc & SSL_AES) != 0;
}

int SSL_CIPHER_has_MD5_HMAC(const SSL_CIPHER *cipher) {
  return (cipher->algorithm_mac & SSL_MD5) != 0;
}

int SSL_CIPHER_has_SHA1_HMAC(const SSL_CIPHER *cipher) {
  return (cipher->algorithm_mac & SSL_SHA1) != 0;
}

int SSL_CIPHER_has_SHA256_HMAC(const SSL_CIPHER *cipher) {
  return (cipher->algorithm_mac & SSL_SHA256) != 0;
}

int SSL_CIPHER_is_AESGCM(const SSL_CIPHER *cipher) {
  return (cipher->algorithm_enc & (SSL_AES128GCM | SSL_AES256GCM)) != 0;
}

int SSL_CIPHER_is_AES128GCM(const SSL_CIPHER *cipher) {
  return (cipher->algorithm_enc & SSL_AES128GCM) != 0;
}

int SSL_CIPHER_is_AES128CBC(const SSL_CIPHER *cipher) {
  return (cipher->algorithm_enc & SSL_AES128) != 0;
}

int SSL_CIPHER_is_AES256CBC(const SSL_CIPHER *cipher) {
  return (cipher->algorithm_enc & SSL_AES256) != 0;
}

int SSL_CIPHER_is_CHACHA20POLY1305(const SSL_CIPHER *cipher) {
  return (cipher->algorithm_enc &
          (SSL_CHACHA20POLY1305 | SSL_CHACHA20POLY1305_OLD)) != 0;
}

int SSL_CIPHER_is_NULL(const SSL_CIPHER *cipher) {
  return (cipher->algorithm_enc & SSL_eNULL) != 0;
}

int SSL_CIPHER_is_RC4(const SSL_CIPHER *cipher) {
  return (cipher->algorithm_enc & SSL_RC4) != 0;
}

int SSL_CIPHER_is_block_cipher(const SSL_CIPHER *cipher) {
  /* Neither stream cipher nor AEAD. */
  return (cipher->algorithm_enc & (SSL_RC4 | SSL_eNULL)) == 0 &&
         cipher->algorithm_mac != SSL_AEAD;
}

int SSL_CIPHER_is_ECDSA(const SSL_CIPHER *cipher) {
  return (cipher->algorithm_auth & SSL_aECDSA) != 0;
}

int SSL_CIPHER_is_ECDHE(const SSL_CIPHER *cipher) {
  return (cipher->algorithm_mkey & SSL_kECDHE) != 0;
}

int SSL_CIPHER_is_CECPQ1(const SSL_CIPHER *cipher) {
  return (cipher->algorithm_mkey & SSL_kCECPQ1) != 0;
}

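/* Example (derived from the check below): AEAD suites such as
 * AES_128_GCM_SHA256, which specify their own PRF hash, report
 * TLS1_2_VERSION, while older suites like AES_128_CBC_SHA that rely on the
 * default MD5/SHA-1 PRF report SSL3_VERSION. */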
uint16_t SSL_CIPHER_get_min_version(const SSL_CIPHER *cipher) {
  if (cipher->algorithm_prf != SSL_HANDSHAKE_MAC_DEFAULT) {
    /* Cipher suites before TLS 1.2 use the default PRF, while all those added
     * afterwards specify a particular hash. */
    return TLS1_2_VERSION;
  }
  return SSL3_VERSION;
}

/* Return the name of the cipher actually being used. */
const char *SSL_CIPHER_get_name(const SSL_CIPHER *cipher) {
  if (cipher != NULL) {
    return cipher->name;
  }

  return "(NONE)";
}

const char *SSL_CIPHER_get_kx_name(const SSL_CIPHER *cipher) {
  if (cipher == NULL) {
    return "";
  }

  switch (cipher->algorithm_mkey) {
    case SSL_kRSA:
      return "RSA";

    case SSL_kDHE:
      switch (cipher->algorithm_auth) {
        case SSL_aRSA:
          return "DHE_RSA";
        default:
          assert(0);
          return "UNKNOWN";
      }

    case SSL_kECDHE:
      switch (cipher->algorithm_auth) {
        case SSL_aECDSA:
          return "ECDHE_ECDSA";
        case SSL_aRSA:
          return "ECDHE_RSA";
        case SSL_aPSK:
          return "ECDHE_PSK";
        default:
          assert(0);
          return "UNKNOWN";
      }

    case SSL_kCECPQ1:
      switch (cipher->algorithm_auth) {
        case SSL_aECDSA:
          return "CECPQ1_ECDSA";
        case SSL_aRSA:
          return "CECPQ1_RSA";
        default:
          assert(0);
          return "UNKNOWN";
      }

    case SSL_kPSK:
      assert(cipher->algorithm_auth == SSL_aPSK);
      return "PSK";

    default:
      assert(0);
      return "UNKNOWN";
  }
}

static const char *ssl_cipher_get_enc_name(const SSL_CIPHER *cipher) {
  switch (cipher->algorithm_enc) {
    case SSL_3DES:
      return "3DES_EDE_CBC";
    case SSL_RC4:
      return "RC4";
    case SSL_AES128:
      return "AES_128_CBC";
    case SSL_AES256:
      return "AES_256_CBC";
    case SSL_AES128GCM:
      return "AES_128_GCM";
    case SSL_AES256GCM:
      return "AES_256_GCM";
    case SSL_CHACHA20POLY1305:
    case SSL_CHACHA20POLY1305_OLD:
      return "CHACHA20_POLY1305";
    default:
      assert(0);
      return "UNKNOWN";
  }
}

static const char *ssl_cipher_get_prf_name(const SSL_CIPHER *cipher) {
  switch (cipher->algorithm_prf) {
    case SSL_HANDSHAKE_MAC_DEFAULT:
      /* Before TLS 1.2, the PRF component is the hash used in the HMAC, which
       * is only ever MD5 or SHA-1. */
      switch (cipher->algorithm_mac) {
        case SSL_MD5:
          return "MD5";
        case SSL_SHA1:
          return "SHA";
      }
      break;
    case SSL_HANDSHAKE_MAC_SHA256:
      return "SHA256";
    case SSL_HANDSHAKE_MAC_SHA384:
      return "SHA384";
  }
  assert(0);
  return "UNKNOWN";
}

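/* SSL_CIPHER_get_rfc_name returns a newly allocated string that the caller
 * must release with OPENSSL_free. Example (illustrative): for an
 * ECDHE_RSA / AES_128_GCM / SHA256 cipher it produces
 * "TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256". */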
char *SSL_CIPHER_get_rfc_name(const SSL_CIPHER *cipher) {
  if (cipher == NULL) {
    return NULL;
  }

  const char *kx_name = SSL_CIPHER_get_kx_name(cipher);
  const char *enc_name = ssl_cipher_get_enc_name(cipher);
  const char *prf_name = ssl_cipher_get_prf_name(cipher);

  /* The final name is TLS_{kx_name}_WITH_{enc_name}_{prf_name}. */
  size_t len = 4 + strlen(kx_name) + 6 + strlen(enc_name) + 1 +
               strlen(prf_name) + 1;
  char *ret = OPENSSL_malloc(len);
  if (ret == NULL) {
    return NULL;
  }
  if (BUF_strlcpy(ret, "TLS_", len) >= len ||
      BUF_strlcat(ret, kx_name, len) >= len ||
      BUF_strlcat(ret, "_WITH_", len) >= len ||
      BUF_strlcat(ret, enc_name, len) >= len ||
      BUF_strlcat(ret, "_", len) >= len ||
      BUF_strlcat(ret, prf_name, len) >= len) {
    assert(0);
    OPENSSL_free(ret);
    return NULL;
  }
  assert(strlen(ret) + 1 == len);
  return ret;
}

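/* Usage sketch (illustrative): the return value is the effective strength in
 * bits and the optional out-parameter receives the nominal key length, e.g.
 *
 *   int alg_bits;
 *   int strength = SSL_CIPHER_get_bits(cipher, &alg_bits);
 *
 * For AES_128_GCM both values are 128; for 3DES the table below yields
 * alg_bits = 168 but strength_bits = 112. */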
int SSL_CIPHER_get_bits(const SSL_CIPHER *cipher, int *out_alg_bits) {
  if (cipher == NULL) {
    return 0;
  }

  int alg_bits, strength_bits;
  switch (cipher->algorithm_enc) {
    case SSL_AES128:
    case SSL_AES128GCM:
    case SSL_RC4:
      alg_bits = 128;
      strength_bits = 128;
      break;

    case SSL_AES256:
    case SSL_AES256GCM:
#if !defined(BORINGSSL_ANDROID_SYSTEM)
    case SSL_CHACHA20POLY1305_OLD:
#endif
    case SSL_CHACHA20POLY1305:
      alg_bits = 256;
      strength_bits = 256;
      break;

    case SSL_3DES:
      alg_bits = 168;
      strength_bits = 112;
      break;

    case SSL_eNULL:
      alg_bits = 0;
      strength_bits = 0;
      break;

    default:
      assert(0);
      alg_bits = 0;
      strength_bits = 0;
  }

  if (out_alg_bits != NULL) {
    *out_alg_bits = alg_bits;
  }
  return strength_bits;
}

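/* Illustrative example (the format comes from the BIO_snprintf call below;
 * the sample cipher name is an assumption): for an ECDHE_RSA AES_128_GCM
 * suite the output is a single line along the lines of
 *
 *   ECDHE-RSA-AES128-GCM-SHA256 Kx=ECDH Au=RSA Enc=AESGCM(128) Mac=AEAD
 *
 * If |buf| is NULL, a 128-byte buffer is allocated with OPENSSL_malloc; if a
 * caller-provided |len| is smaller than 128, "Buffer too small" is
 * returned. */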
const char *SSL_CIPHER_description(const SSL_CIPHER *cipher, char *buf,
                                   int len) {
  const char *kx, *au, *enc, *mac;
  uint32_t alg_mkey, alg_auth, alg_enc, alg_mac;

  alg_mkey = cipher->algorithm_mkey;
  alg_auth = cipher->algorithm_auth;
  alg_enc = cipher->algorithm_enc;
  alg_mac = cipher->algorithm_mac;

  switch (alg_mkey) {
    case SSL_kRSA:
      kx = "RSA";
      break;

    case SSL_kDHE:
      kx = "DH";
      break;

    case SSL_kECDHE:
      kx = "ECDH";
      break;

    case SSL_kCECPQ1:
      kx = "CECPQ1";
      break;

    case SSL_kPSK:
      kx = "PSK";
      break;

    default:
      kx = "unknown";
  }

  switch (alg_auth) {
    case SSL_aRSA:
      au = "RSA";
      break;

    case SSL_aECDSA:
      au = "ECDSA";
      break;

    case SSL_aPSK:
      au = "PSK";
      break;

    default:
      au = "unknown";
      break;
  }

  switch (alg_enc) {
    case SSL_3DES:
      enc = "3DES(168)";
      break;

    case SSL_RC4:
      enc = "RC4(128)";
      break;

    case SSL_AES128:
      enc = "AES(128)";
      break;

    case SSL_AES256:
      enc = "AES(256)";
      break;

    case SSL_AES128GCM:
      enc = "AESGCM(128)";
      break;

    case SSL_AES256GCM:
      enc = "AESGCM(256)";
      break;

    case SSL_CHACHA20POLY1305_OLD:
      enc = "ChaCha20-Poly1305-Old";
      break;

    case SSL_CHACHA20POLY1305:
      enc = "ChaCha20-Poly1305";
      break;

    case SSL_eNULL:
      enc = "None";
      break;

    default:
      enc = "unknown";
      break;
  }

  switch (alg_mac) {
    case SSL_MD5:
      mac = "MD5";
      break;

    case SSL_SHA1:
      mac = "SHA1";
      break;

    case SSL_SHA256:
      mac = "SHA256";
      break;

    case SSL_SHA384:
      mac = "SHA384";
      break;

    case SSL_AEAD:
      mac = "AEAD";
      break;

    default:
      mac = "unknown";
      break;
  }

  if (buf == NULL) {
    len = 128;
    buf = OPENSSL_malloc(len);
    if (buf == NULL) {
      return NULL;
    }
  } else if (len < 128) {
    return "Buffer too small";
  }

  BIO_snprintf(buf, len, "%-23s Kx=%-8s Au=%-4s Enc=%-9s Mac=%-4s\n",
               cipher->name, kx, au, enc, mac);
  return buf;
}

const char *SSL_CIPHER_get_version(const SSL_CIPHER *cipher) {
  return "TLSv1/SSLv3";
}

COMP_METHOD *SSL_COMP_get_compression_methods(void) { return NULL; }

int SSL_COMP_add_compression_method(int id, COMP_METHOD *cm) { return 1; }

const char *SSL_COMP_get_name(const COMP_METHOD *comp) { return NULL; }

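/* ssl_cipher_get_key_type maps the cipher's authentication bit to an EVP_PKEY
 * type: EVP_PKEY_EC for ECDSA suites, EVP_PKEY_RSA for RSA suites, and
 * EVP_PKEY_NONE otherwise (e.g. plain PSK). Presumably callers use this to
 * select a matching certificate key; that reading is an inference from the
 * mapping below, not documented behavior. */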
int ssl_cipher_get_key_type(const SSL_CIPHER *cipher) {
  uint32_t alg_a = cipher->algorithm_auth;

  if (alg_a & SSL_aECDSA) {
    return EVP_PKEY_EC;
  } else if (alg_a & SSL_aRSA) {
    return EVP_PKEY_RSA;
  }

  return EVP_PKEY_NONE;
}

int ssl_cipher_uses_certificate_auth(const SSL_CIPHER *cipher) {
  return (cipher->algorithm_auth & SSL_aCERT) != 0;
}

int ssl_cipher_requires_server_key_exchange(const SSL_CIPHER *cipher) {
  /* Ephemeral Diffie-Hellman key exchanges require a ServerKeyExchange. */
  if (cipher->algorithm_mkey & SSL_kDHE ||
      cipher->algorithm_mkey & SSL_kECDHE ||
      cipher->algorithm_mkey & SSL_kCECPQ1) {
    return 1;
  }

  /* It is optional in all others. */
  return 0;
}

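/* Worked example (the arithmetic follows the code below): for an AES-CBC
 * cipher with SHA-1, block_size is 16 and mac_len is 20, so ret starts at 21
 * and is rounded up to 32; for 3DES with SHA-1 it is 21 rounded up to 24. The
 * extra 1 accounts for the single byte of application data carried in the
 * split-off record; that purpose is inferred from the function's name. */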
size_t ssl_cipher_get_record_split_len(const SSL_CIPHER *cipher) {
  size_t block_size;
  switch (cipher->algorithm_enc) {
    case SSL_3DES:
      block_size = 8;
      break;
    case SSL_AES128:
    case SSL_AES256:
      block_size = 16;
      break;
    default:
      return 0;
  }

  size_t mac_len;
  switch (cipher->algorithm_mac) {
    case SSL_MD5:
      mac_len = MD5_DIGEST_LENGTH;
      break;
    case SSL_SHA1:
      mac_len = SHA_DIGEST_LENGTH;
      break;
    default:
      return 0;
  }

  size_t ret = 1 + mac_len;
  ret += block_size - (ret % block_size);
  return ret;
}