Add back support for TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256

This TLS 1.2 algorithm is substantially inferior to AES-GCM and should
never be used. It will not be available unless configured by name.
However, it can be used to provide backwards-compatibility with devices
that cannot be updated if so needed.

Change-Id: I1fd78efeb33aceca76ec2e7cb76b70f761ed1af8
Reviewed-on: https://boringssl-review.googlesource.com/c/boringssl/+/59585
Reviewed-by: David Benjamin <davidben@google.com>
Commit-Queue: Adam Langley <agl@google.com>
Auto-Submit: Adam Langley <agl@google.com>
chromium-stable
Adam Langley 2 years ago committed by Boringssl LUCI CQ
parent 722f5d878d
commit b1c6f45f1f
  1. 47
      crypto/cipher_extra/cipher_test.cc
  2. 29
      crypto/cipher_extra/e_tls.c
  3. 8
      crypto/cipher_extra/internal.h
  4. 202
      crypto/cipher_extra/tls_cbc.c
  5. 2
      include/openssl/aead.h
  6. 4
      include/openssl/tls1.h
  7. 6
      ssl/internal.h
  8. 31
      ssl/ssl_cipher.cc
  9. 28
      ssl/ssl_test.cc
  10. 1
      ssl/test/runner/runner.go
  11. 2
      ssl/test/test_config.cc

@ -623,6 +623,53 @@ TEST(CipherTest, SHA1WithSecretSuffix) {
}
}
// Tests that |EVP_sha256_final_with_secret_suffix| computes the same digest
// as a plain SHA-256 over the concatenated public prefix and secret suffix,
// for every interesting combination of prefix length and (bounded) suffix
// length. The input buffer and the suffix length are marked secret so that
// constant-time validation tooling can flag any data-dependent behavior.
TEST(CipherTest, SHA256WithSecretSuffix) {
  uint8_t buf[SHA256_CBLOCK * 4];
  RAND_bytes(buf, sizeof(buf));
  // Hashing should run in time independent of the bytes.
  CONSTTIME_SECRET(buf, sizeof(buf));

  // Exhaustively testing interesting cases in this function is cubic in the
  // block size, so we test in 3-byte increments.
  constexpr size_t kSkip = 3;
  // This value should be less than 8 to test the edge case when the 8-byte
  // length wraps to the next block.
  static_assert(kSkip < 8, "kSkip is too large");

  // |EVP_sha256_final_with_secret_suffix| is sensitive to the public length of
  // the partial block previously hashed. In TLS, this is the HMAC prefix, the
  // header, and the public minimum padding length.
  for (size_t prefix = 0; prefix < SHA256_CBLOCK; prefix += kSkip) {
    SCOPED_TRACE(prefix);
    // The first block is treated differently, so we run with up to three
    // blocks of length variability.
    for (size_t max_len = 0; max_len < 3 * SHA256_CBLOCK; max_len += kSkip) {
      SCOPED_TRACE(max_len);
      for (size_t len = 0; len <= max_len; len += kSkip) {
        SCOPED_TRACE(len);

        // Compute the reference digest over the whole input, then declassify
        // it so the comparison below is permitted.
        uint8_t expected[SHA256_DIGEST_LENGTH];
        SHA256(buf, prefix + len, expected);
        CONSTTIME_DECLASSIFY(expected, sizeof(expected));

        // Make a copy of the secret length to avoid interfering with the loop.
        size_t secret_len = len;
        CONSTTIME_SECRET(&secret_len, sizeof(secret_len));

        // Hash the public prefix normally, then the secret suffix in
        // constant time with respect to |secret_len|.
        SHA256_CTX ctx;
        SHA256_Init(&ctx);
        SHA256_Update(&ctx, buf, prefix);
        uint8_t computed[SHA256_DIGEST_LENGTH];
        ASSERT_TRUE(EVP_sha256_final_with_secret_suffix(
            &ctx, computed, buf + prefix, secret_len, max_len));

        CONSTTIME_DECLASSIFY(computed, sizeof(computed));
        EXPECT_EQ(Bytes(expected), Bytes(computed));
      }
    }
  }
}
TEST(CipherTest, GetCipher) {
const EVP_CIPHER *cipher = EVP_get_cipherbynid(NID_aes_128_gcm);
ASSERT_TRUE(cipher);

@ -400,6 +400,14 @@ static int aead_aes_128_cbc_sha1_tls_implicit_iv_init(
EVP_sha1(), 1);
}
// aead_aes_128_cbc_sha256_tls_init configures |ctx| for the legacy TLS CBC
// construction with AES-128-CBC and HMAC-SHA256 by delegating to
// |aead_tls_init|. The final argument is 0, unlike the implicit-IV variants
// (e.g. |aead_aes_128_cbc_sha1_tls_implicit_iv_init|, which passes 1).
static int aead_aes_128_cbc_sha256_tls_init(EVP_AEAD_CTX *ctx,
                                            const uint8_t *key, size_t key_len,
                                            size_t tag_len,
                                            enum evp_aead_direction_t dir) {
  const EVP_CIPHER *cipher = EVP_aes_128_cbc();
  const EVP_MD *digest = EVP_sha256();
  return aead_tls_init(ctx, key, key_len, tag_len, dir, cipher, digest, 0);
}
static int aead_aes_256_cbc_sha1_tls_init(EVP_AEAD_CTX *ctx, const uint8_t *key,
size_t key_len, size_t tag_len,
enum evp_aead_direction_t dir) {
@ -476,6 +484,23 @@ static const EVP_AEAD aead_aes_128_cbc_sha1_tls_implicit_iv = {
aead_tls_tag_len,
};
// aead_aes_128_cbc_sha256_tls is the EVP_AEAD wrapper for the legacy TLS CBC
// construction AES-128-CBC with HMAC-SHA256. Per the key-length entry below,
// the AEAD key is the HMAC-SHA256 key followed by the AES-128 key.
static const EVP_AEAD aead_aes_128_cbc_sha256_tls = {
    SHA256_DIGEST_LENGTH + 16,  // key len (SHA256 + AES128)
    16,                         // nonce len (IV)
    16 + SHA256_DIGEST_LENGTH,  // overhead (padding + SHA256)
    SHA256_DIGEST_LENGTH,       // max tag length
    0,                          // seal_scatter_supports_extra_in
    NULL,                       // init
    aead_aes_128_cbc_sha256_tls_init,
    aead_tls_cleanup,
    aead_tls_open,
    aead_tls_seal_scatter,
    NULL,  // open_gather
    NULL,  // get_iv
    aead_tls_tag_len,
};
static const EVP_AEAD aead_aes_256_cbc_sha1_tls = {
SHA_DIGEST_LENGTH + 32, // key len (SHA1 + AES256)
16, // nonce len (IV)
@ -552,6 +577,10 @@ const EVP_AEAD *EVP_aead_aes_128_cbc_sha1_tls_implicit_iv(void) {
return &aead_aes_128_cbc_sha1_tls_implicit_iv;
}
// EVP_aead_aes_128_cbc_sha256_tls returns the AEAD implementing the legacy
// TLS CBC construction AES-128-CBC with HMAC-SHA256 (explicit-IV form).
const EVP_AEAD *EVP_aead_aes_128_cbc_sha256_tls(void) {
  return &aead_aes_128_cbc_sha256_tls;
}
// EVP_aead_aes_256_cbc_sha1_tls returns the AEAD implementing the legacy
// TLS CBC construction AES-256-CBC with HMAC-SHA1.
const EVP_AEAD *EVP_aead_aes_256_cbc_sha1_tls(void) {
  return &aead_aes_256_cbc_sha1_tls;
}

@ -109,6 +109,14 @@ OPENSSL_EXPORT int EVP_sha1_final_with_secret_suffix(
SHA_CTX *ctx, uint8_t out[SHA_DIGEST_LENGTH], const uint8_t *in, size_t len,
size_t max_len);
// EVP_sha256_final_with_secret_suffix acts like
// |EVP_sha1_final_with_secret_suffix|, but for SHA-256.
//
// This function is exported for unit tests.
OPENSSL_EXPORT int EVP_sha256_final_with_secret_suffix(
SHA256_CTX *ctx, uint8_t out[SHA256_DIGEST_LENGTH], const uint8_t *in,
size_t len, size_t max_len);
// EVP_tls_cbc_digest_record computes the MAC of a decrypted, padded TLS
// record.
//

@ -267,24 +267,115 @@ int EVP_sha1_final_with_secret_suffix(SHA_CTX *ctx,
return 1;
}
int EVP_tls_cbc_record_digest_supported(const EVP_MD *md) {
return EVP_MD_type(md) == NID_sha1;
// EVP_sha256_final_with_secret_suffix finishes the SHA-256 computation in
// |ctx| over the additional suffix |in[:len]| and writes the digest to |out|,
// without leaking the secret value |len|. The caller supplies a public upper
// bound |max_len|; the function always processes the number of blocks implied
// by |max_len|, selecting the real result in constant time. Returns one on
// success or zero if the lengths are out of range.
int EVP_sha256_final_with_secret_suffix(SHA256_CTX *ctx,
                                        uint8_t out[SHA256_DIGEST_LENGTH],
                                        const uint8_t *in, size_t len,
                                        size_t max_len) {
  // Bound the input length so |total_bits| below fits in four bytes. This is
  // redundant with TLS record size limits. This also ensures |input_idx| below
  // does not overflow.
  size_t max_len_bits = max_len << 3;
  if (ctx->Nh != 0 ||
      (max_len_bits >> 3) != max_len ||  // Overflow
      ctx->Nl + max_len_bits < max_len_bits ||
      ctx->Nl + max_len_bits > UINT32_MAX) {
    return 0;
  }

  // We need to hash the following into |ctx|:
  //
  // - ctx->data[:ctx->num]
  // - in[:len]
  // - A 0x80 byte
  // - However many zero bytes are needed to pad up to a block.
  // - Eight bytes of length.
  //
  // The ">> 6" computes a ceiling division by SHA256_CBLOCK (64 bytes).
  size_t num_blocks = (ctx->num + len + 1 + 8 + SHA256_CBLOCK - 1) >> 6;
  size_t last_block = num_blocks - 1;
  size_t max_blocks = (ctx->num + max_len + 1 + 8 + SHA256_CBLOCK - 1) >> 6;

  // The bounds above imply |total_bits| fits in four bytes.
  size_t total_bits = ctx->Nl + (len << 3);
  uint8_t length_bytes[4];
  length_bytes[0] = (uint8_t)(total_bits >> 24);
  length_bytes[1] = (uint8_t)(total_bits >> 16);
  length_bytes[2] = (uint8_t)(total_bits >> 8);
  length_bytes[3] = (uint8_t)total_bits;

  // We now construct and process each expected block in constant-time.
  uint8_t block[SHA256_CBLOCK] = {0};
  uint32_t result[8] = {0};
  // input_idx is the index into |in| corresponding to the current block.
  // However, we allow this index to overflow beyond |max_len|, to simplify the
  // 0x80 byte.
  size_t input_idx = 0;
  for (size_t i = 0; i < max_blocks; i++) {
    // Fill |block| with data from the partial block in |ctx| and |in|. We copy
    // as if we were hashing up to |max_len| and then zero the excess later.
    size_t block_start = 0;
    if (i == 0) {
      // The first block absorbs the buffered partial block from |ctx|.
      OPENSSL_memcpy(block, ctx->data, ctx->num);
      block_start = ctx->num;
    }
    if (input_idx < max_len) {
      size_t to_copy = SHA256_CBLOCK - block_start;
      if (to_copy > max_len - input_idx) {
        to_copy = max_len - input_idx;
      }
      OPENSSL_memcpy(block + block_start, in + input_idx, to_copy);
    }

    // Zero any bytes beyond |len| and add the 0x80 byte.
    for (size_t j = block_start; j < SHA256_CBLOCK; j++) {
      // input[idx] corresponds to block[j].
      size_t idx = input_idx + j - block_start;
      // The barriers on |len| are not strictly necessary. However, without
      // them, GCC compiles this code by incorporating |len| into the loop
      // counter and subtracting it out later. This is still constant-time, but
      // it frustrates attempts to validate this.
      uint8_t is_in_bounds = constant_time_lt_8(idx, value_barrier_w(len));
      uint8_t is_padding_byte = constant_time_eq_8(idx, value_barrier_w(len));
      block[j] &= is_in_bounds;
      block[j] |= 0x80 & is_padding_byte;
    }

    input_idx += SHA256_CBLOCK - block_start;

    // Fill in the length if this is the last block.
    crypto_word_t is_last_block = constant_time_eq_w(i, last_block);
    for (size_t j = 0; j < 4; j++) {
      block[SHA256_CBLOCK - 4 + j] |= is_last_block & length_bytes[j];
    }

    // Process the block and save the hash state if it is the final value.
    SHA256_Transform(ctx, block);
    for (size_t j = 0; j < 8; j++) {
      result[j] |= is_last_block & ctx->h[j];
    }
  }

  // Write the output.
  for (size_t i = 0; i < 8; i++) {
    CRYPTO_store_u32_be(out + 4 * i, result[i]);
  }
  return 1;
}
int EVP_tls_cbc_digest_record(const EVP_MD *md, uint8_t *md_out,
size_t *md_out_size, const uint8_t header[13],
const uint8_t *data, size_t data_size,
size_t data_plus_mac_plus_padding_size,
const uint8_t *mac_secret,
unsigned mac_secret_length) {
if (EVP_MD_type(md) != NID_sha1) {
// EVP_tls_cbc_record_digest_supported should have been called first to
// check that the hash function is supported.
assert(0);
*md_out_size = 0;
// EVP_tls_cbc_record_digest_supported returns one if |md| is a hash function
// that |EVP_tls_cbc_digest_record| can process (SHA-1 or SHA-256) and zero
// otherwise.
int EVP_tls_cbc_record_digest_supported(const EVP_MD *md) {
  const int md_nid = EVP_MD_type(md);
  return md_nid == NID_sha1 || md_nid == NID_sha256;
}
static int tls_cbc_digest_record_sha1(uint8_t *md_out, size_t *md_out_size,
const uint8_t header[13],
const uint8_t *data, size_t data_size,
size_t data_plus_mac_plus_padding_size,
const uint8_t *mac_secret,
unsigned mac_secret_length) {
if (mac_secret_length > SHA_CBLOCK) {
// HMAC pads small keys with zeros and hashes large keys down. This function
// should never reach the large key case.
@ -336,3 +427,88 @@ int EVP_tls_cbc_digest_record(const EVP_MD *md, uint8_t *md_out,
*md_out_size = SHA_DIGEST_LENGTH;
return 1;
}
// tls_cbc_digest_record_sha256 computes HMAC-SHA256(mac_secret, header ||
// data[:data_size]) and writes it to |md_out|, without leaking |data_size|
// beyond the public bound |data_plus_mac_plus_padding_size|. Returns one on
// success and zero on failure.
static int tls_cbc_digest_record_sha256(uint8_t *md_out, size_t *md_out_size,
                                        const uint8_t header[13],
                                        const uint8_t *data, size_t data_size,
                                        size_t data_plus_mac_plus_padding_size,
                                        const uint8_t *mac_secret,
                                        unsigned mac_secret_length) {
  if (mac_secret_length > SHA256_CBLOCK) {
    // HMAC pads small keys with zeros and hashes large keys down. This function
    // should never reach the large key case.
    assert(0);
    return 0;
  }

  // Compute the initial HMAC block: the zero-padded key XORed with the 0x36
  // inner pad (RFC 2104).
  uint8_t hmac_pad[SHA256_CBLOCK];
  OPENSSL_memset(hmac_pad, 0, sizeof(hmac_pad));
  OPENSSL_memcpy(hmac_pad, mac_secret, mac_secret_length);
  for (size_t i = 0; i < SHA256_CBLOCK; i++) {
    hmac_pad[i] ^= 0x36;
  }

  // Hash the inner pad and the 13-byte TLS record header normally; both are
  // public.
  SHA256_CTX ctx;
  SHA256_Init(&ctx);
  SHA256_Update(&ctx, hmac_pad, SHA256_CBLOCK);
  SHA256_Update(&ctx, header, 13);

  // There are at most 256 bytes of padding, so we can compute the public
  // minimum length for |data_size|.
  size_t min_data_size = 0;
  if (data_plus_mac_plus_padding_size > SHA256_DIGEST_LENGTH + 256) {
    min_data_size =
        data_plus_mac_plus_padding_size - SHA256_DIGEST_LENGTH - 256;
  }

  // Hash the public minimum length directly. This reduces the number of blocks
  // that must be computed in constant-time.
  SHA256_Update(&ctx, data, min_data_size);

  // Hash the remaining data without leaking |data_size|.
  uint8_t mac_out[SHA256_DIGEST_LENGTH];
  if (!EVP_sha256_final_with_secret_suffix(
          &ctx, mac_out, data + min_data_size, data_size - min_data_size,
          data_plus_mac_plus_padding_size - min_data_size)) {
    return 0;
  }

  // Complete the HMAC in the standard manner. XORing the 0x36-masked pad with
  // 0x6a (0x36 ^ 0x5c) converts it to the key XORed with the 0x5c outer pad.
  SHA256_Init(&ctx);
  for (size_t i = 0; i < SHA256_CBLOCK; i++) {
    hmac_pad[i] ^= 0x6a;
  }

  SHA256_Update(&ctx, hmac_pad, SHA256_CBLOCK);
  SHA256_Update(&ctx, mac_out, SHA256_DIGEST_LENGTH);
  SHA256_Final(md_out, &ctx);
  *md_out_size = SHA256_DIGEST_LENGTH;
  return 1;
}
// EVP_tls_cbc_digest_record computes the MAC of a decrypted, padded TLS
// record by dispatching on |md| to the matching constant-time implementation.
// Returns one on success and zero on failure.
int EVP_tls_cbc_digest_record(const EVP_MD *md, uint8_t *md_out,
                              size_t *md_out_size, const uint8_t header[13],
                              const uint8_t *data, size_t data_size,
                              size_t data_plus_mac_plus_padding_size,
                              const uint8_t *mac_secret,
                              unsigned mac_secret_length) {
  const int md_nid = EVP_MD_type(md);
  if (md_nid == NID_sha1) {
    return tls_cbc_digest_record_sha1(
        md_out, md_out_size, header, data, data_size,
        data_plus_mac_plus_padding_size, mac_secret, mac_secret_length);
  }
  if (md_nid == NID_sha256) {
    return tls_cbc_digest_record_sha256(
        md_out, md_out_size, header, data, data_size,
        data_plus_mac_plus_padding_size, mac_secret, mac_secret_length);
  }
  // EVP_tls_cbc_record_digest_supported should have been called first to
  // check that the hash function is supported.
  assert(0);
  *md_out_size = 0;
  return 0;
}

@ -400,6 +400,8 @@ OPENSSL_EXPORT const EVP_AEAD *EVP_AEAD_CTX_aead(const EVP_AEAD_CTX *ctx);
OPENSSL_EXPORT const EVP_AEAD *EVP_aead_aes_128_cbc_sha1_tls(void);
OPENSSL_EXPORT const EVP_AEAD *EVP_aead_aes_128_cbc_sha1_tls_implicit_iv(void);
OPENSSL_EXPORT const EVP_AEAD *EVP_aead_aes_128_cbc_sha256_tls(void);
OPENSSL_EXPORT const EVP_AEAD *EVP_aead_aes_256_cbc_sha1_tls(void);
OPENSSL_EXPORT const EVP_AEAD *EVP_aead_aes_256_cbc_sha1_tls_implicit_iv(void);

@ -408,6 +408,8 @@ extern "C" {
#define TLS1_CK_ECDHE_RSA_WITH_AES_128_CBC_SHA 0x0300C013
#define TLS1_CK_ECDHE_RSA_WITH_AES_256_CBC_SHA 0x0300C014
#define TLS1_CK_ECDHE_RSA_WITH_AES_128_CBC_SHA256 0x0300C027
#define TLS1_CK_ECDH_anon_WITH_NULL_SHA 0x0300C015
#define TLS1_CK_ECDH_anon_WITH_RC4_128_SHA 0x0300C016
#define TLS1_CK_ECDH_anon_WITH_DES_192_CBC3_SHA 0x0300C017
@ -518,6 +520,8 @@ extern "C" {
#define TLS1_TXT_ECDHE_RSA_WITH_AES_128_CBC_SHA "ECDHE-RSA-AES128-SHA"
#define TLS1_TXT_ECDHE_RSA_WITH_AES_256_CBC_SHA "ECDHE-RSA-AES256-SHA"
#define TLS1_TXT_ECDHE_RSA_WITH_AES_128_CBC_SHA256 "ECDHE-RSA-AES128-SHA256"
#define TLS1_TXT_ECDH_anon_WITH_NULL_SHA "AECDH-NULL-SHA"
#define TLS1_TXT_ECDH_anon_WITH_RC4_128_SHA "AECDH-RC4-SHA"
#define TLS1_TXT_ECDH_anon_WITH_DES_192_CBC3_SHA "AECDH-DES-CBC3-SHA"

@ -553,8 +553,9 @@ BSSL_NAMESPACE_BEGIN
// Bits for |algorithm_mac| (symmetric authentication).
#define SSL_SHA1 0x00000001u
#define SSL_SHA256 0x00000002u
// SSL_AEAD is set for all AEADs.
#define SSL_AEAD 0x00000002u
#define SSL_AEAD 0x00000004u
// Bits for |algorithm_prf| (handshake digest).
#define SSL_HANDSHAKE_MAC_DEFAULT 0x1
@ -672,6 +673,9 @@ const SSL_CIPHER *ssl_choose_tls13_cipher(CBS cipher_suites, bool has_aes_hw,
bool ssl_tls13_cipher_meets_policy(uint16_t cipher_id,
enum ssl_compliance_policy_t policy);
// ssl_cipher_is_deprecated returns true if |cipher| is deprecated.
OPENSSL_EXPORT bool ssl_cipher_is_deprecated(const SSL_CIPHER *cipher);
// Transcript layer.

@ -335,6 +335,18 @@ static constexpr SSL_CIPHER kCiphers[] = {
SSL_HANDSHAKE_MAC_DEFAULT,
},
// Cipher C027
{
TLS1_TXT_ECDHE_RSA_WITH_AES_128_CBC_SHA256,
"TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256",
TLS1_CK_ECDHE_RSA_WITH_AES_128_CBC_SHA256,
SSL_kECDHE,
SSL_aRSA,
SSL_AES128,
SSL_SHA256,
SSL_HANDSHAKE_MAC_SHA256,
},
// GCM based TLS v1.2 ciphersuites from RFC 5289
// Cipher C02B
@ -626,6 +638,14 @@ bool ssl_cipher_get_evp_aead(const EVP_AEAD **out_aead,
}
*out_mac_secret_len = SHA_DIGEST_LENGTH;
} else if (cipher->algorithm_mac == SSL_SHA256) {
if (cipher->algorithm_enc == SSL_AES128) {
*out_aead = EVP_aead_aes_128_cbc_sha256_tls();
} else {
return false;
}
*out_mac_secret_len = SHA256_DIGEST_LENGTH;
} else {
return false;
}
@ -748,9 +768,9 @@ void SSLCipherPreferenceList::Remove(const SSL_CIPHER *cipher) {
sk_SSL_CIPHER_delete(ciphers.get(), index);
}
static bool ssl_cipher_is_deprecated(const SSL_CIPHER *cipher) {
bool ssl_cipher_is_deprecated(const SSL_CIPHER *cipher) {
// TODO(crbug.com/boringssl/599): Deprecate 3DES.
return false;
return cipher->id == TLS1_CK_ECDHE_RSA_WITH_AES_128_CBC_SHA256;
}
// ssl_cipher_apply_rule applies the rule type |rule| to ciphers matching its
@ -1138,6 +1158,7 @@ bool ssl_create_cipher_list(UniquePtr<SSLCipherPreferenceList> *out_cipher_list,
TLS1_CK_ECDHE_ECDSA_WITH_AES_256_CBC_SHA & 0xffff,
TLS1_CK_ECDHE_RSA_WITH_AES_256_CBC_SHA & 0xffff,
TLS1_CK_ECDHE_PSK_WITH_AES_256_CBC_SHA & 0xffff,
TLS1_CK_ECDHE_RSA_WITH_AES_128_CBC_SHA256 & 0xffff,
TLS1_CK_RSA_WITH_AES_128_GCM_SHA256 & 0xffff,
TLS1_CK_RSA_WITH_AES_256_GCM_SHA384 & 0xffff,
TLS1_CK_RSA_WITH_AES_128_SHA & 0xffff,
@ -1380,6 +1401,8 @@ int SSL_CIPHER_get_digest_nid(const SSL_CIPHER *cipher) {
return NID_undef;
case SSL_SHA1:
return NID_sha1;
case SSL_SHA256:
return NID_sha256;
}
assert(0);
return NID_undef;
@ -1628,6 +1651,10 @@ const char *SSL_CIPHER_description(const SSL_CIPHER *cipher, char *buf,
mac = "SHA1";
break;
case SSL_SHA256:
mac = "SHA256";
break;
case SSL_AEAD:
mac = "AEAD";
break;

@ -382,7 +382,7 @@ static const char *kBadRules[] = {
"[AES128-SHA | AES128-SHA256]",
};
static const char *kMustNotIncludeNull[] = {
static const char *kMustNotIncludeDeprecated[] = {
"ALL",
"DEFAULT",
"HIGH",
@ -395,6 +395,11 @@ static const char *kMustNotIncludeNull[] = {
"TLSv1.2",
};
// kShouldIncludeCBCSHA256 lists cipher rule strings that must select
// TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256: the deprecated suite is only
// available when named explicitly, alone or appended to another rule.
static const char* kShouldIncludeCBCSHA256[] = {
    "TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256",
    "ALL:TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256",
};
static const CurveTest kCurveTests[] = {
{
"P-256",
@ -578,7 +583,7 @@ TEST(SSLTest, CipherRules) {
ERR_clear_error();
}
for (const char *rule : kMustNotIncludeNull) {
for (const char *rule : kMustNotIncludeDeprecated) {
SCOPED_TRACE(rule);
bssl::UniquePtr<SSL_CTX> ctx(SSL_CTX_new(TLS_method()));
ASSERT_TRUE(ctx);
@ -586,6 +591,25 @@ TEST(SSLTest, CipherRules) {
ASSERT_TRUE(SSL_CTX_set_strict_cipher_list(ctx.get(), rule));
for (const SSL_CIPHER *cipher : SSL_CTX_get_ciphers(ctx.get())) {
EXPECT_NE(NID_undef, SSL_CIPHER_get_cipher_nid(cipher));
EXPECT_FALSE(ssl_cipher_is_deprecated(cipher));
}
}
{
for (const char *rule : kShouldIncludeCBCSHA256) {
bssl::UniquePtr<SSL_CTX> ctx(SSL_CTX_new(TLS_method()));
ASSERT_TRUE(ctx);
ASSERT_TRUE(SSL_CTX_set_strict_cipher_list(ctx.get(), rule));
bool found = false;
for (const SSL_CIPHER *cipher : SSL_CTX_get_ciphers(ctx.get())) {
if ((TLS1_CK_ECDHE_RSA_WITH_AES_128_CBC_SHA256 & 0xffff) ==
SSL_CIPHER_get_protocol_id(cipher)) {
found = true;
break;
}
}
EXPECT_TRUE(found);
}
}
}

@ -1843,6 +1843,7 @@ var testCipherSuites = []testCipherSuite{
{"ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256", TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256},
{"ECDHE_RSA_WITH_AES_128_GCM_SHA256", TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256},
{"ECDHE_RSA_WITH_AES_128_CBC_SHA", TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA},
{"ECDHE_RSA_WITH_AES_128_CBC_SHA256", TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256},
{"ECDHE_RSA_WITH_AES_256_GCM_SHA384", TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384},
{"ECDHE_RSA_WITH_AES_256_CBC_SHA", TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA},
{"ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256", TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256},

@ -1391,7 +1391,7 @@ bssl::UniquePtr<SSL_CTX> TestConfig::SetupCtx(SSL_CTX *old_ctx) const {
SSL_CTX_set0_buffer_pool(ssl_ctx.get(), g_pool);
std::string cipher_list = "ALL";
std::string cipher_list = "ALL:TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256";
if (!cipher.empty()) {
cipher_list = cipher;
SSL_CTX_set_options(ssl_ctx.get(), SSL_OP_CIPHER_SERVER_PREFERENCE);

Loading…
Cancel
Save