|
|
|
//
|
|
|
|
// Copyright 2018 The Abseil Authors.
|
|
|
|
//
|
|
|
|
// Licensed under the Apache License, Version 2.0 (the "License");
|
|
|
|
// you may not use this file except in compliance with the License.
|
|
|
|
// You may obtain a copy of the License at
|
|
|
|
//
|
|
|
|
// https://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
//
|
|
|
|
// Unless required by applicable law or agreed to in writing, software
|
|
|
|
// distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
// See the License for the specific language governing permissions and
|
|
|
|
// limitations under the License.
|
|
|
|
//
|
|
|
|
#include "absl/random/mocking_bit_gen.h"
|
|
|
|
|
/* NOTE(review): The text below is version-control commit metadata that was
   accidentally interleaved into this source file (it is not C++). It is
   commented out here so the file compiles; it should be dropped entirely
   once confirmed against VCS history.

Export of internal Abseil changes
--
b1fc72630aaa81c8395c3b22ba267d938fe29a2e by Derek Mauro <dmauro@google.com>:
Fix -Wdeprecated-copy warnings from Clang 13.
PiperOrigin-RevId: 380058303
--
0422744812b1a2010d9eea5b17fbe89f3441b66b by Evan Brown <ezb@google.com>:
Change the "full table!" asserts in raw_hash_set to use `<= capacity` instead
of `< capacity`.
PiperOrigin-RevId: 380033480
--
40628c34d540356de65fabb16c1439c0ec7a0764 by Abseil Team <absl-team@google.com>:
Drop out-of-date documentation about `absl::FixedArray`'s allocator support
PiperOrigin-RevId: 379811653
--
e7ad047863ae55c9b7aec0753cfc527a4ea614bc by Evan Brown <ezb@google.com>:
Fix a bug in ConvertDeletedToEmptyAndFullToDeleted in which we were copying 1
more cloned control byte than actually exists.
PiperOrigin-RevId: 379311830
--
1a3ba500fb2c33205854eb9258cd6e0fb1061bca by Martijn Vels <mvels@google.com>:
Change Ring, EXTERNAL and FLAT tag values to be consecutive values
PiperOrigin-RevId: 379291576
--
0c78e65ca4d85244b106c3f8e24cf268e09e72a3 by Benjamin Barenblat <bbaren@google.com>:
Round a double multiplication before casting it to integer
PiperOrigin-RevId: 378922064
GitOrigin-RevId: b1fc72630aaa81c8395c3b22ba267d938fe29a2e
Change-Id: Ica708a006921118673e78d5fd2d61fe0fb0894d1
*/
|
|
|
#include <cmath>
|
|
|
|
#include <numeric>
|
|
|
|
#include <random>
|
|
|
|
|
|
|
|
#include "gmock/gmock.h"
|
|
|
|
#include "gtest/gtest-spi.h"
|
|
|
|
#include "gtest/gtest.h"
|
|
|
|
#include "absl/random/bit_gen_ref.h"
|
|
|
|
#include "absl/random/mock_distributions.h"
|
|
|
|
#include "absl/random/random.h"
|
|
|
|
|
|
|
|
namespace {
|
/* NOTE(review): The text below is version-control commit metadata that was
   accidentally interleaved into this source file (it is not C++). It is
   commented out here so the file compiles; it should be dropped entirely
   once confirmed against VCS history.

Export of internal Abseil changes
--
5ed5dc9e17c66c298ee31cefc941a46348d8ad34 by Abseil Team <absl-team@google.com>:
Fix typo.
PiperOrigin-RevId: 362040582
--
ac704b53a49becc42f77e4529d3952f8e7d18ce4 by Abseil Team <absl-team@google.com>:
Fix a typo in a comment.
PiperOrigin-RevId: 361576641
--
d20ccb27b7e9b53481e9192c1aae5202c06bfcb1 by Derek Mauro <dmauro@google.com>:
Remove the inline keyword from functions that aren't defined in the header.
PiperOrigin-RevId: 361551300
--
aed9ae1dffa7b228dcb6ffbeb2fe06a13970c72b by Laramie Leavitt <lar@google.com>:
Propagate nice/strict/naggy state on absl::MockingBitGen.
PiperOrigin-RevId: 361233484
--
96186023fabd13d01d32d60d9c7ac4ead1aeb989 by Abseil Team <absl-team@google.com>:
Ensure that trivial types are passed by value rather than reference
PiperOrigin-RevId: 361217450
--
e1135944835d27f77e8119b8166d8fb6aa25f906 by Evan Brown <ezb@google.com>:
Internal change.
PiperOrigin-RevId: 361215882
--
583fe6c94c1c2ef757ef6e78292a15fbe4030e35 by Evan Brown <ezb@google.com>:
Increase the minimum number of slots per node from 3 to 4.
PiperOrigin-RevId: 361171495
GitOrigin-RevId: 5ed5dc9e17c66c298ee31cefc941a46348d8ad34
Change-Id: I8e33b5df1f987a77112093821085c410185ab51a
*/
|
|
|
|
|
|
|
using ::testing::_;
|
|
|
|
using ::testing::Ne;
|
|
|
|
using ::testing::Return;
|
|
|
|
|
|
|
|
// Verifies that every absl random distribution has a corresponding mock that
// can override its result via EXPECT_CALL on a MockingBitGen.
//
// Pattern for each distribution: first check the unmocked draw does not
// happen to equal the sentinel value, install a one-shot expectation, then
// check the mocked draw returns exactly the sentinel.
TEST(BasicMocking, AllDistributionsAreOverridable) {
  absl::MockingBitGen gen;

  EXPECT_NE(absl::Uniform<int>(gen, 1, 1000000), 20);
  EXPECT_CALL(absl::MockUniform<int>(), Call(gen, 1, 1000000))
      .WillOnce(Return(20));
  EXPECT_EQ(absl::Uniform<int>(gen, 1, 1000000), 20);

  EXPECT_NE(absl::Uniform<double>(gen, 0.0, 100.0), 5.0);
  EXPECT_CALL(absl::MockUniform<double>(), Call(gen, 0.0, 100.0))
      .WillOnce(Return(5.0));
  EXPECT_EQ(absl::Uniform<double>(gen, 0.0, 100.0), 5.0);

  EXPECT_NE(absl::Exponential<double>(gen, 1.0), 42);
  EXPECT_CALL(absl::MockExponential<double>(), Call(gen, 1.0))
      .WillOnce(Return(42));
  EXPECT_EQ(absl::Exponential<double>(gen, 1.0), 42);

  EXPECT_NE(absl::Poisson<int>(gen, 1.0), 500);
  EXPECT_CALL(absl::MockPoisson<int>(), Call(gen, 1.0)).WillOnce(Return(500));
  EXPECT_EQ(absl::Poisson<int>(gen, 1.0), 500);

  EXPECT_NE(absl::Bernoulli(gen, 0.000001), true);
  EXPECT_CALL(absl::MockBernoulli(), Call(gen, 0.000001))
      .WillOnce(Return(true));
  EXPECT_EQ(absl::Bernoulli(gen, 0.000001), true);

  EXPECT_NE(absl::Zipf<int>(gen, 1000000, 2.0, 1.0), 1221);
  EXPECT_CALL(absl::MockZipf<int>(), Call(gen, 1000000, 2.0, 1.0))
      .WillOnce(Return(1221));
  EXPECT_EQ(absl::Zipf<int>(gen, 1000000, 2.0, 1.0), 1221);

  EXPECT_NE(absl::Gaussian<double>(gen, 0.0, 1.0), 0.001);
  EXPECT_CALL(absl::MockGaussian<double>(), Call(gen, 0.0, 1.0))
      .WillOnce(Return(0.001));
  EXPECT_EQ(absl::Gaussian<double>(gen, 0.0, 1.0), 0.001);

  EXPECT_NE(absl::LogUniform<int>(gen, 0, 1000000, 2), 500000);
  EXPECT_CALL(absl::MockLogUniform<int>(), Call(gen, 0, 1000000, 2))
      .WillOnce(Return(500000));
  EXPECT_EQ(absl::LogUniform<int>(gen, 0, 1000000, 2), 500000);
}
|
|
|
|
|
|
|
|
// Same coverage as AllDistributionsAreOverridable, but using ON_CALL
// (a sticky default action) instead of EXPECT_CALL (a consumable
// expectation) for every distribution.
TEST(BasicMocking, OnDistribution) {
  absl::MockingBitGen gen;

  EXPECT_NE(absl::Uniform<int>(gen, 1, 1000000), 20);
  ON_CALL(absl::MockUniform<int>(), Call(gen, 1, 1000000))
      .WillByDefault(Return(20));
  EXPECT_EQ(absl::Uniform<int>(gen, 1, 1000000), 20);

  EXPECT_NE(absl::Uniform<double>(gen, 0.0, 100.0), 5.0);
  ON_CALL(absl::MockUniform<double>(), Call(gen, 0.0, 100.0))
      .WillByDefault(Return(5.0));
  EXPECT_EQ(absl::Uniform<double>(gen, 0.0, 100.0), 5.0);

  EXPECT_NE(absl::Exponential<double>(gen, 1.0), 42);
  ON_CALL(absl::MockExponential<double>(), Call(gen, 1.0))
      .WillByDefault(Return(42));
  EXPECT_EQ(absl::Exponential<double>(gen, 1.0), 42);

  EXPECT_NE(absl::Poisson<int>(gen, 1.0), 500);
  ON_CALL(absl::MockPoisson<int>(), Call(gen, 1.0)).WillByDefault(Return(500));
  EXPECT_EQ(absl::Poisson<int>(gen, 1.0), 500);

  EXPECT_NE(absl::Bernoulli(gen, 0.000001), true);
  ON_CALL(absl::MockBernoulli(), Call(gen, 0.000001))
      .WillByDefault(Return(true));
  EXPECT_EQ(absl::Bernoulli(gen, 0.000001), true);

  EXPECT_NE(absl::Zipf<int>(gen, 1000000, 2.0, 1.0), 1221);
  ON_CALL(absl::MockZipf<int>(), Call(gen, 1000000, 2.0, 1.0))
      .WillByDefault(Return(1221));
  EXPECT_EQ(absl::Zipf<int>(gen, 1000000, 2.0, 1.0), 1221);

  EXPECT_NE(absl::Gaussian<double>(gen, 0.0, 1.0), 0.001);
  ON_CALL(absl::MockGaussian<double>(), Call(gen, 0.0, 1.0))
      .WillByDefault(Return(0.001));
  EXPECT_EQ(absl::Gaussian<double>(gen, 0.0, 1.0), 0.001);

  EXPECT_NE(absl::LogUniform<int>(gen, 0, 1000000, 2), 2040);
  ON_CALL(absl::MockLogUniform<int>(), Call(gen, 0, 1000000, 2))
      .WillByDefault(Return(2040));
  EXPECT_EQ(absl::LogUniform<int>(gen, 0, 1000000, 2), 2040);
}
|
|
|
|
|
|
|
|
// Verifies that the mock Call() signature accepts plain literal arguments
// (implicitly converted to gMock matchers) for a multi-parameter
// distribution.
TEST(BasicMocking, GMockMatchers) {
  absl::MockingBitGen gen;

  EXPECT_NE(absl::Zipf<int>(gen, 1000000, 2.0, 1.0), 1221);
  ON_CALL(absl::MockZipf<int>(), Call(gen, 1000000, 2.0, 1.0))
      .WillByDefault(Return(1221));
  EXPECT_EQ(absl::Zipf<int>(gen, 1000000, 2.0, 1.0), 1221);
}
|
|
|
|
|
|
|
|
// Verifies that chained WillOnce() actions are consumed in order, one per
// matching call.
TEST(BasicMocking, OverridesWithMultipleGMockExpectations) {
  absl::MockingBitGen gen;

  EXPECT_CALL(absl::MockUniform<int>(), Call(gen, 1, 10000))
      .WillOnce(Return(20))
      .WillOnce(Return(40))
      .WillOnce(Return(60));
  EXPECT_EQ(absl::Uniform(gen, 1, 10000), 20);
  EXPECT_EQ(absl::Uniform(gen, 1, 10000), 40);
  EXPECT_EQ(absl::Uniform(gen, 1, 10000), 60);
}
|
|
|
|
|
|
|
|
// Verifies that a mock installed for explicit arguments also intercepts the
// call made with the distribution's default argument (Exponential's default
// lambda is 1.0, so both spellings hit the same mock).
TEST(BasicMocking, DefaultArgument) {
  absl::MockingBitGen gen;

  ON_CALL(absl::MockExponential<double>(), Call(gen, 1.0))
      .WillByDefault(Return(200));

  EXPECT_EQ(absl::Exponential<double>(gen), 200);
  EXPECT_EQ(absl::Exponential<double>(gen, 1.0), 200);
}
|
|
|
|
|
|
|
|
// Verifies that mocks are keyed to a specific generator instance: each
// MockingBitGen carries its own expectations, and an unmocked instance is
// unaffected by mocks installed on others.
TEST(BasicMocking, MultipleGenerators) {
  auto get_value = [](absl::BitGenRef gen_ref) {
    return absl::Uniform(gen_ref, 1, 1000000);
  };
  absl::MockingBitGen unmocked_generator;
  absl::MockingBitGen mocked_with_3;
  absl::MockingBitGen mocked_with_11;

  EXPECT_CALL(absl::MockUniform<int>(), Call(mocked_with_3, 1, 1000000))
      .WillOnce(Return(3))
      .WillRepeatedly(Return(17));
  EXPECT_CALL(absl::MockUniform<int>(), Call(mocked_with_11, 1, 1000000))
      .WillOnce(Return(11))
      .WillRepeatedly(Return(17));

  // Ensure that unmocked generator generates neither value.
  int unmocked_value = get_value(unmocked_generator);
  EXPECT_NE(unmocked_value, 3);
  EXPECT_NE(unmocked_value, 11);

  // Mocked generators should generate their mocked values.
  EXPECT_EQ(get_value(mocked_with_3), 3);
  EXPECT_EQ(get_value(mocked_with_11), 11);

  // Ensure that the one-shot mocks have expired; WillRepeatedly now yields 17.
  EXPECT_NE(get_value(mocked_with_3), 3);
  EXPECT_NE(get_value(mocked_with_11), 11);
}
|
|
|
|
|
|
|
|
// Verifies that a mock registered for one result type (uint32_t) does not
// intercept calls made with a different result type (uint16_t).
// NOTE: test name fixed from "MocksNotTrigeredForIncorrectTypes" (typo).
TEST(BasicMocking, MocksNotTriggeredForIncorrectTypes) {
  absl::MockingBitGen gen;
  EXPECT_CALL(absl::MockUniform<uint32_t>(), Call(gen)).WillOnce(Return(42));

  EXPECT_NE(absl::Uniform<uint16_t>(gen), 42);  // Not mocked
  EXPECT_EQ(absl::Uniform<uint32_t>(gen), 42);  // Mock triggered
}
|
|
|
|
|
|
|
|
// Verifies that a WillOnce expectation that is never exercised produces a
// nonfatal gMock failure when the MockingBitGen is destroyed.
TEST(BasicMocking, FailsOnUnsatisfiedMocks) {
  EXPECT_NONFATAL_FAILURE(
      []() {
        absl::MockingBitGen gen;
        EXPECT_CALL(absl::MockExponential<double>(), Call(gen, 1.0))
            .WillOnce(Return(3.0));
        // Does not call absl::Exponential(); the expectation stays
        // unsatisfied and triggers the failure at scope exit.
      }(),
      "unsatisfied and active");
}
|
|
|
|
|
|
|
|
// Verifies that the interval-tag overload of Uniform is mocked separately
// from the default (half-open) overload: a mock keyed on IntervalClosed must
// not intercept the tag-less call.
TEST(OnUniform, RespectsUniformIntervalSemantics) {
  absl::MockingBitGen gen;

  EXPECT_CALL(absl::MockUniform<int>(),
              Call(absl::IntervalClosed, gen, 1, 1000000))
      .WillOnce(Return(301));
  EXPECT_NE(absl::Uniform(gen, 1, 1000000), 301);  // Not mocked
  EXPECT_EQ(absl::Uniform(absl::IntervalClosed, gen, 1, 1000000), 301);
}
|
|
|
|
|
|
|
|
// Verifies that the zero-argument Uniform<T>(gen) shorthand (full range of
// an unsigned type) can be mocked via Call(gen) with no bound arguments.
TEST(OnUniform, RespectsNoArgUnsignedShorthand) {
  absl::MockingBitGen gen;
  EXPECT_CALL(absl::MockUniform<uint32_t>(), Call(gen)).WillOnce(Return(42));
  EXPECT_EQ(absl::Uniform<uint32_t>(gen), 42);
}
|
|
|
|
|
|
|
|
// Verifies that an ON_CALL default action applies to every matching call,
// forcing all of a batch of dice rolls to the mocked value.
TEST(RepeatedlyModifier, ForceSnakeEyesForManyDice) {
  auto roll_some_dice = [](absl::BitGenRef gen_ref) {
    std::vector<int> results(16);
    for (auto& r : results) {
      r = absl::Uniform(absl::IntervalClosed, gen_ref, 1, 6);
    }
    return results;
  };
  std::vector<int> results;
  absl::MockingBitGen gen;

  // Without any mocked calls, not all dice roll a "6".
  // (size() is cast to int to avoid a signed/unsigned comparison.)
  results = roll_some_dice(gen);
  EXPECT_LT(std::accumulate(std::begin(results), std::end(results), 0),
            static_cast<int>(results.size()) * 6);

  // Verify that we can force all "6"-rolls, with mocking.
  ON_CALL(absl::MockUniform<int>(), Call(absl::IntervalClosed, gen, 1, 6))
      .WillByDefault(Return(6));
  results = roll_some_dice(gen);
  EXPECT_EQ(std::accumulate(std::begin(results), std::end(results), 0),
            static_cast<int>(results.size()) * 6);
}
|
|
|
|
|
|
|
|
// Verifies that expectations on different argument sets keep independent
// call counters: interleaved calls each draw down their own Times(3) budget.
TEST(WillOnce, DistinctCounters) {
  absl::MockingBitGen gen;
  EXPECT_CALL(absl::MockUniform<int>(), Call(gen, 1, 1000000))
      .Times(3)
      .WillRepeatedly(Return(0));
  EXPECT_CALL(absl::MockUniform<int>(), Call(gen, 1000001, 2000000))
      .Times(3)
      .WillRepeatedly(Return(1));
  EXPECT_EQ(absl::Uniform(gen, 1000001, 2000000), 1);
  EXPECT_EQ(absl::Uniform(gen, 1, 1000000), 0);
  EXPECT_EQ(absl::Uniform(gen, 1000001, 2000000), 1);
  EXPECT_EQ(absl::Uniform(gen, 1, 1000000), 0);
  EXPECT_EQ(absl::Uniform(gen, 1000001, 2000000), 1);
  EXPECT_EQ(absl::Uniform(gen, 1, 1000000), 0);
}
|
|
|
|
|
|
|
|
// Verifies that a Times(3) expectation saturates after three calls: the
// fourth call is no longer mocked, and the over-saturation surfaces as a
// nonfatal gMock failure.
TEST(TimesModifier, ModifierSaturatesAndExpires) {
  EXPECT_NONFATAL_FAILURE(
      []() {
        absl::MockingBitGen gen;
        EXPECT_CALL(absl::MockUniform<int>(), Call(gen, 1, 1000000))
            .Times(3)
            .WillRepeatedly(Return(15))
            .RetiresOnSaturation();

        EXPECT_EQ(absl::Uniform(gen, 1, 1000000), 15);
        EXPECT_EQ(absl::Uniform(gen, 1, 1000000), 15);
        EXPECT_EQ(absl::Uniform(gen, 1, 1000000), 15);
        // Times(3) has expired - Should get a different value now.
        EXPECT_NE(absl::Uniform(gen, 1, 1000000), 15);
      }(),
      "");
}
|
|
|
|
|
|
|
|
// Verifies that Times(0) expectations (the call must never happen) can be
// installed on a MockingBitGen without being violated by the test itself.
TEST(TimesModifier, Times0) {
  absl::MockingBitGen gen;
  EXPECT_CALL(absl::MockBernoulli(), Call(gen, 0.0)).Times(0);
  EXPECT_CALL(absl::MockPoisson<int>(), Call(gen, 1.0)).Times(0);
}
|
|
|
|
|
|
|
|
// Verifies that gMock matchers (the wildcard `_` and `Ne`) work as arguments
// in mocked distribution calls, in any argument position, including the
// tagged-interval overload.
TEST(AnythingMatcher, MatchesAnyArgument) {
  using testing::_;

  // Wildcard in the low-bound position of the tagged overload; Ne() on the
  // high bound distinguishes the two defaults.
  {
    absl::MockingBitGen gen;
    ON_CALL(absl::MockUniform<int>(), Call(absl::IntervalClosed, gen, _, 1000))
        .WillByDefault(Return(11));
    ON_CALL(absl::MockUniform<int>(),
            Call(absl::IntervalClosed, gen, _, Ne(1000)))
        .WillByDefault(Return(99));

    EXPECT_EQ(absl::Uniform(absl::IntervalClosed, gen, 10, 1000000), 99);
    EXPECT_EQ(absl::Uniform(absl::IntervalClosed, gen, 10, 1000), 11);
  }

  // Wildcard in the high-bound position; Ne() on the low bound.
  {
    absl::MockingBitGen gen;
    ON_CALL(absl::MockUniform<int>(), Call(gen, 1, _))
        .WillByDefault(Return(25));
    ON_CALL(absl::MockUniform<int>(), Call(gen, Ne(1), _))
        .WillByDefault(Return(99));
    EXPECT_EQ(absl::Uniform(gen, 3, 1000000), 99);
    EXPECT_EQ(absl::Uniform(gen, 1, 1000000), 25);
  }

  // Wildcards in every position match all calls.
  {
    absl::MockingBitGen gen;
    ON_CALL(absl::MockUniform<int>(), Call(gen, _, _))
        .WillByDefault(Return(145));
    EXPECT_EQ(absl::Uniform(gen, 1, 1000), 145);
    EXPECT_EQ(absl::Uniform(gen, 10, 1000), 145);
    EXPECT_EQ(absl::Uniform(gen, 100, 1000), 145);
  }
}
|
|
|
|
|
|
|
|
// Verifies that an ON_CALL default with a wildcard upper bound intercepts
// repeated index draws, pinning every lookup to index 0.
TEST(AnythingMatcher, WithWillByDefault) {
  using testing::_;
  absl::MockingBitGen gen;
  std::vector<int> values = {11, 22, 33, 44, 55, 66, 77, 88, 99, 1010};

  ON_CALL(absl::MockUniform<size_t>(), Call(gen, 0, _))
      .WillByDefault(Return(0));
  for (int i = 0; i < 100; i++) {
    auto& elem = values[absl::Uniform(gen, 0u, values.size())];
    EXPECT_EQ(elem, 11);  // Always index 0 under the mock.
  }
}
|
|
|
|
|
|
|
|
// Verifies that WillByDefault can take a lambda that computes the mocked
// result from the call's actual argument (here: lambda scaled and rounded).
//
// NOTE(review): a block of misplaced commit-log metadata had been spliced
// into the middle of the ON_CALL statement; it has been removed to restore
// the statement.
TEST(BasicMocking, WillByDefaultWithArgs) {
  using testing::_;

  absl::MockingBitGen gen;
  ON_CALL(absl::MockPoisson<int>(), Call(gen, _))
      .WillByDefault([](double lambda) {
        // std::rint avoids excess-precision surprises when truncating the
        // product (e.g. static_cast<int>(1.7 * 10) == 16 on some targets).
        return static_cast<int>(std::rint(lambda * 10));
      });
  EXPECT_EQ(absl::Poisson<int>(gen, 1.7), 17);
  EXPECT_EQ(absl::Poisson<int>(gen, 0.03), 0);
}
|
|
|
|
|
|
|
|
// Verifies that MockingBitGen expectations participate in
// testing::InSequence ordering and succeed when called in declared order.
TEST(MockingBitGen, InSequenceSucceedsInOrder) {
  absl::MockingBitGen gen;

  testing::InSequence seq;

  EXPECT_CALL(absl::MockPoisson<int>(), Call(gen, 1.0)).WillOnce(Return(3));
  EXPECT_CALL(absl::MockPoisson<int>(), Call(gen, 2.0)).WillOnce(Return(4));

  EXPECT_EQ(absl::Poisson<int>(gen, 1.0), 3);
  EXPECT_EQ(absl::Poisson<int>(gen, 2.0), 4);
}
|
|
|
|
|
/* NOTE(review): The text below is version-control commit metadata that was
   accidentally interleaved into this source file (it is not C++; it also
   duplicates an earlier misplaced block). Commented out so the file
   compiles; it should be dropped entirely once confirmed against VCS
   history.

Export of internal Abseil changes
--
5ed5dc9e17c66c298ee31cefc941a46348d8ad34 by Abseil Team <absl-team@google.com>:
Fix typo.
PiperOrigin-RevId: 362040582
--
ac704b53a49becc42f77e4529d3952f8e7d18ce4 by Abseil Team <absl-team@google.com>:
Fix a typo in a comment.
PiperOrigin-RevId: 361576641
--
d20ccb27b7e9b53481e9192c1aae5202c06bfcb1 by Derek Mauro <dmauro@google.com>:
Remove the inline keyword from functions that aren't defined in the header.
PiperOrigin-RevId: 361551300
--
aed9ae1dffa7b228dcb6ffbeb2fe06a13970c72b by Laramie Leavitt <lar@google.com>:
Propagate nice/strict/naggy state on absl::MockingBitGen.
PiperOrigin-RevId: 361233484
--
96186023fabd13d01d32d60d9c7ac4ead1aeb989 by Abseil Team <absl-team@google.com>:
Ensure that trivial types are passed by value rather than reference
PiperOrigin-RevId: 361217450
--
e1135944835d27f77e8119b8166d8fb6aa25f906 by Evan Brown <ezb@google.com>:
Internal change.
PiperOrigin-RevId: 361215882
--
583fe6c94c1c2ef757ef6e78292a15fbe4030e35 by Evan Brown <ezb@google.com>:
Increase the minimum number of slots per node from 3 to 4.
PiperOrigin-RevId: 361171495
GitOrigin-RevId: 5ed5dc9e17c66c298ee31cefc941a46348d8ad34
Change-Id: I8e33b5df1f987a77112093821085c410185ab51a
*/
|
|
|
// Verifies that MockingBitGen can be wrapped in ::testing::NiceMock
// (suppressing "uninteresting call" warnings) while ON_CALL defaults still
// take effect.
TEST(MockingBitGen, NiceMock) {
  ::testing::NiceMock<absl::MockingBitGen> gen;
  ON_CALL(absl::MockUniform<int>(), Call(gen, _, _)).WillByDefault(Return(145));

  ON_CALL(absl::MockPoisson<int>(), Call(gen, _)).WillByDefault(Return(3));

  EXPECT_EQ(absl::Uniform(gen, 1, 1000), 145);
  EXPECT_EQ(absl::Uniform(gen, 10, 1000), 145);
  EXPECT_EQ(absl::Uniform(gen, 100, 1000), 145);
}
|
|
|
|
|
|
|
|
// Verifies that MockingBitGen works when wrapped in ::testing::NaggyMock.
TEST(MockingBitGen, NaggyMock) {
  // This is difficult to test, as only the output matters, so just verify
  // that ON_CALL can be installed. Anything else requires log inspection.
  ::testing::NaggyMock<absl::MockingBitGen> gen;

  ON_CALL(absl::MockUniform<int>(), Call(gen, _, _)).WillByDefault(Return(145));
  ON_CALL(absl::MockPoisson<int>(), Call(gen, _)).WillByDefault(Return(3));

  EXPECT_EQ(absl::Uniform(gen, 1, 1000), 145);
}
|
|
|
|
|
|
|
|
// Verifies StrictMock semantics: an expectation that is never satisfied
// produces a nonfatal failure when the strict generator is destroyed.
TEST(MockingBitGen, StrictMock_NotEnough) {
  EXPECT_NONFATAL_FAILURE(
      []() {
        ::testing::StrictMock<absl::MockingBitGen> gen;
        EXPECT_CALL(absl::MockUniform<int>(), Call(gen, _, _))
            .WillOnce(Return(145));
        // No call is made; the WillOnce expectation remains unsatisfied.
      }(),
      "unsatisfied and active");
}
|
|
|
|
|
|
|
|
// Verifies StrictMock semantics: calling past a saturated WillOnce
// expectation produces a nonfatal "over-saturated" failure.
TEST(MockingBitGen, StrictMock_TooMany) {
  ::testing::StrictMock<absl::MockingBitGen> gen;

  EXPECT_CALL(absl::MockUniform<int>(), Call(gen, _, _)).WillOnce(Return(145));
  EXPECT_EQ(absl::Uniform(gen, 1, 1000), 145);

  // A second matching call exceeds the single allowed invocation.
  EXPECT_NONFATAL_FAILURE(
      [&]() { EXPECT_EQ(absl::Uniform(gen, 10, 1000), 0); }(),
      "over-saturated and active");
}
|
|
|
|
|
|
|
|
} // namespace
|