Changed test to produce the same MessageBuilder twice

accelerated committed 2018-06-11 15:45:41 -04:00
parent a4eefacaa1
commit 972a008aa4
2 changed files with 14 additions and 9 deletions
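
The practical effect is that one MessageBuilder can now be reused across several sync_produce() calls, because the producer clones it whenever it tracks internal per-message state (see the produce()/sync_produce() hunks below). A minimal usage sketch, assuming cppkafka's public headers; the broker address and topic name are placeholders, not values from this commit:

#include <string>
#include <cppkafka/configuration.h>
#include <cppkafka/message_builder.h>
#include <cppkafka/utils/buffered_producer.h>

using namespace cppkafka;

int main() {
    // Placeholder broker; substitute a real one.
    Configuration config = {
        { "metadata.broker.list", "localhost:9092" }
    };
    BufferedProducer<std::string> producer(config);

    // Build the message once...
    std::string payload = "Hello world";
    MessageBuilder builder("my_topic");
    builder.payload(payload);

    // ...and produce it twice from the same builder. When the producer
    // tracks internal per-message state, sync_produce() works on a clone,
    // so reusing the builder is safe.
    producer.sync_produce(builder);
    producer.sync_produce(builder);
    return 0;
}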


@@ -435,9 +435,9 @@ void BufferedProducer<BufferType>::add_message(Builder builder) {
 template <typename BufferType>
 void BufferedProducer<BufferType>::produce(const MessageBuilder& builder) {
     if (has_internal_data_) {
-        MessageBuilder builder_copy(builder.clone());
-        add_tracker(builder_copy);
-        async_produce(builder_copy, true);
+        MessageBuilder builder_clone(builder.clone());
+        add_tracker(builder_clone);
+        async_produce(builder_clone, true);
     }
     else {
         async_produce(builder, true);
@@ -447,13 +447,13 @@ void BufferedProducer<BufferType>::produce(const MessageBuilder& builder) {
 template <typename BufferType>
 void BufferedProducer<BufferType>::sync_produce(const MessageBuilder& builder) {
     if (has_internal_data_) {
-        MessageBuilder builder_copy(builder.clone());
-        TrackerPtr tracker = add_tracker(builder_copy);
+        MessageBuilder builder_clone(builder.clone());
+        TrackerPtr tracker = add_tracker(builder_clone);
         // produce until we succeed or we reach max retry limit
         std::future<bool> should_retry;
         do {
             should_retry = tracker->get_new_future();
-            produce_message(builder_copy);
+            produce_message(builder_clone);
             wait_for_acks();
         }
         while (should_retry.get());
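
The do/while above drives retries through a per-message tracker that hands out a fresh std::future for each attempt; the delivery-report path resolves it with whether another attempt is needed. A standalone sketch of that retry-until-acked pattern; Tracker and report() here are illustrative stand-ins, not cppkafka types:

#include <future>
#include <iostream>

// Illustrative stand-in for the per-message tracker.
struct Tracker {
    std::promise<bool> promise_;
    // Hand out a fresh future before every produce attempt.
    std::future<bool> get_new_future() {
        promise_ = std::promise<bool>();
        return promise_.get_future();
    }
    // Resolved from the delivery-report path: true means "retry".
    void report(bool should_retry) {
        promise_.set_value(should_retry);
    }
};

int main() {
    Tracker tracker;
    int attempts = 0;
    std::future<bool> should_retry;
    do {
        should_retry = tracker.get_new_future();
        ++attempts;                   // stands in for produce_message()
        tracker.report(attempts < 3); // pretend the first two attempts fail
    } while (should_retry.get());     // block until the ack path decides
    std::cout << "delivered after " << attempts << " attempts\n";
    return 0;
}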


@@ -341,18 +341,23 @@ TEST_CASE("replay sync messages with errors", "[producer][buffered_producer][syn
     // Create a consumer and subscribe to this topic
     Consumer consumer(make_consumer_config());
     consumer.subscribe({ KAFKA_TOPICS[0] });
-    ConsumerRunner runner(consumer, num_retries+1, KAFKA_NUM_PARTITIONS);
+    ConsumerRunner runner(consumer, 2*(num_retries+1), KAFKA_NUM_PARTITIONS);
     // Now create a producer and produce a message
     ErrorProducer<string> producer(make_producer_config(), BufferedProducer<string>::TestParameters{true, false});
     producer.set_produce_failure_callback(dr_failure_callback);
     producer.set_max_number_retries(num_retries);
     string payload = "Hello world";
-    producer.sync_produce(MessageBuilder(KAFKA_TOPICS[0]).payload(payload).user_data(&dr_data[0]));
+    MessageBuilder builder(KAFKA_TOPICS[0]);
+    builder.payload(payload).user_data(&dr_data[0]);
+    //Produce the same message twice
+    producer.sync_produce(builder);
+    producer.sync_produce(builder);
     runner.try_join();
     const auto& messages = runner.get_messages();
-    REQUIRE(messages.size() == num_retries+1);
+    REQUIRE(messages.size() == 2*(num_retries+1));
     for (size_t i = 0; i < messages.size(); ++i) {
         const auto& message = messages[i];
         CHECK(message.get_topic() == KAFKA_TOPICS[0]);