Mirror of https://github.com/Telecominfraproject/wlan-cloud-lib-cppkafka.git (synced 2025-11-03 20:18:06 +00:00)

Move tests to use catch instead of googletest (#56)
* Port buffer test to use Catch2
* Move compacted topic processor test to Catch2
* Move configuration tests to Catch2
* Rename configuration test cases
* Move topic partition list test to Catch2
* Move handle base tests to Catch2
* Move producer tests to Catch2
* Move consumer tests to Catch2
* Use CHECK on tests when appropriate
* Remove googletest
* Show tests' progress as they run
* Update message when Catch2 is not checked out
* Remove references to googletest
* Run cppkafka_tests manually on travis
* Print amount of time taken by each test case
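The port follows one mechanical pattern throughout the diff below. A googletest fixture with several TEST_F cases becomes a single Catch2 TEST_CASE whose shared state is built at the top and whose individual tests become SECTIONs; EXPECT_* macros map to CHECK (record the failure, keep running) and ASSERT_* macros map to REQUIRE (abort the test case on failure). A minimal illustrative sketch of the pattern (not code from this repository):

    #include <catch.hpp>
    #include <string>

    TEST_CASE("string size", "[example]") {
        // Shared setup, replacing the googletest fixture's members.
        const std::string data = "Hello world!";

        SECTION("size") {
            // CHECK keeps running on failure (was EXPECT_EQ in gtest terms).
            CHECK(data.size() == 12);
        }

        SECTION("contents") {
            // REQUIRE aborts this section on failure (was ASSERT_FALSE).
            REQUIRE(!data.empty());
            CHECK(data.front() == 'H');
        }
    }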
.gitmodules (vendored, 6 lines changed):
@@ -1,3 +1,3 @@
-[submodule "third_party/googletest"]
-	path = third_party/googletest
-	url = https://github.com/google/googletest.git
+[submodule "third_party/Catch2"]
+	path = third_party/Catch2
+	url = https://github.com/catchorg/Catch2.git
.travis.yml:

@@ -40,4 +40,4 @@ script:
   - cmake .. -DRDKAFKA_ROOT_DIR=../librdkafka/install/ -DKAFKA_TEST_INSTANCE=localhost:9092
   - make examples
   - make tests
-  - ctest -V
+  - ./tests/cppkafka_tests
CMakeLists.txt:

@@ -62,30 +62,14 @@ if(DOXYGEN_FOUND)
 endif(DOXYGEN_FOUND)
 
 if(NOT CPPKAFKA_DISABLE_TESTS)
-    set(GOOGLETEST_ROOT ${CMAKE_SOURCE_DIR}/third_party/googletest)
-    if(EXISTS "${GOOGLETEST_ROOT}/CMakeLists.txt")
-        set(GOOGLETEST_INCLUDE ${GOOGLETEST_ROOT}/googletest/include)
-        set(GOOGLETEST_BINARY_DIR ${CMAKE_CURRENT_BINARY_DIR}/googletest)
-        set(GOOGLETEST_LIBRARY ${GOOGLETEST_BINARY_DIR}/googletest)
-
-        include(ExternalProject)
-
-        ExternalProject_Add(
-        googletest
-        DOWNLOAD_COMMAND ""
-        SOURCE_DIR ${GOOGLETEST_ROOT}
-        BINARY_DIR ${GOOGLETEST_BINARY_DIR}
-        CMAKE_CACHE_ARGS "-DBUILD_GTEST:bool=ON" "-DBUILD_GMOCK:bool=OFF"
-                        "-Dgtest_force_shared_crt:bool=ON"
-        INSTALL_COMMAND ""
-        )
+    set(CATCH_ROOT ${CMAKE_SOURCE_DIR}/third_party/Catch2)
+    if(EXISTS ${CATCH_ROOT}/CMakeLists.txt)
+        set(CATCH_INCLUDE ${CATCH_ROOT}/single_include)
 
         enable_testing()
         add_subdirectory(tests)
-        # Make sure we build googletest before anything else
-        add_dependencies(cppkafka googletest)
     else()
-        message(STATUS "Disabling tests because submodule googletest isn't pulled out")
+        message(STATUS "Disabling tests because submodule Catch2 isn't checked out")
     endif()
 endif()
 
tests/CMakeLists.txt:

@@ -1,31 +1,31 @@
-include_directories(${GOOGLETEST_INCLUDE})
 include_directories(${CMAKE_CURRENT_SOURCE_DIR}/../include/)
+include_directories(SYSTEM ${CATCH_INCLUDE})
 include_directories(SYSTEM ${Boost_INCLUDE_DIRS} ${RDKAFKA_INCLUDE_DIR})
 
-link_directories(${GOOGLETEST_LIBRARY})
-link_libraries(cppkafka ${RDKAFKA_LIBRARY} gtest gtest_main pthread)
-
 set(KAFKA_TEST_INSTANCE "kafka-vm:9092"
     CACHE STRING "The kafka instance to which to connect to run tests")
 add_custom_target(tests)
 
-macro(create_test test_name)
-    add_executable(${test_name}_test EXCLUDE_FROM_ALL "${test_name}_test.cpp")
-    add_test(${test_name} ${test_name}_test)
-    add_dependencies(tests ${test_name}_test)
-    add_dependencies(${test_name}_test cppkafka)
-    target_link_libraries(${test_name}_test cppkafka-test)
-endmacro()
-
 include_directories(${CMAKE_CURRENT_SOURCE_DIR})
 add_library(cppkafka-test EXCLUDE_FROM_ALL test_utils.cpp)
-add_dependencies(cppkafka-test cppkafka)
+target_link_libraries(cppkafka-test cppkafka ${RDKAFKA_LIBRARY} pthread)
 
 add_definitions("-DKAFKA_TEST_INSTANCE=\"${KAFKA_TEST_INSTANCE}\"")
-create_test(consumer)
-create_test(producer)
-create_test(kafka_handle_base)
-create_test(topic_partition_list)
-create_test(configuration)
-create_test(buffer)
-create_test(compacted_topic_processor)
+
+add_executable(
+    cppkafka_tests
+    EXCLUDE_FROM_ALL
+    buffer_test.cpp
+    compacted_topic_processor_test.cpp
+    configuration_test.cpp
+    topic_partition_list_test.cpp
+    kafka_handle_base_test.cpp
+    producer_test.cpp
+    consumer_test.cpp
+
+    # Main file
+    test_main.cpp
+)
+target_link_libraries(cppkafka_tests cppkafka-test)
+add_dependencies(tests cppkafka_tests)
+add_test(cppkafka cppkafka_tests)
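The per-test executables are replaced by a single cppkafka_tests binary whose entry point lives in the new test_main.cpp, which this page does not show. A minimal sketch of what such a file could look like (an assumption, not the repository's actual contents; Catch2 can also generate main automatically via CATCH_CONFIG_MAIN):

    // test_main.cpp (hypothetical sketch)
    #define CATCH_CONFIG_RUNNER
    #include <catch.hpp>

    int main(int argc, char* argv[]) {
        // Forward command-line options; e.g. "--durations yes" prints
        // per-test-case timings, matching this commit's description.
        return Catch::Session().run(argc, argv);
    }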
tests/buffer_test.cpp:

@@ -1,7 +1,7 @@
 #include <string>
 #include <vector>
 #include <sstream>
-#include <gtest/gtest.h>
+#include <catch.hpp>
 #include "cppkafka/buffer.h"
 
 using std::string;
@@ -10,69 +10,59 @@ using std::ostringstream;
 
 using namespace cppkafka;
 
-class BufferTest : public testing::Test {
-public:
-
-};
-
-TEST_F(BufferTest, OperatorBool) {
-    string data = "Hello world!";
-    Buffer buffer1(data);
-    Buffer buffer2;
-
-    EXPECT_TRUE(buffer1);
-    EXPECT_FALSE(buffer2);
+TEST_CASE("conversions", "[buffer]") {
+    const string data = "Hello world!";
+    const Buffer buffer(data);
+    const Buffer empty_buffer;
+
+    SECTION("bool conversion") {
+        CHECK(!!buffer == true);
+        CHECK(!!empty_buffer == false);
+    }
+
+    SECTION("string conversion") {
+        CHECK(static_cast<string>(buffer) == data);
+        CHECK(static_cast<string>(empty_buffer).empty());
+    }
+
+    SECTION("vector conversion") {
+        const vector<char> buffer_as_vector = buffer;
+        CHECK(string(buffer_as_vector.begin(), buffer_as_vector.end()) == data);
+    }
 }
 
-TEST_F(BufferTest, StringConversion) {
-    string data = "Hello world!";
-    Buffer buffer(data);
-    string buffer_as_string = buffer;
-    EXPECT_EQ(data, buffer_as_string);
-}
-
-TEST_F(BufferTest, StringConversionOnEmptyBuffer) {
-    Buffer buffer;
-    EXPECT_EQ("", static_cast<string>(buffer));
-}
-
-TEST_F(BufferTest, VectorConversion) {
-    string data = "Hello world!";
-    Buffer buffer(data);
-    vector<char> buffer_as_vector = buffer;
-    EXPECT_EQ(data, string(buffer_as_vector.begin(), buffer_as_vector.end()));
-}
-
-TEST_F(BufferTest, VectorConstruction) {
+TEST_CASE("construction", "[buffer]") {
     const string str_data = "Hello world!";
     const vector<uint8_t> data(str_data.begin(), str_data.end());
-    Buffer buffer(data);
-    EXPECT_EQ(str_data, buffer);
+    const Buffer buffer(data);
+    CHECK(str_data == buffer);
 }
 
-TEST_F(BufferTest, Equality) {
-    string data = "Hello world!";
-    Buffer buffer1(data);
-    Buffer buffer2(data);
-
-    EXPECT_EQ(buffer1, buffer2);
-}
-
-TEST_F(BufferTest, InEquality) {
-    string data1 = "Hello world!";
-    string data2 = "Hello worldz";
-    Buffer buffer1(data1);
-    Buffer buffer2(data2);
-
-    EXPECT_NE(buffer1, buffer2);
-}
-
-TEST_F(BufferTest, OutputOperator) {
-    string data = "Hello \x7fwor\x03ld!";
-    string pretty_string = "Hello \\x7fwor\\x03ld!";
-    Buffer buffer(data);
+TEST_CASE("comparison", "[buffer]") {
+    const string data = "Hello world!";
+    const Buffer buffer1(data);
+    const Buffer buffer2(data);
+    const Buffer empty_buffer;
+
+    SECTION("equality") {
+        CHECK(buffer1 == buffer2);
+        CHECK(buffer2 == buffer1);
+    }
+
+    SECTION("inequality") {
+        CHECK(buffer1 != empty_buffer);
+        CHECK(empty_buffer != buffer1);
+    }
+}
+
+TEST_CASE("stream extraction", "[buffer]") {
+    const string data = "Hello \x7fwor\x03ld!";
+    const string pretty_string = "Hello \\x7fwor\\x03ld!";
+    const Buffer buffer(data);
 
     ostringstream output;
     output << buffer;
-    EXPECT_EQ(pretty_string, output.str());
+    CHECK(output.str() == pretty_string);
 }
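One semantic point behind the conversion above: Catch2 re-runs a TEST_CASE from the top once per SECTION, so the buffers constructed before the SECTIONs are rebuilt for each of them; that re-execution is what replaces the per-test setup a googletest fixture would provide. A standalone sketch of that behavior (illustrative only, not repository code):

    #include <catch.hpp>
    #include <vector>

    TEST_CASE("sections re-run shared setup", "[example]") {
        std::vector<int> values{ 1, 2, 3 };  // executed once per SECTION

        SECTION("popping shrinks the vector") {
            values.pop_back();
            CHECK(values.size() == 2);
        }

        SECTION("the other section still sees a fresh vector") {
            // Runs with values == {1, 2, 3} again, unaffected by pop_back above.
            CHECK(values.size() == 3);
        }
    }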
tests/compacted_topic_processor_test.cpp:

@@ -4,7 +4,7 @@
 #include <set>
 #include <map>
 #include <condition_variable>
-#include <gtest/gtest.h>
+#include <catch.hpp>
 #include "cppkafka/producer.h"
 #include "cppkafka/consumer.h"
 #include "cppkafka/utils/compacted_topic_processor.h"
@@ -29,28 +29,23 @@ using std::chrono::milliseconds;
 
 using namespace cppkafka;
 
-class CompactedTopicProcessorTest : public testing::Test {
-public:
-    static const string KAFKA_TOPIC;
+static const string KAFKA_TOPIC = "cppkafka_test1";
 
-    Configuration make_producer_config() {
-        Configuration config;
-        config.set("metadata.broker.list", KAFKA_TEST_INSTANCE);
-        return config;
-    }
+static Configuration make_producer_config() {
+    Configuration config;
+    config.set("metadata.broker.list", KAFKA_TEST_INSTANCE);
+    return config;
+}
 
-    Configuration make_consumer_config() {
-        Configuration config;
-        config.set("metadata.broker.list", KAFKA_TEST_INSTANCE);
-        config.set("enable.auto.commit", false);
-        config.set("group.id", "compacted_topic_test");
-        return config;
-    }
-};
+static Configuration make_consumer_config() {
+    Configuration config;
+    config.set("metadata.broker.list", KAFKA_TEST_INSTANCE);
+    config.set("enable.auto.commit", false);
+    config.set("group.id", "compacted_topic_test");
+    return config;
+}
 
-const string CompactedTopicProcessorTest::KAFKA_TOPIC = "cppkafka_test1";
-
-TEST_F(CompactedTopicProcessorTest, Consume) {
+TEST_CASE("consumption", "[consumer][compacted]") {
     Consumer consumer(make_consumer_config());
     // We'll use ints as the key, strings as the value
     using CompactedConsumer = CompactedTopicProcessor<int, string>;
@@ -101,27 +96,27 @@ TEST_F(CompactedTopicProcessorTest, Consume) {
 
     size_t set_count = 0;
     size_t delete_count = 0;
-    ASSERT_FALSE(events.empty());
+    CHECK(events.empty() == false);
     for (const Event& event : events) {
         switch (event.get_type()) {
             case Event::SET_ELEMENT:
                 {
                     auto iter = elements.find(to_string(event.get_key()));
-                    ASSERT_NE(iter, elements.end());
-                    EXPECT_EQ(iter->second.value, event.get_value());
-                    EXPECT_EQ(iter->second.partition, event.get_partition());
+                    REQUIRE(iter != elements.end());
+                    CHECK(iter->second.value == event.get_value());
+                    CHECK(iter->second.partition == event.get_partition());
                     set_count++;
                 }
                 break;
             case Event::DELETE_ELEMENT:
-                EXPECT_EQ(0, event.get_partition());
-                EXPECT_EQ(42, event.get_key());
+                CHECK(event.get_partition() == 0);
+                CHECK(event.get_key() == 42);
                 delete_count++;
                 break;
             default:
            break;
         }
     }
-    EXPECT_EQ(2, set_count);
-    EXPECT_EQ(1, delete_count);
+    CHECK(set_count == 2);
+    CHECK(delete_count == 1);
 }
tests/configuration_test.cpp:

@@ -1,4 +1,4 @@
-#include <gtest/gtest.h>
+#include <catch.hpp>
 #include "cppkafka/configuration.h"
 #include "cppkafka/exceptions.h"
 
@@ -6,86 +6,88 @@ using namespace cppkafka;
 
 using std::string;
 
-class ConfigurationTest : public testing::Test {
-public:
-
-};
-
-TEST_F(ConfigurationTest, GetSetConfig) {
+TEST_CASE("normal config", "[config]") {
     Configuration config;
-    config.set("group.id", "foo").set("metadata.broker.list", "asd:9092");
-    EXPECT_EQ("foo", config.get("group.id"));
-    EXPECT_EQ("asd:9092", config.get("metadata.broker.list"));
-    EXPECT_EQ("foo", config.get<string>("group.id"));
 
-    EXPECT_THROW(config.get("asd"), ConfigOptionNotFound);
+    SECTION("get existing") {
+        config.set("group.id", "foo").set("metadata.broker.list", "asd:9092");
+        CHECK(config.get("group.id") == "foo");
+        CHECK(config.get("metadata.broker.list") == "asd:9092");
+        CHECK(config.get<string>("group.id") == "foo");
+    }
+
+    SECTION("get non existent") {
+        REQUIRE_THROWS_AS(config.get("asd"), ConfigOptionNotFound);
+    }
+
+    SECTION("set overloads") {
+        config.set("enable.auto.commit", true);
+        config.set("auto.commit.interval.ms", 100);
+
+        CHECK(config.get("enable.auto.commit") == "true");
+        CHECK(config.get("auto.commit.interval.ms") == "100");
+        CHECK(config.get<int>("auto.commit.interval.ms") == 100);
+    }
+
+    SECTION("set multiple") {
+        config = {
+            { "group.id", "foo" },
+            { "metadata.broker.list", string("asd:9092") },
+            { "message.max.bytes", 2000 },
+            { "topic.metadata.refresh.sparse", true }
+        };
+
+        CHECK(config.get("group.id") == "foo");
+        CHECK(config.get("metadata.broker.list") == "asd:9092");
+        CHECK(config.get<int>("message.max.bytes") == 2000);
+        CHECK(config.get<bool>("topic.metadata.refresh.sparse") == true);
+    }
+
+    SECTION("default topic config") {
+        config.set_default_topic_configuration({{ "request.required.acks", 2 }});
+
+        const auto& topic_config = config.get_default_topic_configuration();
+        CHECK(!!topic_config == true);
+        CHECK(topic_config->get<int>("request.required.acks") == 2);
+    }
+
+    SECTION("get all") {
+        config.set("enable.auto.commit", false);
+        auto option_map = config.get_all();
+        CHECK(option_map.at("enable.auto.commit") == "false");
+    }
 }
 
-TEST_F(ConfigurationTest, GetSetTopicConfig) {
+TEST_CASE("topic config", "[config]") {
     TopicConfiguration config;
-    config.set("auto.commit.enable", true).set("offset.store.method", "broker");
-    EXPECT_EQ("true", config.get("auto.commit.enable"));
-    EXPECT_EQ("broker", config.get("offset.store.method"));
-    EXPECT_EQ(true, config.get<bool>("auto.commit.enable"));
 
-    EXPECT_THROW(config.get("asd"), ConfigOptionNotFound);
-}
-
-TEST_F(ConfigurationTest, ConfigSetMultiple) {
-    Configuration config = {
-        { "group.id", "foo" },
-        { "metadata.broker.list", string("asd:9092") },
-        { "message.max.bytes", 2000 },
-        { "topic.metadata.refresh.sparse", true }
-    };
-    EXPECT_EQ("foo", config.get("group.id"));
-    EXPECT_EQ("asd:9092", config.get("metadata.broker.list"));
-    EXPECT_EQ(2000, config.get<int>("message.max.bytes"));
-    EXPECT_EQ(true, config.get<bool>("topic.metadata.refresh.sparse"));
-}
-
-TEST_F(ConfigurationTest, TopicConfigSetMultiple) {
-    TopicConfiguration config = {
-        { "compression.codec", "none" },
-        { "offset.store.method", string("file") },
-        { "request.required.acks", 2 },
-        { "produce.offset.report", true }
-    };
-    EXPECT_EQ("none", config.get("compression.codec"));
-    EXPECT_EQ("file", config.get("offset.store.method"));
-    EXPECT_EQ(2, config.get<int>("request.required.acks"));
-    EXPECT_EQ(true, config.get<bool>("produce.offset.report"));
-}
-
-TEST_F(ConfigurationTest, SetDefaultTopicConfiguration) {
-    Configuration config;
-    config.set_default_topic_configuration({{ "request.required.acks", 2 }});
-
-    const auto& topic_config = config.get_default_topic_configuration();
-    EXPECT_TRUE(topic_config);
-    EXPECT_EQ(2, topic_config->get<int>("request.required.acks"));
-}
-
-TEST_F(ConfigurationTest, SetOverloads) {
-    Configuration config;
-    config.set("enable.auto.commit", true);
-    config.set("auto.commit.interval.ms", 100);
-
-    EXPECT_EQ("true", config.get("enable.auto.commit"));
-    EXPECT_EQ("100", config.get("auto.commit.interval.ms"));
-    EXPECT_EQ(100, config.get<int>("auto.commit.interval.ms"));
-}
-
-TEST_F(ConfigurationTest, GetAll) {
-    Configuration config;
-    config.set("enable.auto.commit", false);
-    auto option_map = config.get_all();
-    EXPECT_EQ("false", option_map.at("enable.auto.commit"));
-}
-
-TEST_F(ConfigurationTest, TopicGetAll) {
-    TopicConfiguration config;
-    config.set("auto.commit.enable", false);
-    auto option_map = config.get_all();
-    EXPECT_EQ("false", option_map.at("auto.commit.enable"));
+    SECTION("get existing") {
+        config.set("auto.commit.enable", true).set("offset.store.method", "broker");
+        CHECK(config.get("auto.commit.enable") == "true");
+        CHECK(config.get("offset.store.method") == "broker");
+        CHECK(config.get<bool>("auto.commit.enable") == true);
+    }
+
+    SECTION("get non existent") {
+        REQUIRE_THROWS_AS(config.get("asd"), ConfigOptionNotFound);
+    }
+
+    SECTION("set multiple") {
+        config = {
+            { "compression.codec", "none" },
+            { "offset.store.method", string("file") },
+            { "request.required.acks", 2 },
+            { "produce.offset.report", true }
+        };
+        CHECK(config.get("compression.codec") == "none");
+        CHECK(config.get("offset.store.method") == "file");
+        CHECK(config.get<int>("request.required.acks") == 2);
+        CHECK(config.get<bool>("produce.offset.report") == true);
+    }
+
+    SECTION("get all") {
+        config.set("auto.commit.enable", false);
+        auto option_map = config.get_all();
+        CHECK(option_map.at("auto.commit.enable") == "false");
+    }
 }
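The configuration tests above also show where REQUIRE_THROWS_AS replaces EXPECT_THROW, and why plain comparison expressions replace the _EQ macro family: Catch2 decomposes the expression inside CHECK/REQUIRE and reports both operand values on failure, so no EXPECT_EQ/EXPECT_NE variants are needed. A small self-contained sketch (illustrative, not repository code):

    #include <catch.hpp>
    #include <stdexcept>

    static int parse_positive(int value) {
        if (value <= 0) {
            throw std::invalid_argument("not positive");
        }
        return value;
    }

    TEST_CASE("decomposition and exception checks", "[example]") {
        // On failure Catch2 prints the expanded expression with both values.
        CHECK(parse_positive(5) == 5);
        // Replaces EXPECT_THROW(parse_positive(-1), std::invalid_argument).
        REQUIRE_THROWS_AS(parse_positive(-1), std::invalid_argument);
    }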
tests/consumer_test.cpp:

@@ -5,7 +5,7 @@
 #include <chrono>
 #include <iterator>
 #include <condition_variable>
-#include <gtest/gtest.h>
+#include <catch.hpp>
 #include "cppkafka/consumer.h"
 #include "cppkafka/producer.h"
 #include "cppkafka/utils/consumer_dispatcher.h"
@@ -29,28 +29,23 @@ using std::chrono::system_clock;
 
 using namespace cppkafka;
 
-class ConsumerTest : public testing::Test {
-public:
-    static const string KAFKA_TOPIC;
+const string KAFKA_TOPIC = "cppkafka_test1";
 
-    Configuration make_producer_config() {
-        Configuration config;
-        config.set("metadata.broker.list", KAFKA_TEST_INSTANCE);
-        return config;
-    }
+static Configuration make_producer_config() {
+    Configuration config;
+    config.set("metadata.broker.list", KAFKA_TEST_INSTANCE);
+    return config;
+}
 
-    Configuration make_consumer_config(const string& group_id = "consumer_test") {
-        Configuration config;
-        config.set("metadata.broker.list", KAFKA_TEST_INSTANCE);
-        config.set("enable.auto.commit", false);
-        config.set("group.id", group_id);
-        return config;
-    }
-};
+static Configuration make_consumer_config(const string& group_id = "consumer_test") {
+    Configuration config;
+    config.set("metadata.broker.list", KAFKA_TEST_INSTANCE);
+    config.set("enable.auto.commit", false);
+    config.set("group.id", group_id);
+    return config;
+}
 
-const string ConsumerTest::KAFKA_TOPIC = "cppkafka_test1";
-
-TEST_F(ConsumerTest, AssignmentCallback) {
+TEST_CASE("message consumption", "[consumer]") {
     TopicPartitionList assignment;
     int partition = 0;
 
@@ -69,27 +64,26 @@ TEST_F(ConsumerTest, AssignmentCallback) {
     runner.try_join();
 
     // All 3 partitions should be ours
-    EXPECT_EQ(3, assignment.size());
+    REQUIRE(assignment.size() == 3);
     set<int> partitions = { 0, 1, 2 };
     for (const auto& topic_partition : assignment) {
-        EXPECT_EQ(KAFKA_TOPIC, topic_partition.get_topic());
-        EXPECT_TRUE(partitions.erase(topic_partition.get_partition()));
+        CHECK(topic_partition.get_topic() == KAFKA_TOPIC);
+        CHECK(partitions.erase(topic_partition.get_partition()) == true);
     }
-    EXPECT_EQ(1, runner.get_messages().size());
-
-    EXPECT_EQ(vector<string>{ KAFKA_TOPIC }, consumer.get_subscription());
+    REQUIRE(runner.get_messages().size() == 1);
+    CHECK(consumer.get_subscription() == vector<string>{ KAFKA_TOPIC });
 
     assignment = consumer.get_assignment();
-    EXPECT_EQ(3, assignment.size());
+    CHECK(assignment.size() == 3);
 
     int64_t low;
     int64_t high;
     tie(low, high) = consumer.get_offsets({ KAFKA_TOPIC, partition });
-    EXPECT_GT(high, low);
-    EXPECT_EQ(high, runner.get_messages().back().get_offset() + 1);
+    CHECK(high > low);
+    CHECK(runner.get_messages().back().get_offset() + 1 == high);
 }
 
-TEST_F(ConsumerTest, Rebalance) {
+TEST_CASE("consumer rebalance", "[consumer]") {
     TopicPartitionList assignment1;
     TopicPartitionList assignment2;
     bool revocation_called = false;
@@ -114,7 +108,7 @@ TEST_F(ConsumerTest, Rebalance) {
     consumer2.subscribe({ KAFKA_TOPIC });
     ConsumerRunner runner2(consumer2, 1, 1);
 
-    EXPECT_TRUE(revocation_called);
+    CHECK(revocation_called == true);
 
     // Produce a message just so we stop the consumer
     Producer producer(make_producer_config());
@@ -124,20 +118,20 @@ TEST_F(ConsumerTest, Rebalance) {
     runner2.try_join();
 
     // All 3 partitions should be assigned
-    EXPECT_EQ(3, assignment1.size() + assignment2.size());
+    CHECK(assignment1.size() + assignment2.size() == 3);
     set<int> partitions = { 0, 1, 2 };
     for (const auto& topic_partition : assignment1) {
-        EXPECT_EQ(KAFKA_TOPIC, topic_partition.get_topic());
-        EXPECT_TRUE(partitions.erase(topic_partition.get_partition()));
+        CHECK(topic_partition.get_topic() == KAFKA_TOPIC);
+        CHECK(partitions.erase(topic_partition.get_partition()) == true);
     }
     for (const auto& topic_partition : assignment2) {
-        EXPECT_EQ(KAFKA_TOPIC, topic_partition.get_topic());
-        EXPECT_TRUE(partitions.erase(topic_partition.get_partition()));
+        CHECK(topic_partition.get_topic() == KAFKA_TOPIC);
+        CHECK(partitions.erase(topic_partition.get_partition()) == true);
     }
-    EXPECT_EQ(1, runner1.get_messages().size() + runner2.get_messages().size());
+    CHECK(runner1.get_messages().size() + runner2.get_messages().size() == 1);
 }
 
-TEST_F(ConsumerTest, OffsetCommit) {
+TEST_CASE("consumer offset commit", "[consumer]") {
     int partition = 0;
     int64_t message_offset = 0;
     bool offset_commit_called = false;
@@ -147,11 +141,11 @@ TEST_F(ConsumerTest, OffsetCommit) {
     config.set_offset_commit_callback([&](Consumer&, Error error,
                                           const TopicPartitionList& topic_partitions) {
         offset_commit_called = true;
-        EXPECT_FALSE(error);
-        ASSERT_EQ(1, topic_partitions.size());
-        EXPECT_EQ(KAFKA_TOPIC, topic_partitions[0].get_topic());
-        EXPECT_EQ(0, topic_partitions[0].get_partition());
-        EXPECT_EQ(message_offset + 1, topic_partitions[0].get_offset());
+        CHECK(!!error == false);
+        REQUIRE(topic_partitions.size() == 1);
+        CHECK(topic_partitions[0].get_topic() == KAFKA_TOPIC);
+        CHECK(topic_partitions[0].get_partition() == 0);
+        CHECK(topic_partitions[0].get_offset() == message_offset + 1);
     });
     Consumer consumer(config);
     consumer.assign({ { KAFKA_TOPIC, 0 } });
@@ -163,17 +157,17 @@ TEST_F(ConsumerTest, OffsetCommit) {
     producer.produce(MessageBuilder(KAFKA_TOPIC).partition(partition).payload(payload));
     runner.try_join();
 
-    ASSERT_EQ(1, runner.get_messages().size());
+    REQUIRE(runner.get_messages().size() == 1);
     const Message& msg = runner.get_messages()[0];
     message_offset = msg.get_offset();
     consumer.commit(msg);
     for (size_t i = 0; i < 3 && !offset_commit_called; ++i) {
         consumer.poll();
     }
-    EXPECT_TRUE(offset_commit_called);
+    CHECK(offset_commit_called == true);
 }
 
-TEST_F(ConsumerTest, Throttle) {
+TEST_CASE("consumer throttle", "[consumer]") {
     int partition = 0;
 
     // Create a consumer and subscribe to the topic
@@ -210,10 +204,10 @@ TEST_F(ConsumerTest, Throttle) {
         }
     );
 
-    EXPECT_EQ(3, callback_executed_count);
+    CHECK(callback_executed_count == 3);
 }
 
-TEST_F(ConsumerTest, ConsumeBatch) {
+TEST_CASE("consume batch", "[consumer]") {
     int partition = 0;
 
     // Create a consumer and subscribe to the topic
@@ -242,7 +236,7 @@ TEST_F(ConsumerTest, ConsumeBatch) {
                             make_move_iterator(messages.end()));
         ++i;
     }
-    ASSERT_EQ(2, all_messages.size());
-    EXPECT_EQ(payload, all_messages[0].get_payload());
-    EXPECT_EQ(payload, all_messages[1].get_payload());
+    REQUIRE(all_messages.size() == 2);
+    CHECK(all_messages[0].get_payload() == payload);
+    CHECK(all_messages[1].get_payload() == payload);
 }
tests/kafka_handle_base_test.cpp:

@@ -1,6 +1,6 @@
 #include <set>
 #include <unordered_set>
-#include <gtest/gtest.h>
+#include <catch.hpp>
 #include "cppkafka/consumer.h"
 #include "cppkafka/producer.h"
 #include "cppkafka/metadata.h"
@@ -14,94 +14,88 @@ using std::string;
 
 using namespace cppkafka;
 
-class KafkaHandleBaseTest : public testing::Test {
-public:
-    static const string KAFKA_TOPIC;
+static const string KAFKA_TOPIC = "cppkafka_test1";
 
-    Configuration make_config() {
-        Configuration config;
-        config.set("metadata.broker.list", KAFKA_TEST_INSTANCE);
-        return config;
-    }
+Configuration make_config() {
+    Configuration config;
+    config.set("metadata.broker.list", KAFKA_TEST_INSTANCE);
+    return config;
+}
 
-    string get_kafka_host() {
-        string uri = KAFKA_TEST_INSTANCE;
-        size_t index = uri.find(':');
-        if (index == string::npos) {
-            return uri;
-        }
-        else {
-            return uri.substr(0, index);
-        }
-    }
+string get_kafka_host() {
+    string uri = KAFKA_TEST_INSTANCE;
+    size_t index = uri.find(':');
+    if (index == string::npos) {
+        return uri;
+    }
+    else {
+        return uri.substr(0, index);
+    }
+}
 
-    uint16_t get_kafka_port() {
-        string uri = KAFKA_TEST_INSTANCE;
-        size_t index = uri.find(':');
-        if (index == string::npos) {
-            return 9092;
-        }
-        else {
-            return stoul(uri.substr(index + 1));
-        }
-    }
-};
+uint16_t get_kafka_port() {
+    string uri = KAFKA_TEST_INSTANCE;
+    size_t index = uri.find(':');
+    if (index == string::npos) {
+        return 9092;
+    }
+    else {
+        return stoul(uri.substr(index + 1));
+    }
+}
 
-const string KafkaHandleBaseTest::KAFKA_TOPIC = "cppkafka_test1";
-
-TEST_F(KafkaHandleBaseTest, BrokersMetadata) {
+TEST_CASE("metadata", "[handle_base]") {
     Producer producer({});
     producer.add_brokers(KAFKA_TEST_INSTANCE);
     Metadata metadata = producer.get_metadata();
 
-    vector<BrokerMetadata> brokers = metadata.get_brokers();
-    ASSERT_EQ(1, brokers.size());
-    const auto& broker = brokers[0];
-    // TODO: resolve this
-    //EXPECT_EQ(get_kafka_host(), broker.get_host());
-    EXPECT_EQ(get_kafka_port(), broker.get_port());
-}
-
-TEST_F(KafkaHandleBaseTest, TopicsMetadata) {
-    unordered_set<string> topic_names = { "cppkafka_test1", "cppkafka_test2" };
-    size_t found_topics = 0;
-
-    Producer producer(make_config());
-    Metadata metadata = producer.get_metadata();
-
-    const vector<TopicMetadata>& topics = metadata.get_topics();
-    ASSERT_GE(topics.size(), 2);
-
-    for (const auto& topic : topics) {
-        if (topic_names.count(topic.get_name()) == 1) {
-            const vector<PartitionMetadata>& partitions = topic.get_partitions();
-            EXPECT_EQ(3, partitions.size());
-            set<int32_t> expected_ids = { 0, 1, 2 };
-            for (const PartitionMetadata& partition : partitions) {
-                EXPECT_EQ(1, expected_ids.erase(partition.get_id()));
-                for (int32_t replica : partition.get_replicas()) {
-                    EXPECT_EQ(0, replica);
-                }
-                for (int32_t isr : partition.get_in_sync_replica_brokers()) {
-                    EXPECT_EQ(0, isr);
-                }
-            }
-            found_topics++;
-        }
-    }
-    EXPECT_EQ(topic_names.size(), found_topics);
-
-    // Find by names
-    EXPECT_EQ(topic_names.size(), metadata.get_topics(topic_names).size());
-    // Find by prefix
-    EXPECT_EQ(topic_names.size(), metadata.get_topics_prefixed("cppkafka_").size());
-
-    // Now get the whole metadata only for this topic
-    Topic topic = producer.get_topic(KAFKA_TOPIC);
-    EXPECT_EQ(KAFKA_TOPIC, producer.get_metadata(topic).get_name());
+    SECTION("brokers") {
+        vector<BrokerMetadata> brokers = metadata.get_brokers();
+        REQUIRE(brokers.size() == 1);
+        const auto& broker = brokers[0];
+        // TODO: resolve this
+        //REQUIRE(broker.get_host() == get_kafka_host());
+        CHECK(broker.get_port() == get_kafka_port());
+    }
+
+    SECTION("topics") {
+        unordered_set<string> topic_names = { "cppkafka_test1", "cppkafka_test2" };
+        size_t found_topics = 0;
+
+        const vector<TopicMetadata>& topics = metadata.get_topics();
+        CHECK(topics.size() >= 2);
+
+        for (const auto& topic : topics) {
+            if (topic_names.count(topic.get_name()) == 1) {
+                const vector<PartitionMetadata>& partitions = topic.get_partitions();
+                REQUIRE(partitions.size() == 3);
+                set<int32_t> expected_ids = { 0, 1, 2 };
+                for (const PartitionMetadata& partition : partitions) {
+                    REQUIRE(expected_ids.erase(partition.get_id()) == 1);
+                    for (int32_t replica : partition.get_replicas()) {
+                        REQUIRE(replica == 0);
+                    }
+                    for (int32_t isr : partition.get_in_sync_replica_brokers()) {
+                        REQUIRE(isr == 0);
+                    }
+                }
+                found_topics++;
+            }
+        }
+        CHECK(found_topics == topic_names.size());
+
+        // Find by names
+        CHECK(metadata.get_topics(topic_names).size() == topic_names.size());
+        // Find by prefix
+        CHECK(metadata.get_topics_prefixed("cppkafka_").size() == topic_names.size());
+
+        // Now get the whole metadata only for this topic
+        Topic topic = producer.get_topic(KAFKA_TOPIC);
+        CHECK(producer.get_metadata(topic).get_name() == KAFKA_TOPIC);
+    }
 }
 
-TEST_F(KafkaHandleBaseTest, ConsumerGroups) {
+TEST_CASE("consumer groups", "[handle_base]") {
     string consumer_group = "kafka_handle_test";
     string client_id = "my_client_id";
 
@@ -117,15 +111,15 @@ TEST_F(KafkaHandleBaseTest, ConsumerGroups) {
     runner.try_join();
 
     GroupInformation information = consumer.get_consumer_group(consumer_group);
-    EXPECT_EQ(consumer_group, information.get_name());
-    EXPECT_EQ("consumer", information.get_protocol_type());
-    ASSERT_EQ(1, information.get_members().size());
+    CHECK(information.get_name() == consumer_group);
+    CHECK(information.get_protocol_type() == "consumer");
+    CHECK(information.get_members().size() == 1);
 
     auto member = information.get_members()[0];
-    EXPECT_EQ(client_id, member.get_client_id());
+    CHECK(member.get_client_id() == client_id);
 
     MemberAssignmentInformation assignment = member.get_member_assignment();
-    EXPECT_EQ(0, assignment.get_version());
+    CHECK(assignment.get_version() == 0);
     TopicPartitionList expected_topic_partitions = {
         { KAFKA_TOPIC, 0 },
        { KAFKA_TOPIC, 1 },
@@ -133,9 +127,5 @@ TEST_F(KafkaHandleBaseTest, ConsumerGroups) {
     };
     TopicPartitionList topic_partitions = assignment.get_topic_partitions();
     sort(topic_partitions.begin(), topic_partitions.end());
-    EXPECT_EQ(expected_topic_partitions, topic_partitions);
-    /*for (const auto c : ) {
-        printf("%0d,", (int)c & 0xff);
-    }
-    std::cout << std::endl;*/
+    CHECK(topic_partitions == expected_topic_partitions);
 }
@@ -3,7 +3,7 @@
 | 
				
			|||||||
#include <chrono>
 | 
					#include <chrono>
 | 
				
			||||||
#include <set>
 | 
					#include <set>
 | 
				
			||||||
#include <condition_variable>
 | 
					#include <condition_variable>
 | 
				
			||||||
#include <gtest/gtest.h>
 | 
					#include <catch.hpp>
 | 
				
			||||||
#include "cppkafka/producer.h"
 | 
					#include "cppkafka/producer.h"
 | 
				
			||||||
#include "cppkafka/consumer.h"
 | 
					#include "cppkafka/consumer.h"
 | 
				
			||||||
#include "cppkafka/utils/buffered_producer.h"
 | 
					#include "cppkafka/utils/buffered_producer.h"
 | 
				
			||||||
@@ -26,34 +26,29 @@ using std::chrono::milliseconds;
 
 using namespace cppkafka;
 
-class ProducerTest : public testing::Test {
-public:
-    static const string KAFKA_TOPIC;
+static const string KAFKA_TOPIC = "cppkafka_test1";
 
-    Configuration make_producer_config() {
-        Configuration config = {
-            { "metadata.broker.list", KAFKA_TEST_INSTANCE },
-            { "queue.buffering.max.ms", 0 },
-            { "api.version.request", true },
-            { "queue.buffering.max.ms", 50 }
-        };
-        return config;
-    }
+static Configuration make_producer_config() {
+    Configuration config = {
+        { "metadata.broker.list", KAFKA_TEST_INSTANCE },
+        { "queue.buffering.max.ms", 0 },
+        { "api.version.request", true },
+        { "queue.buffering.max.ms", 50 }
+    };
+    return config;
+}
 
-    Configuration make_consumer_config() {
-        Configuration config = {
-            { "metadata.broker.list", KAFKA_TEST_INSTANCE },
-            { "enable.auto.commit", false },
-            { "group.id", "producer_test" },
-            { "api.version.request", true }
-        };
-        return config;
-    }
-};
+static Configuration make_consumer_config() {
+    Configuration config = {
+        { "metadata.broker.list", KAFKA_TEST_INSTANCE },
+        { "enable.auto.commit", false },
+        { "group.id", "producer_test" },
+        { "api.version.request", true }
+    };
+    return config;
+}
 
-const string ProducerTest::KAFKA_TOPIC = "cppkafka_test1";
-
-TEST_F(ProducerTest, OneMessageOnFixedPartition) {
+TEST_CASE("simple production", "[producer]") {
     int partition = 0;
 
     // Create a consumer and assign this topic/partition
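One quirk carried over verbatim from the old fixture: make_producer_config() still lists "queue.buffering.max.ms" twice. Configuration applies its entries in order, so the later value (50) is presumably the one that takes effect and the earlier 0 entry is dead weight; the commit leaves it untouched since it only migrates the test framework.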
@@ -61,59 +56,121 @@ TEST_F(ProducerTest, OneMessageOnFixedPartition) {
     consumer.assign({ TopicPartition(KAFKA_TOPIC, partition) });
     ConsumerRunner runner(consumer, 1, 1);
 
-    // Now create a producer and produce a message
-    Producer producer(make_producer_config());
-    string payload = "Hello world! 1";
-    producer.produce(MessageBuilder(KAFKA_TOPIC).partition(partition).payload(payload));
-    runner.try_join();
-
-    const auto& messages = runner.get_messages();
-    ASSERT_EQ(1, messages.size());
-    const auto& message = messages[0];
-    EXPECT_EQ(Buffer(payload), message.get_payload());
-    EXPECT_FALSE(message.get_key());
-    EXPECT_EQ(KAFKA_TOPIC, message.get_topic());
-    EXPECT_EQ(partition, message.get_partition());
-    EXPECT_FALSE(message.get_error());
-
-    int64_t low;
-    int64_t high;
-    tie(low, high) = producer.query_offsets({ KAFKA_TOPIC, partition });
-    EXPECT_GT(high, low);
+    Configuration config = make_producer_config();
+    SECTION("message with no key") {
+        // Now create a producer and produce a message
+        const string payload = "Hello world! 1";
+        Producer producer(config);
+        producer.produce(MessageBuilder(KAFKA_TOPIC).partition(partition).payload(payload));
+        runner.try_join();
+
+        const auto& messages = runner.get_messages();
+        REQUIRE(messages.size() == 1);
+        const auto& message = messages[0];
+        CHECK(message.get_payload() == payload);
+        CHECK(!!message.get_key() == false);
+        CHECK(message.get_topic() == KAFKA_TOPIC);
+        CHECK(message.get_partition() == partition);
+        CHECK(!!message.get_error() == false);
+
+        int64_t low;
+        int64_t high;
+        tie(low, high) = producer.query_offsets({ KAFKA_TOPIC, partition });
+        CHECK(high > low);
+    }
+
+    SECTION("message with key") {
+        const string payload = "Hello world! 2";
+        const string key = "such key";
+        const milliseconds timestamp{15};
+        Producer producer(config);
+        producer.produce(MessageBuilder(KAFKA_TOPIC).partition(partition)
+                                                     .key(key)
+                                                     .payload(payload)
+                                                     .timestamp(timestamp));
+        runner.try_join();
+
+        const auto& messages = runner.get_messages();
+        REQUIRE(messages.size() == 1);
+        const auto& message = messages[0];
+        CHECK(message.get_payload() == payload);
+        CHECK(message.get_key() == key);
+        CHECK(message.get_topic() == KAFKA_TOPIC);
+        CHECK(message.get_partition() == partition);
+        CHECK(!!message.get_error() == false);
+        REQUIRE(!!message.get_timestamp() == true);
+        CHECK(message.get_timestamp()->get_timestamp() == timestamp);
+    }
+
+    SECTION("callbacks") {
+        // Now create a producer and produce a message
+        const string payload = "Hello world! 3";
+        const string key = "hehe";
+        bool delivery_report_called = false;
+        config.set_delivery_report_callback([&](Producer&, const Message& msg) {
+            CHECK(msg.get_payload() == payload);
+            delivery_report_called = true;
+        });
+
+        TopicConfiguration topic_config;
+        topic_config.set_partitioner_callback([&](const Topic& topic, const Buffer& msg_key,
+                                                  int32_t partition_count) {
+            CHECK(msg_key == key);
+            CHECK(partition_count == 3);
+            CHECK(topic.get_name() == KAFKA_TOPIC);
+            return 0;
+        });
+        config.set_default_topic_configuration(topic_config);
+
+        Producer producer(config);
+        producer.produce(MessageBuilder(KAFKA_TOPIC).key(key).payload(payload));
+        while (producer.get_out_queue_length() > 0) {
+            producer.poll();
+        }
+        runner.try_join();
+
+        const auto& messages = runner.get_messages();
+        REQUIRE(messages.size() == 1);
+        const auto& message = messages[0];
+        CHECK(message.get_payload() == payload);
+        CHECK(message.get_key() == key);
+        CHECK(message.get_topic() == KAFKA_TOPIC);
+        CHECK(message.get_partition() == partition);
+        CHECK(!!message.get_error() == false);
+        CHECK(delivery_report_called == true);
+    }
+
+    SECTION("partitioner callback") {
+        // Now create a producer and produce a message
+        const string payload = "Hello world! 4";
+        const string key = "hehe";
+        bool callback_called = false;
+
+        TopicConfiguration topic_config;
+        topic_config.set_partitioner_callback([&](const Topic& topic, const Buffer& msg_key,
+                                                  int32_t partition_count) {
+            CHECK(msg_key == key);
+            CHECK(partition_count == 3);
+            CHECK(topic.get_name() == KAFKA_TOPIC);
+            callback_called = true;
+            return 0;
+        });
+        config.set_default_topic_configuration(topic_config);
+        Producer producer(config);
+
+        producer.produce(MessageBuilder(KAFKA_TOPIC).key(key).payload(payload));
+        producer.poll();
+        runner.try_join();
+
+        const auto& messages = runner.get_messages();
+        REQUIRE(messages.size() == 1);
+        const auto& message = messages[0];
+        CHECK(message.get_partition() == partition);
+        CHECK(callback_called == true);
+    }
 }
 
-TEST_F(ProducerTest, OneMessageUsingKey) {
-    int partition = 0;
-
-    // Create a consumer and assign this topic/partition
-    Consumer consumer(make_consumer_config());
-    consumer.assign({ TopicPartition(KAFKA_TOPIC, partition) });
-    ConsumerRunner runner(consumer, 1, 1);
-
-    // Now create a producer and produce a message
-    Producer producer(make_producer_config());
-    string payload = "Hello world! 2";
-    string key = "such key";
-    const milliseconds timestamp{15};
-    producer.produce(MessageBuilder(KAFKA_TOPIC).partition(partition)
-                                                 .key(key)
-                                                 .payload(payload)
-                                                 .timestamp(timestamp));
-    runner.try_join();
-
-    const auto& messages = runner.get_messages();
-    ASSERT_EQ(1, messages.size());
-    const auto& message = messages[0];
-    EXPECT_EQ(Buffer(payload), message.get_payload());
-    EXPECT_EQ(Buffer(key), message.get_key());
-    EXPECT_EQ(KAFKA_TOPIC, message.get_topic());
-    EXPECT_EQ(partition, message.get_partition());
-    EXPECT_FALSE(message.get_error());
-    EXPECT_TRUE(message.get_timestamp());
-    EXPECT_EQ(timestamp, message.get_timestamp()->get_timestamp());
-}
-
-TEST_F(ProducerTest, MultipleMessagesUnassignedPartitions) {
+TEST_CASE("multiple messages", "[producer]") {
     size_t message_count = 10;
     int partitions = 3;
     set<string> payloads;
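A note on the restructuring above: Catch2 re-runs the enclosing TEST_CASE body once per SECTION, so the consumer, ConsumerRunner, and config built before the first SECTION are recreated for each of the four scenarios. That is what lets four former TEST_Fs share fixture-style setup without a fixture class. A minimal sketch of that execution model (hypothetical, not from this commit):

    #include <vector>
    #include <catch.hpp>

    TEST_CASE("sections re-run the shared setup", "[example]") {
        // Executed once per leaf SECTION, i.e. twice in total here,
        // so each section starts from a fresh copy of this state.
        std::vector<int> data = {1, 2, 3};

        SECTION("removing an element") {
            data.pop_back();
            CHECK(data.size() == 2);
        }
        SECTION("adding an element") {
            data.push_back(4);   // sees size 3 again, not 2
            CHECK(data.size() == 4);
        }
    }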
@@ -125,110 +182,27 @@ TEST_F(ProducerTest, MultipleMessagesUnassignedPartitions) {
 
     // Now create a producer and produce a message
     Producer producer(make_producer_config());
-    string payload_base = "Hello world ";
+    const string payload_base = "Hello world ";
     for (size_t i = 0; i < message_count; ++i) {
-        string payload = payload_base + to_string(i);
+        const string payload = payload_base + to_string(i);
         payloads.insert(payload);
         producer.produce(MessageBuilder(KAFKA_TOPIC).payload(payload));
     }
     runner.try_join();
 
     const auto& messages = runner.get_messages();
-    ASSERT_EQ(message_count, messages.size());
+    REQUIRE(messages.size() == message_count);
     for (const auto& message : messages) {
-        EXPECT_EQ(KAFKA_TOPIC, message.get_topic());
-        EXPECT_EQ(1, payloads.erase(message.get_payload()));
-        EXPECT_FALSE(message.get_error());
-        EXPECT_FALSE(message.get_key());
-        EXPECT_GE(message.get_partition(), 0);
-        EXPECT_LT(message.get_partition(), 3);
+        CHECK(message.get_topic() == KAFKA_TOPIC);
+        CHECK(payloads.erase(message.get_payload()) == 1);
+        CHECK(!!message.get_error() == false);
+        CHECK(!!message.get_key() == false);
+        CHECK(message.get_partition() >= 0);
+        CHECK(message.get_partition() < 3);
     }
 }
 
-TEST_F(ProducerTest, Callbacks) {
-    int partition = 0;
-
-    // Create a consumer and assign this topic/partition
-    Consumer consumer(make_consumer_config());
-    consumer.assign({ TopicPartition(KAFKA_TOPIC, partition) });
-    ConsumerRunner runner(consumer, 1, 1);
-
-    // Now create a producer and produce a message
-    string payload = "Hello world! 3";
-    string key = "hehe";
-    bool delivery_report_called = false;
-    Configuration config = make_producer_config();
-    config.set_delivery_report_callback([&](Producer&, const Message& msg) {
-        EXPECT_EQ(Buffer(payload), msg.get_payload());
-        delivery_report_called = true;
-    });
-
-    TopicConfiguration topic_config;
-    topic_config.set_partitioner_callback([&](const Topic& topic, const Buffer& msg_key,
-                                              int32_t partition_count) {
-        EXPECT_EQ(Buffer(key), msg_key);
-        EXPECT_EQ(3, partition_count);
-        EXPECT_EQ(KAFKA_TOPIC, topic.get_name());
-        return 0;
-    });
-    config.set_default_topic_configuration(topic_config);
-
-    Producer producer(move(config));
-    producer.produce(MessageBuilder(KAFKA_TOPIC).key(key).payload(payload));
-    while (producer.get_out_queue_length() > 0) {
-        producer.poll();
-    }
-    runner.try_join();
-
-    const auto& messages = runner.get_messages();
-    ASSERT_EQ(1, messages.size());
-    const auto& message = messages[0];
-    EXPECT_EQ(Buffer(payload), message.get_payload());
-    EXPECT_EQ(Buffer(key), message.get_key());
-    EXPECT_EQ(KAFKA_TOPIC, message.get_topic());
-    EXPECT_EQ(partition, message.get_partition());
-    EXPECT_FALSE(message.get_error());
-    EXPECT_TRUE(delivery_report_called);
-}
-
-TEST_F(ProducerTest, PartitionerCallbackOnDefaultTopicConfig) {
-    int partition = 0;
-
-    // Create a consumer and assign this topic/partition
-    Consumer consumer(make_consumer_config());
-    consumer.assign({ TopicPartition(KAFKA_TOPIC, partition) });
-    ConsumerRunner runner(consumer, 1, 1);
-
-    // Now create a producer and produce a message
-    string payload = "Hello world! 4";
-    string key = "hehe";
-    bool callback_called = false;
-
-    Configuration config = make_producer_config();
-    TopicConfiguration topic_config;
-    topic_config.set_partitioner_callback([&](const Topic& topic, const Buffer& msg_key,
-                                              int32_t partition_count) {
-        EXPECT_EQ(Buffer(key), msg_key);
-        EXPECT_EQ(3, partition_count);
-        EXPECT_EQ(KAFKA_TOPIC, topic.get_name());
-        callback_called = true;
-        return 0;
-    });
-    config.set_default_topic_configuration(topic_config);
-
-    Producer producer(move(config));
-    producer.produce(MessageBuilder(KAFKA_TOPIC).key(key).payload(payload));
-    producer.poll();
-    runner.try_join();
-
-    const auto& messages = runner.get_messages();
-    ASSERT_EQ(1, messages.size());
-    const auto& message = messages[0];
-    EXPECT_EQ(partition, message.get_partition());
-    EXPECT_TRUE(callback_called);
-}
-
-TEST_F(ProducerTest, BufferedProducer) {
+TEST_CASE("buffered producer", "[producer]") {
     int partition = 0;
 
     // Create a consumer and assign this topic/partition
@@ -238,8 +212,8 @@ TEST_F(ProducerTest, BufferedProducer) {
 
     // Now create a buffered producer and produce two messages
     BufferedProducer<string> producer(make_producer_config());
-    string payload = "Hello world! 2";
-    string key = "such key";
+    const string payload = "Hello world! 2";
+    const string key = "such key";
     producer.add_message(MessageBuilder(KAFKA_TOPIC).partition(partition)
                                                     .key(key)
                                                     .payload(payload));
@@ -253,16 +227,16 @@ TEST_F(ProducerTest, BufferedProducer) {
     runner.try_join();
 
     const auto& messages = runner.get_messages();
-    ASSERT_EQ(3, messages.size());
+    REQUIRE(messages.size() == 3);
     const auto& message = messages[0];
-    EXPECT_EQ(Buffer(key), message.get_key());
-    EXPECT_EQ(KAFKA_TOPIC, message.get_topic());
-    EXPECT_EQ(partition, message.get_partition());
-    EXPECT_FALSE(message.get_error());
+    CHECK(message.get_key() == key);
+    CHECK(message.get_topic() == KAFKA_TOPIC);
+    CHECK(message.get_partition() == partition);
+    CHECK(!!message.get_error() == false);
 
-    EXPECT_FALSE(messages[1].get_key());
-    EXPECT_FALSE(messages[2].get_key());
+    CHECK(!!messages[1].get_key() == false);
+    CHECK(!!messages[2].get_key() == false);
     for (const auto& message : messages) {
-        EXPECT_EQ(Buffer(payload), message.get_payload());
+        CHECK(message.get_payload() == payload);
     }
 }
 
tests/test_main.cpp (new file, 74 lines)
@@ -0,0 +1,74 @@
+#include <chrono>
+#define CATCH_CONFIG_RUNNER
+#include <catch.hpp>
+
+using std::string;
+using std::chrono::steady_clock;
+using std::chrono::milliseconds;
+using std::chrono::duration_cast;
+
+using Catch::ConsoleReporter;
+using Catch::ReporterConfig;
+using Catch::ReporterPreferences;
+using Catch::TestCaseInfo;
+using Catch::TestCaseStats;
+using Catch::Totals;
+using Catch::Session;
+
+namespace cppkafka {
+
+class InstantTestReporter : public ConsoleReporter {
+public:
+    using ClockType = steady_clock;
+
+    InstantTestReporter(const ReporterConfig& config)
+    : ConsoleReporter(config) {
+    }
+
+    static string getDescription() {
+        return "Reports the tests' progress as they run";
+    }
+
+    ReporterPreferences getPreferences() const override {
+        ReporterPreferences output;
+        output.shouldRedirectStdOut = false;
+        return output;
+    }
+
+    void testCaseStarting(const TestCaseInfo& info) override {
+        ConsoleReporter::testCaseStarting(info);
+        stream << "Running test \"" << info.name << "\" @ " << info.lineInfo << "\n";
+        test_start_ts_ = ClockType::now();
+    }
+
+    void testCaseEnded(const TestCaseStats& stats) override {
+        const Totals& totals = stats.totals;
+        const size_t totalTestCases = totals.assertions.passed + totals.assertions.failed;
+        const auto elapsed = ClockType::now() - test_start_ts_;
+        stream << "Done. " << totals.assertions.passed << "/" << totalTestCases
+               << " assertions succeeded in " << duration_cast<milliseconds>(elapsed).count()
+               << "ms\n";
+    }
+private:
+    ClockType::time_point test_start_ts_;
+};
+
+CATCH_REGISTER_REPORTER("instant", InstantTestReporter)
+
+} // cppkafka
+
+int main(int argc, char* argv[]) {
+    Session session;
+
+    int returnCode = session.applyCommandLine( argc, argv );
+    if (returnCode != 0) {
+        return returnCode;
+    }
+    if (session.configData().reporterNames.empty()) {
+        // Set our reporter as the default one
+        session.configData().reporterNames.emplace_back("instant");
+    }
+
+    int numFailed = session.run();
+    return numFailed;
+}
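Two details worth calling out in the new runner: defining CATCH_CONFIG_RUNNER before including catch.hpp stops Catch2 from generating its own main(), which is what makes the custom entry point possible, and the "instant" reporter is only installed as the default when no reporter was requested on the command line. The stock reporters therefore stay reachable; for instance, ./tests/cppkafka_tests -r console (the standard Catch2 -r/--reporter flag, not something this commit adds) should select the plain console reporter instead.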
@@ -1,5 +1,5 @@
 #include <sstream>
-#include <gtest/gtest.h>
+#include <catch.hpp>
 #include "cppkafka/topic_partition_list.h"
 #include "cppkafka/topic_partition.h"
 
@@ -7,12 +7,7 @@ using std::ostringstream;
 
 using namespace cppkafka;
 
-class TopicPartitionListTest : public testing::Test {
-public:
-    
-};
-
-TEST_F(TopicPartitionListTest, Conversion) {
+TEST_CASE("rdkafka conversion", "[topic_partition]") {
     TopicPartitionList list1;
     list1.push_back("foo");
     list1.push_back({ "bar", 2 });
@@ -20,24 +15,24 @@ TEST_F(TopicPartitionListTest, Conversion) {
 
     TopicPartitionList list2 = convert(convert(list1));
 
-    EXPECT_EQ(list1.size(), list2.size());
+    CHECK(list1.size() == list2.size());
     for (size_t i = 0; i < list1.size(); ++i) {
         const auto& item1 = list1[i];
         const auto& item2 = list2[i];
-        EXPECT_EQ(item1.get_topic(), item2.get_topic());
-        EXPECT_EQ(item1.get_partition(), item2.get_partition());
-        EXPECT_EQ(item1.get_offset(), item2.get_offset());
+        CHECK(item1.get_topic() == item2.get_topic());
+        CHECK(item1.get_partition() == item2.get_partition());
+        CHECK(item1.get_offset() == item2.get_offset());
     }
 }
 
-TEST_F(TopicPartitionListTest, AsString) {
+TEST_CASE("topic partition to string", "[topic_partition]") {
     ostringstream output;
     TopicPartition topic_partition("foo", 5);
     output << topic_partition;
-    EXPECT_EQ("foo[5:#]", output.str());
+    CHECK(output.str() == "foo[5:#]");
 }
 
-TEST_F(TopicPartitionListTest, ListAsString) {
+TEST_CASE("topic partition list to string", "[topic_partition]") {
     ostringstream output;
     TopicPartitionList list;
     list.push_back("foo");
@@ -45,5 +40,5 @@ TEST_F(TopicPartitionListTest, ListAsString) {
     list.push_back({ "foobar", 3, 4 });
 
     output << list;
-    EXPECT_EQ("[ foo[-1:#], bar[2:#], foobar[3:4] ]", output.str());
+    CHECK(output.str() == "[ foo[-1:#], bar[2:#], foobar[3:4] ]");
 }
 
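The expected strings above document the stream format for TopicPartition: topic[partition:offset], where -1 marks an unassigned partition and # an offset that was never set. A small usage sketch built only from the constructors and stream operator these tests exercise (hypothetical, not part of the commit):

    #include <iostream>
    #include "cppkafka/topic_partition.h"
    #include "cppkafka/topic_partition_list.h"

    int main() {
        using cppkafka::TopicPartition;
        std::cout << TopicPartition("foo") << "\n";          // foo[-1:#]
        std::cout << TopicPartition("bar", 2) << "\n";       // bar[2:#]
        std::cout << TopicPartition("foobar", 3, 4) << "\n"; // foobar[3:4]
    }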
Submodule third_party/Catch2 added at d2d8455b57
Submodule third_party/googletest deleted from 0a439623f7