A Simple API Performance-Matrix Test with C++11

This version keeps essentially the same APIs as the previous one; it is still an interface-testing project, but it adds a simple performance-matrix test.
It does not measure concurrent performance; the focus is how a single API behaves under different parameters.

Highlights of this example:

  1. For time handling, the C time_t routines are dropped in favour of boost::posix_time and boost::gregorian (part of Boost.Date_Time).

  2. For the performance figures, besides the size of the decompressed (gunzipped) response, the size of the raw data before decompression is also recorded.
    The HTTP library is still cpp-httplib. The request adds "Accept-Encoding", "gzip, deflate" to the HTTP headers, so the ClientImpl object knows that the response we want back is gzip-encoded,
    and cli.set_decompress(false) is set so the raw gzip content is returned as-is, without gzip decompression. A condensed sketch of this idea follows the list.

  3. A retry decorator wraps the request to retry it and compute the average time. [It contributes little in this example: each API is requested only once, since there is no concurrency here and the response time of a single request is fairly stable; two requests differ by less than one second.]
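
As a condensed sketch of point 2 (the complete version is in http_util.cpp further down): the essential cpp-httplib calls look roughly like this; the host, path and hard-coded port 443 are placeholders, not values from this project.

#define CPPHTTPLIB_OPENSSL_SUPPORT
#define CPPHTTPLIB_ZLIB_SUPPORT
#include "http/httplib.h"

#include <iostream>
#include <string>

// Fetch the same path twice: once letting cpp-httplib gunzip the body (default),
// once with set_decompress(false) so the raw gzip bytes come back untouched.
int main() {
    const std::string host = "api.example.com";   // placeholder host
    const std::string path = "/v1.3/some/api";    // placeholder path
    httplib::SSLClient cli(host, 443);
    httplib::Headers headers{{"Accept-Encoding", "gzip, deflate"}};

    auto res_plain = cli.Get(path.c_str(), headers);   // decompressed body

    cli.set_decompress(false);
    auto res_raw = cli.Get(path.c_str(), headers);     // raw gzip body

    if (res_plain && res_raw) {
        std::cout << "decompressed KB: " << res_plain->body.size() / 1024.0 << "\n"
                  << "raw KB:          " << res_raw->body.size() / 1024.0 << "\n";
    }
    return 0;
}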

The code is laid out as follows:


[image: project directory structure]

The code is as follows.
CMakeLists.txt

cmake_minimum_required(VERSION 2.6)

if(APPLE)
    message(STATUS "This is Apple, do nothing.")
elseif(UNIX)
    message(STATUS "This is linux, set CMAKE_PREFIX_PATH.")
    set(CMAKE_PREFIX_PATH /vcpkg/ports/cppwork/vcpkg_installed/x64-linux/share)
endif(APPLE)

project(app_perf_test)

add_definitions(-std=c++17)

add_definitions(-g)

find_package(ZLIB)

find_package(glog REQUIRED)

find_package(OpenCV REQUIRED )

find_package(Boost REQUIRED COMPONENTS
    system
    filesystem
    serialization
    program_options
    thread
    )

find_package(DataFrame REQUIRED)

if(APPLE)
    MESSAGE(STATUS "This is APPLE, set INCLUDE_DIRS")
set(INCLUDE_DIRS ${Boost_INCLUDE_DIRS} /usr/local/include /usr/local/iODBC/include /opt/snowflake/snowflakeodbc/include/ ${CMAKE_CURRENT_SOURCE_DIR}/../../)
elseif(UNIX)
    MESSAGE(STATUS "This is linux, set INCLUDE_DIRS")
    set(INCLUDE_DIRS ${Boost_INCLUDE_DIRS} /usr/local/include ${CMAKE_CURRENT_SOURCE_DIR}/../../)
endif(APPLE)


if(APPLE)
    MESSAGE(STATUS "This is APPLE, set LINK_DIRS")
    set(LINK_DIRS /usr/local/lib /usr/local/iODBC/lib /opt/snowflake/snowflakeodbc/lib/universal)
elseif(UNIX)
    MESSAGE(STATUS "This is linux, set LINK_DIRS")
    set(LINK_DIRS ${Boost_INCLUDE_DIRS} /usr/local/lib /vcpkg/ports/cppwork/vcpkg_installed/x64-linux/lib)
endif(APPLE)

if(APPLE)
    MESSAGE(STATUS "This is APPLE, set ODBC_LIBS")
    set(ODBC_LIBS iodbc iodbcinst)
elseif(UNIX)
    MESSAGE(STATUS "This is linux, set ODBC_LIBS")
    set(ODBC_LIBS odbc odbcinst ltdl)
endif(APPLE)

include_directories(${INCLUDE_DIRS})
LINK_DIRECTORIES(${LINK_DIRS})

file( GLOB test_file_list ${CMAKE_CURRENT_SOURCE_DIR}/*.cpp) 

file( GLOB APP_SOURCES ${CMAKE_CURRENT_SOURCE_DIR}/../impl/*.cpp ${CMAKE_CURRENT_SOURCE_DIR}/../utils/*.cpp ${CMAKE_CURRENT_SOURCE_DIR}/../cases/*.cpp ${CMAKE_CURRENT_SOURCE_DIR}/*.h ${CMAKE_CURRENT_SOURCE_DIR}/../../http/impl/*.cpp ${CMAKE_CURRENT_SOURCE_DIR}/../../yaml/impl/*.cpp ${CMAKE_CURRENT_SOURCE_DIR}/../../df/impl/*.cpp ${CMAKE_CURRENT_SOURCE_DIR}/../../death_handler/impl/*.cpp)

add_library(${PROJECT_NAME}_lib SHARED ${APP_SOURCES})
target_link_libraries(${PROJECT_NAME}_lib ${Boost_LIBRARIES} ZLIB::ZLIB glog::glog DataFrame::DataFrame ${OpenCV_LIBS})
target_link_libraries(${PROJECT_NAME}_lib  ssl crypto libgtest.a pystring libyaml-cpp.a libgmock.a ${ODBC_LIBS} libnanodbc.a pthread dl backtrace)

foreach( test_file ${test_file_list} )
    file(RELATIVE_PATH filename ${CMAKE_CURRENT_SOURCE_DIR} ${test_file})
    string(REPLACE ".cpp" "" file ${filename})
    add_executable(${file}  ${test_file})
    target_link_libraries(${file} ${PROJECT_NAME}_lib)
endforeach( test_file ${test_file_list})

test/app_perf_perf_test.cpp

#include "api_accuracy/cases/app_perf_cases.h"
#include "api_accuracy/test_cfg.h"
#include "api_accuracy/utils/req.h"
#include "api_accuracy/utils/io_util.h"
#include "api_accuracy/utils/funcs.h"
#include "api_accuracy/utils/beans.h"
#include "api_accuracy/utils/compare.h"
#include "api_accuracy/utils/decorator.hpp"
#include "api_accuracy/utils/data_op.h"

#include "df/df.h"

#include "pystring/pystring.h"
#include "json/json.hpp"
#include "death_handler/death_handler.h"

#include "glog/logging.h"

#include "gtest/gtest.h"


using json = nlohmann::json;

int main(int argc, char** argv) {
    FLAGS_log_dir = "./";
    FLAGS_alsologtostderr = true;
    // Log levels INFO, WARNING, ERROR, FATAL map to 0, 1, 2, 3 respectively
    FLAGS_minloglevel = 0;

    Debug::DeathHandler dh;

    google::InitGoogleLogging("./logs.log");
    testing::InitGoogleTest(&argc, argv);
    int ret = RUN_ALL_TESTS();
    return ret;
}


class AppPerfPerfTests : public testing::Test {
   protected:
    virtual void SetUp() { perf_cases = app_perf_perf_cases; }

    virtual void TearDown() {}

    std::vector perf_cases;
};

TEST_F(AppPerfPerfTests, ParseStartDateAndEndDateFromUrl) {
    const std::string url = "/v1.3/portfolio/app-performance/less?company_id=1000200000000034&granularity=monthly&start_date=2020-01-01&end_date=2020-03-31&countries=US,CN,JP,GB";

    auto date_range = parse_start_date_and_end_date_from_url(url);
    auto start_date = date_range["start_date"].get<std::string>();
    auto end_date = date_range["end_date"].get<std::string>();
    LOG(INFO) << start_date << "\n";
    LOG(INFO) << end_date << "\n";
    ASSERT_EQ("start_date=2020-01-01", start_date);
    ASSERT_EQ("end_date=2020-03-31", end_date);
}

TEST_F(AppPerfPerfTests, ReplaceEndDateWithGivenDate) {
    const std::string url = "/v1.3/portfolio/app-performance/less?company_id=1000200000000034&granularity=monthly&start_date=2020-01-01&end_date=2020-03-31&countries=US,CN,JP,GB";
    const std::string given_date = "2020-05-31";
    auto new_url = replace_end_date_with_given_date(given_date, url);
    LOG(INFO) << new_url << "\n"; 
    auto expect_url = "/v1.3/portfolio/app-performance/less?company_id=1000200000000034&granularity=monthly&start_date=2020-01-01&end_date=2020-05-31&countries=US,CN,JP,GB";
    ASSERT_EQ(expect_url, new_url);
}

TEST_F(AppPerfPerfTests, TestPerfCases) {
    json pt_list;
    auto req_test_func = make_decorator(Req::make_a_api_query); 

    for (auto&& perf_case : perf_cases) {
        auto is_running = true;
        auto url = perf_case.url;
        auto i = 0;
        while(is_running) {
            LOG(INFO) << url << "\n";
            auto req_result = req_test_func(url);
            is_running = req_result.status;
            if(!is_running) {
                json pt {
                    {"1name", ""},
                    {"2apps_count", ""},
                    {"3data_chunks", ""}, 
                    {"4raw_size", ""}, 
                    {"5decompress_size", ""},
                    {"6response_time", ""},
                };

                pt_list.emplace_back(pt);
                pt_list.emplace_back(pt);
                LOG(INFO) << "Met 521 error, break..." << "\n";
                break;
            }
            auto res_content = req_result.res;
            auto response_time = req_result.avg_time;
            auto zipped_content = Req::make_a_zipped_query(url);

            auto raw_size = (float)(zipped_content.size()) / (float)(1024);
            auto decompress_size = (float)(res_content.size()) / (float)(1024);
        
            LOG(INFO) << "Raw size: " << raw_size << "\n";
            LOG(INFO) << "Decompressed size:" << decompress_size << "\n";
            auto res_js = json::parse(res_content);
            auto d_op = data_op(res_js);
            auto apps_count = d_op.get_apps_count();
            auto data_chunks = d_op.get_data_chunks();
            json pt_value {
                {"1name", url},
                {"2apps_count", apps_count},
                {"3data_chunks", data_chunks},
                {"4raw_size", raw_size},
                {"5decompress_size", decompress_size},
                {"6response_time", response_time}
            };
            pt_list.emplace_back(std::move(pt_value));
            ++i;
            auto ret_val = adjust_req_url_date(url, i);
            auto end_date_str = ret_val["end_date"].get<std::string>();
            url = ret_val["ret_url"].get<std::string>();
            auto end_date = boost_ptime_from_string(end_date_str);
            auto now = boost::posix_time::second_clock::local_time();

            if(end_date > now) {
                json pt {
                    {"1name", ""},
                    {"2apps_count", ""},
                    {"3data_chunks", ""}, 
                    {"4raw_size", ""}, 
                    {"5decompress_size", ""},
                    {"6response_time", ""},
                };

                pt_list.emplace_back(pt);
                pt_list.emplace_back(pt);
                LOG(INFO) << "Met end date, break..." << "\n";
                break;
            }
        }
    }

    auto rs_csv_path = "../result/result.csv";
    auto df = df_op::convert_json_to_df(pt_list, {});
    df_op::write_to_csv(df, rs_csv_path);
}

utils/funcs.h

#ifndef _FREDRIC_FUNCS_H_
#define _FREDRIC_FUNCS_H_

#include "json/json.hpp"

#include <boost/date_time/posix_time/posix_time.hpp>

#include <cstdint>
#include <ctime>
#include <map>
#include <string>
#include <vector>

using json = nlohmann::json;

std::vector<std::string> get_map_keys(const std::map<std::string, std::string>& field_mapping);
std::vector<std::string> get_map_values(const std::map<std::string, std::string>& field_mapping);
std::string read_query_file_and_replace_consts(const json& prod, const std::string& www_query_file_name);
json parse_dim_product_fields(const json& result_json, const std::vector<std::string>& dim_keys_);
json convert_dim_values_to_api_values(const json& values, const std::map<std::string, std::string>& api_dim_mapping);
bool find_diff_and_save_f(const json& values_, const json& act_www_values_, const std::string& api_data_field_name);

std::string timestamp_to_utc_time(const uint64_t& timestamp_);
time_t utc_timestamp_from_string(const std::string& date_str);
boost::posix_time::ptime boost_ptime_from_string(const std::string& date_str);

std::string get_ith_day(const std::string& date_str, const int i);
std::string get_ith_week(const std::string& date_str, const int i);
std::string get_ith_month(const std::string& date_str, const int i);

float get_float_fraction(float number);

json parse_start_date_and_end_date_from_url(const std::string& url);

std::string replace_end_date_with_given_date(const std::string& given_date, const std::string& url);

json adjust_req_url_date(const std::string& url, int i);
#endif

utils/funcs.cpp

#include "api_accuracy/utils/funcs.h"

#include "pystring/pystring.h"

#include <boost/date_time/gregorian/gregorian.hpp>
#include <boost/date_time/posix_time/posix_time.hpp>

#include <algorithm>
#include <cmath>
#include <cstdlib>
#include <regex>
#include <sstream>
#include <string>
#include <vector>

#include "api_accuracy/utils/io_util.h"
#include "df/df.h"
#include "glog/logging.h"

std::vector<std::string> get_map_keys(
    const std::map<std::string, std::string>& field_mapping) {
    std::vector<std::string> keys{};
    std::for_each(field_mapping.begin(), field_mapping.end(),
                  [&keys](const auto& item) { keys.push_back(item.first); });
    return keys;
}

std::vector<std::string> get_map_values(
    const std::map<std::string, std::string>& field_mapping) {
    std::vector<std::string> values{};
    std::for_each(
        field_mapping.begin(), field_mapping.end(),
        [&values](const auto& item) { values.push_back(item.second); });
    return values;
}

std::string read_query_file_and_replace_consts(
    const json& prod, const std::string& www_query_file_name) {
    auto product_id = prod["product_id"].get<int64_t>();
    auto market_code = prod["market_code"].get<std::string>();
    auto start_date = prod["start_date"].get<std::string>();
    auto end_date = prod["end_date"].get<std::string>();
    auto country_code = prod["countries"].get<std::string>();
    auto device_code = prod["devices"].get<std::string>();
    auto granularity = prod["granularity"].get<std::string>();

    auto file_content = IOUtil::read_file(www_query_file_name);

    file_content = pystring::replace(file_content, "${product_id}",
                                     std::to_string(product_id));
    file_content =
        pystring::replace(file_content, "${market_code}", market_code);
    file_content = pystring::replace(file_content, "${start_date}", start_date);
    file_content = pystring::replace(file_content, "${end_date}", end_date);
    std::vector<std::string> country_list{};
    pystring::split(country_code, country_list, ",");
    json country_js = country_list;
    file_content =
        pystring::replace(file_content, "${country_code}", country_js.dump());
    file_content =
        pystring::replace(file_content, "${granularity}", granularity);

    if (country_list[0] == "all_supported") {
        auto content_js = json::parse(file_content);
        content_js["filters"].erase("country_code");
        file_content = content_js.dump();
    }

    std::vector<std::string> device_list{};
    pystring::split(device_code, device_list, ",");
    if (device_list[0] != "all_supported") {
        auto content_js = json::parse(file_content);
        content_js["filters"]["device_code"] = {{"in", device_list}};
        file_content = content_js.dump();
    }

    return file_content;
}

json parse_dim_product_fields(const json& result_json,
                              const std::vector<std::string>& dim_keys_) {
    json values;
    auto facets = result_json["data"]["facets"];
    auto dims = result_json["data"]["dimensions"];
    for (auto&& facet_ : facets) {
        json value;
        for (auto&& dim_key_ : dim_keys_) {
            if (dim_key_.find("/") == std::string::npos) {
                value[dim_key_] = facet_[dim_key_];
            } else {
                json tmp_val;
                std::string tmp_key = "";
                std::vector<std::string> one_dim_keys{};
                pystring::split(dim_key_, one_dim_keys, "/");
                for (int i = 0; i < one_dim_keys.size(); ++i) {
                    if (i == 0) {
                        tmp_key = one_dim_keys[0];
                        if (!facet_[tmp_key].is_null()) {
                            tmp_val =
                                std::to_string(facet_[tmp_key].get<int64_t>());
                        } else {
                            break;
                        }
                    } else {
                        for (auto &&begin = dims.begin(), end = dims.end();
                             begin != end; ++begin) {
                            auto in_dim_key = begin.key();
                            auto in_dim_val = begin.value();

                            if (one_dim_keys[i - 1] == in_dim_key) {
                                for (auto &&ii_begin = in_dim_val.begin(),
                                          ii_end = in_dim_val.end();
                                     ii_begin != ii_end; ++ii_begin) {
                                    auto ii_dim_key = ii_begin.key();
                                    auto ii_dim_val = ii_begin.value();
                                    if (ii_dim_key ==
                                        tmp_val.get<std::string>()) {
                                        if (i == (one_dim_keys.size() - 1)) {
                                            tmp_val =
                                                ii_dim_val[one_dim_keys[i]];
                                        } else {
                                            if (!ii_dim_val[one_dim_keys[i]]
                                                     .is_null()) {
                                                tmp_val = std::to_string(
                                                    ii_dim_val[one_dim_keys[i]]
                                                        .get<int64_t>());
                                            } else {
                                                break;
                                            }
                                        }
                                    }
                                }
                            }
                        }
                    }
                }
                value[dim_key_] = tmp_val;
            }
        }
        values.emplace_back(std::move(value));
    }
    return std::move(values);
}

json convert_dim_values_to_api_values(
    const json& values,
    const std::map& api_dim_mapping) {
    json new_values;
    for (auto&& value : values) {
        json new_value;
        for (auto&& api_dim_item : api_dim_mapping) {
            auto api_key = api_dim_item.first;
            auto dim_key = api_dim_item.second;
            new_value[api_key] = value[dim_key];
        }
        new_values.emplace_back(std::move(new_value));
    }
    return std::move(new_values);
}

bool find_diff_and_save_f(const json& values_, const json& act_www_values_,
                          const std::string& api_data_field_name) {
    std::vector<std::string> pri_keys_{};
    auto df1 = df_op::convert_json_to_df(values_, pri_keys_);
    auto df2 = df_op::convert_json_to_df(act_www_values_, pri_keys_);
    std::string res_csv_path = "../result/" + api_data_field_name + "_res.csv";
    if (values_.size() == 0 && act_www_values_.size() == 0) {
        return false;
    } else if (values_.size() == 0 && act_www_values_.size() != 0) {
        df_op::write_to_csv(df2, res_csv_path);
        return true;
    } else if (values_.size() != 0 && act_www_values_.size() == 0) {
        df_op::write_to_csv(df1, res_csv_path);
        return true;
    } else {
        auto df3 =
            df1.concat(df2, concat_policy::all_columns);
        std::vector<std::string> keys_{Keys};
        auto df4 = df_op::remove_duplicate(df3, keys_);
        auto df_rows = df4.shape().first;
        if (df_rows == 0) {
            return false;
        }

        df_op::write_to_csv(df3, res_csv_path);
        return true;
    }
}

std::string timestamp_to_utc_time(const uint64_t& timestamp_) {
    long gmt_time = (long)((double)timestamp_ / (double)1000);
    auto time_ = boost::posix_time::from_time_t(gmt_time);
    auto date_time_str = boost::posix_time::to_iso_extended_string(time_);
    std::vector<std::string> dates_;
    pystring::split(date_time_str, dates_, "T");
    return dates_[0];
}

// Return timestamp as seconds
time_t utc_timestamp_from_string(const std::string& date_str) {
    std::stringstream ios_extended_ss {};
    ios_extended_ss << date_str << "T" << "00:00:00";
    auto time_ = boost::posix_time::from_iso_extended_string(ios_extended_ss.str());
    auto time_stamp = boost::posix_time::to_time_t(time_);
    return time_stamp;
}

// Return boost::posix_time::ptime object
boost::posix_time::ptime boost_ptime_from_string(const std::string& date_str) {
    std::stringstream ios_extended_ss {};
    ios_extended_ss << date_str << "T" << "00:00:00";
    auto time_ = boost::posix_time::from_iso_extended_string(ios_extended_ss.str());
    return std::move(time_);
}


std::string _convert_boost_date_to_str(const boost::gregorian::date& d) {
    std::stringstream ss;
    ss << d.year() << "-";
    int month = (int)(d.month());
    if(month < 10) {
        ss << "0";
    }
    ss << month << "-";
    int day = d.day();
    if(day < 10) {
        ss << "0";
    }
    ss << day;
    return std::move(ss.str());
}

std::string get_ith_day(const std::string& date_str, const int i) {
    std::vector<std::string> dates_;
    pystring::split(date_str, dates_, "-");
    int year = std::atoi(dates_[0].c_str());
    int month = std::atoi(dates_[1].c_str());
    int day = std::atoi(dates_[2].c_str());
    boost::gregorian::date d(year, month, day);
    boost::gregorian::date_duration dd(i * 1);
    d += dd;
    auto ret_date_str = _convert_boost_date_to_str(d);
    return std::move(ret_date_str);
}

std::string get_ith_week(const std::string& date_str, const int i) {
    std::vector<std::string> dates_;
    pystring::split(date_str, dates_, "-");
    int year = std::atoi(dates_[0].c_str());
    int month = std::atoi(dates_[1].c_str());
    int day = std::atoi(dates_[2].c_str());
    boost::gregorian::date d(year, month, day);
    boost::gregorian::weeks_duration dd(i * 1);
    d += dd;
    auto ret_date_str = _convert_boost_date_to_str(d);
    return std::move(ret_date_str);
}

std::string get_ith_month(const std::string& date_str, const int i) {
    std::vector<std::string> dates_;
    pystring::split(date_str, dates_, "-");
    int year = std::atoi(dates_[0].c_str());
    int month = std::atoi(dates_[1].c_str());
    int day = std::atoi(dates_[2].c_str());
    boost::gregorian::date d(year, month, day);
    boost::gregorian::date end_d = d.end_of_month();
    for(int idx = 0; idx < i-1; ++idx) {
        end_d += boost::gregorian::date_duration(1);
        end_d = end_d.end_of_month();
    }
    
    auto ret_date_str = _convert_boost_date_to_str(end_d);
    return std::move(ret_date_str);
}

float get_float_fraction(float number) {
    float int_part;
    float fractpart = modf (number, &int_part);
    return fractpart;
}

json parse_start_date_and_end_date_from_url(const std::string& url) {
    std::regex start_pattern(R"(start_date=(\d{4}-\d{1,2}-\d{1,2}))"); 
    std::regex end_pattern(R"(end_date=(\d{4}-\d{1,2}-\d{1,2}))"); 
    std::smatch start_date_match;
    std::smatch end_date_match;
    std::regex_search(url, start_date_match, start_pattern);
    std::regex_search(url, end_date_match, end_pattern);
    auto start_date = start_date_match[0];
    auto end_date = end_date_match[0];
    json date_range {
        {"start_date", start_date},
        {"end_date", end_date}
    };

    LOG(INFO) << date_range << "\n";
    return std::move(date_range);
}

std::string replace_end_date_with_given_date(const std::string& given_date, const std::string& url) {
    std::regex end_pattern(R"(end_date=(\d{4}-\d{1,2}-\d{1,2}))");

    std::stringstream given_date_ss;
    given_date_ss << "end_date=" << given_date;
    auto given_date_str = given_date_ss.str();

    std::string new_url = std::regex_replace(url, end_pattern, given_date_str);
    return std::move(new_url);
}

json adjust_req_url_date(const std::string& url, int i) {
    auto d_range = parse_start_date_and_end_date_from_url(url);
    auto start_date_str = d_range["start_date"].get<std::string>();
    std::vector<std::string> start_date_vec;
    pystring::split(start_date_str, start_date_vec, "=");
    std::string end_date;
    if(url.find("daily") != std::string::npos) {
        end_date =  get_ith_day(start_date_vec[1], i);
    } else if(url.find("monthly") != std::string::npos) {
        end_date =  get_ith_month(start_date_vec[1], i);
    } else if(url.find("weekly") != std::string::npos) {
        end_date =  get_ith_week(start_date_vec[1], i);
    }
    auto ret_url = replace_end_date_with_given_date(end_date, url);

    json ret_val {
        {"end_date", end_date}, 
        {"ret_url", ret_url}
    };
    return std::move(ret_val);
}
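
To make the date stepping concrete, here is a small illustrative check of how the helpers above advance end_date. The expected values are worked out by hand from the code above, so treat them as an assumption rather than recorded test output.

#include <cassert>
#include <iostream>
#include <string>

#include "api_accuracy/utils/funcs.h"

int main() {
    // get_ith_month(date, i) returns the end of the i-th month counted from date.
    assert(get_ith_month("2020-01-01", 1) == "2020-01-31");
    assert(get_ith_month("2020-01-01", 2) == "2020-02-29");  // 2020 is a leap year

    // adjust_req_url_date() rewrites end_date in a monthly URL accordingly.
    const std::string url =
        "/v1.3/portfolio/app-performance/less?granularity=monthly"
        "&start_date=2020-01-01&end_date=2020-03-31";
    auto ret = adjust_req_url_date(url, 2);
    std::cout << ret["ret_url"].get<std::string>() << "\n";
    // expected: ...end_date=2020-02-29...
    return 0;
}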

utils/data_op.h

#ifndef _FREDRIC_DATA_OP_H_
#define _FREDRIC_DATA_OP_H_

#include "json/json.hpp"

using json = nlohmann::json;

struct data_op {
    data_op(const json& app_perf_result_);
    int get_apps_count();
    int get_data_chunks();
    json app_perf_result;
};

#endif

utils/data_op.cpp

#include "api_accuracy/utils/data_op.h"

data_op::data_op(const json& app_perf_result_): app_perf_result(app_perf_result_) {

}

int data_op::get_apps_count() {
    return app_perf_result["products"].size();
}

int data_op::get_data_chunks() {
    auto products = app_perf_result["products"];
    int chunk_size = 0;
    for(auto&& product_ : products) {
        chunk_size += product_["app_performance"].size();
        chunk_size += product_["demographics"].size();
    }
    return chunk_size;
}
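
The shape of the API response that data_op walks is not shown in this post; the snippet below uses a made-up minimal response purely to illustrate what get_apps_count() and get_data_chunks() count.

#include <iostream>

#include "api_accuracy/utils/data_op.h"

int main() {
    // Hypothetical minimal response: 2 products, each carrying
    // app_performance entries plus demographics entries.
    json res = {
        {"products", {
            {{"product_id", 1}, {"app_performance", {1, 2, 3}}, {"demographics", {1}}},
            {{"product_id", 2}, {"app_performance", {1, 2}},    {"demographics", {1, 2}}}
        }}
    };

    data_op op(res);
    std::cout << op.get_apps_count()  << "\n";  // 2 (number of products)
    std::cout << op.get_data_chunks() << "\n";  // 8 (3+1 + 2+2 array elements)
    return 0;
}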

utils/decorator.hpp

#ifndef _FREDRIC_DECORATOR_HPP_
#define _FREDRIC_DECORATOR_HPP_

#include <boost/date_time/posix_time/posix_time.hpp>

#include <functional>
#include <string>

#include "json/json.hpp"

#include "api_accuracy/test_cfg.h"
#include "api_accuracy/utils/beans.h"
#include "glog/logging.h"

//-------------------------------
// BEGIN decorator implementation
//-------------------------------

// Primary template; specialised below for function types of the form R(Args...).
template <typename>
struct decorator;

template <typename R, typename... Args>
struct decorator<R(Args...)> {
    decorator(std::function<R(Args...)> f) : f_(f) {}

    ReqResult operator()(Args... args) {
        R res;
        size_t total_time = 0;
        for (int i = 0; i < perf_retry_count; ++i) {
            json res_dic;
            boost::posix_time::ptime start;
            boost::posix_time::ptime end;
            try {
                LOG(INFO) << "try once..." << "\n";
                start = boost::posix_time::second_clock::local_time();
                res = f_(args...);
                end = boost::posix_time::second_clock::local_time();
                res_dic = json::parse(res);
            } catch(const std::exception& ex) {
               LOG(ERROR) << ex.what() << "\n";
               return ReqResult{false, res, 0};
            }

            // error in request, return directly
            auto err_it = res_dic.find("error");
            if(err_it != res_dic.end()) {
               LOG(ERROR) << res << "\n";
               return ReqResult{false, res, 0};
            }
            auto curr_time = (end - start).seconds();
            total_time += curr_time;
        }
        float avg_time = ((float)total_time) / ((float)perf_retry_count);
        return ReqResult{true, res, avg_time};
    }

    std::function<R(Args...)> f_;
};

template <typename R, typename... Args>
decorator<R(Args...)> make_decorator(R (*f)(Args...)) {
    return decorator<R(Args...)>(std::function<R(Args...)>(f));
}

#endif
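
In the test the decorator only wraps Req::make_a_api_query, but make_decorator accepts any free function whose result can be parsed as JSON. A toy usage sketch follows; fake_query is invented here purely for illustration.

#include <iostream>
#include <string>

#include "api_accuracy/utils/decorator.hpp"

// A stand-in for Req::make_a_api_query: it must return a JSON string,
// because the decorator calls json::parse on the result.
std::string fake_query(const std::string& path) {
    return R"({"products": [], "path": ")" + path + R"("})";
}

int main() {
    auto timed_query = make_decorator(fake_query);
    auto result = timed_query("/v1.3/some/path");

    // ReqResult carries the success flag, the raw body and the averaged elapsed seconds.
    std::cout << result.status << " " << result.avg_time << "\n"
              << result.res << "\n";
    return 0;
}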

utils/req.h

#ifndef _FREDRIC_REQ_H_
#define _FREDRIC_REQ_H_

#include <string>

struct Req {
    static std::string make_a_www_query(const std::string& path, const std::string& body);
    static std::string make_a_api_query(const std::string& path); 
    static std::string make_a_zipped_query(const std::string& path);
};
#endif

utils/req.cpp

#include "api_accuracy/utils/req.h"
#include "http/http_util.h"
#include "api_accuracy/test_cfg.h"

#include "glog/logging.h"

std::string Req::make_a_www_query(const std::string& path, const std::string& body) {
    std::string www_res{};
    bool get_www_res = HttpUtil::post_and_get_str(www_host, path, www_headers, body, www_res);
    if(!get_www_res) {
        LOG(ERROR) << "Get WWW result failure" << "\n";
        return "";
    }
    return std::move(www_res);
}

std::string Req::make_a_api_query(const std::string& path) {
    std::string api_res{};
    bool get_api_res = HttpUtil::get_str(api_host, path, api_headers, api_res);
    if(!get_api_res) {
        LOG(ERROR) << "Get api result failure" << "\n";
        return "";
    }
    return std::move(api_res);
}

std::string Req::make_a_zipped_query(const std::string& path) {
    std::string api_res{};
    bool get_api_res = HttpUtil::get_zipped_str(api_host, path, zipped_api_headers, api_res);
    if(!get_api_res) {
        LOG(ERROR) << "Get api result failure" << "\n";
        return "";
    }
    return std::move(api_res);
}

http/http_util.h

#ifndef _HTTP_UTIL_H_
#define _HTTP_UTIL_H_

#define CPPHTTPLIB_OPENSSL_SUPPORT
#define CPPHTTPLIB_ZLIB_SUPPORT
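// These two macros must be defined before httplib.h is included; they enable
// HTTPS (OpenSSL) support and zlib-based gzip handling in cpp-httplib.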

#include "http/httplib.h"
#include <map>
#include <string>

const int ConnectionTimeout = 2;
const int ReadTimeout = 80;

class HttpUtil {
    public:
        /**
         * HttpUtil get method
         * 
         * @param: url the url to be used to get a web page from remote server
         * @param: path the path to be used to get a web page from remote server
         * @param: result_name the download result file path
         */
        static bool get(std::string url, std::string path, std::string result_name);

         /**
         * HttpUtil get_file method
         * 
         * @param: host the host to be used to get an item from remote server
         * @param: path the path to be used to get an item from remote server
         * @param: result_name the download result file path
         */
        static bool get_file(std::string host, std::string path, std::string result_name, const std::map& headers={});

        static std::string get_redirect_url(std::string host, std::string path, const std::map& headers={});

        static bool get_str(std::string host, std::string path, const std::map & headers, std::string &result_string);


        static bool get_zipped_str(std::string host, std::string path, const std::map & headers, std::string &result_string);

        static bool post_and_get_str(std::string host, std::string path, const std::map & headers, const std::string& body,std::string &result_string);
};
#endif

http/http_util.cpp

#include "http/http_util.h"

#include <fstream>
#include <iostream>


bool HttpUtil::get(std::string url, std::string path, std::string result_name) {

    try {
        httplib::Client cli {url};
        cli.set_connection_timeout(ConnectionTimeout);

        auto res = cli.Get(path.c_str());
        if(!res || res->status != 200) {
            std::cerr << "Get [" << url << path << "] failed" << std::endl;
            if(res) {
                std::cerr << "Status code : [" << res->status << "]" << "\n" << "Result body : [" << res->body << "]" << std::endl;
            }
            cli.stop();
            return false;
        }
        std::ofstream os {result_name, std::ios_base::out | std::ios_base::binary};
        os << res->body;
        cli.stop();
        return true;
    } catch(const std::exception & e) {
        std::cerr << "Exception: " << e.what() << std::endl;
        return false;
    }
}


std::string HttpUtil::get_redirect_url(std::string host, std::string path, const std::map& headers) {
    try {

        #ifdef CPPHTTPLIB_OPENSSL_SUPPORT
            auto port = 443;
            httplib::SSLClient cli(host, port);
        #else
            auto port = 80;
            httplib::Client cli(host, port);
        #endif

        cli.set_connection_timeout(ConnectionTimeout);
        cli.set_read_timeout(ReadTimeout);
        cli.set_follow_location(false);

        httplib::Headers headers_ {};
        for(auto&& item: headers) {
            headers_.insert(item);
        }

        auto res = cli.Get(path.c_str(), headers_);

        if(!res || res->headers.find("location") == res->headers.end()) {
            std::cerr << "Redirect failed" << std::endl;
            cli.stop();
            return "";
        }
        auto redirect_url = res->headers.find("location")->second;
        cli.stop();
        return std::move(redirect_url);
    } catch(const std::exception & e) {
        std::cerr << "Exception: " << e.what() << std::endl;
        return "";
    }
}

bool HttpUtil::get_file(std::string host, std::string path, std::string result_name, const std::map& headers) {

    try {

        #ifdef CPPHTTPLIB_OPENSSL_SUPPORT
            auto port = 443;
            httplib::SSLClient cli(host, port);
        #else
            auto port = 80;
            httplib::Client cli(host, port);
        #endif

        cli.set_connection_timeout(ConnectionTimeout);
        cli.set_read_timeout(ReadTimeout);


        httplib::Headers headers_ {};
        for(auto&& item: headers) {
            headers_.insert(item);
        }

        std::ofstream os {result_name};

        auto res = cli.Get(path.c_str(), headers_,
              [&](const char *data, size_t data_length) {
                os << std::string(data, data_length);
                return true;
              });

        if(!res || res->status != 200) {
            cli.stop();
            return false;
        }
        cli.stop();
        return true;
    } catch(const std::exception & e) {
        std::cerr << "Exception: " << e.what() << std::endl;
        return false;
    }
}

bool HttpUtil::get_str(std::string host, std::string path, const std::map & headers, std::string &result_string) {

     try {
        #ifdef CPPHTTPLIB_OPENSSL_SUPPORT
            auto port = 443;
            httplib::SSLClient cli(host, port);
        #else
            auto port = 80;
            httplib::Client cli(host, port);
        #endif

        cli.set_connection_timeout(ConnectionTimeout);
        cli.set_read_timeout(ReadTimeout);

        httplib::Headers headers_ {};
        for(auto&& item: headers) {
            headers_.insert(item);
        }
        
        auto res = cli.Get(path.c_str(), headers_);
        
        if(res == nullptr) {
            cli.stop();
            return false;
        }

        result_string = res->body;
        cli.stop();
        return true;
    } catch(const std::exception & e) {
        std::cerr << "Exception: " << e.what() << std::endl;
        return false;
    }
}

bool HttpUtil::get_zipped_str(std::string host, std::string path, const std::map & headers, std::string &result_string) {
    try {
        
        #ifdef CPPHTTPLIB_OPENSSL_SUPPORT
            auto port = 443;
            httplib::SSLClient cli(host, port);
        #else
            auto port = 80;
            httplib::Client cli(host, port);
        #endif
        cli.set_decompress(false);
        cli.set_connection_timeout(ConnectionTimeout);
        cli.set_read_timeout(ReadTimeout);

        httplib::Headers headers_ {};
        for(auto&& item: headers) {
            headers_.insert(item);
        }
        headers_.insert({"Accept-Encoding", "gzip, deflate"});
        
        auto res = cli.Get(path.c_str(), headers_);
        
        if(res == nullptr) {
            cli.stop();
            return false;
        }

        result_string = res->body;
        cli.stop();
        return true;
    } catch(const std::exception & e) {
        std::cerr << "Exception: " << e.what() << std::endl;
        return false;
    }
}

bool HttpUtil::post_and_get_str(std::string host, std::string path, const std::map & headers, const std::string& body, std::string &result_string){
    try {
        #ifdef CPPHTTPLIB_OPENSSL_SUPPORT
            auto port = 443;
            httplib::SSLClient cli(host, port);
        #else
            auto port = 80;
            httplib::Client cli(host, port);
        #endif

        cli.set_connection_timeout(ConnectionTimeout);
        cli.set_read_timeout(ReadTimeout);

        httplib::Headers headers_ {};
        for(auto&& item: headers) {
            headers_.insert(item);
        }
        auto res = cli.Post(path.c_str(), headers_, body.c_str(), body.size(), "application/json");
        
        if(res == nullptr) {
            cli.stop();
            return false;
        }

        result_string = res->body;
        cli.stop();

        return true;
    } catch(const std::exception & e) {
        std::cerr << "Exception: " << e.what() << std::endl;
        return false;
    }
}

The program output is as follows:


[image: program output]

Finally, the hosseinmoein DataFrame library is called to generate a simple CSV result file.
