diff --git a/CHANGELOG.md b/CHANGELOG.md
index ea9fe88..4208fea 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,22 @@
 # Changelog
+
+## 0.38.1 - 2025-06-25
+
+### Enhancements
+- Added `range_by_schema` field to `DatasetRange` struct
+- Changed historical `TimeseriesGetRange` and `TimeseriesGetRangeToFile` methods to use
+  a `POST` request to allow requesting the supported maximum of 2000 symbols
+- Added logging around `Historical::BatchDownload`
+- Changed the following Venue, Publisher, and Dataset descriptions:
+  - "ICE Futures Europe (Financials)" renamed to "ICE Europe Financials"
+  - "ICE Futures Europe (Commodities)" renamed to "ICE Europe Commodities"
+
+### Bug fixes
+- Fixed handling of `null` `last_modified_date` in `MetadataGetDatasetCondition`
+  response
+- Fixed default `ShouldLog` implementation
+
 ## 0.38.0 - 2025-06-10
 
 ### Enhancements
@@ -161,8 +178,8 @@ upgrading data to version 3.
 ## 0.31.0 - 2025-03-18
 
 ### Enhancements
-- Added new venues, datasets, and publishers for ICE Futures US, ICE Futures Europe
-  (Financial products), Eurex, and European Energy Exchange (EEX)
+- Added new venues, datasets, and publishers for ICE Futures US, ICE Europe Financials
+  products, Eurex, and European Energy Exchange (EEX)
 
 ## 0.30.0 - 2025-02-11
 
diff --git a/CMakeLists.txt b/CMakeLists.txt
index 264eeda..a9b1264 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -6,7 +6,7 @@ cmake_minimum_required(VERSION 3.24..4.0)
 
 project(
   databento
-  VERSION 0.38.0
+  VERSION 0.38.1
   LANGUAGES CXX
   DESCRIPTION "Official Databento client library"
 )
diff --git a/include/databento/datetime.hpp b/include/databento/datetime.hpp
index 60f1522..9ff8a0c 100644
--- a/include/databento/datetime.hpp
+++ b/include/databento/datetime.hpp
@@ -32,4 +32,22 @@ struct DateTimeRange {
   T end;
 };
 using DateRange = DateTimeRange<std::string>;
+
+template <typename T>
+inline bool operator==(const DateTimeRange<T>& lhs,
+                       const DateTimeRange<T>& rhs) {
+  return lhs.start == rhs.start && lhs.end == rhs.end;
+}
+template <typename T>
+inline bool operator!=(const DateTimeRange<T>& lhs,
+                       const DateTimeRange<T>& rhs) {
+  return !(lhs == rhs);
+}
+
+std::string ToString(const DateTimeRange<UnixNanos>& dt_range);
+std::ostream& operator<<(std::ostream& stream,
+                         const DateTimeRange<UnixNanos>& dt_range);
+std::string ToString(const DateTimeRange<std::string>& dt_range);
+std::ostream& operator<<(std::ostream& stream,
+                         const DateTimeRange<std::string>& dt_range);
 }  // namespace databento
diff --git a/include/databento/detail/http_client.hpp b/include/databento/detail/http_client.hpp
index 74075db..2ac109e 100644
--- a/include/databento/detail/http_client.hpp
+++ b/include/databento/detail/http_client.hpp
@@ -24,15 +24,23 @@ class HttpClient {
   nlohmann::json GetJson(const std::string& path,
                          const httplib::Params& params);
   nlohmann::json PostJson(const std::string& path,
-                          const httplib::Params& params);
+                          const httplib::Params& form_params);
   void GetRawStream(const std::string& path, const httplib::Params& params,
                     const httplib::ContentReceiver& callback);
+  void PostRawStream(const std::string& path,
+                     const httplib::Params& form_params,
+                     const httplib::ContentReceiver& callback);
 
  private:
+  static bool IsErrorStatus(int status_code);
+  static httplib::ResponseHandler MakeStreamResponseHandler(int& out_status);
+  static void CheckStatusAndStreamRes(const std::string& path, int status_code,
+                                      std::string&& err_body,
+                                      const httplib::Result& res);
+
   nlohmann::json CheckAndParseResponse(const std::string& path,
                                        httplib::Result&& res) const;
   void CheckWarnings(const httplib::Response& response) const;
-  static bool IsErrorStatus(int status_code);
 
   static const httplib::Headers kHeaders;
diff --git a/include/databento/detail/json_helpers.hpp b/include/databento/detail/json_helpers.hpp
index e54dad2..1ac05f6 100644
--- a/include/databento/detail/json_helpers.hpp
+++ b/include/databento/detail/json_helpers.hpp
@@ -4,6 +4,7 @@
 
 #include <cstdint>
 #include <map>  // multimap
+#include <optional>
 #include <string>
 #include <string_view>
 #include <vector>
@@ -79,6 +80,10 @@ template <>
 std::string ParseAt(std::string_view endpoint, const nlohmann::json& json,
                     std::string_view key);
 template <>
+std::optional<std::string> ParseAt(std::string_view endpoint,
+                                   const nlohmann::json& json,
+                                   std::string_view key);
+template <>
 std::uint64_t ParseAt(std::string_view endpoint, const nlohmann::json& json,
                       std::string_view key);
 template <>
diff --git a/include/databento/historical.hpp b/include/databento/historical.hpp
index d526b94..8ca6720 100644
--- a/include/databento/historical.hpp
+++ b/include/databento/historical.hpp
@@ -230,8 +230,6 @@ class Historical {
   using HttplibParams = std::multimap<std::string, std::string>;
 
   BatchJob BatchSubmitJob(const HttplibParams& params);
-  void StreamToFile(const std::string& url_path, const HttplibParams& params,
-                    const std::filesystem::path& file_path);
   void DownloadFile(const std::string& url,
                     const std::filesystem::path& output_path);
   std::vector<BatchJob> BatchListJobs(const HttplibParams& params);
diff --git a/include/databento/log.hpp b/include/databento/log.hpp
index d8acf18..1398c3a 100644
--- a/include/databento/log.hpp
+++ b/include/databento/log.hpp
@@ -37,7 +37,7 @@ class ConsoleLogReceiver : public ILogReceiver {
   void Receive(LogLevel level, const std::string& msg) override;
 
   bool ShouldLog(databento::LogLevel level) const override {
-    return level > min_level_;
+    return level >= min_level_;
   }
 
  private:
diff --git a/include/databento/metadata.hpp b/include/databento/metadata.hpp
index c79f321..6fb3c2d 100644
--- a/include/databento/metadata.hpp
+++ b/include/databento/metadata.hpp
@@ -2,9 +2,12 @@
 
 #include <cstdint>
 #include <map>
+#include <optional>
 #include <string>
 #include <vector>
+
+#include "databento/datetime.hpp"
 #include "databento/enums.hpp"  // FeedMode, DatasetCondition, Schema
 
 namespace databento {
@@ -28,12 +31,13 @@ struct UnitPricesForMode {
 struct DatasetConditionDetail {
   std::string date;
   DatasetCondition condition;
-  std::string last_modified_date;
+  std::optional<std::string> last_modified_date;
 };
 
 struct DatasetRange {
   std::string start;
   std::string end;
+  std::map<Schema, DateTimeRange<std::string>> range_by_schema;
 };
 
 inline bool operator==(const PublisherDetail& lhs, const PublisherDetail& rhs) {
@@ -71,7 +75,8 @@ inline bool operator!=(const DatasetConditionDetail& lhs,
 }
 
 inline bool operator==(const DatasetRange& lhs, const DatasetRange& rhs) {
-  return lhs.start == rhs.start && lhs.end == rhs.end;
+  return lhs.start == rhs.start && lhs.end == rhs.end &&
+         lhs.range_by_schema == rhs.range_by_schema;
 }
 inline bool operator!=(const DatasetRange& lhs, const DatasetRange& rhs) {
   return !(lhs == rhs);
diff --git a/include/databento/publishers.hpp b/include/databento/publishers.hpp
index 9a20062..4e7cede 100644
--- a/include/databento/publishers.hpp
+++ b/include/databento/publishers.hpp
@@ -81,7 +81,7 @@ enum class Venue : std::uint16_t {
   Bato = 36,
   // MEMX Options
   Mxop = 37,
-  // ICE Futures Europe (Commodities)
+  // ICE Europe Commodities
   Ifeu = 38,
   // ICE Endex
   Ndex = 39,
@@ -103,7 +103,7 @@ enum class Venue : std::uint16_t {
   Equs = 47,
   // ICE Futures US
   Ifus = 48,
-  // ICE Futures Europe (Financials)
+  // ICE Europe Financials
   Ifll = 49,
   // Eurex Exchange
   Xeur = 50,
@@ -167,7 +167,7 @@ enum class Dataset : std::uint16_t {
 XnasQbbo = 26,
   // Nasdaq NLS
   XnasNls = 27,
-  // ICE Futures Europe (Commodities) iMpact
+  // ICE Europe Commodities iMpact
   IfeuImpact = 28,
   // ICE Endex iMpact
   NdexImpact = 29,
@@ -185,7 +185,7 @@ enum class Dataset : std::uint16_t {
   EqusMini = 35,
   // ICE Futures US iMpact
   IfusImpact = 36,
-  // ICE Futures Europe (Financials) iMpact
+  // ICE Europe Financials iMpact
   IfllImpact = 37,
   // Eurex EOBI
   XeurEobi = 38,
@@ -307,7 +307,7 @@ enum class Publisher : std::uint16_t {
   EqusPlusFiny = 55,
   // Databento US Equities Plus - FINRA/Nasdaq TRF Chicago
   EqusPlusFinc = 56,
-  // ICE Futures Europe (Commodities)
+  // ICE Europe Commodities
   IfeuImpactIfeu = 57,
   // ICE Endex
   NdexImpactNdex = 58,
@@ -361,7 +361,7 @@ enum class Publisher : std::uint16_t {
   XnasBasicFinn = 82,
   // Nasdaq Basic - FINRA/Nasdaq TRF Chicago
   XnasBasicFinc = 83,
-  // ICE Futures Europe - Off-Market Trades
+  // ICE Europe - Off-Market Trades
   IfeuImpactXoff = 84,
   // ICE Endex - Off-Market Trades
   NdexImpactXoff = 85,
@@ -391,9 +391,9 @@ enum class Publisher : std::uint16_t {
   IfusImpactIfus = 97,
   // ICE Futures US - Off-Market Trades
   IfusImpactXoff = 98,
-  // ICE Futures Europe (Financials)
+  // ICE Europe Financials
   IfllImpactIfll = 99,
-  // ICE Futures Europe (Financials) - Off-Market Trades
+  // ICE Europe Financials - Off-Market Trades
   IfllImpactXoff = 100,
   // Eurex EOBI
   XeurEobiXeur = 101,
diff --git a/pkg/PKGBUILD b/pkg/PKGBUILD
index b57d262..013976e 100644
--- a/pkg/PKGBUILD
+++ b/pkg/PKGBUILD
@@ -1,7 +1,7 @@
 # Maintainer: Databento
 _pkgname=databento-cpp
 pkgname=databento-cpp-git
-pkgver=0.38.0
+pkgver=0.38.1
 pkgrel=1
 pkgdesc="Official C++ client for Databento"
 arch=('any')
diff --git a/src/datetime.cpp b/src/datetime.cpp
index 93e3f2f..4319db2 100644
--- a/src/datetime.cpp
+++ b/src/datetime.cpp
@@ -10,6 +10,7 @@
 #include <sstream>  // ostringstream
 
 #include "databento/constants.hpp"  // kUndefTimestamp
+#include "stream_op_helper.hpp"
 
 namespace databento {
 std::string ToIso8601(UnixNanos unix_nanos) {
@@ -61,4 +62,32 @@ std::string DateFromIso8601Int(std::uint32_t date_int) {
      << std::setfill('0') << std::setw(2) << day;
   return out_ss.str();
 }
+
+std::string ToString(const DateTimeRange<UnixNanos>& dt_range) {
+  return MakeString(dt_range);
+}
+std::ostream& operator<<(std::ostream& stream,
+                         const DateTimeRange<UnixNanos>& dt_range) {
+  return StreamOpBuilder{stream}
+      .SetSpacer(" ")
+      .SetTypeName("DateTimeRange")
+      .Build()
+      .AddField("start", dt_range.start)
+      .AddField("end", dt_range.end)
+      .Finish();
+}
+
+std::string ToString(const DateTimeRange<std::string>& dt_range) {
+  return MakeString(dt_range);
+}
+std::ostream& operator<<(std::ostream& stream,
+                         const DateTimeRange<std::string>& dt_range) {
+  return StreamOpBuilder{stream}
+      .SetSpacer(" ")
+      .SetTypeName("DateTimeRange")
+      .Build()
+      .AddField("start", dt_range.start)
+      .AddField("end", dt_range.end)
+      .Finish();
+}
 }  // namespace databento
diff --git a/src/detail/http_client.cpp b/src/detail/http_client.cpp
index 45347c7..ba63f5d 100644
--- a/src/detail/http_client.cpp
+++ b/src/detail/http_client.cpp
@@ -53,25 +53,64 @@ void HttpClient::GetRawStream(const std::string& path,
   const std::string full_path = httplib::append_query_params(path, params);
   std::string err_body{};
   int err_status{};
-  const httplib::Result res = client_.Get(
-      full_path,
-      [&err_status](const httplib::Response& resp) {
-        if (HttpClient::IsErrorStatus(resp.status)) {
-          err_status = resp.status;
-        }
-        return true;
-      },
-      [&callback, &err_body, &err_status](const char* data,
-                                          std::size_t length) {
-        // if an error response was received, read all content into err_status
-        if (err_status > 0) {
-          err_body.append(data, length);
-          return true;
-        }
-        return callback(data, length);
-      });
-  if (err_status > 0) {
-    throw HttpResponseError{path, err_status, std::move(err_body)};
+  const httplib::Result res =
+      client_.Get(full_path, MakeStreamResponseHandler(err_status),
+                  [&callback, &err_body, &err_status](const char* data,
+                                                      std::size_t length) {
+                    // if an error response was received, read all content into
+                    // err_body
+                    if (err_status > 0) {
+                      err_body.append(data, length);
+                      return true;
+                    }
+                    return callback(data, length);
+                  });
+  CheckStatusAndStreamRes(path, err_status, std::move(err_body), res);
+}
+
+void HttpClient::PostRawStream(const std::string& path,
+                               const httplib::Params& form_params,
+                               const httplib::ContentReceiver& callback) {
+  std::string err_body{};
+  int err_status{};
+  httplib::Request req;
+  req.method = "POST";
+  req.set_header("Content-Type", "application/x-www-form-urlencoded");
+  req.path = path;
+  req.body = httplib::detail::params_to_query_str(form_params);
+  req.response_handler = MakeStreamResponseHandler(err_status);
+  req.content_receiver = [&callback, &err_body, &err_status](
+                             const char* data, std::size_t length,
+                             std::uint64_t, std::uint64_t) {
+    // if an error response was received, read all content into
+    // err_body
+    if (err_status > 0) {
+      err_body.append(data, length);
+      return true;
+    }
+    return callback(data, length);
+  };
+  // NOLINTNEXTLINE(clang-analyzer-unix.BlockInCriticalSection): dependency code
+  const httplib::Result res = client_.send(req);
+  CheckStatusAndStreamRes(path, err_status, std::move(err_body), res);
+}
+
+httplib::ResponseHandler HttpClient::MakeStreamResponseHandler(
+    int& out_status) {
+  return [&out_status](const httplib::Response& resp) {
+    if (HttpClient::IsErrorStatus(resp.status)) {
+      out_status = resp.status;
+    }
+    return true;
+  };
+}
+
+void HttpClient::CheckStatusAndStreamRes(const std::string& path,
+                                         int status_code,
+                                         std::string&& err_body,
+                                         const httplib::Result& res) {
+  if (status_code > 0) {
+    throw HttpResponseError{path, status_code, std::move(err_body)};
   }
   if (res.error() != httplib::Error::Success &&
       // canceled happens if `callback` returns false, which is based on the
diff --git a/src/detail/json_helpers.cpp b/src/detail/json_helpers.cpp
index 7abae76..d8d1ea4 100644
--- a/src/detail/json_helpers.cpp
+++ b/src/detail/json_helpers.cpp
@@ -1,6 +1,7 @@
 #include "databento/detail/json_helpers.hpp"
 
 #include <numeric>  // accumulate
+#include <optional>
 #include <sstream>  // istringstream
 #include <string>
 
@@ -49,6 +50,17 @@ bool ParseAt(std::string_view endpoint, const nlohmann::json& json,
 template <>
 std::string ParseAt(std::string_view endpoint, const nlohmann::json& json,
                     std::string_view key) {
+  const auto s = ParseAt<std::optional<std::string>>(endpoint, json, key);
+  if (s) {
+    return *s;
+  }
+  return {};
+}
+
+template <>
+std::optional<std::string> ParseAt(std::string_view endpoint,
+                                   const nlohmann::json& json,
+                                   std::string_view key) {
   const auto& val_json = CheckedAt(endpoint, json, key);
   if (val_json.is_null()) {
     return {};
diff --git a/src/historical.cpp b/src/historical.cpp
index 3088183..ec2b1a5 100644
--- a/src/historical.cpp
+++ b/src/historical.cpp
@@ -315,38 +315,43 @@ std::filesystem::path Historical::BatchDownload(
   return output_path;
 }
 
-void Historical::StreamToFile(const std::string& url_path,
-                              const HttplibParams& params,
-                              const std::filesystem::path& file_path) {
-  OutFileStream out_file{file_path};
-  this->client_.GetRawStream(
-      url_path, params, [&out_file](const char* data, std::size_t length) {
-        out_file.WriteAll(reinterpret_cast<const std::byte*>(data), length);
-        return true;
-      });
-}
-
 void Historical::DownloadFile(const std::string& url,
                               const std::filesystem::path& output_path) {
-  static const std::string kEndpoint = "Historical::DownloadFile";
+  static const std::string kMethod = "Historical::DownloadFile";
   // extract path from URL
   const auto protocol_divider = url.find("://");
   std::string path;
+
   if (protocol_divider == std::string::npos) {
     const auto slash = url.find_first_of('/');
    if (slash == std::string::npos) {
-      throw InvalidArgumentError{kEndpoint, "url", "No slashes"};
+      throw InvalidArgumentError{kMethod, "url", "No slashes"};
    }
    path = url.substr(slash);
   } else {
    const auto slash = url.find('/', protocol_divider + 3);
    if (slash == std::string::npos) {
-      throw InvalidArgumentError{kEndpoint, "url", "No slashes"};
+      throw InvalidArgumentError{kMethod, "url", "No slashes"};
    }
    path = url.substr(slash);
   }
+  std::ostringstream ss;
+  ss << '[' << kMethod << "] Downloading batch file " << path << " to "
+     << output_path;
+  log_receiver_->Receive(LogLevel::Info, ss.str());
+
+  OutFileStream out_file{output_path};
+  this->client_.GetRawStream(
+      path, {}, [&out_file](const char* data, std::size_t length) {
+        out_file.WriteAll(reinterpret_cast<const std::byte*>(data), length);
+        return true;
+      });
 
-  StreamToFile(path, {}, output_path);
+  if (log_receiver_->ShouldLog(LogLevel::Debug)) {
+    ss.str("");
+    ss << '[' << kMethod << ']' << " Completed download of " << path;
+    log_receiver_->Receive(LogLevel::Debug, ss.str());
+  }
 }
 
 std::vector<PublisherDetail> Historical::MetadataListPublishers() {
@@ -508,12 +513,10 @@ Historical::MetadataGetDatasetCondition(const httplib::Params& params) {
     if (!detail_json.is_object()) {
       throw JsonResponseError::TypeMismatch(kEndpoint, "object", detail_json);
     }
-    std::string date =
-        detail::ParseAt<std::string>(kEndpoint, detail_json, "date");
-    const DatasetCondition condition =
-        detail::FromCheckedAtString<DatasetCondition>(kEndpoint, detail_json,
-                                                      "condition");
-    std::string last_modified_date = detail::ParseAt<std::string>(
+    auto date = detail::ParseAt<std::string>(kEndpoint, detail_json, "date");
+    const auto condition = detail::FromCheckedAtString<DatasetCondition>(
+        kEndpoint, detail_json, "condition");
+    auto last_modified_date = detail::ParseAt<std::optional<std::string>>(
         kEndpoint, detail_json, "last_modified_date");
     details.emplace_back(DatasetConditionDetail{std::move(date), condition,
                                                 std::move(last_modified_date)});
@@ -529,8 +532,27 @@ databento::DatasetRange Historical::MetadataGetDatasetRange(
   if (!json.is_object()) {
     throw JsonResponseError::TypeMismatch(kEndpoint, "object", json);
   }
+  const auto& schema_json = detail::CheckedAt(kEndpoint, json, "schema");
+  if (!schema_json.is_object()) {
+    throw JsonResponseError::TypeMismatch(kEndpoint, "schema object", json);
+  }
+  std::map<Schema, DateTimeRange<std::string>> range_by_schema;
+  for (const auto& schema_item : schema_json.items()) {
+    if (!schema_item.value().is_object()) {
+      throw JsonResponseError::TypeMismatch(kEndpoint, "nested schema object",
+                                            json);
+    }
+    auto start =
+        detail::ParseAt<std::string>(kEndpoint, schema_item.value(), "start");
+    auto end =
+        detail::ParseAt<std::string>(kEndpoint, schema_item.value(), "end");
+    range_by_schema.emplace(
+        FromString<Schema>(schema_item.key()),
+        DateTimeRange<std::string>{std::move(start), std::move(end)});
+  }
   return DatasetRange{detail::ParseAt<std::string>(kEndpoint, json, "start"),
-                      detail::ParseAt<std::string>(kEndpoint, json, "end")};
+                      detail::ParseAt<std::string>(kEndpoint, json, "end"),
+                      std::move(range_by_schema)};
 }
 
 static const std::string kMetadataGetRecordCountEndpoint =
     "Historical::MetadataGetRecordCount";
@@ -851,7 +873,7 @@ void Historical::TimeseriesGetRange(const HttplibParams& params,
   detail::DbnBufferDecoder decoder{metadata_callback, record_callback};
 
   bool early_exit = false;
-  this->client_.GetRawStream(
+  this->client_.PostRawStream(
       kTimeseriesGetRangePath, params,
       [&decoder, &early_exit](const char* data, std::size_t length) mutable {
        if (decoder.Process(data, length) == KeepGoing::Continue) {
@@ -930,7 +952,15 @@ databento::DbnFileStore Historical::TimeseriesGetRangeToFile(
 }
 databento::DbnFileStore Historical::TimeseriesGetRangeToFile(
     const HttplibParams& params, const std::filesystem::path& file_path) {
-  StreamToFile(kTimeseriesGetRangePath, params, file_path);
+  {
+    OutFileStream out_file{file_path};
+    this->client_.PostRawStream(
+        kTimeseriesGetRangePath, params,
+        [&out_file](const char* data, std::size_t length) {
+          out_file.WriteAll(reinterpret_cast<const std::byte*>(data), length);
+          return true;
+        });
+  }  // Flush out_file
   return DbnFileStore{log_receiver_, file_path,
                       VersionUpgradePolicy::UpgradeToV3};
 }
diff --git a/src/metadata.cpp b/src/metadata.cpp
index 6c8b3b6..32513b5 100644
--- a/src/metadata.cpp
+++ b/src/metadata.cpp
@@ -1,6 +1,7 @@
 #include "databento/metadata.hpp"
 
 #include <ostream>
+#include <sstream>
 
 #include "stream_op_helper.hpp"
 
@@ -55,12 +56,22 @@ std::string ToString(const DatasetRange& dataset_range) {
 }
 std::ostream& operator<<(std::ostream& stream,
                          const DatasetRange& dataset_range) {
+  std::ostringstream range_by_schema_ss;
+  auto range_by_schema_helper = StreamOpBuilder{range_by_schema_ss}
+                                    .SetSpacer("\n    ")
+                                    .SetIndent("    ")
+                                    .Build();
+  for (const auto& [schema, range] : dataset_range.range_by_schema) {
+    range_by_schema_helper.AddKeyVal(schema, range);
+  }
+  range_by_schema_helper.Finish();
   return StreamOpBuilder{stream}
-      .SetSpacer(" ")
+      .SetSpacer("\n    ")
       .SetTypeName("DatasetRange")
       .Build()
       .AddField("start", dataset_range.start)
       .AddField("end", dataset_range.end)
+      .AddField("range_by_schema", range_by_schema_ss)
       .Finish();
 }
 }  // namespace databento
diff --git a/src/stream_op_helper.hpp b/src/stream_op_helper.hpp
index a84f942..2e5b650 100644
--- a/src/stream_op_helper.hpp
+++ b/src/stream_op_helper.hpp
@@ -60,7 +60,7 @@ class StreamOpHelper {
   template <typename T>
   void FmtToStream(const std::optional<T>& val) {
     if (val.has_value()) {
-      stream_ << *val;
+      FmtToStream(*val);
     } else {
       stream_ << "nullopt";
     }
@@ -109,6 +109,19 @@ class StreamOpHelper {
     return *this;
   }
 
+  template <typename K, typename V>
+  StreamOpHelper& AddKeyVal(const K& key, const V& val) {
+    if (!is_first_) {
+      stream_ << ',';
+    }
+    stream_ << spacer_ << indent_;
+    FmtToStream(key);
+    stream_ << ": ";
+    FmtToStream(val);
+    is_first_ = false;
+    return *this;
+  }
+
   std::ostream& Finish() {
     if (spacer_.find('\n') == std::string::npos) {
       // no spacing required if empty
diff --git a/tests/include/mock/mock_http_server.hpp b/tests/include/mock/mock_http_server.hpp
index 724127a..17cd368 100644
--- a/tests/include/mock/mock_http_server.hpp
+++ b/tests/include/mock/mock_http_server.hpp
@@ -5,8 +5,10 @@
 #include <cstddef>
 #include <map>
+#include <memory>
 #include <string>
 
+#include "databento/detail/buffer.hpp"
 #include "databento/detail/scoped_thread.hpp"
 #include "databento/record.hpp"
 
@@ -23,7 +25,7 @@ class MockHttpServer {
   ~MockHttpServer() { server_.stop(); }
 
   int ListenOnThread();
-  void MockBadRequest(const std::string& path, const nlohmann::json& json);
+  void MockBadPostRequest(const std::string& path, const nlohmann::json& json);
   void MockGetJson(const std::string& path, const nlohmann::json& json);
   void MockGetJson(const std::string& path,
                    const std::map<std::string, std::string>& params,
@@ -34,22 +36,31 @@ class MockHttpServer {
   void MockPostJson(const std::string& path,
                     const std::map<std::string, std::string>& params,
                     const nlohmann::json& json);
-  void MockStreamDbn(const std::string& path,
-                     const std::map<std::string, std::string>& params,
-                     const std::string& dbn_path);
-  void MockStreamDbn(const std::string& path,
-                     const std::map<std::string, std::string>& params,
-                     Record record, std::size_t count, std::size_t chunk_size);
-  void MockStreamDbn(const std::string& path,
-                     const std::map<std::string, std::string>& params,
-                     Record record, std::size_t count, std::size_t extra_bytes,
-                     std::size_t chunk_size);
+  void MockGetDbn(const std::string& path,
+                  const std::map<std::string, std::string>& params,
+                  const std::string& dbn_path);
+  void MockPostDbn(const std::string& path,
+                   const std::map<std::string, std::string>& params,
+                   const std::string& dbn_path);
+  void MockPostDbn(const std::string& path,
+                   const std::map<std::string, std::string>& params,
+                   Record record, std::size_t count, std::size_t chunk_size);
+  void MockPostDbn(const std::string& path,
+                   const std::map<std::string, std::string>& params,
+                   Record record, std::size_t count, std::size_t extra_bytes,
+                   std::size_t chunk_size);
 
  private:
+  using SharedConstBuffer = std::shared_ptr<const detail::Buffer>;
+
   static void CheckParams(const std::map<std::string, std::string>& params,
                           const httplib::Request& req);
   static void CheckFormParams(const std::map<std::string, std::string>& params,
                               const httplib::Request& req);
+  static SharedConstBuffer EncodeToBuffer(const std::string& dbn_path);
+  static httplib::Server::Handler MakeDbnStreamHandler(
+      const std::map<std::string, std::string>& params,
+      SharedConstBuffer&& buffer, std::size_t chunk_size);
 
   httplib::Server server_{};
   const int port_{};
diff --git a/tests/src/historical_tests.cpp b/tests/src/historical_tests.cpp
index 858ae3a..529783d 100644
--- a/tests/src/historical_tests.cpp
+++ b/tests/src/historical_tests.cpp
@@ -10,6 +10,7 @@
 #include
 #include
 #include
+#include <optional>
 #include <stdexcept>  // logic_error
 #include <utility>    // move
 
@@ -240,8 +241,8 @@ TEST_F(HistoricalTests, TestBatchDownloadAll) {
   const TempFile temp_dbn_file{tmp_path_ / "job123/test.dbn"};
   mock_server_.MockGetJson("/v0/batch.list_files", {{"job_id", kJobId}},
                            kListFilesResp);
-  mock_server_.MockStreamDbn("/v0/job_id/test.dbn", {},
-                             TEST_DATA_DIR "/test_data.mbo.v3.dbn");
+  mock_server_.MockGetDbn("/v0/job_id/test.dbn", {},
+                          TEST_DATA_DIR "/test_data.mbo.v3.dbn");
   mock_server_.MockGetJson("/v0/job_id/test_metadata.json",
                            {{"key", "value"}});
   const auto port = mock_server_.ListenOnThread();
@@ -424,7 +425,7 @@ TEST_F(HistoricalTests, TestMetadataGetDatasetCondition) {
        {"last_modified_date", "2023-03-01"}},
       {{"date", "2022-11-10"},
        {"condition", "missing"},
-       {"last_modified_date", "2023-03-01"}}};
+       {"last_modified_date", nullptr}}};
   mock_server_.MockGetJson("/v0/metadata.get_dataset_condition",
                            {{"dataset", dataset::kXnasItch},
                             {"start_date", "2022-11-06"},
@@ -440,7 +441,7 @@ TEST_F(HistoricalTests, TestMetadataGetDatasetCondition) {
       {"2022-11-07", DatasetCondition::Available, "2023-03-01"},
       {"2022-11-08", DatasetCondition::Degraded, "2023-03-01"},
       {"2022-11-09", DatasetCondition::Pending, "2023-03-01"},
-      {"2022-11-10", DatasetCondition::Missing, "2023-03-01"},
+      {"2022-11-10", DatasetCondition::Missing, std::nullopt},
   };
   EXPECT_EQ(res, kExp);
 }
@@ -464,8 +465,22 @@ TEST_F(HistoricalTests, TestMetadataListUnitPrices) {
 }
 
 TEST_F(HistoricalTests, TestMetadataGetDatasetRange) {
-  const nlohmann::json kResp = {{"start", "2017-05-21T00:00:00.000000000Z"},
-                                {"end", "2022-12-01T00:00:00.000000000Z"}};
+  const nlohmann::json kResp = {
+      {"start", "2017-05-21T00:00:00.000000000Z"},
+      {"end", "2022-12-01T00:00:00.000000000Z"},
+      {"schema",
+       {
+           {"bbo-1m",
+            {{"start", "2020-08-02T00:00:00.000000000Z"},
+             {"end", "2023-03-23T00:00:00.000000000Z"}}},
+           {"ohlcv-1s",
+            {{"start", "2020-08-02T00:00:00.000000000Z"},
+             {"end", "2023-03-23T00:00:00.000000000Z"}}},
+           {"ohlcv-1m",
+            {{"start", "2020-08-02T00:00:00.000000000Z"},
+             {"end", "2023-03-23T00:00:00.000000000Z"}}},
+
+       }}};
   mock_server_.MockGetJson("/v0/metadata.get_dataset_range",
                            {{"dataset", dataset::kXnasItch}}, kResp);
   const auto port = mock_server_.ListenOnThread();
@@ -625,17 +640,17 @@ TEST_F(HistoricalTests, TestSymbologyResolve) {
 }
 
 TEST_F(HistoricalTests, TestTimeseriesGetRange_Basic) {
-  mock_server_.MockStreamDbn("/v0/timeseries.get_range",
-                             {{"dataset", dataset::kGlbxMdp3},
-                              {"symbols", "ESH1"},
-                              {"schema", "mbo"},
-                              {"start", "1609160400000711344"},
-                              {"end", "1609160800000711344"},
-                              {"encoding", "dbn"},
-                              {"stype_in", "raw_symbol"},
-                              {"stype_out", "instrument_id"},
-                              {"limit", "2"}},
-                             TEST_DATA_DIR "/test_data.mbo.v3.dbn.zst");
+  mock_server_.MockPostDbn("/v0/timeseries.get_range",
+                           {{"dataset", dataset::kGlbxMdp3},
+                            {"symbols", "ESH1"},
+                            {"schema", "mbo"},
+                            {"start", "1609160400000711344"},
+                            {"end", "1609160800000711344"},
+                            {"encoding", "dbn"},
+                            {"stype_in", "raw_symbol"},
+                            {"stype_out", "instrument_id"},
+                            {"limit", "2"}},
+                           TEST_DATA_DIR "/test_data.mbo.v3.dbn.zst");
   const auto port = mock_server_.ListenOnThread();
 
   databento::Historical target{&logger_, kApiKey, "localhost",
@@ -661,16 +676,16 @@ TEST_F(HistoricalTests, TestTimeseriesGetRange_Basic) {
 }
 
 TEST_F(HistoricalTests, TestTimeseriesGetRange_NoMetadataCallback) {
-  mock_server_.MockStreamDbn("/v0/timeseries.get_range",
-                             {{"dataset", dataset::kGlbxMdp3},
-                              {"start", "2022-10-21T13:30"},
-                              {"end", "2022-10-21T20:00"},
-                              {"symbols", "CYZ2"},
-                              {"schema", "tbbo"},
-                              {"encoding", "dbn"},
-                              {"stype_in", "raw_symbol"},
-                              {"stype_out", "instrument_id"}},
-                             TEST_DATA_DIR "/test_data.tbbo.v3.dbn.zst");
+  mock_server_.MockPostDbn("/v0/timeseries.get_range",
+                           {{"dataset", dataset::kGlbxMdp3},
+                            {"start", "2022-10-21T13:30"},
+                            {"end", "2022-10-21T20:00"},
+                            {"symbols", "CYZ2"},
+                            {"schema", "tbbo"},
+                            {"encoding", "dbn"},
+                            {"stype_in", "raw_symbol"},
+                            {"stype_out", "instrument_id"}},
+                           TEST_DATA_DIR "/test_data.tbbo.v3.dbn.zst");
   const auto port = mock_server_.ListenOnThread();
 
   databento::Historical target{&logger_, kApiKey, "localhost",
@@ -689,7 +704,7 @@ TEST_F(HistoricalTests, TestTimeseriesGetRange_NoMetadataCallback) {
 TEST_F(HistoricalTests, TestTimeseriesGetRange_BadRequest) {
   const nlohmann::json resp{
       {"detail", "Authorization failed: illegal chars in username."}};
-  mock_server_.MockBadRequest("/v0/timeseries.get_range", resp);
+  mock_server_.MockBadPostRequest("/v0/timeseries.get_range", resp);
   const auto port = mock_server_.ListenOnThread();
 
   databento::Historical target{&logger_, kApiKey, "localhost",
@@ -714,8 +729,8 @@ TEST_F(HistoricalTests, TestTimeseriesGetRange_BadRequest) {
 }
 
 TEST_F(HistoricalTests, TestTimeseriesGetRange_CallbackException) {
-  mock_server_.MockStreamDbn("/v0/timeseries.get_range", {},
-                             TEST_DATA_DIR "/test_data.mbo.v3.dbn.zst");
+  mock_server_.MockPostDbn("/v0/timeseries.get_range", {},
+                           TEST_DATA_DIR "/test_data.mbo.v3.dbn.zst");
   const auto port = mock_server_.ListenOnThread();
 
   databento::Historical target{&logger_, kApiKey, "localhost",
@@ -732,8 +747,8 @@ TEST_F(HistoricalTests, TestTimeseriesGetRange_CallbackException) {
 }
 
 TEST_F(HistoricalTests, TestTimeseriesGetRange_Cancellation) {
-  mock_server_.MockStreamDbn("/v0/timeseries.get_range", {},
-                             TEST_DATA_DIR "/test_data.mbo.v3.dbn.zst");
+  mock_server_.MockPostDbn("/v0/timeseries.get_range", {},
+                           TEST_DATA_DIR "/test_data.mbo.v3.dbn.zst");
   const auto port = mock_server_.ListenOnThread();
 
   databento::Historical target{&logger_, kApiKey, "localhost",
@@ -762,9 +777,9 @@ TEST_F(HistoricalTests, TestTimeseriesGetRange_LargeChunks) {
                               10005,
                               {}}};
   constexpr auto kRecordCount = 50'000;
-  mock_server_.MockStreamDbn("/v0/timeseries.get_range",
-                             {{"dataset", ToString(Dataset::IfusImpact)}},
-                             Record{&mbp1.hd}, kRecordCount, 75'000);
+  mock_server_.MockPostDbn("/v0/timeseries.get_range",
+                           {{"dataset", ToString(Dataset::IfusImpact)}},
+                           Record{&mbp1.hd}, kRecordCount, 75'000);
   const auto port = mock_server_.ListenOnThread();
 
   databento::Historical target{&logger_, kApiKey, "localhost",
@@ -789,9 +804,9 @@ TEST_F(HistoricalTests, TestTimeseriesGetRange_UnreadBytes) {
                               10005,
                               {}}};
   constexpr auto kRecordCount = 1'000;
-  mock_server_.MockStreamDbn("/v0/timeseries.get_range",
-                             {{"dataset", ToString(Dataset::IfusImpact)}},
-                             Record{&mbp1.hd}, kRecordCount, 20, 75'000);
+  mock_server_.MockPostDbn("/v0/timeseries.get_range",
+                           {{"dataset", ToString(Dataset::IfusImpact)}},
+                           Record{&mbp1.hd}, kRecordCount, 20, 75'000);
   const auto port = mock_server_.ListenOnThread();
 
   logger_ = mock::MockLogReceiver{[](auto count, LogLevel level,
@@ -815,16 +830,16 @@ TEST_F(HistoricalTests, TestTimeseriesGetRange_UnreadBytes) {
 }
 
 TEST_F(HistoricalTests, TestTimeseriesGetRangeToFile) {
-  mock_server_.MockStreamDbn("/v0/timeseries.get_range",
-                             {{"dataset", dataset::kGlbxMdp3},
-                              {"start", "2022-10-21T13:30"},
-                              {"end", "2022-10-21T20:00"},
-                              {"symbols", "CYZ2"},
-                              {"schema", "tbbo"},
-                              {"encoding", "dbn"},
-                              {"stype_in", "raw_symbol"},
-                              {"stype_out", "instrument_id"}},
-                             TEST_DATA_DIR "/test_data.tbbo.v3.dbn.zst");
+  mock_server_.MockPostDbn("/v0/timeseries.get_range",
+                           {{"dataset", dataset::kGlbxMdp3},
+                            {"start", "2022-10-21T13:30"},
+                            {"end", "2022-10-21T20:00"},
+                            {"symbols", "CYZ2"},
+                            {"schema", "tbbo"},
+                            {"encoding", "dbn"},
+                            {"stype_in", "raw_symbol"},
+                            {"stype_out", "instrument_id"}},
+                           TEST_DATA_DIR "/test_data.tbbo.v3.dbn.zst");
   const auto port = mock_server_.ListenOnThread();
 
   databento::Historical target{&logger_, kApiKey, "localhost",
diff --git a/tests/src/log_tests.cpp b/tests/src/log_tests.cpp
index 13d45b0..d14b965 100644
--- a/tests/src/log_tests.cpp
+++ b/tests/src/log_tests.cpp
@@ -1,9 +1,10 @@
-#include <gtest/gtest.h>
+#include <gmock/gmock.h>
 
 #include <sstream>
 #include <string>
 
 #include "databento/log.hpp"
+#include "gmock/gmock.h"
 
 namespace databento::tests {
 class ConsoleLogReceiverTests : public testing::Test {
@@ -24,7 +25,9 @@ TEST_F(ConsoleLogReceiverTests, TestFilter) {
   const std::string msg = "Something happened";
   target_.Receive(LogLevel::Debug, msg);
   const std::string output = stream_.str();
-  ASSERT_TRUE(output.empty());
+  EXPECT_TRUE(output.empty());
+  target_.Receive(LogLevel::Info, msg);
+  EXPECT_THAT(stream_.str(), testing::HasSubstr(msg));
 }
 
 TEST(ILogReceiverTests, TestDefault) {
diff --git a/tests/src/metadata_tests.cpp b/tests/src/metadata_tests.cpp
index 564bad3..a81d10d 100644
--- a/tests/src/metadata_tests.cpp
+++ b/tests/src/metadata_tests.cpp
@@ -5,18 +5,34 @@
 
 namespace databento::tests {
 TEST(MetadataTests, TestDatasetConditionDetailToString) {
-  const DatasetConditionDetail target{"2022-11-10", DatasetCondition::Available,
-                                      "2023-03-01"};
-  ASSERT_EQ(
-      ToString(target),
+  const DatasetConditionDetail available{
+      "2022-11-10", DatasetCondition::Available, "2023-03-01"};
+  EXPECT_EQ(
+      ToString(available),
       R"(DatasetConditionDetail { date = "2022-11-10", condition = available, last_modified_date = "2023-03-01" })");
+  const DatasetConditionDetail missing{
+      "2022-11-11", DatasetCondition::Missing, {}};
+  EXPECT_EQ(
+      ToString(missing),
+      R"(DatasetConditionDetail { date = "2022-11-11", condition = missing, last_modified_date = nullopt })");
 }
 
 TEST(MetadataTests, TestDatasetRangeToString) {
-  const DatasetRange target{"2022-05-17T00:00:00.000000000Z",
-                            "2023-01-07T00:00:00.000000000Z"};
-  ASSERT_EQ(
-      ToString(target),
-      R"(DatasetRange { start = "2022-05-17T00:00:00.000000000Z", end = "2023-01-07T00:00:00.000000000Z" })");
+  const DatasetRange target{
+      "2022-05-17T00:00:00.000000000Z",
+      "2023-01-07T00:00:00.000000000Z",
+      {{Schema::Bbo1M,
+        {"2020-08-02T00:00:00.000000000Z", "2023-03-23T00:00:00.000000000Z"}},
+       {Schema::Bbo1S,
+        {"2020-08-02T00:00:00.000000000Z", "2023-03-23T00:00:00.000000000Z"}}}};
+  ASSERT_EQ(ToString(target),
+            R"(DatasetRange {
+    start = "2022-05-17T00:00:00.000000000Z",
+    end = "2023-01-07T00:00:00.000000000Z",
+    range_by_schema = {
+        bbo-1s: DateTimeRange { start = "2020-08-02T00:00:00.000000000Z", end = "2023-03-23T00:00:00.000000000Z" },
+        bbo-1m: DateTimeRange { start = "2020-08-02T00:00:00.000000000Z", end = "2023-03-23T00:00:00.000000000Z" }
+    }
+})");
 }
 }  // namespace databento::tests
diff --git a/tests/src/mock_http_server.cpp b/tests/src/mock_http_server.cpp
index d5dc69b..e8cde50 100644
--- a/tests/src/mock_http_server.cpp
+++ b/tests/src/mock_http_server.cpp
@@ -4,15 +4,14 @@
 #include <algorithm>
 #include <cstddef>
-#include <fstream>   // ifstream
-#include <ios>       // streamsize
-#include <iostream>  // cerr
 #include <utility>
 
 #include "databento/constants.hpp"
 #include "databento/dbn.hpp"
 #include "databento/dbn_encoder.hpp"
+#include "databento/detail/buffer.hpp"
 #include "databento/detail/zstd_stream.hpp"
+#include "databento/file_stream.hpp"
 #include "databento/record.hpp"
 
 using databento::tests::mock::MockHttpServer;
@@ -23,9 +22,9 @@ int MockHttpServer::ListenOnThread() {
   return port_;
 }
 
-void MockHttpServer::MockBadRequest(const std::string& path,
-                                    const nlohmann::json& json) {
-  server_.Get(path, [json](const httplib::Request&, httplib::Response& resp) {
+void MockHttpServer::MockBadPostRequest(const std::string& path,
+                                        const nlohmann::json& json) {
+  server_.Post(path, [json](const httplib::Request&, httplib::Response& resp) {
     resp.status = 400;
     resp.body = json.dump();
   });
@@ -78,50 +77,39 @@ void MockHttpServer::MockPostJson(
     });
 }
 
-void MockHttpServer::MockStreamDbn(
+void MockHttpServer::MockGetDbn(
     const std::string& path, const std::map<std::string, std::string>& params,
     const std::string& dbn_path) {
   constexpr std::size_t kChunkSize = 32;
 
   // Read contents into buffer
-  std::ifstream input_file{dbn_path, std::ios::binary | std::ios::ate};
-  const auto size = static_cast<std::size_t>(input_file.tellg());
-  input_file.seekg(0, std::ios::beg);
-  std::vector<char> buffer(size);
-  input_file.read(buffer.data(), static_cast<std::streamsize>(size));
+  auto buffer = EncodeToBuffer(dbn_path);
 
   // Serve
-  server_.Get(path, [buffer = std::move(buffer), kChunkSize, params](
-                        const httplib::Request& req, httplib::Response& resp) {
-    if (!req.has_header("Authorization")) {
-      resp.status = 401;
-      return;
-    }
-    CheckParams(params, req);
-    resp.status = 200;
-    resp.set_header("Content-Disposition", "attachment; filename=test.dbn.zst");
-    resp.set_content_provider(
-        "application/octet-stream",
-        [buffer, kChunkSize](const std::size_t offset,
-                             httplib::DataSink& sink) {
-          if (offset < buffer.size()) {
-            sink.write(&buffer[offset],
-                       std::min(kChunkSize, buffer.size() - offset));
-          } else {
-            sink.done();
-          }
-          return true;
-        });
-  });
+  server_.Get(path,
+              MakeDbnStreamHandler(params, std::move(buffer), kChunkSize));
+}
+
+void MockHttpServer::MockPostDbn(
+    const std::string& path, const std::map<std::string, std::string>& params,
+    const std::string& dbn_path) {
+  constexpr std::size_t kChunkSize = 32;
+
+  // Read contents into buffer
+  auto buffer = EncodeToBuffer(dbn_path);
+
+  // Serve
+  server_.Post(path,
+               MakeDbnStreamHandler(params, std::move(buffer), kChunkSize));
 }
 
-void MockHttpServer::MockStreamDbn(
+void MockHttpServer::MockPostDbn(
    const std::string& path, const std::map<std::string, std::string>& params,
    Record record, std::size_t count, std::size_t chunk_size) {
-  MockStreamDbn(path, params, record, count, 0, chunk_size);
+  MockPostDbn(path, params, record, count, 0, chunk_size);
 }
 
-void MockHttpServer::MockStreamDbn(
+void MockHttpServer::MockPostDbn(
    const std::string& path, const std::map<std::string, std::string>& params,
    Record record, std::size_t count, std::size_t extra_bytes,
    std::size_t chunk_size) {
@@ -146,31 +134,8 @@ void MockHttpServer::MockStreamDbn(
       zstd_stream.WriteAll(empty.data(), empty.size());
     }
   }
-  server_.Get(path, [params, buffer, count, record, chunk_size](
-                        const httplib::Request& req, httplib::Response& resp) {
-    if (!req.has_header("Authorization")) {
-      resp.status = 401;
-      return;
-    }
-    CheckParams(params, req);
-    resp.status = 200;
-    resp.set_header("Content-Disposition", "attachment; filename=test.dbn.zst");
-    resp.set_content_provider(
-        "application/octet-stream",
-        [&buffer, chunk_size](const std::size_t offset,
-                              httplib::DataSink& sink) {
-          if (buffer->ReadCapacity() - offset) {
-            const auto write_size =
-                std::min(chunk_size, buffer->ReadCapacity() - offset);
-            sink.write(
-                reinterpret_cast<const char*>(&buffer->ReadBegin()[offset]),
-                write_size);
-          } else {
-            sink.done();
-          }
-          return true;
-        });
-  });
+  server_.Post(path,
+               MakeDbnStreamHandler(params, std::move(buffer), chunk_size));
 }
 
 void MockHttpServer::CheckParams(
     const std::map<std::string, std::string>& params,
     const httplib::Request& req) {
@@ -205,3 +170,45 @@ void MockHttpServer::CheckFormParams(
     }
   }
 }
+
+MockHttpServer::SharedConstBuffer MockHttpServer::EncodeToBuffer(
+    const std::string& dbn_path) {
+  detail::Buffer buffer{};
+  InFileStream input_file{dbn_path};
+  while (auto read_size =
+             input_file.ReadSome(buffer.WriteBegin(), buffer.WriteCapacity())) {
+    buffer.Fill(read_size);
+    if (buffer.WriteCapacity() < 1024) {
+      buffer.Reserve(buffer.Capacity() * 2);
+    }
+  }
+  return std::make_shared<const detail::Buffer>(std::move(buffer));
+}
+
+httplib::Server::Handler MockHttpServer::MakeDbnStreamHandler(
+    const std::map<std::string, std::string>& params,
+    SharedConstBuffer&& buffer, std::size_t chunk_size) {
+  return [buffer = std::move(buffer), chunk_size, params](
+             const httplib::Request& req, httplib::Response& resp) {
+    if (!req.has_header("Authorization")) {
+      resp.status = 401;
+      return;
+    }
+    CheckParams(params, req);
+    resp.status = 200;
+    resp.set_header("Content-Disposition", "attachment; filename=test.dbn.zst");
+    resp.set_content_provider(
+        "application/octet-stream",
+        [buffer, chunk_size](const std::size_t offset,
+                             httplib::DataSink& sink) {
+          if (offset < buffer->ReadCapacity()) {
+            sink.write(
+                reinterpret_cast<const char*>(&buffer->ReadBegin()[offset]),
+                std::min(chunk_size, buffer->ReadCapacity() - offset));
+          } else {
+            sink.done();
+          }
+          return true;
+        });
+  };
+}