 #include <exception>  // exception, exception_ptr
 #include <fstream>  // ofstream
 #include <ios>  // ios::binary
+#include <iterator>  // back_inserter
 #include <memory>  // unique_ptr
 #include <string>
 #include <utility>  // move
@@ -245,10 +246,8 @@ std::vector<databento::BatchJob> Historical::BatchListJobs(
     throw JsonResponseError::TypeMismatch(kEndpoint, "array", json);
   }
   std::vector<BatchJob> jobs;
-  jobs.reserve(json.size());
-  for (const auto& job_json : json.items()) {
-    jobs.emplace_back(::Parse(kEndpoint, job_json.value()));
-  }
+  std::transform(json.begin(), json.end(), std::back_inserter(jobs),
+                 [](const auto& item) { return ::Parse(kEndpoint, item); });
   return jobs;
 }

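For reference, the reserve-plus-loop and the std::transform forms above are equivalent here, though dropping jobs.reserve() means the vector may reallocate as it grows. A minimal self-contained sketch of the pattern, with a hypothetical Parse standing in for ::Parse(kEndpoint, ...):

```cpp
#include <algorithm>  // transform
#include <iterator>   // back_inserter
#include <string>
#include <vector>

// Hypothetical stand-in for the ::Parse(kEndpoint, item) call in the diff.
std::string Parse(const std::string& item) { return "job:" + item; }

int main() {
  const std::vector<std::string> json{"a", "b", "c"};
  std::vector<std::string> jobs;
  // One output element appended per input element, as in BatchListJobs.
  std::transform(json.begin(), json.end(), std::back_inserter(jobs),
                 [](const std::string& item) { return Parse(item); });
  return jobs.size() == 3 ? 0 : 1;
}
```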
@@ -316,6 +315,21 @@ std::string Historical::BatchDownload(const std::string& output_dir,
   return output_path;
 }

+void Historical::StreamToFile(const std::string& url_path,
+                              const HttplibParams& params,
+                              const std::string& file_path) {
+  std::ofstream out_file{file_path, std::ios::binary};
+  if (out_file.fail()) {
+    throw InvalidArgumentError{"Historical::StreamToFile", "file_path",
+                               "Failed to open file"};
+  }
+  this->client_.GetRawStream(
+      url_path, params, [&out_file](const char* data, std::size_t length) {
+        out_file.write(data, static_cast<std::streamsize>(length));
+        return true;
+      });
+}
+
 void Historical::DownloadFile(const std::string& url,
                               const std::string& output_path) {
   static const std::string kEndpoint = "Historical::BatchDownload";
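The new StreamToFile helper opens the output file in binary mode up front, so an invalid path fails before any bytes are requested, then writes each chunk the client delivers. A minimal sketch of the same callback-driven pattern, using a hypothetical GetRawStream free function in place of client_.GetRawStream:

```cpp
#include <cstddef>     // size_t
#include <cstring>     // strlen
#include <fstream>     // ofstream
#include <functional>  // function
#include <ios>         // ios::binary, streamsize
#include <stdexcept>   // runtime_error
#include <string>

// Hypothetical stand-in for client_.GetRawStream: calls the callback once per
// received chunk until the stream ends or the callback returns false.
void GetRawStream(const std::function<bool(const char*, std::size_t)>& cb) {
  const char* chunks[] = {"DBN", "payload"};
  for (const char* chunk : chunks) {
    if (!cb(chunk, std::strlen(chunk))) {
      return;
    }
  }
}

// Same shape as the new Historical::StreamToFile: fail fast on a bad path,
// then append every chunk to the already-open file.
void StreamToFile(const std::string& file_path) {
  std::ofstream out_file{file_path, std::ios::binary};
  if (out_file.fail()) {
    throw std::runtime_error{"Failed to open " + file_path};
  }
  GetRawStream([&out_file](const char* data, std::size_t length) {
    out_file.write(data, static_cast<std::streamsize>(length));
    return true;  // keep streaming
  });
}

int main() { StreamToFile("stream_demo.bin"); }
```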
@@ -338,12 +352,7 @@ void Historical::DownloadFile(const std::string& url,
     path = url.substr(slash);
   }

-  client_.GetRawStream(
-      path, {}, [&output_path](const char* data, std::size_t length) {
-        std::ofstream out_file{output_path};
-        out_file.write(data, static_cast<std::streamsize>(length));
-        return KeepGoing::Continue;
-      });
+  StreamToFile(path, {}, output_path);
 }

 std::map<std::string, std::int32_t> Historical::MetadataListPublishers() {
@@ -848,14 +857,15 @@ databento::SymbologyResolution Historical::SymbologyResolve(
                                             mapping_json);
     }
     std::vector<StrMappingInterval> mapping_intervals;
-    mapping_intervals.reserve(mapping_json.size());
-    for (const auto& interval_json : mapping_json.items()) {
-      mapping_intervals.emplace_back(StrMappingInterval{
-          detail::CheckedAt(kEndpoint, interval_json.value(), "d0"),
-          detail::CheckedAt(kEndpoint, interval_json.value(), "d1"),
-          detail::CheckedAt(kEndpoint, interval_json.value(), "s"),
-      });
-    }
+    std::transform(mapping_json.begin(), mapping_json.end(),
+                   std::back_inserter(mapping_intervals),
+                   [](const auto& interval_json) {
+                     return StrMappingInterval{
+                         detail::CheckedAt(kEndpoint, interval_json, "d0"),
+                         detail::CheckedAt(kEndpoint, interval_json, "d1"),
+                         detail::CheckedAt(kEndpoint, interval_json, "s"),
+                     };
+                   });
     res.mappings.emplace(mapping.key(), std::move(mapping_intervals));
   }
   if (!partial_json.is_array()) {
@@ -1053,19 +1063,7 @@ databento::DbnFileStore Historical::TimeseriesGetRangeToFile(
 }
 databento::DbnFileStore Historical::TimeseriesGetRangeToFile(
     const HttplibParams& params, const std::string& file_path) {
-  {
-    std::ofstream out_file{file_path, std::ios::binary};
-    if (out_file.fail()) {
-      throw InvalidArgumentError{kTimeseriesGetRangeEndpoint, "file_path",
-                                 "Non-existent or invalid file"};
-    }
-    this->client_.GetRawStream(
-        kTimeseriesGetRangePath, params,
-        [&out_file](const char* data, std::size_t length) {
-          out_file.write(data, static_cast<std::streamsize>(length));
-          return true;
-        });
-  }  // close out_file
+  StreamToFile(kTimeseriesGetRangePath, params, file_path);
   return DbnFileStore{file_path};
 }

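Net effect: all three call sites now share one streaming helper. The removed DownloadFile lambda also constructed its std::ofstream inside the callback, without std::ios::binary, so if GetRawStream delivers more than one chunk, each invocation reopens and truncates the file. A small self-contained sketch of that truncation pitfall:

```cpp
#include <fstream>  // ifstream, ofstream
#include <string>   // getline, string

int main() {
  const std::string path = "demo.txt";
  const char* chunks[] = {"first", "second"};
  // Pitfall from the removed code: constructing the ofstream per chunk
  // truncates the file each time, so only the last chunk survives.
  for (const char* chunk : chunks) {
    std::ofstream out{path};  // default open mode truncates
    out << chunk;
  }
  std::ifstream in{path};
  std::string contents;
  std::getline(in, contents);
  return contents == "second" ? 0 : 1;  // only "second" remains
}
```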