Use cURL so http requests don't get cached

This commit is contained in:
Federico Cecchetto
2022-01-31 23:45:03 +01:00
parent 5865f427de
commit 1775a13378
5 changed files with 160 additions and 29 deletions

View File

@@ -1,48 +1,100 @@
#include "http.hpp"
#include "nt.hpp"
#include <atlcomcli.h>
#include <curl/curl.h>
#include <gsl/gsl>
#pragma comment(lib, "ws2_32.lib")
namespace utils::http
{
std::optional<std::string> get_data(const std::string& url)
namespace
{
CComPtr<IStream> stream;
if (FAILED(URLOpenBlockingStreamA(nullptr, url.data(), &stream, 0, nullptr)))
struct progress_helper
{
return {};
}
const std::function<void(size_t)>* callback{};
std::exception_ptr exception{};
};
char buffer[0x1000];
std::string result;
HRESULT status{};
do
int progress_callback(void *clientp, const curl_off_t /*dltotal*/, const curl_off_t dlnow, const curl_off_t /*ultotal*/, const curl_off_t /*ulnow*/)
{
DWORD bytes_read = 0;
status = stream->Read(buffer, sizeof(buffer), &bytes_read);
auto* helper = static_cast<progress_helper*>(clientp);
if (bytes_read > 0)
try
{
result.append(buffer, bytes_read);
if (*helper->callback)
{
(*helper->callback)(dlnow);
}
}
catch(...)
{
helper->exception = std::current_exception();
return -1;
}
}
while (SUCCEEDED(status) && status != S_FALSE);
if (FAILED(status))
return 0;
}
size_t write_callback(void* contents, const size_t size, const size_t nmemb, void* userp)
{
return {};
}
auto* buffer = static_cast<std::string*>(userp);
return {result};
const auto total_size = size * nmemb;
buffer->append(static_cast<char*>(contents), total_size);
return total_size;
}
}
std::future<std::optional<std::string>> get_data_async(const std::string& url)
std::optional<std::string> get_data(const std::string& url, const headers& headers, const std::function<void(size_t)>& callback)
{
return std::async(std::launch::async, [url]()
curl_slist* header_list = nullptr;
auto* curl = curl_easy_init();
if (!curl)
{
return get_data(url);
return {};
}
auto _ = gsl::finally([&]()
{
curl_slist_free_all(header_list);
curl_easy_cleanup(curl);
});
for(const auto& header : headers)
{
auto data = header.first + ": " + header.second;
header_list = curl_slist_append(header_list, data.data());
}
std::string buffer{};
progress_helper helper{};
helper.callback = &callback;
curl_easy_setopt(curl, CURLOPT_HTTPHEADER, header_list);
curl_easy_setopt(curl, CURLOPT_URL, url.data());
curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, write_callback);
curl_easy_setopt(curl, CURLOPT_WRITEDATA, &buffer);
curl_easy_setopt(curl, CURLOPT_XFERINFOFUNCTION, progress_callback);
curl_easy_setopt(curl, CURLOPT_XFERINFODATA, &helper);
curl_easy_setopt(curl, CURLOPT_NOPROGRESS, 0);
if (curl_easy_perform(curl) == CURLE_OK)
{
return {std::move(buffer)};
}
if(helper.exception)
{
std::rethrow_exception(helper.exception);
}
return {};
}
// Convenience wrapper: fetches `url` with the given request headers on a
// std::async worker thread (no progress callback). The url and headers are
// captured by value so they outlive the caller's scope; the returned future
// yields std::nullopt on failure.
std::future<std::optional<std::string>> get_data_async(const std::string& url, const headers& headers)
{
return std::async(std::launch::async, [url, headers]()
{
return get_data(url, headers);
});
}
}

View File

@@ -6,6 +6,8 @@
namespace utils::http
{
	// Request-header map: header name -> header value.
	using headers = std::unordered_map<std::string, std::string>;

	// Downloads `url` via libcurl. Optional request headers and an optional
	// progress callback invoked with the number of bytes downloaded so far.
	// Returns the response body, or std::nullopt on failure; rethrows any
	// exception thrown by `callback`.
	std::optional<std::string> get_data(const std::string& url, const headers& headers = {},
	                                    const std::function<void(size_t)>& callback = {});

	// Asynchronous variant of get_data (no progress callback).
	std::future<std::optional<std::string>> get_data_async(const std::string& url, const headers& headers = {});
}