Sort out cURL timeouts and increase default

sfan5 2021-04-02 00:19:39 +02:00 committed by GitHub
parent 3560691c0a
commit 34888a914e
8 changed files with 18 additions and 73 deletions

@@ -1413,9 +1413,8 @@ enable_ipv6 (IPv6) bool true
[*Advanced]
-# Default timeout for cURL, stated in milliseconds.
-# Only has an effect if compiled with cURL.
-curl_timeout (cURL timeout) int 5000
+# Maximum time an interactive request (e.g. server list fetch) may take, stated in milliseconds.
+curl_timeout (cURL interactive timeout) int 20000
# Limits number of parallel HTTP requests. Affects:
# - Media fetch if server uses remote_media setting.
@@ -1424,7 +1423,7 @@ curl_timeout (cURL timeout) int 5000
# Only has an effect if compiled with cURL.
curl_parallel_limit (cURL parallel limit) int 8
-# Maximum time in ms a file download (e.g. a mod download) may take.
+# Maximum time a file download (e.g. a mod download) may take, stated in milliseconds.
curl_file_download_timeout (cURL file download timeout) int 300000
# Makes DirectX work with LuaJIT. Disable if it causes troubles.

@@ -8394,7 +8394,7 @@ Used by `HTTPApiTable.fetch` and `HTTPApiTable.fetch_async`.
url = "http://example.org",
timeout = 10,
--- Timeout for connection in seconds. Default is 3 seconds.
+-- Timeout for request to be completed in seconds. Default depends on engine settings.
method = "GET", "POST", "PUT" or "DELETE"
-- The http method to use. Defaults to "GET".
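Since the Lua-facing default now comes from engine settings instead of the old hard-coded 3 seconds, the sketch below illustrates how the C++ side of the script API could translate the Lua `timeout` field (seconds) into an HTTPFetchRequest (milliseconds). This is a hypothetical illustration; the function name and glue logic are assumptions, not the engine's actual binding code.

// Hypothetical sketch only: shows the seconds-to-milliseconds conversion and
// the fallback to the constructor default when the Lua table omits "timeout".
#include "httpfetch.h"

static void apply_script_timeout(HTTPFetchRequest &req,
		bool timeout_given, double timeout_seconds)
{
	if (timeout_given)
		req.timeout = (long)(timeout_seconds * 1000); // Lua side uses seconds
	// else: keep the default set by HTTPFetchRequest's constructor, which
	// now reads curl_timeout (20000 ms) and uses a fixed 10 second connect
	// timeout (see the httpfetch.cpp hunk further below).
}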

@@ -216,7 +216,6 @@ void ClientMediaDownloader::initialStep(Client *client)
// This is the first time we use httpfetch, so alloc a caller ID
m_httpfetch_caller = httpfetch_caller_alloc();
-m_httpfetch_timeout = g_settings->getS32("curl_timeout");
// Set the active fetch limit to curl_parallel_limit or 84,
// whichever is greater. This gives us some leeway so that
@@ -258,8 +257,6 @@ void ClientMediaDownloader::initialStep(Client *client)
remote->baseurl + MTHASHSET_FILE_NAME;
fetch_request.caller = m_httpfetch_caller;
fetch_request.request_id = m_httpfetch_next_id; // == i
-fetch_request.timeout = m_httpfetch_timeout;
-fetch_request.connect_timeout = m_httpfetch_timeout;
fetch_request.method = HTTP_POST;
fetch_request.raw_data = required_hash_set;
fetch_request.extra_headers.emplace_back(
@@ -432,9 +429,8 @@ void ClientMediaDownloader::startRemoteMediaTransfers()
fetch_request.url = url;
fetch_request.caller = m_httpfetch_caller;
fetch_request.request_id = m_httpfetch_next_id;
-fetch_request.timeout = 0; // no data timeout!
-fetch_request.connect_timeout =
-m_httpfetch_timeout;
+fetch_request.timeout =
+g_settings->getS32("curl_file_download_timeout");
httpfetch_async(fetch_request);
m_remote_file_transfers.insert(std::make_pair(
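After this change, a remote media transfer gets its generous overall limit from curl_file_download_timeout while the connection timeout stays at the constructor's fixed default. A minimal sketch of issuing such a request with the httpfetch API shown in this diff (the URL and caller values are placeholders):

#include "httpfetch.h"
#include "settings.h"

// Sketch: one long-running file download under the new timeout scheme.
void example_file_download(unsigned long caller, unsigned long request_id)
{
	HTTPFetchRequest req;       // defaults: timeout = curl_timeout,
	                            // connect_timeout = 10 s (see httpfetch.cpp)
	req.url = "http://example.org/media/texture.png"; // placeholder URL
	req.caller = caller;
	req.request_id = request_id;
	// Large transfers are bounded by the dedicated download timeout instead
	// of the (much shorter) interactive curl_timeout.
	req.timeout = g_settings->getS32("curl_file_download_timeout");
	httpfetch_async(req);
}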

@@ -137,7 +137,6 @@ private:
// Status of remote transfers
unsigned long m_httpfetch_caller;
unsigned long m_httpfetch_next_id = 0;
-long m_httpfetch_timeout = 0;
s32 m_httpfetch_active = 0;
s32 m_httpfetch_active_limit = 0;
s32 m_outstanding_hash_sets = 0;

@@ -17,56 +17,11 @@ with this program; if not, write to the Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*/
-#include <vector>
#include <iostream>
#include <sstream>
#include <memory>
#include "convert_json.h"
#include "content/mods.h"
#include "config.h"
#include "log.h"
#include "settings.h"
#include "httpfetch.h"
#include "porting.h"
-Json::Value fetchJsonValue(const std::string &url,
-std::vector<std::string> *extra_headers)
-{
-HTTPFetchRequest fetch_request;
-HTTPFetchResult fetch_result;
-fetch_request.url = url;
-fetch_request.caller = HTTPFETCH_SYNC;
-if (extra_headers != NULL)
-fetch_request.extra_headers = *extra_headers;
-httpfetch_sync(fetch_request, fetch_result);
-if (!fetch_result.succeeded) {
-return Json::Value();
-}
-Json::Value root;
-std::istringstream stream(fetch_result.data);
-Json::CharReaderBuilder builder;
-builder.settings_["collectComments"] = false;
-std::string errs;
-if (!Json::parseFromStream(builder, stream, &root, &errs)) {
-errorstream << "URL: " << url << std::endl;
-errorstream << "Failed to parse json data " << errs << std::endl;
-if (fetch_result.data.size() > 100) {
-errorstream << "Data (" << fetch_result.data.size()
-<< " bytes) printed to warningstream." << std::endl;
-warningstream << "data: \"" << fetch_result.data << "\"" << std::endl;
-} else {
-errorstream << "data: \"" << fetch_result.data << "\"" << std::endl;
-}
-return Json::Value();
-}
-return root;
-}
void fastWriteJson(const Json::Value &value, std::ostream &to)
{
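With fetchJsonValue removed from convert_json, a call site that still needs "fetch a URL and parse it as JSON" can combine the synchronous httpfetch API with JsonCpp directly, along the lines of the deleted helper. A rough sketch, with the error reporting trimmed (the function name is illustrative, not an engine API):

#include <string>
#include <sstream>
#include <json/json.h>
#include "httpfetch.h"

// Sketch: synchronous fetch + parse, mirroring what the removed helper did.
Json::Value fetch_json_example(const std::string &url)
{
	HTTPFetchRequest req;
	HTTPFetchResult res;
	req.url = url;
	req.caller = HTTPFETCH_SYNC;   // block until the request finishes
	httpfetch_sync(req, res);
	if (!res.succeeded)
		return Json::Value();

	Json::Value root;
	std::istringstream stream(res.data);
	Json::CharReaderBuilder builder;
	builder.settings_["collectComments"] = false;
	std::string errs;
	if (!Json::parseFromStream(builder, stream, &root, &errs))
		return Json::Value();
	return root;
}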

@@ -22,9 +22,6 @@ with this program; if not, write to the Free Software Foundation, Inc.,
#include <json/json.h>
#include <ostream>
-Json::Value fetchJsonValue(const std::string &url,
-std::vector<std::string> *extra_headers);
void fastWriteJson(const Json::Value &value, std::ostream &to);
std::string fastWriteJson(const Json::Value &value);

@@ -56,7 +56,7 @@ void set_default_settings()
settings->setDefault("client_unload_unused_data_timeout", "600");
settings->setDefault("client_mapblock_limit", "7500");
settings->setDefault("enable_build_where_you_stand", "false");
settings->setDefault("curl_timeout", "5000");
settings->setDefault("curl_timeout", "20000");
settings->setDefault("curl_parallel_limit", "8");
settings->setDefault("curl_file_download_timeout", "300000");
settings->setDefault("curl_verify_cert", "true");

@@ -22,7 +22,7 @@ with this program; if not, write to the Free Software Foundation, Inc.,
#include <iostream>
#include <sstream>
#include <list>
-#include <map>
+#include <unordered_map>
#include <cerrno>
#include <mutex>
#include "network/socket.h" // for select()
@@ -37,13 +37,14 @@ with this program; if not, write to the Free Software Foundation, Inc.,
#include "settings.h"
#include "noise.h"
-std::mutex g_httpfetch_mutex;
-std::map<unsigned long, std::queue<HTTPFetchResult> > g_httpfetch_results;
-PcgRandom g_callerid_randomness;
+static std::mutex g_httpfetch_mutex;
+static std::unordered_map<unsigned long, std::queue<HTTPFetchResult>>
+g_httpfetch_results;
+static PcgRandom g_callerid_randomness;
HTTPFetchRequest::HTTPFetchRequest() :
timeout(g_settings->getS32("curl_timeout")),
-connect_timeout(timeout),
+connect_timeout(10 * 1000),
useragent(std::string(PROJECT_NAME_C "/") + g_version_hash + " (" + porting::get_sysinfo() + ")")
{
}
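The constructor change above is the heart of the commit: the overall request timeout still comes from curl_timeout (now 20 s by default), while the connect timeout is fixed at 10 s instead of mirroring the total. For context on what the two fields control, they ultimately become cURL's total-transfer and connection-setup limits; the sketch below is assumed to mirror what httpfetch.cpp does with libcurl, not copied from it:

#include <curl/curl.h>
#include "httpfetch.h"

// Sketch: how the request's two timeouts map onto libcurl options.
// CURLOPT_TIMEOUT_MS bounds the whole transfer; CURLOPT_CONNECTTIMEOUT_MS
// bounds only the connection setup phase.
static void apply_timeouts(CURL *curl, const HTTPFetchRequest &req)
{
	curl_easy_setopt(curl, CURLOPT_TIMEOUT_MS, (long)req.timeout);
	curl_easy_setopt(curl, CURLOPT_CONNECTTIMEOUT_MS, (long)req.connect_timeout);
}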
@@ -54,7 +55,7 @@ static void httpfetch_deliver_result(const HTTPFetchResult &fetch_result)
unsigned long caller = fetch_result.caller;
if (caller != HTTPFETCH_DISCARD) {
MutexAutoLock lock(g_httpfetch_mutex);
-g_httpfetch_results[caller].push(fetch_result);
+g_httpfetch_results[caller].emplace(fetch_result);
}
}
@@ -67,8 +68,7 @@ unsigned long httpfetch_caller_alloc()
// Check each caller ID except HTTPFETCH_DISCARD
const unsigned long discard = HTTPFETCH_DISCARD;
for (unsigned long caller = discard + 1; caller != discard; ++caller) {
-std::map<unsigned long, std::queue<HTTPFetchResult> >::iterator
-it = g_httpfetch_results.find(caller);
+auto it = g_httpfetch_results.find(caller);
if (it == g_httpfetch_results.end()) {
verbosestream << "httpfetch_caller_alloc: allocating "
<< caller << std::endl;
@@ -127,8 +127,7 @@ bool httpfetch_async_get(unsigned long caller, HTTPFetchResult &fetch_result)
MutexAutoLock lock(g_httpfetch_mutex);
// Check that caller exists
-std::map<unsigned long, std::queue<HTTPFetchResult> >::iterator
-it = g_httpfetch_results.find(caller);
+auto it = g_httpfetch_results.find(caller);
if (it == g_httpfetch_results.end())
return false;
@@ -138,7 +137,7 @@ bool httpfetch_async_get(unsigned long caller, HTTPFetchResult &fetch_result)
return false;
// Pop first result
-fetch_result = caller_results.front();
+fetch_result = std::move(caller_results.front());
caller_results.pop();
return true;
}
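Putting the asynchronous pieces from these hunks together: a caller allocates an ID, fires requests, and polls for results, which are queued per caller in g_httpfetch_results and handed back by httpfetch_async_get. A usage sketch follows; the URL is a placeholder and httpfetch_caller_free is assumed to be the matching cleanup call in httpfetch.h:

#include <chrono>
#include <thread>
#include "httpfetch.h"

// Sketch: typical asynchronous use of the httpfetch API touched above.
void example_async_usage()
{
	unsigned long caller = httpfetch_caller_alloc(); // reserve a result queue

	HTTPFetchRequest req;            // default timeouts from the new constructor
	req.url = "http://example.org/status.json";      // placeholder URL
	req.caller = caller;
	req.request_id = 1;
	httpfetch_async(req);

	// Poll until the result shows up. In real code this poll happens from the
	// main loop; sleeping here just keeps the sketch from spinning.
	HTTPFetchResult res;
	while (!httpfetch_async_get(caller, res))
		std::this_thread::sleep_for(std::chrono::milliseconds(100));

	if (res.succeeded) {
		// res.data holds the response body
	}

	httpfetch_caller_free(caller);   // assumed cleanup counterpart to _alloc
}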