From c8465ce06fab84103dc1fa613a28f22d68554fb1 Mon Sep 17 00:00:00 2001
From: Tom Gundersen
Date: Sat, 14 Mar 2020 17:32:21 +0100
Subject: [PATCH] sources/files: time-out curl

Add a 10s connection timeout for each file transfer. Also add an
increasing per-attempt maximum transfer time (30s on the first attempt,
growing to 165s on the last). Also increase the number of retries to 10
and the number of concurrent download workers to 15.

Hopefully this makes things a bit more stable in the face of bad
mirrors. We were encountering mirrors that would hang either on connect
or download at such slow speeds that they might as well have stalled
(~1kB in 45s).

Follow-up patches will provide a longer-term solution by allowing the
same mirror selection that dnf currently uses.

Signed-off-by: Tom Gundersen
---
 sources/org.osbuild.files | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/sources/org.osbuild.files b/sources/org.osbuild.files
index 0366b3c9..d7800574 100755
--- a/sources/org.osbuild.files
+++ b/sources/org.osbuild.files
@@ -34,10 +34,12 @@ def fetch(url, checksum, directory):
     # subdirectory, so we avoid copying accross block devices.
     with tempfile.TemporaryDirectory(prefix="osbuild-unverified-file-", dir=directory) as tmpdir:
         # some mirrors are broken sometimes. retry manually, because curl doesn't on 404
-        for _ in range(3):
+        for i in range(10):
             curl = subprocess.run([
                 "curl",
                 "--silent",
+                "--max-time", f"{30 + i*15}",
+                "--connect-timeout", "10",
                 "--show-error",
                 "--fail",
                 "--location",
@@ -67,7 +69,7 @@ def main(options, checksums, cache, output):
     os.makedirs(cache, exist_ok=True)
     os.makedirs(output, exist_ok=True)
 
-    with concurrent.futures.ProcessPoolExecutor(max_workers=10) as executor:
+    with concurrent.futures.ProcessPoolExecutor(max_workers=15) as executor:
         requested_urls = []
         for checksum in checksums:
             try:
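
For illustration only, not part of the patch: a minimal standalone sketch
of the retry loop that the patched fetch() ends up with, using the same
curl flags and timeout progression. The fetch_with_retries name, the dest
argument and the bare --output handling are simplifications introduced
here, not code from sources/org.osbuild.files.

import subprocess

def fetch_with_retries(url, dest, attempts=10):
    """Download url to dest, retrying with progressively longer timeouts."""
    for i in range(attempts):
        # --connect-timeout caps each connection attempt at 10s;
        # --max-time caps the whole transfer, growing from 30s on the
        # first attempt to 165s on the tenth.
        curl = subprocess.run([
            "curl",
            "--silent",
            "--max-time", f"{30 + i*15}",
            "--connect-timeout", "10",
            "--show-error",
            "--fail",
            "--location",
            "--output", dest,
            url,
        ], encoding="utf-8", check=False)
        if curl.returncode == 0:
            return True
    return False

With these values a consistently stalled mirror is given up on after at
most ten attempts, each capped at 10s to connect, for a worst case of
about 975s (roughly 16 minutes) of cumulative transfer time rather than
an indefinite hang.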