1
0
mirror of https://github.com/facebook/proxygen.git synced 2025-08-08 18:02:05 +03:00

Add retries to ArchiveFetcher

Summary:
X-link: https://github.com/facebookincubator/zstrong/pull/1227

Add retries to ArchiveFetcher when downloading fails. There will be 4 retries, with backoff and jitter. The max delay is capped at 10 seconds.

Reviewed By: srikrishnagopu

Differential Revision: D71167342

fbshipit-source-id: d927a639cf99185c5a04d063400bdab874dfddfe
This commit is contained in:
Paul Cruz
2025-03-14 12:10:44 -07:00
committed by Facebook GitHub Bot
parent a5ab6f31cf
commit 6bb525691b
2 changed files with 182 additions and 1 deletion

View File

@@ -9,6 +9,7 @@
import errno
import hashlib
import os
import random
import re
import shutil
import stat
@@ -837,7 +838,20 @@ class ArchiveFetcher(Fetcher):
def _download(self) -> None:
    """Download the archive to ``self.file_name``, then verify its hash.

    Transient download failures are retried with exponential backoff:
    up to 4 retries (5 attempts total), doubling the delay each time,
    with up to 10% random jitter added, and each sleep capped at 10
    seconds. A failure on the final attempt is logged and re-raised.

    Raises:
        TransientFailure: if the download still fails after all retries.
    """
    self._download_dir()
    max_attempts = 5  # 1 initial attempt + 4 retries
    delay = 1
    for attempt in range(max_attempts):
        try:
            download_url_to_file_with_progress(self.url, self.file_name)
            break  # success -- stop retrying
        except TransientFailure as tf:
            if attempt < max_attempts - 1:
                # Exponential backoff with jitter: double the base delay,
                # add up to 10% random jitter, and never sleep > 10s.
                delay *= 2
                delay_with_jitter = delay * (1 + random.random() * 0.1)
                time.sleep(min(delay_with_jitter, 10))
            else:
                print(f"Failed after retries: {tf}")
                raise
    self._verify_hash()
def clean(self) -> None: