Skip to content

Commit 511feac

Browse files
committed
urlcache: Add a cache buster on the last retry
Might give us some idea of whether some rare persistent issues are cache-related or not. Signed-off-by: Hector Martin <[email protected]>
1 parent bb8ca6d commit 511feac

1 file changed

Lines changed: 7 additions & 4 deletions

File tree

src/urlcache.py

Lines changed: 7 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
11
# SPDX-License-Identifier: MIT
2-
import os, sys, os.path, time, logging
2+
import os, sys, os.path, time, logging, random
33
from dataclasses import dataclass
44

55
from urllib import request
@@ -35,9 +35,12 @@ def get_size(self):
3535
fd = request.urlopen(req)
3636
return int(fd.getheader("Content-length"))
3737

38-
def get_partial(self, off, size):
38+
def get_partial(self, off, size, bypass_cache=False):
3939
#print("get_partial", off, size)
40-
req = request.Request(self.url, method="GET")
40+
url = self.url
41+
if bypass_cache:
42+
url += f"?{random.random()}"
43+
req = request.Request(url, method="GET")
4144
req.add_header("Range", f"bytes={off}-{off+size-1}")
4245
fd = request.urlopen(req, timeout=self.TIMEOUT)
4346

@@ -75,7 +78,7 @@ def get_block(self, blk, readahead=1):
7578
sleep = 1
7679
for retry in range(retries + 1):
7780
try:
78-
data = self.get_partial(off, size)
81+
data = self.get_partial(off, size, bypass_cache=(retry == retries))
7982
except Exception as e:
8083
if retry == retries:
8184
p_error(f"Exceeded maximum retries downloading data.")

0 commit comments

Comments (0)