|
9 | 9 |
|
10 | 10 | """ |
11 | 11 |
|
| 12 | +import time |
12 | 13 | import urllib.parse |
13 | 14 | import urllib.request |
14 | 15 | import urllib.error |
|
19 | 20 |
|
20 | 21 | from . import keys |
21 | 22 | from .utils import urlopen_and_read |
22 | | -from .flickrerrors import FlickrError, FlickrAPIError, FlickrServerError |
| 23 | +from .flickrerrors import FlickrError, FlickrAPIError, FlickrServerError, FlickrRateLimitError |
23 | 24 | from .cache import SimpleCache |
24 | 25 |
|
25 | 26 | REST_URL = "https://api.flickr.com/services/rest/" |
|
30 | 31 |
|
31 | 32 | logger = logging.getLogger(__name__) |
32 | 33 |
|
# Rate limit retry configuration (mutable at runtime via set_retry_config)
MAX_RETRIES: int = 3  # retries after the first 429 response; 0 disables retrying
RETRY_BASE_DELAY: float = 1.0  # Base delay in seconds for exponential backoff
RETRY_MAX_DELAY: float = 60.0  # Maximum delay between retries

33 | 39 |
|
34 | 40 | def enable_cache(cache_object: Any | None = None) -> None: |
35 | 41 | """enable caching |
@@ -64,6 +70,150 @@ def get_timeout() -> float: |
64 | 70 | return TIMEOUT |
65 | 71 |
|
66 | 72 |
|
| 73 | +def set_retry_config( |
| 74 | + max_retries: int | None = None, |
| 75 | + base_delay: float | None = None, |
| 76 | + max_delay: float | None = None, |
| 77 | +) -> None: |
| 78 | + """Configure rate limit retry behavior. |
| 79 | +
|
| 80 | + Parameters: |
| 81 | + ----------- |
| 82 | + max_retries: int, optional |
| 83 | + Maximum number of retries on rate limit (default 3). Set to 0 to disable. |
| 84 | + base_delay: float, optional |
| 85 | + Base delay in seconds for exponential backoff (default 1.0) |
| 86 | + max_delay: float, optional |
| 87 | + Maximum delay between retries in seconds (default 60.0) |
| 88 | + """ |
| 89 | + global MAX_RETRIES, RETRY_BASE_DELAY, RETRY_MAX_DELAY |
| 90 | + if max_retries is not None: |
| 91 | + MAX_RETRIES = max_retries |
| 92 | + if base_delay is not None: |
| 93 | + RETRY_BASE_DELAY = base_delay |
| 94 | + if max_delay is not None: |
| 95 | + RETRY_MAX_DELAY = max_delay |
| 96 | + |
| 97 | + |
def get_retry_config() -> dict[str, Any]:
    """Return a snapshot of the active retry settings.

    Returns:
    --------
    dict with keys: max_retries, base_delay, max_delay
    """
    # Build a fresh dict each call so callers cannot mutate the live config.
    snapshot: dict[str, Any] = {}
    snapshot["max_retries"] = MAX_RETRIES
    snapshot["base_delay"] = RETRY_BASE_DELAY
    snapshot["max_delay"] = RETRY_MAX_DELAY
    return snapshot
| 110 | + |
| 111 | + |
| 112 | +def _calculate_retry_delay(attempt: int, retry_after: float | None) -> float: |
| 113 | + """Calculate delay before next retry. |
| 114 | +
|
| 115 | + Uses Retry-After header if available, otherwise exponential backoff. |
| 116 | +
|
| 117 | + Parameters: |
| 118 | + ----------- |
| 119 | + attempt: int |
| 120 | + Current retry attempt number (0-indexed) |
| 121 | + retry_after: float | None |
| 122 | + Value from Retry-After header, if present |
| 123 | +
|
| 124 | + Returns: |
| 125 | + -------- |
| 126 | + Delay in seconds |
| 127 | + """ |
| 128 | + if retry_after is not None and retry_after > 0: |
| 129 | + return min(retry_after, RETRY_MAX_DELAY) |
| 130 | + |
| 131 | + # Exponential backoff: base_delay * 2^attempt |
| 132 | + delay = RETRY_BASE_DELAY * (2**attempt) |
| 133 | + return min(delay, RETRY_MAX_DELAY) |
| 134 | + |
| 135 | + |
| 136 | +def _parse_retry_after(response: requests.Response) -> float | None: |
| 137 | + """Parse Retry-After header from response. |
| 138 | +
|
| 139 | + Parameters: |
| 140 | + ----------- |
| 141 | + response: requests.Response |
| 142 | + The HTTP response |
| 143 | +
|
| 144 | + Returns: |
| 145 | + -------- |
| 146 | + Seconds to wait, or None if header not present/parseable |
| 147 | + """ |
| 148 | + retry_after = response.headers.get("Retry-After") |
| 149 | + if retry_after is None: |
| 150 | + return None |
| 151 | + |
| 152 | + try: |
| 153 | + return float(retry_after) |
| 154 | + except ValueError: |
| 155 | + # Could be an HTTP-date format, but Flickr typically uses seconds |
| 156 | + logger.warning("Could not parse Retry-After header: %s", retry_after) |
| 157 | + return None |
| 158 | + |
| 159 | + |
def _make_request_with_retry(
    request_url: str,
    args: dict[str, Any],
    oauth_auth: Any,
) -> requests.Response:
    """POST to the Flickr endpoint, retrying automatically on HTTP 429.

    Parameters:
    -----------
    request_url: str
        The URL to request
    args: dict
        Request arguments
    oauth_auth: Any
        OAuth authentication object (or None)

    Returns:
    --------
    requests.Response

    Raises:
    -------
    FlickrRateLimitError: If rate limit exceeded and max retries exhausted
    """
    rate_limit_error: FlickrRateLimitError | None = None

    for attempt in range(MAX_RETRIES + 1):
        response = requests.post(request_url, args, auth=oauth_auth, timeout=get_timeout())

        # Anything other than a 429 (success or a different failure) goes
        # straight back to the caller untouched.
        if response.status_code != 429:
            return response

        # Record the rate-limit details so they can be raised if we give up.
        retry_after = _parse_retry_after(response)
        body = response.content.decode("utf8") if response.content else "Too Many Requests"
        rate_limit_error = FlickrRateLimitError(retry_after, body)

        if attempt >= MAX_RETRIES:
            logger.warning(
                "Rate limit exceeded, max retries (%d) exhausted",
                MAX_RETRIES,
            )
            break

        wait = _calculate_retry_delay(attempt, retry_after)
        logger.warning(
            "Rate limit exceeded (attempt %d/%d), retrying in %.1f seconds",
            attempt + 1,
            MAX_RETRIES + 1,
            wait,
        )
        time.sleep(wait)

    # Every path that reaches this point recorded a rate-limit error first.
    raise rate_limit_error  # type: ignore[misc]
| 215 | + |
| 216 | + |
67 | 217 | def send_request(url, data): |
68 | 218 | """send a http request.""" |
69 | 219 | req = urllib.request.Request(url, data.encode()) |
@@ -145,19 +295,19 @@ def call_api( |
145 | 295 | args = dict(oauth_request.items()) |
146 | 296 |
|
147 | 297 | if CACHE is None: |
148 | | - resp = requests.post(request_url, args, auth=oauth_auth, timeout=get_timeout()) |
| 298 | + resp = _make_request_with_retry(request_url, args, oauth_auth) |
149 | 299 | else: |
150 | 300 | cachekey = {k: v for k, v in args.items() if k not in IGNORED_FIELDS} |
151 | 301 | cachekey = urllib.parse.urlencode(cachekey) |
152 | 302 |
|
153 | | - resp = CACHE.get(cachekey) or requests.post( |
154 | | - request_url, args, auth=oauth_auth, timeout=get_timeout() |
155 | | - ) |
156 | | - if cachekey not in CACHE: |
157 | | - CACHE.set(cachekey, resp) |
158 | | - logger.debug("NO HIT for cache key: %s" % cachekey) |
| 303 | + cached_resp = CACHE.get(cachekey) |
| 304 | + if cached_resp: |
| 305 | + resp = cached_resp |
| 306 | + logger.debug(" HIT for cache key: %s", cachekey) |
159 | 307 | else: |
160 | | - logger.debug(" HIT for cache key: %s" % cachekey) |
| 308 | + resp = _make_request_with_retry(request_url, args, oauth_auth) |
| 309 | + CACHE.set(cachekey, resp) |
| 310 | + logger.debug("NO HIT for cache key: %s", cachekey) |
161 | 311 |
|
162 | 312 | if raw: |
163 | 313 | return resp.content |
|
0 commit comments