From 57c38f5ccdbf120d5b362d962a9c0c85d5ac6481 Mon Sep 17 00:00:00 2001
From: Allan Beaufour
Date: Sun, 25 Jan 2026 09:07:07 -0500
Subject: [PATCH] Add rate limit handling with automatic retry

- Add FlickrRateLimitError exception for HTTP 429 responses
- Implement automatic retry with exponential backoff on rate limit
- Parse Retry-After header when available
- Add configurable retry settings: set_retry_config(), get_retry_config()
- Default: 3 retries, 1s base delay, 60s max delay

The retry behavior can be configured or disabled:

    flickr_api.set_retry_config(max_retries=0)  # Disable retries
    flickr_api.set_retry_config(max_retries=5, base_delay=2.0)

When rate limited and retries are exhausted, raises FlickrRateLimitError
with a retry_after attribute containing the suggested wait time.

Includes 17 new tests for rate limit handling.

Co-Authored-By: Claude Opus 4.5
---
 flickr_api/__init__.py     |   3 +
 flickr_api/flickrerrors.py |  36 ++++++
 flickr_api/method_call.py  | 168 +++++++++++++++++++++++--
 test/test_rate_limit.py    | 251 +++++++++++++++++++++++++++++++++++++
 4 files changed, 449 insertions(+), 9 deletions(-)
 create mode 100644 test/test_rate_limit.py

diff --git a/flickr_api/__init__.py b/flickr_api/__init__.py
index 9490b9c..7cb7288 100644
--- a/flickr_api/__init__.py
+++ b/flickr_api/__init__.py
@@ -35,7 +35,10 @@
 from .auth import set_auth_handler as set_auth_handler
 from .method_call import disable_cache as disable_cache
 from .method_call import enable_cache as enable_cache
+from .method_call import get_retry_config as get_retry_config
 from .method_call import get_timeout as get_timeout
+from .method_call import set_retry_config as set_retry_config
 from .method_call import set_timeout as set_timeout
 from .keys import set_keys as set_keys
+from .flickrerrors import FlickrRateLimitError as FlickrRateLimitError
 from ._version import __version__ as __version__
diff --git a/flickr_api/flickrerrors.py b/flickr_api/flickrerrors.py
index 44b91fd..17d6145 100644
--- a/flickr_api/flickrerrors.py
+++ b/flickr_api/flickrerrors.py
@@ -65,3 +65,39 @@ def __init__(self, status_code: int, content: str) -> None:
         FlickrError.__init__(self, "HTTP Server Error %i: %s" % (status_code, content))
         self.status_code = status_code
         self.content = content
+
+
+class FlickrRateLimitError(FlickrError):
+    """Exception for Flickr Rate Limit Errors (HTTP 429)
+
+    Raised when the API rate limit has been exceeded. Contains retry
+    information to help callers implement backoff strategies.
+
+    Parameters:
+    -----------
+    retry_after: float | None
+        Seconds to wait before retrying, from Retry-After header (if provided)
+    content: str
+        error content message
+    """
+
+    retry_after: float | None
+    content: str
+
+    def __init__(self, retry_after: float | None, content: str) -> None:
+        """Constructor
+
+        Parameters:
+        -----------
+        retry_after: float | None
+            Seconds to wait before retrying (from Retry-After header, if available)
+        content: str
+            error content message
+        """
+        if retry_after:
+            msg = f"Rate limit exceeded. Retry after {retry_after} seconds: {content}"
+        else:
+            msg = f"Rate limit exceeded: {content}"
+        FlickrError.__init__(self, msg)
+        self.retry_after = retry_after
+        self.content = content
diff --git a/flickr_api/method_call.py b/flickr_api/method_call.py
index 286b46d..5f91254 100644
--- a/flickr_api/method_call.py
+++ b/flickr_api/method_call.py
@@ -9,6 +9,7 @@
 
 """
 
+import time
 import urllib.parse
 import urllib.request
 import urllib.error
@@ -19,7 +20,7 @@
 
 from . import keys
 from .utils import urlopen_and_read
-from .flickrerrors import FlickrError, FlickrAPIError, FlickrServerError
+from .flickrerrors import FlickrError, FlickrAPIError, FlickrServerError, FlickrRateLimitError
 from .cache import SimpleCache
 
 REST_URL = "https://api.flickr.com/services/rest/"
@@ -30,6 +31,11 @@
 
 logger = logging.getLogger(__name__)
 
+# Rate limit retry configuration
+MAX_RETRIES: int = 3
+RETRY_BASE_DELAY: float = 1.0  # Base delay in seconds for exponential backoff
+RETRY_MAX_DELAY: float = 60.0  # Maximum delay between retries
+
 
 def enable_cache(cache_object: Any | None = None) -> None:
     """enable caching
@@ -64,6 +70,150 @@ def get_timeout() -> float:
     return TIMEOUT
+
+def set_retry_config(
+    max_retries: int | None = None,
+    base_delay: float | None = None,
+    max_delay: float | None = None,
+) -> None:
+    """Configure rate limit retry behavior.
+
+    Parameters:
+    -----------
+    max_retries: int, optional
+        Maximum number of retries on rate limit (default 3). Set to 0 to disable.
+    base_delay: float, optional
+        Base delay in seconds for exponential backoff (default 1.0)
+    max_delay: float, optional
+        Maximum delay between retries in seconds (default 60.0)
+    """
+    global MAX_RETRIES, RETRY_BASE_DELAY, RETRY_MAX_DELAY
+    if max_retries is not None:
+        MAX_RETRIES = max_retries
+    if base_delay is not None:
+        RETRY_BASE_DELAY = base_delay
+    if max_delay is not None:
+        RETRY_MAX_DELAY = max_delay
+
+
+def get_retry_config() -> dict[str, Any]:
+    """Get current retry configuration.
+
+    Returns:
+    --------
+    dict with keys: max_retries, base_delay, max_delay
+    """
+    return {
+        "max_retries": MAX_RETRIES,
+        "base_delay": RETRY_BASE_DELAY,
+        "max_delay": RETRY_MAX_DELAY,
+    }
+
+
+def _calculate_retry_delay(attempt: int, retry_after: float | None) -> float:
+    """Calculate delay before next retry.
+
+    Uses Retry-After header if available, otherwise exponential backoff.
+
+    Parameters:
+    -----------
+    attempt: int
+        Current retry attempt number (0-indexed)
+    retry_after: float | None
+        Value from Retry-After header, if present
+
+    Returns:
+    --------
+    Delay in seconds
+    """
+    if retry_after is not None and retry_after > 0:
+        return min(retry_after, RETRY_MAX_DELAY)
+
+    # Exponential backoff: base_delay * 2^attempt
+    delay = RETRY_BASE_DELAY * (2**attempt)
+    return min(delay, RETRY_MAX_DELAY)
+
+
+def _parse_retry_after(response: requests.Response) -> float | None:
+    """Parse Retry-After header from response.
+
+    Parameters:
+    -----------
+    response: requests.Response
+        The HTTP response
+
+    Returns:
+    --------
+    Seconds to wait, or None if header not present/parseable
+    """
+    retry_after = response.headers.get("Retry-After")
+    if retry_after is None:
+        return None
+
+    try:
+        return float(retry_after)
+    except ValueError:
+        # Could be an HTTP-date format, but Flickr typically uses seconds
+        logger.warning("Could not parse Retry-After header: %s", retry_after)
+        return None
+
+
+def _make_request_with_retry(
+    request_url: str,
+    args: dict[str, Any],
+    oauth_auth: Any,
+) -> requests.Response:
+    """Make HTTP request with automatic retry on rate limit errors.
+
+    Parameters:
+    -----------
+    request_url: str
+        The URL to request
+    args: dict
+        Request arguments
+    oauth_auth: Any
+        OAuth authentication object (or None)
+
+    Returns:
+    --------
+    requests.Response
+
+    Raises:
+    -------
+    FlickrRateLimitError: If rate limit exceeded and max retries exhausted
+    """
+    last_error: FlickrRateLimitError | None = None
+
+    for attempt in range(MAX_RETRIES + 1):
+        resp = requests.post(request_url, args, auth=oauth_auth, timeout=get_timeout())
+
+        if resp.status_code != 429:
+            return resp
+
+        # Rate limited - parse retry info and potentially retry
+        retry_after = _parse_retry_after(resp)
+        content = resp.content.decode("utf8") if resp.content else "Too Many Requests"
+        last_error = FlickrRateLimitError(retry_after, content)
+
+        if attempt >= MAX_RETRIES:
+            logger.warning(
+                "Rate limit exceeded, max retries (%d) exhausted",
+                MAX_RETRIES,
+            )
+            break
+
+        delay = _calculate_retry_delay(attempt, retry_after)
+        logger.warning(
+            "Rate limit exceeded (attempt %d/%d), retrying in %.1f seconds",
+            attempt + 1,
+            MAX_RETRIES + 1,
+            delay,
+        )
+        time.sleep(delay)
+
+    # If we get here, we've exhausted retries
+    raise last_error  # type: ignore[misc]
+
 
 
 def send_request(url, data):
     """send a http request."""
     req = urllib.request.Request(url, data.encode())
@@ -145,19 +295,19 @@
     args = dict(oauth_request.items())
 
     if CACHE is None:
-        resp = requests.post(request_url, args, auth=oauth_auth, timeout=get_timeout())
+        resp = _make_request_with_retry(request_url, args, oauth_auth)
     else:
         cachekey = {k: v for k, v in args.items() if k not in IGNORED_FIELDS}
         cachekey = urllib.parse.urlencode(cachekey)
 
-        resp = CACHE.get(cachekey) or requests.post(
-            request_url, args, auth=oauth_auth, timeout=get_timeout()
-        )
-        if cachekey not in CACHE:
-            CACHE.set(cachekey, resp)
-            logger.debug("NO HIT for cache key: %s" % cachekey)
+        cached_resp = CACHE.get(cachekey)
+        if cached_resp:
+            resp = cached_resp
+            logger.debug(" HIT for cache key: %s", cachekey)
         else:
-            logger.debug(" HIT for cache key: %s" % cachekey)
+            resp = _make_request_with_retry(request_url, args, oauth_auth)
+            CACHE.set(cachekey, resp)
+            logger.debug("NO HIT for cache key: %s", cachekey)
 
     if raw:
         return resp.content
diff --git a/test/test_rate_limit.py b/test/test_rate_limit.py
new file mode 100644
index 0000000..1a6a1a3
--- /dev/null
+++ b/test/test_rate_limit.py
@@ -0,0 +1,251 @@
+"""Tests for rate limit handling in method_call module."""
+
+import unittest
+from io import BytesIO
+from unittest.mock import MagicMock, patch
+
+from requests import Response
+
+import flickr_api as f
+from flickr_api import method_call
+from flickr_api.auth import AuthHandler
+from flickr_api.flickrerrors import FlickrRateLimitError
+
+
+class TestRateLimitConfig(unittest.TestCase):
+    """Tests for rate limit configuration functions."""
+
+    def setUp(self):
+        """Save original config values."""
+        self.original_config = method_call.get_retry_config()
+
+    def tearDown(self):
+        """Restore original config values."""
+        method_call.set_retry_config(
+            max_retries=self.original_config["max_retries"],
+            base_delay=self.original_config["base_delay"],
+            max_delay=self.original_config["max_delay"],
+        )
+
+    def test_get_retry_config_returns_defaults(self):
+        """get_retry_config returns default values."""
+        config = method_call.get_retry_config()
+        self.assertIn("max_retries", config)
+        self.assertIn("base_delay", config)
+        self.assertIn("max_delay", config)
+        self.assertEqual(3, config["max_retries"])
+        self.assertEqual(1.0, config["base_delay"])
+        self.assertEqual(60.0, config["max_delay"])
+
+    def test_set_retry_config_updates_values(self):
+        """set_retry_config updates configuration."""
+        method_call.set_retry_config(max_retries=5, base_delay=2.0, max_delay=120.0)
+        config = method_call.get_retry_config()
+        self.assertEqual(5, config["max_retries"])
+        self.assertEqual(2.0, config["base_delay"])
+        self.assertEqual(120.0, config["max_delay"])
+
+    def test_set_retry_config_partial_update(self):
+        """set_retry_config can update individual values."""
+        method_call.set_retry_config(max_retries=10)
+        config = method_call.get_retry_config()
+        self.assertEqual(10, config["max_retries"])
+        # Other values unchanged
+        self.assertEqual(1.0, config["base_delay"])
+        self.assertEqual(60.0, config["max_delay"])
+
+
+class TestCalculateRetryDelay(unittest.TestCase):
+    """Tests for _calculate_retry_delay function."""
+
+    def setUp(self):
+        """Save and set known config values."""
+        self.original_config = method_call.get_retry_config()
+        method_call.set_retry_config(base_delay=1.0, max_delay=60.0)
+
+    def tearDown(self):
+        """Restore original config values."""
+        method_call.set_retry_config(
+            max_retries=self.original_config["max_retries"],
+            base_delay=self.original_config["base_delay"],
+            max_delay=self.original_config["max_delay"],
+        )
+
+    def test_uses_retry_after_when_provided(self):
+        """Uses Retry-After value when available."""
+        delay = method_call._calculate_retry_delay(attempt=0, retry_after=10.0)
+        self.assertEqual(10.0, delay)
+
+    def test_caps_retry_after_at_max_delay(self):
+        """Caps Retry-After at max_delay."""
+        delay = method_call._calculate_retry_delay(attempt=0, retry_after=120.0)
+        self.assertEqual(60.0, delay)  # max_delay
+
+    def test_exponential_backoff_without_retry_after(self):
+        """Uses exponential backoff when Retry-After not provided."""
+        # attempt 0: 1 * 2^0 = 1
+        delay = method_call._calculate_retry_delay(attempt=0, retry_after=None)
+        self.assertEqual(1.0, delay)
+
+        # attempt 1: 1 * 2^1 = 2
+        delay = method_call._calculate_retry_delay(attempt=1, retry_after=None)
+        self.assertEqual(2.0, delay)
+
+        # attempt 2: 1 * 2^2 = 4
+        delay = method_call._calculate_retry_delay(attempt=2, retry_after=None)
+        self.assertEqual(4.0, delay)
+
+    def test_exponential_backoff_capped_at_max_delay(self):
+        """Exponential backoff is capped at max_delay."""
+        # attempt 10: 1 * 2^10 = 1024, but capped at 60
+        delay = method_call._calculate_retry_delay(attempt=10, retry_after=None)
+        self.assertEqual(60.0, delay)
+
+
+class TestParseRetryAfter(unittest.TestCase):
+    """Tests for _parse_retry_after function."""
+
+    def test_parses_numeric_value(self):
+        """Parses numeric Retry-After header."""
+        resp = Response()
+        resp.headers["Retry-After"] = "30"
+        result = method_call._parse_retry_after(resp)
+        self.assertEqual(30.0, result)
+
+    def test_parses_float_value(self):
+        """Parses float Retry-After header."""
+        resp = Response()
+        resp.headers["Retry-After"] = "30.5"
+        result = method_call._parse_retry_after(resp)
+        self.assertEqual(30.5, result)
+
+    def test_returns_none_when_header_missing(self):
+        """Returns None when Retry-After header is missing."""
+        resp = Response()
+        result = method_call._parse_retry_after(resp)
+        self.assertIsNone(result)
+
+    def test_returns_none_for_invalid_value(self):
+        """Returns None for non-numeric Retry-After header."""
+        resp = Response()
+        resp.headers["Retry-After"] = "invalid"
+        result = method_call._parse_retry_after(resp)
+        self.assertIsNone(result)
+
+
+class TestRateLimitHandling(unittest.TestCase):
+    """Tests for rate limit handling in call_api."""
+
+    def setUp(self):
+        """Set up test fixtures."""
+        auth_handler = AuthHandler(
+            key="test",
+            secret="test",
+            access_token_key="test",
+            access_token_secret="test",
+        )
+        f.set_auth_handler(auth_handler)
+        self.original_config = method_call.get_retry_config()
+        # Disable retries for most tests
+        method_call.set_retry_config(max_retries=0)
+
+    def tearDown(self):
+        """Restore original config values."""
+        method_call.set_retry_config(
+            max_retries=self.original_config["max_retries"],
+            base_delay=self.original_config["base_delay"],
+            max_delay=self.original_config["max_delay"],
+        )
+
+    def _create_429_response(self, retry_after=None, content="Too Many Requests"):
+        """Create a mock 429 response."""
+        resp = Response()
+        resp.status_code = 429
+        resp.raw = BytesIO(content.encode("utf-8"))
+        resp._content = content.encode("utf-8")
+        if retry_after:
+            resp.headers["Retry-After"] = str(retry_after)
+        return resp
+
+    def _create_ok_response(self):
+        """Create a mock successful response."""
+        resp = Response()
+        resp.status_code = 200
+        resp._content = b'{"stat": "ok", "user": {"id": "123", "username": {"_content": "testuser"}}}'
+        return resp
+
+    @patch.object(method_call.requests, "post")
+    def test_raises_rate_limit_error_on_429(self, mock_post):
+        """Raises FlickrRateLimitError on 429 response."""
+        mock_post.return_value = self._create_429_response()
+
+        with self.assertRaises(FlickrRateLimitError) as context:
+            f.Person.findByUserName("testuser")
+
+        self.assertIn("Rate limit exceeded", str(context.exception))
+
+    @patch.object(method_call.requests, "post")
+    def test_rate_limit_error_includes_retry_after(self, mock_post):
+        """FlickrRateLimitError includes Retry-After value."""
+        mock_post.return_value = self._create_429_response(retry_after=30)
+
+        with self.assertRaises(FlickrRateLimitError) as context:
+            f.Person.findByUserName("testuser")
+
+        self.assertEqual(30.0, context.exception.retry_after)
+        self.assertIn("30", str(context.exception))
+
+    @patch.object(method_call.requests, "post")
+    @patch.object(method_call.time, "sleep")
+    def test_retries_on_429_with_retries_enabled(self, mock_sleep, mock_post):
+        """Retries on 429 when retries are enabled."""
+        method_call.set_retry_config(max_retries=2, base_delay=1.0)
+
+        # First two calls return 429, third succeeds
+        mock_post.side_effect = [
+            self._create_429_response(retry_after=1),
+            self._create_429_response(retry_after=1),
+            self._create_ok_response(),
+        ]
+
+        # Should not raise - succeeds on third attempt
+        f.Person.findByUserName("testuser")
+
+        self.assertEqual(3, mock_post.call_count)
+        self.assertEqual(2, mock_sleep.call_count)
+
+    @patch.object(method_call.requests, "post")
+    @patch.object(method_call.time, "sleep")
+    def test_raises_after_max_retries_exceeded(self, mock_sleep, mock_post):
+        """Raises FlickrRateLimitError after max retries exceeded."""
+        method_call.set_retry_config(max_retries=2, base_delay=1.0)
+
+        # All calls return 429
+        mock_post.return_value = self._create_429_response(retry_after=1)
+
+        with self.assertRaises(FlickrRateLimitError):
+            f.Person.findByUserName("testuser")
+
+        # Initial attempt + 2 retries = 3 calls
+        self.assertEqual(3, mock_post.call_count)
+        # 2 sleeps (after first and second failures)
+        self.assertEqual(2, mock_sleep.call_count)
+
+
+class TestFlickrRateLimitError(unittest.TestCase):
+    """Tests for FlickrRateLimitError exception."""
+
+    def test_error_with_retry_after(self):
+        """Error message includes retry_after when provided."""
+        error = FlickrRateLimitError(retry_after=30.0, content="Rate limited")
+        self.assertEqual(30.0, error.retry_after)
+        self.assertEqual("Rate limited", error.content)
+        self.assertIn("30", str(error))
+        self.assertIn("Rate limit exceeded", str(error))
+
+    def test_error_without_retry_after(self):
+        """Error message works without retry_after."""
+        error = FlickrRateLimitError(retry_after=None, content="Rate limited")
+        self.assertIsNone(error.retry_after)
+        self.assertEqual("Rate limited", error.content)
+        self.assertIn("Rate limit exceeded", str(error))
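
Example usage, as a rough sketch of how client code might combine the new
set_retry_config() hook with the FlickrRateLimitError exception once this
patch is applied. The key values, the username, and the fallback wait of 60
seconds are illustrative only; Person.findByUserName is used here simply
because the tests above exercise it, and any API call can be rate limited.

    import flickr_api
    from flickr_api import FlickrRateLimitError

    flickr_api.set_keys(api_key="YOUR_KEY", api_secret="YOUR_SECRET")  # placeholder credentials
    flickr_api.set_retry_config(max_retries=5, base_delay=2.0)  # up to 5 retries, 2s base backoff

    try:
        person = flickr_api.Person.findByUserName("some_user")  # illustrative call
    except FlickrRateLimitError as exc:
        # retry_after is None when Flickr did not send a Retry-After header
        wait = exc.retry_after or 60.0
        print(f"Rate limited; waiting {wait} seconds before trying again")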