diff --git a/pyinaturalist/api_requests.py b/pyinaturalist/api_requests.py
index f9edd7ba..6224f0c4 100644
--- a/pyinaturalist/api_requests.py
+++ b/pyinaturalist/api_requests.py
@@ -23,20 +23,6 @@ from pyinaturalist.forge_utils import copy_signature
 from pyinaturalist.request_params import prepare_request
 
-# Request rate limits. Only compatible with python 3.7+.
-# TODO: Remove try-except after dropping support for python 3.6
-try:
-    from pyrate_limiter import Duration, Limiter, RequestRate
-
-    REQUEST_RATES = [
-        RequestRate(REQUESTS_PER_SECOND, Duration.SECOND),
-        RequestRate(REQUESTS_PER_MINUTE, Duration.MINUTE),
-        RequestRate(REQUESTS_PER_DAY, Duration.DAY),
-    ]
-    RATE_LIMITER = Limiter(*REQUEST_RATES)
-except ImportError:
-    RATE_LIMITER = None
-
 
 # Mock response content to return in dry-run mode
 MOCK_RESPONSE = Mock(spec=requests.Response)
 MOCK_RESPONSE.json.return_value = {'results': [], 'total_results': 0, 'access_token': ''}
@@ -111,17 +97,32 @@ def put(url: str, **kwargs) -> requests.Response:
 
 # TODO: Handle error 429 if we still somehow exceed the rate limit?
 @contextmanager
-def ratelimit(limiter=RATE_LIMITER, bucket=pyinaturalist.user_agent):
+def ratelimit(bucket=pyinaturalist.user_agent):
     """Add delays in between requests to stay within the rate limits.
     If pyrate-limiter is not installed, this will quietly do nothing.
     """
-    if limiter:
-        with limiter.ratelimit(bucket, delay=True, max_delay=MAX_DELAY):
+    if RATE_LIMITER:
+        with RATE_LIMITER.ratelimit(bucket, delay=True, max_delay=MAX_DELAY):
             yield
     else:
         yield
 
 
+def get_limiter():
+    """Get a rate limiter object, if pyrate-limiter is installed"""
+    try:
+        from pyrate_limiter import Duration, Limiter, RequestRate
+
+        request_rates = [
+            RequestRate(REQUESTS_PER_SECOND, Duration.SECOND),
+            RequestRate(REQUESTS_PER_MINUTE, Duration.MINUTE),
+            RequestRate(REQUESTS_PER_DAY, Duration.DAY),
+        ]
+        return Limiter(*request_rates)
+    except ImportError:
+        return None
+
+
 def get_session() -> requests.Session:
     """Get a Session object that will be reused across requests to take advantage of connection
     pooling. This is especially relevant for large paginated requests. If used in a multi-threaded
@@ -158,3 +159,6 @@ def log_request(*args, **kwargs):
     """Log all relevant information about an HTTP request"""
     kwargs_strs = [f'{k}={v}' for k, v in kwargs.items()]
     logger.info('Request: {}'.format(', '.join(list(args) + kwargs_strs)))
+
+
+RATE_LIMITER = get_limiter()
diff --git a/test/test_api_requests.py b/test/test_api_requests.py
index c0d3a101..e3effa90 100644
--- a/test/test_api_requests.py
+++ b/test/test_api_requests.py
@@ -125,14 +125,13 @@ def test_request_dry_run_disabled(requests_mock):
     assert request('GET', 'http://url').json() == real_response
 
 
+@patch('pyinaturalist.api_requests.RATE_LIMITER', Limiter(RequestRate(5, Duration.SECOND)))
 @patch('pyrate_limiter.limit_context_decorator.sleep', side_effect=sleep)
 def test_ratelimit(mock_sleep):
-
-    limiter = Limiter(RequestRate(5, Duration.SECOND))
     mock_func = MagicMock()
 
     for i in range(6):
-        with ratelimit(limiter, bucket='pytest-1'):
+        with ratelimit(bucket='pytest-1'):
             mock_func()
 
     # With 6 requests and a limit of 5 request/second, there should be a delay for the final request
@@ -140,10 +139,11 @@
     assert mock_sleep.call_count == 1
 
 
+@patch('pyinaturalist.api_requests.RATE_LIMITER', None)
 @patch('pyrate_limiter.limit_context_decorator.sleep')
 def test_ratelimit__no_limiter(mock_sleep):
     for i in range(70):
-        with ratelimit(None):
+        with ratelimit():
             pass
 
     assert mock_sleep.call_count == 0
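
Reviewer note on the signature change: Python evaluates default arguments once, at function definition time, so the old `ratelimit(limiter=RATE_LIMITER, ...)` captured whatever `RATE_LIMITER` was at import and could not be swapped out by `@patch` in the tests above. Resolving the module attribute at call time (and initializing `RATE_LIMITER` at the bottom via `get_limiter()`) avoids that. A minimal standalone sketch of the pitfall; all names below are illustrative, not from this diff:

    from unittest.mock import patch

    LIMITER = 'real'

    def uses_default(limiter=LIMITER):
        # Default was bound when the function was defined
        return limiter

    def uses_module_attr():
        # Name is resolved at call time, so patching takes effect
        return LIMITER

    with patch(f'{__name__}.LIMITER', 'patched'):
        print(uses_default())      # -> 'real'
        print(uses_module_attr())  # -> 'patched'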
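
For context, a sketch of how calling code is expected to use the new signature; `fetch` is a hypothetical helper, not part of this diff or of pyinaturalist:

    import requests
    from pyinaturalist.api_requests import ratelimit

    def fetch(url: str) -> requests.Response:
        # Blocks just long enough to stay within the configured rates;
        # quietly a no-op if pyrate-limiter is not installed
        with ratelimit():
            return requests.get(url)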