Skip to content

Commit 69e8530

Browse files
committed
- Remove the average-waiting-time calculation from the GitHub prefetcher
1 parent bdefd27 commit 69e8530

File tree

2 files changed

+26
-7
lines changed

2 files changed

+26
-7
lines changed

aura/github.py

Lines changed: 6 additions & 5 deletions
Original file line number | Diff line number | Diff line change
@@ -62,15 +62,16 @@ def time_to_reset(cls) -> float:
6262
else:
6363
return 0.0
6464

65-
def __get_json(self, url: str) -> Union[dict, list]:
65+
@staticmethod
66+
def _get_json(url: str) -> Union[dict, list]:
6667
payload = URLCache.proxy(url=url, tags=["github_api"], session=SESSION)
6768
return json.loads(payload)
6869

6970
def get_repository_data(self) -> dict:
7071
url = f"https://api.github.com/repos/{self.owner}/{self.name}"
7172

7273
try:
73-
return self.__get_json(url)
74+
return self._get_json(url)
7475
except HTTPError as exc:
7576
if exc.code == 404:
7677
raise NoSuchRepository(f"{self.owner}/{self.name}") from exc
@@ -84,7 +85,7 @@ def get_repository_data(self) -> dict:
8485

8586
def get_contributors(self) -> list:
8687
url = f"https://api.github.com/repos/{self.owner}/{self.name}/contributors"
87-
return self.__get_json(url=url)
88+
return self._get_json(url=url)
8889

8990

9091
class GitHubPrefetcher:
@@ -109,13 +110,13 @@ def wait_time(self) -> float:
109110
if (remaining := GitHub.x_api_remaining - self.safety_buffer) <= 0:
110111
return reset_time
111112

112-
return reset_time / remaining
113+
return 0.0
113114

114115
async def process(self, max_retries=3, backoff=1.0):
115116
while True:
116117
api_wait_time = self.wait_time
117118
if api_wait_time > 3:
118-
logger.info(f"Rate limit reached, waiting for {api_wait_time}s")
119+
logger.warning(f"Rate limit reached, waiting for {api_wait_time}s")
119120

120121
await asyncio.sleep(api_wait_time)
121122
item: Optional[str] = await self.queue.get()

tests/test_github.py

Lines changed: 20 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -1,6 +1,8 @@
11
import json
22
import pytest
3+
import requests
34
import responses
5+
from unittest import mock
46

57
from aura import github
68
from aura import cache
@@ -37,17 +39,33 @@ def test_invalid_github_repos(url, mock_github):
3739
@responses.activate
3840
def test_github_cache(mock_github, mock_cache):
3941
mock_github(responses)
42+
repo_url = "https://api.github.com/repos/psf/requests"
43+
contributors_url = "https://api.github.com/repos/psf/requests/contributors"
4044

45+
real_session = requests.Session()
46+
mock_session = mock.Mock(wraps=real_session, spec=True)
4147
_ = github.GitHub.from_url("https://github.com/psf/requests")
4248

43-
url1_cached = cache.URLCache(url="https://api.github.com/repos/psf/requests")
49+
url1_cached = cache.URLCache(url=repo_url)
4450
assert url1_cached.is_valid is True
4551
assert type(json.loads(url1_cached.fetch())) == dict
4652

47-
url2_cached = cache.URLCache(url="https://api.github.com/repos/psf/requests/contributors")
53+
url2_cached = cache.URLCache(url=contributors_url)
4854
assert url2_cached.is_valid
4955
assert type(json.loads(url2_cached.fetch())) == list
5056

57+
responses.reset()
58+
# Test that the data is cached and does not fire any requests
59+
_ = github.GitHub.from_url("https://github.com/psf/requests")
60+
61+
output = cache.URLCache.proxy(url=repo_url, session=mock_session)
62+
assert mock_session.called is False
63+
assert type(json.loads(output)) == dict
64+
65+
output = cache.URLCache.proxy(url=contributors_url, session=mock_session)
66+
assert mock_session.called is False
67+
assert type(json.loads(output)) == list
68+
5169

5270
@responses.activate
5371
def test_github_rate_limit_reached():

0 commit comments

Comments (0)