1
+ import asyncio
1
2
import logging
2
3
import os
3
- import time
4
4
5
+ import aiohttp
5
6
import berserk
6
- import requests
7
7
from chessdotcom import ChessDotComClient
8
8
from dotenv import load_dotenv
9
9
@@ -85,7 +85,28 @@ def does_chess_dot_com_player_exists(username: str):
85
85
return True
86
86
87
87
88
- def get_chess_dot_com_games (username : str ) -> str :
88
async def fetch_archive(
    archive_url: str, session: aiohttp.ClientSession, semaphore: asyncio.Semaphore
) -> str:
    """Fetch the PGN text of a single chess.com monthly archive.

    Retries indefinitely on HTTP 429, honoring a numeric ``Retry-After``
    header when the server sends one; otherwise falls back to exponential
    backoff capped at 120 seconds.

    Args:
        archive_url: Base URL of the monthly archive (``/pgn`` is appended).
        session: Shared aiohttp session used for the request.
        semaphore: Caps the number of concurrent in-flight requests.

    Returns:
        The archive's PGN text, or an empty string on a non-retryable error.
    """
    backoff = 5  # seconds; doubled after each 429 lacking a usable Retry-After
    async with semaphore:
        while True:
            async with session.get(f"{archive_url}/pgn") as resp:
                if resp.status == 429:
                    retry_after = resp.headers.get("Retry-After")
                    # Retry-After may also be an HTTP-date (RFC 9110); only
                    # trust it when it is a plain integer number of seconds.
                    if retry_after and retry_after.isdigit():
                        LOG.info(f"Rate limited. Retrying after {retry_after} seconds.")
                        await asyncio.sleep(int(retry_after))
                    else:
                        # No usable Retry-After: capped exponential backoff.
                        LOG.info(f"Rate limited. Retrying after {backoff} seconds.")
                        await asyncio.sleep(backoff)
                        backoff = min(backoff * 2, 120)
                elif resp.status == 200:
                    return await resp.text()
                else:
                    LOG.error(f"Failed to fetch {archive_url}: {resp.status}")
                    return ""
109
async def get_chess_dot_com_games(username: str) -> str:
    """Get all games of `username` from chess.com platform.

    Fetches every monthly archive concurrently (at most ``conn_limit``
    requests in flight at once) and concatenates the PGN texts.

    Args:
        username: chess.com username whose games are fetched.

    Returns:
        string of all games, archives separated by blank lines.
    """
    conn_limit = 15
    semaphore = asyncio.Semaphore(conn_limit)
    # NOTE(review): this client call is synchronous and blocks the event
    # loop while it runs — confirm acceptable, or wrap in asyncio.to_thread.
    response = chessdotcomClient.get_player_game_archives(username)
    archives: dict[str, list[str]] = response.json
    # TCPConnector limit matches the semaphore so the connection pool never
    # queues more sockets than we allow requests.
    connection = aiohttp.TCPConnector(limit=conn_limit)
    async with aiohttp.ClientSession(connector=connection) as session:
        tasks = [
            fetch_archive(archive, session, semaphore)
            for archive in archives.get("archives", [])
        ]
        # gather preserves the order of the archives list in its results.
        all_pgns = await asyncio.gather(*tasks)
    return "\n\n".join(all_pgns)
0 commit comments