 import time
 from typing import Optional
 
-import aiohttp
+import httpx
 
 from ..utils.logger import logger
 
@@ -22,7 +22,7 @@ def __init__(self,
         self.record_url = record_url
         self.save_path = save_path
         self.headers = headers or {}
-        self.proxy = proxy
+        self.proxy = proxy or None
         self.chunk_size = chunk_size
         self.stop_event = asyncio.Event()
         self.process = None
@@ -50,22 +50,15 @@ async def _download_stream(self) -> None:
         try:
             os.makedirs(os.path.dirname(self.save_path), exist_ok=True)
 
-            async with aiohttp.ClientSession() as session:
-                proxy_settings = {}
-                if self.proxy:
-                    proxy_settings['proxy'] = self.proxy
-
-                async with session.get(self.record_url, headers=self.headers,
-                                       timeout=aiohttp.ClientTimeout(total=None),
-                                       **proxy_settings) as response:
-                    if response.status != 200:
-                        logger.error(f"Request Stream Failed, Status Code: {response.status}")
+            async with httpx.AsyncClient(headers=self.headers, proxy=self.proxy, timeout=None) as client:
+                async with client.stream("GET", self.record_url) as response:
+                    if response.status_code != 200:
+                        logger.error(f"Request Stream Failed, Status Code: {response.status_code}")
                         return
 
                     with open(self.save_path, 'wb') as f:
-                        while not self.stop_event.is_set():
-                            chunk = await response.content.read(self.chunk_size)
-                            if not chunk:
+                        async for chunk in response.aiter_bytes(self.chunk_size):
+                            if self.stop_event.is_set():
                                 break
 
                             f.write(chunk)
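For context, the following is a minimal, self-contained sketch of the httpx streaming pattern the new code relies on; it is not part of the diff. It assumes httpx >= 0.26, where AsyncClient accepts a singular proxy argument, and the URL, output path, and chunk size are placeholders.

import asyncio
from typing import Optional

import httpx


async def download(url: str, save_path: str, proxy: Optional[str] = None) -> None:
    # timeout=None disables httpx's default 5-second timeout so a long-lived
    # stream is not cut off; proxy=None simply means "no proxy".
    async with httpx.AsyncClient(proxy=proxy, timeout=None) as client:
        async with client.stream("GET", url) as response:
            if response.status_code != 200:
                return
            with open(save_path, "wb") as f:
                # aiter_bytes() yields chunks until the stream ends, so there is
                # no manual read()/empty-chunk check as with aiohttp.
                async for chunk in response.aiter_bytes(64 * 1024):
                    f.write(chunk)


# Placeholder URL and file name, for illustration only.
asyncio.run(download("https://example.com/live/stream.flv", "stream.flv"))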