 
 Class which handles all requests made throughout the library.
 """
-from aiohttp.client_exceptions import ClientHttpProxyError, \
-    ServerDisconnectedError, \
-    ClientProxyConnectionError, \
+from ssl import SSLCertVerificationError, SSLError
+from aiohttp import ClientSession, ClientTimeout
+from aiohttp.client_exceptions import ServerDisconnectedError, \
     ClientResponseError, \
     ClientOSError, \
-    ServerTimeoutError, \
-    InvalidURL
-from aiosocks2.errors import SocksError
-from asyncio import gather, TimeoutError
+    InvalidURL, \
+    ConnectionTimeoutError
+from aiohttp_socks import ProxyConnectionError, ProxyConnector, ProxyError
+from asyncio import IncompleteReadError, gather, TimeoutError
 from json import dumps
 
 from .scrapers import proxy_list
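Note: this commit swaps aiosocks2 for aiohttp_socks. aiohttp's built-in proxy= argument only speaks HTTP proxies, so rather than routing everything through one shared session, each request now builds a short-lived ClientSession on top of a ProxyConnector, which handles socks4, socks5 and http schemes alike. A minimal sketch of the pattern (target URL and timeout values are illustrative, not from this commit):

    from aiohttp import ClientSession, ClientTimeout
    from aiohttp_socks import ProxyConnector

    async def fetch_via_proxy(proxy_url: str, target: str) -> int:
        # Each request gets its own session bound to exactly one proxy
        connector = ProxyConnector.from_url(proxy_url)  # e.g. 'socks5://1.2.3.4:1080'
        timeout = ClientTimeout(total=10, connect=10)
        async with ClientSession(connector=connector, timeout=timeout) as session:
            async with session.get(target) as response:
                return response.status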
@@ -79,12 +79,9 @@ async def _request_proxy_page(self, url, session) -> None:
             else:
                 self.proxies.put(row)
 
-    async def _test_all_proxies(self, session):
+    async def _test_all_proxies(self):
         """
         Use asyncio.gather to run multiple requests concurrently by executing `self._test_proxy_link`.
-
-        Args:
-            session: aiohttp session without proxy support
         """
         # Use asyncio.gather to run multiple tests concurrently
         to_filter = []
@@ -95,10 +92,10 @@ async def _test_all_proxies(self, session):
 
         # Remove duplicate entries
         to_filter = [dict(x) for x in list(set([tuple(item.items()) for item in to_filter]))]
-        tasks = [self._test_proxy_link(proxy['proxy'], proxy, session) for proxy in to_filter]
+        tasks = [self._test_proxy_link(proxy['proxy'], proxy) for proxy in to_filter]
         await gather(*tasks)
 
-    async def _test_proxy_link(self, proxy_url, data, session) -> None:
+    async def _test_proxy_link(self, proxy_url, data) -> None:
         """
         Asynchronously call gg.my-dev.app, a website built by the creator of this package.
         If the connection was successful, the proxy works!
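Note on the dedup one-liner above: dicts are not hashable, so each proxy dict is round-tripped through a tuple of its items to pass through set() and back. A standalone illustration with made-up values:

    rows = [{'proxy': 'socks4://1.2.3.4:1080'}, {'proxy': 'socks4://1.2.3.4:1080'}]
    deduped = [dict(t) for t in set(tuple(row.items()) for row in rows)]
    assert deduped == [{'proxy': 'socks4://1.2.3.4:1080'}]

This relies on duplicate rows listing their keys in the same insertion order, which holds when every row follows the same scraper schema.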
@@ -109,31 +106,48 @@ async def _test_proxy_link(self, proxy_url, data, session) -> None:
             proxy_url: The URL of the proxy to be tested.
             data: Additional data for the proxy test.
         """
+        # If the port is empty, assume port 80
+        if data['port'] == '':
+            data['port'] = '80'
+        # Make sure the port is in range
+        if int(data['port']) < 0 or int(data['port']) > 65535: return
         try:
-            async with session.post(
-                'https://gg.my-dev.app/api/v1/proxies/validate/lib',
-                proxy=proxy_url,
-                headers={
-                    **self._get_header(),
-                    'Content-Type': 'application/json'
-                },
-                data=dumps(data)
-            ) as response:
-                if response.status == 200:
-                    self.proxies.put(data)
-                    self._filtered_available = self._filtered_available + 1
-                else:
-                    self._filtered_failed = self._filtered_failed + 1
+            self.logger.debug(f'[aProxyRelay] Processing: {proxy_url} -> Added to queue')
+            connector = ProxyConnector.from_url(proxy_url.replace('unknown', 'socks4'))
+            timeout = ClientTimeout(total=self.timeout, connect=self.timeout)
+            async with ClientSession(connector=connector, timeout=timeout) as session:
+                async with session.post(
+                    'https://gg.my-dev.app/api/v1/proxies/validate/lib',
+                    headers={
+                        **self._get_header(),
+                        'Content-Type': 'application/json'
+                    },
+                    data=dumps(data)
+                ) as response:
+                    if response.status == 200:
+                        self.proxies.put(data)
+                        self._filtered_available = self._filtered_available + 1
+                        self.logger.debug(f'[aProxyRelay] Success: {proxy_url} -> Freshly Discovered')
+                    else:
+                        self._filtered_failed = self._filtered_failed + 1
+                        self.logger.debug(f'[aProxyRelay] Success: {proxy_url} -> Address Known')
         except (
-            ClientHttpProxyError,
-            ServerDisconnectedError,
-            ClientProxyConnectionError,
-            ClientResponseError,
             ClientOSError,
-            ServerTimeoutError,
             InvalidURL,
             ConnectionResetError,
-        ):
+            ProxyError,
+            SSLCertVerificationError,
+            ProxyConnectionError,
+            ConnectionTimeoutError,
+            IncompleteReadError,
+            UnicodeEncodeError,
+            SSLError,
+            ConnectionAbortedError,
+            ServerDisconnectedError,
+            ClientResponseError,
+            TimeoutError
+        ) as e:
+            self.logger.debug(f'[aProxyRelay] Failed: {proxy_url} -> {repr(e)}')
             self._filtered_failed = self._filtered_failed + 1
 
     async def _fetch_proxy_servers(self, urls, session):
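Two details of the new validation path are worth noting. ProxyConnector.from_url() needs an explicit scheme, and scraped entries apparently may carry 'unknown' as a placeholder, so the .replace('unknown', 'socks4') call falls back to treating those as SOCKS4. The ClientTimeout caps both connection setup and the whole request with self.timeout, so a dead proxy fails fast instead of stalling the gather() batch. A standalone sketch (values illustrative):

    from aiohttp import ClientTimeout
    from aiohttp_socks import ProxyConnector

    proxy_url = 'unknown://1.2.3.4:1080'  # hypothetical scraped entry
    connector = ProxyConnector.from_url(proxy_url.replace('unknown', 'socks4'))
    # total caps the entire request; connect caps connection establishment
    timeout = ClientTimeout(total=5, connect=5)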
@@ -172,7 +186,7 @@ async def _request_proxy_servers(self, url, session) -> None:
             self.proxies.put(row)
             self._filtered_ggs = self._filtered_ggs + 1
 
-    async def _obtain_targets(self, proxy_url, target, session) -> None:
+    async def _obtain_targets(self, proxy_url, target) -> None:
         """
         Asynchronously fetch the targets with our proxies.
         The 'steam' variable should be defaulted to False and should only be used when targeting Steam.
@@ -182,37 +196,44 @@ async def _obtain_targets(self, proxy_url, target, session) -> None:
             proxy_url: The URL of the proxy to be used for the request.
         """
         try:
-            async with session.get(
-                target,
-                proxy=proxy_url,
-                headers={
-                    **self._get_header(),
-                    'Content-Type': 'application/json'
-                },
-            ) as response:
-                status = response.status
-                if status in (200, 202,):
-                    self.proxies.put(proxy_url)
-                    data = await response.json()
-                    if data:
-                        if pack := self.unpack(data, target):
-                            self._queue_result.put(pack)
-                        else:
-                            self.logger.warning(f'[aProxyRelay] Could not unpack data for: {target}')
-                    else:
-                        self.logger.warning(f'[aProxyRelay] Target {target} Data seems to be None: {data}')
-                else:
-                    self._queue_target_process.put(target)
-
+            connector = ProxyConnector.from_url(proxy_url.replace('unknown', 'socks4'))
+            timeout = ClientTimeout(total=self.timeout, connect=self.timeout)
+            async with ClientSession(connector=connector, timeout=timeout) as session:
+                async with session.get(
+                    target,
+                    headers={
+                        **self._get_header(),
+                        'Content-Type': 'application/json'
+                    },
+                ) as response:
+                    status = response.status
+                    if status in (200, 202,):
+                        self.proxies.put(proxy_url)
+                        data = await response.json()
+                        if data:
+                            if pack := self.unpack(data, target):
+                                self._queue_result.put(pack)
+                            else:
+                                self.logger.warning(f'[aProxyRelay] Could not unpack data for: {target}')
+                        else:
+                            self.logger.warning(f'[aProxyRelay] Target {target} Data seems to be None: {data}')
+                    else:
+                        self._queue_target_process.put(target)
         except (
-            ClientHttpProxyError,
-            ServerDisconnectedError,
-            ClientProxyConnectionError,
-            ClientResponseError,
             ClientOSError,
-            ServerTimeoutError,
             InvalidURL,
-            SocksError,
-            TimeoutError,
-        ):
+            ConnectionResetError,
+            ProxyError,
+            SSLCertVerificationError,
+            ProxyConnectionError,
+            ConnectionTimeoutError,
+            IncompleteReadError,
+            UnicodeEncodeError,
+            SSLError,
+            ConnectionAbortedError,
+            ServerDisconnectedError,
+            ClientResponseError,
+            TimeoutError
+        ) as e:
+            self.logger.debug(f'[aProxyRelay] Failed: {target} -> {repr(e)}')
             self._queue_target_process.put(target)
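Note: on any of the exceptions above, the target is pushed back onto _queue_target_process instead of being dropped, so it can be retried through a different proxy. The driving loop is not part of this commit; a minimal sketch of that idea (assumed, not the library's actual code):

    while not self._queue_target_process.empty():
        target = self._queue_target_process.get()
        proxy_url = self.proxies.get()  # next known-working proxy
        await self._obtain_targets(proxy_url, target)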