def user_agent_extended(be_geo_id, caller):
    """Generate the extended "Caller" portion of the User-Agent.

    Identity precedence: an explicit ``caller`` wins, then ``be_geo_id``,
    then the literal ``"unidentified"``.

    Args:
        be_geo_id: Branding/geo identifier, used only when no caller is given.
        caller: Explicit caller identifier.

    Returns:
        JSON-encoded (hence double-quoted) string of the form
        ``"Caller/(<identity>)"``.
    """
    # First truthy value wins; empty strings and None fall through.
    identity = caller or be_geo_id or "unidentified"

    # NOTE(review): json.dumps on a plain str wraps it in literal double
    # quotes; kept as-is because downstream consumers expect this format.
    return json.dumps(f'Caller/({identity})')
5631# Main module interface
@@ -254,51 +229,57 @@ def request(self, metadata, method, url, **kwargs):
254229
255230 # 4XX errors
256231 else :
257- try :
258- message = response .json ()
259- message_is_dict = True
260- except ValueError :
261- message = response .content [:100 ]
262- message_is_dict = False
263-
264- # Check for specific concurrency errors
265- network_delete_concurrency_error_text = 'This may be due to concurrent requests to delete networks.'
266- action_batch_concurrency_error = {'errors' : [
267- 'Too many concurrently executing batches. Maximum is 5 confirmed but not yet executed batches.' ]
268- }
269- # Check specifically for network delete concurrency error
270- if message_is_dict and 'errors' in message .keys () \
271- and network_delete_concurrency_error_text in message ['errors' ][0 ]:
272- wait = random .randint (30 , self ._network_delete_retry_wait_time )
273- if self ._logger :
274- self ._logger .warning (f'{ tag } , { operation } - { status } { reason } , retrying in { wait } seconds' )
275- time .sleep (wait )
276- retries -= 1
277- if retries == 0 :
278- raise APIError (metadata , response )
279- # Check specifically for action batch concurrency error
280- elif message == action_batch_concurrency_error :
281- wait = self ._action_batch_retry_wait_time
282- if self ._logger :
283- self ._logger .warning (f'{ tag } , { operation } - { status } { reason } , retrying in { wait } seconds' )
284- time .sleep (wait )
285- retries -= 1
286- if retries == 0 :
287- raise APIError (metadata , response )
288- elif self ._retry_4xx_error :
289- wait = random .randint (1 , self ._retry_4xx_error_wait_time )
290- if self ._logger :
291- self ._logger .warning (f'{ tag } , { operation } - { status } { reason } , retrying in { wait } seconds' )
292- time .sleep (wait )
293- retries -= 1
294- if retries == 0 :
295- raise APIError (metadata , response )
232+ retries = self .handle_4xx_errors (metadata , operation , reason , response , retries , status , tag )
233+
    def handle_4xx_errors(self, metadata, operation, reason, response, retries, status, tag):
        """Handle a 4XX response: retry known concurrency errors and,
        optionally, generic 4XX errors; raise APIError otherwise or once
        retries run out.

        Args:
            metadata: Request metadata dict (passed through to APIError).
            operation: Operation name, used in log messages.
            reason: HTTP reason phrase, used in log messages.
            response: The HTTP response object (must provide .json() and .content).
            retries: Remaining retry budget; decremented on each retry.
            status: HTTP status code, used in log messages.
            tag: Log tag identifying the caller.

        Returns:
            The (possibly decremented) remaining retry count.

        Raises:
            APIError: when retries reach zero, or for any 4XX error that is
                not retried.
        """
        # Body may not be JSON; fall back to the first 100 bytes of raw content.
        try:
            message = response.json()
            message_is_dict = True
        except ValueError:
            message = response.content[:100]
            message_is_dict = False
        # Check for specific concurrency errors
        network_delete_concurrency_error_text = 'delete or combine networks'
        action_batch_concurrency_error_text = 'executing batches'

        # Check specifically for concurrency errors
        if message_is_dict and 'errors' in message.keys():
            # Substring match: any error mentioning network delete/combine
            # or action-batch concurrency triggers the dedicated retry path.
            network_deletion_errors = [error for error in message['errors'] if network_delete_concurrency_error_text in
                                       error]
            action_batch_errors = [error for error in message['errors'] if action_batch_concurrency_error_text in error]

            if network_deletion_errors:
                # Randomized wait (>= 30 s) spreads out competing delete retries.
                wait = random.randint(30, self._network_delete_retry_wait_time)
                if self._logger:
                    self._logger.warning(f'{tag}, {operation} - {status} {reason}, retrying in {wait} seconds')
                time.sleep(wait)
                retries -= 1
                if retries == 0:
                    raise APIError(metadata, response)
            elif action_batch_errors:
                # Fixed wait for the action-batch concurrency limit.
                wait = self._action_batch_retry_wait_time
                if self._logger:
                    self._logger.warning(f'{tag}, {operation} - {status} {reason}, retrying in {wait} seconds')
                time.sleep(wait)
                retries -= 1
                if retries == 0:
                    raise APIError(metadata, response)

        # Optional generic 4XX retry (disabled by default).
        # NOTE(review): when a concurrency retry was just scheduled above and
        # _retry_4xx_error is False, control falls into the else below and
        # raises immediately — confirm this interaction is intended.
        if self._retry_4xx_error:
            wait = random.randint(1, self._retry_4xx_error_wait_time)
            if self._logger:
                self._logger.warning(f'{tag}, {operation} - {status} {reason}, retrying in {wait} seconds')
            time.sleep(wait)
            retries -= 1
            if retries == 0:
                raise APIError(metadata, response)

        # All other client-side errors
        else:
            if self._logger:
                self._logger.error(f'{tag}, {operation} - {status} {reason}, {message}')
            raise APIError(metadata, response)
        return retries
302283
303284 def get (self , metadata , url , params = None ):
304285 metadata ['method' ] = 'GET'
@@ -375,12 +356,12 @@ def _get_pages_iterator(
375356
376357 return_items = []
377358 # just prepare the list
378- if type (results ) == list :
359+ if isinstance (results , list ) :
379360 return_items = results
380- elif type (results ) == dict and "items" in results :
361+ elif isinstance (results , dict ) and "items" in results :
381362 return_items = results ["items" ]
382363 # For event log endpoint
383- elif type (results ) == dict :
364+ elif isinstance (results , dict ) :
384365 if direction == "next" :
385366 return_items = results ["events" ][::- 1 ]
386367 else :
@@ -395,9 +376,9 @@ def _get_pages_iterator(
395376 response = self .request (metadata , 'GET' , nextlink )
396377
397378 def _get_pages_legacy (self , metadata , url , params = None , total_pages = - 1 , direction = 'next' , event_log_end_time = None ):
398- if type (total_pages ) == str and total_pages .lower () == 'all' :
379+ if isinstance (total_pages , str ) and total_pages .lower () == 'all' :
399380 total_pages = - 1
400- elif type (total_pages ) == str and total_pages .isnumeric ():
381+ elif isinstance (total_pages , str ) and total_pages .isnumeric ():
401382 total_pages = int (total_pages )
402383 metadata ['page' ] = 1
403384
@@ -410,7 +391,7 @@ def _get_pages_legacy(self, metadata, url, params=None, total_pages=-1, directio
410391 results = response .json ()
411392
412393 # For event log endpoint when using 'next' direction, so results/events are sorted chronologically
413- if type (results ) == dict and metadata ['operation' ] == 'getNetworkEvents' and direction == 'next' :
394+ if isinstance (results , dict ) and metadata ['operation' ] == 'getNetworkEvents' and direction == 'next' :
414395 results ['events' ] = results ['events' ][::- 1 ]
415396
416397 # Get additional pages if more than one requested
0 commit comments