|
1 | 1 | import json |
2 | 2 | import logging |
| 3 | +import warnings |
3 | 4 | import os |
4 | 5 | import re |
5 | 6 | from datetime import datetime |
@@ -309,10 +310,11 @@ def _error_body(resp: requests.Response): |
309 | 310 | status codes, returns the raw response text. |
310 | 311 | """ |
311 | 312 | if resp.status_code == 429: |
312 | | - return resp.json().get("error", {}).get("message") |
| 313 | + return "429: Too many requests made. Please obtain an API token or try again later." |
313 | 314 | elif resp.status_code == 403: |
314 | | - return "Query request denied. Possible reasons include query exceeding server limits." |
315 | | - return resp.text |
| 315 | + return "403: Query request denied. Possible reasons include query exceeding server limits." |
 | 316 | + body = resp.json()
 | 317 | + return f"{resp.status_code}: {body.get('code', 'Unknown type')}. {body.get('description', 'No description provided')}."
316 | 318 |
|
317 | 319 |
|
318 | 320 | def _construct_api_requests( |
@@ -574,12 +576,15 @@ def _walk_pages( |
574 | 576 | data=content if method == "POST" else None, |
575 | 577 | ) |
576 | 578 | if resp.status_code != 200: |
577 | | - raise Exception(_error_body(resp)) |
| 579 | + error_text = _error_body(resp) |
| 580 | + raise Exception(error_text) |
578 | 581 | df1 = _get_resp_data(resp, geopd=geopd) |
579 | 582 | dfs = pd.concat([dfs, df1], ignore_index=True) |
580 | 583 | curr_url = _next_req_url(resp) |
581 | | - except Exception:
582 | | - logger.info("Request failed for URL: %s. Stopping pagination and data download.", curr_url)
| 584 | + except Exception as exc:
| 585 | + warnings.warn(f"{exc}. Data request incomplete.")
| 586 | + logger.error("Request incomplete. %s", exc)
| 587 | + logger.warning("Request failed for URL: %s. Data download interrupted.", curr_url)
583 | 588 | curr_url = None |
584 | 589 | return dfs, initial_response |
585 | 590 | finally: |
|
0 commit comments