Mirror of https://github.com/dgtlmoon/changedetection.io.git
Compare commits

1 commit: 0.50.14 ... puppeteer-
| Author | SHA1 | Date |
|---|---|---|
|  | bd5bbd8c5c |  |
```diff
@@ -36,6 +36,13 @@ class BrowserConnectError(Exception):
         logger.error(f"Browser connection error {msg}")
         return
 
+class BrowserFetchTimedOut(Exception):
+    msg = ''
+    def __init__(self, msg):
+        self.msg = msg
+        logger.error(f"Browser processing took too long - {msg}")
+        return
+
 class BrowserStepsStepException(Exception):
     def __init__(self, step_n, original_e):
         self.step_n = step_n
```
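For orientation, a small usage sketch of the class added above (the example message is invented, not from the diff): because the message is logged inside `__init__`, merely constructing the exception emits the loguru error line, while `.msg` stays available for whoever catches it.

```python
from loguru import logger

class BrowserFetchTimedOut(Exception):
    msg = ''
    def __init__(self, msg):
        self.msg = msg
        # Logging here records the error the moment the exception is
        # constructed, even if a caller later swallows it.
        logger.error(f"Browser processing took too long - {msg}")

try:
    raise BrowserFetchTimedOut(msg="example: page exceeded its processing budget")
except BrowserFetchTimedOut as e:
    print(e.msg)  # the same text that was just logged
```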
```diff
@@ -6,7 +6,7 @@ from urllib.parse import urlparse
 
 from loguru import logger
 from changedetectionio.content_fetchers.base import Fetcher
-from changedetectionio.content_fetchers.exceptions import PageUnloadable, Non200ErrorCodeReceived, EmptyReply, ScreenshotUnavailable, BrowserConnectError
+from changedetectionio.content_fetchers.exceptions import PageUnloadable, Non200ErrorCodeReceived, EmptyReply, BrowserFetchTimedOut, BrowserConnectError
 
 
 class fetcher(Fetcher):
@@ -221,14 +221,21 @@ class fetcher(Fetcher):
     def run(self, url, timeout, request_headers, request_body, request_method, ignore_status_codes=False,
             current_include_filters=None, is_binary=False):
 
+        #@todo make update_worker async which could run any of these content_fetchers within memory and time constraints
+        max_time = os.getenv('PUPPETEER_MAX_PROCESSING_TIMEOUT_SECONDS', 180)
+
         # This will work in 3.10 but not >= 3.11 because 3.11 wants tasks only
-        asyncio.run(self.main(
-            url=url,
-            timeout=timeout,
-            request_headers=request_headers,
-            request_body=request_body,
-            request_method=request_method,
-            ignore_status_codes=ignore_status_codes,
-            current_include_filters=current_include_filters,
-            is_binary=is_binary
-        ))
+        try:
+            asyncio.run(asyncio.wait_for(self.main(
+                url=url,
+                timeout=timeout,
+                request_headers=request_headers,
+                request_body=request_body,
+                request_method=request_method,
+                ignore_status_codes=ignore_status_codes,
+                current_include_filters=current_include_filters,
+                is_binary=is_binary
+            ), timeout=max_time))
+        except asyncio.TimeoutError:
+            raise(BrowserFetchTimedOut(msg=f"Browser connected but was unable to process the page in {max_time} seconds."))
+
```
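A standalone sketch of the hard-timeout pattern introduced above, assuming a stand-in coroutine `slow_fetch` in place of `fetcher.main()`; the explicit `int()` cast is an addition here, since `os.getenv` returns a string whenever the variable is actually set:

```python
import asyncio
import os

class BrowserFetchTimedOut(Exception):
    def __init__(self, msg):
        self.msg = msg

# Stand-in for fetcher.main(); any long-running coroutine fits here.
async def slow_fetch(url):
    await asyncio.sleep(2)
    return f"fetched {url}"

def run(url):
    # os.getenv returns a str when the variable is set, so cast explicitly;
    # the 180-second default mirrors the cap used in the diff.
    max_time = int(os.getenv('PUPPETEER_MAX_PROCESSING_TIMEOUT_SECONDS', 180))
    try:
        # wait_for cancels the coroutine once max_time elapses, putting a hard
        # upper bound on the whole fetch regardless of per-step timeouts.
        return asyncio.run(asyncio.wait_for(slow_fetch(url), timeout=max_time))
    except asyncio.TimeoutError:
        raise BrowserFetchTimedOut(
            msg=f"Browser connected but was unable to process the page in {max_time} seconds.")

if __name__ == "__main__":
    print(run("https://example.com"))
```

The point of the design is that `asyncio.wait_for` bounds the whole fetch, independently of any per-navigation timeout the browser itself applies.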
```diff
@@ -369,6 +369,12 @@ class update_worker(threading.Thread):
                                                                 }
                                                     )
                         process_changedetection_results = False
+                    except content_fetchers.exceptions.BrowserFetchTimedOut as e:
+                        self.datastore.update_watch(uuid=uuid,
+                                                    update_obj={'last_error': e.msg
+                                                                }
+                                                    )
+                        process_changedetection_results = False
                     except content_fetchers.exceptions.BrowserStepsStepException as e:
 
                         if not self.datastore.data['watching'].get(uuid):
```
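And a compressed sketch of the worker-side handling, with a hypothetical `FakeDatastore` standing in for the real datastore and `failing_fetch` simulating a timed-out fetch:

```python
class BrowserFetchTimedOut(Exception):
    def __init__(self, msg):
        self.msg = msg

class FakeDatastore:
    """Hypothetical stand-in for the real datastore; it only records updates."""
    def __init__(self):
        self.watches = {}
    def update_watch(self, uuid, update_obj):
        self.watches.setdefault(uuid, {}).update(update_obj)

def process_watch(uuid, datastore, fetch):
    """Run one fetch; on a browser-fetch timeout, record the message and skip result processing."""
    process_changedetection_results = True
    try:
        fetch()
    except BrowserFetchTimedOut as e:
        # Mirrors the worker change above: e.msg becomes the watch's
        # last_error and this run's results are not processed further.
        datastore.update_watch(uuid=uuid, update_obj={'last_error': e.msg})
        process_changedetection_results = False
    return process_changedetection_results

if __name__ == "__main__":
    ds = FakeDatastore()
    def failing_fetch():
        raise BrowserFetchTimedOut(msg="Browser connected but was unable to process the page in 180 seconds.")
    print(process_watch("watch-uuid-1", ds, failing_fetch))  # False
    print(ds.watches["watch-uuid-1"]["last_error"])
```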