mirror of
				https://github.com/dgtlmoon/changedetection.io.git
				synced 2025-10-30 22:27:52 +00:00 
			
		
		
		
	Compare commits
	
		
			34 Commits
		
	
	
		
			browserste
			...
			pluggy-2
		
	
	| Author | SHA1 | Date | |
|---|---|---|---|
|   | 65bc76f11b | ||
|   | 1aa0070ae2 | ||
|   | 0ab3a83a11 | ||
|   | 677082723c | ||
|   | 96793890f8 | ||
|   | 0439155127 | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | 29ca2521eb | ||
|   | 7d67ad057c | ||
|   | 2e88872b7e | ||
|   | b30b718373 | ||
|   | 402f1e47e7 | ||
|   | 42c6f8fc37 | ||
|   | 06744dbd3a | ||
|   | c6433815e4 | ||
|   | ce97d67ecf | ||
|   | 25778a8102 | ||
|   | b88998feea | ||
|   | 9510345e01 | ||
|   | 36085d8cf4 | ||
|   | 494740e3f8 | ||
|   | 399cdf0fbf | ||
|   | 4be0fafa93 | ||
|   | 2769abf374 | ||
|   | 690b16b710 | ||
|   | 8563126287 | ||
|   | f6c667b0a8 | ||
|   | 774923f67d | ||
|   | 432ee1236d | ||
|   | 51ce7ac66e | ||
|   | c3d825f38c | ||
|   | fc3c4b804d | ||
|   | 1749c07750 | ||
|   | 65428655b8 | ||
|   | 8be0029260 | 
							
								
								
									
										10
									
								
								.github/dependabot.yml
									
									
									
									
										vendored
									
									
										Normal file
									
								
							
							
						
						
									
										10
									
								
								.github/dependabot.yml
									
									
									
									
										vendored
									
									
										Normal file
									
								
							| @@ -0,0 +1,10 @@ | ||||
| version: 2 | ||||
| updates: | ||||
|   - package-ecosystem: github-actions | ||||
|     directory: / | ||||
|     schedule: | ||||
|       interval: "weekly" | ||||
|     groups: | ||||
|       all: | ||||
|         patterns: | ||||
|         - "*" | ||||
							
								
								
									
										6
									
								
								.github/workflows/codeql-analysis.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										6
									
								
								.github/workflows/codeql-analysis.yml
									
									
									
									
										vendored
									
									
								
							| @@ -34,7 +34,7 @@ jobs: | ||||
|  | ||||
|     # Initializes the CodeQL tools for scanning. | ||||
|     - name: Initialize CodeQL | ||||
|       uses: github/codeql-action/init@v2 | ||||
|       uses: github/codeql-action/init@v3 | ||||
|       with: | ||||
|         languages: ${{ matrix.language }} | ||||
|         # If you wish to specify custom queries, you can do so here or in a config file. | ||||
| @@ -45,7 +45,7 @@ jobs: | ||||
|     # Autobuild attempts to build any compiled languages  (C/C++, C#, or Java). | ||||
|     # If this step fails, then you should remove it and run the build manually (see below) | ||||
|     - name: Autobuild | ||||
|       uses: github/codeql-action/autobuild@v2 | ||||
|       uses: github/codeql-action/autobuild@v3 | ||||
|  | ||||
|     # ℹ️ Command-line programs to run using the OS shell. | ||||
|     # 📚 https://git.io/JvXDl | ||||
| @@ -59,4 +59,4 @@ jobs: | ||||
|     #   make release | ||||
|  | ||||
|     - name: Perform CodeQL Analysis | ||||
|       uses: github/codeql-action/analyze@v2 | ||||
|       uses: github/codeql-action/analyze@v3 | ||||
|   | ||||
							
								
								
									
										2
									
								
								.github/workflows/containers.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										2
									
								
								.github/workflows/containers.yml
									
									
									
									
										vendored
									
									
								
							| @@ -41,7 +41,7 @@ jobs: | ||||
|     steps: | ||||
|       - uses: actions/checkout@v4 | ||||
|       - name: Set up Python 3.11 | ||||
|         uses: actions/setup-python@v4 | ||||
|         uses: actions/setup-python@v5 | ||||
|         with: | ||||
|           python-version: 3.11 | ||||
|  | ||||
|   | ||||
							
								
								
									
										8
									
								
								.github/workflows/pypi-release.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										8
									
								
								.github/workflows/pypi-release.yml
									
									
									
									
										vendored
									
									
								
							| @@ -9,7 +9,7 @@ jobs: | ||||
|     steps: | ||||
|     - uses: actions/checkout@v4 | ||||
|     - name: Set up Python | ||||
|       uses: actions/setup-python@v4 | ||||
|       uses: actions/setup-python@v5 | ||||
|       with: | ||||
|         python-version: "3.x" | ||||
|     - name: Install pypa/build | ||||
| @@ -21,7 +21,7 @@ jobs: | ||||
|     - name: Build a binary wheel and a source tarball | ||||
|       run: python3 -m build | ||||
|     - name: Store the distribution packages | ||||
|       uses: actions/upload-artifact@v3 | ||||
|       uses: actions/upload-artifact@v4 | ||||
|       with: | ||||
|         name: python-package-distributions | ||||
|         path: dist/ | ||||
| @@ -34,7 +34,7 @@ jobs: | ||||
|     - build | ||||
|     steps: | ||||
|     - name: Download all the dists | ||||
|       uses: actions/download-artifact@v3 | ||||
|       uses: actions/download-artifact@v4 | ||||
|       with: | ||||
|         name: python-package-distributions | ||||
|         path: dist/ | ||||
| @@ -64,7 +64,7 @@ jobs: | ||||
|  | ||||
|     steps: | ||||
|     - name: Download all the dists | ||||
|       uses: actions/download-artifact@v3 | ||||
|       uses: actions/download-artifact@v4 | ||||
|       with: | ||||
|         name: python-package-distributions | ||||
|         path: dist/ | ||||
|   | ||||
							
								
								
									
										2
									
								
								.github/workflows/test-container-build.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										2
									
								
								.github/workflows/test-container-build.yml
									
									
									
									
										vendored
									
									
								
							| @@ -26,7 +26,7 @@ jobs: | ||||
|     steps: | ||||
|         - uses: actions/checkout@v4 | ||||
|         - name: Set up Python 3.11 | ||||
|           uses: actions/setup-python@v4 | ||||
|           uses: actions/setup-python@v5 | ||||
|           with: | ||||
|             python-version: 3.11 | ||||
|  | ||||
|   | ||||
							
								
								
									
										25
									
								
								.github/workflows/test-only.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										25
									
								
								.github/workflows/test-only.yml
									
									
									
									
										vendored
									
									
								
							| @@ -11,7 +11,7 @@ jobs: | ||||
|  | ||||
|       # Mainly just for lint/flake8 | ||||
|       - name: Set up Python 3.11 | ||||
|         uses: actions/setup-python@v4 | ||||
|         uses: actions/setup-python@v5 | ||||
|         with: | ||||
|           python-version: '3.11' | ||||
|  | ||||
| @@ -38,7 +38,7 @@ jobs: | ||||
|       - name: Build changedetection.io container for testing | ||||
|         run: |          | ||||
|           # Build a changedetection.io container and start testing inside | ||||
|           docker build . -t test-changedetectionio | ||||
|           docker build --build-arg LOGGER_LEVEL=TRACE -t test-changedetectionio . | ||||
|           # Debug info | ||||
|           docker run test-changedetectionio  bash -c 'pip list' | ||||
|  | ||||
| @@ -50,10 +50,15 @@ jobs: | ||||
|       - name: Test built container with pytest | ||||
|         run: | | ||||
|           # Unit tests | ||||
|           echo "run test with unittest" | ||||
|           docker run test-changedetectionio  bash -c 'python3 -m unittest changedetectionio.tests.unit.test_notification_diff' | ||||
|           docker run test-changedetectionio  bash -c 'python3 -m unittest changedetectionio.tests.unit.test_watch_model' | ||||
|            | ||||
|           # All tests | ||||
|           echo "run test with pytest" | ||||
|           # The default pytest logger_level is TRACE | ||||
|           # To change logger_level for pytest(test/conftest.py), | ||||
|           # append the docker option. e.g. '-e LOGGER_LEVEL=DEBUG' | ||||
|           docker run --network changedet-network  test-changedetectionio  bash -c 'cd changedetectionio && ./run_basic_tests.sh' | ||||
|  | ||||
|       - name: Test built container selenium+browserless/playwright | ||||
| @@ -105,6 +110,13 @@ jobs: | ||||
|            | ||||
|           # and IPv6 | ||||
|           curl -s -g -6 "http://[::1]:5556"|grep -q checkbox-uuid | ||||
|  | ||||
|           # Check whether TRACE log is enabled. | ||||
|           # Also, check whether TRACE came from STDERR | ||||
|           docker logs test-changedetectionio 2>&1 1>/dev/null | grep 'TRACE log is enabled' || exit 1 | ||||
|           # Check whether DEBUG came from STDOUT | ||||
|           docker logs test-changedetectionio 2>/dev/null | grep 'DEBUG' || exit 1 | ||||
|  | ||||
|           docker kill test-changedetectionio | ||||
|  | ||||
|       - name: Test changedetection.io SIGTERM and SIGINT signal shutdown | ||||
| @@ -118,8 +130,9 @@ jobs: | ||||
|           sleep 3 | ||||
|           # invert the check (it should be not 0/not running) | ||||
|           docker ps | ||||
|           # check signal catch(STDOUT) log | ||||
|           docker logs sig-test | grep 'Shutdown: Got Signal - SIGINT' || exit 1 | ||||
|           # check signal catch(STDERR) log. Because of | ||||
|           # changedetectionio/__init__.py: logger.add(sys.stderr, level=logger_level) | ||||
|           docker logs sig-test 2>&1 | grep 'Shutdown: Got Signal - SIGINT' || exit 1 | ||||
|           test -z "`docker ps|grep sig-test`" | ||||
|           if [ $? -ne 0 ] | ||||
|           then | ||||
| @@ -139,7 +152,9 @@ jobs: | ||||
|           sleep 3 | ||||
|           # invert the check (it should be not 0/not running) | ||||
|           docker ps | ||||
|           docker logs sig-test | grep 'Shutdown: Got Signal - SIGTERM' || exit 1 | ||||
|           # check signal catch(STDERR) log. Because of | ||||
|           # changedetectionio/__init__.py: logger.add(sys.stderr, level=logger_level) | ||||
|           docker logs sig-test 2>&1 | grep 'Shutdown: Got Signal - SIGTERM' || exit 1 | ||||
|           test -z "`docker ps|grep sig-test`" | ||||
|           if [ $? -ne 0 ] | ||||
|           then | ||||
|   | ||||
| @@ -58,6 +58,11 @@ COPY changedetectionio /app/changedetectionio | ||||
| # Starting wrapper | ||||
| COPY changedetection.py /app/changedetection.py | ||||
|  | ||||
| # GitHub Actions test purpose (test-only.yml). | ||||
| # On production, it is effectively LOGGER_LEVEL=''. | ||||
| ARG LOGGER_LEVEL='' | ||||
| ENV LOGGER_LEVEL "$LOGGER_LEVEL" | ||||
|  | ||||
| WORKDIR /app | ||||
| CMD ["python", "./changedetection.py", "-d", "/datastore"] | ||||
|  | ||||
|   | ||||
| @@ -1,6 +1,7 @@ | ||||
| recursive-include changedetectionio/api * | ||||
| recursive-include changedetectionio/blueprint * | ||||
| recursive-include changedetectionio/model * | ||||
| recursive-include changedetectionio/plugins * | ||||
| recursive-include changedetectionio/processors * | ||||
| recursive-include changedetectionio/res * | ||||
| recursive-include changedetectionio/static * | ||||
|   | ||||
| @@ -249,7 +249,7 @@ Supports managing the website watch list [via our API](https://changedetection.i | ||||
| Do you use changedetection.io to make money? does it save you time or money? Does it make your life easier? less stressful? Remember, we write this software when we should be doing actual paid work, we have to buy food and pay rent just like you. | ||||
|  | ||||
|  | ||||
| Firstly, consider taking out a [change detection monthly subscription - unlimited checks and watches](https://changedetection.io?src=github) , even if you don't use it, you still get the warm fuzzy feeling of helping out the project. (And who knows, you might just use it!) | ||||
| Firstly, consider taking out an officially supported [website change detection subscription](https://changedetection.io?src=github) , even if you don't use it, you still get the warm fuzzy feeling of helping out the project. (And who knows, you might just use it!) | ||||
|  | ||||
| Or directly donate an amount PayPal [](https://www.paypal.com/donate/?hosted_button_id=7CP6HR9ZCNDYJ) | ||||
|  | ||||
| @@ -273,3 +273,9 @@ I offer commercial support, this software is depended on by network security, ae | ||||
| ## Third-party licenses | ||||
|  | ||||
| changedetectionio.html_tools.elementpath_tostring: Copyright (c), 2018-2021, SISSA (Scuola Internazionale Superiore di Studi Avanzati), Licensed under [MIT license](https://github.com/sissaschool/elementpath/blob/master/LICENSE) | ||||
|  | ||||
| ## Contributors | ||||
|  | ||||
| Recognition of fantastic contributors to the project | ||||
|  | ||||
| - Constantin Hong https://github.com/Constantin1489 | ||||
|   | ||||
| @@ -2,7 +2,7 @@ | ||||
|  | ||||
| # Read more https://github.com/dgtlmoon/changedetection.io/wiki | ||||
|  | ||||
| __version__ = '0.45.12' | ||||
| __version__ = '0.45.13' | ||||
|  | ||||
| from distutils.util import strtobool | ||||
| from json.decoder import JSONDecodeError | ||||
| @@ -17,6 +17,7 @@ import sys | ||||
|  | ||||
| from changedetectionio import store | ||||
| from changedetectionio.flask_app import changedetection_app | ||||
| from loguru import logger | ||||
|  | ||||
|  | ||||
| # Only global so we can access it in the signal handler | ||||
| @@ -28,9 +29,9 @@ def sigshutdown_handler(_signo, _stack_frame): | ||||
|     global app | ||||
|     global datastore | ||||
|     name = signal.Signals(_signo).name | ||||
|     print(f'Shutdown: Got Signal - {name} ({_signo}), Saving DB to disk and calling shutdown') | ||||
|     logger.critical(f'Shutdown: Got Signal - {name} ({_signo}), Saving DB to disk and calling shutdown') | ||||
|     datastore.sync_to_json() | ||||
|     print(f'Sync JSON to disk complete.') | ||||
|     logger.success('Sync JSON to disk complete.') | ||||
|     # This will throw a SystemExit exception, because eventlet.wsgi.server doesn't know how to deal with it. | ||||
|     # Solution: move to gevent or other server in the future (#2014) | ||||
|     datastore.stop_thread = True | ||||
| @@ -57,13 +58,22 @@ def main(): | ||||
|         datastore_path = os.path.join(os.getcwd(), "../datastore") | ||||
|  | ||||
|     try: | ||||
|         opts, args = getopt.getopt(sys.argv[1:], "6Ccsd:h:p:", "port") | ||||
|         opts, args = getopt.getopt(sys.argv[1:], "6Ccsd:h:p:l:", "port") | ||||
|     except getopt.GetoptError: | ||||
|         print('backend.py -s SSL enable -h [host] -p [port] -d [datastore path]') | ||||
|         print('backend.py -s SSL enable -h [host] -p [port] -d [datastore path] -l [debug level - TRACE, DEBUG(default), INFO, SUCCESS, WARNING, ERROR, CRITICAL]') | ||||
|         sys.exit(2) | ||||
|  | ||||
|     create_datastore_dir = False | ||||
|  | ||||
|     # Set a default logger level | ||||
|     logger_level = 'DEBUG' | ||||
|     # Set a logger level via shell env variable | ||||
|     # Used: Dockerfile for CICD | ||||
|     # To set logger level for pytest, see the app function in tests/conftest.py | ||||
|     if os.getenv("LOGGER_LEVEL"): | ||||
|         level = os.getenv("LOGGER_LEVEL") | ||||
|         logger_level = int(level) if level.isdigit() else level.upper() | ||||
|  | ||||
|     for opt, arg in opts: | ||||
|         if opt == '-s': | ||||
|             ssl_mode = True | ||||
| @@ -78,7 +88,7 @@ def main(): | ||||
|             datastore_path = arg | ||||
|  | ||||
|         if opt == '-6': | ||||
|             print ("Enabling IPv6 listen support") | ||||
|             logger.success("Enabling IPv6 listen support") | ||||
|             ipv6_enabled = True | ||||
|  | ||||
|         # Cleanup (remove text files that arent in the index) | ||||
| @@ -89,6 +99,25 @@ def main(): | ||||
|         if opt == '-C': | ||||
|             create_datastore_dir = True | ||||
|  | ||||
|         if opt == '-l': | ||||
|             logger_level = int(arg) if arg.isdigit() else arg.upper() | ||||
|  | ||||
|     # Without this, a logger will be duplicated | ||||
|     logger.remove() | ||||
|     try: | ||||
|         log_level_for_stdout = { 'DEBUG', 'SUCCESS' } | ||||
|         logger.configure(handlers=[ | ||||
|             {"sink": sys.stdout, "level": logger_level, | ||||
|              "filter" : lambda record: record['level'].name in log_level_for_stdout}, | ||||
|             {"sink": sys.stderr, "level": logger_level, | ||||
|              "filter": lambda record: record['level'].name not in log_level_for_stdout}, | ||||
|             ]) | ||||
|     # Catch negative number or wrong log level name | ||||
|     except ValueError: | ||||
|         print("Available log level names: TRACE, DEBUG(default), INFO, SUCCESS," | ||||
|               " WARNING, ERROR, CRITICAL") | ||||
|         sys.exit(2) | ||||
|  | ||||
|     # isn't there some @thingy to attach to each route to tell it that this route needs a datastore | ||||
|     app_config = {'datastore_path': datastore_path} | ||||
|  | ||||
| @@ -96,17 +125,19 @@ def main(): | ||||
|         if create_datastore_dir: | ||||
|             os.mkdir(app_config['datastore_path']) | ||||
|         else: | ||||
|             print( | ||||
|                 "ERROR: Directory path for the datastore '{}' does not exist, cannot start, please make sure the directory exists or specify a directory with the -d option.\n" | ||||
|                 "Or use the -C parameter to create the directory.".format(app_config['datastore_path']), file=sys.stderr) | ||||
|             logger.critical( | ||||
|                 f"ERROR: Directory path for the datastore '{app_config['datastore_path']}'" | ||||
|                 f" does not exist, cannot start, please make sure the" | ||||
|                 f" directory exists or specify a directory with the -d option.\n" | ||||
|                 f"Or use the -C parameter to create the directory.") | ||||
|             sys.exit(2) | ||||
|  | ||||
|     try: | ||||
|         datastore = store.ChangeDetectionStore(datastore_path=app_config['datastore_path'], version_tag=__version__) | ||||
|     except JSONDecodeError as e: | ||||
|         # Don't start if the JSON DB looks corrupt | ||||
|         print ("ERROR: JSON DB or Proxy List JSON at '{}' appears to be corrupt, aborting".format(app_config['datastore_path'])) | ||||
|         print(str(e)) | ||||
|         logger.critical(f"ERROR: JSON DB or Proxy List JSON at '{app_config['datastore_path']}' appears to be corrupt, aborting.") | ||||
|         logger.critical(str(e)) | ||||
|         return | ||||
|  | ||||
|     app = changedetection_app(app_config, datastore) | ||||
| @@ -145,7 +176,7 @@ def main(): | ||||
|     #         proxy_set_header X-Forwarded-Prefix /app; | ||||
|  | ||||
|     if os.getenv('USE_X_SETTINGS'): | ||||
|         print ("USE_X_SETTINGS is ENABLED\n") | ||||
|         logger.info("USE_X_SETTINGS is ENABLED") | ||||
|         from werkzeug.middleware.proxy_fix import ProxyFix | ||||
|         app.wsgi_app = ProxyFix(app.wsgi_app, x_prefix=1, x_host=1) | ||||
|  | ||||
|   | ||||
| @@ -133,6 +133,7 @@ class WatchHistory(Resource): | ||||
|  | ||||
|     # Get a list of available history for a watch by UUID | ||||
|     # curl http://localhost:5000/api/v1/watch/<string:uuid>/history | ||||
|     @auth.check_token | ||||
|     def get(self, uuid): | ||||
|         """ | ||||
|         @api {get} /api/v1/watch/<string:uuid>/history Get a list of all historical snapshots available for a watch | ||||
|   | ||||
| @@ -23,11 +23,11 @@ | ||||
|  | ||||
| from distutils.util import strtobool | ||||
| from flask import Blueprint, request, make_response | ||||
| import logging | ||||
| import os | ||||
|  | ||||
| from changedetectionio.store import ChangeDetectionStore | ||||
| from changedetectionio.flask_app import login_optionally_required | ||||
| from loguru import logger | ||||
|  | ||||
| browsersteps_sessions = {} | ||||
| io_interface_context = None | ||||
| @@ -58,7 +58,7 @@ def construct_blueprint(datastore: ChangeDetectionStore): | ||||
|             io_interface_context = io_interface_context.start() | ||||
|  | ||||
|         keepalive_ms = ((keepalive_seconds + 3) * 1000) | ||||
|         base_url = os.getenv('PLAYWRIGHT_DRIVER_URL', '') | ||||
|         base_url = os.getenv('PLAYWRIGHT_DRIVER_URL', '').strip('"') | ||||
|         a = "?" if not '?' in base_url else '&' | ||||
|         base_url += a + f"timeout={keepalive_ms}" | ||||
|  | ||||
| @@ -88,7 +88,7 @@ def construct_blueprint(datastore: ChangeDetectionStore): | ||||
|                 if parsed.password: | ||||
|                     proxy['password'] = parsed.password | ||||
|  | ||||
|                 print("Browser Steps: UUID {} selected proxy {}".format(watch_uuid, proxy_url)) | ||||
|                 logger.debug(f"Browser Steps: UUID {watch_uuid} selected proxy {proxy_url}") | ||||
|  | ||||
|         # Tell Playwright to connect to Chrome and setup a new session via our stepper interface | ||||
|         browsersteps_start_session['browserstepper'] = browser_steps.browsersteps_live_ui( | ||||
| @@ -115,10 +115,10 @@ def construct_blueprint(datastore: ChangeDetectionStore): | ||||
|         if not watch_uuid: | ||||
|             return make_response('No Watch UUID specified', 500) | ||||
|  | ||||
|         print("Starting connection with playwright") | ||||
|         logging.debug("browser_steps.py connecting") | ||||
|         logger.debug("Starting connection with playwright") | ||||
|         logger.debug("browser_steps.py connecting") | ||||
|         browsersteps_sessions[browsersteps_session_id] = start_browsersteps_session(watch_uuid) | ||||
|         print("Starting connection with playwright - done") | ||||
|         logger.debug("Starting connection with playwright - done") | ||||
|         return {'browsersteps_session_id': browsersteps_session_id} | ||||
|  | ||||
|     @login_optionally_required | ||||
| @@ -189,7 +189,7 @@ def construct_blueprint(datastore: ChangeDetectionStore): | ||||
|                                          optional_value=step_optional_value) | ||||
|  | ||||
|             except Exception as e: | ||||
|                 print("Exception when calling step operation", step_operation, str(e)) | ||||
|                 logger.error(f"Exception when calling step operation {step_operation} {str(e)}") | ||||
|                 # Try to find something of value to give back to the user | ||||
|                 return make_response(str(e).splitlines()[0], 401) | ||||
|  | ||||
|   | ||||
| @@ -4,6 +4,7 @@ import os | ||||
| import time | ||||
| import re | ||||
| from random import randint | ||||
| from loguru import logger | ||||
|  | ||||
| # Two flags, tell the JS which of the "Selector" or "Value" field should be enabled in the front end | ||||
| # 0- off, 1- on | ||||
| @@ -53,7 +54,7 @@ class steppable_browser_interface(): | ||||
|         if call_action_name == 'choose_one': | ||||
|             return | ||||
|  | ||||
|         print("> action calling", call_action_name) | ||||
|         logger.debug(f"> Action calling '{call_action_name}'") | ||||
|         # https://playwright.dev/python/docs/selectors#xpath-selectors | ||||
|         if selector and selector.startswith('/') and not selector.startswith('//'): | ||||
|             selector = "xpath=" + selector | ||||
| @@ -72,7 +73,7 @@ class steppable_browser_interface(): | ||||
|  | ||||
|         action_handler(selector, optional_value) | ||||
|         self.page.wait_for_timeout(1.5 * 1000) | ||||
|         print("Call action done in", time.time() - now) | ||||
|         logger.debug(f"Call action done in {time.time()-now:.2f}s") | ||||
|  | ||||
|     def action_goto_url(self, selector=None, value=None): | ||||
|         # self.page.set_viewport_size({"width": 1280, "height": 5000}) | ||||
| @@ -82,7 +83,7 @@ class steppable_browser_interface(): | ||||
|         #and also wait for seconds ? | ||||
|         #await page.waitForTimeout(1000); | ||||
|         #await page.waitForTimeout(extra_wait_ms); | ||||
|         print("Time to goto URL ", time.time() - now) | ||||
|         logger.debug(f"Time to goto URL {time.time()-now:.2f}s") | ||||
|         return response | ||||
|  | ||||
|     def action_click_element_containing_text(self, selector=None, value=''): | ||||
| @@ -103,7 +104,7 @@ class steppable_browser_interface(): | ||||
|         return response | ||||
|  | ||||
|     def action_click_element(self, selector, value): | ||||
|         print("Clicking element") | ||||
|         logger.debug("Clicking element") | ||||
|         if not len(selector.strip()): | ||||
|             return | ||||
|  | ||||
| @@ -111,7 +112,7 @@ class steppable_browser_interface(): | ||||
|  | ||||
|     def action_click_element_if_exists(self, selector, value): | ||||
|         import playwright._impl._errors as _api_types | ||||
|         print("Clicking element if exists") | ||||
|         logger.debug("Clicking element if exists") | ||||
|         if not len(selector.strip()): | ||||
|             return | ||||
|         try: | ||||
| @@ -227,11 +228,11 @@ class browsersteps_live_ui(steppable_browser_interface): | ||||
|         # Listen for all console events and handle errors | ||||
|         self.page.on("console", lambda msg: print(f"Browser steps console - {msg.type}: {msg.text} {msg.args}")) | ||||
|  | ||||
|         print("Time to browser setup", time.time() - now) | ||||
|         logger.debug(f"Time to browser setup {time.time()-now:.2f}s") | ||||
|         self.page.wait_for_timeout(1 * 1000) | ||||
|  | ||||
|     def mark_as_closed(self): | ||||
|         print("Page closed, cleaning up..") | ||||
|         logger.debug("Page closed, cleaning up..") | ||||
|  | ||||
|     @property | ||||
|     def has_expired(self): | ||||
| @@ -257,7 +258,7 @@ class browsersteps_live_ui(steppable_browser_interface): | ||||
|         xpath_data = self.page.evaluate("async () => {" + xpath_element_js + "}") | ||||
|         # So the JS will find the smallest one first | ||||
|         xpath_data['size_pos'] = sorted(xpath_data['size_pos'], key=lambda k: k['width'] * k['height'], reverse=True) | ||||
|         print("Time to complete get_current_state of browser", time.time() - now) | ||||
|         logger.debug(f"Time to complete get_current_state of browser {time.time()-now:.2f}s") | ||||
|         # except | ||||
|         # playwright._impl._api_types.Error: Browser closed. | ||||
|         # @todo show some countdown timer? | ||||
|   | ||||
| @@ -4,12 +4,12 @@ from urllib.parse import urlparse | ||||
| import chardet | ||||
| import hashlib | ||||
| import json | ||||
| import logging | ||||
| import os | ||||
| import requests | ||||
| import sys | ||||
| import time | ||||
| import urllib.parse | ||||
| from loguru import logger | ||||
|  | ||||
| visualselector_xpath_selectors = 'div,span,form,table,tbody,tr,td,a,p,ul,li,h1,h2,h3,h4, header, footer, section, article, aside, details, main, nav, section, summary' | ||||
|  | ||||
| @@ -47,7 +47,7 @@ class BrowserStepsStepException(Exception): | ||||
|     def __init__(self, step_n, original_e): | ||||
|         self.step_n = step_n | ||||
|         self.original_e = original_e | ||||
|         print(f"Browser Steps exception at step {self.step_n}", str(original_e)) | ||||
|         logger.debug(f"Browser Steps exception at step {self.step_n} {str(original_e)}") | ||||
|         return | ||||
|  | ||||
|  | ||||
| @@ -101,6 +101,7 @@ class Fetcher(): | ||||
|     error = None | ||||
|     fetcher_description = "No description" | ||||
|     headers = {} | ||||
|     is_plaintext = None | ||||
|     instock_data = None | ||||
|     instock_data_js = "" | ||||
|     status_code = None | ||||
| @@ -188,7 +189,7 @@ class Fetcher(): | ||||
|  | ||||
|             for step in valid_steps: | ||||
|                 step_n += 1 | ||||
|                 print(">> Iterating check - browser Step n {} - {}...".format(step_n, step['operation'])) | ||||
|                 logger.debug(f">> Iterating check - browser Step n {step_n} - {step['operation']}...") | ||||
|                 self.screenshot_step("before-" + str(step_n)) | ||||
|                 self.save_step_html("before-" + str(step_n)) | ||||
|                 try: | ||||
| @@ -205,8 +206,8 @@ class Fetcher(): | ||||
|                                                       optional_value=optional_value) | ||||
|                     self.screenshot_step(step_n) | ||||
|                     self.save_step_html(step_n) | ||||
|  | ||||
|                 except (Error, TimeoutError) as e: | ||||
|                     logger.debug(str(e)) | ||||
|                     # Stop processing here | ||||
|                     raise BrowserStepsStepException(step_n=step_n, original_e=e) | ||||
|  | ||||
| @@ -295,14 +296,14 @@ class base_html_playwright(Fetcher): | ||||
|  | ||||
|         if self.browser_steps_screenshot_path is not None: | ||||
|             destination = os.path.join(self.browser_steps_screenshot_path, 'step_{}.jpeg'.format(step_n)) | ||||
|             logging.debug("Saving step screenshot to {}".format(destination)) | ||||
|             logger.debug(f"Saving step screenshot to {destination}") | ||||
|             with open(destination, 'wb') as f: | ||||
|                 f.write(screenshot) | ||||
|  | ||||
|     def save_step_html(self, step_n): | ||||
|         content = self.page.content() | ||||
|         destination = os.path.join(self.browser_steps_screenshot_path, 'step_{}.html'.format(step_n)) | ||||
|         logging.debug("Saving step HTML to {}".format(destination)) | ||||
|         logger.debug(f"Saving step HTML to {destination}") | ||||
|         with open(destination, 'w') as f: | ||||
|             f.write(content) | ||||
|  | ||||
| @@ -491,7 +492,7 @@ class base_html_playwright(Fetcher): | ||||
|             if response is None: | ||||
|                 context.close() | ||||
|                 browser.close() | ||||
|                 print("Content Fetcher > Response object was none") | ||||
|                 logger.debug("Content Fetcher > Response object was none") | ||||
|                 raise EmptyReply(url=url, status_code=None) | ||||
|  | ||||
|             try: | ||||
| @@ -503,7 +504,7 @@ class base_html_playwright(Fetcher): | ||||
|                 # This can be ok, we will try to grab what we could retrieve | ||||
|                 pass | ||||
|             except Exception as e: | ||||
|                 print("Content Fetcher > Other exception when executing custom JS code", str(e)) | ||||
|                 logger.debug(f"Content Fetcher > Other exception when executing custom JS code {str(e)}") | ||||
|                 context.close() | ||||
|                 browser.close() | ||||
|                 raise PageUnloadable(url=url, status_code=None, message=str(e)) | ||||
| @@ -524,7 +525,7 @@ class base_html_playwright(Fetcher): | ||||
|             if len(self.page.content().strip()) == 0: | ||||
|                 context.close() | ||||
|                 browser.close() | ||||
|                 print("Content Fetcher > Content was empty") | ||||
|                 logger.debug("Content Fetcher > Content was empty") | ||||
|                 raise EmptyReply(url=url, status_code=response.status) | ||||
|  | ||||
|             # Run Browser Steps here | ||||
| @@ -676,7 +677,7 @@ class base_html_webdriver(Fetcher): | ||||
|             try: | ||||
|                 self.driver.quit() | ||||
|             except Exception as e: | ||||
|                 print("Content Fetcher > Exception in chrome shutdown/quit" + str(e)) | ||||
|                 logger.debug(f"Content Fetcher > Exception in chrome shutdown/quit {str(e)}") | ||||
|  | ||||
|  | ||||
| # "html_requests" is listed as the default fetcher in store.py! | ||||
|   | ||||
| @@ -1,6 +1,6 @@ | ||||
| #!/usr/bin/python3 | ||||
|  | ||||
| from changedetectionio import queuedWatchMetaData | ||||
| from changedetectionio import queuedWatchMetaData, html_tools, __version__ | ||||
| from copy import deepcopy | ||||
| from distutils.util import strtobool | ||||
| from feedgen.feed import FeedGenerator | ||||
| @@ -12,7 +12,8 @@ from functools import wraps | ||||
| from threading import Event | ||||
| import datetime | ||||
| import flask_login | ||||
| import logging | ||||
| from loguru import logger | ||||
| import sys | ||||
| import os | ||||
| import pytz | ||||
| import queue | ||||
| @@ -34,8 +35,6 @@ from flask import ( | ||||
| ) | ||||
|  | ||||
| from flask_paginate import Pagination, get_page_parameter | ||||
|  | ||||
| from changedetectionio import html_tools, __version__ | ||||
| from changedetectionio.api import api_v1 | ||||
|  | ||||
| datastore = None | ||||
| @@ -49,6 +48,18 @@ extra_stylesheets = [] | ||||
| update_q = queue.PriorityQueue() | ||||
| notification_q = queue.Queue() | ||||
|  | ||||
|  | ||||
| def get_plugin_manager(): | ||||
|     import pluggy | ||||
|     from changedetectionio.plugins import hookspecs | ||||
|     from changedetectionio.plugins import whois as whois_plugin | ||||
|  | ||||
|     pm = pluggy.PluginManager("changedetectionio_plugin") | ||||
|     pm.add_hookspecs(hookspecs) | ||||
|     pm.load_setuptools_entrypoints("changedetectionio_plugin") | ||||
|     pm.register(whois_plugin) | ||||
|     return pm | ||||
|  | ||||
| app = Flask(__name__, | ||||
|             static_url_path="", | ||||
|             static_folder="static", | ||||
| @@ -95,7 +106,6 @@ def init_app_secret(datastore_path): | ||||
|  | ||||
|     return secret | ||||
|  | ||||
|  | ||||
| @app.template_global() | ||||
| def get_darkmode_state(): | ||||
|     css_dark_mode = request.cookies.get('css_dark_mode', 'false') | ||||
| @@ -210,6 +220,8 @@ def login_optionally_required(func): | ||||
|     return decorated_view | ||||
|  | ||||
| def changedetection_app(config=None, datastore_o=None): | ||||
|     logger.trace("TRACE log is enabled") | ||||
|  | ||||
|     global datastore | ||||
|     datastore = datastore_o | ||||
|  | ||||
| @@ -626,7 +638,6 @@ def changedetection_app(config=None, datastore_o=None): | ||||
|             form.fetch_backend.choices.append(p) | ||||
|  | ||||
|         form.fetch_backend.choices.append(("system", 'System settings default')) | ||||
|  | ||||
|         # form.browser_steps[0] can be assumed that we 'goto url' first | ||||
|  | ||||
|         if datastore.proxy_list is None: | ||||
| @@ -727,6 +738,8 @@ def changedetection_app(config=None, datastore_o=None): | ||||
|             if (watch.get('fetch_backend') == 'system' and system_uses_webdriver) or watch.get('fetch_backend') == 'html_webdriver' or watch.get('fetch_backend', '').startswith('extra_browser_'): | ||||
|                 is_html_webdriver = True | ||||
|  | ||||
|             processor_config = next((p[2] for p in processors.available_processors() if p[0] == watch.get('processor')), None) | ||||
|  | ||||
|             # Only works reliably with Playwright | ||||
|             visualselector_enabled = os.getenv('PLAYWRIGHT_DRIVER_URL', False) and is_html_webdriver | ||||
|             output = render_template("edit.html", | ||||
| @@ -741,6 +754,7 @@ def changedetection_app(config=None, datastore_o=None): | ||||
|                                      is_html_webdriver=is_html_webdriver, | ||||
|                                      jq_support=jq_support, | ||||
|                                      playwright_enabled=os.getenv('PLAYWRIGHT_DRIVER_URL', False), | ||||
|                                      processor_config=processor_config, | ||||
|                                      settings_application=datastore.data['settings']['application'], | ||||
|                                      using_global_webdriver_wait=default['webdriver_delay'] is None, | ||||
|                                      uuid=uuid, | ||||
| @@ -821,11 +835,14 @@ def changedetection_app(config=None, datastore_o=None): | ||||
|                 flash("An error occurred, please see below.", "error") | ||||
|  | ||||
|         output = render_template("settings.html", | ||||
|                                  form=form, | ||||
|                                  hide_remove_pass=os.getenv("SALTED_PASS", False), | ||||
|                                  api_key=datastore.data['settings']['application'].get('api_access_token'), | ||||
|                                  emailprefix=os.getenv('NOTIFICATION_MAIL_BUTTON_PREFIX', False), | ||||
|                                  settings_application=datastore.data['settings']['application']) | ||||
|                                  form=form, | ||||
|                                  hide_remove_pass=os.getenv("SALTED_PASS", False), | ||||
|                                  settings_application=datastore.data['settings']['application'], | ||||
|                                  plugins=[] | ||||
|  | ||||
|                                  ) | ||||
|  | ||||
|         return output | ||||
|  | ||||
| @@ -1492,7 +1509,7 @@ def changedetection_app(config=None, datastore_o=None): | ||||
|  | ||||
|  | ||||
|         except Exception as e: | ||||
|             logging.error("Error sharing -{}".format(str(e))) | ||||
|             logger.error(f"Error sharing -{str(e)}") | ||||
|             flash("Could not share, something went wrong while communicating with the share server - {}".format(str(e)), 'error') | ||||
|  | ||||
|         # https://changedetection.io/share/VrMv05wpXyQa | ||||
| @@ -1605,7 +1622,7 @@ def notification_runner(): | ||||
|                 sent_obj = notification.process_notification(n_object, datastore) | ||||
|  | ||||
|             except Exception as e: | ||||
|                 logging.error("Watch URL: {}  Error {}".format(n_object['watch_url'], str(e))) | ||||
|                 logger.error(f"Watch URL: {n_object['watch_url']}  Error {str(e)}") | ||||
|  | ||||
|                 # UUID wont be present when we submit a 'test' from the global settings | ||||
|                 if 'uuid' in n_object: | ||||
| @@ -1628,7 +1645,7 @@ def ticker_thread_check_time_launch_checks(): | ||||
|     proxy_last_called_time = {} | ||||
|  | ||||
|     recheck_time_minimum_seconds = int(os.getenv('MINIMUM_SECONDS_RECHECK_TIME', 20)) | ||||
|     print("System env MINIMUM_SECONDS_RECHECK_TIME", recheck_time_minimum_seconds) | ||||
|     logger.debug(f"System env MINIMUM_SECONDS_RECHECK_TIME {recheck_time_minimum_seconds}") | ||||
|  | ||||
|     # Spin up Workers that do the fetching | ||||
|     # Can be overriden by ENV or use the default settings | ||||
| @@ -1673,7 +1690,7 @@ def ticker_thread_check_time_launch_checks(): | ||||
|             now = time.time() | ||||
|             watch = datastore.data['watching'].get(uuid) | ||||
|             if not watch: | ||||
|                 logging.error("Watch: {} no longer present.".format(uuid)) | ||||
|                 logger.error(f"Watch: {uuid} no longer present.") | ||||
|                 continue | ||||
|  | ||||
|             # No need todo further processing if it's paused | ||||
| @@ -1706,10 +1723,10 @@ def ticker_thread_check_time_launch_checks(): | ||||
|                             time_since_proxy_used = int(time.time() - proxy_last_used_time) | ||||
|                             if time_since_proxy_used < proxy_list_reuse_time_minimum: | ||||
|                                 # Not enough time difference reached, skip this watch | ||||
|                                 print("> Skipped UUID {} using proxy '{}', not enough time between proxy requests {}s/{}s".format(uuid, | ||||
|                                                                                                                          watch_proxy, | ||||
|                                                                                                                          time_since_proxy_used, | ||||
|                                                                                                                          proxy_list_reuse_time_minimum)) | ||||
|                                 logger.debug(f"> Skipped UUID {uuid} " | ||||
|                                         f"using proxy '{watch_proxy}', not " | ||||
|                                         f"enough time between proxy requests " | ||||
|                                         f"{time_since_proxy_used}s/{proxy_list_reuse_time_minimum}s") | ||||
|                                 continue | ||||
|                             else: | ||||
|                                 # Record the last used time | ||||
| @@ -1717,14 +1734,12 @@ def ticker_thread_check_time_launch_checks(): | ||||
|  | ||||
|                     # Use Epoch time as priority, so we get a "sorted" PriorityQueue, but we can still push a priority 1 into it. | ||||
|                     priority = int(time.time()) | ||||
|                     print( | ||||
|                         "> Queued watch UUID {} last checked at {} queued at {:0.2f} priority {} jitter {:0.2f}s, {:0.2f}s since last checked".format( | ||||
|                             uuid, | ||||
|                             watch['last_checked'], | ||||
|                             now, | ||||
|                             priority, | ||||
|                             watch.jitter_seconds, | ||||
|                             now - watch['last_checked'])) | ||||
|                     logger.debug( | ||||
|                         f"> Queued watch UUID {uuid} " | ||||
|                         f"last checked at {watch['last_checked']} " | ||||
|                         f"queued at {now:0.2f} priority {priority} " | ||||
|                         f"jitter {watch.jitter_seconds:0.2f}s, " | ||||
|                         f"{now - watch['last_checked']:0.2f}s since last checked") | ||||
|  | ||||
|                     # Into the queue with you | ||||
|                     update_q.put(queuedWatchMetaData.PrioritizedItem(priority=priority, item={'uuid': uuid, 'skip_when_checksum_same': True})) | ||||
|   | ||||
| @@ -410,7 +410,7 @@ class quickWatchForm(Form): | ||||
|     url = fields.URLField('URL', validators=[validateURL()]) | ||||
|     tags = StringTagUUID('Group tag', [validators.Optional()]) | ||||
|     watch_submit_button = SubmitField('Watch', render_kw={"class": "pure-button pure-button-primary"}) | ||||
|     processor = RadioField(u'Processor', choices=processors.available_processors(), default="text_json_diff") | ||||
|     processor = RadioField(u'Processor', choices=[t[:2] for t in processors.available_processors()], default="text_json_diff") | ||||
|     edit_and_watch_submit_button = SubmitField('Edit > Watch', render_kw={"class": "pure-button pure-button-primary"}) | ||||
|  | ||||
|  | ||||
| @@ -427,7 +427,7 @@ class commonSettingsForm(Form): | ||||
|                                                                                                                                     message="Should contain one or more seconds")]) | ||||
| class importForm(Form): | ||||
|     from . import processors | ||||
|     processor = RadioField(u'Processor', choices=processors.available_processors(), default="text_json_diff") | ||||
|     processor = RadioField(u'Processor', choices=[t[:2] for t in processors.available_processors()], default="text_json_diff") | ||||
|     urls = TextAreaField('URLs') | ||||
|     xlsx_file = FileField('Upload .xlsx file', validators=[FileAllowed(['xlsx'], 'Must be .xlsx file!')]) | ||||
|     file_mapping = SelectField('File mapping', [validators.DataRequired()], choices={('wachete', 'Wachete mapping'), ('custom','Custom mapping')}) | ||||
|   | ||||
| @@ -2,6 +2,7 @@ from abc import ABC, abstractmethod | ||||
| import time | ||||
| import validators | ||||
| from wtforms import ValidationError | ||||
| from loguru import logger | ||||
|  | ||||
| from changedetectionio.forms import validate_url | ||||
|  | ||||
| @@ -195,7 +196,7 @@ class import_xlsx_wachete(Importer): | ||||
|                     try: | ||||
|                         validate_url(data.get('url')) | ||||
|                     except ValidationError as e: | ||||
|                         print(">> import URL error", data.get('url'), str(e)) | ||||
|                         logger.error(f">> Import URL error {data.get('url')} {str(e)}") | ||||
|                         flash(f"Error processing row number {row_id}, URL value was incorrect, row was skipped.", 'error') | ||||
|                         # Don't bother processing anything else on this row | ||||
|                         continue | ||||
| @@ -209,7 +210,7 @@ class import_xlsx_wachete(Importer): | ||||
|                         self.new_uuids.append(new_uuid) | ||||
|                         good += 1 | ||||
|             except Exception as e: | ||||
|                 print(e) | ||||
|                 logger.error(e) | ||||
|                 flash(f"Error processing row number {row_id}, check all cell data types are correct, row was skipped.", 'error') | ||||
|             else: | ||||
|                 row_id += 1 | ||||
| @@ -264,7 +265,7 @@ class import_xlsx_custom(Importer): | ||||
|                         try: | ||||
|                             validate_url(url) | ||||
|                         except ValidationError as e: | ||||
|                             print(">> Import URL error", url, str(e)) | ||||
|                             logger.error(f">> Import URL error {url} {str(e)}") | ||||
|                             flash(f"Error processing row number {row_i}, URL value was incorrect, row was skipped.", 'error') | ||||
|                             # Don't bother processing anything else on this row | ||||
|                             url = None | ||||
| @@ -293,7 +294,7 @@ class import_xlsx_custom(Importer): | ||||
|                         self.new_uuids.append(new_uuid) | ||||
|                         good += 1 | ||||
|         except Exception as e: | ||||
|             print(e) | ||||
|             logger.error(e) | ||||
|             flash(f"Error processing row number {row_i}, check all cell data types are correct, row was skipped.", 'error') | ||||
|         else: | ||||
|             row_i += 1 | ||||
|   | ||||
| @@ -38,6 +38,7 @@ class model(dict): | ||||
|                     'notification_format': default_notification_format, | ||||
|                     'notification_title': default_notification_title, | ||||
|                     'notification_urls': [], # Apprise URL list | ||||
|                     'plugins': [], # list of dict, keyed by plugin name, with dict of the config and enabled true/false | ||||
|                     'pager_size': 50, | ||||
|                     'password': False, | ||||
|                     'render_anchor_tag_content': False, | ||||
|   | ||||
| @@ -1,10 +1,10 @@ | ||||
| from distutils.util import strtobool | ||||
| import logging | ||||
| import os | ||||
| import re | ||||
| import time | ||||
| import uuid | ||||
| from pathlib import Path | ||||
| from loguru import logger | ||||
|  | ||||
| # Allowable protocols, protects against javascript: etc | ||||
| # file:// is further checked by ALLOW_FILE_URI | ||||
| @@ -122,7 +122,7 @@ class model(dict): | ||||
|  | ||||
|     def ensure_data_dir_exists(self): | ||||
|         if not os.path.isdir(self.watch_data_dir): | ||||
|             print ("> Creating data dir {}".format(self.watch_data_dir)) | ||||
|             logger.debug(f"> Creating data dir {self.watch_data_dir}") | ||||
|             os.mkdir(self.watch_data_dir) | ||||
|  | ||||
|     @property | ||||
| @@ -211,7 +211,7 @@ class model(dict): | ||||
|         # Read the history file as a dict | ||||
|         fname = os.path.join(self.watch_data_dir, "history.txt") | ||||
|         if os.path.isfile(fname): | ||||
|             logging.debug("Reading history index " + str(time.time())) | ||||
|             logger.debug(f"Reading watch history index for {self.get('uuid')}") | ||||
|             with open(fname, "r") as f: | ||||
|                 for i in f.readlines(): | ||||
|                     if ',' in i: | ||||
|   | ||||
| @@ -1,7 +1,9 @@ | ||||
| import apprise | ||||
| import time | ||||
| from jinja2 import Environment, BaseLoader | ||||
| from apprise import NotifyFormat | ||||
| import json | ||||
| from loguru import logger | ||||
|  | ||||
| valid_tokens = { | ||||
|     'base_url': '', | ||||
| @@ -131,90 +133,94 @@ def process_notification(n_object, datastore): | ||||
|         # Initially text or whatever | ||||
|         n_format = datastore.data['settings']['application'].get('notification_format', valid_notification_formats[default_notification_format]) | ||||
|  | ||||
|  | ||||
|     # https://github.com/caronc/apprise/wiki/Development_LogCapture | ||||
|     # Anything higher than or equal to WARNING (which covers things like Connection errors) | ||||
|     # raise it as an exception | ||||
|     apobjs=[] | ||||
|     sent_objs=[] | ||||
|  | ||||
|     sent_objs = [] | ||||
|     from .apprise_asset import asset | ||||
|     for url in n_object['notification_urls']: | ||||
|         url = jinja2_env.from_string(url).render(**notification_parameters) | ||||
|         apobj = apprise.Apprise(debug=True, asset=asset) | ||||
|         url = url.strip() | ||||
|         if len(url): | ||||
|             print(">> Process Notification: AppRise notifying {}".format(url)) | ||||
|             with apprise.LogCapture(level=apprise.logging.DEBUG) as logs: | ||||
|                 # Re 323 - Limit discord length to their 2000 char limit total or it wont send. | ||||
|                 # Because different notifications may require different pre-processing, run each sequentially :( | ||||
|                 # 2000 bytes minus - | ||||
|                 #     200 bytes for the overhead of the _entire_ json payload, 200 bytes for {tts, wait, content} etc headers | ||||
|                 #     Length of URL - Incase they specify a longer custom avatar_url | ||||
|     apobj = apprise.Apprise(debug=True, asset=asset) | ||||
|  | ||||
|                 # So if no avatar_url is specified, add one so it can be correctly calculated into the total payload | ||||
|                 k = '?' if not '?' in url else '&' | ||||
|                 if not 'avatar_url' in url \ | ||||
|                         and not url.startswith('mail') \ | ||||
|                         and not url.startswith('post') \ | ||||
|                         and not url.startswith('get') \ | ||||
|                         and not url.startswith('delete') \ | ||||
|                         and not url.startswith('put'): | ||||
|                     url += k + 'avatar_url=https://raw.githubusercontent.com/dgtlmoon/changedetection.io/master/changedetectionio/static/images/avatar-256x256.png' | ||||
|     if not n_object.get('notification_urls'): | ||||
|         return None | ||||
|  | ||||
|                 if url.startswith('tgram://'): | ||||
|                     # Telegram only supports a limit subset of HTML, remove the '<br>' we place in. | ||||
|                     # re https://github.com/dgtlmoon/changedetection.io/issues/555 | ||||
|                     # @todo re-use an existing library we have already imported to strip all non-allowed tags | ||||
|                     n_body = n_body.replace('<br>', '\n') | ||||
|                     n_body = n_body.replace('</br>', '\n') | ||||
|                     # real limit is 4096, but minus some for extra metadata | ||||
|                     payload_max_size = 3600 | ||||
|                     body_limit = max(0, payload_max_size - len(n_title)) | ||||
|                     n_title = n_title[0:payload_max_size] | ||||
|                     n_body = n_body[0:body_limit] | ||||
|     with apprise.LogCapture(level=apprise.logging.DEBUG) as logs: | ||||
|         for url in n_object['notification_urls']: | ||||
|             url = url.strip() | ||||
|             logger.info(">> Process Notification: AppRise notifying {}".format(url)) | ||||
|             url = jinja2_env.from_string(url).render(**notification_parameters) | ||||
|  | ||||
|                 elif url.startswith('discord://') or url.startswith('https://discordapp.com/api/webhooks') or url.startswith('https://discord.com/api'): | ||||
|                     # real limit is 2000, but minus some for extra metadata | ||||
|                     payload_max_size = 1700 | ||||
|                     body_limit = max(0, payload_max_size - len(n_title)) | ||||
|                     n_title = n_title[0:payload_max_size] | ||||
|                     n_body = n_body[0:body_limit] | ||||
|             # Re 323 - Limit discord length to their 2000 char limit total or it wont send. | ||||
|             # Because different notifications may require different pre-processing, run each sequentially :( | ||||
|             # 2000 bytes minus - | ||||
|             #     200 bytes for the overhead of the _entire_ json payload, 200 bytes for {tts, wait, content} etc headers | ||||
|             #     Length of URL - Incase they specify a longer custom avatar_url | ||||
|  | ||||
|                 elif url.startswith('mailto'): | ||||
|                     # Apprise will default to HTML, so we need to override it | ||||
|                     # So that whats' generated in n_body is in line with what is going to be sent. | ||||
|                     # https://github.com/caronc/apprise/issues/633#issuecomment-1191449321 | ||||
|                     if not 'format=' in url and (n_format == 'Text' or n_format == 'Markdown'): | ||||
|                         prefix = '?' if not '?' in url else '&' | ||||
|                         # Apprise format is lowercase text https://github.com/caronc/apprise/issues/633 | ||||
|                         n_format = n_format.lower() | ||||
|                         url = f"{url}{prefix}format={n_format}" | ||||
|                     # If n_format == HTML, then apprise email should default to text/html and we should be sending HTML only | ||||
|             # So if no avatar_url is specified, add one so it can be correctly calculated into the total payload | ||||
|             k = '?' if not '?' in url else '&' | ||||
|             if not 'avatar_url' in url \ | ||||
|                     and not url.startswith('mail') \ | ||||
|                     and not url.startswith('post') \ | ||||
|                     and not url.startswith('get') \ | ||||
|                     and not url.startswith('delete') \ | ||||
|                     and not url.startswith('put'): | ||||
|                 url += k + 'avatar_url=https://raw.githubusercontent.com/dgtlmoon/changedetection.io/master/changedetectionio/static/images/avatar-256x256.png' | ||||
|  | ||||
|                 apobj.add(url) | ||||
|             if url.startswith('tgram://'): | ||||
|                 # Telegram only supports a limit subset of HTML, remove the '<br>' we place in. | ||||
|                 # re https://github.com/dgtlmoon/changedetection.io/issues/555 | ||||
|                 # @todo re-use an existing library we have already imported to strip all non-allowed tags | ||||
|                 n_body = n_body.replace('<br>', '\n') | ||||
|                 n_body = n_body.replace('</br>', '\n') | ||||
|                 # real limit is 4096, but minus some for extra metadata | ||||
|                 payload_max_size = 3600 | ||||
|                 body_limit = max(0, payload_max_size - len(n_title)) | ||||
|                 n_title = n_title[0:payload_max_size] | ||||
|                 n_body = n_body[0:body_limit] | ||||
|  | ||||
|                 apobj.notify( | ||||
|                     title=n_title, | ||||
|                     body=n_body, | ||||
|                     body_format=n_format, | ||||
|                     # False is not an option for AppRise, must be type None | ||||
|                     attach=n_object.get('screenshot', None) | ||||
|                 ) | ||||
|             elif url.startswith('discord://') or url.startswith('https://discordapp.com/api/webhooks') or url.startswith( | ||||
|                     'https://discord.com/api'): | ||||
|                 # real limit is 2000, but minus some for extra metadata | ||||
|                 payload_max_size = 1700 | ||||
|                 body_limit = max(0, payload_max_size - len(n_title)) | ||||
|                 n_title = n_title[0:payload_max_size] | ||||
|                 n_body = n_body[0:body_limit] | ||||
|  | ||||
|                 apobj.clear() | ||||
|             elif url.startswith('mailto'): | ||||
|                 # Apprise will default to HTML, so we need to override it | ||||
|                 # So that whats' generated in n_body is in line with what is going to be sent. | ||||
|                 # https://github.com/caronc/apprise/issues/633#issuecomment-1191449321 | ||||
|                 if not 'format=' in url and (n_format == 'Text' or n_format == 'Markdown'): | ||||
|                     prefix = '?' if not '?' in url else '&' | ||||
|                     # Apprise format is lowercase text https://github.com/caronc/apprise/issues/633 | ||||
|                     n_format = n_format.lower() | ||||
|                     url = f"{url}{prefix}format={n_format}" | ||||
|                 # If n_format == HTML, then apprise email should default to text/html and we should be sending HTML only | ||||
|  | ||||
|                 # Incase it needs to exist in memory for a while after to process(?) | ||||
|                 apobjs.append(apobj) | ||||
|             apobj.add(url) | ||||
|  | ||||
|                 # Returns empty string if nothing found, multi-line string otherwise | ||||
|                 log_value = logs.getvalue() | ||||
|                 if log_value and 'WARNING' in log_value or 'ERROR' in log_value: | ||||
|                     raise Exception(log_value) | ||||
|             sent_objs.append({'title': n_title, | ||||
|                               'body': n_body, | ||||
|                               'url': url, | ||||
|                               'body_format': n_format}) | ||||
|  | ||||
|                 sent_objs.append({'title': n_title, | ||||
|                                   'body': n_body, | ||||
|                                   'url' : url, | ||||
|                                   'body_format': n_format}) | ||||
|         # Blast off the notifications tht are set in .add() | ||||
|         apobj.notify( | ||||
|             title=n_title, | ||||
|             body=n_body, | ||||
|             body_format=n_format, | ||||
|             # False is not an option for AppRise, must be type None | ||||
|             attach=n_object.get('screenshot', None) | ||||
|         ) | ||||
|  | ||||
|         # Give apprise time to register an error | ||||
|         time.sleep(3) | ||||
|  | ||||
|         # Returns empty string if nothing found, multi-line string otherwise | ||||
|         log_value = logs.getvalue() | ||||
|  | ||||
|         if log_value and 'WARNING' in log_value or 'ERROR' in log_value: | ||||
|             raise Exception(log_value) | ||||
|  | ||||
|     # Return what was sent for better logging - after the for loop | ||||
|     return sent_objs | ||||
|   | ||||
							
								
								
									
										6
									
								
								changedetectionio/plugins/__init__.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										6
									
								
								changedetectionio/plugins/__init__.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,6 @@ | ||||
| import pluggy | ||||
|  | ||||
| hookimpl = pluggy.HookimplMarker("changedetectionio_plugin") | ||||
| """Marker to be imported and used in plugins (and for own implementations)""" | ||||
|  | ||||
| x=1 | ||||
							
								
								
									
										20
									
								
								changedetectionio/plugins/hookspecs.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										20
									
								
								changedetectionio/plugins/hookspecs.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,20 @@ | ||||
| import pluggy | ||||
| from changedetectionio.store import ChangeDetectionStore | ||||
|  | ||||
| hookspec = pluggy.HookspecMarker("changedetectionio_plugin") | ||||
|  | ||||
|  | ||||
| @hookspec | ||||
| def extra_processor(): | ||||
|     """Defines a new fetch method | ||||
|  | ||||
|     :return: a tuples, (machine_name, description) | ||||
|     """ | ||||
|  | ||||
| @hookspec(firstresult=True) | ||||
| def processor_call(processor_name: str, datastore: ChangeDetectionStore, watch_uuid: str): | ||||
|     """ | ||||
|     Call processors with processor name | ||||
|     :param processor_name: as defined in extra_processors | ||||
|     :return: data? | ||||
|     """ | ||||
							
								
								
									
										53
									
								
								changedetectionio/plugins/whois.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										53
									
								
								changedetectionio/plugins/whois.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,53 @@ | ||||
| """ | ||||
| Whois information lookup | ||||
| - Fetches using whois | ||||
| - Extends the 'text_json_diff' so that text filters can still be used with whois information | ||||
|  | ||||
| @todo publish to pypi and github as a separate plugin | ||||
| """ | ||||
|  | ||||
| from ..plugins import hookimpl | ||||
| import changedetectionio.processors.text_json_diff as text_json_diff | ||||
| from changedetectionio import content_fetcher | ||||
|  | ||||
| # would be changedetectionio.plugins in other apps | ||||
|  | ||||
| class text_json_filtering_whois(text_json_diff.perform_site_check): | ||||
|  | ||||
|     def __init__(self, *args, datastore, watch_uuid, **kwargs): | ||||
|         super().__init__(*args, datastore=datastore, watch_uuid=watch_uuid, **kwargs) | ||||
|  | ||||
|     def call_browser(self): | ||||
|         import whois | ||||
|         # the whois data | ||||
|         self.fetcher = content_fetcher.Fetcher() | ||||
|         self.fetcher.is_plaintext = True | ||||
|  | ||||
|         from urllib.parse import urlparse | ||||
|         parsed = urlparse(self.watch.link) | ||||
|         w = whois.whois(parsed.hostname) | ||||
|         self.fetcher.content= w.text | ||||
|  | ||||
| @hookimpl | ||||
| def extra_processor(): | ||||
|     """ | ||||
|     Advertise a new processor | ||||
|     :return: | ||||
|     """ | ||||
|     from changedetectionio.processors import default_processor_config | ||||
|     processor_config = dict(default_processor_config) | ||||
|     # Which UI elements are not used | ||||
|     processor_config['needs_request_fetch_method'] = False | ||||
|     processor_config['needs_browsersteps'] = False | ||||
|     processor_config['needs_visualselector'] = False | ||||
|     return ('plugin_processor_whois', "Whois domain information fetch", processor_config) | ||||
|  | ||||
| # @todo When a watch chooses this extra_process processor, the watch should ONLY use this one. | ||||
| #       (one watch can only have one extra_processor) | ||||
| @hookimpl | ||||
| def processor_call(processor_name, datastore, watch_uuid): | ||||
|     if processor_name == 'plugin_processor_whois': # could be removed, see above note | ||||
|         x = text_json_filtering_whois(datastore=datastore, watch_uuid=watch_uuid) | ||||
|         return x | ||||
|     return None | ||||
|  | ||||
| @@ -5,6 +5,16 @@ import re | ||||
| from changedetectionio import content_fetcher | ||||
| from copy import deepcopy | ||||
| from distutils.util import strtobool | ||||
| from loguru import logger | ||||
|  | ||||
| # Which UI elements in settings the processor requires | ||||
| # For example, restock monitor isnt compatible with visualselector and filters | ||||
| default_processor_config = { | ||||
|     'needs_request_fetch_method': True, | ||||
|     'needs_browsersteps': True, | ||||
|     'needs_visualselector': True, | ||||
|     'needs_filters': True, | ||||
| } | ||||
|  | ||||
| class difference_detection_processor(): | ||||
|  | ||||
| @@ -69,7 +79,7 @@ class difference_detection_processor(): | ||||
|         proxy_url = None | ||||
|         if preferred_proxy_id: | ||||
|             proxy_url = self.datastore.proxy_list.get(preferred_proxy_id).get('url') | ||||
|             print(f"Using proxy Key: {preferred_proxy_id} as Proxy URL {proxy_url}") | ||||
|             logger.debug(f"Selected proxy key '{preferred_proxy_id}' as proxy URL '{proxy_url}' for {url}") | ||||
|  | ||||
|         # Now call the fetcher (playwright/requests/etc) with arguments that only a fetcher would need. | ||||
|         # When browser_connection_url is None, it method should default to working out whats the best defaults (os env vars etc) | ||||
| @@ -131,6 +141,15 @@ class difference_detection_processor(): | ||||
|  | ||||
| def available_processors(): | ||||
|     from . import restock_diff, text_json_diff | ||||
|     x=[('text_json_diff', text_json_diff.name), ('restock_diff', restock_diff.name)] | ||||
|     # @todo Make this smarter with introspection of sorts. | ||||
|     from ..flask_app import get_plugin_manager | ||||
|     pm = get_plugin_manager() | ||||
|     x = [('text_json_diff', text_json_diff.name, dict(default_processor_config)), | ||||
|          ('restock_diff', restock_diff.name, dict(default_processor_config)) | ||||
|          ] | ||||
|  | ||||
|     plugin_choices = pm.hook.extra_processor() | ||||
|     if plugin_choices: | ||||
|         for p in plugin_choices: | ||||
|             x.append(p) | ||||
|  | ||||
|     return x | ||||
|   | ||||
| @@ -61,4 +61,4 @@ class perform_site_check(difference_detection_processor): | ||||
|         # Always record the new checksum | ||||
|         update_obj["previous_md5"] = fetched_md5 | ||||
|  | ||||
|         return changed_detected, update_obj, self.fetcher.instock_data.encode('utf-8') | ||||
|         return changed_detected, update_obj, self.fetcher.instock_data.encode('utf-8').strip() | ||||
|   | ||||
| @@ -2,7 +2,6 @@ | ||||
|  | ||||
| import hashlib | ||||
| import json | ||||
| import logging | ||||
| import os | ||||
| import re | ||||
| import urllib3 | ||||
| @@ -12,6 +11,7 @@ from changedetectionio.blueprint.price_data_follower import PRICE_DATA_TRACK_ACC | ||||
| from copy import deepcopy | ||||
| from . import difference_detection_processor | ||||
| from ..html_tools import PERL_STYLE_REGEX, cdata_in_document_to_text | ||||
| from loguru import logger | ||||
|  | ||||
| urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) | ||||
|  | ||||
| @@ -155,7 +155,7 @@ class perform_site_check(difference_detection_processor): | ||||
|             html_content = self.fetcher.content | ||||
|  | ||||
|             # If not JSON,  and if it's not text/plain.. | ||||
|             if 'text/plain' in self.fetcher.get_all_headers().get('content-type', '').lower(): | ||||
|             if 'text/plain' in self.fetcher.get_all_headers().get('content-type', '').lower() or self.fetcher.is_plaintext: | ||||
|                 # Don't run get_text or xpath/css filters on plaintext | ||||
|                 stripped_text_from_html = html_content | ||||
|             else: | ||||
| @@ -340,10 +340,10 @@ class perform_site_check(difference_detection_processor): | ||||
|                 has_unique_lines = watch.lines_contain_something_unique_compared_to_history(lines=stripped_text_from_html.splitlines()) | ||||
|                 # One or more lines? unsure? | ||||
|                 if not has_unique_lines: | ||||
|                     logging.debug("check_unique_lines: UUID {} didnt have anything new setting change_detected=False".format(uuid)) | ||||
|                     logger.debug(f"check_unique_lines: UUID {uuid} didnt have anything new setting change_detected=False") | ||||
|                     changed_detected = False | ||||
|                 else: | ||||
|                     logging.debug("check_unique_lines: UUID {} had unique content".format(uuid)) | ||||
|                     logger.debug(f"check_unique_lines: UUID {uuid} had unique content") | ||||
|  | ||||
|         # Always record the new checksum | ||||
|         update_obj["previous_md5"] = fetched_md5 | ||||
|   | ||||
| @@ -1,117 +1,132 @@ | ||||
| // Restock Detector | ||||
| // (c) Leigh Morresi dgtlmoon@gmail.com | ||||
| // | ||||
| // Assumes the product is in stock to begin with, unless the following appears above the fold ; | ||||
| // - outOfStockTexts appears above the fold (out of stock) | ||||
| // - negateOutOfStockRegex (really is in stock) | ||||
|  | ||||
| function isItemInStock() { | ||||
|   // @todo Pass these in so the same list can be used in non-JS fetchers | ||||
|   const outOfStockTexts = [ | ||||
|     ' أخبرني عندما يتوفر', | ||||
|     '0 in stock', | ||||
|     'agotado', | ||||
|     'article épuisé', | ||||
|     'artikel zurzeit vergriffen', | ||||
|     'as soon as stock is available', | ||||
|     'ausverkauft', // sold out | ||||
|     'available for back order', | ||||
|     'back-order or out of stock', | ||||
|     'backordered', | ||||
|     'benachrichtigt mich', // notify me | ||||
|     'brak na stanie', | ||||
|     'brak w magazynie', | ||||
|     'coming soon', | ||||
|     'currently have any tickets for this', | ||||
|     'currently unavailable', | ||||
|     'dostępne wkrótce', | ||||
|     'en rupture de stock', | ||||
|     'ist derzeit nicht auf lager', | ||||
|     'item is no longer available', | ||||
|     'let me know when it\'s available', | ||||
|     'message if back in stock', | ||||
|     'nachricht bei', | ||||
|     'nicht auf lager', | ||||
|     'nicht lieferbar', | ||||
|     'nicht zur verfügung', | ||||
|     'niet beschikbaar', | ||||
|     'niet leverbaar', | ||||
|     'no disponible temporalmente', | ||||
|     'no longer in stock', | ||||
|     'no tickets available', | ||||
|     'not available', | ||||
|     'not currently available', | ||||
|     'not in stock', | ||||
|     'notify me when available', | ||||
|     'não estamos a aceitar encomendas', | ||||
|     'out of stock', | ||||
|     'out-of-stock', | ||||
|     'produkt niedostępny', | ||||
|     'sold out', | ||||
|     'sold-out', | ||||
|     'temporarily out of stock', | ||||
|     'temporarily unavailable', | ||||
|     'tickets unavailable', | ||||
|     'tijdelijk uitverkocht', | ||||
|     'unavailable tickets', | ||||
|     'we do not currently have an estimate of when this product will be back in stock.', | ||||
|     'zur zeit nicht an lager', | ||||
|     '品切れ', | ||||
|     '已售完', | ||||
|     '품절' | ||||
|   ]; | ||||
|     // @todo Pass these in so the same list can be used in non-JS fetchers | ||||
|     const outOfStockTexts = [ | ||||
|         ' أخبرني عندما يتوفر', | ||||
|         '0 in stock', | ||||
|         'agotado', | ||||
|         'article épuisé', | ||||
|         'artikel zurzeit vergriffen', | ||||
|         'as soon as stock is available', | ||||
|         'ausverkauft', // sold out | ||||
|         'available for back order', | ||||
|         'back-order or out of stock', | ||||
|         'backordered', | ||||
|         'benachrichtigt mich', // notify me | ||||
|         'brak na stanie', | ||||
|         'brak w magazynie', | ||||
|         'coming soon', | ||||
|         'currently have any tickets for this', | ||||
|         'currently unavailable', | ||||
|         'dostępne wkrótce', | ||||
|         'en rupture de stock', | ||||
|         'ist derzeit nicht auf lager', | ||||
|         'item is no longer available', | ||||
|         'let me know when it\'s available', | ||||
|         'message if back in stock', | ||||
|         'nachricht bei', | ||||
|         'nicht auf lager', | ||||
|         'nicht lieferbar', | ||||
|         'nicht zur verfügung', | ||||
|         'niet beschikbaar', | ||||
|         'niet leverbaar', | ||||
|         'no disponible temporalmente', | ||||
|         'no longer in stock', | ||||
|         'no tickets available', | ||||
|         'not available', | ||||
|         'not currently available', | ||||
|         'not in stock',         | ||||
|         'notify me when available', | ||||
|         'notify when available',             | ||||
|         'não estamos a aceitar encomendas', | ||||
|         'out of stock', | ||||
|         'out-of-stock', | ||||
|         'produkt niedostępny', | ||||
|         'sold out', | ||||
|         'sold-out', | ||||
|         'temporarily out of stock', | ||||
|         'temporarily unavailable', | ||||
|         'tickets unavailable', | ||||
|         'tijdelijk uitverkocht', | ||||
|         'unavailable tickets', | ||||
|         'we do not currently have an estimate of when this product will be back in stock.', | ||||
|         'we don\'t know when or if this item will be back in stock.', | ||||
|         'zur zeit nicht an lager', | ||||
|         '品切れ', | ||||
|         '已售完', | ||||
|         '품절' | ||||
|     ]; | ||||
|  | ||||
|  | ||||
|   const negateOutOfStockRegexs = [ | ||||
|       '[0-9] in stock' | ||||
|   ] | ||||
|   var negateOutOfStockRegexs_r = []; | ||||
|   for (let i = 0; i < negateOutOfStockRegexs.length; i++) { | ||||
|     negateOutOfStockRegexs_r.push(new RegExp(negateOutOfStockRegexs[0], 'g')); | ||||
|   } | ||||
|  | ||||
|  | ||||
|   const elementsWithZeroChildren = Array.from(document.getElementsByTagName('*')).filter(element => element.children.length === 0); | ||||
|  | ||||
|   // REGEXS THAT REALLY MEAN IT'S IN STOCK | ||||
|   for (let i = elementsWithZeroChildren.length - 1; i >= 0; i--) { | ||||
|     const element = elementsWithZeroChildren[i]; | ||||
|     if (element.offsetWidth > 0 || element.offsetHeight > 0 || element.getClientRects().length > 0) { | ||||
|       var elementText=""; | ||||
|       if (element.tagName.toLowerCase() === "input") { | ||||
|         elementText = element.value.toLowerCase(); | ||||
|       } else { | ||||
|         elementText = element.textContent.toLowerCase(); | ||||
|       } | ||||
|  | ||||
|       if (elementText.length) { | ||||
|         // try which ones could mean its in stock | ||||
|         for (let i = 0; i < negateOutOfStockRegexs.length; i++) { | ||||
|           if (negateOutOfStockRegexs_r[i].test(elementText)) { | ||||
|             return 'Possibly in stock'; | ||||
|           } | ||||
|         } | ||||
|       } | ||||
|     const vh = Math.max(document.documentElement.clientHeight || 0, window.innerHeight || 0); | ||||
|     function getElementBaseText(element) { | ||||
|         // .textContent can include text from children which may give the wrong results | ||||
|         // scan only immediate TEXT_NODEs, which will be a child of the element | ||||
|         var text = ""; | ||||
|         for (var i = 0; i < element.childNodes.length; ++i) | ||||
|             if (element.childNodes[i].nodeType === Node.TEXT_NODE) | ||||
|                 text += element.childNodes[i].textContent; | ||||
|         return text.toLowerCase().trim(); | ||||
|     } | ||||
|   } | ||||
|  | ||||
|   // OTHER STUFF THAT COULD BE THAT IT'S OUT OF STOCK | ||||
|   for (let i = elementsWithZeroChildren.length - 1; i >= 0; i--) { | ||||
|     const element = elementsWithZeroChildren[i]; | ||||
|     if (element.offsetWidth > 0 || element.offsetHeight > 0 || element.getClientRects().length > 0) { | ||||
|       var elementText=""; | ||||
|       if (element.tagName.toLowerCase() === "input") { | ||||
|         elementText = element.value.toLowerCase(); | ||||
|       } else { | ||||
|         elementText = element.textContent.toLowerCase(); | ||||
|       } | ||||
|     const negateOutOfStockRegex = new RegExp('([0-9] in stock|add to cart)', 'ig'); | ||||
|  | ||||
|       if (elementText.length) { | ||||
|         // and these mean its out of stock | ||||
|         for (const outOfStockText of outOfStockTexts) { | ||||
|           if (elementText.includes(outOfStockText)) { | ||||
|             return elementText; // item is out of stock | ||||
|           } | ||||
|     // The out-of-stock or in-stock-text is generally always above-the-fold | ||||
|     // and often below-the-fold is a list of related products that may or may not contain trigger text | ||||
|     // so it's good to filter to just the 'above the fold' elements | ||||
|     // and it should be atleast 100px from the top to ignore items in the toolbar, sometimes menu items like "Coming soon" exist | ||||
|     const elementsToScan = Array.from(document.getElementsByTagName('*')).filter(element => element.getBoundingClientRect().top + window.scrollY <= vh && element.getBoundingClientRect().top + window.scrollY >= 100); | ||||
|  | ||||
|     var elementText = ""; | ||||
|  | ||||
|     // REGEXS THAT REALLY MEAN IT'S IN STOCK | ||||
|     for (let i = elementsToScan.length - 1; i >= 0; i--) { | ||||
|         const element = elementsToScan[i]; | ||||
|         elementText = ""; | ||||
|         if (element.tagName.toLowerCase() === "input") { | ||||
|             elementText = element.value.toLowerCase(); | ||||
|         } else { | ||||
|             elementText = getElementBaseText(element); | ||||
|         } | ||||
|       } | ||||
|     } | ||||
|   } | ||||
|  | ||||
|   return 'Possibly in stock'; // possibly in stock, cant decide otherwise. | ||||
|         if (elementText.length) { | ||||
|             // try which ones could mean its in stock | ||||
|             if (negateOutOfStockRegex.test(elementText)) { | ||||
|                 return 'Possibly in stock'; | ||||
|             } | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     // OTHER STUFF THAT COULD BE THAT IT'S OUT OF STOCK | ||||
|     for (let i = elementsToScan.length - 1; i >= 0; i--) { | ||||
|         const element = elementsToScan[i]; | ||||
|         if (element.offsetWidth > 0 || element.offsetHeight > 0 || element.getClientRects().length > 0) { | ||||
|             elementText = ""; | ||||
|             if (element.tagName.toLowerCase() === "input") { | ||||
|                 elementText = element.value.toLowerCase(); | ||||
|             } else { | ||||
|                 elementText = getElementBaseText(element); | ||||
|             } | ||||
|  | ||||
|             if (elementText.length) { | ||||
|                 // and these mean its out of stock | ||||
|                 for (const outOfStockText of outOfStockTexts) { | ||||
|                     if (elementText.includes(outOfStockText)) { | ||||
|                         return outOfStockText; // item is out of stock | ||||
|                     } | ||||
|                 } | ||||
|             } | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     return 'Possibly in stock'; // possibly in stock, cant decide otherwise. | ||||
| } | ||||
|  | ||||
| // returns the element text that makes it think it's out of stock | ||||
| return isItemInStock(); | ||||
| return isItemInStock().trim() | ||||
|  | ||||
|   | ||||
| @@ -9,7 +9,6 @@ from copy import deepcopy, copy | ||||
| from os import path, unlink | ||||
| from threading import Lock | ||||
| import json | ||||
| import logging | ||||
| import os | ||||
| import re | ||||
| import requests | ||||
| @@ -17,6 +16,7 @@ import secrets | ||||
| import threading | ||||
| import time | ||||
| import uuid as uuid_builder | ||||
| from loguru import logger | ||||
|  | ||||
| # Because the server will run as a daemon and wont know the URL for notification links when firing off a notification | ||||
| BASE_URL_NOT_SET_TEXT = '("Base URL" not set - see settings - notifications)' | ||||
| @@ -42,7 +42,7 @@ class ChangeDetectionStore: | ||||
|         self.__data = App.model() | ||||
|         self.datastore_path = datastore_path | ||||
|         self.json_store_path = "{}/url-watches.json".format(self.datastore_path) | ||||
|         print(">>> Datastore path is ", self.json_store_path) | ||||
|         logger.info(f"Datastore path is '{self.json_store_path}'") | ||||
|         self.needs_write = False | ||||
|         self.start_time = time.time() | ||||
|         self.stop_thread = False | ||||
| @@ -83,12 +83,12 @@ class ChangeDetectionStore: | ||||
|                 for uuid, watch in self.__data['watching'].items(): | ||||
|                     watch['uuid']=uuid | ||||
|                     self.__data['watching'][uuid] = Watch.model(datastore_path=self.datastore_path, default=watch) | ||||
|                     print("Watching:", uuid, self.__data['watching'][uuid]['url']) | ||||
|                     logger.info(f"Watching: {uuid} {self.__data['watching'][uuid]['url']}") | ||||
|  | ||||
|         # First time ran, Create the datastore. | ||||
|         except (FileNotFoundError): | ||||
|             if include_default_watches: | ||||
|                 print("No JSON DB found at {}, creating JSON store at {}".format(self.json_store_path, self.datastore_path)) | ||||
|                 logger.critical(f"No JSON DB found at {self.json_store_path}, creating JSON store at {self.datastore_path}") | ||||
|                 self.add_watch(url='https://news.ycombinator.com/', | ||||
|                                tag='Tech news', | ||||
|                                extras={'fetch_backend': 'html_requests'}) | ||||
| @@ -139,7 +139,7 @@ class ChangeDetectionStore: | ||||
|         save_data_thread = threading.Thread(target=self.save_datastore).start() | ||||
|  | ||||
|     def set_last_viewed(self, uuid, timestamp): | ||||
|         logging.debug("Setting watch UUID: {} last viewed to {}".format(uuid, int(timestamp))) | ||||
|         logger.debug(f"Setting watch UUID: {uuid} last viewed to {int(timestamp)}") | ||||
|         self.data['watching'][uuid].update({'last_viewed': int(timestamp)}) | ||||
|         self.needs_write = True | ||||
|  | ||||
| @@ -316,7 +316,7 @@ class ChangeDetectionStore: | ||||
|                             apply_extras['include_filters'] = [res['css_filter']] | ||||
|  | ||||
|             except Exception as e: | ||||
|                 logging.error("Error fetching metadata for shared watch link", url, str(e)) | ||||
|                 logger.error(f"Error fetching metadata for shared watch link {url} {str(e)}") | ||||
|                 flash("Error fetching metadata for {}".format(url), 'error') | ||||
|                 return False | ||||
|         from .model.Watch import is_safe_url | ||||
| @@ -345,7 +345,7 @@ class ChangeDetectionStore: | ||||
|  | ||||
|         new_uuid = new_watch.get('uuid') | ||||
|  | ||||
|         logging.debug("Added URL {} - {}".format(url, new_uuid)) | ||||
|         logger.debug(f"Adding URL {url} - {new_uuid}") | ||||
|  | ||||
|         for k in ['uuid', 'history', 'last_checked', 'last_changed', 'newest_history_key', 'previous_md5', 'viewed']: | ||||
|             if k in apply_extras: | ||||
| @@ -362,7 +362,7 @@ class ChangeDetectionStore: | ||||
|         if write_to_disk_now: | ||||
|             self.sync_to_json() | ||||
|  | ||||
|         print("added ", url) | ||||
|         logger.debug(f"Added '{url}'") | ||||
|  | ||||
|         return new_uuid | ||||
|  | ||||
| @@ -416,14 +416,13 @@ class ChangeDetectionStore: | ||||
|  | ||||
|  | ||||
|     def sync_to_json(self): | ||||
|         logging.info("Saving JSON..") | ||||
|         print("Saving JSON..") | ||||
|         logger.info("Saving JSON..") | ||||
|         try: | ||||
|             data = deepcopy(self.__data) | ||||
|         except RuntimeError as e: | ||||
|             # Try again in 15 seconds | ||||
|             time.sleep(15) | ||||
|             logging.error ("! Data changed when writing to JSON, trying again.. %s", str(e)) | ||||
|             logger.error(f"! Data changed when writing to JSON, trying again.. {str(e)}") | ||||
|             self.sync_to_json() | ||||
|             return | ||||
|         else: | ||||
| @@ -436,7 +435,7 @@ class ChangeDetectionStore: | ||||
|                     json.dump(data, json_file, indent=4) | ||||
|                 os.replace(self.json_store_path+".tmp", self.json_store_path) | ||||
|             except Exception as e: | ||||
|                 logging.error("Error writing JSON!! (Main JSON file save was skipped) : %s", str(e)) | ||||
|                 logger.error(f"Error writing JSON!! (Main JSON file save was skipped) : {str(e)}") | ||||
|  | ||||
|             self.needs_write = False | ||||
|             self.needs_write_urgent = False | ||||
| @@ -447,7 +446,16 @@ class ChangeDetectionStore: | ||||
|  | ||||
|         while True: | ||||
|             if self.stop_thread: | ||||
|                 print("Shutting down datastore thread") | ||||
|                 # Suppressing "Logging error in Loguru Handler #0" during CICD. | ||||
|                 # Not a meaningful difference for a real use-case just for CICD. | ||||
|                 # the side effect is a "Shutting down datastore thread" message | ||||
|                 # at the end of each test. | ||||
|                 # But still more looking better. | ||||
|                 import sys | ||||
|                 logger.remove() | ||||
|                 logger.add(sys.stderr) | ||||
|  | ||||
|                 logger.critical("Shutting down datastore thread") | ||||
|                 return | ||||
|  | ||||
|             if self.needs_write or self.needs_write_urgent: | ||||
| @@ -463,7 +471,7 @@ class ChangeDetectionStore: | ||||
|     # Go through the datastore path and remove any snapshots that are not mentioned in the index | ||||
|     # This usually is not used, but can be handy. | ||||
|     def remove_unused_snapshots(self): | ||||
|         print ("Removing snapshots from datastore that are not in the index..") | ||||
|         logger.info("Removing snapshots from datastore that are not in the index..") | ||||
|  | ||||
|         index=[] | ||||
|         for uuid in self.data['watching']: | ||||
| @@ -476,7 +484,7 @@ class ChangeDetectionStore: | ||||
|         for uuid in self.data['watching']: | ||||
|             for item in pathlib.Path(self.datastore_path).rglob(uuid+"/*.txt"): | ||||
|                 if not str(item) in index: | ||||
|                     print ("Removing",item) | ||||
|                     logger.info(f"Removing {item}") | ||||
|                     unlink(item) | ||||
|  | ||||
|     @property | ||||
| @@ -562,7 +570,7 @@ class ChangeDetectionStore: | ||||
|             if os.path.isfile(filepath): | ||||
|                 headers.update(parse_headers_from_text_file(filepath)) | ||||
|         except Exception as e: | ||||
|             print(f"ERROR reading headers.txt at {filepath}", str(e)) | ||||
|             logger.error(f"ERROR reading headers.txt at {filepath} {str(e)}") | ||||
|  | ||||
|         watch = self.data['watching'].get(uuid) | ||||
|         if watch: | ||||
| @@ -573,7 +581,7 @@ class ChangeDetectionStore: | ||||
|                 if os.path.isfile(filepath): | ||||
|                     headers.update(parse_headers_from_text_file(filepath)) | ||||
|             except Exception as e: | ||||
|                 print(f"ERROR reading headers.txt at {filepath}", str(e)) | ||||
|                 logger.error(f"ERROR reading headers.txt at {filepath} {str(e)}") | ||||
|  | ||||
|             # In /datastore/tag-name.txt | ||||
|             tags = self.get_all_tags_for_watch(uuid=uuid) | ||||
| @@ -584,7 +592,7 @@ class ChangeDetectionStore: | ||||
|                     if os.path.isfile(filepath): | ||||
|                         headers.update(parse_headers_from_text_file(filepath)) | ||||
|                 except Exception as e: | ||||
|                     print(f"ERROR reading headers.txt at {filepath}", str(e)) | ||||
|                     logger.error(f"ERROR reading headers.txt at {filepath} {str(e)}") | ||||
|  | ||||
|         return headers | ||||
|  | ||||
| @@ -602,13 +610,13 @@ class ChangeDetectionStore: | ||||
|     def add_tag(self, name): | ||||
|         # If name exists, return that | ||||
|         n = name.strip().lower() | ||||
|         print (f">>> Adding new tag - '{n}'") | ||||
|         logger.debug(f">>> Adding new tag - '{n}'") | ||||
|         if not n: | ||||
|             return False | ||||
|  | ||||
|         for uuid, tag in self.__data['settings']['application'].get('tags', {}).items(): | ||||
|             if n == tag.get('title', '').lower().strip(): | ||||
|                 print (f">>> Tag {name} already exists") | ||||
|                 logger.warning(f"Tag '{name}' already exists, skipping creation.") | ||||
|                 return uuid | ||||
|  | ||||
|         # Eventually almost everything todo with a watch will apply as a Tag | ||||
| @@ -670,7 +678,7 @@ class ChangeDetectionStore: | ||||
|         updates_available = self.get_updates_available() | ||||
|         for update_n in updates_available: | ||||
|             if update_n > self.__data['settings']['application']['schema_version']: | ||||
|                 print ("Applying update_{}".format((update_n))) | ||||
|                 logger.critical(f"Applying update_{update_n}") | ||||
|                 # Wont exist on fresh installs | ||||
|                 if os.path.exists(self.json_store_path): | ||||
|                     shutil.copyfile(self.json_store_path, self.datastore_path+"/url-watches-before-{}.json".format(update_n)) | ||||
| @@ -678,8 +686,8 @@ class ChangeDetectionStore: | ||||
|                 try: | ||||
|                     update_method = getattr(self, "update_{}".format(update_n))() | ||||
|                 except Exception as e: | ||||
|                     print("Error while trying update_{}".format((update_n))) | ||||
|                     print(e) | ||||
|                     logger.error(f"Error while trying update_{update_n}") | ||||
|                     logger.error(e) | ||||
|                     # Don't run any more updates | ||||
|                     return | ||||
|                 else: | ||||
| @@ -717,7 +725,7 @@ class ChangeDetectionStore: | ||||
|                         with open(os.path.join(target_path, "history.txt"), "w") as f: | ||||
|                             f.writelines(history) | ||||
|                     else: | ||||
|                         logging.warning("Datastore history directory {} does not exist, skipping history import.".format(target_path)) | ||||
|                         logger.warning(f"Datastore history directory {target_path} does not exist, skipping history import.") | ||||
|  | ||||
|                 # No longer needed, dynamically pulled from the disk when needed. | ||||
|                 # But we should set it back to a empty dict so we don't break if this schema runs on an earlier version. | ||||
|   | ||||
| @@ -115,6 +115,12 @@ | ||||
| 									Warning: Contents of <code>{{ '{{diff}}' }}</code>, <code>{{ '{{diff_removed}}' }}</code>, and <code>{{ '{{diff_added}}' }}</code> depend on how the difference algorithm perceives the change. <br> | ||||
|                                     For example, an addition or removal could be perceived as a change in some cases. <a target="_new" href="https://github.com/dgtlmoon/changedetection.io/wiki/Using-the-%7B%7Bdiff%7D%7D,-%7B%7Bdiff_added%7D%7D,-and-%7B%7Bdiff_removed%7D%7D-notification-tokens">More Here</a> <br> | ||||
|                                     </p> | ||||
|                                     <p> | ||||
|                                         For JSON payloads, use <strong>|tojson</strong> without quotes for automatic escaping, for example - <code>{ "name": {{ '{{ watch_title|tojson }}' }} }</code> | ||||
|                                     </p> | ||||
|                                     <p> | ||||
|                                         URL encoding, use <strong>|urlencode</strong>, for example - <code>gets://hook-website.com/test.php?title={{ '{{ watch_title|urlencode }}' }}</code> | ||||
|                                     </p> | ||||
|                                 </div> | ||||
|                             </div> | ||||
|                             <div class="pure-control-group"> | ||||
|   | ||||
| @@ -39,12 +39,15 @@ | ||||
|         <ul> | ||||
|             <li class="tab" id=""><a href="#general">General</a></li> | ||||
|             <li class="tab"><a href="#request">Request</a></li> | ||||
|             {% if playwright_enabled %} | ||||
|             {% if playwright_enabled and processor_config['needs_browsersteps'] %} | ||||
|             <li class="tab"><a id="browsersteps-tab" href="#browser-steps">Browser Steps</a></li> | ||||
|             {% endif %} | ||||
|  | ||||
|             {% if watch['processor'] == 'text_json_diff' %} | ||||
|             {% if processor_config['needs_visualselector']  %} | ||||
|             <li class="tab"><a id="visualselector-tab" href="#visualselector">Visual Filter Selector</a></li> | ||||
|             {% endif %} | ||||
|  | ||||
|             {% if processor_config['needs_filters']  %} | ||||
|             <li class="tab"><a href="#filters-and-triggers">Filters & Triggers</a></li> | ||||
|             {% endif %} | ||||
|  | ||||
| @@ -67,16 +70,12 @@ | ||||
|                         {{ render_field(form.url, placeholder="https://...", required=true, class="m-d") }} | ||||
|                         <span class="pure-form-message-inline">Some sites use JavaScript to create the content, for this you should <a href="https://github.com/dgtlmoon/changedetection.io/wiki/Fetching-pages-with-WebDriver">use the Chrome/WebDriver Fetcher</a></span><br> | ||||
|                         <span class="pure-form-message-inline">You can use variables in the URL, perfect for inserting the current date and other logic, <a href="https://github.com/dgtlmoon/changedetection.io/wiki/Handling-variables-in-the-watched-URL">help and examples here</a></span><br> | ||||
|                         <span class="pure-form-message-inline"> | ||||
|                         {% if watch['processor'] == 'text_json_diff' %} | ||||
|                             Current mode: <strong>Webpage Text/HTML, JSON and PDF changes.</strong><br> | ||||
|                           <a href="{{url_for('edit_page', uuid=uuid)}}?switch_processor=restock_diff" class="pure-button button-xsmall">Switch to re-stock detection mode.</a> | ||||
|                         {% else %} | ||||
|                         Current mode: <strong>Re-stock detection.</strong><br> | ||||
|                           <a href="{{url_for('edit_page', uuid=uuid)}}?switch_processor=text_json_diff" class="pure-button button-xsmall">Switch to Webpage Text/HTML, JSON and PDF changes mode.</a> | ||||
|                         {% endif %} | ||||
|                         </span> | ||||
|  | ||||
|                     </div> | ||||
|                 <div class="pure-control-group"> | ||||
|                     <label for="title">Processing mode</label> | ||||
|                           {% for a in available_processors %} | ||||
|                             <a href="{{url_for('edit_page', uuid=uuid)}}?switch_processor={{ a[0] }}" class="pure-button button-xsmall {% if watch['processor'] == a[0] %}button-secondary{% endif %}">{{ a[1]}}.</a> | ||||
|                           {% endfor %} | ||||
|                     </div> | ||||
|                     <div class="pure-control-group"> | ||||
|                         {{ render_field(form.title, class="m-d") }} | ||||
| @@ -108,6 +107,7 @@ | ||||
|             </div> | ||||
|  | ||||
|             <div class="tab-pane-inner" id="request"> | ||||
|             {% if processor_config['needs_request_fetch_method']  %} | ||||
|                     <div class="pure-control-group inline-radio"> | ||||
|                         {{ render_field(form.fetch_backend, class="fetch-backend") }} | ||||
|                         <span class="pure-form-message-inline"> | ||||
| @@ -116,6 +116,7 @@ | ||||
|                             Tip: <a href="https://github.com/dgtlmoon/changedetection.io/wiki/Proxy-configuration#brightdata-proxy-support">Connect using Bright Data and Oxylabs Proxies, find out more here.</a> | ||||
|                         </span> | ||||
|                     </div> | ||||
|             {% endif  %} | ||||
|                 {% if form.proxy %} | ||||
|                     <div class="pure-control-group inline-radio"> | ||||
|                           <div>{{ form.proxy.label }} <a href="" id="check-all-proxies" class="pure-button button-secondary button-xsmall" >Check/Scan all</a></div> | ||||
| @@ -193,7 +194,7 @@ User-Agent: wonderbra 1.0") }} | ||||
|                     </div> | ||||
|             </fieldset> | ||||
|             </div> | ||||
|             {% if playwright_enabled %} | ||||
|             {% if playwright_enabled and processor_config['needs_browsersteps'] %} | ||||
|             <div class="tab-pane-inner" id="browser-steps"> | ||||
|                 <img class="beta-logo" src="{{url_for('static_content', group='images', filename='beta-logo.png')}}" alt="New beta functionality"> | ||||
|                 <fieldset> | ||||
| @@ -264,8 +265,10 @@ User-Agent: wonderbra 1.0") }} | ||||
|                 </fieldset> | ||||
|             </div> | ||||
|  | ||||
|             {% if watch['processor'] == 'text_json_diff' %} | ||||
|             {% if processor_config['needs_filters']  %} | ||||
|             <div class="tab-pane-inner" id="filters-and-triggers"> | ||||
|                 <div class="text-filtering"> | ||||
|                 <h3>Filter by HTML element</h3> | ||||
|                     <div class="pure-control-group"> | ||||
|                             <strong>Pro-tips:</strong><br> | ||||
|                             <ul> | ||||
| @@ -315,7 +318,7 @@ xpath://body/div/span[contains(@class, 'example-class')]", | ||||
|                                 href="https://github.com/dgtlmoon/changedetection.io/wiki/CSS-Selector-help">here for more CSS selector help</a>.<br> | ||||
|                 </span> | ||||
|                     </div> | ||||
|                 <fieldset class="pure-control-group"> | ||||
|                     <div class="pure-control-group"> | ||||
|                     {{ render_field(form.subtractive_selectors, rows=5, placeholder="header | ||||
| footer | ||||
| nav | ||||
| @@ -326,7 +329,8 @@ nav | ||||
|                           <li> Add multiple elements or CSS selectors per line to ignore multiple parts of the HTML. </li> | ||||
|                         </ul> | ||||
|                       </span> | ||||
|                 </fieldset> | ||||
|                     </div> | ||||
|                 </div> | ||||
|                 <div class="text-filtering"> | ||||
|                 <fieldset class="pure-group" id="text-filtering-type-options"> | ||||
|                     <h3>Text filtering</h3> | ||||
| @@ -423,7 +427,7 @@ Unavailable") }} | ||||
|             </div> | ||||
|             {% endif %} | ||||
|  | ||||
|             {% if watch['processor'] == 'text_json_diff' %} | ||||
|             {% if processor_config['needs_visualselector']  %} | ||||
|             <div class="tab-pane-inner visual-selector-ui" id="visualselector"> | ||||
|                 <img class="beta-logo" src="{{url_for('static_content', group='images', filename='beta-logo.png')}}" alt="New beta functionality"> | ||||
|  | ||||
|   | ||||
| @@ -22,6 +22,7 @@ | ||||
|             <li class="tab"><a href="#filters">Global Filters</a></li> | ||||
|             <li class="tab"><a href="#api">API</a></li> | ||||
|             <li class="tab"><a href="#proxies">CAPTCHA & Proxies</a></li> | ||||
|             <li class="tab"><a href="#plugins">Plugins</a></li> | ||||
|         </ul> | ||||
|     </div> | ||||
|     <div class="box-wrap inner"> | ||||
| @@ -243,6 +244,12 @@ nav | ||||
|                     {{ render_field(form.requests.form.extra_browsers) }} | ||||
|                 </div> | ||||
|             </div> | ||||
|             <div class="tab-pane-inner" id="plugins"> | ||||
|                 available plugin on/off stuff here | ||||
|  | ||||
|                 how to let each one expose config? | ||||
|             </div> | ||||
|  | ||||
|             <div id="actions"> | ||||
|                 <div class="pure-control-group"> | ||||
|                     {{ render_button(form.save_button) }} | ||||
|   | ||||
| @@ -4,6 +4,8 @@ import pytest | ||||
| from changedetectionio import changedetection_app | ||||
| from changedetectionio import store | ||||
| import os | ||||
| import sys | ||||
| from loguru import logger | ||||
|  | ||||
| # https://github.com/pallets/flask/blob/1.1.2/examples/tutorial/tests/test_auth.py | ||||
| # Much better boilerplate than the docs | ||||
| @@ -11,6 +13,15 @@ import os | ||||
|  | ||||
| global app | ||||
|  | ||||
| # https://loguru.readthedocs.io/en/latest/resources/migration.html#replacing-caplog-fixture-from-pytest-library | ||||
| # Show loguru logs only if CICD pytest fails. | ||||
| from loguru import logger | ||||
| @pytest.fixture | ||||
| def reportlog(pytestconfig): | ||||
|     logging_plugin = pytestconfig.pluginmanager.getplugin("logging-plugin") | ||||
|     handler_id = logger.add(logging_plugin.report_handler, format="{message}") | ||||
|     yield | ||||
|     logger.remove(handler_id) | ||||
|  | ||||
| def cleanup(datastore_path): | ||||
|     import glob | ||||
| @@ -41,6 +52,18 @@ def app(request): | ||||
|  | ||||
|     app_config = {'datastore_path': datastore_path, 'disable_checkver' : True} | ||||
|     cleanup(app_config['datastore_path']) | ||||
|  | ||||
|     logger_level = 'TRACE' | ||||
|  | ||||
|     logger.remove() | ||||
|     log_level_for_stdout = { 'DEBUG', 'SUCCESS' } | ||||
|     logger.configure(handlers=[ | ||||
|         {"sink": sys.stdout, "level": logger_level, | ||||
|          "filter" : lambda record: record['level'].name in log_level_for_stdout}, | ||||
|         {"sink": sys.stderr, "level": logger_level, | ||||
|          "filter": lambda record: record['level'].name not in log_level_for_stdout}, | ||||
|         ]) | ||||
|  | ||||
|     datastore = store.ChangeDetectionStore(datastore_path=app_config['datastore_path'], include_default_watches=False) | ||||
|     app = changedetection_app(app_config, datastore) | ||||
|  | ||||
|   | ||||
| @@ -37,4 +37,4 @@ def test_fetch_webdriver_content(client, live_server): | ||||
|     ) | ||||
|     logging.getLogger().info("Looking for correct fetched HTML (text) from server") | ||||
|  | ||||
|     assert b'cool it works' in res.data | ||||
|     assert b'cool it works' in res.data | ||||
|   | ||||
| @@ -97,6 +97,17 @@ def test_check_notification_email_formats_default_Text_override_HTML(client, liv | ||||
|     set_original_response() | ||||
|     global smtp_test_server | ||||
|     notification_url = f'mailto://changedetection@{smtp_test_server}:11025/?to=fff@home.com' | ||||
|     notification_body = f"""<!DOCTYPE html> | ||||
| <html lang="en"> | ||||
| <head> | ||||
|     <title>My Webpage</title> | ||||
| </head> | ||||
| <body> | ||||
|     <h1>Test</h1> | ||||
|     {default_notification_body} | ||||
| </body> | ||||
| </html> | ||||
| """ | ||||
|  | ||||
|     ##################### | ||||
|     # Set this up for when we remove the notification from the watch, it should fallback with these details | ||||
| @@ -104,7 +115,7 @@ def test_check_notification_email_formats_default_Text_override_HTML(client, liv | ||||
|         url_for("settings_page"), | ||||
|         data={"application-notification_urls": notification_url, | ||||
|               "application-notification_title": "fallback-title " + default_notification_title, | ||||
|               "application-notification_body": default_notification_body, | ||||
|               "application-notification_body": notification_body, | ||||
|               "application-notification_format": 'Text', | ||||
|               "requests-time_between_check-minutes": 180, | ||||
|               'application-fetch_backend': "html_requests"}, | ||||
| @@ -161,5 +172,10 @@ def test_check_notification_email_formats_default_Text_override_HTML(client, liv | ||||
|     assert 'Content-Type: text/html' in msg | ||||
|     assert '(removed) So let\'s see what happens.<br>' in msg  # the html part | ||||
|  | ||||
|     # https://github.com/dgtlmoon/changedetection.io/issues/2103 | ||||
|     assert '<h1>Test</h1>' in msg | ||||
|     assert '<' not in msg | ||||
|     assert 'Content-Type: text/html' in msg | ||||
|  | ||||
|     res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True) | ||||
|     assert b'Deleted' in res.data | ||||
|   | ||||
| @@ -1,8 +1,8 @@ | ||||
| #!/usr/bin/python3 | ||||
|  | ||||
| from .util import set_original_response, set_modified_response, live_server_setup, wait_for_all_checks | ||||
| from .util import set_original_response, live_server_setup, wait_for_all_checks | ||||
| from flask import url_for | ||||
| from urllib.request import urlopen | ||||
| import io | ||||
| from zipfile import ZipFile | ||||
| import re | ||||
| import time | ||||
| @@ -37,15 +37,10 @@ def test_backup(client, live_server): | ||||
|     # Should be PK/ZIP stream | ||||
|     assert res.data.count(b'PK') >= 2 | ||||
|  | ||||
|     # ZipFile from buffer seems non-obvious, just save it instead | ||||
|     with open("download.zip", 'wb') as f: | ||||
|         f.write(res.data) | ||||
|  | ||||
|     zip = ZipFile('download.zip') | ||||
|     l = zip.namelist() | ||||
|     backup = ZipFile(io.BytesIO(res.data)) | ||||
|     l = backup.namelist() | ||||
|     uuid4hex = re.compile('^[a-f0-9]{8}-?[a-f0-9]{4}-?4[a-f0-9]{3}-?[89ab][a-f0-9]{3}-?[a-f0-9]{12}.*txt', re.I) | ||||
|     newlist = list(filter(uuid4hex.match, l))  # Read Note below | ||||
|  | ||||
|     # Should be two txt files in the archive (history and the snapshot) | ||||
|     assert len(newlist) == 2 | ||||
|  | ||||
|   | ||||
| @@ -1,8 +1,7 @@ | ||||
| import os | ||||
| import time | ||||
| import re | ||||
| from flask import url_for | ||||
| from . util import set_original_response, set_modified_response, live_server_setup | ||||
| from .util import set_original_response, set_modified_response, live_server_setup, wait_for_all_checks | ||||
| import logging | ||||
|  | ||||
| def test_check_notification_error_handling(client, live_server): | ||||
| @@ -11,7 +10,7 @@ def test_check_notification_error_handling(client, live_server): | ||||
|     set_original_response() | ||||
|  | ||||
|     # Give the endpoint time to spin up | ||||
|     time.sleep(2) | ||||
|     time.sleep(1) | ||||
|  | ||||
|     # Set a URL and fetch it, then set a notification URL which is going to give errors | ||||
|     test_url = url_for('test_endpoint', _external=True) | ||||
| @@ -22,12 +21,16 @@ def test_check_notification_error_handling(client, live_server): | ||||
|     ) | ||||
|     assert b"Watch added" in res.data | ||||
|  | ||||
|     time.sleep(2) | ||||
|     wait_for_all_checks(client) | ||||
|     set_modified_response() | ||||
|  | ||||
|     working_notification_url = url_for('test_notification_endpoint', _external=True).replace('http', 'json') | ||||
|     broken_notification_url = "jsons://broken-url-xxxxxxxx123/test" | ||||
|  | ||||
|     res = client.post( | ||||
|         url_for("edit_page", uuid="first"), | ||||
|         data={"notification_urls": "jsons://broken-url-xxxxxxxx123/test", | ||||
|         # A URL with errors should not block the one that is working | ||||
|         data={"notification_urls": f"{broken_notification_url}\r\n{working_notification_url}", | ||||
|               "notification_title": "xxx", | ||||
|               "notification_body": "xxxxx", | ||||
|               "notification_format": "Text", | ||||
| @@ -63,4 +66,10 @@ def test_check_notification_error_handling(client, live_server): | ||||
|     found_name_resolution_error = b"Temporary failure in name resolution" in res.data or b"Name or service not known" in res.data | ||||
|     assert found_name_resolution_error | ||||
|  | ||||
|     # And the working one, which is after the 'broken' one should still have fired | ||||
|     with open("test-datastore/notification.txt", "r") as f: | ||||
|         notification_submission = f.read() | ||||
|     os.unlink("test-datastore/notification.txt") | ||||
|     assert 'xxxxx' in notification_submission | ||||
|  | ||||
|     client.get(url_for("form_delete", uuid="all"), follow_redirects=True) | ||||
|   | ||||
| @@ -12,14 +12,13 @@ from .processors.restock_diff import UnableToExtractRestockData | ||||
| # Requests for checking on a single site(watch) from a queue of watches | ||||
| # (another process inserts watches into the queue that are time-ready for checking) | ||||
|  | ||||
| import logging | ||||
| import sys | ||||
| from loguru import logger | ||||
|  | ||||
| class update_worker(threading.Thread): | ||||
|     current_uuid = None | ||||
|  | ||||
|     def __init__(self, q, notification_q, app, datastore, *args, **kwargs): | ||||
|         logging.basicConfig(stream=sys.stderr, level=logging.DEBUG) | ||||
|         self.q = q | ||||
|         self.app = app | ||||
|         self.notification_q = notification_q | ||||
| @@ -78,8 +77,8 @@ class update_worker(threading.Thread): | ||||
|             'uuid': watch.get('uuid') if watch else None, | ||||
|             'watch_url': watch.get('url') if watch else None, | ||||
|         }) | ||||
|         logging.info(">> SENDING NOTIFICATION") | ||||
|  | ||||
|         logger.debug(">> SENDING NOTIFICATION") | ||||
|         notification_q.put(n_object) | ||||
|  | ||||
|     # Prefer - Individual watch settings > Tag settings >  Global settings (in that order) | ||||
| @@ -180,7 +179,7 @@ class update_worker(threading.Thread): | ||||
|                 'screenshot': None | ||||
|             }) | ||||
|             self.notification_q.put(n_object) | ||||
|             print("Sent filter not found notification for {}".format(watch_uuid)) | ||||
|             logger.error(f"Sent filter not found notification for {watch_uuid}") | ||||
|  | ||||
|     def send_step_failure_notification(self, watch_uuid, step_n): | ||||
|         watch = self.datastore.data['watching'].get(watch_uuid, False) | ||||
| @@ -207,7 +206,7 @@ class update_worker(threading.Thread): | ||||
|                 'uuid': watch_uuid | ||||
|             }) | ||||
|             self.notification_q.put(n_object) | ||||
|             print("Sent step not found notification for {}".format(watch_uuid)) | ||||
|             logger.error(f"Sent step not found notification for {watch_uuid}") | ||||
|  | ||||
|  | ||||
|     def cleanup_error_artifacts(self, uuid): | ||||
| @@ -221,7 +220,8 @@ class update_worker(threading.Thread): | ||||
|     def run(self): | ||||
|  | ||||
|         from .processors import text_json_diff, restock_diff | ||||
|  | ||||
|         now = time.time() | ||||
|          | ||||
|         while not self.app.config.exit.is_set(): | ||||
|             update_handler = None | ||||
|  | ||||
| @@ -233,14 +233,14 @@ class update_worker(threading.Thread): | ||||
|             else: | ||||
|                 uuid = queued_item_data.item.get('uuid') | ||||
|                 self.current_uuid = uuid | ||||
|  | ||||
|                 if uuid in list(self.datastore.data['watching'].keys()) and self.datastore.data['watching'][uuid].get('url'): | ||||
|                     changed_detected = False | ||||
|                     contents = b'' | ||||
|                     process_changedetection_results = True | ||||
|                     update_obj = {} | ||||
|                     print("> Processing UUID {} Priority {} URL {}".format(uuid, queued_item_data.priority, | ||||
|                                                                            self.datastore.data['watching'][uuid]['url'])) | ||||
|                     logger.info(f"Processing watch UUID {uuid} " | ||||
|                             f"Priority {queued_item_data.priority} " | ||||
|                             f"URL {self.datastore.data['watching'][uuid]['url']}") | ||||
|                     now = time.time() | ||||
|  | ||||
|                     try: | ||||
| @@ -259,6 +259,13 @@ class update_worker(threading.Thread): | ||||
|                             update_handler = restock_diff.perform_site_check(datastore=self.datastore, | ||||
|                                                                              watch_uuid=uuid | ||||
|                                                                              ) | ||||
|                         elif processor.startswith('plugin_processor_'): | ||||
|                             from .flask_app import get_plugin_manager | ||||
|                             pm = get_plugin_manager() | ||||
|                             x = pm.hook.processor_call(processor_name=processor, datastore=self.datastore, watch_uuid=uuid) | ||||
|                             if x: | ||||
|                                 update_handler = x | ||||
|  | ||||
|                         else: | ||||
|                             # Used as a default and also by some tests | ||||
|                             update_handler = text_json_diff.perform_site_check(datastore=self.datastore, | ||||
| @@ -280,7 +287,8 @@ class update_worker(threading.Thread): | ||||
|                         if not isinstance(contents, (bytes, bytearray)): | ||||
|                             raise Exception("Error - returned data from the fetch handler SHOULD be bytes") | ||||
|                     except PermissionError as e: | ||||
|                         self.app.logger.error("File permission error updating", uuid, str(e)) | ||||
|                         logger.critical(f"File permission error updating file, watch: {uuid}") | ||||
|                         logger.critical(str(e)) | ||||
|                         process_changedetection_results = False | ||||
|                     except content_fetcher.ReplyWithContentButNoText as e: | ||||
|                         # Totally fine, it's by choice - just continue on, nothing more to care about | ||||
| @@ -338,7 +346,7 @@ class update_worker(threading.Thread): | ||||
|                             # Send notification if we reached the threshold? | ||||
|                             threshold = self.datastore.data['settings']['application'].get('filter_failure_notification_threshold_attempts', | ||||
|                                                                                            0) | ||||
|                             print("Filter for {} not found, consecutive_filter_failures: {}".format(uuid, c)) | ||||
|                             logger.error(f"Filter for {uuid} not found, consecutive_filter_failures: {c}") | ||||
|                             if threshold > 0 and c >= threshold: | ||||
|                                 if not self.datastore.data['watching'][uuid].get('notification_muted'): | ||||
|                                     self.send_filter_failure_notification(uuid) | ||||
| @@ -372,7 +380,7 @@ class update_worker(threading.Thread): | ||||
|                             # Other Error, more info is good. | ||||
|                             err_text += " " + str(e.original_e).splitlines()[0] | ||||
|  | ||||
|                         print(f"BrowserSteps exception at step {error_step}", str(e.original_e)) | ||||
|                         logger.debug(f"BrowserSteps exception at step {error_step} {str(e.original_e)}") | ||||
|  | ||||
|                         self.datastore.update_watch(uuid=uuid, | ||||
|                                                     update_obj={'last_error': err_text, | ||||
| @@ -386,7 +394,7 @@ class update_worker(threading.Thread): | ||||
|                             # Send notification if we reached the threshold? | ||||
|                             threshold = self.datastore.data['settings']['application'].get('filter_failure_notification_threshold_attempts', | ||||
|                                                                                            0) | ||||
|                             print("Step for {} not found, consecutive_filter_failures: {}".format(uuid, c)) | ||||
|                             logger.error(f"Step for {uuid} not found, consecutive_filter_failures: {c}") | ||||
|                             if threshold > 0 and c >= threshold: | ||||
|                                 if not self.datastore.data['watching'][uuid].get('notification_muted'): | ||||
|                                     self.send_step_failure_notification(watch_uuid=uuid, step_n=e.step_n) | ||||
| @@ -428,11 +436,13 @@ class update_worker(threading.Thread): | ||||
|                         process_changedetection_results = False | ||||
|                     except UnableToExtractRestockData as e: | ||||
|                         # Usually when fetcher.instock_data returns empty | ||||
|                         self.app.logger.error("Exception reached processing watch UUID: %s - %s", uuid, str(e)) | ||||
|                         logger.error(f"Exception (UnableToExtractRestockData) reached processing watch UUID: {uuid}") | ||||
|                         logger.error(str(e)) | ||||
|                         self.datastore.update_watch(uuid=uuid, update_obj={'last_error': f"Unable to extract restock data for this page unfortunately. (Got code {e.status_code} from server)"}) | ||||
|                         process_changedetection_results = False | ||||
|                     except Exception as e: | ||||
|                         self.app.logger.error("Exception reached processing watch UUID: %s - %s", uuid, str(e)) | ||||
|                         logger.error(f"Exception reached processing watch UUID: {uuid}") | ||||
|                         logger.error(str(e)) | ||||
|                         self.datastore.update_watch(uuid=uuid, update_obj={'last_error': str(e)}) | ||||
|                         # Other serious error | ||||
|                         process_changedetection_results = False | ||||
| @@ -468,7 +478,7 @@ class update_worker(threading.Thread): | ||||
|  | ||||
|                             # A change was detected | ||||
|                             if changed_detected: | ||||
|                                 print (">> Change detected in UUID {} - {}".format(uuid, watch['url'])) | ||||
|                                 logger.debug(f">> Change detected in UUID {uuid} - {watch['url']}") | ||||
|  | ||||
|                                 # Notifications should only trigger on the second time (first time, we gather the initial snapshot) | ||||
|                                 if watch.history_n >= 2: | ||||
| @@ -478,8 +488,8 @@ class update_worker(threading.Thread): | ||||
|  | ||||
|                         except Exception as e: | ||||
|                             # Catch everything possible here, so that if a worker crashes, we don't lose it until restart! | ||||
|                             print("!!!! Exception in update_worker !!!\n", e) | ||||
|                             self.app.logger.error("Exception reached processing watch UUID: %s - %s", uuid, str(e)) | ||||
|                             logger.critical("!!!! Exception in update_worker while processing process_changedetection_results !!!") | ||||
|                             logger.critical(str(e)) | ||||
|                             self.datastore.update_watch(uuid=uuid, update_obj={'last_error': str(e)}) | ||||
|  | ||||
|                     if self.datastore.data['watching'].get(uuid): | ||||
| @@ -499,6 +509,7 @@ class update_worker(threading.Thread): | ||||
|  | ||||
|                 self.current_uuid = None  # Done | ||||
|                 self.q.task_done() | ||||
|                 logger.debug(f"Watch {uuid} done in {time.time()-now:.2f}s") | ||||
|  | ||||
|                 # Give the CPU time to interrupt | ||||
|                 time.sleep(0.1) | ||||
|   | ||||
| @@ -16,6 +16,10 @@ services: | ||||
|   #      - PUID=1000 | ||||
|   #      - PGID=1000 | ||||
|   # | ||||
|   #        Log levels are in descending order. (TRACE is the most detailed one) | ||||
|   #        Log output levels: TRACE, DEBUG(default), INFO, SUCCESS, WARNING, ERROR, CRITICAL | ||||
|   #      - LOGGER_LEVEL=DEBUG | ||||
|   # | ||||
|   #       Alternative WebDriver/selenium URL, do not use "'s or 's! | ||||
|   #      - WEBDRIVER_URL=http://browser-chrome:4444/wd/hub | ||||
|   # | ||||
| @@ -91,6 +95,7 @@ services: | ||||
|  | ||||
|      # Used for fetching pages via Playwright+Chrome where you need Javascript support. | ||||
|      # Note: works well but is deprecated, does not fetch full page screenshots (doesnt work with Visual Selector) and other issues | ||||
|      # More information about the advantages of playwright/browserless https://www.browserless.io/blog/2023/12/13/migrating-selenium-to-playwright/ | ||||
| #    browser-chrome: | ||||
| #        hostname: browser-chrome | ||||
| #        image: selenium/standalone-chrome:4 | ||||
|   | ||||
| @@ -72,3 +72,6 @@ pytest-flask ~=1.2 | ||||
|  | ||||
| # Pin jsonschema version to prevent build errors on armv6 while rpds-py wheels aren't available (1708) | ||||
| jsonschema==4.17.3 | ||||
|  | ||||
| pluggy | ||||
| loguru | ||||
|   | ||||
		Reference in New Issue
	
	Block a user