Mirror of https://github.com/dgtlmoon/changedetection.io.git
Synced 2025-10-31 14:47:21 +00:00
Compare commits
1 commit: browser-no ... armv7-buil
| Author | SHA1 | Date |
|---|---|---|
|  | c0381b8c60 |  |
```
@@ -33,6 +33,7 @@ venv/
# Test and development files
test-datastore/
tests/
docs/
*.md
!README.md

```
.github/test/Dockerfile-alpine (6 changes, vendored)
```
@@ -2,7 +2,7 @@
# Test that we can still build on Alpine (musl modified libc https://musl.libc.org/)
# Some packages wont install via pypi because they dont have a wheel available under this architecture.

FROM ghcr.io/linuxserver/baseimage-alpine:3.22
FROM ghcr.io/linuxserver/baseimage-alpine:3.21
ENV PYTHONUNBUFFERED=1

COPY requirements.txt /requirements.txt
@@ -18,19 +18,17 @@ RUN \
    libxslt-dev \
    openssl-dev \
    python3-dev \
    file \
    zip \
    zlib-dev && \
  apk add --update --no-cache \
    libjpeg \
    libxslt \
    file \
    nodejs \
    poppler-utils \
    python3 && \
  echo "**** pip3 install test of changedetection.io ****" && \
  python3 -m venv /lsiopy  && \
  pip install -U pip wheel setuptools && \
  pip install -U --no-cache-dir --find-links https://wheel-index.linuxserver.io/alpine-3.22/ -r /requirements.txt && \
  pip install -U --no-cache-dir --find-links https://wheel-index.linuxserver.io/alpine-3.21/ -r /requirements.txt && \
  apk del --purge \
    build-dependencies
```
.github/workflows/codeql-analysis.yml (2 changes, vendored)
```
@@ -30,7 +30,7 @@ jobs:

    steps:
    - name: Checkout repository
      uses: actions/checkout@v5
      uses: actions/checkout@v4

    # Initializes the CodeQL tools for scanning.
    - name: Initialize CodeQL
```
.github/workflows/containers.yml (4 changes, vendored)
```
@@ -39,9 +39,9 @@ jobs:
    # Or if we are in a tagged release scenario.
    if: ${{ github.event.workflow_run.conclusion == 'success' }} || ${{ github.event.release.tag_name }} != ''
    steps:
      - uses: actions/checkout@v5
      - uses: actions/checkout@v4
      - name: Set up Python 3.11
        uses: actions/setup-python@v6
        uses: actions/setup-python@v5
        with:
          python-version: 3.11

```
.github/workflows/pypi-release.yml (10 changes, vendored)
```
@@ -7,9 +7,9 @@ jobs:
    runs-on: ubuntu-latest

    steps:
    - uses: actions/checkout@v5
    - uses: actions/checkout@v4
    - name: Set up Python
      uses: actions/setup-python@v6
      uses: actions/setup-python@v5
      with:
        python-version: "3.11"
    - name: Install pypa/build
@@ -34,12 +34,12 @@ jobs:
    - build
    steps:
    - name: Download all the dists
      uses: actions/download-artifact@v5
      uses: actions/download-artifact@v4
      with:
        name: python-package-distributions
        path: dist/
    - name: Set up Python 3.11
      uses: actions/setup-python@v6
      uses: actions/setup-python@v5
      with:
        python-version: '3.11'
    - name: Test that the basic pip built package runs without error
@@ -72,7 +72,7 @@ jobs:

    steps:
    - name: Download all the dists
      uses: actions/download-artifact@v5
      uses: actions/download-artifact@v4
      with:
        name: python-package-distributions
        path: dist/
```
.github/workflows/test-container-build.yml (4 changes, vendored)
```
@@ -46,9 +46,9 @@ jobs:
          - platform: linux/arm64
            dockerfile: ./.github/test/Dockerfile-alpine
    steps:
        - uses: actions/checkout@v5
        - uses: actions/checkout@v4
        - name: Set up Python 3.11
          uses: actions/setup-python@v6
          uses: actions/setup-python@v5
          with:
            python-version: 3.11

```
.github/workflows/test-only.yml (6 changes, vendored)
```
@@ -7,7 +7,7 @@ jobs:
  lint-code:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v5
      - uses: actions/checkout@v4
      - name: Lint with Ruff
        run: |
          pip install ruff
@@ -15,10 +15,6 @@ jobs:
          ruff check . --select E9,F63,F7,F82
          # Complete check with errors treated as warnings
          ruff check . --exit-zero
      - name: Validate OpenAPI spec
        run: |
          pip install openapi-spec-validator
          python3 -c "from openapi_spec_validator import validate_spec; import yaml; validate_spec(yaml.safe_load(open('docs/api-spec.yaml')))"

  test-application-3-10:
    needs: lint-code
```
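The "Validate OpenAPI spec" step that only one side of this comparison carries is essentially a one-liner. For readers who want to run the same check outside CI, a minimal standalone sketch (assuming `openapi-spec-validator` and PyYAML are installed and `docs/api-spec.yaml` is present in the checkout) could look like this:

```python
# Minimal sketch of the spec check from the workflow step above.
# Assumptions: `pip install openapi-spec-validator pyyaml` has been run and docs/api-spec.yaml exists.
import yaml
from openapi_spec_validator import validate_spec  # same entry point the workflow step calls

with open("docs/api-spec.yaml") as f:
    spec = yaml.safe_load(f)

validate_spec(spec)  # raises if the document is not a valid OpenAPI spec
print("docs/api-spec.yaml passed OpenAPI validation")
```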
```
@@ -20,11 +20,11 @@ jobs:
    env:
      PYTHON_VERSION: ${{ inputs.python-version }}
    steps:
      - uses: actions/checkout@v5
      - uses: actions/checkout@v4

      # Mainly just for link/flake8
      - name: Set up Python ${{ env.PYTHON_VERSION }}
        uses: actions/setup-python@v6
        uses: actions/setup-python@v5
        with:
          python-version: ${{ env.PYTHON_VERSION }}

@@ -71,7 +71,6 @@ jobs:
          docker run test-changedetectionio  bash -c 'python3 -m unittest changedetectionio.tests.unit.test_watch_model'
          docker run test-changedetectionio  bash -c 'python3 -m unittest changedetectionio.tests.unit.test_jinja2_security'
          docker run test-changedetectionio  bash -c 'python3 -m unittest changedetectionio.tests.unit.test_semver'
          docker run test-changedetectionio  bash -c 'python3 -m unittest changedetectionio.tests.unit.test_browser_notifications'

      - name: Test built container with Pytest (generally as requests/plaintext fetching)
        run: |
```
Dockerfile (20 changes)
```
@@ -5,6 +5,7 @@ ARG PYTHON_VERSION=3.11
FROM python:${PYTHON_VERSION}-slim-bookworm AS builder

# See `cryptography` pin comment in requirements.txt
ARG CRYPTOGRAPHY_DONT_BUILD_RUST=1

RUN apt-get update && apt-get install -y --no-install-recommends \
    g++ \
@@ -16,7 +17,6 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
    libxslt-dev \
    make \
    patch \
    pkg-config \
    zlib1g-dev

RUN mkdir /install
@@ -26,14 +26,6 @@ COPY requirements.txt /requirements.txt

# Use cache mounts and multiple wheel sources for faster ARM builds
ENV PIP_CACHE_DIR=/tmp/pip-cache
# Help Rust find OpenSSL for cryptography package compilation on ARM
ENV PKG_CONFIG_PATH="/usr/lib/pkgconfig:/usr/lib/arm-linux-gnueabihf/pkgconfig:/usr/lib/aarch64-linux-gnu/pkgconfig"
ENV PKG_CONFIG_ALLOW_SYSTEM_CFLAGS=1
ENV OPENSSL_DIR="/usr"
ENV OPENSSL_LIB_DIR="/usr/lib/arm-linux-gnueabihf"
ENV OPENSSL_INCLUDE_DIR="/usr/include/openssl"
# Additional environment variables for cryptography Rust build
ENV CRYPTOGRAPHY_DONT_BUILD_RUST=1
RUN --mount=type=cache,target=/tmp/pip-cache \
    pip install \
    --extra-index-url https://www.piwheels.org/simple \
@@ -62,8 +54,6 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
    locales \
    # For pdftohtml
    poppler-utils \
    # favicon type detection and other uses
    file \
    zlib1g \
    && apt-get clean && rm -rf /var/lib/apt/lists/*

@@ -84,11 +74,6 @@ EXPOSE 5000

# The actual flask app module
COPY changedetectionio /app/changedetectionio

# Also for OpenAPI validation wrapper - needs the YML
RUN [ ! -d "/app/docs" ] && mkdir /app/docs
COPY docs/api-spec.yaml /app/docs/api-spec.yaml

# Starting wrapper
COPY changedetection.py /app/changedetection.py

@@ -97,9 +82,6 @@ COPY changedetection.py /app/changedetection.py
ARG LOGGER_LEVEL=''
ENV LOGGER_LEVEL="$LOGGER_LEVEL"

# Default
ENV LC_ALL=en_US.UTF-8

WORKDIR /app
CMD ["python", "./changedetection.py", "-d", "/datastore"]

```
```
@@ -1,7 +1,7 @@
recursive-include changedetectionio/api *
recursive-include changedetectionio/blueprint *
recursive-include changedetectionio/conditions *
recursive-include changedetectionio/content_fetchers *
recursive-include changedetectionio/conditions *
recursive-include changedetectionio/model *
recursive-include changedetectionio/notification *
recursive-include changedetectionio/processors *
@@ -9,7 +9,6 @@ recursive-include changedetectionio/realtime *
recursive-include changedetectionio/static *
recursive-include changedetectionio/templates *
recursive-include changedetectionio/tests *
recursive-include changedetectionio/widgets *
prune changedetectionio/static/package-lock.json
prune changedetectionio/static/styles/node_modules
prune changedetectionio/static/styles/package-lock.json
```
```
@@ -280,10 +280,7 @@ Excel import is recommended - that way you can better organise tags/groups of we

## API Support

Full REST API for programmatic management of watches, tags, notifications and more. 

- **[Interactive API Documentation](https://changedetection.io/docs/api_v1/index.html)** - Complete API reference with live testing
- **[OpenAPI Specification](docs/api-spec.yaml)** - Generate SDKs for any programming language
Supports managing the website watch list [via our API](https://changedetection.io/docs/api_v1/index.html)

## Support us

```
```
@@ -2,7 +2,7 @@

# Read more https://github.com/dgtlmoon/changedetection.io/wiki

__version__ = '0.50.14'
__version__ = '0.50.5'

from changedetectionio.strtobool import strtobool
from json.decoder import JSONDecodeError
@@ -35,22 +35,13 @@ def sigshutdown_handler(_signo, _stack_frame):
    app.config.exit.set()
    datastore.stop_thread = True

    # Shutdown workers and queues immediately
    # Shutdown workers immediately
    try:
        from changedetectionio import worker_handler
        worker_handler.shutdown_workers()
    except Exception as e:
        logger.error(f"Error shutting down workers: {str(e)}")

    # Close janus queues properly
    try:
        from changedetectionio.flask_app import update_q, notification_q
        update_q.close()
        notification_q.close()
        logger.debug("Janus queues closed successfully")
    except Exception as e:
        logger.critical(f"CRITICAL: Failed to close janus queues: {e}")

    # Shutdown socketio server fast
    from changedetectionio.flask_app import socketio_server
    if socketio_server and hasattr(socketio_server, 'shutdown'):
```
```
@@ -3,7 +3,7 @@ from changedetectionio.strtobool import strtobool
from flask_restful import abort, Resource
from flask import request
import validators
from . import auth, validate_openapi_request
from . import auth


class Import(Resource):
@@ -12,9 +12,17 @@ class Import(Resource):
        self.datastore = kwargs['datastore']

    @auth.check_token
    @validate_openapi_request('importWatches')
    def post(self):
        """Import a list of watched URLs."""
        """
        @api {post} /api/v1/import Import a list of watched URLs
        @apiDescription Accepts a line-feed separated list of URLs to import, additionally with ?tag_uuids=(tag  id), ?tag=(name), ?proxy={key}, ?dedupe=true (default true) one URL per line.
        @apiExample {curl} Example usage:
            curl http://localhost:5000/api/v1/import --data-binary @list-of-sites.txt -H"x-api-key:8a111a21bc2f8f1dd9b9353bbd46049a"
        @apiName Import
        @apiGroup Watch
        @apiSuccess (200) {List} OK List of watch UUIDs added
        @apiSuccess (500) {String} ERR Some other error
        """

        extras = {}

```
```
@@ -1,7 +1,9 @@
from flask_expects_json import expects_json
from flask_restful import Resource
from . import auth
from flask_restful import abort, Resource
from flask import request
from . import auth, validate_openapi_request
from . import auth
from . import schema_create_notification_urls, schema_delete_notification_urls

class Notifications(Resource):
@@ -10,9 +12,19 @@ class Notifications(Resource):
        self.datastore = kwargs['datastore']

    @auth.check_token
    @validate_openapi_request('getNotifications')
    def get(self):
        """Return Notification URL List."""
        """
        @api {get} /api/v1/notifications Return Notification URL List
        @apiDescription Return the Notification URL List from the configuration
        @apiExample {curl} Example usage:
            curl http://localhost:5000/api/v1/notifications -H"x-api-key:813031b16330fe25e3780cf0325daa45"
            HTTP/1.0 200
            {
                'notification_urls': ["notification-urls-list"]
            }
        @apiName Get
        @apiGroup Notifications
        """

        notification_urls = self.datastore.data.get('settings', {}).get('application', {}).get('notification_urls', [])

@@ -21,10 +33,18 @@ class Notifications(Resource):
               }, 200

    @auth.check_token
    @validate_openapi_request('addNotifications')
    @expects_json(schema_create_notification_urls)
    def post(self):
        """Create Notification URLs."""
        """
        @api {post} /api/v1/notifications Create Notification URLs
        @apiDescription Add one or more notification URLs from the configuration
        @apiExample {curl} Example usage:
            curl http://localhost:5000/api/v1/notifications/batch -H"x-api-key:813031b16330fe25e3780cf0325daa45" -H "Content-Type: application/json" -d '{"notification_urls": ["url1", "url2"]}'
        @apiName CreateBatch
        @apiGroup Notifications
        @apiSuccess (201) {Object[]} notification_urls List of added notification URLs
        @apiError (400) {String} Invalid input
        """

        json_data = request.get_json()
        notification_urls = json_data.get("notification_urls", [])
@@ -49,10 +69,18 @@ class Notifications(Resource):
        return {'notification_urls': added_urls}, 201

    @auth.check_token
    @validate_openapi_request('replaceNotifications')
    @expects_json(schema_create_notification_urls)
    def put(self):
        """Replace Notification URLs."""
        """
        @api {put} /api/v1/notifications Replace Notification URLs
        @apiDescription Replace all notification URLs with the provided list (can be empty)
        @apiExample {curl} Example usage:
            curl -X PUT http://localhost:5000/api/v1/notifications -H"x-api-key:813031b16330fe25e3780cf0325daa45" -H "Content-Type: application/json" -d '{"notification_urls": ["url1", "url2"]}'
        @apiName Replace
        @apiGroup Notifications
        @apiSuccess (200) {Object[]} notification_urls List of current notification URLs
        @apiError (400) {String} Invalid input
        """
        json_data = request.get_json()
        notification_urls = json_data.get("notification_urls", [])

@@ -72,10 +100,19 @@ class Notifications(Resource):
        return {'notification_urls': clean_urls}, 200

    @auth.check_token
    @validate_openapi_request('deleteNotifications')
    @expects_json(schema_delete_notification_urls)
    def delete(self):
        """Delete Notification URLs."""
        """
        @api {delete} /api/v1/notifications Delete Notification URLs
        @apiDescription Deletes one or more notification URLs from the configuration
        @apiExample {curl} Example usage:
            curl http://localhost:5000/api/v1/notifications -X DELETE -H"x-api-key:813031b16330fe25e3780cf0325daa45" -H "Content-Type: application/json" -d '{"notification_urls": ["url1", "url2"]}'
        @apiParam {String[]} notification_urls The notification URLs to delete.
        @apiName Delete
        @apiGroup Notifications
        @apiSuccess (204) {String} OK Deleted
        @apiError (400) {String} No matching notification URLs found.
        """

        json_data = request.get_json()
        urls_to_delete = json_data.get("notification_urls", [])
```
```
@@ -1,6 +1,6 @@
from flask_restful import Resource, abort
from flask import request
from . import auth, validate_openapi_request
from . import auth

class Search(Resource):
    def __init__(self, **kwargs):
@@ -8,9 +8,21 @@ class Search(Resource):
        self.datastore = kwargs['datastore']

    @auth.check_token
    @validate_openapi_request('searchWatches')
    def get(self):
        """Search for watches by URL or title text."""
        """
        @api {get} /api/v1/search Search for watches
        @apiDescription Search watches by URL or title text
        @apiExample {curl} Example usage:
            curl "http://localhost:5000/api/v1/search?q=https://example.com/page1" -H"x-api-key:813031b16330fe25e3780cf0325daa45"
            curl "http://localhost:5000/api/v1/search?q=https://example.com/page1?tag=Favourites" -H"x-api-key:813031b16330fe25e3780cf0325daa45"
            curl "http://localhost:5000/api/v1/search?q=https://example.com?partial=true" -H"x-api-key:813031b16330fe25e3780cf0325daa45"
        @apiName Search
        @apiGroup Watch Management
        @apiQuery {String} q Search query to match against watch URLs and titles
        @apiQuery {String} [tag] Optional name of tag to limit results (name not UUID)
        @apiQuery {String} [partial] Allow partial matching of URL query
        @apiSuccess (200) {Object} JSON Object containing matched watches
        """
        query = request.args.get('q', '').strip()
        tag_limit = request.args.get('tag', '').strip()
        from changedetectionio.strtobool import strtobool
```
```
@@ -1,5 +1,5 @@
from flask_restful import Resource
from . import auth, validate_openapi_request
from . import auth


class SystemInfo(Resource):
@@ -9,9 +9,23 @@ class SystemInfo(Resource):
        self.update_q = kwargs['update_q']

    @auth.check_token
    @validate_openapi_request('getSystemInfo')
    def get(self):
        """Return system info."""
        """
        @api {get} /api/v1/systeminfo Return system info
        @apiDescription Return some info about the current system state
        @apiExample {curl} Example usage:
            curl http://localhost:5000/api/v1/systeminfo -H"x-api-key:813031b16330fe25e3780cf0325daa45"
            HTTP/1.0 200
            {
                'queue_size': 10 ,
                'overdue_watches': ["watch-uuid-list"],
                'uptime': 38344.55,
                'watch_count': 800,
                'version': "0.40.1"
            }
        @apiName Get Info
        @apiGroup System Information
        """
        import time
        overdue_watches = []

```
```
@@ -1,46 +1,39 @@
from changedetectionio import queuedWatchMetaData
from changedetectionio import worker_handler
from flask_expects_json import expects_json
from flask_restful import abort, Resource

from flask import request
from . import auth

# Import schemas from __init__.py
from . import schema_tag, schema_create_tag, schema_update_tag, validate_openapi_request
from . import schema_tag, schema_create_tag, schema_update_tag


class Tag(Resource):
    def __init__(self, **kwargs):
        # datastore is a black box dependency
        self.datastore = kwargs['datastore']
        self.update_q = kwargs['update_q']

    # Get information about a single tag
    # curl http://localhost:5000/api/v1/tag/<string:uuid>
    @auth.check_token
    @validate_openapi_request('getTag')
    def get(self, uuid):
        """Get data for a single tag/group, toggle notification muting, or recheck all."""
        """
        @api {get} /api/v1/tag/:uuid Single tag - get data or toggle notification muting.
        @apiDescription Retrieve tag information and set notification_muted status
        @apiExample {curl} Example usage:
            curl http://localhost:5000/api/v1/tag/cc0cfffa-f449-477b-83ea-0caafd1dc091 -H"x-api-key:813031b16330fe25e3780cf0325daa45"
            curl "http://localhost:5000/api/v1/tag/cc0cfffa-f449-477b-83ea-0caafd1dc091?muted=muted" -H"x-api-key:813031b16330fe25e3780cf0325daa45"
        @apiName Tag
        @apiGroup Tag
        @apiParam {uuid} uuid Tag unique ID.
        @apiQuery {String} [muted] =`muted` or =`unmuted` , Sets the MUTE NOTIFICATIONS state
        @apiSuccess (200) {String} OK When muted operation OR full JSON object of the tag
        @apiSuccess (200) {JSON} TagJSON JSON Full JSON object of the tag
        """
        from copy import deepcopy
        tag = deepcopy(self.datastore.data['settings']['application']['tags'].get(uuid))
        if not tag:
            abort(404, message=f'No tag exists with the UUID of {uuid}')

        if request.args.get('recheck'):
            # Recheck all, including muted
            # Get most overdue first
            i=0
            for k in sorted(self.datastore.data['watching'].items(), key=lambda item: item[1].get('last_checked', 0)):
                watch_uuid = k[0]
                watch = k[1]
                if not watch['paused'] and tag['uuid'] not in watch['tags']:
                    continue
                worker_handler.queue_item_async_safe(self.update_q, queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': watch_uuid}))
                i+=1

            return f"OK, {i} watches queued", 200

        if request.args.get('muted', '') == 'muted':
            self.datastore.data['settings']['application']['tags'][uuid]['notification_muted'] = True
            return "OK", 200
@@ -51,9 +44,16 @@ class Tag(Resource):
        return tag

    @auth.check_token
    @validate_openapi_request('deleteTag')
    def delete(self, uuid):
        """Delete a tag/group and remove it from all watches."""
        """
        @api {delete} /api/v1/tag/:uuid Delete a tag and remove it from all watches
        @apiExample {curl} Example usage:
            curl http://localhost:5000/api/v1/tag/cc0cfffa-f449-477b-83ea-0caafd1dc091 -X DELETE -H"x-api-key:813031b16330fe25e3780cf0325daa45"
        @apiParam {uuid} uuid Tag unique ID.
        @apiName DeleteTag
        @apiGroup Tag
        @apiSuccess (200) {String} OK Was deleted
        """
        if not self.datastore.data['settings']['application']['tags'].get(uuid):
            abort(400, message='No tag exists with the UUID of {}'.format(uuid))

@@ -68,10 +68,21 @@ class Tag(Resource):
        return 'OK', 204

    @auth.check_token
    @validate_openapi_request('updateTag')
    @expects_json(schema_update_tag)
    def put(self, uuid):
        """Update tag information."""
        """
        @api {put} /api/v1/tag/:uuid Update tag information
        @apiExample {curl} Example usage:
            Update (PUT)
            curl http://localhost:5000/api/v1/tag/cc0cfffa-f449-477b-83ea-0caafd1dc091 -X PUT -H"x-api-key:813031b16330fe25e3780cf0325daa45" -H "Content-Type: application/json" -d '{"title": "New Tag Title"}'

        @apiDescription Updates an existing tag using JSON
        @apiParam {uuid} uuid Tag unique ID.
        @apiName UpdateTag
        @apiGroup Tag
        @apiSuccess (200) {String} OK Was updated
        @apiSuccess (500) {String} ERR Some other error
        """
        tag = self.datastore.data['settings']['application']['tags'].get(uuid)
        if not tag:
            abort(404, message='No tag exists with the UUID of {}'.format(uuid))
@@ -83,10 +94,17 @@ class Tag(Resource):


    @auth.check_token
    @validate_openapi_request('createTag')
    # Only cares for {'title': 'xxxx'}
    def post(self):
        """Create a single tag/group."""
        """
        @api {post} /api/v1/watch Create a single tag
        @apiExample {curl} Example usage:
            curl http://localhost:5000/api/v1/watch -H"x-api-key:813031b16330fe25e3780cf0325daa45" -H "Content-Type: application/json" -d '{"name": "Work related"}'
        @apiName Create
        @apiGroup Tag
        @apiSuccess (200) {String} OK Was created
        @apiSuccess (500) {String} ERR Some other error
        """

        json_data = request.get_json()
        title = json_data.get("title",'').strip()
@@ -104,9 +122,28 @@ class Tags(Resource):
        self.datastore = kwargs['datastore']

    @auth.check_token
    @validate_openapi_request('listTags')
    def get(self):
        """List tags/groups."""
        """
        @api {get} /api/v1/tags List tags
        @apiDescription Return list of available tags
        @apiExample {curl} Example usage:
            curl http://localhost:5000/api/v1/tags -H"x-api-key:813031b16330fe25e3780cf0325daa45"
            {
                "cc0cfffa-f449-477b-83ea-0caafd1dc091": {
                    "title": "Tech News",
                    "notification_muted": false,
                    "date_created": 1677103794
                },
                "e6f5fd5c-dbfe-468b-b8f3-f9d6ff5ad69b": {
                    "title": "Shopping",
                    "notification_muted": true,
                    "date_created": 1676662819
                }
            }
        @apiName ListTags
        @apiGroup Tag Management
        @apiSuccess (200) {String} OK JSON dict
        """
        result = {}
        for uuid, tag in self.datastore.data['settings']['application']['tags'].items():
            result[uuid] = {
```
```
@@ -11,40 +11,7 @@ from . import auth
import copy

# Import schemas from __init__.py
from . import schema, schema_create_watch, schema_update_watch, validate_openapi_request


def validate_time_between_check_required(json_data):
    """
    Validate that at least one time interval is specified when not using default settings.
    Returns None if valid, or error message string if invalid.
    Defaults to using global settings if time_between_check_use_default is not provided.
    """
    # Default to using global settings if not specified
    use_default = json_data.get('time_between_check_use_default', True)

    # If using default settings, no validation needed
    if use_default:
        return None

    # If not using defaults, check if time_between_check exists and has at least one non-zero value
    time_check = json_data.get('time_between_check')
    if not time_check:
        # No time_between_check provided and not using defaults - this is an error
        return "At least one time interval (weeks, days, hours, minutes, or seconds) must be specified when not using global settings."

    # time_between_check exists, check if it has at least one non-zero value
    if any([
        (time_check.get('weeks') or 0) > 0,
        (time_check.get('days') or 0) > 0,
        (time_check.get('hours') or 0) > 0,
        (time_check.get('minutes') or 0) > 0,
        (time_check.get('seconds') or 0) > 0
    ]):
        return None

    # time_between_check exists but all values are 0 or empty - this is an error
    return "At least one time interval (weeks, days, hours, minutes, or seconds) must be specified when not using global settings."
from . import schema, schema_create_watch, schema_update_watch


class Watch(Resource):
@@ -58,9 +25,23 @@ class Watch(Resource):
    # @todo - version2 - ?muted and ?paused should be able to be called together, return the watch struct not "OK"
    # ?recheck=true
    @auth.check_token
    @validate_openapi_request('getWatch')
    def get(self, uuid):
        """Get information about a single watch, recheck, pause, or mute."""
        """
        @api {get} /api/v1/watch/:uuid Single watch - get data, recheck, pause, mute.
        @apiDescription Retrieve watch information and set muted/paused status
        @apiExample {curl} Example usage:
            curl http://localhost:5000/api/v1/watch/cc0cfffa-f449-477b-83ea-0caafd1dc091  -H"x-api-key:813031b16330fe25e3780cf0325daa45"
            curl "http://localhost:5000/api/v1/watch/cc0cfffa-f449-477b-83ea-0caafd1dc091?muted=unmuted"  -H"x-api-key:813031b16330fe25e3780cf0325daa45"
            curl "http://localhost:5000/api/v1/watch/cc0cfffa-f449-477b-83ea-0caafd1dc091?paused=unpaused"  -H"x-api-key:813031b16330fe25e3780cf0325daa45"
        @apiName Watch
        @apiGroup Watch
        @apiParam {uuid} uuid Watch unique ID.
        @apiQuery {Boolean} [recheck] Recheck this watch `recheck=1`
        @apiQuery {String} [paused] =`paused` or =`unpaused` , Sets the PAUSED state
        @apiQuery {String} [muted] =`muted` or =`unmuted` , Sets the MUTE NOTIFICATIONS state
        @apiSuccess (200) {String} OK When paused/muted/recheck operation OR full JSON object of the watch
        @apiSuccess (200) {JSON} WatchJSON JSON Full JSON object of the watch
        """
        from copy import deepcopy
        watch = deepcopy(self.datastore.data['watching'].get(uuid))
        if not watch:
@@ -88,14 +69,19 @@ class Watch(Resource):
        # attr .last_changed will check for the last written text snapshot on change
        watch['last_changed'] = watch.last_changed
        watch['viewed'] = watch.viewed
        watch['link'] = watch.link,

        return watch

    @auth.check_token
    @validate_openapi_request('deleteWatch')
    def delete(self, uuid):
        """Delete a watch and related history."""
        """
        @api {delete} /api/v1/watch/:uuid Delete a watch and related history
        @apiExample {curl} Example usage:
            curl http://localhost:5000/api/v1/watch/cc0cfffa-f449-477b-83ea-0caafd1dc091 -X DELETE -H"x-api-key:813031b16330fe25e3780cf0325daa45"
        @apiParam {uuid} uuid Watch unique ID.
        @apiName Delete
        @apiGroup Watch
        @apiSuccess (200) {String} OK Was deleted
        """
        if not self.datastore.data['watching'].get(uuid):
            abort(400, message='No watch exists with the UUID of {}'.format(uuid))

@@ -103,10 +89,21 @@ class Watch(Resource):
        return 'OK', 204

    @auth.check_token
    @validate_openapi_request('updateWatch')
    @expects_json(schema_update_watch)
    def put(self, uuid):
        """Update watch information."""
        """
        @api {put} /api/v1/watch/:uuid Update watch information
        @apiExample {curl} Example usage:
            Update (PUT)
            curl http://localhost:5000/api/v1/watch/cc0cfffa-f449-477b-83ea-0caafd1dc091 -X PUT -H"x-api-key:813031b16330fe25e3780cf0325daa45" -H "Content-Type: application/json" -d '{"url": "https://my-nice.com" , "tag": "new list"}'

        @apiDescription Updates an existing watch using JSON, accepts the same structure as returned in <a href="#api-Watch-Watch">get single watch information</a>
        @apiParam {uuid} uuid Watch unique ID.
        @apiName Update a watch
        @apiGroup Watch
        @apiSuccess (200) {String} OK Was updated
        @apiSuccess (500) {String} ERR Some other error
        """
        watch = self.datastore.data['watching'].get(uuid)
        if not watch:
            abort(404, message='No watch exists with the UUID of {}'.format(uuid))
@@ -116,11 +113,6 @@ class Watch(Resource):
            if not request.json.get('proxy') in plist:
                return "Invalid proxy choice, currently supported proxies are '{}'".format(', '.join(plist)), 400

        # Validate time_between_check when not using defaults
        validation_error = validate_time_between_check_required(request.json)
        if validation_error:
            return validation_error, 400

        watch.update(request.json)

        return "OK", 200
@@ -134,9 +126,22 @@ class WatchHistory(Resource):
    # Get a list of available history for a watch by UUID
    # curl http://localhost:5000/api/v1/watch/<string:uuid>/history
    @auth.check_token
    @validate_openapi_request('getWatchHistory')
    def get(self, uuid):
        """Get a list of all historical snapshots available for a watch."""
        """
        @api {get} /api/v1/watch/<string:uuid>/history Get a list of all historical snapshots available for a watch
        @apiDescription Requires `uuid`, returns list
        @apiExample {curl} Example usage:
            curl http://localhost:5000/api/v1/watch/cc0cfffa-f449-477b-83ea-0caafd1dc091/history -H"x-api-key:813031b16330fe25e3780cf0325daa45" -H "Content-Type: application/json"
            {
                "1676649279": "/tmp/data/6a4b7d5c-fee4-4616-9f43-4ac97046b595/cb7e9be8258368262246910e6a2a4c30.txt",
                "1677092785": "/tmp/data/6a4b7d5c-fee4-4616-9f43-4ac97046b595/e20db368d6fc633e34f559ff67bb4044.txt",
                "1677103794": "/tmp/data/6a4b7d5c-fee4-4616-9f43-4ac97046b595/02efdd37dacdae96554a8cc85dc9c945.txt"
            }
        @apiName Get list of available stored snapshots for watch
        @apiGroup Watch History
        @apiSuccess (200) {String} OK
        @apiSuccess (404) {String} ERR Not found
        """
        watch = self.datastore.data['watching'].get(uuid)
        if not watch:
            abort(404, message='No watch exists with the UUID of {}'.format(uuid))
@@ -149,9 +154,18 @@ class WatchSingleHistory(Resource):
        self.datastore = kwargs['datastore']

    @auth.check_token
    @validate_openapi_request('getWatchSnapshot')
    def get(self, uuid, timestamp):
        """Get single snapshot from watch."""
        """
        @api {get} /api/v1/watch/<string:uuid>/history/<int:timestamp> Get single snapshot from watch
        @apiDescription Requires watch `uuid` and `timestamp`. `timestamp` of "`latest`" for latest available snapshot, or <a href="#api-Watch_History-Get_list_of_available_stored_snapshots_for_watch">use the list returned here</a>
        @apiExample {curl} Example usage:
            curl http://localhost:5000/api/v1/watch/cc0cfffa-f449-477b-83ea-0caafd1dc091/history/1677092977 -H"x-api-key:813031b16330fe25e3780cf0325daa45" -H "Content-Type: application/json"
        @apiName Get single snapshot content
        @apiGroup Watch History
        @apiParam {String} [html]       Optional Set to =1 to return the last HTML (only stores last 2 snapshots, use `latest` as timestamp)
        @apiSuccess (200) {String} OK
        @apiSuccess (404) {String} ERR Not found
        """
        watch = self.datastore.data['watching'].get(uuid)
        if not watch:
            abort(404, message=f"No watch exists with the UUID of {uuid}")
@@ -183,26 +197,25 @@ class WatchFavicon(Resource):
        self.datastore = kwargs['datastore']

    @auth.check_token
    @validate_openapi_request('getWatchFavicon')
    def get(self, uuid):
        """Get favicon for a watch."""
        """
        @api {get} /api/v1/watch/<string:uuid>/favicon Get Favicon for a watch
        @apiDescription Requires watch `uuid`
        @apiExample {curl} Example usage:
            curl http://localhost:5000/api/v1/watch/cc0cfffa-f449-477b-83ea-0caafd1dc091/favicon -H"x-api-key:813031b16330fe25e3780cf0325daa45"
        @apiName Get latest Favicon
        @apiGroup Watch History
        @apiSuccess (200) {String} OK
        @apiSuccess (404) {String} ERR Not found
        """
        watch = self.datastore.data['watching'].get(uuid)
        if not watch:
            abort(404, message=f"No watch exists with the UUID of {uuid}")

        favicon_filename = watch.get_favicon_filename()
        if favicon_filename:
            try:
                import magic
                mime = magic.from_file(
                    os.path.join(watch.watch_data_dir, favicon_filename),
                    mime=True
                )
            except ImportError:
                # Fallback, no python-magic
                import mimetypes
                mime, encoding = mimetypes.guess_type(favicon_filename)

            import mimetypes
            mime, encoding = mimetypes.guess_type(favicon_filename)
            response = make_response(send_from_directory(watch.watch_data_dir, favicon_filename))
            response.headers['Content-type'] = mime
            response.headers['Cache-Control'] = 'max-age=300, must-revalidate'  # Cache for 5 minutes, then revalidate
@@ -218,10 +231,18 @@ class CreateWatch(Resource):
        self.update_q = kwargs['update_q']

    @auth.check_token
    @validate_openapi_request('createWatch')
    @expects_json(schema_create_watch)
    def post(self):
        """Create a single watch."""
        """
        @api {post} /api/v1/watch Create a single watch
        @apiDescription Requires atleast `url` set, can accept the same structure as <a href="#api-Watch-Watch">get single watch information</a> to create.
        @apiExample {curl} Example usage:
            curl http://localhost:5000/api/v1/watch -H"x-api-key:813031b16330fe25e3780cf0325daa45" -H "Content-Type: application/json" -d '{"url": "https://my-nice.com" , "tag": "nice list"}'
        @apiName Create
        @apiGroup Watch
        @apiSuccess (200) {String} OK Was created
        @apiSuccess (500) {String} ERR Some other error
        """

        json_data = request.get_json()
        url = json_data['url'].strip()
@@ -236,11 +257,6 @@ class CreateWatch(Resource):
            if not json_data.get('proxy') in plist:
                return "Invalid proxy choice, currently supported proxies are '{}'".format(', '.join(plist)), 400

        # Validate time_between_check when not using defaults
        validation_error = validate_time_between_check_required(json_data)
        if validation_error:
            return validation_error, 400

        extras = copy.deepcopy(json_data)

        # Because we renamed 'tag' to 'tags' but don't want to change the API (can do this in v2 of the API)
@@ -259,9 +275,35 @@ class CreateWatch(Resource):
            return "Invalid or unsupported URL", 400

    @auth.check_token
    @validate_openapi_request('listWatches')
    def get(self):
        """List watches."""
        """
        @api {get} /api/v1/watch List watches
        @apiDescription Return concise list of available watches and some very basic info
        @apiExample {curl} Example usage:
            curl http://localhost:5000/api/v1/watch -H"x-api-key:813031b16330fe25e3780cf0325daa45"
            {
                "6a4b7d5c-fee4-4616-9f43-4ac97046b595": {
                    "last_changed": 1677103794,
                    "last_checked": 1677103794,
                    "last_error": false,
                    "title": "",
                    "url": "http://www.quotationspage.com/random.php"
                },
                "e6f5fd5c-dbfe-468b-b8f3-f9d6ff5ad69b": {
                    "last_changed": 0,
                    "last_checked": 1676662819,
                    "last_error": false,
                    "title": "QuickLook",
                    "url": "https://github.com/QL-Win/QuickLook/tags"
                }
            }

        @apiParam {String} [recheck_all]       Optional Set to =1 to force recheck of all watches
        @apiParam {String} [tag]               Optional name of tag to limit results
        @apiName ListWatches
        @apiGroup Watch Management
        @apiSuccess (200) {String} OK JSON dict
        """
        list = {}

        tag_limit = request.args.get('tag', '').lower()
@@ -275,8 +317,6 @@ class CreateWatch(Resource):
                'last_changed': watch.last_changed,
                'last_checked': watch['last_checked'],
                'last_error': watch['last_error'],
                'link': watch.link,
                'page_title': watch['page_title'],
                'title': watch['title'],
                'url': watch['url'],
                'viewed': watch.viewed
```
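The `validate_time_between_check_required()` helper quoted at the top of this file's diff exists on only one side of the comparison. As an illustration of what it enforces, a small hedged sketch follows; the payload dictionaries are invented for illustration, and the import path is an assumption about where the helper lives on the branch that ships it.

```python
# Illustrative only: exercising the helper quoted in the hunk above.
# Assumption: the helper is importable from the module that defines the Watch resource.
from changedetectionio.api.Watch import validate_time_between_check_required

# Using global settings (the default) needs no interval at all.
assert validate_time_between_check_required({"time_between_check_use_default": True}) is None

# Opting out of global settings with at least one non-zero interval is accepted.
assert validate_time_between_check_required({
    "time_between_check_use_default": False,
    "time_between_check": {"hours": 1},
}) is None

# Opting out with no interval at all returns an error message string.
error = validate_time_between_check_required({"time_between_check_use_default": False})
assert error.startswith("At least one time interval")
```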
```
@@ -1,10 +1,4 @@
import copy
import yaml
import functools
from flask import request, abort
from loguru import logger
from openapi_core import OpenAPI
from openapi_core.contrib.flask import FlaskOpenAPIRequest
from . import api_schema
from ..model import watch_base

@@ -14,7 +8,6 @@ schema = api_schema.build_watch_json_schema(watch_base_config)

schema_create_watch = copy.deepcopy(schema)
schema_create_watch['required'] = ['url']
del schema_create_watch['properties']['last_viewed']

schema_update_watch = copy.deepcopy(schema)
schema_update_watch['additionalProperties'] = False
@@ -32,47 +25,9 @@ schema_create_notification_urls['required'] = ['notification_urls']
schema_delete_notification_urls = copy.deepcopy(schema_notification_urls)
schema_delete_notification_urls['required'] = ['notification_urls']

@functools.cache
def get_openapi_spec():
    import os
    spec_path = os.path.join(os.path.dirname(__file__), '../../docs/api-spec.yaml')
    with open(spec_path, 'r') as f:
        spec_dict = yaml.safe_load(f)
    _openapi_spec = OpenAPI.from_dict(spec_dict)
    return _openapi_spec

def validate_openapi_request(operation_id):
    """Decorator to validate incoming requests against OpenAPI spec."""
    def decorator(f):
        @functools.wraps(f)
        def wrapper(*args, **kwargs):
            try:
                # Skip OpenAPI validation for GET requests since they don't have request bodies
                if request.method.upper() != 'GET':
                    spec = get_openapi_spec()
                    openapi_request = FlaskOpenAPIRequest(request)
                    result = spec.unmarshal_request(openapi_request)
                    if result.errors:
                        from werkzeug.exceptions import BadRequest
                        error_details = []
                        for error in result.errors:
                            error_details.append(str(error))
                        raise BadRequest(f"OpenAPI validation failed: {error_details}")
            except BadRequest:
                # Re-raise BadRequest exceptions (validation failures)
                raise
            except Exception as e:
                # If OpenAPI spec loading fails, log but don't break existing functionality
                logger.critical(f"OpenAPI validation warning for {operation_id}: {e}")
                abort(500)
            return f(*args, **kwargs)
        return wrapper
    return decorator

# Import all API resources
from .Watch import Watch, WatchHistory, WatchSingleHistory, CreateWatch, WatchFavicon
from .Tags import Tags, Tag
from .Import import Import
from .SystemInfo import SystemInfo
from .Notifications import Notifications

```
```
@@ -78,13 +78,6 @@ def build_watch_json_schema(d):
              ]:
        schema['properties'][v]['anyOf'].append({'type': 'string', "maxLength": 5000})

    for v in ['last_viewed']:
        schema['properties'][v] = {
            "type": "integer",
            "description": "Unix timestamp in seconds of the last time the watch was viewed.",
            "minimum": 0
        }

    # None or Boolean
    schema['properties']['track_ldjson_price_data']['anyOf'].append({'type': 'boolean'})

@@ -119,12 +112,6 @@ def build_watch_json_schema(d):

    schema['properties']['time_between_check'] = build_time_between_check_json_schema()

    schema['properties']['time_between_check_use_default'] = {
        "type": "boolean",
        "default": True,
        "description": "Whether to use global settings for time between checks - defaults to true if not set"
    }

    schema['properties']['browser_steps'] = {
        "anyOf": [
            {
```
| @@ -7,7 +7,6 @@ from changedetectionio.flask_app import watch_check_update | ||||
| import asyncio | ||||
| import importlib | ||||
| import os | ||||
| import queue | ||||
| import time | ||||
|  | ||||
| from loguru import logger | ||||
| @@ -38,23 +37,13 @@ async def async_update_worker(worker_id, q, notification_q, app, datastore): | ||||
|         watch = None | ||||
|  | ||||
|         try: | ||||
|             # Use native janus async interface - no threads needed! | ||||
|             queued_item_data = await asyncio.wait_for(q.async_get(), timeout=1.0) | ||||
|              | ||||
|             # Use asyncio wait_for to make queue.get() cancellable | ||||
|             queued_item_data = await asyncio.wait_for(q.get(), timeout=1.0) | ||||
|         except asyncio.TimeoutError: | ||||
|             # No jobs available, continue loop | ||||
|             continue | ||||
|         except Exception as e: | ||||
|             logger.critical(f"CRITICAL: Worker {worker_id} failed to get queue item: {type(e).__name__}: {e}") | ||||
|              | ||||
|             # Log queue health for debugging | ||||
|             try: | ||||
|                 queue_size = q.qsize() | ||||
|                 is_empty = q.empty() | ||||
|                 logger.critical(f"CRITICAL: Worker {worker_id} queue health - size: {queue_size}, empty: {is_empty}") | ||||
|             except Exception as health_e: | ||||
|                 logger.critical(f"CRITICAL: Worker {worker_id} queue health check failed: {health_e}") | ||||
|              | ||||
|             logger.error(f"Worker {worker_id} error getting queue item: {e}") | ||||
|             await asyncio.sleep(0.1) | ||||
|             continue | ||||
|          | ||||
| @@ -310,6 +299,15 @@ async def async_update_worker(worker_id, q, notification_q, app, datastore): | ||||
|                     continue | ||||
|  | ||||
|                 if process_changedetection_results: | ||||
|                     # Extract title if needed | ||||
|                     if datastore.data['settings']['application'].get('extract_title_as_title') or watch['extract_title_as_title']: | ||||
|                         if not watch['title'] or not len(watch['title']): | ||||
|                             try: | ||||
|                                 update_obj['title'] = html_tools.extract_element(find='title', html_content=update_handler.fetcher.content) | ||||
|                                 logger.info(f"UUID: {uuid} Extract <title> updated title to '{update_obj['title']}") | ||||
|                             except Exception as e: | ||||
|                                 logger.warning(f"UUID: {uuid} Extract <title> as watch title was enabled, but couldn't find a <title>.") | ||||
|  | ||||
|                     try: | ||||
|                         datastore.update_watch(uuid=uuid, update_obj=update_obj) | ||||
|  | ||||
| @@ -348,14 +346,6 @@ async def async_update_worker(worker_id, q, notification_q, app, datastore): | ||||
|                 # Always record attempt count | ||||
|                 count = watch.get('check_count', 0) + 1 | ||||
|  | ||||
|                 # Always record page title (used in notifications, and can change even when the content is the same) | ||||
|                 try: | ||||
|                     page_title = html_tools.extract_title(data=update_handler.fetcher.content) | ||||
|                     logger.debug(f"UUID: {uuid} Page <title> is '{page_title}'") | ||||
|                     datastore.update_watch(uuid=uuid, update_obj={'page_title': page_title}) | ||||
|                 except Exception as e: | ||||
|                     logger.warning(f"UUID: {uuid} Exception when extracting <title> - {str(e)}") | ||||
|  | ||||
|                 # Record server header | ||||
|                 try: | ||||
|                     server_header = update_handler.fetcher.headers.get('server', '').strip().lower()[:255] | ||||
|   | ||||
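The two hunks above both derive a watch title from the fetched page's <title> element via the project's html_tools helpers. A rough standard-library sketch of the same idea; TitleParser here is illustrative and not the project's implementation:

    from html.parser import HTMLParser

    class TitleParser(HTMLParser):
        """Collects the text of the first <title> element."""
        def __init__(self):
            super().__init__()
            self.in_title = False
            self.title = ''

        def handle_starttag(self, tag, attrs):
            if tag == 'title' and not self.title:
                self.in_title = True

        def handle_endtag(self, tag):
            if tag == 'title':
                self.in_title = False

        def handle_data(self, data):
            if self.in_title:
                self.title += data

    parser = TitleParser()
    parser.feed('<html><head><title>Example page</title></head><body>hi</body></html>')
    print(parser.title)  # -> Example page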
| @@ -1 +0,0 @@ | ||||
| # Browser notifications blueprint | ||||
| @@ -1,76 +0,0 @@ | ||||
| from flask import Blueprint, jsonify, request | ||||
| from loguru import logger | ||||
|  | ||||
|  | ||||
| def construct_blueprint(datastore): | ||||
|     browser_notifications_blueprint = Blueprint('browser_notifications', __name__) | ||||
|  | ||||
|     @browser_notifications_blueprint.route("/test", methods=['POST']) | ||||
|     def test_browser_notification(): | ||||
|         """Send a test browser notification using the apprise handler""" | ||||
|         try: | ||||
|             from changedetectionio.notification.apprise_plugin.custom_handlers import apprise_browser_notification_handler | ||||
|              | ||||
|             # Check if there are any subscriptions | ||||
|             browser_subscriptions = datastore.data.get('settings', {}).get('application', {}).get('browser_subscriptions', []) | ||||
|             if not browser_subscriptions: | ||||
|                 return jsonify({'success': False, 'message': 'No browser subscriptions found'}), 404 | ||||
|              | ||||
|             # Get notification data from request or use defaults | ||||
|             data = request.get_json() or {} | ||||
|             title = data.get('title', 'Test Notification') | ||||
|             body = data.get('body', 'This is a test notification from changedetection.io') | ||||
|              | ||||
|             # Use the apprise handler directly | ||||
|             success = apprise_browser_notification_handler( | ||||
|                 body=body, | ||||
|                 title=title, | ||||
|                 notify_type='info', | ||||
|                 meta={'url': 'browser://test'} | ||||
|             ) | ||||
|              | ||||
|             if success: | ||||
|                 subscription_count = len(browser_subscriptions) | ||||
|                 return jsonify({ | ||||
|                     'success': True, | ||||
|                     'message': f'Test notification sent successfully to {subscription_count} subscriber(s)' | ||||
|                 }) | ||||
|             else: | ||||
|                 return jsonify({'success': False, 'message': 'Failed to send test notification'}), 500 | ||||
|                  | ||||
|         except ImportError: | ||||
|             logger.error("Browser notification handler not available") | ||||
|             return jsonify({'success': False, 'message': 'Browser notification handler not available'}), 500 | ||||
|         except Exception as e: | ||||
|             logger.error(f"Failed to send test browser notification: {e}") | ||||
|             return jsonify({'success': False, 'message': f'Error: {str(e)}'}), 500 | ||||
|  | ||||
|     @browser_notifications_blueprint.route("/clear", methods=['POST']) | ||||
|     def clear_all_browser_notifications(): | ||||
|         """Clear all browser notification subscriptions from the datastore""" | ||||
|         try: | ||||
|             # Get current subscription count | ||||
|             browser_subscriptions = datastore.data.get('settings', {}).get('application', {}).get('browser_subscriptions', []) | ||||
|             subscription_count = len(browser_subscriptions) | ||||
|              | ||||
|             # Clear all subscriptions | ||||
|             if 'settings' not in datastore.data: | ||||
|                 datastore.data['settings'] = {} | ||||
|             if 'application' not in datastore.data['settings']: | ||||
|                 datastore.data['settings']['application'] = {} | ||||
|                  | ||||
|             datastore.data['settings']['application']['browser_subscriptions'] = [] | ||||
|             datastore.needs_write = True | ||||
|              | ||||
|             logger.info(f"Cleared {subscription_count} browser notification subscriptions") | ||||
|              | ||||
|             return jsonify({ | ||||
|                 'success': True,  | ||||
|                 'message': f'Cleared {subscription_count} browser notification subscription(s)' | ||||
|             }) | ||||
|              | ||||
|         except Exception as e: | ||||
|             logger.error(f"Failed to clear all browser notifications: {e}") | ||||
|             return jsonify({'success': False, 'message': f'Clear all failed: {str(e)}'}), 500 | ||||
|  | ||||
|     return browser_notifications_blueprint | ||||
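The removed blueprint above exposes two POST endpoints which, registered under /browser-notifications (as seen later in this comparison), could be exercised roughly as below. The base URL and the absence of authentication are assumptions made only for illustration; a real instance may require a login session:

    import requests

    base = 'http://localhost:5000/browser-notifications'

    r = requests.post(f'{base}/test', json={'title': 'Hello', 'body': 'Test body'})
    print(r.status_code, r.json())

    r = requests.post(f'{base}/clear')
    print(r.status_code, r.json())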
| @@ -108,13 +108,10 @@ def construct_blueprint(datastore: ChangeDetectionStore): | ||||
|  | ||||
|                 fe.link(link=diff_link) | ||||
|  | ||||
|                 # Same logic as watch-overview.html | ||||
|                 if datastore.data['settings']['application']['ui'].get('use_page_title_in_list') or watch.get('use_page_title_in_list'): | ||||
|                     watch_label = watch.label | ||||
|                 else: | ||||
|                     watch_label = watch.get('url') | ||||
|                 # @todo watch should be a getter - watch.get('title') (internally if URL else..) | ||||
|  | ||||
|                 fe.title(title=watch_label) | ||||
|                 watch_title = watch.get('title') if watch.get('title') else watch.get('url') | ||||
|                 fe.title(title=watch_title) | ||||
|                 try: | ||||
|  | ||||
|                     html_diff = diff.render_diff(previous_version_file_contents=watch.get_history_snapshot(dates[-2]), | ||||
| @@ -130,7 +127,7 @@ def construct_blueprint(datastore: ChangeDetectionStore): | ||||
|                 # @todo User could decide if <link> goes to the diff page, or to the watch link | ||||
|                 rss_template = "<html><body>\n<h4><a href=\"{{watch_url}}\">{{watch_title}}</a></h4>\n<p>{{html_diff}}</p>\n</body></html>\n" | ||||
|  | ||||
|                 content = jinja_render(template_str=rss_template, watch_title=watch_label, html_diff=html_diff, watch_url=watch.link) | ||||
|                 content = jinja_render(template_str=rss_template, watch_title=watch_title, html_diff=html_diff, watch_url=watch.link) | ||||
|  | ||||
|                 # Out of range chars could also break feedgen | ||||
|                 if scan_invalid_chars_in_rss(content): | ||||
|   | ||||
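Both versions of the RSS hunk render the per-watch diff into the feed entry through a small Jinja template. A plain-jinja2 sketch of that rendering step, standing in for the project's own jinja_render wrapper; the sample values are invented:

    from jinja2 import Template

    rss_template = ('<html><body>\n<h4><a href="{{watch_url}}">{{watch_title}}</a></h4>\n'
                    '<p>{{html_diff}}</p>\n</body></html>\n')

    # Sample values standing in for the watch data used by the blueprint
    content = Template(rss_template).render(
        watch_title='Example watch',
        watch_url='https://example.com',
        html_diff='<ins>new line</ins>',
    )
    print(content)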
| @@ -1,7 +1,7 @@ | ||||
| {% extends 'base.html' %} | ||||
|  | ||||
| {% block content %} | ||||
| {% from '_helpers.html' import render_field, render_checkbox_field, render_button, render_time_schedule_form, render_ternary_field %} | ||||
| {% from '_helpers.html' import render_field, render_checkbox_field, render_button, render_time_schedule_form %} | ||||
| {% from '_common_fields.html' import render_common_settings_form %} | ||||
| <script> | ||||
|     const notification_base_url="{{url_for('ui.ui_notification.ajax_callback_send_notification_test', mode="global-settings")}}"; | ||||
| @@ -75,10 +75,18 @@ | ||||
|                     <div class="pure-control-group"> | ||||
|                         {{ render_checkbox_field(form.application.form.rss_hide_muted_watches) }} | ||||
|                     </div> | ||||
|                     <div class="pure-control-group"> | ||||
|                         {{ render_field(form.application.form.pager_size) }} | ||||
|                         <span class="pure-form-message-inline">Number of items per page in the watch overview list, 0 to disable.</span> | ||||
|                     </div> | ||||
|                     <div class="pure-control-group"> | ||||
|                         {{ render_field(form.application.form.rss_content_format) }} | ||||
|                         <span class="pure-form-message-inline">Love RSS? Does your reader support HTML? Set it here</span> | ||||
|                     </div> | ||||
|                     <div class="pure-control-group"> | ||||
|                         {{ render_checkbox_field(form.application.form.extract_title_as_title) }} | ||||
|                         <span class="pure-form-message-inline">Note: This will automatically apply to all existing watches.</span> | ||||
|                     </div> | ||||
|                     <div class="pure-control-group"> | ||||
|                         {{ render_checkbox_field(form.application.form.empty_pages_are_a_change) }} | ||||
|                         <span class="pure-form-message-inline">When a request returns no content, or the HTML does not contain any text, is this considered a change?</span> | ||||
| @@ -195,7 +203,7 @@ nav | ||||
|  | ||||
|             <div class="tab-pane-inner" id="api"> | ||||
|                 <h4>API Access</h4> | ||||
|                 <p>Drive your changedetection.io via API. More about <a href="https://changedetection.io/docs/api_v1/index.html">API access and examples here</a>.</p> | ||||
|                 <p>Drive your changedetection.io via API. More about <a href="https://github.com/dgtlmoon/changedetection.io/wiki/API-Reference">API access here</a>.</p> | ||||
|  | ||||
|                 <div class="pure-control-group"> | ||||
|                     {{ render_checkbox_field(form.application.form.api_access_token_enabled) }} | ||||
| @@ -248,18 +256,6 @@ nav | ||||
|                     {{ render_checkbox_field(form.application.form.ui.form.socket_io_enabled, class="socket_io_enabled") }} | ||||
|                     <span class="pure-form-message-inline">Realtime UI Updates Enabled - (Restart required if this is changed)</span> | ||||
|                 </div> | ||||
|                 <div class="pure-control-group"> | ||||
|                     {{ render_checkbox_field(form.application.form.ui.form.favicons_enabled, class="") }} | ||||
|                     <span class="pure-form-message-inline">Enable or Disable Favicons next to the watch list</span> | ||||
|                 </div> | ||||
|                 <div class="pure-control-group"> | ||||
|                     {{ render_checkbox_field(form.application.form.ui.use_page_title_in_list) }} | ||||
|                 </div> | ||||
|                 <div class="pure-control-group"> | ||||
|                     {{ render_field(form.application.form.pager_size) }} | ||||
|                     <span class="pure-form-message-inline">Number of items per page in the watch overview list, 0 to disable.</span> | ||||
|                 </div> | ||||
|  | ||||
|             </div> | ||||
|             <div class="tab-pane-inner" id="proxies"> | ||||
|                 <div id="recommended-proxy"> | ||||
| @@ -323,8 +319,8 @@ nav | ||||
|             <div id="actions"> | ||||
|                 <div class="pure-control-group"> | ||||
|                     {{ render_button(form.save_button) }} | ||||
|                     <a href="{{url_for('watchlist.index')}}" class="pure-button button-cancel">Back</a> | ||||
|                     <a href="{{url_for('ui.clear_all_history')}}" class="pure-button button-error">Clear Snapshot History</a> | ||||
|                     <a href="{{url_for('watchlist.index')}}" class="pure-button button-small button-cancel">Back</a> | ||||
|                     <a href="{{url_for('ui.clear_all_history')}}" class="pure-button button-small button-error">Clear Snapshot History</a> | ||||
|                 </div> | ||||
|             </div> | ||||
|         </form> | ||||
|   | ||||
| @@ -1,6 +1,6 @@ | ||||
| {% extends 'base.html' %} | ||||
| {% block content %} | ||||
| {% from '_helpers.html' import render_field, render_checkbox_field, render_button, render_ternary_field %} | ||||
| {% from '_helpers.html' import render_field, render_checkbox_field, render_button %} | ||||
| {% from '_common_fields.html' import render_common_settings_form %} | ||||
| <script> | ||||
|     const notification_base_url="{{url_for('ui.ui_notification.ajax_callback_send_notification_test', mode="group-settings")}}"; | ||||
| @@ -64,7 +64,7 @@ | ||||
|             <div class="tab-pane-inner" id="notifications"> | ||||
|                 <fieldset> | ||||
|                     <div  class="pure-control-group inline-radio"> | ||||
|                       {{ render_ternary_field(form.notification_muted, BooleanField=True) }} | ||||
|                       {{ render_checkbox_field(form.notification_muted) }} | ||||
|                     </div> | ||||
|                     {% if 1 %} | ||||
|                     <div class="pure-control-group inline-radio"> | ||||
|   | ||||
| @@ -242,7 +242,6 @@ def construct_blueprint(datastore: ChangeDetectionStore, update_q, queuedWatchMe | ||||
|                 'available_timezones': sorted(available_timezones()), | ||||
|                 'browser_steps_config': browser_step_ui_config, | ||||
|                 'emailprefix': os.getenv('NOTIFICATION_MAIL_BUTTON_PREFIX', False), | ||||
|                 'extra_classes': 'checking-now' if worker_handler.is_watch_running(uuid) else '', | ||||
|                 'extra_notification_token_placeholder_info': datastore.get_unique_notification_token_placeholders_available(), | ||||
|                 'extra_processor_config': form.extra_tab_content(), | ||||
|                 'extra_title': f" - Edit - {watch.label}", | ||||
|   | ||||
| @@ -93,15 +93,12 @@ def construct_blueprint(datastore: ChangeDetectionStore, update_q, queuedWatchMe | ||||
|             return redirect(url_for('watchlist.index')) | ||||
|  | ||||
|         # For submission of requesting an extract | ||||
|         extract_form = forms.extractDataForm(formdata=request.form, | ||||
|                                              data={'extract_regex': request.form.get('extract_regex', '')} | ||||
|                                              ) | ||||
|         extract_form = forms.extractDataForm(request.form) | ||||
|         if not extract_form.validate(): | ||||
|             flash("An error occurred, please see below.", "error") | ||||
|             return _render_diff_template(uuid, extract_form) | ||||
|  | ||||
|         else: | ||||
|             extract_regex = request.form.get('extract_regex', '').strip() | ||||
|             extract_regex = request.form.get('extract_regex').strip() | ||||
|             output = watch.extract_regex_from_all_history(extract_regex) | ||||
|             if output: | ||||
|                 watch_dir = os.path.join(datastore.datastore_path, uuid) | ||||
| @@ -112,11 +109,12 @@ def construct_blueprint(datastore: ChangeDetectionStore, update_q, queuedWatchMe | ||||
|                 response.headers['Expires'] = "0" | ||||
|                 return response | ||||
|  | ||||
|             flash('No matches found while scanning all of the watch history for that RegEx.', 'error') | ||||
|         return redirect(url_for('ui.ui_views.diff_history_page', uuid=uuid) + '#extract') | ||||
|             flash('Nothing matches that RegEx', 'error') | ||||
|         redirect(url_for('ui_views.diff_history_page', uuid=uuid) + '#extract') | ||||
|  | ||||
|     def _render_diff_template(uuid, extract_form=None): | ||||
|         """Helper function to render the diff template with all required data""" | ||||
|     @views_blueprint.route("/diff/<string:uuid>", methods=['GET']) | ||||
|     @login_optionally_required | ||||
|     def diff_history_page(uuid): | ||||
|         from changedetectionio import forms | ||||
|  | ||||
|         # More for testing, possible to return the first/only | ||||
| @@ -130,11 +128,8 @@ def construct_blueprint(datastore: ChangeDetectionStore, update_q, queuedWatchMe | ||||
|             flash("No history found for the specified link, bad link?", "error") | ||||
|             return redirect(url_for('watchlist.index')) | ||||
|  | ||||
|         # Use provided form or create a new one | ||||
|         if extract_form is None: | ||||
|             extract_form = forms.extractDataForm(formdata=request.form, | ||||
|                                                  data={'extract_regex': request.form.get('extract_regex', '')} | ||||
|                                                  ) | ||||
|         # For submission of requesting an extract | ||||
|         extract_form = forms.extractDataForm(request.form) | ||||
|  | ||||
|         history = watch.history | ||||
|         dates = list(history.keys()) | ||||
| @@ -175,7 +170,7 @@ def construct_blueprint(datastore: ChangeDetectionStore, update_q, queuedWatchMe | ||||
|  | ||||
|         datastore.set_last_viewed(uuid, time.time()) | ||||
|  | ||||
|         return render_template("diff.html", | ||||
|         output = render_template("diff.html", | ||||
|                                  current_diff_url=watch['url'], | ||||
|                                  from_version=str(from_version), | ||||
|                                  to_version=str(to_version), | ||||
| @@ -198,10 +193,7 @@ def construct_blueprint(datastore: ChangeDetectionStore, update_q, queuedWatchMe | ||||
|                                  watch_a=watch | ||||
|                                  ) | ||||
|  | ||||
|     @views_blueprint.route("/diff/<string:uuid>", methods=['GET']) | ||||
|     @login_optionally_required | ||||
|     def diff_history_page(uuid): | ||||
|         return _render_diff_template(uuid) | ||||
|         return output | ||||
|  | ||||
|     @views_blueprint.route("/form/add/quickwatch", methods=['POST']) | ||||
|     @login_optionally_required | ||||
|   | ||||
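The extract handler above runs a user-supplied RegEx over every stored snapshot via watch.extract_regex_from_all_history(). A simplified, hypothetical version of that scan over an in-memory history dict:

    import re

    # Invented snapshot history keyed by timestamp, mimicking the shape of watch.history
    history = {
        '1700000000': 'Price: 19.99 EUR',
        '1700086400': 'Price: 21.49 EUR',
    }

    def extract_regex_from_history(history, extract_regex):
        results = {}
        for timestamp, content in history.items():
            matches = re.findall(extract_regex, content)
            if matches:
                results[timestamp] = matches
        return results

    print(extract_regex_from_history(history, r'\d+\.\d+'))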
| @@ -44,16 +44,12 @@ def construct_blueprint(datastore: ChangeDetectionStore, update_q, queuedWatchMe | ||||
|         # Sort by last_changed and add the uuid which is usually the key.. | ||||
|         sorted_watches = [] | ||||
|         with_errors = request.args.get('with_errors') == "1" | ||||
|         unread_only = request.args.get('unread') == "1" | ||||
|         errored_count = 0 | ||||
|         search_q = request.args.get('q').strip().lower() if request.args.get('q') else False | ||||
|         for uuid, watch in datastore.data['watching'].items(): | ||||
|             if with_errors and not watch.get('last_error'): | ||||
|                 continue | ||||
|  | ||||
|             if unread_only and (watch.viewed or watch.last_changed == 0) : | ||||
|                 continue | ||||
|  | ||||
|             if active_tag_uuid and not active_tag_uuid in watch['tags']: | ||||
|                     continue | ||||
|             if watch.get('last_error'): | ||||
|   | ||||
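The hunk above drops the unread-only filter from the watch-overview query handling. A small sketch of how those request flags narrow the watch list; the field names mirror the handler, but the data is invented:

    watches = {
        'uuid-1': {'last_error': None, 'viewed': True, 'last_changed': 10, 'tags': ['news']},
        'uuid-2': {'last_error': 'timeout', 'viewed': False, 'last_changed': 20, 'tags': []},
    }

    def filter_watches(watches, with_errors=False, unread_only=False, active_tag=None):
        for uuid, watch in watches.items():
            if with_errors and not watch.get('last_error'):
                continue
            if unread_only and (watch['viewed'] or watch['last_changed'] == 0):
                continue
            if active_tag and active_tag not in watch['tags']:
                continue
            yield uuid

    print(list(filter_watches(watches, with_errors=True)))  # -> ['uuid-2']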
| @@ -81,13 +81,10 @@ document.addEventListener('DOMContentLoaded', function() { | ||||
|     {%- if any_has_restock_price_processor -%} | ||||
|         {%- set cols_required = cols_required + 1 -%} | ||||
|     {%- endif -%} | ||||
|     {%- set ui_settings = datastore.data['settings']['application']['ui'] -%} | ||||
|  | ||||
|     <div id="watch-table-wrapper"> | ||||
|         {%- set table_classes = [ | ||||
|             'favicon-enabled' if 'favicons_enabled' not in ui_settings or ui_settings['favicons_enabled'] else 'favicon-not-enabled', | ||||
|         ] -%} | ||||
|         <table class="pure-table pure-table-striped watch-table {{ table_classes | reject('equalto', '') | join(' ') }}"> | ||||
|  | ||||
|         <table class="pure-table pure-table-striped watch-table"> | ||||
|             <thead> | ||||
|             <tr> | ||||
|                 {%- set link_order = "desc" if sort_order  == 'asc' else "asc" -%} | ||||
| @@ -117,9 +114,8 @@ document.addEventListener('DOMContentLoaded', function() { | ||||
|             {%- for watch in (watches|sort(attribute=sort_attribute, reverse=sort_order == 'asc'))|pagination_slice(skip=pagination.skip) -%} | ||||
|                 {%- set checking_now = is_checking_now(watch) -%} | ||||
|                 {%- set history_n = watch.history_n -%} | ||||
|                 {%- set favicon = watch.get_favicon_filename() -%} | ||||
|                 {%- set system_use_url_watchlist = datastore.data['settings']['application']['ui'].get('use_page_title_in_list')  -%} | ||||
|                 {#  Class settings mirrored in changedetectionio/static/js/realtime.js for the frontend #} | ||||
|                 {%- set has_favicon = watch.get_favicon_filename() -%} | ||||
|                 {#  Mirror in changedetectionio/static/js/realtime.js for the frontend #} | ||||
|                 {%- set row_classes = [ | ||||
|                     loop.cycle('pure-table-odd', 'pure-table-even'), | ||||
|                     'processor-' ~ watch['processor'], | ||||
| @@ -127,15 +123,14 @@ document.addEventListener('DOMContentLoaded', function() { | ||||
|                     'paused' if watch.paused is defined and watch.paused != False else '', | ||||
|                     'unviewed' if watch.has_unviewed else '', | ||||
|                     'has-restock-info' if watch.has_restock_info else 'no-restock-info', | ||||
|                     'has-favicon' if favicon else '', | ||||
|                     'has-favicon' if has_favicon else '', | ||||
|                     'in-stock' if watch.has_restock_info and watch['restock']['in_stock'] else '', | ||||
|                     'not-in-stock' if watch.has_restock_info and not watch['restock']['in_stock'] else '', | ||||
|                     'queued' if watch.uuid in queued_uuids else '', | ||||
|                     'checking-now' if checking_now else '', | ||||
|                     'notification_muted' if watch.notification_muted else '', | ||||
|                     'single-history' if history_n == 1 else '', | ||||
|                     'multiple-history' if history_n >= 2 else '', | ||||
|                     'use-html-title' if system_use_url_watchlist else 'no-html-title', | ||||
|                     'multiple-history' if history_n >= 2 else '',                     | ||||
|                 ] -%} | ||||
|             <tr id="{{ watch.uuid }}" data-watch-uuid="{{ watch.uuid }}" class="{{ row_classes | reject('equalto', '') | join(' ') }}"> | ||||
|                 <td class="inline checkbox-uuid" ><div><input name="uuids"  type="checkbox" value="{{ watch.uuid}} " > <span class="counter-i">{{ loop.index+pagination.skip }}</span></div></td> | ||||
| @@ -150,19 +145,12 @@ document.addEventListener('DOMContentLoaded', function() { | ||||
|  | ||||
|                 <td class="title-col inline"> | ||||
|                     <div class="flex-wrapper"> | ||||
|                     {% if 'favicons_enabled' not in ui_settings or ui_settings['favicons_enabled'] %} | ||||
|                         <div>{# A page might have hundreds of these images, set IMG options for lazy loading, don't set SRC if we don't have it so it doesn't fetch the placeholder #} | ||||
|                             <img alt="Favicon thumbnail" class="favicon" loading="lazy" decoding="async" fetchpriority="low" {% if favicon %} src="{{url_for('static_content', group='favicon', filename=watch.uuid)}}" {% else %} src='data:image/svg+xml;utf8,%3Csvg xmlns="http://www.w3.org/2000/svg" width="7.087" height="7.087" viewBox="0 0 7.087 7.087"%3E%3Ccircle cx="3.543" cy="3.543" r="3.279" stroke="%23e1e1e1" stroke-width="0.45" fill="none" opacity="0.74"/%3E%3C/svg%3E' {%  endif %} /> | ||||
|                             <img alt="Favicon thumbnail" style="display: none;" class="favicon" loading="lazy" decoding="async" fetchpriority="low" {% if has_favicon %} src="{{url_for('static_content', group='favicon', filename=watch.uuid)}}" {% else %} src="data:image/gif;base64,R0lGODlhAQABAIAAAAAAAP///ywAAAAAAQABAAACAUwAOw=="{%  endif %} /> | ||||
|                         </div> | ||||
|                     {%  endif %} | ||||
|                         <div> | ||||
|                         <span class="watch-title"> | ||||
|                             {% if system_use_url_watchlist or watch.get('use_page_title_in_list') %} | ||||
|                                 {{ watch.label }} | ||||
|                             {% else %} | ||||
|                                 {{ watch.get('title') or watch.link }} | ||||
|                             {% endif %} | ||||
|                            <a class="external" target="_blank" rel="noopener" href="{{ watch.link.replace('source:','') }}"> </a> | ||||
|                             {{watch.title if watch.title is not none and watch.title|length > 0 else watch.url}} <a class="external" target="_blank" rel="noopener" href="{{ watch.link.replace('source:','') }}"> </a> | ||||
|                         </span> | ||||
|                             <div class="error-text" style="display:none;">{{ watch.compile_error_texts(has_proxies=datastore.proxy_list) }}</div> | ||||
|                             {%- if watch['processor'] == 'text_json_diff'  -%} | ||||
| @@ -252,9 +240,6 @@ document.addEventListener('DOMContentLoaded', function() { | ||||
|                 <a href="{{url_for('ui.mark_all_viewed', tag=active_tag_uuid) }}" class="pure-button button-tag " id="mark-all-viewed">Mark all viewed in '{{active_tag.title}}'</a> | ||||
|             </li> | ||||
|         {%-  endif -%} | ||||
|             <li id="post-list-unread" class="{%- if has_unviewed -%}has-unviewed{%- endif -%}" style="display: none;" > | ||||
|                 <a href="{{url_for('watchlist.index', unread=1, tag=request.args.get('tag')) }}" class="pure-button button-tag">Unread</a> | ||||
|             </li> | ||||
|             <li> | ||||
|                <a href="{{ url_for('ui.form_watch_checknow', tag=active_tag_uuid, with_errors=request.args.get('with_errors',0)) }}" class="pure-button button-tag" id="recheck-all">Recheck | ||||
|                 all {% if active_tag_uuid %}  in '{{active_tag.title}}'{%endif%}</a> | ||||
|   | ||||
| @@ -70,17 +70,15 @@ class Fetcher(): | ||||
|  | ||||
|     @abstractmethod | ||||
|     async def run(self, | ||||
|                   fetch_favicon=True, | ||||
|                   current_include_filters=None, | ||||
|                   empty_pages_are_a_change=False, | ||||
|                   ignore_status_codes=False, | ||||
|                   is_binary=False, | ||||
|                   request_body=None, | ||||
|                   request_headers=None, | ||||
|                   request_method=None, | ||||
|                   timeout=None, | ||||
|                   url=None, | ||||
|                   ): | ||||
|             url, | ||||
|             timeout, | ||||
|             request_headers, | ||||
|             request_body, | ||||
|             request_method, | ||||
|             ignore_status_codes=False, | ||||
|             current_include_filters=None, | ||||
|             is_binary=False, | ||||
|             empty_pages_are_a_change=False): | ||||
|         # Should set self.error, self.status_code and self.content | ||||
|         pass | ||||
|  | ||||
|   | ||||
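The signature change above swaps keyword arguments with defaults for a mostly positional parameter list. Call sites that pass everything by name keep working either way; a tiny demonstration with a stand-in fetcher class (not the project's Fetcher):

    import asyncio

    class DemoFetcher:
        # Keyword arguments with defaults: parameter order no longer matters to callers
        async def run(self, *, url=None, timeout=None, request_headers=None,
                      request_body=None, request_method=None,
                      ignore_status_codes=False, current_include_filters=None,
                      is_binary=False, empty_pages_are_a_change=False):
            return url, timeout

    print(asyncio.run(DemoFetcher().run(url='https://example.com', timeout=10)))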
| @@ -143,17 +143,15 @@ class fetcher(Fetcher): | ||||
|             f.write(content) | ||||
|  | ||||
|     async def run(self, | ||||
|                   fetch_favicon=True, | ||||
|                   current_include_filters=None, | ||||
|                   empty_pages_are_a_change=False, | ||||
|                   ignore_status_codes=False, | ||||
|                   is_binary=False, | ||||
|                   request_body=None, | ||||
|                   request_headers=None, | ||||
|                   request_method=None, | ||||
|                   timeout=None, | ||||
|                   url=None, | ||||
|                   ): | ||||
|             url, | ||||
|             timeout, | ||||
|             request_headers, | ||||
|             request_body, | ||||
|             request_method, | ||||
|             ignore_status_codes=False, | ||||
|             current_include_filters=None, | ||||
|             is_binary=False, | ||||
|             empty_pages_are_a_change=False): | ||||
|  | ||||
|         from playwright.async_api import async_playwright | ||||
|         import playwright._impl._errors | ||||
| @@ -236,12 +234,11 @@ class fetcher(Fetcher): | ||||
|                 await browser.close() | ||||
|                 raise PageUnloadable(url=url, status_code=None, message=str(e)) | ||||
|  | ||||
|             if fetch_favicon: | ||||
|                 try: | ||||
|                     self.favicon_blob = await self.page.evaluate(FAVICON_FETCHER_JS) | ||||
|                     await self.page.request_gc() | ||||
|                 except Exception as e: | ||||
|                     logger.error(f"Error fetching FavIcon info {str(e)}, continuing.") | ||||
|             try: | ||||
|                 self.favicon_blob = await self.page.evaluate(FAVICON_FETCHER_JS) | ||||
|                 await self.page.request_gc() | ||||
|             except Exception as e: | ||||
|                 logger.error(f"Error fetching FavIcon info {str(e)}, continuing.") | ||||
|  | ||||
|             if self.status_code != 200 and not ignore_status_codes: | ||||
|                 screenshot = await capture_full_page_async(self.page) | ||||
|   | ||||
| @@ -145,16 +145,15 @@ class fetcher(Fetcher): | ||||
|     #         f.write(content) | ||||
|  | ||||
|     async def fetch_page(self, | ||||
|                          current_include_filters, | ||||
|                          empty_pages_are_a_change, | ||||
|                          fetch_favicon, | ||||
|                          ignore_status_codes, | ||||
|                          is_binary, | ||||
|                          request_body, | ||||
|                          request_headers, | ||||
|                          request_method, | ||||
|                          timeout, | ||||
|                          url, | ||||
|                          timeout, | ||||
|                          request_headers, | ||||
|                          request_body, | ||||
|                          request_method, | ||||
|                          ignore_status_codes, | ||||
|                          current_include_filters, | ||||
|                          is_binary, | ||||
|                          empty_pages_are_a_change | ||||
|                          ): | ||||
|         import re | ||||
|         self.delete_browser_steps_screenshots() | ||||
| @@ -182,9 +181,6 @@ class fetcher(Fetcher): | ||||
|  | ||||
|         # more reliable is to just request a new page | ||||
|         self.page = await browser.newPage() | ||||
|          | ||||
|         # Add console handler to capture console.log from favicon fetcher | ||||
|         #self.page.on('console', lambda msg: logger.debug(f"Browser console [{msg.type}]: {msg.text}")) | ||||
|  | ||||
|         if '--window-size' in self.browser_connection_url: | ||||
|             # Be sure the viewport is always the window-size, this is often not the same thing | ||||
| @@ -294,11 +290,10 @@ class fetcher(Fetcher): | ||||
|             await browser.close() | ||||
|             raise PageUnloadable(url=url, status_code=None, message=str(e)) | ||||
|  | ||||
|         if fetch_favicon: | ||||
|             try: | ||||
|                 self.favicon_blob = await self.page.evaluate(FAVICON_FETCHER_JS) | ||||
|             except Exception as e: | ||||
|                 logger.error(f"Error fetching FavIcon info {str(e)}, continuing.") | ||||
|         try: | ||||
|             self.favicon_blob = await self.page.evaluate(FAVICON_FETCHER_JS) | ||||
|         except Exception as e: | ||||
|             logger.error(f"Error fetching FavIcon info {str(e)}, continuing.") | ||||
|  | ||||
|         if self.status_code != 200 and not ignore_status_codes: | ||||
|             screenshot = await capture_full_page(page=self.page) | ||||
| @@ -351,18 +346,8 @@ class fetcher(Fetcher): | ||||
|     async def main(self, **kwargs): | ||||
|         await self.fetch_page(**kwargs) | ||||
|  | ||||
|     async def run(self, | ||||
|                   fetch_favicon=True, | ||||
|                   current_include_filters=None, | ||||
|                   empty_pages_are_a_change=False, | ||||
|                   ignore_status_codes=False, | ||||
|                   is_binary=False, | ||||
|                   request_body=None, | ||||
|                   request_headers=None, | ||||
|                   request_method=None, | ||||
|                   timeout=None, | ||||
|                   url=None, | ||||
|                   ): | ||||
|     async def run(self, url, timeout, request_headers, request_body, request_method, ignore_status_codes=False, | ||||
|             current_include_filters=None, is_binary=False, empty_pages_are_a_change=False): | ||||
|  | ||||
|         #@todo make update_worker async which could run any of these content_fetchers within memory and time constraints | ||||
|         max_time = int(os.getenv('PUPPETEER_MAX_PROCESSING_TIMEOUT_SECONDS', 180)) | ||||
| @@ -370,17 +355,16 @@ class fetcher(Fetcher): | ||||
|         # Now we run this properly in async context since we're called from async worker | ||||
|         try: | ||||
|             await asyncio.wait_for(self.main( | ||||
|                 current_include_filters=current_include_filters, | ||||
|                 empty_pages_are_a_change=empty_pages_are_a_change, | ||||
|                 fetch_favicon=fetch_favicon, | ||||
|                 ignore_status_codes=ignore_status_codes, | ||||
|                 is_binary=is_binary, | ||||
|                 request_body=request_body, | ||||
|                 request_headers=request_headers, | ||||
|                 request_method=request_method, | ||||
|                 timeout=timeout, | ||||
|                 url=url, | ||||
|             ), timeout=max_time | ||||
|             ) | ||||
|                 timeout=timeout, | ||||
|                 request_headers=request_headers, | ||||
|                 request_body=request_body, | ||||
|                 request_method=request_method, | ||||
|                 ignore_status_codes=ignore_status_codes, | ||||
|                 current_include_filters=current_include_filters, | ||||
|                 is_binary=is_binary, | ||||
|                 empty_pages_are_a_change=empty_pages_are_a_change | ||||
|             ), timeout=max_time) | ||||
|         except asyncio.TimeoutError: | ||||
|             raise (BrowserFetchTimedOut(msg=f"Browser connected but was unable to process the page in {max_time} seconds.")) | ||||
|             raise(BrowserFetchTimedOut(msg=f"Browser connected but was unable to process the page in {max_time} seconds.")) | ||||
|  | ||||
|   | ||||
| @@ -104,17 +104,15 @@ class fetcher(Fetcher): | ||||
|         self.raw_content = r.content | ||||
|  | ||||
|     async def run(self, | ||||
|                   fetch_favicon=True, | ||||
|                   current_include_filters=None, | ||||
|                   empty_pages_are_a_change=False, | ||||
|                   ignore_status_codes=False, | ||||
|                   is_binary=False, | ||||
|                   request_body=None, | ||||
|                   request_headers=None, | ||||
|                   request_method=None, | ||||
|                   timeout=None, | ||||
|                   url=None, | ||||
|                   ): | ||||
|             url, | ||||
|             timeout, | ||||
|             request_headers, | ||||
|             request_body, | ||||
|             request_method, | ||||
|             ignore_status_codes=False, | ||||
|             current_include_filters=None, | ||||
|             is_binary=False, | ||||
|             empty_pages_are_a_change=False): | ||||
|         """Async wrapper that runs the synchronous requests code in a thread pool""" | ||||
|          | ||||
|         loop = asyncio.get_event_loop() | ||||
|   | ||||
| @@ -1,101 +1,79 @@ | ||||
| (async () => { | ||||
|   // Define the function inside the IIFE for console testing | ||||
|   window.getFaviconAsBlob = async function() { | ||||
|     const links = Array.from(document.querySelectorAll( | ||||
|       'link[rel~="apple-touch-icon"], link[rel~="icon"]' | ||||
|     )); | ||||
|   const links = Array.from(document.querySelectorAll( | ||||
|     'link[rel~="apple-touch-icon"], link[rel~="icon"]' | ||||
|   )); | ||||
|  | ||||
|     const icons = links.map(link => { | ||||
|       const sizesStr = link.getAttribute('sizes'); | ||||
|       let size = 0; | ||||
|       if (sizesStr) { | ||||
|         const [w] = sizesStr.split('x').map(Number); | ||||
|         if (!isNaN(w)) size = w; | ||||
|       } else { | ||||
|         size = 16; | ||||
|       } | ||||
|       return { | ||||
|         size, | ||||
|         rel: link.getAttribute('rel'), | ||||
|         href: link.href, | ||||
|         hasSizes: !!sizesStr | ||||
|       }; | ||||
|     }); | ||||
|  | ||||
|     // If no icons found, add fallback favicon.ico | ||||
|     if (icons.length === 0) { | ||||
|       icons.push({ | ||||
|         size: 16, | ||||
|         rel: 'icon', | ||||
|         href: '/favicon.ico', | ||||
|         hasSizes: false | ||||
|       }); | ||||
|   const icons = links.map(link => { | ||||
|     const sizesStr = link.getAttribute('sizes'); | ||||
|     let size = 0; | ||||
|     if (sizesStr) { | ||||
|       const [w] = sizesStr.split('x').map(Number); | ||||
|       if (!isNaN(w)) size = w; | ||||
|     } else { | ||||
|       size = 16; | ||||
|     } | ||||
|     return { | ||||
|       size, | ||||
|       rel: link.getAttribute('rel'), | ||||
|       href: link.href | ||||
|     }; | ||||
|   }); | ||||
|  | ||||
|     // sort preference: highest resolution first, then apple-touch-icon, then regular icons | ||||
|     icons.sort((a, b) => { | ||||
|       // First priority: actual size (highest first) | ||||
|       if (a.size !== b.size) { | ||||
|         return b.size - a.size; | ||||
|       } | ||||
|        | ||||
|       // Second priority: apple-touch-icon over regular icon | ||||
|       const isAppleA = /apple-touch-icon/.test(a.rel); | ||||
|       const isAppleB = /apple-touch-icon/.test(b.rel); | ||||
|       if (isAppleA && !isAppleB) return -1; | ||||
|       if (!isAppleA && isAppleB) return 1; | ||||
|        | ||||
|       // Third priority: icons with no size attribute (fallback icons) last | ||||
|       const hasNoSizeA = !a.hasSizes; | ||||
|       const hasNoSizeB = !b.hasSizes; | ||||
|       if (hasNoSizeA && !hasNoSizeB) return 1; | ||||
|       if (!hasNoSizeA && hasNoSizeB) return -1; | ||||
|        | ||||
|       return 0; | ||||
|   // If no icons found, add fallback favicon.ico | ||||
|   if (icons.length === 0) { | ||||
|     icons.push({ | ||||
|       size: 16, | ||||
|       rel: 'icon', | ||||
|       href: '/favicon.ico' | ||||
|     }); | ||||
|   } | ||||
|  | ||||
|     const timeoutMs = 2000; | ||||
|   // sort preference | ||||
|   icons.sort((a, b) => { | ||||
|     const isAppleA = /apple-touch-icon/.test(a.rel); | ||||
|     const isAppleB = /apple-touch-icon/.test(b.rel); | ||||
|     if (isAppleA && !isAppleB) return -1; | ||||
|     if (!isAppleA && isAppleB) return 1; | ||||
|     return b.size - a.size; | ||||
|   }); | ||||
|  | ||||
|     for (const icon of icons) { | ||||
|       try { | ||||
|         const controller = new AbortController(); | ||||
|         const timeout = setTimeout(() => controller.abort(), timeoutMs); | ||||
|   const timeoutMs = 2000; | ||||
|  | ||||
|         const resp = await fetch(icon.href, { | ||||
|           signal: controller.signal, | ||||
|           redirect: 'follow' | ||||
|         }); | ||||
|   for (const icon of icons) { | ||||
|     try { | ||||
|       const controller = new AbortController(); | ||||
|       const timeout = setTimeout(() => controller.abort(), timeoutMs); | ||||
|  | ||||
|         clearTimeout(timeout); | ||||
|       const resp = await fetch(icon.href, { | ||||
|         signal: controller.signal, | ||||
|         redirect: 'follow' | ||||
|       }); | ||||
|  | ||||
|         if (!resp.ok) { | ||||
|           continue; | ||||
|         } | ||||
|       clearTimeout(timeout); | ||||
|  | ||||
|         const blob = await resp.blob(); | ||||
|  | ||||
|         // Convert blob to base64 | ||||
|         const reader = new FileReader(); | ||||
|         return await new Promise(resolve => { | ||||
|           reader.onloadend = () => { | ||||
|             resolve({ | ||||
|               url: icon.href, | ||||
|               base64: reader.result.split(",")[1] | ||||
|             }); | ||||
|           }; | ||||
|           reader.readAsDataURL(blob); | ||||
|         }); | ||||
|  | ||||
|       } catch (e) { | ||||
|       if (!resp.ok) { | ||||
|         continue; | ||||
|       } | ||||
|  | ||||
|       const blob = await resp.blob(); | ||||
|  | ||||
|       // Convert blob to base64 | ||||
|       const reader = new FileReader(); | ||||
|       return await new Promise(resolve => { | ||||
|         reader.onloadend = () => { | ||||
|           resolve({ | ||||
|             url: icon.href, | ||||
|             base64: reader.result.split(",")[1] | ||||
|           }); | ||||
|         }; | ||||
|         reader.readAsDataURL(blob); | ||||
|       }); | ||||
|  | ||||
|     } catch (e) { | ||||
|       continue; | ||||
|     } | ||||
|   } | ||||
|  | ||||
|     // nothing found | ||||
|     return null; | ||||
|   }; | ||||
|  | ||||
|   // Auto-execute and return result for page.evaluate() | ||||
|   return await window.getFaviconAsBlob(); | ||||
|   // nothing found | ||||
|   return null; | ||||
| })(); | ||||
|  | ||||
|   | ||||
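The in-page script above collects the <link rel="icon"> candidates, fetches the preferred one and returns it base64-encoded. A rough Python analogue of that final fetch-and-encode step (the real code runs as JavaScript inside the browser page; this is only an illustration using requests):

    import base64
    import requests

    def favicon_as_base64(url, timeout=2):
        # Download the icon and return its base64 payload, like the in-page script does
        resp = requests.get(url, timeout=timeout, allow_redirects=True)
        resp.raise_for_status()
        return {'url': url, 'base64': base64.b64encode(resp.content).decode('ascii')}

    # e.g. favicon_as_base64('https://example.com/favicon.ico')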
| @@ -47,7 +47,6 @@ async () => { | ||||
|             'nicht lieferbar', | ||||
|             'nicht verfügbar', | ||||
|             'nicht vorrätig', | ||||
|             'nicht mehr lieferbar', | ||||
|             'nicht zur verfügung', | ||||
|             'nie znaleziono produktów', | ||||
|             'niet beschikbaar', | ||||
|   | ||||
| @@ -4,10 +4,9 @@ import time | ||||
| from loguru import logger | ||||
| from changedetectionio.content_fetchers.base import Fetcher | ||||
|  | ||||
|  | ||||
| class fetcher(Fetcher): | ||||
|     if os.getenv("WEBDRIVER_URL"): | ||||
|         fetcher_description = f"WebDriver Chrome/Javascript via \"{os.getenv('WEBDRIVER_URL', '')}\"" | ||||
|         fetcher_description = "WebDriver Chrome/Javascript via '{}'".format(os.getenv("WEBDRIVER_URL")) | ||||
|     else: | ||||
|         fetcher_description = "WebDriver Chrome/Javascript" | ||||
|  | ||||
| @@ -26,6 +25,7 @@ class fetcher(Fetcher): | ||||
|             self.browser_connection_is_custom = True | ||||
|             self.browser_connection_url = custom_browser_connection_url | ||||
|  | ||||
|  | ||||
|         ##### PROXY SETUP ##### | ||||
|  | ||||
|         proxy_sources = [ | ||||
| @@ -38,7 +38,7 @@ class fetcher(Fetcher): | ||||
|             os.getenv('webdriver_proxyHttps'), | ||||
|             os.getenv('webdriver_httpsProxy'), | ||||
|             os.getenv('webdriver_sslProxy'), | ||||
|             proxy_override,  # last one should override | ||||
|             proxy_override, # last one should override | ||||
|         ] | ||||
|         # The built in selenium proxy handling is super unreliable!!! so we just grab which ever proxy setting we can find and throw it in --proxy-server= | ||||
|         for k in filter(None, proxy_sources): | ||||
| @@ -46,21 +46,20 @@ class fetcher(Fetcher): | ||||
|                 continue | ||||
|             self.proxy_url = k.strip() | ||||
|  | ||||
|  | ||||
|     async def run(self, | ||||
|                   fetch_favicon=True, | ||||
|                   current_include_filters=None, | ||||
|                   empty_pages_are_a_change=False, | ||||
|                   ignore_status_codes=False, | ||||
|                   is_binary=False, | ||||
|                   request_body=None, | ||||
|                   request_headers=None, | ||||
|                   request_method=None, | ||||
|                   timeout=None, | ||||
|                   url=None, | ||||
|                   ): | ||||
|             url, | ||||
|             timeout, | ||||
|             request_headers, | ||||
|             request_body, | ||||
|             request_method, | ||||
|             ignore_status_codes=False, | ||||
|             current_include_filters=None, | ||||
|             is_binary=False, | ||||
|             empty_pages_are_a_change=False): | ||||
|  | ||||
|         import asyncio | ||||
|  | ||||
|          | ||||
|         # Wrap the entire selenium operation in a thread executor | ||||
|         def _run_sync(): | ||||
|             from selenium.webdriver.chrome.options import Options as ChromeOptions | ||||
| @@ -141,3 +140,4 @@ class fetcher(Fetcher): | ||||
|         # Run the selenium operations in a thread pool to avoid blocking the event loop | ||||
|         loop = asyncio.get_event_loop() | ||||
|         await loop.run_in_executor(None, _run_sync) | ||||
|  | ||||
|   | ||||
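The Selenium fetcher above wraps its blocking WebDriver work in loop.run_in_executor() so the async worker's event loop is not stalled. A minimal sketch of that pattern with a dummy blocking function standing in for the WebDriver calls:

    import asyncio
    import time

    def blocking_fetch():
        # Stand-in for the synchronous Selenium/WebDriver work
        time.sleep(0.1)
        return '<html>example</html>'

    async def main():
        loop = asyncio.get_running_loop()
        # Hand the blocking call to the default thread pool so the event loop stays responsive
        content = await loop.run_in_executor(None, blocking_fetch)
        print(len(content))

    asyncio.run(main())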
| @@ -12,7 +12,7 @@ from blinker import signal | ||||
|  | ||||
| from changedetectionio.strtobool import strtobool | ||||
| from threading import Event | ||||
| from changedetectionio.queue_handlers import RecheckPriorityQueue, NotificationQueue | ||||
| from changedetectionio.custom_queue import SignalPriorityQueue, AsyncSignalPriorityQueue, NotificationQueue | ||||
| from changedetectionio import worker_handler | ||||
|  | ||||
| from flask import ( | ||||
| @@ -39,11 +39,6 @@ from loguru import logger | ||||
| from changedetectionio import __version__ | ||||
| from changedetectionio import queuedWatchMetaData | ||||
| from changedetectionio.api import Watch, WatchHistory, WatchSingleHistory, CreateWatch, Import, SystemInfo, Tag, Tags, Notifications, WatchFavicon | ||||
| from changedetectionio.notification.BrowserNotifications import ( | ||||
|     BrowserNotificationsVapidPublicKey, | ||||
|     BrowserNotificationsSubscribe,  | ||||
|     BrowserNotificationsUnsubscribe | ||||
| ) | ||||
| from changedetectionio.api.Search import Search | ||||
| from .time_handler import is_within_schedule | ||||
|  | ||||
| @@ -53,8 +48,8 @@ datastore = None | ||||
| ticker_thread = None | ||||
| extra_stylesheets = [] | ||||
|  | ||||
| # Use bulletproof janus-based queues for sync/async reliability   | ||||
| update_q = RecheckPriorityQueue() | ||||
| # Use async queue by default, keep sync for backward compatibility   | ||||
| update_q = AsyncSignalPriorityQueue() if worker_handler.USE_ASYNC_WORKERS else SignalPriorityQueue() | ||||
| notification_q = NotificationQueue() | ||||
| MAX_QUEUE_SIZE = 2000 | ||||
|  | ||||
| @@ -99,7 +94,6 @@ except locale.Error: | ||||
|     logger.warning(f"Unable to set locale {default_locale}, locale is not installed maybe?") | ||||
|  | ||||
| watch_api = Api(app, decorators=[csrf.exempt]) | ||||
| browser_notification_api = Api(app, decorators=[csrf.exempt]) | ||||
|  | ||||
| def init_app_secret(datastore_path): | ||||
|     secret = "" | ||||
| @@ -335,18 +329,13 @@ def changedetection_app(config=None, datastore_o=None): | ||||
|                            resource_class_kwargs={'datastore': datastore}) | ||||
|  | ||||
|     watch_api.add_resource(Tag, '/api/v1/tag', '/api/v1/tag/<string:uuid>', | ||||
|                            resource_class_kwargs={'datastore': datastore, 'update_q': update_q}) | ||||
|                            resource_class_kwargs={'datastore': datastore}) | ||||
|                             | ||||
|     watch_api.add_resource(Search, '/api/v1/search', | ||||
|                            resource_class_kwargs={'datastore': datastore}) | ||||
|  | ||||
|     watch_api.add_resource(Notifications, '/api/v1/notifications', | ||||
|                            resource_class_kwargs={'datastore': datastore}) | ||||
|      | ||||
|     # Browser notification endpoints | ||||
|     browser_notification_api.add_resource(BrowserNotificationsVapidPublicKey, '/browser-notifications-api/vapid-public-key') | ||||
|     browser_notification_api.add_resource(BrowserNotificationsSubscribe, '/browser-notifications-api/subscribe') | ||||
|     browser_notification_api.add_resource(BrowserNotificationsUnsubscribe, '/browser-notifications-api/unsubscribe') | ||||
|  | ||||
|     @login_manager.user_loader | ||||
|     def user_loader(email): | ||||
| @@ -449,17 +438,8 @@ def changedetection_app(config=None, datastore_o=None): | ||||
|  | ||||
|             favicon_filename = watch.get_favicon_filename() | ||||
|             if favicon_filename: | ||||
|                 try: | ||||
|                     import magic | ||||
|                     mime = magic.from_file( | ||||
|                         os.path.join(watch.watch_data_dir, favicon_filename), | ||||
|                         mime=True | ||||
|                     ) | ||||
|                 except ImportError: | ||||
|                     # Fallback, no python-magic | ||||
|                     import mimetypes | ||||
|                     mime, encoding = mimetypes.guess_type(favicon_filename) | ||||
|  | ||||
|                 import mimetypes | ||||
|                 mime, encoding = mimetypes.guess_type(favicon_filename) | ||||
|                 response = make_response(send_from_directory(watch.watch_data_dir, favicon_filename)) | ||||
|                 response.headers['Content-type'] = mime | ||||
|                 response.headers['Cache-Control'] = 'max-age=300, must-revalidate'  # Cache for 5 minutes, then revalidate | ||||
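The removed hunk above prefers python-magic for sniffing the favicon's MIME type and falls back to the standard library's mimetypes when that optional dependency is missing. A compact sketch of that fallback; the example path in the comment is hypothetical:

    import mimetypes

    def guess_favicon_mime(path):
        try:
            import magic  # python-magic, optional dependency
            return magic.from_file(path, mime=True)
        except ImportError:
            mime, _encoding = mimetypes.guess_type(path)
            return mime or 'application/octet-stream'

    # e.g. guess_favicon_mime('/datastore/<uuid>/favicon.png')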
| @@ -500,29 +480,10 @@ def changedetection_app(config=None, datastore_o=None): | ||||
|         except FileNotFoundError: | ||||
|             abort(404) | ||||
|  | ||||
|     @app.route("/service-worker.js", methods=['GET']) | ||||
|     def service_worker(): | ||||
|         from flask import make_response | ||||
|         try: | ||||
|             # Serve from the changedetectionio/static/js directory | ||||
|             static_js_path = os.path.join(os.path.dirname(__file__), 'static', 'js') | ||||
|             response = make_response(send_from_directory(static_js_path, "service-worker.js")) | ||||
|             response.headers['Content-Type'] = 'application/javascript' | ||||
|             response.headers['Service-Worker-Allowed'] = '/' | ||||
|             response.headers['Cache-Control'] = 'no-cache, no-store, must-revalidate' | ||||
|             response.headers['Pragma'] = 'no-cache' | ||||
|             response.headers['Expires'] = '0' | ||||
|             return response | ||||
|         except FileNotFoundError: | ||||
|             abort(404) | ||||
|  | ||||
|  | ||||
|     import changedetectionio.blueprint.browser_steps as browser_steps | ||||
|     app.register_blueprint(browser_steps.construct_blueprint(datastore), url_prefix='/browser-steps') | ||||
|  | ||||
|     import changedetectionio.blueprint.browser_notifications.browser_notifications as browser_notifications | ||||
|     app.register_blueprint(browser_notifications.construct_blueprint(datastore), url_prefix='/browser-notifications') | ||||
|  | ||||
|     from changedetectionio.blueprint.imports import construct_blueprint as construct_import_blueprint | ||||
|     app.register_blueprint(construct_import_blueprint(datastore, update_q, queuedWatchMetaData), url_prefix='/imports') | ||||
|  | ||||
| @@ -874,22 +835,16 @@ def ticker_thread_check_time_launch_checks(): | ||||
|  | ||||
|                     # Use Epoch time as priority, so we get a "sorted" PriorityQueue, but we can still push a priority 1 into it. | ||||
|                     priority = int(time.time()) | ||||
|                     logger.debug( | ||||
|                         f"> Queued watch UUID {uuid} " | ||||
|                         f"last checked at {watch['last_checked']} " | ||||
|                         f"queued at {now:0.2f} priority {priority} " | ||||
|                         f"jitter {watch.jitter_seconds:0.2f}s, " | ||||
|                         f"{now - watch['last_checked']:0.2f}s since last checked") | ||||
|  | ||||
|                     # Into the queue with you | ||||
|                     queued_successfully = worker_handler.queue_item_async_safe(update_q, | ||||
|                                                                                queuedWatchMetaData.PrioritizedItem(priority=priority, | ||||
|                                                                                                                    item={'uuid': uuid}) | ||||
|                                                                                ) | ||||
|                     if queued_successfully: | ||||
|                         logger.debug( | ||||
|                             f"> Queued watch UUID {uuid} " | ||||
|                             f"last checked at {watch['last_checked']} " | ||||
|                             f"queued at {now:0.2f} priority {priority} " | ||||
|                             f"jitter {watch.jitter_seconds:0.2f}s, " | ||||
|                             f"{now - watch['last_checked']:0.2f}s since last checked") | ||||
|                     else: | ||||
|                         logger.critical(f"CRITICAL: Failed to queue watch UUID {uuid} in ticker thread!") | ||||
|                          | ||||
|                     worker_handler.queue_item_async_safe(update_q, queuedWatchMetaData.PrioritizedItem(priority=priority, item={'uuid': uuid})) | ||||
|  | ||||
|                     # Reset for next time | ||||
|                     watch.jitter_seconds = 0 | ||||
|  | ||||
|   | ||||
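The ticker hunk above queues each due watch with the current epoch time as its priority, so the priority queue behaves like an age-ordered FIFO while an explicit priority of 1 can still jump the line. A small sketch with the standard-library PriorityQueue and an illustrative PrioritizedItem dataclass:

    import queue
    import time
    from dataclasses import dataclass, field
    from typing import Any

    @dataclass(order=True)
    class PrioritizedItem:
        priority: int
        item: Any = field(compare=False)  # only the priority participates in ordering

    q = queue.PriorityQueue()
    q.put(PrioritizedItem(priority=int(time.time()), item={'uuid': 'scheduled-check'}))
    q.put(PrioritizedItem(priority=1, item={'uuid': 'recheck-now'}))
    print(q.get().item)  # -> {'uuid': 'recheck-now'}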
| @@ -23,14 +23,11 @@ from wtforms import ( | ||||
| ) | ||||
| from flask_wtf.file import FileField, FileAllowed | ||||
| from wtforms.fields import FieldList | ||||
| from wtforms.utils import unset_value | ||||
|  | ||||
| from wtforms.validators import ValidationError | ||||
|  | ||||
| from validators.url import url as url_validator | ||||
|  | ||||
| from changedetectionio.widgets import TernaryNoneBooleanField | ||||
|  | ||||
|  | ||||
| # default | ||||
| # each select <option data-enabled="enabled-0-0" | ||||
| @@ -57,8 +54,6 @@ valid_method = { | ||||
|  | ||||
| default_method = 'GET' | ||||
| allow_simplehost = not strtobool(os.getenv('BLOCK_SIMPLEHOSTS', 'False')) | ||||
| REQUIRE_ATLEAST_ONE_TIME_PART_MESSAGE_DEFAULT='At least one time interval (weeks, days, hours, minutes, or seconds) must be specified.' | ||||
| REQUIRE_ATLEAST_ONE_TIME_PART_WHEN_NOT_GLOBAL_DEFAULT='At least one time interval (weeks, days, hours, minutes, or seconds) must be specified when not using global settings.' | ||||
|  | ||||
| class StringListField(StringField): | ||||
|     widget = widgets.TextArea() | ||||
| @@ -215,35 +210,6 @@ class ScheduleLimitForm(Form): | ||||
|         self.sunday.form.enabled.label.text = "Sunday" | ||||
|  | ||||
|  | ||||
| def validate_time_between_check_has_values(form): | ||||
|     """ | ||||
|     Custom validation function for TimeBetweenCheckForm. | ||||
|     Returns True if at least one time interval field has a value > 0. | ||||
|     """ | ||||
|     res = any([ | ||||
|         form.weeks.data and int(form.weeks.data) > 0, | ||||
|         form.days.data and int(form.days.data) > 0, | ||||
|         form.hours.data and int(form.hours.data) > 0, | ||||
|         form.minutes.data and int(form.minutes.data) > 0, | ||||
|         form.seconds.data and int(form.seconds.data) > 0 | ||||
|     ]) | ||||
|  | ||||
|     return res | ||||
|  | ||||
|  | ||||
| class RequiredTimeInterval(object): | ||||
|     """ | ||||
|     WTForms validator that ensures at least one time interval field has a value > 0. | ||||
|     Use this with FormField(TimeBetweenCheckForm, validators=[RequiredTimeInterval()]). | ||||
|     """ | ||||
|     def __init__(self, message=None): | ||||
|         self.message = message or 'At least one time interval (weeks, days, hours, minutes, or seconds) must be specified.' | ||||
|  | ||||
|     def __call__(self, form, field): | ||||
|         if not validate_time_between_check_has_values(field.form): | ||||
|             raise ValidationError(self.message) | ||||
|  | ||||
|  | ||||
| class TimeBetweenCheckForm(Form): | ||||
|     weeks = IntegerField('Weeks', validators=[validators.Optional(), validators.NumberRange(min=0, message="Should contain zero or more seconds")]) | ||||
|     days = IntegerField('Days', validators=[validators.Optional(), validators.NumberRange(min=0, message="Should contain zero or more seconds")]) | ||||
| @@ -252,123 +218,6 @@ class TimeBetweenCheckForm(Form): | ||||
|     seconds = IntegerField('Seconds', validators=[validators.Optional(), validators.NumberRange(min=0, message="Should contain zero or more seconds")]) | ||||
|     # @todo add total seconds minimum validator = minimum_seconds_recheck_time | ||||
|  | ||||
|     def __init__(self, formdata=None, obj=None, prefix="", data=None, meta=None, **kwargs): | ||||
|         super().__init__(formdata, obj, prefix, data, meta, **kwargs) | ||||
|         self.require_at_least_one = kwargs.get('require_at_least_one', False) | ||||
|         self.require_at_least_one_message = kwargs.get('require_at_least_one_message', REQUIRE_ATLEAST_ONE_TIME_PART_MESSAGE_DEFAULT) | ||||
|  | ||||
|     def validate(self, **kwargs): | ||||
|         """Custom validation that can optionally require at least one time interval.""" | ||||
|         # Run normal field validation first | ||||
|         if not super().validate(**kwargs): | ||||
|             return False | ||||
|  | ||||
|         # Apply optional "at least one" validation | ||||
|         if self.require_at_least_one: | ||||
|             if not validate_time_between_check_has_values(self): | ||||
|                 # Add error to the form's general errors (not field-specific) | ||||
|                 if not hasattr(self, '_formdata_errors'): | ||||
|                     self._formdata_errors = [] | ||||
|                 self._formdata_errors.append(self.require_at_least_one_message) | ||||
|                 return False | ||||
|  | ||||
|         return True | ||||
|  | ||||
|  | ||||
| class EnhancedFormField(FormField): | ||||
|     """ | ||||
|     An enhanced FormField that supports conditional validation with top-level error messages. | ||||
|     Adds a 'top_errors' property for validation errors at the FormField level. | ||||
|     """ | ||||
|  | ||||
|     def __init__(self, form_class, label=None, validators=None, separator="-", | ||||
|                  conditional_field=None, conditional_message=None, conditional_test_function=None, **kwargs): | ||||
|         """ | ||||
|         Initialize EnhancedFormField with optional conditional validation. | ||||
|  | ||||
|         :param conditional_field: Name of the field this FormField depends on (e.g. 'time_between_check_use_default') | ||||
|         :param conditional_message: Error message to show when validation fails | ||||
|         :param conditional_test_function: Custom function to test if FormField has valid values. | ||||
|                                         Should take self.form as parameter and return True if valid. | ||||
|         """ | ||||
|         super().__init__(form_class, label, validators, separator, **kwargs) | ||||
|         self.top_errors = [] | ||||
|         self.conditional_field = conditional_field | ||||
|         self.conditional_message = conditional_message or "At least one field must have a value when not using defaults." | ||||
|         self.conditional_test_function = conditional_test_function | ||||
|  | ||||
|     def validate(self, form, extra_validators=()): | ||||
|         """ | ||||
|         Custom validation that supports conditional logic and stores top-level errors. | ||||
|         """ | ||||
|         self.top_errors = [] | ||||
|  | ||||
|         # First run the normal FormField validation | ||||
|         base_valid = super().validate(form, extra_validators) | ||||
|  | ||||
|         # Apply conditional validation if configured | ||||
|         if self.conditional_field and hasattr(form, self.conditional_field): | ||||
|             conditional_field_obj = getattr(form, self.conditional_field) | ||||
|  | ||||
|             # If the conditional field is False/unchecked, check if this FormField has any values | ||||
|             if not conditional_field_obj.data: | ||||
|                 # Use custom test function if provided, otherwise use generic fallback | ||||
|                 if self.conditional_test_function: | ||||
|                     has_any_value = self.conditional_test_function(self.form) | ||||
|                 else: | ||||
|                     # Generic fallback - check if any field has truthy data | ||||
|                     has_any_value = any(field.data for field in self.form if hasattr(field, 'data') and field.data) | ||||
|  | ||||
|                 if not has_any_value: | ||||
|                     self.top_errors.append(self.conditional_message) | ||||
|                     base_valid = False | ||||
|  | ||||
|         return base_valid | ||||
|  | ||||
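Editor's note: a sketch of how the conditional validation above is meant to be wired, mirroring the usage further down in this diff; the parent form here is hypothetical and assumes EnhancedFormField, TimeBetweenCheckForm and validate_time_between_check_has_values from this module.

from wtforms import Form, BooleanField

class ExampleWatchSettingsForm(Form):
    time_between_check_use_default = BooleanField('Use global settings', default=False)
    time_between_check = EnhancedFormField(
        TimeBetweenCheckForm,
        conditional_field='time_between_check_use_default',
        conditional_test_function=validate_time_between_check_has_values,
    )

f = ExampleWatchSettingsForm(data={'time_between_check_use_default': False})
print(f.validate())                     # False - defaults not in use and no interval was given
print(f.time_between_check.top_errors)  # ['At least one field must have a value when not using defaults.']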
|  | ||||
| class RequiredFormField(FormField): | ||||
|     """ | ||||
|     A FormField that passes require_at_least_one=True to TimeBetweenCheckForm. | ||||
|     Use this when you want the sub-form to always require at least one value. | ||||
|     """ | ||||
|  | ||||
|     def __init__(self, form_class, label=None, validators=None, separator="-", **kwargs): | ||||
|         super().__init__(form_class, label, validators, separator, **kwargs) | ||||
|  | ||||
|     def process(self, formdata, data=unset_value, extra_filters=None): | ||||
|         if extra_filters: | ||||
|             raise TypeError( | ||||
|                 "FormField cannot take filters, as the encapsulated" | ||||
|                 "data is not mutable." | ||||
|             ) | ||||
|  | ||||
|         if data is unset_value: | ||||
|             try: | ||||
|                 data = self.default() | ||||
|             except TypeError: | ||||
|                 data = self.default | ||||
|             self._obj = data | ||||
|  | ||||
|         self.object_data = data | ||||
|  | ||||
|         prefix = self.name + self.separator | ||||
|         # Pass require_at_least_one=True to the sub-form | ||||
|         if isinstance(data, dict): | ||||
|             self.form = self.form_class(formdata=formdata, prefix=prefix, require_at_least_one=True, **data) | ||||
|         else: | ||||
|             self.form = self.form_class(formdata=formdata, obj=data, prefix=prefix, require_at_least_one=True) | ||||
|  | ||||
|     @property | ||||
|     def errors(self): | ||||
|         """Include sub-form validation errors""" | ||||
|         form_errors = self.form.errors | ||||
|         # Add any general form errors to a special 'form' key | ||||
|         if hasattr(self.form, '_formdata_errors') and self.form._formdata_errors: | ||||
|             form_errors = dict(form_errors)  # Make a copy | ||||
|             form_errors['form'] = self.form._formdata_errors | ||||
|         return form_errors | ||||
|  | ||||
|  | ||||
| # Separated by  key:value | ||||
| class StringDictKeyValue(StringField): | ||||
|     widget = widgets.TextArea() | ||||
| @@ -497,7 +346,7 @@ class ValidateJinja2Template(object): | ||||
|         joined_data = ' '.join(map(str, field.data)) if isinstance(field.data, list) else f"{field.data}" | ||||
|  | ||||
|         try: | ||||
|             jinja2_env = ImmutableSandboxedEnvironment(loader=BaseLoader, extensions=['jinja2_time.TimeExtension']) | ||||
|             jinja2_env = ImmutableSandboxedEnvironment(loader=BaseLoader) | ||||
|             jinja2_env.globals.update(notification.valid_tokens) | ||||
|             # Extra validation tokens provided on the form_class(... extra_tokens={}) setup | ||||
|             if hasattr(field, 'extra_notification_tokens'): | ||||
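Editor's note: a short sketch of the sandboxed rendering this validator guards; the template string and token value are hypothetical, and the real validator only compiles the template rather than rendering it with live data.

from jinja2 import BaseLoader
from jinja2.sandbox import ImmutableSandboxedEnvironment

env = ImmutableSandboxedEnvironment(loader=BaseLoader)
print(env.from_string("{{ watch_url }} had a change.").render(watch_url="https://example.com"))
# -> https://example.com had a change.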
| @@ -547,19 +396,6 @@ def validate_url(test_url): | ||||
|         # This should be wtforms.validators. | ||||
|         raise ValidationError('Watch protocol is not permitted by SAFE_PROTOCOL_REGEX or incorrect URL format') | ||||
|  | ||||
|  | ||||
| class ValidateSinglePythonRegexString(object): | ||||
|     def __init__(self, message=None): | ||||
|         self.message = message | ||||
|  | ||||
|     def __call__(self, form, field): | ||||
|         try: | ||||
|             re.compile(field.data) | ||||
|         except re.error: | ||||
|             message = field.gettext('RegEx \'%s\' is not a valid regular expression.') | ||||
|             raise ValidationError(message % (field.data)) | ||||
|  | ||||
|  | ||||
| class ValidateListRegex(object): | ||||
|     """ | ||||
|     Validates that anything that looks like a regex passes as a regex | ||||
| @@ -578,7 +414,6 @@ class ValidateListRegex(object): | ||||
|                     message = field.gettext('RegEx \'%s\' is not a valid regular expression.') | ||||
|                     raise ValidationError(message % (line)) | ||||
|  | ||||
|  | ||||
| class ValidateCSSJSONXPATHInput(object): | ||||
|     """ | ||||
|     Filter validation | ||||
| @@ -699,6 +534,7 @@ class commonSettingsForm(Form): | ||||
|         self.notification_title.extra_notification_tokens = kwargs.get('extra_notification_tokens', {}) | ||||
|         self.notification_urls.extra_notification_tokens = kwargs.get('extra_notification_tokens', {}) | ||||
|  | ||||
|     extract_title_as_title = BooleanField('Extract <title> from document and use as watch title', default=False) | ||||
|     fetch_backend = RadioField(u'Fetch Method', choices=content_fetchers.available_fetchers(), validators=[ValidateContentFetcherIsReady()]) | ||||
|     notification_body = TextAreaField('Notification Body', default='{{ watch_url }} had a change.', validators=[validators.Optional(), ValidateJinja2Template()]) | ||||
|     notification_format = SelectField('Notification format', choices=valid_notification_formats.keys()) | ||||
| @@ -707,7 +543,6 @@ class commonSettingsForm(Form): | ||||
|     processor = RadioField( label=u"Processor - What do you want to achieve?", choices=processors.available_processors(), default="text_json_diff") | ||||
|     timezone = StringField("Timezone for watch schedule", render_kw={"list": "timezones"}, validators=[validateTimeZoneName()]) | ||||
|     webdriver_delay = IntegerField('Wait seconds before extracting text', validators=[validators.Optional(), validators.NumberRange(min=1, message="Should contain one or more seconds")]) | ||||
|      | ||||
|  | ||||
|  | ||||
| class importForm(Form): | ||||
| @@ -733,16 +568,11 @@ class processor_text_json_diff_form(commonSettingsForm): | ||||
|     url = fields.URLField('URL', validators=[validateURL()]) | ||||
|     tags = StringTagUUID('Group tag', [validators.Optional()], default='') | ||||
|  | ||||
|     time_between_check = EnhancedFormField( | ||||
|         TimeBetweenCheckForm, | ||||
|         conditional_field='time_between_check_use_default', | ||||
|         conditional_message=REQUIRE_ATLEAST_ONE_TIME_PART_WHEN_NOT_GLOBAL_DEFAULT, | ||||
|         conditional_test_function=validate_time_between_check_has_values | ||||
|     ) | ||||
|     time_between_check = FormField(TimeBetweenCheckForm) | ||||
|  | ||||
|     time_schedule_limit = FormField(ScheduleLimitForm) | ||||
|  | ||||
|     time_between_check_use_default = BooleanField('Use global settings for time between check and scheduler.', default=False) | ||||
|     time_between_check_use_default = BooleanField('Use global settings for time between check', default=False) | ||||
|  | ||||
|     include_filters = StringListField('CSS/JSONPath/JQ/XPath Filters', [ValidateCSSJSONXPATHInput()], default='') | ||||
|  | ||||
| @@ -772,18 +602,18 @@ class processor_text_json_diff_form(commonSettingsForm): | ||||
|     text_should_not_be_present = StringListField('Block change-detection while text matches', [validators.Optional(), ValidateListRegex()]) | ||||
|     webdriver_js_execute_code = TextAreaField('Execute JavaScript before change detection', render_kw={"rows": "5"}, validators=[validators.Optional()]) | ||||
|  | ||||
|     save_button = SubmitField('Save', render_kw={"class": "pure-button pure-button-primary"}) | ||||
|     save_button = SubmitField('Save', render_kw={"class": "pure-button button-small pure-button-primary"}) | ||||
|  | ||||
|     proxy = RadioField('Proxy') | ||||
|     # filter_failure_notification_send @todo make ternary | ||||
|     filter_failure_notification_send = BooleanField( | ||||
|         'Send a notification when the filter can no longer be found on the page', default=False) | ||||
|     notification_muted = TernaryNoneBooleanField('Notifications', default=None, yes_text="Muted", no_text="On") | ||||
|  | ||||
|     notification_muted = BooleanField('Notifications Muted / Off', default=False) | ||||
|     notification_screenshot = BooleanField('Attach screenshot to notification (where possible)', default=False) | ||||
|  | ||||
|     conditions_match_logic = RadioField(u'Match', choices=[('ALL', 'Match all of the following'),('ANY', 'Match any of the following')], default='ALL') | ||||
|     conditions = FieldList(FormField(ConditionFormRow), min_entries=1)  # Add rule logic here | ||||
|     use_page_title_in_list = TernaryNoneBooleanField('Use page <title> in list', default=None) | ||||
|  | ||||
|  | ||||
|     def extra_tab_content(self): | ||||
|         return None | ||||
| @@ -883,7 +713,7 @@ class DefaultUAInputForm(Form): | ||||
|  | ||||
| # datastore.data['settings']['requests'].. | ||||
| class globalSettingsRequestForm(Form): | ||||
|     time_between_check = RequiredFormField(TimeBetweenCheckForm) | ||||
|     time_between_check = FormField(TimeBetweenCheckForm) | ||||
|     time_schedule_limit = FormField(ScheduleLimitForm) | ||||
|     proxy = RadioField('Proxy') | ||||
|     jitter_seconds = IntegerField('Random jitter seconds ± check', | ||||
| @@ -910,8 +740,6 @@ class globalSettingsRequestForm(Form): | ||||
| class globalSettingsApplicationUIForm(Form): | ||||
|     open_diff_in_new_tab = BooleanField("Open 'History' page in a new tab", default=True, validators=[validators.Optional()]) | ||||
|     socket_io_enabled = BooleanField('Realtime UI Updates Enabled', default=True, validators=[validators.Optional()]) | ||||
|     favicons_enabled = BooleanField('Favicons Enabled', default=True, validators=[validators.Optional()]) | ||||
|     use_page_title_in_list = BooleanField('Use page <title> in watch overview list') #BooleanField=True | ||||
|  | ||||
| # datastore.data['settings']['application'].. | ||||
| class globalSettingsApplicationForm(commonSettingsForm): | ||||
| @@ -936,7 +764,7 @@ class globalSettingsApplicationForm(commonSettingsForm): | ||||
|  | ||||
|     removepassword_button = SubmitField('Remove password', render_kw={"class": "pure-button pure-button-primary"}) | ||||
|     render_anchor_tag_content = BooleanField('Render anchor tag content', default=False) | ||||
|     shared_diff_access = BooleanField('Allow anonymous access to watch history page when password is enabled', default=False, validators=[validators.Optional()]) | ||||
|     shared_diff_access = BooleanField('Allow access to view diff page when password is enabled', default=False, validators=[validators.Optional()]) | ||||
|     rss_hide_muted_watches = BooleanField('Hide muted watches from RSS feed', default=True, | ||||
|                                       validators=[validators.Optional()]) | ||||
|     filter_failure_notification_threshold_attempts = IntegerField('Number of times the filter can be missing before sending a notification', | ||||
| @@ -958,9 +786,9 @@ class globalSettingsForm(Form): | ||||
|  | ||||
|     requests = FormField(globalSettingsRequestForm) | ||||
|     application = FormField(globalSettingsApplicationForm) | ||||
|     save_button = SubmitField('Save', render_kw={"class": "pure-button pure-button-primary"}) | ||||
|     save_button = SubmitField('Save', render_kw={"class": "pure-button button-small pure-button-primary"}) | ||||
|  | ||||
|  | ||||
| class extractDataForm(Form): | ||||
|     extract_regex = StringField('RegEx to extract', validators=[validators.DataRequired(), ValidateSinglePythonRegexString()]) | ||||
|     extract_regex = StringField('RegEx to extract', validators=[validators.Length(min=1, message="Needs a RegEx")]) | ||||
|     extract_submit_button = SubmitField('Extract as CSV', render_kw={"class": "pure-button pure-button-primary"}) | ||||
|   | ||||
| @@ -1,7 +1,6 @@ | ||||
| from loguru import logger | ||||
| from lxml import etree | ||||
| from typing import List | ||||
| import html | ||||
| import json | ||||
| import re | ||||
|  | ||||
| @@ -10,11 +9,6 @@ TEXT_FILTER_LIST_LINE_SUFFIX = "<br>" | ||||
| TRANSLATE_WHITESPACE_TABLE = str.maketrans('', '', '\r\n\t ') | ||||
| PERL_STYLE_REGEX = r'^/(.*?)/([a-z]*)?$' | ||||
|  | ||||
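Editor's note: a small sketch of the entry format PERL_STYLE_REGEX recognises, i.e. /pattern/flags lines in the filter lists; the sample strings are hypothetical.

import re
print(bool(re.match(PERL_STYLE_REGEX, "/socks?/i")))        # True  - perl-style regex with flags
print(bool(re.match(PERL_STYLE_REGEX, "just plain text")))  # False - treated as a literal line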
| TITLE_RE = re.compile(r"<title[^>]*>(.*?)</title>", re.I | re.S) | ||||
| META_CS  = re.compile(r'<meta[^>]+charset=["\']?\s*([a-z0-9_\-:+.]+)', re.I) | ||||
| META_CT  = re.compile(r'<meta[^>]+http-equiv=["\']?content-type["\']?[^>]*content=["\'][^>]*charset=([a-z0-9_\-:+.]+)', re.I) | ||||
|  | ||||
|  | ||||
| # 'price', 'lowPrice', 'highPrice' are usually under here | ||||
| # All of those may or may not appear on different websites - I didn't find a way to do case-insensitive searching here | ||||
| LD_JSON_PRODUCT_OFFER_SELECTORS = ["json:$..offers", "json:$..Offers"] | ||||
| @@ -516,43 +510,3 @@ def get_triggered_text(content, trigger_text): | ||||
|         i += 1 | ||||
|  | ||||
|     return triggered_text | ||||
|  | ||||
|  | ||||
| def extract_title(data: bytes | str, sniff_bytes: int = 2048, scan_chars: int = 8192) -> str | None: | ||||
|     try: | ||||
|         # Only decode/process the prefix we need for title extraction | ||||
|         match data: | ||||
|             case bytes() if data.startswith((b"\xff\xfe", b"\xfe\xff")): | ||||
|                 prefix = data[:scan_chars * 2].decode("utf-16", errors="replace") | ||||
|             case bytes() if data.startswith((b"\xff\xfe\x00\x00", b"\x00\x00\xfe\xff")): | ||||
|                 prefix = data[:scan_chars * 4].decode("utf-32", errors="replace") | ||||
|             case bytes(): | ||||
|                 try: | ||||
|                     prefix = data[:scan_chars].decode("utf-8") | ||||
|                 except UnicodeDecodeError: | ||||
|                     try: | ||||
|                         head = data[:sniff_bytes].decode("ascii", errors="ignore") | ||||
|                         if m := (META_CS.search(head) or META_CT.search(head)): | ||||
|                             enc = m.group(1).lower() | ||||
|                         else: | ||||
|                             enc = "cp1252" | ||||
|                         prefix = data[:scan_chars * 2].decode(enc, errors="replace") | ||||
|                     except Exception as e: | ||||
|                         logger.error(f"Title extraction encoding detection failed: {e}") | ||||
|                         return None | ||||
|             case str(): | ||||
|                 prefix = data[:scan_chars] if len(data) > scan_chars else data | ||||
|             case _: | ||||
|                 logger.error(f"Title extraction received unsupported data type: {type(data)}") | ||||
|                 return None | ||||
|  | ||||
|         # Search only in the prefix | ||||
|         if m := TITLE_RE.search(prefix): | ||||
|             title = html.unescape(" ".join(m.group(1).split())).strip() | ||||
|             # Some safe limit | ||||
|             return title[:2000] | ||||
|         return None | ||||
|          | ||||
|     except Exception as e: | ||||
|         logger.error(f"Title extraction failed: {e}") | ||||
|         return None | ||||
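Editor's note: a quick sketch of the function above on hypothetical input; only a bounded prefix is decoded, whitespace is collapsed and HTML entities are unescaped.

page = "<html><head><title>Stock &amp; price  watch</title></head><body>...</body></html>"
print(extract_title(page.encode("utf-8")))  # Stock & price watch
print(extract_title(page))                  # str input is accepted too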
| @@ -39,12 +39,12 @@ class model(dict): | ||||
|                     'api_access_token_enabled': True, | ||||
|                     'base_url' : None, | ||||
|                     'empty_pages_are_a_change': False, | ||||
|                     'extract_title_as_title': False, | ||||
|                     'fetch_backend': getenv("DEFAULT_FETCH_BACKEND", "html_requests"), | ||||
|                     'filter_failure_notification_threshold_attempts': _FILTER_FAILURE_THRESHOLD_ATTEMPTS_DEFAULT, | ||||
|                     'global_ignore_text': [], # List of text to ignore when calculating the comparison checksum | ||||
|                     'global_subtractive_selectors': [], | ||||
|                     'ignore_whitespace': True, | ||||
|                     'ignore_status_codes': False, #@todo implement, as ternary. | ||||
|                     'notification_body': default_notification_body, | ||||
|                     'notification_format': default_notification_format, | ||||
|                     'notification_title': default_notification_title, | ||||
| @@ -57,19 +57,12 @@ class model(dict): | ||||
|                     'rss_hide_muted_watches': True, | ||||
|                     'schema_version' : 0, | ||||
|                     'shared_diff_access': False, | ||||
|                     'webdriver_delay': None , # Extra delay in seconds before extracting text | ||||
|                     'tags': {}, #@todo use Tag.model initialisers | ||||
|                     'timezone': None, # Default IANA timezone name | ||||
|                     'webdriver_delay': None , # Extra delay in seconds before extracting text | ||||
|                     'ui': { | ||||
|                         'use_page_title_in_list': True, | ||||
|                         'open_diff_in_new_tab': True, | ||||
|                         'socket_io_enabled': True, | ||||
|                         'favicons_enabled': True | ||||
|                     }, | ||||
|                     'vapid': { | ||||
|                         'private_key': None, | ||||
|                         'public_key': None, | ||||
|                         'contact_email': None | ||||
|                     }, | ||||
|                 } | ||||
|             } | ||||
|   | ||||
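Editor's note: the 'vapid' settings block removed above stores a P-256 key pair as PEM plus a contact address. Below is a sketch of one way to produce compatible values with the cryptography package; the contact address is a placeholder and this bootstrap step is not part of the diff itself.

from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric import ec

priv = ec.generate_private_key(ec.SECP256R1())
vapid_settings = {
    'private_key': priv.private_bytes(
        serialization.Encoding.PEM,
        serialization.PrivateFormat.PKCS8,
        serialization.NoEncryption(),
    ).decode(),
    'public_key': priv.public_key().public_bytes(
        serialization.Encoding.PEM,
        serialization.PublicFormat.SubjectPublicKeyInfo,
    ).decode(),
    'contact_email': 'you@example.com',
}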
| @@ -14,8 +14,6 @@ from ..html_tools import TRANSLATE_WHITESPACE_TABLE | ||||
| # Allowable protocols, protects against javascript: etc | ||||
| # file:// is further checked by ALLOW_FILE_URI | ||||
| SAFE_PROTOCOL_REGEX='^(http|https|ftp|file):' | ||||
| FAVICON_RESAVE_THRESHOLD_SECONDS=86400  # 24 hours | ||||
|  | ||||
|  | ||||
| minimum_seconds_recheck_time = int(os.getenv('MINIMUM_SECONDS_RECHECK_TIME', 3)) | ||||
| mtable = {'seconds': 1, 'minutes': 60, 'hours': 3600, 'days': 86400, 'weeks': 86400 * 7} | ||||
| @@ -169,8 +167,8 @@ class model(watch_base): | ||||
|  | ||||
|     @property | ||||
|     def label(self): | ||||
|         # Used for sorting, display, etc | ||||
|         return self.get('title') or self.get('page_title') or self.link | ||||
|         # Used for sorting | ||||
|         return self.get('title') if self.get('title') else self.get('url') | ||||
|  | ||||
|     @property | ||||
|     def last_changed(self): | ||||
| @@ -422,28 +420,6 @@ class model(watch_base): | ||||
|         # False is not an option for AppRise, must be type None | ||||
|         return None | ||||
|  | ||||
|     def favicon_is_expired(self): | ||||
|         favicon_fname = self.get_favicon_filename() | ||||
|         import glob | ||||
|         import time | ||||
|  | ||||
|         if not favicon_fname: | ||||
|             return True | ||||
|         try: | ||||
|             fname = next(iter(glob.glob(os.path.join(self.watch_data_dir, "favicon.*"))), None) | ||||
|             logger.trace(f"Favicon file maybe found at {fname}") | ||||
|             if fname and os.path.isfile(fname): | ||||
|                 file_age = int(time.time() - os.path.getmtime(fname)) | ||||
|                 logger.trace(f"Favicon file age is {file_age}s") | ||||
|                 if file_age < FAVICON_RESAVE_THRESHOLD_SECONDS: | ||||
|                     return False | ||||
|         except Exception as e: | ||||
|             logger.critical(f"Exception checking Favicon age {str(e)}") | ||||
|             return True | ||||
|  | ||||
|         # Also in the case that the file didn't exist | ||||
|         return True | ||||
|  | ||||
|     def bump_favicon(self, url, favicon_base_64: str) -> None: | ||||
|         from urllib.parse import urlparse | ||||
|         import base64 | ||||
| @@ -663,7 +639,7 @@ class model(watch_base): | ||||
|                     if res: | ||||
|                         if not csv_writer: | ||||
|                             # A file on the disk can be transferred much faster via flask than a string reply | ||||
|                             csv_output_filename = f"report-{self.get('uuid')}.csv" | ||||
|                             csv_output_filename = 'report.csv' | ||||
|                             f = open(os.path.join(self.watch_data_dir, csv_output_filename), 'w') | ||||
|                             # @todo some headers in the future | ||||
|                             #fieldnames = ['Epoch seconds', 'Date'] | ||||
|   | ||||
| @@ -3,7 +3,6 @@ import uuid | ||||
|  | ||||
| from changedetectionio import strtobool | ||||
| default_notification_format_for_watch = 'System default' | ||||
| CONDITIONS_MATCH_LOGIC_DEFAULT = 'ALL' | ||||
|  | ||||
| class watch_base(dict): | ||||
|  | ||||
| @@ -16,14 +15,13 @@ class watch_base(dict): | ||||
|             'body': None, | ||||
|             'browser_steps': [], | ||||
|             'browser_steps_last_error_step': None, | ||||
|             'conditions' : {}, | ||||
|             'conditions_match_logic': CONDITIONS_MATCH_LOGIC_DEFAULT, | ||||
|             'check_count': 0, | ||||
|             'check_unique_lines': False,  # On change-detected, compare against all history if it's something new | ||||
|             'consecutive_filter_failures': 0,  # Every time the CSS/xPath filter cannot be located, reset when all is fine. | ||||
|             'content-type': None, | ||||
|             'date_created': None, | ||||
|             'extract_text': [],  # Extract text by regex after filters | ||||
|             'extract_title_as_title': False, | ||||
|             'fetch_backend': 'system',  # plaintext, playwright etc | ||||
|             'fetch_time': 0.0, | ||||
|             'filter_failure_notification_send': strtobool(os.getenv('FILTER_FAILURE_NOTIFICATION_SEND_DEFAULT', 'True')), | ||||
| @@ -34,7 +32,6 @@ class watch_base(dict): | ||||
|             'has_ldjson_price_data': None, | ||||
|             'headers': {},  # Extra headers to send | ||||
|             'ignore_text': [],  # List of text to ignore when calculating the comparison checksum | ||||
|             'ignore_status_codes': None, | ||||
|             'in_stock_only': True,  # Only trigger change on going to instock from out-of-stock | ||||
|             'include_filters': [], | ||||
|             'last_checked': 0, | ||||
| @@ -49,7 +46,6 @@ class watch_base(dict): | ||||
|             'notification_screenshot': False,  # Include the latest screenshot if available and supported by the apprise URL | ||||
|             'notification_title': None, | ||||
|             'notification_urls': [],  # List of URLs to add to the notification Queue (Usually AppRise) | ||||
|             'page_title': None, # <title> from the page | ||||
|             'paused': False, | ||||
|             'previous_md5': False, | ||||
|             'previous_md5_before_filters': False,  # Used for skipping changedetection entirely | ||||
| @@ -123,13 +119,12 @@ class watch_base(dict): | ||||
|                     } | ||||
|                 }, | ||||
|             }, | ||||
|             'title': None, # An arbitrary field that overrides 'page_title' | ||||
|             'title': None, | ||||
|             'track_ldjson_price_data': None, | ||||
|             'trim_text_whitespace': False, | ||||
|             'remove_duplicate_lines': False, | ||||
|             'trigger_text': [],  # List of text or regex to wait for until a change is detected | ||||
|             'url': '', | ||||
|             'use_page_title_in_list': None, # None = use system settings | ||||
|             'uuid': str(uuid.uuid4()), | ||||
|             'webdriver_delay': None, | ||||
|             'webdriver_js_execute_code': None,  # Run before change-detection | ||||
|   | ||||
| @@ -1,217 +0,0 @@ | ||||
| import json | ||||
| from flask import request, current_app | ||||
| from flask_restful import Resource, marshal_with, fields | ||||
| from loguru import logger | ||||
|  | ||||
|  | ||||
| browser_notifications_fields = { | ||||
|     'success': fields.Boolean, | ||||
|     'message': fields.String, | ||||
| } | ||||
|  | ||||
| vapid_public_key_fields = { | ||||
|     'publicKey': fields.String, | ||||
| } | ||||
|  | ||||
| test_notification_fields = { | ||||
|     'success': fields.Boolean, | ||||
|     'message': fields.String, | ||||
|     'sent_count': fields.Integer, | ||||
| } | ||||
|  | ||||
|  | ||||
| class BrowserNotificationsVapidPublicKey(Resource): | ||||
|     """Get VAPID public key for browser push notifications""" | ||||
|      | ||||
|     @marshal_with(vapid_public_key_fields) | ||||
|     def get(self): | ||||
|         try: | ||||
|             from changedetectionio.notification.apprise_plugin.browser_notification_helpers import ( | ||||
|                 get_vapid_config_from_datastore, convert_pem_public_key_for_browser | ||||
|             ) | ||||
|              | ||||
|             datastore = current_app.config.get('DATASTORE') | ||||
|             if not datastore: | ||||
|                 return {'publicKey': None}, 500 | ||||
|                  | ||||
|             private_key, public_key_pem, contact_email = get_vapid_config_from_datastore(datastore) | ||||
|              | ||||
|             if not public_key_pem: | ||||
|                 return {'publicKey': None}, 404 | ||||
|              | ||||
|             # Convert PEM format to URL-safe base64 format for browser | ||||
|             public_key_b64 = convert_pem_public_key_for_browser(public_key_pem) | ||||
|              | ||||
|             if public_key_b64: | ||||
|                 return {'publicKey': public_key_b64} | ||||
|             else: | ||||
|                 return {'publicKey': None}, 500 | ||||
|                  | ||||
|         except Exception as e: | ||||
|             logger.error(f"Failed to get VAPID public key: {e}") | ||||
|             return {'publicKey': None}, 500 | ||||
|  | ||||
|  | ||||
| class BrowserNotificationsSubscribe(Resource): | ||||
|     """Subscribe to browser notifications""" | ||||
|      | ||||
|     @marshal_with(browser_notifications_fields) | ||||
|     def post(self): | ||||
|         try: | ||||
|             data = request.get_json() | ||||
|             if not data: | ||||
|                 return {'success': False, 'message': 'No data provided'}, 400 | ||||
|                  | ||||
|             subscription = data.get('subscription') | ||||
|              | ||||
|             if not subscription: | ||||
|                 return {'success': False, 'message': 'Subscription is required'}, 400 | ||||
|                  | ||||
|             # Validate subscription format | ||||
|             required_fields = ['endpoint', 'keys'] | ||||
|             for field in required_fields: | ||||
|                 if field not in subscription: | ||||
|                     return {'success': False, 'message': f'Missing subscription field: {field}'}, 400 | ||||
|                      | ||||
|             if 'p256dh' not in subscription['keys'] or 'auth' not in subscription['keys']: | ||||
|                 return {'success': False, 'message': 'Missing subscription keys'}, 400 | ||||
|                  | ||||
|             # Get datastore | ||||
|             datastore = current_app.config.get('DATASTORE') | ||||
|             if not datastore: | ||||
|                 return {'success': False, 'message': 'Datastore not available'}, 500 | ||||
|                  | ||||
|             # Initialize browser_subscriptions if it doesn't exist | ||||
|             if 'browser_subscriptions' not in datastore.data['settings']['application']: | ||||
|                 datastore.data['settings']['application']['browser_subscriptions'] = [] | ||||
|                  | ||||
|             # Check if subscription already exists | ||||
|             existing_subscriptions = datastore.data['settings']['application']['browser_subscriptions'] | ||||
|             for existing_sub in existing_subscriptions: | ||||
|                 if existing_sub.get('endpoint') == subscription.get('endpoint'): | ||||
|                     return {'success': True, 'message': 'Already subscribed to browser notifications'} | ||||
|                      | ||||
|             # Add new subscription | ||||
|             datastore.data['settings']['application']['browser_subscriptions'].append(subscription) | ||||
|             datastore.needs_write = True | ||||
|              | ||||
|             logger.info(f"New browser notification subscription: {subscription.get('endpoint')}") | ||||
|              | ||||
|             return {'success': True, 'message': 'Successfully subscribed to browser notifications'} | ||||
|              | ||||
|         except Exception as e: | ||||
|             logger.error(f"Failed to subscribe to browser notifications: {e}") | ||||
|             return {'success': False, 'message': f'Subscription failed: {str(e)}'}, 500 | ||||
|  | ||||
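Editor's note: the shape of the JSON body the subscribe endpoint above validates - a Web Push subscription as produced by PushManager.subscribe() in the browser; the endpoint and key values below are placeholders.

example_request_body = {
    "subscription": {
        "endpoint": "https://push.example.com/send/abc123",
        "keys": {
            "p256dh": "<base64url client public key>",
            "auth": "<base64url auth secret>",
        },
    }
}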
|  | ||||
| class BrowserNotificationsUnsubscribe(Resource): | ||||
|     """Unsubscribe from browser notifications""" | ||||
|      | ||||
|     @marshal_with(browser_notifications_fields) | ||||
|     def post(self): | ||||
|         try: | ||||
|             data = request.get_json() | ||||
|             if not data: | ||||
|                 return {'success': False, 'message': 'No data provided'}, 400 | ||||
|                  | ||||
|             subscription = data.get('subscription') | ||||
|              | ||||
|             if not subscription or not subscription.get('endpoint'): | ||||
|                 return {'success': False, 'message': 'Valid subscription is required'}, 400 | ||||
|                  | ||||
|             # Get datastore | ||||
|             datastore = current_app.config.get('DATASTORE') | ||||
|             if not datastore: | ||||
|                 return {'success': False, 'message': 'Datastore not available'}, 500 | ||||
|                  | ||||
|             # Check if subscriptions exist | ||||
|             browser_subscriptions = datastore.data.get('settings', {}).get('application', {}).get('browser_subscriptions', []) | ||||
|             if not browser_subscriptions: | ||||
|                 return {'success': True, 'message': 'No subscriptions found'} | ||||
|                  | ||||
|             # Remove subscription with matching endpoint | ||||
|             endpoint = subscription.get('endpoint') | ||||
|             original_count = len(browser_subscriptions) | ||||
|              | ||||
|             datastore.data['settings']['application']['browser_subscriptions'] = [ | ||||
|                 sub for sub in browser_subscriptions  | ||||
|                 if sub.get('endpoint') != endpoint | ||||
|             ] | ||||
|              | ||||
|             removed_count = original_count - len(datastore.data['settings']['application']['browser_subscriptions']) | ||||
|              | ||||
|             if removed_count > 0: | ||||
|                 datastore.needs_write = True | ||||
|                 logger.info(f"Removed {removed_count} browser notification subscription(s)") | ||||
|                 return {'success': True, 'message': 'Successfully unsubscribed from browser notifications'} | ||||
|             else: | ||||
|                 return {'success': True, 'message': 'No matching subscription found'} | ||||
|                  | ||||
|         except Exception as e: | ||||
|             logger.error(f"Failed to unsubscribe from browser notifications: {e}") | ||||
|             return {'success': False, 'message': f'Unsubscribe failed: {str(e)}'}, 500 | ||||
|  | ||||
|  | ||||
|  | ||||
| class BrowserNotificationsTest(Resource): | ||||
|     """Send a test browser notification""" | ||||
|      | ||||
|     @marshal_with(test_notification_fields) | ||||
|     def post(self): | ||||
|         try: | ||||
|             data = request.get_json() | ||||
|             if not data: | ||||
|                 return {'success': False, 'message': 'No data provided', 'sent_count': 0}, 400 | ||||
|                  | ||||
|             title = data.get('title', 'Test Notification') | ||||
|             body = data.get('body', 'This is a test notification from changedetection.io') | ||||
|              | ||||
|             # Get datastore to check if subscriptions exist | ||||
|             datastore = current_app.config.get('DATASTORE') | ||||
|             if not datastore: | ||||
|                 return {'success': False, 'message': 'Datastore not available', 'sent_count': 0}, 500 | ||||
|                  | ||||
|             # Check if there are subscriptions before attempting to send | ||||
|             browser_subscriptions = datastore.data.get('settings', {}).get('application', {}).get('browser_subscriptions', []) | ||||
|             if not browser_subscriptions: | ||||
|                 return {'success': False, 'message': 'No subscriptions found', 'sent_count': 0}, 404 | ||||
|              | ||||
|             # Use the apprise handler directly | ||||
|             try: | ||||
|                 from changedetectionio.notification.apprise_plugin.custom_handlers import apprise_browser_notification_handler | ||||
|                  | ||||
|                 # Call the apprise handler with test data | ||||
|                 success = apprise_browser_notification_handler( | ||||
|                     body=body, | ||||
|                     title=title, | ||||
|                     notify_type='info', | ||||
|                     meta={'url': 'browser://test'} | ||||
|                 ) | ||||
|                  | ||||
|                 # sent_count reports the original number of subscriptions attempted; invalid ones may have been pruned from the datastore while sending | ||||
|                 final_subscriptions = datastore.data.get('settings', {}).get('application', {}).get('browser_subscriptions', []) | ||||
|                 sent_count = len(browser_subscriptions)  # Original count | ||||
|                  | ||||
|                 if success: | ||||
|                     return { | ||||
|                         'success': True, | ||||
|                         'message': f'Test notification sent successfully to {sent_count} subscriber(s)', | ||||
|                         'sent_count': sent_count | ||||
|                     } | ||||
|                 else: | ||||
|                     return { | ||||
|                         'success': False, | ||||
|                         'message': 'Failed to send test notification', | ||||
|                         'sent_count': 0 | ||||
|                     }, 500 | ||||
|                      | ||||
|             except ImportError: | ||||
|                 return {'success': False, 'message': 'Browser notification handler not available', 'sent_count': 0}, 500 | ||||
|                  | ||||
|         except Exception as e: | ||||
|             logger.error(f"Failed to send test browser notification: {e}") | ||||
|             return {'success': False, 'message': f'Test failed: {str(e)}', 'sent_count': 0}, 500 | ||||
|  | ||||
|  | ||||
|  | ||||
|  | ||||
| @@ -1,273 +0,0 @@ | ||||
| """ | ||||
| Browser notification helpers for Web Push API | ||||
| Shared utility functions for VAPID key handling and notification sending | ||||
| """ | ||||
|  | ||||
| import json | ||||
| import re | ||||
| import time | ||||
| from loguru import logger | ||||
|  | ||||
|  | ||||
| def convert_pem_private_key_for_pywebpush(private_key): | ||||
|     """ | ||||
|     Convert PEM private key to the format that pywebpush expects | ||||
|      | ||||
|     Args: | ||||
|         private_key: PEM private key string or already converted key | ||||
|          | ||||
|     Returns: | ||||
|         Vapid instance for pywebpush (avoids PEM parsing compatibility issues) | ||||
|     """ | ||||
|     try: | ||||
|         from py_vapid import Vapid | ||||
|         import tempfile | ||||
|         import os | ||||
|          | ||||
|         # If we get a string, assume it's PEM and create a Vapid instance from it | ||||
|         if isinstance(private_key, str) and private_key.startswith('-----BEGIN'): | ||||
|             # Write PEM to temporary file and load with Vapid.from_file | ||||
|             with tempfile.NamedTemporaryFile(mode='w', suffix='.pem', delete=False) as tmp_file: | ||||
|                 tmp_file.write(private_key) | ||||
|                 tmp_file.flush() | ||||
|                 temp_path = tmp_file.name | ||||
|                  | ||||
|             try: | ||||
|                 # Load using Vapid.from_file - this is more compatible with pywebpush | ||||
|                 vapid_instance = Vapid.from_file(temp_path) | ||||
|                 os.unlink(temp_path)  # Clean up | ||||
|                 logger.debug("Successfully created Vapid instance from PEM") | ||||
|                 return vapid_instance | ||||
|             except Exception as e: | ||||
|                 os.unlink(temp_path)  # Clean up even on error | ||||
|                 logger.error(f"Failed to create Vapid instance from PEM: {e}") | ||||
|                 # Fall back to returning the original PEM string | ||||
|                 return private_key | ||||
|         else: | ||||
|             # Return as-is if not a PEM string   | ||||
|             return private_key | ||||
|              | ||||
|     except Exception as e: | ||||
|         logger.error(f"Failed to convert private key: {e}") | ||||
|         return private_key | ||||
|  | ||||
|  | ||||
| def convert_pem_public_key_for_browser(public_key_pem): | ||||
|     """ | ||||
|     Convert PEM public key to URL-safe base64 format for browser applicationServerKey | ||||
|      | ||||
|     Args: | ||||
|         public_key_pem: PEM public key string | ||||
|          | ||||
|     Returns: | ||||
|         URL-safe base64 encoded public key without padding | ||||
|     """ | ||||
|     try: | ||||
|         from cryptography.hazmat.primitives import serialization | ||||
|         import base64 | ||||
|          | ||||
|         # Parse PEM directly using cryptography library | ||||
|         pem_bytes = public_key_pem.encode() if isinstance(public_key_pem, str) else public_key_pem | ||||
|          | ||||
|         # Load the public key from PEM | ||||
|         public_key_crypto = serialization.load_pem_public_key(pem_bytes) | ||||
|          | ||||
|         # Get the raw public key bytes in uncompressed format (what browsers expect) | ||||
|         public_key_raw = public_key_crypto.public_bytes( | ||||
|             encoding=serialization.Encoding.X962, | ||||
|             format=serialization.PublicFormat.UncompressedPoint | ||||
|         ) | ||||
|          | ||||
|         # Convert to URL-safe base64 (remove padding) | ||||
|         public_key_b64 = base64.urlsafe_b64encode(public_key_raw).decode('ascii').rstrip('=') | ||||
|          | ||||
|         return public_key_b64 | ||||
|          | ||||
|     except Exception as e: | ||||
|         logger.error(f"Failed to convert public key format: {e}") | ||||
|         return None | ||||
|  | ||||
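Editor's note: a sketch of the conversion above on a freshly generated P-256 key; the raw uncompressed point is 65 bytes, so the unpadded URL-safe base64 string handed to the browser is 87 characters long.

from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric import ec

key = ec.generate_private_key(ec.SECP256R1())
pem = key.public_key().public_bytes(
    serialization.Encoding.PEM,
    serialization.PublicFormat.SubjectPublicKeyInfo,
).decode()
print(len(convert_pem_public_key_for_browser(pem)))  # 87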
|  | ||||
| def send_push_notifications(subscriptions, notification_payload, private_key, contact_email, datastore): | ||||
|     """ | ||||
|     Send push notifications to a list of subscriptions | ||||
|      | ||||
|     Args: | ||||
|         subscriptions: List of push subscriptions | ||||
|         notification_payload: Dict with notification data (title, body, etc.) | ||||
|         private_key: VAPID private key (will be converted if needed) | ||||
|         contact_email: Contact email for VAPID claims | ||||
|         datastore: Datastore object for updating subscriptions | ||||
|          | ||||
|     Returns: | ||||
|         Tuple of (success_count, total_count) | ||||
|     """ | ||||
|     try: | ||||
|         from pywebpush import webpush, WebPushException | ||||
|     except ImportError: | ||||
|         logger.error("pywebpush not available - cannot send browser notifications") | ||||
|         return 0, len(subscriptions) | ||||
|      | ||||
|     # Convert private key to format pywebpush expects | ||||
|     private_key_for_push = convert_pem_private_key_for_pywebpush(private_key) | ||||
|      | ||||
|     success_count = 0 | ||||
|     total_count = len(subscriptions) | ||||
|      | ||||
|     # Send to all subscriptions | ||||
|     for subscription in subscriptions[:]:  # Copy list to avoid modification issues | ||||
|         try: | ||||
|             webpush( | ||||
|                 subscription_info=subscription, | ||||
|                 data=json.dumps(notification_payload), | ||||
|                 vapid_private_key=private_key_for_push, | ||||
|                 vapid_claims={ | ||||
|                     "sub": f"mailto:{contact_email}", | ||||
|                     "aud": f"https://{subscription['endpoint'].split('/')[2]}" | ||||
|                 } | ||||
|             ) | ||||
|             success_count += 1 | ||||
|              | ||||
|         except WebPushException as e: | ||||
|             logger.warning(f"Failed to send browser notification to subscription: {e}") | ||||
|             # Remove invalid subscriptions (410 = Gone, 404 = Not Found) | ||||
|             if e.response and e.response.status_code in [404, 410]: | ||||
|                 logger.info("Removing invalid browser notification subscription") | ||||
|                 try: | ||||
|                     subscriptions.remove(subscription) | ||||
|                     datastore.needs_write = True | ||||
|                 except ValueError: | ||||
|                     pass  # Already removed | ||||
|                      | ||||
|         except Exception as e: | ||||
|             logger.error(f"Unexpected error sending browser notification: {e}") | ||||
|      | ||||
|     return success_count, total_count | ||||
|  | ||||
|  | ||||
| def create_notification_payload(title, body, icon_path=None): | ||||
|     """ | ||||
|     Create a standard notification payload | ||||
|      | ||||
|     Args: | ||||
|         title: Notification title | ||||
|         body: Notification body | ||||
|         icon_path: Optional icon path (defaults to favicon) | ||||
|          | ||||
|     Returns: | ||||
|         Dict with notification payload | ||||
|     """ | ||||
|     return { | ||||
|         'title': title, | ||||
|         'body': body, | ||||
|         'icon': icon_path or '/static/favicons/favicon-32x32.png', | ||||
|         'badge': '/static/favicons/favicon-32x32.png', | ||||
|         'timestamp': int(time.time() * 1000), | ||||
|     } | ||||
|  | ||||
|  | ||||
| def get_vapid_config_from_datastore(datastore): | ||||
|     """ | ||||
|     Get VAPID configuration from datastore with proper error handling | ||||
|      | ||||
|     Args: | ||||
|         datastore: Datastore object | ||||
|          | ||||
|     Returns: | ||||
|         Tuple of (private_key, public_key, contact_email) or (None, None, None) if error | ||||
|     """ | ||||
|     try: | ||||
|         if not datastore: | ||||
|             return None, None, None | ||||
|              | ||||
|         vapid_config = datastore.data.get('settings', {}).get('application', {}).get('vapid', {}) | ||||
|         private_key = vapid_config.get('private_key') | ||||
|         public_key = vapid_config.get('public_key') | ||||
|         contact_email = vapid_config.get('contact_email', 'citizen@example.com') | ||||
|          | ||||
|         return private_key, public_key, contact_email | ||||
|          | ||||
|     except Exception as e: | ||||
|         logger.error(f"Failed to get VAPID config from datastore: {e}") | ||||
|         return None, None, None | ||||
|  | ||||
|  | ||||
|  | ||||
| def get_browser_subscriptions(datastore): | ||||
|     """ | ||||
|     Get browser subscriptions from datastore | ||||
|      | ||||
|     Args: | ||||
|         datastore: Datastore object | ||||
|          | ||||
|     Returns: | ||||
|         List of subscriptions | ||||
|     """ | ||||
|     try: | ||||
|         if not datastore: | ||||
|             return [] | ||||
|              | ||||
|         return datastore.data.get('settings', {}).get('application', {}).get('browser_subscriptions', []) | ||||
|          | ||||
|     except Exception as e: | ||||
|         logger.error(f"Failed to get browser subscriptions: {e}") | ||||
|         return [] | ||||
|  | ||||
|  | ||||
| def save_browser_subscriptions(datastore, subscriptions): | ||||
|     """ | ||||
|     Save browser subscriptions to datastore | ||||
|      | ||||
|     Args: | ||||
|         datastore: Datastore object | ||||
|         subscriptions: List of subscriptions to save | ||||
|     """ | ||||
|     try: | ||||
|         if not datastore: | ||||
|             return | ||||
|              | ||||
|         # Ensure the settings structure exists | ||||
|         if 'settings' not in datastore.data: | ||||
|             datastore.data['settings'] = {} | ||||
|         if 'application' not in datastore.data['settings']: | ||||
|             datastore.data['settings']['application'] = {} | ||||
|              | ||||
|         datastore.data['settings']['application']['browser_subscriptions'] = subscriptions | ||||
|         datastore.needs_write = True | ||||
|          | ||||
|     except Exception as e: | ||||
|         logger.error(f"Failed to save browser subscriptions: {e}") | ||||
|  | ||||
|  | ||||
|  | ||||
|  | ||||
| def create_error_response(message, sent_count=0, status_code=500): | ||||
|     """ | ||||
|     Create standardized error response for API endpoints | ||||
|      | ||||
|     Args: | ||||
|         message: Error message | ||||
|         sent_count: Number of notifications sent (for test endpoints) | ||||
|         status_code: HTTP status code | ||||
|          | ||||
|     Returns: | ||||
|         Tuple of (response_dict, status_code) | ||||
|     """ | ||||
|     return {'success': False, 'message': message, 'sent_count': sent_count}, status_code | ||||
|  | ||||
|  | ||||
| def create_success_response(message, sent_count=None): | ||||
|     """ | ||||
|     Create standardized success response for API endpoints | ||||
|      | ||||
|     Args: | ||||
|         message: Success message | ||||
|         sent_count: Number of notifications sent (optional) | ||||
|          | ||||
|     Returns: | ||||
|         Response dict | ||||
|     """ | ||||
|     response = {'success': True, 'message': message} | ||||
|     if sent_count is not None: | ||||
|         response['sent_count'] = sent_count | ||||
|     return response | ||||
| @@ -1,6 +1,5 @@ | ||||
| import json | ||||
| import re | ||||
| import time | ||||
| from urllib.parse import unquote_plus | ||||
|  | ||||
| import requests | ||||
| @@ -111,80 +110,3 @@ def apprise_http_custom_handler( | ||||
|     except Exception as e: | ||||
|         logger.error(f"Unexpected error occurred while sending custom notification to {url}: {e}") | ||||
|         return False | ||||
|  | ||||
|  | ||||
| @notify(on="browser") | ||||
| def apprise_browser_notification_handler( | ||||
|     body: str, | ||||
|     title: str, | ||||
|     notify_type: str, | ||||
|     meta: dict, | ||||
|     *args, | ||||
|     **kwargs, | ||||
| ) -> bool: | ||||
|     """ | ||||
|     Browser push notification handler for browser:// URLs | ||||
|     Ignores anything after browser:// and uses single default channel | ||||
|     """ | ||||
|     try: | ||||
|         from pywebpush import webpush, WebPushException | ||||
|         from flask import current_app | ||||
|          | ||||
|         # Get VAPID keys from app settings | ||||
|         try: | ||||
|             datastore = current_app.config.get('DATASTORE') | ||||
|             if not datastore: | ||||
|                 logger.error("No datastore available for browser notifications") | ||||
|                 return False | ||||
|                  | ||||
|             vapid_config = datastore.data.get('settings', {}).get('application', {}).get('vapid', {}) | ||||
|             private_key = vapid_config.get('private_key') | ||||
|             public_key = vapid_config.get('public_key') | ||||
|             contact_email = vapid_config.get('contact_email', 'admin@changedetection.io') | ||||
|              | ||||
|             if not private_key or not public_key: | ||||
|                 logger.error("VAPID keys not configured for browser notifications") | ||||
|                 return False | ||||
|                  | ||||
|         except Exception as e: | ||||
|             logger.error(f"Failed to get VAPID configuration: {e}") | ||||
|             return False | ||||
|          | ||||
|         # Get subscriptions from datastore | ||||
|         browser_subscriptions = datastore.data.get('settings', {}).get('application', {}).get('browser_subscriptions', []) | ||||
|          | ||||
|         if not browser_subscriptions: | ||||
|             logger.info("No browser subscriptions found") | ||||
|             return True  # Not an error - just no subscribers | ||||
|              | ||||
|         # Import helper functions | ||||
|         try: | ||||
|             from .browser_notification_helpers import create_notification_payload, send_push_notifications | ||||
|         except ImportError: | ||||
|             logger.error("Browser notification helpers not available") | ||||
|             return False | ||||
|          | ||||
|         # Prepare notification payload | ||||
|         notification_payload = create_notification_payload(title, body) | ||||
|          | ||||
|         # Send notifications using shared helper | ||||
|         success_count, total_count = send_push_notifications( | ||||
|             subscriptions=browser_subscriptions, | ||||
|             notification_payload=notification_payload, | ||||
|             private_key=private_key, | ||||
|             contact_email=contact_email, | ||||
|             datastore=datastore | ||||
|         ) | ||||
|                  | ||||
|         # Update datastore with cleaned subscriptions | ||||
|         datastore.data['settings']['application']['browser_subscriptions'] = browser_subscriptions | ||||
|          | ||||
|         logger.info(f"Sent browser notifications: {success_count}/{total_count} successful") | ||||
|         return success_count > 0 | ||||
|          | ||||
|     except ImportError: | ||||
|         logger.error("pywebpush not available - cannot send browser notifications") | ||||
|         return False | ||||
|     except Exception as e: | ||||
|         logger.error(f"Unexpected error in browser notification handler: {e}") | ||||
|         return False | ||||
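Editor's note: a sketch of how Apprise dispatches to the decorated handler above once this module has been imported (the @notify(on="browser") decorator registers the browser:// scheme); the title and body strings are hypothetical.

import apprise

a = apprise.Apprise()
a.add("browser://")  # resolved to apprise_browser_notification_handler
a.notify(title="Price drop detected", body="The watched page changed")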
|   | ||||
| @@ -8,7 +8,7 @@ def process_notification(n_object, datastore): | ||||
|     from changedetectionio.safe_jinja import render as jinja_render | ||||
|     from . import default_notification_format_for_watch, default_notification_format, valid_notification_formats | ||||
|     # be sure its registered | ||||
|     from .apprise_plugin.custom_handlers import apprise_http_custom_handler, apprise_browser_notification_handler | ||||
|     from .apprise_plugin.custom_handlers import apprise_http_custom_handler | ||||
|  | ||||
|     now = time.time() | ||||
|     if n_object.get('notification_timestamp'): | ||||
| @@ -149,7 +149,7 @@ def create_notification_parameters(n_object, datastore): | ||||
|     uuid = n_object['uuid'] if 'uuid' in n_object else '' | ||||
|  | ||||
|     if uuid: | ||||
|         watch_title = datastore.data['watching'][uuid].label | ||||
|         watch_title = datastore.data['watching'][uuid].get('title', '') | ||||
|         tag_list = [] | ||||
|         tags = datastore.get_all_tags_for_watch(uuid) | ||||
|         if tags: | ||||
|   | ||||
| @@ -146,19 +146,18 @@ class difference_detection_processor(): | ||||
|  | ||||
|         # And here we go! call the right browser with browser-specific settings | ||||
|         empty_pages_are_a_change = self.datastore.data['settings']['application'].get('empty_pages_are_a_change', False) | ||||
|  | ||||
|         # All fetchers are now async | ||||
|         await self.fetcher.run( | ||||
|             current_include_filters=self.watch.get('include_filters'), | ||||
|             empty_pages_are_a_change=empty_pages_are_a_change, | ||||
|             fetch_favicon=self.watch.favicon_is_expired(), | ||||
|             ignore_status_codes=ignore_status_codes, | ||||
|             is_binary=is_binary, | ||||
|             request_body=request_body, | ||||
|             request_headers=request_headers, | ||||
|             request_method=request_method, | ||||
|             timeout=timeout, | ||||
|             url=url, | ||||
|        ) | ||||
|         await self.fetcher.run(url=url, | ||||
|                                timeout=timeout, | ||||
|                                request_headers=request_headers, | ||||
|                                request_body=request_body, | ||||
|                                request_method=request_method, | ||||
|                                ignore_status_codes=ignore_status_codes, | ||||
|                                current_include_filters=self.watch.get('include_filters'), | ||||
|                                is_binary=is_binary, | ||||
|                                empty_pages_are_a_change=empty_pages_are_a_change | ||||
|                                ) | ||||
|  | ||||
|         #@todo .quit here could go on close object, so we can run JS if change-detected | ||||
|         self.fetcher.quit(watch=self.watch) | ||||
|   | ||||
| @@ -251,7 +251,8 @@ class perform_site_check(difference_detection_processor): | ||||
|         update_obj["last_check_status"] = self.fetcher.get_last_status_code() | ||||
|  | ||||
|         # 615 Extract text by regex | ||||
|         extract_text = list(dict.fromkeys(watch.get('extract_text', []) + self.datastore.get_tag_overrides_for_watch(uuid=watch.get('uuid'), attr='extract_text'))) | ||||
|         extract_text = watch.get('extract_text', []) | ||||
|         extract_text += self.datastore.get_tag_overrides_for_watch(uuid=watch.get('uuid'), attr='extract_text') | ||||
|         if len(extract_text) > 0: | ||||
|             regex_matched_output = [] | ||||
|             for s_re in extract_text: | ||||
| @@ -310,7 +311,8 @@ class perform_site_check(difference_detection_processor): | ||||
|  | ||||
|         ############ Blocking rules, after checksum ################# | ||||
|         blocked = False | ||||
|         trigger_text = list(dict.fromkeys(watch.get('trigger_text', []) + self.datastore.get_tag_overrides_for_watch(uuid=watch.get('uuid'), attr='trigger_text'))) | ||||
|         trigger_text = watch.get('trigger_text', []) | ||||
|         trigger_text += self.datastore.get_tag_overrides_for_watch(uuid=watch.get('uuid'), attr='trigger_text') | ||||
|         if len(trigger_text): | ||||
|             # Assume blocked | ||||
|             blocked = True | ||||
| @@ -324,7 +326,8 @@ class perform_site_check(difference_detection_processor): | ||||
|             if result: | ||||
|                 blocked = False | ||||
|  | ||||
|         text_should_not_be_present = list(dict.fromkeys(watch.get('text_should_not_be_present', []) + self.datastore.get_tag_overrides_for_watch(uuid=watch.get('uuid'), attr='text_should_not_be_present'))) | ||||
|         text_should_not_be_present = watch.get('text_should_not_be_present', []) | ||||
|         text_should_not_be_present += self.datastore.get_tag_overrides_for_watch(uuid=watch.get('uuid'), attr='text_should_not_be_present') | ||||
|         if len(text_should_not_be_present): | ||||
|             # If anything matched, then we should block a change from happening | ||||
|             result = html_tools.strip_ignore_text(content=str(stripped_text_from_html), | ||||
|   | ||||
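The hunks above differ in how watch-level rules and tag-level tag overrides are merged: one side simply concatenates the two lists, the other wraps the concatenation in list(dict.fromkeys(...)), which drops duplicate rules while preserving first-seen order. A minimal, standalone illustration of that idiom (the variable names are illustrative, not taken from the diff):

    watch_rules = ["price", "in stock"]
    tag_overrides = ["in stock", "sold out"]

    # dict.fromkeys() keeps the first occurrence of each entry and preserves order,
    # so a rule defined on both the watch and its tag is only applied once.
    merged = list(dict.fromkeys(watch_rules + tag_overrides))
    assert merged == ["price", "in stock", "sold out"]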
| @@ -1,435 +0,0 @@ | ||||
| from blinker import signal | ||||
| from loguru import logger | ||||
| from typing import Dict, List, Any, Optional | ||||
| import heapq | ||||
| import queue | ||||
| import threading | ||||
|  | ||||
| try: | ||||
|     import janus | ||||
| except ImportError: | ||||
|     logger.critical("CRITICAL: janus library is required. Install with: pip install janus") | ||||
|     raise | ||||
|  | ||||
|  | ||||
| class RecheckPriorityQueue: | ||||
|     """ | ||||
|     Ultra-reliable priority queue using janus for async/sync bridging. | ||||
|      | ||||
|     CRITICAL DESIGN NOTE: Both sync_q and async_q are required because: | ||||
|     - sync_q: Used by Flask routes, ticker threads, and other synchronous code | ||||
|     - async_q: Used by async workers (the actual fetchers/processors) and coroutines | ||||
|      | ||||
|     DO NOT REMOVE EITHER INTERFACE - they bridge different execution contexts: | ||||
|     - Synchronous code (Flask, threads) cannot use async methods without blocking | ||||
|     - Async code cannot use sync methods without blocking the event loop | ||||
|     - janus provides the only safe bridge between these two worlds | ||||
|      | ||||
|     Attempting to unify to async-only would require: | ||||
|     - Converting all Flask routes to async (major breaking change) | ||||
|     - Using asyncio.run() in sync contexts (causes deadlocks) | ||||
|     - Thread-pool wrapping (adds complexity and overhead) | ||||
|      | ||||
|     Minimal implementation focused on reliability: | ||||
|     - Pure janus for sync/async bridge | ||||
|     - Thread-safe priority ordering   | ||||
|     - Bulletproof error handling with critical logging | ||||
|     """ | ||||
|      | ||||
|     def __init__(self, maxsize: int = 0): | ||||
|         try: | ||||
|             self._janus_queue = janus.Queue(maxsize=maxsize) | ||||
|             # BOTH interfaces required - see class docstring for why | ||||
|             self.sync_q = self._janus_queue.sync_q   # Flask routes, ticker thread | ||||
|             self.async_q = self._janus_queue.async_q # Async workers | ||||
|              | ||||
|             # Priority storage - thread-safe | ||||
|             self._priority_items = [] | ||||
|             self._lock = threading.RLock() | ||||
|              | ||||
|             # Signals for UI updates | ||||
|             self.queue_length_signal = signal('queue_length') | ||||
|              | ||||
|             logger.debug("RecheckPriorityQueue initialized successfully") | ||||
|         except Exception as e: | ||||
|             logger.critical(f"CRITICAL: Failed to initialize RecheckPriorityQueue: {str(e)}") | ||||
|             raise | ||||
|      | ||||
|     # SYNC INTERFACE (for ticker thread) | ||||
|     def put(self, item, block: bool = True, timeout: Optional[float] = None): | ||||
|         """Thread-safe sync put with priority ordering""" | ||||
|         try: | ||||
|             # Add to priority storage | ||||
|             with self._lock: | ||||
|                 heapq.heappush(self._priority_items, item) | ||||
|              | ||||
|             # Notify via janus sync queue | ||||
|             self.sync_q.put(True, block=block, timeout=timeout) | ||||
|              | ||||
|             # Emit signals | ||||
|             self._emit_put_signals(item) | ||||
|              | ||||
|             logger.debug(f"Successfully queued item: {self._get_item_uuid(item)}") | ||||
|             return True | ||||
|              | ||||
|         except Exception as e: | ||||
|             logger.critical(f"CRITICAL: Failed to put item {self._get_item_uuid(item)}: {str(e)}") | ||||
|             # Remove from priority storage if janus put failed | ||||
|             try: | ||||
|                 with self._lock: | ||||
|                     if item in self._priority_items: | ||||
|                         self._priority_items.remove(item) | ||||
|                         heapq.heapify(self._priority_items) | ||||
|             except Exception as cleanup_e: | ||||
|             logger.critical(f"CRITICAL: Failed to cleanup after put failure: {str(cleanup_e)}") | ||||
|             return False | ||||
|      | ||||
|     def get(self, block: bool = True, timeout: Optional[float] = None): | ||||
|         """Thread-safe sync get with priority ordering""" | ||||
|         try: | ||||
|             # Wait for notification | ||||
|             self.sync_q.get(block=block, timeout=timeout) | ||||
|              | ||||
|             # Get highest priority item | ||||
|             with self._lock: | ||||
|                 if not self._priority_items: | ||||
|                     logger.critical("CRITICAL: Queue notification received but no priority items available") | ||||
|                     raise Exception("Priority queue inconsistency") | ||||
|                 item = heapq.heappop(self._priority_items) | ||||
|              | ||||
|             # Emit signals | ||||
|             self._emit_get_signals() | ||||
|              | ||||
|             logger.debug(f"Successfully retrieved item: {self._get_item_uuid(item)}") | ||||
|             return item | ||||
|              | ||||
|         except Exception as e: | ||||
|             logger.critical(f"CRITICAL: Failed to get item from queue: {str(e)}") | ||||
|             raise | ||||
|      | ||||
|     # ASYNC INTERFACE (for workers) | ||||
|     async def async_put(self, item): | ||||
|         """Pure async put with priority ordering""" | ||||
|         try: | ||||
|             # Add to priority storage | ||||
|             with self._lock: | ||||
|                 heapq.heappush(self._priority_items, item) | ||||
|              | ||||
|             # Notify via janus async queue | ||||
|             await self.async_q.put(True) | ||||
|              | ||||
|             # Emit signals | ||||
|             self._emit_put_signals(item) | ||||
|              | ||||
|             logger.debug(f"Successfully async queued item: {self._get_item_uuid(item)}") | ||||
|             return True | ||||
|              | ||||
|         except Exception as e: | ||||
|             logger.critical(f"CRITICAL: Failed to async put item {self._get_item_uuid(item)}: {str(e)}") | ||||
|             # Remove from priority storage if janus put failed | ||||
|             try: | ||||
|                 with self._lock: | ||||
|                     if item in self._priority_items: | ||||
|                         self._priority_items.remove(item) | ||||
|                         heapq.heapify(self._priority_items) | ||||
|             except Exception as cleanup_e: | ||||
|             logger.critical(f"CRITICAL: Failed to cleanup after async put failure: {str(cleanup_e)}") | ||||
|             return False | ||||
|      | ||||
|     async def async_get(self): | ||||
|         """Pure async get with priority ordering""" | ||||
|         try: | ||||
|             # Wait for notification | ||||
|             await self.async_q.get() | ||||
|              | ||||
|             # Get highest priority item | ||||
|             with self._lock: | ||||
|                 if not self._priority_items: | ||||
|                     logger.critical("CRITICAL: Async queue notification received but no priority items available") | ||||
|                     raise Exception("Priority queue inconsistency") | ||||
|                 item = heapq.heappop(self._priority_items) | ||||
|              | ||||
|             # Emit signals | ||||
|             self._emit_get_signals() | ||||
|              | ||||
|             logger.debug(f"Successfully async retrieved item: {self._get_item_uuid(item)}") | ||||
|             return item | ||||
|              | ||||
|         except Exception as e: | ||||
|             logger.critical(f"CRITICAL: Failed to async get item from queue: {str(e)}") | ||||
|             raise | ||||
|      | ||||
|     # UTILITY METHODS | ||||
|     def qsize(self) -> int: | ||||
|         """Get current queue size""" | ||||
|         try: | ||||
|             with self._lock: | ||||
|                 return len(self._priority_items) | ||||
|         except Exception as e: | ||||
|             logger.critical(f"CRITICAL: Failed to get queue size: {str(e)}") | ||||
|             return 0 | ||||
|      | ||||
|     def empty(self) -> bool: | ||||
|         """Check if queue is empty""" | ||||
|         return self.qsize() == 0 | ||||
|      | ||||
|     def close(self): | ||||
|         """Close the janus queue""" | ||||
|         try: | ||||
|             self._janus_queue.close() | ||||
|             logger.debug("RecheckPriorityQueue closed successfully") | ||||
|         except Exception as e: | ||||
|             logger.critical(f"CRITICAL: Failed to close RecheckPriorityQueue: {str(e)}") | ||||
|      | ||||
|     # COMPATIBILITY METHODS (from original implementation) | ||||
|     @property | ||||
|     def queue(self): | ||||
|         """Provide compatibility with original queue access""" | ||||
|         try: | ||||
|             with self._lock: | ||||
|                 return list(self._priority_items) | ||||
|         except Exception as e: | ||||
|             logger.critical(f"CRITICAL: Failed to get queue list: {str(e)}") | ||||
|             return [] | ||||
|      | ||||
|     def get_uuid_position(self, target_uuid: str) -> Dict[str, Any]: | ||||
|         """Find position of UUID in queue""" | ||||
|         try: | ||||
|             with self._lock: | ||||
|                 queue_list = list(self._priority_items) | ||||
|                 total_items = len(queue_list) | ||||
|                  | ||||
|                 if total_items == 0: | ||||
|                     return {'position': None, 'total_items': 0, 'priority': None, 'found': False} | ||||
|                  | ||||
|                 # Find target item | ||||
|                 for item in queue_list: | ||||
|                     if (hasattr(item, 'item') and isinstance(item.item, dict) and  | ||||
|                         item.item.get('uuid') == target_uuid): | ||||
|                          | ||||
|                         # Count items with higher priority | ||||
|                         position = sum(1 for other in queue_list if other.priority < item.priority) | ||||
|                         return { | ||||
|                             'position': position, | ||||
|                             'total_items': total_items,  | ||||
|                             'priority': item.priority, | ||||
|                             'found': True | ||||
|                         } | ||||
|                  | ||||
|                 return {'position': None, 'total_items': total_items, 'priority': None, 'found': False} | ||||
|                  | ||||
|         except Exception as e: | ||||
|             logger.critical(f"CRITICAL: Failed to get UUID position for {target_uuid}: {str(e)}") | ||||
|             return {'position': None, 'total_items': 0, 'priority': None, 'found': False} | ||||
|      | ||||
|     def get_all_queued_uuids(self, limit: Optional[int] = None, offset: int = 0) -> Dict[str, Any]: | ||||
|         """Get all queued UUIDs with pagination""" | ||||
|         try: | ||||
|             with self._lock: | ||||
|                 queue_list = sorted(self._priority_items)  # Sort by priority | ||||
|                 total_items = len(queue_list) | ||||
|                  | ||||
|                 if total_items == 0: | ||||
|                     return {'items': [], 'total_items': 0, 'returned_items': 0, 'has_more': False} | ||||
|                  | ||||
|                 # Apply pagination | ||||
|                 end_idx = min(offset + limit, total_items) if limit else total_items | ||||
|                 items_to_process = queue_list[offset:end_idx] | ||||
|                  | ||||
|                 result = [] | ||||
|                 for position, item in enumerate(items_to_process, start=offset): | ||||
|                     if (hasattr(item, 'item') and isinstance(item.item, dict) and  | ||||
|                         'uuid' in item.item): | ||||
|                         result.append({ | ||||
|                             'uuid': item.item['uuid'], | ||||
|                             'position': position, | ||||
|                             'priority': item.priority | ||||
|                         }) | ||||
|                  | ||||
|                 return { | ||||
|                     'items': result, | ||||
|                     'total_items': total_items, | ||||
|                     'returned_items': len(result), | ||||
|                     'has_more': (offset + len(result)) < total_items | ||||
|                 } | ||||
|                  | ||||
|         except Exception as e: | ||||
|             logger.critical(f"CRITICAL: Failed to get all queued UUIDs: {str(e)}") | ||||
|             return {'items': [], 'total_items': 0, 'returned_items': 0, 'has_more': False} | ||||
|      | ||||
|     def get_queue_summary(self) -> Dict[str, Any]: | ||||
|         """Get queue summary statistics""" | ||||
|         try: | ||||
|             with self._lock: | ||||
|                 queue_list = list(self._priority_items) | ||||
|                 total_items = len(queue_list) | ||||
|                  | ||||
|                 if total_items == 0: | ||||
|                     return { | ||||
|                         'total_items': 0, 'priority_breakdown': {}, | ||||
|                         'immediate_items': 0, 'clone_items': 0, 'scheduled_items': 0 | ||||
|                     } | ||||
|                  | ||||
|                 immediate_items = clone_items = scheduled_items = 0 | ||||
|                 priority_counts = {} | ||||
|                  | ||||
|                 for item in queue_list: | ||||
|                     priority = item.priority | ||||
|                     priority_counts[priority] = priority_counts.get(priority, 0) + 1 | ||||
|                      | ||||
|                     if priority == 1: | ||||
|                         immediate_items += 1 | ||||
|                     elif priority == 5: | ||||
|                         clone_items += 1 | ||||
|                     elif priority > 100: | ||||
|                         scheduled_items += 1 | ||||
|                  | ||||
|                 return { | ||||
|                     'total_items': total_items, | ||||
|                     'priority_breakdown': priority_counts, | ||||
|                     'immediate_items': immediate_items, | ||||
|                     'clone_items': clone_items, | ||||
|                     'scheduled_items': scheduled_items, | ||||
|                     'min_priority': min(priority_counts.keys()) if priority_counts else None, | ||||
|                     'max_priority': max(priority_counts.keys()) if priority_counts else None | ||||
|                 } | ||||
|                  | ||||
|         except Exception as e: | ||||
|             logger.critical(f"CRITICAL: Failed to get queue summary: {str(e)}") | ||||
|             return {'total_items': 0, 'priority_breakdown': {}, 'immediate_items': 0,  | ||||
|                    'clone_items': 0, 'scheduled_items': 0} | ||||
|      | ||||
|     # PRIVATE METHODS | ||||
|     def _get_item_uuid(self, item) -> str: | ||||
|         """Safely extract UUID from item for logging""" | ||||
|         try: | ||||
|             if hasattr(item, 'item') and isinstance(item.item, dict): | ||||
|                 return item.item.get('uuid', 'unknown') | ||||
|         except Exception: | ||||
|             pass | ||||
|         return 'unknown' | ||||
|      | ||||
|     def _emit_put_signals(self, item): | ||||
|         """Emit signals when item is added""" | ||||
|         try: | ||||
|             # Watch update signal | ||||
|             if hasattr(item, 'item') and isinstance(item.item, dict) and 'uuid' in item.item: | ||||
|                 watch_check_update = signal('watch_check_update') | ||||
|                 if watch_check_update: | ||||
|                     watch_check_update.send(watch_uuid=item.item['uuid']) | ||||
|              | ||||
|             # Queue length signal | ||||
|             if self.queue_length_signal: | ||||
|                 self.queue_length_signal.send(length=self.qsize()) | ||||
|                  | ||||
|         except Exception as e: | ||||
|             logger.critical(f"CRITICAL: Failed to emit put signals: {str(e)}") | ||||
|      | ||||
|     def _emit_get_signals(self): | ||||
|         """Emit signals when item is removed""" | ||||
|         try: | ||||
|             if self.queue_length_signal: | ||||
|                 self.queue_length_signal.send(length=self.qsize()) | ||||
|         except Exception as e: | ||||
|             logger.critical(f"CRITICAL: Failed to emit get signals: {str(e)}") | ||||
|  | ||||
|  | ||||
| class NotificationQueue: | ||||
|     """ | ||||
|     Ultra-reliable notification queue using pure janus. | ||||
|      | ||||
|     CRITICAL DESIGN NOTE: Both sync_q and async_q are required because: | ||||
|     - sync_q: Used by Flask routes, ticker threads, and other synchronous code | ||||
|     - async_q: Used by async workers and coroutines | ||||
|      | ||||
|     DO NOT REMOVE EITHER INTERFACE - they bridge different execution contexts. | ||||
|     See RecheckPriorityQueue docstring above for detailed explanation. | ||||
|      | ||||
|     Simple wrapper around janus with bulletproof error handling. | ||||
|     """ | ||||
|      | ||||
|     def __init__(self, maxsize: int = 0): | ||||
|         try: | ||||
|             self._janus_queue = janus.Queue(maxsize=maxsize) | ||||
|             # BOTH interfaces required - see class docstring for why | ||||
|             self.sync_q = self._janus_queue.sync_q   # Flask routes, threads | ||||
|             self.async_q = self._janus_queue.async_q # Async workers | ||||
|             self.notification_event_signal = signal('notification_event') | ||||
|             logger.debug("NotificationQueue initialized successfully") | ||||
|         except Exception as e: | ||||
|             logger.critical(f"CRITICAL: Failed to initialize NotificationQueue: {str(e)}") | ||||
|             raise | ||||
|      | ||||
|     def put(self, item: Dict[str, Any], block: bool = True, timeout: Optional[float] = None): | ||||
|         """Thread-safe sync put with signal emission""" | ||||
|         try: | ||||
|             self.sync_q.put(item, block=block, timeout=timeout) | ||||
|             self._emit_notification_signal(item) | ||||
|             logger.debug(f"Successfully queued notification: {item.get('uuid', 'unknown')}") | ||||
|             return True | ||||
|         except Exception as e: | ||||
|             logger.critical(f"CRITICAL: Failed to put notification {item.get('uuid', 'unknown')}: {str(e)}") | ||||
|             return False | ||||
|      | ||||
|     async def async_put(self, item: Dict[str, Any]): | ||||
|         """Pure async put with signal emission""" | ||||
|         try: | ||||
|             await self.async_q.put(item) | ||||
|             self._emit_notification_signal(item) | ||||
|             logger.debug(f"Successfully async queued notification: {item.get('uuid', 'unknown')}") | ||||
|             return True | ||||
|         except Exception as e: | ||||
|             logger.critical(f"CRITICAL: Failed to async put notification {item.get('uuid', 'unknown')}: {str(e)}") | ||||
|             return False | ||||
|      | ||||
|     def get(self, block: bool = True, timeout: Optional[float] = None): | ||||
|         """Thread-safe sync get""" | ||||
|         try: | ||||
|             return self.sync_q.get(block=block, timeout=timeout) | ||||
|         except queue.Empty as e: | ||||
|             raise e | ||||
|         except Exception as e: | ||||
|             logger.critical(f"CRITICAL: Failed to get notification: {str(e)}") | ||||
|             raise e | ||||
|      | ||||
|     async def async_get(self): | ||||
|         """Pure async get""" | ||||
|         try: | ||||
|             return await self.async_q.get() | ||||
|         except queue.Empty as e: | ||||
|             raise e | ||||
|         except Exception as e: | ||||
|             logger.critical(f"CRITICAL: Failed to async get notification: {str(e)}") | ||||
|             raise e | ||||
|      | ||||
|     def qsize(self) -> int: | ||||
|         """Get current queue size""" | ||||
|         try: | ||||
|             return self.sync_q.qsize() | ||||
|         except Exception as e: | ||||
|             logger.critical(f"CRITICAL: Failed to get notification queue size: {str(e)}") | ||||
|             return 0 | ||||
|      | ||||
|     def empty(self) -> bool: | ||||
|         """Check if queue is empty""" | ||||
|         return self.qsize() == 0 | ||||
|      | ||||
|     def close(self): | ||||
|         """Close the janus queue""" | ||||
|         try: | ||||
|             self._janus_queue.close() | ||||
|             logger.debug("NotificationQueue closed successfully") | ||||
|         except Exception as e: | ||||
|             logger.critical(f"CRITICAL: Failed to close NotificationQueue: {str(e)}") | ||||
|      | ||||
|     def _emit_notification_signal(self, item: Dict[str, Any]): | ||||
|         """Emit notification signal""" | ||||
|         try: | ||||
|             if self.notification_event_signal and isinstance(item, dict): | ||||
|                 watch_uuid = item.get('uuid') | ||||
|                 if watch_uuid: | ||||
|                     self.notification_event_signal.send(watch_uuid=watch_uuid) | ||||
|                 else: | ||||
|                     self.notification_event_signal.send() | ||||
|         except Exception as e: | ||||
|             logger.critical(f"CRITICAL: Failed to emit notification signal: {str(e)}") | ||||
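The class docstrings above argue that both janus interfaces have to stay: sync_q for Flask routes and ticker threads, async_q for the asyncio workers. A minimal sketch of that bridge, assuming only the janus package itself (the queue and worker names here are illustrative, not taken from changedetection.io):

    import asyncio
    import threading
    import janus

    def ticker(sync_q):
        # Synchronous side: a plain thread (or Flask route) queues work without an event loop.
        for uuid in ("watch-1", "watch-2"):
            sync_q.put(uuid)
        sync_q.put(None)  # sentinel: tell the worker to stop

    async def worker(async_q):
        # Asynchronous side: awaits items without blocking the event loop.
        while True:
            item = await async_q.get()
            if item is None:
                break
            print(f"processing {item}")

    async def main():
        q = janus.Queue()  # janus queues are created inside the running loop
        t = threading.Thread(target=ticker, args=(q.sync_q,))
        t.start()
        await worker(q.async_q)
        t.join()
        q.close()
        await q.wait_closed()

    asyncio.run(main())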
| @@ -1,6 +1,6 @@ | ||||
| { | ||||
|     "name": "changedetection.io", | ||||
|     "short_name": "changedetection", | ||||
|     "name": "", | ||||
|     "short_name": "", | ||||
|     "icons": [ | ||||
|         { | ||||
|             "src": "android-chrome-192x192.png", | ||||
| @@ -15,8 +15,5 @@ | ||||
|     ], | ||||
|     "theme_color": "#ffffff", | ||||
|     "background_color": "#ffffff", | ||||
|     "display": "standalone", | ||||
|     "start_url": "/", | ||||
|     "scope": "/", | ||||
|     "gcm_sender_id": "103953800507" | ||||
|     "display": "standalone" | ||||
| } | ||||
|   | ||||
| @@ -1,450 +0,0 @@ | ||||
| /** | ||||
|  * changedetection.io Browser Push Notifications | ||||
|  * Handles service worker registration, push subscription management, and notification permissions | ||||
|  */ | ||||
|  | ||||
| class BrowserNotifications { | ||||
|     constructor() { | ||||
|         this.serviceWorkerRegistration = null; | ||||
|         this.vapidPublicKey = null; | ||||
|         this.isSubscribed = false; | ||||
|         this.init(); | ||||
|     } | ||||
|  | ||||
|     async init() { | ||||
|         if (!this.isSupported()) { | ||||
|             console.warn('Push notifications are not supported in this browser'); | ||||
|             return; | ||||
|         } | ||||
|  | ||||
|         try { | ||||
|             // Get VAPID public key from server | ||||
|             await this.fetchVapidPublicKey(); | ||||
|              | ||||
|             // Register service worker | ||||
|             await this.registerServiceWorker(); | ||||
|              | ||||
|             // Check existing subscription state | ||||
|             await this.checkExistingSubscription(); | ||||
|              | ||||
|             // Initialize UI elements | ||||
|             this.initializeUI(); | ||||
|              | ||||
|             // Set up notification URL monitoring | ||||
|             this.setupNotificationUrlMonitoring(); | ||||
|              | ||||
|         } catch (error) { | ||||
|             console.error('Failed to initialize browser notifications:', error); | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     isSupported() { | ||||
|         return 'serviceWorker' in navigator &&  | ||||
|                'PushManager' in window &&  | ||||
|                'Notification' in window; | ||||
|     } | ||||
|  | ||||
|     async fetchVapidPublicKey() { | ||||
|         try { | ||||
|             const response = await fetch('/browser-notifications-api/vapid-public-key'); | ||||
|             if (!response.ok) { | ||||
|                 throw new Error(`HTTP ${response.status}: ${response.statusText}`); | ||||
|             } | ||||
|             const data = await response.json(); | ||||
|             this.vapidPublicKey = data.publicKey; | ||||
|         } catch (error) { | ||||
|             console.error('Failed to fetch VAPID public key:', error); | ||||
|             throw error; | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     async registerServiceWorker() { | ||||
|         try { | ||||
|             this.serviceWorkerRegistration = await navigator.serviceWorker.register('/service-worker.js', { | ||||
|                 scope: '/' | ||||
|             }); | ||||
|  | ||||
|             console.log('Service Worker registered successfully'); | ||||
|  | ||||
|             // Wait for service worker to be ready | ||||
|             await navigator.serviceWorker.ready; | ||||
|  | ||||
|         } catch (error) { | ||||
|             console.error('Service Worker registration failed:', error); | ||||
|             throw error; | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     initializeUI() { | ||||
|         // Bind event handlers to existing elements in the template | ||||
|         this.bindEventHandlers(); | ||||
|          | ||||
|         // Update UI based on current permission state | ||||
|         this.updatePermissionStatus(); | ||||
|     } | ||||
|  | ||||
|     bindEventHandlers() { | ||||
|         const enableBtn = document.querySelector('#enable-notifications-btn'); | ||||
|         const testBtn = document.querySelector('#test-notification-btn'); | ||||
|  | ||||
|         if (enableBtn) { | ||||
|             enableBtn.addEventListener('click', () => this.requestNotificationPermission()); | ||||
|         } | ||||
|  | ||||
|         if (testBtn) { | ||||
|             testBtn.addEventListener('click', () => this.sendTestNotification()); | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     setupNotificationUrlMonitoring() { | ||||
|         // Monitor the notification URLs textarea for browser:// URLs | ||||
|         const notificationUrlsField = document.querySelector('textarea[name*="notification_urls"]'); | ||||
|         if (notificationUrlsField) { | ||||
|             const checkForBrowserUrls = async () => { | ||||
|                 const urls = notificationUrlsField.value || ''; | ||||
|                 const hasBrowserUrls = /browser:\/\//.test(urls); | ||||
|                  | ||||
|                 // If browser URLs are detected and we're not subscribed, auto-subscribe | ||||
|                 if (hasBrowserUrls && !this.isSubscribed && Notification.permission === 'default') { | ||||
|                     const shouldSubscribe = confirm('Browser notifications detected! Would you like to enable browser notifications now?'); | ||||
|                     if (shouldSubscribe) { | ||||
|                         await this.requestNotificationPermission(); | ||||
|                     } | ||||
|                 } else if (hasBrowserUrls && !this.isSubscribed && Notification.permission === 'granted') { | ||||
|                     // Permission already granted but not subscribed - auto-subscribe silently | ||||
|                     console.log('Auto-subscribing to browser notifications...'); | ||||
|                     await this.subscribe(); | ||||
|                 } | ||||
|             }; | ||||
|              | ||||
|             // Check immediately | ||||
|             checkForBrowserUrls(); | ||||
|              | ||||
|             // Check on input changes | ||||
|             notificationUrlsField.addEventListener('input', checkForBrowserUrls); | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     async updatePermissionStatus() { | ||||
|         const statusElement = document.querySelector('#permission-status'); | ||||
|         const enableBtn = document.querySelector('#enable-notifications-btn'); | ||||
|         const testBtn = document.querySelector('#test-notification-btn'); | ||||
|  | ||||
|         if (!statusElement) return; | ||||
|  | ||||
|         const permission = Notification.permission; | ||||
|         statusElement.textContent = permission; | ||||
|         statusElement.className = `permission-${permission}`; | ||||
|  | ||||
|         // Show/hide controls based on permission | ||||
|         if (permission === 'default') { | ||||
|             if (enableBtn) enableBtn.style.display = 'inline-block'; | ||||
|             if (testBtn) testBtn.style.display = 'none'; | ||||
|         } else if (permission === 'granted') { | ||||
|             if (enableBtn) enableBtn.style.display = 'none'; | ||||
|             if (testBtn) testBtn.style.display = 'inline-block'; | ||||
|         } else { // denied | ||||
|             if (enableBtn) enableBtn.style.display = 'none'; | ||||
|             if (testBtn) testBtn.style.display = 'none'; | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     async requestNotificationPermission() { | ||||
|         try { | ||||
|             const permission = await Notification.requestPermission(); | ||||
|             this.updatePermissionStatus(); | ||||
|              | ||||
|             if (permission === 'granted') { | ||||
|                 console.log('Notification permission granted'); | ||||
|                 // Automatically subscribe to browser notifications | ||||
|                 this.subscribe(); | ||||
|             } else { | ||||
|                 console.log('Notification permission denied'); | ||||
|             } | ||||
|         } catch (error) { | ||||
|             console.error('Error requesting notification permission:', error); | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     async subscribe() { | ||||
|         if (Notification.permission !== 'granted') { | ||||
|             alert('Please enable notifications first'); | ||||
|             return; | ||||
|         } | ||||
|  | ||||
|         if (this.isSubscribed) { | ||||
|             console.log('Already subscribed to browser notifications'); | ||||
|             return; | ||||
|         } | ||||
|  | ||||
|         try { | ||||
|             // First, try to clear any existing subscription with different keys | ||||
|             await this.clearExistingSubscription(); | ||||
|  | ||||
|             // Create push subscription | ||||
|             const subscription = await this.serviceWorkerRegistration.pushManager.subscribe({ | ||||
|                 userVisibleOnly: true, | ||||
|                 applicationServerKey: this.urlBase64ToUint8Array(this.vapidPublicKey) | ||||
|             }); | ||||
|  | ||||
|             // Send subscription to server | ||||
|             const response = await fetch('/browser-notifications-api/subscribe', { | ||||
|                 method: 'POST', | ||||
|                 headers: { | ||||
|                     'Content-Type': 'application/json', | ||||
|                     'X-CSRFToken': document.querySelector('input[name=csrf_token]')?.value | ||||
|                 }, | ||||
|                 body: JSON.stringify({ | ||||
|                     subscription: subscription.toJSON() | ||||
|                 }) | ||||
|             }); | ||||
|  | ||||
|             if (!response.ok) { | ||||
|                 throw new Error(`HTTP ${response.status}: ${response.statusText}`); | ||||
|             } | ||||
|  | ||||
|             // Store subscription status | ||||
|             this.isSubscribed = true; | ||||
|              | ||||
|             console.log('Successfully subscribed to browser notifications'); | ||||
|  | ||||
|         } catch (error) { | ||||
|             console.error('Failed to subscribe to browser notifications:', error); | ||||
|              | ||||
|             // Show user-friendly error message | ||||
|             if (error.message.includes('different applicationServerKey')) { | ||||
|                 this.showSubscriptionConflictDialog(error); | ||||
|             } else { | ||||
|                 alert(`Failed to subscribe: ${error.message}`); | ||||
|             } | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     async unsubscribe() { | ||||
|         try { | ||||
|             if (!this.isSubscribed) return; | ||||
|  | ||||
|             // Get current subscription | ||||
|             const subscription = await this.serviceWorkerRegistration.pushManager.getSubscription(); | ||||
|             if (!subscription) { | ||||
|                 this.isSubscribed = false; | ||||
|                 return; | ||||
|             } | ||||
|  | ||||
|             // Unsubscribe from server | ||||
|             const response = await fetch('/browser-notifications-api/unsubscribe', { | ||||
|                 method: 'POST', | ||||
|                 headers: { | ||||
|                     'Content-Type': 'application/json', | ||||
|                     'X-CSRFToken': document.querySelector('input[name=csrf_token]')?.value | ||||
|                 }, | ||||
|                 body: JSON.stringify({ | ||||
|                     subscription: subscription.toJSON() | ||||
|                 }) | ||||
|             }); | ||||
|  | ||||
|             if (!response.ok) { | ||||
|                 console.warn(`Server unsubscribe failed: ${response.status}`); | ||||
|             } | ||||
|  | ||||
|             // Unsubscribe locally | ||||
|             await subscription.unsubscribe(); | ||||
|  | ||||
|             // Update status | ||||
|             this.isSubscribed = false; | ||||
|              | ||||
|             console.log('Unsubscribed from browser notifications'); | ||||
|  | ||||
|         } catch (error) { | ||||
|             console.error('Failed to unsubscribe from browser notifications:', error); | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     async sendTestNotification() { | ||||
|         try { | ||||
|             // First, check if we're subscribed | ||||
|             if (!this.isSubscribed) { | ||||
|                 const shouldSubscribe = confirm('You need to subscribe to browser notifications first. Subscribe now?'); | ||||
|                 if (shouldSubscribe) { | ||||
|                     await this.subscribe(); | ||||
|                     // Give a moment for subscription to complete | ||||
|                     await new Promise(resolve => setTimeout(resolve, 1000)); | ||||
|                 } else { | ||||
|                     return; | ||||
|                 } | ||||
|             } | ||||
|  | ||||
|             const response = await fetch('/browser-notifications/test', { | ||||
|                 method: 'POST', | ||||
|                 headers: { | ||||
|                     'Content-Type': 'application/json', | ||||
|                     'X-CSRFToken': document.querySelector('input[name=csrf_token]')?.value | ||||
|                 } | ||||
|             }); | ||||
|  | ||||
|             if (!response.ok) { | ||||
|                 if (response.status === 404) { | ||||
|                     // No subscriptions found on server - try subscribing | ||||
|                     alert('No browser subscriptions found. Subscribing now...'); | ||||
|                     await this.subscribe(); | ||||
|                     return; | ||||
|                 } | ||||
|                 throw new Error(`HTTP ${response.status}: ${response.statusText}`); | ||||
|             } | ||||
|  | ||||
|             const result = await response.json(); | ||||
|             alert(result.message); | ||||
|             console.log('Test notification result:', result); | ||||
|         } catch (error) { | ||||
|             console.error('Failed to send test notification:', error); | ||||
|             alert(`Failed to send test notification: ${error.message}`); | ||||
|         } | ||||
|     } | ||||
|  | ||||
|  | ||||
|  | ||||
|  | ||||
|     urlBase64ToUint8Array(base64String) { | ||||
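|         // Decode the URL-safe base64 VAPID public key into the Uint8Array that | ||||
|         // pushManager.subscribe() expects as applicationServerKey. | ||||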
|         const padding = '='.repeat((4 - base64String.length % 4) % 4); | ||||
|         const base64 = (base64String + padding) | ||||
|             .replace(/-/g, '+') | ||||
|             .replace(/_/g, '/'); | ||||
|  | ||||
|         const rawData = window.atob(base64); | ||||
|         const outputArray = new Uint8Array(rawData.length); | ||||
|  | ||||
|         for (let i = 0; i < rawData.length; ++i) { | ||||
|             outputArray[i] = rawData.charCodeAt(i); | ||||
|         } | ||||
|         return outputArray; | ||||
|     } | ||||
|  | ||||
|     async checkExistingSubscription() { | ||||
|         /** | ||||
|          * Check if we already have a valid browser subscription | ||||
|          * Updates this.isSubscribed based on actual browser state | ||||
|          */ | ||||
|         try { | ||||
|             if (!this.serviceWorkerRegistration) { | ||||
|                 this.isSubscribed = false; | ||||
|                 return; | ||||
|             } | ||||
|              | ||||
|             const existingSubscription = await this.serviceWorkerRegistration.pushManager.getSubscription(); | ||||
|              | ||||
|             if (existingSubscription) { | ||||
|                 // We have a subscription - verify it's still valid and matches our VAPID key | ||||
|                 const subscriptionJson = existingSubscription.toJSON(); | ||||
|                  | ||||
|                 // Check if the endpoint is still active (basic validation) | ||||
|                 if (subscriptionJson.endpoint && subscriptionJson.keys) { | ||||
|                     console.log('Found existing valid subscription'); | ||||
|                     this.isSubscribed = true; | ||||
|                 } else { | ||||
|                     console.log('Found invalid subscription, clearing...'); | ||||
|                     await existingSubscription.unsubscribe(); | ||||
|                     this.isSubscribed = false; | ||||
|                 } | ||||
|             } else { | ||||
|                 console.log('No existing subscription found'); | ||||
|                 this.isSubscribed = false; | ||||
|             } | ||||
|         } catch (error) { | ||||
|             console.warn('Failed to check existing subscription:', error); | ||||
|             this.isSubscribed = false; | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     async clearExistingSubscription() { | ||||
|         /** | ||||
|          * Clear any existing push subscription that might conflict with our VAPID keys | ||||
|          */ | ||||
|         try { | ||||
|             const existingSubscription = await this.serviceWorkerRegistration.pushManager.getSubscription(); | ||||
|              | ||||
|             if (existingSubscription) { | ||||
|                 console.log('Found existing subscription, unsubscribing...'); | ||||
|                 await existingSubscription.unsubscribe(); | ||||
|                 console.log('Successfully cleared existing subscription'); | ||||
|             } | ||||
|         } catch (error) { | ||||
|             console.warn('Failed to clear existing subscription:', error); | ||||
|             // Don't throw - this is just cleanup | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     showSubscriptionConflictDialog(error) { | ||||
|         /** | ||||
|          * Show user-friendly dialog for subscription conflicts | ||||
|          */ | ||||
|         const message = `Browser notifications are already set up for a different changedetection.io instance or with different settings. | ||||
|  | ||||
| To fix this: | ||||
| 1. Clear your existing subscription  | ||||
| 2. Try subscribing again | ||||
|  | ||||
| Would you like to automatically clear the old subscription and retry?`; | ||||
|  | ||||
|         if (confirm(message)) { | ||||
|             this.clearExistingSubscription().then(() => { | ||||
|                 // Retry subscription after clearing | ||||
|                 setTimeout(() => { | ||||
|                     this.subscribe(); | ||||
|                 }, 500); | ||||
|             }); | ||||
|         } else { | ||||
|             alert('To use browser notifications, please manually clear your browser notifications for this site in browser settings, then try again.'); | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     async clearAllNotifications() { | ||||
|         /** | ||||
|          * Clear all browser notification subscriptions (admin function) | ||||
|          */ | ||||
|         try { | ||||
|             // Call the server to clear ALL subscriptions from datastore | ||||
|             const response = await fetch('/browser-notifications/clear', { | ||||
|                 method: 'POST', | ||||
|                 headers: { | ||||
|                     'Content-Type': 'application/json', | ||||
|                     'X-CSRFToken': document.querySelector('input[name=csrf_token]')?.value | ||||
|                 } | ||||
|             }); | ||||
|              | ||||
|             if (response.ok) { | ||||
|                 const result = await response.json(); | ||||
|                 console.log('Server response:', result.message); | ||||
|                  | ||||
|                 // Also clear the current browser's subscription if it exists | ||||
|                 const existingSubscription = await this.serviceWorkerRegistration.pushManager.getSubscription(); | ||||
|                 if (existingSubscription) { | ||||
|                     await existingSubscription.unsubscribe(); | ||||
|                     console.log('Cleared current browser subscription'); | ||||
|                 } | ||||
|                  | ||||
|                 // Update status | ||||
|                 this.isSubscribed = false; | ||||
|                  | ||||
|                 alert(result.message + '. All browser notifications have been cleared.'); | ||||
|             } else { | ||||
|                 const error = await response.json(); | ||||
|                 console.error('Server clear failed:', error.message); | ||||
|                 alert('Failed to clear server subscriptions: ' + error.message); | ||||
|             } | ||||
|              | ||||
|         } catch (error) { | ||||
|             console.error('Failed to clear all notifications:', error); | ||||
|             alert('Failed to clear notifications: ' + error.message); | ||||
|         } | ||||
|     } | ||||
|  | ||||
| } | ||||
|  | ||||
| // Initialize when DOM is ready | ||||
| if (document.readyState === 'loading') { | ||||
|     document.addEventListener('DOMContentLoaded', () => { | ||||
|         window.browserNotifications = new BrowserNotifications(); | ||||
|     }); | ||||
| } else { | ||||
|     window.browserNotifications = new BrowserNotifications(); | ||||
| } | ||||
| @@ -153,7 +153,6 @@ $(document).ready(function () { | ||||
|  | ||||
|                 // Tabs at bottom of list | ||||
|                 $('#post-list-mark-views').toggleClass("has-unviewed", general_stats.has_unviewed); | ||||
|                 $('#post-list-unread').toggleClass("has-unviewed", general_stats.has_unviewed); | ||||
|                 $('#post-list-with-errors').toggleClass("has-error", general_stats.count_errors !== 0) | ||||
|                 $('#post-list-with-errors a').text(`With errors (${ general_stats.count_errors })`); | ||||
|  | ||||
|   | ||||
| @@ -1,95 +0,0 @@ | ||||
| // changedetection.io Service Worker for Browser Push Notifications | ||||
|  | ||||
| self.addEventListener('install', function(event) { | ||||
|     console.log('Service Worker installing'); | ||||
|     self.skipWaiting(); | ||||
| }); | ||||
|  | ||||
| self.addEventListener('activate', function(event) { | ||||
|     console.log('Service Worker activating'); | ||||
|     event.waitUntil(self.clients.claim()); | ||||
| }); | ||||
|  | ||||
| self.addEventListener('push', function(event) { | ||||
|     console.log('Push message received', event); | ||||
|      | ||||
|     let notificationData = { | ||||
|         title: 'changedetection.io', | ||||
|         body: 'A watched page has changed', | ||||
|         icon: '/static/favicons/favicon-32x32.png', | ||||
|         badge: '/static/favicons/favicon-32x32.png', | ||||
|         tag: 'changedetection-notification', | ||||
|         requireInteraction: false, | ||||
|         timestamp: Date.now() | ||||
|     }; | ||||
|      | ||||
|     // Parse push data if available | ||||
|     if (event.data) { | ||||
|         try { | ||||
|             const pushData = event.data.json(); | ||||
|             notificationData = { | ||||
|                 ...notificationData, | ||||
|                 ...pushData | ||||
|             }; | ||||
|         } catch (e) { | ||||
|             console.warn('Failed to parse push data:', e); | ||||
|             notificationData.body = event.data.text() || notificationData.body; | ||||
|         } | ||||
|     } | ||||
|      | ||||
|     const promiseChain = self.registration.showNotification( | ||||
|         notificationData.title, | ||||
|         { | ||||
|             body: notificationData.body, | ||||
|             icon: notificationData.icon, | ||||
|             badge: notificationData.badge, | ||||
|             tag: notificationData.tag, | ||||
|             requireInteraction: notificationData.requireInteraction, | ||||
|             timestamp: notificationData.timestamp, | ||||
|             data: { | ||||
|                 url: notificationData.url || '/', | ||||
|                 timestamp: notificationData.timestamp | ||||
|             } | ||||
|         } | ||||
|     ); | ||||
|      | ||||
|     event.waitUntil(promiseChain); | ||||
| }); | ||||
|  | ||||
| self.addEventListener('notificationclick', function(event) { | ||||
|     console.log('Notification clicked', event); | ||||
|      | ||||
|     event.notification.close(); | ||||
|      | ||||
|     const targetUrl = event.notification.data?.url || '/'; | ||||
|      | ||||
|     event.waitUntil( | ||||
|         clients.matchAll().then(function(clientList) { | ||||
|             // Check if there's already a window/tab open with our app | ||||
|             for (let i = 0; i < clientList.length; i++) { | ||||
|                 const client = clientList[i]; | ||||
|                 if (client.url.includes(self.location.origin) && 'focus' in client) { | ||||
|                     client.navigate(targetUrl); | ||||
|                     return client.focus(); | ||||
|                 } | ||||
|             } | ||||
|             // If no existing window, open a new one | ||||
|             if (clients.openWindow) { | ||||
|                 return clients.openWindow(targetUrl); | ||||
|             } | ||||
|         }) | ||||
|     ); | ||||
| }); | ||||
|  | ||||
| self.addEventListener('notificationclose', function(event) { | ||||
|     console.log('Notification closed', event); | ||||
| }); | ||||
|  | ||||
| // Handle messages from the main thread | ||||
| self.addEventListener('message', function(event) { | ||||
|     console.log('Service Worker received message:', event.data); | ||||
|      | ||||
|     if (event.data && event.data.type === 'SKIP_WAITING') { | ||||
|         self.skipWaiting(); | ||||
|     } | ||||
| }); | ||||
| @@ -51,7 +51,6 @@ $(document).ready(function () { | ||||
|         $('#notification_body').val(''); | ||||
|         $('#notification_format').val('System default'); | ||||
|         $('#notification_urls').val(''); | ||||
|         $('#notification_muted_none').prop('checked', true); // in the case of a ternary field | ||||
|         e.preventDefault(); | ||||
|     }); | ||||
|     $("#notification-token-toggle").click(function (e) { | ||||
|   | ||||
										
											
File diff suppressed because one or more lines are too long
							| @@ -3,16 +3,15 @@ | ||||
|   "version": "0.0.3", | ||||
|   "description": "", | ||||
|   "main": "index.js", | ||||
|   "engines": { | ||||
|     "node": ">=18.0.0" | ||||
|   }, | ||||
|   "scripts": { | ||||
|     "watch": "sass --watch scss:. --style=compressed --no-source-map", | ||||
|     "build": "sass scss:. --style=compressed --no-source-map" | ||||
|     "watch": "node-sass -w scss -o .", | ||||
|     "build": "node-sass scss -o ." | ||||
|   }, | ||||
|   "author": "Leigh Morresi / Web Technologies s.r.o.", | ||||
|   "license": "Apache", | ||||
|   "author": "", | ||||
|   "license": "ISC", | ||||
|   "dependencies": { | ||||
|     "sass": "^1.77.8" | ||||
|     "node-sass": "^7.0.0", | ||||
|     "tar": "^6.1.9", | ||||
|     "trim-newlines": "^3.0.1" | ||||
|   } | ||||
| } | ||||
|   | ||||
| @@ -1,4 +1,4 @@ | ||||
| @use "parts/variables"; | ||||
| @import "parts/_variables.scss"; | ||||
|  | ||||
| #diff-ui { | ||||
|  | ||||
|   | ||||
| @@ -64,17 +64,17 @@ body.proxy-check-active { | ||||
| #recommended-proxy { | ||||
|   display: grid; | ||||
|   gap: 2rem; | ||||
|   padding-bottom: 1em; | ||||
|    | ||||
|   @media  (min-width: 991px) { | ||||
|     grid-template-columns: repeat(2, 1fr); | ||||
|   } | ||||
|     @media  (min-width: 991px) { | ||||
|       grid-template-columns: repeat(2, 1fr); | ||||
|     } | ||||
|  | ||||
|   > div { | ||||
|     border: 1px #aaa solid; | ||||
|     border-radius: 4px; | ||||
|     padding: 1em; | ||||
|   } | ||||
|  | ||||
|   padding-bottom: 1em; | ||||
| } | ||||
|  | ||||
| #extra-proxies-setting { | ||||
|   | ||||
| @@ -1,42 +1,27 @@ | ||||
| .watch-table { | ||||
|   &.favicon-not-enabled { | ||||
|     tr { | ||||
|       .favicon { | ||||
|         display: none; | ||||
|       } | ||||
|     } | ||||
|   } | ||||
|  | ||||
|   tr { | ||||
|     /* make the icons and the text inline-ish */ | ||||
|     td.inline.title-col { | ||||
|       .flex-wrapper { | ||||
|         display: flex; | ||||
|         align-items: center; | ||||
|         gap: 4px; | ||||
|       } | ||||
|     } | ||||
|   } | ||||
|  | ||||
|  | ||||
|   td, | ||||
|   th { | ||||
|     vertical-align: middle; | ||||
|  | ||||
|   } | ||||
|  | ||||
|   tr.has-favicon { | ||||
|     img.favicon { | ||||
|       display: inline-block !important; | ||||
|     } | ||||
|  | ||||
|     &.unviewed { | ||||
|       img.favicon { | ||||
|         opacity: 1.0 !important; | ||||
|       } | ||||
|     } | ||||
|   } | ||||
|  | ||||
|   .status-icons { | ||||
|     white-space: nowrap; | ||||
|     display: flex; | ||||
|     align-items: center; /* Vertical centering */ | ||||
|     gap: 4px; /* Space between image and text */ | ||||
|       display: flex; | ||||
|   align-items: center; /* Vertical centering */ | ||||
|   gap: 4px; /* Space between image and text */ | ||||
|     > * { | ||||
|       vertical-align: middle; | ||||
|     } | ||||
| @@ -70,23 +55,33 @@ | ||||
|     padding-right: 4px; | ||||
|   } | ||||
|  | ||||
|     // Reserved for future use | ||||
|   /*  &.thumbnail-type-screenshot { | ||||
|       tr.has-favicon { | ||||
|         td.inline.title-col { | ||||
|           img.thumbnail { | ||||
|             background-color: #fff; !* fallback bg for SVGs without bg *! | ||||
|             border-radius: 4px; !* subtle rounded corners *! | ||||
|             border: 1px solid #ddd; !* light border for contrast *! | ||||
|             box-shadow: 0 2px 6px rgba(0, 0, 0, 0.15); !* soft shadow *! | ||||
|             filter: contrast(1.05) saturate(1.1) drop-shadow(0 0 0.5px rgba(0, 0, 0, 0.2)); | ||||
|             object-fit: cover; !* crop/fill if needed *! | ||||
|             opacity: 0.8; | ||||
|             max-width: 30px; | ||||
|             max-height: 30px; | ||||
|             height: 30px; | ||||
|           } | ||||
|   tr.has-favicon { | ||||
|     td.inline.title-col { | ||||
|       .flex-wrapper { | ||||
|         display: flex; | ||||
|         align-items: center; | ||||
|         gap: 4px; | ||||
|       } | ||||
|     } | ||||
|   } | ||||
|  | ||||
|   // Reserved for future use | ||||
| /*  &.thumbnail-type-screenshot { | ||||
|     tr.has-favicon { | ||||
|       td.inline.title-col { | ||||
|         img.thumbnail { | ||||
|           background-color: #fff; !* fallback bg for SVGs without bg *! | ||||
|           border-radius: 4px; !* subtle rounded corners *! | ||||
|           border: 1px solid #ddd; !* light border for contrast *! | ||||
|           box-shadow: 0 2px 6px rgba(0, 0, 0, 0.15); !* soft shadow *! | ||||
|           filter: contrast(1.05) saturate(1.1) drop-shadow(0 0 0.5px rgba(0, 0, 0, 0.2)); | ||||
|           object-fit: cover; !* crop/fill if needed *! | ||||
|           opacity: 0.8; | ||||
|           max-width: 30px; | ||||
|           max-height: 30px; | ||||
|           height: 30px; | ||||
|         } | ||||
|       } | ||||
|     }*/ | ||||
|     } | ||||
|   }*/ | ||||
| } | ||||
| @@ -1,4 +1,4 @@ | ||||
| @use "minitabs"; | ||||
| @import "minitabs"; | ||||
|  | ||||
| body.preview-text-enabled { | ||||
|  | ||||
|   | ||||
| @@ -24,9 +24,6 @@ body.checking-now { | ||||
|   #post-list-mark-views.has-unviewed { | ||||
|     display: inline-block !important; | ||||
|   } | ||||
|   #post-list-unread.has-unviewed { | ||||
|     display: inline-block !important; | ||||
|   } | ||||
| } | ||||
|  | ||||
|  | ||||
|   | ||||
| @@ -34,17 +34,11 @@ $grid-gap: 0.5rem; | ||||
|  | ||||
|  | ||||
|     .last-checked { | ||||
|       margin-left: calc($grid-col-checkbox + $grid-gap); | ||||
|  | ||||
|       > span { | ||||
|         vertical-align: middle; | ||||
|       } | ||||
|     } | ||||
|  | ||||
|     .last-changed { | ||||
|       margin-left: calc($grid-col-checkbox + $grid-gap); | ||||
|     } | ||||
|  | ||||
|     .last-checked::before { | ||||
|       color: var(--color-text); | ||||
|       content: "Last Checked "; | ||||
| @@ -173,6 +167,6 @@ $grid-gap: 0.5rem; | ||||
|     } | ||||
|   } | ||||
|   .pure-table td { | ||||
|     padding: 3px !important; | ||||
|     padding: 5px !important; | ||||
|   } | ||||
| } | ||||
| @@ -1,115 +0,0 @@ | ||||
|  | ||||
| // Ternary radio button group component | ||||
| .ternary-radio-group { | ||||
|   display: flex; | ||||
|   gap: 0; | ||||
|   border: 1px solid var(--color-grey-750); | ||||
|   border-radius: 4px; | ||||
|   overflow: hidden; | ||||
|   width: fit-content; | ||||
|   background: var(--color-background); | ||||
|  | ||||
|   .ternary-radio-option { | ||||
|     position: relative; | ||||
|     cursor: pointer; | ||||
|     margin: 0; | ||||
|     display: flex; | ||||
|     align-items: center; | ||||
|  | ||||
|     input[type="radio"] { | ||||
|       position: absolute; | ||||
|       opacity: 0; | ||||
|       width: 0; | ||||
|       height: 0; | ||||
|     } | ||||
|  | ||||
|     .ternary-radio-label { | ||||
|       padding: 8px 16px; | ||||
|       background: var(--color-grey-900); | ||||
|       border: none; | ||||
|       border-right: 1px solid var(--color-grey-750); | ||||
|       font-size: 13px; | ||||
|       font-weight: 500; | ||||
|       color: var(--color-text); | ||||
|       transition: all 0.2s ease; | ||||
|       cursor: pointer; | ||||
|       display: block; | ||||
|       min-width: 60px; | ||||
|       text-align: center; | ||||
|     } | ||||
|  | ||||
|     &:last-child .ternary-radio-label { | ||||
|       border-right: none; | ||||
|     } | ||||
|  | ||||
|     input:checked + .ternary-radio-label { | ||||
|       background: var(--color-link); | ||||
|       color: var(--color-text-button); | ||||
|       font-weight: 600; | ||||
|  | ||||
|       &.ternary-default { | ||||
|         background: var(--color-grey-600); | ||||
|         color: var(--color-text-button); | ||||
|       } | ||||
|  | ||||
|       &:hover { | ||||
|         background: #1a7bc4; | ||||
|  | ||||
|         &.ternary-default { | ||||
|           background: var(--color-grey-500); | ||||
|         } | ||||
|       } | ||||
|     } | ||||
|  | ||||
|     &:hover .ternary-radio-label { | ||||
|       background: var(--color-grey-800); | ||||
|     } | ||||
|   } | ||||
|  | ||||
|   @media (max-width: 480px) { | ||||
|     width: 100%; | ||||
|  | ||||
|     .ternary-radio-label { | ||||
|       flex: 1; | ||||
|       min-width: auto; | ||||
|     } | ||||
|   } | ||||
| } | ||||
|  | ||||
| // Standard radio button styling | ||||
| input[type="radio"].pure-radio:checked + label, | ||||
| input[type="radio"].pure-radio:checked { | ||||
|   background: var(--color-link); | ||||
|   color: var(--color-text-button); | ||||
| } | ||||
|  | ||||
| html[data-darkmode="true"] { | ||||
|   .ternary-radio-group { | ||||
|     .ternary-radio-option { | ||||
|       .ternary-radio-label { | ||||
|         background: var(--color-grey-350); | ||||
|       } | ||||
|  | ||||
|       &:hover .ternary-radio-label { | ||||
|         background: var(--color-grey-400); | ||||
|       } | ||||
|  | ||||
|       input:checked + .ternary-radio-label { | ||||
|         background: var(--color-link); | ||||
|         color: var(--color-text-button); | ||||
|  | ||||
|         &.ternary-default { | ||||
|           background: var(--color-grey-600); | ||||
|         } | ||||
|  | ||||
|         &:hover { | ||||
|           background: #1a7bc4; | ||||
|  | ||||
|           &.ternary-default { | ||||
|             background: var(--color-grey-500); | ||||
|           } | ||||
|         } | ||||
|       } | ||||
|     } | ||||
|   } | ||||
| } | ||||
| @@ -2,25 +2,24 @@ | ||||
|  * -- BASE STYLES -- | ||||
|  */ | ||||
|  | ||||
| @use "parts/variables"; | ||||
| @use "parts/arrows"; | ||||
| @use "parts/browser-steps"; | ||||
| @use "parts/extra_proxies"; | ||||
| @use "parts/extra_browsers"; | ||||
| @use "parts/pagination"; | ||||
| @use "parts/spinners"; | ||||
| @use "parts/darkmode"; | ||||
| @use "parts/menu"; | ||||
| @use "parts/love"; | ||||
| @use "parts/preview_text_filter"; | ||||
| @use "parts/watch_table"; | ||||
| @use "parts/watch_table-mobile"; | ||||
| @use "parts/edit"; | ||||
| @use "parts/conditions_table"; | ||||
| @use "parts/lister_extra"; | ||||
| @use "parts/socket"; | ||||
| @use "parts/visualselector"; | ||||
| @use "parts/widgets"; | ||||
| @import "parts/_arrows"; | ||||
| @import "parts/_browser-steps"; | ||||
| @import "parts/_extra_proxies"; | ||||
| @import "parts/_extra_browsers"; | ||||
| @import "parts/_pagination"; | ||||
| @import "parts/_spinners"; | ||||
| @import "parts/_variables"; | ||||
| @import "parts/_darkmode"; | ||||
| @import "parts/_menu"; | ||||
| @import "parts/_love"; | ||||
| @import "parts/preview_text_filter"; | ||||
| @import "parts/_watch_table"; | ||||
| @import "parts/_watch_table-mobile"; | ||||
| @import "parts/_edit"; | ||||
| @import "parts/_conditions_table"; | ||||
| @import "parts/_lister_extra"; | ||||
| @import "parts/_socket"; | ||||
|  | ||||
|  | ||||
| body { | ||||
|   color: var(--color-text); | ||||
| @@ -188,15 +187,9 @@ code { | ||||
|   @extend .inline-tag; | ||||
| } | ||||
|  | ||||
| @media (min-width: 768px) { | ||||
|   .box { | ||||
|     margin: 0 1em !important; | ||||
|   } | ||||
| } | ||||
|  | ||||
| .box { | ||||
|   max-width: 100%; | ||||
|   margin: 0 0.3em; | ||||
|   margin: 0 1em; | ||||
|   flex-direction: column; | ||||
|   display: flex; | ||||
|   justify-content: center; | ||||
| @@ -958,6 +951,8 @@ ul { | ||||
|   } | ||||
| } | ||||
|  | ||||
| @import "parts/_visualselector"; | ||||
|  | ||||
| #webdriver_delay { | ||||
|     width: 5em; | ||||
| } | ||||
| @@ -1075,23 +1070,17 @@ ul { | ||||
|  | ||||
|  | ||||
| #quick-watch-processor-type { | ||||
|   ul#processor { | ||||
|     color: #fff; | ||||
|     padding-left: 0px; | ||||
|   color: #fff; | ||||
|   ul { | ||||
|     padding: 0.3rem; | ||||
|     li { | ||||
|       list-style: none; | ||||
|       font-size: 0.9rem; | ||||
|       display: grid; | ||||
|       grid-template-columns: auto 1fr; | ||||
|       align-items: center; | ||||
|       gap: 0.5rem; | ||||
|       margin-bottom: 0.5rem; | ||||
|       > * { | ||||
|         display: inline-block; | ||||
|       } | ||||
|     } | ||||
|   } | ||||
|   label, input { | ||||
|     padding: 0; | ||||
|     margin: 0; | ||||
|   } | ||||
| } | ||||
|  | ||||
| .restock-label { | ||||
| @@ -1130,12 +1119,11 @@ ul { | ||||
| } | ||||
|  | ||||
| #realtime-conn-error { | ||||
|   position: fixed; | ||||
|   position: absolute; | ||||
|   bottom: 0; | ||||
|   left: 0; | ||||
|   left: 30px; | ||||
|   background: var(--color-warning); | ||||
|   padding: 10px; | ||||
|   font-size: 0.8rem; | ||||
|   color: #fff; | ||||
|   opacity: 0.8; | ||||
| } | ||||
|   | ||||
										
											
File diff suppressed because one or more lines are too long
							| @@ -140,28 +140,6 @@ class ChangeDetectionStore: | ||||
|             secret = secrets.token_hex(16) | ||||
|             self.__data['settings']['application']['api_access_token'] = secret | ||||
|  | ||||
|         # Generate VAPID keys for browser push notifications | ||||
|         if not self.__data['settings']['application']['vapid'].get('private_key'): | ||||
|             try: | ||||
|                 from py_vapid import Vapid | ||||
|                 vapid = Vapid() | ||||
|                 vapid.generate_keys() | ||||
|                 # Convert bytes to strings for JSON serialization | ||||
|                 private_pem = vapid.private_pem() | ||||
|                 public_pem = vapid.public_pem() | ||||
|                  | ||||
|                 self.__data['settings']['application']['vapid']['private_key'] = private_pem.decode() if isinstance(private_pem, bytes) else private_pem | ||||
|                 self.__data['settings']['application']['vapid']['public_key'] = public_pem.decode() if isinstance(public_pem, bytes) else public_pem | ||||
|                  | ||||
|                 # Set default contact email if not present | ||||
|                 if not self.__data['settings']['application']['vapid'].get('contact_email'): | ||||
|                     self.__data['settings']['application']['vapid']['contact_email'] = 'citizen@example.com' | ||||
|                 logger.info("Generated new VAPID keys for browser push notifications") | ||||
|             except ImportError: | ||||
|                 logger.warning("py_vapid not available - browser notifications will not work") | ||||
|             except Exception as e: | ||||
|                 logger.warning(f"Failed to generate VAPID keys: {e}") | ||||
|  | ||||
|         self.needs_write = True | ||||
|  | ||||
|         # Finally start the thread that will manage periodic data saves to JSON | ||||
| @@ -284,6 +262,11 @@ class ChangeDetectionStore: | ||||
|         extras = deepcopy(self.data['watching'][uuid]) | ||||
|         new_uuid = self.add_watch(url=url, extras=extras) | ||||
|         watch = self.data['watching'][new_uuid] | ||||
|  | ||||
|         if self.data['settings']['application'].get('extract_title_as_title') or watch['extract_title_as_title']: | ||||
|             # Because it will be recalculated on the next fetch | ||||
|             self.data['watching'][new_uuid]['title'] = None | ||||
|  | ||||
|         return new_uuid | ||||
|  | ||||
|     def url_exists(self, url): | ||||
| @@ -325,6 +308,7 @@ class ChangeDetectionStore: | ||||
|                     'browser_steps', | ||||
|                     'css_filter', | ||||
|                     'extract_text', | ||||
|                     'extract_title_as_title', | ||||
|                     'headers', | ||||
|                     'ignore_text', | ||||
|                     'include_filters', | ||||
| @@ -339,7 +323,6 @@ class ChangeDetectionStore: | ||||
|                     'title', | ||||
|                     'trigger_text', | ||||
|                     'url', | ||||
|                     'use_page_title_in_list', | ||||
|                     'webdriver_js_execute_code', | ||||
|                 ]: | ||||
|                     if res.get(k): | ||||
| @@ -990,16 +973,6 @@ class ChangeDetectionStore: | ||||
|                         f_d.write(zlib.compress(f_j.read())) | ||||
|                         os.unlink(json_path) | ||||
|  | ||||
|     def update_20(self): | ||||
|         for uuid, watch in self.data['watching'].items(): | ||||
|             if self.data['watching'][uuid].get('extract_title_as_title'): | ||||
|                 self.data['watching'][uuid]['use_page_title_in_list'] = self.data['watching'][uuid].get('extract_title_as_title') | ||||
|                 del self.data['watching'][uuid]['extract_title_as_title'] | ||||
|  | ||||
|         if self.data['settings']['application'].get('extract_title_as_title'): | ||||
|             self.data['settings']['application']['ui']['use_page_title_in_list'] = self.data['settings']['application'].get('extract_title_as_title') | ||||
|  | ||||
|  | ||||
|     def add_notification_url(self, notification_url): | ||||
|          | ||||
|         logger.debug(f">>> Adding new notification_url - '{notification_url}'") | ||||
|   | ||||
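
For context on the key-generation logic removed from ChangeDetectionStore above: it relies entirely on py_vapid. A minimal standalone sketch of that same flow, using only the Vapid() / generate_keys() / private_pem() / public_pem() calls visible in the removed block (and assuming py_vapid is installed), looks like this:

# Sketch only: generate a VAPID key pair for browser push notifications with py_vapid.
from py_vapid import Vapid

vapid = Vapid()
vapid.generate_keys()

# The PEM helpers may return bytes; decode before storing in JSON settings,
# mirroring the isinstance() guard in the removed block.
private_pem = vapid.private_pem()
public_pem = vapid.public_pem()
private_key = private_pem.decode() if isinstance(private_pem, bytes) else private_pem
public_key = public_pem.decode() if isinstance(public_pem, bytes) else public_pem
print(public_key)
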
| @@ -33,34 +33,6 @@ | ||||
|                                 <div id="notification-test-log" style="display: none;"><span class="pure-form-message-inline">Processing..</span></div> | ||||
|                             </div> | ||||
|                         </div> | ||||
|                          | ||||
|                         <!-- Browser Notifications --> | ||||
|                         <div id="browser-notification-section"> | ||||
|                             <div class="pure-control-group"> | ||||
|                                 <label>Browser Notifications</label> | ||||
|                                 <div class="pure-form-message-inline"> | ||||
|                                     <p><strong>Browser push notifications!</strong> Use <code>browser://</code> URLs in your notification settings to receive real-time push notifications even when this tab is closed.</p> | ||||
|                                     <p><small><strong>Troubleshooting:</strong> If you get "different applicationServerKey" errors, click "Clear All Notifications" below and try again. This happens when switching between different changedetection.io instances.</small></p> | ||||
|                                     <div id="browser-notification-controls" style="margin-top: 1em;"> | ||||
|                                         <div id="notification-permission-status"> | ||||
|                                             <p>Browser notifications: <span id="permission-status">checking...</span></p> | ||||
|                                         </div> | ||||
|                                         <div id="browser-notification-actions"> | ||||
|                                             <button type="button" id="enable-notifications-btn" class="pure-button button-secondary button-xsmall" style="display: none;"> | ||||
|                                                 Enable Browser Notifications | ||||
|                                             </button> | ||||
|                                             <button type="button" id="test-notification-btn" class="pure-button button-secondary button-xsmall" style="display: none;"> | ||||
|                                                 Send browser test notification | ||||
|                                             </button> | ||||
|                                             <button type="button" id="clear-notifications-btn" class="pure-button button-secondary button-xsmall" onclick="window.browserNotifications?.clearAllNotifications()" style="margin-left: 0.5em;"> | ||||
|                                                 Clear All Notifications | ||||
|                                             </button> | ||||
|                                         </div> | ||||
|                                     </div> | ||||
|                                 </div> | ||||
|                             </div> | ||||
|                         </div> | ||||
|                          | ||||
|                         <div id="notification-customisation" class="pure-control-group"> | ||||
|                             <div class="pure-control-group"> | ||||
|                                 {{ render_field(form.notification_title, class="m-d notification-title", placeholder=settings_application['notification_title']) }} | ||||
| @@ -98,7 +70,7 @@ | ||||
|                                     </tr> | ||||
|                                     <tr> | ||||
|                                         <td><code>{{ '{{watch_title}}' }}</code></td> | ||||
|                                         <td>The page title of the watch, uses <title> if not set, falls back to URL</td> | ||||
|                                         <td>The title of the watch.</td> | ||||
|                                     </tr> | ||||
|                                     <tr> | ||||
|                                         <td><code>{{ '{{watch_tag}}' }}</code></td> | ||||
|   | ||||
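
The settings template block removed above describes browser:// push notifications, but the server-side sending code is not part of this diff. Purely as an illustration of how such a push is typically sent with a stored VAPID private key, here is a sketch using the pywebpush package (an assumption, not something this diff confirms the project uses; every value is a placeholder):

# Illustration only: send a Web Push message to a browser's stored subscription.
# The subscription values below are placeholders; a real subscription comes from the
# browser's PushManager, and the private key from the VAPID pair generated earlier.
from pywebpush import webpush, WebPushException

subscription_info = {
    "endpoint": "https://push.example.com/some-endpoint-id",
    "keys": {"p256dh": "BASE64URL_P256DH_KEY", "auth": "BASE64URL_AUTH_SECRET"},
}

try:
    webpush(
        subscription_info=subscription_info,
        data="A watched page changed",
        vapid_private_key="vapid_private.pem",  # path to the stored private key (placeholder)
        vapid_claims={"sub": "mailto:citizen@example.com"},
    )
except WebPushException as exc:
    print(f"Push failed: {exc}")
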
| @@ -1,29 +1,14 @@ | ||||
| {% macro render_field(field) %} | ||||
|     <div {% if field.errors or field.top_errors %} class="error" {% endif %}>{{ field.label }}</div> | ||||
|     <div {% if field.errors or field.top_errors %} class="error" {% endif %}>{{ field(**kwargs)|safe }} | ||||
|         {% if field.top_errors %} | ||||
|             top | ||||
|             <ul class="errors top-errors"> | ||||
|                 {% for error in field.top_errors %} | ||||
|                     <li>{{ error }}</li> | ||||
|                 {% endfor %} | ||||
|             </ul> | ||||
|         {% endif %} | ||||
|         {% if field.errors %} | ||||
|             <ul class=errors> | ||||
|                 {% if field.errors is mapping and 'form' in field.errors %} | ||||
|                     {#  and subfield form errors, such as used in RequiredFormField() for TimeBetweenCheckForm sub form #} | ||||
|                     {% set errors = field.errors['form'] %} | ||||
|                 {% else %} | ||||
|                     {#  regular list of errors with this field #} | ||||
|                     {% set errors = field.errors %} | ||||
|                 {% endif %} | ||||
|                 {% for error in errors %} | ||||
|                     <li>{{ error }}</li> | ||||
|                 {% endfor %} | ||||
|             </ul> | ||||
|         {% endif %} | ||||
|     </div> | ||||
|   <div {% if field.errors %} class="error" {% endif %}>{{ field.label }}</div> | ||||
|   <div {% if field.errors %} class="error" {% endif %}>{{ field(**kwargs)|safe }} | ||||
|   {% if field.errors %} | ||||
|     <ul class=errors> | ||||
|     {% for error in field.errors %} | ||||
|       <li>{{ error }}</li> | ||||
|     {% endfor %} | ||||
|     </ul> | ||||
|   {% endif %} | ||||
|   </div> | ||||
| {% endmacro %} | ||||
|  | ||||
| {% macro render_checkbox_field(field) %} | ||||
| @@ -39,23 +24,6 @@ | ||||
|   </div> | ||||
| {% endmacro %} | ||||
|  | ||||
| {% macro render_ternary_field(field, BooleanField=false) %} | ||||
|   {% if BooleanField %} | ||||
|     {% set _ = field.__setattr__('boolean_mode', true) %} | ||||
|   {% endif %} | ||||
|   <div class="ternary-field {% if field.errors %} error {% endif %}"> | ||||
|     <div class="ternary-field-label">{{ field.label }}</div> | ||||
|     <div class="ternary-field-widget">{{ field(**kwargs)|safe }}</div> | ||||
|     {% if field.errors %} | ||||
|       <ul class=errors> | ||||
|       {% for error in field.errors %} | ||||
|         <li>{{ error }}</li> | ||||
|       {% endfor %} | ||||
|       </ul> | ||||
|     {% endif %} | ||||
|   </div> | ||||
| {% endmacro %} | ||||
|  | ||||
|  | ||||
| {% macro render_simple_field(field) %} | ||||
|   <span class="label {% if field.errors %}error{% endif %}">{{ field.label }}</span> | ||||
|   | ||||
| @@ -5,7 +5,6 @@ | ||||
|     <meta charset="utf-8" > | ||||
|     <meta name="viewport" content="width=device-width, initial-scale=1.0" > | ||||
|     <meta name="description" content="Self hosted website change detection." > | ||||
|     <meta name="robots" content="noindex"> | ||||
|     <title>Change Detection{{extra_title}}</title> | ||||
|     {% if app_rss_token %} | ||||
|       <link rel="alternate" type="application/rss+xml" title="Changedetection.io » Feed{% if active_tag_uuid %}- {{active_tag.title}}{% endif %}" href="{{ url_for('rss.feed', tag=active_tag_uuid , token=app_rss_token)}}" > | ||||
| @@ -35,14 +34,13 @@ | ||||
|     <script src="{{url_for('static_content', group='js', filename='jquery-3.6.0.min.js')}}"></script> | ||||
|     <script src="{{url_for('static_content', group='js', filename='csrf.js')}}" defer></script> | ||||
|     <script src="{{url_for('static_content', group='js', filename='feather-icons.min.js')}}" defer></script> | ||||
|     <script src="{{url_for('static_content', group='js', filename='browser-notifications.js')}}" defer></script> | ||||
|     {% if socket_io_enabled %} | ||||
|     <script src="{{url_for('static_content', group='js', filename='socket.io.min.js')}}"></script> | ||||
|     <script src="{{url_for('static_content', group='js', filename='realtime.js')}}" defer></script> | ||||
|     {% endif %} | ||||
|   </head> | ||||
|  | ||||
|   <body class="{{extra_classes}}"> | ||||
|   <body class=""> | ||||
|     <div class="header"> | ||||
|     <div class="pure-menu-fixed" style="width: 100%;"> | ||||
|       <div class="home-menu pure-menu pure-menu-horizontal" id="nav-menu"> | ||||
| @@ -238,7 +236,7 @@ | ||||
|     <script src="{{url_for('static_content', group='js', filename='toggle-theme.js')}}" defer></script> | ||||
|  | ||||
|     <div id="checking-now-fixed-tab" style="display: none;"><span class="spinner"></span><span> Checking now</span></div> | ||||
|     <div id="realtime-conn-error" style="display:none">Real-time updates offline</div> | ||||
|     <div id="realtime-conn-error" style="display:none">Offline</div> | ||||
|   </body> | ||||
|  | ||||
| </html> | ||||
|   | ||||
| @@ -1,6 +1,6 @@ | ||||
| {% extends 'base.html' %} | ||||
| {% block content %} | ||||
| {% from '_helpers.html' import render_field, render_checkbox_field, render_button, render_time_schedule_form, playwright_warning, only_playwright_type_watches_warning, render_conditions_fieldlist_of_formfields_as_table, render_ternary_field %} | ||||
| {% from '_helpers.html' import render_field, render_checkbox_field, render_button, render_time_schedule_form, playwright_warning, only_playwright_type_watches_warning, render_conditions_fieldlist_of_formfields_as_table %} | ||||
| {% from '_common_fields.html' import render_common_settings_form %} | ||||
| <script src="{{url_for('static_content', group='js', filename='tabs.js')}}" defer></script> | ||||
| <script src="{{url_for('static_content', group='js', filename='vis.js')}}" defer></script> | ||||
| @@ -72,16 +72,15 @@ | ||||
|                         <div class="pure-form-message">Some sites use JavaScript to create the content, for this you should <a href="https://github.com/dgtlmoon/changedetection.io/wiki/Fetching-pages-with-WebDriver">use the Chrome/WebDriver Fetcher</a></div> | ||||
|                         <div class="pure-form-message">Variables are supported in the URL (<a href="https://github.com/dgtlmoon/changedetection.io/wiki/Handling-variables-in-the-watched-URL">help and examples here</a>).</div> | ||||
|                     </div> | ||||
|                     <div class="pure-control-group"> | ||||
|                         {{ render_field(form.tags) }} | ||||
|                         <span class="pure-form-message-inline">Organisational tag/group name used in the main listing page</span> | ||||
|                     </div> | ||||
|                     <div class="pure-control-group inline-radio"> | ||||
|                         {{ render_field(form.processor) }} | ||||
|                     </div> | ||||
|                     <div class="pure-control-group"> | ||||
|                         {{ render_field(form.title, class="m-d", placeholder=watch.label) }} | ||||
|                         <span class="pure-form-message-inline">Automatically uses the page title if found, you can also use your own title/description here</span> | ||||
|                         {{ render_field(form.title, class="m-d") }} | ||||
|                     </div> | ||||
|                     <div class="pure-control-group"> | ||||
|                         {{ render_field(form.tags) }} | ||||
|                         <span class="pure-form-message-inline">Organisational tag/group name used in the main listing page</span> | ||||
|                     </div> | ||||
|                     <div class="pure-control-group time-between-check border-fieldset"> | ||||
|  | ||||
| @@ -102,16 +101,15 @@ | ||||
|                         </div> | ||||
| <br> | ||||
|               </div> | ||||
|  | ||||
|                     <div class="pure-control-group"> | ||||
|                         {{ render_checkbox_field(form.extract_title_as_title) }} | ||||
|                     </div> | ||||
|                     <div class="pure-control-group"> | ||||
|                         {{ render_checkbox_field(form.filter_failure_notification_send) }} | ||||
|                         <span class="pure-form-message-inline"> | ||||
|                          Sends a notification when the filter can no longer be seen on the page, good for knowing when the page changed and your filter will not work anymore. | ||||
|                         </span> | ||||
|                     </div> | ||||
|                     <div class="pure-control-group"> | ||||
|                         {{ render_ternary_field(form.use_page_title_in_list) }} | ||||
|                     </div> | ||||
|                 </fieldset> | ||||
|             </div> | ||||
|  | ||||
| @@ -264,7 +262,7 @@ Math: {{ 1 + 1 }}") }} | ||||
|             <div class="tab-pane-inner" id="notifications"> | ||||
|                 <fieldset> | ||||
|                     <div  class="pure-control-group inline-radio"> | ||||
|                       {{ render_ternary_field(form.notification_muted, BooleanField=true) }} | ||||
|                       {{ render_checkbox_field(form.notification_muted) }} | ||||
|                     </div> | ||||
|                     {% if watch_needs_selenium_or_playwright %} | ||||
|                     <div class="pure-control-group inline-radio"> | ||||
| @@ -471,11 +469,11 @@ Math: {{ 1 + 1 }}") }} | ||||
|                 <div class="pure-control-group"> | ||||
|                     {{ render_button(form.save_button) }} | ||||
|                     <a href="{{url_for('ui.form_delete', uuid=uuid)}}" | ||||
|                        class="pure-button button-error ">Delete</a> | ||||
|                        class="pure-button button-small button-error ">Delete</a> | ||||
|                     {% if watch.history_n %}<a href="{{url_for('ui.clear_watch_history', uuid=uuid)}}" | ||||
|                        class="pure-button button-error">Clear History</a>{% endif %} | ||||
|                        class="pure-button button-small button-error ">Clear History</a>{% endif %} | ||||
|                     <a href="{{url_for('ui.form_clone', uuid=uuid)}}" | ||||
|                        class="pure-button">Clone & Edit</a> | ||||
|                        class="pure-button button-small ">Clone & Edit</a> | ||||
|                 </div> | ||||
|             </div> | ||||
|         </form> | ||||
| @@ -55,8 +55,7 @@ def do_test(client, live_server, make_test_use_extra_browser=False): | ||||
|                   "tags": "", | ||||
|                   "headers": "", | ||||
|                   'fetch_backend': f"extra_browser_{custom_browser_name}", | ||||
|                   'webdriver_js_execute_code': '', | ||||
|                   "time_between_check_use_default": "y" | ||||
|                   'webdriver_js_execute_code': '' | ||||
|             }, | ||||
|             follow_redirects=True | ||||
|         ) | ||||
|   | ||||
| @@ -14,12 +14,9 @@ def test_fetch_webdriver_content(client, live_server, measure_memory_usage): | ||||
|     ##################### | ||||
|     res = client.post( | ||||
|         url_for("settings.settings_page"), | ||||
|         data={ | ||||
|             "application-empty_pages_are_a_change": "", | ||||
|             "requests-time_between_check-minutes": 180, | ||||
|             'application-fetch_backend': "html_webdriver", | ||||
|             'application-ui-favicons_enabled': "y", | ||||
|         }, | ||||
|         data={"application-empty_pages_are_a_change": "", | ||||
|               "requests-time_between_check-minutes": 180, | ||||
|               'application-fetch_backend': "html_webdriver"}, | ||||
|         follow_redirects=True | ||||
|     ) | ||||
|  | ||||
| @@ -64,22 +61,3 @@ def test_fetch_webdriver_content(client, live_server, measure_memory_usage): | ||||
|         ) | ||||
|         assert res.status_code == 200 | ||||
|         assert len(res.data) > 10 | ||||
|  | ||||
|     ##################### disable favicons check | ||||
|     res = client.post( | ||||
|         url_for("settings.settings_page"), | ||||
|         data={ | ||||
|             "requests-time_between_check-minutes": 180, | ||||
|             'application-ui-favicons_enabled': "", | ||||
|             "application-empty_pages_are_a_change": "", | ||||
|         }, | ||||
|         follow_redirects=True | ||||
|     ) | ||||
|  | ||||
|     assert b"Settings updated." in res.data | ||||
|  | ||||
|     res = client.get( | ||||
|         url_for("watchlist.index"), | ||||
|     ) | ||||
|     # The UI can access it here | ||||
|     assert f'src="/static/favicon'.encode('utf8') not in res.data | ||||
|   | ||||
| @@ -28,7 +28,6 @@ def test_execute_custom_js(client, live_server, measure_memory_usage): | ||||
|             'fetch_backend': "html_webdriver", | ||||
|             'webdriver_js_execute_code': 'document.querySelector("button[name=test-button]").click();', | ||||
|             'headers': "testheader: yes\buser-agent: MyCustomAgent", | ||||
|             "time_between_check_use_default": "y", | ||||
|         }, | ||||
|         follow_redirects=True | ||||
|     ) | ||||
|   | ||||
| @@ -27,7 +27,6 @@ def test_preferred_proxy(client, live_server, measure_memory_usage): | ||||
|                 "proxy": "proxy-two", | ||||
|                 "tags": "", | ||||
|                 "url": url, | ||||
|                 "time_between_check_use_default": "y", | ||||
|               }, | ||||
|         follow_redirects=True | ||||
|     ) | ||||
|   | ||||
| @@ -62,7 +62,6 @@ def test_noproxy_option(client, live_server, measure_memory_usage): | ||||
|                 "proxy": "no-proxy", | ||||
|                 "tags": "", | ||||
|                 "url": url, | ||||
|                 "time_between_check_use_default": "y", | ||||
|               }, | ||||
|         follow_redirects=True | ||||
|     ) | ||||
|   | ||||
| @@ -44,7 +44,6 @@ def test_proxy_noconnect_custom(client, live_server, measure_memory_usage): | ||||
|         "url": test_url, | ||||
|         "fetch_backend": "html_webdriver" if os.getenv('PLAYWRIGHT_DRIVER_URL') or os.getenv("WEBDRIVER_URL") else "html_requests", | ||||
|         "proxy": "ui-0custom-test-proxy", | ||||
|         "time_between_check_use_default": "y", | ||||
|     } | ||||
|  | ||||
|     res = client.post( | ||||
|   | ||||
| @@ -66,7 +66,6 @@ def test_socks5(client, live_server, measure_memory_usage): | ||||
|             "proxy": "ui-0socks5proxy", | ||||
|             "tags": "", | ||||
|             "url": test_url, | ||||
|             "time_between_check_use_default": "y", | ||||
|         }, | ||||
|         follow_redirects=True | ||||
|     ) | ||||
|   | ||||
| @@ -53,7 +53,6 @@ def test_socks5_from_proxiesjson_file(client, live_server, measure_memory_usage) | ||||
|             "proxy": "socks5proxy", | ||||
|             "tags": "", | ||||
|             "url": test_url, | ||||
|             "time_between_check_use_default": "y", | ||||
|         }, | ||||
|         follow_redirects=True | ||||
|     ) | ||||
|   | ||||
| @@ -157,8 +157,7 @@ def test_check_notification_email_formats_default_Text_override_HTML(client, liv | ||||
|         data={ | ||||
|             "url": test_url, | ||||
|             "notification_format": 'HTML', | ||||
|             'fetch_backend': "html_requests", | ||||
|             "time_between_check_use_default": "y"}, | ||||
|             'fetch_backend': "html_requests"}, | ||||
|         follow_redirects=True | ||||
|     ) | ||||
|     assert b"Updated watch." in res.data | ||||
|   | ||||
| @@ -61,8 +61,7 @@ def test_check_removed_line_contains_trigger(client, live_server, measure_memory | ||||
|         data={"trigger_text": 'The golden line', | ||||
|               "url": test_url, | ||||
|               'fetch_backend': "html_requests", | ||||
|               'filter_text_removed': 'y', | ||||
|               "time_between_check_use_default": "y"}, | ||||
|               'filter_text_removed': 'y'}, | ||||
|         follow_redirects=True | ||||
|     ) | ||||
|     assert b"Updated watch." in res.data | ||||
| @@ -155,8 +154,7 @@ def test_check_add_line_contains_trigger(client, live_server, measure_memory_usa | ||||
|               'processor': 'text_json_diff', | ||||
|               'fetch_backend': "html_requests", | ||||
|               'filter_text_removed': '', | ||||
|               'filter_text_added': 'y', | ||||
|               "time_between_check_use_default": "y"}, | ||||
|               'filter_text_added': 'y'}, | ||||
|         follow_redirects=True | ||||
|     ) | ||||
|     assert b"Updated watch." in res.data | ||||
|   | ||||
| @@ -292,7 +292,9 @@ def test_access_denied(client, live_server, measure_memory_usage): | ||||
|  | ||||
| def test_api_watch_PUT_update(client, live_server, measure_memory_usage): | ||||
|  | ||||
|      | ||||
|     api_key = live_server.app.config['DATASTORE'].data['settings']['application'].get('api_access_token') | ||||
|  | ||||
|     # Create a watch | ||||
|     set_original_response() | ||||
|     test_url = url_for('test_endpoint', _external=True) | ||||
| @@ -300,27 +302,14 @@ def test_api_watch_PUT_update(client, live_server, measure_memory_usage): | ||||
|     # Create new | ||||
|     res = client.post( | ||||
|         url_for("createwatch"), | ||||
|         data=json.dumps({"url": test_url, | ||||
|                          'tag': "One, Two", | ||||
|                          "title": "My test URL", | ||||
|                          'headers': {'cookie': 'yum'}, | ||||
|                          "conditions": [ | ||||
|                              { | ||||
|                                  "field": "page_filtered_text", | ||||
|                                  "operator": "contains_regex", | ||||
|                                  "value": "."  # contains anything | ||||
|                              } | ||||
|                          ], | ||||
|                          "conditions_match_logic": "ALL", | ||||
|                          } | ||||
|                         ), | ||||
|         data=json.dumps({"url": test_url, 'tag': "One, Two", "title": "My test URL", 'headers': {'cookie': 'yum'} }), | ||||
|         headers={'content-type': 'application/json', 'x-api-key': api_key}, | ||||
|         follow_redirects=True | ||||
|     ) | ||||
|  | ||||
|     assert res.status_code == 201 | ||||
|  | ||||
|     wait_for_all_checks(client) | ||||
|  | ||||
|     # Get a listing, it will be the first one | ||||
|     res = client.get( | ||||
|         url_for("createwatch"), | ||||
| @@ -328,7 +317,6 @@ def test_api_watch_PUT_update(client, live_server, measure_memory_usage): | ||||
|     ) | ||||
|  | ||||
|     watch_uuid = list(res.json.keys())[0] | ||||
|     assert not res.json[watch_uuid].get('viewed'), 'A newly created watch can only be unviewed' | ||||
|  | ||||
|     # Check in the edit page just to be sure | ||||
|     res = client.get( | ||||
| @@ -342,12 +330,7 @@ def test_api_watch_PUT_update(client, live_server, measure_memory_usage): | ||||
|     res = client.put( | ||||
|         url_for("watch", uuid=watch_uuid), | ||||
|         headers={'x-api-key': api_key, 'content-type': 'application/json'}, | ||||
|         data=json.dumps({ | ||||
|             "title": "new title", | ||||
|             'time_between_check': {'minutes': 552}, | ||||
|             'headers': {'cookie': 'all eaten'}, | ||||
|             'last_viewed': int(time.time()) | ||||
|         }), | ||||
|         data=json.dumps({"title": "new title", 'time_between_check': {'minutes': 552}, 'headers': {'cookie': 'all eaten'}}), | ||||
|     ) | ||||
|     assert res.status_code == 200, "HTTP PUT update was sent OK" | ||||
|  | ||||
| @@ -357,7 +340,6 @@ def test_api_watch_PUT_update(client, live_server, measure_memory_usage): | ||||
|         headers={'x-api-key': api_key} | ||||
|     ) | ||||
|     assert res.json.get('title') == 'new title' | ||||
|     assert res.json.get('viewed'), 'With the timestamp greater than "changed" a watch can be updated to viewed' | ||||
|  | ||||
|     # Check in the edit page just to be sure | ||||
|     res = client.get( | ||||
| @@ -390,13 +372,13 @@ def test_api_watch_PUT_update(client, live_server, measure_memory_usage): | ||||
|  | ||||
|  | ||||
| def test_api_import(client, live_server, measure_memory_usage): | ||||
|  | ||||
|      | ||||
|     api_key = live_server.app.config['DATASTORE'].data['settings']['application'].get('api_access_token') | ||||
|  | ||||
|     res = client.post( | ||||
|         url_for("import") + "?tag=import-test", | ||||
|         data='https://website1.com\r\nhttps://website2.com', | ||||
|         headers={'x-api-key': api_key, 'content-type': 'text/plain'}, | ||||
|         headers={'x-api-key': api_key}, | ||||
|         follow_redirects=True | ||||
|     ) | ||||
|  | ||||
|   | ||||
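
The test_api.py changes above exercise the watch API with an x-api-key header and JSON bodies. As a rough external equivalent using the requests library (the /api/v1/watch/<uuid> route prefix is taken from the public API documentation rather than from this diff, so treat it as an assumption):

# Sketch: update a watch over HTTP, mirroring the PUT call in the test above.
# BASE_URL, API_KEY and WATCH_UUID are placeholders for a running instance.
import requests

BASE_URL = "http://localhost:5000"
API_KEY = "YOUR_API_KEY"
WATCH_UUID = "00000000-0000-0000-0000-000000000000"

resp = requests.put(
    f"{BASE_URL}/api/v1/watch/{WATCH_UUID}",
    headers={"x-api-key": API_KEY, "content-type": "application/json"},
    json={"title": "new title", "time_between_check": {"minutes": 552}},
)
resp.raise_for_status()
print(resp.status_code)  # expect 200, as the test asserts
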
| @@ -1,199 +0,0 @@ | ||||
| #!/usr/bin/env python3 | ||||
| """ | ||||
| OpenAPI validation tests for ChangeDetection.io API | ||||
|  | ||||
| This test file specifically verifies that OpenAPI validation is working correctly | ||||
| by testing various scenarios that should trigger validation errors. | ||||
| """ | ||||
|  | ||||
| import time | ||||
| import json | ||||
| from flask import url_for | ||||
| from .util import live_server_setup, wait_for_all_checks | ||||
|  | ||||
|  | ||||
| def test_openapi_validation_invalid_content_type_on_create_watch(client, live_server, measure_memory_usage): | ||||
|     """Test that creating a watch with invalid content-type triggers OpenAPI validation error.""" | ||||
|     api_key = live_server.app.config['DATASTORE'].data['settings']['application'].get('api_access_token') | ||||
|  | ||||
|     # Try to create a watch with JSON data but without proper content-type header | ||||
|     res = client.post( | ||||
|         url_for("createwatch"), | ||||
|         data=json.dumps({"url": "https://example.com", "title": "Test Watch"}), | ||||
|         headers={'x-api-key': api_key},  # Missing 'content-type': 'application/json' | ||||
|         follow_redirects=True | ||||
|     ) | ||||
|  | ||||
|     # Should get 400 error due to OpenAPI validation failure | ||||
|     assert res.status_code == 400, f"Expected 400 but got {res.status_code}" | ||||
|     assert b"OpenAPI validation failed" in res.data, "Should contain OpenAPI validation error message" | ||||
|  | ||||
|  | ||||
| def test_openapi_validation_missing_required_field_create_watch(client, live_server, measure_memory_usage): | ||||
|     """Test that creating a watch without required URL field triggers OpenAPI validation error.""" | ||||
|     api_key = live_server.app.config['DATASTORE'].data['settings']['application'].get('api_access_token') | ||||
|  | ||||
|     # Try to create a watch without the required 'url' field | ||||
|     res = client.post( | ||||
|         url_for("createwatch"), | ||||
|         data=json.dumps({"title": "Test Watch Without URL"}),  # Missing required 'url' field | ||||
|         headers={'x-api-key': api_key, 'content-type': 'application/json'}, | ||||
|         follow_redirects=True | ||||
|     ) | ||||
|  | ||||
|     # Should get 400 error due to missing required field | ||||
|     assert res.status_code == 400, f"Expected 400 but got {res.status_code}" | ||||
|     assert b"OpenAPI validation failed" in res.data, "Should contain OpenAPI validation error message" | ||||
|  | ||||
|  | ||||
| def test_openapi_validation_invalid_field_in_request_body(client, live_server, measure_memory_usage): | ||||
|     """Test that including invalid fields triggers OpenAPI validation error.""" | ||||
|     api_key = live_server.app.config['DATASTORE'].data['settings']['application'].get('api_access_token') | ||||
|  | ||||
|     # First create a valid watch | ||||
|     res = client.post( | ||||
|         url_for("createwatch"), | ||||
|         data=json.dumps({"url": "https://example.com", "title": "Test Watch"}), | ||||
|         headers={'x-api-key': api_key, 'content-type': 'application/json'}, | ||||
|         follow_redirects=True | ||||
|     ) | ||||
|     assert res.status_code == 201, "Watch creation should succeed" | ||||
|  | ||||
|     # Get the watch list to find the UUID | ||||
|     res = client.get( | ||||
|         url_for("createwatch"), | ||||
|         headers={'x-api-key': api_key} | ||||
|     ) | ||||
|     assert res.status_code == 200 | ||||
|     watch_uuid = list(res.json.keys())[0] | ||||
|  | ||||
|     # Now try to update the watch with an invalid field | ||||
|     res = client.put( | ||||
|         url_for("watch", uuid=watch_uuid), | ||||
|         headers={'x-api-key': api_key, 'content-type': 'application/json'}, | ||||
|         data=json.dumps({ | ||||
|             "title": "Updated title", | ||||
|             "invalid_field_that_doesnt_exist": "this should cause validation error" | ||||
|         }), | ||||
|     ) | ||||
|  | ||||
|     # Should get 400 error due to invalid field (this will be caught by internal validation) | ||||
|     # Note: This tests the flow where OpenAPI validation passes but internal validation catches it | ||||
|     assert res.status_code == 400, f"Expected 400 but got {res.status_code}" | ||||
|     assert b"Additional properties are not allowed" in res.data, "Should contain validation error about additional properties" | ||||
|  | ||||
|  | ||||
| def test_openapi_validation_import_wrong_content_type(client, live_server, measure_memory_usage): | ||||
|     """Test that import endpoint with wrong content-type triggers OpenAPI validation error.""" | ||||
|     api_key = live_server.app.config['DATASTORE'].data['settings']['application'].get('api_access_token') | ||||
|  | ||||
|     # Try to import URLs with JSON content-type instead of text/plain | ||||
|     res = client.post( | ||||
|         url_for("import") + "?tag=test-import", | ||||
|         data='https://website1.com\nhttps://website2.com', | ||||
|         headers={'x-api-key': api_key, 'content-type': 'application/json'},  # Wrong content-type | ||||
|         follow_redirects=True | ||||
|     ) | ||||
|  | ||||
|     # Should get 400 error due to content-type mismatch | ||||
|     assert res.status_code == 400, f"Expected 400 but got {res.status_code}" | ||||
|     assert b"OpenAPI validation failed" in res.data, "Should contain OpenAPI validation error message" | ||||
|  | ||||
|  | ||||
| def test_openapi_validation_import_correct_content_type_succeeds(client, live_server, measure_memory_usage): | ||||
|     """Test that import endpoint with correct content-type succeeds (positive test).""" | ||||
|     api_key = live_server.app.config['DATASTORE'].data['settings']['application'].get('api_access_token') | ||||
|  | ||||
|     # Import URLs with correct text/plain content-type | ||||
|     res = client.post( | ||||
|         url_for("import") + "?tag=test-import", | ||||
|         data='https://website1.com\nhttps://website2.com', | ||||
|         headers={'x-api-key': api_key, 'content-type': 'text/plain'},  # Correct content-type | ||||
|         follow_redirects=True | ||||
|     ) | ||||
|  | ||||
|     # Should succeed | ||||
|     assert res.status_code == 200, f"Expected 200 but got {res.status_code}" | ||||
|     assert len(res.json) == 2, "Should import 2 URLs" | ||||
|  | ||||
|  | ||||
| def test_openapi_validation_get_requests_bypass_validation(client, live_server, measure_memory_usage): | ||||
|     """Test that GET requests bypass OpenAPI validation entirely.""" | ||||
|     api_key = live_server.app.config['DATASTORE'].data['settings']['application'].get('api_access_token') | ||||
|  | ||||
|     # Disable API token requirement first | ||||
|     res = client.post( | ||||
|         url_for("settings.settings_page"), | ||||
|         data={ | ||||
|             "requests-time_between_check-minutes": 180, | ||||
|             "application-fetch_backend": "html_requests", | ||||
|             "application-api_access_token_enabled": "" | ||||
|         }, | ||||
|         follow_redirects=True | ||||
|     ) | ||||
|     assert b"Settings updated." in res.data | ||||
|  | ||||
|     # Make GET request to list watches - should succeed even without API key or content-type | ||||
|     res = client.get(url_for("createwatch"))  # No headers needed for GET | ||||
|     assert res.status_code == 200, f"GET requests should succeed without OpenAPI validation, got {res.status_code}" | ||||
|  | ||||
|     # Should return JSON with watch list (empty in this case) | ||||
|     assert isinstance(res.json, dict), "Should return JSON dictionary for watch list" | ||||
|  | ||||
|  | ||||
| def test_openapi_validation_create_tag_missing_required_title(client, live_server, measure_memory_usage): | ||||
|     """Test that creating a tag without required title triggers OpenAPI validation error.""" | ||||
|     api_key = live_server.app.config['DATASTORE'].data['settings']['application'].get('api_access_token') | ||||
|  | ||||
|     # Try to create a tag without the required 'title' field | ||||
|     res = client.post( | ||||
|         url_for("tag"), | ||||
|         data=json.dumps({"notification_urls": ["mailto:test@example.com"]}),  # Missing required 'title' field | ||||
|         headers={'x-api-key': api_key, 'content-type': 'application/json'}, | ||||
|         follow_redirects=True | ||||
|     ) | ||||
|  | ||||
|     # Should get 400 error due to missing required field | ||||
|     assert res.status_code == 400, f"Expected 400 but got {res.status_code}" | ||||
|     assert b"OpenAPI validation failed" in res.data, "Should contain OpenAPI validation error message" | ||||
|  | ||||
|  | ||||
| def test_openapi_validation_watch_update_allows_partial_updates(client, live_server, measure_memory_usage): | ||||
|     """Test that watch updates allow partial updates without requiring all fields (positive test).""" | ||||
|     api_key = live_server.app.config['DATASTORE'].data['settings']['application'].get('api_access_token') | ||||
|  | ||||
|     # First create a valid watch | ||||
|     res = client.post( | ||||
|         url_for("createwatch"), | ||||
|         data=json.dumps({"url": "https://example.com", "title": "Test Watch"}), | ||||
|         headers={'x-api-key': api_key, 'content-type': 'application/json'}, | ||||
|         follow_redirects=True | ||||
|     ) | ||||
|     assert res.status_code == 201, "Watch creation should succeed" | ||||
|  | ||||
|     # Get the watch list to find the UUID | ||||
|     res = client.get( | ||||
|         url_for("createwatch"), | ||||
|         headers={'x-api-key': api_key} | ||||
|     ) | ||||
|     assert res.status_code == 200 | ||||
|     watch_uuid = list(res.json.keys())[0] | ||||
|  | ||||
|     # Update only the title (partial update) - should succeed | ||||
|     res = client.put( | ||||
|         url_for("watch", uuid=watch_uuid), | ||||
|         headers={'x-api-key': api_key, 'content-type': 'application/json'}, | ||||
|         data=json.dumps({"title": "Updated Title Only"}),  # Only updating title, not URL | ||||
|     ) | ||||
|  | ||||
|     # Should succeed because UpdateWatch schema allows partial updates | ||||
|     assert res.status_code == 200, f"Partial updates should succeed, got {res.status_code}" | ||||
|  | ||||
|     # Verify the update worked | ||||
|     res = client.get( | ||||
|         url_for("watch", uuid=watch_uuid), | ||||
|         headers={'x-api-key': api_key} | ||||
|     ) | ||||
|     assert res.status_code == 200 | ||||
|     assert res.json.get('title') == 'Updated Title Only', "Title should be updated" | ||||
|     assert res.json.get('url') == 'https://example.com', "URL should remain unchanged" | ||||
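
The deleted OpenAPI test file above checks for messages such as "Additional properties are not allowed", which is the wording produced by JSON-Schema style validators. As a small illustration only (using the generic jsonschema package rather than the project's own validation layer), a schema with additionalProperties disabled rejects unknown fields like so:

# Illustration only: "additionalProperties": False makes unknown fields a validation error.
from jsonschema import ValidationError, validate

update_watch_schema = {
    "type": "object",
    "properties": {"title": {"type": "string"}, "url": {"type": "string"}},
    "additionalProperties": False,
}

try:
    validate(instance={"title": "Updated title", "bogus_field": 1}, schema=update_watch_schema)
except ValidationError as err:
    # err.message reads like: Additional properties are not allowed ('bogus_field' was unexpected)
    print(err.message)
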
| @@ -1,18 +1,15 @@ | ||||
| #!/usr/bin/env python3 | ||||
|  | ||||
| from flask import url_for | ||||
| from .util import live_server_setup, wait_for_all_checks, set_original_response | ||||
| from .util import live_server_setup, wait_for_all_checks | ||||
| import json | ||||
| import time | ||||
|  | ||||
| def test_api_tags_listing(client, live_server, measure_memory_usage): | ||||
|    #  live_server_setup(live_server) # Setup on conftest per function | ||||
|     api_key = live_server.app.config['DATASTORE'].data['settings']['application'].get('api_access_token') | ||||
|     tag_title = 'Test Tag' | ||||
|  | ||||
|  | ||||
|     set_original_response() | ||||
|  | ||||
|     # Get a listing | ||||
|     res = client.get( | ||||
|         url_for("tags"), | ||||
|         headers={'x-api-key': api_key} | ||||
| @@ -107,8 +104,6 @@ def test_api_tags_listing(client, live_server, measure_memory_usage): | ||||
|     assert res.status_code == 201 | ||||
|     watch_uuid = res.json.get('uuid') | ||||
|  | ||||
|  | ||||
|     wait_for_all_checks() | ||||
|     # Verify tag is associated with watch by name if need be | ||||
|     res = client.get( | ||||
|         url_for("watch", uuid=watch_uuid), | ||||
| @@ -117,21 +112,6 @@ def test_api_tags_listing(client, live_server, measure_memory_usage): | ||||
|     assert res.status_code == 200 | ||||
|     assert new_tag_uuid in res.json.get('tags', []) | ||||
|  | ||||
|     # Check recheck by tag | ||||
|     before_check_time = live_server.app.config['DATASTORE'].data['watching'][watch_uuid].get('last_checked') | ||||
|     time.sleep(1) | ||||
|     res = client.get( | ||||
|        url_for("tag", uuid=new_tag_uuid) + "?recheck=true", | ||||
|        headers={'x-api-key': api_key} | ||||
|     ) | ||||
|     wait_for_all_checks() | ||||
|     assert res.status_code == 200 | ||||
|     assert b'OK, 1 watches' in res.data | ||||
|  | ||||
|     after_check_time = live_server.app.config['DATASTORE'].data['watching'][watch_uuid].get('last_checked') | ||||
|  | ||||
|     assert before_check_time != after_check_time | ||||
|  | ||||
|     # Delete tag | ||||
|     res = client.delete( | ||||
|         url_for("tag", uuid=new_tag_uuid), | ||||
| @@ -161,6 +141,3 @@ def test_api_tags_listing(client, live_server, measure_memory_usage): | ||||
|         headers={'x-api-key': api_key}, | ||||
|     ) | ||||
|     assert res.status_code == 204 | ||||
|  | ||||
|  | ||||
|  | ||||
|   | ||||
| @@ -23,7 +23,7 @@ def test_basic_auth(client, live_server, measure_memory_usage): | ||||
|     # Check form validation | ||||
|     res = client.post( | ||||
|         url_for("ui.ui_edit.edit_page", uuid="first"), | ||||
|         data={"include_filters": "", "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests", "time_between_check_use_default": "y"}, | ||||
|         data={"include_filters": "", "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests"}, | ||||
|         follow_redirects=True | ||||
|     ) | ||||
|     assert b"Updated watch." in res.data | ||||
|   | ||||
| @@ -89,7 +89,7 @@ def test_check_basic_change_detection_functionality(client, live_server, measure | ||||
|     assert b'CDATA' in res.data | ||||
|  | ||||
|     assert expected_url.encode('utf-8') in res.data | ||||
| # | ||||
|  | ||||
|     # Following the 'diff' link, it should no longer display as 'unviewed' even after we recheck it a few times | ||||
|     res = client.get(url_for("ui.ui_views.diff_history_page", uuid=uuid)) | ||||
|     assert b'selected=""' in res.data, "Confirm diff history page loaded" | ||||
| @@ -104,34 +104,26 @@ def test_check_basic_change_detection_functionality(client, live_server, measure | ||||
|  | ||||
|     wait_for_all_checks(client) | ||||
|  | ||||
|  | ||||
|     # Do this a few times.. ensures we don't accidently set the status | ||||
|     # Do this a few times.. ensures we dont accidently set the status | ||||
|     for n in range(2): | ||||
|         res = client.get(url_for("ui.form_watch_checknow"), follow_redirects=True) | ||||
|         client.get(url_for("ui.form_watch_checknow"), follow_redirects=True) | ||||
|  | ||||
|         # Give the thread time to pick it up | ||||
|         wait_for_all_checks(client) | ||||
|  | ||||
|         # It should report nothing found (no new 'unviewed' class) | ||||
|         res = client.get(url_for("watchlist.index")) | ||||
|  | ||||
|  | ||||
|         assert b'unviewed' not in res.data | ||||
|         assert b'class="has-unviewed' not in res.data | ||||
|         assert b'head title' in res.data  # Should be ON by default | ||||
|         assert b'head title' not in res.data  # Should not be present because this is off by default | ||||
|         assert b'test-endpoint' in res.data | ||||
|  | ||||
|     # Recheck it but only with a title change, content wasnt changed | ||||
|     set_original_response(extra_title=" and more") | ||||
|     set_original_response() | ||||
|  | ||||
|     client.get(url_for("ui.form_watch_checknow"), follow_redirects=True) | ||||
|     wait_for_all_checks(client) | ||||
|     res = client.get(url_for("watchlist.index")) | ||||
|     assert b'head title and more' in res.data | ||||
|  | ||||
|     # disable <title> pickup | ||||
|     # Enable auto pickup of <title> in settings | ||||
|     res = client.post( | ||||
|         url_for("settings.settings_page"), | ||||
|         data={"application-ui-use_page_title_in_list": "", "requests-time_between_check-minutes": 180, | ||||
|         data={"application-extract_title_as_title": "1", "requests-time_between_check-minutes": 180, | ||||
|               'application-fetch_backend': "html_requests"}, | ||||
|         follow_redirects=True | ||||
|     ) | ||||
| @@ -142,14 +134,16 @@ def test_check_basic_change_detection_functionality(client, live_server, measure | ||||
|     res = client.get(url_for("watchlist.index")) | ||||
|     assert b'unviewed' in res.data | ||||
|     assert b'class="has-unviewed' in res.data | ||||
|     assert b'head title' not in res.data  # should now be off | ||||
|  | ||||
|     # It should have picked up the <title> | ||||
|     assert b'head title' in res.data | ||||
|  | ||||
|     # Be sure the last_viewed is going to be greater than the last snapshot | ||||
|     time.sleep(1) | ||||
|  | ||||
|     # hit the mark all viewed link | ||||
|     res = client.get(url_for("ui.mark_all_viewed"), follow_redirects=True) | ||||
|     time.sleep(0.2) | ||||
|  | ||||
|     assert b'class="has-unviewed' not in res.data | ||||
|     assert b'unviewed' not in res.data | ||||
|   | ||||
| @@ -86,8 +86,7 @@ def test_check_block_changedetection_text_NOT_present(client, live_server, measu | ||||
|         url_for("ui.ui_edit.edit_page", uuid="first"), | ||||
|         data={"text_should_not_be_present": ignore_text, | ||||
|               "url": test_url, | ||||
|               'fetch_backend': "html_requests", | ||||
|               "time_between_check_use_default": "y" | ||||
|               'fetch_backend': "html_requests" | ||||
|               }, | ||||
|         follow_redirects=True | ||||
|     ) | ||||
|   | ||||
| @@ -4,8 +4,6 @@ import time | ||||
|  | ||||
| from flask import url_for | ||||
| from .util import live_server_setup, wait_for_all_checks | ||||
| from ..model import CONDITIONS_MATCH_LOGIC_DEFAULT | ||||
|  | ||||
|  | ||||
| def set_original_response(number="50"): | ||||
|     test_return_data = f"""<html> | ||||
| @@ -78,7 +76,7 @@ def test_conditions_with_text_and_number(client, live_server): | ||||
|             "fetch_backend": "html_requests", | ||||
|             "include_filters": ".number-container", | ||||
|             "title": "Number AND Text Condition Test", | ||||
|             "conditions_match_logic": CONDITIONS_MATCH_LOGIC_DEFAULT,  # ALL = AND logic | ||||
|             "conditions_match_logic": "ALL",  # ALL = AND logic | ||||
|             "conditions-0-operator": "in", | ||||
|             "conditions-0-field": "page_filtered_text", | ||||
|             "conditions-0-value": "5", | ||||
| @@ -105,7 +103,6 @@ def test_conditions_with_text_and_number(client, live_server): | ||||
|             "conditions-5-operator": "contains_regex", | ||||
|             "conditions-5-field": "page_filtered_text", | ||||
|             "conditions-5-value": "\d", | ||||
|             "time_between_check_use_default": "y", | ||||
|         }, | ||||
|         follow_redirects=True | ||||
|     ) | ||||
| @@ -286,11 +283,10 @@ def test_lev_conditions_plugin(client, live_server, measure_memory_usage): | ||||
|         data={ | ||||
|             "url": test_url, | ||||
|             "fetch_backend": "html_requests", | ||||
|             "conditions_match_logic": CONDITIONS_MATCH_LOGIC_DEFAULT,  # ALL = AND logic | ||||
|             "conditions_match_logic": "ALL",  # ALL = AND logic | ||||
|             "conditions-0-field": "levenshtein_ratio", | ||||
|             "conditions-0-operator": "<", | ||||
|             "conditions-0-value": "0.8", # needs to be more of a diff to trigger a change | ||||
|             "time_between_check_use_default": "y" | ||||
|             "conditions-0-value": "0.8" # needs to be more of a diff to trigger a change | ||||
|         }, | ||||
|         follow_redirects=True | ||||
|     ) | ||||
|   | ||||
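In the two conditions hunks above, the imported CONDITIONS_MATCH_LOGIC_DEFAULT constant is replaced by the literal string "ALL". A short sketch of how the indexed conditions-N-* form fields are assembled is shown below; build_conditions is a hypothetical helper, and treating "ALL" as the AND default simply mirrors the comment carried in the diff.

    def build_conditions(match_logic, rows):
        # rows: list of (field, operator, value) tuples; each one becomes a
        # conditions-N-* group in the submitted form data.
        data = {"conditions_match_logic": match_logic}  # "ALL" = AND logic per the diff comment
        for i, (field, operator, value) in enumerate(rows):
            data[f"conditions-{i}-field"] = field
            data[f"conditions-{i}-operator"] = operator
            data[f"conditions-{i}-value"] = value
        return data

    # Mirrors the levenshtein condition from the hunk above:
    payload = build_conditions("ALL", [("levenshtein_ratio", "<", "0.8")])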
| @@ -95,7 +95,7 @@ def test_check_markup_include_filters_restriction(client, live_server, measure_m | ||||
|     # Add our URL to the import page | ||||
|     res = client.post( | ||||
|         url_for("ui.ui_edit.edit_page", uuid="first"), | ||||
|         data={"include_filters": include_filters, "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests", "time_between_check_use_default": "y"}, | ||||
|         data={"include_filters": include_filters, "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests"}, | ||||
|         follow_redirects=True | ||||
|     ) | ||||
|     assert b"Updated watch." in res.data | ||||
| @@ -154,8 +154,7 @@ def test_check_multiple_filters(client, live_server, measure_memory_usage): | ||||
|               "url": test_url, | ||||
|               "tags": "", | ||||
|               "headers": "", | ||||
|               'fetch_backend': "html_requests", | ||||
|               "time_between_check_use_default": "y"}, | ||||
|               'fetch_backend': "html_requests"}, | ||||
|         follow_redirects=True | ||||
|     ) | ||||
|     assert b"Updated watch." in res.data | ||||
| @@ -209,8 +208,7 @@ def test_filter_is_empty_help_suggestion(client, live_server, measure_memory_usa | ||||
|               "url": test_url, | ||||
|               "tags": "", | ||||
|               "headers": "", | ||||
|               'fetch_backend': "html_requests", | ||||
|               "time_between_check_use_default": "y"}, | ||||
|               'fetch_backend': "html_requests"}, | ||||
|         follow_redirects=True | ||||
|     ) | ||||
|     assert b"Updated watch." in res.data | ||||
|   | ||||
| @@ -171,7 +171,6 @@ def test_element_removal_full(client, live_server, measure_memory_usage): | ||||
|             "tags": "", | ||||
|             "headers": "", | ||||
|             "fetch_backend": "html_requests", | ||||
|             "time_between_check_use_default": "y", | ||||
|         }, | ||||
|         follow_redirects=True, | ||||
|     ) | ||||
| @@ -246,7 +245,6 @@ body > table > tr:nth-child(3) > td:nth-child(3)""", | ||||
|                 "url": test_url, | ||||
|                 "tags": "", | ||||
|                 "fetch_backend": "html_requests", | ||||
|                 "time_between_check_use_default": "y", | ||||
|             }, | ||||
|             follow_redirects=True, | ||||
|         ) | ||||
|   | ||||
| @@ -127,8 +127,7 @@ def test_low_level_errors_clear_correctly(client, live_server, measure_memory_us | ||||
|         url_for("ui.ui_edit.edit_page", uuid="first"), | ||||
|         data={ | ||||
|             "url": test_url, | ||||
|             "fetch_backend": "html_requests", | ||||
|             "time_between_check_use_default": "y"}, | ||||
|             "fetch_backend": "html_requests"}, | ||||
|         follow_redirects=True | ||||
|     ) | ||||
|  | ||||
|   | ||||
| @@ -46,7 +46,7 @@ def test_check_extract_text_from_diff(client, live_server, measure_memory_usage) | ||||
|         follow_redirects=False | ||||
|     ) | ||||
|  | ||||
|     assert b'No matches found while scanning all of the watch history for that RegEx.' not in res.data | ||||
|     assert b'Nothing matches that RegEx' not in res.data | ||||
|     assert res.content_type == 'text/csv' | ||||
|  | ||||
|     # Read the csv reply as stringio | ||||
|   | ||||
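The hunk above updates the "no matches" message asserted by the extract-text-from-diff test before the CSV reply is read via StringIO. A minimal sketch of that parsing step, assuming a UTF-8 body and leaving the exact column layout unspecified:

    import csv
    import io

    def read_csv_response(res):
        # res is the Flask test client response asserted to be text/csv above.
        assert res.content_type == 'text/csv'
        buffer = io.StringIO(res.data.decode('utf-8'))
        return list(csv.reader(buffer))  # first row is the header, the rest are the extracted matches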
| @@ -95,8 +95,7 @@ def test_check_filter_multiline(client, live_server, measure_memory_usage): | ||||
|               "url": test_url, | ||||
|               "tags": "", | ||||
|               "headers": "", | ||||
|               'fetch_backend': "html_requests", | ||||
|               "time_between_check_use_default": "y" | ||||
|               'fetch_backend': "html_requests" | ||||
|               }, | ||||
|         follow_redirects=True | ||||
|     ) | ||||
| @@ -150,8 +149,7 @@ def test_check_filter_and_regex_extract(client, live_server, measure_memory_usag | ||||
|               "url": test_url, | ||||
|               "tags": "", | ||||
|               "headers": "", | ||||
|               'fetch_backend': "html_requests", | ||||
|               "time_between_check_use_default": "y" | ||||
|               'fetch_backend': "html_requests" | ||||
|               }, | ||||
|         follow_redirects=True | ||||
|     ) | ||||
| @@ -224,8 +222,7 @@ def test_regex_error_handling(client, live_server, measure_memory_usage): | ||||
|         url_for("ui.ui_edit.edit_page", uuid="first"), | ||||
|         data={"extract_text": '/something bad\d{3/XYZ', | ||||
|               "url": test_url, | ||||
|               "fetch_backend": "html_requests", | ||||
|               "time_between_check_use_default": "y"}, | ||||
|               "fetch_backend": "html_requests"}, | ||||
|         follow_redirects=True | ||||
|     ) | ||||
|  | ||||
|   | ||||
| @@ -94,8 +94,7 @@ def test_filter_doesnt_exist_then_exists_should_get_notification(client, live_se | ||||
|         "title": "my title", | ||||
|         "headers": "", | ||||
|         "include_filters": '.ticket-available', | ||||
|         "fetch_backend": "html_requests", | ||||
|         "time_between_check_use_default": "y"}) | ||||
|         "fetch_backend": "html_requests"}) | ||||
|  | ||||
|     res = client.post( | ||||
|         url_for("ui.ui_edit.edit_page", uuid="first"), | ||||
|   | ||||
| @@ -72,7 +72,6 @@ def run_filter_test(client, live_server, content_filter): | ||||
|                   "notification_format": "Text", | ||||
|                   "fetch_backend": "html_requests", | ||||
|                   "filter_failure_notification_send": 'y', | ||||
|                   "time_between_check_use_default": "y", | ||||
|                   "headers": "", | ||||
|                   "tags": "my tag", | ||||
|                   "title": "my title 123", | ||||
|   | ||||
| @@ -424,8 +424,7 @@ def test_order_of_filters_tag_filter_and_watch_filter(client, live_server, measu | ||||
|             "url": test_url, | ||||
|             "tags": "test-tag-keep-order", | ||||
|             "headers": "", | ||||
|             'fetch_backend': "html_requests", | ||||
|             "time_between_check_use_default": "y"}, | ||||
|             'fetch_backend': "html_requests"}, | ||||
|         follow_redirects=True | ||||
|     ) | ||||
|     assert b"Updated watch." in res.data | ||||
|   | ||||
| @@ -111,7 +111,7 @@ def test_check_ignore_text_functionality(client, live_server, measure_memory_usa | ||||
|     # Add our URL to the import page | ||||
|     res = client.post( | ||||
|         url_for("ui.ui_edit.edit_page", uuid="first"), | ||||
|         data={"ignore_text": ignore_text, "url": test_url, 'fetch_backend': "html_requests", "time_between_check_use_default": "y"}, | ||||
|         data={"ignore_text": ignore_text, "url": test_url, 'fetch_backend': "html_requests"}, | ||||
|         follow_redirects=True | ||||
|     ) | ||||
|     assert b"Updated watch." in res.data | ||||
| @@ -205,7 +205,7 @@ def _run_test_global_ignore(client, as_source=False, extra_ignore=""): | ||||
|     #Adding some ignore text should not trigger a change | ||||
|     res = client.post( | ||||
|         url_for("ui.ui_edit.edit_page", uuid="first"), | ||||
|         data={"ignore_text": "something irrelevent but just to check", "url": test_url, 'fetch_backend': "html_requests", "time_between_check_use_default": "y"}, | ||||
|         data={"ignore_text": "something irrelevent but just to check", "url": test_url, 'fetch_backend': "html_requests"}, | ||||
|         follow_redirects=True | ||||
|     ) | ||||
|     assert b"Updated watch." in res.data | ||||
|   | ||||
| @@ -108,7 +108,7 @@ def test_403_page_check_works_with_ignore_status_code(client, live_server, measu | ||||
|     # Add our URL to the import page | ||||
|     res = client.post( | ||||
|         url_for("ui.ui_edit.edit_page", uuid="first"), | ||||
|         data={"ignore_status_codes": "y", "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests", "time_between_check_use_default": "y"}, | ||||
|         data={"ignore_status_codes": "y", "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests"}, | ||||
|         follow_redirects=True | ||||
|     ) | ||||
|     assert b"Updated watch." in res.data | ||||
|   | ||||
| @@ -257,8 +257,7 @@ def check_json_filter(json_filter, client, live_server): | ||||
|               "url": test_url, | ||||
|               "tags": "", | ||||
|               "headers": "", | ||||
|               "fetch_backend": "html_requests", | ||||
|               "time_between_check_use_default": "y" | ||||
|               "fetch_backend": "html_requests" | ||||
|               }, | ||||
|         follow_redirects=True | ||||
|     ) | ||||
| @@ -329,8 +328,7 @@ def check_json_filter_bool_val(json_filter, client, live_server): | ||||
|               "url": test_url, | ||||
|               "tags": "", | ||||
|               "headers": "", | ||||
|               "fetch_backend": "html_requests", | ||||
|               "time_between_check_use_default": "y" | ||||
|               "fetch_backend": "html_requests" | ||||
|               }, | ||||
|         follow_redirects=True | ||||
|     ) | ||||
| @@ -395,8 +393,7 @@ def check_json_ext_filter(json_filter, client, live_server): | ||||
|               "url": test_url, | ||||
|               "tags": "", | ||||
|               "headers": "", | ||||
|               "fetch_backend": "html_requests", | ||||
|               "time_between_check_use_default": "y" | ||||
|               "fetch_backend": "html_requests" | ||||
|               }, | ||||
|         follow_redirects=True | ||||
|     ) | ||||
|   | ||||
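The three check_json_* helpers above post a JSON-style filter through the same edit form; only the tail of each form dict is visible in these hunks, so assuming the json_filter argument is passed as include_filters, the payload would look roughly like the sketch below (the filter expression is illustrative only).

    def json_filter_payload(json_filter, test_url):
        # Assumption: the elided top of each hunk passes json_filter as include_filters.
        return {
            "include_filters": json_filter,  # e.g. a json: / jq: style expression
            "url": test_url,
            "tags": "",
            "headers": "",
            "fetch_backend": "html_requests",
        }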
| @@ -38,7 +38,6 @@ def test_content_filter_live_preview(client, live_server, measure_memory_usage): | ||||
|             "ignore_text": "something to ignore", | ||||
|             "trigger_text": "something to trigger", | ||||
|             "url": test_url, | ||||
|             "time_between_check_use_default": "y", | ||||
|         }, | ||||
|         follow_redirects=True | ||||
|     ) | ||||
|   | ||||
Some files were not shown because too many files have changed in this diff.