Compare commits

..

2 Commits

Author     SHA1        Message                                     Date
dgtlmoon   a484822dcf  Update store.py                             2025-06-16 16:30:46 +02:00
dgtlmoon   5737c65a9e  Better path cross-platform file handling    2025-06-16 15:58:03 +02:00
163 changed files with 4794 additions and 7290 deletions

View File

@@ -33,6 +33,7 @@ venv/
# Test and development files
test-datastore/
tests/
docs/
*.md
!README.md

View File

@@ -2,7 +2,7 @@
# Test that we can still build on Alpine (musl modified libc https://musl.libc.org/)
# Some packages won't install via PyPI because they don't have a wheel available under this architecture.
FROM ghcr.io/linuxserver/baseimage-alpine:3.22
FROM ghcr.io/linuxserver/baseimage-alpine:3.21
ENV PYTHONUNBUFFERED=1
COPY requirements.txt /requirements.txt
@@ -18,19 +18,17 @@ RUN \
libxslt-dev \
openssl-dev \
python3-dev \
file \
zip \
zlib-dev && \
apk add --update --no-cache \
libjpeg \
libxslt \
file \
nodejs \
poppler-utils \
python3 && \
echo "**** pip3 install test of changedetection.io ****" && \
python3 -m venv /lsiopy && \
pip install -U pip wheel setuptools && \
pip install -U --no-cache-dir --find-links https://wheel-index.linuxserver.io/alpine-3.22/ -r /requirements.txt && \
pip install -U --no-cache-dir --find-links https://wheel-index.linuxserver.io/alpine-3.21/ -r /requirements.txt && \
apk del --purge \
build-dependencies

View File

@@ -30,7 +30,7 @@ jobs:
steps:
- name: Checkout repository
uses: actions/checkout@v5
uses: actions/checkout@v4
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL

View File

@@ -39,9 +39,9 @@ jobs:
# Or if we are in a tagged release scenario.
if: ${{ github.event.workflow_run.conclusion == 'success' }} || ${{ github.event.release.tag_name }} != ''
steps:
- uses: actions/checkout@v5
- uses: actions/checkout@v4
- name: Set up Python 3.11
uses: actions/setup-python@v6
uses: actions/setup-python@v5
with:
python-version: 3.11

View File

@@ -7,9 +7,9 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v5
- uses: actions/checkout@v4
- name: Set up Python
uses: actions/setup-python@v6
uses: actions/setup-python@v5
with:
python-version: "3.11"
- name: Install pypa/build
@@ -34,12 +34,12 @@ jobs:
- build
steps:
- name: Download all the dists
uses: actions/download-artifact@v5
uses: actions/download-artifact@v4
with:
name: python-package-distributions
path: dist/
- name: Set up Python 3.11
uses: actions/setup-python@v6
uses: actions/setup-python@v5
with:
python-version: '3.11'
- name: Test that the basic pip built package runs without error
@@ -72,7 +72,7 @@ jobs:
steps:
- name: Download all the dists
uses: actions/download-artifact@v5
uses: actions/download-artifact@v4
with:
name: python-package-distributions
path: dist/

View File

@@ -46,9 +46,9 @@ jobs:
- platform: linux/arm64
dockerfile: ./.github/test/Dockerfile-alpine
steps:
- uses: actions/checkout@v5
- uses: actions/checkout@v4
- name: Set up Python 3.11
uses: actions/setup-python@v6
uses: actions/setup-python@v5
with:
python-version: 3.11

View File

@@ -7,7 +7,7 @@ jobs:
lint-code:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v5
- uses: actions/checkout@v4
- name: Lint with Ruff
run: |
pip install ruff
@@ -15,10 +15,6 @@ jobs:
ruff check . --select E9,F63,F7,F82
# Complete check with errors treated as warnings
ruff check . --exit-zero
- name: Validate OpenAPI spec
run: |
pip install openapi-spec-validator
python3 -c "from openapi_spec_validator import validate_spec; import yaml; validate_spec(yaml.safe_load(open('docs/api-spec.yaml')))"
test-application-3-10:
needs: lint-code
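
The lint workflow hunk above includes a step that validates docs/api-spec.yaml with openapi-spec-validator. The same check can be run locally; a minimal sketch, assuming openapi-spec-validator and PyYAML are installed and the spec path matches the one used in the workflow:

# Minimal local version of the OpenAPI spec check from the workflow above.
# Assumes `pip install openapi-spec-validator pyyaml` and a spec at docs/api-spec.yaml.
import yaml
from openapi_spec_validator import validate_spec

with open("docs/api-spec.yaml") as f:
    spec = yaml.safe_load(f)

validate_spec(spec)  # raises an exception if the document is not valid OpenAPI
print("docs/api-spec.yaml passed OpenAPI validation")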

View File

@@ -20,11 +20,11 @@ jobs:
env:
PYTHON_VERSION: ${{ inputs.python-version }}
steps:
- uses: actions/checkout@v5
- uses: actions/checkout@v4
# Mainly just for link/flake8
- name: Set up Python ${{ env.PYTHON_VERSION }}
uses: actions/setup-python@v6
uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
@@ -71,7 +71,6 @@ jobs:
docker run test-changedetectionio bash -c 'python3 -m unittest changedetectionio.tests.unit.test_watch_model'
docker run test-changedetectionio bash -c 'python3 -m unittest changedetectionio.tests.unit.test_jinja2_security'
docker run test-changedetectionio bash -c 'python3 -m unittest changedetectionio.tests.unit.test_semver'
docker run test-changedetectionio bash -c 'python3 -m unittest changedetectionio.tests.unit.test_browser_notifications'
- name: Test built container with Pytest (generally as requests/plaintext fetching)
run: |

View File

@@ -5,6 +5,7 @@ ARG PYTHON_VERSION=3.11
FROM python:${PYTHON_VERSION}-slim-bookworm AS builder
# See `cryptography` pin comment in requirements.txt
ARG CRYPTOGRAPHY_DONT_BUILD_RUST=1
RUN apt-get update && apt-get install -y --no-install-recommends \
g++ \
@@ -15,8 +16,6 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
libssl-dev \
libxslt-dev \
make \
patch \
pkg-config \
zlib1g-dev
RUN mkdir /install
@@ -26,14 +25,6 @@ COPY requirements.txt /requirements.txt
# Use cache mounts and multiple wheel sources for faster ARM builds
ENV PIP_CACHE_DIR=/tmp/pip-cache
# Help Rust find OpenSSL for cryptography package compilation on ARM
ENV PKG_CONFIG_PATH="/usr/lib/pkgconfig:/usr/lib/arm-linux-gnueabihf/pkgconfig:/usr/lib/aarch64-linux-gnu/pkgconfig"
ENV PKG_CONFIG_ALLOW_SYSTEM_CFLAGS=1
ENV OPENSSL_DIR="/usr"
ENV OPENSSL_LIB_DIR="/usr/lib/arm-linux-gnueabihf"
ENV OPENSSL_INCLUDE_DIR="/usr/include/openssl"
# Additional environment variables for cryptography Rust build
ENV CRYPTOGRAPHY_DONT_BUILD_RUST=1
RUN --mount=type=cache,target=/tmp/pip-cache \
pip install \
--extra-index-url https://www.piwheels.org/simple \
@@ -62,8 +53,6 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
locales \
# For pdftohtml
poppler-utils \
# favicon type detection and other uses
file \
zlib1g \
&& apt-get clean && rm -rf /var/lib/apt/lists/*
@@ -84,11 +73,6 @@ EXPOSE 5000
# The actual flask app module
COPY changedetectionio /app/changedetectionio
# Also for OpenAPI validation wrapper - needs the YML
RUN [ ! -d "/app/docs" ] && mkdir /app/docs
COPY docs/api-spec.yaml /app/docs/api-spec.yaml
# Starting wrapper
COPY changedetection.py /app/changedetection.py
@@ -97,9 +81,6 @@ COPY changedetection.py /app/changedetection.py
ARG LOGGER_LEVEL=''
ENV LOGGER_LEVEL="$LOGGER_LEVEL"
# Default
ENV LC_ALL=en_US.UTF-8
WORKDIR /app
CMD ["python", "./changedetection.py", "-d", "/datastore"]

View File

@@ -186,7 +186,7 @@
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright 2025 Web Technologies s.r.o.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.

View File

@@ -1,7 +1,7 @@
recursive-include changedetectionio/api *
recursive-include changedetectionio/blueprint *
recursive-include changedetectionio/conditions *
recursive-include changedetectionio/content_fetchers *
recursive-include changedetectionio/conditions *
recursive-include changedetectionio/model *
recursive-include changedetectionio/notification *
recursive-include changedetectionio/processors *
@@ -9,7 +9,6 @@ recursive-include changedetectionio/realtime *
recursive-include changedetectionio/static *
recursive-include changedetectionio/templates *
recursive-include changedetectionio/tests *
recursive-include changedetectionio/widgets *
prune changedetectionio/static/package-lock.json
prune changedetectionio/static/styles/node_modules
prune changedetectionio/static/styles/package-lock.json

View File

@@ -1,21 +1,11 @@
# Monitor website changes
## Web Site Change Detection, Monitoring and Notification.
Detect WebPage Changes Automatically — Monitor Web Page Changes in Real Time
Monitor websites for updates — get notified via Discord, Email, Slack, Telegram, Webhook and many more.
Detect web page content changes and get instant alerts.
[Changedetection.io is the best tool to monitor web-pages for changes](https://changedetection.io) Track website content changes and receive notifications via Discord, Email, Slack, Telegram and 90+ more
Ideal for monitoring price changes, content edits, conditional changes and more.
Live your data-life pro-actively, track website content changes and receive notifications via Discord, Email, Slack, Telegram and 70+ more
[<img src="https://raw.githubusercontent.com/dgtlmoon/changedetection.io/master/docs/screenshot.png" style="max-width:100%;" alt="Self-hosted web page change monitoring, list of websites with changes" title="Self-hosted web page change monitoring, list of websites with changes" />](https://changedetection.io)
[**Don't have time? Try our extremely affordable subscription use our proxies and support!**](https://changedetection.io)
[**Don't have time? Let us host it for you! try our extremely affordable subscription use our proxies and support!**](https://changedetection.io)
### Target specific parts of the webpage using the Visual Selector tool.

View File

@@ -1,13 +1,11 @@
# Detect Website Changes Automatically — Monitor Web Page Changes in Real Time
## Web Site Change Detection, Restock monitoring and notifications.
Monitor websites for updates — get notified via Discord, Email, Slack, Telegram, Webhook and many more.
**_Detect website content changes and perform meaningful actions - trigger notifications via Discord, Email, Slack, Telegram, API calls and many more._**
**Detect web page content changes and get instant alerts.**
Ideal for monitoring price changes, content edits, conditional changes and more.
_Live your data-life pro-actively._
[<img src="https://raw.githubusercontent.com/dgtlmoon/changedetection.io/master/docs/screenshot.png" style="max-width:100%;" alt="Web site page change monitoring" title="Web site page change monitoring" />](https://changedetection.io?src=github)
[<img src="https://raw.githubusercontent.com/dgtlmoon/changedetection.io/master/docs/screenshot.png" style="max-width:100%;" alt="Self-hosted web site page change monitoring" title="Self-hosted web site page change monitoring" />](https://changedetection.io?src=github)
[![Release Version][release-shield]][release-link] [![Docker Pulls][docker-pulls]][docker-link] [![License][license-shield]](LICENSE.md)
@@ -15,7 +13,6 @@ Ideal for monitoring price changes, content edits, conditional changes and more.
[**Get started with website page change monitoring straight away. Don't have time? Try our $8.99/month subscription, use our proxies and support!**](https://changedetection.io) , _half the price of other website change monitoring services!_
- Chrome browser included.
- Nothing to install, access via browser login after signup.
- Super fast, no registration needed setup.
@@ -102,7 +99,9 @@ _Need an actual Chrome runner with Javascript support? We support fetching via W
- Configurable [proxy per watch](https://github.com/dgtlmoon/changedetection.io/wiki/Proxy-configuration)
- Send a screenshot with the notification when a change is detected in the web page
We [recommend and use Bright Data](https://brightdata.grsm.io/n0r16zf7eivq) global proxy services, Bright Data will match any first deposit up to $150 using our signup link.
We [recommend and use Bright Data](https://brightdata.grsm.io/n0r16zf7eivq) global proxy services, Bright Data will match any first deposit up to $100 using our signup link.
[Oxylabs](https://oxylabs.go2cloud.org/SH2d) is also an excellent proxy provider and well worth using; they offer Residential, ISP, Rotating and many other proxy types to suit your project.
Please :star: star :star: this project and help it grow! https://github.com/dgtlmoon/changedetection.io/
@@ -280,10 +279,7 @@ Excel import is recommended - that way you can better organise tags/groups of we
## API Support
Full REST API for programmatic management of watches, tags, notifications and more.
- **[Interactive API Documentation](https://changedetection.io/docs/api_v1/index.html)** - Complete API reference with live testing
- **[OpenAPI Specification](docs/api-spec.yaml)** - Generate SDKs for any programming language
Supports managing the website watch list [via our API](https://changedetection.io/docs/api_v1/index.html)
## Support us

View File

@@ -2,7 +2,7 @@
# Read more https://github.com/dgtlmoon/changedetection.io/wiki
__version__ = '0.50.14'
__version__ = '0.50.3'
from changedetectionio.strtobool import strtobool
from json.decoder import JSONDecodeError
@@ -35,22 +35,13 @@ def sigshutdown_handler(_signo, _stack_frame):
app.config.exit.set()
datastore.stop_thread = True
# Shutdown workers and queues immediately
# Shutdown workers immediately
try:
from changedetectionio import worker_handler
worker_handler.shutdown_workers()
except Exception as e:
logger.error(f"Error shutting down workers: {str(e)}")
# Close janus queues properly
try:
from changedetectionio.flask_app import update_q, notification_q
update_q.close()
notification_q.close()
logger.debug("Janus queues closed successfully")
except Exception as e:
logger.critical(f"CRITICAL: Failed to close janus queues: {e}")
# Shutdown socketio server fast
from changedetectionio.flask_app import socketio_server
if socketio_server and hasattr(socketio_server, 'shutdown'):

View File

@@ -3,7 +3,7 @@ from changedetectionio.strtobool import strtobool
from flask_restful import abort, Resource
from flask import request
import validators
from . import auth, validate_openapi_request
from . import auth
class Import(Resource):
@@ -12,9 +12,17 @@ class Import(Resource):
self.datastore = kwargs['datastore']
@auth.check_token
@validate_openapi_request('importWatches')
def post(self):
"""Import a list of watched URLs."""
"""
@api {post} /api/v1/import Import a list of watched URLs
@apiDescription Accepts a line-feed separated list of URLs to import, additionally with ?tag_uuids=(tag id), ?tag=(name), ?proxy={key}, ?dedupe=true (default true) one URL per line.
@apiExample {curl} Example usage:
curl http://localhost:5000/api/v1/import --data-binary @list-of-sites.txt -H"x-api-key:8a111a21bc2f8f1dd9b9353bbd46049a"
@apiName Import
@apiGroup Watch
@apiSuccess (200) {List} OK List of watch UUIDs added
@apiSuccess (500) {String} ERR Some other error
"""
extras = {}
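
For comparison with the curl example in the docstring above, a hedged sketch of the same import call using the requests library; the host, API key and URLs are placeholders:

# Hedged usage sketch for the /api/v1/import endpoint documented above.
# Host, API key and URL list are placeholders; requests must be installed.
import requests

API_KEY = "8a111a21bc2f8f1dd9b9353bbd46049a"  # example key from the docstring
urls = "\n".join([
    "https://example.com/page1",
    "https://example.com/page2",
])

resp = requests.post(
    "http://localhost:5000/api/v1/import",
    params={"dedupe": "true"},       # de-duplicate against existing watches
    data=urls,                       # line-feed separated list, one URL per line
    headers={"x-api-key": API_KEY},
)
resp.raise_for_status()
print(resp.json())                   # UUIDs of the watches that were added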

View File

@@ -1,7 +1,9 @@
from flask_expects_json import expects_json
from flask_restful import Resource, abort
from flask_restful import Resource
from . import auth
from flask_restful import abort, Resource
from flask import request
from . import auth, validate_openapi_request
from . import auth
from . import schema_create_notification_urls, schema_delete_notification_urls
class Notifications(Resource):
@@ -10,9 +12,19 @@ class Notifications(Resource):
self.datastore = kwargs['datastore']
@auth.check_token
@validate_openapi_request('getNotifications')
def get(self):
"""Return Notification URL List."""
"""
@api {get} /api/v1/notifications Return Notification URL List
@apiDescription Return the Notification URL List from the configuration
@apiExample {curl} Example usage:
curl http://localhost:5000/api/v1/notifications -H"x-api-key:813031b16330fe25e3780cf0325daa45"
HTTP/1.0 200
{
'notification_urls': ["notification-urls-list"]
}
@apiName Get
@apiGroup Notifications
"""
notification_urls = self.datastore.data.get('settings', {}).get('application', {}).get('notification_urls', [])
@@ -21,10 +33,18 @@ class Notifications(Resource):
}, 200
@auth.check_token
@validate_openapi_request('addNotifications')
@expects_json(schema_create_notification_urls)
def post(self):
"""Create Notification URLs."""
"""
@api {post} /api/v1/notifications Create Notification URLs
@apiDescription Add one or more notification URLs from the configuration
@apiExample {curl} Example usage:
curl http://localhost:5000/api/v1/notifications/batch -H"x-api-key:813031b16330fe25e3780cf0325daa45" -H "Content-Type: application/json" -d '{"notification_urls": ["url1", "url2"]}'
@apiName CreateBatch
@apiGroup Notifications
@apiSuccess (201) {Object[]} notification_urls List of added notification URLs
@apiError (400) {String} Invalid input
"""
json_data = request.get_json()
notification_urls = json_data.get("notification_urls", [])
@@ -49,10 +69,18 @@ class Notifications(Resource):
return {'notification_urls': added_urls}, 201
@auth.check_token
@validate_openapi_request('replaceNotifications')
@expects_json(schema_create_notification_urls)
def put(self):
"""Replace Notification URLs."""
"""
@api {put} /api/v1/notifications Replace Notification URLs
@apiDescription Replace all notification URLs with the provided list (can be empty)
@apiExample {curl} Example usage:
curl -X PUT http://localhost:5000/api/v1/notifications -H"x-api-key:813031b16330fe25e3780cf0325daa45" -H "Content-Type: application/json" -d '{"notification_urls": ["url1", "url2"]}'
@apiName Replace
@apiGroup Notifications
@apiSuccess (200) {Object[]} notification_urls List of current notification URLs
@apiError (400) {String} Invalid input
"""
json_data = request.get_json()
notification_urls = json_data.get("notification_urls", [])
@@ -72,10 +100,19 @@ class Notifications(Resource):
return {'notification_urls': clean_urls}, 200
@auth.check_token
@validate_openapi_request('deleteNotifications')
@expects_json(schema_delete_notification_urls)
def delete(self):
"""Delete Notification URLs."""
"""
@api {delete} /api/v1/notifications Delete Notification URLs
@apiDescription Deletes one or more notification URLs from the configuration
@apiExample {curl} Example usage:
curl http://localhost:5000/api/v1/notifications -X DELETE -H"x-api-key:813031b16330fe25e3780cf0325daa45" -H "Content-Type: application/json" -d '{"notification_urls": ["url1", "url2"]}'
@apiParam {String[]} notification_urls The notification URLs to delete.
@apiName Delete
@apiGroup Notifications
@apiSuccess (204) {String} OK Deleted
@apiError (400) {String} No matching notification URLs found.
"""
json_data = request.get_json()
urls_to_delete = json_data.get("notification_urls", [])
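
A hedged sketch of exercising the notification-URL endpoints documented above from Python; the host and API key are placeholders taken from the docstrings, and requests is assumed to be installed:

# Hedged sketch of driving the /api/v1/notifications endpoints shown above.
import requests

BASE = "http://localhost:5000/api/v1/notifications"
HEADERS = {"x-api-key": "813031b16330fe25e3780cf0325daa45"}  # example key from the docstrings

# Add two notification URLs (201 on success)
requests.post(BASE, headers=HEADERS,
              json={"notification_urls": ["mailto://me@example.com", "json://hooks.example.com/alert"]})

# List the currently configured URLs
print(requests.get(BASE, headers=HEADERS).json())

# Replace the whole list, then delete a single entry
requests.put(BASE, headers=HEADERS, json={"notification_urls": ["mailto://me@example.com"]})
requests.delete(BASE, headers=HEADERS, json={"notification_urls": ["mailto://me@example.com"]})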

View File

@@ -1,6 +1,6 @@
from flask_restful import Resource, abort
from flask import request
from . import auth, validate_openapi_request
from . import auth
class Search(Resource):
def __init__(self, **kwargs):
@@ -8,9 +8,21 @@ class Search(Resource):
self.datastore = kwargs['datastore']
@auth.check_token
@validate_openapi_request('searchWatches')
def get(self):
"""Search for watches by URL or title text."""
"""
@api {get} /api/v1/search Search for watches
@apiDescription Search watches by URL or title text
@apiExample {curl} Example usage:
curl "http://localhost:5000/api/v1/search?q=https://example.com/page1" -H"x-api-key:813031b16330fe25e3780cf0325daa45"
curl "http://localhost:5000/api/v1/search?q=https://example.com/page1?tag=Favourites" -H"x-api-key:813031b16330fe25e3780cf0325daa45"
curl "http://localhost:5000/api/v1/search?q=https://example.com?partial=true" -H"x-api-key:813031b16330fe25e3780cf0325daa45"
@apiName Search
@apiGroup Watch Management
@apiQuery {String} q Search query to match against watch URLs and titles
@apiQuery {String} [tag] Optional name of tag to limit results (name not UUID)
@apiQuery {String} [partial] Allow partial matching of URL query
@apiSuccess (200) {Object} JSON Object containing matched watches
"""
query = request.args.get('q', '').strip()
tag_limit = request.args.get('tag', '').strip()
from changedetectionio.strtobool import strtobool

View File

@@ -1,5 +1,5 @@
from flask_restful import Resource
from . import auth, validate_openapi_request
from . import auth
class SystemInfo(Resource):
@@ -9,9 +9,23 @@ class SystemInfo(Resource):
self.update_q = kwargs['update_q']
@auth.check_token
@validate_openapi_request('getSystemInfo')
def get(self):
"""Return system info."""
"""
@api {get} /api/v1/systeminfo Return system info
@apiDescription Return some info about the current system state
@apiExample {curl} Example usage:
curl http://localhost:5000/api/v1/systeminfo -H"x-api-key:813031b16330fe25e3780cf0325daa45"
HTTP/1.0 200
{
'queue_size': 10 ,
'overdue_watches': ["watch-uuid-list"],
'uptime': 38344.55,
'watch_count': 800,
'version': "0.40.1"
}
@apiName Get Info
@apiGroup System Information
"""
import time
overdue_watches = []

View File

@@ -1,46 +1,39 @@
from changedetectionio import queuedWatchMetaData
from changedetectionio import worker_handler
from flask_expects_json import expects_json
from flask_restful import abort, Resource
from flask import request
from . import auth
# Import schemas from __init__.py
from . import schema_tag, schema_create_tag, schema_update_tag, validate_openapi_request
from . import schema_tag, schema_create_tag, schema_update_tag
class Tag(Resource):
def __init__(self, **kwargs):
# datastore is a black box dependency
self.datastore = kwargs['datastore']
self.update_q = kwargs['update_q']
# Get information about a single tag
# curl http://localhost:5000/api/v1/tag/<string:uuid>
@auth.check_token
@validate_openapi_request('getTag')
def get(self, uuid):
"""Get data for a single tag/group, toggle notification muting, or recheck all."""
"""
@api {get} /api/v1/tag/:uuid Single tag - get data or toggle notification muting.
@apiDescription Retrieve tag information and set notification_muted status
@apiExample {curl} Example usage:
curl http://localhost:5000/api/v1/tag/cc0cfffa-f449-477b-83ea-0caafd1dc091 -H"x-api-key:813031b16330fe25e3780cf0325daa45"
curl "http://localhost:5000/api/v1/tag/cc0cfffa-f449-477b-83ea-0caafd1dc091?muted=muted" -H"x-api-key:813031b16330fe25e3780cf0325daa45"
@apiName Tag
@apiGroup Tag
@apiParam {uuid} uuid Tag unique ID.
@apiQuery {String} [muted] =`muted` or =`unmuted` , Sets the MUTE NOTIFICATIONS state
@apiSuccess (200) {String} OK When muted operation OR full JSON object of the tag
@apiSuccess (200) {JSON} TagJSON JSON Full JSON object of the tag
"""
from copy import deepcopy
tag = deepcopy(self.datastore.data['settings']['application']['tags'].get(uuid))
if not tag:
abort(404, message=f'No tag exists with the UUID of {uuid}')
if request.args.get('recheck'):
# Recheck all, including muted
# Get most overdue first
i=0
for k in sorted(self.datastore.data['watching'].items(), key=lambda item: item[1].get('last_checked', 0)):
watch_uuid = k[0]
watch = k[1]
if not watch['paused'] and tag['uuid'] not in watch['tags']:
continue
worker_handler.queue_item_async_safe(self.update_q, queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': watch_uuid}))
i+=1
return f"OK, {i} watches queued", 200
if request.args.get('muted', '') == 'muted':
self.datastore.data['settings']['application']['tags'][uuid]['notification_muted'] = True
return "OK", 200
@@ -51,9 +44,16 @@ class Tag(Resource):
return tag
@auth.check_token
@validate_openapi_request('deleteTag')
def delete(self, uuid):
"""Delete a tag/group and remove it from all watches."""
"""
@api {delete} /api/v1/tag/:uuid Delete a tag and remove it from all watches
@apiExample {curl} Example usage:
curl http://localhost:5000/api/v1/tag/cc0cfffa-f449-477b-83ea-0caafd1dc091 -X DELETE -H"x-api-key:813031b16330fe25e3780cf0325daa45"
@apiParam {uuid} uuid Tag unique ID.
@apiName DeleteTag
@apiGroup Tag
@apiSuccess (200) {String} OK Was deleted
"""
if not self.datastore.data['settings']['application']['tags'].get(uuid):
abort(400, message='No tag exists with the UUID of {}'.format(uuid))
@@ -68,10 +68,21 @@ class Tag(Resource):
return 'OK', 204
@auth.check_token
@validate_openapi_request('updateTag')
@expects_json(schema_update_tag)
def put(self, uuid):
"""Update tag information."""
"""
@api {put} /api/v1/tag/:uuid Update tag information
@apiExample {curl} Example usage:
Update (PUT)
curl http://localhost:5000/api/v1/tag/cc0cfffa-f449-477b-83ea-0caafd1dc091 -X PUT -H"x-api-key:813031b16330fe25e3780cf0325daa45" -H "Content-Type: application/json" -d '{"title": "New Tag Title"}'
@apiDescription Updates an existing tag using JSON
@apiParam {uuid} uuid Tag unique ID.
@apiName UpdateTag
@apiGroup Tag
@apiSuccess (200) {String} OK Was updated
@apiSuccess (500) {String} ERR Some other error
"""
tag = self.datastore.data['settings']['application']['tags'].get(uuid)
if not tag:
abort(404, message='No tag exists with the UUID of {}'.format(uuid))
@@ -83,10 +94,17 @@ class Tag(Resource):
@auth.check_token
@validate_openapi_request('createTag')
# Only cares for {'title': 'xxxx'}
def post(self):
"""Create a single tag/group."""
"""
@api {post} /api/v1/watch Create a single tag
@apiExample {curl} Example usage:
curl http://localhost:5000/api/v1/watch -H"x-api-key:813031b16330fe25e3780cf0325daa45" -H "Content-Type: application/json" -d '{"name": "Work related"}'
@apiName Create
@apiGroup Tag
@apiSuccess (200) {String} OK Was created
@apiSuccess (500) {String} ERR Some other error
"""
json_data = request.get_json()
title = json_data.get("title",'').strip()
@@ -104,9 +122,28 @@ class Tags(Resource):
self.datastore = kwargs['datastore']
@auth.check_token
@validate_openapi_request('listTags')
def get(self):
"""List tags/groups."""
"""
@api {get} /api/v1/tags List tags
@apiDescription Return list of available tags
@apiExample {curl} Example usage:
curl http://localhost:5000/api/v1/tags -H"x-api-key:813031b16330fe25e3780cf0325daa45"
{
"cc0cfffa-f449-477b-83ea-0caafd1dc091": {
"title": "Tech News",
"notification_muted": false,
"date_created": 1677103794
},
"e6f5fd5c-dbfe-468b-b8f3-f9d6ff5ad69b": {
"title": "Shopping",
"notification_muted": true,
"date_created": 1676662819
}
}
@apiName ListTags
@apiGroup Tag Management
@apiSuccess (200) {String} OK JSON dict
"""
result = {}
for uuid, tag in self.datastore.data['settings']['application']['tags'].items():
result[uuid] = {

View File

@@ -5,46 +5,13 @@ from flask_expects_json import expects_json
from changedetectionio import queuedWatchMetaData
from changedetectionio import worker_handler
from flask_restful import abort, Resource
from flask import request, make_response, send_from_directory
from flask import request, make_response
import validators
from . import auth
import copy
# Import schemas from __init__.py
from . import schema, schema_create_watch, schema_update_watch, validate_openapi_request
def validate_time_between_check_required(json_data):
"""
Validate that at least one time interval is specified when not using default settings.
Returns None if valid, or error message string if invalid.
Defaults to using global settings if time_between_check_use_default is not provided.
"""
# Default to using global settings if not specified
use_default = json_data.get('time_between_check_use_default', True)
# If using default settings, no validation needed
if use_default:
return None
# If not using defaults, check if time_between_check exists and has at least one non-zero value
time_check = json_data.get('time_between_check')
if not time_check:
# No time_between_check provided and not using defaults - this is an error
return "At least one time interval (weeks, days, hours, minutes, or seconds) must be specified when not using global settings."
# time_between_check exists, check if it has at least one non-zero value
if any([
(time_check.get('weeks') or 0) > 0,
(time_check.get('days') or 0) > 0,
(time_check.get('hours') or 0) > 0,
(time_check.get('minutes') or 0) > 0,
(time_check.get('seconds') or 0) > 0
]):
return None
# time_between_check exists but all values are 0 or empty - this is an error
return "At least one time interval (weeks, days, hours, minutes, or seconds) must be specified when not using global settings."
from . import schema, schema_create_watch, schema_update_watch
class Watch(Resource):
@@ -58,9 +25,23 @@ class Watch(Resource):
# @todo - version2 - ?muted and ?paused should be able to be called together, return the watch struct not "OK"
# ?recheck=true
@auth.check_token
@validate_openapi_request('getWatch')
def get(self, uuid):
"""Get information about a single watch, recheck, pause, or mute."""
"""
@api {get} /api/v1/watch/:uuid Single watch - get data, recheck, pause, mute.
@apiDescription Retrieve watch information and set muted/paused status
@apiExample {curl} Example usage:
curl http://localhost:5000/api/v1/watch/cc0cfffa-f449-477b-83ea-0caafd1dc091 -H"x-api-key:813031b16330fe25e3780cf0325daa45"
curl "http://localhost:5000/api/v1/watch/cc0cfffa-f449-477b-83ea-0caafd1dc091?muted=unmuted" -H"x-api-key:813031b16330fe25e3780cf0325daa45"
curl "http://localhost:5000/api/v1/watch/cc0cfffa-f449-477b-83ea-0caafd1dc091?paused=unpaused" -H"x-api-key:813031b16330fe25e3780cf0325daa45"
@apiName Watch
@apiGroup Watch
@apiParam {uuid} uuid Watch unique ID.
@apiQuery {Boolean} [recheck] Recheck this watch `recheck=1`
@apiQuery {String} [paused] =`paused` or =`unpaused` , Sets the PAUSED state
@apiQuery {String} [muted] =`muted` or =`unmuted` , Sets the MUTE NOTIFICATIONS state
@apiSuccess (200) {String} OK When paused/muted/recheck operation OR full JSON object of the watch
@apiSuccess (200) {JSON} WatchJSON JSON Full JSON object of the watch
"""
from copy import deepcopy
watch = deepcopy(self.datastore.data['watching'].get(uuid))
if not watch:
@@ -88,14 +69,19 @@ class Watch(Resource):
# attr .last_changed will check for the last written text snapshot on change
watch['last_changed'] = watch.last_changed
watch['viewed'] = watch.viewed
watch['link'] = watch.link,
return watch
@auth.check_token
@validate_openapi_request('deleteWatch')
def delete(self, uuid):
"""Delete a watch and related history."""
"""
@api {delete} /api/v1/watch/:uuid Delete a watch and related history
@apiExample {curl} Example usage:
curl http://localhost:5000/api/v1/watch/cc0cfffa-f449-477b-83ea-0caafd1dc091 -X DELETE -H"x-api-key:813031b16330fe25e3780cf0325daa45"
@apiParam {uuid} uuid Watch unique ID.
@apiName Delete
@apiGroup Watch
@apiSuccess (200) {String} OK Was deleted
"""
if not self.datastore.data['watching'].get(uuid):
abort(400, message='No watch exists with the UUID of {}'.format(uuid))
@@ -103,10 +89,21 @@ class Watch(Resource):
return 'OK', 204
@auth.check_token
@validate_openapi_request('updateWatch')
@expects_json(schema_update_watch)
def put(self, uuid):
"""Update watch information."""
"""
@api {put} /api/v1/watch/:uuid Update watch information
@apiExample {curl} Example usage:
Update (PUT)
curl http://localhost:5000/api/v1/watch/cc0cfffa-f449-477b-83ea-0caafd1dc091 -X PUT -H"x-api-key:813031b16330fe25e3780cf0325daa45" -H "Content-Type: application/json" -d '{"url": "https://my-nice.com" , "tag": "new list"}'
@apiDescription Updates an existing watch using JSON, accepts the same structure as returned in <a href="#api-Watch-Watch">get single watch information</a>
@apiParam {uuid} uuid Watch unique ID.
@apiName Update a watch
@apiGroup Watch
@apiSuccess (200) {String} OK Was updated
@apiSuccess (500) {String} ERR Some other error
"""
watch = self.datastore.data['watching'].get(uuid)
if not watch:
abort(404, message='No watch exists with the UUID of {}'.format(uuid))
@@ -116,11 +113,6 @@ class Watch(Resource):
if not request.json.get('proxy') in plist:
return "Invalid proxy choice, currently supported proxies are '{}'".format(', '.join(plist)), 400
# Validate time_between_check when not using defaults
validation_error = validate_time_between_check_required(request.json)
if validation_error:
return validation_error, 400
watch.update(request.json)
return "OK", 200
@@ -134,9 +126,22 @@ class WatchHistory(Resource):
# Get a list of available history for a watch by UUID
# curl http://localhost:5000/api/v1/watch/<string:uuid>/history
@auth.check_token
@validate_openapi_request('getWatchHistory')
def get(self, uuid):
"""Get a list of all historical snapshots available for a watch."""
"""
@api {get} /api/v1/watch/<string:uuid>/history Get a list of all historical snapshots available for a watch
@apiDescription Requires `uuid`, returns list
@apiExample {curl} Example usage:
curl http://localhost:5000/api/v1/watch/cc0cfffa-f449-477b-83ea-0caafd1dc091/history -H"x-api-key:813031b16330fe25e3780cf0325daa45" -H "Content-Type: application/json"
{
"1676649279": "/tmp/data/6a4b7d5c-fee4-4616-9f43-4ac97046b595/cb7e9be8258368262246910e6a2a4c30.txt",
"1677092785": "/tmp/data/6a4b7d5c-fee4-4616-9f43-4ac97046b595/e20db368d6fc633e34f559ff67bb4044.txt",
"1677103794": "/tmp/data/6a4b7d5c-fee4-4616-9f43-4ac97046b595/02efdd37dacdae96554a8cc85dc9c945.txt"
}
@apiName Get list of available stored snapshots for watch
@apiGroup Watch History
@apiSuccess (200) {String} OK
@apiSuccess (404) {String} ERR Not found
"""
watch = self.datastore.data['watching'].get(uuid)
if not watch:
abort(404, message='No watch exists with the UUID of {}'.format(uuid))
@@ -149,9 +154,18 @@ class WatchSingleHistory(Resource):
self.datastore = kwargs['datastore']
@auth.check_token
@validate_openapi_request('getWatchSnapshot')
def get(self, uuid, timestamp):
"""Get single snapshot from watch."""
"""
@api {get} /api/v1/watch/<string:uuid>/history/<int:timestamp> Get single snapshot from watch
@apiDescription Requires watch `uuid` and `timestamp`. `timestamp` of "`latest`" for latest available snapshot, or <a href="#api-Watch_History-Get_list_of_available_stored_snapshots_for_watch">use the list returned here</a>
@apiExample {curl} Example usage:
curl http://localhost:5000/api/v1/watch/cc0cfffa-f449-477b-83ea-0caafd1dc091/history/1677092977 -H"x-api-key:813031b16330fe25e3780cf0325daa45" -H "Content-Type: application/json"
@apiName Get single snapshot content
@apiGroup Watch History
@apiParam {String} [html] Optional Set to =1 to return the last HTML (only stores last 2 snapshots, use `latest` as timestamp)
@apiSuccess (200) {String} OK
@apiSuccess (404) {String} ERR Not found
"""
watch = self.datastore.data['watching'].get(uuid)
if not watch:
abort(404, message=f"No watch exists with the UUID of {uuid}")
@@ -177,39 +191,6 @@ class WatchSingleHistory(Resource):
return response
class WatchFavicon(Resource):
def __init__(self, **kwargs):
# datastore is a black box dependency
self.datastore = kwargs['datastore']
@auth.check_token
@validate_openapi_request('getWatchFavicon')
def get(self, uuid):
"""Get favicon for a watch."""
watch = self.datastore.data['watching'].get(uuid)
if not watch:
abort(404, message=f"No watch exists with the UUID of {uuid}")
favicon_filename = watch.get_favicon_filename()
if favicon_filename:
try:
import magic
mime = magic.from_file(
os.path.join(watch.watch_data_dir, favicon_filename),
mime=True
)
except ImportError:
# Fallback, no python-magic
import mimetypes
mime, encoding = mimetypes.guess_type(favicon_filename)
response = make_response(send_from_directory(watch.watch_data_dir, favicon_filename))
response.headers['Content-type'] = mime
response.headers['Cache-Control'] = 'max-age=300, must-revalidate' # Cache for 5 minutes, then revalidate
return response
abort(404, message=f'No Favicon available for {uuid}')
class CreateWatch(Resource):
def __init__(self, **kwargs):
@@ -218,10 +199,18 @@ class CreateWatch(Resource):
self.update_q = kwargs['update_q']
@auth.check_token
@validate_openapi_request('createWatch')
@expects_json(schema_create_watch)
def post(self):
"""Create a single watch."""
"""
@api {post} /api/v1/watch Create a single watch
@apiDescription Requires at least `url` set, can accept the same structure as <a href="#api-Watch-Watch">get single watch information</a> to create.
@apiExample {curl} Example usage:
curl http://localhost:5000/api/v1/watch -H"x-api-key:813031b16330fe25e3780cf0325daa45" -H "Content-Type: application/json" -d '{"url": "https://my-nice.com" , "tag": "nice list"}'
@apiName Create
@apiGroup Watch
@apiSuccess (200) {String} OK Was created
@apiSuccess (500) {String} ERR Some other error
"""
json_data = request.get_json()
url = json_data['url'].strip()
@@ -236,11 +225,6 @@ class CreateWatch(Resource):
if not json_data.get('proxy') in plist:
return "Invalid proxy choice, currently supported proxies are '{}'".format(', '.join(plist)), 400
# Validate time_between_check when not using defaults
validation_error = validate_time_between_check_required(json_data)
if validation_error:
return validation_error, 400
extras = copy.deepcopy(json_data)
# Because we renamed 'tag' to 'tags' but don't want to change the API (can do this in v2 of the API)
@@ -259,9 +243,35 @@ class CreateWatch(Resource):
return "Invalid or unsupported URL", 400
@auth.check_token
@validate_openapi_request('listWatches')
def get(self):
"""List watches."""
"""
@api {get} /api/v1/watch List watches
@apiDescription Return concise list of available watches and some very basic info
@apiExample {curl} Example usage:
curl http://localhost:5000/api/v1/watch -H"x-api-key:813031b16330fe25e3780cf0325daa45"
{
"6a4b7d5c-fee4-4616-9f43-4ac97046b595": {
"last_changed": 1677103794,
"last_checked": 1677103794,
"last_error": false,
"title": "",
"url": "http://www.quotationspage.com/random.php"
},
"e6f5fd5c-dbfe-468b-b8f3-f9d6ff5ad69b": {
"last_changed": 0,
"last_checked": 1676662819,
"last_error": false,
"title": "QuickLook",
"url": "https://github.com/QL-Win/QuickLook/tags"
}
}
@apiParam {String} [recheck_all] Optional Set to =1 to force recheck of all watches
@apiParam {String} [tag] Optional name of tag to limit results
@apiName ListWatches
@apiGroup Watch Management
@apiSuccess (200) {String} OK JSON dict
"""
list = {}
tag_limit = request.args.get('tag', '').lower()
@@ -275,8 +285,6 @@ class CreateWatch(Resource):
'last_changed': watch.last_changed,
'last_checked': watch['last_checked'],
'last_error': watch['last_error'],
'link': watch.link,
'page_title': watch['page_title'],
'title': watch['title'],
'url': watch['url'],
'viewed': watch.viewed
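
The Watch API hunk above includes a validate_time_between_check_required helper that accepts a payload only when it either relies on the global schedule or supplies at least one non-zero interval. Illustrative payloads (hypothetical examples, not API output):

# Example request bodies for the time_between_check validation shown above.
uses_global_schedule = {                 # accepted: defaults to the global settings
    "url": "https://example.com",
}

explicit_schedule = {                    # accepted: a non-zero interval is supplied
    "url": "https://example.com",
    "time_between_check_use_default": False,
    "time_between_check": {"hours": 1},
}

missing_schedule = {                     # rejected with HTTP 400: no interval and not using defaults
    "url": "https://example.com",
    "time_between_check_use_default": False,
    "time_between_check": {"hours": 0, "minutes": 0},
}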

View File

@@ -1,10 +1,4 @@
import copy
import yaml
import functools
from flask import request, abort
from loguru import logger
from openapi_core import OpenAPI
from openapi_core.contrib.flask import FlaskOpenAPIRequest
from . import api_schema
from ..model import watch_base
@@ -14,7 +8,6 @@ schema = api_schema.build_watch_json_schema(watch_base_config)
schema_create_watch = copy.deepcopy(schema)
schema_create_watch['required'] = ['url']
del schema_create_watch['properties']['last_viewed']
schema_update_watch = copy.deepcopy(schema)
schema_update_watch['additionalProperties'] = False
@@ -32,47 +25,9 @@ schema_create_notification_urls['required'] = ['notification_urls']
schema_delete_notification_urls = copy.deepcopy(schema_notification_urls)
schema_delete_notification_urls['required'] = ['notification_urls']
@functools.cache
def get_openapi_spec():
import os
spec_path = os.path.join(os.path.dirname(__file__), '../../docs/api-spec.yaml')
with open(spec_path, 'r') as f:
spec_dict = yaml.safe_load(f)
_openapi_spec = OpenAPI.from_dict(spec_dict)
return _openapi_spec
def validate_openapi_request(operation_id):
"""Decorator to validate incoming requests against OpenAPI spec."""
def decorator(f):
@functools.wraps(f)
def wrapper(*args, **kwargs):
try:
# Skip OpenAPI validation for GET requests since they don't have request bodies
if request.method.upper() != 'GET':
spec = get_openapi_spec()
openapi_request = FlaskOpenAPIRequest(request)
result = spec.unmarshal_request(openapi_request)
if result.errors:
from werkzeug.exceptions import BadRequest
error_details = []
for error in result.errors:
error_details.append(str(error))
raise BadRequest(f"OpenAPI validation failed: {error_details}")
except BadRequest:
# Re-raise BadRequest exceptions (validation failures)
raise
except Exception as e:
# If OpenAPI spec loading fails, log but don't break existing functionality
logger.critical(f"OpenAPI validation warning for {operation_id}: {e}")
abort(500)
return f(*args, **kwargs)
return wrapper
return decorator
# Import all API resources
from .Watch import Watch, WatchHistory, WatchSingleHistory, CreateWatch, WatchFavicon
from .Watch import Watch, WatchHistory, WatchSingleHistory, CreateWatch
from .Tags import Tags, Tag
from .Import import Import
from .SystemInfo import SystemInfo
from .Notifications import Notifications

View File

@@ -78,13 +78,6 @@ def build_watch_json_schema(d):
]:
schema['properties'][v]['anyOf'].append({'type': 'string', "maxLength": 5000})
for v in ['last_viewed']:
schema['properties'][v] = {
"type": "integer",
"description": "Unix timestamp in seconds of the last time the watch was viewed.",
"minimum": 0
}
# None or Boolean
schema['properties']['track_ldjson_price_data']['anyOf'].append({'type': 'boolean'})
@@ -119,12 +112,6 @@ def build_watch_json_schema(d):
schema['properties']['time_between_check'] = build_time_between_check_json_schema()
schema['properties']['time_between_check_use_default'] = {
"type": "boolean",
"default": True,
"description": "Whether to use global settings for time between checks - defaults to true if not set"
}
schema['properties']['browser_steps'] = {
"anyOf": [
{
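
The hunk above concerns two watch-schema properties, last_viewed and time_between_check_use_default. A hedged sketch of what those definitions express, checked here with the standalone jsonschema package (an assumption for illustration; the API wires its schema through flask_expects_json):

# Validate a small payload against a fragment equivalent to the properties above.
import jsonschema

fragment = {
    "type": "object",
    "properties": {
        "last_viewed": {"type": "integer", "minimum": 0},
        "time_between_check_use_default": {"type": "boolean", "default": True},
    },
}

jsonschema.validate({"last_viewed": 1677103794, "time_between_check_use_default": False}, fragment)
print("payload matches the schema fragment")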

View File

@@ -7,7 +7,6 @@ from changedetectionio.flask_app import watch_check_update
import asyncio
import importlib
import os
import queue
import time
from loguru import logger
@@ -38,23 +37,13 @@ async def async_update_worker(worker_id, q, notification_q, app, datastore):
watch = None
try:
# Use native janus async interface - no threads needed!
queued_item_data = await asyncio.wait_for(q.async_get(), timeout=1.0)
# Use asyncio wait_for to make queue.get() cancellable
queued_item_data = await asyncio.wait_for(q.get(), timeout=1.0)
except asyncio.TimeoutError:
# No jobs available, continue loop
continue
except Exception as e:
logger.critical(f"CRITICAL: Worker {worker_id} failed to get queue item: {type(e).__name__}: {e}")
# Log queue health for debugging
try:
queue_size = q.qsize()
is_empty = q.empty()
logger.critical(f"CRITICAL: Worker {worker_id} queue health - size: {queue_size}, empty: {is_empty}")
except Exception as health_e:
logger.critical(f"CRITICAL: Worker {worker_id} queue health check failed: {health_e}")
logger.error(f"Worker {worker_id} error getting queue item: {e}")
await asyncio.sleep(0.1)
continue
@@ -310,6 +299,15 @@ async def async_update_worker(worker_id, q, notification_q, app, datastore):
continue
if process_changedetection_results:
# Extract title if needed
if datastore.data['settings']['application'].get('extract_title_as_title') or watch['extract_title_as_title']:
if not watch['title'] or not len(watch['title']):
try:
update_obj['title'] = html_tools.extract_element(find='title', html_content=update_handler.fetcher.content)
logger.info(f"UUID: {uuid} Extract <title> updated title to '{update_obj['title']}")
except Exception as e:
logger.warning(f"UUID: {uuid} Extract <title> as watch title was enabled, but couldn't find a <title>.")
try:
datastore.update_watch(uuid=uuid, update_obj=update_obj)
@@ -348,14 +346,6 @@ async def async_update_worker(worker_id, q, notification_q, app, datastore):
# Always record attempt count
count = watch.get('check_count', 0) + 1
# Always record page title (used in notifications, and can change even when the content is the same)
try:
page_title = html_tools.extract_title(data=update_handler.fetcher.content)
logger.debug(f"UUID: {uuid} Page <title> is '{page_title}'")
datastore.update_watch(uuid=uuid, update_obj={'page_title': page_title})
except Exception as e:
logger.warning(f"UUID: {uuid} Exception when extracting <title> - {str(e)}")
# Record server header
try:
server_header = update_handler.fetcher.headers.get('server', '').strip().lower()[:255]
@@ -363,12 +353,6 @@ async def async_update_worker(worker_id, q, notification_q, app, datastore):
except Exception as e:
pass
# Store favicon if necessary
if update_handler.fetcher.favicon_blob and update_handler.fetcher.favicon_blob.get('base64'):
watch.bump_favicon(url=update_handler.fetcher.favicon_blob.get('url'),
favicon_base_64=update_handler.fetcher.favicon_blob.get('base64')
)
datastore.update_watch(uuid=uuid, update_obj={'fetch_time': round(time.time() - fetch_start_time, 3),
'check_count': count})
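
Both sides of the worker hunk above poll the job queue with a one-second timeout so the coroutine stays responsive to shutdown. A standalone sketch of that pattern using a plain asyncio.Queue (an assumption made for a runnable example; the project itself uses a janus-backed queue):

# Cancellable queue polling: wait_for() with a short timeout keeps the loop responsive.
import asyncio

async def worker(worker_id: int, q: asyncio.Queue):
    while True:
        try:
            item = await asyncio.wait_for(q.get(), timeout=1.0)
        except asyncio.TimeoutError:
            continue                 # nothing queued, poll again
        if item is None:             # sentinel used here to stop the sketch
            break
        print(f"worker {worker_id} processing {item!r}")
        q.task_done()

async def main():
    q = asyncio.Queue()
    for uuid in ("watch-1", "watch-2"):
        q.put_nowait(uuid)
    q.put_nowait(None)
    await worker(1, q)

asyncio.run(main())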

View File

@@ -1 +0,0 @@
# Browser notifications blueprint

View File

@@ -1,76 +0,0 @@
from flask import Blueprint, jsonify, request
from loguru import logger
def construct_blueprint(datastore):
browser_notifications_blueprint = Blueprint('browser_notifications', __name__)
@browser_notifications_blueprint.route("/test", methods=['POST'])
def test_browser_notification():
"""Send a test browser notification using the apprise handler"""
try:
from changedetectionio.notification.apprise_plugin.custom_handlers import apprise_browser_notification_handler
# Check if there are any subscriptions
browser_subscriptions = datastore.data.get('settings', {}).get('application', {}).get('browser_subscriptions', [])
if not browser_subscriptions:
return jsonify({'success': False, 'message': 'No browser subscriptions found'}), 404
# Get notification data from request or use defaults
data = request.get_json() or {}
title = data.get('title', 'Test Notification')
body = data.get('body', 'This is a test notification from changedetection.io')
# Use the apprise handler directly
success = apprise_browser_notification_handler(
body=body,
title=title,
notify_type='info',
meta={'url': 'browser://test'}
)
if success:
subscription_count = len(browser_subscriptions)
return jsonify({
'success': True,
'message': f'Test notification sent successfully to {subscription_count} subscriber(s)'
})
else:
return jsonify({'success': False, 'message': 'Failed to send test notification'}), 500
except ImportError:
logger.error("Browser notification handler not available")
return jsonify({'success': False, 'message': 'Browser notification handler not available'}), 500
except Exception as e:
logger.error(f"Failed to send test browser notification: {e}")
return jsonify({'success': False, 'message': f'Error: {str(e)}'}), 500
@browser_notifications_blueprint.route("/clear", methods=['POST'])
def clear_all_browser_notifications():
"""Clear all browser notification subscriptions from the datastore"""
try:
# Get current subscription count
browser_subscriptions = datastore.data.get('settings', {}).get('application', {}).get('browser_subscriptions', [])
subscription_count = len(browser_subscriptions)
# Clear all subscriptions
if 'settings' not in datastore.data:
datastore.data['settings'] = {}
if 'application' not in datastore.data['settings']:
datastore.data['settings']['application'] = {}
datastore.data['settings']['application']['browser_subscriptions'] = []
datastore.needs_write = True
logger.info(f"Cleared {subscription_count} browser notification subscriptions")
return jsonify({
'success': True,
'message': f'Cleared {subscription_count} browser notification subscription(s)'
})
except Exception as e:
logger.error(f"Failed to clear all browser notifications: {e}")
return jsonify({'success': False, 'message': f'Clear all failed: {str(e)}'}), 500
return browser_notifications_blueprint

View File

@@ -108,13 +108,10 @@ def construct_blueprint(datastore: ChangeDetectionStore):
fe.link(link=diff_link)
# Same logic as watch-overview.html
if datastore.data['settings']['application']['ui'].get('use_page_title_in_list') or watch.get('use_page_title_in_list'):
watch_label = watch.label
else:
watch_label = watch.get('url')
# @todo watch should be a getter - watch.get('title') (internally if URL else..)
fe.title(title=watch_label)
watch_title = watch.get('title') if watch.get('title') else watch.get('url')
fe.title(title=watch_title)
try:
html_diff = diff.render_diff(previous_version_file_contents=watch.get_history_snapshot(dates[-2]),
@@ -130,7 +127,7 @@ def construct_blueprint(datastore: ChangeDetectionStore):
# @todo User could decide if <link> goes to the diff page, or to the watch link
rss_template = "<html><body>\n<h4><a href=\"{{watch_url}}\">{{watch_title}}</a></h4>\n<p>{{html_diff}}</p>\n</body></html>\n"
content = jinja_render(template_str=rss_template, watch_title=watch_label, html_diff=html_diff, watch_url=watch.link)
content = jinja_render(template_str=rss_template, watch_title=watch_title, html_diff=html_diff, watch_url=watch.link)
# Out of range chars could also break feedgen
if scan_invalid_chars_in_rss(content):

View File

@@ -1,7 +1,7 @@
{% extends 'base.html' %}
{% block content %}
{% from '_helpers.html' import render_field, render_checkbox_field, render_button, render_time_schedule_form, render_ternary_field %}
{% from '_helpers.html' import render_field, render_checkbox_field, render_button, render_time_schedule_form %}
{% from '_common_fields.html' import render_common_settings_form %}
<script>
const notification_base_url="{{url_for('ui.ui_notification.ajax_callback_send_notification_test', mode="global-settings")}}";
@@ -75,10 +75,18 @@
<div class="pure-control-group">
{{ render_checkbox_field(form.application.form.rss_hide_muted_watches) }}
</div>
<div class="pure-control-group">
{{ render_field(form.application.form.pager_size) }}
<span class="pure-form-message-inline">Number of items per page in the watch overview list, 0 to disable.</span>
</div>
<div class="pure-control-group">
{{ render_field(form.application.form.rss_content_format) }}
<span class="pure-form-message-inline">Love RSS? Does your reader support HTML? Set it here</span>
</div>
<div class="pure-control-group">
{{ render_checkbox_field(form.application.form.extract_title_as_title) }}
<span class="pure-form-message-inline">Note: This will automatically apply to all existing watches.</span>
</div>
<div class="pure-control-group">
{{ render_checkbox_field(form.application.form.empty_pages_are_a_change) }}
<span class="pure-form-message-inline">When a request returns no content, or the HTML does not contain any text, is this considered a change?</span>
@@ -195,7 +203,7 @@ nav
<div class="tab-pane-inner" id="api">
<h4>API Access</h4>
<p>Drive your changedetection.io via API, More about <a href="https://changedetection.io/docs/api_v1/index.html">API access and examples here</a>.</p>
<p>Drive your changedetection.io via API, More about <a href="https://github.com/dgtlmoon/changedetection.io/wiki/API-Reference">API access here</a></p>
<div class="pure-control-group">
{{ render_checkbox_field(form.application.form.api_access_token_enabled) }}
@@ -248,18 +256,6 @@ nav
{{ render_checkbox_field(form.application.form.ui.form.socket_io_enabled, class="socket_io_enabled") }}
<span class="pure-form-message-inline">Realtime UI Updates Enabled - (Restart required if this is changed)</span>
</div>
<div class="pure-control-group">
{{ render_checkbox_field(form.application.form.ui.form.favicons_enabled, class="") }}
<span class="pure-form-message-inline">Enable or Disable Favicons next to the watch list</span>
</div>
<div class="pure-control-group">
{{ render_checkbox_field(form.application.form.ui.use_page_title_in_list) }}
</div>
<div class="pure-control-group">
{{ render_field(form.application.form.pager_size) }}
<span class="pure-form-message-inline">Number of items per page in the watch overview list, 0 to disable.</span>
</div>
</div>
<div class="tab-pane-inner" id="proxies">
<div id="recommended-proxy">
@@ -323,8 +319,8 @@ nav
<div id="actions">
<div class="pure-control-group">
{{ render_button(form.save_button) }}
<a href="{{url_for('watchlist.index')}}" class="pure-button button-cancel">Back</a>
<a href="{{url_for('ui.clear_all_history')}}" class="pure-button button-error">Clear Snapshot History</a>
<a href="{{url_for('watchlist.index')}}" class="pure-button button-small button-cancel">Back</a>
<a href="{{url_for('ui.clear_all_history')}}" class="pure-button button-small button-error">Clear Snapshot History</a>
</div>
</div>
</form>

View File

@@ -1,6 +1,6 @@
{% extends 'base.html' %}
{% block content %}
{% from '_helpers.html' import render_field, render_checkbox_field, render_button, render_ternary_field %}
{% from '_helpers.html' import render_field, render_checkbox_field, render_button %}
{% from '_common_fields.html' import render_common_settings_form %}
<script>
const notification_base_url="{{url_for('ui.ui_notification.ajax_callback_send_notification_test', mode="group-settings")}}";
@@ -64,7 +64,7 @@
<div class="tab-pane-inner" id="notifications">
<fieldset>
<div class="pure-control-group inline-radio">
{{ render_ternary_field(form.notification_muted, BooleanField=True) }}
{{ render_checkbox_field(form.notification_muted) }}
</div>
{% if 1 %}
<div class="pure-control-group inline-radio">

View File

@@ -242,7 +242,6 @@ def construct_blueprint(datastore: ChangeDetectionStore, update_q, queuedWatchMe
'available_timezones': sorted(available_timezones()),
'browser_steps_config': browser_step_ui_config,
'emailprefix': os.getenv('NOTIFICATION_MAIL_BUTTON_PREFIX', False),
'extra_classes': 'checking-now' if worker_handler.is_watch_running(uuid) else '',
'extra_notification_token_placeholder_info': datastore.get_unique_notification_token_placeholders_available(),
'extra_processor_config': form.extra_tab_content(),
'extra_title': f" - Edit - {watch.label}",

View File

@@ -1,7 +1,8 @@
from flask import Blueprint, request, redirect, url_for, flash, render_template, make_response, send_from_directory, abort
from flask_login import current_user
import os
import time
from loguru import logger
from copy import deepcopy
from changedetectionio.store import ChangeDetectionStore
from changedetectionio.auth_decorator import login_optionally_required
@@ -77,46 +78,9 @@ def construct_blueprint(datastore: ChangeDetectionStore, update_q, queuedWatchMe
return output
@views_blueprint.route("/diff/<string:uuid>", methods=['POST'])
@views_blueprint.route("/diff/<string:uuid>", methods=['GET', 'POST'])
@login_optionally_required
def diff_history_page_build_report(uuid):
from changedetectionio import forms
# More for testing, possible to return the first/only
if uuid == 'first':
uuid = list(datastore.data['watching'].keys()).pop()
try:
watch = datastore.data['watching'][uuid]
except KeyError:
flash("No history found for the specified link, bad link?", "error")
return redirect(url_for('watchlist.index'))
# For submission of requesting an extract
extract_form = forms.extractDataForm(formdata=request.form,
data={'extract_regex': request.form.get('extract_regex', '')}
)
if not extract_form.validate():
flash("An error occurred, please see below.", "error")
return _render_diff_template(uuid, extract_form)
else:
extract_regex = request.form.get('extract_regex', '').strip()
output = watch.extract_regex_from_all_history(extract_regex)
if output:
watch_dir = os.path.join(datastore.datastore_path, uuid)
response = make_response(send_from_directory(directory=watch_dir, path=output, as_attachment=True))
response.headers['Content-type'] = 'text/csv'
response.headers['Cache-Control'] = 'no-cache, no-store, must-revalidate'
response.headers['Pragma'] = 'no-cache'
response.headers['Expires'] = "0"
return response
flash('No matches found while scanning all of the watch history for that RegEx.', 'error')
return redirect(url_for('ui.ui_views.diff_history_page', uuid=uuid) + '#extract')
def _render_diff_template(uuid, extract_form=None):
"""Helper function to render the diff template with all required data"""
def diff_history_page(uuid):
from changedetectionio import forms
# More for testing, possible to return the first/only
@@ -130,36 +94,62 @@ def construct_blueprint(datastore: ChangeDetectionStore, update_q, queuedWatchMe
flash("No history found for the specified link, bad link?", "error")
return redirect(url_for('watchlist.index'))
# Use provided form or create a new one
if extract_form is None:
extract_form = forms.extractDataForm(formdata=request.form,
data={'extract_regex': request.form.get('extract_regex', '')}
)
# For submission of requesting an extract
extract_form = forms.extractDataForm(request.form)
if request.method == 'POST':
if not extract_form.validate():
flash("An error occurred, please see below.", "error")
else:
extract_regex = request.form.get('extract_regex').strip()
output = watch.extract_regex_from_all_history(extract_regex)
if output:
watch_dir = os.path.join(datastore.datastore_path, uuid)
response = make_response(send_from_directory(directory=watch_dir, path=output, as_attachment=True))
response.headers['Content-type'] = 'text/csv'
response.headers['Cache-Control'] = 'no-cache, no-store, must-revalidate'
response.headers['Pragma'] = 'no-cache'
response.headers['Expires'] = 0
return response
flash('Nothing matches that RegEx', 'error')
redirect(url_for('ui_views.diff_history_page', uuid=uuid)+'#extract')
history = watch.history
dates = list(history.keys())
# If a "from_version" was requested, then find it (or the closest one)
# Also set "from version" to be the closest version to the one that was last viewed.
if len(dates) < 2:
flash("Not enough saved change detection snapshots to produce a report.", "error")
return redirect(url_for('watchlist.index'))
best_last_viewed_timestamp = watch.get_from_version_based_on_last_viewed
from_version_timestamp = best_last_viewed_timestamp if best_last_viewed_timestamp else dates[-2]
from_version = request.args.get('from_version', from_version_timestamp )
# Save the current newest history as the most recently viewed
datastore.set_last_viewed(uuid, time.time())
# Use the current one if nothing was specified
to_version = request.args.get('to_version', str(dates[-1]))
# Read as binary and force decode as UTF-8
# Windows may fail to decode in Python if we just use 'r' mode (chardet decode exception)
from_version = request.args.get('from_version')
from_version_index = -2 # second newest
if from_version and from_version in dates:
from_version_index = dates.index(from_version)
else:
from_version = dates[from_version_index]
try:
to_version_file_contents = watch.get_history_snapshot(timestamp=to_version)
from_version_file_contents = watch.get_history_snapshot(dates[from_version_index])
except Exception as e:
logger.error(f"Unable to read watch history to-version for version {to_version}: {str(e)}")
to_version_file_contents = f"Unable to read to-version at {to_version}.\n"
from_version_file_contents = f"Unable to read to-version at index {dates[from_version_index]}.\n"
to_version = request.args.get('to_version')
to_version_index = -1
if to_version and to_version in dates:
to_version_index = dates.index(to_version)
else:
to_version = dates[to_version_index]
try:
from_version_file_contents = watch.get_history_snapshot(timestamp=from_version)
to_version_file_contents = watch.get_history_snapshot(dates[to_version_index])
except Exception as e:
logger.error(f"Unable to read watch history from-version for version {from_version}: {str(e)}")
from_version_file_contents = f"Unable to read to-version {from_version}.\n"
to_version_file_contents = "Unable to read to-version at index{}.\n".format(dates[to_version_index])
screenshot_url = watch.get_screenshot()
@@ -173,9 +163,7 @@ def construct_blueprint(datastore: ChangeDetectionStore, update_q, queuedWatchMe
if datastore.data['settings']['application'].get('password') or os.getenv("SALTED_PASS", False):
password_enabled_and_share_is_off = not datastore.data['settings']['application'].get('shared_diff_access')
datastore.set_last_viewed(uuid, time.time())
return render_template("diff.html",
output = render_template("diff.html",
current_diff_url=watch['url'],
from_version=str(from_version),
to_version=str(to_version),
@@ -198,10 +186,7 @@ def construct_blueprint(datastore: ChangeDetectionStore, update_q, queuedWatchMe
watch_a=watch
)
@views_blueprint.route("/diff/<string:uuid>", methods=['GET'])
@login_optionally_required
def diff_history_page(uuid):
return _render_diff_template(uuid)
return output
@views_blueprint.route("/form/add/quickwatch", methods=['POST'])
@login_optionally_required

View File

@@ -44,16 +44,12 @@ def construct_blueprint(datastore: ChangeDetectionStore, update_q, queuedWatchMe
# Sort by last_changed and add the uuid which is usually the key..
sorted_watches = []
with_errors = request.args.get('with_errors') == "1"
unread_only = request.args.get('unread') == "1"
errored_count = 0
search_q = request.args.get('q').strip().lower() if request.args.get('q') else False
for uuid, watch in datastore.data['watching'].items():
if with_errors and not watch.get('last_error'):
continue
if unread_only and (watch.viewed or watch.last_changed == 0) :
continue
if active_tag_uuid and not active_tag_uuid in watch['tags']:
continue
if watch.get('last_error'):

View File

@@ -4,7 +4,6 @@
<script src="{{url_for('static_content', group='js', filename='jquery-3.6.0.min.js')}}"></script>
<script src="{{url_for('static_content', group='js', filename='watch-overview.js')}}" defer></script>
<script>let nowtimeserver={{ now_time_server }};</script>
<script>let favicon_baseURL="{{ url_for('static_content', group='favicon', filename="PLACEHOLDER")}}";</script>
<script>
// Initialize Feather icons after the page loads
document.addEventListener('DOMContentLoaded', function() {
@@ -81,23 +80,16 @@ document.addEventListener('DOMContentLoaded', function() {
{%- if any_has_restock_price_processor -%}
{%- set cols_required = cols_required + 1 -%}
{%- endif -%}
{%- set ui_settings = datastore.data['settings']['application']['ui'] -%}
<div id="watch-table-wrapper">
{%- set table_classes = [
'favicon-enabled' if 'favicons_enabled' not in ui_settings or ui_settings['favicons_enabled'] else 'favicon-not-enabled',
] -%}
<table class="pure-table pure-table-striped watch-table {{ table_classes | reject('equalto', '') | join(' ') }}">
<table class="pure-table pure-table-striped watch-table">
<thead>
<tr>
{%- set link_order = "desc" if sort_order == 'asc' else "asc" -%}
{%- set arrow_span = "" -%}
<th><input style="vertical-align: middle" type="checkbox" id="check-all" > <a class="{{ 'active '+link_order if sort_attribute == 'date_created' else 'inactive' }}" href="{{url_for('watchlist.index', sort='date_created', order=link_order, tag=active_tag_uuid)}}"># <span class='arrow {{link_order}}'></span></a></th>
<th>
<a class="{{ 'active '+link_order if sort_attribute == 'paused' else 'inactive' }}" href="{{url_for('watchlist.index', sort='paused', order=link_order, tag=active_tag_uuid)}}"><i data-feather="pause" style="vertical-align: bottom; width: 14px; height: 14px; margin-right: 4px;"></i><span class='arrow {{link_order}}'></span></a>
&nbsp;
<a class="{{ 'active '+link_order if sort_attribute == 'notification_muted' else 'inactive' }}" href="{{url_for('watchlist.index', sort='notification_muted', order=link_order, tag=active_tag_uuid)}}"><i data-feather="volume-2" style="vertical-align: bottom; width: 14px; height: 14px; margin-right: 4px;"></i><span class='arrow {{link_order}}'></span></a>
</th>
<th class="empty-cell"></th>
<th><a class="{{ 'active '+link_order if sort_attribute == 'label' else 'inactive' }}" href="{{url_for('watchlist.index', sort='label', order=link_order, tag=active_tag_uuid)}}">Website <span class='arrow {{link_order}}'></span></a></th>
{%- if any_has_restock_price_processor -%}
<th>Restock &amp; Price</th>
@@ -113,13 +105,10 @@ document.addEventListener('DOMContentLoaded', function() {
<td colspan="{{ cols_required }}" style="text-wrap: wrap;">No website watches configured, please add a URL in the box above, or <a href="{{ url_for('imports.import_page')}}" >import a list</a>.</td>
</tr>
{%- endif -%}
{%- for watch in (watches|sort(attribute=sort_attribute, reverse=sort_order == 'asc'))|pagination_slice(skip=pagination.skip) -%}
{%- set checking_now = is_checking_now(watch) -%}
{%- set history_n = watch.history_n -%}
{%- set favicon = watch.get_favicon_filename() -%}
{%- set system_use_url_watchlist = datastore.data['settings']['application']['ui'].get('use_page_title_in_list') -%}
{# Class settings mirrored in changedetectionio/static/js/realtime.js for the frontend #}
{# Mirror in changedetectionio/static/js/realtime.js for the frontend #}
{%- set row_classes = [
loop.cycle('pure-table-odd', 'pure-table-even'),
'processor-' ~ watch['processor'],
@@ -127,69 +116,49 @@ document.addEventListener('DOMContentLoaded', function() {
'paused' if watch.paused is defined and watch.paused != False else '',
'unviewed' if watch.has_unviewed else '',
'has-restock-info' if watch.has_restock_info else 'no-restock-info',
'has-favicon' if favicon else '',
'in-stock' if watch.has_restock_info and watch['restock']['in_stock'] else '',
'not-in-stock' if watch.has_restock_info and not watch['restock']['in_stock'] else '',
'queued' if watch.uuid in queued_uuids else '',
'checking-now' if checking_now else '',
'notification_muted' if watch.notification_muted else '',
'single-history' if history_n == 1 else '',
'multiple-history' if history_n >= 2 else '',
'use-html-title' if system_use_url_watchlist else 'no-html-title',
'multiple-history' if history_n >= 2 else ''
] -%}
<tr id="{{ watch.uuid }}" data-watch-uuid="{{ watch.uuid }}" class="{{ row_classes | reject('equalto', '') | join(' ') }}">
<td class="inline checkbox-uuid" ><div><input name="uuids" type="checkbox" value="{{ watch.uuid}} " > <span class="counter-i">{{ loop.index+pagination.skip }}</span></div></td>
<td class="inline checkbox-uuid" ><input name="uuids" type="checkbox" value="{{ watch.uuid}} " > <span>{{ loop.index+pagination.skip }}</span></td>
<td class="inline watch-controls">
<div>
<a class="ajax-op state-off pause-toggle" data-op="pause" href="{{url_for('watchlist.index', op='pause', uuid=watch.uuid, tag=active_tag_uuid)}}"><img src="{{url_for('static_content', group='images', filename='pause.svg')}}" alt="Pause checks" title="Pause checks" class="icon icon-pause" ></a>
<a class="ajax-op state-on pause-toggle" data-op="pause" style="display: none" href="{{url_for('watchlist.index', op='pause', uuid=watch.uuid, tag=active_tag_uuid)}}"><img src="{{url_for('static_content', group='images', filename='play.svg')}}" alt="UnPause checks" title="UnPause checks" class="icon icon-unpause" ></a>
<a class="ajax-op state-off mute-toggle" data-op="mute" href="{{url_for('watchlist.index', op='mute', uuid=watch.uuid, tag=active_tag_uuid)}}"><img src="{{url_for('static_content', group='images', filename='bell-off.svg')}}" alt="Mute notification" title="Mute notification" class="icon icon-mute" ></a>
<a class="ajax-op state-on mute-toggle" data-op="mute" style="display: none" href="{{url_for('watchlist.index', op='mute', uuid=watch.uuid, tag=active_tag_uuid)}}"><img src="{{url_for('static_content', group='images', filename='bell-off.svg')}}" alt="UnMute notification" title="UnMute notification" class="icon icon-mute" ></a>
</div>
</td>
<td class="title-col inline">{{watch.title if watch.title is not none and watch.title|length > 0 else watch.url}}
<a class="external" target="_blank" rel="noopener" href="{{ watch.link.replace('source:','') }}">&nbsp;</a>
<a class="link-spread" href="{{url_for('ui.form_share_put_watch', uuid=watch.uuid)}}"><img src="{{url_for('static_content', group='images', filename='spread.svg')}}" class="status-icon icon icon-spread" title="Create a link to share watch config with others" ></a>
<td class="title-col inline">
<div class="flex-wrapper">
{% if 'favicons_enabled' not in ui_settings or ui_settings['favicons_enabled'] %}
<div>{# A page might have hundreds of these images, set IMG options for lazy loading, don't set SRC if we don't have it so it doesn't fetch the placeholder #}
<img alt="Favicon thumbnail" class="favicon" loading="lazy" decoding="async" fetchpriority="low" {% if favicon %} src="{{url_for('static_content', group='favicon', filename=watch.uuid)}}" {% else %} src='data:image/svg+xml;utf8,%3Csvg xmlns="http://www.w3.org/2000/svg" width="7.087" height="7.087" viewBox="0 0 7.087 7.087"%3E%3Ccircle cx="3.543" cy="3.543" r="3.279" stroke="%23e1e1e1" stroke-width="0.45" fill="none" opacity="0.74"/%3E%3C/svg%3E' {% endif %} />
</div>
{% endif %}
<div>
<span class="watch-title">
{% if system_use_url_watchlist or watch.get('use_page_title_in_list') %}
{{ watch.label }}
{% else %}
{{ watch.get('title') or watch.link }}
{% endif %}
<a class="external" target="_blank" rel="noopener" href="{{ watch.link.replace('source:','') }}">&nbsp;</a>
</span>
<div class="error-text" style="display:none;">{{ watch.compile_error_texts(has_proxies=datastore.proxy_list) }}</div>
{%- if watch['processor'] == 'text_json_diff' -%}
{%- if watch['has_ldjson_price_data'] and not watch['track_ldjson_price_data'] -%}
<div class="ldjson-price-track-offer">Switch to Restock & Price watch mode? <a href="{{url_for('price_data_follower.accept', uuid=watch.uuid)}}" class="pure-button button-xsmall">Yes</a> <a href="{{url_for('price_data_follower.reject', uuid=watch.uuid)}}" class="">No</a></div>
{%- endif -%}
{%- endif -%}
{%- if watch['processor'] == 'restock_diff' -%}
<span class="tracking-ldjson-price-data" title="Automatically following embedded price information"><img src="{{url_for('static_content', group='images', filename='price-tag-icon.svg')}}" class="status-icon price-follow-tag-icon" > Price</span>
{%- endif -%}
{%- for watch_tag_uuid, watch_tag in datastore.get_all_tags_for_watch(watch['uuid']).items() -%}
<span class="watch-tag-list">{{ watch_tag.title }}</span>
{%- endfor -%}
</div>
<div class="status-icons">
<a class="link-spread" href="{{url_for('ui.form_share_put_watch', uuid=watch.uuid)}}"><img src="{{url_for('static_content', group='images', filename='spread.svg')}}" class="status-icon icon icon-spread" title="Create a link to share watch config with others" ></a>
{%- if watch.get_fetch_backend == "html_webdriver"
or ( watch.get_fetch_backend == "system" and system_default_fetcher == 'html_webdriver' )
or "extra_browser_" in watch.get_fetch_backend
-%}
<img class="status-icon" src="{{url_for('static_content', group='images', filename='google-chrome-icon.png')}}" alt="Using a Chrome browser" title="Using a Chrome browser" >
{%- endif -%}
{%- if watch.is_pdf -%}<img class="status-icon" src="{{url_for('static_content', group='images', filename='pdf-icon.svg')}}" alt="Converting PDF to text" >{%- endif -%}
{%- if watch.has_browser_steps -%}<img class="status-icon status-browsersteps" src="{{url_for('static_content', group='images', filename='steps.svg')}}" alt="Browser Steps is enabled" >{%- endif -%}
{%- if watch.get_fetch_backend == "html_webdriver"
or ( watch.get_fetch_backend == "system" and system_default_fetcher == 'html_webdriver' )
or "extra_browser_" in watch.get_fetch_backend
-%}
<img class="status-icon" src="{{url_for('static_content', group='images', filename='google-chrome-icon.png')}}" alt="Using a Chrome browser" title="Using a Chrome browser" >
{%- endif -%}
</div>
</div>
{%- if watch.is_pdf -%}<img class="status-icon" src="{{url_for('static_content', group='images', filename='pdf-icon.svg')}}" alt="Converting PDF to text" >{%- endif -%}
{%- if watch.has_browser_steps -%}<img class="status-icon status-browsersteps" src="{{url_for('static_content', group='images', filename='steps.svg')}}" alt="Browser Steps is enabled" >{%- endif -%}
<div class="error-text" style="display:none;">{{ watch.compile_error_texts(has_proxies=datastore.proxy_list)|safe }}</div>
{%- if watch['processor'] == 'text_json_diff' -%}
{%- if watch['has_ldjson_price_data'] and not watch['track_ldjson_price_data'] -%}
<div class="ldjson-price-track-offer">Switch to Restock & Price watch mode? <a href="{{url_for('price_data_follower.accept', uuid=watch.uuid)}}" class="pure-button button-xsmall">Yes</a> <a href="{{url_for('price_data_follower.reject', uuid=watch.uuid)}}" class="">No</a></div>
{%- endif -%}
{%- endif -%}
{%- if watch['processor'] == 'restock_diff' -%}
<span class="tracking-ldjson-price-data" title="Automatically following embedded price information"><img src="{{url_for('static_content', group='images', filename='price-tag-icon.svg')}}" class="status-icon price-follow-tag-icon" > Price</span>
{%- endif -%}
{%- for watch_tag_uuid, watch_tag in datastore.get_all_tags_for_watch(watch['uuid']).items() -%}
<span class="watch-tag-list">{{ watch_tag.title }}</span>
{%- endfor -%}
</td>
{%- if any_has_restock_price_processor -%}
<td class="restock-and-price">
@@ -226,15 +195,13 @@ document.addEventListener('DOMContentLoaded', function() {
Not yet
{%- endif -%}
</td>
<td class="buttons">
<div>
<td>
{%- set target_attr = ' target="' ~ watch.uuid ~ '"' if datastore.data['settings']['application']['ui'].get('open_diff_in_new_tab') else '' -%}
<a href="" class="already-in-queue-button recheck pure-button pure-button-primary" style="display: none;" disabled="disabled">Queued</a>
<a href="{{ url_for('ui.form_watch_checknow', uuid=watch.uuid, tag=request.args.get('tag')) }}" data-op='recheck' class="ajax-op recheck pure-button pure-button-primary">Recheck</a>
<a href="{{ url_for('ui.ui_edit.edit_page', uuid=watch.uuid, tag=active_tag_uuid)}}#general" class="pure-button pure-button-primary">Edit</a>
<a href="{{ url_for('ui.ui_views.diff_history_page', uuid=watch.uuid)}}" {{target_attr}} class="pure-button pure-button-primary history-link" style="display: none;">History</a>
<a href="{{ url_for('ui.ui_views.preview_page', uuid=watch.uuid)}}" {{target_attr}} class="pure-button pure-button-primary preview-link" style="display: none;">Preview</a>
</div>
</td>
</tr>
{%- endfor -%}
@@ -252,9 +219,6 @@ document.addEventListener('DOMContentLoaded', function() {
<a href="{{url_for('ui.mark_all_viewed', tag=active_tag_uuid) }}" class="pure-button button-tag " id="mark-all-viewed">Mark all viewed in '{{active_tag.title}}'</a>
</li>
{%- endif -%}
<li id="post-list-unread" class="{%- if has_unviewed -%}has-unviewed{%- endif -%}" style="display: none;" >
<a href="{{url_for('watchlist.index', unread=1, tag=request.args.get('tag')) }}" class="pure-button button-tag">Unread</a>
</li>
<li>
<a href="{{ url_for('ui.form_watch_checknow', tag=active_tag_uuid, with_errors=request.args.get('with_errors',0)) }}" class="pure-button button-tag" id="recheck-all">Recheck
all {% if active_tag_uuid %} in '{{active_tag.title}}'{%endif%}</a>

View File

@@ -1,3 +1,5 @@
from flask import Blueprint
from json_logic.builtins import BUILTINS
from .exceptions import EmptyConditionRuleRowNotUsable
@@ -14,6 +16,7 @@ operator_choices = [
("==", "Equals"),
("!=", "Not Equals"),
("in", "Contains"),
("!in", "Does Not Contain"),
]
# Fields available in the rules

View File

@@ -21,21 +21,17 @@ def register_operators():
def length_max(_, text, strlen):
return len(text) <= int(strlen)
# Custom function for case-insensitive regex matching
# Custom function for case-insensitive regex matching
def contains_regex(_, text, pattern):
"""Returns True if `text` contains `pattern` (case-insensitive regex match)."""
return bool(re.search(pattern, str(text), re.IGNORECASE))
# Custom function for NOT matching case-insensitive regex
# Custom function for NOT matching case-insensitive regex
def not_contains_regex(_, text, pattern):
"""Returns True if `text` does NOT contain `pattern` (case-insensitive regex match)."""
return not bool(re.search(pattern, str(text), re.IGNORECASE))
def not_contains(_, text, pattern):
return pattern not in text
return {
"!in": not_contains,
"!contains_regex": not_contains_regex,
"contains_regex": contains_regex,
"ends_with": ends_with,
@@ -47,7 +43,6 @@ def register_operators():
@hookimpl
def register_operator_choices():
return [
("!in", "Does NOT Contain"),
("starts_with", "Text Starts With"),
("ends_with", "Text Ends With"),
("length_min", "Length minimum"),

View File

@@ -1,8 +1,6 @@
import pluggy
from loguru import logger
LEVENSHTEIN_MAX_LEN_FOR_EDIT_STATS=100000
# Support both plugin systems
conditions_hookimpl = pluggy.HookimplMarker("changedetectionio_conditions")
global_hookimpl = pluggy.HookimplMarker("changedetectionio")
@@ -74,17 +72,7 @@ def ui_edit_stats_extras(watch):
"""Generate the HTML for Levenshtein stats - shared by both plugin systems"""
if len(watch.history.keys()) < 2:
return "<p>Not enough history to calculate Levenshtein metrics</p>"
# Protection against the algorithm getting stuck on huge documents
k = list(watch.history.keys())
if any(
len(watch.get_history_snapshot(timestamp=k[idx])) > LEVENSHTEIN_MAX_LEN_FOR_EDIT_STATS
for idx in (-1, -2)
if len(k) >= abs(idx)
):
return "<p>Snapshot too large for edit statistics, skipping.</p>"
try:
lev_data = levenshtein_ratio_recent_history(watch)
if not lev_data or not isinstance(lev_data, dict):

View File

@@ -28,7 +28,6 @@ from changedetectionio.content_fetchers.requests import fetcher as html_requests
import importlib.resources
XPATH_ELEMENT_JS = importlib.resources.files("changedetectionio.content_fetchers.res").joinpath('xpath_element_scraper.js').read_text(encoding='utf-8')
INSTOCK_DATA_JS = importlib.resources.files("changedetectionio.content_fetchers.res").joinpath('stock-not-in-stock.js').read_text(encoding='utf-8')
FAVICON_FETCHER_JS = importlib.resources.files("changedetectionio.content_fetchers.res").joinpath('favicon-fetcher.js').read_text(encoding='utf-8')
def available_fetchers():

View File

@@ -48,7 +48,6 @@ class Fetcher():
error = None
fetcher_description = "No description"
headers = {}
favicon_blob = None
instock_data = None
instock_data_js = ""
status_code = None
@@ -70,17 +69,15 @@ class Fetcher():
@abstractmethod
async def run(self,
fetch_favicon=True,
current_include_filters=None,
empty_pages_are_a_change=False,
ignore_status_codes=False,
is_binary=False,
request_body=None,
request_headers=None,
request_method=None,
timeout=None,
url=None,
):
url,
timeout,
request_headers,
request_body,
request_method,
ignore_status_codes=False,
current_include_filters=None,
is_binary=False,
empty_pages_are_a_change=False):
# Should set self.error, self.status_code and self.content
pass
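A minimal sketch, not taken from this diff, of how a worker might call a concrete fetcher's run(); the keyword names match the signatures shown above, while the fetcher instance, URL and values are hypothetical placeholders.

import asyncio

async def run_single_check(fetcher, url):
    # Placeholder values; real callers pass the watch's own settings
    await fetcher.run(
        url=url,
        timeout=45,
        request_headers={},
        request_body=None,
        request_method='GET',
        ignore_status_codes=False,
        current_include_filters=None,
        is_binary=False,
        empty_pages_are_a_change=False,
    )
    # run() is expected to set self.error, self.status_code and self.content
    return fetcher.status_code, fetcher.content

# asyncio.run(run_single_check(some_concrete_fetcher, "https://example.com"))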

View File

@@ -5,7 +5,7 @@ from urllib.parse import urlparse
from loguru import logger
from changedetectionio.content_fetchers import SCREENSHOT_MAX_HEIGHT_DEFAULT, visualselector_xpath_selectors, \
SCREENSHOT_SIZE_STITCH_THRESHOLD, SCREENSHOT_MAX_TOTAL_HEIGHT, XPATH_ELEMENT_JS, INSTOCK_DATA_JS, FAVICON_FETCHER_JS
SCREENSHOT_SIZE_STITCH_THRESHOLD, SCREENSHOT_MAX_TOTAL_HEIGHT, XPATH_ELEMENT_JS, INSTOCK_DATA_JS
from changedetectionio.content_fetchers.base import Fetcher, manage_user_agent
from changedetectionio.content_fetchers.exceptions import PageUnloadable, Non200ErrorCodeReceived, EmptyReply, ScreenshotUnavailable
@@ -143,17 +143,15 @@ class fetcher(Fetcher):
f.write(content)
async def run(self,
fetch_favicon=True,
current_include_filters=None,
empty_pages_are_a_change=False,
ignore_status_codes=False,
is_binary=False,
request_body=None,
request_headers=None,
request_method=None,
timeout=None,
url=None,
):
url,
timeout,
request_headers,
request_body,
request_method,
ignore_status_codes=False,
current_include_filters=None,
is_binary=False,
empty_pages_are_a_change=False):
from playwright.async_api import async_playwright
import playwright._impl._errors
@@ -236,13 +234,6 @@ class fetcher(Fetcher):
await browser.close()
raise PageUnloadable(url=url, status_code=None, message=str(e))
if fetch_favicon:
try:
self.favicon_blob = await self.page.evaluate(FAVICON_FETCHER_JS)
await self.page.request_gc()
except Exception as e:
logger.error(f"Error fetching FavIcon info {str(e)}, continuing.")
if self.status_code != 200 and not ignore_status_codes:
screenshot = await capture_full_page_async(self.page)
raise Non200ErrorCodeReceived(url=url, status_code=self.status_code, screenshot=screenshot)
@@ -283,7 +274,6 @@ class fetcher(Fetcher):
await self.page.request_gc()
logger.debug(f"Scrape xPath element data in browser done in {time.time() - now:.2f}s")
# Bug 3 in Playwright screenshot handling
# Some bug where it gives the wrong screenshot size, but making a request with the clip set first seems to solve it
# JPEG is better here because the screenshots can be very very large

View File

@@ -8,7 +8,7 @@ from loguru import logger
from changedetectionio.content_fetchers import SCREENSHOT_MAX_HEIGHT_DEFAULT, visualselector_xpath_selectors, \
SCREENSHOT_SIZE_STITCH_THRESHOLD, SCREENSHOT_DEFAULT_QUALITY, XPATH_ELEMENT_JS, INSTOCK_DATA_JS, \
SCREENSHOT_MAX_TOTAL_HEIGHT, FAVICON_FETCHER_JS
SCREENSHOT_MAX_TOTAL_HEIGHT
from changedetectionio.content_fetchers.base import Fetcher, manage_user_agent
from changedetectionio.content_fetchers.exceptions import PageUnloadable, Non200ErrorCodeReceived, EmptyReply, BrowserFetchTimedOut, \
BrowserConnectError
@@ -145,16 +145,15 @@ class fetcher(Fetcher):
# f.write(content)
async def fetch_page(self,
current_include_filters,
empty_pages_are_a_change,
fetch_favicon,
ignore_status_codes,
is_binary,
request_body,
request_headers,
request_method,
timeout,
url,
timeout,
request_headers,
request_body,
request_method,
ignore_status_codes,
current_include_filters,
is_binary,
empty_pages_are_a_change
):
import re
self.delete_browser_steps_screenshots()
@@ -180,11 +179,10 @@ class fetcher(Fetcher):
except Exception as e:
raise BrowserConnectError(msg=f"Error connecting to the browser - Exception '{str(e)}'")
# more reliable is to just request a new page
self.page = await browser.newPage()
# Add console handler to capture console.log from favicon fetcher
#self.page.on('console', lambda msg: logger.debug(f"Browser console [{msg.type}]: {msg.text}"))
# Better is to launch chrome with the URL as arg
# non-headless - newPage() will launch an extra tab/window, .browser should already contain 1 page/tab
# headless - ask a new page
self.page = (pages := await browser.pages) and len(pages) or await browser.newPage()
if '--window-size' in self.browser_connection_url:
# Be sure the viewport is always the window-size, this is often not the same thing
@@ -294,12 +292,6 @@ class fetcher(Fetcher):
await browser.close()
raise PageUnloadable(url=url, status_code=None, message=str(e))
if fetch_favicon:
try:
self.favicon_blob = await self.page.evaluate(FAVICON_FETCHER_JS)
except Exception as e:
logger.error(f"Error fetching FavIcon info {str(e)}, continuing.")
if self.status_code != 200 and not ignore_status_codes:
screenshot = await capture_full_page(page=self.page)
@@ -351,18 +343,8 @@ class fetcher(Fetcher):
async def main(self, **kwargs):
await self.fetch_page(**kwargs)
async def run(self,
fetch_favicon=True,
current_include_filters=None,
empty_pages_are_a_change=False,
ignore_status_codes=False,
is_binary=False,
request_body=None,
request_headers=None,
request_method=None,
timeout=None,
url=None,
):
async def run(self, url, timeout, request_headers, request_body, request_method, ignore_status_codes=False,
current_include_filters=None, is_binary=False, empty_pages_are_a_change=False):
#@todo make update_worker async which could run any of these content_fetchers within memory and time constraints
max_time = int(os.getenv('PUPPETEER_MAX_PROCESSING_TIMEOUT_SECONDS', 180))
@@ -370,17 +352,16 @@ class fetcher(Fetcher):
# Now we run this properly in async context since we're called from async worker
try:
await asyncio.wait_for(self.main(
current_include_filters=current_include_filters,
empty_pages_are_a_change=empty_pages_are_a_change,
fetch_favicon=fetch_favicon,
ignore_status_codes=ignore_status_codes,
is_binary=is_binary,
request_body=request_body,
request_headers=request_headers,
request_method=request_method,
timeout=timeout,
url=url,
), timeout=max_time
)
timeout=timeout,
request_headers=request_headers,
request_body=request_body,
request_method=request_method,
ignore_status_codes=ignore_status_codes,
current_include_filters=current_include_filters,
is_binary=is_binary,
empty_pages_are_a_change=empty_pages_are_a_change
), timeout=max_time)
except asyncio.TimeoutError:
raise (BrowserFetchTimedOut(msg=f"Browser connected but was unable to process the page in {max_time} seconds."))
raise(BrowserFetchTimedOut(msg=f"Browser connected but was unable to process the page in {max_time} seconds."))

View File

@@ -104,17 +104,15 @@ class fetcher(Fetcher):
self.raw_content = r.content
async def run(self,
fetch_favicon=True,
current_include_filters=None,
empty_pages_are_a_change=False,
ignore_status_codes=False,
is_binary=False,
request_body=None,
request_headers=None,
request_method=None,
timeout=None,
url=None,
):
url,
timeout,
request_headers,
request_body,
request_method,
ignore_status_codes=False,
current_include_filters=None,
is_binary=False,
empty_pages_are_a_change=False):
"""Async wrapper that runs the synchronous requests code in a thread pool"""
loop = asyncio.get_event_loop()

View File

@@ -1,101 +0,0 @@
(async () => {
// Define the function inside the IIFE for console testing
window.getFaviconAsBlob = async function() {
const links = Array.from(document.querySelectorAll(
'link[rel~="apple-touch-icon"], link[rel~="icon"]'
));
const icons = links.map(link => {
const sizesStr = link.getAttribute('sizes');
let size = 0;
if (sizesStr) {
const [w] = sizesStr.split('x').map(Number);
if (!isNaN(w)) size = w;
} else {
size = 16;
}
return {
size,
rel: link.getAttribute('rel'),
href: link.href,
hasSizes: !!sizesStr
};
});
// If no icons found, add fallback favicon.ico
if (icons.length === 0) {
icons.push({
size: 16,
rel: 'icon',
href: '/favicon.ico',
hasSizes: false
});
}
// sort preference: highest resolution first, then apple-touch-icon, then regular icons
icons.sort((a, b) => {
// First priority: actual size (highest first)
if (a.size !== b.size) {
return b.size - a.size;
}
// Second priority: apple-touch-icon over regular icon
const isAppleA = /apple-touch-icon/.test(a.rel);
const isAppleB = /apple-touch-icon/.test(b.rel);
if (isAppleA && !isAppleB) return -1;
if (!isAppleA && isAppleB) return 1;
// Third priority: icons with no size attribute (fallback icons) last
const hasNoSizeA = !a.hasSizes;
const hasNoSizeB = !b.hasSizes;
if (hasNoSizeA && !hasNoSizeB) return 1;
if (!hasNoSizeA && hasNoSizeB) return -1;
return 0;
});
const timeoutMs = 2000;
for (const icon of icons) {
try {
const controller = new AbortController();
const timeout = setTimeout(() => controller.abort(), timeoutMs);
const resp = await fetch(icon.href, {
signal: controller.signal,
redirect: 'follow'
});
clearTimeout(timeout);
if (!resp.ok) {
continue;
}
const blob = await resp.blob();
// Convert blob to base64
const reader = new FileReader();
return await new Promise(resolve => {
reader.onloadend = () => {
resolve({
url: icon.href,
base64: reader.result.split(",")[1]
});
};
reader.readAsDataURL(blob);
});
} catch (e) {
continue;
}
}
// nothing found
return null;
};
// Auto-execute and return result for page.evaluate()
return await window.getFaviconAsBlob();
})();
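A rough sketch (not in the diff itself) of how the {url, base64} result returned by this script could be handed to a watch after page.evaluate(). The favicon_is_expired() and bump_favicon() helpers appear elsewhere in this diff; the rest is illustrative.

def store_favicon_if_any(watch, favicon_blob):
    # favicon_blob is whatever page.evaluate(FAVICON_FETCHER_JS) returned:
    # None, or a dict like {'url': 'https://example.com/apple-touch-icon.png', 'base64': '...'}
    if not favicon_blob:
        return
    if watch.favicon_is_expired():
        watch.bump_favicon(url=favicon_blob.get('url'),
                           favicon_base_64=favicon_blob.get('base64'))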

View File

@@ -17,7 +17,6 @@ async () => {
'back in stock soon',
'back-order or out of stock',
'backordered',
'backorder',
'benachrichtigt mich', // notify me
'binnenkort leverbaar', // coming soon
'brak na stanie',
@@ -40,14 +39,12 @@ async () => {
'mail me when available',
'message if back in stock',
'mevcut değil',
'more on order',
'nachricht bei',
'nicht auf lager',
'nicht lagernd',
'nicht lieferbar',
'nicht verfügbar',
'nicht vorrätig',
'nicht mehr lieferbar',
'nicht zur verfügung',
'nie znaleziono produktów',
'niet beschikbaar',

View File

@@ -4,10 +4,9 @@ import time
from loguru import logger
from changedetectionio.content_fetchers.base import Fetcher
class fetcher(Fetcher):
if os.getenv("WEBDRIVER_URL"):
fetcher_description = f"WebDriver Chrome/Javascript via \"{os.getenv('WEBDRIVER_URL', '')}\""
fetcher_description = "WebDriver Chrome/Javascript via '{}'".format(os.getenv("WEBDRIVER_URL"))
else:
fetcher_description = "WebDriver Chrome/Javascript"
@@ -26,6 +25,7 @@ class fetcher(Fetcher):
self.browser_connection_is_custom = True
self.browser_connection_url = custom_browser_connection_url
##### PROXY SETUP #####
proxy_sources = [
@@ -38,7 +38,7 @@ class fetcher(Fetcher):
os.getenv('webdriver_proxyHttps'),
os.getenv('webdriver_httpsProxy'),
os.getenv('webdriver_sslProxy'),
proxy_override, # last one should override
proxy_override, # last one should override
]
# The built-in selenium proxy handling is super unreliable!!! so we just grab whichever proxy setting we can find and throw it in --proxy-server=
for k in filter(None, proxy_sources):
@@ -46,21 +46,20 @@ class fetcher(Fetcher):
continue
self.proxy_url = k.strip()
async def run(self,
fetch_favicon=True,
current_include_filters=None,
empty_pages_are_a_change=False,
ignore_status_codes=False,
is_binary=False,
request_body=None,
request_headers=None,
request_method=None,
timeout=None,
url=None,
):
url,
timeout,
request_headers,
request_body,
request_method,
ignore_status_codes=False,
current_include_filters=None,
is_binary=False,
empty_pages_are_a_change=False):
import asyncio
# Wrap the entire selenium operation in a thread executor
def _run_sync():
from selenium.webdriver.chrome.options import Options as ChromeOptions
@@ -141,3 +140,4 @@ class fetcher(Fetcher):
# Run the selenium operations in a thread pool to avoid blocking the event loop
loop = asyncio.get_event_loop()
await loop.run_in_executor(None, _run_sync)
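Illustrative only: the proxy handling above keeps the last non-empty entry (so proxy_override wins) and passes it to Chrome as --proxy-server. A compressed sketch of that precedence; the proxy URL here is made up.

import os
from selenium.webdriver.chrome.options import Options as ChromeOptions

proxy_sources = [
    os.getenv('webdriver_proxyHttps'),
    os.getenv('webdriver_sslProxy'),
    'http://proxy.example.internal:3128',   # stands in for proxy_override
]

proxy_url = None
for k in filter(None, proxy_sources):
    proxy_url = k.strip()   # later entries overwrite earlier ones

options = ChromeOptions()
if proxy_url:
    options.add_argument(f'--proxy-server={proxy_url}')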

View File

@@ -12,17 +12,19 @@ from blinker import signal
from changedetectionio.strtobool import strtobool
from threading import Event
from changedetectionio.queue_handlers import RecheckPriorityQueue, NotificationQueue
from changedetectionio.custom_queue import SignalPriorityQueue, AsyncSignalPriorityQueue, NotificationQueue
from changedetectionio import worker_handler
from flask import (
Flask,
abort,
flash,
make_response,
redirect,
render_template,
request,
send_from_directory,
session,
url_for,
)
from flask_compress import Compress as FlaskCompress
@@ -38,12 +40,7 @@ from loguru import logger
from changedetectionio import __version__
from changedetectionio import queuedWatchMetaData
from changedetectionio.api import Watch, WatchHistory, WatchSingleHistory, CreateWatch, Import, SystemInfo, Tag, Tags, Notifications, WatchFavicon
from changedetectionio.notification.BrowserNotifications import (
BrowserNotificationsVapidPublicKey,
BrowserNotificationsSubscribe,
BrowserNotificationsUnsubscribe
)
from changedetectionio.api import Watch, WatchHistory, WatchSingleHistory, CreateWatch, Import, SystemInfo, Tag, Tags, Notifications
from changedetectionio.api.Search import Search
from .time_handler import is_within_schedule
@@ -53,8 +50,8 @@ datastore = None
ticker_thread = None
extra_stylesheets = []
# Use bulletproof janus-based queues for sync/async reliability
update_q = RecheckPriorityQueue()
# Use async queue by default, keep sync for backward compatibility
update_q = AsyncSignalPriorityQueue() if worker_handler.USE_ASYNC_WORKERS else SignalPriorityQueue()
notification_q = NotificationQueue()
MAX_QUEUE_SIZE = 2000
@@ -99,7 +96,6 @@ except locale.Error:
logger.warning(f"Unable to set locale {default_locale}, locale is not installed maybe?")
watch_api = Api(app, decorators=[csrf.exempt])
browser_notification_api = Api(app, decorators=[csrf.exempt])
def init_app_secret(datastore_path):
secret = ""
@@ -311,9 +307,7 @@ def changedetection_app(config=None, datastore_o=None):
watch_api.add_resource(WatchSingleHistory,
'/api/v1/watch/<string:uuid>/history/<string:timestamp>',
resource_class_kwargs={'datastore': datastore, 'update_q': update_q})
watch_api.add_resource(WatchFavicon,
'/api/v1/watch/<string:uuid>/favicon',
resource_class_kwargs={'datastore': datastore})
watch_api.add_resource(WatchHistory,
'/api/v1/watch/<string:uuid>/history',
resource_class_kwargs={'datastore': datastore})
@@ -335,18 +329,13 @@ def changedetection_app(config=None, datastore_o=None):
resource_class_kwargs={'datastore': datastore})
watch_api.add_resource(Tag, '/api/v1/tag', '/api/v1/tag/<string:uuid>',
resource_class_kwargs={'datastore': datastore, 'update_q': update_q})
resource_class_kwargs={'datastore': datastore})
watch_api.add_resource(Search, '/api/v1/search',
resource_class_kwargs={'datastore': datastore})
watch_api.add_resource(Notifications, '/api/v1/notifications',
resource_class_kwargs={'datastore': datastore})
# Browser notification endpoints
browser_notification_api.add_resource(BrowserNotificationsVapidPublicKey, '/browser-notifications-api/vapid-public-key')
browser_notification_api.add_resource(BrowserNotificationsSubscribe, '/browser-notifications-api/subscribe')
browser_notification_api.add_resource(BrowserNotificationsUnsubscribe, '/browser-notifications-api/unsubscribe')
@login_manager.user_loader
def user_loader(email):
@@ -438,32 +427,6 @@ def changedetection_app(config=None, datastore_o=None):
except FileNotFoundError:
abort(404)
if group == 'favicon':
# Could be sensitive, follow password requirements
if datastore.data['settings']['application']['password'] and not flask_login.current_user.is_authenticated:
abort(403)
# Get the watch object
watch = datastore.data['watching'].get(filename)
if not watch:
abort(404)
favicon_filename = watch.get_favicon_filename()
if favicon_filename:
try:
import magic
mime = magic.from_file(
os.path.join(watch.watch_data_dir, favicon_filename),
mime=True
)
except ImportError:
# Fallback, no python-magic
import mimetypes
mime, encoding = mimetypes.guess_type(favicon_filename)
response = make_response(send_from_directory(watch.watch_data_dir, favicon_filename))
response.headers['Content-type'] = mime
response.headers['Cache-Control'] = 'max-age=300, must-revalidate' # Cache for 5 minutes, then revalidate
return response
if group == 'visual_selector_data':
# Could be sensitive, follow password requirements
@@ -500,29 +463,10 @@ def changedetection_app(config=None, datastore_o=None):
except FileNotFoundError:
abort(404)
@app.route("/service-worker.js", methods=['GET'])
def service_worker():
from flask import make_response
try:
# Serve from the changedetectionio/static/js directory
static_js_path = os.path.join(os.path.dirname(__file__), 'static', 'js')
response = make_response(send_from_directory(static_js_path, "service-worker.js"))
response.headers['Content-Type'] = 'application/javascript'
response.headers['Service-Worker-Allowed'] = '/'
response.headers['Cache-Control'] = 'no-cache, no-store, must-revalidate'
response.headers['Pragma'] = 'no-cache'
response.headers['Expires'] = '0'
return response
except FileNotFoundError:
abort(404)
import changedetectionio.blueprint.browser_steps as browser_steps
app.register_blueprint(browser_steps.construct_blueprint(datastore), url_prefix='/browser-steps')
import changedetectionio.blueprint.browser_notifications.browser_notifications as browser_notifications
app.register_blueprint(browser_notifications.construct_blueprint(datastore), url_prefix='/browser-notifications')
from changedetectionio.blueprint.imports import construct_blueprint as construct_import_blueprint
app.register_blueprint(construct_import_blueprint(datastore, update_q, queuedWatchMetaData), url_prefix='/imports')
@@ -874,22 +818,16 @@ def ticker_thread_check_time_launch_checks():
# Use Epoch time as priority, so we get a "sorted" PriorityQueue, but we can still push a priority 1 into it.
priority = int(time.time())
logger.debug(
f"> Queued watch UUID {uuid} "
f"last checked at {watch['last_checked']} "
f"queued at {now:0.2f} priority {priority} "
f"jitter {watch.jitter_seconds:0.2f}s, "
f"{now - watch['last_checked']:0.2f}s since last checked")
# Into the queue with you
queued_successfully = worker_handler.queue_item_async_safe(update_q,
queuedWatchMetaData.PrioritizedItem(priority=priority,
item={'uuid': uuid})
)
if queued_successfully:
logger.debug(
f"> Queued watch UUID {uuid} "
f"last checked at {watch['last_checked']} "
f"queued at {now:0.2f} priority {priority} "
f"jitter {watch.jitter_seconds:0.2f}s, "
f"{now - watch['last_checked']:0.2f}s since last checked")
else:
logger.critical(f"CRITICAL: Failed to queue watch UUID {uuid} in ticker thread!")
worker_handler.queue_item_async_safe(update_q, queuedWatchMetaData.PrioritizedItem(priority=priority, item={'uuid': uuid}))
# Reset for next time
watch.jitter_seconds = 0
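An illustrative sketch (not project code) of why int(time.time()) works as a queue priority: lower numbers are dequeued first, so an explicit priority of 1 always jumps ahead of epoch-based entries. PrioritizedItem here is a stand-in dataclass, not the project's queuedWatchMetaData class.

import time
import queue
from dataclasses import dataclass, field

@dataclass(order=True)
class PrioritizedItem:          # stand-in for queuedWatchMetaData.PrioritizedItem
    priority: int
    item: dict = field(compare=False)

q = queue.PriorityQueue()
q.put(PrioritizedItem(priority=int(time.time()), item={'uuid': 'scheduled-watch'}))
q.put(PrioritizedItem(priority=1, item={'uuid': 'user-clicked-recheck'}))

print(q.get().item)  # -> {'uuid': 'user-clicked-recheck'}  (priority 1 wins)
print(q.get().item)  # -> {'uuid': 'scheduled-watch'}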

View File

@@ -23,14 +23,11 @@ from wtforms import (
)
from flask_wtf.file import FileField, FileAllowed
from wtforms.fields import FieldList
from wtforms.utils import unset_value
from wtforms.validators import ValidationError
from validators.url import url as url_validator
from changedetectionio.widgets import TernaryNoneBooleanField
# default
# each select <option data-enabled="enabled-0-0"
@@ -57,8 +54,6 @@ valid_method = {
default_method = 'GET'
allow_simplehost = not strtobool(os.getenv('BLOCK_SIMPLEHOSTS', 'False'))
REQUIRE_ATLEAST_ONE_TIME_PART_MESSAGE_DEFAULT='At least one time interval (weeks, days, hours, minutes, or seconds) must be specified.'
REQUIRE_ATLEAST_ONE_TIME_PART_WHEN_NOT_GLOBAL_DEFAULT='At least one time interval (weeks, days, hours, minutes, or seconds) must be specified when not using global settings.'
class StringListField(StringField):
widget = widgets.TextArea()
@@ -215,35 +210,6 @@ class ScheduleLimitForm(Form):
self.sunday.form.enabled.label.text = "Sunday"
def validate_time_between_check_has_values(form):
"""
Custom validation function for TimeBetweenCheckForm.
Returns True if at least one time interval field has a value > 0.
"""
res = any([
form.weeks.data and int(form.weeks.data) > 0,
form.days.data and int(form.days.data) > 0,
form.hours.data and int(form.hours.data) > 0,
form.minutes.data and int(form.minutes.data) > 0,
form.seconds.data and int(form.seconds.data) > 0
])
return res
class RequiredTimeInterval(object):
"""
WTForms validator that ensures at least one time interval field has a value > 0.
Use this with FormField(TimeBetweenCheckForm, validators=[RequiredTimeInterval()]).
"""
def __init__(self, message=None):
self.message = message or 'At least one time interval (weeks, days, hours, minutes, or seconds) must be specified.'
def __call__(self, form, field):
if not validate_time_between_check_has_values(field.form):
raise ValidationError(self.message)
class TimeBetweenCheckForm(Form):
weeks = IntegerField('Weeks', validators=[validators.Optional(), validators.NumberRange(min=0, message="Should contain zero or more seconds")])
days = IntegerField('Days', validators=[validators.Optional(), validators.NumberRange(min=0, message="Should contain zero or more seconds")])
@@ -252,123 +218,6 @@ class TimeBetweenCheckForm(Form):
seconds = IntegerField('Seconds', validators=[validators.Optional(), validators.NumberRange(min=0, message="Should contain zero or more seconds")])
# @todo add total seconds minimum validator = minimum_seconds_recheck_time
def __init__(self, formdata=None, obj=None, prefix="", data=None, meta=None, **kwargs):
super().__init__(formdata, obj, prefix, data, meta, **kwargs)
self.require_at_least_one = kwargs.get('require_at_least_one', False)
self.require_at_least_one_message = kwargs.get('require_at_least_one_message', REQUIRE_ATLEAST_ONE_TIME_PART_MESSAGE_DEFAULT)
def validate(self, **kwargs):
"""Custom validation that can optionally require at least one time interval."""
# Run normal field validation first
if not super().validate(**kwargs):
return False
# Apply optional "at least one" validation
if self.require_at_least_one:
if not validate_time_between_check_has_values(self):
# Add error to the form's general errors (not field-specific)
if not hasattr(self, '_formdata_errors'):
self._formdata_errors = []
self._formdata_errors.append(self.require_at_least_one_message)
return False
return True
class EnhancedFormField(FormField):
"""
An enhanced FormField that supports conditional validation with top-level error messages.
Adds a 'top_errors' property for validation errors at the FormField level.
"""
def __init__(self, form_class, label=None, validators=None, separator="-",
conditional_field=None, conditional_message=None, conditional_test_function=None, **kwargs):
"""
Initialize EnhancedFormField with optional conditional validation.
:param conditional_field: Name of the field this FormField depends on (e.g. 'time_between_check_use_default')
:param conditional_message: Error message to show when validation fails
:param conditional_test_function: Custom function to test if FormField has valid values.
Should take self.form as parameter and return True if valid.
"""
super().__init__(form_class, label, validators, separator, **kwargs)
self.top_errors = []
self.conditional_field = conditional_field
self.conditional_message = conditional_message or "At least one field must have a value when not using defaults."
self.conditional_test_function = conditional_test_function
def validate(self, form, extra_validators=()):
"""
Custom validation that supports conditional logic and stores top-level errors.
"""
self.top_errors = []
# First run the normal FormField validation
base_valid = super().validate(form, extra_validators)
# Apply conditional validation if configured
if self.conditional_field and hasattr(form, self.conditional_field):
conditional_field_obj = getattr(form, self.conditional_field)
# If the conditional field is False/unchecked, check if this FormField has any values
if not conditional_field_obj.data:
# Use custom test function if provided, otherwise use generic fallback
if self.conditional_test_function:
has_any_value = self.conditional_test_function(self.form)
else:
# Generic fallback - check if any field has truthy data
has_any_value = any(field.data for field in self.form if hasattr(field, 'data') and field.data)
if not has_any_value:
self.top_errors.append(self.conditional_message)
base_valid = False
return base_valid
class RequiredFormField(FormField):
"""
A FormField that passes require_at_least_one=True to TimeBetweenCheckForm.
Use this when you want the sub-form to always require at least one value.
"""
def __init__(self, form_class, label=None, validators=None, separator="-", **kwargs):
super().__init__(form_class, label, validators, separator, **kwargs)
def process(self, formdata, data=unset_value, extra_filters=None):
if extra_filters:
raise TypeError(
"FormField cannot take filters, as the encapsulated"
"data is not mutable."
)
if data is unset_value:
try:
data = self.default()
except TypeError:
data = self.default
self._obj = data
self.object_data = data
prefix = self.name + self.separator
# Pass require_at_least_one=True to the sub-form
if isinstance(data, dict):
self.form = self.form_class(formdata=formdata, prefix=prefix, require_at_least_one=True, **data)
else:
self.form = self.form_class(formdata=formdata, obj=data, prefix=prefix, require_at_least_one=True)
@property
def errors(self):
"""Include sub-form validation errors"""
form_errors = self.form.errors
# Add any general form errors to a special 'form' key
if hasattr(self.form, '_formdata_errors') and self.form._formdata_errors:
form_errors = dict(form_errors) # Make a copy
form_errors['form'] = self.form._formdata_errors
return form_errors
# Separated by key:value
class StringDictKeyValue(StringField):
widget = widgets.TextArea()
@@ -497,7 +346,7 @@ class ValidateJinja2Template(object):
joined_data = ' '.join(map(str, field.data)) if isinstance(field.data, list) else f"{field.data}"
try:
jinja2_env = ImmutableSandboxedEnvironment(loader=BaseLoader, extensions=['jinja2_time.TimeExtension'])
jinja2_env = ImmutableSandboxedEnvironment(loader=BaseLoader)
jinja2_env.globals.update(notification.valid_tokens)
# Extra validation tokens provided on the form_class(... extra_tokens={}) setup
if hasattr(field, 'extra_notification_tokens'):
@@ -547,19 +396,6 @@ def validate_url(test_url):
# This should be wtforms.validators.
raise ValidationError('Watch protocol is not permitted by SAFE_PROTOCOL_REGEX or incorrect URL format')
class ValidateSinglePythonRegexString(object):
def __init__(self, message=None):
self.message = message
def __call__(self, form, field):
try:
re.compile(field.data)
except re.error:
message = field.gettext('RegEx \'%s\' is not a valid regular expression.')
raise ValidationError(message % (field.data))
class ValidateListRegex(object):
"""
Validates that anything that looks like a regex passes as a regex
@@ -578,7 +414,6 @@ class ValidateListRegex(object):
message = field.gettext('RegEx \'%s\' is not a valid regular expression.')
raise ValidationError(message % (line))
class ValidateCSSJSONXPATHInput(object):
"""
Filter validation
@@ -699,6 +534,7 @@ class commonSettingsForm(Form):
self.notification_title.extra_notification_tokens = kwargs.get('extra_notification_tokens', {})
self.notification_urls.extra_notification_tokens = kwargs.get('extra_notification_tokens', {})
extract_title_as_title = BooleanField('Extract <title> from document and use as watch title', default=False)
fetch_backend = RadioField(u'Fetch Method', choices=content_fetchers.available_fetchers(), validators=[ValidateContentFetcherIsReady()])
notification_body = TextAreaField('Notification Body', default='{{ watch_url }} had a change.', validators=[validators.Optional(), ValidateJinja2Template()])
notification_format = SelectField('Notification format', choices=valid_notification_formats.keys())
@@ -707,7 +543,6 @@ class commonSettingsForm(Form):
processor = RadioField( label=u"Processor - What do you want to achieve?", choices=processors.available_processors(), default="text_json_diff")
timezone = StringField("Timezone for watch schedule", render_kw={"list": "timezones"}, validators=[validateTimeZoneName()])
webdriver_delay = IntegerField('Wait seconds before extracting text', validators=[validators.Optional(), validators.NumberRange(min=1, message="Should contain one or more seconds")])
class importForm(Form):
@@ -733,16 +568,11 @@ class processor_text_json_diff_form(commonSettingsForm):
url = fields.URLField('URL', validators=[validateURL()])
tags = StringTagUUID('Group tag', [validators.Optional()], default='')
time_between_check = EnhancedFormField(
TimeBetweenCheckForm,
conditional_field='time_between_check_use_default',
conditional_message=REQUIRE_ATLEAST_ONE_TIME_PART_WHEN_NOT_GLOBAL_DEFAULT,
conditional_test_function=validate_time_between_check_has_values
)
time_between_check = FormField(TimeBetweenCheckForm)
time_schedule_limit = FormField(ScheduleLimitForm)
time_between_check_use_default = BooleanField('Use global settings for time between check and scheduler.', default=False)
time_between_check_use_default = BooleanField('Use global settings for time between check', default=False)
include_filters = StringListField('CSS/JSONPath/JQ/XPath Filters', [ValidateCSSJSONXPATHInput()], default='')
@@ -772,18 +602,18 @@ class processor_text_json_diff_form(commonSettingsForm):
text_should_not_be_present = StringListField('Block change-detection while text matches', [validators.Optional(), ValidateListRegex()])
webdriver_js_execute_code = TextAreaField('Execute JavaScript before change detection', render_kw={"rows": "5"}, validators=[validators.Optional()])
save_button = SubmitField('Save', render_kw={"class": "pure-button pure-button-primary"})
save_button = SubmitField('Save', render_kw={"class": "pure-button button-small pure-button-primary"})
proxy = RadioField('Proxy')
# filter_failure_notification_send @todo make ternary
filter_failure_notification_send = BooleanField(
'Send a notification when the filter can no longer be found on the page', default=False)
notification_muted = TernaryNoneBooleanField('Notifications', default=None, yes_text="Muted", no_text="On")
notification_muted = BooleanField('Notifications Muted / Off', default=False)
notification_screenshot = BooleanField('Attach screenshot to notification (where possible)', default=False)
conditions_match_logic = RadioField(u'Match', choices=[('ALL', 'Match all of the following'),('ANY', 'Match any of the following')], default='ALL')
conditions = FieldList(FormField(ConditionFormRow), min_entries=1) # Add rule logic here
use_page_title_in_list = TernaryNoneBooleanField('Use page <title> in list', default=None)
def extra_tab_content(self):
return None
@@ -883,7 +713,7 @@ class DefaultUAInputForm(Form):
# datastore.data['settings']['requests']..
class globalSettingsRequestForm(Form):
time_between_check = RequiredFormField(TimeBetweenCheckForm)
time_between_check = FormField(TimeBetweenCheckForm)
time_schedule_limit = FormField(ScheduleLimitForm)
proxy = RadioField('Proxy')
jitter_seconds = IntegerField('Random jitter seconds ± check',
@@ -910,8 +740,6 @@ class globalSettingsRequestForm(Form):
class globalSettingsApplicationUIForm(Form):
open_diff_in_new_tab = BooleanField("Open 'History' page in a new tab", default=True, validators=[validators.Optional()])
socket_io_enabled = BooleanField('Realtime UI Updates Enabled', default=True, validators=[validators.Optional()])
favicons_enabled = BooleanField('Favicons Enabled', default=True, validators=[validators.Optional()])
use_page_title_in_list = BooleanField('Use page <title> in watch overview list') #BooleanField=True
# datastore.data['settings']['application']..
class globalSettingsApplicationForm(commonSettingsForm):
@@ -936,7 +764,7 @@ class globalSettingsApplicationForm(commonSettingsForm):
removepassword_button = SubmitField('Remove password', render_kw={"class": "pure-button pure-button-primary"})
render_anchor_tag_content = BooleanField('Render anchor tag content', default=False)
shared_diff_access = BooleanField('Allow anonymous access to watch history page when password is enabled', default=False, validators=[validators.Optional()])
shared_diff_access = BooleanField('Allow access to view diff page when password is enabled', default=False, validators=[validators.Optional()])
rss_hide_muted_watches = BooleanField('Hide muted watches from RSS feed', default=True,
validators=[validators.Optional()])
filter_failure_notification_threshold_attempts = IntegerField('Number of times the filter can be missing before sending a notification',
@@ -958,9 +786,9 @@ class globalSettingsForm(Form):
requests = FormField(globalSettingsRequestForm)
application = FormField(globalSettingsApplicationForm)
save_button = SubmitField('Save', render_kw={"class": "pure-button pure-button-primary"})
save_button = SubmitField('Save', render_kw={"class": "pure-button button-small pure-button-primary"})
class extractDataForm(Form):
extract_regex = StringField('RegEx to extract', validators=[validators.DataRequired(), ValidateSinglePythonRegexString()])
extract_regex = StringField('RegEx to extract', validators=[validators.Length(min=1, message="Needs a RegEx")])
extract_submit_button = SubmitField('Extract as CSV', render_kw={"class": "pure-button pure-button-primary"})
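A small illustration (not from the diff) of what the single-regex validation used by extractDataForm boils down to: try to compile the submitted pattern and reject the field when re.compile() fails. The helper name below is hypothetical.

import re
from wtforms.validators import ValidationError

def check_single_regex(pattern: str) -> None:
    # Same core check as ValidateSinglePythonRegexString above
    try:
        re.compile(pattern)
    except re.error:
        raise ValidationError(f"RegEx '{pattern}' is not a valid regular expression.")

check_single_regex(r"\d+ items in stock")    # passes silently
# check_single_regex("(unbalanced")          # would raise ValidationError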

View File

@@ -1,7 +1,6 @@
from loguru import logger
from lxml import etree
from typing import List
import html
import json
import re
@@ -10,11 +9,6 @@ TEXT_FILTER_LIST_LINE_SUFFIX = "<br>"
TRANSLATE_WHITESPACE_TABLE = str.maketrans('', '', '\r\n\t ')
PERL_STYLE_REGEX = r'^/(.*?)/([a-z]*)?$'
TITLE_RE = re.compile(r"<title[^>]*>(.*?)</title>", re.I | re.S)
META_CS = re.compile(r'<meta[^>]+charset=["\']?\s*([a-z0-9_\-:+.]+)', re.I)
META_CT = re.compile(r'<meta[^>]+http-equiv=["\']?content-type["\']?[^>]*content=["\'][^>]*charset=([a-z0-9_\-:+.]+)', re.I)
# 'price' , 'lowPrice', 'highPrice' are usually under here
# All of those may or may not appear on different websites - I didn't find a way to do case-insensitive searching here
LD_JSON_PRODUCT_OFFER_SELECTORS = ["json:$..offers", "json:$..Offers"]
@@ -516,43 +510,3 @@ def get_triggered_text(content, trigger_text):
i += 1
return triggered_text
def extract_title(data: bytes | str, sniff_bytes: int = 2048, scan_chars: int = 8192) -> str | None:
try:
# Only decode/process the prefix we need for title extraction
match data:
case bytes() if data.startswith((b"\xff\xfe", b"\xfe\xff")):
prefix = data[:scan_chars * 2].decode("utf-16", errors="replace")
case bytes() if data.startswith((b"\xff\xfe\x00\x00", b"\x00\x00\xfe\xff")):
prefix = data[:scan_chars * 4].decode("utf-32", errors="replace")
case bytes():
try:
prefix = data[:scan_chars].decode("utf-8")
except UnicodeDecodeError:
try:
head = data[:sniff_bytes].decode("ascii", errors="ignore")
if m := (META_CS.search(head) or META_CT.search(head)):
enc = m.group(1).lower()
else:
enc = "cp1252"
prefix = data[:scan_chars * 2].decode(enc, errors="replace")
except Exception as e:
logger.error(f"Title extraction encoding detection failed: {e}")
return None
case str():
prefix = data[:scan_chars] if len(data) > scan_chars else data
case _:
logger.error(f"Title extraction received unsupported data type: {type(data)}")
return None
# Search only in the prefix
if m := TITLE_RE.search(prefix):
title = html.unescape(" ".join(m.group(1).split())).strip()
# Some safe limit
return title[:2000]
return None
except Exception as e:
logger.error(f"Title extraction failed: {e}")
return None
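A possible usage sketch for extract_title() above (assuming it lives in changedetectionio.html_tools alongside the other helpers in this file); the HTML sample is made up. The function only scans a bounded prefix of the document and unescapes any HTML entities in the <title> it finds.

from changedetectionio.html_tools import extract_title

sample = b"<html><head><title>  Widgets &amp; Gadgets  </title></head><body>...</body></html>"
print(extract_title(sample))   # -> "Widgets & Gadgets"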

View File

@@ -39,12 +39,12 @@ class model(dict):
'api_access_token_enabled': True,
'base_url' : None,
'empty_pages_are_a_change': False,
'extract_title_as_title': False,
'fetch_backend': getenv("DEFAULT_FETCH_BACKEND", "html_requests"),
'filter_failure_notification_threshold_attempts': _FILTER_FAILURE_THRESHOLD_ATTEMPTS_DEFAULT,
'global_ignore_text': [], # List of text to ignore when calculating the comparison checksum
'global_subtractive_selectors': [],
'ignore_whitespace': True,
'ignore_status_codes': False, #@todo implement, as ternary.
'notification_body': default_notification_body,
'notification_format': default_notification_format,
'notification_title': default_notification_title,
@@ -57,19 +57,12 @@ class model(dict):
'rss_hide_muted_watches': True,
'schema_version' : 0,
'shared_diff_access': False,
'webdriver_delay': None , # Extra delay in seconds before extracting text
'tags': {}, #@todo use Tag.model initialisers
'timezone': None, # Default IANA timezone name
'webdriver_delay': None , # Extra delay in seconds before extracting text
'ui': {
'use_page_title_in_list': True,
'open_diff_in_new_tab': True,
'socket_io_enabled': True,
'favicons_enabled': True
},
'vapid': {
'private_key': None,
'public_key': None,
'contact_email': None
'socket_io_enabled': True
},
}
}

View File

@@ -8,14 +8,11 @@ import re
from pathlib import Path
from loguru import logger
from .. import safe_jinja
from ..html_tools import TRANSLATE_WHITESPACE_TABLE
# Allowable protocols, protects against javascript: etc
# file:// is further checked by ALLOW_FILE_URI
SAFE_PROTOCOL_REGEX='^(http|https|ftp|file):'
FAVICON_RESAVE_THRESHOLD_SECONDS=86400
minimum_seconds_recheck_time = int(os.getenv('MINIMUM_SECONDS_RECHECK_TIME', 3))
mtable = {'seconds': 1, 'minutes': 60, 'hours': 3600, 'days': 86400, 'weeks': 86400 * 7}
@@ -105,13 +102,6 @@ class model(watch_base):
return 'DISABLED'
return ready_url
@property
def domain_only_from_link(self):
from urllib.parse import urlparse
parsed = urlparse(self.link)
domain = parsed.hostname
return domain
def clear_watch(self):
import pathlib
@@ -169,8 +159,8 @@ class model(watch_base):
@property
def label(self):
# Used for sorting, display, etc
return self.get('title') or self.get('page_title') or self.link
# Used for sorting
return self.get('title') if self.get('title') else self.get('url')
@property
def last_changed(self):
@@ -422,154 +412,6 @@ class model(watch_base):
# False is not an option for AppRise, must be type None
return None
def favicon_is_expired(self):
favicon_fname = self.get_favicon_filename()
import glob
import time
if not favicon_fname:
return True
try:
fname = next(iter(glob.glob(os.path.join(self.watch_data_dir, "favicon.*"))), None)
logger.trace(f"Favicon file maybe found at {fname}")
if os.path.isfile(fname):
file_age = int(time.time() - os.path.getmtime(fname))
logger.trace(f"Favicon file age is {file_age}s")
if file_age < FAVICON_RESAVE_THRESHOLD_SECONDS:
return False
except Exception as e:
logger.critical(f"Exception checking Favicon age {str(e)}")
return True
# Also in the case that the file didn't exist
return True
def bump_favicon(self, url, favicon_base_64: str) -> None:
from urllib.parse import urlparse
import base64
import binascii
decoded = None
if url:
try:
parsed = urlparse(url)
filename = os.path.basename(parsed.path)
(base, extension) = filename.lower().strip().rsplit('.', 1)
except ValueError:
logger.error(f"UUID: {self.get('uuid')} Cant work out file extension from '{url}'")
return None
else:
# Assume favicon.ico
base = "favicon"
extension = "ico"
fname = os.path.join(self.watch_data_dir, f"favicon.{extension}")
try:
# validate=True makes sure the string only contains valid base64 chars
decoded = base64.b64decode(favicon_base_64, validate=True)
except (binascii.Error, ValueError) as e:
logger.warning(f"UUID: {self.get('uuid')} FavIcon save data (Base64) corrupt? {str(e)}")
else:
if decoded:
try:
with open(fname, 'wb') as f:
f.write(decoded)
# A signal that could trigger the socket server to update the browser also
watch_check_update = signal('watch_favicon_bump')
if watch_check_update:
watch_check_update.send(watch_uuid=self.get('uuid'))
except Exception as e:
logger.warning(f"UUID: {self.get('uuid')} error saving FavIcon to {fname} - {str(e)}")
# @todo - Store some checksum and only write when it's different
logger.debug(f"UUID: {self.get('uuid')} updated favicon at {fname}")
def get_favicon_filename(self) -> str | None:
"""
Find any favicon.* file in the watch data directory
and return the filename of the newest one.
Returns:
str: Filename of the newest favicon file, or None if not found.
"""
import glob
# Search for all favicon.* files
files = glob.glob(os.path.join(self.watch_data_dir, "favicon.*"))
if not files:
return None
# Find the newest by modification time
newest_file = max(files, key=os.path.getmtime)
return os.path.basename(newest_file)
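A rough sketch of how the favicon helpers above fit together; `watch` stands for a Watch model instance, and the URL and image bytes are placeholders rather than values from the project:
# Hypothetical usage of the favicon helpers; the payloads below are placeholders.
import base64
if watch.favicon_is_expired():  # missing, or older than FAVICON_RESAVE_THRESHOLD_SECONDS (1 day)
    png_bytes = b"\x89PNG\r\n\x1a\n..."  # would normally come from the browser fetcher
    watch.bump_favicon(url="https://example.com/favicon.png",
                       favicon_base_64=base64.b64encode(png_bytes).decode())
print(watch.get_favicon_filename())  # e.g. "favicon.png", or None if nothing was saved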
def get_screenshot_as_thumbnail(self, max_age=3200):
"""Return path to a square thumbnail of the most recent screenshot.
Creates a 350x350 pixel thumbnail from the top portion of the screenshot.
Args:
max_age: Maximum age in seconds before recreating thumbnail
Returns:
Path to thumbnail or None if no screenshot exists
"""
import os
import time
thumbnail_path = os.path.join(self.watch_data_dir, "thumbnail.jpeg")
top_trim = 500 # Pixels from top of screenshot to use
screenshot_path = self.get_screenshot()
if not screenshot_path:
return None
# Reuse thumbnail if it's fresh and screenshot hasn't changed
if os.path.isfile(thumbnail_path):
thumbnail_mtime = os.path.getmtime(thumbnail_path)
screenshot_mtime = os.path.getmtime(screenshot_path)
if screenshot_mtime <= thumbnail_mtime and time.time() - thumbnail_mtime < max_age:
return thumbnail_path
try:
from PIL import Image
with Image.open(screenshot_path) as img:
# Crop top portion first (full width, top_trim height)
top_crop_height = min(top_trim, img.height)
img = img.crop((0, 0, img.width, top_crop_height))
# Create a smaller intermediate image (to reduce memory usage)
aspect = img.width / img.height
interim_width = min(top_trim, img.width)
interim_height = int(interim_width / aspect) if aspect > 0 else top_trim
img = img.resize((interim_width, interim_height), Image.NEAREST)
# Convert to RGB if needed
if img.mode != 'RGB':
img = img.convert('RGB')
# Crop to square from top center
square_size = min(img.width, img.height)
left = (img.width - square_size) // 2
img = img.crop((left, 0, left + square_size, square_size))
# Final resize to exact thumbnail size with better filter
img = img.resize((350, 350), Image.BILINEAR)
# Save with optimized settings
img.save(thumbnail_path, "JPEG", quality=75, optimize=True)
return thumbnail_path
except Exception as e:
logger.error(f"Error creating thumbnail for {self.get('uuid')}: {str(e)}")
return None
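The method above crops the top strip before any resizing to keep memory use down; a standalone sketch of the same crop-then-downscale idea with Pillow (file paths are placeholders):
# Standalone sketch of the thumbnail approach above; paths are placeholders.
from PIL import Image
with Image.open("screenshot.png") as img:
    img = img.crop((0, 0, img.width, min(500, img.height)))  # keep only the top strip
    img.thumbnail((500, 500), Image.NEAREST)                  # cheap intermediate shrink
    side = min(img.width, img.height)
    left = (img.width - side) // 2
    img = img.crop((left, 0, left + side, side)).convert("RGB")
    img.resize((350, 350), Image.BILINEAR).save("thumbnail.jpeg", "JPEG", quality=75)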
def __get_file_ctime(self, filename):
fname = os.path.join(self.watch_data_dir, filename)
if os.path.isfile(fname):
@@ -663,7 +505,7 @@ class model(watch_base):
if res:
if not csv_writer:
# A file on the disk can be transferred much faster via flask than a string reply
csv_output_filename = f"report-{self.get('uuid')}.csv"
csv_output_filename = 'report.csv'
f = open(os.path.join(self.watch_data_dir, csv_output_filename), 'w')
# @todo some headers in the future
#fieldnames = ['Epoch seconds', 'Date']
@@ -849,11 +691,11 @@ class model(watch_base):
output.append(str(Markup(f"<div class=\"notification-error\"><a href=\"{url_for('settings.notification_logs')}\">{ self.get('last_notification_error') }</a></div>")))
else:
# Lo_Fi version - no app context, can't rely on Jinja2 Markup
# Lo_Fi version
if last_error:
output.append(safe_jinja.render_fully_escaped(last_error))
output.append(str(Markup(last_error)))
if self.get('last_notification_error'):
output.append(safe_jinja.render_fully_escaped(self.get('last_notification_error')))
output.append(str(Markup(self.get('last_notification_error'))))
res = "\n".join(output)
return res

View File

@@ -3,7 +3,6 @@ import uuid
from changedetectionio import strtobool
default_notification_format_for_watch = 'System default'
CONDITIONS_MATCH_LOGIC_DEFAULT = 'ALL'
class watch_base(dict):
@@ -16,14 +15,13 @@ class watch_base(dict):
'body': None,
'browser_steps': [],
'browser_steps_last_error_step': None,
'conditions' : {},
'conditions_match_logic': CONDITIONS_MATCH_LOGIC_DEFAULT,
'check_count': 0,
'check_unique_lines': False, # On change-detected, compare against all history if its something new
'consecutive_filter_failures': 0, # Every time the CSS/xPath filter cannot be located, reset when all is fine.
'content-type': None,
'date_created': None,
'extract_text': [], # Extract text by regex after filters
'extract_title_as_title': False,
'fetch_backend': 'system', # plaintext, playwright etc
'fetch_time': 0.0,
'filter_failure_notification_send': strtobool(os.getenv('FILTER_FAILURE_NOTIFICATION_SEND_DEFAULT', 'True')),
@@ -34,7 +32,6 @@ class watch_base(dict):
'has_ldjson_price_data': None,
'headers': {}, # Extra headers to send
'ignore_text': [], # List of text to ignore when calculating the comparison checksum
'ignore_status_codes': None,
'in_stock_only': True, # Only trigger change on going to instock from out-of-stock
'include_filters': [],
'last_checked': 0,
@@ -49,7 +46,6 @@ class watch_base(dict):
'notification_screenshot': False, # Include the latest screenshot if available and supported by the apprise URL
'notification_title': None,
'notification_urls': [], # List of URLs to add to the notification Queue (Usually AppRise)
'page_title': None, # <title> from the page
'paused': False,
'previous_md5': False,
'previous_md5_before_filters': False, # Used for skipping changedetection entirely
@@ -123,13 +119,12 @@ class watch_base(dict):
}
},
},
'title': None, # An arbitrary field that overrides 'page_title'
'title': None,
'track_ldjson_price_data': None,
'trim_text_whitespace': False,
'remove_duplicate_lines': False,
'trigger_text': [], # List of text or regex to wait for until a change is detected
'url': '',
'use_page_title_in_list': None, # None = use system settings
'uuid': str(uuid.uuid4()),
'webdriver_delay': None,
'webdriver_js_execute_code': None, # Run before change-detection

View File

@@ -1,217 +0,0 @@
import json
from flask import request, current_app
from flask_restful import Resource, marshal_with, fields
from loguru import logger
browser_notifications_fields = {
'success': fields.Boolean,
'message': fields.String,
}
vapid_public_key_fields = {
'publicKey': fields.String,
}
test_notification_fields = {
'success': fields.Boolean,
'message': fields.String,
'sent_count': fields.Integer,
}
class BrowserNotificationsVapidPublicKey(Resource):
"""Get VAPID public key for browser push notifications"""
@marshal_with(vapid_public_key_fields)
def get(self):
try:
from changedetectionio.notification.apprise_plugin.browser_notification_helpers import (
get_vapid_config_from_datastore, convert_pem_public_key_for_browser
)
datastore = current_app.config.get('DATASTORE')
if not datastore:
return {'publicKey': None}, 500
private_key, public_key_pem, contact_email = get_vapid_config_from_datastore(datastore)
if not public_key_pem:
return {'publicKey': None}, 404
# Convert PEM format to URL-safe base64 format for browser
public_key_b64 = convert_pem_public_key_for_browser(public_key_pem)
if public_key_b64:
return {'publicKey': public_key_b64}
else:
return {'publicKey': None}, 500
except Exception as e:
logger.error(f"Failed to get VAPID public key: {e}")
return {'publicKey': None}, 500
class BrowserNotificationsSubscribe(Resource):
"""Subscribe to browser notifications"""
@marshal_with(browser_notifications_fields)
def post(self):
try:
data = request.get_json()
if not data:
return {'success': False, 'message': 'No data provided'}, 400
subscription = data.get('subscription')
if not subscription:
return {'success': False, 'message': 'Subscription is required'}, 400
# Validate subscription format
required_fields = ['endpoint', 'keys']
for field in required_fields:
if field not in subscription:
return {'success': False, 'message': f'Missing subscription field: {field}'}, 400
if 'p256dh' not in subscription['keys'] or 'auth' not in subscription['keys']:
return {'success': False, 'message': 'Missing subscription keys'}, 400
# Get datastore
datastore = current_app.config.get('DATASTORE')
if not datastore:
return {'success': False, 'message': 'Datastore not available'}, 500
# Initialize browser_subscriptions if it doesn't exist
if 'browser_subscriptions' not in datastore.data['settings']['application']:
datastore.data['settings']['application']['browser_subscriptions'] = []
# Check if subscription already exists
existing_subscriptions = datastore.data['settings']['application']['browser_subscriptions']
for existing_sub in existing_subscriptions:
if existing_sub.get('endpoint') == subscription.get('endpoint'):
return {'success': True, 'message': 'Already subscribed to browser notifications'}
# Add new subscription
datastore.data['settings']['application']['browser_subscriptions'].append(subscription)
datastore.needs_write = True
logger.info(f"New browser notification subscription: {subscription.get('endpoint')}")
return {'success': True, 'message': 'Successfully subscribed to browser notifications'}
except Exception as e:
logger.error(f"Failed to subscribe to browser notifications: {e}")
return {'success': False, 'message': f'Subscription failed: {str(e)}'}, 500
class BrowserNotificationsUnsubscribe(Resource):
"""Unsubscribe from browser notifications"""
@marshal_with(browser_notifications_fields)
def post(self):
try:
data = request.get_json()
if not data:
return {'success': False, 'message': 'No data provided'}, 400
subscription = data.get('subscription')
if not subscription or not subscription.get('endpoint'):
return {'success': False, 'message': 'Valid subscription is required'}, 400
# Get datastore
datastore = current_app.config.get('DATASTORE')
if not datastore:
return {'success': False, 'message': 'Datastore not available'}, 500
# Check if subscriptions exist
browser_subscriptions = datastore.data.get('settings', {}).get('application', {}).get('browser_subscriptions', [])
if not browser_subscriptions:
return {'success': True, 'message': 'No subscriptions found'}
# Remove subscription with matching endpoint
endpoint = subscription.get('endpoint')
original_count = len(browser_subscriptions)
datastore.data['settings']['application']['browser_subscriptions'] = [
sub for sub in browser_subscriptions
if sub.get('endpoint') != endpoint
]
removed_count = original_count - len(datastore.data['settings']['application']['browser_subscriptions'])
if removed_count > 0:
datastore.needs_write = True
logger.info(f"Removed {removed_count} browser notification subscription(s)")
return {'success': True, 'message': 'Successfully unsubscribed from browser notifications'}
else:
return {'success': True, 'message': 'No matching subscription found'}
except Exception as e:
logger.error(f"Failed to unsubscribe from browser notifications: {e}")
return {'success': False, 'message': f'Unsubscribe failed: {str(e)}'}, 500
class BrowserNotificationsTest(Resource):
"""Send a test browser notification"""
@marshal_with(test_notification_fields)
def post(self):
try:
data = request.get_json()
if not data:
return {'success': False, 'message': 'No data provided', 'sent_count': 0}, 400
title = data.get('title', 'Test Notification')
body = data.get('body', 'This is a test notification from changedetection.io')
# Get datastore to check if subscriptions exist
datastore = current_app.config.get('DATASTORE')
if not datastore:
return {'success': False, 'message': 'Datastore not available', 'sent_count': 0}, 500
# Check if there are subscriptions before attempting to send
browser_subscriptions = datastore.data.get('settings', {}).get('application', {}).get('browser_subscriptions', [])
if not browser_subscriptions:
return {'success': False, 'message': 'No subscriptions found', 'sent_count': 0}, 404
# Use the apprise handler directly
try:
from changedetectionio.notification.apprise_plugin.custom_handlers import apprise_browser_notification_handler
# Call the apprise handler with test data
success = apprise_browser_notification_handler(
body=body,
title=title,
notify_type='info',
meta={'url': 'browser://test'}
)
# Count how many subscriptions we have after sending (some may have been removed if invalid)
final_subscriptions = datastore.data.get('settings', {}).get('application', {}).get('browser_subscriptions', [])
sent_count = len(browser_subscriptions) # Original count
if success:
return {
'success': True,
'message': f'Test notification sent successfully to {sent_count} subscriber(s)',
'sent_count': sent_count
}
else:
return {
'success': False,
'message': 'Failed to send test notification',
'sent_count': 0
}, 500
except ImportError:
return {'success': False, 'message': 'Browser notification handler not available', 'sent_count': 0}, 500
except Exception as e:
logger.error(f"Failed to send test browser notification: {e}")
return {'success': False, 'message': f'Test failed: {str(e)}', 'sent_count': 0}, 500

View File

@@ -1,273 +0,0 @@
"""
Browser notification helpers for Web Push API
Shared utility functions for VAPID key handling and notification sending
"""
import json
import re
import time
from loguru import logger
def convert_pem_private_key_for_pywebpush(private_key):
"""
Convert PEM private key to the format that pywebpush expects
Args:
private_key: PEM private key string or already converted key
Returns:
Vapid instance for pywebpush (avoids PEM parsing compatibility issues)
"""
try:
from py_vapid import Vapid
import tempfile
import os
# If we get a string, assume it's PEM and create a Vapid instance from it
if isinstance(private_key, str) and private_key.startswith('-----BEGIN'):
# Write PEM to temporary file and load with Vapid.from_file
with tempfile.NamedTemporaryFile(mode='w', suffix='.pem', delete=False) as tmp_file:
tmp_file.write(private_key)
tmp_file.flush()
temp_path = tmp_file.name
try:
# Load using Vapid.from_file - this is more compatible with pywebpush
vapid_instance = Vapid.from_file(temp_path)
os.unlink(temp_path) # Clean up
logger.debug("Successfully created Vapid instance from PEM")
return vapid_instance
except Exception as e:
os.unlink(temp_path) # Clean up even on error
logger.error(f"Failed to create Vapid instance from PEM: {e}")
# Fall back to returning the original PEM string
return private_key
else:
# Return as-is if not a PEM string
return private_key
except Exception as e:
logger.error(f"Failed to convert private key: {e}")
return private_key
def convert_pem_public_key_for_browser(public_key_pem):
"""
Convert PEM public key to URL-safe base64 format for browser applicationServerKey
Args:
public_key_pem: PEM public key string
Returns:
URL-safe base64 encoded public key without padding
"""
try:
from cryptography.hazmat.primitives import serialization
import base64
# Parse PEM directly using cryptography library
pem_bytes = public_key_pem.encode() if isinstance(public_key_pem, str) else public_key_pem
# Load the public key from PEM
public_key_crypto = serialization.load_pem_public_key(pem_bytes)
# Get the raw public key bytes in uncompressed format (what browsers expect)
public_key_raw = public_key_crypto.public_bytes(
encoding=serialization.Encoding.X962,
format=serialization.PublicFormat.UncompressedPoint
)
# Convert to URL-safe base64 (remove padding)
public_key_b64 = base64.urlsafe_b64encode(public_key_raw).decode('ascii').rstrip('=')
return public_key_b64
except Exception as e:
logger.error(f"Failed to convert public key format: {e}")
return None
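A quick way to see what the conversion above produces, using a throwaway P-256 key generated on the fly (not one of the application's VAPID keys):
# Sketch: generate a disposable P-256 key and convert its public half the same
# way convert_pem_public_key_for_browser() does. Not a real project key.
from cryptography.hazmat.primitives.asymmetric import ec
from cryptography.hazmat.primitives import serialization
private_key = ec.generate_private_key(ec.SECP256R1())
public_pem = private_key.public_key().public_bytes(
    encoding=serialization.Encoding.PEM,
    format=serialization.PublicFormat.SubjectPublicKeyInfo,
).decode()
print(convert_pem_public_key_for_browser(public_pem))  # ~87 chars of URL-safe base64, usable as applicationServerKey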
def send_push_notifications(subscriptions, notification_payload, private_key, contact_email, datastore):
"""
Send push notifications to a list of subscriptions
Args:
subscriptions: List of push subscriptions
notification_payload: Dict with notification data (title, body, etc.)
private_key: VAPID private key (will be converted if needed)
contact_email: Contact email for VAPID claims
datastore: Datastore object for updating subscriptions
Returns:
Tuple of (success_count, total_count)
"""
try:
from pywebpush import webpush, WebPushException
except ImportError:
logger.error("pywebpush not available - cannot send browser notifications")
return 0, len(subscriptions)
# Convert private key to format pywebpush expects
private_key_for_push = convert_pem_private_key_for_pywebpush(private_key)
success_count = 0
total_count = len(subscriptions)
# Send to all subscriptions
for subscription in subscriptions[:]: # Copy list to avoid modification issues
try:
webpush(
subscription_info=subscription,
data=json.dumps(notification_payload),
vapid_private_key=private_key_for_push,
vapid_claims={
"sub": f"mailto:{contact_email}",
"aud": f"https://{subscription['endpoint'].split('/')[2]}"
}
)
success_count += 1
except WebPushException as e:
logger.warning(f"Failed to send browser notification to subscription: {e}")
# Remove invalid subscriptions (410 = Gone, 404 = Not Found)
if e.response and e.response.status_code in [404, 410]:
logger.info("Removing invalid browser notification subscription")
try:
subscriptions.remove(subscription)
datastore.needs_write = True
except ValueError:
pass # Already removed
except Exception as e:
logger.error(f"Unexpected error sending browser notification: {e}")
return success_count, total_count
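Roughly how the helper above is driven; the subscription dict mimics what the browser's PushManager returns, and every value here is a placeholder:
# Hypothetical call into send_push_notifications(); subscription values are fake.
payload = create_notification_payload("Change detected", "The watched page changed")
subs = [{
    "endpoint": "https://fcm.googleapis.com/fcm/send/abc123",  # placeholder
    "keys": {"p256dh": "BExample...", "auth": "authExample"},   # placeholders
}]
sent, total = send_push_notifications(
    subscriptions=subs,
    notification_payload=payload,
    private_key=vapid_private_key_pem,  # PEM string from the application's VAPID settings
    contact_email="admin@example.com",
    datastore=datastore,                # the running application's datastore
)
logger.info(f"browser push: {sent}/{total} delivered")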
def create_notification_payload(title, body, icon_path=None):
"""
Create a standard notification payload
Args:
title: Notification title
body: Notification body
icon_path: Optional icon path (defaults to favicon)
Returns:
Dict with notification payload
"""
return {
'title': title,
'body': body,
'icon': icon_path or '/static/favicons/favicon-32x32.png',
'badge': '/static/favicons/favicon-32x32.png',
'timestamp': int(time.time() * 1000),
}
def get_vapid_config_from_datastore(datastore):
"""
Get VAPID configuration from datastore with proper error handling
Args:
datastore: Datastore object
Returns:
Tuple of (private_key, public_key, contact_email) or (None, None, None) if error
"""
try:
if not datastore:
return None, None, None
vapid_config = datastore.data.get('settings', {}).get('application', {}).get('vapid', {})
private_key = vapid_config.get('private_key')
public_key = vapid_config.get('public_key')
contact_email = vapid_config.get('contact_email', 'citizen@example.com')
return private_key, public_key, contact_email
except Exception as e:
logger.error(f"Failed to get VAPID config from datastore: {e}")
return None, None, None
def get_browser_subscriptions(datastore):
"""
Get browser subscriptions from datastore
Args:
datastore: Datastore object
Returns:
List of subscriptions
"""
try:
if not datastore:
return []
return datastore.data.get('settings', {}).get('application', {}).get('browser_subscriptions', [])
except Exception as e:
logger.error(f"Failed to get browser subscriptions: {e}")
return []
def save_browser_subscriptions(datastore, subscriptions):
"""
Save browser subscriptions to datastore
Args:
datastore: Datastore object
subscriptions: List of subscriptions to save
"""
try:
if not datastore:
return
# Ensure the settings structure exists
if 'settings' not in datastore.data:
datastore.data['settings'] = {}
if 'application' not in datastore.data['settings']:
datastore.data['settings']['application'] = {}
datastore.data['settings']['application']['browser_subscriptions'] = subscriptions
datastore.needs_write = True
except Exception as e:
logger.error(f"Failed to save browser subscriptions: {e}")
def create_error_response(message, sent_count=0, status_code=500):
"""
Create standardized error response for API endpoints
Args:
message: Error message
sent_count: Number of notifications sent (for test endpoints)
status_code: HTTP status code
Returns:
Tuple of (response_dict, status_code)
"""
return {'success': False, 'message': message, 'sent_count': sent_count}, status_code
def create_success_response(message, sent_count=None):
"""
Create standardized success response for API endpoints
Args:
message: Success message
sent_count: Number of notifications sent (optional)
Returns:
Response dict
"""
response = {'success': True, 'message': message}
if sent_count is not None:
response['sent_count'] = sent_count
return response

View File

@@ -1,6 +1,5 @@
import json
import re
import time
from urllib.parse import unquote_plus
import requests
@@ -111,80 +110,3 @@ def apprise_http_custom_handler(
except Exception as e:
logger.error(f"Unexpected error occurred while sending custom notification to {url}: {e}")
return False
@notify(on="browser")
def apprise_browser_notification_handler(
body: str,
title: str,
notify_type: str,
meta: dict,
*args,
**kwargs,
) -> bool:
"""
Browser push notification handler for browser:// URLs
Ignores anything after browser:// and uses single default channel
"""
try:
from pywebpush import webpush, WebPushException
from flask import current_app
# Get VAPID keys from app settings
try:
datastore = current_app.config.get('DATASTORE')
if not datastore:
logger.error("No datastore available for browser notifications")
return False
vapid_config = datastore.data.get('settings', {}).get('application', {}).get('vapid', {})
private_key = vapid_config.get('private_key')
public_key = vapid_config.get('public_key')
contact_email = vapid_config.get('contact_email', 'admin@changedetection.io')
if not private_key or not public_key:
logger.error("VAPID keys not configured for browser notifications")
return False
except Exception as e:
logger.error(f"Failed to get VAPID configuration: {e}")
return False
# Get subscriptions from datastore
browser_subscriptions = datastore.data.get('settings', {}).get('application', {}).get('browser_subscriptions', [])
if not browser_subscriptions:
logger.info("No browser subscriptions found")
return True # Not an error - just no subscribers
# Import helper functions
try:
from .browser_notification_helpers import create_notification_payload, send_push_notifications
except ImportError:
logger.error("Browser notification helpers not available")
return False
# Prepare notification payload
notification_payload = create_notification_payload(title, body)
# Send notifications using shared helper
success_count, total_count = send_push_notifications(
subscriptions=browser_subscriptions,
notification_payload=notification_payload,
private_key=private_key,
contact_email=contact_email,
datastore=datastore
)
# Update datastore with cleaned subscriptions
datastore.data['settings']['application']['browser_subscriptions'] = browser_subscriptions
logger.info(f"Sent browser notifications: {success_count}/{total_count} successful")
return success_count > 0
except ImportError:
logger.error("pywebpush not available - cannot send browser notifications")
return False
except Exception as e:
logger.error(f"Unexpected error in browser notification handler: {e}")
return False
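For context, a minimal sketch of how Apprise reaches the decorated handler above once this module has been imported; in the real application this runs inside the Flask app context so the handler can reach the datastore, and the strings below are placeholders:
# Importing the module registers the browser:// scheme via @notify(on="browser")
from changedetectionio.notification.apprise_plugin import custom_handlers  # noqa: F401
import apprise
apobj = apprise.Apprise()
apobj.add("browser://")                          # anything after browser:// is ignored
apobj.notify(title="Test", body="Page changed")  # routed to apprise_browser_notification_handler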

View File

@@ -8,7 +8,7 @@ def process_notification(n_object, datastore):
from changedetectionio.safe_jinja import render as jinja_render
from . import default_notification_format_for_watch, default_notification_format, valid_notification_formats
# be sure its registered
from .apprise_plugin.custom_handlers import apprise_http_custom_handler, apprise_browser_notification_handler
from .apprise_plugin.custom_handlers import apprise_http_custom_handler
now = time.time()
if n_object.get('notification_timestamp'):
@@ -149,7 +149,7 @@ def create_notification_parameters(n_object, datastore):
uuid = n_object['uuid'] if 'uuid' in n_object else ''
if uuid:
watch_title = datastore.data['watching'][uuid].label
watch_title = datastore.data['watching'][uuid].get('title', '')
tag_list = []
tags = datastore.get_all_tags_for_watch(uuid)
if tags:

View File

@@ -146,19 +146,18 @@ class difference_detection_processor():
# And here we go! call the right browser with browser-specific settings
empty_pages_are_a_change = self.datastore.data['settings']['application'].get('empty_pages_are_a_change', False)
# All fetchers are now async
await self.fetcher.run(
current_include_filters=self.watch.get('include_filters'),
empty_pages_are_a_change=empty_pages_are_a_change,
fetch_favicon=self.watch.favicon_is_expired(),
ignore_status_codes=ignore_status_codes,
is_binary=is_binary,
request_body=request_body,
request_headers=request_headers,
request_method=request_method,
timeout=timeout,
url=url,
)
await self.fetcher.run(url=url,
timeout=timeout,
request_headers=request_headers,
request_body=request_body,
request_method=request_method,
ignore_status_codes=ignore_status_codes,
current_include_filters=self.watch.get('include_filters'),
is_binary=is_binary,
empty_pages_are_a_change=empty_pages_are_a_change
)
#@todo .quit here could go on close object, so we can run JS if change-detected
self.fetcher.quit(watch=self.watch)

View File

@@ -251,7 +251,8 @@ class perform_site_check(difference_detection_processor):
update_obj["last_check_status"] = self.fetcher.get_last_status_code()
# 615 Extract text by regex
extract_text = list(dict.fromkeys(watch.get('extract_text', []) + self.datastore.get_tag_overrides_for_watch(uuid=watch.get('uuid'), attr='extract_text')))
extract_text = watch.get('extract_text', [])
extract_text += self.datastore.get_tag_overrides_for_watch(uuid=watch.get('uuid'), attr='extract_text')
if len(extract_text) > 0:
regex_matched_output = []
for s_re in extract_text:
@@ -310,7 +311,8 @@ class perform_site_check(difference_detection_processor):
############ Blocking rules, after checksum #################
blocked = False
trigger_text = list(dict.fromkeys(watch.get('trigger_text', []) + self.datastore.get_tag_overrides_for_watch(uuid=watch.get('uuid'), attr='trigger_text')))
trigger_text = watch.get('trigger_text', [])
trigger_text += self.datastore.get_tag_overrides_for_watch(uuid=watch.get('uuid'), attr='trigger_text')
if len(trigger_text):
# Assume blocked
blocked = True
@@ -324,7 +326,8 @@ class perform_site_check(difference_detection_processor):
if result:
blocked = False
text_should_not_be_present = list(dict.fromkeys(watch.get('text_should_not_be_present', []) + self.datastore.get_tag_overrides_for_watch(uuid=watch.get('uuid'), attr='text_should_not_be_present')))
text_should_not_be_present = watch.get('text_should_not_be_present', [])
text_should_not_be_present += self.datastore.get_tag_overrides_for_watch(uuid=watch.get('uuid'), attr='text_should_not_be_present')
if len(text_should_not_be_present):
# If anything matched, then we should block a change from happening
result = html_tools.strip_ignore_text(content=str(stripped_text_from_html),
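The list(dict.fromkeys(...)) pattern used in these hunks merges the watch-level rules with the tag-level overrides while dropping duplicates and keeping first-seen order; a tiny illustration:
# Order-preserving de-duplication, as used above for extract_text / trigger_text.
watch_rules = ["price", "in stock", "price"]
tag_rules = ["in stock", "sold out"]
print(list(dict.fromkeys(watch_rules + tag_rules)))  # ['price', 'in stock', 'sold out']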

View File

@@ -1,435 +0,0 @@
from blinker import signal
from loguru import logger
from typing import Dict, List, Any, Optional
import heapq
import queue
import threading
try:
import janus
except ImportError:
logger.critical(f"CRITICAL: janus library is required. Install with: pip install janus")
raise
class RecheckPriorityQueue:
"""
Ultra-reliable priority queue using janus for async/sync bridging.
CRITICAL DESIGN NOTE: Both sync_q and async_q are required because:
- sync_q: Used by Flask routes, ticker threads, and other synchronous code
- async_q: Used by async workers (the actual fetchers/processors) and coroutines
DO NOT REMOVE EITHER INTERFACE - they bridge different execution contexts:
- Synchronous code (Flask, threads) cannot use async methods without blocking
- Async code cannot use sync methods without blocking the event loop
- janus provides the only safe bridge between these two worlds
Attempting to unify to async-only would require:
- Converting all Flask routes to async (major breaking change)
- Using asyncio.run() in sync contexts (causes deadlocks)
- Thread-pool wrapping (adds complexity and overhead)
Minimal implementation focused on reliability:
- Pure janus for sync/async bridge
- Thread-safe priority ordering
- Bulletproof error handling with critical logging
"""
def __init__(self, maxsize: int = 0):
try:
self._janus_queue = janus.Queue(maxsize=maxsize)
# BOTH interfaces required - see class docstring for why
self.sync_q = self._janus_queue.sync_q # Flask routes, ticker thread
self.async_q = self._janus_queue.async_q # Async workers
# Priority storage - thread-safe
self._priority_items = []
self._lock = threading.RLock()
# Signals for UI updates
self.queue_length_signal = signal('queue_length')
logger.debug("RecheckPriorityQueue initialized successfully")
except Exception as e:
logger.critical(f"CRITICAL: Failed to initialize RecheckPriorityQueue: {str(e)}")
raise
# SYNC INTERFACE (for ticker thread)
def put(self, item, block: bool = True, timeout: Optional[float] = None):
"""Thread-safe sync put with priority ordering"""
try:
# Add to priority storage
with self._lock:
heapq.heappush(self._priority_items, item)
# Notify via janus sync queue
self.sync_q.put(True, block=block, timeout=timeout)
# Emit signals
self._emit_put_signals(item)
logger.debug(f"Successfully queued item: {self._get_item_uuid(item)}")
return True
except Exception as e:
logger.critical(f"CRITICAL: Failed to put item {self._get_item_uuid(item)}: {str(e)}")
# Remove from priority storage if janus put failed
try:
with self._lock:
if item in self._priority_items:
self._priority_items.remove(item)
heapq.heapify(self._priority_items)
except Exception as cleanup_e:
logger.critical(f"CRITICAL: Failed to cleanup after put failure: {str(e)}")
return False
def get(self, block: bool = True, timeout: Optional[float] = None):
"""Thread-safe sync get with priority ordering"""
try:
# Wait for notification
self.sync_q.get(block=block, timeout=timeout)
# Get highest priority item
with self._lock:
if not self._priority_items:
logger.critical(f"CRITICAL: Queue notification received but no priority items available")
raise Exception("Priority queue inconsistency")
item = heapq.heappop(self._priority_items)
# Emit signals
self._emit_get_signals()
logger.debug(f"Successfully retrieved item: {self._get_item_uuid(item)}")
return item
except Exception as e:
logger.critical(f"CRITICAL: Failed to get item from queue: {str(e)}")
raise
# ASYNC INTERFACE (for workers)
async def async_put(self, item):
"""Pure async put with priority ordering"""
try:
# Add to priority storage
with self._lock:
heapq.heappush(self._priority_items, item)
# Notify via janus async queue
await self.async_q.put(True)
# Emit signals
self._emit_put_signals(item)
logger.debug(f"Successfully async queued item: {self._get_item_uuid(item)}")
return True
except Exception as e:
logger.critical(f"CRITICAL: Failed to async put item {self._get_item_uuid(item)}: {str(e)}")
# Remove from priority storage if janus put failed
try:
with self._lock:
if item in self._priority_items:
self._priority_items.remove(item)
heapq.heapify(self._priority_items)
except Exception as cleanup_e:
logger.critical(f"CRITICAL: Failed to cleanup after async put failure: {str(e)}")
return False
async def async_get(self):
"""Pure async get with priority ordering"""
try:
# Wait for notification
await self.async_q.get()
# Get highest priority item
with self._lock:
if not self._priority_items:
logger.critical(f"CRITICAL: Async queue notification received but no priority items available")
raise Exception("Priority queue inconsistency")
item = heapq.heappop(self._priority_items)
# Emit signals
self._emit_get_signals()
logger.debug(f"Successfully async retrieved item: {self._get_item_uuid(item)}")
return item
except Exception as e:
logger.critical(f"CRITICAL: Failed to async get item from queue: {str(e)}")
raise
# UTILITY METHODS
def qsize(self) -> int:
"""Get current queue size"""
try:
with self._lock:
return len(self._priority_items)
except Exception as e:
logger.critical(f"CRITICAL: Failed to get queue size: {str(e)}")
return 0
def empty(self) -> bool:
"""Check if queue is empty"""
return self.qsize() == 0
def close(self):
"""Close the janus queue"""
try:
self._janus_queue.close()
logger.debug("RecheckPriorityQueue closed successfully")
except Exception as e:
logger.critical(f"CRITICAL: Failed to close RecheckPriorityQueue: {str(e)}")
# COMPATIBILITY METHODS (from original implementation)
@property
def queue(self):
"""Provide compatibility with original queue access"""
try:
with self._lock:
return list(self._priority_items)
except Exception as e:
logger.critical(f"CRITICAL: Failed to get queue list: {str(e)}")
return []
def get_uuid_position(self, target_uuid: str) -> Dict[str, Any]:
"""Find position of UUID in queue"""
try:
with self._lock:
queue_list = list(self._priority_items)
total_items = len(queue_list)
if total_items == 0:
return {'position': None, 'total_items': 0, 'priority': None, 'found': False}
# Find target item
for item in queue_list:
if (hasattr(item, 'item') and isinstance(item.item, dict) and
item.item.get('uuid') == target_uuid):
# Count items with higher priority
position = sum(1 for other in queue_list if other.priority < item.priority)
return {
'position': position,
'total_items': total_items,
'priority': item.priority,
'found': True
}
return {'position': None, 'total_items': total_items, 'priority': None, 'found': False}
except Exception as e:
logger.critical(f"CRITICAL: Failed to get UUID position for {target_uuid}: {str(e)}")
return {'position': None, 'total_items': 0, 'priority': None, 'found': False}
def get_all_queued_uuids(self, limit: Optional[int] = None, offset: int = 0) -> Dict[str, Any]:
"""Get all queued UUIDs with pagination"""
try:
with self._lock:
queue_list = sorted(self._priority_items) # Sort by priority
total_items = len(queue_list)
if total_items == 0:
return {'items': [], 'total_items': 0, 'returned_items': 0, 'has_more': False}
# Apply pagination
end_idx = min(offset + limit, total_items) if limit else total_items
items_to_process = queue_list[offset:end_idx]
result = []
for position, item in enumerate(items_to_process, start=offset):
if (hasattr(item, 'item') and isinstance(item.item, dict) and
'uuid' in item.item):
result.append({
'uuid': item.item['uuid'],
'position': position,
'priority': item.priority
})
return {
'items': result,
'total_items': total_items,
'returned_items': len(result),
'has_more': (offset + len(result)) < total_items
}
except Exception as e:
logger.critical(f"CRITICAL: Failed to get all queued UUIDs: {str(e)}")
return {'items': [], 'total_items': 0, 'returned_items': 0, 'has_more': False}
def get_queue_summary(self) -> Dict[str, Any]:
"""Get queue summary statistics"""
try:
with self._lock:
queue_list = list(self._priority_items)
total_items = len(queue_list)
if total_items == 0:
return {
'total_items': 0, 'priority_breakdown': {},
'immediate_items': 0, 'clone_items': 0, 'scheduled_items': 0
}
immediate_items = clone_items = scheduled_items = 0
priority_counts = {}
for item in queue_list:
priority = item.priority
priority_counts[priority] = priority_counts.get(priority, 0) + 1
if priority == 1:
immediate_items += 1
elif priority == 5:
clone_items += 1
elif priority > 100:
scheduled_items += 1
return {
'total_items': total_items,
'priority_breakdown': priority_counts,
'immediate_items': immediate_items,
'clone_items': clone_items,
'scheduled_items': scheduled_items,
'min_priority': min(priority_counts.keys()) if priority_counts else None,
'max_priority': max(priority_counts.keys()) if priority_counts else None
}
except Exception as e:
logger.critical(f"CRITICAL: Failed to get queue summary: {str(e)}")
return {'total_items': 0, 'priority_breakdown': {}, 'immediate_items': 0,
'clone_items': 0, 'scheduled_items': 0}
# PRIVATE METHODS
def _get_item_uuid(self, item) -> str:
"""Safely extract UUID from item for logging"""
try:
if hasattr(item, 'item') and isinstance(item.item, dict):
return item.item.get('uuid', 'unknown')
except Exception:
pass
return 'unknown'
def _emit_put_signals(self, item):
"""Emit signals when item is added"""
try:
# Watch update signal
if hasattr(item, 'item') and isinstance(item.item, dict) and 'uuid' in item.item:
watch_check_update = signal('watch_check_update')
if watch_check_update:
watch_check_update.send(watch_uuid=item.item['uuid'])
# Queue length signal
if self.queue_length_signal:
self.queue_length_signal.send(length=self.qsize())
except Exception as e:
logger.critical(f"CRITICAL: Failed to emit put signals: {str(e)}")
def _emit_get_signals(self):
"""Emit signals when item is removed"""
try:
if self.queue_length_signal:
self.queue_length_signal.send(length=self.qsize())
except Exception as e:
logger.critical(f"CRITICAL: Failed to emit get signals: {str(e)}")
class NotificationQueue:
"""
Ultra-reliable notification queue using pure janus.
CRITICAL DESIGN NOTE: Both sync_q and async_q are required because:
- sync_q: Used by Flask routes, ticker threads, and other synchronous code
- async_q: Used by async workers and coroutines
DO NOT REMOVE EITHER INTERFACE - they bridge different execution contexts.
See RecheckPriorityQueue docstring above for detailed explanation.
Simple wrapper around janus with bulletproof error handling.
"""
def __init__(self, maxsize: int = 0):
try:
self._janus_queue = janus.Queue(maxsize=maxsize)
# BOTH interfaces required - see class docstring for why
self.sync_q = self._janus_queue.sync_q # Flask routes, threads
self.async_q = self._janus_queue.async_q # Async workers
self.notification_event_signal = signal('notification_event')
logger.debug("NotificationQueue initialized successfully")
except Exception as e:
logger.critical(f"CRITICAL: Failed to initialize NotificationQueue: {str(e)}")
raise
def put(self, item: Dict[str, Any], block: bool = True, timeout: Optional[float] = None):
"""Thread-safe sync put with signal emission"""
try:
self.sync_q.put(item, block=block, timeout=timeout)
self._emit_notification_signal(item)
logger.debug(f"Successfully queued notification: {item.get('uuid', 'unknown')}")
return True
except Exception as e:
logger.critical(f"CRITICAL: Failed to put notification {item.get('uuid', 'unknown')}: {str(e)}")
return False
async def async_put(self, item: Dict[str, Any]):
"""Pure async put with signal emission"""
try:
await self.async_q.put(item)
self._emit_notification_signal(item)
logger.debug(f"Successfully async queued notification: {item.get('uuid', 'unknown')}")
return True
except Exception as e:
logger.critical(f"CRITICAL: Failed to async put notification {item.get('uuid', 'unknown')}: {str(e)}")
return False
def get(self, block: bool = True, timeout: Optional[float] = None):
"""Thread-safe sync get"""
try:
return self.sync_q.get(block=block, timeout=timeout)
except queue.Empty as e:
raise e
except Exception as e:
logger.critical(f"CRITICAL: Failed to get notification: {str(e)}")
raise e
async def async_get(self):
"""Pure async get"""
try:
return await self.async_q.get()
except queue.Empty as e:
raise e
except Exception as e:
logger.critical(f"CRITICAL: Failed to async get notification: {str(e)}")
raise e
def qsize(self) -> int:
"""Get current queue size"""
try:
return self.sync_q.qsize()
except Exception as e:
logger.critical(f"CRITICAL: Failed to get notification queue size: {str(e)}")
return 0
def empty(self) -> bool:
"""Check if queue is empty"""
return self.qsize() == 0
def close(self):
"""Close the janus queue"""
try:
self._janus_queue.close()
logger.debug("NotificationQueue closed successfully")
except Exception as e:
logger.critical(f"CRITICAL: Failed to close NotificationQueue: {str(e)}")
def _emit_notification_signal(self, item: Dict[str, Any]):
"""Emit notification signal"""
try:
if self.notification_event_signal and isinstance(item, dict):
watch_uuid = item.get('uuid')
if watch_uuid:
self.notification_event_signal.send(watch_uuid=watch_uuid)
else:
self.notification_event_signal.send()
except Exception as e:
logger.critical(f"CRITICAL: Failed to emit notification signal: {str(e)}")

View File

@@ -29,9 +29,6 @@ class SignalHandler:
watch_delete_signal = signal('watch_deleted')
watch_delete_signal.connect(self.handle_deleted_signal, weak=False)
watch_favicon_bumped_signal = signal('watch_favicon_bump')
watch_favicon_bumped_signal.connect(self.handle_watch_bumped_favicon_signal, weak=False)
# Connect to the notification_event signal
notification_event_signal = signal('notification_event')
notification_event_signal.connect(self.handle_notification_event, weak=False)
@@ -40,7 +37,7 @@ class SignalHandler:
# Create and start the queue update thread using standard threading
import threading
self.polling_emitter_thread = threading.Thread(
target=self.polling_emit_running_or_queued_watches_threaded,
target=self.polling_emit_running_or_queued_watches_threaded,
daemon=True
)
self.polling_emitter_thread.start()
@@ -72,16 +69,6 @@ class SignalHandler:
else:
logger.warning(f"Watch UUID {watch_uuid} not found in datastore")
def handle_watch_bumped_favicon_signal(self, *args, **kwargs):
watch_uuid = kwargs.get('watch_uuid')
if watch_uuid:
# Emit the queue size to all connected clients
self.socketio_instance.emit("watch_bumped_favicon", {
"uuid": watch_uuid,
"event_timestamp": time.time()
})
logger.debug(f"Watch UUID {watch_uuid} got its favicon updated")
def handle_deleted_signal(self, *args, **kwargs):
watch_uuid = kwargs.get('watch_uuid')
if watch_uuid:
@@ -118,38 +105,39 @@ class SignalHandler:
"watch_uuid": watch_uuid,
"event_timestamp": time.time()
})
logger.trace(f"Socket.IO: Emitted notification_event for watch UUID {watch_uuid}")
except Exception as e:
logger.error(f"Socket.IO error in handle_notification_event: {str(e)}")
def polling_emit_running_or_queued_watches_threaded(self):
"""Threading version of polling for Windows compatibility"""
import time
import threading
logger.info("Queue update thread started (threading mode)")
# Import here to avoid circular imports
from changedetectionio.flask_app import app
from changedetectionio import worker_handler
watch_check_update = signal('watch_check_update')
# Track previous state to avoid unnecessary emissions
previous_running_uuids = set()
# Run until app shutdown - check exit flag more frequently for fast shutdown
exit_event = getattr(app.config, 'exit', threading.Event())
while not exit_event.is_set():
try:
# Get current running UUIDs from async workers
running_uuids = set(worker_handler.get_running_uuids())
# Only send updates for UUIDs that changed state
newly_running = running_uuids - previous_running_uuids
no_longer_running = previous_running_uuids - running_uuids
# Send updates for newly running UUIDs (but exit fast if shutdown requested)
for uuid in newly_running:
if exit_event.is_set():
@@ -158,7 +146,7 @@ class SignalHandler:
with app.app_context():
watch_check_update.send(app_context=app, watch_uuid=uuid)
time.sleep(0.01) # Small yield
# Send updates for UUIDs that finished processing (but exit fast if shutdown requested)
if not exit_event.is_set():
for uuid in no_longer_running:
@@ -168,16 +156,16 @@ class SignalHandler:
with app.app_context():
watch_check_update.send(app_context=app, watch_uuid=uuid)
time.sleep(0.01) # Small yield
# Update tracking for next iteration
previous_running_uuids = running_uuids
# Sleep between polling cycles, but check exit flag every 0.5 seconds for fast shutdown
for _ in range(20): # 20 * 0.5 = 10 seconds total
if exit_event.is_set():
break
time.sleep(0.5)
except Exception as e:
logger.error(f"Error in threading polling: {str(e)}")
# Even during error recovery, check for exit quickly
@@ -185,11 +173,11 @@ class SignalHandler:
if exit_event.is_set():
break
time.sleep(0.5)
# Check if we're in pytest environment - if so, be more gentle with logging
import sys
in_pytest = "pytest" in sys.modules or "PYTEST_CURRENT_TEST" in os.environ
if not in_pytest:
logger.info("Queue update thread stopped (threading mode)")
@@ -220,20 +208,20 @@ def handle_watch_update(socketio, **kwargs):
watch_data = {
'checking_now': True if watch.get('uuid') in running_uuids else False,
'error_text': error_texts,
'event_timestamp': time.time(),
'fetch_time': watch.get('fetch_time'),
'has_error': True if error_texts else False,
'has_favicon': True if watch.get_favicon_filename() else False,
'history_n': watch.history_n,
'last_changed_text': timeago.format(int(watch.last_changed), time.time()) if watch.history_n >= 2 and int(watch.last_changed) > 0 else 'Not yet',
'last_changed': watch.get('last_changed'),
'last_checked': watch.get('last_checked'),
'error_text': error_texts,
'history_n': watch.history_n,
'last_checked_text': _jinja2_filter_datetime(watch),
'notification_muted': True if watch.get('notification_muted') else False,
'paused': True if watch.get('paused') else False,
'last_changed_text': timeago.format(int(watch.last_changed), time.time()) if watch.history_n >= 2 and int(watch.last_changed) > 0 else 'Not yet',
'queued': True if watch.get('uuid') in queue_list else False,
'paused': True if watch.get('paused') else False,
'notification_muted': True if watch.get('notification_muted') else False,
'unviewed': watch.has_unviewed,
'uuid': watch.get('uuid'),
'event_timestamp': time.time()
}
errored_count = 0
@@ -263,15 +251,15 @@ def init_socketio(app, datastore):
"""Initialize SocketIO with the main Flask app"""
import platform
import sys
# Platform-specific async_mode selection for better stability
system = platform.system().lower()
python_version = sys.version_info
# Check for SocketIO mode configuration via environment variable
# Default is 'threading' for best cross-platform compatibility
socketio_mode = os.getenv('SOCKETIO_MODE', 'threading').lower()
if socketio_mode == 'gevent':
# Use gevent mode (higher concurrency but platform limitations)
try:
@@ -289,7 +277,7 @@ def init_socketio(app, datastore):
# Invalid mode specified, use default
async_mode = 'threading'
logger.warning(f"Invalid SOCKETIO_MODE='{socketio_mode}', using default {async_mode} mode for Socket.IO")
# Log platform info for debugging
logger.info(f"Platform: {system}, Python: {python_version.major}.{python_version.minor}, Socket.IO mode: {async_mode}")
@@ -327,6 +315,7 @@ def init_socketio(app, datastore):
emit_flash=False
)
@socketio.on('connect')
def handle_connect():
"""Handle client connection"""
@@ -404,4 +393,4 @@ def init_socketio(app, datastore):
logger.info("Socket.IO initialized and attached to main Flask app")
logger.info(f"Socket.IO: Registered event handlers: {socketio.handlers if hasattr(socketio, 'handlers') else 'No handlers found'}")
return socketio
return socketio

View File

@@ -10,15 +10,9 @@ import os
JINJA2_MAX_RETURN_PAYLOAD_SIZE = 1024 * int(os.getenv("JINJA2_MAX_RETURN_PAYLOAD_SIZE_KB", 1024 * 10))
# This is used for notifications etc, so actually it's OK to send custom HTML such as <a href> etc, but it should limit what data is available.
# (Which also limits available functions that could be called)
def render(template_str, **args: t.Any) -> str:
jinja2_env = jinja2.sandbox.ImmutableSandboxedEnvironment(extensions=['jinja2_time.TimeExtension'])
output = jinja2_env.from_string(template_str).render(args)
return output[:JINJA2_MAX_RETURN_PAYLOAD_SIZE]
def render_fully_escaped(content):
env = jinja2.sandbox.ImmutableSandboxedEnvironment(autoescape=True)
template = env.from_string("{{ some_html|e }}")
return template.render(some_html=content)
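A small illustration of the two helpers above; the template and content strings are arbitrary examples:
# render() evaluates a sandboxed template (with the jinja2_time extension),
# render_fully_escaped() only HTML-escapes whatever it is given.
print(render("Checked {% now 'utc', '%Y-%m-%d' %} for {{ watch_url }}", watch_url="https://example.com"))
print(render_fully_escaped("<script>alert(1)</script>"))  # -> &lt;script&gt;alert(1)&lt;/script&gt;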

View File

@@ -1,6 +1,6 @@
{
"name": "changedetection.io",
"short_name": "changedetection",
"name": "",
"short_name": "",
"icons": [
{
"src": "android-chrome-192x192.png",
@@ -15,8 +15,5 @@
],
"theme_color": "#ffffff",
"background_color": "#ffffff",
"display": "standalone",
"start_url": "/",
"scope": "/",
"gcm_sender_id": "103953800507"
"display": "standalone"
}

View File

@@ -1,450 +0,0 @@
/**
* changedetection.io Browser Push Notifications
* Handles service worker registration, push subscription management, and notification permissions
*/
class BrowserNotifications {
constructor() {
this.serviceWorkerRegistration = null;
this.vapidPublicKey = null;
this.isSubscribed = false;
this.init();
}
async init() {
if (!this.isSupported()) {
console.warn('Push notifications are not supported in this browser');
return;
}
try {
// Get VAPID public key from server
await this.fetchVapidPublicKey();
// Register service worker
await this.registerServiceWorker();
// Check existing subscription state
await this.checkExistingSubscription();
// Initialize UI elements
this.initializeUI();
// Set up notification URL monitoring
this.setupNotificationUrlMonitoring();
} catch (error) {
console.error('Failed to initialize browser notifications:', error);
}
}
isSupported() {
return 'serviceWorker' in navigator &&
'PushManager' in window &&
'Notification' in window;
}
async fetchVapidPublicKey() {
try {
const response = await fetch('/browser-notifications-api/vapid-public-key');
if (!response.ok) {
throw new Error(`HTTP ${response.status}: ${response.statusText}`);
}
const data = await response.json();
this.vapidPublicKey = data.publicKey;
} catch (error) {
console.error('Failed to fetch VAPID public key:', error);
throw error;
}
}
async registerServiceWorker() {
try {
this.serviceWorkerRegistration = await navigator.serviceWorker.register('/service-worker.js', {
scope: '/'
});
console.log('Service Worker registered successfully');
// Wait for service worker to be ready
await navigator.serviceWorker.ready;
} catch (error) {
console.error('Service Worker registration failed:', error);
throw error;
}
}
initializeUI() {
// Bind event handlers to existing elements in the template
this.bindEventHandlers();
// Update UI based on current permission state
this.updatePermissionStatus();
}
bindEventHandlers() {
const enableBtn = document.querySelector('#enable-notifications-btn');
const testBtn = document.querySelector('#test-notification-btn');
if (enableBtn) {
enableBtn.addEventListener('click', () => this.requestNotificationPermission());
}
if (testBtn) {
testBtn.addEventListener('click', () => this.sendTestNotification());
}
}
setupNotificationUrlMonitoring() {
// Monitor the notification URLs textarea for browser:// URLs
const notificationUrlsField = document.querySelector('textarea[name*="notification_urls"]');
if (notificationUrlsField) {
const checkForBrowserUrls = async () => {
const urls = notificationUrlsField.value || '';
const hasBrowserUrls = /browser:\/\//.test(urls);
// If browser URLs are detected and we're not subscribed, auto-subscribe
if (hasBrowserUrls && !this.isSubscribed && Notification.permission === 'default') {
const shouldSubscribe = confirm('Browser notifications detected! Would you like to enable browser notifications now?');
if (shouldSubscribe) {
await this.requestNotificationPermission();
}
} else if (hasBrowserUrls && !this.isSubscribed && Notification.permission === 'granted') {
// Permission already granted but not subscribed - auto-subscribe silently
console.log('Auto-subscribing to browser notifications...');
await this.subscribe();
}
};
// Check immediately
checkForBrowserUrls();
// Check on input changes
notificationUrlsField.addEventListener('input', checkForBrowserUrls);
}
}
async updatePermissionStatus() {
const statusElement = document.querySelector('#permission-status');
const enableBtn = document.querySelector('#enable-notifications-btn');
const testBtn = document.querySelector('#test-notification-btn');
if (!statusElement) return;
const permission = Notification.permission;
statusElement.textContent = permission;
statusElement.className = `permission-${permission}`;
// Show/hide controls based on permission
if (permission === 'default') {
if (enableBtn) enableBtn.style.display = 'inline-block';
if (testBtn) testBtn.style.display = 'none';
} else if (permission === 'granted') {
if (enableBtn) enableBtn.style.display = 'none';
if (testBtn) testBtn.style.display = 'inline-block';
} else { // denied
if (enableBtn) enableBtn.style.display = 'none';
if (testBtn) testBtn.style.display = 'none';
}
}
async requestNotificationPermission() {
try {
const permission = await Notification.requestPermission();
this.updatePermissionStatus();
if (permission === 'granted') {
console.log('Notification permission granted');
// Automatically subscribe to browser notifications
this.subscribe();
} else {
console.log('Notification permission denied');
}
} catch (error) {
console.error('Error requesting notification permission:', error);
}
}
async subscribe() {
if (Notification.permission !== 'granted') {
alert('Please enable notifications first');
return;
}
if (this.isSubscribed) {
console.log('Already subscribed to browser notifications');
return;
}
try {
// First, try to clear any existing subscription with different keys
await this.clearExistingSubscription();
// Create push subscription
const subscription = await this.serviceWorkerRegistration.pushManager.subscribe({
userVisibleOnly: true,
applicationServerKey: this.urlBase64ToUint8Array(this.vapidPublicKey)
});
// Send subscription to server
const response = await fetch('/browser-notifications-api/subscribe', {
method: 'POST',
headers: {
'Content-Type': 'application/json',
'X-CSRFToken': document.querySelector('input[name=csrf_token]')?.value
},
body: JSON.stringify({
subscription: subscription.toJSON()
})
});
if (!response.ok) {
throw new Error(`HTTP ${response.status}: ${response.statusText}`);
}
// Store subscription status
this.isSubscribed = true;
console.log('Successfully subscribed to browser notifications');
} catch (error) {
console.error('Failed to subscribe to browser notifications:', error);
// Show user-friendly error message
if (error.message.includes('different applicationServerKey')) {
this.showSubscriptionConflictDialog(error);
} else {
alert(`Failed to subscribe: ${error.message}`);
}
}
}
async unsubscribe() {
try {
if (!this.isSubscribed) return;
// Get current subscription
const subscription = await this.serviceWorkerRegistration.pushManager.getSubscription();
if (!subscription) {
this.isSubscribed = false;
return;
}
// Unsubscribe from server
const response = await fetch('/browser-notifications-api/unsubscribe', {
method: 'POST',
headers: {
'Content-Type': 'application/json',
'X-CSRFToken': document.querySelector('input[name=csrf_token]')?.value
},
body: JSON.stringify({
subscription: subscription.toJSON()
})
});
if (!response.ok) {
console.warn(`Server unsubscribe failed: ${response.status}`);
}
// Unsubscribe locally
await subscription.unsubscribe();
// Update status
this.isSubscribed = false;
console.log('Unsubscribed from browser notifications');
} catch (error) {
console.error('Failed to unsubscribe from browser notifications:', error);
}
}
async sendTestNotification() {
try {
// First, check if we're subscribed
if (!this.isSubscribed) {
const shouldSubscribe = confirm('You need to subscribe to browser notifications first. Subscribe now?');
if (shouldSubscribe) {
await this.subscribe();
// Give a moment for subscription to complete
await new Promise(resolve => setTimeout(resolve, 1000));
} else {
return;
}
}
const response = await fetch('/browser-notifications/test', {
method: 'POST',
headers: {
'Content-Type': 'application/json',
'X-CSRFToken': document.querySelector('input[name=csrf_token]')?.value
}
});
if (!response.ok) {
if (response.status === 404) {
// No subscriptions found on server - try subscribing
alert('No browser subscriptions found. Subscribing now...');
await this.subscribe();
return;
}
throw new Error(`HTTP ${response.status}: ${response.statusText}`);
}
const result = await response.json();
alert(result.message);
console.log('Test notification result:', result);
} catch (error) {
console.error('Failed to send test notification:', error);
alert(`Failed to send test notification: ${error.message}`);
}
}
urlBase64ToUint8Array(base64String) {
const padding = '='.repeat((4 - base64String.length % 4) % 4);
const base64 = (base64String + padding)
.replace(/-/g, '+')
.replace(/_/g, '/');
const rawData = window.atob(base64);
const outputArray = new Uint8Array(rawData.length);
for (let i = 0; i < rawData.length; ++i) {
outputArray[i] = rawData.charCodeAt(i);
}
return outputArray;
}
async checkExistingSubscription() {
/**
* Check if we already have a valid browser subscription
* Updates this.isSubscribed based on actual browser state
*/
try {
if (!this.serviceWorkerRegistration) {
this.isSubscribed = false;
return;
}
const existingSubscription = await this.serviceWorkerRegistration.pushManager.getSubscription();
if (existingSubscription) {
// We have a subscription - verify it's still valid and matches our VAPID key
const subscriptionJson = existingSubscription.toJSON();
// Check if the endpoint is still active (basic validation)
if (subscriptionJson.endpoint && subscriptionJson.keys) {
console.log('Found existing valid subscription');
this.isSubscribed = true;
} else {
console.log('Found invalid subscription, clearing...');
await existingSubscription.unsubscribe();
this.isSubscribed = false;
}
} else {
console.log('No existing subscription found');
this.isSubscribed = false;
}
} catch (error) {
console.warn('Failed to check existing subscription:', error);
this.isSubscribed = false;
}
}
async clearExistingSubscription() {
/**
* Clear any existing push subscription that might conflict with our VAPID keys
*/
try {
const existingSubscription = await this.serviceWorkerRegistration.pushManager.getSubscription();
if (existingSubscription) {
console.log('Found existing subscription, unsubscribing...');
await existingSubscription.unsubscribe();
console.log('Successfully cleared existing subscription');
}
} catch (error) {
console.warn('Failed to clear existing subscription:', error);
// Don't throw - this is just cleanup
}
}
showSubscriptionConflictDialog(error) {
/**
* Show user-friendly dialog for subscription conflicts
*/
const message = `Browser notifications are already set up for a different changedetection.io instance or with different settings.
To fix this:
1. Clear your existing subscription
2. Try subscribing again
Would you like to automatically clear the old subscription and retry?`;
if (confirm(message)) {
this.clearExistingSubscription().then(() => {
// Retry subscription after clearing
setTimeout(() => {
this.subscribe();
}, 500);
});
} else {
alert('To use browser notifications, please manually clear your browser notifications for this site in browser settings, then try again.');
}
}
async clearAllNotifications() {
/**
* Clear all browser notification subscriptions (admin function)
*/
try {
// Call the server to clear ALL subscriptions from datastore
const response = await fetch('/browser-notifications/clear', {
method: 'POST',
headers: {
'Content-Type': 'application/json',
'X-CSRFToken': document.querySelector('input[name=csrf_token]')?.value
}
});
if (response.ok) {
const result = await response.json();
console.log('Server response:', result.message);
// Also clear the current browser's subscription if it exists
const existingSubscription = await this.serviceWorkerRegistration.pushManager.getSubscription();
if (existingSubscription) {
await existingSubscription.unsubscribe();
console.log('Cleared current browser subscription');
}
// Update status
this.isSubscribed = false;
alert(result.message + '. All browser notifications have been cleared.');
} else {
const error = await response.json();
console.error('Server clear failed:', error.message);
alert('Failed to clear server subscriptions: ' + error.message);
}
} catch (error) {
console.error('Failed to clear all notifications:', error);
alert('Failed to clear notifications: ' + error.message);
}
}
}
// Initialize when DOM is ready
if (document.readyState === 'loading') {
document.addEventListener('DOMContentLoaded', () => {
window.browserNotifications = new BrowserNotifications();
});
} else {
window.browserNotifications = new BrowserNotifications();
}

View File

@@ -159,7 +159,6 @@
// Return the current request in case it's needed
return requests[namespace];
};
})(jQuery);

View File

@@ -104,18 +104,8 @@ $(document).ready(function () {
});
});
// So that the favicon is only updated when the server has written the scraped favicon to disk.
socket.on('watch_bumped_favicon', function (watch) {
const $watchRow = $(`tr[data-watch-uuid="${watch.uuid}"]`);
if ($watchRow.length) {
$watchRow.addClass('has-favicon');
// Because the event could be emitted from a process that is outside the app context, url_for() might not work.
// Lets use url_for at template generation time to give us a PLACEHOLDER instead
let favicon_url = favicon_baseURL.replace('/PLACEHOLDER', `/${watch.uuid}?cache=${watch.event_timestamp}`);
console.log(`Setting favicon for UUID - ${watch.uuid} - ${favicon_url}`);
$('img.favicon', $watchRow).attr('src', favicon_url);
}
})
// Listen for periodically emitted watch data
console.log('Adding watch_update event listener');
socket.on('watch_update', function (data) {
const watch = data.watch;
@@ -126,34 +116,34 @@ $(document).ready(function () {
console.log(`${watch.event_timestamp} - Watch update ${watch.uuid} - Checking now - ${watch.checking_now} - UUID in URL ${window.location.href.includes(watch.uuid)}`);
console.log('Watch data:', watch);
console.log('General stats:', general_stats);
// Updating watch table rows
const $watchRow = $('tr[data-watch-uuid="' + watch.uuid + '"]');
console.log('Found watch row elements:', $watchRow.length);
if ($watchRow.length) {
$($watchRow).toggleClass('checking-now', watch.checking_now);
$($watchRow).toggleClass('queued', watch.queued);
$($watchRow).toggleClass('unviewed', watch.unviewed);
$($watchRow).toggleClass('has-error', watch.has_error);
$($watchRow).toggleClass('has-favicon', watch.has_favicon);
$($watchRow).toggleClass('notification_muted', watch.notification_muted);
$($watchRow).toggleClass('paused', watch.paused);
$($watchRow).toggleClass('single-history', watch.history_n === 1);
$($watchRow).toggleClass('multiple-history', watch.history_n >= 2);
$('td.title-col .error-text', $watchRow).html(watch.error_text)
$('td.last-changed', $watchRow).text(watch.last_changed_text)
$('td.last-checked .innertext', $watchRow).text(watch.last_checked_text)
$('td.last-checked', $watchRow).data('timestamp', watch.last_checked).data('fetchduration', watch.fetch_time);
$('td.last-checked', $watchRow).data('eta_complete', watch.last_checked + watch.fetch_time);
console.log('Updated UI for watch:', watch.uuid);
}
// Tabs at bottom of list
$('#post-list-mark-views').toggleClass("has-unviewed", general_stats.has_unviewed);
$('#post-list-unread').toggleClass("has-unviewed", general_stats.has_unviewed);
$('#post-list-with-errors').toggleClass("has-error", general_stats.count_errors !== 0)
$('#post-list-with-errors a').text(`With errors (${ general_stats.count_errors })`);
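The watch_update handler above toggles row classes based on data.watch and a general_stats structure. A hypothetical sketch of the emitting side, assuming Flask-SocketIO (which the socket.io client scripts imply) and inferring the payload field names from this handler; the actual server emit code is not part of this diff:

```python
# Hypothetical sketch only; field names are inferred from the client-side
# handler above and are not confirmed by this diff.
from flask import Flask
from flask_socketio import SocketIO

app = Flask(__name__)
socketio = SocketIO(app, async_mode="threading")

def notify_watch_update(watch_uuid: str, watch_row: dict, general_stats: dict):
    # The client reads data.watch (per-row state) and general_stats (counters).
    socketio.emit("watch_update", {"watch": {"uuid": watch_uuid, **watch_row},
                                   "general_stats": general_stats})

if __name__ == "__main__":
    socketio.run(app, port=5005)
```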

View File

@@ -1,95 +0,0 @@
// changedetection.io Service Worker for Browser Push Notifications
self.addEventListener('install', function(event) {
console.log('Service Worker installing');
self.skipWaiting();
});
self.addEventListener('activate', function(event) {
console.log('Service Worker activating');
event.waitUntil(self.clients.claim());
});
self.addEventListener('push', function(event) {
console.log('Push message received', event);
let notificationData = {
title: 'changedetection.io',
body: 'A watched page has changed',
icon: '/static/favicons/favicon-32x32.png',
badge: '/static/favicons/favicon-32x32.png',
tag: 'changedetection-notification',
requireInteraction: false,
timestamp: Date.now()
};
// Parse push data if available
if (event.data) {
try {
const pushData = event.data.json();
notificationData = {
...notificationData,
...pushData
};
} catch (e) {
console.warn('Failed to parse push data:', e);
notificationData.body = event.data.text() || notificationData.body;
}
}
const promiseChain = self.registration.showNotification(
notificationData.title,
{
body: notificationData.body,
icon: notificationData.icon,
badge: notificationData.badge,
tag: notificationData.tag,
requireInteraction: notificationData.requireInteraction,
timestamp: notificationData.timestamp,
data: {
url: notificationData.url || '/',
timestamp: notificationData.timestamp
}
}
);
event.waitUntil(promiseChain);
});
self.addEventListener('notificationclick', function(event) {
console.log('Notification clicked', event);
event.notification.close();
const targetUrl = event.notification.data?.url || '/';
event.waitUntil(
clients.matchAll().then(function(clientList) {
// Check if there's already a window/tab open with our app
for (let i = 0; i < clientList.length; i++) {
const client = clientList[i];
if (client.url.includes(self.location.origin) && 'focus' in client) {
client.navigate(targetUrl);
return client.focus();
}
}
// If no existing window, open a new one
if (clients.openWindow) {
return clients.openWindow(targetUrl);
}
})
);
});
self.addEventListener('notificationclose', function(event) {
console.log('Notification closed', event);
});
// Handle messages from the main thread
self.addEventListener('message', function(event) {
console.log('Service Worker received message:', event.data);
if (event.data && event.data.type === 'SKIP_WAITING') {
self.skipWaiting();
}
});
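The push handler above merges its defaults with event.data.json(), so the server controls the title, body, icon and click-through URL. A sketch of a sending side under the assumption that pywebpush is used with a subscription previously stored via /browser-notifications-api/subscribe; the real sender is not shown in this diff:

```python
# Illustrative only; assumes the pywebpush library and a subscription dict
# saved earlier by the subscribe endpoint.
import json
from pywebpush import webpush, WebPushException

def send_change_notification(subscription: dict, vapid_private_key: str, contact_email: str):
    payload = {
        # Keys match what the service worker's push handler reads
        "title": "changedetection.io",
        "body": "A watched page has changed",
        "url": "/",  # opened when the notification is clicked
    }
    try:
        webpush(
            subscription_info=subscription,
            data=json.dumps(payload),
            vapid_private_key=vapid_private_key,
            vapid_claims={"sub": f"mailto:{contact_email}"},
        )
    except WebPushException as e:
        print(f"Push delivery failed: {e}")
```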

View File

@@ -51,7 +51,6 @@ $(document).ready(function () {
$('#notification_body').val('');
$('#notification_format').val('System default');
$('#notification_urls').val('');
$('#notification_muted_none').prop('checked', true); // in the case of a ternary field
e.preventDefault();
});
$("#notification-token-toggle").click(function (e) {

File diff suppressed because one or more lines are too long

View File

@@ -3,16 +3,15 @@
"version": "0.0.3",
"description": "",
"main": "index.js",
"engines": {
"node": ">=18.0.0"
},
"scripts": {
"watch": "sass --watch scss:. --style=compressed --no-source-map",
"build": "sass scss:. --style=compressed --no-source-map"
"watch": "node-sass -w scss -o .",
"build": "node-sass scss -o ."
},
"author": "Leigh Morresi / Web Technologies s.r.o.",
"license": "Apache",
"author": "",
"license": "ISC",
"dependencies": {
"sass": "^1.77.8"
"node-sass": "^7.0.0",
"tar": "^6.1.9",
"trim-newlines": "^3.0.1"
}
}

View File

@@ -1,4 +1,4 @@
@use "parts/variables";
@import "parts/_variables.scss";
#diff-ui {

View File

@@ -64,17 +64,17 @@ body.proxy-check-active {
#recommended-proxy {
display: grid;
gap: 2rem;
padding-bottom: 1em;
@media (min-width: 991px) {
grid-template-columns: repeat(2, 1fr);
}
@media (min-width: 991px) {
grid-template-columns: repeat(2, 1fr);
}
> div {
border: 1px #aaa solid;
border-radius: 4px;
padding: 1em;
}
padding-bottom: 1em;
}
#extra-proxies-setting {

View File

@@ -1,92 +0,0 @@
.watch-table {
&.favicon-not-enabled {
tr {
.favicon {
display: none;
}
}
}
tr {
/* make the icons and the text inline-ish */
td.inline.title-col {
.flex-wrapper {
display: flex;
align-items: center;
gap: 4px;
}
}
}
td,
th {
vertical-align: middle;
}
tr.has-favicon {
&.unviewed {
img.favicon {
opacity: 1.0 !important;
}
}
}
.status-icons {
white-space: nowrap;
display: flex;
align-items: center; /* Vertical centering */
gap: 4px; /* Space between image and text */
> * {
vertical-align: middle;
}
}
}
.title-col {
/* Optional, for spacing */
padding: 10px;
}
.title-wrapper {
display: flex;
align-items: center; /* Vertical centering */
gap: 10px; /* Space between image and text */
}
/* Make sure .title-col-inner doesn't collapse or misalign */
.title-col-inner {
display: inline-block;
vertical-align: middle;
}
/* favicon styling */
.watch-table {
img.favicon {
vertical-align: middle;
max-width: 25px;
max-height: 25px;
height: 25px;
padding-right: 4px;
}
// Reserved for future use
/* &.thumbnail-type-screenshot {
tr.has-favicon {
td.inline.title-col {
img.thumbnail {
background-color: #fff; !* fallback bg for SVGs without bg *!
border-radius: 4px; !* subtle rounded corners *!
border: 1px solid #ddd; !* light border for contrast *!
box-shadow: 0 2px 6px rgba(0, 0, 0, 0.15); !* soft shadow *!
filter: contrast(1.05) saturate(1.1) drop-shadow(0 0 0.5px rgba(0, 0, 0, 0.2));
object-fit: cover; !* crop/fill if needed *!
opacity: 0.8;
max-width: 30px;
max-height: 30px;
height: 30px;
}
}
}
}*/
}

View File

@@ -1,4 +1,4 @@
@use "minitabs";
@import "minitabs";
body.preview-text-enabled {

View File

@@ -24,9 +24,6 @@ body.checking-now {
#post-list-mark-views.has-unviewed {
display: inline-block !important;
}
#post-list-unread.has-unviewed {
display: inline-block !important;
}
}

View File

@@ -1,178 +0,0 @@
$grid-col-checkbox: 20px;
$grid-col-watch: 100px;
$grid-gap: 0.5rem;
@media (max-width: 767px) {
/*
Max width before this PARTICULAR table gets nasty
This query will take effect for any screen smaller than 760px
and also iPads specifically.
*/
.watch-table {
/* make headings work on mobile */
thead {
display: block;
tr {
th {
display: inline-block;
// Hide the "Last" text for smaller screens
@media (max-width: 768px) {
.hide-on-mobile {
display: none;
}
}
}
}
.empty-cell {
display: none;
}
}
.last-checked {
margin-left: calc($grid-col-checkbox + $grid-gap);
> span {
vertical-align: middle;
}
}
.last-changed {
margin-left: calc($grid-col-checkbox + $grid-gap);
}
.last-checked::before {
color: var(--color-text);
content: "Last Checked ";
}
.last-changed::before {
color: var(--color-text);
content: "Last Changed ";
}
/* Force table to not be like tables anymore */
td.inline {
display: inline-block;
}
.pure-table td,
.pure-table th {
border: none;
}
td {
/* Behave like a "row" */
border: none;
border-bottom: 1px solid var(--color-border-watch-table-cell);
vertical-align: middle;
&:before {
/* Top/left values mimic padding */
top: 6px;
left: 6px;
width: 45%;
padding-right: 10px;
white-space: nowrap;
}
}
&.pure-table-striped {
tr {
background-color: var(--color-table-background);
}
tr:nth-child(2n-1) {
background-color: var(--color-table-stripe);
}
tr:nth-child(2n-1) td {
background-color: inherit;
}
}
}
}
@media (max-width: 767px) {
.watch-table {
tbody {
tr {
padding-bottom: 10px;
padding-top: 10px;
display: grid;
grid-template-columns: $grid-col-checkbox 1fr $grid-col-watch;
grid-template-rows: auto auto auto auto;
gap: $grid-gap;
.counter-i {
display: none;
}
td.checkbox-uuid {
display: grid;
place-items: center;
}
td.inline {
/* display: block !important;;*/
}
> td {
border-bottom: none;
}
> td.title-col {
grid-column: 1 / -1;
grid-row: 1;
.watch-title {
font-size: 0.92rem;
}
.link-spread {
display: none;
}
}
> td.last-checked {
grid-column: 1 / -1;
grid-row: 2;
}
> td.last-changed {
grid-column: 1 / -1;
grid-row: 3;
}
> td.checkbox-uuid {
grid-column: 1;
grid-row: 4;
}
> td.buttons {
grid-column: 2;
grid-row: 4;
display: flex;
align-items: center;
justify-content: flex-start;
}
> td.watch-controls {
grid-column: 3;
grid-row: 4;
display: grid;
place-items: center;
a img {
padding: 10px;
}
}
}
}
}
.pure-table td {
padding: 3px !important;
}
}

View File

@@ -7,7 +7,6 @@
&.unviewed {
font-weight: bold;
}
color: var(--color-watch-table-row-text);
}
@@ -49,17 +48,17 @@
/* Row with 'checking-now' */
tr.checking-now {
td:first-child {
position: relative;
position: relative;
}
td:first-child::before {
content: "";
position: absolute;
top: 0;
bottom: 0;
left: 0;
width: 3px;
background-color: #293eff;
content: "";
position: absolute;
top: 0;
bottom: 0;
left: 0;
width: 3px;
background-color: #293eff;
}
td.last-checked {
@@ -110,7 +109,6 @@
tr.has-error {
color: var(--color-watch-table-error);
.error-text {
display: block !important;
}
@@ -121,7 +119,6 @@
display: inline-block !important;
}
}
tr.multiple-history {
a.history-link {
display: inline-block !important;
@@ -129,3 +126,5 @@
}
}

View File

@@ -1,115 +0,0 @@
// Ternary radio button group component
.ternary-radio-group {
display: flex;
gap: 0;
border: 1px solid var(--color-grey-750);
border-radius: 4px;
overflow: hidden;
width: fit-content;
background: var(--color-background);
.ternary-radio-option {
position: relative;
cursor: pointer;
margin: 0;
display: flex;
align-items: center;
input[type="radio"] {
position: absolute;
opacity: 0;
width: 0;
height: 0;
}
.ternary-radio-label {
padding: 8px 16px;
background: var(--color-grey-900);
border: none;
border-right: 1px solid var(--color-grey-750);
font-size: 13px;
font-weight: 500;
color: var(--color-text);
transition: all 0.2s ease;
cursor: pointer;
display: block;
min-width: 60px;
text-align: center;
}
&:last-child .ternary-radio-label {
border-right: none;
}
input:checked + .ternary-radio-label {
background: var(--color-link);
color: var(--color-text-button);
font-weight: 600;
&.ternary-default {
background: var(--color-grey-600);
color: var(--color-text-button);
}
&:hover {
background: #1a7bc4;
&.ternary-default {
background: var(--color-grey-500);
}
}
}
&:hover .ternary-radio-label {
background: var(--color-grey-800);
}
}
@media (max-width: 480px) {
width: 100%;
.ternary-radio-label {
flex: 1;
min-width: auto;
}
}
}
// Standard radio button styling
input[type="radio"].pure-radio:checked + label,
input[type="radio"].pure-radio:checked {
background: var(--color-link);
color: var(--color-text-button);
}
html[data-darkmode="true"] {
.ternary-radio-group {
.ternary-radio-option {
.ternary-radio-label {
background: var(--color-grey-350);
}
&:hover .ternary-radio-label {
background: var(--color-grey-400);
}
input:checked + .ternary-radio-label {
background: var(--color-link);
color: var(--color-text-button);
&.ternary-default {
background: var(--color-grey-600);
}
&:hover {
background: #1a7bc4;
&.ternary-default {
background: var(--color-grey-500);
}
}
}
}
}
}

View File

@@ -2,25 +2,21 @@
* -- BASE STYLES --
*/
@use "parts/variables";
@use "parts/arrows";
@use "parts/browser-steps";
@use "parts/extra_proxies";
@use "parts/extra_browsers";
@use "parts/pagination";
@use "parts/spinners";
@use "parts/darkmode";
@use "parts/menu";
@use "parts/love";
@use "parts/preview_text_filter";
@use "parts/watch_table";
@use "parts/watch_table-mobile";
@use "parts/edit";
@use "parts/conditions_table";
@use "parts/lister_extra";
@use "parts/socket";
@use "parts/visualselector";
@use "parts/widgets";
@import "parts/_arrows";
@import "parts/_browser-steps";
@import "parts/_extra_proxies";
@import "parts/_extra_browsers";
@import "parts/_pagination";
@import "parts/_spinners";
@import "parts/_variables";
@import "parts/_darkmode";
@import "parts/_menu";
@import "parts/_love";
@import "parts/preview_text_filter";
@import "parts/_watch_table";
@import "parts/_edit";
@import "parts/_conditions_table";
@import "parts/_socket";
body {
color: var(--color-text);
@@ -188,15 +184,9 @@ code {
@extend .inline-tag;
}
@media (min-width: 768px) {
.box {
margin: 0 1em !important;
}
}
.box {
max-width: 100%;
margin: 0 0.3em;
margin: 0 1em;
flex-direction: column;
display: flex;
justify-content: center;
@@ -704,6 +694,114 @@ footer {
width: 100%;
}
/*
Max width before this PARTICULAR table gets nasty
This query will take effect for any screen smaller than 760px
and also iPads specifically.
*/
.watch-table {
/* make headings work on mobile */
thead {
display: block;
tr {
th {
display: inline-block;
// Hide the "Last" text for smaller screens
@media (max-width: 768px) {
.hide-on-mobile {
display: none;
}
}
}
}
.empty-cell {
display: none;
}
}
/* Force table to not be like tables anymore */
tbody {
td,
tr {
display: block;
}
}
tbody {
tr {
display: flex;
flex-wrap: wrap;
// The third child of each row will take up the remaining space
// This is useful for the URL column, which should expand to fill the remaining space
:nth-child(3) {
flex-grow: 1;
}
// The last three children (from the end) of each row will take up the full width
// This is useful for the "Last Checked", "Last Changed", and the action buttons columns, which should each take up the full width
:nth-last-child(-n+3) {
flex-basis: 100%;
}
}
}
.last-checked {
>span {
vertical-align: middle;
}
}
.last-checked::before {
color: var(--color-last-checked);
content: "Last Checked ";
}
.last-changed::before {
color: var(--color-last-checked);
content: "Last Changed ";
}
/* Force table to not be like tables anymore */
td.inline {
display: inline-block;
}
.pure-table td,
.pure-table th {
border: none;
}
td {
/* Behave like a "row" */
border: none;
border-bottom: 1px solid var(--color-border-watch-table-cell);
vertical-align: middle;
&:before {
/* Top/left values mimic padding */
top: 6px;
left: 6px;
width: 45%;
padding-right: 10px;
white-space: nowrap;
}
}
&.pure-table-striped {
tr {
background-color: var(--color-table-background);
}
tr:nth-child(2n-1) {
background-color: var(--color-table-stripe);
}
tr:nth-child(2n-1) td {
background-color: inherit;
}
}
}
}
.pure-table {
@@ -958,6 +1056,8 @@ ul {
}
}
@import "parts/_visualselector";
#webdriver_delay {
width: 5em;
}
@@ -1075,23 +1175,17 @@ ul {
#quick-watch-processor-type {
ul#processor {
color: #fff;
padding-left: 0px;
color: #fff;
ul {
padding: 0.3rem;
li {
list-style: none;
font-size: 0.9rem;
display: grid;
grid-template-columns: auto 1fr;
align-items: center;
gap: 0.5rem;
margin-bottom: 0.5rem;
> * {
display: inline-block;
}
}
}
label, input {
padding: 0;
margin: 0;
}
}
.restock-label {
@@ -1130,12 +1224,11 @@ ul {
}
#realtime-conn-error {
position: fixed;
position: absolute;
bottom: 0;
left: 0;
left: 30px;
background: var(--color-warning);
padding: 10px;
font-size: 0.8rem;
color: #fff;
opacity: 0.8;
}

File diff suppressed because one or more lines are too long

View File

@@ -140,28 +140,6 @@ class ChangeDetectionStore:
secret = secrets.token_hex(16)
self.__data['settings']['application']['api_access_token'] = secret
# Generate VAPID keys for browser push notifications
if not self.__data['settings']['application']['vapid'].get('private_key'):
try:
from py_vapid import Vapid
vapid = Vapid()
vapid.generate_keys()
# Convert bytes to strings for JSON serialization
private_pem = vapid.private_pem()
public_pem = vapid.public_pem()
self.__data['settings']['application']['vapid']['private_key'] = private_pem.decode() if isinstance(private_pem, bytes) else private_pem
self.__data['settings']['application']['vapid']['public_key'] = public_pem.decode() if isinstance(public_pem, bytes) else public_pem
# Set default contact email if not present
if not self.__data['settings']['application']['vapid'].get('contact_email'):
self.__data['settings']['application']['vapid']['contact_email'] = 'citizen@example.com'
logger.info("Generated new VAPID keys for browser push notifications")
except ImportError:
logger.warning("py_vapid not available - browser notifications will not work")
except Exception as e:
logger.warning(f"Failed to generate VAPID keys: {e}")
self.needs_write = True
# Finally start the thread that will manage periodic data saves to JSON
@@ -284,6 +262,11 @@ class ChangeDetectionStore:
extras = deepcopy(self.data['watching'][uuid])
new_uuid = self.add_watch(url=url, extras=extras)
watch = self.data['watching'][new_uuid]
if self.data['settings']['application'].get('extract_title_as_title') or watch['extract_title_as_title']:
# Because it will be recalculated on the next fetch
self.data['watching'][new_uuid]['title'] = None
return new_uuid
def url_exists(self, url):
@@ -325,6 +308,7 @@ class ChangeDetectionStore:
'browser_steps',
'css_filter',
'extract_text',
'extract_title_as_title',
'headers',
'ignore_text',
'include_filters',
@@ -339,7 +323,6 @@ class ChangeDetectionStore:
'title',
'trigger_text',
'url',
'use_page_title_in_list',
'webdriver_js_execute_code',
]:
if res.get(k):
@@ -429,7 +412,7 @@ class ChangeDetectionStore:
with open(self.json_store_path+".tmp", 'w') as json_file:
# Use compact JSON in production for better performance
json.dump(data, json_file, indent=2)
os.replace(self.json_store_path+".tmp", self.json_store_path)
os.replace(self.json_store_path+".tmp", self.json_store_path)
except Exception as e:
logger.error(f"Error writing JSON!! (Main JSON file save was skipped) : {str(e)}")
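The save path above writes to <json_store_path>.tmp and then os.replace()s it over the live file, so a crash mid-write cannot leave a truncated datastore behind. The same pattern as a self-contained sketch:

```python
import json
import os

def atomic_json_save(path: str, data: dict):
    """Write JSON to a temp file next to the target, then atomically swap it in.
    os.replace() is atomic on the same filesystem, so readers never see a
    half-written file."""
    tmp_path = path + ".tmp"
    with open(tmp_path, "w") as f:
        json.dump(data, f, indent=2)
    os.replace(tmp_path, path)

atomic_json_save("url-watches.json", {"watching": {}})  # example path
```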
@@ -990,16 +973,6 @@ class ChangeDetectionStore:
f_d.write(zlib.compress(f_j.read()))
os.unlink(json_path)
def update_20(self):
for uuid, watch in self.data['watching'].items():
if self.data['watching'][uuid].get('extract_title_as_title'):
self.data['watching'][uuid]['use_page_title_in_list'] = self.data['watching'][uuid].get('extract_title_as_title')
del self.data['watching'][uuid]['extract_title_as_title']
if self.data['settings']['application'].get('extract_title_as_title'):
self.data['settings']['application']['ui']['use_page_title_in_list'] = self.data['settings']['application'].get('extract_title_as_title')
def add_notification_url(self, notification_url):
logger.debug(f">>> Adding new notification_url - '{notification_url}'")

View File

@@ -33,34 +33,6 @@
<div id="notification-test-log" style="display: none;"><span class="pure-form-message-inline">Processing..</span></div>
</div>
</div>
<!-- Browser Notifications -->
<div id="browser-notification-section">
<div class="pure-control-group">
<label>Browser Notifications</label>
<div class="pure-form-message-inline">
<p><strong>Browser push notifications!</strong> Use <code>browser://</code> URLs in your notification settings to receive real-time push notifications even when this tab is closed.</p>
<p><small><strong>Troubleshooting:</strong> If you get "different applicationServerKey" errors, click "Clear All Notifications" below and try again. This happens when switching between different changedetection.io instances.</small></p>
<div id="browser-notification-controls" style="margin-top: 1em;">
<div id="notification-permission-status">
<p>Browser notifications: <span id="permission-status">checking...</span></p>
</div>
<div id="browser-notification-actions">
<button type="button" id="enable-notifications-btn" class="pure-button button-secondary button-xsmall" style="display: none;">
Enable Browser Notifications
</button>
<button type="button" id="test-notification-btn" class="pure-button button-secondary button-xsmall" style="display: none;">
Send browser test notification
</button>
<button type="button" id="clear-notifications-btn" class="pure-button button-secondary button-xsmall" onclick="window.browserNotifications?.clearAllNotifications()" style="margin-left: 0.5em;">
Clear All Notifications
</button>
</div>
</div>
</div>
</div>
</div>
<div id="notification-customisation" class="pure-control-group">
<div class="pure-control-group">
{{ render_field(form.notification_title, class="m-d notification-title", placeholder=settings_application['notification_title']) }}
@@ -98,7 +70,7 @@
</tr>
<tr>
<td><code>{{ '{{watch_title}}' }}</code></td>
<td>The page title of the watch, uses &lt;title&gt; if not set, falls back to URL</td>
<td>The title of the watch.</td>
</tr>
<tr>
<td><code>{{ '{{watch_tag}}' }}</code></td>

View File

@@ -1,29 +1,14 @@
{% macro render_field(field) %}
<div {% if field.errors or field.top_errors %} class="error" {% endif %}>{{ field.label }}</div>
<div {% if field.errors or field.top_errors %} class="error" {% endif %}>{{ field(**kwargs)|safe }}
{% if field.top_errors %}
top
<ul class="errors top-errors">
{% for error in field.top_errors %}
<li>{{ error }}</li>
{% endfor %}
</ul>
{% endif %}
{% if field.errors %}
<ul class=errors>
{% if field.errors is mapping and 'form' in field.errors %}
{# and subfield form errors, such as used in RequiredFormField() for TimeBetweenCheckForm sub form #}
{% set errors = field.errors['form'] %}
{% else %}
{# regular list of errors with this field #}
{% set errors = field.errors %}
{% endif %}
{% for error in errors %}
<li>{{ error }}</li>
{% endfor %}
</ul>
{% endif %}
</div>
<div {% if field.errors %} class="error" {% endif %}>{{ field.label }}</div>
<div {% if field.errors %} class="error" {% endif %}>{{ field(**kwargs)|safe }}
{% if field.errors %}
<ul class=errors>
{% for error in field.errors %}
<li>{{ error }}</li>
{% endfor %}
</ul>
{% endif %}
</div>
{% endmacro %}
{% macro render_checkbox_field(field) %}
@@ -39,23 +24,6 @@
</div>
{% endmacro %}
{% macro render_ternary_field(field, BooleanField=false) %}
{% if BooleanField %}
{% set _ = field.__setattr__('boolean_mode', true) %}
{% endif %}
<div class="ternary-field {% if field.errors %} error {% endif %}">
<div class="ternary-field-label">{{ field.label }}</div>
<div class="ternary-field-widget">{{ field(**kwargs)|safe }}</div>
{% if field.errors %}
<ul class=errors>
{% for error in field.errors %}
<li>{{ error }}</li>
{% endfor %}
</ul>
{% endif %}
</div>
{% endmacro %}
{% macro render_simple_field(field) %}
<span class="label {% if field.errors %}error{% endif %}">{{ field.label }}</span>

View File

@@ -5,7 +5,6 @@
<meta charset="utf-8" >
<meta name="viewport" content="width=device-width, initial-scale=1.0" >
<meta name="description" content="Self hosted website change detection." >
<meta name="robots" content="noindex">
<title>Change Detection{{extra_title}}</title>
{% if app_rss_token %}
<link rel="alternate" type="application/rss+xml" title="Changedetection.io » Feed{% if active_tag_uuid %}- {{active_tag.title}}{% endif %}" href="{{ url_for('rss.feed', tag=active_tag_uuid , token=app_rss_token)}}" >
@@ -35,14 +34,13 @@
<script src="{{url_for('static_content', group='js', filename='jquery-3.6.0.min.js')}}"></script>
<script src="{{url_for('static_content', group='js', filename='csrf.js')}}" defer></script>
<script src="{{url_for('static_content', group='js', filename='feather-icons.min.js')}}" defer></script>
<script src="{{url_for('static_content', group='js', filename='browser-notifications.js')}}" defer></script>
{% if socket_io_enabled %}
<script src="{{url_for('static_content', group='js', filename='socket.io.min.js')}}"></script>
<script src="{{url_for('static_content', group='js', filename='realtime.js')}}" defer></script>
{% endif %}
</head>
<body class="{{extra_classes}}">
<body class="">
<div class="header">
<div class="pure-menu-fixed" style="width: 100%;">
<div class="home-menu pure-menu pure-menu-horizontal" id="nav-menu">
@@ -238,7 +236,7 @@
<script src="{{url_for('static_content', group='js', filename='toggle-theme.js')}}" defer></script>
<div id="checking-now-fixed-tab" style="display: none;"><span class="spinner"></span><span>&nbsp;Checking now</span></div>
<div id="realtime-conn-error" style="display:none">Real-time updates offline</div>
<div id="realtime-conn-error" style="display:none">Offline</div>
</body>
</html>

View File

@@ -1,6 +1,6 @@
{% extends 'base.html' %}
{% block content %}
{% from '_helpers.html' import render_field, render_checkbox_field, render_button, render_time_schedule_form, playwright_warning, only_playwright_type_watches_warning, render_conditions_fieldlist_of_formfields_as_table, render_ternary_field %}
{% from '_helpers.html' import render_field, render_checkbox_field, render_button, render_time_schedule_form, playwright_warning, only_playwright_type_watches_warning, render_conditions_fieldlist_of_formfields_as_table %}
{% from '_common_fields.html' import render_common_settings_form %}
<script src="{{url_for('static_content', group='js', filename='tabs.js')}}" defer></script>
<script src="{{url_for('static_content', group='js', filename='vis.js')}}" defer></script>
@@ -72,16 +72,15 @@
<div class="pure-form-message">Some sites use JavaScript to create the content, for this you should <a href="https://github.com/dgtlmoon/changedetection.io/wiki/Fetching-pages-with-WebDriver">use the Chrome/WebDriver Fetcher</a></div>
<div class="pure-form-message">Variables are supported in the URL (<a href="https://github.com/dgtlmoon/changedetection.io/wiki/Handling-variables-in-the-watched-URL">help and examples here</a>).</div>
</div>
<div class="pure-control-group">
{{ render_field(form.tags) }}
<span class="pure-form-message-inline">Organisational tag/group name used in the main listing page</span>
</div>
<div class="pure-control-group inline-radio">
{{ render_field(form.processor) }}
</div>
<div class="pure-control-group">
{{ render_field(form.title, class="m-d", placeholder=watch.label) }}
<span class="pure-form-message-inline">Automatically uses the page title if found, you can also use your own title/description here</span>
{{ render_field(form.title, class="m-d") }}
</div>
<div class="pure-control-group">
{{ render_field(form.tags) }}
<span class="pure-form-message-inline">Organisational tag/group name used in the main listing page</span>
</div>
<div class="pure-control-group time-between-check border-fieldset">
@@ -102,16 +101,15 @@
</div>
<br>
</div>
<div class="pure-control-group">
{{ render_checkbox_field(form.extract_title_as_title) }}
</div>
<div class="pure-control-group">
{{ render_checkbox_field(form.filter_failure_notification_send) }}
<span class="pure-form-message-inline">
Sends a notification when the filter can no longer be seen on the page, good for knowing when the page changed and your filter will not work anymore.
</span>
</div>
<div class="pure-control-group">
{{ render_ternary_field(form.use_page_title_in_list) }}
</div>
</fieldset>
</div>
@@ -264,7 +262,7 @@ Math: {{ 1 + 1 }}") }}
<div class="tab-pane-inner" id="notifications">
<fieldset>
<div class="pure-control-group inline-radio">
{{ render_ternary_field(form.notification_muted, BooleanField=true) }}
{{ render_checkbox_field(form.notification_muted) }}
</div>
{% if watch_needs_selenium_or_playwright %}
<div class="pure-control-group inline-radio">
@@ -471,11 +469,11 @@ Math: {{ 1 + 1 }}") }}
<div class="pure-control-group">
{{ render_button(form.save_button) }}
<a href="{{url_for('ui.form_delete', uuid=uuid)}}"
class="pure-button button-error ">Delete</a>
class="pure-button button-small button-error ">Delete</a>
{% if watch.history_n %}<a href="{{url_for('ui.clear_watch_history', uuid=uuid)}}"
class="pure-button button-error">Clear History</a>{% endif %}
class="pure-button button-small button-error ">Clear History</a>{% endif %}
<a href="{{url_for('ui.form_clone', uuid=uuid)}}"
class="pure-button">Clone &amp; Edit</a>
class="pure-button button-small ">Clone &amp; Edit</a>
</div>
</div>
</form>

View File

@@ -55,8 +55,7 @@ def do_test(client, live_server, make_test_use_extra_browser=False):
"tags": "",
"headers": "",
'fetch_backend': f"extra_browser_{custom_browser_name}",
'webdriver_js_execute_code': '',
"time_between_check_use_default": "y"
'webdriver_js_execute_code': ''
},
follow_redirects=True
)

View File

@@ -2,24 +2,19 @@
import time
from flask import url_for
import os
from ..util import live_server_setup, wait_for_all_checks
import logging
# Requires playwright to be installed
def test_fetch_webdriver_content(client, live_server, measure_memory_usage):
# live_server_setup(live_server) # Setup on conftest per function
# live_server_setup(live_server) # Setup on conftest per function
#####################
res = client.post(
url_for("settings.settings_page"),
data={
"application-empty_pages_are_a_change": "",
"requests-time_between_check-minutes": 180,
'application-fetch_backend': "html_webdriver",
'application-ui-favicons_enabled': "y",
},
data={"application-empty_pages_are_a_change": "",
"requests-time_between_check-minutes": 180,
'application-fetch_backend': "html_webdriver"},
follow_redirects=True
)
@@ -35,51 +30,11 @@ def test_fetch_webdriver_content(client, live_server, measure_memory_usage):
assert b"1 Imported" in res.data
wait_for_all_checks(client)
res = client.get(
url_for("ui.ui_views.preview_page", uuid="first"),
follow_redirects=True
)
logging.getLogger().info("Looking for correct fetched HTML (text) from server")
assert b'cool it works' in res.data
# Favicon scraper check, favicon only so far is fetched when in browser mode (not requests mode)
if os.getenv("PLAYWRIGHT_DRIVER_URL"):
uuid = next(iter(live_server.app.config['DATASTORE'].data['watching']))
res = client.get(
url_for("watchlist.index"),
)
# The UI can access it here
assert f'src="/static/favicon/{uuid}'.encode('utf8') in res.data
# Attempt to fetch it, make sure that works
res = client.get(url_for('static_content', group='favicon', filename=uuid))
assert res.status_code == 200
assert len(res.data) > 10
# Check the API also returns it
api_key = live_server.app.config['DATASTORE'].data['settings']['application'].get('api_access_token')
res = client.get(
url_for("watchfavicon", uuid=uuid),
headers={'x-api-key': api_key}
)
assert res.status_code == 200
assert len(res.data) > 10
##################### disable favicons check
res = client.post(
url_for("settings.settings_page"),
data={
"requests-time_between_check-minutes": 180,
'application-ui-favicons_enabled': "",
"application-empty_pages_are_a_change": "",
},
follow_redirects=True
)
assert b"Settings updated." in res.data
res = client.get(
url_for("watchlist.index"),
)
# The UI can access it here
assert f'src="/static/favicon'.encode('utf8') not in res.data

View File

@@ -28,7 +28,6 @@ def test_execute_custom_js(client, live_server, measure_memory_usage):
'fetch_backend': "html_webdriver",
'webdriver_js_execute_code': 'document.querySelector("button[name=test-button]").click();',
'headers': "testheader: yes\buser-agent: MyCustomAgent",
"time_between_check_use_default": "y",
},
follow_redirects=True
)

View File

@@ -27,7 +27,6 @@ def test_preferred_proxy(client, live_server, measure_memory_usage):
"proxy": "proxy-two",
"tags": "",
"url": url,
"time_between_check_use_default": "y",
},
follow_redirects=True
)

View File

@@ -62,7 +62,6 @@ def test_noproxy_option(client, live_server, measure_memory_usage):
"proxy": "no-proxy",
"tags": "",
"url": url,
"time_between_check_use_default": "y",
},
follow_redirects=True
)

View File

@@ -44,7 +44,6 @@ def test_proxy_noconnect_custom(client, live_server, measure_memory_usage):
"url": test_url,
"fetch_backend": "html_webdriver" if os.getenv('PLAYWRIGHT_DRIVER_URL') or os.getenv("WEBDRIVER_URL") else "html_requests",
"proxy": "ui-0custom-test-proxy",
"time_between_check_use_default": "y",
}
res = client.post(

View File

@@ -66,7 +66,6 @@ def test_socks5(client, live_server, measure_memory_usage):
"proxy": "ui-0socks5proxy",
"tags": "",
"url": test_url,
"time_between_check_use_default": "y",
},
follow_redirects=True
)

View File

@@ -53,7 +53,6 @@ def test_socks5_from_proxiesjson_file(client, live_server, measure_memory_usage)
"proxy": "socks5proxy",
"tags": "",
"url": test_url,
"time_between_check_use_default": "y",
},
follow_redirects=True
)

View File

@@ -0,0 +1,72 @@
import asyncio
import socketio
from aiohttp import web
SOCKETIO_URL = 'ws://localhost.localdomain:5005'
SOCKETIO_PATH = "/socket.io"
NUM_CLIENTS = 1
clients = []
shutdown_event = asyncio.Event()
class WatchClient:
def __init__(self, client_id: int):
self.client_id = client_id
self.i_got_watch_update_event = False
self.sio = socketio.AsyncClient(reconnection_attempts=50, reconnection_delay=1)
@self.sio.event
async def connect():
print(f"[Client {self.client_id}] Connected")
@self.sio.event
async def disconnect():
print(f"[Client {self.client_id}] Disconnected")
@self.sio.on("watch_update")
async def on_watch_update(watch):
self.i_got_watch_update_event = True
print(f"[Client {self.client_id}] Received update: {watch}")
async def run(self):
try:
await self.sio.connect(SOCKETIO_URL, socketio_path=SOCKETIO_PATH, transports=["websocket", "polling"])
await self.sio.wait()
except Exception as e:
print(f"[Client {self.client_id}] Connection error: {e}")
async def handle_check(request):
all_received = all(c.i_got_watch_update_event for c in clients)
result = "yes" if all_received else "no"
print(f"Received HTTP check — returning '{result}'")
shutdown_event.set() # Signal shutdown
return web.Response(text=result)
async def start_http_server():
app = web.Application()
app.add_routes([web.get('/did_all_clients_get_watch_update', handle_check)])
runner = web.AppRunner(app)
await runner.setup()
site = web.TCPSite(runner, '0.0.0.0', 6666)
await site.start()
async def main():
#await start_http_server()
for i in range(NUM_CLIENTS):
client = WatchClient(i)
clients.append(client)
asyncio.create_task(client.run())
await shutdown_event.wait()
print("Shutting down...")
# Graceful disconnect
for c in clients:
await c.sio.disconnect()
if __name__ == "__main__":
try:
asyncio.run(main())
except KeyboardInterrupt:
print("Interrupted")

View File

@@ -157,8 +157,7 @@ def test_check_notification_email_formats_default_Text_override_HTML(client, liv
data={
"url": test_url,
"notification_format": 'HTML',
'fetch_backend': "html_requests",
"time_between_check_use_default": "y"},
'fetch_backend': "html_requests"},
follow_redirects=True
)
assert b"Updated watch." in res.data

View File

@@ -61,8 +61,7 @@ def test_check_removed_line_contains_trigger(client, live_server, measure_memory
data={"trigger_text": 'The golden line',
"url": test_url,
'fetch_backend': "html_requests",
'filter_text_removed': 'y',
"time_between_check_use_default": "y"},
'filter_text_removed': 'y'},
follow_redirects=True
)
assert b"Updated watch." in res.data
@@ -155,8 +154,7 @@ def test_check_add_line_contains_trigger(client, live_server, measure_memory_usa
'processor': 'text_json_diff',
'fetch_backend': "html_requests",
'filter_text_removed': '',
'filter_text_added': 'y',
"time_between_check_use_default": "y"},
'filter_text_added': 'y'},
follow_redirects=True
)
assert b"Updated watch." in res.data

View File

@@ -292,7 +292,9 @@ def test_access_denied(client, live_server, measure_memory_usage):
def test_api_watch_PUT_update(client, live_server, measure_memory_usage):
api_key = live_server.app.config['DATASTORE'].data['settings']['application'].get('api_access_token')
# Create a watch
set_original_response()
test_url = url_for('test_endpoint', _external=True)
@@ -300,27 +302,14 @@ def test_api_watch_PUT_update(client, live_server, measure_memory_usage):
# Create new
res = client.post(
url_for("createwatch"),
data=json.dumps({"url": test_url,
'tag': "One, Two",
"title": "My test URL",
'headers': {'cookie': 'yum'},
"conditions": [
{
"field": "page_filtered_text",
"operator": "contains_regex",
"value": "." # contains anything
}
],
"conditions_match_logic": "ALL",
}
),
data=json.dumps({"url": test_url, 'tag': "One, Two", "title": "My test URL", 'headers': {'cookie': 'yum'} }),
headers={'content-type': 'application/json', 'x-api-key': api_key},
follow_redirects=True
)
assert res.status_code == 201
wait_for_all_checks(client)
# Get a listing, it will be the first one
res = client.get(
url_for("createwatch"),
@@ -328,7 +317,6 @@ def test_api_watch_PUT_update(client, live_server, measure_memory_usage):
)
watch_uuid = list(res.json.keys())[0]
assert not res.json[watch_uuid].get('viewed'), 'A newly created watch can only be unviewed'
# Check in the edit page just to be sure
res = client.get(
@@ -342,12 +330,7 @@ def test_api_watch_PUT_update(client, live_server, measure_memory_usage):
res = client.put(
url_for("watch", uuid=watch_uuid),
headers={'x-api-key': api_key, 'content-type': 'application/json'},
data=json.dumps({
"title": "new title",
'time_between_check': {'minutes': 552},
'headers': {'cookie': 'all eaten'},
'last_viewed': int(time.time())
}),
data=json.dumps({"title": "new title", 'time_between_check': {'minutes': 552}, 'headers': {'cookie': 'all eaten'}}),
)
assert res.status_code == 200, "HTTP PUT update was sent OK"
@@ -357,7 +340,6 @@ def test_api_watch_PUT_update(client, live_server, measure_memory_usage):
headers={'x-api-key': api_key}
)
assert res.json.get('title') == 'new title'
assert res.json.get('viewed'), 'With the timestamp greater than "changed" a watch can be updated to viewed'
# Check in the edit page just to be sure
res = client.get(
@@ -390,13 +372,13 @@ def test_api_watch_PUT_update(client, live_server, measure_memory_usage):
def test_api_import(client, live_server, measure_memory_usage):
api_key = live_server.app.config['DATASTORE'].data['settings']['application'].get('api_access_token')
res = client.post(
url_for("import") + "?tag=import-test",
data='https://website1.com\r\nhttps://website2.com',
headers={'x-api-key': api_key, 'content-type': 'text/plain'},
headers={'x-api-key': api_key},
follow_redirects=True
)
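The request bodies above show the API accepting conditions and conditions_match_logic when creating a watch. A hypothetical external client doing the same with requests; the /api/v1/watch path and port are assumptions based on the documented REST API rather than this diff:

```python
# Hypothetical client sketch; endpoint path, port and API key are assumptions.
import requests

API = "http://localhost:5000/api/v1/watch"
headers = {"x-api-key": "YOUR-API-KEY", "content-type": "application/json"}

payload = {
    "url": "https://example.com",
    "title": "My test URL",
    "conditions": [
        {"field": "page_filtered_text", "operator": "contains_regex", "value": "."}
    ],
    "conditions_match_logic": "ALL",
}

r = requests.post(API, json=payload, headers=headers)
print(r.status_code, r.text)  # 201 on success
```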

View File

@@ -1,199 +0,0 @@
#!/usr/bin/env python3
"""
OpenAPI validation tests for ChangeDetection.io API
This test file specifically verifies that OpenAPI validation is working correctly
by testing various scenarios that should trigger validation errors.
"""
import time
import json
from flask import url_for
from .util import live_server_setup, wait_for_all_checks
def test_openapi_validation_invalid_content_type_on_create_watch(client, live_server, measure_memory_usage):
"""Test that creating a watch with invalid content-type triggers OpenAPI validation error."""
api_key = live_server.app.config['DATASTORE'].data['settings']['application'].get('api_access_token')
# Try to create a watch with JSON data but without proper content-type header
res = client.post(
url_for("createwatch"),
data=json.dumps({"url": "https://example.com", "title": "Test Watch"}),
headers={'x-api-key': api_key}, # Missing 'content-type': 'application/json'
follow_redirects=True
)
# Should get 400 error due to OpenAPI validation failure
assert res.status_code == 400, f"Expected 400 but got {res.status_code}"
assert b"OpenAPI validation failed" in res.data, "Should contain OpenAPI validation error message"
def test_openapi_validation_missing_required_field_create_watch(client, live_server, measure_memory_usage):
"""Test that creating a watch without required URL field triggers OpenAPI validation error."""
api_key = live_server.app.config['DATASTORE'].data['settings']['application'].get('api_access_token')
# Try to create a watch without the required 'url' field
res = client.post(
url_for("createwatch"),
data=json.dumps({"title": "Test Watch Without URL"}), # Missing required 'url' field
headers={'x-api-key': api_key, 'content-type': 'application/json'},
follow_redirects=True
)
# Should get 400 error due to missing required field
assert res.status_code == 400, f"Expected 400 but got {res.status_code}"
assert b"OpenAPI validation failed" in res.data, "Should contain OpenAPI validation error message"
def test_openapi_validation_invalid_field_in_request_body(client, live_server, measure_memory_usage):
"""Test that including invalid fields triggers OpenAPI validation error."""
api_key = live_server.app.config['DATASTORE'].data['settings']['application'].get('api_access_token')
# First create a valid watch
res = client.post(
url_for("createwatch"),
data=json.dumps({"url": "https://example.com", "title": "Test Watch"}),
headers={'x-api-key': api_key, 'content-type': 'application/json'},
follow_redirects=True
)
assert res.status_code == 201, "Watch creation should succeed"
# Get the watch list to find the UUID
res = client.get(
url_for("createwatch"),
headers={'x-api-key': api_key}
)
assert res.status_code == 200
watch_uuid = list(res.json.keys())[0]
# Now try to update the watch with an invalid field
res = client.put(
url_for("watch", uuid=watch_uuid),
headers={'x-api-key': api_key, 'content-type': 'application/json'},
data=json.dumps({
"title": "Updated title",
"invalid_field_that_doesnt_exist": "this should cause validation error"
}),
)
# Should get 400 error due to invalid field (this will be caught by internal validation)
# Note: This tests the flow where OpenAPI validation passes but internal validation catches it
assert res.status_code == 400, f"Expected 400 but got {res.status_code}"
assert b"Additional properties are not allowed" in res.data, "Should contain validation error about additional properties"
def test_openapi_validation_import_wrong_content_type(client, live_server, measure_memory_usage):
"""Test that import endpoint with wrong content-type triggers OpenAPI validation error."""
api_key = live_server.app.config['DATASTORE'].data['settings']['application'].get('api_access_token')
# Try to import URLs with JSON content-type instead of text/plain
res = client.post(
url_for("import") + "?tag=test-import",
data='https://website1.com\nhttps://website2.com',
headers={'x-api-key': api_key, 'content-type': 'application/json'}, # Wrong content-type
follow_redirects=True
)
# Should get 400 error due to content-type mismatch
assert res.status_code == 400, f"Expected 400 but got {res.status_code}"
assert b"OpenAPI validation failed" in res.data, "Should contain OpenAPI validation error message"
def test_openapi_validation_import_correct_content_type_succeeds(client, live_server, measure_memory_usage):
"""Test that import endpoint with correct content-type succeeds (positive test)."""
api_key = live_server.app.config['DATASTORE'].data['settings']['application'].get('api_access_token')
# Import URLs with correct text/plain content-type
res = client.post(
url_for("import") + "?tag=test-import",
data='https://website1.com\nhttps://website2.com',
headers={'x-api-key': api_key, 'content-type': 'text/plain'}, # Correct content-type
follow_redirects=True
)
# Should succeed
assert res.status_code == 200, f"Expected 200 but got {res.status_code}"
assert len(res.json) == 2, "Should import 2 URLs"
def test_openapi_validation_get_requests_bypass_validation(client, live_server, measure_memory_usage):
"""Test that GET requests bypass OpenAPI validation entirely."""
api_key = live_server.app.config['DATASTORE'].data['settings']['application'].get('api_access_token')
# Disable API token requirement first
res = client.post(
url_for("settings.settings_page"),
data={
"requests-time_between_check-minutes": 180,
"application-fetch_backend": "html_requests",
"application-api_access_token_enabled": ""
},
follow_redirects=True
)
assert b"Settings updated." in res.data
# Make GET request to list watches - should succeed even without API key or content-type
res = client.get(url_for("createwatch")) # No headers needed for GET
assert res.status_code == 200, f"GET requests should succeed without OpenAPI validation, got {res.status_code}"
# Should return JSON with watch list (empty in this case)
assert isinstance(res.json, dict), "Should return JSON dictionary for watch list"
def test_openapi_validation_create_tag_missing_required_title(client, live_server, measure_memory_usage):
"""Test that creating a tag without required title triggers OpenAPI validation error."""
api_key = live_server.app.config['DATASTORE'].data['settings']['application'].get('api_access_token')
# Try to create a tag without the required 'title' field
res = client.post(
url_for("tag"),
data=json.dumps({"notification_urls": ["mailto:test@example.com"]}), # Missing required 'title' field
headers={'x-api-key': api_key, 'content-type': 'application/json'},
follow_redirects=True
)
# Should get 400 error due to missing required field
assert res.status_code == 400, f"Expected 400 but got {res.status_code}"
assert b"OpenAPI validation failed" in res.data, "Should contain OpenAPI validation error message"
def test_openapi_validation_watch_update_allows_partial_updates(client, live_server, measure_memory_usage):
"""Test that watch updates allow partial updates without requiring all fields (positive test)."""
api_key = live_server.app.config['DATASTORE'].data['settings']['application'].get('api_access_token')
# First create a valid watch
res = client.post(
url_for("createwatch"),
data=json.dumps({"url": "https://example.com", "title": "Test Watch"}),
headers={'x-api-key': api_key, 'content-type': 'application/json'},
follow_redirects=True
)
assert res.status_code == 201, "Watch creation should succeed"
# Get the watch list to find the UUID
res = client.get(
url_for("createwatch"),
headers={'x-api-key': api_key}
)
assert res.status_code == 200
watch_uuid = list(res.json.keys())[0]
# Update only the title (partial update) - should succeed
res = client.put(
url_for("watch", uuid=watch_uuid),
headers={'x-api-key': api_key, 'content-type': 'application/json'},
data=json.dumps({"title": "Updated Title Only"}), # Only updating title, not URL
)
# Should succeed because UpdateWatch schema allows partial updates
assert res.status_code == 200, f"Partial updates should succeed, got {res.status_code}"
# Verify the update worked
res = client.get(
url_for("watch", uuid=watch_uuid),
headers={'x-api-key': api_key}
)
assert res.status_code == 200
assert res.json.get('title') == 'Updated Title Only', "Title should be updated"
assert res.json.get('url') == 'https://example.com', "URL should remain unchanged"

View File

@@ -1,18 +1,15 @@
#!/usr/bin/env python3
from flask import url_for
from .util import live_server_setup, wait_for_all_checks, set_original_response
from .util import live_server_setup, wait_for_all_checks
import json
import time
def test_api_tags_listing(client, live_server, measure_memory_usage):
# live_server_setup(live_server) # Setup on conftest per function
api_key = live_server.app.config['DATASTORE'].data['settings']['application'].get('api_access_token')
tag_title = 'Test Tag'
set_original_response()
# Get a listing
res = client.get(
url_for("tags"),
headers={'x-api-key': api_key}
@@ -107,8 +104,6 @@ def test_api_tags_listing(client, live_server, measure_memory_usage):
assert res.status_code == 201
watch_uuid = res.json.get('uuid')
wait_for_all_checks()
# Verify tag is associated with watch by name if need be
res = client.get(
url_for("watch", uuid=watch_uuid),
@@ -117,21 +112,6 @@ def test_api_tags_listing(client, live_server, measure_memory_usage):
assert res.status_code == 200
assert new_tag_uuid in res.json.get('tags', [])
# Check recheck by tag
before_check_time = live_server.app.config['DATASTORE'].data['watching'][watch_uuid].get('last_checked')
time.sleep(1)
res = client.get(
url_for("tag", uuid=new_tag_uuid) + "?recheck=true",
headers={'x-api-key': api_key}
)
wait_for_all_checks()
assert res.status_code == 200
assert b'OK, 1 watches' in res.data
after_check_time = live_server.app.config['DATASTORE'].data['watching'][watch_uuid].get('last_checked')
assert before_check_time != after_check_time
# Delete tag
res = client.delete(
url_for("tag", uuid=new_tag_uuid),
@@ -161,6 +141,3 @@ def test_api_tags_listing(client, live_server, measure_memory_usage):
headers={'x-api-key': api_key},
)
assert res.status_code == 204

View File

@@ -23,7 +23,7 @@ def test_basic_auth(client, live_server, measure_memory_usage):
# Check form validation
res = client.post(
url_for("ui.ui_edit.edit_page", uuid="first"),
data={"include_filters": "", "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests", "time_between_check_use_default": "y"},
data={"include_filters": "", "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests"},
follow_redirects=True
)
assert b"Updated watch." in res.data

View File

@@ -89,7 +89,7 @@ def test_check_basic_change_detection_functionality(client, live_server, measure
assert b'CDATA' in res.data
assert expected_url.encode('utf-8') in res.data
#
# Following the 'diff' link, it should no longer display as 'unviewed' even after we recheck it a few times
res = client.get(url_for("ui.ui_views.diff_history_page", uuid=uuid))
assert b'selected=""' in res.data, "Confirm diff history page loaded"
@@ -104,34 +104,26 @@ def test_check_basic_change_detection_functionality(client, live_server, measure
wait_for_all_checks(client)
# Do this a few times.. ensures we don't accidently set the status
# Do this a few times.. ensures we dont accidently set the status
for n in range(2):
res = client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
# Give the thread time to pick it up
wait_for_all_checks(client)
# It should report nothing found (no new 'unviewed' class)
res = client.get(url_for("watchlist.index"))
assert b'unviewed' not in res.data
assert b'class="has-unviewed' not in res.data
assert b'head title' in res.data # Should be ON by default
assert b'head title' not in res.data # Should not be present because this is off by default
assert b'test-endpoint' in res.data
# Recheck it but only with a title change, content wasn't changed
set_original_response(extra_title=" and more")
set_original_response()
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
wait_for_all_checks(client)
res = client.get(url_for("watchlist.index"))
assert b'head title and more' in res.data
# disable <title> pickup
# Enable auto pickup of <title> in settings
res = client.post(
url_for("settings.settings_page"),
data={"application-ui-use_page_title_in_list": "", "requests-time_between_check-minutes": 180,
data={"application-extract_title_as_title": "1", "requests-time_between_check-minutes": 180,
'application-fetch_backend': "html_requests"},
follow_redirects=True
)
@@ -142,14 +134,16 @@ def test_check_basic_change_detection_functionality(client, live_server, measure
res = client.get(url_for("watchlist.index"))
assert b'unviewed' in res.data
assert b'class="has-unviewed' in res.data
assert b'head title' not in res.data # should now be off
# It should have picked up the <title>
assert b'head title' in res.data
# Be sure the last_viewed is going to be greater than the last snapshot
time.sleep(1)
# hit the mark all viewed link
res = client.get(url_for("ui.mark_all_viewed"), follow_redirects=True)
time.sleep(0.2)
assert b'class="has-unviewed' not in res.data
assert b'unviewed' not in res.data
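The hunk above drives the watch-list title behaviour through the global settings form; one side of the diff posts "application-ui-use_page_title_in_list", the other "application-extract_title_as_title", so the field name depends on the version under test. A hedged sketch of the same toggle as a helper:

# Sketch only; the default field name below is one of the two seen in the diff
def set_title_pickup(client, url_for, enabled, field="application-ui-use_page_title_in_list"):
    return client.post(
        url_for("settings.settings_page"),
        data={
            field: "1" if enabled else "",               # checkbox-style on/off
            "requests-time_between_check-minutes": 180,  # sent with every settings post in these tests
            "application-fetch_backend": "html_requests",
        },
        follow_redirects=True,
    )

# e.g. set_title_pickup(client, url_for, enabled=False), then recheck and assert
# that b'head title' no longer appears in the watch-list response.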

View File

@@ -86,8 +86,7 @@ def test_check_block_changedetection_text_NOT_present(client, live_server, measu
url_for("ui.ui_edit.edit_page", uuid="first"),
data={"text_should_not_be_present": ignore_text,
"url": test_url,
'fetch_backend': "html_requests",
"time_between_check_use_default": "y"
'fetch_backend': "html_requests"
},
follow_redirects=True
)
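For context on what the "text_should_not_be_present" field configured above does, here is a deliberately simplified illustration of the idea (one blocking phrase per line, change suppressed while any phrase is still on the page); it is not changedetection.io's implementation:

def change_is_reportable(page_text, text_should_not_be_present):
    # Treat the field as a newline-separated list of blocking phrases
    blockers = [line.strip() for line in text_should_not_be_present.splitlines() if line.strip()]
    return not any(blocker in page_text for blocker in blockers)

# change_is_reportable("Out of stock", "Out of stock")  -> False (suppressed)
# change_is_reportable("In stock now", "Out of stock")  -> True  (reported)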

View File

@@ -4,8 +4,6 @@ import time
from flask import url_for
from .util import live_server_setup, wait_for_all_checks
from ..model import CONDITIONS_MATCH_LOGIC_DEFAULT
def set_original_response(number="50"):
test_return_data = f"""<html>
@@ -78,7 +76,7 @@ def test_conditions_with_text_and_number(client, live_server):
"fetch_backend": "html_requests",
"include_filters": ".number-container",
"title": "Number AND Text Condition Test",
"conditions_match_logic": CONDITIONS_MATCH_LOGIC_DEFAULT, # ALL = AND logic
"conditions_match_logic": "ALL", # ALL = AND logic
"conditions-0-operator": "in",
"conditions-0-field": "page_filtered_text",
"conditions-0-value": "5",
@@ -105,7 +103,6 @@ def test_conditions_with_text_and_number(client, live_server):
"conditions-5-operator": "contains_regex",
"conditions-5-field": "page_filtered_text",
"conditions-5-value": "\d",
"time_between_check_use_default": "y",
},
follow_redirects=True
)
@@ -286,11 +283,10 @@ def test_lev_conditions_plugin(client, live_server, measure_memory_usage):
data={
"url": test_url,
"fetch_backend": "html_requests",
"conditions_match_logic": CONDITIONS_MATCH_LOGIC_DEFAULT, # ALL = AND logic
"conditions_match_logic": "ALL", # ALL = AND logic
"conditions-0-field": "levenshtein_ratio",
"conditions-0-operator": "<",
"conditions-0-value": "0.8", # needs to be more of a diff to trigger a change
"time_between_check_use_default": "y"
"conditions-0-value": "0.8" # needs to be more of a diff to trigger a change
},
follow_redirects=True
)
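The conditions posted above are encoded as WTForms-style indexed keys plus a match-logic selector (the test comments note that "ALL" means AND logic, i.e. every rule must pass). A small sketch that builds that flat dict from a list of rules, using only names that appear in the test data:

def conditions_form_data(rules, match_logic="ALL"):
    # rules is a list of (field, operator, value) tuples
    data = {"conditions_match_logic": match_logic}
    for n, (field, operator, value) in enumerate(rules):
        data[f"conditions-{n}-field"] = field
        data[f"conditions-{n}-operator"] = operator
        data[f"conditions-{n}-value"] = value
    return data

# e.g. conditions_form_data([("page_filtered_text", "in", "5"),
#                            ("page_filtered_text", "contains_regex", r"\d")])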

View File

@@ -95,7 +95,7 @@ def test_check_markup_include_filters_restriction(client, live_server, measure_m
# Add our URL to the import page
res = client.post(
url_for("ui.ui_edit.edit_page", uuid="first"),
data={"include_filters": include_filters, "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests", "time_between_check_use_default": "y"},
data={"include_filters": include_filters, "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests"},
follow_redirects=True
)
assert b"Updated watch." in res.data
@@ -154,8 +154,7 @@ def test_check_multiple_filters(client, live_server, measure_memory_usage):
"url": test_url,
"tags": "",
"headers": "",
'fetch_backend': "html_requests",
"time_between_check_use_default": "y"},
'fetch_backend': "html_requests"},
follow_redirects=True
)
assert b"Updated watch." in res.data
@@ -209,8 +208,7 @@ def test_filter_is_empty_help_suggestion(client, live_server, measure_memory_usa
"url": test_url,
"tags": "",
"headers": "",
'fetch_backend': "html_requests",
"time_between_check_use_default": "y"},
'fetch_backend': "html_requests"},
follow_redirects=True
)
assert b"Updated watch." in res.data

Some files were not shown because too many files have changed in this diff