Compare commits

..

8 Commits

Author  SHA1  Message  Date
dgtlmoon  14d2a71ac6  Adding extra test to probe OpenAPI validation  2025-09-15 12:56:21 +02:00
dgtlmoon  b5b8005b84  Bump schema version  2025-09-15 12:55:56 +02:00
dgtlmoon  53556fdc8b  Skip OpenAPI validation for GET requests since they don't have request bodies  2025-09-15 11:47:51 +02:00
dgtlmoon  71bd3cbb6a  OpenAPI spec check improvements  2025-09-15 11:36:13 +02:00
dgtlmoon  ef7df5d044  remove ignore  2025-09-15 11:04:31 +02:00
dgtlmoon  033a27b790  Fix path and caching  2025-09-15 10:58:42 +02:00
dgtlmoon  4b2f394fce  Include YML  2025-09-15 10:35:04 +02:00
dgtlmoon  015db5fae1  OpenAPI validation was being skipped #3423  2025-09-15 10:33:31 +02:00
66 changed files with 102 additions and 2330 deletions
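The API module further down imports a validate_openapi_request decorator whose body is not part of this compare, so the following is only a rough sketch of the behaviour the commit messages describe: per-endpoint validation of the JSON request body against the OpenAPI spec, skipped for GET requests because they carry no body. The load_request_body_schema lookup and the error shape are placeholders, not the project's actual code.

# Hedged sketch only -- the real validate_openapi_request lives in the API package and may differ.
from functools import wraps
from flask import request
from jsonschema import validate, ValidationError

def validate_openapi_request(operation_id):
    """Validate the JSON request body for operation_id; GET requests are skipped."""
    def decorator(f):
        @wraps(f)
        def wrapper(*args, **kwargs):
            # GET (and HEAD) requests have no request body, so there is nothing to validate
            if request.method not in ('GET', 'HEAD'):
                schema = load_request_body_schema(operation_id)  # hypothetical spec lookup
                body = request.get_json(force=True, silent=True) or {}
                try:
                    validate(instance=body, schema=schema)
                except ValidationError as e:
                    return {'message': f'Request body failed OpenAPI validation: {e.message}'}, 400
            return f(*args, **kwargs)
        return wrapper
    return decorator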

View File

@@ -71,7 +71,6 @@ jobs:
docker run test-changedetectionio bash -c 'python3 -m unittest changedetectionio.tests.unit.test_watch_model'
docker run test-changedetectionio bash -c 'python3 -m unittest changedetectionio.tests.unit.test_jinja2_security'
docker run test-changedetectionio bash -c 'python3 -m unittest changedetectionio.tests.unit.test_semver'
docker run test-changedetectionio bash -c 'python3 -m unittest changedetectionio.tests.unit.test_browser_notifications'
- name: Test built container with Pytest (generally as requests/plaintext fetching)
run: |

View File

@@ -2,7 +2,7 @@
# Read more https://github.com/dgtlmoon/changedetection.io/wiki
__version__ = '0.50.14'
__version__ = '0.50.12'
from changedetectionio.strtobool import strtobool
from json.decoder import JSONDecodeError

View File

@@ -14,39 +14,6 @@ import copy
from . import schema, schema_create_watch, schema_update_watch, validate_openapi_request
def validate_time_between_check_required(json_data):
"""
Validate that at least one time interval is specified when not using default settings.
Returns None if valid, or error message string if invalid.
Defaults to using global settings if time_between_check_use_default is not provided.
"""
# Default to using global settings if not specified
use_default = json_data.get('time_between_check_use_default', True)
# If using default settings, no validation needed
if use_default:
return None
# If not using defaults, check if time_between_check exists and has at least one non-zero value
time_check = json_data.get('time_between_check')
if not time_check:
# No time_between_check provided and not using defaults - this is an error
return "At least one time interval (weeks, days, hours, minutes, or seconds) must be specified when not using global settings."
# time_between_check exists, check if it has at least one non-zero value
if any([
(time_check.get('weeks') or 0) > 0,
(time_check.get('days') or 0) > 0,
(time_check.get('hours') or 0) > 0,
(time_check.get('minutes') or 0) > 0,
(time_check.get('seconds') or 0) > 0
]):
return None
# time_between_check exists but all values are 0 or empty - this is an error
return "At least one time interval (weeks, days, hours, minutes, or seconds) must be specified when not using global settings."
class Watch(Resource):
def __init__(self, **kwargs):
# datastore is a black box dependency
@@ -88,8 +55,6 @@ class Watch(Resource):
# attr .last_changed will check for the last written text snapshot on change
watch['last_changed'] = watch.last_changed
watch['viewed'] = watch.viewed
watch['link'] = watch.link,
return watch
@auth.check_token
@@ -116,11 +81,6 @@ class Watch(Resource):
if not request.json.get('proxy') in plist:
return "Invalid proxy choice, currently supported proxies are '{}'".format(', '.join(plist)), 400
# Validate time_between_check when not using defaults
validation_error = validate_time_between_check_required(request.json)
if validation_error:
return validation_error, 400
watch.update(request.json)
return "OK", 200
@@ -236,11 +196,6 @@ class CreateWatch(Resource):
if not json_data.get('proxy') in plist:
return "Invalid proxy choice, currently supported proxies are '{}'".format(', '.join(plist)), 400
# Validate time_between_check when not using defaults
validation_error = validate_time_between_check_required(json_data)
if validation_error:
return validation_error, 400
extras = copy.deepcopy(json_data)
# Because we renamed 'tag' to 'tags' but don't want to change the API (can do this in v2 of the API)
@@ -275,8 +230,6 @@ class CreateWatch(Resource):
'last_changed': watch.last_changed,
'last_checked': watch['last_checked'],
'last_error': watch['last_error'],
'link': watch.link,
'page_title': watch['page_title'],
'title': watch['title'],
'url': watch['url'],
'viewed': watch.viewed
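For reference, a minimal usage sketch of the validate_time_between_check_required helper removed above (the payload values are illustrative):

# Illustrative calls against the helper shown in this file's diff
assert validate_time_between_check_required({'time_between_check_use_default': True}) is None
assert validate_time_between_check_required({
    'time_between_check_use_default': False,
    'time_between_check': {'hours': 3},
}) is None
# Not using global defaults and no non-zero interval -> the error string is returned
assert validate_time_between_check_required({
    'time_between_check_use_default': False,
    'time_between_check': {'hours': 0, 'minutes': 0},
}) is not None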

View File

@@ -119,12 +119,6 @@ def build_watch_json_schema(d):
schema['properties']['time_between_check'] = build_time_between_check_json_schema()
schema['properties']['time_between_check_use_default'] = {
"type": "boolean",
"default": True,
"description": "Whether to use global settings for time between checks - defaults to true if not set"
}
schema['properties']['browser_steps'] = {
"anyOf": [
{

View File

@@ -1 +0,0 @@
# Browser notifications blueprint

View File

@@ -1,76 +0,0 @@
from flask import Blueprint, jsonify, request
from loguru import logger
def construct_blueprint(datastore):
browser_notifications_blueprint = Blueprint('browser_notifications', __name__)
@browser_notifications_blueprint.route("/test", methods=['POST'])
def test_browser_notification():
"""Send a test browser notification using the apprise handler"""
try:
from changedetectionio.notification.apprise_plugin.custom_handlers import apprise_browser_notification_handler
# Check if there are any subscriptions
browser_subscriptions = datastore.data.get('settings', {}).get('application', {}).get('browser_subscriptions', [])
if not browser_subscriptions:
return jsonify({'success': False, 'message': 'No browser subscriptions found'}), 404
# Get notification data from request or use defaults
data = request.get_json() or {}
title = data.get('title', 'Test Notification')
body = data.get('body', 'This is a test notification from changedetection.io')
# Use the apprise handler directly
success = apprise_browser_notification_handler(
body=body,
title=title,
notify_type='info',
meta={'url': 'browser://test'}
)
if success:
subscription_count = len(browser_subscriptions)
return jsonify({
'success': True,
'message': f'Test notification sent successfully to {subscription_count} subscriber(s)'
})
else:
return jsonify({'success': False, 'message': 'Failed to send test notification'}), 500
except ImportError:
logger.error("Browser notification handler not available")
return jsonify({'success': False, 'message': 'Browser notification handler not available'}), 500
except Exception as e:
logger.error(f"Failed to send test browser notification: {e}")
return jsonify({'success': False, 'message': f'Error: {str(e)}'}), 500
@browser_notifications_blueprint.route("/clear", methods=['POST'])
def clear_all_browser_notifications():
"""Clear all browser notification subscriptions from the datastore"""
try:
# Get current subscription count
browser_subscriptions = datastore.data.get('settings', {}).get('application', {}).get('browser_subscriptions', [])
subscription_count = len(browser_subscriptions)
# Clear all subscriptions
if 'settings' not in datastore.data:
datastore.data['settings'] = {}
if 'application' not in datastore.data['settings']:
datastore.data['settings']['application'] = {}
datastore.data['settings']['application']['browser_subscriptions'] = []
datastore.needs_write = True
logger.info(f"Cleared {subscription_count} browser notification subscriptions")
return jsonify({
'success': True,
'message': f'Cleared {subscription_count} browser notification subscription(s)'
})
except Exception as e:
logger.error(f"Failed to clear all browser notifications: {e}")
return jsonify({'success': False, 'message': f'Clear all failed: {str(e)}'}), 500
return browser_notifications_blueprint

View File

@@ -158,9 +158,9 @@ document.addEventListener('DOMContentLoaded', function() {
<div>
<span class="watch-title">
{% if system_use_url_watchlist or watch.get('use_page_title_in_list') %}
{{ watch.label }}
{{watch.label}}
{% else %}
{{ watch.get('title') or watch.link }}
{{watch.url}}
{% endif %}
<a class="external" target="_blank" rel="noopener" href="{{ watch.link.replace('source:','') }}">&nbsp;</a>
</span>

View File

@@ -39,11 +39,6 @@ from loguru import logger
from changedetectionio import __version__
from changedetectionio import queuedWatchMetaData
from changedetectionio.api import Watch, WatchHistory, WatchSingleHistory, CreateWatch, Import, SystemInfo, Tag, Tags, Notifications, WatchFavicon
from changedetectionio.notification.BrowserNotifications import (
BrowserNotificationsVapidPublicKey,
BrowserNotificationsSubscribe,
BrowserNotificationsUnsubscribe
)
from changedetectionio.api.Search import Search
from .time_handler import is_within_schedule
@@ -99,7 +94,6 @@ except locale.Error:
logger.warning(f"Unable to set locale {default_locale}, locale is not installed maybe?")
watch_api = Api(app, decorators=[csrf.exempt])
browser_notification_api = Api(app, decorators=[csrf.exempt])
def init_app_secret(datastore_path):
secret = ""
@@ -342,11 +336,6 @@ def changedetection_app(config=None, datastore_o=None):
watch_api.add_resource(Notifications, '/api/v1/notifications',
resource_class_kwargs={'datastore': datastore})
# Browser notification endpoints
browser_notification_api.add_resource(BrowserNotificationsVapidPublicKey, '/browser-notifications-api/vapid-public-key')
browser_notification_api.add_resource(BrowserNotificationsSubscribe, '/browser-notifications-api/subscribe')
browser_notification_api.add_resource(BrowserNotificationsUnsubscribe, '/browser-notifications-api/unsubscribe')
@login_manager.user_loader
def user_loader(email):
@@ -500,29 +489,10 @@ def changedetection_app(config=None, datastore_o=None):
except FileNotFoundError:
abort(404)
@app.route("/service-worker.js", methods=['GET'])
def service_worker():
from flask import make_response
try:
# Serve from the changedetectionio/static/js directory
static_js_path = os.path.join(os.path.dirname(__file__), 'static', 'js')
response = make_response(send_from_directory(static_js_path, "service-worker.js"))
response.headers['Content-Type'] = 'application/javascript'
response.headers['Service-Worker-Allowed'] = '/'
response.headers['Cache-Control'] = 'no-cache, no-store, must-revalidate'
response.headers['Pragma'] = 'no-cache'
response.headers['Expires'] = '0'
return response
except FileNotFoundError:
abort(404)
import changedetectionio.blueprint.browser_steps as browser_steps
app.register_blueprint(browser_steps.construct_blueprint(datastore), url_prefix='/browser-steps')
import changedetectionio.blueprint.browser_notifications.browser_notifications as browser_notifications
app.register_blueprint(browser_notifications.construct_blueprint(datastore), url_prefix='/browser-notifications')
from changedetectionio.blueprint.imports import construct_blueprint as construct_import_blueprint
app.register_blueprint(construct_import_blueprint(datastore, update_q, queuedWatchMetaData), url_prefix='/imports')

View File

@@ -23,7 +23,6 @@ from wtforms import (
)
from flask_wtf.file import FileField, FileAllowed
from wtforms.fields import FieldList
from wtforms.utils import unset_value
from wtforms.validators import ValidationError
@@ -57,8 +56,6 @@ valid_method = {
default_method = 'GET'
allow_simplehost = not strtobool(os.getenv('BLOCK_SIMPLEHOSTS', 'False'))
REQUIRE_ATLEAST_ONE_TIME_PART_MESSAGE_DEFAULT='At least one time interval (weeks, days, hours, minutes, or seconds) must be specified.'
REQUIRE_ATLEAST_ONE_TIME_PART_WHEN_NOT_GLOBAL_DEFAULT='At least one time interval (weeks, days, hours, minutes, or seconds) must be specified when not using global settings.'
class StringListField(StringField):
widget = widgets.TextArea()
@@ -215,35 +212,6 @@ class ScheduleLimitForm(Form):
self.sunday.form.enabled.label.text = "Sunday"
def validate_time_between_check_has_values(form):
"""
Custom validation function for TimeBetweenCheckForm.
Returns True if at least one time interval field has a value > 0.
"""
res = any([
form.weeks.data and int(form.weeks.data) > 0,
form.days.data and int(form.days.data) > 0,
form.hours.data and int(form.hours.data) > 0,
form.minutes.data and int(form.minutes.data) > 0,
form.seconds.data and int(form.seconds.data) > 0
])
return res
class RequiredTimeInterval(object):
"""
WTForms validator that ensures at least one time interval field has a value > 0.
Use this with FormField(TimeBetweenCheckForm, validators=[RequiredTimeInterval()]).
"""
def __init__(self, message=None):
self.message = message or 'At least one time interval (weeks, days, hours, minutes, or seconds) must be specified.'
def __call__(self, form, field):
if not validate_time_between_check_has_values(field.form):
raise ValidationError(self.message)
class TimeBetweenCheckForm(Form):
weeks = IntegerField('Weeks', validators=[validators.Optional(), validators.NumberRange(min=0, message="Should contain zero or more seconds")])
days = IntegerField('Days', validators=[validators.Optional(), validators.NumberRange(min=0, message="Should contain zero or more seconds")])
@@ -252,123 +220,6 @@ class TimeBetweenCheckForm(Form):
seconds = IntegerField('Seconds', validators=[validators.Optional(), validators.NumberRange(min=0, message="Should contain zero or more seconds")])
# @todo add total seconds minimum validator = minimum_seconds_recheck_time
def __init__(self, formdata=None, obj=None, prefix="", data=None, meta=None, **kwargs):
super().__init__(formdata, obj, prefix, data, meta, **kwargs)
self.require_at_least_one = kwargs.get('require_at_least_one', False)
self.require_at_least_one_message = kwargs.get('require_at_least_one_message', REQUIRE_ATLEAST_ONE_TIME_PART_MESSAGE_DEFAULT)
def validate(self, **kwargs):
"""Custom validation that can optionally require at least one time interval."""
# Run normal field validation first
if not super().validate(**kwargs):
return False
# Apply optional "at least one" validation
if self.require_at_least_one:
if not validate_time_between_check_has_values(self):
# Add error to the form's general errors (not field-specific)
if not hasattr(self, '_formdata_errors'):
self._formdata_errors = []
self._formdata_errors.append(self.require_at_least_one_message)
return False
return True
class EnhancedFormField(FormField):
"""
An enhanced FormField that supports conditional validation with top-level error messages.
Adds a 'top_errors' property for validation errors at the FormField level.
"""
def __init__(self, form_class, label=None, validators=None, separator="-",
conditional_field=None, conditional_message=None, conditional_test_function=None, **kwargs):
"""
Initialize EnhancedFormField with optional conditional validation.
:param conditional_field: Name of the field this FormField depends on (e.g. 'time_between_check_use_default')
:param conditional_message: Error message to show when validation fails
:param conditional_test_function: Custom function to test if FormField has valid values.
Should take self.form as parameter and return True if valid.
"""
super().__init__(form_class, label, validators, separator, **kwargs)
self.top_errors = []
self.conditional_field = conditional_field
self.conditional_message = conditional_message or "At least one field must have a value when not using defaults."
self.conditional_test_function = conditional_test_function
def validate(self, form, extra_validators=()):
"""
Custom validation that supports conditional logic and stores top-level errors.
"""
self.top_errors = []
# First run the normal FormField validation
base_valid = super().validate(form, extra_validators)
# Apply conditional validation if configured
if self.conditional_field and hasattr(form, self.conditional_field):
conditional_field_obj = getattr(form, self.conditional_field)
# If the conditional field is False/unchecked, check if this FormField has any values
if not conditional_field_obj.data:
# Use custom test function if provided, otherwise use generic fallback
if self.conditional_test_function:
has_any_value = self.conditional_test_function(self.form)
else:
# Generic fallback - check if any field has truthy data
has_any_value = any(field.data for field in self.form if hasattr(field, 'data') and field.data)
if not has_any_value:
self.top_errors.append(self.conditional_message)
base_valid = False
return base_valid
class RequiredFormField(FormField):
"""
A FormField that passes require_at_least_one=True to TimeBetweenCheckForm.
Use this when you want the sub-form to always require at least one value.
"""
def __init__(self, form_class, label=None, validators=None, separator="-", **kwargs):
super().__init__(form_class, label, validators, separator, **kwargs)
def process(self, formdata, data=unset_value, extra_filters=None):
if extra_filters:
raise TypeError(
"FormField cannot take filters, as the encapsulated"
"data is not mutable."
)
if data is unset_value:
try:
data = self.default()
except TypeError:
data = self.default
self._obj = data
self.object_data = data
prefix = self.name + self.separator
# Pass require_at_least_one=True to the sub-form
if isinstance(data, dict):
self.form = self.form_class(formdata=formdata, prefix=prefix, require_at_least_one=True, **data)
else:
self.form = self.form_class(formdata=formdata, obj=data, prefix=prefix, require_at_least_one=True)
@property
def errors(self):
"""Include sub-form validation errors"""
form_errors = self.form.errors
# Add any general form errors to a special 'form' key
if hasattr(self.form, '_formdata_errors') and self.form._formdata_errors:
form_errors = dict(form_errors) # Make a copy
form_errors['form'] = self.form._formdata_errors
return form_errors
# Separated by key:value
class StringDictKeyValue(StringField):
widget = widgets.TextArea()
@@ -497,7 +348,7 @@ class ValidateJinja2Template(object):
joined_data = ' '.join(map(str, field.data)) if isinstance(field.data, list) else f"{field.data}"
try:
jinja2_env = ImmutableSandboxedEnvironment(loader=BaseLoader, extensions=['jinja2_time.TimeExtension'])
jinja2_env = ImmutableSandboxedEnvironment(loader=BaseLoader)
jinja2_env.globals.update(notification.valid_tokens)
# Extra validation tokens provided on the form_class(... extra_tokens={}) setup
if hasattr(field, 'extra_notification_tokens'):
@@ -707,7 +558,6 @@ class commonSettingsForm(Form):
processor = RadioField( label=u"Processor - What do you want to achieve?", choices=processors.available_processors(), default="text_json_diff")
timezone = StringField("Timezone for watch schedule", render_kw={"list": "timezones"}, validators=[validateTimeZoneName()])
webdriver_delay = IntegerField('Wait seconds before extracting text', validators=[validators.Optional(), validators.NumberRange(min=1, message="Should contain one or more seconds")])
class importForm(Form):
@@ -733,16 +583,11 @@ class processor_text_json_diff_form(commonSettingsForm):
url = fields.URLField('URL', validators=[validateURL()])
tags = StringTagUUID('Group tag', [validators.Optional()], default='')
time_between_check = EnhancedFormField(
TimeBetweenCheckForm,
conditional_field='time_between_check_use_default',
conditional_message=REQUIRE_ATLEAST_ONE_TIME_PART_WHEN_NOT_GLOBAL_DEFAULT,
conditional_test_function=validate_time_between_check_has_values
)
time_between_check = FormField(TimeBetweenCheckForm)
time_schedule_limit = FormField(ScheduleLimitForm)
time_between_check_use_default = BooleanField('Use global settings for time between check and scheduler.', default=False)
time_between_check_use_default = BooleanField('Use global settings for time between check', default=False)
include_filters = StringListField('CSS/JSONPath/JQ/XPath Filters', [ValidateCSSJSONXPATHInput()], default='')
@@ -883,7 +728,7 @@ class DefaultUAInputForm(Form):
# datastore.data['settings']['requests']..
class globalSettingsRequestForm(Form):
time_between_check = RequiredFormField(TimeBetweenCheckForm)
time_between_check = FormField(TimeBetweenCheckForm)
time_schedule_limit = FormField(ScheduleLimitForm)
proxy = RadioField('Proxy')
jitter_seconds = IntegerField('Random jitter seconds ± check',

View File

@@ -66,11 +66,6 @@ class model(dict):
'socket_io_enabled': True,
'favicons_enabled': True
},
'vapid': {
'private_key': None,
'public_key': None,
'contact_email': None
},
}
}
}

View File

@@ -170,7 +170,7 @@ class model(watch_base):
@property
def label(self):
# Used for sorting, display, etc
return self.get('title') or self.get('page_title') or self.link
return self.get('title') or self.get('page_title') or self.get('url')
@property
def last_changed(self):

View File

@@ -1,217 +0,0 @@
import json
from flask import request, current_app
from flask_restful import Resource, marshal_with, fields
from loguru import logger
browser_notifications_fields = {
'success': fields.Boolean,
'message': fields.String,
}
vapid_public_key_fields = {
'publicKey': fields.String,
}
test_notification_fields = {
'success': fields.Boolean,
'message': fields.String,
'sent_count': fields.Integer,
}
class BrowserNotificationsVapidPublicKey(Resource):
"""Get VAPID public key for browser push notifications"""
@marshal_with(vapid_public_key_fields)
def get(self):
try:
from changedetectionio.notification.apprise_plugin.browser_notification_helpers import (
get_vapid_config_from_datastore, convert_pem_public_key_for_browser
)
datastore = current_app.config.get('DATASTORE')
if not datastore:
return {'publicKey': None}, 500
private_key, public_key_pem, contact_email = get_vapid_config_from_datastore(datastore)
if not public_key_pem:
return {'publicKey': None}, 404
# Convert PEM format to URL-safe base64 format for browser
public_key_b64 = convert_pem_public_key_for_browser(public_key_pem)
if public_key_b64:
return {'publicKey': public_key_b64}
else:
return {'publicKey': None}, 500
except Exception as e:
logger.error(f"Failed to get VAPID public key: {e}")
return {'publicKey': None}, 500
class BrowserNotificationsSubscribe(Resource):
"""Subscribe to browser notifications"""
@marshal_with(browser_notifications_fields)
def post(self):
try:
data = request.get_json()
if not data:
return {'success': False, 'message': 'No data provided'}, 400
subscription = data.get('subscription')
if not subscription:
return {'success': False, 'message': 'Subscription is required'}, 400
# Validate subscription format
required_fields = ['endpoint', 'keys']
for field in required_fields:
if field not in subscription:
return {'success': False, 'message': f'Missing subscription field: {field}'}, 400
if 'p256dh' not in subscription['keys'] or 'auth' not in subscription['keys']:
return {'success': False, 'message': 'Missing subscription keys'}, 400
# Get datastore
datastore = current_app.config.get('DATASTORE')
if not datastore:
return {'success': False, 'message': 'Datastore not available'}, 500
# Initialize browser_subscriptions if it doesn't exist
if 'browser_subscriptions' not in datastore.data['settings']['application']:
datastore.data['settings']['application']['browser_subscriptions'] = []
# Check if subscription already exists
existing_subscriptions = datastore.data['settings']['application']['browser_subscriptions']
for existing_sub in existing_subscriptions:
if existing_sub.get('endpoint') == subscription.get('endpoint'):
return {'success': True, 'message': 'Already subscribed to browser notifications'}
# Add new subscription
datastore.data['settings']['application']['browser_subscriptions'].append(subscription)
datastore.needs_write = True
logger.info(f"New browser notification subscription: {subscription.get('endpoint')}")
return {'success': True, 'message': 'Successfully subscribed to browser notifications'}
except Exception as e:
logger.error(f"Failed to subscribe to browser notifications: {e}")
return {'success': False, 'message': f'Subscription failed: {str(e)}'}, 500
class BrowserNotificationsUnsubscribe(Resource):
"""Unsubscribe from browser notifications"""
@marshal_with(browser_notifications_fields)
def post(self):
try:
data = request.get_json()
if not data:
return {'success': False, 'message': 'No data provided'}, 400
subscription = data.get('subscription')
if not subscription or not subscription.get('endpoint'):
return {'success': False, 'message': 'Valid subscription is required'}, 400
# Get datastore
datastore = current_app.config.get('DATASTORE')
if not datastore:
return {'success': False, 'message': 'Datastore not available'}, 500
# Check if subscriptions exist
browser_subscriptions = datastore.data.get('settings', {}).get('application', {}).get('browser_subscriptions', [])
if not browser_subscriptions:
return {'success': True, 'message': 'No subscriptions found'}
# Remove subscription with matching endpoint
endpoint = subscription.get('endpoint')
original_count = len(browser_subscriptions)
datastore.data['settings']['application']['browser_subscriptions'] = [
sub for sub in browser_subscriptions
if sub.get('endpoint') != endpoint
]
removed_count = original_count - len(datastore.data['settings']['application']['browser_subscriptions'])
if removed_count > 0:
datastore.needs_write = True
logger.info(f"Removed {removed_count} browser notification subscription(s)")
return {'success': True, 'message': 'Successfully unsubscribed from browser notifications'}
else:
return {'success': True, 'message': 'No matching subscription found'}
except Exception as e:
logger.error(f"Failed to unsubscribe from browser notifications: {e}")
return {'success': False, 'message': f'Unsubscribe failed: {str(e)}'}, 500
class BrowserNotificationsTest(Resource):
"""Send a test browser notification"""
@marshal_with(test_notification_fields)
def post(self):
try:
data = request.get_json()
if not data:
return {'success': False, 'message': 'No data provided', 'sent_count': 0}, 400
title = data.get('title', 'Test Notification')
body = data.get('body', 'This is a test notification from changedetection.io')
# Get datastore to check if subscriptions exist
datastore = current_app.config.get('DATASTORE')
if not datastore:
return {'success': False, 'message': 'Datastore not available', 'sent_count': 0}, 500
# Check if there are subscriptions before attempting to send
browser_subscriptions = datastore.data.get('settings', {}).get('application', {}).get('browser_subscriptions', [])
if not browser_subscriptions:
return {'success': False, 'message': 'No subscriptions found', 'sent_count': 0}, 404
# Use the apprise handler directly
try:
from changedetectionio.notification.apprise_plugin.custom_handlers import apprise_browser_notification_handler
# Call the apprise handler with test data
success = apprise_browser_notification_handler(
body=body,
title=title,
notify_type='info',
meta={'url': 'browser://test'}
)
# Count how many subscriptions we have after sending (some may have been removed if invalid)
final_subscriptions = datastore.data.get('settings', {}).get('application', {}).get('browser_subscriptions', [])
sent_count = len(browser_subscriptions) # Original count
if success:
return {
'success': True,
'message': f'Test notification sent successfully to {sent_count} subscriber(s)',
'sent_count': sent_count
}
else:
return {
'success': False,
'message': 'Failed to send test notification',
'sent_count': 0
}, 500
except ImportError:
return {'success': False, 'message': 'Browser notification handler not available', 'sent_count': 0}, 500
except Exception as e:
logger.error(f"Failed to send test browser notification: {e}")
return {'success': False, 'message': f'Test failed: {str(e)}', 'sent_count': 0}, 500

View File

@@ -1,273 +0,0 @@
"""
Browser notification helpers for Web Push API
Shared utility functions for VAPID key handling and notification sending
"""
import json
import re
import time
from loguru import logger
def convert_pem_private_key_for_pywebpush(private_key):
"""
Convert PEM private key to the format that pywebpush expects
Args:
private_key: PEM private key string or already converted key
Returns:
Vapid instance for pywebpush (avoids PEM parsing compatibility issues)
"""
try:
from py_vapid import Vapid
import tempfile
import os
# If we get a string, assume it's PEM and create a Vapid instance from it
if isinstance(private_key, str) and private_key.startswith('-----BEGIN'):
# Write PEM to temporary file and load with Vapid.from_file
with tempfile.NamedTemporaryFile(mode='w', suffix='.pem', delete=False) as tmp_file:
tmp_file.write(private_key)
tmp_file.flush()
temp_path = tmp_file.name
try:
# Load using Vapid.from_file - this is more compatible with pywebpush
vapid_instance = Vapid.from_file(temp_path)
os.unlink(temp_path) # Clean up
logger.debug("Successfully created Vapid instance from PEM")
return vapid_instance
except Exception as e:
os.unlink(temp_path) # Clean up even on error
logger.error(f"Failed to create Vapid instance from PEM: {e}")
# Fall back to returning the original PEM string
return private_key
else:
# Return as-is if not a PEM string
return private_key
except Exception as e:
logger.error(f"Failed to convert private key: {e}")
return private_key
def convert_pem_public_key_for_browser(public_key_pem):
"""
Convert PEM public key to URL-safe base64 format for browser applicationServerKey
Args:
public_key_pem: PEM public key string
Returns:
URL-safe base64 encoded public key without padding
"""
try:
from cryptography.hazmat.primitives import serialization
import base64
# Parse PEM directly using cryptography library
pem_bytes = public_key_pem.encode() if isinstance(public_key_pem, str) else public_key_pem
# Load the public key from PEM
public_key_crypto = serialization.load_pem_public_key(pem_bytes)
# Get the raw public key bytes in uncompressed format (what browsers expect)
public_key_raw = public_key_crypto.public_bytes(
encoding=serialization.Encoding.X962,
format=serialization.PublicFormat.UncompressedPoint
)
# Convert to URL-safe base64 (remove padding)
public_key_b64 = base64.urlsafe_b64encode(public_key_raw).decode('ascii').rstrip('=')
return public_key_b64
except Exception as e:
logger.error(f"Failed to convert public key format: {e}")
return None
def send_push_notifications(subscriptions, notification_payload, private_key, contact_email, datastore):
"""
Send push notifications to a list of subscriptions
Args:
subscriptions: List of push subscriptions
notification_payload: Dict with notification data (title, body, etc.)
private_key: VAPID private key (will be converted if needed)
contact_email: Contact email for VAPID claims
datastore: Datastore object for updating subscriptions
Returns:
Tuple of (success_count, total_count)
"""
try:
from pywebpush import webpush, WebPushException
except ImportError:
logger.error("pywebpush not available - cannot send browser notifications")
return 0, len(subscriptions)
# Convert private key to format pywebpush expects
private_key_for_push = convert_pem_private_key_for_pywebpush(private_key)
success_count = 0
total_count = len(subscriptions)
# Send to all subscriptions
for subscription in subscriptions[:]: # Copy list to avoid modification issues
try:
webpush(
subscription_info=subscription,
data=json.dumps(notification_payload),
vapid_private_key=private_key_for_push,
vapid_claims={
"sub": f"mailto:{contact_email}",
"aud": f"https://{subscription['endpoint'].split('/')[2]}"
}
)
success_count += 1
except WebPushException as e:
logger.warning(f"Failed to send browser notification to subscription: {e}")
# Remove invalid subscriptions (410 = Gone, 404 = Not Found)
if e.response and e.response.status_code in [404, 410]:
logger.info("Removing invalid browser notification subscription")
try:
subscriptions.remove(subscription)
datastore.needs_write = True
except ValueError:
pass # Already removed
except Exception as e:
logger.error(f"Unexpected error sending browser notification: {e}")
return success_count, total_count
def create_notification_payload(title, body, icon_path=None):
"""
Create a standard notification payload
Args:
title: Notification title
body: Notification body
icon_path: Optional icon path (defaults to favicon)
Returns:
Dict with notification payload
"""
return {
'title': title,
'body': body,
'icon': icon_path or '/static/favicons/favicon-32x32.png',
'badge': '/static/favicons/favicon-32x32.png',
'timestamp': int(time.time() * 1000),
}
def get_vapid_config_from_datastore(datastore):
"""
Get VAPID configuration from datastore with proper error handling
Args:
datastore: Datastore object
Returns:
Tuple of (private_key, public_key, contact_email) or (None, None, None) if error
"""
try:
if not datastore:
return None, None, None
vapid_config = datastore.data.get('settings', {}).get('application', {}).get('vapid', {})
private_key = vapid_config.get('private_key')
public_key = vapid_config.get('public_key')
contact_email = vapid_config.get('contact_email', 'citizen@example.com')
return private_key, public_key, contact_email
except Exception as e:
logger.error(f"Failed to get VAPID config from datastore: {e}")
return None, None, None
def get_browser_subscriptions(datastore):
"""
Get browser subscriptions from datastore
Args:
datastore: Datastore object
Returns:
List of subscriptions
"""
try:
if not datastore:
return []
return datastore.data.get('settings', {}).get('application', {}).get('browser_subscriptions', [])
except Exception as e:
logger.error(f"Failed to get browser subscriptions: {e}")
return []
def save_browser_subscriptions(datastore, subscriptions):
"""
Save browser subscriptions to datastore
Args:
datastore: Datastore object
subscriptions: List of subscriptions to save
"""
try:
if not datastore:
return
# Ensure the settings structure exists
if 'settings' not in datastore.data:
datastore.data['settings'] = {}
if 'application' not in datastore.data['settings']:
datastore.data['settings']['application'] = {}
datastore.data['settings']['application']['browser_subscriptions'] = subscriptions
datastore.needs_write = True
except Exception as e:
logger.error(f"Failed to save browser subscriptions: {e}")
def create_error_response(message, sent_count=0, status_code=500):
"""
Create standardized error response for API endpoints
Args:
message: Error message
sent_count: Number of notifications sent (for test endpoints)
status_code: HTTP status code
Returns:
Tuple of (response_dict, status_code)
"""
return {'success': False, 'message': message, 'sent_count': sent_count}, status_code
def create_success_response(message, sent_count=None):
"""
Create standardized success response for API endpoints
Args:
message: Success message
sent_count: Number of notifications sent (optional)
Returns:
Response dict
"""
response = {'success': True, 'message': message}
if sent_count is not None:
response['sent_count'] = sent_count
return response

View File

@@ -1,6 +1,5 @@
import json
import re
import time
from urllib.parse import unquote_plus
import requests
@@ -111,80 +110,3 @@ def apprise_http_custom_handler(
except Exception as e:
logger.error(f"Unexpected error occurred while sending custom notification to {url}: {e}")
return False
@notify(on="browser")
def apprise_browser_notification_handler(
body: str,
title: str,
notify_type: str,
meta: dict,
*args,
**kwargs,
) -> bool:
"""
Browser push notification handler for browser:// URLs
Ignores anything after browser:// and uses single default channel
"""
try:
from pywebpush import webpush, WebPushException
from flask import current_app
# Get VAPID keys from app settings
try:
datastore = current_app.config.get('DATASTORE')
if not datastore:
logger.error("No datastore available for browser notifications")
return False
vapid_config = datastore.data.get('settings', {}).get('application', {}).get('vapid', {})
private_key = vapid_config.get('private_key')
public_key = vapid_config.get('public_key')
contact_email = vapid_config.get('contact_email', 'admin@changedetection.io')
if not private_key or not public_key:
logger.error("VAPID keys not configured for browser notifications")
return False
except Exception as e:
logger.error(f"Failed to get VAPID configuration: {e}")
return False
# Get subscriptions from datastore
browser_subscriptions = datastore.data.get('settings', {}).get('application', {}).get('browser_subscriptions', [])
if not browser_subscriptions:
logger.info("No browser subscriptions found")
return True # Not an error - just no subscribers
# Import helper functions
try:
from .browser_notification_helpers import create_notification_payload, send_push_notifications
except ImportError:
logger.error("Browser notification helpers not available")
return False
# Prepare notification payload
notification_payload = create_notification_payload(title, body)
# Send notifications using shared helper
success_count, total_count = send_push_notifications(
subscriptions=browser_subscriptions,
notification_payload=notification_payload,
private_key=private_key,
contact_email=contact_email,
datastore=datastore
)
# Update datastore with cleaned subscriptions
datastore.data['settings']['application']['browser_subscriptions'] = browser_subscriptions
logger.info(f"Sent browser notifications: {success_count}/{total_count} successful")
return success_count > 0
except ImportError:
logger.error("pywebpush not available - cannot send browser notifications")
return False
except Exception as e:
logger.error(f"Unexpected error in browser notification handler: {e}")
return False
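The handler above is bound to the browser:// scheme by Apprise's @notify(on="browser") decorator, so any notification URL starting with browser:// is routed to it once the module has been imported (the "be sure its registered" import in process_notification below does exactly that). A minimal usage sketch, assuming an active Flask application context so the handler can reach the datastore through current_app:

import apprise
# Importing the module registers the browser:// schema via the @notify decorator
from changedetectionio.notification.apprise_plugin.custom_handlers import apprise_browser_notification_handler

apobj = apprise.Apprise()
apobj.add('browser://default')   # anything after browser:// is ignored by the handler
apobj.notify(title='Page changed', body='A watched page has changed')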

View File

@@ -8,7 +8,7 @@ def process_notification(n_object, datastore):
from changedetectionio.safe_jinja import render as jinja_render
from . import default_notification_format_for_watch, default_notification_format, valid_notification_formats
# be sure its registered
from .apprise_plugin.custom_handlers import apprise_http_custom_handler, apprise_browser_notification_handler
from .apprise_plugin.custom_handlers import apprise_http_custom_handler
now = time.time()
if n_object.get('notification_timestamp'):

View File

@@ -1,6 +1,6 @@
{
"name": "changedetection.io",
"short_name": "changedetection",
"name": "",
"short_name": "",
"icons": [
{
"src": "android-chrome-192x192.png",
@@ -15,8 +15,5 @@
],
"theme_color": "#ffffff",
"background_color": "#ffffff",
"display": "standalone",
"start_url": "/",
"scope": "/",
"gcm_sender_id": "103953800507"
"display": "standalone"
}

View File

@@ -1,450 +0,0 @@
/**
* changedetection.io Browser Push Notifications
* Handles service worker registration, push subscription management, and notification permissions
*/
class BrowserNotifications {
constructor() {
this.serviceWorkerRegistration = null;
this.vapidPublicKey = null;
this.isSubscribed = false;
this.init();
}
async init() {
if (!this.isSupported()) {
console.warn('Push notifications are not supported in this browser');
return;
}
try {
// Get VAPID public key from server
await this.fetchVapidPublicKey();
// Register service worker
await this.registerServiceWorker();
// Check existing subscription state
await this.checkExistingSubscription();
// Initialize UI elements
this.initializeUI();
// Set up notification URL monitoring
this.setupNotificationUrlMonitoring();
} catch (error) {
console.error('Failed to initialize browser notifications:', error);
}
}
isSupported() {
return 'serviceWorker' in navigator &&
'PushManager' in window &&
'Notification' in window;
}
async fetchVapidPublicKey() {
try {
const response = await fetch('/browser-notifications-api/vapid-public-key');
if (!response.ok) {
throw new Error(`HTTP ${response.status}: ${response.statusText}`);
}
const data = await response.json();
this.vapidPublicKey = data.publicKey;
} catch (error) {
console.error('Failed to fetch VAPID public key:', error);
throw error;
}
}
async registerServiceWorker() {
try {
this.serviceWorkerRegistration = await navigator.serviceWorker.register('/service-worker.js', {
scope: '/'
});
console.log('Service Worker registered successfully');
// Wait for service worker to be ready
await navigator.serviceWorker.ready;
} catch (error) {
console.error('Service Worker registration failed:', error);
throw error;
}
}
initializeUI() {
// Bind event handlers to existing elements in the template
this.bindEventHandlers();
// Update UI based on current permission state
this.updatePermissionStatus();
}
bindEventHandlers() {
const enableBtn = document.querySelector('#enable-notifications-btn');
const testBtn = document.querySelector('#test-notification-btn');
if (enableBtn) {
enableBtn.addEventListener('click', () => this.requestNotificationPermission());
}
if (testBtn) {
testBtn.addEventListener('click', () => this.sendTestNotification());
}
}
setupNotificationUrlMonitoring() {
// Monitor the notification URLs textarea for browser:// URLs
const notificationUrlsField = document.querySelector('textarea[name*="notification_urls"]');
if (notificationUrlsField) {
const checkForBrowserUrls = async () => {
const urls = notificationUrlsField.value || '';
const hasBrowserUrls = /browser:\/\//.test(urls);
// If browser URLs are detected and we're not subscribed, auto-subscribe
if (hasBrowserUrls && !this.isSubscribed && Notification.permission === 'default') {
const shouldSubscribe = confirm('Browser notifications detected! Would you like to enable browser notifications now?');
if (shouldSubscribe) {
await this.requestNotificationPermission();
}
} else if (hasBrowserUrls && !this.isSubscribed && Notification.permission === 'granted') {
// Permission already granted but not subscribed - auto-subscribe silently
console.log('Auto-subscribing to browser notifications...');
await this.subscribe();
}
};
// Check immediately
checkForBrowserUrls();
// Check on input changes
notificationUrlsField.addEventListener('input', checkForBrowserUrls);
}
}
async updatePermissionStatus() {
const statusElement = document.querySelector('#permission-status');
const enableBtn = document.querySelector('#enable-notifications-btn');
const testBtn = document.querySelector('#test-notification-btn');
if (!statusElement) return;
const permission = Notification.permission;
statusElement.textContent = permission;
statusElement.className = `permission-${permission}`;
// Show/hide controls based on permission
if (permission === 'default') {
if (enableBtn) enableBtn.style.display = 'inline-block';
if (testBtn) testBtn.style.display = 'none';
} else if (permission === 'granted') {
if (enableBtn) enableBtn.style.display = 'none';
if (testBtn) testBtn.style.display = 'inline-block';
} else { // denied
if (enableBtn) enableBtn.style.display = 'none';
if (testBtn) testBtn.style.display = 'none';
}
}
async requestNotificationPermission() {
try {
const permission = await Notification.requestPermission();
this.updatePermissionStatus();
if (permission === 'granted') {
console.log('Notification permission granted');
// Automatically subscribe to browser notifications
this.subscribe();
} else {
console.log('Notification permission denied');
}
} catch (error) {
console.error('Error requesting notification permission:', error);
}
}
async subscribe() {
if (Notification.permission !== 'granted') {
alert('Please enable notifications first');
return;
}
if (this.isSubscribed) {
console.log('Already subscribed to browser notifications');
return;
}
try {
// First, try to clear any existing subscription with different keys
await this.clearExistingSubscription();
// Create push subscription
const subscription = await this.serviceWorkerRegistration.pushManager.subscribe({
userVisibleOnly: true,
applicationServerKey: this.urlBase64ToUint8Array(this.vapidPublicKey)
});
// Send subscription to server
const response = await fetch('/browser-notifications-api/subscribe', {
method: 'POST',
headers: {
'Content-Type': 'application/json',
'X-CSRFToken': document.querySelector('input[name=csrf_token]')?.value
},
body: JSON.stringify({
subscription: subscription.toJSON()
})
});
if (!response.ok) {
throw new Error(`HTTP ${response.status}: ${response.statusText}`);
}
// Store subscription status
this.isSubscribed = true;
console.log('Successfully subscribed to browser notifications');
} catch (error) {
console.error('Failed to subscribe to browser notifications:', error);
// Show user-friendly error message
if (error.message.includes('different applicationServerKey')) {
this.showSubscriptionConflictDialog(error);
} else {
alert(`Failed to subscribe: ${error.message}`);
}
}
}
async unsubscribe() {
try {
if (!this.isSubscribed) return;
// Get current subscription
const subscription = await this.serviceWorkerRegistration.pushManager.getSubscription();
if (!subscription) {
this.isSubscribed = false;
return;
}
// Unsubscribe from server
const response = await fetch('/browser-notifications-api/unsubscribe', {
method: 'POST',
headers: {
'Content-Type': 'application/json',
'X-CSRFToken': document.querySelector('input[name=csrf_token]')?.value
},
body: JSON.stringify({
subscription: subscription.toJSON()
})
});
if (!response.ok) {
console.warn(`Server unsubscribe failed: ${response.status}`);
}
// Unsubscribe locally
await subscription.unsubscribe();
// Update status
this.isSubscribed = false;
console.log('Unsubscribed from browser notifications');
} catch (error) {
console.error('Failed to unsubscribe from browser notifications:', error);
}
}
async sendTestNotification() {
try {
// First, check if we're subscribed
if (!this.isSubscribed) {
const shouldSubscribe = confirm('You need to subscribe to browser notifications first. Subscribe now?');
if (shouldSubscribe) {
await this.subscribe();
// Give a moment for subscription to complete
await new Promise(resolve => setTimeout(resolve, 1000));
} else {
return;
}
}
const response = await fetch('/browser-notifications/test', {
method: 'POST',
headers: {
'Content-Type': 'application/json',
'X-CSRFToken': document.querySelector('input[name=csrf_token]')?.value
}
});
if (!response.ok) {
if (response.status === 404) {
// No subscriptions found on server - try subscribing
alert('No browser subscriptions found. Subscribing now...');
await this.subscribe();
return;
}
throw new Error(`HTTP ${response.status}: ${response.statusText}`);
}
const result = await response.json();
alert(result.message);
console.log('Test notification result:', result);
} catch (error) {
console.error('Failed to send test notification:', error);
alert(`Failed to send test notification: ${error.message}`);
}
}
urlBase64ToUint8Array(base64String) {
const padding = '='.repeat((4 - base64String.length % 4) % 4);
const base64 = (base64String + padding)
.replace(/-/g, '+')
.replace(/_/g, '/');
const rawData = window.atob(base64);
const outputArray = new Uint8Array(rawData.length);
for (let i = 0; i < rawData.length; ++i) {
outputArray[i] = rawData.charCodeAt(i);
}
return outputArray;
}
async checkExistingSubscription() {
/**
* Check if we already have a valid browser subscription
* Updates this.isSubscribed based on actual browser state
*/
try {
if (!this.serviceWorkerRegistration) {
this.isSubscribed = false;
return;
}
const existingSubscription = await this.serviceWorkerRegistration.pushManager.getSubscription();
if (existingSubscription) {
// We have a subscription - verify it's still valid and matches our VAPID key
const subscriptionJson = existingSubscription.toJSON();
// Check if the endpoint is still active (basic validation)
if (subscriptionJson.endpoint && subscriptionJson.keys) {
console.log('Found existing valid subscription');
this.isSubscribed = true;
} else {
console.log('Found invalid subscription, clearing...');
await existingSubscription.unsubscribe();
this.isSubscribed = false;
}
} else {
console.log('No existing subscription found');
this.isSubscribed = false;
}
} catch (error) {
console.warn('Failed to check existing subscription:', error);
this.isSubscribed = false;
}
}
async clearExistingSubscription() {
/**
* Clear any existing push subscription that might conflict with our VAPID keys
*/
try {
const existingSubscription = await this.serviceWorkerRegistration.pushManager.getSubscription();
if (existingSubscription) {
console.log('Found existing subscription, unsubscribing...');
await existingSubscription.unsubscribe();
console.log('Successfully cleared existing subscription');
}
} catch (error) {
console.warn('Failed to clear existing subscription:', error);
// Don't throw - this is just cleanup
}
}
showSubscriptionConflictDialog(error) {
/**
* Show user-friendly dialog for subscription conflicts
*/
const message = `Browser notifications are already set up for a different changedetection.io instance or with different settings.
To fix this:
1. Clear your existing subscription
2. Try subscribing again
Would you like to automatically clear the old subscription and retry?`;
if (confirm(message)) {
this.clearExistingSubscription().then(() => {
// Retry subscription after clearing
setTimeout(() => {
this.subscribe();
}, 500);
});
} else {
alert('To use browser notifications, please manually clear your browser notifications for this site in browser settings, then try again.');
}
}
async clearAllNotifications() {
/**
* Clear all browser notification subscriptions (admin function)
*/
try {
// Call the server to clear ALL subscriptions from datastore
const response = await fetch('/browser-notifications/clear', {
method: 'POST',
headers: {
'Content-Type': 'application/json',
'X-CSRFToken': document.querySelector('input[name=csrf_token]')?.value
}
});
if (response.ok) {
const result = await response.json();
console.log('Server response:', result.message);
// Also clear the current browser's subscription if it exists
const existingSubscription = await this.serviceWorkerRegistration.pushManager.getSubscription();
if (existingSubscription) {
await existingSubscription.unsubscribe();
console.log('Cleared current browser subscription');
}
// Update status
this.isSubscribed = false;
alert(result.message + '. All browser notifications have been cleared.');
} else {
const error = await response.json();
console.error('Server clear failed:', error.message);
alert('Failed to clear server subscriptions: ' + error.message);
}
} catch (error) {
console.error('Failed to clear all notifications:', error);
alert('Failed to clear notifications: ' + error.message);
}
}
}
// Initialize when DOM is ready
if (document.readyState === 'loading') {
document.addEventListener('DOMContentLoaded', () => {
window.browserNotifications = new BrowserNotifications();
});
} else {
window.browserNotifications = new BrowserNotifications();
}

View File

@@ -1,95 +0,0 @@
// changedetection.io Service Worker for Browser Push Notifications
self.addEventListener('install', function(event) {
console.log('Service Worker installing');
self.skipWaiting();
});
self.addEventListener('activate', function(event) {
console.log('Service Worker activating');
event.waitUntil(self.clients.claim());
});
self.addEventListener('push', function(event) {
console.log('Push message received', event);
let notificationData = {
title: 'changedetection.io',
body: 'A watched page has changed',
icon: '/static/favicons/favicon-32x32.png',
badge: '/static/favicons/favicon-32x32.png',
tag: 'changedetection-notification',
requireInteraction: false,
timestamp: Date.now()
};
// Parse push data if available
if (event.data) {
try {
const pushData = event.data.json();
notificationData = {
...notificationData,
...pushData
};
} catch (e) {
console.warn('Failed to parse push data:', e);
notificationData.body = event.data.text() || notificationData.body;
}
}
const promiseChain = self.registration.showNotification(
notificationData.title,
{
body: notificationData.body,
icon: notificationData.icon,
badge: notificationData.badge,
tag: notificationData.tag,
requireInteraction: notificationData.requireInteraction,
timestamp: notificationData.timestamp,
data: {
url: notificationData.url || '/',
timestamp: notificationData.timestamp
}
}
);
event.waitUntil(promiseChain);
});
self.addEventListener('notificationclick', function(event) {
console.log('Notification clicked', event);
event.notification.close();
const targetUrl = event.notification.data?.url || '/';
event.waitUntil(
clients.matchAll().then(function(clientList) {
// Check if there's already a window/tab open with our app
for (let i = 0; i < clientList.length; i++) {
const client = clientList[i];
if (client.url.includes(self.location.origin) && 'focus' in client) {
client.navigate(targetUrl);
return client.focus();
}
}
// If no existing window, open a new one
if (clients.openWindow) {
return clients.openWindow(targetUrl);
}
})
);
});
self.addEventListener('notificationclose', function(event) {
console.log('Notification closed', event);
});
// Handle messages from the main thread
self.addEventListener('message', function(event) {
console.log('Service Worker received message:', event.data);
if (event.data && event.data.type === 'SKIP_WAITING') {
self.skipWaiting();
}
});

View File

@@ -140,28 +140,6 @@ class ChangeDetectionStore:
secret = secrets.token_hex(16)
self.__data['settings']['application']['api_access_token'] = secret
# Generate VAPID keys for browser push notifications
if not self.__data['settings']['application']['vapid'].get('private_key'):
try:
from py_vapid import Vapid
vapid = Vapid()
vapid.generate_keys()
# Convert bytes to strings for JSON serialization
private_pem = vapid.private_pem()
public_pem = vapid.public_pem()
self.__data['settings']['application']['vapid']['private_key'] = private_pem.decode() if isinstance(private_pem, bytes) else private_pem
self.__data['settings']['application']['vapid']['public_key'] = public_pem.decode() if isinstance(public_pem, bytes) else public_pem
# Set default contact email if not present
if not self.__data['settings']['application']['vapid'].get('contact_email'):
self.__data['settings']['application']['vapid']['contact_email'] = 'citizen@example.com'
logger.info("Generated new VAPID keys for browser push notifications")
except ImportError:
logger.warning("py_vapid not available - browser notifications will not work")
except Exception as e:
logger.warning(f"Failed to generate VAPID keys: {e}")
self.needs_write = True
# Finally start the thread that will manage periodic data saves to JSON

View File

@@ -33,34 +33,6 @@
<div id="notification-test-log" style="display: none;"><span class="pure-form-message-inline">Processing..</span></div>
</div>
</div>
<!-- Browser Notifications -->
<div id="browser-notification-section">
<div class="pure-control-group">
<label>Browser Notifications</label>
<div class="pure-form-message-inline">
<p><strong>Browser push notifications!</strong> Use <code>browser://</code> URLs in your notification settings to receive real-time push notifications even when this tab is closed.</p>
<p><small><strong>Troubleshooting:</strong> If you get "different applicationServerKey" errors, click "Clear All Notifications" below and try again. This happens when switching between different changedetection.io instances.</small></p>
<div id="browser-notification-controls" style="margin-top: 1em;">
<div id="notification-permission-status">
<p>Browser notifications: <span id="permission-status">checking...</span></p>
</div>
<div id="browser-notification-actions">
<button type="button" id="enable-notifications-btn" class="pure-button button-secondary button-xsmall" style="display: none;">
Enable Browser Notifications
</button>
<button type="button" id="test-notification-btn" class="pure-button button-secondary button-xsmall" style="display: none;">
Send browser test notification
</button>
<button type="button" id="clear-notifications-btn" class="pure-button button-secondary button-xsmall" onclick="window.browserNotifications?.clearAllNotifications()" style="margin-left: 0.5em;">
Clear All Notifications
</button>
</div>
</div>
</div>
</div>
</div>
<div id="notification-customisation" class="pure-control-group">
<div class="pure-control-group">
{{ render_field(form.notification_title, class="m-d notification-title", placeholder=settings_application['notification_title']) }}

View File

@@ -1,29 +1,14 @@
{% macro render_field(field) %}
<div {% if field.errors or field.top_errors %} class="error" {% endif %}>{{ field.label }}</div>
<div {% if field.errors or field.top_errors %} class="error" {% endif %}>{{ field(**kwargs)|safe }}
{% if field.top_errors %}
top
<ul class="errors top-errors">
{% for error in field.top_errors %}
<li>{{ error }}</li>
{% endfor %}
</ul>
{% endif %}
{% if field.errors %}
<ul class=errors>
{% if field.errors is mapping and 'form' in field.errors %}
{# and subfield form errors, such as used in RequiredFormField() for TimeBetweenCheckForm sub form #}
{% set errors = field.errors['form'] %}
{% else %}
{# regular list of errors with this field #}
{% set errors = field.errors %}
{% endif %}
{% for error in errors %}
<li>{{ error }}</li>
{% endfor %}
</ul>
{% endif %}
</div>
<div {% if field.errors %} class="error" {% endif %}>{{ field.label }}</div>
<div {% if field.errors %} class="error" {% endif %}>{{ field(**kwargs)|safe }}
{% if field.errors %}
<ul class=errors>
{% for error in field.errors %}
<li>{{ error }}</li>
{% endfor %}
</ul>
{% endif %}
</div>
{% endmacro %}
{% macro render_checkbox_field(field) %}

View File

@@ -35,7 +35,6 @@
<script src="{{url_for('static_content', group='js', filename='jquery-3.6.0.min.js')}}"></script>
<script src="{{url_for('static_content', group='js', filename='csrf.js')}}" defer></script>
<script src="{{url_for('static_content', group='js', filename='feather-icons.min.js')}}" defer></script>
<script src="{{url_for('static_content', group='js', filename='browser-notifications.js')}}" defer></script>
{% if socket_io_enabled %}
<script src="{{url_for('static_content', group='js', filename='socket.io.min.js')}}"></script>
<script src="{{url_for('static_content', group='js', filename='realtime.js')}}" defer></script>

View File

@@ -55,8 +55,7 @@ def do_test(client, live_server, make_test_use_extra_browser=False):
"tags": "",
"headers": "",
'fetch_backend': f"extra_browser_{custom_browser_name}",
'webdriver_js_execute_code': '',
"time_between_check_use_default": "y"
'webdriver_js_execute_code': ''
},
follow_redirects=True
)

View File

@@ -28,7 +28,6 @@ def test_execute_custom_js(client, live_server, measure_memory_usage):
'fetch_backend': "html_webdriver",
'webdriver_js_execute_code': 'document.querySelector("button[name=test-button]").click();',
'headers': "testheader: yes\buser-agent: MyCustomAgent",
"time_between_check_use_default": "y",
},
follow_redirects=True
)

View File

@@ -27,7 +27,6 @@ def test_preferred_proxy(client, live_server, measure_memory_usage):
"proxy": "proxy-two",
"tags": "",
"url": url,
"time_between_check_use_default": "y",
},
follow_redirects=True
)

View File

@@ -62,7 +62,6 @@ def test_noproxy_option(client, live_server, measure_memory_usage):
"proxy": "no-proxy",
"tags": "",
"url": url,
"time_between_check_use_default": "y",
},
follow_redirects=True
)

View File

@@ -44,7 +44,6 @@ def test_proxy_noconnect_custom(client, live_server, measure_memory_usage):
"url": test_url,
"fetch_backend": "html_webdriver" if os.getenv('PLAYWRIGHT_DRIVER_URL') or os.getenv("WEBDRIVER_URL") else "html_requests",
"proxy": "ui-0custom-test-proxy",
"time_between_check_use_default": "y",
}
res = client.post(

View File

@@ -66,7 +66,6 @@ def test_socks5(client, live_server, measure_memory_usage):
"proxy": "ui-0socks5proxy",
"tags": "",
"url": test_url,
"time_between_check_use_default": "y",
},
follow_redirects=True
)

View File

@@ -53,7 +53,6 @@ def test_socks5_from_proxiesjson_file(client, live_server, measure_memory_usage)
"proxy": "socks5proxy",
"tags": "",
"url": test_url,
"time_between_check_use_default": "y",
},
follow_redirects=True
)

View File

@@ -157,8 +157,7 @@ def test_check_notification_email_formats_default_Text_override_HTML(client, liv
data={
"url": test_url,
"notification_format": 'HTML',
'fetch_backend': "html_requests",
"time_between_check_use_default": "y"},
'fetch_backend': "html_requests"},
follow_redirects=True
)
assert b"Updated watch." in res.data

View File

@@ -61,8 +61,7 @@ def test_check_removed_line_contains_trigger(client, live_server, measure_memory
data={"trigger_text": 'The golden line',
"url": test_url,
'fetch_backend': "html_requests",
'filter_text_removed': 'y',
"time_between_check_use_default": "y"},
'filter_text_removed': 'y'},
follow_redirects=True
)
assert b"Updated watch." in res.data
@@ -155,8 +154,7 @@ def test_check_add_line_contains_trigger(client, live_server, measure_memory_usa
'processor': 'text_json_diff',
'fetch_backend': "html_requests",
'filter_text_removed': '',
'filter_text_added': 'y',
"time_between_check_use_default": "y"},
'filter_text_added': 'y'},
follow_redirects=True
)
assert b"Updated watch." in res.data

View File

@@ -23,7 +23,7 @@ def test_basic_auth(client, live_server, measure_memory_usage):
# Check form validation
res = client.post(
url_for("ui.ui_edit.edit_page", uuid="first"),
data={"include_filters": "", "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests", "time_between_check_use_default": "y"},
data={"include_filters": "", "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests"},
follow_redirects=True
)
assert b"Updated watch." in res.data

View File

@@ -86,8 +86,7 @@ def test_check_block_changedetection_text_NOT_present(client, live_server, measu
url_for("ui.ui_edit.edit_page", uuid="first"),
data={"text_should_not_be_present": ignore_text,
"url": test_url,
'fetch_backend': "html_requests",
"time_between_check_use_default": "y"
'fetch_backend': "html_requests"
},
follow_redirects=True
)

View File

@@ -105,7 +105,6 @@ def test_conditions_with_text_and_number(client, live_server):
"conditions-5-operator": "contains_regex",
"conditions-5-field": "page_filtered_text",
"conditions-5-value": "\d",
"time_between_check_use_default": "y",
},
follow_redirects=True
)
@@ -289,8 +288,7 @@ def test_lev_conditions_plugin(client, live_server, measure_memory_usage):
"conditions_match_logic": CONDITIONS_MATCH_LOGIC_DEFAULT, # ALL = AND logic
"conditions-0-field": "levenshtein_ratio",
"conditions-0-operator": "<",
"conditions-0-value": "0.8", # needs to be more of a diff to trigger a change
"time_between_check_use_default": "y"
"conditions-0-value": "0.8" # needs to be more of a diff to trigger a change
},
follow_redirects=True
)

View File

@@ -95,7 +95,7 @@ def test_check_markup_include_filters_restriction(client, live_server, measure_m
# Add our URL to the import page
res = client.post(
url_for("ui.ui_edit.edit_page", uuid="first"),
data={"include_filters": include_filters, "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests", "time_between_check_use_default": "y"},
data={"include_filters": include_filters, "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests"},
follow_redirects=True
)
assert b"Updated watch." in res.data
@@ -154,8 +154,7 @@ def test_check_multiple_filters(client, live_server, measure_memory_usage):
"url": test_url,
"tags": "",
"headers": "",
'fetch_backend': "html_requests",
"time_between_check_use_default": "y"},
'fetch_backend': "html_requests"},
follow_redirects=True
)
assert b"Updated watch." in res.data
@@ -209,8 +208,7 @@ def test_filter_is_empty_help_suggestion(client, live_server, measure_memory_usa
"url": test_url,
"tags": "",
"headers": "",
'fetch_backend': "html_requests",
"time_between_check_use_default": "y"},
'fetch_backend': "html_requests"},
follow_redirects=True
)
assert b"Updated watch." in res.data

View File

@@ -171,7 +171,6 @@ def test_element_removal_full(client, live_server, measure_memory_usage):
"tags": "",
"headers": "",
"fetch_backend": "html_requests",
"time_between_check_use_default": "y",
},
follow_redirects=True,
)
@@ -246,7 +245,6 @@ body > table > tr:nth-child(3) > td:nth-child(3)""",
"url": test_url,
"tags": "",
"fetch_backend": "html_requests",
"time_between_check_use_default": "y",
},
follow_redirects=True,
)

View File

@@ -127,8 +127,7 @@ def test_low_level_errors_clear_correctly(client, live_server, measure_memory_us
url_for("ui.ui_edit.edit_page", uuid="first"),
data={
"url": test_url,
"fetch_backend": "html_requests",
"time_between_check_use_default": "y"},
"fetch_backend": "html_requests"},
follow_redirects=True
)

View File

@@ -95,8 +95,7 @@ def test_check_filter_multiline(client, live_server, measure_memory_usage):
"url": test_url,
"tags": "",
"headers": "",
'fetch_backend': "html_requests",
"time_between_check_use_default": "y"
'fetch_backend': "html_requests"
},
follow_redirects=True
)
@@ -150,8 +149,7 @@ def test_check_filter_and_regex_extract(client, live_server, measure_memory_usag
"url": test_url,
"tags": "",
"headers": "",
'fetch_backend': "html_requests",
"time_between_check_use_default": "y"
'fetch_backend': "html_requests"
},
follow_redirects=True
)
@@ -224,8 +222,7 @@ def test_regex_error_handling(client, live_server, measure_memory_usage):
url_for("ui.ui_edit.edit_page", uuid="first"),
data={"extract_text": '/something bad\d{3/XYZ',
"url": test_url,
"fetch_backend": "html_requests",
"time_between_check_use_default": "y"},
"fetch_backend": "html_requests"},
follow_redirects=True
)

View File

@@ -94,8 +94,7 @@ def test_filter_doesnt_exist_then_exists_should_get_notification(client, live_se
"title": "my title",
"headers": "",
"include_filters": '.ticket-available',
"fetch_backend": "html_requests",
"time_between_check_use_default": "y"})
"fetch_backend": "html_requests"})
res = client.post(
url_for("ui.ui_edit.edit_page", uuid="first"),

View File

@@ -72,7 +72,6 @@ def run_filter_test(client, live_server, content_filter):
"notification_format": "Text",
"fetch_backend": "html_requests",
"filter_failure_notification_send": 'y',
"time_between_check_use_default": "y",
"headers": "",
"tags": "my tag",
"title": "my title 123",

View File

@@ -424,8 +424,7 @@ def test_order_of_filters_tag_filter_and_watch_filter(client, live_server, measu
"url": test_url,
"tags": "test-tag-keep-order",
"headers": "",
'fetch_backend': "html_requests",
"time_between_check_use_default": "y"},
'fetch_backend': "html_requests"},
follow_redirects=True
)
assert b"Updated watch." in res.data

View File

@@ -111,7 +111,7 @@ def test_check_ignore_text_functionality(client, live_server, measure_memory_usa
# Add our URL to the import page
res = client.post(
url_for("ui.ui_edit.edit_page", uuid="first"),
data={"ignore_text": ignore_text, "url": test_url, 'fetch_backend': "html_requests", "time_between_check_use_default": "y"},
data={"ignore_text": ignore_text, "url": test_url, 'fetch_backend': "html_requests"},
follow_redirects=True
)
assert b"Updated watch." in res.data
@@ -205,7 +205,7 @@ def _run_test_global_ignore(client, as_source=False, extra_ignore=""):
#Adding some ignore text should not trigger a change
res = client.post(
url_for("ui.ui_edit.edit_page", uuid="first"),
data={"ignore_text": "something irrelevent but just to check", "url": test_url, 'fetch_backend': "html_requests", "time_between_check_use_default": "y"},
data={"ignore_text": "something irrelevent but just to check", "url": test_url, 'fetch_backend': "html_requests"},
follow_redirects=True
)
assert b"Updated watch." in res.data

View File

@@ -108,7 +108,7 @@ def test_403_page_check_works_with_ignore_status_code(client, live_server, measu
# Add our URL to the import page
res = client.post(
url_for("ui.ui_edit.edit_page", uuid="first"),
data={"ignore_status_codes": "y", "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests", "time_between_check_use_default": "y"},
data={"ignore_status_codes": "y", "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests"},
follow_redirects=True
)
assert b"Updated watch." in res.data

View File

@@ -257,8 +257,7 @@ def check_json_filter(json_filter, client, live_server):
"url": test_url,
"tags": "",
"headers": "",
"fetch_backend": "html_requests",
"time_between_check_use_default": "y"
"fetch_backend": "html_requests"
},
follow_redirects=True
)
@@ -329,8 +328,7 @@ def check_json_filter_bool_val(json_filter, client, live_server):
"url": test_url,
"tags": "",
"headers": "",
"fetch_backend": "html_requests",
"time_between_check_use_default": "y"
"fetch_backend": "html_requests"
},
follow_redirects=True
)
@@ -395,8 +393,7 @@ def check_json_ext_filter(json_filter, client, live_server):
"url": test_url,
"tags": "",
"headers": "",
"fetch_backend": "html_requests",
"time_between_check_use_default": "y"
"fetch_backend": "html_requests"
},
follow_redirects=True
)

View File

@@ -38,7 +38,6 @@ def test_content_filter_live_preview(client, live_server, measure_memory_usage):
"ignore_text": "something to ignore",
"trigger_text": "something to trigger",
"url": test_url,
"time_between_check_use_default": "y",
},
follow_redirects=True
)

View File

@@ -108,8 +108,7 @@ def test_check_notification(client, live_server, measure_memory_usage):
"tags": "my tag, my second tag",
"title": "my title",
"headers": "",
"fetch_backend": "html_requests",
"time_between_check_use_default": "y"})
"fetch_backend": "html_requests"})
res = client.post(
url_for("ui.ui_edit.edit_page", uuid="first"),
@@ -226,8 +225,7 @@ def test_check_notification(client, live_server, measure_memory_usage):
"notification_title": '',
"notification_body": '',
"notification_format": default_notification_format,
"fetch_backend": "html_requests",
"time_between_check_use_default": "y"},
"fetch_backend": "html_requests"},
follow_redirects=True
)
assert b"Updated watch." in res.data

View File

@@ -36,8 +36,7 @@ def test_check_notification_error_handling(client, live_server, measure_memory_u
"title": "",
"headers": "",
"time_between_check-minutes": "180",
"fetch_backend": "html_requests",
"time_between_check_use_default": "y"},
"fetch_backend": "html_requests"},
follow_redirects=True
)
assert b"Updated watch." in res.data

View File

@@ -44,8 +44,7 @@ def test_headers_in_request(client, live_server, measure_memory_usage):
"url": test_url,
"tags": "",
"fetch_backend": 'html_webdriver' if os.getenv('PLAYWRIGHT_DRIVER_URL') else 'html_requests',
"headers": "jinja2:{{ 1+1 }}\nxxx:ooo\ncool:yeah\r\ncookie:"+cookie_header,
"time_between_check_use_default": "y"},
"headers": "jinja2:{{ 1+1 }}\nxxx:ooo\ncool:yeah\r\ncookie:"+cookie_header},
follow_redirects=True
)
assert b"Updated watch." in res.data
@@ -110,8 +109,7 @@ def test_body_in_request(client, live_server, measure_memory_usage):
"tags": "",
"method": "POST",
"fetch_backend": "html_requests",
"body": "something something",
"time_between_check_use_default": "y"},
"body": "something something"},
follow_redirects=True
)
assert b"Updated watch." in res.data
@@ -128,8 +126,7 @@ def test_body_in_request(client, live_server, measure_memory_usage):
"tags": "",
"method": "POST",
"fetch_backend": "html_requests",
"body": body_value,
"time_between_check_use_default": "y"},
"body": body_value},
follow_redirects=True
)
assert b"Updated watch." in res.data
@@ -175,8 +172,7 @@ def test_body_in_request(client, live_server, measure_memory_usage):
"tags": "",
"method": "GET",
"fetch_backend": "html_requests",
"body": "invalid",
"time_between_check_use_default": "y"},
"body": "invalid"},
follow_redirects=True
)
assert b"Body must be empty when Request Method is set to GET" in res.data
@@ -215,8 +211,7 @@ def test_method_in_request(client, live_server, measure_memory_usage):
"url": test_url,
"tags": "",
"fetch_backend": "html_requests",
"method": "invalid",
"time_between_check_use_default": "y"},
"method": "invalid"},
follow_redirects=True
)
assert b"Not a valid choice" in res.data
@@ -228,8 +223,7 @@ def test_method_in_request(client, live_server, measure_memory_usage):
"url": test_url,
"tags": "",
"fetch_backend": "html_requests",
"method": "PATCH",
"time_between_check_use_default": "y"},
"method": "PATCH"},
follow_redirects=True
)
assert b"Updated watch." in res.data
@@ -303,8 +297,7 @@ def test_ua_global_override(client, live_server, measure_memory_usage):
"tags": "testtag",
"fetch_backend": 'html_requests',
# Important - also test case-insensitive
"headers": "User-AGent: agent-from-watch",
"time_between_check_use_default": "y"},
"headers": "User-AGent: agent-from-watch"},
follow_redirects=True
)
assert b"Updated watch." in res.data
@@ -372,8 +365,7 @@ def test_headers_textfile_in_request(client, live_server, measure_memory_usage):
"url": test_url,
"tags": "testtag",
"fetch_backend": 'html_webdriver' if os.getenv('PLAYWRIGHT_DRIVER_URL') else 'html_requests',
"headers": "xxx:ooo\ncool:yeah\r\n",
"time_between_check_use_default": "y"},
"headers": "xxx:ooo\ncool:yeah\r\n"},
follow_redirects=True
)
assert b"Updated watch." in res.data
@@ -448,8 +440,7 @@ def test_headers_validation(client, live_server):
data={
"url": test_url,
"fetch_backend": 'html_requests',
"headers": "User-AGent agent-from-watch\r\nsadfsadfsadfsdaf\r\n:foobar",
"time_between_check_use_default": "y"},
"headers": "User-AGent agent-from-watch\r\nsadfsadfsadfsdaf\r\n:foobar"},
follow_redirects=True
)

View File

@@ -121,7 +121,7 @@ def test_itemprop_price_change(client, live_server):
set_original_response(props_markup=instock_props[0], price='120.45')
res = client.post(
url_for("ui.ui_edit.edit_page", uuid="first"),
data={"restock_settings-follow_price_changes": "", "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests", "time_between_check_use_default": "y"},
data={"restock_settings-follow_price_changes": "", "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests"},
follow_redirects=True
)
assert b"Updated watch." in res.data
@@ -155,8 +155,7 @@ def _run_test_minmax_limit(client, extra_watch_edit_form):
"url": test_url,
"headers": "",
"time_between_check-hours": 5,
'fetch_backend': "html_requests",
"time_between_check_use_default": "y"
'fetch_backend': "html_requests"
}
data.update(extra_watch_edit_form)
res = client.post(
@@ -279,8 +278,7 @@ def test_itemprop_percent_threshold(client, live_server):
"url": test_url,
"tags": "",
"headers": "",
'fetch_backend': "html_requests",
"time_between_check_use_default": "y"
'fetch_backend': "html_requests"
},
follow_redirects=True
)

View File

@@ -158,7 +158,6 @@ def test_rss_xpath_filtering(client, live_server, measure_memory_usage):
"proxy": "no-proxy",
"tags": "",
"url": test_url,
"time_between_check_use_default": "y",
},
follow_redirects=True
)

View File

@@ -1,13 +1,10 @@
#!/usr/bin/env python3
import time
from copy import copy
from datetime import datetime, timezone
from zoneinfo import ZoneInfo
from flask import url_for
from .util import live_server_setup, wait_for_all_checks, extract_UUID_from_client
from ..forms import REQUIRE_ATLEAST_ONE_TIME_PART_MESSAGE_DEFAULT, REQUIRE_ATLEAST_ONE_TIME_PART_WHEN_NOT_GLOBAL_DEFAULT
# def test_setup(client, live_server):
# live_server_setup(live_server) # Setup on conftest per function
@@ -45,12 +42,11 @@ def test_check_basic_scheduler_functionality(client, live_server, measure_memory
uuid = next(iter(live_server.app.config['DATASTORE'].data['watching']))
# Setup all the days of the week using XXX as the placeholder for monday/tuesday/etc
last_check = copy(live_server.app.config['DATASTORE'].data['watching'][uuid]['last_checked'])
tpl = {
"time_schedule_limit-XXX-start_time": "00:00",
"time_schedule_limit-XXX-duration-hours": 24,
"time_schedule_limit-XXX-duration-minutes": 0,
"time_between_check-seconds": 1,
"time_schedule_limit-XXX-enabled": '', # All days are turned off
"time_schedule_limit-enabled": 'y', # Scheduler is enabled, all days however are off.
}
@@ -62,13 +58,13 @@ def test_check_basic_scheduler_functionality(client, live_server, measure_memory
new_key = key.replace("XXX", day)
scheduler_data[new_key] = value
last_check = live_server.app.config['DATASTORE'].data['watching'][uuid]['last_checked']
data = {
"url": test_url,
"fetch_backend": "html_requests",
"time_between_check_use_default": "" # no
"fetch_backend": "html_requests"
}
data.update(scheduler_data)
time.sleep(1)
res = client.post(
url_for("ui.ui_edit.edit_page", uuid="first"),
data=data,
@@ -81,7 +77,6 @@ def test_check_basic_scheduler_functionality(client, live_server, measure_memory
# "Edit" should not trigger a check because it's not enabled in the schedule.
time.sleep(2)
# "time_schedule_limit-XXX-enabled": '', # All days are turned off, therefor, nothing should happen here..
assert live_server.app.config['DATASTORE'].data['watching'][uuid]['last_checked'] == last_check
# Enabling today in Kiritimati should work flawlessly
@@ -182,44 +177,3 @@ def test_check_basic_global_scheduler_functionality(client, live_server, measure
# Cleanup everything
res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)
assert b'Deleted' in res.data
def test_validation_time_interval_field(client, live_server, measure_memory_usage):
test_url = url_for('test_endpoint', _external=True)
res = client.post(
url_for("imports.import_page"),
data={"urls": test_url},
follow_redirects=True
)
assert b"1 Imported" in res.data
res = client.post(
url_for("ui.ui_edit.edit_page", uuid="first"),
data={"trigger_text": 'The golden line',
"url": test_url,
'fetch_backend': "html_requests",
'filter_text_removed': 'y',
"time_between_check_use_default": ""
},
follow_redirects=True
)
assert REQUIRE_ATLEAST_ONE_TIME_PART_WHEN_NOT_GLOBAL_DEFAULT.encode('utf-8') in res.data
# Now set at least something
res = client.post(
url_for("ui.ui_edit.edit_page", uuid="first"),
data={"trigger_text": 'The golden line',
"url": test_url,
'fetch_backend': "html_requests",
"time_between_check-minutes": 1,
"time_between_check_use_default": ""
},
follow_redirects=True
)
assert REQUIRE_ATLEAST_ONE_TIME_PART_WHEN_NOT_GLOBAL_DEFAULT.encode('utf-8') not in res.data

View File

@@ -27,7 +27,7 @@ def test_basic_search(client, live_server, measure_memory_usage):
res = client.post(
url_for("ui.ui_edit.edit_page", uuid="first"),
data={"title": "xxx-title", "url": urls[0], "tags": "", "headers": "", 'fetch_backend': "html_requests", "time_between_check_use_default": "y"},
data={"title": "xxx-title", "url": urls[0], "tags": "", "headers": "", 'fetch_backend': "html_requests"},
follow_redirects=True
)
assert b"Updated watch." in res.data
@@ -62,7 +62,7 @@ def test_search_in_tag_limit(client, live_server, measure_memory_usage):
res = client.post(
url_for("ui.ui_edit.edit_page", uuid="first"),
data={"title": "xxx-title", "url": urls[0].split(' ')[0], "tags": urls[0].split(' ')[1], "headers": "",
'fetch_backend': "html_requests", "time_between_check_use_default": "y"},
'fetch_backend': "html_requests"},
follow_redirects=True
)
assert b"Updated watch." in res.data

View File

@@ -41,8 +41,7 @@ def test_bad_access(client, live_server, measure_memory_usage):
"tags": "",
"method": "GET",
"fetch_backend": "html_requests",
"body": "",
"time_between_check_use_default": "y"},
"body": ""},
follow_redirects=True
)
@@ -151,8 +150,7 @@ def test_xss_watch_last_error(client, live_server, measure_memory_usage):
data={
"include_filters": '<a href="https://foobar"></a><script>alert(123);</script>',
"url": url_for('test_endpoint', _external=True),
'fetch_backend': "html_requests",
"time_between_check_use_default": "y"
'fetch_backend': "html_requests"
},
follow_redirects=True
)

View File

@@ -29,7 +29,7 @@ def test_share_watch(client, live_server, measure_memory_usage):
# Add our URL to the import page
res = client.post(
url_for("ui.ui_edit.edit_page", uuid="first"),
data={"include_filters": include_filters, "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests", "time_between_check_use_default": "y"},
data={"include_filters": include_filters, "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests"},
follow_redirects=True
)
assert b"Updated watch." in res.data

View File

@@ -77,7 +77,7 @@ def test_check_ignore_elements(client, live_server, measure_memory_usage):
client.post(
url_for("ui.ui_edit.edit_page", uuid="first"),
data={"include_filters": 'span,p', "url": test_url, "tags": "", "subtractive_selectors": ".foobar-detection", 'fetch_backend': "html_requests", "time_between_check_use_default": "y"},
data={"include_filters": 'span,p', "url": test_url, "tags": "", "subtractive_selectors": ".foobar-detection", 'fetch_backend': "html_requests"},
follow_redirects=True
)

View File

@@ -81,8 +81,7 @@ def test_trigger_functionality(client, live_server, measure_memory_usage):
url_for("ui.ui_edit.edit_page", uuid="first"),
data={"trigger_text": trigger_text,
"url": test_url,
"fetch_backend": "html_requests",
"time_between_check_use_default": "y"},
"fetch_backend": "html_requests"},
follow_redirects=True
)
assert b"Updated watch." in res.data

View File

@@ -49,8 +49,7 @@ def test_trigger_regex_functionality(client, live_server, measure_memory_usage):
url_for("ui.ui_edit.edit_page", uuid="first"),
data={"trigger_text": '/something \d{3}/',
"url": test_url,
"fetch_backend": "html_requests",
"time_between_check_use_default": "y"},
"fetch_backend": "html_requests"},
follow_redirects=True
)
wait_for_all_checks(client)

View File

@@ -50,8 +50,7 @@ def test_trigger_regex_functionality_with_filter(client, live_server, measure_me
data={"trigger_text": "/cool.stuff/",
"url": test_url,
"include_filters": '#in-here',
"fetch_backend": "html_requests",
"time_between_check_use_default": "y"},
"fetch_backend": "html_requests"},
follow_redirects=True
)

View File

@@ -2,107 +2,12 @@
from flask import url_for
from .util import set_original_response, set_modified_response, live_server_setup, wait_for_all_checks
from ..forms import REQUIRE_ATLEAST_ONE_TIME_PART_WHEN_NOT_GLOBAL_DEFAULT, REQUIRE_ATLEAST_ONE_TIME_PART_MESSAGE_DEFAULT
def test_recheck_time_field_validation_global_settings(client, live_server):
"""
Tests that the global settings time field has at least one value entered for weeks/days/hours/minutes/seconds
class globalSettingsRequestForm(Form):
time_between_check = RequiredFormField(TimeBetweenCheckForm)
"""
res = client.post(
url_for("settings.settings_page"),
data={
"requests-time_between_check-weeks": '',
"requests-time_between_check-days": '',
"requests-time_between_check-hours": '',
"requests-time_between_check-minutes": '',
"requests-time_between_check-seconds": '',
},
follow_redirects=True
)
assert REQUIRE_ATLEAST_ONE_TIME_PART_MESSAGE_DEFAULT.encode('utf-8') in res.data
def test_recheck_time_field_validation_single_watch(client, live_server):
"""
Tests that a single watch's time field has at least one value entered for weeks/days/hours/minutes/seconds when it is not using the global default (time_between_check_use_default off)
"""
test_url = url_for('test_endpoint', _external=True)
# Add our URL to the import page
res = client.post(
url_for("imports.import_page"),
data={"urls": test_url},
follow_redirects=True
)
assert b"1 Imported" in res.data
res = client.post(
url_for("ui.ui_edit.edit_page", uuid="first"),
data={
"url": test_url,
'fetch_backend': "html_requests",
"time_between_check_use_default": "", # OFF
"time_between_check-weeks": '',
"time_between_check-days": '',
"time_between_check-hours": '',
"time_between_check-minutes": '',
"time_between_check-seconds": '',
},
follow_redirects=True
)
assert REQUIRE_ATLEAST_ONE_TIME_PART_WHEN_NOT_GLOBAL_DEFAULT.encode('utf-8') in res.data
# Now set some time
res = client.post(
url_for("ui.ui_edit.edit_page", uuid="first"),
data={
"url": test_url,
'fetch_backend': "html_requests",
"time_between_check_use_default": "", # OFF
"time_between_check-weeks": '',
"time_between_check-days": '',
"time_between_check-hours": '',
"time_between_check-minutes": '5',
"time_between_check-seconds": '',
},
follow_redirects=True
)
assert b"Updated watch." in res.data
assert REQUIRE_ATLEAST_ONE_TIME_PART_WHEN_NOT_GLOBAL_DEFAULT.encode('utf-8') not in res.data
# Now set to use defaults
res = client.post(
url_for("ui.ui_edit.edit_page", uuid="first"),
data={
"url": test_url,
'fetch_backend': "html_requests",
"time_between_check_use_default": "y", # ON YES
"time_between_check-weeks": '',
"time_between_check-days": '',
"time_between_check-hours": '',
"time_between_check-minutes": '',
"time_between_check-seconds": '',
},
follow_redirects=True
)
assert b"Updated watch." in res.data
assert REQUIRE_ATLEAST_ONE_TIME_PART_WHEN_NOT_GLOBAL_DEFAULT.encode('utf-8') not in res.data
def test_checkbox_open_diff_in_new_tab(client, live_server):
set_original_response()
# live_server_setup(live_server) # Setup on conftest per function
# Add our URL to the import page
res = client.post(
url_for("imports.import_page"),
@@ -173,78 +78,3 @@ def test_checkbox_open_diff_in_new_tab(client, live_server):
# Cleanup everything
res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)
assert b'Deleted' in res.data
def test_page_title_listing_behaviour(client, live_server):
set_original_response(extra_title="custom html")
# either the manually entered title/description or the page link should be visible
res = client.post(
url_for("settings.settings_page"),
data={"application-ui-use_page_title_in_list": "",
"requests-time_between_check-minutes": 180,
'application-fetch_backend': "html_requests"},
follow_redirects=True
)
assert b"Settings updated." in res.data
# Add our URL to the import page
res = client.post(
url_for("imports.import_page"),
data={"urls": url_for('test_endpoint', _external=True)},
follow_redirects=True
)
assert b"1 Imported" in res.data
wait_for_all_checks(client)
# We see the URL only, no title/description was manually entered
res = client.get(url_for("watchlist.index"))
assert url_for('test_endpoint', _external=True).encode('utf-8') in res.data
# Now 'my title' should override
res = client.post(
url_for("ui.ui_edit.edit_page", uuid="first"),
data={
"url": url_for('test_endpoint', _external=True),
"title": "my title",
"fetch_backend": "html_requests",
"time_between_check_use_default": "y"},
follow_redirects=True
)
assert b"Updated watch." in res.data
res = client.get(url_for("watchlist.index"))
assert b"my title" in res.data
# Now we enable page <title> and unset the override title/description
res = client.post(
url_for("settings.settings_page"),
data={"application-ui-use_page_title_in_list": "y",
"requests-time_between_check-minutes": 180,
'application-fetch_backend': "html_requests"},
follow_redirects=True
)
assert b"Settings updated." in res.data
# Page title description override should take precedence
res = client.get(url_for("watchlist.index"))
assert b"my title" in res.data
# Remove page title description override and it should fall back to title
res = client.post(
url_for("ui.ui_edit.edit_page", uuid="first"),
data={
"url": url_for('test_endpoint', _external=True),
"title": "",
"fetch_backend": "html_requests",
"time_between_check_use_default": "y"},
follow_redirects=True
)
assert b"Updated watch." in res.data
# No page title description, and 'use_page_title_in_list' is on, it should show the <title>
res = client.get(url_for("watchlist.index"))
assert b"head titlecustom html" in res.data

View File

@@ -92,8 +92,7 @@ def test_unique_lines_functionality(client, live_server, measure_memory_usage):
url_for("ui.ui_edit.edit_page", uuid="first"),
data={"check_unique_lines": "y",
"url": test_url,
"fetch_backend": "html_requests",
"time_between_check_use_default": "y"},
"fetch_backend": "html_requests"},
follow_redirects=True
)
assert b"Updated watch." in res.data
@@ -141,8 +140,7 @@ def test_sort_lines_functionality(client, live_server, measure_memory_usage):
url_for("ui.ui_edit.edit_page", uuid="first"),
data={"sort_text_alphabetically": "n",
"url": test_url,
"fetch_backend": "html_requests",
"time_between_check_use_default": "y"},
"fetch_backend": "html_requests"},
follow_redirects=True
)
assert b"Updated watch." in res.data
@@ -194,8 +192,7 @@ def test_extra_filters(client, live_server, measure_memory_usage):
"trim_text_whitespace": "y",
"sort_text_alphabetically": "", # leave this OFF for testing
"url": test_url,
"fetch_backend": "html_requests",
"time_between_check_use_default": "y"},
"fetch_backend": "html_requests"},
follow_redirects=True
)
assert b"Updated watch." in res.data

View File

@@ -28,8 +28,7 @@ def test_check_watch_field_storage(client, live_server, measure_memory_usage):
"url": test_url,
"tags": "woohoo",
"headers": "curl:foo",
'fetch_backend': "html_requests",
"time_between_check_use_default": "y"
'fetch_backend': "html_requests"
},
follow_redirects=True
)

View File

@@ -92,7 +92,7 @@ def test_check_xpath_filter_utf8(client, live_server, measure_memory_usage):
wait_for_all_checks(client)
res = client.post(
url_for("ui.ui_edit.edit_page", uuid="first"),
data={"include_filters": filter, "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests", "time_between_check_use_default": "y"},
data={"include_filters": filter, "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests"},
follow_redirects=True
)
assert b"Updated watch." in res.data
@@ -146,7 +146,7 @@ def test_check_xpath_text_function_utf8(client, live_server, measure_memory_usag
wait_for_all_checks(client)
res = client.post(
url_for("ui.ui_edit.edit_page", uuid="first"),
data={"include_filters": filter, "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests", "time_between_check_use_default": "y"},
data={"include_filters": filter, "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests"},
follow_redirects=True
)
assert b"Updated watch." in res.data
@@ -188,7 +188,7 @@ def test_check_markup_xpath_filter_restriction(client, live_server, measure_memo
# Add our URL to the import page
res = client.post(
url_for("ui.ui_edit.edit_page", uuid="first"),
data={"include_filters": xpath_filter, "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests", "time_between_check_use_default": "y"},
data={"include_filters": xpath_filter, "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests"},
follow_redirects=True
)
assert b"Updated watch." in res.data
@@ -226,7 +226,7 @@ def test_xpath_validation(client, live_server, measure_memory_usage):
res = client.post(
url_for("ui.ui_edit.edit_page", uuid="first"),
data={"include_filters": "/something horrible", "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests", "time_between_check_use_default": "y"},
data={"include_filters": "/something horrible", "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests"},
follow_redirects=True
)
assert b"is not a valid XPath expression" in res.data
@@ -247,7 +247,7 @@ def test_xpath23_prefix_validation(client, live_server, measure_memory_usage):
res = client.post(
url_for("ui.ui_edit.edit_page", uuid="first"),
data={"include_filters": "xpath:/something horrible", "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests", "time_between_check_use_default": "y"},
data={"include_filters": "xpath:/something horrible", "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests"},
follow_redirects=True
)
assert b"is not a valid XPath expression" in res.data
@@ -298,7 +298,7 @@ def test_xpath1_lxml(client, live_server, measure_memory_usage):
res = client.post(
url_for("ui.ui_edit.edit_page", uuid="first"),
data={"include_filters": "xpath1://title/text()", "url": test_url, "tags": "", "headers": "",
'fetch_backend': "html_requests", "time_between_check_use_default": "y"},
'fetch_backend': "html_requests"},
follow_redirects=True
)
@@ -331,7 +331,7 @@ def test_xpath1_validation(client, live_server, measure_memory_usage):
res = client.post(
url_for("ui.ui_edit.edit_page", uuid="first"),
data={"include_filters": "xpath1:/something horrible", "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests", "time_between_check_use_default": "y"},
data={"include_filters": "xpath1:/something horrible", "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests"},
follow_redirects=True
)
assert b"is not a valid XPath expression" in res.data
@@ -359,7 +359,7 @@ def test_check_with_prefix_include_filters(client, live_server, measure_memory_u
res = client.post(
url_for("ui.ui_edit.edit_page", uuid="first"),
data={"include_filters": "xpath://*[contains(@class, 'sametext')]", "url": test_url, "tags": "", "headers": "",
'fetch_backend': "html_requests", "time_between_check_use_default": "y"},
'fetch_backend': "html_requests"},
follow_redirects=True
)
@@ -413,8 +413,7 @@ def test_various_rules(client, live_server, measure_memory_usage):
"url": test_url,
"tags": "",
"headers": "",
'fetch_backend': "html_requests",
"time_between_check_use_default": "y"},
'fetch_backend': "html_requests"},
follow_redirects=True
)
wait_for_all_checks(client)
@@ -445,8 +444,7 @@ def test_xpath_20(client, live_server, measure_memory_usage):
"url": test_url,
"tags": "",
"headers": "",
'fetch_backend': "html_requests",
"time_between_check_use_default": "y"},
'fetch_backend': "html_requests"},
follow_redirects=True
)
@@ -483,8 +481,7 @@ def test_xpath_20_function_count(client, live_server, measure_memory_usage):
"url": test_url,
"tags": "",
"headers": "",
'fetch_backend': "html_requests",
"time_between_check_use_default": "y"},
'fetch_backend': "html_requests"},
follow_redirects=True
)
@@ -520,8 +517,7 @@ def test_xpath_20_function_count2(client, live_server, measure_memory_usage):
"url": test_url,
"tags": "",
"headers": "",
'fetch_backend': "html_requests",
"time_between_check_use_default": "y"},
'fetch_backend': "html_requests"},
follow_redirects=True
)
@@ -558,8 +554,7 @@ def test_xpath_20_function_string_join_matches(client, live_server, measure_memo
"url": test_url,
"tags": "",
"headers": "",
'fetch_backend': "html_requests",
"time_between_check_use_default": "y"},
'fetch_backend': "html_requests"},
follow_redirects=True
)

View File

@@ -1,436 +0,0 @@
"""
Tests for browser notification functionality
Tests VAPID key handling, subscription management, and notification sending
"""
import json
import sys
import tempfile
import os
import unittest
from unittest.mock import patch, Mock, MagicMock
from py_vapid import Vapid
from changedetectionio.notification.apprise_plugin.browser_notification_helpers import (
convert_pem_private_key_for_pywebpush,
convert_pem_public_key_for_browser,
send_push_notifications,
create_notification_payload,
get_vapid_config_from_datastore,
get_browser_subscriptions,
save_browser_subscriptions
)
class TestVAPIDKeyHandling(unittest.TestCase):
"""Test VAPID key generation, conversion, and validation"""
def test_create_notification_payload(self):
"""Test notification payload creation"""
payload = create_notification_payload("Test Title", "Test Body", "/test-icon.png")
self.assertEqual(payload['title'], "Test Title")
self.assertEqual(payload['body'], "Test Body")
self.assertEqual(payload['icon'], "/test-icon.png")
self.assertEqual(payload['badge'], "/static/favicons/favicon-32x32.png")
self.assertIn('timestamp', payload)
self.assertIsInstance(payload['timestamp'], int)
def test_create_notification_payload_defaults(self):
"""Test notification payload with default values"""
payload = create_notification_payload("Title", "Body")
self.assertEqual(payload['icon'], "/static/favicons/favicon-32x32.png")
self.assertEqual(payload['badge'], "/static/favicons/favicon-32x32.png")
def test_convert_pem_private_key_for_pywebpush_with_valid_pem(self):
"""Test conversion of valid PEM private key to Vapid instance"""
# Generate a real VAPID key
vapid = Vapid()
vapid.generate_keys()
private_pem = vapid.private_pem().decode()
# Convert using our function
converted_key = convert_pem_private_key_for_pywebpush(private_pem)
# Should return a Vapid instance
self.assertIsInstance(converted_key, Vapid)
def test_convert_pem_private_key_invalid_input(self):
"""Test conversion with invalid input returns original"""
invalid_key = "not-a-pem-key"
result = convert_pem_private_key_for_pywebpush(invalid_key)
self.assertEqual(result, invalid_key)
none_key = None
result = convert_pem_private_key_for_pywebpush(none_key)
self.assertEqual(result, none_key)
def test_convert_pem_public_key_for_browser(self):
"""Test conversion of PEM public key to browser format"""
# Generate a real VAPID key pair
vapid = Vapid()
vapid.generate_keys()
public_pem = vapid.public_pem().decode()
# Convert to browser format
browser_key = convert_pem_public_key_for_browser(public_pem)
# Should return URL-safe base64 string
self.assertIsInstance(browser_key, str)
self.assertGreater(len(browser_key), 0)
# Should not contain padding
self.assertFalse(browser_key.endswith('='))
def test_convert_pem_public_key_invalid(self):
"""Test public key conversion with invalid input"""
result = convert_pem_public_key_for_browser("invalid-pem")
self.assertIsNone(result)
class TestDatastoreIntegration(unittest.TestCase):
"""Test datastore operations for VAPID and subscriptions"""
def test_get_vapid_config_from_datastore(self):
"""Test retrieving VAPID config from datastore"""
mock_datastore = Mock()
mock_datastore.data = {
'settings': {
'application': {
'vapid': {
'private_key': 'test-private-key',
'public_key': 'test-public-key',
'contact_email': 'test@example.com'
}
}
}
}
private_key, public_key, contact_email = get_vapid_config_from_datastore(mock_datastore)
self.assertEqual(private_key, 'test-private-key')
self.assertEqual(public_key, 'test-public-key')
self.assertEqual(contact_email, 'test@example.com')
def test_get_vapid_config_missing_email(self):
"""Test VAPID config with missing contact email uses default"""
mock_datastore = Mock()
mock_datastore.data = {
'settings': {
'application': {
'vapid': {
'private_key': 'test-private-key',
'public_key': 'test-public-key'
}
}
}
}
private_key, public_key, contact_email = get_vapid_config_from_datastore(mock_datastore)
self.assertEqual(contact_email, 'citizen@example.com')
def test_get_vapid_config_empty_datastore(self):
"""Test VAPID config with empty datastore returns None values"""
mock_datastore = Mock()
mock_datastore.data = {}
private_key, public_key, contact_email = get_vapid_config_from_datastore(mock_datastore)
self.assertIsNone(private_key)
self.assertIsNone(public_key)
self.assertEqual(contact_email, 'citizen@example.com')
def test_get_browser_subscriptions(self):
"""Test retrieving browser subscriptions from datastore"""
mock_datastore = Mock()
test_subscriptions = [
{
'endpoint': 'https://fcm.googleapis.com/fcm/send/test1',
'keys': {'p256dh': 'key1', 'auth': 'auth1'}
},
{
'endpoint': 'https://fcm.googleapis.com/fcm/send/test2',
'keys': {'p256dh': 'key2', 'auth': 'auth2'}
}
]
mock_datastore.data = {
'settings': {
'application': {
'browser_subscriptions': test_subscriptions
}
}
}
subscriptions = get_browser_subscriptions(mock_datastore)
self.assertEqual(len(subscriptions), 2)
self.assertEqual(subscriptions, test_subscriptions)
def test_get_browser_subscriptions_empty(self):
"""Test getting subscriptions from empty datastore returns empty list"""
mock_datastore = Mock()
mock_datastore.data = {}
subscriptions = get_browser_subscriptions(mock_datastore)
self.assertEqual(subscriptions, [])
def test_save_browser_subscriptions(self):
"""Test saving browser subscriptions to datastore"""
mock_datastore = Mock()
mock_datastore.data = {'settings': {'application': {}}}
test_subscriptions = [
{'endpoint': 'test1', 'keys': {'p256dh': 'key1', 'auth': 'auth1'}}
]
save_browser_subscriptions(mock_datastore, test_subscriptions)
self.assertEqual(mock_datastore.data['settings']['application']['browser_subscriptions'], test_subscriptions)
self.assertTrue(mock_datastore.needs_write)
class TestNotificationSending(unittest.TestCase):
"""Test notification sending with mocked pywebpush"""
@patch('pywebpush.webpush')
def test_send_push_notifications_success(self, mock_webpush):
"""Test successful notification sending"""
mock_webpush.return_value = True
mock_datastore = Mock()
mock_datastore.needs_write = False
subscriptions = [
{
'endpoint': 'https://fcm.googleapis.com/fcm/send/test1',
'keys': {'p256dh': 'key1', 'auth': 'auth1'}
}
]
# Generate a real VAPID key for testing
vapid = Vapid()
vapid.generate_keys()
private_key = vapid.private_pem().decode()
notification_payload = {
'title': 'Test Title',
'body': 'Test Body'
}
success_count, total_count = send_push_notifications(
subscriptions=subscriptions,
notification_payload=notification_payload,
private_key=private_key,
contact_email='test@example.com',
datastore=mock_datastore
)
self.assertEqual(success_count, 1)
self.assertEqual(total_count, 1)
self.assertTrue(mock_webpush.called)
# Verify webpush was called with correct parameters
call_args = mock_webpush.call_args
self.assertEqual(call_args[1]['subscription_info'], subscriptions[0])
self.assertEqual(json.loads(call_args[1]['data']), notification_payload)
self.assertIn('vapid_private_key', call_args[1])
self.assertEqual(call_args[1]['vapid_claims']['sub'], 'mailto:test@example.com')
@patch('pywebpush.webpush')
def test_send_push_notifications_webpush_exception(self, mock_webpush):
"""Test handling of WebPushException with invalid subscription removal"""
from pywebpush import WebPushException
# Mock a 410 response (subscription gone)
mock_response = Mock()
mock_response.status_code = 410
mock_webpush.side_effect = WebPushException("Subscription expired", response=mock_response)
mock_datastore = Mock()
mock_datastore.needs_write = False
subscriptions = [
{
'endpoint': 'https://fcm.googleapis.com/fcm/send/test1',
'keys': {'p256dh': 'key1', 'auth': 'auth1'}
}
]
vapid = Vapid()
vapid.generate_keys()
private_key = vapid.private_pem().decode()
success_count, total_count = send_push_notifications(
subscriptions=subscriptions,
notification_payload={'title': 'Test', 'body': 'Test'},
private_key=private_key,
contact_email='test@example.com',
datastore=mock_datastore
)
self.assertEqual(success_count, 0)
self.assertEqual(total_count, 1)
self.assertTrue(mock_datastore.needs_write) # Should mark for subscription cleanup
def test_send_push_notifications_no_pywebpush(self):
"""Test graceful handling when pywebpush is not available"""
with patch.dict('sys.modules', {'pywebpush': None}):
subscriptions = [{'endpoint': 'test', 'keys': {}}]
success_count, total_count = send_push_notifications(
subscriptions=subscriptions,
notification_payload={'title': 'Test', 'body': 'Test'},
private_key='test-key',
contact_email='test@example.com',
datastore=Mock()
)
self.assertEqual(success_count, 0)
self.assertEqual(total_count, 1)
class TestBrowserIntegration(unittest.TestCase):
"""Test browser integration aspects (file existence)"""
def test_javascript_browser_notifications_class_exists(self):
"""Test that browser notifications JavaScript file exists and has expected structure"""
js_file = "/var/www/changedetection.io/changedetectionio/static/js/browser-notifications.js"
self.assertTrue(os.path.exists(js_file))
with open(js_file, 'r') as f:
content = f.read()
# Check for key class and methods
self.assertIn('class BrowserNotifications', content)
self.assertIn('async init()', content)
self.assertIn('async subscribe()', content)
self.assertIn('async sendTestNotification()', content)
self.assertIn('setupNotificationUrlMonitoring()', content)
def test_service_worker_exists(self):
"""Test that service worker file exists"""
sw_file = "/var/www/changedetection.io/changedetectionio/static/js/service-worker.js"
self.assertTrue(os.path.exists(sw_file))
with open(sw_file, 'r') as f:
content = f.read()
# Check for key service worker functionality
self.assertIn('push', content)
self.assertIn('notificationclick', content)
class TestAPIEndpoints(unittest.TestCase):
"""Test browser notification API endpoints"""
def test_browser_notifications_module_exists(self):
"""Test that BrowserNotifications API module exists"""
api_file = "/var/www/changedetection.io/changedetectionio/notification/BrowserNotifications.py"
self.assertTrue(os.path.exists(api_file))
with open(api_file, 'r') as f:
content = f.read()
# Check for key API classes
self.assertIn('BrowserNotificationsVapidPublicKey', content)
self.assertIn('BrowserNotificationsSubscribe', content)
self.assertIn('BrowserNotificationsUnsubscribe', content)
def test_vapid_public_key_conversion(self):
"""Test VAPID public key conversion for browser use"""
# Generate a real key pair
vapid = Vapid()
vapid.generate_keys()
public_pem = vapid.public_pem().decode()
# Convert to browser format
browser_key = convert_pem_public_key_for_browser(public_pem)
# Verify it's a valid URL-safe base64 string
self.assertIsInstance(browser_key, str)
self.assertGreater(len(browser_key), 80) # P-256 uncompressed point should be ~88 chars
# Should not have padding
self.assertFalse(browser_key.endswith('='))
# Should only contain URL-safe base64 characters
import re
self.assertRegex(browser_key, r'^[A-Za-z0-9_-]+$')
class TestIntegrationFlow(unittest.TestCase):
"""Test complete integration flow"""
@patch('pywebpush.webpush')
def test_complete_notification_flow(self, mock_webpush):
"""Test complete flow from subscription to notification"""
mock_webpush.return_value = True
# Create mock datastore with VAPID keys
mock_datastore = Mock()
vapid = Vapid()
vapid.generate_keys()
mock_datastore.data = {
'settings': {
'application': {
'vapid': {
'private_key': vapid.private_pem().decode(),
'public_key': vapid.public_pem().decode(),
'contact_email': 'test@example.com'
},
'browser_subscriptions': [
{
'endpoint': 'https://fcm.googleapis.com/fcm/send/test123',
'keys': {
'p256dh': 'test-p256dh-key',
'auth': 'test-auth-key'
}
}
]
}
}
}
mock_datastore.needs_write = False
# Get configuration
private_key, public_key, contact_email = get_vapid_config_from_datastore(mock_datastore)
subscriptions = get_browser_subscriptions(mock_datastore)
# Create notification
payload = create_notification_payload("Test Title", "Test Message")
# Send notification
success_count, total_count = send_push_notifications(
subscriptions=subscriptions,
notification_payload=payload,
private_key=private_key,
contact_email=contact_email,
datastore=mock_datastore
)
# Verify success
self.assertEqual(success_count, 1)
self.assertEqual(total_count, 1)
self.assertTrue(mock_webpush.called)
# Verify webpush call parameters
call_args = mock_webpush.call_args
self.assertIn('subscription_info', call_args[1])
self.assertIn('vapid_private_key', call_args[1])
self.assertIn('vapid_claims', call_args[1])
# Verify vapid_claims format
vapid_claims = call_args[1]['vapid_claims']
self.assertEqual(vapid_claims['sub'], 'mailto:test@example.com')
self.assertEqual(vapid_claims['aud'], 'https://fcm.googleapis.com')
if __name__ == '__main__':
unittest.main()
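The public key conversion these removed tests describe (URL-safe base64, no padding, P-256 uncompressed point of roughly 87-88 characters) can be sketched as follows. This is only an illustration using the cryptography package; it is not the actual helper from browser_notification_helpers, whose implementation may differ:
import base64
from cryptography.hazmat.primitives import serialization

def pem_public_key_to_browser_key(public_pem: str):
    # Load the PEM-encoded P-256 public key and export the raw uncompressed
    # EC point (0x04 || X || Y, 65 bytes). Browsers expect this as the
    # applicationServerKey, encoded as unpadded URL-safe base64.
    try:
        public_key = serialization.load_pem_public_key(public_pem.encode())
        raw_point = public_key.public_bytes(
            encoding=serialization.Encoding.X962,
            format=serialization.PublicFormat.UncompressedPoint,
        )
        return base64.urlsafe_b64encode(raw_point).decode().rstrip("=")
    except ValueError:
        # Mirrors the behaviour asserted above: invalid PEM input yields None
        return None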

View File

@@ -36,7 +36,6 @@ def test_visual_selector_content_ready(client, live_server, measure_memory_usage
# For now, cookies don't work in headers because they must be a full cookiejar object
'headers': "testheader: yes\buser-agent: MyCustomAgent",
'fetch_backend': "html_webdriver",
"time_between_check_use_default": "y",
},
follow_redirects=True
)
@@ -117,7 +116,6 @@ def test_basic_browserstep(client, live_server, measure_memory_usage):
'browser_steps-1-optional_value': '',
# For now, cookies don't work in headers because they must be a full cookiejar object
'headers': "testheader: yes\buser-agent: MyCustomAgent",
"time_between_check_use_default": "y",
},
follow_redirects=True
)
@@ -169,8 +167,7 @@ def test_non_200_errors_report_browsersteps(client, live_server):
'fetch_backend': "html_webdriver",
'browser_steps-0-operation': 'Click element',
'browser_steps-0-selector': 'button[name=test-button]',
'browser_steps-0-optional_value': '',
"time_between_check_use_default": "y"
'browser_steps-0-optional_value': ''
},
follow_redirects=True
)

View File

@@ -1,4 +1,4 @@
openapi: 3.1.0
openapi: 3.0.4
info:
title: ChangeDetection.io API
description: |
@@ -28,7 +28,7 @@ info:
For example: `x-api-key: YOUR_API_KEY`
version: 0.1.1
version: 0.1.0
contact:
name: ChangeDetection.io
url: https://github.com/dgtlmoon/changedetection.io
@@ -129,7 +129,7 @@ components:
maxLength: 5000
title:
type: string
description: Custom title for the web page change monitor (watch), not to be confused with page_title
description: Custom title for the web page change monitor (watch)
maxLength: 5000
tag:
type: string
@@ -188,10 +188,6 @@ components:
seconds:
type: integer
description: Time intervals between checks
time_between_check_use_default:
type: boolean
default: true
description: Whether to use global settings for time between checks - defaults to true if not set
notification_urls:
type: array
items:
@@ -255,11 +251,6 @@ components:
type: integer
description: Unix timestamp in seconds of the last time the watch was viewed. Setting it to a value higher than `last_changed` in the "Update watch" endpoint marks the watch as viewed.
minimum: 0
link:
type: string
format: string
description: The watch URL with any Jinja2 markup rendered; always use this when listing.
readOnly: true
CreateWatch:
allOf:
@@ -400,10 +391,9 @@ paths:
example:
"095be615-a8ad-4c33-8e9c-c7612fbf6c9f":
uuid: "095be615-a8ad-4c33-8e9c-c7612fbf6c9f"
url: "http://example.com?id={{1+1}} - the raw URL"
link: "http://example.com?id=2 - the rendered URL, always use this for listing."
title: "Example Website Monitor - manually entered title/description"
page_title: "The HTML <title> from the page"
url: "http://example.com"
title: "Example Website Monitor"
tag: "550e8400-e29b-41d4-a716-446655440000"
tags: ["550e8400-e29b-41d4-a716-446655440000"]
paused: false
muted: false
@@ -413,10 +403,9 @@ paths:
last_changed: 1640995200
"7c9e6b8d-f2a1-4e5c-9d3b-8a7f6e4c2d1a":
uuid: "7c9e6b8d-f2a1-4e5c-9d3b-8a7f6e4c2d1a"
url: "http://example.com?id={{1+1}} - the raw URL"
link: "http://example.com?id=2 - the rendered URL, always use this for listing."
title: "News Site Tracker - manually entered title/description"
page_title: "The HTML <title> from the page"
url: "https://news.example.org"
title: "News Site Tracker"
tag: "330e8400-e29b-41d4-a716-446655440001"
tags: ["330e8400-e29b-41d4-a716-446655440001"]
paused: false
muted: true
@@ -1222,6 +1211,7 @@ paths:
uuid: "095be615-a8ad-4c33-8e9c-c7612fbf6c9f"
url: "http://example.com"
title: "Example Website Monitor"
tag: "550e8400-e29b-41d4-a716-446655440000"
tags: ["550e8400-e29b-41d4-a716-446655440000"]
paused: false
muted: false

View File

@@ -142,6 +142,3 @@ pre_commit >= 4.2.0
# For events between checking and socketio updates
blinker
# For Web Push notifications (browser notifications)
pywebpush