Compare commits

...

6 Commits

Author SHA1 Message Date
dgtlmoon
674d863a21 UI - Move Default Proxy selection back to "General" tab
2026-01-28 18:15:32 +01:00
dgtlmoon
0b9cfcdf09 API - Notification URLs weren't always being validated (#3812) 2026-01-28 17:30:58 +01:00
dgtlmoon
fd820c9330 Remove deprecated call to strtobool 2026-01-28 17:27:20 +01:00
Robbert Langezaal
e02a1824c5 UI - Make watch tags link elements (#3813) 2026-01-28 17:18:23 +01:00
dgtlmoon
5911b7fe7a test tweak 2026-01-28 17:17:37 +01:00
dgtlmoon
a239480272 DB data migration upgrade fixes (#3811) 2026-01-28 12:02:19 +01:00
12 changed files with 355 additions and 135 deletions

View File

@@ -133,43 +133,48 @@ def sigshutdown_handler(_signo, _stack_frame):
sys.exit()
def print_help():
"""Print help text for command line options"""
print('Usage: changedetection.py [options]')
print('')
print('Standard options:')
print(' -s SSL enable')
print(' -h HOST Listen host (default: 0.0.0.0)')
print(' -p PORT Listen port (default: 5000)')
print(' -d PATH Datastore path')
print(' -l LEVEL Log level (TRACE, DEBUG, INFO, SUCCESS, WARNING, ERROR, CRITICAL)')
print(' -c Cleanup unused snapshots')
print(' -C Create datastore directory if it doesn\'t exist')
print(' -P true/false Set all watches paused (true) or active (false)')
print('')
print('Add URLs on startup:')
print(' -u URL Add URL to watch (can be used multiple times)')
print(' -u0 \'JSON\' Set options for first -u URL (e.g. \'{"processor":"text_json_diff"}\')')
print(' -u1 \'JSON\' Set options for second -u URL (0-indexed)')
print(' -u2 \'JSON\' Set options for third -u URL, etc.')
print(' Available options: processor, fetch_backend, headers, method, etc.')
print(' See model/Watch.py for all available options')
print('')
print('Recheck on startup:')
print(' -r all Queue all watches for recheck on startup')
print(' -r UUID,... Queue specific watches (comma-separated UUIDs)')
print(' -r all N Queue all watches, wait for completion, repeat N times')
print(' -r UUID,... N Queue specific watches, wait for completion, repeat N times')
print('')
print('Batch mode:')
print(' -b Run in batch mode (process queue then exit)')
print(' Useful for CI/CD, cron jobs, or one-time checks')
print(' NOTE: Batch mode checks if Flask is running and aborts if port is in use')
print(' Use -p PORT to specify a different port if needed')
print('')
def main():
global datastore
global app
# Early help/version check before any initialization
if '--help' in sys.argv or '-help' in sys.argv:
print('Usage: changedetection.py [options]')
print('')
print('Standard options:')
print(' -s SSL enable')
print(' -h HOST Listen host (default: 0.0.0.0)')
print(' -p PORT Listen port (default: 5000)')
print(' -d PATH Datastore path')
print(' -l LEVEL Log level (TRACE, DEBUG, INFO, SUCCESS, WARNING, ERROR, CRITICAL)')
print(' -c Cleanup unused snapshots')
print(' -C Create datastore directory if it doesn\'t exist')
print('')
print('Add URLs on startup:')
print(' -u URL Add URL to watch (can be used multiple times)')
print(' -u0 \'JSON\' Set options for first -u URL (e.g. \'{"processor":"text_json_diff"}\')')
print(' -u1 \'JSON\' Set options for second -u URL (0-indexed)')
print(' -u2 \'JSON\' Set options for third -u URL, etc.')
print(' Available options: processor, fetch_backend, headers, method, etc.')
print(' See model/Watch.py for all available options')
print('')
print('Recheck on startup:')
print(' -r all Queue all watches for recheck on startup')
print(' -r UUID,... Queue specific watches (comma-separated UUIDs)')
print(' -r all N Queue all watches, wait for completion, repeat N times')
print(' -r UUID,... N Queue specific watches, wait for completion, repeat N times')
print('')
print('Batch mode:')
print(' -b Run in batch mode (process queue then exit)')
print(' Useful for CI/CD, cron jobs, or one-time checks')
print(' NOTE: Batch mode checks if Flask is running and aborts if port is in use')
print(' Use -p PORT to specify a different port if needed')
print('')
print_help()
sys.exit(0)
if '--version' in sys.argv or '-v' in sys.argv:
@@ -185,6 +190,7 @@ def main():
# Set a default logger level
logger_level = 'DEBUG'
include_default_watches = True
all_paused = None # None means don't change, True/False to set
host = os.environ.get("LISTEN_HOST", "0.0.0.0").strip()
port = int(os.environ.get('PORT', 5000))
@@ -263,39 +269,9 @@ def main():
i += 1
try:
opts, args = getopt.getopt(cleaned_argv[1:], "6Ccsd:h:p:l:", "port")
opts, args = getopt.getopt(cleaned_argv[1:], "6Ccsd:h:p:l:P:", "port")
except getopt.GetoptError as e:
print('Usage: changedetection.py [options]')
print('')
print('Standard options:')
print(' -s SSL enable')
print(' -h HOST Listen host (default: 0.0.0.0)')
print(' -p PORT Listen port (default: 5000)')
print(' -d PATH Datastore path')
print(' -l LEVEL Log level (TRACE, DEBUG, INFO, SUCCESS, WARNING, ERROR, CRITICAL)')
print(' -c Cleanup unused snapshots')
print(' -C Create datastore directory if it doesn\'t exist')
print('')
print('Add URLs on startup:')
print(' -u URL Add URL to watch (can be used multiple times)')
print(' -u0 \'JSON\' Set options for first -u URL (e.g. \'{"processor":"text_json_diff"}\')')
print(' -u1 \'JSON\' Set options for second -u URL (0-indexed)')
print(' -u2 \'JSON\' Set options for third -u URL, etc.')
print(' Available options: processor, fetch_backend, headers, method, etc.')
print(' See model/Watch.py for all available options')
print('')
print('Recheck on startup:')
print(' -r all Queue all watches for recheck on startup')
print(' -r UUID,... Queue specific watches (comma-separated UUIDs)')
print(' -r all N Queue all watches, wait for completion, repeat N times')
print(' -r UUID,... N Queue specific watches, wait for completion, repeat N times')
print('')
print('Batch mode:')
print(' -b Run in batch mode (process queue then exit)')
print(' Useful for CI/CD, cron jobs, or one-time checks')
print(' NOTE: Batch mode checks if Flask is running and aborts if port is in use')
print(' Use -p PORT to specify a different port if needed')
print('')
print_help()
print(f'Error: {e}')
sys.exit(2)
@@ -332,6 +308,14 @@ def main():
if opt == '-l':
logger_level = int(arg) if arg.isdigit() else arg.upper()
if opt == '-P':
try:
all_paused = bool(strtobool(arg))
except ValueError:
print(f'Error: Invalid value for -P option: {arg}')
print('Expected: true, false, yes, no, 1, or 0')
sys.exit(2)
# If URLs are provided, don't include default watches
if urls_to_add:
include_default_watches = False
@@ -398,6 +382,11 @@ def main():
logger.critical(str(e))
return
# Apply all_paused setting if specified via CLI
if all_paused is not None:
datastore.data['settings']['application']['all_paused'] = all_paused
logger.info(f"Setting all watches paused: {all_paused}")
# Inject datastore into plugins that need access to settings
from changedetectionio.pluggy_interface import inject_datastore_into_plugins
inject_datastore_into_plugins(datastore)
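
The new -P handler calls strtobool(arg), and a later hunk in this compare swaps the deprecated distutils.util.strtobool import for a project-local one (distutils was removed in Python 3.12, hence the "Remove deprecated call to strtobool" commit). A minimal sketch of such a helper, assuming it mirrors the old distutils semantics — the project's actual implementation may differ:

    def strtobool(val):
        """Convert a truthy/falsey string to 1 or 0, mirroring the old
        distutils.util.strtobool behaviour (assumption: the project
        helper keeps the same accepted values)."""
        val = val.lower()
        if val in ('y', 'yes', 't', 'true', 'on', '1'):
            return 1
        if val in ('n', 'no', 'f', 'false', 'off', '0'):
            return 0
        raise ValueError(f"invalid truth value {val!r}")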

View File

@@ -96,6 +96,16 @@ class Tag(Resource):
if not tag:
abort(404, message='No tag exists with the UUID of {}'.format(uuid))
# Validate notification_urls if provided
if 'notification_urls' in request.json:
from wtforms import ValidationError
from changedetectionio.api.Notifications import validate_notification_urls
try:
notification_urls = request.json.get('notification_urls', [])
validate_notification_urls(notification_urls)
except ValidationError as e:
return str(e), 400
tag.update(request.json)
self.datastore.needs_write_urgent = True
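
validate_notification_urls is imported from changedetectionio.api.Notifications, but only the call site appears in this diff. A plausible sketch of such a validator, assuming it delegates scheme checking to the apprise library (Apprise.add() returns False for unrecognised URLs) and raises the wtforms ValidationError that the caller catches:

    import apprise
    from wtforms import ValidationError

    def validate_notification_urls(notification_urls):
        # Hypothetical sketch -- the real helper in api/Notifications.py may differ
        apobj = apprise.Apprise()
        for url in notification_urls or []:
            if not apobj.add(url.strip()):
                # Matches the error text asserted in the tests later in this diff
                raise ValidationError(f"'{url}' is not a valid AppRise URL.")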

View File

@@ -140,6 +140,16 @@ class Watch(Resource):
if validation_error:
return validation_error, 400
# Validate notification_urls if provided
if 'notification_urls' in request.json:
from wtforms import ValidationError
from changedetectionio.api.Notifications import validate_notification_urls
try:
notification_urls = request.json.get('notification_urls', [])
validate_notification_urls(notification_urls)
except ValidationError as e:
return str(e), 400
# XSS etc protection - validate URL if it's being updated
if 'url' in request.json:
new_url = request.json.get('url')
@@ -444,6 +454,16 @@ class CreateWatch(Resource):
if validation_error:
return validation_error, 400
# Validate notification_urls if provided
if 'notification_urls' in json_data:
from wtforms import ValidationError
from changedetectionio.api.Notifications import validate_notification_urls
try:
notification_urls = json_data.get('notification_urls', [])
validate_notification_urls(notification_urls)
except ValidationError as e:
return str(e), 400
extras = copy.deepcopy(json_data)
# Because we renamed 'tag' to 'tags' but don't want to change the API (can do this in v2 of the API)
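
To exercise the new validation against a running instance, a watch created with a plain https:// scheme in notification_urls should now come back as 400 rather than being stored. A quick sketch using requests — host, port, and API key are placeholders, and the /api/v1 paths are assumed from the project's usual API layout:

    import requests

    BASE = "http://localhost:5000/api/v1"   # placeholder host/port
    HEADERS = {"x-api-key": "YOUR_API_KEY", "content-type": "application/json"}

    res = requests.post(f"{BASE}/watch", headers=HEADERS, json={
        "url": "https://example.com",
        "notification_urls": ["https://example.com/webhook"],  # not an AppRise scheme
    })
    print(res.status_code, res.text)  # expect 400: "... is not a valid AppRise URL"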

View File

@@ -80,6 +80,16 @@
{{ render_checkbox_field(form.application.form.empty_pages_are_a_change) }}
<span class="pure-form-message-inline">{{ _('When a request returns no content, or the HTML does not contain any text, is this considered a change?') }}</span>
</div>
{% if form.requests.proxy %}
<div>
<br>
<div class="inline-radio">
{{ render_field(form.requests.form.proxy, class="fetch-backend-proxy") }}
<span class="pure-form-message-inline">{{ _('Choose a default proxy for all watches') }}</span>
</div>
</div>
{% endif %}
</fieldset>
</div>
@@ -340,15 +350,6 @@ nav
{{ render_fieldlist_with_inline_errors(form.requests.form.extra_proxies) }}
<span class="pure-form-message-inline">{{ _('"Name" will be used for selecting the proxy in the Watch Edit settings') }}</span><br>
<span class="pure-form-message-inline">{{ _('SOCKS5 proxies with authentication are only supported with \'plain requests\' fetcher, for other fetchers you should whitelist the IP access instead') }}</span>
{% if form.requests.proxy %}
<div>
<br>
<div class="inline-radio">
{{ render_field(form.requests.form.proxy, class="fetch-backend-proxy") }}
<span class="pure-form-message-inline">{{ _('Choose a default proxy for all watches') }}</span>
</div>
</div>
{% endif %}
</div>
<div class="pure-control-group" id="extra-browsers-setting">
<p>

View File

@@ -223,7 +223,7 @@ html[data-darkmode="true"] .watch-tag-list.tag-{{ class_name }} {
<span class="processor-badge processor-badge-{{ watch['processor'] }}" title="{{ processor_descriptions.get(watch['processor'], watch['processor']) }}">{{ processor_badge_texts[watch['processor']] }}</span>
{%- endif -%}
{%- for watch_tag_uuid, watch_tag in datastore.get_all_tags_for_watch(watch['uuid']).items() -%}
<span class="watch-tag-list tag-{{ watch_tag.title|sanitize_tag_class }}">{{ watch_tag.title }}</span>
<a href="{{url_for('watchlist.index', tag=watch_tag_uuid) }}" class="watch-tag-list tag-{{ watch_tag.title|sanitize_tag_class }}">{{ watch_tag.title }}</a>
{%- endfor -%}
</div>
<div class="status-icons">

View File

@@ -222,6 +222,19 @@ code {
color: var(--color-white);
background: var(--color-text-watch-tag-list);
@extend .inline-tag;
/* Remove default anchor styling when used as links */
text-decoration: none;
&:hover {
text-decoration: none;
opacity: 0.8;
cursor: pointer;
}
&:visited {
color: var(--color-white);
}
}
@media (min-width: 768px) {

View File

@@ -218,21 +218,16 @@ class ChangeDetectionStore(DatastoreUpdatesMixin, FileSavingDataStore):
# Load the legacy datastore to get its schema_version
from .legacy_loader import load_legacy_format
legacy_path = os.path.join(self.datastore_path, "url-watches.json")
legacy_data = load_legacy_format(legacy_path)
with open(legacy_path) as f:
self.__data = json.load(f)
if not legacy_data:
if not self.__data:
raise Exception("Failed to load legacy datastore from url-watches.json")
# Get the schema version from legacy datastore (defaults to 0 if not present)
legacy_schema_version = legacy_data.get('settings', {}).get('application', {}).get('schema_version', 0)
logger.info(f"Legacy datastore schema version: {legacy_schema_version}")
# Set our schema version to match the legacy one
self.__data['settings']['application']['schema_version'] = legacy_schema_version
# update_26 will load the legacy data again and migrate to new format
# Only run updates AFTER the legacy schema version (e.g., if legacy is at 25, only run 26+)
self.run_updates(current_schema_version=legacy_schema_version)
self.run_updates()
else:
# Fresh install - create new datastore
@@ -307,7 +302,7 @@ class ChangeDetectionStore(DatastoreUpdatesMixin, FileSavingDataStore):
else:
watch_class = get_custom_watch_obj_for_processor(entity.get('processor'))
if entity.get('uuid') != 'text_json_diff':
if entity.get('processor') != 'text_json_diff':
logger.trace(f"Loading Watch object '{watch_class.__module__}.{watch_class.__name__}' for UUID {uuid}")
entity = watch_class(datastore_path=self.datastore_path, default=entity)
@@ -373,6 +368,13 @@ class ChangeDetectionStore(DatastoreUpdatesMixin, FileSavingDataStore):
self.__data['watching'] = watching
self._watch_hashes = watch_hashes
# Verify all watches have hashes
missing_hashes = [uuid for uuid in watching.keys() if uuid not in watch_hashes]
if missing_hashes:
logger.error(f"WARNING: {len(missing_hashes)} watches missing hashes after load: {missing_hashes[:5]}")
else:
logger.debug(f"All {len(watching)} watches have valid hashes")
def _delete_watch(self, uuid):
"""
Delete a watch from storage.

View File

@@ -16,11 +16,11 @@ import os
import tempfile
import time
from concurrent.futures import ThreadPoolExecutor, as_completed
from distutils.util import strtobool
from threading import Thread
from loguru import logger
from .base import DataStore
from .. import strtobool
# Try to import orjson for faster JSON serialization
try:
@@ -322,8 +322,9 @@ def load_all_watches(datastore_path, rehydrate_entity_func, compute_hash_func):
watch, raw_data = load_watch_from_file(watch_json, uuid_dir, rehydrate_entity_func)
if watch and raw_data:
watching[uuid_dir] = watch
# Compute hash from raw data BEFORE rehydration to match saved hash
watch_hashes[uuid_dir] = compute_hash_func(raw_data)
# Compute hash from rehydrated Watch object (as dict) to match how we compute on save
# This ensures the hash matches what the audit will compute from dict(watch)
watch_hashes[uuid_dir] = compute_hash_func(dict(watch))
loaded += 1
if loaded % 100 == 0:
@@ -743,7 +744,7 @@ class FileSavingDataStore(DataStore):
self._dirty_watches.add(uuid)
changes_found += 1
logger.warning(
f"Audit detected unmarked change in watch {uuid[:8]}... "
f"Audit detected unmarked change in watch {uuid[:8]}... current {current_hash:8} stored hash {stored_hash[:8]}"
f"(hash changed but not marked dirty)"
)
self.needs_write = True
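
The audit compares a hash stored at save time against one recomputed from dict(watch), so both sides must hash the same canonical form — which is why the load path above switched from hashing raw file data to hashing the rehydrated dict. The diff does not show _compute_hash itself; a minimal sketch of a stable content hash, assuming the watch dict is JSON-serialisable:

    import hashlib
    import json

    def compute_hash(watch_dict):
        # sort_keys gives a canonical serialisation, so two dicts with the
        # same content always hash identically; default=str is a fallback
        # for non-JSON values (assumption -- the real helper may differ)
        canonical = json.dumps(watch_dict, sort_keys=True, default=str)
        return hashlib.sha256(canonical.encode("utf-8")).hexdigest()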

View File

@@ -534,7 +534,7 @@ class DatastoreUpdatesMixin:
logger.debug(f"Renaming history index {old_history_txt} to {new_history_txt}...")
shutil.move(old_history_txt, new_history_txt)
def update_26(self):
def migrate_legacy_db_format(self):
"""
Migration: Individual watch persistence (COPY-based, safe rollback).
@@ -578,25 +578,6 @@ class DatastoreUpdatesMixin:
# Populate settings from legacy data
logger.info("Populating settings from legacy data...")
if 'settings' in legacy_data:
self.data['settings'] = legacy_data['settings']
if 'app_guid' in legacy_data:
self.data['app_guid'] = legacy_data['app_guid']
if 'build_sha' in legacy_data:
self.data['build_sha'] = legacy_data['build_sha']
if 'version_tag' in legacy_data:
self.data['version_tag'] = legacy_data['version_tag']
# Rehydrate watches from legacy data
logger.info("Rehydrating watches from legacy data...")
self.data['watching'] = {}
for uuid, watch_data in legacy_data.get('watching', {}).items():
try:
self.data['watching'][uuid] = self.rehydrate_entity(uuid, watch_data)
except Exception as e:
logger.error(f"Failed to rehydrate watch {uuid}: {e}")
raise Exception(f"Migration failed: Could not rehydrate watch {uuid}. Error: {e}")
watch_count = len(self.data['watching'])
logger.success(f"Loaded {watch_count} watches from legacy format")
@@ -609,12 +590,10 @@ class DatastoreUpdatesMixin:
watch_dict = dict(watch)
watch_dir = os.path.join(self.datastore_path, uuid)
save_watch_atomic(watch_dir, uuid, watch_dict)
# Initialize hash
self._watch_hashes[uuid] = self._compute_hash(watch_dict)
saved_count += 1
if saved_count % 100 == 0:
logger.info(f" Progress: {saved_count}/{watch_count} watches saved...")
logger.info(f" Progress: {saved_count}/{watch_count} watches migrated...")
except Exception as e:
logger.error(f"Failed to save watch {uuid}: {e}")
@@ -667,9 +646,25 @@ class DatastoreUpdatesMixin:
# Success! Now reload from new format
logger.critical("Reloading datastore from new format...")
self._load_state()
self._load_state() # Includes load_watches
logger.success("Datastore reloaded from new format successfully")
# Verify all watches have hashes after migration
missing_hashes = [uuid for uuid in self.data['watching'].keys() if uuid not in self._watch_hashes]
if missing_hashes:
logger.error(f"WARNING: {len(missing_hashes)} watches missing hashes after migration: {missing_hashes[:5]}")
else:
logger.success(f"All {len(self.data['watching'])} watches have valid hashes after migration")
# Set schema version to latest available update
# This prevents re-running updates and re-marking all watches as dirty
updates_available = self.get_updates_available()
latest_schema = updates_available[-1] if updates_available else 26
self.data['settings']['application']['schema_version'] = latest_schema
self.mark_settings_dirty()
logger.info(f"Set schema_version to {latest_schema} (migration complete, all watches already saved)")
logger.critical("=" * 80)
logger.critical("MIGRATION COMPLETED SUCCESSFULLY!")
logger.critical("=" * 80)
@@ -687,4 +682,5 @@ class DatastoreUpdatesMixin:
logger.info(f" - rm {os.path.join(self.datastore_path, 'url-watches.json')}")
logger.info("")
# Schema version will be updated by run_updates()
def update_26(self):
self.migrate_legacy_db_format()
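
migrate_legacy_db_format fast-forwards schema_version to the newest entry returned by get_updates_available() so that run_updates() does not replay every migration and re-mark all watches dirty. That method is not shown in this diff; a sketch of how such discovery is commonly implemented, assuming it enumerates update_N methods by introspection:

    import re

    def get_updates_available(self):
        # Collect the numeric suffix of every update_N method, ascending,
        # e.g. [1, 2, ..., 26] (assumption: the real method may differ)
        versions = []
        for name in dir(self):
            match = re.match(r"update_(\d+)$", name)
            if match:
                versions.append(int(match.group(1)))
        return sorted(versions)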

View File

@@ -70,8 +70,8 @@ test_single_url() {
local test_id=$1
local dir="/tmp/cli-test-single-${test_id}-$$"
timeout 10 python3 changedetection.py -d "$dir" -C -u https://example.com -b &>/dev/null
[ -f "$dir/url-watches.json" ] && \
[ "$(python3 -c "import json; print(len(json.load(open('$dir/url-watches.json')).get('watching', {})))")" -eq 1 ]
# Count watch directories (UUID directories containing watch.json)
[ "$(find "$dir" -mindepth 2 -maxdepth 2 -name 'watch.json' | wc -l)" -eq 1 ]
}
test_multiple_urls() {
@@ -82,8 +82,8 @@ test_multiple_urls() {
-u https://github.com \
-u https://httpbin.org \
-b &>/dev/null
[ -f "$dir/url-watches.json" ] && \
[ "$(python3 -c "import json; print(len(json.load(open('$dir/url-watches.json')).get('watching', {})))")" -eq 3 ]
# Count watch directories (UUID directories containing watch.json)
[ "$(find "$dir" -mindepth 2 -maxdepth 2 -name 'watch.json' | wc -l)" -eq 3 ]
}
test_url_with_options() {
@@ -93,8 +93,17 @@ test_url_with_options() {
-u https://example.com \
-u0 '{"title":"Test Site","processor":"text_json_diff"}' \
-b &>/dev/null
[ -f "$dir/url-watches.json" ] && \
python3 -c "import json; data=json.load(open('$dir/url-watches.json')); watches=data.get('watching', {}); exit(0 if any(w.get('title')=='Test Site' for w in watches.values()) else 1)"
# Check that at least one watch.json contains the title "Test Site"
python3 -c "
import json, glob, sys
watch_files = glob.glob('$dir/*/watch.json')
for wf in watch_files:
with open(wf) as f:
data = json.load(f)
if data.get('title') == 'Test Site':
sys.exit(0)
sys.exit(1)
"
}
test_multiple_urls_with_options() {
@@ -106,9 +115,19 @@ test_multiple_urls_with_options() {
-u https://github.com \
-u1 '{"title":"Site Two"}' \
-b &>/dev/null
[ -f "$dir/url-watches.json" ] && \
[ "$(python3 -c "import json; print(len(json.load(open('$dir/url-watches.json')).get('watching', {})))")" -eq 2 ] && \
python3 -c "import json; data=json.load(open('$dir/url-watches.json')); watches=data.get('watching', {}); titles=[w.get('title') for w in watches.values()]; exit(0 if 'Site One' in titles and 'Site Two' in titles else 1)"
# Check that we have 2 watches and both titles are present
python3 -c "
import json, glob, sys
watch_files = glob.glob('$dir/*/watch.json')
if len(watch_files) != 2:
sys.exit(1)
titles = []
for wf in watch_files:
with open(wf) as f:
data = json.load(f)
titles.append(data.get('title'))
sys.exit(0 if 'Site One' in titles and 'Site Two' in titles else 1)
"
}
test_batch_mode_exit() {
@@ -126,21 +145,24 @@ test_batch_mode_exit() {
test_recheck_all() {
local test_id=$1
local dir="/tmp/cli-test-recheck-all-${test_id}-$$"
mkdir -p "$dir"
cat > "$dir/url-watches.json" << 'EOF'
{"watching":{"test-uuid":{"url":"https://example.com","last_checked":0,"processor":"text_json_diff","uuid":"test-uuid"}},"settings":{"application":{"password":false}}}
EOF
timeout 10 python3 changedetection.py -d "$dir" -r all -b 2>&1 | grep -q "Queuing all"
# Create a watch using CLI, then recheck it
timeout 10 python3 changedetection.py -d "$dir" -C -u https://example.com -b &>/dev/null
# Now recheck all watches
timeout 10 python3 changedetection.py -d "$dir" -r all -b 2>&1 | grep -q "Queuing"
}
test_recheck_specific() {
local test_id=$1
local dir="/tmp/cli-test-recheck-uuid-${test_id}-$$"
mkdir -p "$dir"
cat > "$dir/url-watches.json" << 'EOF'
{"watching":{"uuid-1":{"url":"https://example.com","last_checked":0,"processor":"text_json_diff","uuid":"uuid-1"},"uuid-2":{"url":"https://github.com","last_checked":0,"processor":"text_json_diff","uuid":"uuid-2"}},"settings":{"application":{"password":false}}}
EOF
timeout 10 python3 changedetection.py -d "$dir" -r uuid-1,uuid-2 -b 2>&1 | grep -q "Queuing 2 specific watches"
# Create 2 watches using CLI
timeout 12 python3 changedetection.py -d "$dir" -C \
-u https://example.com \
-u https://github.com \
-b &>/dev/null
# Get the UUIDs that were created
local uuids=$(find "$dir" -mindepth 2 -maxdepth 2 -name 'watch.json' -exec dirname {} \; | xargs -n1 basename | tr '\n' ',' | sed 's/,$//')
# Now recheck specific UUIDs
timeout 10 python3 changedetection.py -d "$dir" -r "$uuids" -b 2>&1 | grep -q "Queuing"
}
test_combined_operations() {
@@ -151,8 +173,8 @@ test_combined_operations() {
-u https://github.com \
-r all \
-b &>/dev/null
[ -f "$dir/url-watches.json" ] && \
[ "$(python3 -c "import json; print(len(json.load(open('$dir/url-watches.json')).get('watching', {})))")" -eq 2 ]
# Count watch directories (UUID directories containing watch.json)
[ "$(find "$dir" -mindepth 2 -maxdepth 2 -name 'watch.json' | wc -l)" -eq 2 ]
}
test_invalid_json() {

View File

@@ -0,0 +1,166 @@
#!/usr/bin/env python3
"""
Test notification_urls validation in Watch and Tag API endpoints.
Ensures that invalid AppRise URLs are rejected when setting notification_urls.
Valid AppRise notification URLs use specific protocols like:
- posts://example.com - POST to HTTP endpoint
- gets://example.com - GET to HTTP endpoint
- mailto://user@example.com - Email
- slack://token/channel - Slack
- discord://webhook_id/webhook_token - Discord
- etc.
Invalid notification URLs:
- https://example.com - Plain HTTPS is NOT a valid AppRise notification protocol
- ftp://example.com - FTP is NOT a valid AppRise notification protocol
- Plain URLs without proper AppRise protocol prefix
"""
from flask import url_for
import json
def test_watch_notification_urls_validation(client, live_server, measure_memory_usage, datastore_path):
"""Test that Watch PUT/POST endpoints validate notification_urls."""
api_key = live_server.app.config['DATASTORE'].data['settings']['application'].get('api_access_token')
# Test 1: Create a watch with valid notification URLs
valid_urls = ["posts://example.com/notify1", "posts://example.com/notify2"]
res = client.post(
url_for("createwatch"),
data=json.dumps({
"url": "https://example.com",
"notification_urls": valid_urls
}),
headers={'content-type': 'application/json', 'x-api-key': api_key}
)
assert res.status_code == 201, "Should accept valid notification URLs on watch creation"
watch_uuid = res.json['uuid']
# Verify the notification URLs were saved
res = client.get(
url_for("watch", uuid=watch_uuid),
headers={'x-api-key': api_key}
)
assert res.status_code == 200
assert set(res.json['notification_urls']) == set(valid_urls), "Valid notification URLs should be saved"
# Test 2: Try to create a watch with invalid notification URLs (https:// is not valid)
invalid_urls = ["https://example.com/webhook"]
res = client.post(
url_for("createwatch"),
data=json.dumps({
"url": "https://example.com",
"notification_urls": invalid_urls
}),
headers={'content-type': 'application/json', 'x-api-key': api_key}
)
assert res.status_code == 400, "Should reject https:// notification URLs (not a valid AppRise protocol)"
assert b"is not a valid AppRise URL" in res.data, "Should provide AppRise validation error message"
# Test 2b: Also test other invalid protocols
invalid_urls_ftp = ["ftp://not-apprise-url"]
res = client.post(
url_for("createwatch"),
data=json.dumps({
"url": "https://example.com",
"notification_urls": invalid_urls_ftp
}),
headers={'content-type': 'application/json', 'x-api-key': api_key}
)
assert res.status_code == 400, "Should reject ftp:// notification URLs"
assert b"is not a valid AppRise URL" in res.data, "Should provide AppRise validation error message"
# Test 3: Update watch with valid notification URLs
new_valid_urls = ["posts://newserver.com"]
res = client.put(
url_for("watch", uuid=watch_uuid),
data=json.dumps({"notification_urls": new_valid_urls}),
headers={'content-type': 'application/json', 'x-api-key': api_key}
)
assert res.status_code == 200, "Should accept valid notification URLs on watch update"
# Verify the notification URLs were updated
res = client.get(
url_for("watch", uuid=watch_uuid),
headers={'x-api-key': api_key}
)
assert res.status_code == 200
assert res.json['notification_urls'] == new_valid_urls, "Valid notification URLs should be updated"
# Test 4: Try to update watch with invalid notification URLs (plain https:// not valid)
invalid_https_url = ["https://example.com/webhook"]
res = client.put(
url_for("watch", uuid=watch_uuid),
data=json.dumps({"notification_urls": invalid_https_url}),
headers={'content-type': 'application/json', 'x-api-key': api_key}
)
assert res.status_code == 400, "Should reject https:// notification URLs on watch update"
assert b"is not a valid AppRise URL" in res.data, "Should provide AppRise validation error message"
# Test 5: Update watch with non-list notification_urls (caught by OpenAPI schema validation)
res = client.put(
url_for("watch", uuid=watch_uuid),
data=json.dumps({"notification_urls": "not-a-list"}),
headers={'content-type': 'application/json', 'x-api-key': api_key}
)
assert res.status_code == 400, "Should reject non-list notification_urls"
assert b"OpenAPI validation failed" in res.data or b"Request body validation error" in res.data
# Test 6: Verify original URLs are preserved after failed update
res = client.get(
url_for("watch", uuid=watch_uuid),
headers={'x-api-key': api_key}
)
assert res.status_code == 200
assert res.json['notification_urls'] == new_valid_urls, "URLs should remain unchanged after validation failure"
def test_tag_notification_urls_validation(client, live_server, measure_memory_usage, datastore_path):
"""Test that Tag PUT endpoint validates notification_urls."""
from changedetectionio.model import Tag
api_key = live_server.app.config['DATASTORE'].data['settings']['application'].get('api_access_token')
datastore = live_server.app.config['DATASTORE']
# Create a tag
tag_uuid = datastore.add_tag(title="Test Tag")
assert tag_uuid is not None
# Test 1: Update tag with valid notification URLs
valid_urls = ["posts://example.com/tag-notify"]
res = client.put(
url_for("tag", uuid=tag_uuid),
data=json.dumps({"notification_urls": valid_urls}),
headers={'content-type': 'application/json', 'x-api-key': api_key}
)
assert res.status_code == 200, "Should accept valid notification URLs on tag update"
# Verify the notification URLs were saved
tag = datastore.data['settings']['application']['tags'][tag_uuid]
assert tag['notification_urls'] == valid_urls, "Valid notification URLs should be saved to tag"
# Test 2: Try to update tag with invalid notification URLs (https:// not valid)
invalid_urls = ["https://example.com/webhook"]
res = client.put(
url_for("tag", uuid=tag_uuid),
data=json.dumps({"notification_urls": invalid_urls}),
headers={'content-type': 'application/json', 'x-api-key': api_key}
)
assert res.status_code == 400, "Should reject https:// notification URLs on tag update"
assert b"is not a valid AppRise URL" in res.data, "Should provide AppRise validation error message"
# Test 3: Update tag with non-list notification_urls (caught by OpenAPI schema validation)
res = client.put(
url_for("tag", uuid=tag_uuid),
data=json.dumps({"notification_urls": "not-a-list"}),
headers={'content-type': 'application/json', 'x-api-key': api_key}
)
assert res.status_code == 400, "Should reject non-list notification_urls"
assert b"OpenAPI validation failed" in res.data or b"Request body validation error" in res.data
# Test 4: Verify original URLs are preserved after failed update
tag = datastore.data['settings']['application']['tags'][tag_uuid]
assert tag['notification_urls'] == valid_urls, "URLs should remain unchanged after validation failure"

View File

@@ -144,7 +144,7 @@ def wait_for_all_checks(client=None):
"""
from changedetectionio.flask_app import update_q as global_update_q
from changedetectionio import worker_handler
time.sleep(0.05)
# Use the shared wait logic from worker_handler
return worker_handler.wait_for_all_checks(global_update_q, timeout=150)
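
The test helper now defers to worker_handler.wait_for_all_checks instead of sleeping and duplicating the polling logic. That shared helper is not shown in this diff; roughly, it would poll until the update queue drains (and workers go idle) or the timeout elapses. A hedged sketch — the queue API and the omitted worker-idleness check are assumptions:

    import time

    def wait_for_all_checks(update_q, timeout=150):
        # Hypothetical shape of the shared helper in worker_handler --
        # the real implementation may track worker state differently
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            if update_q.qsize() == 0:  # queue drained; idleness check omitted
                return True
            time.sleep(0.1)
        return False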