mirror of
https://github.com/dgtlmoon/changedetection.io.git
synced 2025-10-30 22:27:52 +00:00
Compare commits
24 Commits
browser-no
...
0.50.19
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
8f040a1a84 | ||
|
|
4dbab8d77a | ||
|
|
cde42c8a49 | ||
|
|
3b9d19df43 | ||
|
|
6ad4acc9fc | ||
|
|
3e59521f48 | ||
|
|
0970c087c8 | ||
|
|
676c550e6e | ||
|
|
78fa47f6f8 | ||
|
|
4aa5bb6da3 | ||
|
|
f7dfc9bbb8 | ||
|
|
584b6e378d | ||
|
|
754febfd33 | ||
|
|
0c9c475f32 | ||
|
|
e4baca1127 | ||
|
|
bb61a35a54 | ||
|
|
4b9ae5a97c | ||
|
|
c8caa0662d | ||
|
|
f4e8d1963f | ||
|
|
45d5e961dc | ||
|
|
45f2863966 | ||
|
|
01c1ac4c0c | ||
|
|
b2f9aec383 | ||
|
|
a95aa67aef |
8
.github/dependabot.yml
vendored
8
.github/dependabot.yml
vendored
@@ -4,11 +4,11 @@ updates:
|
||||
directory: /
|
||||
schedule:
|
||||
interval: "weekly"
|
||||
"caronc/apprise":
|
||||
versioning-strategy: "increase"
|
||||
schedule:
|
||||
interval: "daily"
|
||||
groups:
|
||||
all:
|
||||
patterns:
|
||||
- "*"
|
||||
- package-ecosystem: pip
|
||||
directory: /
|
||||
schedule:
|
||||
interval: "weekly"
|
||||
|
||||
6
.github/workflows/codeql-analysis.yml
vendored
6
.github/workflows/codeql-analysis.yml
vendored
@@ -34,7 +34,7 @@ jobs:
|
||||
|
||||
# Initializes the CodeQL tools for scanning.
|
||||
- name: Initialize CodeQL
|
||||
uses: github/codeql-action/init@v3
|
||||
uses: github/codeql-action/init@v4
|
||||
with:
|
||||
languages: ${{ matrix.language }}
|
||||
# If you wish to specify custom queries, you can do so here or in a config file.
|
||||
@@ -45,7 +45,7 @@ jobs:
|
||||
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
|
||||
# If this step fails, then you should remove it and run the build manually (see below)
|
||||
- name: Autobuild
|
||||
uses: github/codeql-action/autobuild@v3
|
||||
uses: github/codeql-action/autobuild@v4
|
||||
|
||||
# ℹ️ Command-line programs to run using the OS shell.
|
||||
# 📚 https://git.io/JvXDl
|
||||
@@ -59,4 +59,4 @@ jobs:
|
||||
# make release
|
||||
|
||||
- name: Perform CodeQL Analysis
|
||||
uses: github/codeql-action/analyze@v3
|
||||
uses: github/codeql-action/analyze@v4
|
||||
|
||||
4
.github/workflows/containers.yml
vendored
4
.github/workflows/containers.yml
vendored
@@ -95,7 +95,7 @@ jobs:
|
||||
push: true
|
||||
tags: |
|
||||
${{ secrets.DOCKER_HUB_USERNAME }}/changedetection.io:dev,ghcr.io/${{ github.repository }}:dev
|
||||
platforms: linux/amd64,linux/arm64,linux/arm/v7,linux/arm/v8,linux/arm64/v8
|
||||
platforms: linux/amd64,linux/arm64,linux/arm/v7,linux/arm/v8
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=max
|
||||
|
||||
@@ -133,7 +133,7 @@ jobs:
|
||||
file: ./Dockerfile
|
||||
push: true
|
||||
tags: ${{ steps.meta.outputs.tags }}
|
||||
platforms: linux/amd64,linux/arm64,linux/arm/v7,linux/arm/v8,linux/arm64/v8
|
||||
platforms: linux/amd64,linux/arm64,linux/arm/v7,linux/arm/v8
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=max
|
||||
# Looks like this was disabled
|
||||
|
||||
4
.github/workflows/test-container-build.yml
vendored
4
.github/workflows/test-container-build.yml
vendored
@@ -38,8 +38,6 @@ jobs:
|
||||
dockerfile: ./Dockerfile
|
||||
- platform: linux/arm/v8
|
||||
dockerfile: ./Dockerfile
|
||||
- platform: linux/arm64/v8
|
||||
dockerfile: ./Dockerfile
|
||||
# Alpine Dockerfile platforms (musl via alpine check)
|
||||
- platform: linux/amd64
|
||||
dockerfile: ./.github/test/Dockerfile-alpine
|
||||
@@ -76,5 +74,5 @@ jobs:
|
||||
file: ${{ matrix.dockerfile }}
|
||||
platforms: ${{ matrix.platform }}
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=max
|
||||
cache-to: type=gha,mode=min
|
||||
|
||||
|
||||
@@ -71,7 +71,6 @@ jobs:
|
||||
docker run test-changedetectionio bash -c 'python3 -m unittest changedetectionio.tests.unit.test_watch_model'
|
||||
docker run test-changedetectionio bash -c 'python3 -m unittest changedetectionio.tests.unit.test_jinja2_security'
|
||||
docker run test-changedetectionio bash -c 'python3 -m unittest changedetectionio.tests.unit.test_semver'
|
||||
docker run test-changedetectionio bash -c 'python3 -m unittest changedetectionio.tests.unit.test_browser_notifications'
|
||||
|
||||
- name: Test built container with Pytest (generally as requests/plaintext fetching)
|
||||
run: |
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
|
||||
# Read more https://github.com/dgtlmoon/changedetection.io/wiki
|
||||
|
||||
__version__ = '0.50.14'
|
||||
__version__ = '0.50.19'
|
||||
|
||||
from changedetectionio.strtobool import strtobool
|
||||
from json.decoder import JSONDecodeError
|
||||
|
||||
@@ -1,10 +1,7 @@
|
||||
import copy
|
||||
import yaml
|
||||
import functools
|
||||
from flask import request, abort
|
||||
from loguru import logger
|
||||
from openapi_core import OpenAPI
|
||||
from openapi_core.contrib.flask import FlaskOpenAPIRequest
|
||||
from . import api_schema
|
||||
from ..model import watch_base
|
||||
|
||||
@@ -34,7 +31,11 @@ schema_delete_notification_urls['required'] = ['notification_urls']
|
||||
|
||||
@functools.cache
|
||||
def get_openapi_spec():
|
||||
"""Lazy load OpenAPI spec and dependencies only when validation is needed."""
|
||||
import os
|
||||
import yaml # Lazy import - only loaded when API validation is actually used
|
||||
from openapi_core import OpenAPI # Lazy import - saves ~10.7 MB on startup
|
||||
|
||||
spec_path = os.path.join(os.path.dirname(__file__), '../../docs/api-spec.yaml')
|
||||
with open(spec_path, 'r') as f:
|
||||
spec_dict = yaml.safe_load(f)
|
||||
@@ -49,6 +50,9 @@ def validate_openapi_request(operation_id):
|
||||
try:
|
||||
# Skip OpenAPI validation for GET requests since they don't have request bodies
|
||||
if request.method.upper() != 'GET':
|
||||
# Lazy import - only loaded when actually validating a request
|
||||
from openapi_core.contrib.flask import FlaskOpenAPIRequest
|
||||
|
||||
spec = get_openapi_spec()
|
||||
openapi_request = FlaskOpenAPIRequest(request)
|
||||
result = spec.unmarshal_request(openapi_request)
|
||||
|
||||
@@ -1 +0,0 @@
|
||||
# Browser notifications blueprint
|
||||
@@ -1,76 +0,0 @@
|
||||
from flask import Blueprint, jsonify, request
|
||||
from loguru import logger
|
||||
|
||||
|
||||
def construct_blueprint(datastore):
|
||||
browser_notifications_blueprint = Blueprint('browser_notifications', __name__)
|
||||
|
||||
@browser_notifications_blueprint.route("/test", methods=['POST'])
|
||||
def test_browser_notification():
|
||||
"""Send a test browser notification using the apprise handler"""
|
||||
try:
|
||||
from changedetectionio.notification.apprise_plugin.custom_handlers import apprise_browser_notification_handler
|
||||
|
||||
# Check if there are any subscriptions
|
||||
browser_subscriptions = datastore.data.get('settings', {}).get('application', {}).get('browser_subscriptions', [])
|
||||
if not browser_subscriptions:
|
||||
return jsonify({'success': False, 'message': 'No browser subscriptions found'}), 404
|
||||
|
||||
# Get notification data from request or use defaults
|
||||
data = request.get_json() or {}
|
||||
title = data.get('title', 'Test Notification')
|
||||
body = data.get('body', 'This is a test notification from changedetection.io')
|
||||
|
||||
# Use the apprise handler directly
|
||||
success = apprise_browser_notification_handler(
|
||||
body=body,
|
||||
title=title,
|
||||
notify_type='info',
|
||||
meta={'url': 'browser://test'}
|
||||
)
|
||||
|
||||
if success:
|
||||
subscription_count = len(browser_subscriptions)
|
||||
return jsonify({
|
||||
'success': True,
|
||||
'message': f'Test notification sent successfully to {subscription_count} subscriber(s)'
|
||||
})
|
||||
else:
|
||||
return jsonify({'success': False, 'message': 'Failed to send test notification'}), 500
|
||||
|
||||
except ImportError:
|
||||
logger.error("Browser notification handler not available")
|
||||
return jsonify({'success': False, 'message': 'Browser notification handler not available'}), 500
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to send test browser notification: {e}")
|
||||
return jsonify({'success': False, 'message': f'Error: {str(e)}'}), 500
|
||||
|
||||
@browser_notifications_blueprint.route("/clear", methods=['POST'])
|
||||
def clear_all_browser_notifications():
|
||||
"""Clear all browser notification subscriptions from the datastore"""
|
||||
try:
|
||||
# Get current subscription count
|
||||
browser_subscriptions = datastore.data.get('settings', {}).get('application', {}).get('browser_subscriptions', [])
|
||||
subscription_count = len(browser_subscriptions)
|
||||
|
||||
# Clear all subscriptions
|
||||
if 'settings' not in datastore.data:
|
||||
datastore.data['settings'] = {}
|
||||
if 'application' not in datastore.data['settings']:
|
||||
datastore.data['settings']['application'] = {}
|
||||
|
||||
datastore.data['settings']['application']['browser_subscriptions'] = []
|
||||
datastore.needs_write = True
|
||||
|
||||
logger.info(f"Cleared {subscription_count} browser notification subscriptions")
|
||||
|
||||
return jsonify({
|
||||
'success': True,
|
||||
'message': f'Cleared {subscription_count} browser notification subscription(s)'
|
||||
})
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to clear all browser notifications: {e}")
|
||||
return jsonify({'success': False, 'message': f'Clear all failed: {str(e)}'}), 500
|
||||
|
||||
return browser_notifications_blueprint
|
||||
@@ -191,6 +191,12 @@ nav
|
||||
</ul>
|
||||
</span>
|
||||
</fieldset>
|
||||
<fieldset class="pure-group">
|
||||
{{ render_checkbox_field(form.application.form.strip_ignored_lines) }}
|
||||
<span class="pure-form-message-inline">Remove any text that appears in the "Ignore text" from the output (otherwise its just ignored for change-detection)<br>
|
||||
<i>Note:</i> Changing this will change the status of your existing watches, possibly trigger alerts etc.
|
||||
</span>
|
||||
</fieldset>
|
||||
</div>
|
||||
|
||||
<div class="tab-pane-inner" id="api">
|
||||
|
||||
@@ -87,7 +87,6 @@ def construct_blueprint(datastore: ChangeDetectionStore, update_q, queuedWatchMe
|
||||
form=form,
|
||||
guid=datastore.data['app_guid'],
|
||||
has_proxies=datastore.proxy_list,
|
||||
has_unviewed=datastore.has_unviewed,
|
||||
hosted_sticky=os.getenv("SALTED_PASS", False) == False,
|
||||
now_time_server=round(time.time()),
|
||||
pagination=pagination,
|
||||
@@ -97,6 +96,7 @@ def construct_blueprint(datastore: ChangeDetectionStore, update_q, queuedWatchMe
|
||||
sort_order=request.args.get('order') if request.args.get('order') else request.cookies.get('order'),
|
||||
system_default_fetcher=datastore.data['settings']['application'].get('fetch_backend'),
|
||||
tags=sorted_tags,
|
||||
unread_changes_count=datastore.unread_changes_count,
|
||||
watches=sorted_watches
|
||||
)
|
||||
|
||||
|
||||
@@ -82,8 +82,11 @@ document.addEventListener('DOMContentLoaded', function() {
|
||||
{%- set cols_required = cols_required + 1 -%}
|
||||
{%- endif -%}
|
||||
{%- set ui_settings = datastore.data['settings']['application']['ui'] -%}
|
||||
|
||||
<div id="watch-table-wrapper">
|
||||
{%- set wrapper_classes = [
|
||||
'has-unread-changes' if unread_changes_count else '',
|
||||
'has-error' if errored_count else '',
|
||||
] -%}
|
||||
<div id="watch-table-wrapper" class="{{ wrapper_classes | reject('equalto', '') | join(' ') }}">
|
||||
{%- set table_classes = [
|
||||
'favicon-enabled' if 'favicons_enabled' not in ui_settings or ui_settings['favicons_enabled'] else 'favicon-not-enabled',
|
||||
] -%}
|
||||
@@ -241,10 +244,10 @@ document.addEventListener('DOMContentLoaded', function() {
|
||||
</tbody>
|
||||
</table>
|
||||
<ul id="post-list-buttons">
|
||||
<li id="post-list-with-errors" class="{%- if errored_count -%}has-error{%- endif -%}" style="display: none;" >
|
||||
<li id="post-list-with-errors" style="display: none;" >
|
||||
<a href="{{url_for('watchlist.index', with_errors=1, tag=request.args.get('tag')) }}" class="pure-button button-tag button-error">With errors ({{ errored_count }})</a>
|
||||
</li>
|
||||
<li id="post-list-mark-views" class="{%- if has_unviewed -%}has-unviewed{%- endif -%}" style="display: none;" >
|
||||
<li id="post-list-mark-views" style="display: none;" >
|
||||
<a href="{{url_for('ui.mark_all_viewed',with_errors=request.args.get('with_errors',0)) }}" class="pure-button button-tag " id="mark-all-viewed">Mark all viewed</a>
|
||||
</li>
|
||||
{%- if active_tag_uuid -%}
|
||||
@@ -252,8 +255,8 @@ document.addEventListener('DOMContentLoaded', function() {
|
||||
<a href="{{url_for('ui.mark_all_viewed', tag=active_tag_uuid) }}" class="pure-button button-tag " id="mark-all-viewed">Mark all viewed in '{{active_tag.title}}'</a>
|
||||
</li>
|
||||
{%- endif -%}
|
||||
<li id="post-list-unread" class="{%- if has_unviewed -%}has-unviewed{%- endif -%}" style="display: none;" >
|
||||
<a href="{{url_for('watchlist.index', unread=1, tag=request.args.get('tag')) }}" class="pure-button button-tag">Unread</a>
|
||||
<li id="post-list-unread" style="display: none;" >
|
||||
<a href="{{url_for('watchlist.index', unread=1, tag=request.args.get('tag')) }}" class="pure-button button-tag">Unread (<span id="unread-tab-counter">{{ unread_changes_count }}</span>)</a>
|
||||
</li>
|
||||
<li>
|
||||
<a href="{{ url_for('ui.form_watch_checknow', tag=active_tag_uuid, with_errors=request.args.get('with_errors',0)) }}" class="pure-button button-tag" id="recheck-all">Recheck
|
||||
|
||||
@@ -39,11 +39,6 @@ from loguru import logger
|
||||
from changedetectionio import __version__
|
||||
from changedetectionio import queuedWatchMetaData
|
||||
from changedetectionio.api import Watch, WatchHistory, WatchSingleHistory, CreateWatch, Import, SystemInfo, Tag, Tags, Notifications, WatchFavicon
|
||||
from changedetectionio.notification.BrowserNotifications import (
|
||||
BrowserNotificationsVapidPublicKey,
|
||||
BrowserNotificationsSubscribe,
|
||||
BrowserNotificationsUnsubscribe
|
||||
)
|
||||
from changedetectionio.api.Search import Search
|
||||
from .time_handler import is_within_schedule
|
||||
|
||||
@@ -99,7 +94,6 @@ except locale.Error:
|
||||
logger.warning(f"Unable to set locale {default_locale}, locale is not installed maybe?")
|
||||
|
||||
watch_api = Api(app, decorators=[csrf.exempt])
|
||||
browser_notification_api = Api(app, decorators=[csrf.exempt])
|
||||
|
||||
def init_app_secret(datastore_path):
|
||||
secret = ""
|
||||
@@ -342,11 +336,6 @@ def changedetection_app(config=None, datastore_o=None):
|
||||
|
||||
watch_api.add_resource(Notifications, '/api/v1/notifications',
|
||||
resource_class_kwargs={'datastore': datastore})
|
||||
|
||||
# Browser notification endpoints
|
||||
browser_notification_api.add_resource(BrowserNotificationsVapidPublicKey, '/browser-notifications-api/vapid-public-key')
|
||||
browser_notification_api.add_resource(BrowserNotificationsSubscribe, '/browser-notifications-api/subscribe')
|
||||
browser_notification_api.add_resource(BrowserNotificationsUnsubscribe, '/browser-notifications-api/unsubscribe')
|
||||
|
||||
@login_manager.user_loader
|
||||
def user_loader(email):
|
||||
@@ -500,29 +489,10 @@ def changedetection_app(config=None, datastore_o=None):
|
||||
except FileNotFoundError:
|
||||
abort(404)
|
||||
|
||||
@app.route("/service-worker.js", methods=['GET'])
|
||||
def service_worker():
|
||||
from flask import make_response
|
||||
try:
|
||||
# Serve from the changedetectionio/static/js directory
|
||||
static_js_path = os.path.join(os.path.dirname(__file__), 'static', 'js')
|
||||
response = make_response(send_from_directory(static_js_path, "service-worker.js"))
|
||||
response.headers['Content-Type'] = 'application/javascript'
|
||||
response.headers['Service-Worker-Allowed'] = '/'
|
||||
response.headers['Cache-Control'] = 'no-cache, no-store, must-revalidate'
|
||||
response.headers['Pragma'] = 'no-cache'
|
||||
response.headers['Expires'] = '0'
|
||||
return response
|
||||
except FileNotFoundError:
|
||||
abort(404)
|
||||
|
||||
|
||||
import changedetectionio.blueprint.browser_steps as browser_steps
|
||||
app.register_blueprint(browser_steps.construct_blueprint(datastore), url_prefix='/browser-steps')
|
||||
|
||||
import changedetectionio.blueprint.browser_notifications.browser_notifications as browser_notifications
|
||||
app.register_blueprint(browser_notifications.construct_blueprint(datastore), url_prefix='/browser-notifications')
|
||||
|
||||
from changedetectionio.blueprint.imports import construct_blueprint as construct_import_blueprint
|
||||
app.register_blueprint(construct_import_blueprint(datastore, update_q, queuedWatchMetaData), url_prefix='/imports')
|
||||
|
||||
|
||||
@@ -707,7 +707,6 @@ class commonSettingsForm(Form):
|
||||
processor = RadioField( label=u"Processor - What do you want to achieve?", choices=processors.available_processors(), default="text_json_diff")
|
||||
timezone = StringField("Timezone for watch schedule", render_kw={"list": "timezones"}, validators=[validateTimeZoneName()])
|
||||
webdriver_delay = IntegerField('Wait seconds before extracting text', validators=[validators.Optional(), validators.NumberRange(min=1, message="Should contain one or more seconds")])
|
||||
|
||||
|
||||
|
||||
class importForm(Form):
|
||||
@@ -760,6 +759,7 @@ class processor_text_json_diff_form(commonSettingsForm):
|
||||
check_unique_lines = BooleanField('Only trigger when unique lines appear in all history', default=False)
|
||||
remove_duplicate_lines = BooleanField('Remove duplicate lines of text', default=False)
|
||||
sort_text_alphabetically = BooleanField('Sort text alphabetically', default=False)
|
||||
strip_ignored_lines = TernaryNoneBooleanField('Strip ignored lines', default=None)
|
||||
trim_text_whitespace = BooleanField('Trim whitespace before and after text', default=False)
|
||||
|
||||
filter_text_added = BooleanField('Added lines', default=True)
|
||||
@@ -937,6 +937,7 @@ class globalSettingsApplicationForm(commonSettingsForm):
|
||||
removepassword_button = SubmitField('Remove password', render_kw={"class": "pure-button pure-button-primary"})
|
||||
render_anchor_tag_content = BooleanField('Render anchor tag content', default=False)
|
||||
shared_diff_access = BooleanField('Allow anonymous access to watch history page when password is enabled', default=False, validators=[validators.Optional()])
|
||||
strip_ignored_lines = BooleanField('Strip ignored lines')
|
||||
rss_hide_muted_watches = BooleanField('Hide muted watches from RSS feed', default=True,
|
||||
validators=[validators.Optional()])
|
||||
filter_failure_notification_threshold_attempts = IntegerField('Number of times the filter can be missing before sending a notification',
|
||||
|
||||
@@ -100,7 +100,7 @@ def element_removal(selectors: List[str], html_content):
|
||||
xpath_selectors = []
|
||||
|
||||
for selector in selectors:
|
||||
if selector.startswith(('xpath:', 'xpath1:', '//')):
|
||||
if selector.strip().startswith(('xpath:', 'xpath1:', '//')):
|
||||
# Handle XPath selectors separately
|
||||
xpath_selector = selector.removeprefix('xpath:').removeprefix('xpath1:')
|
||||
xpath_selectors.append(xpath_selector)
|
||||
|
||||
@@ -57,6 +57,7 @@ class model(dict):
|
||||
'rss_hide_muted_watches': True,
|
||||
'schema_version' : 0,
|
||||
'shared_diff_access': False,
|
||||
'strip_ignored_lines': False,
|
||||
'tags': {}, #@todo use Tag.model initialisers
|
||||
'timezone': None, # Default IANA timezone name
|
||||
'webdriver_delay': None , # Extra delay in seconds before extracting text
|
||||
@@ -66,11 +67,6 @@ class model(dict):
|
||||
'socket_io_enabled': True,
|
||||
'favicons_enabled': True
|
||||
},
|
||||
'vapid': {
|
||||
'private_key': None,
|
||||
'public_key': None,
|
||||
'contact_email': None
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -58,6 +58,7 @@ class watch_base(dict):
|
||||
'proxy': None, # Preferred proxy connection
|
||||
'remote_server_reply': None, # From 'server' reply header
|
||||
'sort_text_alphabetically': False,
|
||||
'strip_ignored_lines': None,
|
||||
'subtractive_selectors': [],
|
||||
'tag': '', # Old system of text name for a tag, to be removed
|
||||
'tags': [], # list of UUIDs to App.Tags
|
||||
|
||||
@@ -1,217 +0,0 @@
|
||||
import json
|
||||
from flask import request, current_app
|
||||
from flask_restful import Resource, marshal_with, fields
|
||||
from loguru import logger
|
||||
|
||||
|
||||
browser_notifications_fields = {
|
||||
'success': fields.Boolean,
|
||||
'message': fields.String,
|
||||
}
|
||||
|
||||
vapid_public_key_fields = {
|
||||
'publicKey': fields.String,
|
||||
}
|
||||
|
||||
test_notification_fields = {
|
||||
'success': fields.Boolean,
|
||||
'message': fields.String,
|
||||
'sent_count': fields.Integer,
|
||||
}
|
||||
|
||||
|
||||
class BrowserNotificationsVapidPublicKey(Resource):
|
||||
"""Get VAPID public key for browser push notifications"""
|
||||
|
||||
@marshal_with(vapid_public_key_fields)
|
||||
def get(self):
|
||||
try:
|
||||
from changedetectionio.notification.apprise_plugin.browser_notification_helpers import (
|
||||
get_vapid_config_from_datastore, convert_pem_public_key_for_browser
|
||||
)
|
||||
|
||||
datastore = current_app.config.get('DATASTORE')
|
||||
if not datastore:
|
||||
return {'publicKey': None}, 500
|
||||
|
||||
private_key, public_key_pem, contact_email = get_vapid_config_from_datastore(datastore)
|
||||
|
||||
if not public_key_pem:
|
||||
return {'publicKey': None}, 404
|
||||
|
||||
# Convert PEM format to URL-safe base64 format for browser
|
||||
public_key_b64 = convert_pem_public_key_for_browser(public_key_pem)
|
||||
|
||||
if public_key_b64:
|
||||
return {'publicKey': public_key_b64}
|
||||
else:
|
||||
return {'publicKey': None}, 500
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to get VAPID public key: {e}")
|
||||
return {'publicKey': None}, 500
|
||||
|
||||
|
||||
class BrowserNotificationsSubscribe(Resource):
|
||||
"""Subscribe to browser notifications"""
|
||||
|
||||
@marshal_with(browser_notifications_fields)
|
||||
def post(self):
|
||||
try:
|
||||
data = request.get_json()
|
||||
if not data:
|
||||
return {'success': False, 'message': 'No data provided'}, 400
|
||||
|
||||
subscription = data.get('subscription')
|
||||
|
||||
if not subscription:
|
||||
return {'success': False, 'message': 'Subscription is required'}, 400
|
||||
|
||||
# Validate subscription format
|
||||
required_fields = ['endpoint', 'keys']
|
||||
for field in required_fields:
|
||||
if field not in subscription:
|
||||
return {'success': False, 'message': f'Missing subscription field: {field}'}, 400
|
||||
|
||||
if 'p256dh' not in subscription['keys'] or 'auth' not in subscription['keys']:
|
||||
return {'success': False, 'message': 'Missing subscription keys'}, 400
|
||||
|
||||
# Get datastore
|
||||
datastore = current_app.config.get('DATASTORE')
|
||||
if not datastore:
|
||||
return {'success': False, 'message': 'Datastore not available'}, 500
|
||||
|
||||
# Initialize browser_subscriptions if it doesn't exist
|
||||
if 'browser_subscriptions' not in datastore.data['settings']['application']:
|
||||
datastore.data['settings']['application']['browser_subscriptions'] = []
|
||||
|
||||
# Check if subscription already exists
|
||||
existing_subscriptions = datastore.data['settings']['application']['browser_subscriptions']
|
||||
for existing_sub in existing_subscriptions:
|
||||
if existing_sub.get('endpoint') == subscription.get('endpoint'):
|
||||
return {'success': True, 'message': 'Already subscribed to browser notifications'}
|
||||
|
||||
# Add new subscription
|
||||
datastore.data['settings']['application']['browser_subscriptions'].append(subscription)
|
||||
datastore.needs_write = True
|
||||
|
||||
logger.info(f"New browser notification subscription: {subscription.get('endpoint')}")
|
||||
|
||||
return {'success': True, 'message': 'Successfully subscribed to browser notifications'}
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to subscribe to browser notifications: {e}")
|
||||
return {'success': False, 'message': f'Subscription failed: {str(e)}'}, 500
|
||||
|
||||
|
||||
class BrowserNotificationsUnsubscribe(Resource):
|
||||
"""Unsubscribe from browser notifications"""
|
||||
|
||||
@marshal_with(browser_notifications_fields)
|
||||
def post(self):
|
||||
try:
|
||||
data = request.get_json()
|
||||
if not data:
|
||||
return {'success': False, 'message': 'No data provided'}, 400
|
||||
|
||||
subscription = data.get('subscription')
|
||||
|
||||
if not subscription or not subscription.get('endpoint'):
|
||||
return {'success': False, 'message': 'Valid subscription is required'}, 400
|
||||
|
||||
# Get datastore
|
||||
datastore = current_app.config.get('DATASTORE')
|
||||
if not datastore:
|
||||
return {'success': False, 'message': 'Datastore not available'}, 500
|
||||
|
||||
# Check if subscriptions exist
|
||||
browser_subscriptions = datastore.data.get('settings', {}).get('application', {}).get('browser_subscriptions', [])
|
||||
if not browser_subscriptions:
|
||||
return {'success': True, 'message': 'No subscriptions found'}
|
||||
|
||||
# Remove subscription with matching endpoint
|
||||
endpoint = subscription.get('endpoint')
|
||||
original_count = len(browser_subscriptions)
|
||||
|
||||
datastore.data['settings']['application']['browser_subscriptions'] = [
|
||||
sub for sub in browser_subscriptions
|
||||
if sub.get('endpoint') != endpoint
|
||||
]
|
||||
|
||||
removed_count = original_count - len(datastore.data['settings']['application']['browser_subscriptions'])
|
||||
|
||||
if removed_count > 0:
|
||||
datastore.needs_write = True
|
||||
logger.info(f"Removed {removed_count} browser notification subscription(s)")
|
||||
return {'success': True, 'message': 'Successfully unsubscribed from browser notifications'}
|
||||
else:
|
||||
return {'success': True, 'message': 'No matching subscription found'}
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to unsubscribe from browser notifications: {e}")
|
||||
return {'success': False, 'message': f'Unsubscribe failed: {str(e)}'}, 500
|
||||
|
||||
|
||||
|
||||
class BrowserNotificationsTest(Resource):
|
||||
"""Send a test browser notification"""
|
||||
|
||||
@marshal_with(test_notification_fields)
|
||||
def post(self):
|
||||
try:
|
||||
data = request.get_json()
|
||||
if not data:
|
||||
return {'success': False, 'message': 'No data provided', 'sent_count': 0}, 400
|
||||
|
||||
title = data.get('title', 'Test Notification')
|
||||
body = data.get('body', 'This is a test notification from changedetection.io')
|
||||
|
||||
# Get datastore to check if subscriptions exist
|
||||
datastore = current_app.config.get('DATASTORE')
|
||||
if not datastore:
|
||||
return {'success': False, 'message': 'Datastore not available', 'sent_count': 0}, 500
|
||||
|
||||
# Check if there are subscriptions before attempting to send
|
||||
browser_subscriptions = datastore.data.get('settings', {}).get('application', {}).get('browser_subscriptions', [])
|
||||
if not browser_subscriptions:
|
||||
return {'success': False, 'message': 'No subscriptions found', 'sent_count': 0}, 404
|
||||
|
||||
# Use the apprise handler directly
|
||||
try:
|
||||
from changedetectionio.notification.apprise_plugin.custom_handlers import apprise_browser_notification_handler
|
||||
|
||||
# Call the apprise handler with test data
|
||||
success = apprise_browser_notification_handler(
|
||||
body=body,
|
||||
title=title,
|
||||
notify_type='info',
|
||||
meta={'url': 'browser://test'}
|
||||
)
|
||||
|
||||
# Count how many subscriptions we have after sending (some may have been removed if invalid)
|
||||
final_subscriptions = datastore.data.get('settings', {}).get('application', {}).get('browser_subscriptions', [])
|
||||
sent_count = len(browser_subscriptions) # Original count
|
||||
|
||||
if success:
|
||||
return {
|
||||
'success': True,
|
||||
'message': f'Test notification sent successfully to {sent_count} subscriber(s)',
|
||||
'sent_count': sent_count
|
||||
}
|
||||
else:
|
||||
return {
|
||||
'success': False,
|
||||
'message': 'Failed to send test notification',
|
||||
'sent_count': 0
|
||||
}, 500
|
||||
|
||||
except ImportError:
|
||||
return {'success': False, 'message': 'Browser notification handler not available', 'sent_count': 0}, 500
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to send test browser notification: {e}")
|
||||
return {'success': False, 'message': f'Test failed: {str(e)}', 'sent_count': 0}, 500
|
||||
|
||||
|
||||
|
||||
|
||||
@@ -1,273 +0,0 @@
|
||||
"""
|
||||
Browser notification helpers for Web Push API
|
||||
Shared utility functions for VAPID key handling and notification sending
|
||||
"""
|
||||
|
||||
import json
|
||||
import re
|
||||
import time
|
||||
from loguru import logger
|
||||
|
||||
|
||||
def convert_pem_private_key_for_pywebpush(private_key):
|
||||
"""
|
||||
Convert PEM private key to the format that pywebpush expects
|
||||
|
||||
Args:
|
||||
private_key: PEM private key string or already converted key
|
||||
|
||||
Returns:
|
||||
Vapid instance for pywebpush (avoids PEM parsing compatibility issues)
|
||||
"""
|
||||
try:
|
||||
from py_vapid import Vapid
|
||||
import tempfile
|
||||
import os
|
||||
|
||||
# If we get a string, assume it's PEM and create a Vapid instance from it
|
||||
if isinstance(private_key, str) and private_key.startswith('-----BEGIN'):
|
||||
# Write PEM to temporary file and load with Vapid.from_file
|
||||
with tempfile.NamedTemporaryFile(mode='w', suffix='.pem', delete=False) as tmp_file:
|
||||
tmp_file.write(private_key)
|
||||
tmp_file.flush()
|
||||
temp_path = tmp_file.name
|
||||
|
||||
try:
|
||||
# Load using Vapid.from_file - this is more compatible with pywebpush
|
||||
vapid_instance = Vapid.from_file(temp_path)
|
||||
os.unlink(temp_path) # Clean up
|
||||
logger.debug("Successfully created Vapid instance from PEM")
|
||||
return vapid_instance
|
||||
except Exception as e:
|
||||
os.unlink(temp_path) # Clean up even on error
|
||||
logger.error(f"Failed to create Vapid instance from PEM: {e}")
|
||||
# Fall back to returning the original PEM string
|
||||
return private_key
|
||||
else:
|
||||
# Return as-is if not a PEM string
|
||||
return private_key
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to convert private key: {e}")
|
||||
return private_key
|
||||
|
||||
|
||||
def convert_pem_public_key_for_browser(public_key_pem):
|
||||
"""
|
||||
Convert PEM public key to URL-safe base64 format for browser applicationServerKey
|
||||
|
||||
Args:
|
||||
public_key_pem: PEM public key string
|
||||
|
||||
Returns:
|
||||
URL-safe base64 encoded public key without padding
|
||||
"""
|
||||
try:
|
||||
from cryptography.hazmat.primitives import serialization
|
||||
import base64
|
||||
|
||||
# Parse PEM directly using cryptography library
|
||||
pem_bytes = public_key_pem.encode() if isinstance(public_key_pem, str) else public_key_pem
|
||||
|
||||
# Load the public key from PEM
|
||||
public_key_crypto = serialization.load_pem_public_key(pem_bytes)
|
||||
|
||||
# Get the raw public key bytes in uncompressed format (what browsers expect)
|
||||
public_key_raw = public_key_crypto.public_bytes(
|
||||
encoding=serialization.Encoding.X962,
|
||||
format=serialization.PublicFormat.UncompressedPoint
|
||||
)
|
||||
|
||||
# Convert to URL-safe base64 (remove padding)
|
||||
public_key_b64 = base64.urlsafe_b64encode(public_key_raw).decode('ascii').rstrip('=')
|
||||
|
||||
return public_key_b64
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to convert public key format: {e}")
|
||||
return None
|
||||
|
||||
|
||||
def send_push_notifications(subscriptions, notification_payload, private_key, contact_email, datastore):
    """
    Deliver a web-push payload to every subscription in the list.

    Subscriptions rejected by the push service with 404/410 are pruned from
    the ``subscriptions`` list in place and the datastore is flagged to be
    written back to disk.

    Args:
        subscriptions: List of push subscription dicts
        notification_payload: Dict with notification data (title, body, etc.)
        private_key: VAPID private key (converted to pywebpush format if needed)
        contact_email: Contact email used in the VAPID "sub" claim
        datastore: Datastore object, flagged via ``needs_write`` when pruning

    Returns:
        Tuple of (success_count, total_count)
    """
    try:
        from pywebpush import webpush, WebPushException
    except ImportError:
        logger.error("pywebpush not available - cannot send browser notifications")
        return 0, len(subscriptions)

    # pywebpush expects its own key format rather than a raw PEM string
    vapid_key = convert_pem_private_key_for_pywebpush(private_key)

    total_count = len(subscriptions)
    success_count = 0
    payload_json = json.dumps(notification_payload)

    # Iterate a copy so dead entries can be removed from the original list
    for sub in list(subscriptions):
        try:
            # The VAPID "aud" claim is the scheme + host of the push endpoint
            endpoint_host = sub['endpoint'].split('/')[2]
            webpush(
                subscription_info=sub,
                data=payload_json,
                vapid_private_key=vapid_key,
                vapid_claims={
                    "sub": f"mailto:{contact_email}",
                    "aud": f"https://{endpoint_host}"
                }
            )
            success_count += 1

        except WebPushException as e:
            logger.warning(f"Failed to send browser notification to subscription: {e}")
            # 410 Gone / 404 Not Found mean the subscription is dead - drop it
            if e.response and e.response.status_code in [404, 410]:
                logger.info("Removing invalid browser notification subscription")
                try:
                    subscriptions.remove(sub)
                    datastore.needs_write = True
                except ValueError:
                    pass  # Already removed

        except Exception as e:
            logger.error(f"Unexpected error sending browser notification: {e}")

    return success_count, total_count
|
||||
|
||||
|
||||
def create_notification_payload(title, body, icon_path=None):
    """
    Build the standard dict sent as the web-push notification payload.

    Args:
        title: Notification title
        body: Notification body
        icon_path: Optional icon path (falls back to the site favicon)

    Returns:
        Dict with title/body/icon/badge and a millisecond timestamp
    """
    favicon = '/static/favicons/favicon-32x32.png'
    return {
        'title': title,
        'body': body,
        'icon': icon_path if icon_path else favicon,
        'badge': favicon,
        # Milliseconds since epoch, as the Notification API expects
        'timestamp': int(time.time() * 1000),
    }
|
||||
|
||||
|
||||
def get_vapid_config_from_datastore(datastore):
    """
    Read the VAPID key pair and contact email out of the datastore settings.

    Args:
        datastore: Datastore object (may be None)

    Returns:
        Tuple of (private_key, public_key, contact_email); all three are
        None when the datastore is missing or unreadable.
    """
    try:
        if not datastore:
            return None, None, None

        app_settings = datastore.data.get('settings', {}).get('application', {})
        vapid = app_settings.get('vapid', {})

        return (
            vapid.get('private_key'),
            vapid.get('public_key'),
            # Fallback contact address when none was configured
            vapid.get('contact_email', 'citizen@example.com'),
        )

    except Exception as e:
        logger.error(f"Failed to get VAPID config from datastore: {e}")
        return None, None, None
|
||||
|
||||
|
||||
|
||||
def get_browser_subscriptions(datastore):
    """
    Fetch the stored browser push subscriptions.

    Args:
        datastore: Datastore object (may be None)

    Returns:
        List of subscription dicts; empty list when unavailable.
    """
    try:
        if not datastore:
            return []

        application = datastore.data.get('settings', {}).get('application', {})
        return application.get('browser_subscriptions', [])

    except Exception as e:
        logger.error(f"Failed to get browser subscriptions: {e}")
        return []
|
||||
|
||||
|
||||
def save_browser_subscriptions(datastore, subscriptions):
    """
    Persist the browser push subscription list into the datastore settings.

    Creates the 'settings'/'application' levels on demand and flags the
    datastore for a disk write. A falsy datastore makes this a no-op.

    Args:
        datastore: Datastore object
        subscriptions: List of subscriptions to save
    """
    try:
        if not datastore:
            return

        # Build the nested settings structure on demand
        settings = datastore.data.setdefault('settings', {})
        application = settings.setdefault('application', {})

        application['browser_subscriptions'] = subscriptions
        datastore.needs_write = True

    except Exception as e:
        logger.error(f"Failed to save browser subscriptions: {e}")
|
||||
|
||||
|
||||
|
||||
|
||||
def create_error_response(message, sent_count=0, status_code=500):
    """
    Build the standard (body, status) pair API endpoints return on failure.

    Args:
        message: Error message
        sent_count: Number of notifications sent before failing (test endpoints)
        status_code: HTTP status code to return

    Returns:
        Tuple of (response_dict, status_code)
    """
    body = {
        'success': False,
        'message': message,
        'sent_count': sent_count,
    }
    return body, status_code
|
||||
|
||||
|
||||
def create_success_response(message, sent_count=None):
    """
    Build the standard dict API endpoints return on success.

    Args:
        message: Success message
        sent_count: Number of notifications sent; omitted from the response
            when None

    Returns:
        Response dict
    """
    if sent_count is None:
        return {'success': True, 'message': message}
    return {'success': True, 'message': message, 'sent_count': sent_count}
|
||||
@@ -1,6 +1,5 @@
|
||||
import json
|
||||
import re
|
||||
import time
|
||||
from urllib.parse import unquote_plus
|
||||
|
||||
import requests
|
||||
@@ -111,80 +110,3 @@ def apprise_http_custom_handler(
|
||||
except Exception as e:
|
||||
logger.error(f"Unexpected error occurred while sending custom notification to {url}: {e}")
|
||||
return False
|
||||
|
||||
|
||||
@notify(on="browser")
def apprise_browser_notification_handler(
    body: str,
    title: str,
    notify_type: str,
    meta: dict,
    *args,
    **kwargs,
) -> bool:
    """
    Apprise handler for browser:// URLs - pushes a web notification to every
    stored browser subscription. Anything after browser:// is ignored; a
    single default channel is used.
    """
    try:
        # Hard requirements for web-push delivery; a missing pywebpush is
        # caught by the outer ImportError handler below.
        from pywebpush import webpush, WebPushException
        from flask import current_app

        # Resolve the VAPID configuration from the running app's datastore
        try:
            datastore = current_app.config.get('DATASTORE')
            if not datastore:
                logger.error("No datastore available for browser notifications")
                return False

            vapid_settings = datastore.data.get('settings', {}).get('application', {}).get('vapid', {})
            vapid_private = vapid_settings.get('private_key')
            vapid_public = vapid_settings.get('public_key')
            vapid_contact = vapid_settings.get('contact_email', 'admin@changedetection.io')

            if not vapid_private or not vapid_public:
                logger.error("VAPID keys not configured for browser notifications")
                return False

        except Exception as e:
            logger.error(f"Failed to get VAPID configuration: {e}")
            return False

        subscriptions = datastore.data.get('settings', {}).get('application', {}).get('browser_subscriptions', [])

        if not subscriptions:
            # Nothing to deliver to - treated as success, not a failure
            logger.info("No browser subscriptions found")
            return True

        # The shared helpers do the actual delivery and list pruning
        try:
            from .browser_notification_helpers import create_notification_payload, send_push_notifications
        except ImportError:
            logger.error("Browser notification helpers not available")
            return False

        payload = create_notification_payload(title, body)

        sent_ok, attempted = send_push_notifications(
            subscriptions=subscriptions,
            notification_payload=payload,
            private_key=vapid_private,
            contact_email=vapid_contact,
            datastore=datastore
        )

        # send_push_notifications prunes dead subscriptions in place -
        # write the cleaned list back
        datastore.data['settings']['application']['browser_subscriptions'] = subscriptions

        logger.info(f"Sent browser notifications: {sent_ok}/{attempted} successful")
        return sent_ok > 0

    except ImportError:
        logger.error("pywebpush not available - cannot send browser notifications")
        return False
    except Exception as e:
        logger.error(f"Unexpected error in browser notification handler: {e}")
        return False
|
||||
|
||||
@@ -8,7 +8,7 @@ def process_notification(n_object, datastore):
|
||||
from changedetectionio.safe_jinja import render as jinja_render
|
||||
from . import default_notification_format_for_watch, default_notification_format, valid_notification_formats
|
||||
# be sure its registered
|
||||
from .apprise_plugin.custom_handlers import apprise_http_custom_handler, apprise_browser_notification_handler
|
||||
from .apprise_plugin.custom_handlers import apprise_http_custom_handler
|
||||
|
||||
now = time.time()
|
||||
if n_object.get('notification_timestamp'):
|
||||
|
||||
125
changedetectionio/processors/magic.py
Normal file
125
changedetectionio/processors/magic.py
Normal file
@@ -0,0 +1,125 @@
|
||||
"""
|
||||
Content Type Detection and Stream Classification
|
||||
|
||||
This module provides intelligent content-type detection for changedetection.io.
|
||||
It addresses the common problem where HTTP Content-Type headers are missing, incorrect,
|
||||
or too generic, which would otherwise cause the wrong processor to be used.
|
||||
|
||||
The guess_stream_type class combines:
|
||||
1. HTTP Content-Type headers (when available and reliable)
|
||||
2. Python-magic library for MIME detection (analyzing actual file content)
|
||||
3. Content-based pattern matching for text formats (HTML tags, XML declarations, etc.)
|
||||
|
||||
This multi-layered approach ensures accurate detection of RSS feeds, JSON, HTML, PDF,
|
||||
plain text, CSV, YAML, and XML formats - even when servers provide misleading headers.
|
||||
|
||||
Used by: processors/text_json_diff/processor.py and other content processors
|
||||
"""
|
||||
|
||||
# When to apply the 'cdata to real HTML' hack
RSS_XML_CONTENT_TYPES = [
    "application/rss+xml",
    "application/rdf+xml",
    "application/atom+xml",
    "text/rss+xml",  # rare, non-standard
    "application/x-rss+xml",  # legacy (older feed software)
    "application/x-atom+xml",  # legacy (older Atom)
]

# JSON Content-types
JSON_CONTENT_TYPES = [
    "application/activity+json",
    "application/feed+json",
    "application/json",
    "application/ld+json",
    "application/vnd.api+json",
]

# Generic XML Content-types (non-RSS/Atom)
XML_CONTENT_TYPES = [
    "text/xml",
    "application/xml",
]

# Tag fragments whose presence marks a document as HTML
HTML_PATTERNS = ['<!doctype html', '<html', '<head', '<body', '<script', '<iframe', '<div']
|
||||
|
||||
from loguru import logger
|
||||
|
||||
class guess_stream_type():
    """
    Sniff the effective content type of a fetched document by combining the
    HTTP Content-Type header, libmagic's opinion of the raw bytes, and
    pattern checks on the first 200 characters of the content.

    The verdict is exposed as a set of boolean flags (is_html, is_json, ...).
    """
    is_pdf = False
    is_json = False
    is_html = False
    is_plaintext = False
    is_rss = False
    is_csv = False
    is_xml = False  # Generic XML, not RSS/Atom
    is_yaml = False

    def __init__(self, http_content_header, content):
        import re

        effective_header = http_content_header
        sniff_window = content[:200].lower().strip()

        # Collapse whitespace between '<' and the tag name so '< html',
        # '<\nhtml' etc. still match the HTML patterns
        sniff_normalized = re.sub(r'<\s+', '<', sniff_window)

        # Ask libmagic for its opinion - note it sometimes reports
        # text/plain content as text/html!
        detected_mime = None
        try:
            import magic

            mime = magic.from_buffer(content[:200], mime=True)  # Send the original content
            logger.debug(f"Guessing mime type, original content_type '{http_content_header}', mime type detected '{mime}'")
            if mime and "/" in mime:
                detected_mime = mime
                if mime in ['application/octet-stream', 'application/x-empty', 'binary']:
                    # Generic/fallback answers from magic carry no signal
                    logger.debug(f"Ignoring generic mime type '{mime}' from magic library")
                elif mime not in ['text/html', 'text/plain']:
                    # Trust magic for non-text types immediately
                    effective_header = mime

        except Exception as e:
            logger.error(f"Error getting a more precise mime type from 'magic' library ({str(e)}), using content-based detection")

        # Content-based HTML detection - may override magic's text/plain
        looks_like_html = any(p in sniff_normalized for p in HTML_PATTERNS)

        # The server-supplied header wins first
        if 'text/plain' in http_content_header:
            self.is_plaintext = True
        if any(s in http_content_header for s in RSS_XML_CONTENT_TYPES):
            self.is_rss = True
        elif any(s in http_content_header for s in JSON_CONTENT_TYPES):
            self.is_json = True
        elif any(s in http_content_header for s in XML_CONTENT_TYPES):
            # Only mark as generic XML if not already detected as RSS
            if not self.is_rss:
                self.is_xml = True
        elif 'pdf' in effective_header:
            self.is_pdf = True
        ###
        elif looks_like_html or http_content_header == 'text/html':
            self.is_html = True
        elif detected_mime == 'text/plain':
            # Magic said plain text and no HTML patterns disagreed - trust it
            self.is_plaintext = True
            logger.debug(f"Trusting magic's text/plain result (no HTML patterns detected)")
        elif any(s in effective_header for s in JSON_CONTENT_TYPES):
            self.is_json = True
        elif '<rss' in sniff_normalized or '<feed' in sniff_normalized or any(s in effective_header for s in RSS_XML_CONTENT_TYPES):
            # magic will call a rss document 'xml'
            self.is_rss = True
        elif sniff_normalized.startswith('<?xml') or any(s in effective_header for s in XML_CONTENT_TYPES):
            # Generic XML that's not RSS/Atom (RSS/Atom checked above)
            self.is_xml = True
        elif '%pdf-1' in sniff_window:
            self.is_pdf = True
        elif http_content_header.startswith('text/'):
            self.is_plaintext = True
        # Only trust magic for 'text' if no other patterns matched
        elif 'text' in effective_header:
            self.is_plaintext = True
|
||||
|
||||
@@ -13,6 +13,8 @@ from changedetectionio import html_tools, content_fetchers
|
||||
from changedetectionio.blueprint.price_data_follower import PRICE_DATA_TRACK_ACCEPT, PRICE_DATA_TRACK_REJECT
|
||||
from loguru import logger
|
||||
|
||||
from changedetectionio.processors.magic import guess_stream_type
|
||||
|
||||
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
|
||||
|
||||
name = 'Webpage Text/HTML, JSON and PDF changes'
|
||||
@@ -20,6 +22,9 @@ description = 'Detects all text changes where possible'
|
||||
|
||||
json_filter_prefixes = ['json:', 'jq:', 'jqraw:']
|
||||
|
||||
# Assume it's this type if the server says nothing on content-type
|
||||
DEFAULT_WHEN_NO_CONTENT_TYPE_HEADER = 'text/html'
|
||||
|
||||
class FilterNotFoundInResponse(ValueError):
|
||||
def __init__(self, msg, screenshot=None, xpath_data=None):
|
||||
self.screenshot = screenshot
|
||||
@@ -32,353 +37,516 @@ class PDFToHTMLToolNotFound(ValueError):
|
||||
ValueError.__init__(self, msg)
|
||||
|
||||
|
||||
class FilterConfig:
    """Consolidates all filter and rule configurations from watch, tags, and global settings."""

    def __init__(self, watch, datastore):
        self.watch = watch
        self.datastore = datastore
        self.watch_uuid = watch.get('uuid')
        # Lazily-computed caches so the merge work runs at most once each
        self._include_filters_cache = None
        self._subtractive_selectors_cache = None

    def _get_merged_rules(self, attr, include_global=False):
        """Merge rules from watch, tags, and optionally global settings (deduplicated, order preserved)."""
        combined = list(dict.fromkeys(
            self.watch.get(attr, [])
            + self.datastore.get_tag_overrides_for_watch(uuid=self.watch_uuid, attr=attr)
        ))

        if include_global:
            combined = list(dict.fromkeys(
                combined + self.datastore.data['settings']['application'].get(f'global_{attr}', [])
            ))

        return combined

    @property
    def include_filters(self):
        if self._include_filters_cache is None:
            merged = self._get_merged_rules('include_filters')
            # Inject LD+JSON price tracker rule if enabled
            if self.watch.get('track_ldjson_price_data', '') == PRICE_DATA_TRACK_ACCEPT:
                merged += html_tools.LD_JSON_PRODUCT_OFFER_SELECTORS
            self._include_filters_cache = merged
        return self._include_filters_cache

    @property
    def subtractive_selectors(self):
        if self._subtractive_selectors_cache is None:
            from_tags = self.datastore.get_tag_overrides_for_watch(uuid=self.watch_uuid, attr='subtractive_selectors')
            from_watch = self.watch.get("subtractive_selectors", [])
            from_global = self.datastore.data["settings"]["application"].get("global_subtractive_selectors", [])
            # Tag-level selectors first, then watch, then global
            self._subtractive_selectors_cache = [*from_tags, *from_watch, *from_global]
        return self._subtractive_selectors_cache

    @property
    def extract_text(self):
        return self._get_merged_rules('extract_text')

    @property
    def ignore_text(self):
        return self._get_merged_rules('ignore_text', include_global=True)

    @property
    def trigger_text(self):
        return self._get_merged_rules('trigger_text')

    @property
    def text_should_not_be_present(self):
        return self._get_merged_rules('text_should_not_be_present')

    @property
    def has_include_filters(self):
        # Non-empty list whose first entry is not just whitespace
        return bool(self.include_filters) and bool(self.include_filters[0].strip())

    @property
    def has_subtractive_selectors(self):
        return bool(self.subtractive_selectors) and bool(self.subtractive_selectors[0].strip())
|
||||
|
||||
|
||||
class ContentTransformer:
    """Handles text transformations like trimming, sorting, and deduplication."""

    @staticmethod
    def trim_whitespace(text):
        """Strip leading/trailing whitespace from every line."""
        collapsed = text.replace("\n\n", "\n")
        # Generator keeps this a single pass without an intermediate list
        return '\n'.join(line.strip() for line in collapsed.splitlines())

    @staticmethod
    def remove_duplicate_lines(text):
        """Drop repeated lines, keeping the first occurrence of each (order preserved)."""
        collapsed = text.replace("\n\n", "\n")
        return '\n'.join(dict.fromkeys(collapsed.splitlines()))

    @staticmethod
    def sort_alphabetically(text):
        """Sort lines alphabetically, ignoring case."""
        # Remove double line feeds before sorting
        lines = text.replace("\n\n", "\n").splitlines()
        lines.sort(key=str.lower)
        return '\n'.join(lines)

    @staticmethod
    def extract_by_regex(text, regex_patterns):
        """Collect every match of the given patterns (perl-style /../ or plain text) from *text*."""
        # Accumulate into one list and join once at the end (avoids O(n^2) concatenation)
        pieces = []

        for pattern in regex_patterns:
            if re.search(PERL_STYLE_REGEX, pattern, re.IGNORECASE):
                # /.../flags syntax - translate to a python regex with options
                compiled = html_tools.perl_style_slash_enclosed_regex_to_options(pattern)
                for found in re.findall(compiled, text):
                    if type(found) is tuple:
                        # Capture groups come back as a tuple - flatten them
                        pieces.extend(found)
                        pieces.append('\n')
                    else:
                        pieces.append(found)
                        pieces.append('\n')
            else:
                # Plain text search (case-insensitive)
                for found in re.compile(re.escape(pattern), re.IGNORECASE).findall(text):
                    pieces.append(found)
                    pieces.append('\n')

        return ''.join(pieces) if pieces else ''
|
||||
|
||||
|
||||
class RuleEngine:
    """Evaluates blocking rules (triggers, conditions, text_should_not_be_present)."""

    @staticmethod
    def evaluate_trigger_text(content, trigger_patterns):
        """
        When trigger_text is configured, the change is blocked UNLESS one of
        the trigger patterns appears in the content.

        Returns True if blocked, False if allowed.
        """
        if not trigger_patterns:
            return False

        # strip_ignore_text returns the matching line numbers; any hit unblocks
        hits = html_tools.strip_ignore_text(
            content=str(content),
            wordlist=trigger_patterns,
            mode="line numbers"
        )
        return not bool(hits)

    @staticmethod
    def evaluate_text_should_not_be_present(content, patterns):
        """
        Block the change when any forbidden pattern is present in the content.

        Returns True if blocked, False if allowed.
        """
        if not patterns:
            return False

        hits = html_tools.strip_ignore_text(
            content=str(content),
            wordlist=patterns,
            mode="line numbers"
        )
        return bool(hits)

    @staticmethod
    def evaluate_conditions(watch, datastore, content):
        """
        Run the watch's custom conditions ruleset against the content.

        Returns True if blocked (conditions not met), False if allowed.
        """
        if not watch.get('conditions') or not watch.get('conditions_match_logic'):
            return False

        outcome = execute_ruleset_against_all_plugins(
            current_watch_uuid=watch.get('uuid'),
            application_datastruct=datastore.data,
            ephemeral_data={'text': content}
        )
        # Block if conditions not met
        return not outcome.get('result')
|
||||
|
||||
|
||||
class ContentProcessor:
    """Handles content preprocessing, filtering, and extraction."""

    def __init__(self, fetcher, watch, filter_config, datastore):
        self.fetcher = fetcher
        self.watch = watch
        self.filter_config = filter_config
        self.datastore = datastore

    def preprocess_rss(self, content):
        """Convert CDATA/comments in RSS to usable text."""
        return cdata_in_document_to_text(html_content=content)

    def preprocess_pdf(self, content, raw_content):
        """Render a PDF to HTML via the external pdftohtml tool, appending a checksum marker."""
        from shutil import which
        converter = os.getenv("PDF_TO_HTML_TOOL", "pdftohtml")
        if not which(converter):
            raise PDFToHTMLToolNotFound(
                f"Command-line `{converter}` tool was not found in system PATH, was it installed?"
            )

        import subprocess
        process = subprocess.Popen(
            [converter, '-stdout', '-', '-s', 'out.pdf', '-i'],
            stdout=subprocess.PIPE,
            stdin=subprocess.PIPE
        )
        process.stdin.write(raw_content)
        process.stdin.close()
        converted = process.stdout.read().decode('utf-8')
        process.wait(timeout=60)

        # Embed checksum + size so a changed binary (e.g. an image) is
        # detected even when the extracted text is identical
        marker = (
            f"<p>Added by changedetection.io: Document checksum - "
            f"{hashlib.md5(raw_content).hexdigest().upper()} "
            f"Filesize - {len(converted)} bytes</p>"
        )
        return converted.replace('</body>', marker + '</body>')

    def preprocess_json(self, content, has_filters):
        """Reformat JSON deterministically (sorted keys) to avoid false change alerts."""
        # Without filters, reformat the whole document first
        if not has_filters:
            content = html_tools.extract_json_as_string(content=content, json_filter="json:$")

        try:
            content = json.dumps(json.loads(content), sort_keys=True)
        except Exception:
            pass  # Might be malformed JSON, continue anyway

        return content

    def apply_include_filters(self, content, stream_content_type):
        """Apply CSS, XPath, or JSON filters to extract specific content."""
        collected = ""

        for rule in self.filter_config.include_filters:
            # XPath filters
            if rule[0] == '/' or rule.startswith('xpath:'):
                collected += html_tools.xpath_filter(
                    xpath_filter=rule.replace('xpath:', ''),
                    html_content=content,
                    append_pretty_line_formatting=not self.watch.is_source_type_url,
                    is_rss=stream_content_type.is_rss
                )
            # XPath1 filters (first match only)
            elif rule.startswith('xpath1:'):
                collected += html_tools.xpath1_filter(
                    xpath_filter=rule.replace('xpath1:', ''),
                    html_content=content,
                    append_pretty_line_formatting=not self.watch.is_source_type_url,
                    is_rss=stream_content_type.is_rss
                )
            # JSONPath / jq style filters
            elif any(rule.startswith(prefix) for prefix in json_filter_prefixes):
                collected += html_tools.extract_json_as_string(
                    content=content,
                    json_filter=rule
                )
            # CSS selectors, the default fallback
            else:
                collected += html_tools.include_filters(
                    include_filters=rule,
                    html_content=content,
                    append_pretty_line_formatting=not self.watch.is_source_type_url
                )

        # Every filter came back empty - surface it to the UI
        if not collected.strip():
            raise FilterNotFoundInResponse(
                msg=self.filter_config.include_filters,
                screenshot=self.fetcher.screenshot,
                xpath_data=self.fetcher.xpath_data
            )

        return collected

    def apply_subtractive_selectors(self, content):
        """Remove elements matching subtractive selectors."""
        return html_tools.element_removal(self.filter_config.subtractive_selectors, content)

    def extract_text_from_html(self, html_content, stream_content_type):
        """Convert HTML to plain text, honouring the global anchor-rendering setting."""
        render_anchors = self.datastore.data["settings"]["application"].get("render_anchor_tag_content", False)
        return html_tools.html_to_text(
            html_content=html_content,
            render_anchor_tag_content=render_anchors,
            is_rss=stream_content_type.is_rss
        )
|
||||
|
||||
|
||||
class ChecksumCalculator:
    """Calculates checksums with various options."""

    @staticmethod
    def calculate(text, ignore_whitespace=False):
        """Return the hex MD5 digest of *text*; optionally strip all whitespace first."""
        # Dropping whitespace makes pure-reflow changes hash identically
        payload = text.translate(TRANSLATE_WHITESPACE_TABLE) if ignore_whitespace else text
        return hashlib.md5(payload.encode('utf-8')).hexdigest()
|
||||
|
||||
|
||||
# Some common stuff here that can be moved to a base class
|
||||
# (set_proxy_from_list)
|
||||
class perform_site_check(difference_detection_processor):
|
||||
|
||||
def run_changedetection(self, watch):
|
||||
changed_detected = False
|
||||
html_content = ""
|
||||
screenshot = False # as bytes
|
||||
stripped_text_from_html = ""
|
||||
|
||||
if not watch:
|
||||
raise Exception("Watch no longer exists.")
|
||||
|
||||
# Initialize components
|
||||
filter_config = FilterConfig(watch, self.datastore)
|
||||
content_processor = ContentProcessor(self.fetcher, watch, filter_config, self.datastore)
|
||||
transformer = ContentTransformer()
|
||||
rule_engine = RuleEngine()
|
||||
|
||||
# Get content type and stream info
|
||||
ctype_header = self.fetcher.get_all_headers().get('content-type', DEFAULT_WHEN_NO_CONTENT_TYPE_HEADER).lower()
|
||||
stream_content_type = guess_stream_type(http_content_header=ctype_header, content=self.fetcher.content)
|
||||
|
||||
# Unset any existing notification error
|
||||
update_obj = {'last_notification_error': False, 'last_error': False}
|
||||
|
||||
url = watch.link
|
||||
|
||||
self.screenshot = self.fetcher.screenshot
|
||||
self.xpath_data = self.fetcher.xpath_data
|
||||
|
||||
# Track the content type
|
||||
update_obj['content_type'] = self.fetcher.get_all_headers().get('content-type', '').lower()
|
||||
|
||||
# Watches added automatically in the queue manager will skip if its the same checksum as the previous run
|
||||
# Saves a lot of CPU
|
||||
# Track the content type and checksum before filters
|
||||
update_obj['content_type'] = ctype_header
|
||||
update_obj['previous_md5_before_filters'] = hashlib.md5(self.fetcher.content.encode('utf-8')).hexdigest()
|
||||
|
||||
# Fetching complete, now filters
|
||||
# === CONTENT PREPROCESSING ===
|
||||
# Avoid creating unnecessary intermediate string copies by reassigning only when needed
|
||||
content = self.fetcher.content
|
||||
|
||||
# @note: I feel like the following should be in a more obvious chain system
|
||||
# - Check filter text
|
||||
# - Is the checksum different?
|
||||
# - Do we convert to JSON?
|
||||
# https://stackoverflow.com/questions/41817578/basic-method-chaining ?
|
||||
# return content().textfilter().jsonextract().checksumcompare() ?
|
||||
# RSS preprocessing
|
||||
if stream_content_type.is_rss:
|
||||
content = content_processor.preprocess_rss(content)
|
||||
|
||||
is_json = 'application/json' in self.fetcher.get_all_headers().get('content-type', '').lower()
|
||||
is_html = not is_json
|
||||
is_rss = False
|
||||
# PDF preprocessing
|
||||
if watch.is_pdf or stream_content_type.is_pdf:
|
||||
content = content_processor.preprocess_pdf(content, self.fetcher.raw_content)
|
||||
|
||||
ctype_header = self.fetcher.get_all_headers().get('content-type', '').lower()
|
||||
# Go into RSS preprocess for converting CDATA/comment to usable text
|
||||
if any(substring in ctype_header for substring in ['application/xml', 'application/rss', 'text/xml']):
|
||||
if '<rss' in self.fetcher.content[:100].lower():
|
||||
self.fetcher.content = cdata_in_document_to_text(html_content=self.fetcher.content)
|
||||
is_rss = True
|
||||
# JSON preprocessing
|
||||
if stream_content_type.is_json:
|
||||
content = content_processor.preprocess_json(content, filter_config.has_include_filters)
|
||||
|
||||
# source: support, basically treat it as plaintext
|
||||
# HTML obfuscation workarounds
|
||||
if stream_content_type.is_html:
|
||||
content = html_tools.workarounds_for_obfuscations(content)
|
||||
|
||||
# Check for LD+JSON price data (for HTML content)
|
||||
if stream_content_type.is_html:
|
||||
update_obj['has_ldjson_price_data'] = html_tools.has_ldjson_product_info(content)
|
||||
|
||||
# === FILTER APPLICATION ===
|
||||
# Start with content reference, avoid copy until modification
|
||||
html_content = content
|
||||
|
||||
# Apply include filters (CSS, XPath, JSON)
|
||||
if filter_config.has_include_filters:
|
||||
html_content = content_processor.apply_include_filters(content, stream_content_type)
|
||||
|
||||
# Apply subtractive selectors
|
||||
if filter_config.has_subtractive_selectors:
|
||||
html_content = content_processor.apply_subtractive_selectors(html_content)
|
||||
|
||||
# === TEXT EXTRACTION ===
|
||||
if watch.is_source_type_url:
|
||||
is_html = False
|
||||
is_json = False
|
||||
|
||||
inline_pdf = self.fetcher.get_all_headers().get('content-disposition', '') and '%PDF-1' in self.fetcher.content[:10]
|
||||
if watch.is_pdf or 'application/pdf' in self.fetcher.get_all_headers().get('content-type', '').lower() or inline_pdf:
|
||||
from shutil import which
|
||||
tool = os.getenv("PDF_TO_HTML_TOOL", "pdftohtml")
|
||||
if not which(tool):
|
||||
raise PDFToHTMLToolNotFound("Command-line `{}` tool was not found in system PATH, was it installed?".format(tool))
|
||||
|
||||
import subprocess
|
||||
proc = subprocess.Popen(
|
||||
[tool, '-stdout', '-', '-s', 'out.pdf', '-i'],
|
||||
stdout=subprocess.PIPE,
|
||||
stdin=subprocess.PIPE)
|
||||
proc.stdin.write(self.fetcher.raw_content)
|
||||
proc.stdin.close()
|
||||
self.fetcher.content = proc.stdout.read().decode('utf-8')
|
||||
proc.wait(timeout=60)
|
||||
|
||||
# Add a little metadata so we know if the file changes (like if an image changes, but the text is the same
|
||||
# @todo may cause problems with non-UTF8?
|
||||
metadata = "<p>Added by changedetection.io: Document checksum - {} Filesize - {} bytes</p>".format(
|
||||
hashlib.md5(self.fetcher.raw_content).hexdigest().upper(),
|
||||
len(self.fetcher.content))
|
||||
|
||||
self.fetcher.content = self.fetcher.content.replace('</body>', metadata + '</body>')
|
||||
|
||||
# Better would be if Watch.model could access the global data also
|
||||
# and then use getattr https://docs.python.org/3/reference/datamodel.html#object.__getitem__
|
||||
# https://realpython.com/inherit-python-dict/ instead of doing it procedurely
|
||||
include_filters_from_tags = self.datastore.get_tag_overrides_for_watch(uuid=watch.get('uuid'), attr='include_filters')
|
||||
|
||||
# 1845 - remove duplicated filters in both group and watch include filter
|
||||
include_filters_rule = list(dict.fromkeys(watch.get('include_filters', []) + include_filters_from_tags))
|
||||
|
||||
subtractive_selectors = [*self.datastore.get_tag_overrides_for_watch(uuid=watch.get('uuid'), attr='subtractive_selectors'),
|
||||
*watch.get("subtractive_selectors", []),
|
||||
*self.datastore.data["settings"]["application"].get("global_subtractive_selectors", [])
|
||||
]
|
||||
|
||||
# Inject a virtual LD+JSON price tracker rule
|
||||
if watch.get('track_ldjson_price_data', '') == PRICE_DATA_TRACK_ACCEPT:
|
||||
include_filters_rule += html_tools.LD_JSON_PRODUCT_OFFER_SELECTORS
|
||||
|
||||
has_filter_rule = len(include_filters_rule) and len(include_filters_rule[0].strip())
|
||||
has_subtractive_selectors = len(subtractive_selectors) and len(subtractive_selectors[0].strip())
|
||||
|
||||
if is_json and not has_filter_rule:
|
||||
include_filters_rule.append("json:$")
|
||||
has_filter_rule = True
|
||||
|
||||
if is_json:
|
||||
# Sort the JSON so we dont get false alerts when the content is just re-ordered
|
||||
try:
|
||||
self.fetcher.content = json.dumps(json.loads(self.fetcher.content), sort_keys=True)
|
||||
except Exception as e:
|
||||
# Might have just been a snippet, or otherwise bad JSON, continue
|
||||
pass
|
||||
|
||||
if has_filter_rule:
|
||||
for filter in include_filters_rule:
|
||||
if any(prefix in filter for prefix in json_filter_prefixes):
|
||||
stripped_text_from_html += html_tools.extract_json_as_string(content=self.fetcher.content, json_filter=filter)
|
||||
is_html = False
|
||||
|
||||
if is_html or watch.is_source_type_url:
|
||||
|
||||
# CSS Filter, extract the HTML that matches and feed that into the existing inscriptis::get_text
|
||||
self.fetcher.content = html_tools.workarounds_for_obfuscations(self.fetcher.content)
|
||||
html_content = self.fetcher.content
|
||||
|
||||
# If not JSON, and if it's not text/plain..
|
||||
if 'text/plain' in self.fetcher.get_all_headers().get('content-type', '').lower():
|
||||
# Don't run get_text or xpath/css filters on plaintext
|
||||
stripped_text_from_html = html_content
|
||||
# For source URLs, keep raw content
|
||||
stripped_text = html_content
|
||||
else:
|
||||
# Extract text from HTML/RSS content (not generic XML)
|
||||
if stream_content_type.is_html or stream_content_type.is_rss:
|
||||
stripped_text = content_processor.extract_text_from_html(html_content, stream_content_type)
|
||||
else:
|
||||
# Does it have some ld+json price data? used for easier monitoring
|
||||
update_obj['has_ldjson_price_data'] = html_tools.has_ldjson_product_info(self.fetcher.content)
|
||||
|
||||
# Then we assume HTML
|
||||
if has_filter_rule:
|
||||
html_content = ""
|
||||
|
||||
for filter_rule in include_filters_rule:
|
||||
# For HTML/XML we offer xpath as an option, just start a regular xPath "/.."
|
||||
if filter_rule[0] == '/' or filter_rule.startswith('xpath:'):
|
||||
html_content += html_tools.xpath_filter(xpath_filter=filter_rule.replace('xpath:', ''),
|
||||
html_content=self.fetcher.content,
|
||||
append_pretty_line_formatting=not watch.is_source_type_url,
|
||||
is_rss=is_rss)
|
||||
|
||||
elif filter_rule.startswith('xpath1:'):
|
||||
html_content += html_tools.xpath1_filter(xpath_filter=filter_rule.replace('xpath1:', ''),
|
||||
html_content=self.fetcher.content,
|
||||
append_pretty_line_formatting=not watch.is_source_type_url,
|
||||
is_rss=is_rss)
|
||||
else:
|
||||
html_content += html_tools.include_filters(include_filters=filter_rule,
|
||||
html_content=self.fetcher.content,
|
||||
append_pretty_line_formatting=not watch.is_source_type_url)
|
||||
|
||||
if not html_content.strip():
|
||||
raise FilterNotFoundInResponse(msg=include_filters_rule, screenshot=self.fetcher.screenshot, xpath_data=self.fetcher.xpath_data)
|
||||
|
||||
if has_subtractive_selectors:
|
||||
html_content = html_tools.element_removal(subtractive_selectors, html_content)
|
||||
|
||||
if watch.is_source_type_url:
|
||||
stripped_text_from_html = html_content
|
||||
else:
|
||||
# extract text
|
||||
do_anchor = self.datastore.data["settings"]["application"].get("render_anchor_tag_content", False)
|
||||
stripped_text_from_html = html_tools.html_to_text(html_content=html_content,
|
||||
render_anchor_tag_content=do_anchor,
|
||||
is_rss=is_rss) # 1874 activate the <title workaround hack
|
||||
stripped_text = html_content
|
||||
|
||||
# === TEXT TRANSFORMATIONS ===
|
||||
if watch.get('trim_text_whitespace'):
|
||||
stripped_text_from_html = '\n'.join(line.strip() for line in stripped_text_from_html.replace("\n\n", "\n").splitlines())
|
||||
stripped_text = transformer.trim_whitespace(stripped_text)
|
||||
|
||||
# Re #340 - return the content before the 'ignore text' was applied
|
||||
# Also used to calculate/show what was removed
|
||||
text_content_before_ignored_filter = stripped_text_from_html
|
||||
|
||||
# @todo whitespace coming from missing rtrim()?
|
||||
# stripped_text_from_html could be based on their preferences, replace the processed text with only that which they want to know about.
|
||||
# Rewrite's the processing text based on only what diff result they want to see
|
||||
# Save text before ignore filters (for diff calculation)
|
||||
text_content_before_ignored_filter = stripped_text
|
||||
|
||||
# === DIFF FILTERING ===
|
||||
# If user wants specific diff types (added/removed/replaced only)
|
||||
if watch.has_special_diff_filter_options_set() and len(watch.history.keys()):
|
||||
# Now the content comes from the diff-parser and not the returned HTTP traffic, so could be some differences
|
||||
from changedetectionio import diff
|
||||
# needs to not include (added) etc or it may get used twice
|
||||
# Replace the processed text with the preferred result
|
||||
rendered_diff = diff.render_diff(previous_version_file_contents=watch.get_last_fetched_text_before_filters(),
|
||||
newest_version_file_contents=stripped_text_from_html,
|
||||
include_equal=False, # not the same lines
|
||||
include_added=watch.get('filter_text_added', True),
|
||||
include_removed=watch.get('filter_text_removed', True),
|
||||
include_replaced=watch.get('filter_text_replaced', True),
|
||||
line_feed_sep="\n",
|
||||
include_change_type_prefix=False)
|
||||
stripped_text = self._apply_diff_filtering(watch, stripped_text, text_content_before_ignored_filter)
|
||||
if stripped_text is None:
|
||||
# No differences found, but content exists
|
||||
c = ChecksumCalculator.calculate(text_content_before_ignored_filter, ignore_whitespace=True)
|
||||
return False, {'previous_md5': c}, text_content_before_ignored_filter.encode('utf-8')
|
||||
|
||||
watch.save_last_text_fetched_before_filters(text_content_before_ignored_filter.encode('utf-8'))
|
||||
|
||||
if not rendered_diff and stripped_text_from_html:
|
||||
# We had some content, but no differences were found
|
||||
# Store our new file as the MD5 so it will trigger in the future
|
||||
c = hashlib.md5(stripped_text_from_html.translate(TRANSLATE_WHITESPACE_TABLE).encode('utf-8')).hexdigest()
|
||||
return False, {'previous_md5': c}, stripped_text_from_html.encode('utf-8')
|
||||
else:
|
||||
stripped_text_from_html = rendered_diff
|
||||
|
||||
# Treat pages with no renderable text content as a change? No by default
|
||||
# === EMPTY PAGE CHECK ===
|
||||
empty_pages_are_a_change = self.datastore.data['settings']['application'].get('empty_pages_are_a_change', False)
|
||||
if not is_json and not empty_pages_are_a_change and len(stripped_text_from_html.strip()) == 0:
|
||||
raise content_fetchers.exceptions.ReplyWithContentButNoText(url=url,
|
||||
status_code=self.fetcher.get_last_status_code(),
|
||||
screenshot=self.fetcher.screenshot,
|
||||
has_filters=has_filter_rule,
|
||||
html_content=html_content,
|
||||
xpath_data=self.fetcher.xpath_data
|
||||
)
|
||||
|
||||
# We rely on the actual text in the html output.. many sites have random script vars etc,
|
||||
# in the future we'll implement other mechanisms.
|
||||
if not stream_content_type.is_json and not empty_pages_are_a_change and len(stripped_text.strip()) == 0:
|
||||
raise content_fetchers.exceptions.ReplyWithContentButNoText(
|
||||
url=url,
|
||||
status_code=self.fetcher.get_last_status_code(),
|
||||
screenshot=self.fetcher.screenshot,
|
||||
has_filters=filter_config.has_include_filters,
|
||||
html_content=html_content,
|
||||
xpath_data=self.fetcher.xpath_data
|
||||
)
|
||||
|
||||
update_obj["last_check_status"] = self.fetcher.get_last_status_code()
|
||||
|
||||
# 615 Extract text by regex
|
||||
extract_text = list(dict.fromkeys(watch.get('extract_text', []) + self.datastore.get_tag_overrides_for_watch(uuid=watch.get('uuid'), attr='extract_text')))
|
||||
if len(extract_text) > 0:
|
||||
regex_matched_output = []
|
||||
for s_re in extract_text:
|
||||
# incase they specified something in '/.../x'
|
||||
if re.search(PERL_STYLE_REGEX, s_re, re.IGNORECASE):
|
||||
regex = html_tools.perl_style_slash_enclosed_regex_to_options(s_re)
|
||||
result = re.findall(regex, stripped_text_from_html)
|
||||
|
||||
for l in result:
|
||||
if type(l) is tuple:
|
||||
# @todo - some formatter option default (between groups)
|
||||
regex_matched_output += list(l) + ['\n']
|
||||
else:
|
||||
# @todo - some formatter option default (between each ungrouped result)
|
||||
regex_matched_output += [l] + ['\n']
|
||||
else:
|
||||
# Doesnt look like regex, just hunt for plaintext and return that which matches
|
||||
# `stripped_text_from_html` will be bytes, so we must encode s_re also to bytes
|
||||
r = re.compile(re.escape(s_re), re.IGNORECASE)
|
||||
res = r.findall(stripped_text_from_html)
|
||||
if res:
|
||||
for match in res:
|
||||
regex_matched_output += [match] + ['\n']
|
||||
|
||||
##########################################################
|
||||
stripped_text_from_html = ''
|
||||
|
||||
if regex_matched_output:
|
||||
# @todo some formatter for presentation?
|
||||
stripped_text_from_html = ''.join(regex_matched_output)
|
||||
# === REGEX EXTRACTION ===
|
||||
if filter_config.extract_text:
|
||||
extracted = transformer.extract_by_regex(stripped_text, filter_config.extract_text)
|
||||
stripped_text = extracted
|
||||
|
||||
# === MORE TEXT TRANSFORMATIONS ===
|
||||
if watch.get('remove_duplicate_lines'):
|
||||
stripped_text_from_html = '\n'.join(dict.fromkeys(line for line in stripped_text_from_html.replace("\n\n", "\n").splitlines()))
|
||||
|
||||
stripped_text = transformer.remove_duplicate_lines(stripped_text)
|
||||
|
||||
if watch.get('sort_text_alphabetically'):
|
||||
# Note: Because a <p>something</p> will add an extra line feed to signify the paragraph gap
|
||||
# we end up with 'Some text\n\n', sorting will add all those extra \n at the start, so we remove them here.
|
||||
stripped_text_from_html = stripped_text_from_html.replace("\n\n", "\n")
|
||||
stripped_text_from_html = '\n'.join(sorted(stripped_text_from_html.splitlines(), key=lambda x: x.lower()))
|
||||
stripped_text = transformer.sort_alphabetically(stripped_text)
|
||||
|
||||
### CALCULATE MD5
|
||||
# If there's text to ignore
|
||||
text_to_ignore = watch.get('ignore_text', []) + self.datastore.data['settings']['application'].get('global_ignore_text', [])
|
||||
text_to_ignore += self.datastore.get_tag_overrides_for_watch(uuid=watch.get('uuid'), attr='ignore_text')
|
||||
# === CHECKSUM CALCULATION ===
|
||||
text_for_checksuming = stripped_text
|
||||
|
||||
text_for_checksuming = stripped_text_from_html
|
||||
if text_to_ignore:
|
||||
text_for_checksuming = html_tools.strip_ignore_text(stripped_text_from_html, text_to_ignore)
|
||||
# Apply ignore_text for checksum calculation
|
||||
if filter_config.ignore_text:
|
||||
text_for_checksuming = html_tools.strip_ignore_text(stripped_text, filter_config.ignore_text)
|
||||
|
||||
# Re #133 - if we should strip whitespaces from triggering the change detected comparison
|
||||
if text_for_checksuming and self.datastore.data['settings']['application'].get('ignore_whitespace', False):
|
||||
fetched_md5 = hashlib.md5(text_for_checksuming.translate(TRANSLATE_WHITESPACE_TABLE).encode('utf-8')).hexdigest()
|
||||
else:
|
||||
fetched_md5 = hashlib.md5(text_for_checksuming.encode('utf-8')).hexdigest()
|
||||
# Optionally remove ignored lines from output
|
||||
strip_ignored_lines = watch.get('strip_ignored_lines')
|
||||
if strip_ignored_lines is None:
|
||||
strip_ignored_lines = self.datastore.data['settings']['application'].get('strip_ignored_lines')
|
||||
if strip_ignored_lines:
|
||||
stripped_text = text_for_checksuming
|
||||
|
||||
############ Blocking rules, after checksum #################
|
||||
# Calculate checksum
|
||||
ignore_whitespace = self.datastore.data['settings']['application'].get('ignore_whitespace', False)
|
||||
fetched_md5 = ChecksumCalculator.calculate(text_for_checksuming, ignore_whitespace=ignore_whitespace)
|
||||
|
||||
# === BLOCKING RULES EVALUATION ===
|
||||
blocked = False
|
||||
trigger_text = list(dict.fromkeys(watch.get('trigger_text', []) + self.datastore.get_tag_overrides_for_watch(uuid=watch.get('uuid'), attr='trigger_text')))
|
||||
if len(trigger_text):
|
||||
# Assume blocked
|
||||
|
||||
# Check trigger_text
|
||||
if rule_engine.evaluate_trigger_text(stripped_text, filter_config.trigger_text):
|
||||
blocked = True
|
||||
# Filter and trigger works the same, so reuse it
|
||||
# It should return the line numbers that match
|
||||
# Unblock flow if the trigger was found (some text remained after stripped what didnt match)
|
||||
result = html_tools.strip_ignore_text(content=str(stripped_text_from_html),
|
||||
wordlist=trigger_text,
|
||||
mode="line numbers")
|
||||
# Unblock if the trigger was found
|
||||
if result:
|
||||
blocked = False
|
||||
|
||||
text_should_not_be_present = list(dict.fromkeys(watch.get('text_should_not_be_present', []) + self.datastore.get_tag_overrides_for_watch(uuid=watch.get('uuid'), attr='text_should_not_be_present')))
|
||||
if len(text_should_not_be_present):
|
||||
# If anything matched, then we should block a change from happening
|
||||
result = html_tools.strip_ignore_text(content=str(stripped_text_from_html),
|
||||
wordlist=text_should_not_be_present,
|
||||
mode="line numbers")
|
||||
if result:
|
||||
blocked = True
|
||||
# Check text_should_not_be_present
|
||||
if rule_engine.evaluate_text_should_not_be_present(stripped_text, filter_config.text_should_not_be_present):
|
||||
blocked = True
|
||||
|
||||
# And check if 'conditions' will let this pass through
|
||||
if watch.get('conditions') and watch.get('conditions_match_logic'):
|
||||
conditions_result = execute_ruleset_against_all_plugins(current_watch_uuid=watch.get('uuid'),
|
||||
application_datastruct=self.datastore.data,
|
||||
ephemeral_data={
|
||||
'text': stripped_text_from_html
|
||||
}
|
||||
)
|
||||
# Check custom conditions
|
||||
if rule_engine.evaluate_conditions(watch, self.datastore, stripped_text):
|
||||
blocked = True
|
||||
|
||||
if not conditions_result.get('result'):
|
||||
# Conditions say "Condition not met" so we block it.
|
||||
blocked = True
|
||||
|
||||
# Looks like something changed, but did it match all the rules?
|
||||
# === CHANGE DETECTION ===
|
||||
if blocked:
|
||||
changed_detected = False
|
||||
else:
|
||||
# The main thing that all this at the moment comes down to :)
|
||||
# Compare checksums
|
||||
if watch.get('previous_md5') != fetched_md5:
|
||||
changed_detected = True
|
||||
|
||||
# Always record the new checksum
|
||||
update_obj["previous_md5"] = fetched_md5
|
||||
|
||||
# On the first run of a site, watch['previous_md5'] will be None, set it the current one.
|
||||
# On first run, initialize previous_md5
|
||||
if not watch.get('previous_md5'):
|
||||
watch['previous_md5'] = fetched_md5
|
||||
|
||||
logger.debug(f"Watch UUID {watch.get('uuid')} content check - Previous MD5: {watch.get('previous_md5')}, Fetched MD5 {fetched_md5}")
|
||||
|
||||
if changed_detected:
|
||||
if watch.get('check_unique_lines', False):
|
||||
ignore_whitespace = self.datastore.data['settings']['application'].get('ignore_whitespace')
|
||||
# === UNIQUE LINES CHECK ===
|
||||
if changed_detected and watch.get('check_unique_lines', False):
|
||||
has_unique_lines = watch.lines_contain_something_unique_compared_to_history(
|
||||
lines=stripped_text.splitlines(),
|
||||
ignore_whitespace=ignore_whitespace
|
||||
)
|
||||
|
||||
has_unique_lines = watch.lines_contain_something_unique_compared_to_history(
|
||||
lines=stripped_text_from_html.splitlines(),
|
||||
ignore_whitespace=ignore_whitespace
|
||||
)
|
||||
if not has_unique_lines:
|
||||
logger.debug(f"check_unique_lines: UUID {watch.get('uuid')} didnt have anything new setting change_detected=False")
|
||||
changed_detected = False
|
||||
else:
|
||||
logger.debug(f"check_unique_lines: UUID {watch.get('uuid')} had unique content")
|
||||
|
||||
# One or more lines? unsure?
|
||||
if not has_unique_lines:
|
||||
logger.debug(f"check_unique_lines: UUID {watch.get('uuid')} didnt have anything new setting change_detected=False")
|
||||
changed_detected = False
|
||||
else:
|
||||
logger.debug(f"check_unique_lines: UUID {watch.get('uuid')} had unique content")
|
||||
return changed_detected, update_obj, stripped_text
|
||||
|
||||
def _apply_diff_filtering(self, watch, stripped_text, text_before_filter):
|
||||
"""Apply user's diff filtering preferences (show only added/removed/replaced lines)."""
|
||||
from changedetectionio import diff
|
||||
|
||||
# stripped_text_from_html - Everything after filters and NO 'ignored' content
|
||||
return changed_detected, update_obj, stripped_text_from_html
|
||||
rendered_diff = diff.render_diff(
|
||||
previous_version_file_contents=watch.get_last_fetched_text_before_filters(),
|
||||
newest_version_file_contents=stripped_text,
|
||||
include_equal=False,
|
||||
include_added=watch.get('filter_text_added', True),
|
||||
include_removed=watch.get('filter_text_removed', True),
|
||||
include_replaced=watch.get('filter_text_replaced', True),
|
||||
line_feed_sep="\n",
|
||||
include_change_type_prefix=False
|
||||
)
|
||||
|
||||
watch.save_last_text_fetched_before_filters(text_before_filter.encode('utf-8'))
|
||||
|
||||
if not rendered_diff and stripped_text:
|
||||
# No differences found
|
||||
return None
|
||||
|
||||
return rendered_diff
|
||||
|
||||
@@ -243,14 +243,15 @@ def handle_watch_update(socketio, **kwargs):
|
||||
|
||||
general_stats = {
|
||||
'count_errors': errored_count,
|
||||
'has_unviewed': datastore.has_unviewed
|
||||
'unread_changes_count': datastore.unread_changes_count
|
||||
}
|
||||
|
||||
# Debug what's being emitted
|
||||
# logger.debug(f"Emitting 'watch_update' event for {watch.get('uuid')}, data: {watch_data}")
|
||||
|
||||
# Emit to all clients (no 'broadcast' parameter needed - it's the default behavior)
|
||||
socketio.emit("watch_update", {'watch': watch_data, 'general_stats': general_stats})
|
||||
socketio.emit("watch_update", {'watch': watch_data})
|
||||
socketio.emit("general_stats_update", general_stats)
|
||||
|
||||
# Log after successful emit - use watch_data['uuid'] to avoid variable shadowing issues
|
||||
logger.trace(f"Socket.IO: Emitted update for watch {watch_data['uuid']}, Checking now: {watch_data['checking_now']}")
|
||||
|
||||
@@ -9,7 +9,7 @@ set -x
|
||||
# SOCKS5 related - start simple Socks5 proxy server
|
||||
# SOCKSTEST=xyz should show in the logs of this service to confirm it fetched
|
||||
docker run --network changedet-network -d --hostname socks5proxy --rm --name socks5proxy -p 1080:1080 -e PROXY_USER=proxy_user123 -e PROXY_PASSWORD=proxy_pass123 serjs/go-socks5-proxy
|
||||
docker run --network changedet-network -d --hostname socks5proxy-noauth --rm -p 1081:1080 --name socks5proxy-noauth serjs/go-socks5-proxy
|
||||
docker run --network changedet-network -d --hostname socks5proxy-noauth --rm -p 1081:1080 --name socks5proxy-noauth -e REQUIRE_AUTH=false serjs/go-socks5-proxy
|
||||
|
||||
echo "---------------------------------- SOCKS5 -------------------"
|
||||
# SOCKS5 related - test from proxies.json
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "changedetection.io",
|
||||
"short_name": "changedetection",
|
||||
"name": "",
|
||||
"short_name": "",
|
||||
"icons": [
|
||||
{
|
||||
"src": "android-chrome-192x192.png",
|
||||
@@ -15,8 +15,5 @@
|
||||
],
|
||||
"theme_color": "#ffffff",
|
||||
"background_color": "#ffffff",
|
||||
"display": "standalone",
|
||||
"start_url": "/",
|
||||
"scope": "/",
|
||||
"gcm_sender_id": "103953800507"
|
||||
"display": "standalone"
|
||||
}
|
||||
|
||||
@@ -1,450 +0,0 @@
|
||||
/**
|
||||
* changedetection.io Browser Push Notifications
|
||||
* Handles service worker registration, push subscription management, and notification permissions
|
||||
*/
|
||||
|
||||
class BrowserNotifications {
|
||||
constructor() {
|
||||
this.serviceWorkerRegistration = null;
|
||||
this.vapidPublicKey = null;
|
||||
this.isSubscribed = false;
|
||||
this.init();
|
||||
}
|
||||
|
||||
async init() {
|
||||
if (!this.isSupported()) {
|
||||
console.warn('Push notifications are not supported in this browser');
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
// Get VAPID public key from server
|
||||
await this.fetchVapidPublicKey();
|
||||
|
||||
// Register service worker
|
||||
await this.registerServiceWorker();
|
||||
|
||||
// Check existing subscription state
|
||||
await this.checkExistingSubscription();
|
||||
|
||||
// Initialize UI elements
|
||||
this.initializeUI();
|
||||
|
||||
// Set up notification URL monitoring
|
||||
this.setupNotificationUrlMonitoring();
|
||||
|
||||
} catch (error) {
|
||||
console.error('Failed to initialize browser notifications:', error);
|
||||
}
|
||||
}
|
||||
|
||||
isSupported() {
|
||||
return 'serviceWorker' in navigator &&
|
||||
'PushManager' in window &&
|
||||
'Notification' in window;
|
||||
}
|
||||
|
||||
async fetchVapidPublicKey() {
|
||||
try {
|
||||
const response = await fetch('/browser-notifications-api/vapid-public-key');
|
||||
if (!response.ok) {
|
||||
throw new Error(`HTTP ${response.status}: ${response.statusText}`);
|
||||
}
|
||||
const data = await response.json();
|
||||
this.vapidPublicKey = data.publicKey;
|
||||
} catch (error) {
|
||||
console.error('Failed to fetch VAPID public key:', error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
async registerServiceWorker() {
|
||||
try {
|
||||
this.serviceWorkerRegistration = await navigator.serviceWorker.register('/service-worker.js', {
|
||||
scope: '/'
|
||||
});
|
||||
|
||||
console.log('Service Worker registered successfully');
|
||||
|
||||
// Wait for service worker to be ready
|
||||
await navigator.serviceWorker.ready;
|
||||
|
||||
} catch (error) {
|
||||
console.error('Service Worker registration failed:', error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
initializeUI() {
|
||||
// Bind event handlers to existing elements in the template
|
||||
this.bindEventHandlers();
|
||||
|
||||
// Update UI based on current permission state
|
||||
this.updatePermissionStatus();
|
||||
}
|
||||
|
||||
bindEventHandlers() {
|
||||
const enableBtn = document.querySelector('#enable-notifications-btn');
|
||||
const testBtn = document.querySelector('#test-notification-btn');
|
||||
|
||||
if (enableBtn) {
|
||||
enableBtn.addEventListener('click', () => this.requestNotificationPermission());
|
||||
}
|
||||
|
||||
if (testBtn) {
|
||||
testBtn.addEventListener('click', () => this.sendTestNotification());
|
||||
}
|
||||
}
|
||||
|
||||
setupNotificationUrlMonitoring() {
|
||||
// Monitor the notification URLs textarea for browser:// URLs
|
||||
const notificationUrlsField = document.querySelector('textarea[name*="notification_urls"]');
|
||||
if (notificationUrlsField) {
|
||||
const checkForBrowserUrls = async () => {
|
||||
const urls = notificationUrlsField.value || '';
|
||||
const hasBrowserUrls = /browser:\/\//.test(urls);
|
||||
|
||||
// If browser URLs are detected and we're not subscribed, auto-subscribe
|
||||
if (hasBrowserUrls && !this.isSubscribed && Notification.permission === 'default') {
|
||||
const shouldSubscribe = confirm('Browser notifications detected! Would you like to enable browser notifications now?');
|
||||
if (shouldSubscribe) {
|
||||
await this.requestNotificationPermission();
|
||||
}
|
||||
} else if (hasBrowserUrls && !this.isSubscribed && Notification.permission === 'granted') {
|
||||
// Permission already granted but not subscribed - auto-subscribe silently
|
||||
console.log('Auto-subscribing to browser notifications...');
|
||||
await this.subscribe();
|
||||
}
|
||||
};
|
||||
|
||||
// Check immediately
|
||||
checkForBrowserUrls();
|
||||
|
||||
// Check on input changes
|
||||
notificationUrlsField.addEventListener('input', checkForBrowserUrls);
|
||||
}
|
||||
}
|
||||
|
||||
async updatePermissionStatus() {
|
||||
const statusElement = document.querySelector('#permission-status');
|
||||
const enableBtn = document.querySelector('#enable-notifications-btn');
|
||||
const testBtn = document.querySelector('#test-notification-btn');
|
||||
|
||||
if (!statusElement) return;
|
||||
|
||||
const permission = Notification.permission;
|
||||
statusElement.textContent = permission;
|
||||
statusElement.className = `permission-${permission}`;
|
||||
|
||||
// Show/hide controls based on permission
|
||||
if (permission === 'default') {
|
||||
if (enableBtn) enableBtn.style.display = 'inline-block';
|
||||
if (testBtn) testBtn.style.display = 'none';
|
||||
} else if (permission === 'granted') {
|
||||
if (enableBtn) enableBtn.style.display = 'none';
|
||||
if (testBtn) testBtn.style.display = 'inline-block';
|
||||
} else { // denied
|
||||
if (enableBtn) enableBtn.style.display = 'none';
|
||||
if (testBtn) testBtn.style.display = 'none';
|
||||
}
|
||||
}
|
||||
|
||||
async requestNotificationPermission() {
|
||||
try {
|
||||
const permission = await Notification.requestPermission();
|
||||
this.updatePermissionStatus();
|
||||
|
||||
if (permission === 'granted') {
|
||||
console.log('Notification permission granted');
|
||||
// Automatically subscribe to browser notifications
|
||||
this.subscribe();
|
||||
} else {
|
||||
console.log('Notification permission denied');
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('Error requesting notification permission:', error);
|
||||
}
|
||||
}
|
||||
|
||||
async subscribe() {
|
||||
if (Notification.permission !== 'granted') {
|
||||
alert('Please enable notifications first');
|
||||
return;
|
||||
}
|
||||
|
||||
if (this.isSubscribed) {
|
||||
console.log('Already subscribed to browser notifications');
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
// First, try to clear any existing subscription with different keys
|
||||
await this.clearExistingSubscription();
|
||||
|
||||
// Create push subscription
|
||||
const subscription = await this.serviceWorkerRegistration.pushManager.subscribe({
|
||||
userVisibleOnly: true,
|
||||
applicationServerKey: this.urlBase64ToUint8Array(this.vapidPublicKey)
|
||||
});
|
||||
|
||||
// Send subscription to server
|
||||
const response = await fetch('/browser-notifications-api/subscribe', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
'X-CSRFToken': document.querySelector('input[name=csrf_token]')?.value
|
||||
},
|
||||
body: JSON.stringify({
|
||||
subscription: subscription.toJSON()
|
||||
})
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(`HTTP ${response.status}: ${response.statusText}`);
|
||||
}
|
||||
|
||||
// Store subscription status
|
||||
this.isSubscribed = true;
|
||||
|
||||
console.log('Successfully subscribed to browser notifications');
|
||||
|
||||
} catch (error) {
|
||||
console.error('Failed to subscribe to browser notifications:', error);
|
||||
|
||||
// Show user-friendly error message
|
||||
if (error.message.includes('different applicationServerKey')) {
|
||||
this.showSubscriptionConflictDialog(error);
|
||||
} else {
|
||||
alert(`Failed to subscribe: ${error.message}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async unsubscribe() {
|
||||
try {
|
||||
if (!this.isSubscribed) return;
|
||||
|
||||
// Get current subscription
|
||||
const subscription = await this.serviceWorkerRegistration.pushManager.getSubscription();
|
||||
if (!subscription) {
|
||||
this.isSubscribed = false;
|
||||
return;
|
||||
}
|
||||
|
||||
// Unsubscribe from server
|
||||
const response = await fetch('/browser-notifications-api/unsubscribe', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
'X-CSRFToken': document.querySelector('input[name=csrf_token]')?.value
|
||||
},
|
||||
body: JSON.stringify({
|
||||
subscription: subscription.toJSON()
|
||||
})
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
console.warn(`Server unsubscribe failed: ${response.status}`);
|
||||
}
|
||||
|
||||
// Unsubscribe locally
|
||||
await subscription.unsubscribe();
|
||||
|
||||
// Update status
|
||||
this.isSubscribed = false;
|
||||
|
||||
console.log('Unsubscribed from browser notifications');
|
||||
|
||||
} catch (error) {
|
||||
console.error('Failed to unsubscribe from browser notifications:', error);
|
||||
}
|
||||
}
|
||||
|
||||
async sendTestNotification() {
|
||||
try {
|
||||
// First, check if we're subscribed
|
||||
if (!this.isSubscribed) {
|
||||
const shouldSubscribe = confirm('You need to subscribe to browser notifications first. Subscribe now?');
|
||||
if (shouldSubscribe) {
|
||||
await this.subscribe();
|
||||
// Give a moment for subscription to complete
|
||||
await new Promise(resolve => setTimeout(resolve, 1000));
|
||||
} else {
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
const response = await fetch('/browser-notifications/test', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
'X-CSRFToken': document.querySelector('input[name=csrf_token]')?.value
|
||||
}
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
if (response.status === 404) {
|
||||
// No subscriptions found on server - try subscribing
|
||||
alert('No browser subscriptions found. Subscribing now...');
|
||||
await this.subscribe();
|
||||
return;
|
||||
}
|
||||
throw new Error(`HTTP ${response.status}: ${response.statusText}`);
|
||||
}
|
||||
|
||||
const result = await response.json();
|
||||
alert(result.message);
|
||||
console.log('Test notification result:', result);
|
||||
} catch (error) {
|
||||
console.error('Failed to send test notification:', error);
|
||||
alert(`Failed to send test notification: ${error.message}`);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
urlBase64ToUint8Array(base64String) {
|
||||
const padding = '='.repeat((4 - base64String.length % 4) % 4);
|
||||
const base64 = (base64String + padding)
|
||||
.replace(/-/g, '+')
|
||||
.replace(/_/g, '/');
|
||||
|
||||
const rawData = window.atob(base64);
|
||||
const outputArray = new Uint8Array(rawData.length);
|
||||
|
||||
for (let i = 0; i < rawData.length; ++i) {
|
||||
outputArray[i] = rawData.charCodeAt(i);
|
||||
}
|
||||
return outputArray;
|
||||
}
|
||||
|
||||
async checkExistingSubscription() {
|
||||
/**
|
||||
* Check if we already have a valid browser subscription
|
||||
* Updates this.isSubscribed based on actual browser state
|
||||
*/
|
||||
try {
|
||||
if (!this.serviceWorkerRegistration) {
|
||||
this.isSubscribed = false;
|
||||
return;
|
||||
}
|
||||
|
||||
const existingSubscription = await this.serviceWorkerRegistration.pushManager.getSubscription();
|
||||
|
||||
if (existingSubscription) {
|
||||
// We have a subscription - verify it's still valid and matches our VAPID key
|
||||
const subscriptionJson = existingSubscription.toJSON();
|
||||
|
||||
// Check if the endpoint is still active (basic validation)
|
||||
if (subscriptionJson.endpoint && subscriptionJson.keys) {
|
||||
console.log('Found existing valid subscription');
|
||||
this.isSubscribed = true;
|
||||
} else {
|
||||
console.log('Found invalid subscription, clearing...');
|
||||
await existingSubscription.unsubscribe();
|
||||
this.isSubscribed = false;
|
||||
}
|
||||
} else {
|
||||
console.log('No existing subscription found');
|
||||
this.isSubscribed = false;
|
||||
}
|
||||
} catch (error) {
|
||||
console.warn('Failed to check existing subscription:', error);
|
||||
this.isSubscribed = false;
|
||||
}
|
||||
}
|
||||
|
||||
async clearExistingSubscription() {
|
||||
/**
|
||||
* Clear any existing push subscription that might conflict with our VAPID keys
|
||||
*/
|
||||
try {
|
||||
const existingSubscription = await this.serviceWorkerRegistration.pushManager.getSubscription();
|
||||
|
||||
if (existingSubscription) {
|
||||
console.log('Found existing subscription, unsubscribing...');
|
||||
await existingSubscription.unsubscribe();
|
||||
console.log('Successfully cleared existing subscription');
|
||||
}
|
||||
} catch (error) {
|
||||
console.warn('Failed to clear existing subscription:', error);
|
||||
// Don't throw - this is just cleanup
|
||||
}
|
||||
}
|
||||
|
||||
showSubscriptionConflictDialog(error) {
|
||||
/**
|
||||
* Show user-friendly dialog for subscription conflicts
|
||||
*/
|
||||
const message = `Browser notifications are already set up for a different changedetection.io instance or with different settings.
|
||||
|
||||
To fix this:
|
||||
1. Clear your existing subscription
|
||||
2. Try subscribing again
|
||||
|
||||
Would you like to automatically clear the old subscription and retry?`;
|
||||
|
||||
if (confirm(message)) {
|
||||
this.clearExistingSubscription().then(() => {
|
||||
// Retry subscription after clearing
|
||||
setTimeout(() => {
|
||||
this.subscribe();
|
||||
}, 500);
|
||||
});
|
||||
} else {
|
||||
alert('To use browser notifications, please manually clear your browser notifications for this site in browser settings, then try again.');
|
||||
}
|
||||
}
|
||||
|
||||
async clearAllNotifications() {
|
||||
/**
|
||||
* Clear all browser notification subscriptions (admin function)
|
||||
*/
|
||||
try {
|
||||
// Call the server to clear ALL subscriptions from datastore
|
||||
const response = await fetch('/browser-notifications/clear', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
'X-CSRFToken': document.querySelector('input[name=csrf_token]')?.value
|
||||
}
|
||||
});
|
||||
|
||||
if (response.ok) {
|
||||
const result = await response.json();
|
||||
console.log('Server response:', result.message);
|
||||
|
||||
// Also clear the current browser's subscription if it exists
|
||||
const existingSubscription = await this.serviceWorkerRegistration.pushManager.getSubscription();
|
||||
if (existingSubscription) {
|
||||
await existingSubscription.unsubscribe();
|
||||
console.log('Cleared current browser subscription');
|
||||
}
|
||||
|
||||
// Update status
|
||||
this.isSubscribed = false;
|
||||
|
||||
alert(result.message + '. All browser notifications have been cleared.');
|
||||
} else {
|
||||
const error = await response.json();
|
||||
console.error('Server clear failed:', error.message);
|
||||
alert('Failed to clear server subscriptions: ' + error.message);
|
||||
}
|
||||
|
||||
} catch (error) {
|
||||
console.error('Failed to clear all notifications:', error);
|
||||
alert('Failed to clear notifications: ' + error.message);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
// Initialize when DOM is ready
|
||||
if (document.readyState === 'loading') {
|
||||
document.addEventListener('DOMContentLoaded', () => {
|
||||
window.browserNotifications = new BrowserNotifications();
|
||||
});
|
||||
} else {
|
||||
window.browserNotifications = new BrowserNotifications();
|
||||
}
|
||||
@@ -117,15 +117,16 @@ $(document).ready(function () {
|
||||
}
|
||||
})
|
||||
|
||||
socket.on('general_stats_update', function (general_stats) {
|
||||
// Tabs at bottom of list
|
||||
$('#watch-table-wrapper').toggleClass("has-unread-changes", general_stats.unread_changes_count !==0)
|
||||
$('#watch-table-wrapper').toggleClass("has-error", general_stats.count_errors !== 0)
|
||||
$('#post-list-with-errors a').text(`With errors (${ new Intl.NumberFormat(navigator.language).format(general_stats.count_errors) })`);
|
||||
$('#unread-tab-counter').text(new Intl.NumberFormat(navigator.language).format(general_stats.unread_changes_count));
|
||||
});
|
||||
|
||||
socket.on('watch_update', function (data) {
|
||||
const watch = data.watch;
|
||||
const general_stats = data.general_stats;
|
||||
|
||||
// Log the entire watch object for debugging
|
||||
console.log('!!! WATCH UPDATE EVENT RECEIVED !!!');
|
||||
console.log(`${watch.event_timestamp} - Watch update ${watch.uuid} - Checking now - ${watch.checking_now} - UUID in URL ${window.location.href.includes(watch.uuid)}`);
|
||||
console.log('Watch data:', watch);
|
||||
console.log('General stats:', general_stats);
|
||||
|
||||
// Updating watch table rows
|
||||
const $watchRow = $('tr[data-watch-uuid="' + watch.uuid + '"]');
|
||||
@@ -150,13 +151,6 @@ $(document).ready(function () {
|
||||
|
||||
console.log('Updated UI for watch:', watch.uuid);
|
||||
}
|
||||
|
||||
// Tabs at bottom of list
|
||||
$('#post-list-mark-views').toggleClass("has-unviewed", general_stats.has_unviewed);
|
||||
$('#post-list-unread').toggleClass("has-unviewed", general_stats.has_unviewed);
|
||||
$('#post-list-with-errors').toggleClass("has-error", general_stats.count_errors !== 0)
|
||||
$('#post-list-with-errors a').text(`With errors (${ general_stats.count_errors })`);
|
||||
|
||||
$('body').toggleClass('checking-now', watch.checking_now && window.location.href.includes(watch.uuid));
|
||||
});
|
||||
|
||||
|
||||
@@ -1,95 +0,0 @@
|
||||
// changedetection.io Service Worker for Browser Push Notifications
|
||||
|
||||
self.addEventListener('install', function(event) {
|
||||
console.log('Service Worker installing');
|
||||
self.skipWaiting();
|
||||
});
|
||||
|
||||
self.addEventListener('activate', function(event) {
|
||||
console.log('Service Worker activating');
|
||||
event.waitUntil(self.clients.claim());
|
||||
});
|
||||
|
||||
self.addEventListener('push', function(event) {
|
||||
console.log('Push message received', event);
|
||||
|
||||
let notificationData = {
|
||||
title: 'changedetection.io',
|
||||
body: 'A watched page has changed',
|
||||
icon: '/static/favicons/favicon-32x32.png',
|
||||
badge: '/static/favicons/favicon-32x32.png',
|
||||
tag: 'changedetection-notification',
|
||||
requireInteraction: false,
|
||||
timestamp: Date.now()
|
||||
};
|
||||
|
||||
// Parse push data if available
|
||||
if (event.data) {
|
||||
try {
|
||||
const pushData = event.data.json();
|
||||
notificationData = {
|
||||
...notificationData,
|
||||
...pushData
|
||||
};
|
||||
} catch (e) {
|
||||
console.warn('Failed to parse push data:', e);
|
||||
notificationData.body = event.data.text() || notificationData.body;
|
||||
}
|
||||
}
|
||||
|
||||
const promiseChain = self.registration.showNotification(
|
||||
notificationData.title,
|
||||
{
|
||||
body: notificationData.body,
|
||||
icon: notificationData.icon,
|
||||
badge: notificationData.badge,
|
||||
tag: notificationData.tag,
|
||||
requireInteraction: notificationData.requireInteraction,
|
||||
timestamp: notificationData.timestamp,
|
||||
data: {
|
||||
url: notificationData.url || '/',
|
||||
timestamp: notificationData.timestamp
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
event.waitUntil(promiseChain);
|
||||
});
|
||||
|
||||
self.addEventListener('notificationclick', function(event) {
|
||||
console.log('Notification clicked', event);
|
||||
|
||||
event.notification.close();
|
||||
|
||||
const targetUrl = event.notification.data?.url || '/';
|
||||
|
||||
event.waitUntil(
|
||||
clients.matchAll().then(function(clientList) {
|
||||
// Check if there's already a window/tab open with our app
|
||||
for (let i = 0; i < clientList.length; i++) {
|
||||
const client = clientList[i];
|
||||
if (client.url.includes(self.location.origin) && 'focus' in client) {
|
||||
client.navigate(targetUrl);
|
||||
return client.focus();
|
||||
}
|
||||
}
|
||||
// If no existing window, open a new one
|
||||
if (clients.openWindow) {
|
||||
return clients.openWindow(targetUrl);
|
||||
}
|
||||
})
|
||||
);
|
||||
});
|
||||
|
||||
self.addEventListener('notificationclose', function(event) {
|
||||
console.log('Notification closed', event);
|
||||
});
|
||||
|
||||
// Handle messages from the main thread
|
||||
self.addEventListener('message', function(event) {
|
||||
console.log('Service Worker received message:', event.data);
|
||||
|
||||
if (event.data && event.data.type === 'SKIP_WAITING') {
|
||||
self.skipWaiting();
|
||||
}
|
||||
});
|
||||
@@ -17,18 +17,6 @@ body.checking-now {
|
||||
position: fixed;
|
||||
}
|
||||
|
||||
#post-list-buttons {
|
||||
#post-list-with-errors.has-error {
|
||||
display: inline-block !important;
|
||||
}
|
||||
#post-list-mark-views.has-unviewed {
|
||||
display: inline-block !important;
|
||||
}
|
||||
#post-list-unread.has-unviewed {
|
||||
display: inline-block !important;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
@@ -127,5 +127,44 @@
|
||||
display: inline-block !important;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
||||
#watch-table-wrapper {
|
||||
/* general styling */
|
||||
#post-list-buttons {
|
||||
text-align: right;
|
||||
padding: 0px;
|
||||
margin: 0px;
|
||||
|
||||
li {
|
||||
display: inline-block;
|
||||
}
|
||||
|
||||
a {
|
||||
border-top-left-radius: initial;
|
||||
border-top-right-radius: initial;
|
||||
border-bottom-left-radius: 5px;
|
||||
border-bottom-right-radius: 5px;
|
||||
}
|
||||
}
|
||||
|
||||
/* post list dynamically on/off stuff */
|
||||
|
||||
&.has-error {
|
||||
#post-list-buttons {
|
||||
#post-list-with-errors {
|
||||
display: inline-block !important;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
&.has-unread-changes {
|
||||
#post-list-buttons {
|
||||
#post-list-unread, #post-list-mark-views, #post-list-unread {
|
||||
display: inline-block !important;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -34,7 +34,6 @@
|
||||
transition: all 0.2s ease;
|
||||
cursor: pointer;
|
||||
display: block;
|
||||
min-width: 60px;
|
||||
text-align: center;
|
||||
}
|
||||
|
||||
|
||||
@@ -203,24 +203,6 @@ code {
|
||||
}
|
||||
|
||||
|
||||
#post-list-buttons {
|
||||
text-align: right;
|
||||
padding: 0px;
|
||||
margin: 0px;
|
||||
|
||||
li {
|
||||
display: inline-block;
|
||||
}
|
||||
|
||||
a {
|
||||
border-top-left-radius: initial;
|
||||
border-top-right-radius: initial;
|
||||
border-bottom-left-radius: 5px;
|
||||
border-bottom-right-radius: 5px;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
body:after {
|
||||
content: "";
|
||||
background: linear-gradient(130deg, var(--color-background-gradient-first), var(--color-background-gradient-second) 41.07%, var(--color-background-gradient-third) 84.05%);
|
||||
|
||||
File diff suppressed because one or more lines are too long
@@ -140,28 +140,6 @@ class ChangeDetectionStore:
|
||||
secret = secrets.token_hex(16)
|
||||
self.__data['settings']['application']['api_access_token'] = secret
|
||||
|
||||
# Generate VAPID keys for browser push notifications
|
||||
if not self.__data['settings']['application']['vapid'].get('private_key'):
|
||||
try:
|
||||
from py_vapid import Vapid
|
||||
vapid = Vapid()
|
||||
vapid.generate_keys()
|
||||
# Convert bytes to strings for JSON serialization
|
||||
private_pem = vapid.private_pem()
|
||||
public_pem = vapid.public_pem()
|
||||
|
||||
self.__data['settings']['application']['vapid']['private_key'] = private_pem.decode() if isinstance(private_pem, bytes) else private_pem
|
||||
self.__data['settings']['application']['vapid']['public_key'] = public_pem.decode() if isinstance(public_pem, bytes) else public_pem
|
||||
|
||||
# Set default contact email if not present
|
||||
if not self.__data['settings']['application']['vapid'].get('contact_email'):
|
||||
self.__data['settings']['application']['vapid']['contact_email'] = 'citizen@example.com'
|
||||
logger.info("Generated new VAPID keys for browser push notifications")
|
||||
except ImportError:
|
||||
logger.warning("py_vapid not available - browser notifications will not work")
|
||||
except Exception as e:
|
||||
logger.warning(f"Failed to generate VAPID keys: {e}")
|
||||
|
||||
self.needs_write = True
|
||||
|
||||
# Finally start the thread that will manage periodic data saves to JSON
|
||||
@@ -224,14 +202,13 @@ class ChangeDetectionStore:
|
||||
return seconds
|
||||
|
||||
@property
|
||||
def has_unviewed(self):
|
||||
if not self.__data.get('watching'):
|
||||
return None
|
||||
|
||||
def unread_changes_count(self):
|
||||
unread_changes_count = 0
|
||||
for uuid, watch in self.__data['watching'].items():
|
||||
if watch.history_n >= 2 and watch.viewed == False:
|
||||
return True
|
||||
return False
|
||||
unread_changes_count += 1
|
||||
|
||||
return unread_changes_count
|
||||
|
||||
@property
|
||||
def data(self):
|
||||
|
||||
@@ -33,34 +33,6 @@
|
||||
<div id="notification-test-log" style="display: none;"><span class="pure-form-message-inline">Processing..</span></div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Browser Notifications -->
|
||||
<div id="browser-notification-section">
|
||||
<div class="pure-control-group">
|
||||
<label>Browser Notifications</label>
|
||||
<div class="pure-form-message-inline">
|
||||
<p><strong>Browser push notifications!</strong> Use <code>browser://</code> URLs in your notification settings to receive real-time push notifications even when this tab is closed.</p>
|
||||
<p><small><strong>Troubleshooting:</strong> If you get "different applicationServerKey" errors, click "Clear All Notifications" below and try again. This happens when switching between different changedetection.io instances.</small></p>
|
||||
<div id="browser-notification-controls" style="margin-top: 1em;">
|
||||
<div id="notification-permission-status">
|
||||
<p>Browser notifications: <span id="permission-status">checking...</span></p>
|
||||
</div>
|
||||
<div id="browser-notification-actions">
|
||||
<button type="button" id="enable-notifications-btn" class="pure-button button-secondary button-xsmall" style="display: none;">
|
||||
Enable Browser Notifications
|
||||
</button>
|
||||
<button type="button" id="test-notification-btn" class="pure-button button-secondary button-xsmall" style="display: none;">
|
||||
Send browser test notification
|
||||
</button>
|
||||
<button type="button" id="clear-notifications-btn" class="pure-button button-secondary button-xsmall" onclick="window.browserNotifications?.clearAllNotifications()" style="margin-left: 0.5em;">
|
||||
Clear All Notifications
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div id="notification-customisation" class="pure-control-group">
|
||||
<div class="pure-control-group">
|
||||
{{ render_field(form.notification_title, class="m-d notification-title", placeholder=settings_application['notification_title']) }}
|
||||
|
||||
@@ -35,7 +35,6 @@
|
||||
<script src="{{url_for('static_content', group='js', filename='jquery-3.6.0.min.js')}}"></script>
|
||||
<script src="{{url_for('static_content', group='js', filename='csrf.js')}}" defer></script>
|
||||
<script src="{{url_for('static_content', group='js', filename='feather-icons.min.js')}}" defer></script>
|
||||
<script src="{{url_for('static_content', group='js', filename='browser-notifications.js')}}" defer></script>
|
||||
{% if socket_io_enabled %}
|
||||
<script src="{{url_for('static_content', group='js', filename='socket.io.min.js')}}"></script>
|
||||
<script src="{{url_for('static_content', group='js', filename='realtime.js')}}" defer></script>
|
||||
|
||||
@@ -26,7 +26,10 @@
|
||||
<li>Changing this will affect the comparison checksum which may trigger an alert</li>
|
||||
</ul>
|
||||
</span>
|
||||
|
||||
<br><br>
|
||||
<div class="pure-control-group">
|
||||
{{ render_ternary_field(form.strip_ignored_lines) }}
|
||||
</div>
|
||||
</fieldset>
|
||||
|
||||
<fieldset>
|
||||
|
||||
@@ -29,16 +29,28 @@ def reportlog(pytestconfig):
|
||||
logger.remove(handler_id)
|
||||
|
||||
|
||||
def format_memory_human(bytes_value):
|
||||
"""Format memory in human-readable units (KB, MB, GB)"""
|
||||
if bytes_value < 1024:
|
||||
return f"{bytes_value} B"
|
||||
elif bytes_value < 1024 ** 2:
|
||||
return f"{bytes_value / 1024:.2f} KB"
|
||||
elif bytes_value < 1024 ** 3:
|
||||
return f"{bytes_value / (1024 ** 2):.2f} MB"
|
||||
else:
|
||||
return f"{bytes_value / (1024 ** 3):.2f} GB"
|
||||
|
||||
def track_memory(memory_usage, ):
|
||||
process = psutil.Process(os.getpid())
|
||||
while not memory_usage["stop"]:
|
||||
current_rss = process.memory_info().rss
|
||||
memory_usage["peak"] = max(memory_usage["peak"], current_rss)
|
||||
memory_usage["current"] = current_rss # Keep updating current
|
||||
time.sleep(0.01) # Adjust the sleep time as needed
|
||||
|
||||
@pytest.fixture(scope='function')
|
||||
def measure_memory_usage(request):
|
||||
memory_usage = {"peak": 0, "stop": False}
|
||||
memory_usage = {"peak": 0, "current": 0, "stop": False}
|
||||
tracker_thread = Thread(target=track_memory, args=(memory_usage,))
|
||||
tracker_thread.start()
|
||||
|
||||
@@ -47,16 +59,17 @@ def measure_memory_usage(request):
|
||||
memory_usage["stop"] = True
|
||||
tracker_thread.join()
|
||||
|
||||
# Note: ru_maxrss is in kilobytes on Unix-based systems
|
||||
max_memory_used = memory_usage["peak"] / 1024 # Convert to MB
|
||||
s = f"Peak memory used by the test {request.node.fspath} - '{request.node.name}': {max_memory_used:.2f} MB"
|
||||
# Note: psutil returns RSS memory in bytes
|
||||
peak_human = format_memory_human(memory_usage["peak"])
|
||||
|
||||
s = f"{time.time()} {request.node.fspath} - '{request.node.name}' - Peak memory: {peak_human}"
|
||||
logger.debug(s)
|
||||
|
||||
with open("test-memory.log", 'a') as f:
|
||||
f.write(f"{s}\n")
|
||||
|
||||
# Assert that the memory usage is less than 200MB
|
||||
# assert max_memory_used < 150, f"Memory usage exceeded 200MB: {max_memory_used:.2f} MB"
|
||||
# assert peak_memory_kb < 150 * 1024, f"Memory usage exceeded 150MB: {peak_human}"
|
||||
|
||||
|
||||
def cleanup(datastore_path):
|
||||
|
||||
@@ -29,13 +29,8 @@ def do_test(client, live_server, make_test_use_extra_browser=False):
|
||||
assert b"Settings updated." in res.data
|
||||
|
||||
# Add our URL to the import page
|
||||
res = client.post(
|
||||
url_for("imports.import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
assert b"1 Imported" in res.data
|
||||
uuid = client.application.config.get('DATASTORE').add_watch(url=test_url)
|
||||
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
|
||||
wait_for_all_checks(client)
|
||||
|
||||
if make_test_use_extra_browser:
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
import json
|
||||
import os
|
||||
from flask import url_for
|
||||
from changedetectionio.tests.util import live_server_setup, wait_for_all_checks, extract_UUID_from_client
|
||||
from changedetectionio.tests.util import live_server_setup, wait_for_all_checks, extract_UUID_from_client, delete_all_watches
|
||||
|
||||
|
||||
def set_response():
|
||||
@@ -98,6 +98,5 @@ def test_socks5(client, live_server, measure_memory_usage):
|
||||
)
|
||||
assert b"OK" in res.data
|
||||
|
||||
res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)
|
||||
assert b'Deleted' in res.data
|
||||
delete_all_watches(client)
|
||||
|
||||
|
||||
@@ -5,7 +5,7 @@ import re
|
||||
from flask import url_for
|
||||
from changedetectionio.tests.util import set_original_response, set_modified_response, set_more_modified_response, live_server_setup, \
|
||||
wait_for_all_checks, \
|
||||
set_longer_modified_response
|
||||
set_longer_modified_response, delete_all_watches
|
||||
from changedetectionio.tests.util import extract_UUID_from_client
|
||||
import logging
|
||||
import base64
|
||||
@@ -85,8 +85,7 @@ def test_check_notification_email_formats_default_HTML(client, live_server, meas
|
||||
assert '(added) So let\'s see what happens.\r\n' in msg # The plaintext part with \r\n
|
||||
assert 'Content-Type: text/html' in msg
|
||||
assert '(added) So let\'s see what happens.<br>' in msg # the html part
|
||||
res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)
|
||||
assert b'Deleted' in res.data
|
||||
delete_all_watches(client)
|
||||
|
||||
|
||||
def test_check_notification_email_formats_default_Text_override_HTML(client, live_server, measure_memory_usage):
|
||||
@@ -179,5 +178,4 @@ def test_check_notification_email_formats_default_Text_override_HTML(client, liv
|
||||
assert '<' not in msg
|
||||
assert 'Content-Type: text/html' in msg
|
||||
|
||||
res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)
|
||||
assert b'Deleted' in res.data
|
||||
delete_all_watches(client)
|
||||
|
||||
@@ -2,7 +2,7 @@ from .util import live_server_setup, wait_for_all_checks
|
||||
from flask import url_for
|
||||
import time
|
||||
|
||||
def test_check_access_control(app, client, live_server):
|
||||
def test_check_access_control(app, client, live_server, measure_memory_usage):
|
||||
# Still doesnt work, but this is closer.
|
||||
# live_server_setup(live_server) # Setup on conftest per function
|
||||
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
import os.path
|
||||
|
||||
from flask import url_for
|
||||
from .util import live_server_setup, wait_for_all_checks, wait_for_notification_endpoint_output
|
||||
from .util import live_server_setup, wait_for_all_checks, wait_for_notification_endpoint_output, delete_all_watches
|
||||
import time
|
||||
|
||||
def set_original(excluding=None, add_line=None):
|
||||
@@ -44,12 +44,8 @@ def test_check_removed_line_contains_trigger(client, live_server, measure_memory
|
||||
set_original()
|
||||
# Add our URL to the import page
|
||||
test_url = url_for('test_endpoint', _external=True)
|
||||
res = client.post(
|
||||
url_for("imports.import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
uuid = client.application.config.get('DATASTORE').add_watch(url=test_url)
|
||||
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
|
||||
|
||||
# Give the thread time to pick it up
|
||||
wait_for_all_checks(client)
|
||||
@@ -75,7 +71,7 @@ def test_check_removed_line_contains_trigger(client, live_server, measure_memory
|
||||
wait_for_all_checks(client)
|
||||
time.sleep(0.5)
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b'unviewed' not in res.data
|
||||
assert b'has-unread-changes' not in res.data
|
||||
|
||||
# The trigger line is REMOVED, this should trigger
|
||||
set_original(excluding='The golden line')
|
||||
@@ -84,7 +80,7 @@ def test_check_removed_line_contains_trigger(client, live_server, measure_memory
|
||||
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
|
||||
wait_for_all_checks(client)
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b'unviewed' in res.data
|
||||
assert b'has-unread-changes' in res.data
|
||||
|
||||
time.sleep(1)
|
||||
|
||||
@@ -98,23 +94,21 @@ def test_check_removed_line_contains_trigger(client, live_server, measure_memory
|
||||
wait_for_all_checks(client)
|
||||
time.sleep(1)
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b'unviewed' not in res.data
|
||||
assert b'has-unread-changes' not in res.data
|
||||
|
||||
# Remove it again, and we should get a trigger
|
||||
set_original(excluding='The golden line')
|
||||
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
|
||||
wait_for_all_checks(client)
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b'unviewed' in res.data
|
||||
assert b'has-unread-changes' in res.data
|
||||
|
||||
res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)
|
||||
assert b'Deleted' in res.data
|
||||
delete_all_watches(client)
|
||||
|
||||
|
||||
def test_check_add_line_contains_trigger(client, live_server, measure_memory_usage):
|
||||
|
||||
res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)
|
||||
assert b'Deleted' in res.data
|
||||
delete_all_watches(client)
|
||||
time.sleep(1)
|
||||
|
||||
# Give the endpoint time to spin up
|
||||
@@ -137,12 +131,8 @@ def test_check_add_line_contains_trigger(client, live_server, measure_memory_usa
|
||||
set_original()
|
||||
# Add our URL to the import page
|
||||
test_url = url_for('test_endpoint', _external=True)
|
||||
res = client.post(
|
||||
url_for("imports.import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
uuid = client.application.config.get('DATASTORE').add_watch(url=test_url)
|
||||
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
|
||||
|
||||
# Give the thread time to pick it up
|
||||
wait_for_all_checks(client)
|
||||
@@ -169,7 +159,7 @@ def test_check_add_line_contains_trigger(client, live_server, measure_memory_usa
|
||||
|
||||
wait_for_all_checks(client)
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b'unviewed' not in res.data
|
||||
assert b'has-unread-changes' not in res.data
|
||||
|
||||
# The trigger line is ADDED, this should trigger
|
||||
set_original(add_line='<p>Oh yes please</p>')
|
||||
@@ -177,7 +167,7 @@ def test_check_add_line_contains_trigger(client, live_server, measure_memory_usa
|
||||
wait_for_all_checks(client)
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
|
||||
assert b'unviewed' in res.data
|
||||
assert b'has-unread-changes' in res.data
|
||||
|
||||
# Takes a moment for apprise to fire
|
||||
wait_for_notification_endpoint_output()
|
||||
@@ -187,5 +177,4 @@ def test_check_add_line_contains_trigger(client, live_server, measure_memory_usa
|
||||
assert b'-Oh yes please' in response
|
||||
assert '网站监测 内容更新了'.encode('utf-8') in response
|
||||
|
||||
res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)
|
||||
assert b'Deleted' in res.data
|
||||
delete_all_watches(client)
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
|
||||
import time
|
||||
from flask import url_for
|
||||
from .util import live_server_setup, wait_for_all_checks
|
||||
from .util import live_server_setup, wait_for_all_checks, delete_all_watches
|
||||
|
||||
import json
|
||||
import uuid
|
||||
@@ -276,8 +276,7 @@ def test_access_denied(client, live_server, measure_memory_usage):
|
||||
assert res.status_code == 200
|
||||
|
||||
# Cleanup everything
|
||||
res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)
|
||||
assert b'Deleted' in res.data
|
||||
delete_all_watches(client)
|
||||
|
||||
res = client.post(
|
||||
url_for("settings.settings_page"),
|
||||
@@ -385,8 +384,7 @@ def test_api_watch_PUT_update(client, live_server, measure_memory_usage):
|
||||
assert b'Additional properties are not allowed' in res.data
|
||||
|
||||
# Cleanup everything
|
||||
res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)
|
||||
assert b'Deleted' in res.data
|
||||
delete_all_watches(client)
|
||||
|
||||
|
||||
def test_api_import(client, live_server, measure_memory_usage):
|
||||
|
||||
@@ -4,7 +4,7 @@ from flask import url_for
|
||||
from .util import live_server_setup
|
||||
import json
|
||||
|
||||
def test_api_notifications_crud(client, live_server):
|
||||
def test_api_notifications_crud(client, live_server, measure_memory_usage):
|
||||
# live_server_setup(live_server) # Setup on conftest per function
|
||||
api_key = live_server.app.config['DATASTORE'].data['settings']['application'].get('api_access_token')
|
||||
|
||||
|
||||
@@ -6,7 +6,7 @@ import time
|
||||
from .util import live_server_setup, wait_for_all_checks
|
||||
|
||||
|
||||
def test_api_search(client, live_server):
|
||||
def test_api_search(client, live_server, measure_memory_usage):
|
||||
# live_server_setup(live_server) # Setup on conftest per function
|
||||
api_key = live_server.app.config['DATASTORE'].data['settings']['application'].get('api_access_token')
|
||||
|
||||
|
||||
@@ -12,12 +12,8 @@ def test_basic_auth(client, live_server, measure_memory_usage):
|
||||
# This page will echo back any auth info
|
||||
test_url = url_for('test_basicauth_method', _external=True).replace("//","//myuser:mypass@")
|
||||
time.sleep(1)
|
||||
res = client.post(
|
||||
url_for("imports.import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
uuid = client.application.config.get('DATASTORE').add_watch(url=test_url)
|
||||
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
|
||||
wait_for_all_checks(client)
|
||||
time.sleep(1)
|
||||
# Check form validation
|
||||
|
||||
@@ -86,12 +86,8 @@ def test_check_ldjson_price_autodetect(client, live_server, measure_memory_usage
|
||||
|
||||
# Add our URL to the import page
|
||||
test_url = url_for('test_endpoint', _external=True)
|
||||
res = client.post(
|
||||
url_for("imports.import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
uuid = client.application.config.get('DATASTORE').add_watch(url=test_url)
|
||||
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
|
||||
wait_for_all_checks(client)
|
||||
|
||||
# Should get a notice that it's available
|
||||
@@ -129,12 +125,8 @@ def test_check_ldjson_price_autodetect(client, live_server, measure_memory_usage
|
||||
|
||||
# Add our URL to the import page
|
||||
test_url = url_for('test_endpoint', _external=True)
|
||||
res = client.post(
|
||||
url_for("imports.import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
uuid = client.application.config.get('DATASTORE').add_watch(url=test_url)
|
||||
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
|
||||
wait_for_all_checks(client)
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b'ldjson-price-track-offer' not in res.data
|
||||
@@ -146,12 +138,8 @@ def test_check_ldjson_price_autodetect(client, live_server, measure_memory_usage
|
||||
def _test_runner_check_bad_format_ignored(live_server, client, has_ldjson_price_data):
|
||||
|
||||
test_url = url_for('test_endpoint', _external=True)
|
||||
res = client.post(
|
||||
url_for("imports.import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
uuid = client.application.config.get('DATASTORE').add_watch(url=test_url)
|
||||
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
|
||||
wait_for_all_checks(client)
|
||||
|
||||
for k,v in client.application.config.get('DATASTORE').data['watching'].items():
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
import time
|
||||
from flask import url_for
|
||||
from .util import set_original_response, set_modified_response, live_server_setup, wait_for_all_checks, extract_rss_token_from_UI, \
|
||||
extract_UUID_from_client
|
||||
extract_UUID_from_client, delete_all_watches
|
||||
|
||||
sleep_time_for_fetch_thread = 3
|
||||
|
||||
@@ -38,9 +38,9 @@ def test_check_basic_change_detection_functionality(client, live_server, measure
|
||||
# Give the thread time to pick it up
|
||||
wait_for_all_checks(client)
|
||||
|
||||
# It should report nothing found (no new 'unviewed' class)
|
||||
# It should report nothing found (no new 'has-unread-changes' class)
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b'unviewed' not in res.data
|
||||
assert b'has-unread-changes' not in res.data
|
||||
assert b'test-endpoint' in res.data
|
||||
|
||||
# Default no password set, this stuff should be always available.
|
||||
@@ -74,9 +74,9 @@ def test_check_basic_change_detection_functionality(client, live_server, measure
|
||||
res = client.get(url_for("ui.ui_edit.watch_get_latest_html", uuid=uuid))
|
||||
assert b'which has this one new line' in res.data
|
||||
|
||||
# Now something should be ready, indicated by having a 'unviewed' class
|
||||
# Now something should be ready, indicated by having a 'has-unread-changes' class
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b'unviewed' in res.data
|
||||
assert b'has-unread-changes' in res.data
|
||||
|
||||
# #75, and it should be in the RSS feed
|
||||
rss_token = extract_rss_token_from_UI(client)
|
||||
@@ -90,7 +90,7 @@ def test_check_basic_change_detection_functionality(client, live_server, measure
|
||||
|
||||
assert expected_url.encode('utf-8') in res.data
|
||||
#
|
||||
# Following the 'diff' link, it should no longer display as 'unviewed' even after we recheck it a few times
|
||||
# Following the 'diff' link, it should no longer display as 'has-unread-changes' even after we recheck it a few times
|
||||
res = client.get(url_for("ui.ui_views.diff_history_page", uuid=uuid))
|
||||
assert b'selected=""' in res.data, "Confirm diff history page loaded"
|
||||
|
||||
@@ -111,12 +111,12 @@ def test_check_basic_change_detection_functionality(client, live_server, measure
|
||||
# Give the thread time to pick it up
|
||||
wait_for_all_checks(client)
|
||||
|
||||
# It should report nothing found (no new 'unviewed' class)
|
||||
# It should report nothing found (no new 'has-unread-changes' class)
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
|
||||
|
||||
assert b'unviewed' not in res.data
|
||||
assert b'class="has-unviewed' not in res.data
|
||||
assert b'has-unread-changes' not in res.data
|
||||
assert b'class="has-unread-changes' not in res.data
|
||||
assert b'head title' in res.data # Should be ON by default
|
||||
assert b'test-endpoint' in res.data
|
||||
|
||||
@@ -140,8 +140,8 @@ def test_check_basic_change_detection_functionality(client, live_server, measure
|
||||
wait_for_all_checks(client)
|
||||
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b'unviewed' in res.data
|
||||
assert b'class="has-unviewed' in res.data
|
||||
assert b'has-unread-changes' in res.data
|
||||
assert b'class="has-unread-changes' in res.data
|
||||
assert b'head title' not in res.data # should now be off
|
||||
|
||||
|
||||
@@ -151,8 +151,8 @@ def test_check_basic_change_detection_functionality(client, live_server, measure
|
||||
# hit the mark all viewed link
|
||||
res = client.get(url_for("ui.mark_all_viewed"), follow_redirects=True)
|
||||
|
||||
assert b'class="has-unviewed' not in res.data
|
||||
assert b'unviewed' not in res.data
|
||||
assert b'class="has-unread-changes' not in res.data
|
||||
assert b'has-unread-changes' not in res.data
|
||||
|
||||
# #2458 "clear history" should make the Watch object update its status correctly when the first snapshot lands again
|
||||
client.get(url_for("ui.clear_watch_history", uuid=uuid))
|
||||
@@ -163,5 +163,179 @@ def test_check_basic_change_detection_functionality(client, live_server, measure
|
||||
|
||||
#
|
||||
# Cleanup everything
|
||||
res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)
|
||||
assert b'Deleted' in res.data
|
||||
delete_all_watches(client)
|
||||
|
||||
def test_non_text_mime_or_downloads(client, live_server, measure_memory_usage):
|
||||
"""
|
||||
|
||||
https://github.com/dgtlmoon/changedetection.io/issues/3434
|
||||
I noticed that a watched website can be monitored fine as long as the server sends content-type: text/plain; charset=utf-8,
|
||||
but once the server sends content-type: application/octet-stream (which is usually done to force the browser to show the Download dialog),
|
||||
changedetection somehow ignores all line breaks and treats the document file as if everything is on one line.
|
||||
|
||||
WHAT THIS DOES - makes the system rely on 'magic' to determine what is it
|
||||
|
||||
:param client:
|
||||
:param live_server:
|
||||
:param measure_memory_usage:
|
||||
:return:
|
||||
"""
|
||||
with open("test-datastore/endpoint-content.txt", "w") as f:
|
||||
f.write("""some random text that should be split by line
|
||||
and not parsed with html_to_text
|
||||
this way we know that it correctly parsed as plain text
|
||||
\r\n
|
||||
ok\r\n
|
||||
got it\r\n
|
||||
""")
|
||||
|
||||
test_url = url_for('test_endpoint', content_type="application/octet-stream", _external=True)
|
||||
|
||||
# Add our URL to the import page
|
||||
uuid = client.application.config.get('DATASTORE').add_watch(url=test_url)
|
||||
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
|
||||
|
||||
wait_for_all_checks(client)
|
||||
|
||||
### check the front end
|
||||
res = client.get(
|
||||
url_for("ui.ui_views.preview_page", uuid="first"),
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"some random text that should be split by line\n" in res.data
|
||||
####
|
||||
|
||||
# Check the snapshot by API that it has linefeeds too
|
||||
watch_uuid = next(iter(live_server.app.config['DATASTORE'].data['watching']))
|
||||
api_key = live_server.app.config['DATASTORE'].data['settings']['application'].get('api_access_token')
|
||||
res = client.get(
|
||||
url_for("watchhistory", uuid=watch_uuid),
|
||||
headers={'x-api-key': api_key},
|
||||
)
|
||||
|
||||
# Fetch a snapshot by timestamp, check the right one was found
|
||||
res = client.get(
|
||||
url_for("watchsinglehistory", uuid=watch_uuid, timestamp=list(res.json.keys())[-1]),
|
||||
headers={'x-api-key': api_key},
|
||||
)
|
||||
assert b"some random text that should be split by line\n" in res.data
|
||||
|
||||
|
||||
delete_all_watches(client)
|
||||
|
||||
|
||||
def test_standard_text_plain(client, live_server, measure_memory_usage):
|
||||
"""
|
||||
|
||||
https://github.com/dgtlmoon/changedetection.io/issues/3434
|
||||
I noticed that a watched website can be monitored fine as long as the server sends content-type: text/plain; charset=utf-8,
|
||||
but once the server sends content-type: application/octet-stream (which is usually done to force the browser to show the Download dialog),
|
||||
changedetection somehow ignores all line breaks and treats the document file as if everything is on one line.
|
||||
|
||||
The real bug here can be that it will try to process plain-text as HTML, losing <etc>
|
||||
|
||||
:param client:
|
||||
:param live_server:
|
||||
:param measure_memory_usage:
|
||||
:return:
|
||||
"""
|
||||
with open("test-datastore/endpoint-content.txt", "w") as f:
|
||||
f.write("""some random text that should be split by line
|
||||
and not parsed with html_to_text
|
||||
<title>Even this title should stay because we are just plain text</title>
|
||||
this way we know that it correctly parsed as plain text
|
||||
\r\n
|
||||
ok\r\n
|
||||
got it\r\n
|
||||
""")
|
||||
|
||||
test_url = url_for('test_endpoint', content_type="text/plain", _external=True)
|
||||
|
||||
# Add our URL to the import page
|
||||
uuid = client.application.config.get('DATASTORE').add_watch(url=test_url)
|
||||
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
|
||||
|
||||
wait_for_all_checks(client)
|
||||
|
||||
### check the front end
|
||||
res = client.get(
|
||||
url_for("ui.ui_views.preview_page", uuid="first"),
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
assert b"some random text that should be split by line\n" in res.data
|
||||
####
|
||||
|
||||
# Check the snapshot by API that it has linefeeds too
|
||||
watch_uuid = next(iter(live_server.app.config['DATASTORE'].data['watching']))
|
||||
api_key = live_server.app.config['DATASTORE'].data['settings']['application'].get('api_access_token')
|
||||
res = client.get(
|
||||
url_for("watchhistory", uuid=watch_uuid),
|
||||
headers={'x-api-key': api_key},
|
||||
)
|
||||
|
||||
# Fetch a snapshot by timestamp, check the right one was found
|
||||
res = client.get(
|
||||
url_for("watchsinglehistory", uuid=watch_uuid, timestamp=list(res.json.keys())[-1]),
|
||||
headers={'x-api-key': api_key},
|
||||
)
|
||||
assert b"some random text that should be split by line\n" in res.data
|
||||
assert b"<title>Even this title should stay because we are just plain text</title>" in res.data
|
||||
|
||||
delete_all_watches(client)
|
||||
|
||||
# Server says its plaintext, we should always treat it as plaintext
|
||||
def test_plaintext_even_if_xml_content(client, live_server, measure_memory_usage):
|
||||
|
||||
with open("test-datastore/endpoint-content.txt", "w") as f:
|
||||
f.write("""<?xml version="1.0" encoding="utf-8"?>
|
||||
<resources xmlns:tools="http://schemas.android.com/tools">
|
||||
<!--Activity and fragment titles-->
|
||||
<string name="feed_update_receiver_name">Abonnementen bijwerken</string>
|
||||
</resources>
|
||||
""")
|
||||
|
||||
test_url = url_for('test_endpoint', content_type="text/plain", _external=True)
|
||||
|
||||
# Add our URL to the import page
|
||||
uuid = client.application.config.get('DATASTORE').add_watch(url=test_url)
|
||||
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
|
||||
|
||||
wait_for_all_checks(client)
|
||||
|
||||
res = client.get(
|
||||
url_for("ui.ui_views.preview_page", uuid="first"),
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
assert b'<string name="feed_update_receiver_name"' in res.data
|
||||
|
||||
delete_all_watches(client)
|
||||
|
||||
# Server says its plaintext, we should always treat it as plaintext, and then if they have a filter, try to apply that
|
||||
def test_plaintext_even_if_xml_content_and_can_apply_filters(client, live_server, measure_memory_usage):
|
||||
|
||||
|
||||
with open("test-datastore/endpoint-content.txt", "w") as f:
|
||||
f.write("""<?xml version="1.0" encoding="utf-8"?>
|
||||
<resources xmlns:tools="http://schemas.android.com/tools">
|
||||
<!--Activity and fragment titles-->
|
||||
<string name="feed_update_receiver_name">Abonnementen bijwerken</string>
|
||||
<foobar>ok man</foobar>
|
||||
</resources>
|
||||
""")
|
||||
|
||||
test_url=url_for('test_endpoint', content_type="text/plain", _external=True)
|
||||
uuid = client.application.config.get('DATASTORE').add_watch(url=test_url, extras={"include_filters": ['//string']})
|
||||
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
|
||||
wait_for_all_checks(client)
|
||||
|
||||
res = client.get(
|
||||
url_for("ui.ui_views.preview_page", uuid="first"),
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
assert b'<string name="feed_update_receiver_name"' in res.data
|
||||
assert b'<foobar' not in res.data
|
||||
|
||||
res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)
|
||||
@@ -58,6 +58,7 @@ def run_socketio_watch_update_test(client, live_server, password_mode=""):
|
||||
|
||||
has_watch_update = False
|
||||
has_unviewed_update = False
|
||||
got_general_stats_update = False
|
||||
|
||||
for i in range(10):
|
||||
# Get received events
|
||||
@@ -65,15 +66,11 @@ def run_socketio_watch_update_test(client, live_server, password_mode=""):
|
||||
|
||||
if received:
|
||||
logger.info(f"Received {len(received)} events after {i+1} seconds")
|
||||
|
||||
# Check for watch_update events with unviewed=True
|
||||
for event in received:
|
||||
if event['name'] == 'watch_update':
|
||||
has_watch_update = True
|
||||
if event['args'][0]['watch'].get('unviewed', False):
|
||||
has_unviewed_update = True
|
||||
logger.info("Found unviewed update event!")
|
||||
break
|
||||
if event['name'] == 'general_stats_update':
|
||||
got_general_stats_update = True
|
||||
|
||||
if has_unviewed_update:
|
||||
break
|
||||
@@ -92,7 +89,7 @@ def run_socketio_watch_update_test(client, live_server, password_mode=""):
|
||||
assert has_watch_update, "No watch_update events received"
|
||||
|
||||
# Verify we received an unviewed event
|
||||
assert has_unviewed_update, "No watch_update event with unviewed=True received"
|
||||
assert got_general_stats_update, "Got general stats update event"
|
||||
|
||||
# Alternatively, check directly if the watch in the datastore is marked as unviewed
|
||||
from changedetectionio.flask_app import app
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
|
||||
import time
|
||||
from flask import url_for
|
||||
from .util import live_server_setup, wait_for_all_checks
|
||||
from .util import live_server_setup, wait_for_all_checks, delete_all_watches
|
||||
from changedetectionio import html_tools
|
||||
|
||||
def set_original_ignore_response():
|
||||
@@ -70,12 +70,8 @@ def test_check_block_changedetection_text_NOT_present(client, live_server, measu
|
||||
|
||||
# Add our URL to the import page
|
||||
test_url = url_for('test_endpoint', _external=True)
|
||||
res = client.post(
|
||||
url_for("imports.import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
uuid = client.application.config.get('DATASTORE').add_watch(url=test_url)
|
||||
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
|
||||
|
||||
# Give the thread time to pick it up
|
||||
wait_for_all_checks(client)
|
||||
@@ -107,9 +103,9 @@ def test_check_block_changedetection_text_NOT_present(client, live_server, measu
|
||||
# Give the thread time to pick it up
|
||||
wait_for_all_checks(client)
|
||||
|
||||
# It should report nothing found (no new 'unviewed' class)
|
||||
# It should report nothing found (no new 'has-unread-changes' class)
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b'unviewed' not in res.data
|
||||
assert b'has-unread-changes' not in res.data
|
||||
assert b'/test-endpoint' in res.data
|
||||
|
||||
# The page changed, BUT the text is still there, just the rest of it changes, we should not see a change
|
||||
@@ -120,9 +116,9 @@ def test_check_block_changedetection_text_NOT_present(client, live_server, measu
|
||||
# Give the thread time to pick it up
|
||||
wait_for_all_checks(client)
|
||||
|
||||
# It should report nothing found (no new 'unviewed' class)
|
||||
# It should report nothing found (no new 'has-unread-changes' class)
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b'unviewed' not in res.data
|
||||
assert b'has-unread-changes' not in res.data
|
||||
assert b'/test-endpoint' in res.data
|
||||
|
||||
# 2548
|
||||
@@ -131,7 +127,7 @@ def test_check_block_changedetection_text_NOT_present(client, live_server, measu
|
||||
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
|
||||
wait_for_all_checks(client)
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b'unviewed' not in res.data
|
||||
assert b'has-unread-changes' not in res.data
|
||||
|
||||
|
||||
# Now we set a change where the text is gone AND its different content, it should now trigger
|
||||
@@ -139,10 +135,9 @@ def test_check_block_changedetection_text_NOT_present(client, live_server, measu
|
||||
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
|
||||
wait_for_all_checks(client)
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b'unviewed' in res.data
|
||||
assert b'has-unread-changes' in res.data
|
||||
|
||||
|
||||
|
||||
|
||||
res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)
|
||||
assert b'Deleted' in res.data
|
||||
delete_all_watches(client)
|
||||
|
||||
@@ -14,12 +14,8 @@ def test_clone_functionality(client, live_server, measure_memory_usage):
|
||||
test_url = url_for('test_endpoint', _external=True)
|
||||
|
||||
# Add our URL to the import page
|
||||
res = client.post(
|
||||
url_for("imports.import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
uuid = client.application.config.get('DATASTORE').add_watch(url=test_url)
|
||||
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
|
||||
wait_for_all_checks(client)
|
||||
|
||||
# So that we can be sure the same history doesnt carry over
|
||||
|
||||
@@ -3,7 +3,7 @@ import json
|
||||
import time
|
||||
|
||||
from flask import url_for
|
||||
from .util import live_server_setup, wait_for_all_checks
|
||||
from .util import live_server_setup, wait_for_all_checks, delete_all_watches
|
||||
from ..model import CONDITIONS_MATCH_LOGIC_DEFAULT
|
||||
|
||||
|
||||
@@ -47,11 +47,11 @@ def set_number_out_of_range_response(number="150"):
|
||||
f.write(test_return_data)
|
||||
|
||||
|
||||
# def test_setup(client, live_server):
|
||||
# def test_setup(client, live_server, measure_memory_usage):
|
||||
"""Test that both text and number conditions work together with AND logic."""
|
||||
# live_server_setup(live_server) # Setup on conftest per function
|
||||
|
||||
def test_conditions_with_text_and_number(client, live_server):
|
||||
def test_conditions_with_text_and_number(client, live_server, measure_memory_usage):
|
||||
"""Test that both text and number conditions work together with AND logic."""
|
||||
|
||||
set_original_response("50")
|
||||
@@ -60,12 +60,8 @@ def test_conditions_with_text_and_number(client, live_server):
|
||||
test_url = url_for('test_endpoint', _external=True)
|
||||
|
||||
# Add our URL to the import page
|
||||
res = client.post(
|
||||
url_for("imports.import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
uuid = client.application.config.get('DATASTORE').add_watch(url=test_url)
|
||||
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
|
||||
wait_for_all_checks(client)
|
||||
|
||||
# Configure the watch with two conditions connected with AND:
|
||||
@@ -125,7 +121,7 @@ def test_conditions_with_text_and_number(client, live_server):
|
||||
time.sleep(2)
|
||||
# 75 is > 20 and < 100 and contains "5"
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b'unviewed' in res.data
|
||||
assert b'has-unread-changes' in res.data
|
||||
|
||||
|
||||
# Case 2: Change with one condition violated
|
||||
@@ -141,25 +137,20 @@ def test_conditions_with_text_and_number(client, live_server):
|
||||
|
||||
# Should NOT be marked as having changes since not all conditions are met
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b'unviewed' not in res.data
|
||||
assert b'has-unread-changes' not in res.data
|
||||
|
||||
res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)
|
||||
assert b'Deleted' in res.data
|
||||
delete_all_watches(client)
|
||||
|
||||
# The 'validate' button next to each rule row
|
||||
def test_condition_validate_rule_row(client, live_server):
|
||||
def test_condition_validate_rule_row(client, live_server, measure_memory_usage):
|
||||
|
||||
set_original_response("50")
|
||||
|
||||
test_url = url_for('test_endpoint', _external=True)
|
||||
|
||||
# Add our URL to the import page
|
||||
res = client.post(
|
||||
url_for("imports.import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
uuid = client.application.config.get('DATASTORE').add_watch(url=test_url)
|
||||
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
|
||||
wait_for_all_checks(client)
|
||||
|
||||
uuid = next(iter(live_server.app.config['DATASTORE'].data['watching']))
|
||||
@@ -230,12 +221,8 @@ def test_wordcount_conditions_plugin(client, live_server, measure_memory_usage):
|
||||
|
||||
# Add our URL to the import page
|
||||
test_url = url_for('test_endpoint', _external=True)
|
||||
res = client.post(
|
||||
url_for("imports.import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
uuid = client.application.config.get('DATASTORE').add_watch(url=test_url)
|
||||
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
|
||||
|
||||
# Give the thread time to pick it up
|
||||
wait_for_all_checks(client)
|
||||
@@ -299,7 +286,7 @@ def test_lev_conditions_plugin(client, live_server, measure_memory_usage):
|
||||
|
||||
wait_for_all_checks(client)
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b'unviewed' not in res.data
|
||||
assert b'has-unread-changes' not in res.data
|
||||
|
||||
# Check the content saved initially, even tho a condition was set - this is the first snapshot so shouldnt be affected by conditions
|
||||
res = client.get(
|
||||
@@ -326,7 +313,7 @@ def test_lev_conditions_plugin(client, live_server, measure_memory_usage):
|
||||
wait_for_all_checks(client)
|
||||
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b'unviewed' not in res.data #because this will be like 0.90 not 0.8 threshold
|
||||
assert b'has-unread-changes' not in res.data #because this will be like 0.90 not 0.8 threshold
|
||||
|
||||
############### Now change it a MORE THAN 50%
|
||||
test_return_data = """<html>
|
||||
@@ -345,7 +332,7 @@ def test_lev_conditions_plugin(client, live_server, measure_memory_usage):
|
||||
assert b'Queued 1 watch for rechecking.' in res.data
|
||||
wait_for_all_checks(client)
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b'unviewed' in res.data
|
||||
assert b'has-unread-changes' in res.data
|
||||
# cleanup for the next
|
||||
client.get(
|
||||
url_for("ui.form_delete", uuid="all"),
|
||||
|
||||
@@ -81,12 +81,8 @@ def test_check_markup_include_filters_restriction(client, live_server, measure_m
|
||||
|
||||
# Add our URL to the import page
|
||||
test_url = url_for('test_endpoint', _external=True)
|
||||
res = client.post(
|
||||
url_for("imports.import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
uuid = client.application.config.get('DATASTORE').add_watch(url=test_url)
|
||||
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
|
||||
|
||||
# Give the thread time to pick it up
|
||||
time.sleep(sleep_time_for_fetch_thread)
|
||||
@@ -116,10 +112,10 @@ def test_check_markup_include_filters_restriction(client, live_server, measure_m
|
||||
# Give the thread time to pick it up
|
||||
time.sleep(sleep_time_for_fetch_thread)
|
||||
|
||||
# It should have 'unviewed' still
|
||||
# It should have 'has-unread-changes' still
|
||||
# Because it should be looking at only that 'sametext' id
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b'unviewed' in res.data
|
||||
assert b'has-unread-changes' in res.data
|
||||
|
||||
|
||||
# Tests the whole stack works with the CSS Filter
|
||||
@@ -138,12 +134,8 @@ def test_check_multiple_filters(client, live_server, measure_memory_usage):
|
||||
|
||||
# Add our URL to the import page
|
||||
test_url = url_for('test_endpoint', _external=True)
|
||||
res = client.post(
|
||||
url_for("imports.import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
uuid = client.application.config.get('DATASTORE').add_watch(url=test_url)
|
||||
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
|
||||
wait_for_all_checks(client)
|
||||
|
||||
# Goto the edit page, add our ignore text
|
||||
@@ -193,12 +185,8 @@ def test_filter_is_empty_help_suggestion(client, live_server, measure_memory_usa
|
||||
|
||||
# Add our URL to the import page
|
||||
test_url = url_for('test_endpoint', _external=True)
|
||||
res = client.post(
|
||||
url_for("imports.import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
uuid = client.application.config.get('DATASTORE').add_watch(url=test_url)
|
||||
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
|
||||
wait_for_all_checks(client)
|
||||
|
||||
# Goto the edit page, add our ignore text
|
||||
|
||||
@@ -5,7 +5,7 @@ import time
|
||||
from flask import url_for
|
||||
|
||||
from ..html_tools import *
|
||||
from .util import live_server_setup, wait_for_all_checks
|
||||
from .util import live_server_setup, wait_for_all_checks, delete_all_watches
|
||||
|
||||
|
||||
|
||||
@@ -190,7 +190,7 @@ def test_element_removal_full(client, live_server, measure_memory_usage):
|
||||
|
||||
wait_for_all_checks(client)
|
||||
|
||||
# so that we set the state to 'unviewed' after all the edits
|
||||
# so that we set the state to 'has-unread-changes' after all the edits
|
||||
client.get(url_for("ui.ui_views.diff_history_page", uuid="first"))
|
||||
|
||||
# Make a change to header/footer/nav
|
||||
@@ -209,48 +209,32 @@ def test_element_removal_full(client, live_server, measure_memory_usage):
|
||||
|
||||
# Re #2752
|
||||
def test_element_removal_nth_offset_no_shift(client, live_server, measure_memory_usage):
|
||||
|
||||
|
||||
set_response_with_multiple_index()
|
||||
subtractive_selectors_data = ["""
|
||||
body > table > tr:nth-child(1) > th:nth-child(2)
|
||||
subtractive_selectors_data = [
|
||||
### css style ###
|
||||
"""body > table > tr:nth-child(1) > th:nth-child(2)
|
||||
body > table > tr:nth-child(2) > td:nth-child(2)
|
||||
body > table > tr:nth-child(3) > td:nth-child(2)
|
||||
body > table > tr:nth-child(1) > th:nth-child(3)
|
||||
body > table > tr:nth-child(2) > td:nth-child(3)
|
||||
body > table > tr:nth-child(3) > td:nth-child(3)""",
|
||||
### second type, xpath ###
|
||||
"""//body/table/tr[1]/th[2]
|
||||
//body/table/tr[2]/td[2]
|
||||
//body/table/tr[3]/td[2]
|
||||
//body/table/tr[1]/th[3]
|
||||
//body/table/tr[2]/td[3]
|
||||
//body/table/tr[3]/td[3]"""]
|
||||
|
||||
test_url = url_for("test_endpoint", _external=True)
|
||||
|
||||
for selector_list in subtractive_selectors_data:
|
||||
|
||||
res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)
|
||||
assert b'Deleted' in res.data
|
||||
delete_all_watches(client)
|
||||
|
||||
# Add our URL to the import page
|
||||
test_url = url_for("test_endpoint", _external=True)
|
||||
res = client.post(
|
||||
url_for("imports.import_page"), data={"urls": test_url}, follow_redirects=True
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
wait_for_all_checks(client)
|
||||
|
||||
res = client.post(
|
||||
url_for("ui.ui_edit.edit_page", uuid="first"),
|
||||
data={
|
||||
"subtractive_selectors": selector_list,
|
||||
"url": test_url,
|
||||
"tags": "",
|
||||
"fetch_backend": "html_requests",
|
||||
"time_between_check_use_default": "y",
|
||||
},
|
||||
follow_redirects=True,
|
||||
)
|
||||
assert b"Updated watch." in res.data
|
||||
uuid = client.application.config.get('DATASTORE').add_watch(url=test_url, extras={"subtractive_selectors": selector_list.splitlines()})
|
||||
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
|
||||
wait_for_all_checks(client)
|
||||
|
||||
res = client.get(
|
||||
@@ -258,6 +242,7 @@ body > table > tr:nth-child(3) > td:nth-child(3)""",
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
# the filters above should have removed this but they never say to remove the "emil" column
|
||||
assert b"Tobias" not in res.data
|
||||
assert b"Linus" not in res.data
|
||||
assert b"Person 2" not in res.data
|
||||
|
||||
@@ -28,11 +28,8 @@ def test_check_encoding_detection(client, live_server, measure_memory_usage):
|
||||
|
||||
# Add our URL to the import page
|
||||
test_url = url_for('test_endpoint', content_type="text/html", _external=True)
|
||||
client.post(
|
||||
url_for("imports.import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
uuid = client.application.config.get('DATASTORE').add_watch(url=test_url)
|
||||
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
|
||||
|
||||
# Give the thread time to pick it up
|
||||
wait_for_all_checks(client)
|
||||
@@ -59,11 +56,8 @@ def test_check_encoding_detection_missing_content_type_header(client, live_serve
|
||||
|
||||
# Add our URL to the import page
|
||||
test_url = url_for('test_endpoint', _external=True)
|
||||
client.post(
|
||||
url_for("imports.import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
uuid = client.application.config.get('DATASTORE').add_watch(url=test_url)
|
||||
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
|
||||
|
||||
wait_for_all_checks(client)
|
||||
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
import time
|
||||
|
||||
from flask import url_for
|
||||
from .util import live_server_setup, wait_for_all_checks
|
||||
from .util import live_server_setup, wait_for_all_checks, delete_all_watches
|
||||
|
||||
|
||||
|
||||
@@ -19,19 +19,15 @@ def _runner_test_http_errors(client, live_server, http_code, expected_text):
|
||||
status_code=http_code,
|
||||
_external=True)
|
||||
|
||||
res = client.post(
|
||||
url_for("imports.import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
uuid = client.application.config.get('DATASTORE').add_watch(url=test_url)
|
||||
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
|
||||
|
||||
# Give the thread time to pick it up
|
||||
wait_for_all_checks(client)
|
||||
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
# no change
|
||||
assert b'unviewed' not in res.data
|
||||
assert b'has-unread-changes' not in res.data
|
||||
assert bytes(expected_text.encode('utf-8')) in res.data
|
||||
|
||||
|
||||
@@ -47,8 +43,7 @@ def _runner_test_http_errors(client, live_server, http_code, expected_text):
|
||||
#assert b'Error Screenshot' in res.data
|
||||
|
||||
|
||||
res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)
|
||||
assert b'Deleted' in res.data
|
||||
delete_all_watches(client)
|
||||
|
||||
|
||||
def test_http_error_handler(client, live_server, measure_memory_usage):
|
||||
@@ -56,8 +51,7 @@ def test_http_error_handler(client, live_server, measure_memory_usage):
|
||||
_runner_test_http_errors(client, live_server, 404, 'Page not found')
|
||||
_runner_test_http_errors(client, live_server, 500, '(Internal server error) received')
|
||||
_runner_test_http_errors(client, live_server, 400, 'Error - Request returned a HTTP error code 400')
|
||||
res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)
|
||||
assert b'Deleted' in res.data
|
||||
delete_all_watches(client)
|
||||
|
||||
# Just to be sure error text is properly handled
|
||||
def test_DNS_errors(client, live_server, measure_memory_usage):
|
||||
@@ -87,8 +81,7 @@ def test_DNS_errors(client, live_server, measure_memory_usage):
|
||||
assert found_name_resolution_error
|
||||
# Should always record that we tried
|
||||
assert bytes("just now".encode('utf-8')) in res.data
|
||||
res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)
|
||||
assert b'Deleted' in res.data
|
||||
delete_all_watches(client)
|
||||
|
||||
# Re 1513
|
||||
def test_low_level_errors_clear_correctly(client, live_server, measure_memory_usage):
|
||||
@@ -145,5 +138,4 @@ def test_low_level_errors_clear_correctly(client, live_server, measure_memory_us
|
||||
)
|
||||
assert not found_name_resolution_error
|
||||
|
||||
res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)
|
||||
assert b'Deleted' in res.data
|
||||
delete_all_watches(client)
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
|
||||
import time
|
||||
from flask import url_for
|
||||
from .util import live_server_setup, wait_for_all_checks
|
||||
from .util import live_server_setup, wait_for_all_checks, delete_all_watches
|
||||
|
||||
from ..html_tools import *
|
||||
|
||||
@@ -76,12 +76,8 @@ def test_check_filter_multiline(client, live_server, measure_memory_usage):
|
||||
|
||||
# Add our URL to the import page
|
||||
test_url = url_for('test_endpoint', _external=True)
|
||||
res = client.post(
|
||||
url_for("imports.import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
uuid = client.application.config.get('DATASTORE').add_watch(url=test_url)
|
||||
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
|
||||
|
||||
wait_for_all_checks(client)
|
||||
|
||||
@@ -131,12 +127,8 @@ def test_check_filter_and_regex_extract(client, live_server, measure_memory_usag
|
||||
|
||||
# Add our URL to the import page
|
||||
test_url = url_for('test_endpoint', _external=True)
|
||||
res = client.post(
|
||||
url_for("imports.import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
uuid = client.application.config.get('DATASTORE').add_watch(url=test_url)
|
||||
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
|
||||
|
||||
# Give the thread time to pick it up
|
||||
wait_for_all_checks(client)
|
||||
@@ -174,10 +166,10 @@ def test_check_filter_and_regex_extract(client, live_server, measure_memory_usag
|
||||
# Give the thread time to pick it up
|
||||
wait_for_all_checks(client)
|
||||
|
||||
# It should have 'unviewed' still
|
||||
# It should have 'has-unread-changes' still
|
||||
# Because it should be looking at only that 'sametext' id
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b'unviewed' in res.data
|
||||
assert b'has-unread-changes' in res.data
|
||||
|
||||
# Check HTML conversion detected and workd
|
||||
res = client.get(
|
||||
@@ -212,12 +204,8 @@ def test_regex_error_handling(client, live_server, measure_memory_usage):
|
||||
|
||||
# Add our URL to the import page
|
||||
test_url = url_for('test_endpoint', _external=True)
|
||||
res = client.post(
|
||||
url_for("imports.import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
uuid = client.application.config.get('DATASTORE').add_watch(url=test_url)
|
||||
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
|
||||
|
||||
### test regex error handling
|
||||
res = client.post(
|
||||
@@ -231,5 +219,4 @@ def test_regex_error_handling(client, live_server, measure_memory_usage):
|
||||
|
||||
assert b'is not a valid regular expression.' in res.data
|
||||
|
||||
res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)
|
||||
assert b'Deleted' in res.data
|
||||
delete_all_watches(client)
|
||||
|
||||
@@ -42,13 +42,8 @@ def run_filter_test(client, live_server, content_filter):
|
||||
if os.path.isfile("test-datastore/notification.txt"):
|
||||
os.unlink("test-datastore/notification.txt")
|
||||
|
||||
res = client.post(
|
||||
url_for("imports.import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
assert b"1 Imported" in res.data
|
||||
uuid = client.application.config.get('DATASTORE').add_watch(url=test_url)
|
||||
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
|
||||
wait_for_all_checks(client)
|
||||
|
||||
uuid = next(iter(live_server.app.config['DATASTORE'].data['watching']))
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
|
||||
import time
|
||||
from flask import url_for
|
||||
from .util import live_server_setup, wait_for_all_checks, extract_rss_token_from_UI, get_UUID_for_tag_name, extract_UUID_from_client
|
||||
from .util import live_server_setup, wait_for_all_checks, extract_rss_token_from_UI, get_UUID_for_tag_name, extract_UUID_from_client, delete_all_watches
|
||||
import os
|
||||
|
||||
|
||||
@@ -127,8 +127,7 @@ def test_setup_group_tag(client, live_server, measure_memory_usage):
|
||||
assert b"should-be-excluded" not in res.data
|
||||
assert res.status_code == 200
|
||||
assert b"first-imported=1" in res.data
|
||||
res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)
|
||||
assert b'Deleted' in res.data
|
||||
delete_all_watches(client)
|
||||
|
||||
def test_tag_import_singular(client, live_server, measure_memory_usage):
|
||||
|
||||
@@ -147,8 +146,7 @@ def test_tag_import_singular(client, live_server, measure_memory_usage):
|
||||
)
|
||||
# Should be only 1 tag because they both had the same
|
||||
assert res.data.count(b'test-tag') == 1
|
||||
res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)
|
||||
assert b'Deleted' in res.data
|
||||
delete_all_watches(client)
|
||||
|
||||
def test_tag_add_in_ui(client, live_server, measure_memory_usage):
|
||||
|
||||
@@ -164,8 +162,7 @@ def test_tag_add_in_ui(client, live_server, measure_memory_usage):
|
||||
res = client.get(url_for("tags.delete_all"), follow_redirects=True)
|
||||
assert b'All tags deleted' in res.data
|
||||
|
||||
res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)
|
||||
assert b'Deleted' in res.data
|
||||
delete_all_watches(client)
|
||||
|
||||
def test_group_tag_notification(client, live_server, measure_memory_usage):
|
||||
|
||||
@@ -232,8 +229,7 @@ def test_group_tag_notification(client, live_server, measure_memory_usage):
|
||||
|
||||
#@todo Test that multiple notifications fired
|
||||
#@todo Test that each of multiple notifications with different settings
|
||||
res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)
|
||||
assert b'Deleted' in res.data
|
||||
delete_all_watches(client)
|
||||
|
||||
def test_limit_tag_ui(client, live_server, measure_memory_usage):
|
||||
|
||||
@@ -264,15 +260,12 @@ def test_limit_tag_ui(client, live_server, measure_memory_usage):
|
||||
client.get(url_for('ui.mark_all_viewed', tag=tag_uuid), follow_redirects=True)
|
||||
wait_for_all_checks(client)
|
||||
|
||||
with open('/tmp/fuck.html', 'wb') as f:
|
||||
f.write(res.data)
|
||||
# Should be only 1 unviewed
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert res.data.count(b' unviewed ') == 1
|
||||
|
||||
|
||||
res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)
|
||||
assert b'Deleted' in res.data
|
||||
delete_all_watches(client)
|
||||
res = client.get(url_for("tags.delete_all"), follow_redirects=True)
|
||||
assert b'All tags deleted' in res.data
|
||||
|
||||
@@ -299,8 +292,7 @@ def test_clone_tag_on_import(client, live_server, measure_memory_usage):
|
||||
# 2 times plus the top link to tag
|
||||
assert res.data.count(b'test-tag') == 3
|
||||
assert res.data.count(b'another-tag') == 3
|
||||
res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)
|
||||
assert b'Deleted' in res.data
|
||||
delete_all_watches(client)
|
||||
|
||||
def test_clone_tag_on_quickwatchform_add(client, live_server, measure_memory_usage):
|
||||
|
||||
@@ -327,8 +319,7 @@ def test_clone_tag_on_quickwatchform_add(client, live_server, measure_memory_usa
|
||||
# 2 times plus the top link to tag
|
||||
assert res.data.count(b'test-tag') == 3
|
||||
assert res.data.count(b'another-tag') == 3
|
||||
res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)
|
||||
assert b'Deleted' in res.data
|
||||
delete_all_watches(client)
|
||||
|
||||
res = client.get(url_for("tags.delete_all"), follow_redirects=True)
|
||||
assert b'All tags deleted' in res.data
|
||||
@@ -391,12 +382,8 @@ def test_order_of_filters_tag_filter_and_watch_filter(client, live_server, measu
|
||||
f.write(d)
|
||||
|
||||
test_url = url_for('test_endpoint', _external=True)
|
||||
res = client.post(
|
||||
url_for("imports.import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
uuid = client.application.config.get('DATASTORE').add_watch(url=test_url)
|
||||
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
|
||||
wait_for_all_checks(client)
|
||||
|
||||
filters = [
|
||||
@@ -482,5 +469,4 @@ the {test} appeared before. {test in res.data[:n]=}
|
||||
"""
|
||||
n += t_index + len(test)
|
||||
|
||||
res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)
|
||||
assert b'Deleted' in res.data
|
||||
delete_all_watches(client)
|
||||
|
||||
@@ -3,9 +3,8 @@
|
||||
import time
|
||||
import os
|
||||
import json
|
||||
import logging
|
||||
from flask import url_for
|
||||
from .util import live_server_setup, wait_for_all_checks
|
||||
from .util import wait_for_all_checks, delete_all_watches
|
||||
from urllib.parse import urlparse, parse_qs
|
||||
|
||||
def test_consistent_history(client, live_server, measure_memory_usage):
|
||||
@@ -81,19 +80,15 @@ def test_consistent_history(client, live_server, measure_memory_usage):
|
||||
assert '"default"' not in f.read(), "'default' probably shouldnt be here, it came from when the 'default' Watch vars were accidently being saved"
|
||||
|
||||
|
||||
def test_check_text_history_view(client, live_server):
|
||||
def test_check_text_history_view(client, live_server, measure_memory_usage):
|
||||
|
||||
with open("test-datastore/endpoint-content.txt", "w") as f:
|
||||
f.write("<html>test-one</html>")
|
||||
|
||||
# Add our URL to the import page
|
||||
test_url = url_for('test_endpoint', _external=True)
|
||||
res = client.post(
|
||||
url_for("imports.import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
uuid = client.application.config.get('DATASTORE').add_watch(url=test_url)
|
||||
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
|
||||
|
||||
# Give the thread time to pick it up
|
||||
wait_for_all_checks(client)
|
||||
@@ -122,5 +117,4 @@ def test_check_text_history_view(client, live_server):
|
||||
assert b'test-two' in res.data
|
||||
assert b'test-one' not in res.data
|
||||
|
||||
res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)
|
||||
assert b'Deleted' in res.data
|
||||
delete_all_watches(client)
|
||||
|
||||
@@ -27,12 +27,8 @@ def test_ignore(client, live_server, measure_memory_usage):
|
||||
# live_server_setup(live_server) # Setup on conftest per function
|
||||
set_original_ignore_response()
|
||||
test_url = url_for('test_endpoint', _external=True)
|
||||
res = client.post(
|
||||
url_for("imports.import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
uuid = client.application.config.get('DATASTORE').add_watch(url=test_url)
|
||||
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
|
||||
|
||||
# Give the thread time to pick it up
|
||||
wait_for_all_checks(client)
|
||||
@@ -58,3 +54,35 @@ def test_ignore(client, live_server, measure_memory_usage):
|
||||
# Should be in base.html
|
||||
assert b'csrftoken' in res.data
|
||||
|
||||
|
||||
def test_strip_ignore_lines(client, live_server, measure_memory_usage):
|
||||
# live_server_setup(live_server) # Setup on conftest per function
|
||||
set_original_ignore_response()
|
||||
|
||||
|
||||
# Goto the settings page, add our ignore text
|
||||
res = client.post(
|
||||
url_for("settings.settings_page"),
|
||||
data={
|
||||
"requests-time_between_check-minutes": 180,
|
||||
"application-ignore_whitespace": "y",
|
||||
"application-strip_ignored_lines": "y",
|
||||
"application-global_ignore_text": "Which is across multiple",
|
||||
'application-fetch_backend': "html_requests"
|
||||
},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"Settings updated." in res.data
|
||||
|
||||
test_url = url_for('test_endpoint', _external=True)
|
||||
uuid = client.application.config.get('DATASTORE').add_watch(url=test_url)
|
||||
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
|
||||
|
||||
# Give the thread time to pick it up
|
||||
wait_for_all_checks(client)
|
||||
uuid = next(iter(live_server.app.config['DATASTORE'].data['watching']))
|
||||
|
||||
# It should not be in the preview anymore
|
||||
res = client.get(url_for("ui.ui_views.preview_page", uuid=uuid))
|
||||
assert b'<div class="ignored">' not in res.data
|
||||
assert b'Which is across multiple' not in res.data
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
|
||||
import time
|
||||
from flask import url_for
|
||||
from .util import live_server_setup, wait_for_all_checks
|
||||
from .util import live_server_setup, wait_for_all_checks, delete_all_watches
|
||||
from changedetectionio import html_tools
|
||||
|
||||
|
||||
@@ -97,12 +97,8 @@ def test_check_ignore_text_functionality(client, live_server, measure_memory_usa
|
||||
|
||||
# Add our URL to the import page
|
||||
test_url = url_for('test_endpoint', _external=True)
|
||||
res = client.post(
|
||||
url_for("imports.import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
uuid = client.application.config.get('DATASTORE').add_watch(url=test_url)
|
||||
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
|
||||
|
||||
# Give the thread time to pick it up
|
||||
wait_for_all_checks(client)
|
||||
@@ -128,9 +124,9 @@ def test_check_ignore_text_functionality(client, live_server, measure_memory_usa
|
||||
# Give the thread time to pick it up
|
||||
wait_for_all_checks(client)
|
||||
|
||||
# It should report nothing found (no new 'unviewed' class)
|
||||
# It should report nothing found (no new 'has-unread-changes' class)
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b'unviewed' not in res.data
|
||||
assert b'has-unread-changes' not in res.data
|
||||
assert b'/test-endpoint' in res.data
|
||||
|
||||
# Make a change
|
||||
@@ -141,9 +137,9 @@ def test_check_ignore_text_functionality(client, live_server, measure_memory_usa
|
||||
# Give the thread time to pick it up
|
||||
wait_for_all_checks(client)
|
||||
|
||||
# It should report nothing found (no new 'unviewed' class)
|
||||
# It should report nothing found (no new 'has-unread-changes' class)
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b'unviewed' not in res.data
|
||||
assert b'has-unread-changes' not in res.data
|
||||
assert b'/test-endpoint' in res.data
|
||||
|
||||
|
||||
@@ -154,7 +150,7 @@ def test_check_ignore_text_functionality(client, live_server, measure_memory_usa
|
||||
wait_for_all_checks(client)
|
||||
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b'unviewed' in res.data
|
||||
assert b'has-unread-changes' in res.data
|
||||
|
||||
res = client.get(url_for("ui.ui_views.preview_page", uuid="first"))
|
||||
|
||||
@@ -163,8 +159,7 @@ def test_check_ignore_text_functionality(client, live_server, measure_memory_usa
|
||||
# it is only ignored, it is not removed (it will be highlighted too)
|
||||
assert b'new ignore stuff' in res.data
|
||||
|
||||
res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)
|
||||
assert b'Deleted' in res.data
|
||||
delete_all_watches(client)
|
||||
|
||||
# When adding some ignore text, it should not trigger a change, even if something else on that line changes
|
||||
def _run_test_global_ignore(client, as_source=False, extra_ignore=""):
|
||||
@@ -192,12 +187,8 @@ def _run_test_global_ignore(client, as_source=False, extra_ignore=""):
|
||||
# Switch to source mode so we can test that too!
|
||||
test_url = "source:"+test_url
|
||||
|
||||
res = client.post(
|
||||
url_for("imports.import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
uuid = client.application.config.get('DATASTORE').add_watch(url=test_url)
|
||||
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
|
||||
|
||||
# Give the thread time to pick it up
|
||||
wait_for_all_checks(client)
|
||||
@@ -222,9 +213,9 @@ def _run_test_global_ignore(client, as_source=False, extra_ignore=""):
|
||||
# Trigger a check
|
||||
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
|
||||
wait_for_all_checks(client)
|
||||
# It should report nothing found (no new 'unviewed' class), adding random ignore text should not cause a change
|
||||
# It should report nothing found (no new 'has-unread-changes' class), adding random ignore text should not cause a change
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b'unviewed' not in res.data
|
||||
assert b'has-unread-changes' not in res.data
|
||||
assert b'/test-endpoint' in res.data
|
||||
#####
|
||||
|
||||
@@ -238,10 +229,10 @@ def _run_test_global_ignore(client, as_source=False, extra_ignore=""):
|
||||
# Give the thread time to pick it up
|
||||
wait_for_all_checks(client)
|
||||
|
||||
# It should report nothing found (no new 'unviewed' class)
|
||||
# It should report nothing found (no new 'has-unread-changes' class)
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
|
||||
assert b'unviewed' not in res.data
|
||||
assert b'has-unread-changes' not in res.data
|
||||
assert b'/test-endpoint' in res.data
|
||||
|
||||
# Just to be sure.. set a regular modified change that will trigger it
|
||||
@@ -249,15 +240,14 @@ def _run_test_global_ignore(client, as_source=False, extra_ignore=""):
|
||||
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
|
||||
wait_for_all_checks(client)
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b'unviewed' in res.data
|
||||
assert b'has-unread-changes' in res.data
|
||||
|
||||
res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)
|
||||
assert b'Deleted' in res.data
|
||||
delete_all_watches(client)
|
||||
|
||||
def test_check_global_ignore_text_functionality(client, live_server):
|
||||
def test_check_global_ignore_text_functionality(client, live_server, measure_memory_usage):
|
||||
|
||||
_run_test_global_ignore(client, as_source=False)
|
||||
|
||||
def test_check_global_ignore_text_functionality_as_source(client, live_server):
|
||||
def test_check_global_ignore_text_functionality_as_source(client, live_server, measure_memory_usage):
|
||||
|
||||
_run_test_global_ignore(client, as_source=True, extra_ignore='/\?v=\d/')
|
||||
|
||||
@@ -3,9 +3,7 @@
|
||||
|
||||
import time
|
||||
from flask import url_for
|
||||
from .util import live_server_setup, wait_for_all_checks
|
||||
|
||||
|
||||
from .util import live_server_setup, wait_for_all_checks, delete_all_watches
|
||||
|
||||
|
||||
def set_original_ignore_response():
|
||||
@@ -111,13 +109,11 @@ def test_render_anchor_tag_content_true(client, live_server, measure_memory_usag
|
||||
assert '(/modified_link)' in res.data.decode()
|
||||
|
||||
# since the link has changed, and we chose to render anchor tag content,
|
||||
# we should detect a change (new 'unviewed' class)
|
||||
# we should detect a change (new 'has-unread-changes' class)
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b"unviewed" in res.data
|
||||
assert b"/test-endpoint" in res.data
|
||||
|
||||
# Cleanup everything
|
||||
res = client.get(url_for("ui.form_delete", uuid="all"),
|
||||
follow_redirects=True)
|
||||
assert b'Deleted' in res.data
|
||||
delete_all_watches(client)
|
||||
|
||||
|
||||
@@ -60,12 +60,8 @@ def test_normal_page_check_works_with_ignore_status_code(client, live_server, me
|
||||
|
||||
# Add our URL to the import page
|
||||
test_url = url_for('test_endpoint', _external=True)
|
||||
res = client.post(
|
||||
url_for("imports.import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
uuid = client.application.config.get('DATASTORE').add_watch(url=test_url)
|
||||
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
|
||||
|
||||
wait_for_all_checks(client)
|
||||
|
||||
@@ -77,9 +73,9 @@ def test_normal_page_check_works_with_ignore_status_code(client, live_server, me
|
||||
# Give the thread time to pick it up
|
||||
wait_for_all_checks(client)
|
||||
|
||||
# It should report nothing found (no new 'unviewed' class)
|
||||
# It should report nothing found (no new 'has-unread-changes' class)
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b'unviewed' in res.data
|
||||
assert b'has-unread-changes' in res.data
|
||||
assert b'/test-endpoint' in res.data
|
||||
|
||||
|
||||
@@ -94,12 +90,8 @@ def test_403_page_check_works_with_ignore_status_code(client, live_server, measu
|
||||
|
||||
# Add our URL to the import page
|
||||
test_url = url_for('test_endpoint', status_code=403, _external=True)
|
||||
res = client.post(
|
||||
url_for("imports.import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
uuid = client.application.config.get('DATASTORE').add_watch(url=test_url)
|
||||
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
|
||||
|
||||
# Give the thread time to pick it up
|
||||
time.sleep(sleep_time_for_fetch_thread)
|
||||
@@ -124,8 +116,8 @@ def test_403_page_check_works_with_ignore_status_code(client, live_server, measu
|
||||
# Give the thread time to pick it up
|
||||
wait_for_all_checks(client)
|
||||
|
||||
# It should have 'unviewed' still
|
||||
# It should have 'has-unread-changes' still
|
||||
# Because it should be looking at only that 'sametext' id
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b'unviewed' in res.data
|
||||
assert b'has-unread-changes' in res.data
|
||||
|
||||
|
||||
@@ -70,12 +70,8 @@ def test_check_ignore_whitespace(client, live_server, measure_memory_usage):
|
||||
|
||||
# Add our URL to the import page
|
||||
test_url = url_for('test_endpoint', _external=True)
|
||||
res = client.post(
|
||||
url_for("imports.import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
uuid = client.application.config.get('DATASTORE').add_watch(url=test_url)
|
||||
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
|
||||
|
||||
time.sleep(sleep_time_for_fetch_thread)
|
||||
# Trigger a check
|
||||
@@ -89,7 +85,7 @@ def test_check_ignore_whitespace(client, live_server, measure_memory_usage):
|
||||
# Give the thread time to pick it up
|
||||
time.sleep(sleep_time_for_fetch_thread)
|
||||
|
||||
# It should report nothing found (no new 'unviewed' class)
|
||||
# It should report nothing found (no new 'has-unread-changes' class)
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b'unviewed' not in res.data
|
||||
assert b'has-unread-changes' not in res.data
|
||||
assert b'/test-endpoint' in res.data
|
||||
|
||||
@@ -5,7 +5,7 @@ import time
|
||||
|
||||
from flask import url_for
|
||||
|
||||
from .util import live_server_setup, wait_for_all_checks
|
||||
from .util import live_server_setup, wait_for_all_checks, delete_all_watches
|
||||
|
||||
|
||||
# def test_setup(client, live_server, measure_memory_usage):
|
||||
@@ -28,7 +28,7 @@ https://example.com tag1, other tag"""
|
||||
assert b"3 Imported" in res.data
|
||||
assert b"tag1" in res.data
|
||||
assert b"other tag" in res.data
|
||||
res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)
|
||||
delete_all_watches(client)
|
||||
|
||||
# Clear flask alerts
|
||||
res = client.get( url_for("watchlist.index"))
|
||||
@@ -53,7 +53,7 @@ def xtest_import_skip_url(client, live_server, measure_memory_usage):
|
||||
assert b"1 Imported" in res.data
|
||||
assert b"ht000000broken" in res.data
|
||||
assert b"1 Skipped" in res.data
|
||||
res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)
|
||||
delete_all_watches(client)
|
||||
# Clear flask alerts
|
||||
res = client.get( url_for("watchlist.index"))
|
||||
|
||||
@@ -119,7 +119,7 @@ def test_import_distillio(client, live_server, measure_memory_usage):
|
||||
assert b"nice stuff" in res.data
|
||||
assert b"nerd-news" in res.data
|
||||
|
||||
res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)
|
||||
delete_all_watches(client)
|
||||
# Clear flask alerts
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
|
||||
@@ -169,8 +169,7 @@ def test_import_custom_xlsx(client, live_server, measure_memory_usage):
|
||||
assert filters[0] == '/html[1]/body[1]/div[4]/div[1]/div[1]/div[1]||//*[@id=\'content\']/div[3]/div[1]/div[1]||//*[@id=\'content\']/div[1]'
|
||||
assert watch.get('time_between_check') == {'weeks': 0, 'days': 1, 'hours': 6, 'minutes': 24, 'seconds': 0}
|
||||
|
||||
res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)
|
||||
assert b'Deleted' in res.data
|
||||
delete_all_watches(client)
|
||||
|
||||
def test_import_watchete_xlsx(client, live_server, measure_memory_usage):
|
||||
"""Test can upload a excel spreadsheet and the watches are created correctly"""
|
||||
@@ -214,5 +213,4 @@ def test_import_watchete_xlsx(client, live_server, measure_memory_usage):
|
||||
if watch.get('title') == 'system default website':
|
||||
assert watch.get('fetch_backend') == 'system' # uses default if blank
|
||||
|
||||
res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)
|
||||
assert b'Deleted' in res.data
|
||||
delete_all_watches(client)
|
||||
|
||||
@@ -26,7 +26,7 @@ def test_jinja2_in_url_query(client, live_server, measure_memory_usage):
|
||||
assert b"Watch added" in res.data
|
||||
wait_for_all_checks(client)
|
||||
|
||||
# It should report nothing found (no new 'unviewed' class)
|
||||
# It should report nothing found (no new 'has-unread-changes' class)
|
||||
res = client.get(
|
||||
url_for("ui.ui_views.preview_page", uuid="first"),
|
||||
follow_redirects=True
|
||||
@@ -51,7 +51,7 @@ def test_jinja2_security_url_query(client, live_server, measure_memory_usage):
|
||||
assert b"Watch added" in res.data
|
||||
wait_for_all_checks(client)
|
||||
|
||||
# It should report nothing found (no new 'unviewed' class)
|
||||
# It should report nothing found (no new 'has-unread-changes' class)
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b'is invalid and cannot be used' in res.data
|
||||
# Some of the spewed output from the subclasses
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
|
||||
import time
|
||||
from flask import url_for, escape
|
||||
from . util import live_server_setup, wait_for_all_checks
|
||||
from . util import live_server_setup, wait_for_all_checks, delete_all_watches
|
||||
import pytest
|
||||
jq_support = True
|
||||
|
||||
@@ -205,16 +205,10 @@ def test_check_json_without_filter(client, live_server, measure_memory_usage):
|
||||
# and be sure it doesn't get chewed up by instriptis
|
||||
set_json_response_with_html()
|
||||
|
||||
# Give the endpoint time to spin up
|
||||
time.sleep(1)
|
||||
|
||||
# Add our URL to the import page
|
||||
test_url = url_for('test_endpoint', content_type="application/json", _external=True)
|
||||
client.post(
|
||||
url_for("imports.import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
uuid = client.application.config.get('DATASTORE').add_watch(url=test_url)
|
||||
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
|
||||
|
||||
# Give the thread time to pick it up
|
||||
wait_for_all_checks(client)
|
||||
@@ -228,23 +222,16 @@ def test_check_json_without_filter(client, live_server, measure_memory_usage):
|
||||
assert b'"html": "<b>"' in res.data
|
||||
assert res.data.count(b'{') >= 2
|
||||
|
||||
res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)
|
||||
assert b'Deleted' in res.data
|
||||
delete_all_watches(client)
|
||||
|
||||
def check_json_filter(json_filter, client, live_server):
|
||||
set_original_response()
|
||||
|
||||
# Give the endpoint time to spin up
|
||||
time.sleep(1)
|
||||
|
||||
# Add our URL to the import page
|
||||
test_url = url_for('test_endpoint', content_type="application/json", _external=True)
|
||||
res = client.post(
|
||||
url_for("imports.import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
uuid = client.application.config.get('DATASTORE').add_watch(url=test_url)
|
||||
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
|
||||
|
||||
# Give the thread time to pick it up
|
||||
wait_for_all_checks(client)
|
||||
@@ -280,9 +267,9 @@ def check_json_filter(json_filter, client, live_server):
|
||||
# Give the thread time to pick it up
|
||||
wait_for_all_checks(client)
|
||||
|
||||
# It should have 'unviewed' still
|
||||
# It should have 'has-unread-changes' still
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b'unviewed' in res.data
|
||||
assert b'has-unread-changes' in res.data
|
||||
|
||||
# Should not see this, because its not in the JSONPath we entered
|
||||
res = client.get(url_for("ui.ui_views.diff_history_page", uuid="first"))
|
||||
@@ -291,8 +278,7 @@ def check_json_filter(json_filter, client, live_server):
|
||||
# And #462 - check we see the proper utf-8 string there
|
||||
assert "Örnsköldsvik".encode('utf-8') in res.data
|
||||
|
||||
res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)
|
||||
assert b'Deleted' in res.data
|
||||
delete_all_watches(client)
|
||||
|
||||
def test_check_jsonpath_filter(client, live_server, measure_memory_usage):
|
||||
check_json_filter('json:boss.name', client, live_server)
|
||||
@@ -313,12 +299,8 @@ def check_json_filter_bool_val(json_filter, client, live_server):
|
||||
|
||||
test_url = url_for('test_endpoint', content_type="application/json", _external=True)
|
||||
|
||||
res = client.post(
|
||||
url_for("imports.import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
uuid = client.application.config.get('DATASTORE').add_watch(url=test_url)
|
||||
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
|
||||
|
||||
wait_for_all_checks(client)
|
||||
# Goto the edit page, add our ignore text
|
||||
@@ -350,8 +332,7 @@ def check_json_filter_bool_val(json_filter, client, live_server):
|
||||
# But the change should be there, tho its hard to test the change was detected because it will show old and new versions
|
||||
assert b'false' in res.data
|
||||
|
||||
res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)
|
||||
assert b'Deleted' in res.data
|
||||
delete_all_watches(client)
|
||||
|
||||
def test_check_jsonpath_filter_bool_val(client, live_server, measure_memory_usage):
|
||||
check_json_filter_bool_val("json:$['available']", client, live_server)
|
||||
@@ -377,12 +358,8 @@ def check_json_ext_filter(json_filter, client, live_server):
|
||||
|
||||
# Add our URL to the import page
|
||||
test_url = url_for('test_endpoint', content_type="application/json", _external=True)
|
||||
res = client.post(
|
||||
url_for("imports.import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
uuid = client.application.config.get('DATASTORE').add_watch(url=test_url)
|
||||
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
|
||||
|
||||
# Give the thread time to pick it up
|
||||
wait_for_all_checks(client)
|
||||
@@ -418,14 +395,14 @@ def check_json_ext_filter(json_filter, client, live_server):
|
||||
# Give the thread time to pick it up
|
||||
wait_for_all_checks(client)
|
||||
|
||||
# It should have 'unviewed'
|
||||
# It should have 'has-unread-changes'
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b'unviewed' in res.data
|
||||
assert b'has-unread-changes' in res.data
|
||||
|
||||
res = client.get(url_for("ui.ui_views.preview_page", uuid="first"))
|
||||
|
||||
# We should never see 'ForSale' because we are selecting on 'Sold' in the rule,
|
||||
# But we should know it triggered ('unviewed' assert above)
|
||||
# But we should know it triggered ('has-unread-changes' assert above)
|
||||
assert b'ForSale' not in res.data
|
||||
assert b'Sold' in res.data
|
||||
|
||||
@@ -436,8 +413,7 @@ def check_json_ext_filter(json_filter, client, live_server):
|
||||
assert b'ForSale' in res.data
|
||||
assert b'Sold' in res.data
|
||||
|
||||
res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)
|
||||
assert b'Deleted' in res.data
|
||||
delete_all_watches(client)
|
||||
|
||||
def test_ignore_json_order(client, live_server, measure_memory_usage):
|
||||
# A change in order shouldn't trigger a notification
|
||||
@@ -448,12 +424,8 @@ def test_ignore_json_order(client, live_server, measure_memory_usage):
|
||||
|
||||
# Add our URL to the import page
|
||||
test_url = url_for('test_endpoint', content_type="application/json", _external=True)
|
||||
res = client.post(
|
||||
url_for("imports.import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
uuid = client.application.config.get('DATASTORE').add_watch(url=test_url)
|
||||
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
|
||||
|
||||
wait_for_all_checks(client)
|
||||
|
||||
@@ -465,7 +437,7 @@ def test_ignore_json_order(client, live_server, measure_memory_usage):
|
||||
wait_for_all_checks(client)
|
||||
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b'unviewed' not in res.data
|
||||
assert b'has-unread-changes' not in res.data
|
||||
|
||||
# Just to be sure it still works
|
||||
with open("test-datastore/endpoint-content.txt", "w") as f:
|
||||
@@ -476,10 +448,9 @@ def test_ignore_json_order(client, live_server, measure_memory_usage):
|
||||
wait_for_all_checks(client)
|
||||
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b'unviewed' in res.data
|
||||
assert b'has-unread-changes' in res.data
|
||||
|
||||
res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)
|
||||
assert b'Deleted' in res.data
|
||||
delete_all_watches(client)
|
||||
|
||||
def test_correct_header_detect(client, live_server, measure_memory_usage):
|
||||
# Like in https://github.com/dgtlmoon/changedetection.io/pull/1593
|
||||
@@ -490,12 +461,8 @@ def test_correct_header_detect(client, live_server, measure_memory_usage):
|
||||
# Add our URL to the import page
|
||||
# Check weird casing is cleaned up and detected also
|
||||
test_url = url_for('test_endpoint', content_type="aPPlication/JSon", uppercase_headers=True, _external=True)
|
||||
res = client.post(
|
||||
url_for("imports.import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
uuid = client.application.config.get('DATASTORE').add_watch(url=test_url)
|
||||
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
|
||||
wait_for_all_checks(client)
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
|
||||
@@ -510,8 +477,7 @@ def test_correct_header_detect(client, live_server, measure_memory_usage):
|
||||
assert b'"hello": 123,' in res.data
|
||||
assert b'"world": 123' in res.data
|
||||
|
||||
res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)
|
||||
assert b'Deleted' in res.data
|
||||
delete_all_watches(client)
|
||||
|
||||
def test_check_jsonpath_ext_filter(client, live_server, measure_memory_usage):
|
||||
check_json_ext_filter('json:$[?(@.status==Sold)]', client, live_server)
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
from flask import url_for
|
||||
from changedetectionio.tests.util import live_server_setup, wait_for_all_checks, extract_UUID_from_client
|
||||
from changedetectionio.tests.util import live_server_setup, wait_for_all_checks, extract_UUID_from_client, delete_all_watches
|
||||
|
||||
|
||||
def set_response():
|
||||
@@ -75,5 +75,4 @@ def test_content_filter_live_preview(client, live_server, measure_memory_usage):
|
||||
assert reply.get('ignore_line_numbers') == [2] # Ignored - "socks" on line 2
|
||||
assert reply.get('trigger_line_numbers') == [1] # Triggers "Awesome" in line 1
|
||||
|
||||
res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)
|
||||
assert b'Deleted' in res.data
|
||||
delete_all_watches(client)
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
from flask import url_for
|
||||
from .util import set_original_response, set_modified_response, live_server_setup, wait_for_all_checks
|
||||
from .util import set_original_response, set_modified_response, live_server_setup, wait_for_all_checks, delete_all_watches
|
||||
import time
|
||||
|
||||
|
||||
@@ -40,9 +40,9 @@ def test_check_basic_change_detection_functionality(client, live_server, measure
|
||||
|
||||
wait_for_all_checks(client)
|
||||
|
||||
# It should report nothing found (no new 'unviewed' class)
|
||||
# It should report nothing found (no new 'has-unread-changes' class)
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b'unviewed' not in res.data
|
||||
assert b'has-unread-changes' not in res.data
|
||||
|
||||
|
||||
#####################
|
||||
@@ -62,9 +62,9 @@ def test_check_basic_change_detection_functionality(client, live_server, measure
|
||||
# Give the thread time to pick it up
|
||||
wait_for_all_checks(client)
|
||||
|
||||
# It should report nothing found (no new 'unviewed' class)
|
||||
# It should report nothing found (no new 'has-unread-changes' class)
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b'unviewed' not in res.data
|
||||
assert b'has-unread-changes' not in res.data
|
||||
|
||||
uuid = next(iter(live_server.app.config['DATASTORE'].data['watching']))
|
||||
watch = live_server.app.config['DATASTORE'].data['watching'][uuid]
|
||||
@@ -92,9 +92,9 @@ def test_check_basic_change_detection_functionality(client, live_server, measure
|
||||
# Give the thread time to pick it up
|
||||
wait_for_all_checks(client)
|
||||
|
||||
# It should report nothing found (no new 'unviewed' class)
|
||||
# It should report nothing found (no new 'has-unread-changes' class)
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b'unviewed' in res.data
|
||||
assert b'has-unread-changes' in res.data
|
||||
client.get(url_for("ui.mark_all_viewed"), follow_redirects=True)
|
||||
time.sleep(0.2)
|
||||
|
||||
@@ -108,11 +108,10 @@ def test_check_basic_change_detection_functionality(client, live_server, measure
|
||||
|
||||
wait_for_all_checks(client)
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b'unviewed' in res.data # A change should have registered because empty_pages_are_a_change is ON
|
||||
assert b'has-unread-changes' in res.data # A change should have registered because empty_pages_are_a_change is ON
|
||||
assert b'fetch-error' not in res.data
|
||||
|
||||
#
|
||||
# Cleanup everything
|
||||
res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)
|
||||
assert b'Deleted' in res.data
|
||||
delete_all_watches(client)
|
||||
|
||||
|
||||
@@ -24,12 +24,8 @@ def test_obfuscations(client, live_server, measure_memory_usage):
|
||||
time.sleep(1)
|
||||
# Add our URL to the import page
|
||||
test_url = url_for('test_endpoint', _external=True)
|
||||
res = client.post(
|
||||
url_for("imports.import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
uuid = client.application.config.get('DATASTORE').add_watch(url=test_url)
|
||||
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
|
||||
|
||||
# Give the thread time to pick it up
|
||||
time.sleep(3)
|
||||
|
||||
@@ -13,13 +13,8 @@ def test_fetch_pdf(client, live_server, measure_memory_usage):
|
||||
# live_server_setup(live_server) # Setup on conftest per function
|
||||
test_url = url_for('test_pdf_endpoint', _external=True)
|
||||
# Add our URL to the import page
|
||||
res = client.post(
|
||||
url_for("imports.import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
assert b"1 Imported" in res.data
|
||||
uuid = client.application.config.get('DATASTORE').add_watch(url=test_url)
|
||||
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
|
||||
|
||||
|
||||
wait_for_all_checks(client)
|
||||
@@ -49,9 +44,9 @@ def test_fetch_pdf(client, live_server, measure_memory_usage):
|
||||
|
||||
wait_for_all_checks(client)
|
||||
|
||||
# Now something should be ready, indicated by having a 'unviewed' class
|
||||
# Now something should be ready, indicated by having a 'has-unread-changes' class
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b'unviewed' in res.data
|
||||
assert b'has-unread-changes' in res.data
|
||||
|
||||
# The original checksum should be not be here anymore (cdio adds it to the bottom of the text)
|
||||
|
||||
|
||||
@@ -13,13 +13,8 @@ def test_fetch_pdf(client, live_server, measure_memory_usage):
|
||||
# live_server_setup(live_server) # Setup on conftest per function
|
||||
test_url = url_for('test_pdf_endpoint', _external=True)
|
||||
# Add our URL to the import page
|
||||
res = client.post(
|
||||
url_for("imports.import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
assert b"1 Imported" in res.data
|
||||
uuid = client.application.config.get('DATASTORE').add_watch(url=test_url)
|
||||
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
|
||||
|
||||
wait_for_all_checks(client)
|
||||
|
||||
@@ -47,9 +42,9 @@ def test_fetch_pdf(client, live_server, measure_memory_usage):
|
||||
|
||||
wait_for_all_checks(client)
|
||||
|
||||
# Now something should be ready, indicated by having a 'unviewed' class
|
||||
# Now something should be ready, indicated by having a 'has-unread-changes' class
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b'unviewed' in res.data
|
||||
assert b'has-unread-changes' in res.data
|
||||
|
||||
# The original checksum should be not be here anymore (cdio adds it to the bottom of the text)
|
||||
|
||||
|
||||
@@ -2,7 +2,7 @@ import json
|
||||
import os
|
||||
import time
|
||||
from flask import url_for
|
||||
from . util import set_original_response, set_modified_response, live_server_setup, wait_for_all_checks, extract_UUID_from_client
|
||||
from . util import set_original_response, set_modified_response, live_server_setup, wait_for_all_checks, extract_UUID_from_client, delete_all_watches
|
||||
|
||||
|
||||
|
||||
@@ -17,21 +17,13 @@ def test_headers_in_request(client, live_server, measure_memory_usage):
|
||||
test_url = test_url.replace('localhost', 'changedet')
|
||||
|
||||
# Add the test URL twice, we will check
|
||||
res = client.post(
|
||||
url_for("imports.import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
uuid = client.application.config.get('DATASTORE').add_watch(url=test_url)
|
||||
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
|
||||
|
||||
wait_for_all_checks(client)
|
||||
|
||||
res = client.post(
|
||||
url_for("imports.import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
uuid = client.application.config.get('DATASTORE').add_watch(url=test_url)
|
||||
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
|
||||
|
||||
wait_for_all_checks(client)
|
||||
cookie_header = '_ga=GA1.2.1022228332; cookie-preferences=analytics:accepted;'
|
||||
@@ -82,8 +74,7 @@ def test_headers_in_request(client, live_server, measure_memory_usage):
|
||||
for k, watch in client.application.config.get('DATASTORE').data.get('watching').items():
|
||||
assert 'custom' in watch.get('remote_server_reply') # added in util.py
|
||||
|
||||
res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)
|
||||
assert b'Deleted' in res.data
|
||||
delete_all_watches(client)
|
||||
|
||||
def test_body_in_request(client, live_server, measure_memory_usage):
|
||||
|
||||
@@ -93,12 +84,8 @@ def test_body_in_request(client, live_server, measure_memory_usage):
|
||||
# Because its no longer calling back to localhost but from the browser container, set in test-only.yml
|
||||
test_url = test_url.replace('localhost', 'cdio')
|
||||
|
||||
res = client.post(
|
||||
url_for("imports.import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
uuid = client.application.config.get('DATASTORE').add_watch(url=test_url)
|
||||
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
|
||||
|
||||
wait_for_all_checks(client)
|
||||
|
||||
@@ -150,12 +137,8 @@ def test_body_in_request(client, live_server, measure_memory_usage):
|
||||
|
||||
####### data sanity checks
|
||||
# Add the test URL twice, we will check
|
||||
res = client.post(
|
||||
url_for("imports.import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
uuid = client.application.config.get('DATASTORE').add_watch(url=test_url)
|
||||
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
|
||||
wait_for_all_checks(client)
|
||||
watches_with_body = 0
|
||||
with open('test-datastore/url-watches.json') as f:
|
||||
@@ -180,8 +163,7 @@ def test_body_in_request(client, live_server, measure_memory_usage):
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"Body must be empty when Request Method is set to GET" in res.data
|
||||
res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)
|
||||
assert b'Deleted' in res.data
|
||||
delete_all_watches(client)
|
||||
|
||||
def test_method_in_request(client, live_server, measure_memory_usage):
|
||||
# Add our URL to the import page
|
||||
@@ -191,20 +173,12 @@ def test_method_in_request(client, live_server, measure_memory_usage):
|
||||
test_url = test_url.replace('localhost', 'cdio')
|
||||
|
||||
# Add the test URL twice, we will check
|
||||
res = client.post(
|
||||
url_for("imports.import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
uuid = client.application.config.get('DATASTORE').add_watch(url=test_url)
|
||||
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
|
||||
|
||||
wait_for_all_checks(client)
|
||||
res = client.post(
|
||||
url_for("imports.import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
uuid = client.application.config.get('DATASTORE').add_watch(url=test_url)
|
||||
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
|
||||
|
||||
wait_for_all_checks(client)
|
||||
|
||||
@@ -258,8 +232,7 @@ def test_method_in_request(client, live_server, measure_memory_usage):
|
||||
# Should be only one with method set to PATCH
|
||||
assert watches_with_method == 1
|
||||
|
||||
res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)
|
||||
assert b'Deleted' in res.data
|
||||
delete_all_watches(client)
|
||||
|
||||
# Re #2408 - user-agent override test, also should handle case-insensitive header deduplication
|
||||
def test_ua_global_override(client, live_server, measure_memory_usage):
|
||||
@@ -277,12 +250,8 @@ def test_ua_global_override(client, live_server, measure_memory_usage):
|
||||
)
|
||||
assert b'Settings updated' in res.data
|
||||
|
||||
res = client.post(
|
||||
url_for("imports.import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
uuid = client.application.config.get('DATASTORE').add_watch(url=test_url)
|
||||
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
|
||||
|
||||
wait_for_all_checks(client)
|
||||
res = client.get(
|
||||
@@ -315,8 +284,7 @@ def test_ua_global_override(client, live_server, measure_memory_usage):
|
||||
)
|
||||
assert b"agent-from-watch" in res.data
|
||||
assert b"html-requests-user-agent" not in res.data
|
||||
res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)
|
||||
assert b'Deleted' in res.data
|
||||
delete_all_watches(client)
|
||||
|
||||
def test_headers_textfile_in_request(client, live_server, measure_memory_usage):
|
||||
|
||||
@@ -356,12 +324,8 @@ def test_headers_textfile_in_request(client, live_server, measure_memory_usage):
|
||||
assert b"requests-default_ua-html_requests" in res.data
|
||||
|
||||
# Add the test URL twice, we will check
|
||||
res = client.post(
|
||||
url_for("imports.import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
uuid = client.application.config.get('DATASTORE').add_watch(url=test_url)
|
||||
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
|
||||
|
||||
wait_for_all_checks(client)
|
||||
|
||||
@@ -429,19 +393,14 @@ def test_headers_textfile_in_request(client, live_server, measure_memory_usage):
|
||||
assert "User-Agent:".encode('utf-8') + requests_ua.encode('utf-8') in res.data
|
||||
|
||||
# unlink headers.txt on start/stop
|
||||
res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)
|
||||
assert b'Deleted' in res.data
|
||||
delete_all_watches(client)
|
||||
|
||||
def test_headers_validation(client, live_server):
|
||||
def test_headers_validation(client, live_server, measure_memory_usage):
|
||||
|
||||
|
||||
test_url = url_for('test_headers', _external=True)
|
||||
res = client.post(
|
||||
url_for("imports.import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
uuid = client.application.config.get('DATASTORE').add_watch(url=test_url)
|
||||
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
|
||||
|
||||
res = client.post(
|
||||
url_for("ui.ui_edit.edit_page", uuid="first"),
|
||||
|
||||
@@ -3,7 +3,7 @@ import os
|
||||
import time
|
||||
|
||||
from flask import url_for
|
||||
from .util import live_server_setup, wait_for_all_checks, wait_for_notification_endpoint_output, extract_UUID_from_client
|
||||
from .util import live_server_setup, wait_for_all_checks, wait_for_notification_endpoint_output, extract_UUID_from_client, delete_all_watches
|
||||
from ..notification import default_notification_format
|
||||
|
||||
instock_props = [
|
||||
@@ -44,11 +44,11 @@ def set_original_response(props_markup='', price="121.95"):
|
||||
|
||||
|
||||
|
||||
# def test_setup(client, live_server):
|
||||
# def test_setup(client, live_server, measure_memory_usage):
|
||||
|
||||
# live_server_setup(live_server) # Setup on conftest per function
|
||||
|
||||
def test_restock_itemprop_basic(client, live_server):
|
||||
def test_restock_itemprop_basic(client, live_server, measure_memory_usage):
|
||||
|
||||
|
||||
|
||||
@@ -69,8 +69,7 @@ def test_restock_itemprop_basic(client, live_server):
|
||||
assert b'has-restock-info' in res.data
|
||||
assert b' in-stock' in res.data
|
||||
assert b' not-in-stock' not in res.data
|
||||
res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)
|
||||
assert b'Deleted' in res.data
|
||||
delete_all_watches(client)
|
||||
|
||||
|
||||
for p in out_of_stock_props:
|
||||
@@ -85,10 +84,9 @@ def test_restock_itemprop_basic(client, live_server):
|
||||
|
||||
assert b'has-restock-info not-in-stock' in res.data
|
||||
|
||||
res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)
|
||||
assert b'Deleted' in res.data
|
||||
delete_all_watches(client)
|
||||
|
||||
def test_itemprop_price_change(client, live_server):
|
||||
def test_itemprop_price_change(client, live_server, measure_memory_usage):
|
||||
|
||||
|
||||
# Out of the box 'Follow price changes' should be ON
|
||||
@@ -112,7 +110,7 @@ def test_itemprop_price_change(client, live_server):
|
||||
wait_for_all_checks(client)
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b'180.45' in res.data
|
||||
assert b'unviewed' in res.data
|
||||
assert b'has-unread-changes' in res.data
|
||||
client.get(url_for("ui.mark_all_viewed"), follow_redirects=True)
|
||||
time.sleep(0.2)
|
||||
|
||||
@@ -129,16 +127,14 @@ def test_itemprop_price_change(client, live_server):
|
||||
wait_for_all_checks(client)
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b'120.45' in res.data
|
||||
assert b'unviewed' not in res.data
|
||||
assert b'has-unread-changes' not in res.data
|
||||
|
||||
|
||||
res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)
|
||||
assert b'Deleted' in res.data
|
||||
delete_all_watches(client)
|
||||
|
||||
def _run_test_minmax_limit(client, extra_watch_edit_form):
|
||||
|
||||
res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)
|
||||
assert b'Deleted' in res.data
|
||||
delete_all_watches(client)
|
||||
|
||||
test_url = url_for('test_endpoint', _external=True)
|
||||
|
||||
@@ -178,7 +174,7 @@ def _run_test_minmax_limit(client, extra_watch_edit_form):
|
||||
assert b'more than one price detected' not in res.data
|
||||
# BUT the new price should show, even tho its within limits
|
||||
assert b'1,000.45' or b'1000.45' in res.data #depending on locale
|
||||
assert b'unviewed' not in res.data
|
||||
assert b'has-unread-changes' not in res.data
|
||||
|
||||
# price changed to something LESS than min (900), SHOULD be a change
|
||||
set_original_response(props_markup=instock_props[0], price='890.45')
|
||||
@@ -188,7 +184,7 @@ def _run_test_minmax_limit(client, extra_watch_edit_form):
|
||||
wait_for_all_checks(client)
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b'890.45' in res.data
|
||||
assert b'unviewed' in res.data
|
||||
assert b'has-unread-changes' in res.data
|
||||
|
||||
client.get(url_for("ui.mark_all_viewed"))
|
||||
|
||||
@@ -200,7 +196,7 @@ def _run_test_minmax_limit(client, extra_watch_edit_form):
|
||||
wait_for_all_checks(client)
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b'820.45' in res.data
|
||||
assert b'unviewed' in res.data
|
||||
assert b'has-unread-changes' in res.data
|
||||
client.get(url_for("ui.mark_all_viewed"))
|
||||
|
||||
# price changed to something MORE than max (1100.10), SHOULD be a change
|
||||
@@ -210,13 +206,12 @@ def _run_test_minmax_limit(client, extra_watch_edit_form):
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
# Depending on the LOCALE it may be either of these (generally for US/default/etc)
|
||||
assert b'1,890.45' in res.data or b'1890.45' in res.data
|
||||
assert b'unviewed' in res.data
|
||||
assert b'has-unread-changes' in res.data
|
||||
|
||||
res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)
|
||||
assert b'Deleted' in res.data
|
||||
delete_all_watches(client)
|
||||
|
||||
|
||||
def test_restock_itemprop_minmax(client, live_server):
|
||||
def test_restock_itemprop_minmax(client, live_server, measure_memory_usage):
|
||||
|
||||
extras = {
|
||||
"restock_settings-follow_price_changes": "y",
|
||||
@@ -225,7 +220,7 @@ def test_restock_itemprop_minmax(client, live_server):
|
||||
}
|
||||
_run_test_minmax_limit(client, extra_watch_edit_form=extras)
|
||||
|
||||
def test_restock_itemprop_with_tag(client, live_server):
|
||||
def test_restock_itemprop_with_tag(client, live_server, measure_memory_usage):
|
||||
|
||||
|
||||
res = client.post(
|
||||
@@ -254,11 +249,10 @@ def test_restock_itemprop_with_tag(client, live_server):
|
||||
|
||||
|
||||
|
||||
def test_itemprop_percent_threshold(client, live_server):
|
||||
def test_itemprop_percent_threshold(client, live_server, measure_memory_usage):
|
||||
|
||||
|
||||
res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)
|
||||
assert b'Deleted' in res.data
|
||||
delete_all_watches(client)
|
||||
|
||||
test_url = url_for('test_endpoint', _external=True)
|
||||
|
||||
@@ -294,7 +288,7 @@ def test_itemprop_percent_threshold(client, live_server):
|
||||
wait_for_all_checks(client)
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b'960.45' in res.data
|
||||
assert b'unviewed' not in res.data
|
||||
assert b'has-unread-changes' not in res.data
|
||||
|
||||
# Bigger INCREASE change than the threshold should trigger
|
||||
set_original_response(props_markup=instock_props[0], price='1960.45')
|
||||
@@ -302,7 +296,7 @@ def test_itemprop_percent_threshold(client, live_server):
|
||||
wait_for_all_checks(client)
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b'1,960.45' or b'1960.45' in res.data #depending on locale
|
||||
assert b'unviewed' in res.data
|
||||
assert b'has-unread-changes' in res.data
|
||||
|
||||
|
||||
# Small decrease should NOT trigger
|
||||
@@ -312,17 +306,16 @@ def test_itemprop_percent_threshold(client, live_server):
|
||||
wait_for_all_checks(client)
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b'1,950.45' or b'1950.45' in res.data #depending on locale
|
||||
assert b'unviewed' not in res.data
|
||||
assert b'has-unread-changes' not in res.data
|
||||
|
||||
|
||||
|
||||
|
||||
res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)
|
||||
assert b'Deleted' in res.data
|
||||
delete_all_watches(client)
|
||||
|
||||
|
||||
|
||||
def test_change_with_notification_values(client, live_server):
|
||||
def test_change_with_notification_values(client, live_server, measure_memory_usage):
|
||||
|
||||
|
||||
if os.path.isfile("test-datastore/notification.txt"):
|
||||
@@ -390,11 +383,10 @@ def test_change_with_notification_values(client, live_server):
|
||||
assert os.path.isfile("test-datastore/notification.txt"), "Notification received"
|
||||
|
||||
|
||||
def test_data_sanity(client, live_server):
|
||||
def test_data_sanity(client, live_server, measure_memory_usage):
|
||||
|
||||
|
||||
res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)
|
||||
assert b'Deleted' in res.data
|
||||
delete_all_watches(client)
|
||||
|
||||
test_url = url_for('test_endpoint', _external=True)
|
||||
test_url2 = url_for('test_endpoint2', _external=True)
|
||||
@@ -421,8 +413,7 @@ def test_data_sanity(client, live_server):
|
||||
assert str(res.data.decode()).count("950.95") == 1, "Price should only show once (for the watch added, no other watches yet)"
|
||||
|
||||
## different test, check the edit page works on an empty request result
|
||||
res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)
|
||||
assert b'Deleted' in res.data
|
||||
delete_all_watches(client)
|
||||
|
||||
client.post(
|
||||
url_for("ui.ui_views.form_quick_watch_add"),
|
||||
@@ -435,11 +426,10 @@ def test_data_sanity(client, live_server):
|
||||
url_for("ui.ui_edit.edit_page", uuid="first"))
|
||||
assert test_url2.encode('utf-8') in res.data
|
||||
|
||||
res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)
|
||||
assert b'Deleted' in res.data
|
||||
delete_all_watches(client)
|
||||
|
||||
# All examples should give a prive of 666.66
|
||||
def test_special_prop_examples(client, live_server):
|
||||
def test_special_prop_examples(client, live_server, measure_memory_usage):
|
||||
import glob
|
||||
|
||||
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
import time
|
||||
from flask import url_for
|
||||
from .util import set_original_response, set_modified_response, live_server_setup, wait_for_all_checks, extract_rss_token_from_UI, \
|
||||
extract_UUID_from_client
|
||||
extract_UUID_from_client, delete_all_watches
|
||||
|
||||
|
||||
def set_original_cdata_xml():
|
||||
@@ -111,16 +111,11 @@ def test_basic_cdata_rss_markup(client, live_server, measure_memory_usage):
|
||||
|
||||
set_original_cdata_xml()
|
||||
|
||||
test_url = url_for('test_endpoint', content_type="application/xml", _external=True)
|
||||
test_url = url_for('test_endpoint', content_type="application/atom+xml; charset=UTF-8", _external=True)
|
||||
|
||||
# Add our URL to the import page
|
||||
res = client.post(
|
||||
url_for("imports.import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
assert b"1 Imported" in res.data
|
||||
uuid = client.application.config.get('DATASTORE').add_watch(url=test_url)
|
||||
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
|
||||
|
||||
wait_for_all_checks(client)
|
||||
|
||||
@@ -132,14 +127,14 @@ def test_basic_cdata_rss_markup(client, live_server, measure_memory_usage):
|
||||
assert b'<![' not in res.data
|
||||
assert b'Hackers can access your computer' in res.data
|
||||
assert b'The days of Terminator' in res.data
|
||||
res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)
|
||||
delete_all_watches(client)
|
||||
|
||||
def test_rss_xpath_filtering(client, live_server, measure_memory_usage):
|
||||
|
||||
|
||||
set_original_cdata_xml()
|
||||
|
||||
test_url = url_for('test_endpoint', content_type="application/xml", _external=True)
|
||||
test_url = url_for('test_endpoint', content_type="application/atom+xml; charset=UTF-8", _external=True)
|
||||
|
||||
res = client.post(
|
||||
url_for("ui.ui_views.form_quick_watch_add"),
|
||||
@@ -180,10 +175,10 @@ def test_rss_xpath_filtering(client, live_server, measure_memory_usage):
|
||||
assert b'The days of Terminator' not in res.data # Should NOT be selected by the xpath
|
||||
assert b'Some other description' not in res.data # Should NOT be selected by the xpath
|
||||
|
||||
res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)
|
||||
delete_all_watches(client)
|
||||
|
||||
|
||||
def test_rss_bad_chars_breaking(client, live_server):
|
||||
def test_rss_bad_chars_breaking(client, live_server, measure_memory_usage):
|
||||
"""This should absolutely trigger the RSS builder to go into worst state mode
|
||||
|
||||
- source: prefix means no html conversion (which kinda filters out the bad stuff)
|
||||
|
||||
@@ -5,11 +5,11 @@ from copy import copy
|
||||
from datetime import datetime, timezone
|
||||
from zoneinfo import ZoneInfo
|
||||
from flask import url_for
|
||||
from .util import live_server_setup, wait_for_all_checks, extract_UUID_from_client
|
||||
from .util import live_server_setup, wait_for_all_checks, extract_UUID_from_client, delete_all_watches
|
||||
from ..forms import REQUIRE_ATLEAST_ONE_TIME_PART_MESSAGE_DEFAULT, REQUIRE_ATLEAST_ONE_TIME_PART_WHEN_NOT_GLOBAL_DEFAULT
|
||||
|
||||
|
||||
# def test_setup(client, live_server):
|
||||
# def test_setup(client, live_server, measure_memory_usage):
|
||||
# live_server_setup(live_server) # Setup on conftest per function
|
||||
|
||||
def test_check_basic_scheduler_functionality(client, live_server, measure_memory_usage):
|
||||
@@ -34,13 +34,8 @@ def test_check_basic_scheduler_functionality(client, live_server, measure_memory
|
||||
res = client.get(url_for("settings.settings_page"))
|
||||
assert b'Pacific/Kiritimati' in res.data
|
||||
|
||||
res = client.post(
|
||||
url_for("imports.import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
assert b"1 Imported" in res.data
|
||||
uuid = client.application.config.get('DATASTORE').add_watch(url=test_url)
|
||||
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
|
||||
wait_for_all_checks(client)
|
||||
uuid = next(iter(live_server.app.config['DATASTORE'].data['watching']))
|
||||
|
||||
@@ -92,8 +87,7 @@ def test_check_basic_scheduler_functionality(client, live_server, measure_memory
|
||||
assert live_server.app.config['DATASTORE'].data['watching'][uuid]['last_checked'] != last_check
|
||||
|
||||
# Cleanup everything
|
||||
res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)
|
||||
assert b'Deleted' in res.data
|
||||
delete_all_watches(client)
|
||||
|
||||
|
||||
def test_check_basic_global_scheduler_functionality(client, live_server, measure_memory_usage):
|
||||
@@ -101,13 +95,8 @@ def test_check_basic_global_scheduler_functionality(client, live_server, measure
|
||||
days = ['monday', 'tuesday', 'wednesday', 'thursday', 'friday', 'saturday', 'sunday']
|
||||
test_url = url_for('test_random_content_endpoint', _external=True)
|
||||
|
||||
res = client.post(
|
||||
url_for("imports.import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
assert b"1 Imported" in res.data
|
||||
uuid = client.application.config.get('DATASTORE').add_watch(url=test_url)
|
||||
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
|
||||
wait_for_all_checks(client)
|
||||
uuid = next(iter(live_server.app.config['DATASTORE'].data['watching']))
|
||||
|
||||
@@ -180,18 +169,13 @@ def test_check_basic_global_scheduler_functionality(client, live_server, measure
|
||||
assert live_server.app.config['DATASTORE'].data['watching'][uuid]['last_checked'] != last_check
|
||||
|
||||
# Cleanup everything
|
||||
res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)
|
||||
assert b'Deleted' in res.data
|
||||
delete_all_watches(client)
|
||||
|
||||
|
||||
def test_validation_time_interval_field(client, live_server, measure_memory_usage):
|
||||
test_url = url_for('test_endpoint', _external=True)
|
||||
res = client.post(
|
||||
url_for("imports.import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
uuid = client.application.config.get('DATASTORE').add_watch(url=test_url)
|
||||
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
|
||||
|
||||
|
||||
res = client.post(
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import os
|
||||
|
||||
from flask import url_for
|
||||
from .util import live_server_setup, wait_for_all_checks
|
||||
from .util import live_server_setup, wait_for_all_checks, delete_all_watches
|
||||
from .. import strtobool
|
||||
|
||||
|
||||
@@ -100,8 +100,7 @@ def _runner_test_various_file_slash(client, file_uri):
|
||||
# This will give some error from requests or if it went to chrome, will give some other error :-)
|
||||
assert any(s in res.data for s in substrings)
|
||||
|
||||
res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)
|
||||
assert b'Deleted' in res.data
|
||||
delete_all_watches(client)
|
||||
|
||||
def test_file_slash_access(client, live_server, measure_memory_usage):
|
||||
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
import time
|
||||
from flask import url_for
|
||||
from urllib.request import urlopen
|
||||
from .util import set_original_response, set_modified_response, live_server_setup
|
||||
from .util import set_original_response, set_modified_response, live_server_setup, delete_all_watches
|
||||
import re
|
||||
|
||||
sleep_time_for_fetch_thread = 3
|
||||
@@ -17,13 +17,8 @@ def test_share_watch(client, live_server, measure_memory_usage):
|
||||
include_filters = ".nice-filter"
|
||||
|
||||
# Add our URL to the import page
|
||||
res = client.post(
|
||||
url_for("imports.import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
assert b"1 Imported" in res.data
|
||||
uuid = client.application.config.get('DATASTORE').add_watch(url=test_url)
|
||||
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
|
||||
|
||||
# Goto the edit page, add our ignore text
|
||||
# Add our URL to the import page
|
||||
@@ -54,8 +49,7 @@ def test_share_watch(client, live_server, measure_memory_usage):
|
||||
|
||||
# Now delete what we have, we will try to re-import it
|
||||
# Cleanup everything
|
||||
res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)
|
||||
assert b'Deleted' in res.data
|
||||
delete_all_watches(client)
|
||||
|
||||
# Add our URL to the import page
|
||||
res = client.post(
|
||||
|
||||
@@ -13,13 +13,8 @@ def test_check_basic_change_detection_functionality_source(client, live_server,
|
||||
set_original_response()
|
||||
test_url = 'source:'+url_for('test_endpoint', _external=True)
|
||||
# Add our URL to the import page
|
||||
res = client.post(
|
||||
url_for("imports.import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
assert b"1 Imported" in res.data
|
||||
uuid = client.application.config.get('DATASTORE').add_watch(url=test_url)
|
||||
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
|
||||
|
||||
time.sleep(sleep_time_for_fetch_thread)
|
||||
|
||||
@@ -43,9 +38,9 @@ def test_check_basic_change_detection_functionality_source(client, live_server,
|
||||
|
||||
wait_for_all_checks(client)
|
||||
|
||||
# Now something should be ready, indicated by having a 'unviewed' class
|
||||
# Now something should be ready, indicated by having a 'has-unread-changes' class
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b'unviewed' in res.data
|
||||
assert b'has-unread-changes' in res.data
|
||||
|
||||
res = client.get(
|
||||
url_for("ui.ui_views.diff_history_page", uuid="first"),
|
||||
@@ -62,13 +57,8 @@ def test_check_ignore_elements(client, live_server, measure_memory_usage):
|
||||
time.sleep(1)
|
||||
test_url = 'source:'+url_for('test_endpoint', _external=True)
|
||||
# Add our URL to the import page
|
||||
res = client.post(
|
||||
url_for("imports.import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
assert b"1 Imported" in res.data
|
||||
uuid = client.application.config.get('DATASTORE').add_watch(url=test_url)
|
||||
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
|
||||
|
||||
wait_for_all_checks(client)
|
||||
|
||||
|
||||
@@ -65,12 +65,8 @@ def test_trigger_functionality(client, live_server, measure_memory_usage):
|
||||
|
||||
# Add our URL to the import page
|
||||
test_url = url_for('test_endpoint', _external=True)
|
||||
res = client.post(
|
||||
url_for("imports.import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
uuid = client.application.config.get('DATASTORE').add_watch(url=test_url)
|
||||
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
|
||||
|
||||
# Trigger a check
|
||||
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
|
||||
@@ -96,7 +92,7 @@ def test_trigger_functionality(client, live_server, measure_memory_usage):
|
||||
|
||||
|
||||
|
||||
# so that we set the state to 'unviewed' after all the edits
|
||||
# so that we set the state to 'has-unread-changes' after all the edits
|
||||
client.get(url_for("ui.ui_views.diff_history_page", uuid="first"))
|
||||
|
||||
# Trigger a check
|
||||
@@ -104,9 +100,9 @@ def test_trigger_functionality(client, live_server, measure_memory_usage):
|
||||
|
||||
wait_for_all_checks(client)
|
||||
|
||||
# It should report nothing found (no new 'unviewed' class)
|
||||
# It should report nothing found (no new 'has-unread-changes' class)
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b'unviewed' not in res.data
|
||||
assert b'has-unread-changes' not in res.data
|
||||
assert b'/test-endpoint' in res.data
|
||||
|
||||
# Make a change
|
||||
@@ -116,9 +112,9 @@ def test_trigger_functionality(client, live_server, measure_memory_usage):
|
||||
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
|
||||
wait_for_all_checks(client)
|
||||
|
||||
# It should report nothing found (no new 'unviewed' class)
|
||||
# It should report nothing found (no new 'has-unread-changes' class)
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b'unviewed' not in res.data
|
||||
assert b'has-unread-changes' not in res.data
|
||||
|
||||
# Now set the content which contains the trigger text
|
||||
set_modified_with_trigger_text_response()
|
||||
@@ -126,7 +122,7 @@ def test_trigger_functionality(client, live_server, measure_memory_usage):
|
||||
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
|
||||
wait_for_all_checks(client)
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b'unviewed' in res.data
|
||||
assert b'has-unread-changes' in res.data
|
||||
|
||||
# https://github.com/dgtlmoon/changedetection.io/issues/616
|
||||
# Apparently the actual snapshot that contains the trigger never shows
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
|
||||
import time
|
||||
from flask import url_for
|
||||
from .util import live_server_setup, wait_for_all_checks
|
||||
from .util import live_server_setup, wait_for_all_checks, delete_all_watches
|
||||
|
||||
|
||||
def set_original_ignore_response():
|
||||
@@ -30,19 +30,15 @@ def test_trigger_regex_functionality(client, live_server, measure_memory_usage):
|
||||
|
||||
# Add our URL to the import page
|
||||
test_url = url_for('test_endpoint', _external=True)
|
||||
res = client.post(
|
||||
url_for("imports.import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
uuid = client.application.config.get('DATASTORE').add_watch(url=test_url)
|
||||
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
|
||||
|
||||
# Give the thread time to pick it up
|
||||
wait_for_all_checks(client)
|
||||
|
||||
# It should report nothing found (just a new one shouldnt have anything)
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b'unviewed' not in res.data
|
||||
assert b'has-unread-changes' not in res.data
|
||||
|
||||
### test regex
|
||||
res = client.post(
|
||||
@@ -54,7 +50,7 @@ def test_trigger_regex_functionality(client, live_server, measure_memory_usage):
|
||||
follow_redirects=True
|
||||
)
|
||||
wait_for_all_checks(client)
|
||||
# so that we set the state to 'unviewed' after all the edits
|
||||
# so that we set the state to 'has-unread-changes' after all the edits
|
||||
client.get(url_for("ui.ui_views.diff_history_page", uuid="first"))
|
||||
|
||||
with open("test-datastore/endpoint-content.txt", "w") as f:
|
||||
@@ -65,7 +61,7 @@ def test_trigger_regex_functionality(client, live_server, measure_memory_usage):
|
||||
|
||||
# It should report nothing found (nothing should match the regex)
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b'unviewed' not in res.data
|
||||
assert b'has-unread-changes' not in res.data
|
||||
|
||||
with open("test-datastore/endpoint-content.txt", "w") as f:
|
||||
f.write("regex test123<br>\nsomething 123")
|
||||
@@ -73,8 +69,7 @@ def test_trigger_regex_functionality(client, live_server, measure_memory_usage):
|
||||
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
|
||||
wait_for_all_checks(client)
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b'unviewed' in res.data
|
||||
assert b'has-unread-changes' in res.data
|
||||
|
||||
# Cleanup everything
|
||||
res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)
|
||||
assert b'Deleted' in res.data
|
||||
delete_all_watches(client)
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
|
||||
import time
|
||||
from flask import url_for
|
||||
from . util import live_server_setup
|
||||
from . util import live_server_setup, delete_all_watches
|
||||
|
||||
|
||||
def set_original_ignore_response():
|
||||
@@ -34,12 +34,8 @@ def test_trigger_regex_functionality_with_filter(client, live_server, measure_me
|
||||
|
||||
# Add our URL to the import page
|
||||
test_url = url_for('test_endpoint', _external=True)
|
||||
res = client.post(
|
||||
url_for("imports.import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
uuid = client.application.config.get('DATASTORE').add_watch(url=test_url)
|
||||
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
|
||||
|
||||
# it needs time to save the original version
|
||||
time.sleep(sleep_time_for_fetch_thread)
|
||||
@@ -69,7 +65,7 @@ def test_trigger_regex_functionality_with_filter(client, live_server, measure_me
|
||||
|
||||
# It should report nothing found (nothing should match the regex and filter)
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b'unviewed' not in res.data
|
||||
assert b'has-unread-changes' not in res.data
|
||||
|
||||
# now this should trigger something
|
||||
with open("test-datastore/endpoint-content.txt", "w") as f:
|
||||
@@ -78,8 +74,7 @@ def test_trigger_regex_functionality_with_filter(client, live_server, measure_me
|
||||
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
|
||||
time.sleep(sleep_time_for_fetch_thread)
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b'unviewed' in res.data
|
||||
assert b'has-unread-changes' in res.data
|
||||
|
||||
# Cleanup everything
|
||||
res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)
|
||||
assert b'Deleted' in res.data
|
||||
delete_all_watches(client)
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
from flask import url_for
|
||||
from .util import set_original_response, set_modified_response, live_server_setup, wait_for_all_checks
|
||||
from .util import set_original_response, set_modified_response, live_server_setup, wait_for_all_checks, delete_all_watches
|
||||
from ..forms import REQUIRE_ATLEAST_ONE_TIME_PART_WHEN_NOT_GLOBAL_DEFAULT, REQUIRE_ATLEAST_ONE_TIME_PART_MESSAGE_DEFAULT
|
||||
|
||||
|
||||
def test_recheck_time_field_validation_global_settings(client, live_server):
|
||||
def test_recheck_time_field_validation_global_settings(client, live_server, measure_memory_usage):
|
||||
"""
|
||||
Tests that the global settings time field has atleast one value for week/day/hours/minute/seconds etc entered
|
||||
class globalSettingsRequestForm(Form):
|
||||
@@ -27,7 +27,7 @@ def test_recheck_time_field_validation_global_settings(client, live_server):
|
||||
assert REQUIRE_ATLEAST_ONE_TIME_PART_MESSAGE_DEFAULT.encode('utf-8') in res.data
|
||||
|
||||
|
||||
def test_recheck_time_field_validation_single_watch(client, live_server):
|
||||
def test_recheck_time_field_validation_single_watch(client, live_server, measure_memory_usage):
|
||||
"""
|
||||
Tests that the global settings time field has atleast one value for week/day/hours/minute/seconds etc entered
|
||||
class globalSettingsRequestForm(Form):
|
||||
@@ -36,13 +36,8 @@ def test_recheck_time_field_validation_single_watch(client, live_server):
|
||||
test_url = url_for('test_endpoint', _external=True)
|
||||
|
||||
# Add our URL to the import page
|
||||
res = client.post(
|
||||
url_for("imports.import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
assert b"1 Imported" in res.data
|
||||
uuid = client.application.config.get('DATASTORE').add_watch(url=test_url)
|
||||
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
|
||||
|
||||
res = client.post(
|
||||
url_for("ui.ui_edit.edit_page", uuid="first"),
|
||||
@@ -100,7 +95,7 @@ def test_recheck_time_field_validation_single_watch(client, live_server):
|
||||
assert b"Updated watch." in res.data
|
||||
assert REQUIRE_ATLEAST_ONE_TIME_PART_WHEN_NOT_GLOBAL_DEFAULT.encode('utf-8') not in res.data
|
||||
|
||||
def test_checkbox_open_diff_in_new_tab(client, live_server):
|
||||
def test_checkbox_open_diff_in_new_tab(client, live_server, measure_memory_usage):
|
||||
|
||||
set_original_response()
|
||||
# Add our URL to the import page
|
||||
@@ -171,10 +166,9 @@ def test_checkbox_open_diff_in_new_tab(client, live_server):
|
||||
assert 'target=' not in target_line
|
||||
|
||||
# Cleanup everything
|
||||
res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)
|
||||
assert b'Deleted' in res.data
|
||||
delete_all_watches(client)
|
||||
|
||||
def test_page_title_listing_behaviour(client, live_server):
|
||||
def test_page_title_listing_behaviour(client, live_server, measure_memory_usage):
|
||||
|
||||
set_original_response(extra_title="custom html")
|
||||
|
||||
@@ -248,3 +242,44 @@ def test_page_title_listing_behaviour(client, live_server):
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b"head titlecustom html" in res.data
|
||||
|
||||
|
||||
def test_ui_viewed_unread_flag(client, live_server, measure_memory_usage):
|
||||
|
||||
import time
|
||||
|
||||
set_original_response(extra_title="custom html")
|
||||
|
||||
# Add our URL to the import page
|
||||
res = client.post(
|
||||
url_for("imports.import_page"),
|
||||
data={"urls": url_for('test_endpoint', _external=True)+"\r\n"+url_for('test_endpoint', _external=True)},
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
assert b"2 Imported" in res.data
|
||||
wait_for_all_checks(client)
|
||||
|
||||
set_modified_response()
|
||||
res = client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
|
||||
assert b'Queued 2 watches for rechecking.' in res.data
|
||||
wait_for_all_checks(client)
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b'<span id="unread-tab-counter">2</span>' in res.data
|
||||
assert res.data.count(b'data-watch-uuid') == 2
|
||||
|
||||
# one should now be viewed, but two in total still
|
||||
client.get(url_for("ui.ui_views.diff_history_page", uuid="first"))
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b'<span id="unread-tab-counter">1</span>' in res.data
|
||||
assert res.data.count(b'data-watch-uuid') == 2
|
||||
|
||||
# check ?unread=1 works
|
||||
res = client.get(url_for("watchlist.index")+"?unread=1")
|
||||
assert res.data.count(b'data-watch-uuid') == 1
|
||||
assert b'<span id="unread-tab-counter">1</span>' in res.data
|
||||
|
||||
# Mark all viewed test again
|
||||
client.get(url_for("ui.mark_all_viewed"), follow_redirects=True)
|
||||
time.sleep(0.2)
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b'<span id="unread-tab-counter">0</span>' in res.data
|
||||
@@ -2,7 +2,7 @@
|
||||
|
||||
import time
|
||||
from flask import url_for
|
||||
from .util import live_server_setup, wait_for_all_checks
|
||||
from .util import live_server_setup, wait_for_all_checks, delete_all_watches
|
||||
|
||||
|
||||
def set_original_ignore_response():
|
||||
@@ -79,12 +79,8 @@ def test_unique_lines_functionality(client, live_server, measure_memory_usage):
|
||||
|
||||
# Add our URL to the import page
|
||||
test_url = url_for('test_endpoint', _external=True)
|
||||
res = client.post(
|
||||
url_for("imports.import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
uuid = client.application.config.get('DATASTORE').add_watch(url=test_url)
|
||||
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
|
||||
wait_for_all_checks(client)
|
||||
|
||||
# Add our URL to the import page
|
||||
@@ -97,7 +93,7 @@ def test_unique_lines_functionality(client, live_server, measure_memory_usage):
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"Updated watch." in res.data
|
||||
assert b'unviewed' not in res.data
|
||||
assert b'has-unread-changes' not in res.data
|
||||
|
||||
# Make a change
|
||||
set_modified_swapped_lines()
|
||||
@@ -108,18 +104,17 @@ def test_unique_lines_functionality(client, live_server, measure_memory_usage):
|
||||
# Give the thread time to pick it up
|
||||
wait_for_all_checks(client)
|
||||
|
||||
# It should report nothing found (no new 'unviewed' class)
|
||||
# It should report nothing found (no new 'has-unread-changes' class)
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b'unviewed' not in res.data
|
||||
assert b'has-unread-changes' not in res.data
|
||||
|
||||
# Now set the content which contains the new text and re-ordered existing text
|
||||
set_modified_with_trigger_text_response()
|
||||
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
|
||||
wait_for_all_checks(client)
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b'unviewed' in res.data
|
||||
res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)
|
||||
assert b'Deleted' in res.data
|
||||
assert b'has-unread-changes' in res.data
|
||||
delete_all_watches(client)
|
||||
|
||||
def test_sort_lines_functionality(client, live_server, measure_memory_usage):
|
||||
|
||||
@@ -128,12 +123,8 @@ def test_sort_lines_functionality(client, live_server, measure_memory_usage):
|
||||
|
||||
# Add our URL to the import page
|
||||
test_url = url_for('test_endpoint', _external=True)
|
||||
res = client.post(
|
||||
url_for("imports.import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
uuid = client.application.config.get('DATASTORE').add_watch(url=test_url)
|
||||
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
|
||||
wait_for_all_checks(client)
|
||||
|
||||
# Add our URL to the import page
|
||||
@@ -157,7 +148,7 @@ def test_sort_lines_functionality(client, live_server, measure_memory_usage):
|
||||
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
# Should be a change registered
|
||||
assert b'unviewed' in res.data
|
||||
assert b'has-unread-changes' in res.data
|
||||
|
||||
res = client.get(
|
||||
url_for("ui.ui_views.preview_page", uuid="first"),
|
||||
@@ -168,8 +159,7 @@ def test_sort_lines_functionality(client, live_server, measure_memory_usage):
|
||||
assert res.data.find(b'A uppercase') < res.data.find(b'Z last')
|
||||
assert res.data.find(b'Some initial text') < res.data.find(b'Which is across multiple lines')
|
||||
|
||||
res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)
|
||||
assert b'Deleted' in res.data
|
||||
delete_all_watches(client)
|
||||
|
||||
|
||||
def test_extra_filters(client, live_server, measure_memory_usage):
|
||||
@@ -179,12 +169,8 @@ def test_extra_filters(client, live_server, measure_memory_usage):
|
||||
|
||||
# Add our URL to the import page
|
||||
test_url = url_for('test_endpoint', _external=True)
|
||||
res = client.post(
|
||||
url_for("imports.import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
uuid = client.application.config.get('DATASTORE').add_watch(url=test_url)
|
||||
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
|
||||
wait_for_all_checks(client)
|
||||
|
||||
# Add our URL to the import page
|
||||
@@ -216,5 +202,4 @@ def test_extra_filters(client, live_server, measure_memory_usage):
|
||||
# still should remain unsorted ('A - sortable line') stays at the end
|
||||
assert res.data.find(b'A - sortable line') > res.data.find(b'Which is across multiple lines')
|
||||
|
||||
res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)
|
||||
assert b'Deleted' in res.data
|
||||
delete_all_watches(client)
|
||||
@@ -10,12 +10,8 @@ def test_check_watch_field_storage(client, live_server, measure_memory_usage):
|
||||
|
||||
test_url = "http://somerandomsitewewatch.com"
|
||||
|
||||
res = client.post(
|
||||
url_for("imports.import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
uuid = client.application.config.get('DATASTORE').add_watch(url=test_url)
|
||||
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
|
||||
|
||||
|
||||
res = client.post(
|
||||
|
||||
@@ -1,12 +1,42 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
import time
|
||||
|
||||
from flask import url_for
|
||||
from .util import live_server_setup, wait_for_all_checks
|
||||
|
||||
from ..html_tools import *
|
||||
from .util import wait_for_all_checks, delete_all_watches
|
||||
from ..processors.magic import RSS_XML_CONTENT_TYPES
|
||||
|
||||
|
||||
def set_rss_atom_feed_response(header=''):
|
||||
test_return_data = f"""{header}<!-- Generated on Wed, 08 Oct 2025 08:42:33 -0700, really really honestly -->
|
||||
<rss xmlns:atom="http://www.w3.org/2005/Atom" version="2.0">
|
||||
<channel>
|
||||
<atom:link href="https://store.waterpowered.com/news/collection//" rel="self" type="application/rss+xml"/>
|
||||
<title>RSS Feed</title>
|
||||
<link>
|
||||
<![CDATA[ https://store.waterpowered.com/news/collection// ]]>
|
||||
</link>
|
||||
<description>
|
||||
<![CDATA[ Events and Announcements for ]]>
|
||||
</description>
|
||||
<language>en-us</language>
|
||||
<generator>water News RSS</generator>
|
||||
<item>
|
||||
<title> 🍁 Lets go discount</title>
|
||||
<description><p class="bb_paragraph">ok heres the description</p></description>
|
||||
<link>
|
||||
<![CDATA[ https://store.waterpowered.com/news/app/1643320/view/511845698831908921 ]]>
|
||||
</link>
|
||||
<pubDate>Wed, 08 Oct 2025 15:28:55 +0000</pubDate>
|
||||
<guid isPermaLink="true">https://store.waterpowered.com/news/app/1643320/view/511845698831908921</guid>
|
||||
<enclosure url="https://clan.fastly.waterstatic.com/images/40721482/42822e5f00b2becf520ace9500981bb56f3a89f2.jpg" length="0" type="image/jpeg"/>
|
||||
</item>
|
||||
</channel>
|
||||
</rss>"""
|
||||
|
||||
with open("test-datastore/endpoint-content.txt", "w") as f:
|
||||
f.write(test_return_data)
|
||||
|
||||
return None
|
||||
|
||||
|
||||
|
||||
@@ -83,12 +113,8 @@ def test_check_xpath_filter_utf8(client, live_server, measure_memory_usage):
|
||||
|
||||
# Add our URL to the import page
|
||||
test_url = url_for('test_endpoint', _external=True, content_type="application/rss+xml;charset=UTF-8")
|
||||
res = client.post(
|
||||
url_for("imports.import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
uuid = client.application.config.get('DATASTORE').add_watch(url=test_url)
|
||||
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
|
||||
wait_for_all_checks(client)
|
||||
res = client.post(
|
||||
url_for("ui.ui_edit.edit_page", uuid="first"),
|
||||
@@ -99,8 +125,7 @@ def test_check_xpath_filter_utf8(client, live_server, measure_memory_usage):
|
||||
wait_for_all_checks(client)
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b'Unicode strings with encoding declaration are not supported.' not in res.data
|
||||
res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)
|
||||
assert b'Deleted' in res.data
|
||||
delete_all_watches(client)
|
||||
|
||||
|
||||
# Handle utf-8 charset replies https://github.com/dgtlmoon/changedetection.io/pull/613
|
||||
@@ -137,12 +162,8 @@ def test_check_xpath_text_function_utf8(client, live_server, measure_memory_usag
|
||||
|
||||
# Add our URL to the import page
|
||||
test_url = url_for('test_endpoint', _external=True, content_type="application/rss+xml;charset=UTF-8")
|
||||
res = client.post(
|
||||
url_for("imports.import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
uuid = client.application.config.get('DATASTORE').add_watch(url=test_url)
|
||||
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
|
||||
wait_for_all_checks(client)
|
||||
res = client.post(
|
||||
url_for("ui.ui_edit.edit_page", uuid="first"),
|
||||
@@ -163,8 +184,7 @@ def test_check_xpath_text_function_utf8(client, live_server, measure_memory_usag
|
||||
assert b'Stock Alert (UK): RPi CM4' in res.data
|
||||
assert b'Stock Alert (UK): Big monitor' in res.data
|
||||
|
||||
res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)
|
||||
assert b'Deleted' in res.data
|
||||
delete_all_watches(client)
|
||||
|
||||
|
||||
def test_check_markup_xpath_filter_restriction(client, live_server, measure_memory_usage):
|
||||
@@ -174,12 +194,8 @@ def test_check_markup_xpath_filter_restriction(client, live_server, measure_memo
|
||||
|
||||
# Add our URL to the import page
|
||||
test_url = url_for('test_endpoint', _external=True)
|
||||
res = client.post(
|
||||
url_for("imports.import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
uuid = client.application.config.get('DATASTORE').add_watch(url=test_url)
|
||||
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
|
||||
|
||||
# Give the thread time to pick it up
|
||||
wait_for_all_checks(client)
|
||||
@@ -208,20 +224,15 @@ def test_check_markup_xpath_filter_restriction(client, live_server, measure_memo
|
||||
wait_for_all_checks(client)
|
||||
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b'unviewed' not in res.data
|
||||
res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)
|
||||
assert b'Deleted' in res.data
|
||||
assert b'has-unread-changes' not in res.data
|
||||
delete_all_watches(client)
|
||||
|
||||
|
||||
def test_xpath_validation(client, live_server, measure_memory_usage):
|
||||
# Add our URL to the import page
|
||||
test_url = url_for('test_endpoint', _external=True)
|
||||
res = client.post(
|
||||
url_for("imports.import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
uuid = client.application.config.get('DATASTORE').add_watch(url=test_url)
|
||||
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
|
||||
wait_for_all_checks(client)
|
||||
|
||||
res = client.post(
|
||||
@@ -230,19 +241,14 @@ def test_xpath_validation(client, live_server, measure_memory_usage):
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"is not a valid XPath expression" in res.data
|
||||
res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)
|
||||
assert b'Deleted' in res.data
|
||||
delete_all_watches(client)
|
||||
|
||||
|
||||
def test_xpath23_prefix_validation(client, live_server, measure_memory_usage):
|
||||
# Add our URL to the import page
|
||||
test_url = url_for('test_endpoint', _external=True)
|
||||
res = client.post(
|
||||
url_for("imports.import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
uuid = client.application.config.get('DATASTORE').add_watch(url=test_url)
|
||||
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
|
||||
wait_for_all_checks(client)
|
||||
|
||||
res = client.post(
|
||||
@@ -251,8 +257,7 @@ def test_xpath23_prefix_validation(client, live_server, measure_memory_usage):
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"is not a valid XPath expression" in res.data
|
||||
res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)
|
||||
assert b'Deleted' in res.data
|
||||
delete_all_watches(client)
|
||||
|
||||
def test_xpath1_lxml(client, live_server, measure_memory_usage):
|
||||
|
||||
@@ -287,12 +292,8 @@ def test_xpath1_lxml(client, live_server, measure_memory_usage):
|
||||
|
||||
|
||||
test_url = url_for('test_endpoint', _external=True)
|
||||
res = client.post(
|
||||
url_for("imports.import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
uuid = client.application.config.get('DATASTORE').add_watch(url=test_url)
|
||||
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
|
||||
wait_for_all_checks(client)
|
||||
|
||||
res = client.post(
|
||||
@@ -321,12 +322,8 @@ def test_xpath1_lxml(client, live_server, measure_memory_usage):
|
||||
def test_xpath1_validation(client, live_server, measure_memory_usage):
|
||||
# Add our URL to the import page
|
||||
test_url = url_for('test_endpoint', _external=True)
|
||||
res = client.post(
|
||||
url_for("imports.import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
uuid = client.application.config.get('DATASTORE').add_watch(url=test_url)
|
||||
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
|
||||
wait_for_all_checks(client)
|
||||
|
||||
res = client.post(
|
||||
@@ -335,25 +332,19 @@ def test_xpath1_validation(client, live_server, measure_memory_usage):
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"is not a valid XPath expression" in res.data
|
||||
res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)
|
||||
assert b'Deleted' in res.data
|
||||
delete_all_watches(client)
|
||||
|
||||
|
||||
# actually only really used by the distll.io importer, but could be handy too
|
||||
def test_check_with_prefix_include_filters(client, live_server, measure_memory_usage):
|
||||
res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)
|
||||
assert b'Deleted' in res.data
|
||||
delete_all_watches(client)
|
||||
|
||||
set_original_response()
|
||||
wait_for_all_checks(client)
|
||||
# Add our URL to the import page
|
||||
test_url = url_for('test_endpoint', _external=True)
|
||||
res = client.post(
|
||||
url_for("imports.import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
uuid = client.application.config.get('DATASTORE').add_watch(url=test_url)
|
||||
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
|
||||
wait_for_all_checks(client)
|
||||
|
||||
res = client.post(
|
||||
@@ -398,12 +389,8 @@ def test_various_rules(client, live_server, measure_memory_usage):
|
||||
""")
|
||||
|
||||
test_url = url_for('test_endpoint', _external=True)
|
||||
res = client.post(
|
||||
url_for("imports.import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
uuid = client.application.config.get('DATASTORE').add_watch(url=test_url)
|
||||
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
|
||||
wait_for_all_checks(client)
|
||||
|
||||
for r in ['//div', '//a', 'xpath://div', 'xpath://a']:
|
||||
@@ -422,18 +409,13 @@ def test_various_rules(client, live_server, measure_memory_usage):
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b'fetch-error' not in res.data, f"Should not see errors after '{r} filter"
|
||||
|
||||
res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)
|
||||
assert b'Deleted' in res.data
|
||||
delete_all_watches(client)
|
||||
|
||||
|
||||
def test_xpath_20(client, live_server, measure_memory_usage):
|
||||
test_url = url_for('test_endpoint', _external=True)
|
||||
res = client.post(
|
||||
url_for("imports.import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
uuid = client.application.config.get('DATASTORE').add_watch(url=test_url)
|
||||
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
|
||||
wait_for_all_checks(client)
|
||||
|
||||
set_original_response()
|
||||
@@ -469,12 +451,8 @@ def test_xpath_20_function_count(client, live_server, measure_memory_usage):
|
||||
|
||||
# Add our URL to the import page
|
||||
test_url = url_for('test_endpoint', _external=True)
|
||||
res = client.post(
|
||||
url_for("imports.import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
uuid = client.application.config.get('DATASTORE').add_watch(url=test_url)
|
||||
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
|
||||
wait_for_all_checks(client)
|
||||
|
||||
res = client.post(
|
||||
@@ -506,12 +484,8 @@ def test_xpath_20_function_count2(client, live_server, measure_memory_usage):
|
||||
|
||||
# Add our URL to the import page
|
||||
test_url = url_for('test_endpoint', _external=True)
|
||||
res = client.post(
|
||||
url_for("imports.import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
uuid = client.application.config.get('DATASTORE').add_watch(url=test_url)
|
||||
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
|
||||
wait_for_all_checks(client)
|
||||
|
||||
res = client.post(
|
||||
@@ -543,16 +517,12 @@ def test_xpath_20_function_string_join_matches(client, live_server, measure_memo
|
||||
|
||||
# Add our URL to the import page
|
||||
test_url = url_for('test_endpoint', _external=True)
|
||||
res = client.post(
|
||||
url_for("imports.import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
uuid = client.application.config.get('DATASTORE').add_watch(url=test_url)
|
||||
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
|
||||
wait_for_all_checks(client)
|
||||
|
||||
res = client.post(
|
||||
url_for("ui.ui_edit.edit_page", uuid="first"),
|
||||
url_for("ui.ui_edit.edit_page", uuid=uuid),
|
||||
data={
|
||||
"include_filters": "xpath:string-join(//*[contains(@class, 'sametext')]|//*[matches(@class, 'changetext')], 'specialconjunction')",
|
||||
"url": test_url,
|
||||
@@ -567,7 +537,7 @@ def test_xpath_20_function_string_join_matches(client, live_server, measure_memo
|
||||
wait_for_all_checks(client)
|
||||
|
||||
res = client.get(
|
||||
url_for("ui.ui_views.preview_page", uuid="first"),
|
||||
url_for("ui.ui_views.preview_page", uuid=uuid),
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
@@ -575,3 +545,47 @@ def test_xpath_20_function_string_join_matches(client, live_server, measure_memo
|
||||
|
||||
client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)
|
||||
|
||||
|
||||
def _subtest_xpath_rss(client, content_type='text/html'):
|
||||
|
||||
# Add our URL to the import page
|
||||
test_url = url_for('test_endpoint', content_type=content_type, _external=True)
|
||||
res = client.post(
|
||||
url_for("ui.ui_views.form_quick_watch_add"),
|
||||
data={"url": test_url, "tags": '', 'edit_and_watch_submit_button': 'Edit > Watch'},
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
assert b"Watch added in Paused state, saving will unpause" in res.data
|
||||
|
||||
res = client.post(
|
||||
url_for("ui.ui_edit.edit_page", uuid="first", unpause_on_save=1),
|
||||
data={
|
||||
"url": test_url,
|
||||
"include_filters": "xpath://item",
|
||||
"tags": '',
|
||||
"fetch_backend": "html_requests",
|
||||
"time_between_check_use_default": "y",
|
||||
},
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
assert b"unpaused" in res.data
|
||||
wait_for_all_checks(client)
|
||||
|
||||
res = client.get(
|
||||
url_for("ui.ui_views.preview_page", uuid="first"),
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
assert b"Lets go discount" in res.data, f"When testing for Lets go discount called with content type '{content_type}'"
|
||||
assert b"Events and Announcements" not in res.data, f"When testing for Lets go discount called with content type '{content_type}'" # It should not be here because thats not our selector target
|
||||
|
||||
client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)
|
||||
|
||||
# Be sure all-in-the-wild types of RSS feeds work with xpath
|
||||
def test_rss_xpath(client, live_server, measure_memory_usage):
|
||||
for feed_header in ['', '<?xml version="1.0" encoding="utf-8"?>']:
|
||||
set_rss_atom_feed_response(header=feed_header)
|
||||
for content_type in RSS_XML_CONTENT_TYPES:
|
||||
_subtest_xpath_rss(client, content_type=content_type)
|
||||
|
||||
@@ -1,436 +0,0 @@
|
||||
"""
|
||||
Tests for browser notification functionality
|
||||
Tests VAPID key handling, subscription management, and notification sending
|
||||
"""
|
||||
|
||||
import json
|
||||
import sys
|
||||
import tempfile
|
||||
import os
|
||||
import unittest
|
||||
from unittest.mock import patch, Mock, MagicMock
|
||||
from py_vapid import Vapid
|
||||
|
||||
from changedetectionio.notification.apprise_plugin.browser_notification_helpers import (
|
||||
convert_pem_private_key_for_pywebpush,
|
||||
convert_pem_public_key_for_browser,
|
||||
send_push_notifications,
|
||||
create_notification_payload,
|
||||
get_vapid_config_from_datastore,
|
||||
get_browser_subscriptions,
|
||||
save_browser_subscriptions
|
||||
)
|
||||
|
||||
|
||||
class TestVAPIDKeyHandling(unittest.TestCase):
|
||||
"""Test VAPID key generation, conversion, and validation"""
|
||||
|
||||
def test_create_notification_payload(self):
|
||||
"""Test notification payload creation"""
|
||||
payload = create_notification_payload("Test Title", "Test Body", "/test-icon.png")
|
||||
|
||||
self.assertEqual(payload['title'], "Test Title")
|
||||
self.assertEqual(payload['body'], "Test Body")
|
||||
self.assertEqual(payload['icon'], "/test-icon.png")
|
||||
self.assertEqual(payload['badge'], "/static/favicons/favicon-32x32.png")
|
||||
self.assertIn('timestamp', payload)
|
||||
self.assertIsInstance(payload['timestamp'], int)
|
||||
|
||||
def test_create_notification_payload_defaults(self):
|
||||
"""Test notification payload with default values"""
|
||||
payload = create_notification_payload("Title", "Body")
|
||||
|
||||
self.assertEqual(payload['icon'], "/static/favicons/favicon-32x32.png")
|
||||
self.assertEqual(payload['badge'], "/static/favicons/favicon-32x32.png")
|
||||
|
||||
def test_convert_pem_private_key_for_pywebpush_with_valid_pem(self):
|
||||
"""Test conversion of valid PEM private key to Vapid instance"""
|
||||
# Generate a real VAPID key
|
||||
vapid = Vapid()
|
||||
vapid.generate_keys()
|
||||
private_pem = vapid.private_pem().decode()
|
||||
|
||||
# Convert using our function
|
||||
converted_key = convert_pem_private_key_for_pywebpush(private_pem)
|
||||
|
||||
# Should return a Vapid instance
|
||||
self.assertIsInstance(converted_key, Vapid)
|
||||
|
||||
def test_convert_pem_private_key_invalid_input(self):
|
||||
"""Test conversion with invalid input returns original"""
|
||||
invalid_key = "not-a-pem-key"
|
||||
result = convert_pem_private_key_for_pywebpush(invalid_key)
|
||||
self.assertEqual(result, invalid_key)
|
||||
|
||||
none_key = None
|
||||
result = convert_pem_private_key_for_pywebpush(none_key)
|
||||
self.assertEqual(result, none_key)
|
||||
|
||||
def test_convert_pem_public_key_for_browser(self):
|
||||
"""Test conversion of PEM public key to browser format"""
|
||||
# Generate a real VAPID key pair
|
||||
vapid = Vapid()
|
||||
vapid.generate_keys()
|
||||
public_pem = vapid.public_pem().decode()
|
||||
|
||||
# Convert to browser format
|
||||
browser_key = convert_pem_public_key_for_browser(public_pem)
|
||||
|
||||
# Should return URL-safe base64 string
|
||||
self.assertIsInstance(browser_key, str)
|
||||
self.assertGreater(len(browser_key), 0)
|
||||
# Should not contain padding
|
||||
self.assertFalse(browser_key.endswith('='))
|
||||
|
||||
def test_convert_pem_public_key_invalid(self):
|
||||
"""Test public key conversion with invalid input"""
|
||||
result = convert_pem_public_key_for_browser("invalid-pem")
|
||||
self.assertIsNone(result)
|
||||
|
||||
|
||||
class TestDatastoreIntegration(unittest.TestCase):
|
||||
"""Test datastore operations for VAPID and subscriptions"""
|
||||
|
||||
def test_get_vapid_config_from_datastore(self):
|
||||
"""Test retrieving VAPID config from datastore"""
|
||||
mock_datastore = Mock()
|
||||
mock_datastore.data = {
|
||||
'settings': {
|
||||
'application': {
|
||||
'vapid': {
|
||||
'private_key': 'test-private-key',
|
||||
'public_key': 'test-public-key',
|
||||
'contact_email': 'test@example.com'
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private_key, public_key, contact_email = get_vapid_config_from_datastore(mock_datastore)
|
||||
|
||||
self.assertEqual(private_key, 'test-private-key')
|
||||
self.assertEqual(public_key, 'test-public-key')
|
||||
self.assertEqual(contact_email, 'test@example.com')
|
||||
|
||||
def test_get_vapid_config_missing_email(self):
|
||||
"""Test VAPID config with missing contact email uses default"""
|
||||
mock_datastore = Mock()
|
||||
mock_datastore.data = {
|
||||
'settings': {
|
||||
'application': {
|
||||
'vapid': {
|
||||
'private_key': 'test-private-key',
|
||||
'public_key': 'test-public-key'
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private_key, public_key, contact_email = get_vapid_config_from_datastore(mock_datastore)
|
||||
|
||||
self.assertEqual(contact_email, 'citizen@example.com')
|
||||
|
||||
def test_get_vapid_config_empty_datastore(self):
|
||||
"""Test VAPID config with empty datastore returns None values"""
|
||||
mock_datastore = Mock()
|
||||
mock_datastore.data = {}
|
||||
|
||||
private_key, public_key, contact_email = get_vapid_config_from_datastore(mock_datastore)
|
||||
|
||||
self.assertIsNone(private_key)
|
||||
self.assertIsNone(public_key)
|
||||
self.assertEqual(contact_email, 'citizen@example.com')
|
||||
|
||||
def test_get_browser_subscriptions(self):
|
||||
"""Test retrieving browser subscriptions from datastore"""
|
||||
mock_datastore = Mock()
|
||||
test_subscriptions = [
|
||||
{
|
||||
'endpoint': 'https://fcm.googleapis.com/fcm/send/test1',
|
||||
'keys': {'p256dh': 'key1', 'auth': 'auth1'}
|
||||
},
|
||||
{
|
||||
'endpoint': 'https://fcm.googleapis.com/fcm/send/test2',
|
||||
'keys': {'p256dh': 'key2', 'auth': 'auth2'}
|
||||
}
|
||||
]
|
||||
mock_datastore.data = {
|
||||
'settings': {
|
||||
'application': {
|
||||
'browser_subscriptions': test_subscriptions
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
subscriptions = get_browser_subscriptions(mock_datastore)
|
||||
|
||||
self.assertEqual(len(subscriptions), 2)
|
||||
self.assertEqual(subscriptions, test_subscriptions)
|
||||
|
||||
def test_get_browser_subscriptions_empty(self):
|
||||
"""Test getting subscriptions from empty datastore returns empty list"""
|
||||
mock_datastore = Mock()
|
||||
mock_datastore.data = {}
|
||||
|
||||
subscriptions = get_browser_subscriptions(mock_datastore)
|
||||
|
||||
self.assertEqual(subscriptions, [])
|
||||
|
||||
def test_save_browser_subscriptions(self):
|
||||
"""Test saving browser subscriptions to datastore"""
|
||||
mock_datastore = Mock()
|
||||
mock_datastore.data = {'settings': {'application': {}}}
|
||||
|
||||
test_subscriptions = [
|
||||
{'endpoint': 'test1', 'keys': {'p256dh': 'key1', 'auth': 'auth1'}}
|
||||
]
|
||||
|
||||
save_browser_subscriptions(mock_datastore, test_subscriptions)
|
||||
|
||||
self.assertEqual(mock_datastore.data['settings']['application']['browser_subscriptions'], test_subscriptions)
|
||||
self.assertTrue(mock_datastore.needs_write)
|
||||
|
||||
|
||||
class TestNotificationSending(unittest.TestCase):
|
||||
"""Test notification sending with mocked pywebpush"""
|
||||
|
||||
@patch('pywebpush.webpush')
|
||||
def test_send_push_notifications_success(self, mock_webpush):
|
||||
"""Test successful notification sending"""
|
||||
mock_webpush.return_value = True
|
||||
|
||||
mock_datastore = Mock()
|
||||
mock_datastore.needs_write = False
|
||||
|
||||
subscriptions = [
|
||||
{
|
||||
'endpoint': 'https://fcm.googleapis.com/fcm/send/test1',
|
||||
'keys': {'p256dh': 'key1', 'auth': 'auth1'}
|
||||
}
|
||||
]
|
||||
|
||||
# Generate a real VAPID key for testing
|
||||
vapid = Vapid()
|
||||
vapid.generate_keys()
|
||||
private_key = vapid.private_pem().decode()
|
||||
|
||||
notification_payload = {
|
||||
'title': 'Test Title',
|
||||
'body': 'Test Body'
|
||||
}
|
||||
|
||||
success_count, total_count = send_push_notifications(
|
||||
subscriptions=subscriptions,
|
||||
notification_payload=notification_payload,
|
||||
private_key=private_key,
|
||||
contact_email='test@example.com',
|
||||
datastore=mock_datastore
|
||||
)
|
||||
|
||||
self.assertEqual(success_count, 1)
|
||||
self.assertEqual(total_count, 1)
|
||||
self.assertTrue(mock_webpush.called)
|
||||
|
||||
# Verify webpush was called with correct parameters
|
||||
call_args = mock_webpush.call_args
|
||||
self.assertEqual(call_args[1]['subscription_info'], subscriptions[0])
|
||||
self.assertEqual(json.loads(call_args[1]['data']), notification_payload)
|
||||
self.assertIn('vapid_private_key', call_args[1])
|
||||
self.assertEqual(call_args[1]['vapid_claims']['sub'], 'mailto:test@example.com')
|
||||
|
||||
@patch('pywebpush.webpush')
|
||||
def test_send_push_notifications_webpush_exception(self, mock_webpush):
|
||||
"""Test handling of WebPushException with invalid subscription removal"""
|
||||
from pywebpush import WebPushException
|
||||
|
||||
# Mock a 410 response (subscription gone)
|
||||
mock_response = Mock()
|
||||
mock_response.status_code = 410
|
||||
|
||||
mock_webpush.side_effect = WebPushException("Subscription expired", response=mock_response)
|
||||
|
||||
mock_datastore = Mock()
|
||||
mock_datastore.needs_write = False
|
||||
|
||||
subscriptions = [
|
||||
{
|
||||
'endpoint': 'https://fcm.googleapis.com/fcm/send/test1',
|
||||
'keys': {'p256dh': 'key1', 'auth': 'auth1'}
|
||||
}
|
||||
]
|
||||
|
||||
vapid = Vapid()
|
||||
vapid.generate_keys()
|
||||
private_key = vapid.private_pem().decode()
|
||||
|
||||
success_count, total_count = send_push_notifications(
|
||||
subscriptions=subscriptions,
|
||||
notification_payload={'title': 'Test', 'body': 'Test'},
|
||||
private_key=private_key,
|
||||
contact_email='test@example.com',
|
||||
datastore=mock_datastore
|
||||
)
|
||||
|
||||
self.assertEqual(success_count, 0)
|
||||
self.assertEqual(total_count, 1)
|
||||
self.assertTrue(mock_datastore.needs_write) # Should mark for subscription cleanup
|
||||
|
||||
def test_send_push_notifications_no_pywebpush(self):
|
||||
"""Test graceful handling when pywebpush is not available"""
|
||||
with patch.dict('sys.modules', {'pywebpush': None}):
|
||||
subscriptions = [{'endpoint': 'test', 'keys': {}}]
|
||||
|
||||
success_count, total_count = send_push_notifications(
|
||||
subscriptions=subscriptions,
|
||||
notification_payload={'title': 'Test', 'body': 'Test'},
|
||||
private_key='test-key',
|
||||
contact_email='test@example.com',
|
||||
datastore=Mock()
|
||||
)
|
||||
|
||||
self.assertEqual(success_count, 0)
|
||||
self.assertEqual(total_count, 1)
|
||||
|
||||
|
||||
class TestBrowserIntegration(unittest.TestCase):
|
||||
"""Test browser integration aspects (file existence)"""
|
||||
|
||||
def test_javascript_browser_notifications_class_exists(self):
|
||||
"""Test that browser notifications JavaScript file exists and has expected structure"""
|
||||
js_file = "/var/www/changedetection.io/changedetectionio/static/js/browser-notifications.js"
|
||||
|
||||
self.assertTrue(os.path.exists(js_file))
|
||||
|
||||
with open(js_file, 'r') as f:
|
||||
content = f.read()
|
||||
|
||||
# Check for key class and methods
|
||||
self.assertIn('class BrowserNotifications', content)
|
||||
self.assertIn('async init()', content)
|
||||
self.assertIn('async subscribe()', content)
|
||||
self.assertIn('async sendTestNotification()', content)
|
||||
self.assertIn('setupNotificationUrlMonitoring()', content)
|
||||
|
||||
def test_service_worker_exists(self):
|
||||
"""Test that service worker file exists"""
|
||||
sw_file = "/var/www/changedetection.io/changedetectionio/static/js/service-worker.js"
|
||||
|
||||
self.assertTrue(os.path.exists(sw_file))
|
||||
|
||||
with open(sw_file, 'r') as f:
|
||||
content = f.read()
|
||||
|
||||
# Check for key service worker functionality
|
||||
self.assertIn('push', content)
|
||||
self.assertIn('notificationclick', content)
|
||||
|
||||
|
||||
class TestAPIEndpoints(unittest.TestCase):
|
||||
"""Test browser notification API endpoints"""
|
||||
|
||||
def test_browser_notifications_module_exists(self):
|
||||
"""Test that BrowserNotifications API module exists"""
|
||||
api_file = "/var/www/changedetection.io/changedetectionio/notification/BrowserNotifications.py"
|
||||
|
||||
self.assertTrue(os.path.exists(api_file))
|
||||
|
||||
with open(api_file, 'r') as f:
|
||||
content = f.read()
|
||||
|
||||
# Check for key API classes
|
||||
self.assertIn('BrowserNotificationsVapidPublicKey', content)
|
||||
self.assertIn('BrowserNotificationsSubscribe', content)
|
||||
self.assertIn('BrowserNotificationsUnsubscribe', content)
|
||||
|
||||
def test_vapid_public_key_conversion(self):
|
||||
"""Test VAPID public key conversion for browser use"""
|
||||
# Generate a real key pair
|
||||
vapid = Vapid()
|
||||
vapid.generate_keys()
|
||||
public_pem = vapid.public_pem().decode()
|
||||
|
||||
# Convert to browser format
|
||||
browser_key = convert_pem_public_key_for_browser(public_pem)
|
||||
|
||||
# Verify it's a valid URL-safe base64 string
|
||||
self.assertIsInstance(browser_key, str)
|
||||
self.assertGreater(len(browser_key), 80) # P-256 uncompressed point should be ~88 chars
|
||||
|
||||
# Should not have padding
|
||||
self.assertFalse(browser_key.endswith('='))
|
||||
|
||||
# Should only contain URL-safe base64 characters
|
||||
import re
|
||||
self.assertRegex(browser_key, r'^[A-Za-z0-9_-]+$')
|
||||
|
||||
|
||||
class TestIntegrationFlow(unittest.TestCase):
|
||||
"""Test complete integration flow"""
|
||||
|
||||
@patch('pywebpush.webpush')
|
||||
def test_complete_notification_flow(self, mock_webpush):
|
||||
"""Test complete flow from subscription to notification"""
|
||||
mock_webpush.return_value = True
|
||||
|
||||
# Create mock datastore with VAPID keys
|
||||
mock_datastore = Mock()
|
||||
vapid = Vapid()
|
||||
vapid.generate_keys()
|
||||
|
||||
mock_datastore.data = {
|
||||
'settings': {
|
||||
'application': {
|
||||
'vapid': {
|
||||
'private_key': vapid.private_pem().decode(),
|
||||
'public_key': vapid.public_pem().decode(),
|
||||
'contact_email': 'test@example.com'
|
||||
},
|
||||
'browser_subscriptions': [
|
||||
{
|
||||
'endpoint': 'https://fcm.googleapis.com/fcm/send/test123',
|
||||
'keys': {
|
||||
'p256dh': 'test-p256dh-key',
|
||||
'auth': 'test-auth-key'
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
mock_datastore.needs_write = False
|
||||
|
||||
# Get configuration
|
||||
private_key, public_key, contact_email = get_vapid_config_from_datastore(mock_datastore)
|
||||
subscriptions = get_browser_subscriptions(mock_datastore)
|
||||
|
||||
# Create notification
|
||||
payload = create_notification_payload("Test Title", "Test Message")
|
||||
|
||||
# Send notification
|
||||
success_count, total_count = send_push_notifications(
|
||||
subscriptions=subscriptions,
|
||||
notification_payload=payload,
|
||||
private_key=private_key,
|
||||
contact_email=contact_email,
|
||||
datastore=mock_datastore
|
||||
)
|
||||
|
||||
# Verify success
|
||||
self.assertEqual(success_count, 1)
|
||||
self.assertEqual(total_count, 1)
|
||||
self.assertTrue(mock_webpush.called)
|
||||
|
||||
# Verify webpush call parameters
|
||||
call_args = mock_webpush.call_args
|
||||
self.assertIn('subscription_info', call_args[1])
|
||||
self.assertIn('vapid_private_key', call_args[1])
|
||||
self.assertIn('vapid_claims', call_args[1])
|
||||
|
||||
# Verify vapid_claims format
|
||||
vapid_claims = call_args[1]['vapid_claims']
|
||||
self.assertEqual(vapid_claims['sub'], 'mailto:test@example.com')
|
||||
self.assertEqual(vapid_claims['aud'], 'https://fcm.googleapis.com')
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
||||
@@ -127,6 +127,11 @@ def extract_UUID_from_client(client):
|
||||
uuid = m.group(1)
|
||||
return uuid.strip()
|
||||
|
||||
def delete_all_watches(client=None):
|
||||
uuids = list(client.application.config.get('DATASTORE').data['watching'])
|
||||
for uuid in uuids:
|
||||
client.application.config.get('DATASTORE').delete(uuid)
|
||||
|
||||
|
||||
def wait_for_all_checks(client=None):
|
||||
"""
|
||||
@@ -135,8 +140,6 @@ def wait_for_all_checks(client=None):
|
||||
"""
|
||||
from changedetectionio.flask_app import update_q as global_update_q
|
||||
from changedetectionio import worker_handler
|
||||
|
||||
logger = logging.getLogger()
|
||||
empty_since = None
|
||||
attempt = 0
|
||||
max_attempts = 150 # Still reasonable upper bound
|
||||
@@ -144,9 +147,9 @@ def wait_for_all_checks(client=None):
|
||||
while attempt < max_attempts:
|
||||
# Start with fast checks, slow down if needed
|
||||
if attempt < 10:
|
||||
time.sleep(0.1) # Very fast initial checks
|
||||
time.sleep(0.2) # Very fast initial checks
|
||||
elif attempt < 30:
|
||||
time.sleep(0.3) # Medium speed
|
||||
time.sleep(0.4) # Medium speed
|
||||
else:
|
||||
time.sleep(0.8) # Slower for persistent issues
|
||||
|
||||
@@ -322,4 +325,3 @@ def new_live_server_setup(live_server):
|
||||
return resp
|
||||
|
||||
live_server.start()
|
||||
|
||||
|
||||
@@ -4,7 +4,7 @@ import os
|
||||
from flask import url_for
|
||||
from ..util import live_server_setup, wait_for_all_checks
|
||||
|
||||
# def test_setup(client, live_server):
|
||||
# def test_setup(client, live_server, measure_memory_usage):
|
||||
# live_server_setup(live_server) # Setup on conftest per function
|
||||
|
||||
|
||||
@@ -142,7 +142,7 @@ def test_basic_browserstep(client, live_server, measure_memory_usage):
|
||||
assert b"testheader: yes" in res.data
|
||||
assert b"user-agent: mycustomagent" in res.data
|
||||
|
||||
def test_non_200_errors_report_browsersteps(client, live_server):
|
||||
def test_non_200_errors_report_browsersteps(client, live_server, measure_memory_usage):
|
||||
|
||||
|
||||
four_o_four_url = url_for('test_endpoint', status_code=404, _external=True)
|
||||
|
||||
@@ -12,7 +12,7 @@ flask_wtf~=1.2
|
||||
flask~=2.3
|
||||
flask-socketio~=5.5.1
|
||||
python-socketio~=5.13.0
|
||||
python-engineio~=4.12.0
|
||||
python-engineio~=4.12.3
|
||||
inscriptis~=2.2
|
||||
pytz
|
||||
timeago~=1.0
|
||||
@@ -39,7 +39,7 @@ jsonpath-ng~=1.5.3
|
||||
# jq not available on Windows so must be installed manually
|
||||
|
||||
# Notification library
|
||||
apprise==1.9.3
|
||||
apprise==1.9.5
|
||||
|
||||
# - Needed for apprise/spush, and maybe others? hopefully doesnt trigger a rust compile.
|
||||
# - Requires extra wheel for rPi, adds build time for arm/v8 which is not in piwheels
|
||||
@@ -51,8 +51,8 @@ cryptography==44.0.1
|
||||
# use any version other than 2.0.x due to https://github.com/eclipse/paho.mqtt.python/issues/814
|
||||
paho-mqtt!=2.0.*
|
||||
|
||||
# Used for CSS filtering
|
||||
beautifulsoup4>=4.0.0
|
||||
# Used for CSS filtering, JSON extraction from HTML
|
||||
beautifulsoup4>=4.0.0,<=4.13.5
|
||||
|
||||
# XPath filtering, lxml is required by bs4 anyway, but put it here to be safe.
|
||||
# #2328 - 5.2.0 and 5.2.1 had extra CPU flag CFLAGS set which was not compatible on older hardware
|
||||
@@ -135,13 +135,10 @@ tzdata
|
||||
pluggy ~= 1.5
|
||||
|
||||
# Needed for testing, cross-platform for process and system monitoring
|
||||
psutil==7.0.0
|
||||
psutil==7.1.0
|
||||
|
||||
ruff >= 0.11.2
|
||||
pre_commit >= 4.2.0
|
||||
|
||||
# For events between checking and socketio updates
|
||||
blinker
|
||||
|
||||
# For Web Push notifications (browser notifications)
|
||||
pywebpush
|
||||
|
||||
Reference in New Issue
Block a user