Mirror of https://github.com/dgtlmoon/changedetection.io.git (synced 2025-11-13 13:06:10 +00:00)

Compare commits: 3423-opena ... 0.50.16 (19 commits)
| Author | SHA1 | Date |
|---|---|---|
| | 754febfd33 | |
| | 0c9c475f32 | |
| | e4baca1127 | |
| | bb61a35a54 | |
| | 4b9ae5a97c | |
| | c8caa0662d | |
| | f4e8d1963f | |
| | 45d5e961dc | |
| | 45f2863966 | |
| | 01c1ac4c0c | |
| | b2f9aec383 | |
| | a95aa67aef | |
| | cbeefeccbb | |
| | 2b72d38235 | |
| | 8fe7aec3c6 | |
| | 6e1f5a8503 | |
| | b74b76c9f9 | |
| | a27265450c | |
| | cc5455c3dc | |
@@ -33,7 +33,6 @@ venv/
# Test and development files
test-datastore/
tests/
docs/
*.md
!README.md
.github/dependabot.yml (vendored) [10 lines changed]
@@ -4,11 +4,13 @@ updates:
directory: /
schedule:
interval: "weekly"
"caronc/apprise":
versioning-strategy: "increase"
schedule:
interval: "daily"
groups:
all:
patterns:
- "*"
- package-ecosystem: pip
directory: /
schedule:
interval: "daily"
allow:
- dependency-name: "apprise"
.github/workflows/containers.yml (vendored) [4 lines changed]
@@ -95,7 +95,7 @@ jobs:
push: true
tags: |
${{ secrets.DOCKER_HUB_USERNAME }}/changedetection.io:dev,ghcr.io/${{ github.repository }}:dev
platforms: linux/amd64,linux/arm64,linux/arm/v7,linux/arm/v8,linux/arm64/v8
platforms: linux/amd64,linux/arm64,linux/arm/v7,linux/arm/v8
cache-from: type=gha
cache-to: type=gha,mode=max

@@ -133,7 +133,7 @@ jobs:
file: ./Dockerfile
push: true
tags: ${{ steps.meta.outputs.tags }}
platforms: linux/amd64,linux/arm64,linux/arm/v7,linux/arm/v8,linux/arm64/v8
platforms: linux/amd64,linux/arm64,linux/arm/v7,linux/arm/v8
cache-from: type=gha
cache-to: type=gha,mode=max
# Looks like this was disabled
.github/workflows/test-container-build.yml (vendored) [4 lines changed]
@@ -38,8 +38,6 @@ jobs:
dockerfile: ./Dockerfile
- platform: linux/arm/v8
dockerfile: ./Dockerfile
- platform: linux/arm64/v8
dockerfile: ./Dockerfile
# Alpine Dockerfile platforms (musl via alpine check)
- platform: linux/amd64
dockerfile: ./.github/test/Dockerfile-alpine

@@ -76,5 +74,5 @@ jobs:
file: ${{ matrix.dockerfile }}
platforms: ${{ matrix.platform }}
cache-from: type=gha
cache-to: type=gha,mode=max
cache-to: type=gha,mode=min
@@ -84,6 +84,11 @@ EXPOSE 5000

# The actual flask app module
COPY changedetectionio /app/changedetectionio

# Also for OpenAPI validation wrapper - needs the YML
RUN [ ! -d "/app/docs" ] && mkdir /app/docs
COPY docs/api-spec.yaml /app/docs/api-spec.yaml

# Starting wrapper
COPY changedetection.py /app/changedetection.py
@@ -2,7 +2,7 @@

# Read more https://github.com/dgtlmoon/changedetection.io/wiki

__version__ = '0.50.12'
__version__ = '0.50.16'

from changedetectionio.strtobool import strtobool
from json.decoder import JSONDecodeError
@@ -14,6 +14,39 @@ import copy
from . import schema, schema_create_watch, schema_update_watch, validate_openapi_request

def validate_time_between_check_required(json_data):
"""
Validate that at least one time interval is specified when not using default settings.
Returns None if valid, or error message string if invalid.
Defaults to using global settings if time_between_check_use_default is not provided.
"""
# Default to using global settings if not specified
use_default = json_data.get('time_between_check_use_default', True)

# If using default settings, no validation needed
if use_default:
return None

# If not using defaults, check if time_between_check exists and has at least one non-zero value
time_check = json_data.get('time_between_check')
if not time_check:
# No time_between_check provided and not using defaults - this is an error
return "At least one time interval (weeks, days, hours, minutes, or seconds) must be specified when not using global settings."

# time_between_check exists, check if it has at least one non-zero value
if any([
(time_check.get('weeks') or 0) > 0,
(time_check.get('days') or 0) > 0,
(time_check.get('hours') or 0) > 0,
(time_check.get('minutes') or 0) > 0,
(time_check.get('seconds') or 0) > 0
]):
return None

# time_between_check exists but all values are 0 or empty - this is an error
return "At least one time interval (weeks, days, hours, minutes, or seconds) must be specified when not using global settings."
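For orientation, a minimal sketch (not part of the diff) of how this helper behaves for typical watch API payloads; the URL and dict shapes below are illustrative assumptions:

# Illustrative only - exercises validate_time_between_check_required() as defined above.
payload_uses_global = {"url": "https://example.com"}  # key missing -> defaults to using global settings
payload_explicit = {"time_between_check_use_default": False, "time_between_check": {"hours": 3}}
payload_invalid = {"time_between_check_use_default": False, "time_between_check": {"hours": 0}}

assert validate_time_between_check_required(payload_uses_global) is None   # nothing to check
assert validate_time_between_check_required(payload_explicit) is None      # at least one non-zero interval given
assert validate_time_between_check_required(payload_invalid) is not None   # returns the error message string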
class Watch(Resource):
def __init__(self, **kwargs):
# datastore is a black box dependency

@@ -55,6 +88,8 @@ class Watch(Resource):
# attr .last_changed will check for the last written text snapshot on change
watch['last_changed'] = watch.last_changed
watch['viewed'] = watch.viewed
watch['link'] = watch.link,

return watch

@auth.check_token

@@ -81,6 +116,11 @@ class Watch(Resource):
if not request.json.get('proxy') in plist:
return "Invalid proxy choice, currently supported proxies are '{}'".format(', '.join(plist)), 400

# Validate time_between_check when not using defaults
validation_error = validate_time_between_check_required(request.json)
if validation_error:
return validation_error, 400

watch.update(request.json)

return "OK", 200

@@ -196,6 +236,11 @@ class CreateWatch(Resource):
if not json_data.get('proxy') in plist:
return "Invalid proxy choice, currently supported proxies are '{}'".format(', '.join(plist)), 400

# Validate time_between_check when not using defaults
validation_error = validate_time_between_check_required(json_data)
if validation_error:
return validation_error, 400

extras = copy.deepcopy(json_data)

# Because we renamed 'tag' to 'tags' but don't want to change the API (can do this in v2 of the API)

@@ -230,6 +275,8 @@ class CreateWatch(Resource):
'last_changed': watch.last_changed,
'last_checked': watch['last_checked'],
'last_error': watch['last_error'],
'link': watch.link,
'page_title': watch['page_title'],
'title': watch['title'],
'url': watch['url'],
'viewed': watch.viewed
@@ -2,6 +2,7 @@ import copy
import yaml
import functools
from flask import request, abort
from loguru import logger
from openapi_core import OpenAPI
from openapi_core.contrib.flask import FlaskOpenAPIRequest
from . import api_schema

@@ -31,17 +32,13 @@ schema_create_notification_urls['required'] = ['notification_urls']
schema_delete_notification_urls = copy.deepcopy(schema_notification_urls)
schema_delete_notification_urls['required'] = ['notification_urls']

# Load OpenAPI spec for validation
_openapi_spec = None

@functools.cache
def get_openapi_spec():
global _openapi_spec
if _openapi_spec is None:
import os
spec_path = os.path.join(os.path.dirname(__file__), '../../docs/api-spec.yaml')
with open(spec_path, 'r') as f:
spec_dict = yaml.safe_load(f)
_openapi_spec = OpenAPI.from_dict(spec_dict)
import os
spec_path = os.path.join(os.path.dirname(__file__), '../../docs/api-spec.yaml')
with open(spec_path, 'r') as f:
spec_dict = yaml.safe_load(f)
_openapi_spec = OpenAPI.from_dict(spec_dict)
return _openapi_spec

def validate_openapi_request(operation_id):

@@ -50,16 +47,25 @@ def validate_openapi_request(operation_id):
@functools.wraps(f)
def wrapper(*args, **kwargs):
try:
spec = get_openapi_spec()
openapi_request = FlaskOpenAPIRequest(request)
result = spec.unmarshal_request(openapi_request)
if result.errors:
abort(400, message=f"OpenAPI validation failed: {result.errors}")
return f(*args, **kwargs)
# Skip OpenAPI validation for GET requests since they don't have request bodies
if request.method.upper() != 'GET':
spec = get_openapi_spec()
openapi_request = FlaskOpenAPIRequest(request)
result = spec.unmarshal_request(openapi_request)
if result.errors:
from werkzeug.exceptions import BadRequest
error_details = []
for error in result.errors:
error_details.append(str(error))
raise BadRequest(f"OpenAPI validation failed: {error_details}")
except BadRequest:
# Re-raise BadRequest exceptions (validation failures)
raise
except Exception as e:
# If OpenAPI validation fails, log but don't break existing functionality
print(f"OpenAPI validation warning for {operation_id}: {e}")
return f(*args, **kwargs)
# If OpenAPI spec loading fails, log but don't break existing functionality
logger.critical(f"OpenAPI validation warning for {operation_id}: {e}")
abort(500)
return f(*args, **kwargs)
return wrapper
return decorator

@@ -69,3 +75,4 @@ from .Tags import Tags, Tag
from .Import import Import
from .SystemInfo import SystemInfo
from .Notifications import Notifications
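A rough sketch (not from the diff; the resource class, method placement, and operationId are assumptions) of how the decorator is intended to be applied, so that non-GET requests are unmarshalled against docs/api-spec.yaml before the handler runs:

# Hypothetical usage sketch - 'createWatch' and the CreateWatch.post placement are
# illustrative assumptions, not taken from the diff above.
# Assumes flask_restful.Resource, flask.request and the module-level 'auth' used elsewhere in this compare.
class CreateWatch(Resource):

    @auth.check_token
    @validate_openapi_request('createWatch')  # operationId expected to exist in docs/api-spec.yaml
    def post(self):
        # Reaching this point means a non-GET request body already passed OpenAPI
        # unmarshalling: validation errors became HTTP 400 (BadRequest), and a failure
        # to load the spec itself became HTTP 500; GET requests skip validation entirely.
        json_data = request.get_json()
        ...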
@@ -119,6 +119,12 @@ def build_watch_json_schema(d):

schema['properties']['time_between_check'] = build_time_between_check_json_schema()

schema['properties']['time_between_check_use_default'] = {
"type": "boolean",
"default": True,
"description": "Whether to use global settings for time between checks - defaults to true if not set"
}

schema['properties']['browser_steps'] = {
"anyOf": [
{
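To make the schema addition concrete, a hypothetical request body (illustrative values only, endpoint path assumed) that the updated watch schema is designed to accept:

# Hypothetical body for a watch create/update call such as PUT /api/v1/watch/<uuid>.
body = {
    "time_between_check_use_default": False,            # opt out of the global interval
    "time_between_check": {"hours": 6, "minutes": 30},  # then at least one non-zero part is required
}
# With "time_between_check_use_default": true (the schema default), the
# time_between_check block can be omitted entirely.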
@@ -191,6 +191,12 @@ nav
</ul>
</span>
</fieldset>
<fieldset class="pure-group">
{{ render_checkbox_field(form.application.form.strip_ignored_lines) }}
<span class="pure-form-message-inline">Remove any text that appears in the "Ignore text" from the output (otherwise its just ignored for change-detection)<br>
<i>Note:</i> Changing this will change the status of your existing watches, possibly trigger alerts etc.
</span>
</fieldset>
</div>

<div class="tab-pane-inner" id="api">
@@ -87,7 +87,6 @@ def construct_blueprint(datastore: ChangeDetectionStore, update_q, queuedWatchMe
form=form,
guid=datastore.data['app_guid'],
has_proxies=datastore.proxy_list,
has_unviewed=datastore.has_unviewed,
hosted_sticky=os.getenv("SALTED_PASS", False) == False,
now_time_server=round(time.time()),
pagination=pagination,

@@ -97,6 +96,7 @@ def construct_blueprint(datastore: ChangeDetectionStore, update_q, queuedWatchMe
sort_order=request.args.get('order') if request.args.get('order') else request.cookies.get('order'),
system_default_fetcher=datastore.data['settings']['application'].get('fetch_backend'),
tags=sorted_tags,
unread_changes_count=datastore.unread_changes_count,
watches=sorted_watches
)
@@ -82,8 +82,11 @@ document.addEventListener('DOMContentLoaded', function() {
{%- set cols_required = cols_required + 1 -%}
{%- endif -%}
{%- set ui_settings = datastore.data['settings']['application']['ui'] -%}

<div id="watch-table-wrapper">
{%- set wrapper_classes = [
'has-unread-changes' if unread_changes_count else '',
'has-error' if errored_count else '',
] -%}
<div id="watch-table-wrapper" class="{{ wrapper_classes | reject('equalto', '') | join(' ') }}">
{%- set table_classes = [
'favicon-enabled' if 'favicons_enabled' not in ui_settings or ui_settings['favicons_enabled'] else 'favicon-not-enabled',
] -%}

@@ -158,9 +161,9 @@ document.addEventListener('DOMContentLoaded', function() {
<div>
<span class="watch-title">
{% if system_use_url_watchlist or watch.get('use_page_title_in_list') %}
{{watch.label}}
{{ watch.label }}
{% else %}
{{watch.url}}
{{ watch.get('title') or watch.link }}
{% endif %}
<a class="external" target="_blank" rel="noopener" href="{{ watch.link.replace('source:','') }}"> </a>
</span>

@@ -241,10 +244,10 @@ document.addEventListener('DOMContentLoaded', function() {
</tbody>
</table>
<ul id="post-list-buttons">
<li id="post-list-with-errors" class="{%- if errored_count -%}has-error{%- endif -%}" style="display: none;" >
<li id="post-list-with-errors" style="display: none;" >
<a href="{{url_for('watchlist.index', with_errors=1, tag=request.args.get('tag')) }}" class="pure-button button-tag button-error">With errors ({{ errored_count }})</a>
</li>
<li id="post-list-mark-views" class="{%- if has_unviewed -%}has-unviewed{%- endif -%}" style="display: none;" >
<li id="post-list-mark-views" style="display: none;" >
<a href="{{url_for('ui.mark_all_viewed',with_errors=request.args.get('with_errors',0)) }}" class="pure-button button-tag " id="mark-all-viewed">Mark all viewed</a>
</li>
{%- if active_tag_uuid -%}

@@ -252,8 +255,8 @@ document.addEventListener('DOMContentLoaded', function() {
<a href="{{url_for('ui.mark_all_viewed', tag=active_tag_uuid) }}" class="pure-button button-tag " id="mark-all-viewed">Mark all viewed in '{{active_tag.title}}'</a>
</li>
{%- endif -%}
<li id="post-list-unread" class="{%- if has_unviewed -%}has-unviewed{%- endif -%}" style="display: none;" >
<a href="{{url_for('watchlist.index', unread=1, tag=request.args.get('tag')) }}" class="pure-button button-tag">Unread</a>
<li id="post-list-unread" style="display: none;" >
<a href="{{url_for('watchlist.index', unread=1, tag=request.args.get('tag')) }}" class="pure-button button-tag">Unread (<span id="unread-tab-counter">{{ unread_changes_count }}</span>)</a>
</li>
<li>
<a href="{{ url_for('ui.form_watch_checknow', tag=active_tag_uuid, with_errors=request.args.get('with_errors',0)) }}" class="pure-button button-tag" id="recheck-all">Recheck
@@ -23,6 +23,7 @@ from wtforms import (
)
from flask_wtf.file import FileField, FileAllowed
from wtforms.fields import FieldList
from wtforms.utils import unset_value

from wtforms.validators import ValidationError

@@ -56,6 +57,8 @@ valid_method = {

default_method = 'GET'
allow_simplehost = not strtobool(os.getenv('BLOCK_SIMPLEHOSTS', 'False'))
REQUIRE_ATLEAST_ONE_TIME_PART_MESSAGE_DEFAULT='At least one time interval (weeks, days, hours, minutes, or seconds) must be specified.'
REQUIRE_ATLEAST_ONE_TIME_PART_WHEN_NOT_GLOBAL_DEFAULT='At least one time interval (weeks, days, hours, minutes, or seconds) must be specified when not using global settings.'

class StringListField(StringField):
widget = widgets.TextArea()

@@ -212,6 +215,35 @@ class ScheduleLimitForm(Form):
self.sunday.form.enabled.label.text = "Sunday"

def validate_time_between_check_has_values(form):
"""
Custom validation function for TimeBetweenCheckForm.
Returns True if at least one time interval field has a value > 0.
"""
res = any([
form.weeks.data and int(form.weeks.data) > 0,
form.days.data and int(form.days.data) > 0,
form.hours.data and int(form.hours.data) > 0,
form.minutes.data and int(form.minutes.data) > 0,
form.seconds.data and int(form.seconds.data) > 0
])

return res
class RequiredTimeInterval(object):
"""
WTForms validator that ensures at least one time interval field has a value > 0.
Use this with FormField(TimeBetweenCheckForm, validators=[RequiredTimeInterval()]).
"""
def __init__(self, message=None):
self.message = message or 'At least one time interval (weeks, days, hours, minutes, or seconds) must be specified.'

def __call__(self, form, field):
if not validate_time_between_check_has_values(field.form):
raise ValidationError(self.message)

class TimeBetweenCheckForm(Form):
weeks = IntegerField('Weeks', validators=[validators.Optional(), validators.NumberRange(min=0, message="Should contain zero or more seconds")])
days = IntegerField('Days', validators=[validators.Optional(), validators.NumberRange(min=0, message="Should contain zero or more seconds")])

@@ -220,6 +252,123 @@ class TimeBetweenCheckForm(Form):
seconds = IntegerField('Seconds', validators=[validators.Optional(), validators.NumberRange(min=0, message="Should contain zero or more seconds")])
# @todo add total seconds minimum validatior = minimum_seconds_recheck_time

def __init__(self, formdata=None, obj=None, prefix="", data=None, meta=None, **kwargs):
super().__init__(formdata, obj, prefix, data, meta, **kwargs)
self.require_at_least_one = kwargs.get('require_at_least_one', False)
self.require_at_least_one_message = kwargs.get('require_at_least_one_message', REQUIRE_ATLEAST_ONE_TIME_PART_MESSAGE_DEFAULT)

def validate(self, **kwargs):
"""Custom validation that can optionally require at least one time interval."""
# Run normal field validation first
if not super().validate(**kwargs):
return False

# Apply optional "at least one" validation
if self.require_at_least_one:
if not validate_time_between_check_has_values(self):
# Add error to the form's general errors (not field-specific)
if not hasattr(self, '_formdata_errors'):
self._formdata_errors = []
self._formdata_errors.append(self.require_at_least_one_message)
return False

return True
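A standalone sketch (an assumption for illustration, not from the diff) of how the opt-in check could behave when the form is driven directly; werkzeug's MultiDict stands in for posted form data:

# Illustrative sketch only - relies on the TimeBetweenCheckForm definition above.
from werkzeug.datastructures import MultiDict

empty = TimeBetweenCheckForm(formdata=MultiDict({}), require_at_least_one=True)
assert not empty.validate()        # no interval supplied -> fails with the default message

three_hours = TimeBetweenCheckForm(formdata=MultiDict({'hours': '3'}), require_at_least_one=True)
assert three_hours.validate()      # one non-zero interval is enough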
class EnhancedFormField(FormField):
"""
An enhanced FormField that supports conditional validation with top-level error messages.
Adds a 'top_errors' property for validation errors at the FormField level.
"""

def __init__(self, form_class, label=None, validators=None, separator="-",
conditional_field=None, conditional_message=None, conditional_test_function=None, **kwargs):
"""
Initialize EnhancedFormField with optional conditional validation.

:param conditional_field: Name of the field this FormField depends on (e.g. 'time_between_check_use_default')
:param conditional_message: Error message to show when validation fails
:param conditional_test_function: Custom function to test if FormField has valid values.
Should take self.form as parameter and return True if valid.
"""
super().__init__(form_class, label, validators, separator, **kwargs)
self.top_errors = []
self.conditional_field = conditional_field
self.conditional_message = conditional_message or "At least one field must have a value when not using defaults."
self.conditional_test_function = conditional_test_function

def validate(self, form, extra_validators=()):
"""
Custom validation that supports conditional logic and stores top-level errors.
"""
self.top_errors = []

# First run the normal FormField validation
base_valid = super().validate(form, extra_validators)

# Apply conditional validation if configured
if self.conditional_field and hasattr(form, self.conditional_field):
conditional_field_obj = getattr(form, self.conditional_field)

# If the conditional field is False/unchecked, check if this FormField has any values
if not conditional_field_obj.data:
# Use custom test function if provided, otherwise use generic fallback
if self.conditional_test_function:
has_any_value = self.conditional_test_function(self.form)
else:
# Generic fallback - check if any field has truthy data
has_any_value = any(field.data for field in self.form if hasattr(field, 'data') and field.data)

if not has_any_value:
self.top_errors.append(self.conditional_message)
base_valid = False

return base_valid

class RequiredFormField(FormField):
"""
A FormField that passes require_at_least_one=True to TimeBetweenCheckForm.
Use this when you want the sub-form to always require at least one value.
"""

def __init__(self, form_class, label=None, validators=None, separator="-", **kwargs):
super().__init__(form_class, label, validators, separator, **kwargs)

def process(self, formdata, data=unset_value, extra_filters=None):
if extra_filters:
raise TypeError(
"FormField cannot take filters, as the encapsulated"
"data is not mutable."
)

if data is unset_value:
try:
data = self.default()
except TypeError:
data = self.default
self._obj = data

self.object_data = data

prefix = self.name + self.separator
# Pass require_at_least_one=True to the sub-form
if isinstance(data, dict):
self.form = self.form_class(formdata=formdata, prefix=prefix, require_at_least_one=True, **data)
else:
self.form = self.form_class(formdata=formdata, obj=data, prefix=prefix, require_at_least_one=True)

@property
def errors(self):
"""Include sub-form validation errors"""
form_errors = self.form.errors
# Add any general form errors to a special 'form' key
if hasattr(self.form, '_formdata_errors') and self.form._formdata_errors:
form_errors = dict(form_errors) # Make a copy
form_errors['form'] = self.form._formdata_errors
return form_errors

# Separated by key:value
class StringDictKeyValue(StringField):
widget = widgets.TextArea()
@@ -348,7 +497,7 @@ class ValidateJinja2Template(object):
joined_data = ' '.join(map(str, field.data)) if isinstance(field.data, list) else f"{field.data}"

try:
jinja2_env = ImmutableSandboxedEnvironment(loader=BaseLoader)
jinja2_env = ImmutableSandboxedEnvironment(loader=BaseLoader, extensions=['jinja2_time.TimeExtension'])
jinja2_env.globals.update(notification.valid_tokens)
# Extra validation tokens provided on the form_class(... extra_tokens={}) setup
if hasattr(field, 'extra_notification_tokens'):

@@ -583,11 +732,16 @@ class processor_text_json_diff_form(commonSettingsForm):
url = fields.URLField('URL', validators=[validateURL()])
tags = StringTagUUID('Group tag', [validators.Optional()], default='')

time_between_check = FormField(TimeBetweenCheckForm)
time_between_check = EnhancedFormField(
TimeBetweenCheckForm,
conditional_field='time_between_check_use_default',
conditional_message=REQUIRE_ATLEAST_ONE_TIME_PART_WHEN_NOT_GLOBAL_DEFAULT,
conditional_test_function=validate_time_between_check_has_values
)

time_schedule_limit = FormField(ScheduleLimitForm)

time_between_check_use_default = BooleanField('Use global settings for time between check', default=False)
time_between_check_use_default = BooleanField('Use global settings for time between check and scheduler.', default=False)

include_filters = StringListField('CSS/JSONPath/JQ/XPath Filters', [ValidateCSSJSONXPATHInput()], default='')

@@ -605,6 +759,7 @@ class processor_text_json_diff_form(commonSettingsForm):
check_unique_lines = BooleanField('Only trigger when unique lines appear in all history', default=False)
remove_duplicate_lines = BooleanField('Remove duplicate lines of text', default=False)
sort_text_alphabetically = BooleanField('Sort text alphabetically', default=False)
strip_ignored_lines = TernaryNoneBooleanField('Strip ignored lines', default=None)
trim_text_whitespace = BooleanField('Trim whitespace before and after text', default=False)

filter_text_added = BooleanField('Added lines', default=True)

@@ -728,7 +883,7 @@ class DefaultUAInputForm(Form):

# datastore.data['settings']['requests']..
class globalSettingsRequestForm(Form):
time_between_check = FormField(TimeBetweenCheckForm)
time_between_check = RequiredFormField(TimeBetweenCheckForm)
time_schedule_limit = FormField(ScheduleLimitForm)
proxy = RadioField('Proxy')
jitter_seconds = IntegerField('Random jitter seconds ± check',

@@ -782,6 +937,7 @@ class globalSettingsApplicationForm(commonSettingsForm):
removepassword_button = SubmitField('Remove password', render_kw={"class": "pure-button pure-button-primary"})
render_anchor_tag_content = BooleanField('Render anchor tag content', default=False)
shared_diff_access = BooleanField('Allow anonymous access to watch history page when password is enabled', default=False, validators=[validators.Optional()])
strip_ignored_lines = BooleanField('Strip ignored lines')
rss_hide_muted_watches = BooleanField('Hide muted watches from RSS feed', default=True,
validators=[validators.Optional()])
filter_failure_notification_threshold_attempts = IntegerField('Number of times the filter can be missing before sending a notification',
@@ -57,6 +57,7 @@ class model(dict):
'rss_hide_muted_watches': True,
'schema_version' : 0,
'shared_diff_access': False,
'strip_ignored_lines': False,
'tags': {}, #@todo use Tag.model initialisers
'timezone': None, # Default IANA timezone name
'webdriver_delay': None , # Extra delay in seconds before extracting text
@@ -170,7 +170,7 @@ class model(watch_base):
@property
def label(self):
# Used for sorting, display, etc
return self.get('title') or self.get('page_title') or self.get('url')
return self.get('title') or self.get('page_title') or self.link

@property
def last_changed(self):
@@ -58,6 +58,7 @@ class watch_base(dict):
'proxy': None, # Preferred proxy connection
'remote_server_reply': None, # From 'server' reply header
'sort_text_alphabetically': False,
'strip_ignored_lines': None,
'subtractive_selectors': [],
'tag': '', # Old system of text name for a tag, to be removed
'tags': [], # list of UUIDs to App.Tags
@@ -153,12 +153,26 @@ class perform_site_check(difference_detection_processor):
# CSS Filter, extract the HTML that matches and feed that into the existing inscriptis::get_text
self.fetcher.content = html_tools.workarounds_for_obfuscations(self.fetcher.content)
html_content = self.fetcher.content
content_type = self.fetcher.get_all_headers().get('content-type', '').lower()
is_attachment = 'attachment' in self.fetcher.get_all_headers().get('content-disposition', '').lower() or 'octet-stream' in content_type

# If not JSON, and if it's not text/plain..
if 'text/plain' in self.fetcher.get_all_headers().get('content-type', '').lower():
# Try to detect better mime types if its a download or not announced as HTML
if is_attachment:
logger.debug(f"Got a reply that may be a download or possibly a text attachment, checking..")
try:
import magic
mime = magic.from_buffer(html_content, mime=True)
logger.debug(f"Guessing mime type, original content_type '{content_type}', mime type detected '{mime}'")
if mime and "/" in mime: # looks valid and is a valid mime type
content_type = mime
except Exception as e:
logger.error(f"Error getting a more precise mime type from 'magic' library ({str(e)}")

if 'text/' in content_type and not 'html' in content_type:
# Don't run get_text or xpath/css filters on plaintext
stripped_text_from_html = html_content
else:
# If not JSON, and if it's not text/plain..
# Does it have some ld+json price data? used for easier monitoring
update_obj['has_ldjson_price_data'] = html_tools.has_ldjson_product_info(self.fetcher.content)
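As a standalone illustration of the sniffing technique used above (python-magic guessing a better mime type when the server labels the body as a download), with made-up sample data:

# Standalone sketch of the mime-sniffing idea; the byte string and header value are made up.
import magic  # python-magic

body = b"some plain text served as a download\nline two\n"
content_type = "application/octet-stream"

detected = magic.from_buffer(body, mime=True)  # e.g. 'text/plain'
if detected and "/" in detected:               # looks like a usable mime type
    content_type = detected

# 'text/...' but not HTML means the body is kept as plain text,
# skipping the html-to-text conversion and CSS/XPath filters.
treat_as_plaintext = 'text/' in content_type and 'html' not in content_type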
@@ -301,6 +315,11 @@ class perform_site_check(difference_detection_processor):
text_for_checksuming = stripped_text_from_html
if text_to_ignore:
text_for_checksuming = html_tools.strip_ignore_text(stripped_text_from_html, text_to_ignore)
# Some people prefer to also completely remove it
strip_ignored_lines = watch.get('strip_ignored_lines') if watch.get('strip_ignored_lines') is not None else self.datastore.data['settings']['application'].get('strip_ignored_lines')
if strip_ignored_lines:
# @todo add test in the 'preview' mode, check the widget works? compare to datastruct
stripped_text_from_html = text_for_checksuming

# Re #133 - if we should strip whitespaces from triggering the change detected comparison
if text_for_checksuming and self.datastore.data['settings']['application'].get('ignore_whitespace', False):
@@ -243,14 +243,15 @@ def handle_watch_update(socketio, **kwargs):

general_stats = {
'count_errors': errored_count,
'has_unviewed': datastore.has_unviewed
'unread_changes_count': datastore.unread_changes_count
}

# Debug what's being emitted
# logger.debug(f"Emitting 'watch_update' event for {watch.get('uuid')}, data: {watch_data}")

# Emit to all clients (no 'broadcast' parameter needed - it's the default behavior)
socketio.emit("watch_update", {'watch': watch_data, 'general_stats': general_stats})
socketio.emit("watch_update", {'watch': watch_data})
socketio.emit("general_stats_update", general_stats)

# Log after successful emit - use watch_data['uuid'] to avoid variable shadowing issues
logger.trace(f"Socket.IO: Emitted update for watch {watch_data['uuid']}, Checking now: {watch_data['checking_now']}")
@@ -9,7 +9,7 @@ set -x
# SOCKS5 related - start simple Socks5 proxy server
# SOCKSTEST=xyz should show in the logs of this service to confirm it fetched
docker run --network changedet-network -d --hostname socks5proxy --rm --name socks5proxy -p 1080:1080 -e PROXY_USER=proxy_user123 -e PROXY_PASSWORD=proxy_pass123 serjs/go-socks5-proxy
docker run --network changedet-network -d --hostname socks5proxy-noauth --rm -p 1081:1080 --name socks5proxy-noauth serjs/go-socks5-proxy
docker run --network changedet-network -d --hostname socks5proxy-noauth --rm -p 1081:1080 --name socks5proxy-noauth -e REQUIRE_AUTH=false serjs/go-socks5-proxy

echo "---------------------------------- SOCKS5 -------------------"
# SOCKS5 related - test from proxies.json
@@ -117,15 +117,16 @@ $(document).ready(function () {
}
})

socket.on('general_stats_update', function (general_stats) {
// Tabs at bottom of list
$('#watch-table-wrapper').toggleClass("has-unread-changes", general_stats.unread_changes_count !==0)
$('#watch-table-wrapper').toggleClass("has-error", general_stats.count_errors !== 0)
$('#post-list-with-errors a').text(`With errors (${ new Intl.NumberFormat(navigator.language).format(general_stats.count_errors) })`);
$('#unread-tab-counter').text(new Intl.NumberFormat(navigator.language).format(general_stats.unread_changes_count));
});

socket.on('watch_update', function (data) {
const watch = data.watch;
const general_stats = data.general_stats;

// Log the entire watch object for debugging
console.log('!!! WATCH UPDATE EVENT RECEIVED !!!');
console.log(`${watch.event_timestamp} - Watch update ${watch.uuid} - Checking now - ${watch.checking_now} - UUID in URL ${window.location.href.includes(watch.uuid)}`);
console.log('Watch data:', watch);
console.log('General stats:', general_stats);

// Updating watch table rows
const $watchRow = $('tr[data-watch-uuid="' + watch.uuid + '"]');

@@ -150,13 +151,6 @@ $(document).ready(function () {

console.log('Updated UI for watch:', watch.uuid);
}

// Tabs at bottom of list
$('#post-list-mark-views').toggleClass("has-unviewed", general_stats.has_unviewed);
$('#post-list-unread').toggleClass("has-unviewed", general_stats.has_unviewed);
$('#post-list-with-errors').toggleClass("has-error", general_stats.count_errors !== 0)
$('#post-list-with-errors a').text(`With errors (${ general_stats.count_errors })`);

$('body').toggleClass('checking-now', watch.checking_now && window.location.href.includes(watch.uuid));
});
@@ -17,18 +17,6 @@ body.checking-now {
position: fixed;
}

#post-list-buttons {
#post-list-with-errors.has-error {
display: inline-block !important;
}
#post-list-mark-views.has-unviewed {
display: inline-block !important;
}
#post-list-unread.has-unviewed {
display: inline-block !important;
}
}
@@ -127,5 +127,44 @@
display: inline-block !important;
}
}

}

#watch-table-wrapper {
/* general styling */
#post-list-buttons {
text-align: right;
padding: 0px;
margin: 0px;

li {
display: inline-block;
}

a {
border-top-left-radius: initial;
border-top-right-radius: initial;
border-bottom-left-radius: 5px;
border-bottom-right-radius: 5px;
}
}

/* post list dynamically on/off stuff */

&.has-error {
#post-list-buttons {
#post-list-with-errors {
display: inline-block !important;
}
}
}

&.has-unread-changes {
#post-list-buttons {
#post-list-unread, #post-list-mark-views, #post-list-unread {
display: inline-block !important;
}
}
}
}
@@ -34,7 +34,6 @@
transition: all 0.2s ease;
cursor: pointer;
display: block;
min-width: 60px;
text-align: center;
}
@@ -203,24 +203,6 @@ code {
}

#post-list-buttons {
text-align: right;
padding: 0px;
margin: 0px;

li {
display: inline-block;
}

a {
border-top-left-radius: initial;
border-top-right-radius: initial;
border-bottom-left-radius: 5px;
border-bottom-right-radius: 5px;
}
}

body:after {
content: "";
background: linear-gradient(130deg, var(--color-background-gradient-first), var(--color-background-gradient-second) 41.07%, var(--color-background-gradient-third) 84.05%);
File diff suppressed because one or more lines are too long
@@ -202,14 +202,13 @@ class ChangeDetectionStore:
return seconds

@property
def has_unviewed(self):
if not self.__data.get('watching'):
return None

def unread_changes_count(self):
unread_changes_count = 0
for uuid, watch in self.__data['watching'].items():
if watch.history_n >= 2 and watch.viewed == False:
return True
return False
unread_changes_count += 1

return unread_changes_count

@property
def data(self):
@@ -1,14 +1,29 @@
{% macro render_field(field) %}
<div {% if field.errors %} class="error" {% endif %}>{{ field.label }}</div>
<div {% if field.errors %} class="error" {% endif %}>{{ field(**kwargs)|safe }}
{% if field.errors %}
<ul class=errors>
{% for error in field.errors %}
<li>{{ error }}</li>
{% endfor %}
</ul>
{% endif %}
</div>
<div {% if field.errors or field.top_errors %} class="error" {% endif %}>{{ field.label }}</div>
<div {% if field.errors or field.top_errors %} class="error" {% endif %}>{{ field(**kwargs)|safe }}
{% if field.top_errors %}
top
<ul class="errors top-errors">
{% for error in field.top_errors %}
<li>{{ error }}</li>
{% endfor %}
</ul>
{% endif %}
{% if field.errors %}
<ul class=errors>
{% if field.errors is mapping and 'form' in field.errors %}
{# and subfield form errors, such as used in RequiredFormField() for TimeBetweenCheckForm sub form #}
{% set errors = field.errors['form'] %}
{% else %}
{# regular list of errors with this field #}
{% set errors = field.errors %}
{% endif %}
{% for error in errors %}
<li>{{ error }}</li>
{% endfor %}
</ul>
{% endif %}
</div>
{% endmacro %}

{% macro render_checkbox_field(field) %}
@@ -26,7 +26,10 @@
<li>Changing this will affect the comparison checksum which may trigger an alert</li>
</ul>
</span>

<br><br>
<div class="pure-control-group">
{{ render_ternary_field(form.strip_ignored_lines) }}
</div>
</fieldset>

<fieldset>
@@ -55,7 +55,8 @@ def do_test(client, live_server, make_test_use_extra_browser=False):
"tags": "",
"headers": "",
'fetch_backend': f"extra_browser_{custom_browser_name}",
'webdriver_js_execute_code': ''
'webdriver_js_execute_code': '',
"time_between_check_use_default": "y"
},
follow_redirects=True
)
@@ -28,6 +28,7 @@ def test_execute_custom_js(client, live_server, measure_memory_usage):
'fetch_backend': "html_webdriver",
'webdriver_js_execute_code': 'document.querySelector("button[name=test-button]").click();',
'headers': "testheader: yes\buser-agent: MyCustomAgent",
"time_between_check_use_default": "y",
},
follow_redirects=True
)
@@ -27,6 +27,7 @@ def test_preferred_proxy(client, live_server, measure_memory_usage):
"proxy": "proxy-two",
"tags": "",
"url": url,
"time_between_check_use_default": "y",
},
follow_redirects=True
)
@@ -62,6 +62,7 @@ def test_noproxy_option(client, live_server, measure_memory_usage):
"proxy": "no-proxy",
"tags": "",
"url": url,
"time_between_check_use_default": "y",
},
follow_redirects=True
)
@@ -44,6 +44,7 @@ def test_proxy_noconnect_custom(client, live_server, measure_memory_usage):
"url": test_url,
"fetch_backend": "html_webdriver" if os.getenv('PLAYWRIGHT_DRIVER_URL') or os.getenv("WEBDRIVER_URL") else "html_requests",
"proxy": "ui-0custom-test-proxy",
"time_between_check_use_default": "y",
}

res = client.post(
@@ -66,6 +66,7 @@ def test_socks5(client, live_server, measure_memory_usage):
"proxy": "ui-0socks5proxy",
"tags": "",
"url": test_url,
"time_between_check_use_default": "y",
},
follow_redirects=True
)
@@ -53,6 +53,7 @@ def test_socks5_from_proxiesjson_file(client, live_server, measure_memory_usage)
"proxy": "socks5proxy",
"tags": "",
"url": test_url,
"time_between_check_use_default": "y",
},
follow_redirects=True
)
@@ -157,7 +157,8 @@ def test_check_notification_email_formats_default_Text_override_HTML(client, liv
data={
"url": test_url,
"notification_format": 'HTML',
'fetch_backend': "html_requests"},
'fetch_backend': "html_requests",
"time_between_check_use_default": "y"},
follow_redirects=True
)
assert b"Updated watch." in res.data
@@ -61,7 +61,8 @@ def test_check_removed_line_contains_trigger(client, live_server, measure_memory
data={"trigger_text": 'The golden line',
"url": test_url,
'fetch_backend': "html_requests",
'filter_text_removed': 'y'},
'filter_text_removed': 'y',
"time_between_check_use_default": "y"},
follow_redirects=True
)
assert b"Updated watch." in res.data

@@ -74,7 +75,7 @@ def test_check_removed_line_contains_trigger(client, live_server, measure_memory
wait_for_all_checks(client)
time.sleep(0.5)
res = client.get(url_for("watchlist.index"))
assert b'unviewed' not in res.data
assert b'has-unread-changes' not in res.data

# The trigger line is REMOVED, this should trigger
set_original(excluding='The golden line')

@@ -83,7 +84,7 @@ def test_check_removed_line_contains_trigger(client, live_server, measure_memory
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
wait_for_all_checks(client)
res = client.get(url_for("watchlist.index"))
assert b'unviewed' in res.data
assert b'has-unread-changes' in res.data

time.sleep(1)

@@ -97,14 +98,14 @@ def test_check_removed_line_contains_trigger(client, live_server, measure_memory
wait_for_all_checks(client)
time.sleep(1)
res = client.get(url_for("watchlist.index"))
assert b'unviewed' not in res.data
assert b'has-unread-changes' not in res.data

# Remove it again, and we should get a trigger
set_original(excluding='The golden line')
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
wait_for_all_checks(client)
res = client.get(url_for("watchlist.index"))
assert b'unviewed' in res.data
assert b'has-unread-changes' in res.data

res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)
assert b'Deleted' in res.data

@@ -154,7 +155,8 @@ def test_check_add_line_contains_trigger(client, live_server, measure_memory_usa
'processor': 'text_json_diff',
'fetch_backend': "html_requests",
'filter_text_removed': '',
'filter_text_added': 'y'},
'filter_text_added': 'y',
"time_between_check_use_default": "y"},
follow_redirects=True
)
assert b"Updated watch." in res.data

@@ -167,7 +169,7 @@ def test_check_add_line_contains_trigger(client, live_server, measure_memory_usa
wait_for_all_checks(client)
res = client.get(url_for("watchlist.index"))
assert b'unviewed' not in res.data
assert b'has-unread-changes' not in res.data

# The trigger line is ADDED, this should trigger
set_original(add_line='<p>Oh yes please</p>')

@@ -175,7 +177,7 @@ def test_check_add_line_contains_trigger(client, live_server, measure_memory_usa
wait_for_all_checks(client)
res = client.get(url_for("watchlist.index"))

assert b'unviewed' in res.data
assert b'has-unread-changes' in res.data

# Takes a moment for apprise to fire
wait_for_notification_endpoint_output()

@@ -396,7 +396,7 @@ def test_api_import(client, live_server, measure_memory_usage):
res = client.post(
url_for("import") + "?tag=import-test",
data='https://website1.com\r\nhttps://website2.com',
headers={'x-api-key': api_key},
headers={'x-api-key': api_key, 'content-type': 'text/plain'},
follow_redirects=True
)
changedetectionio/tests/test_api_openapi.py (new file, 199 lines)
@@ -0,0 +1,199 @@
#!/usr/bin/env python3
"""
OpenAPI validation tests for ChangeDetection.io API

This test file specifically verifies that OpenAPI validation is working correctly
by testing various scenarios that should trigger validation errors.
"""

import time
import json
from flask import url_for
from .util import live_server_setup, wait_for_all_checks

def test_openapi_validation_invalid_content_type_on_create_watch(client, live_server, measure_memory_usage):
"""Test that creating a watch with invalid content-type triggers OpenAPI validation error."""
api_key = live_server.app.config['DATASTORE'].data['settings']['application'].get('api_access_token')

# Try to create a watch with JSON data but without proper content-type header
res = client.post(
url_for("createwatch"),
data=json.dumps({"url": "https://example.com", "title": "Test Watch"}),
headers={'x-api-key': api_key}, # Missing 'content-type': 'application/json'
follow_redirects=True
)

# Should get 400 error due to OpenAPI validation failure
assert res.status_code == 400, f"Expected 400 but got {res.status_code}"
assert b"OpenAPI validation failed" in res.data, "Should contain OpenAPI validation error message"

def test_openapi_validation_missing_required_field_create_watch(client, live_server, measure_memory_usage):
"""Test that creating a watch without required URL field triggers OpenAPI validation error."""
api_key = live_server.app.config['DATASTORE'].data['settings']['application'].get('api_access_token')

# Try to create a watch without the required 'url' field
res = client.post(
url_for("createwatch"),
data=json.dumps({"title": "Test Watch Without URL"}), # Missing required 'url' field
headers={'x-api-key': api_key, 'content-type': 'application/json'},
follow_redirects=True
)

# Should get 400 error due to missing required field
assert res.status_code == 400, f"Expected 400 but got {res.status_code}"
assert b"OpenAPI validation failed" in res.data, "Should contain OpenAPI validation error message"

def test_openapi_validation_invalid_field_in_request_body(client, live_server, measure_memory_usage):
"""Test that including invalid fields triggers OpenAPI validation error."""
api_key = live_server.app.config['DATASTORE'].data['settings']['application'].get('api_access_token')

# First create a valid watch
res = client.post(
url_for("createwatch"),
data=json.dumps({"url": "https://example.com", "title": "Test Watch"}),
headers={'x-api-key': api_key, 'content-type': 'application/json'},
follow_redirects=True
)
assert res.status_code == 201, "Watch creation should succeed"

# Get the watch list to find the UUID
res = client.get(
url_for("createwatch"),
headers={'x-api-key': api_key}
)
assert res.status_code == 200
watch_uuid = list(res.json.keys())[0]

# Now try to update the watch with an invalid field
res = client.put(
url_for("watch", uuid=watch_uuid),
headers={'x-api-key': api_key, 'content-type': 'application/json'},
data=json.dumps({
"title": "Updated title",
"invalid_field_that_doesnt_exist": "this should cause validation error"
}),
)

# Should get 400 error due to invalid field (this will be caught by internal validation)
# Note: This tests the flow where OpenAPI validation passes but internal validation catches it
assert res.status_code == 400, f"Expected 400 but got {res.status_code}"
assert b"Additional properties are not allowed" in res.data, "Should contain validation error about additional properties"

def test_openapi_validation_import_wrong_content_type(client, live_server, measure_memory_usage):
"""Test that import endpoint with wrong content-type triggers OpenAPI validation error."""
api_key = live_server.app.config['DATASTORE'].data['settings']['application'].get('api_access_token')

# Try to import URLs with JSON content-type instead of text/plain
res = client.post(
url_for("import") + "?tag=test-import",
data='https://website1.com\nhttps://website2.com',
headers={'x-api-key': api_key, 'content-type': 'application/json'}, # Wrong content-type
follow_redirects=True
)

# Should get 400 error due to content-type mismatch
assert res.status_code == 400, f"Expected 400 but got {res.status_code}"
assert b"OpenAPI validation failed" in res.data, "Should contain OpenAPI validation error message"

def test_openapi_validation_import_correct_content_type_succeeds(client, live_server, measure_memory_usage):
"""Test that import endpoint with correct content-type succeeds (positive test)."""
api_key = live_server.app.config['DATASTORE'].data['settings']['application'].get('api_access_token')

# Import URLs with correct text/plain content-type
res = client.post(
url_for("import") + "?tag=test-import",
data='https://website1.com\nhttps://website2.com',
headers={'x-api-key': api_key, 'content-type': 'text/plain'}, # Correct content-type
follow_redirects=True
)

# Should succeed
assert res.status_code == 200, f"Expected 200 but got {res.status_code}"
assert len(res.json) == 2, "Should import 2 URLs"

def test_openapi_validation_get_requests_bypass_validation(client, live_server, measure_memory_usage):
"""Test that GET requests bypass OpenAPI validation entirely."""
api_key = live_server.app.config['DATASTORE'].data['settings']['application'].get('api_access_token')

# Disable API token requirement first
res = client.post(
url_for("settings.settings_page"),
data={
"requests-time_between_check-minutes": 180,
"application-fetch_backend": "html_requests",
"application-api_access_token_enabled": ""
},
follow_redirects=True
)
assert b"Settings updated." in res.data

# Make GET request to list watches - should succeed even without API key or content-type
res = client.get(url_for("createwatch")) # No headers needed for GET
assert res.status_code == 200, f"GET requests should succeed without OpenAPI validation, got {res.status_code}"

# Should return JSON with watch list (empty in this case)
assert isinstance(res.json, dict), "Should return JSON dictionary for watch list"

def test_openapi_validation_create_tag_missing_required_title(client, live_server, measure_memory_usage):
"""Test that creating a tag without required title triggers OpenAPI validation error."""
api_key = live_server.app.config['DATASTORE'].data['settings']['application'].get('api_access_token')

# Try to create a tag without the required 'title' field
res = client.post(
url_for("tag"),
data=json.dumps({"notification_urls": ["mailto:test@example.com"]}), # Missing required 'title' field
headers={'x-api-key': api_key, 'content-type': 'application/json'},
follow_redirects=True
)

# Should get 400 error due to missing required field
assert res.status_code == 400, f"Expected 400 but got {res.status_code}"
assert b"OpenAPI validation failed" in res.data, "Should contain OpenAPI validation error message"

def test_openapi_validation_watch_update_allows_partial_updates(client, live_server, measure_memory_usage):
"""Test that watch updates allow partial updates without requiring all fields (positive test)."""
api_key = live_server.app.config['DATASTORE'].data['settings']['application'].get('api_access_token')

# First create a valid watch
res = client.post(
url_for("createwatch"),
data=json.dumps({"url": "https://example.com", "title": "Test Watch"}),
headers={'x-api-key': api_key, 'content-type': 'application/json'},
follow_redirects=True
)
assert res.status_code == 201, "Watch creation should succeed"

# Get the watch list to find the UUID
res = client.get(
url_for("createwatch"),
headers={'x-api-key': api_key}
)
assert res.status_code == 200
watch_uuid = list(res.json.keys())[0]

# Update only the title (partial update) - should succeed
res = client.put(
url_for("watch", uuid=watch_uuid),
headers={'x-api-key': api_key, 'content-type': 'application/json'},
data=json.dumps({"title": "Updated Title Only"}), # Only updating title, not URL
)

# Should succeed because UpdateWatch schema allows partial updates
assert res.status_code == 200, f"Partial updates should succeed, got {res.status_code}"

# Verify the update worked
res = client.get(
url_for("watch", uuid=watch_uuid),
headers={'x-api-key': api_key}
)
assert res.status_code == 200
assert res.json.get('title') == 'Updated Title Only', "Title should be updated"
assert res.json.get('url') == 'https://example.com', "URL should remain unchanged"
@@ -23,7 +23,7 @@ def test_basic_auth(client, live_server, measure_memory_usage):
# Check form validation
res = client.post(
url_for("ui.ui_edit.edit_page", uuid="first"),
data={"include_filters": "", "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests"},
data={"include_filters": "", "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests", "time_between_check_use_default": "y"},
follow_redirects=True
)
assert b"Updated watch." in res.data
@@ -38,9 +38,9 @@ def test_check_basic_change_detection_functionality(client, live_server, measure
# Give the thread time to pick it up
wait_for_all_checks(client)

# It should report nothing found (no new 'unviewed' class)
# It should report nothing found (no new 'has-unread-changes' class)
res = client.get(url_for("watchlist.index"))
assert b'unviewed' not in res.data
assert b'has-unread-changes' not in res.data
assert b'test-endpoint' in res.data

# Default no password set, this stuff should be always available.

@@ -74,9 +74,9 @@ def test_check_basic_change_detection_functionality(client, live_server, measure
res = client.get(url_for("ui.ui_edit.watch_get_latest_html", uuid=uuid))
assert b'which has this one new line' in res.data

# Now something should be ready, indicated by having a 'unviewed' class
# Now something should be ready, indicated by having a 'has-unread-changes' class
res = client.get(url_for("watchlist.index"))
assert b'unviewed' in res.data
assert b'has-unread-changes' in res.data

# #75, and it should be in the RSS feed
rss_token = extract_rss_token_from_UI(client)

@@ -90,7 +90,7 @@ def test_check_basic_change_detection_functionality(client, live_server, measure

assert expected_url.encode('utf-8') in res.data
#
# Following the 'diff' link, it should no longer display as 'unviewed' even after we recheck it a few times
# Following the 'diff' link, it should no longer display as 'has-unread-changes' even after we recheck it a few times
res = client.get(url_for("ui.ui_views.diff_history_page", uuid=uuid))
assert b'selected=""' in res.data, "Confirm diff history page loaded"

@@ -111,12 +111,12 @@ def test_check_basic_change_detection_functionality(client, live_server, measure
# Give the thread time to pick it up
wait_for_all_checks(client)

# It should report nothing found (no new 'unviewed' class)
# It should report nothing found (no new 'has-unread-changes' class)
res = client.get(url_for("watchlist.index"))

assert b'unviewed' not in res.data
assert b'class="has-unviewed' not in res.data
assert b'has-unread-changes' not in res.data
assert b'class="has-unread-changes' not in res.data
assert b'head title' in res.data # Should be ON by default
assert b'test-endpoint' in res.data

@@ -140,8 +140,8 @@ def test_check_basic_change_detection_functionality(client, live_server, measure
wait_for_all_checks(client)

res = client.get(url_for("watchlist.index"))
assert b'unviewed' in res.data
assert b'class="has-unviewed' in res.data
assert b'has-unread-changes' in res.data
assert b'class="has-unread-changes' in res.data
assert b'head title' not in res.data # should now be off

@@ -151,8 +151,8 @@ def test_check_basic_change_detection_functionality(client, live_server, measure
# hit the mark all viewed link
res = client.get(url_for("ui.mark_all_viewed"), follow_redirects=True)

assert b'class="has-unviewed' not in res.data
assert b'unviewed' not in res.data
assert b'class="has-unread-changes' not in res.data
assert b'has-unread-changes' not in res.data

# #2458 "clear history" should make the Watch object update its status correctly when the first snapshot lands again
client.get(url_for("ui.clear_watch_history", uuid=uuid))

@@ -165,3 +165,130 @@ def test_check_basic_change_detection_functionality(client, live_server, measure
# Cleanup everything
res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)
assert b'Deleted' in res.data
def test_non_text_mime_or_downloads(client, live_server, measure_memory_usage):
"""

https://github.com/dgtlmoon/changedetection.io/issues/3434
I noticed that a watched website can be monitored fine as long as the server sends content-type: text/plain; charset=utf-8,
but once the server sends content-type: application/octet-stream (which is usually done to force the browser to show the Download dialog),
changedetection somehow ignores all line breaks and treats the document file as if everything is on one line.

:param client:
:param live_server:
:param measure_memory_usage:
:return:
"""
with open("test-datastore/endpoint-content.txt", "w") as f:
f.write("""some random text that should be split by line
and not parsed with html_to_text
this way we know that it correctly parsed as plain text
\r\n
ok\r\n
got it\r\n
""")

test_url = url_for('test_endpoint', content_type="application/octet-stream", _external=True)

# Add our URL to the import page
res = client.post(
url_for("imports.import_page"),
data={"urls": test_url},
follow_redirects=True
)

assert b"1 Imported" in res.data

wait_for_all_checks(client)

### check the front end
res = client.get(
url_for("ui.ui_views.preview_page", uuid="first"),
follow_redirects=True
)
assert b"some random text that should be split by line\n" in res.data
####

# Check the snapshot by API that it has linefeeds too
watch_uuid = next(iter(live_server.app.config['DATASTORE'].data['watching']))
api_key = live_server.app.config['DATASTORE'].data['settings']['application'].get('api_access_token')
res = client.get(
url_for("watchhistory", uuid=watch_uuid),
headers={'x-api-key': api_key},
)

# Fetch a snapshot by timestamp, check the right one was found
res = client.get(
url_for("watchsinglehistory", uuid=watch_uuid, timestamp=list(res.json.keys())[-1]),
headers={'x-api-key': api_key},
)
assert b"some random text that should be split by line\n" in res.data


res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)


def test_standard_text_plain(client, live_server, measure_memory_usage):
"""

https://github.com/dgtlmoon/changedetection.io/issues/3434
I noticed that a watched website can be monitored fine as long as the server sends content-type: text/plain; charset=utf-8,
but once the server sends content-type: application/octet-stream (which is usually done to force the browser to show the Download dialog),
changedetection somehow ignores all line breaks and treats the document file as if everything is on one line.

The real bug here can be that it will try to process plain-text as HTML, losing <etc>

:param client:
:param live_server:
:param measure_memory_usage:
:return:
"""
with open("test-datastore/endpoint-content.txt", "w") as f:
f.write("""some random text that should be split by line
and not parsed with html_to_text
<title>Even this title should stay because we are just plain text</title>
this way we know that it correctly parsed as plain text
\r\n
ok\r\n
got it\r\n
""")

test_url = url_for('test_endpoint', content_type="text/plain", _external=True)

# Add our URL to the import page
res = client.post(
url_for("imports.import_page"),
data={"urls": test_url},
follow_redirects=True
)

assert b"1 Imported" in res.data

wait_for_all_checks(client)

### check the front end
res = client.get(
url_for("ui.ui_views.preview_page", uuid="first"),
follow_redirects=True
)
assert b"some random text that should be split by line\n" in res.data
####

# Check the snapshot by API that it has linefeeds too
watch_uuid = next(iter(live_server.app.config['DATASTORE'].data['watching']))
api_key = live_server.app.config['DATASTORE'].data['settings']['application'].get('api_access_token')
res = client.get(
url_for("watchhistory", uuid=watch_uuid),
headers={'x-api-key': api_key},
)

# Fetch a snapshot by timestamp, check the right one was found
res = client.get(
url_for("watchsinglehistory", uuid=watch_uuid, timestamp=list(res.json.keys())[-1]),
headers={'x-api-key': api_key},
)
assert b"some random text that should be split by line\n" in res.data
assert b"<title>Even this title should stay because we are just plain text</title>" in res.data

res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)


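The two tests above pin down the intended behaviour for non-HTML responses: run html_to_text only when the Content-Type actually looks like HTML, and otherwise keep the body as plain text so its line breaks (and even literal tags) survive into the snapshot. A rough sketch of that gate, assuming the helper below is purely illustrative and not the project's real function (only html_to_text is named by the tests above):

def extract_text(body: str, content_type: str) -> str:
    # Illustrative sketch only - the name and exact rules here are assumptions
    ct = (content_type or "").lower()
    if "text/html" in ct or "application/xhtml" in ct:
        return html_to_text(body)  # HTML: strip markup as before
    # text/plain, application/octet-stream, etc: keep the body verbatim so
    # CRLF/LF line breaks and literal <title> text stay in the snapshot
    return body.replace("\r\n", "\n")
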
@@ -58,6 +58,7 @@ def run_socketio_watch_update_test(client, live_server, password_mode=""):
|
||||
|
||||
has_watch_update = False
|
||||
has_unviewed_update = False
|
||||
got_general_stats_update = False
|
||||
|
||||
for i in range(10):
|
||||
# Get received events
|
||||
@@ -65,15 +66,11 @@ def run_socketio_watch_update_test(client, live_server, password_mode=""):
|
||||
|
||||
if received:
|
||||
logger.info(f"Received {len(received)} events after {i+1} seconds")
|
||||
|
||||
# Check for watch_update events with unviewed=True
|
||||
for event in received:
|
||||
if event['name'] == 'watch_update':
|
||||
has_watch_update = True
|
||||
if event['args'][0]['watch'].get('unviewed', False):
|
||||
has_unviewed_update = True
|
||||
logger.info("Found unviewed update event!")
|
||||
break
|
||||
if event['name'] == 'general_stats_update':
|
||||
got_general_stats_update = True
|
||||
|
||||
if has_unviewed_update:
|
||||
break
|
||||
@@ -92,7 +89,7 @@ def run_socketio_watch_update_test(client, live_server, password_mode=""):
|
||||
assert has_watch_update, "No watch_update events received"
|
||||
|
||||
# Verify we received an unviewed event
|
||||
assert has_unviewed_update, "No watch_update event with unviewed=True received"
|
||||
assert got_general_stats_update, "Got general stats update event"
|
||||
|
||||
# Alternatively, check directly if the watch in the datastore is marked as unviewed
|
||||
from changedetectionio.flask_app import app
|
||||
|
||||
@@ -86,7 +86,8 @@ def test_check_block_changedetection_text_NOT_present(client, live_server, measu
|
||||
url_for("ui.ui_edit.edit_page", uuid="first"),
|
||||
data={"text_should_not_be_present": ignore_text,
|
||||
"url": test_url,
|
||||
'fetch_backend': "html_requests"
|
||||
'fetch_backend': "html_requests",
|
||||
"time_between_check_use_default": "y"
|
||||
},
|
||||
follow_redirects=True
|
||||
)
|
||||
@@ -106,9 +107,9 @@ def test_check_block_changedetection_text_NOT_present(client, live_server, measu
|
||||
# Give the thread time to pick it up
|
||||
wait_for_all_checks(client)
|
||||
|
||||
# It should report nothing found (no new 'unviewed' class)
|
||||
# It should report nothing found (no new 'has-unread-changes' class)
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b'unviewed' not in res.data
|
||||
assert b'has-unread-changes' not in res.data
|
||||
assert b'/test-endpoint' in res.data
|
||||
|
||||
# The page changed, BUT the text is still there, just the rest of it changes, we should not see a change
|
||||
@@ -119,9 +120,9 @@ def test_check_block_changedetection_text_NOT_present(client, live_server, measu
|
||||
# Give the thread time to pick it up
|
||||
wait_for_all_checks(client)
|
||||
|
||||
# It should report nothing found (no new 'unviewed' class)
|
||||
# It should report nothing found (no new 'has-unread-changes' class)
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b'unviewed' not in res.data
|
||||
assert b'has-unread-changes' not in res.data
|
||||
assert b'/test-endpoint' in res.data
|
||||
|
||||
# 2548
|
||||
@@ -130,7 +131,7 @@ def test_check_block_changedetection_text_NOT_present(client, live_server, measu
|
||||
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
|
||||
wait_for_all_checks(client)
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b'unviewed' not in res.data
|
||||
assert b'has-unread-changes' not in res.data
|
||||
|
||||
|
||||
# Now we set a change where the text is gone AND its different content, it should now trigger
|
||||
@@ -138,7 +139,7 @@ def test_check_block_changedetection_text_NOT_present(client, live_server, measu
|
||||
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
|
||||
wait_for_all_checks(client)
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b'unviewed' in res.data
|
||||
assert b'has-unread-changes' in res.data
|
||||
|
||||
|
||||
|
||||
|
||||
@@ -105,6 +105,7 @@ def test_conditions_with_text_and_number(client, live_server):
|
||||
"conditions-5-operator": "contains_regex",
|
||||
"conditions-5-field": "page_filtered_text",
|
||||
"conditions-5-value": "\d",
|
||||
"time_between_check_use_default": "y",
|
||||
},
|
||||
follow_redirects=True
|
||||
)
|
||||
@@ -124,7 +125,7 @@ def test_conditions_with_text_and_number(client, live_server):
|
||||
time.sleep(2)
|
||||
# 75 is > 20 and < 100 and contains "5"
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b'unviewed' in res.data
|
||||
assert b'has-unread-changes' in res.data
|
||||
|
||||
|
||||
# Case 2: Change with one condition violated
|
||||
@@ -140,7 +141,7 @@ def test_conditions_with_text_and_number(client, live_server):
|
||||
|
||||
# Should NOT be marked as having changes since not all conditions are met
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b'unviewed' not in res.data
|
||||
assert b'has-unread-changes' not in res.data
|
||||
|
||||
res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)
|
||||
assert b'Deleted' in res.data
|
||||
@@ -288,7 +289,8 @@ def test_lev_conditions_plugin(client, live_server, measure_memory_usage):
|
||||
"conditions_match_logic": CONDITIONS_MATCH_LOGIC_DEFAULT, # ALL = AND logic
|
||||
"conditions-0-field": "levenshtein_ratio",
|
||||
"conditions-0-operator": "<",
|
||||
"conditions-0-value": "0.8" # needs to be more of a diff to trigger a change
|
||||
"conditions-0-value": "0.8", # needs to be more of a diff to trigger a change
|
||||
"time_between_check_use_default": "y"
|
||||
},
|
||||
follow_redirects=True
|
||||
)
|
||||
@@ -297,7 +299,7 @@ def test_lev_conditions_plugin(client, live_server, measure_memory_usage):
|
||||
|
||||
wait_for_all_checks(client)
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b'unviewed' not in res.data
|
||||
assert b'has-unread-changes' not in res.data
|
||||
|
||||
# Check the content saved initially, even tho a condition was set - this is the first snapshot so shouldnt be affected by conditions
|
||||
res = client.get(
|
||||
@@ -324,7 +326,7 @@ def test_lev_conditions_plugin(client, live_server, measure_memory_usage):
|
||||
wait_for_all_checks(client)
|
||||
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b'unviewed' not in res.data #because this will be like 0.90 not 0.8 threshold
|
||||
assert b'has-unread-changes' not in res.data #because this will be like 0.90 not 0.8 threshold
|
||||
|
||||
############### Now change it a MORE THAN 50%
|
||||
test_return_data = """<html>
|
||||
@@ -343,7 +345,7 @@ def test_lev_conditions_plugin(client, live_server, measure_memory_usage):
|
||||
assert b'Queued 1 watch for rechecking.' in res.data
|
||||
wait_for_all_checks(client)
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b'unviewed' in res.data
|
||||
assert b'has-unread-changes' in res.data
|
||||
# cleanup for the next
|
||||
client.get(
|
||||
url_for("ui.form_delete", uuid="all"),
|
||||
|
||||
@@ -95,7 +95,7 @@ def test_check_markup_include_filters_restriction(client, live_server, measure_m
|
||||
# Add our URL to the import page
|
||||
res = client.post(
|
||||
url_for("ui.ui_edit.edit_page", uuid="first"),
|
||||
data={"include_filters": include_filters, "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests"},
|
||||
data={"include_filters": include_filters, "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests", "time_between_check_use_default": "y"},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"Updated watch." in res.data
|
||||
@@ -116,10 +116,10 @@ def test_check_markup_include_filters_restriction(client, live_server, measure_m
|
||||
# Give the thread time to pick it up
|
||||
time.sleep(sleep_time_for_fetch_thread)
|
||||
|
||||
# It should have 'unviewed' still
|
||||
# It should have 'has-unread-changes' still
|
||||
# Because it should be looking at only that 'sametext' id
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b'unviewed' in res.data
|
||||
assert b'has-unread-changes' in res.data
|
||||
|
||||
|
||||
# Tests the whole stack works with the CSS Filter
|
||||
@@ -154,7 +154,8 @@ def test_check_multiple_filters(client, live_server, measure_memory_usage):
|
||||
"url": test_url,
|
||||
"tags": "",
|
||||
"headers": "",
|
||||
'fetch_backend': "html_requests"},
|
||||
'fetch_backend': "html_requests",
|
||||
"time_between_check_use_default": "y"},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"Updated watch." in res.data
|
||||
@@ -208,7 +209,8 @@ def test_filter_is_empty_help_suggestion(client, live_server, measure_memory_usa
|
||||
"url": test_url,
|
||||
"tags": "",
|
||||
"headers": "",
|
||||
'fetch_backend': "html_requests"},
|
||||
'fetch_backend': "html_requests",
|
||||
"time_between_check_use_default": "y"},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"Updated watch." in res.data
|
||||
|
||||
@@ -171,6 +171,7 @@ def test_element_removal_full(client, live_server, measure_memory_usage):
|
||||
"tags": "",
|
||||
"headers": "",
|
||||
"fetch_backend": "html_requests",
|
||||
"time_between_check_use_default": "y",
|
||||
},
|
||||
follow_redirects=True,
|
||||
)
|
||||
@@ -189,7 +190,7 @@ def test_element_removal_full(client, live_server, measure_memory_usage):
|
||||
|
||||
wait_for_all_checks(client)
|
||||
|
||||
# so that we set the state to 'unviewed' after all the edits
|
||||
# so that we set the state to 'has-unread-changes' after all the edits
|
||||
client.get(url_for("ui.ui_views.diff_history_page", uuid="first"))
|
||||
|
||||
# Make a change to header/footer/nav
|
||||
@@ -245,6 +246,7 @@ body > table > tr:nth-child(3) > td:nth-child(3)""",
|
||||
"url": test_url,
|
||||
"tags": "",
|
||||
"fetch_backend": "html_requests",
|
||||
"time_between_check_use_default": "y",
|
||||
},
|
||||
follow_redirects=True,
|
||||
)
|
||||
|
||||
@@ -31,7 +31,7 @@ def _runner_test_http_errors(client, live_server, http_code, expected_text):
|
||||
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
# no change
|
||||
assert b'unviewed' not in res.data
|
||||
assert b'has-unread-changes' not in res.data
|
||||
assert bytes(expected_text.encode('utf-8')) in res.data
|
||||
|
||||
|
||||
@@ -127,7 +127,8 @@ def test_low_level_errors_clear_correctly(client, live_server, measure_memory_us
|
||||
url_for("ui.ui_edit.edit_page", uuid="first"),
|
||||
data={
|
||||
"url": test_url,
|
||||
"fetch_backend": "html_requests"},
|
||||
"fetch_backend": "html_requests",
|
||||
"time_between_check_use_default": "y"},
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
|
||||
@@ -95,7 +95,8 @@ def test_check_filter_multiline(client, live_server, measure_memory_usage):
|
||||
"url": test_url,
|
||||
"tags": "",
|
||||
"headers": "",
|
||||
'fetch_backend': "html_requests"
|
||||
'fetch_backend': "html_requests",
|
||||
"time_between_check_use_default": "y"
|
||||
},
|
||||
follow_redirects=True
|
||||
)
|
||||
@@ -149,7 +150,8 @@ def test_check_filter_and_regex_extract(client, live_server, measure_memory_usag
|
||||
"url": test_url,
|
||||
"tags": "",
|
||||
"headers": "",
|
||||
'fetch_backend': "html_requests"
|
||||
'fetch_backend': "html_requests",
|
||||
"time_between_check_use_default": "y"
|
||||
},
|
||||
follow_redirects=True
|
||||
)
|
||||
@@ -172,10 +174,10 @@ def test_check_filter_and_regex_extract(client, live_server, measure_memory_usag
|
||||
# Give the thread time to pick it up
|
||||
wait_for_all_checks(client)
|
||||
|
||||
# It should have 'unviewed' still
|
||||
# It should have 'has-unread-changes' still
|
||||
# Because it should be looking at only that 'sametext' id
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b'unviewed' in res.data
|
||||
assert b'has-unread-changes' in res.data
|
||||
|
||||
# Check HTML conversion detected and workd
|
||||
res = client.get(
|
||||
@@ -222,7 +224,8 @@ def test_regex_error_handling(client, live_server, measure_memory_usage):
|
||||
url_for("ui.ui_edit.edit_page", uuid="first"),
|
||||
data={"extract_text": '/something bad\d{3/XYZ',
|
||||
"url": test_url,
|
||||
"fetch_backend": "html_requests"},
|
||||
"fetch_backend": "html_requests",
|
||||
"time_between_check_use_default": "y"},
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
|
||||
@@ -94,7 +94,8 @@ def test_filter_doesnt_exist_then_exists_should_get_notification(client, live_se
|
||||
"title": "my title",
|
||||
"headers": "",
|
||||
"include_filters": '.ticket-available',
|
||||
"fetch_backend": "html_requests"})
|
||||
"fetch_backend": "html_requests",
|
||||
"time_between_check_use_default": "y"})
|
||||
|
||||
res = client.post(
|
||||
url_for("ui.ui_edit.edit_page", uuid="first"),
|
||||
|
||||
@@ -72,6 +72,7 @@ def run_filter_test(client, live_server, content_filter):
|
||||
"notification_format": "Text",
|
||||
"fetch_backend": "html_requests",
|
||||
"filter_failure_notification_send": 'y',
|
||||
"time_between_check_use_default": "y",
|
||||
"headers": "",
|
||||
"tags": "my tag",
|
||||
"title": "my title 123",
|
||||
|
||||
@@ -424,7 +424,8 @@ def test_order_of_filters_tag_filter_and_watch_filter(client, live_server, measu
|
||||
"url": test_url,
|
||||
"tags": "test-tag-keep-order",
|
||||
"headers": "",
|
||||
'fetch_backend': "html_requests"},
|
||||
'fetch_backend': "html_requests",
|
||||
"time_between_check_use_default": "y"},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"Updated watch." in res.data
|
||||
|
||||
@@ -58,3 +58,39 @@ def test_ignore(client, live_server, measure_memory_usage):
# Should be in base.html
assert b'csrftoken' in res.data


def test_strip_ignore_lines(client, live_server, measure_memory_usage):
# live_server_setup(live_server) # Setup on conftest per function
set_original_ignore_response()


# Goto the settings page, add our ignore text
res = client.post(
url_for("settings.settings_page"),
data={
"requests-time_between_check-minutes": 180,
"application-ignore_whitespace": "y",
"application-strip_ignored_lines": "y",
"application-global_ignore_text": "Which is across multiple",
'application-fetch_backend': "html_requests"
},
follow_redirects=True
)
assert b"Settings updated." in res.data

test_url = url_for('test_endpoint', _external=True)
res = client.post(
url_for("imports.import_page"),
data={"urls": test_url},
follow_redirects=True
)
assert b"1 Imported" in res.data

# Give the thread time to pick it up
wait_for_all_checks(client)
uuid = next(iter(live_server.app.config['DATASTORE'].data['watching']))

# It should not be in the preview anymore
res = client.get(url_for("ui.ui_views.preview_page", uuid=uuid))
assert b'<div class="ignored">' not in res.data
assert b'Which is across multiple' not in res.data

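For context, the application-strip_ignored_lines option exercised above goes a step further than the normal ignore behaviour: matching lines are dropped from the stored text entirely, which is why the preview no longer contains them at all. A small sketch of the idea (the function and argument names are assumptions, not the project's actual code):

def strip_ignored_lines(text: str, ignore_snippets: list) -> str:
    # Drop any line containing one of the ignore snippets (case-insensitive),
    # so the ignored content never reaches the snapshot or the preview.
    kept = [line for line in text.splitlines()
            if not any(s.lower() in line.lower() for s in ignore_snippets)]
    return "\n".join(kept)

# e.g. strip_ignored_lines(page_text, ["Which is across multiple"])
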
@@ -111,7 +111,7 @@ def test_check_ignore_text_functionality(client, live_server, measure_memory_usa
|
||||
# Add our URL to the import page
|
||||
res = client.post(
|
||||
url_for("ui.ui_edit.edit_page", uuid="first"),
|
||||
data={"ignore_text": ignore_text, "url": test_url, 'fetch_backend': "html_requests"},
|
||||
data={"ignore_text": ignore_text, "url": test_url, 'fetch_backend': "html_requests", "time_between_check_use_default": "y"},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"Updated watch." in res.data
|
||||
@@ -128,9 +128,9 @@ def test_check_ignore_text_functionality(client, live_server, measure_memory_usa
|
||||
# Give the thread time to pick it up
|
||||
wait_for_all_checks(client)
|
||||
|
||||
# It should report nothing found (no new 'unviewed' class)
|
||||
# It should report nothing found (no new 'has-unread-changes' class)
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b'unviewed' not in res.data
|
||||
assert b'has-unread-changes' not in res.data
|
||||
assert b'/test-endpoint' in res.data
|
||||
|
||||
# Make a change
|
||||
@@ -141,9 +141,9 @@ def test_check_ignore_text_functionality(client, live_server, measure_memory_usa
|
||||
# Give the thread time to pick it up
|
||||
wait_for_all_checks(client)
|
||||
|
||||
# It should report nothing found (no new 'unviewed' class)
|
||||
# It should report nothing found (no new 'has-unread-changes' class)
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b'unviewed' not in res.data
|
||||
assert b'has-unread-changes' not in res.data
|
||||
assert b'/test-endpoint' in res.data
|
||||
|
||||
|
||||
@@ -154,7 +154,7 @@ def test_check_ignore_text_functionality(client, live_server, measure_memory_usa
|
||||
wait_for_all_checks(client)
|
||||
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b'unviewed' in res.data
|
||||
assert b'has-unread-changes' in res.data
|
||||
|
||||
res = client.get(url_for("ui.ui_views.preview_page", uuid="first"))
|
||||
|
||||
@@ -205,7 +205,7 @@ def _run_test_global_ignore(client, as_source=False, extra_ignore=""):
|
||||
#Adding some ignore text should not trigger a change
|
||||
res = client.post(
|
||||
url_for("ui.ui_edit.edit_page", uuid="first"),
|
||||
data={"ignore_text": "something irrelevent but just to check", "url": test_url, 'fetch_backend': "html_requests"},
|
||||
data={"ignore_text": "something irrelevent but just to check", "url": test_url, 'fetch_backend': "html_requests", "time_between_check_use_default": "y"},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"Updated watch." in res.data
|
||||
@@ -222,9 +222,9 @@ def _run_test_global_ignore(client, as_source=False, extra_ignore=""):
|
||||
# Trigger a check
|
||||
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
|
||||
wait_for_all_checks(client)
|
||||
# It should report nothing found (no new 'unviewed' class), adding random ignore text should not cause a change
|
||||
# It should report nothing found (no new 'has-unread-changes' class), adding random ignore text should not cause a change
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b'unviewed' not in res.data
|
||||
assert b'has-unread-changes' not in res.data
|
||||
assert b'/test-endpoint' in res.data
|
||||
#####
|
||||
|
||||
@@ -238,10 +238,10 @@ def _run_test_global_ignore(client, as_source=False, extra_ignore=""):
|
||||
# Give the thread time to pick it up
|
||||
wait_for_all_checks(client)
|
||||
|
||||
# It should report nothing found (no new 'unviewed' class)
|
||||
# It should report nothing found (no new 'has-unread-changes' class)
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
|
||||
assert b'unviewed' not in res.data
|
||||
assert b'has-unread-changes' not in res.data
|
||||
assert b'/test-endpoint' in res.data
|
||||
|
||||
# Just to be sure.. set a regular modified change that will trigger it
|
||||
@@ -249,7 +249,7 @@ def _run_test_global_ignore(client, as_source=False, extra_ignore=""):
|
||||
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
|
||||
wait_for_all_checks(client)
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b'unviewed' in res.data
|
||||
assert b'has-unread-changes' in res.data
|
||||
|
||||
res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)
|
||||
assert b'Deleted' in res.data
|
||||
|
||||
@@ -111,7 +111,7 @@ def test_render_anchor_tag_content_true(client, live_server, measure_memory_usag
|
||||
assert '(/modified_link)' in res.data.decode()
|
||||
|
||||
# since the link has changed, and we chose to render anchor tag content,
|
||||
# we should detect a change (new 'unviewed' class)
|
||||
# we should detect a change (new 'has-unread-changes' class)
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b"unviewed" in res.data
|
||||
assert b"/test-endpoint" in res.data
|
||||
|
||||
@@ -77,9 +77,9 @@ def test_normal_page_check_works_with_ignore_status_code(client, live_server, me
|
||||
# Give the thread time to pick it up
|
||||
wait_for_all_checks(client)
|
||||
|
||||
# It should report nothing found (no new 'unviewed' class)
|
||||
# It should report nothing found (no new 'has-unread-changes' class)
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b'unviewed' in res.data
|
||||
assert b'has-unread-changes' in res.data
|
||||
assert b'/test-endpoint' in res.data
|
||||
|
||||
|
||||
@@ -108,7 +108,7 @@ def test_403_page_check_works_with_ignore_status_code(client, live_server, measu
|
||||
# Add our URL to the import page
|
||||
res = client.post(
|
||||
url_for("ui.ui_edit.edit_page", uuid="first"),
|
||||
data={"ignore_status_codes": "y", "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests"},
|
||||
data={"ignore_status_codes": "y", "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests", "time_between_check_use_default": "y"},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"Updated watch." in res.data
|
||||
@@ -124,8 +124,8 @@ def test_403_page_check_works_with_ignore_status_code(client, live_server, measu
|
||||
# Give the thread time to pick it up
|
||||
wait_for_all_checks(client)
|
||||
|
||||
# It should have 'unviewed' still
|
||||
# It should have 'has-unread-changes' still
|
||||
# Because it should be looking at only that 'sametext' id
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b'unviewed' in res.data
|
||||
assert b'has-unread-changes' in res.data
|
||||
|
||||
|
||||
@@ -89,7 +89,7 @@ def test_check_ignore_whitespace(client, live_server, measure_memory_usage):
|
||||
# Give the thread time to pick it up
|
||||
time.sleep(sleep_time_for_fetch_thread)
|
||||
|
||||
# It should report nothing found (no new 'unviewed' class)
|
||||
# It should report nothing found (no new 'has-unread-changes' class)
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b'unviewed' not in res.data
|
||||
assert b'has-unread-changes' not in res.data
|
||||
assert b'/test-endpoint' in res.data
|
||||
|
||||
@@ -26,7 +26,7 @@ def test_jinja2_in_url_query(client, live_server, measure_memory_usage):
|
||||
assert b"Watch added" in res.data
|
||||
wait_for_all_checks(client)
|
||||
|
||||
# It should report nothing found (no new 'unviewed' class)
|
||||
# It should report nothing found (no new 'has-unread-changes' class)
|
||||
res = client.get(
|
||||
url_for("ui.ui_views.preview_page", uuid="first"),
|
||||
follow_redirects=True
|
||||
@@ -51,7 +51,7 @@ def test_jinja2_security_url_query(client, live_server, measure_memory_usage):
|
||||
assert b"Watch added" in res.data
|
||||
wait_for_all_checks(client)
|
||||
|
||||
# It should report nothing found (no new 'unviewed' class)
|
||||
# It should report nothing found (no new 'has-unread-changes' class)
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b'is invalid and cannot be used' in res.data
|
||||
# Some of the spewed output from the subclasses
|
||||
|
||||
@@ -257,7 +257,8 @@ def check_json_filter(json_filter, client, live_server):
|
||||
"url": test_url,
|
||||
"tags": "",
|
||||
"headers": "",
|
||||
"fetch_backend": "html_requests"
|
||||
"fetch_backend": "html_requests",
|
||||
"time_between_check_use_default": "y"
|
||||
},
|
||||
follow_redirects=True
|
||||
)
|
||||
@@ -279,9 +280,9 @@ def check_json_filter(json_filter, client, live_server):
|
||||
# Give the thread time to pick it up
|
||||
wait_for_all_checks(client)
|
||||
|
||||
# It should have 'unviewed' still
|
||||
# It should have 'has-unread-changes' still
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b'unviewed' in res.data
|
||||
assert b'has-unread-changes' in res.data
|
||||
|
||||
# Should not see this, because its not in the JSONPath we entered
|
||||
res = client.get(url_for("ui.ui_views.diff_history_page", uuid="first"))
|
||||
@@ -328,7 +329,8 @@ def check_json_filter_bool_val(json_filter, client, live_server):
|
||||
"url": test_url,
|
||||
"tags": "",
|
||||
"headers": "",
|
||||
"fetch_backend": "html_requests"
|
||||
"fetch_backend": "html_requests",
|
||||
"time_between_check_use_default": "y"
|
||||
},
|
||||
follow_redirects=True
|
||||
)
|
||||
@@ -393,7 +395,8 @@ def check_json_ext_filter(json_filter, client, live_server):
|
||||
"url": test_url,
|
||||
"tags": "",
|
||||
"headers": "",
|
||||
"fetch_backend": "html_requests"
|
||||
"fetch_backend": "html_requests",
|
||||
"time_between_check_use_default": "y"
|
||||
},
|
||||
follow_redirects=True
|
||||
)
|
||||
@@ -415,14 +418,14 @@ def check_json_ext_filter(json_filter, client, live_server):
|
||||
# Give the thread time to pick it up
|
||||
wait_for_all_checks(client)
|
||||
|
||||
# It should have 'unviewed'
|
||||
# It should have 'has-unread-changes'
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b'unviewed' in res.data
|
||||
assert b'has-unread-changes' in res.data
|
||||
|
||||
res = client.get(url_for("ui.ui_views.preview_page", uuid="first"))
|
||||
|
||||
# We should never see 'ForSale' because we are selecting on 'Sold' in the rule,
|
||||
# But we should know it triggered ('unviewed' assert above)
|
||||
# But we should know it triggered ('has-unread-changes' assert above)
|
||||
assert b'ForSale' not in res.data
|
||||
assert b'Sold' in res.data
|
||||
|
||||
@@ -462,7 +465,7 @@ def test_ignore_json_order(client, live_server, measure_memory_usage):
|
||||
wait_for_all_checks(client)
|
||||
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b'unviewed' not in res.data
|
||||
assert b'has-unread-changes' not in res.data
|
||||
|
||||
# Just to be sure it still works
|
||||
with open("test-datastore/endpoint-content.txt", "w") as f:
|
||||
@@ -473,7 +476,7 @@ def test_ignore_json_order(client, live_server, measure_memory_usage):
|
||||
wait_for_all_checks(client)
|
||||
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b'unviewed' in res.data
|
||||
assert b'has-unread-changes' in res.data
|
||||
|
||||
res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)
|
||||
assert b'Deleted' in res.data
|
||||
|
||||
@@ -38,6 +38,7 @@ def test_content_filter_live_preview(client, live_server, measure_memory_usage):
|
||||
"ignore_text": "something to ignore",
|
||||
"trigger_text": "something to trigger",
|
||||
"url": test_url,
|
||||
"time_between_check_use_default": "y",
|
||||
},
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
@@ -40,9 +40,9 @@ def test_check_basic_change_detection_functionality(client, live_server, measure
|
||||
|
||||
wait_for_all_checks(client)
|
||||
|
||||
# It should report nothing found (no new 'unviewed' class)
|
||||
# It should report nothing found (no new 'has-unread-changes' class)
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b'unviewed' not in res.data
|
||||
assert b'has-unread-changes' not in res.data
|
||||
|
||||
|
||||
#####################
|
||||
@@ -62,9 +62,9 @@ def test_check_basic_change_detection_functionality(client, live_server, measure
|
||||
# Give the thread time to pick it up
|
||||
wait_for_all_checks(client)
|
||||
|
||||
# It should report nothing found (no new 'unviewed' class)
|
||||
# It should report nothing found (no new 'has-unread-changes' class)
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b'unviewed' not in res.data
|
||||
assert b'has-unread-changes' not in res.data
|
||||
|
||||
uuid = next(iter(live_server.app.config['DATASTORE'].data['watching']))
|
||||
watch = live_server.app.config['DATASTORE'].data['watching'][uuid]
|
||||
@@ -92,9 +92,9 @@ def test_check_basic_change_detection_functionality(client, live_server, measure
|
||||
# Give the thread time to pick it up
|
||||
wait_for_all_checks(client)
|
||||
|
||||
# It should report nothing found (no new 'unviewed' class)
|
||||
# It should report nothing found (no new 'has-unread-changes' class)
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b'unviewed' in res.data
|
||||
assert b'has-unread-changes' in res.data
|
||||
client.get(url_for("ui.mark_all_viewed"), follow_redirects=True)
|
||||
time.sleep(0.2)
|
||||
|
||||
@@ -108,7 +108,7 @@ def test_check_basic_change_detection_functionality(client, live_server, measure
|
||||
|
||||
wait_for_all_checks(client)
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b'unviewed' in res.data # A change should have registered because empty_pages_are_a_change is ON
|
||||
assert b'has-unread-changes' in res.data # A change should have registered because empty_pages_are_a_change is ON
|
||||
assert b'fetch-error' not in res.data
|
||||
|
||||
#
|
||||
|
||||
@@ -108,7 +108,8 @@ def test_check_notification(client, live_server, measure_memory_usage):
|
||||
"tags": "my tag, my second tag",
|
||||
"title": "my title",
|
||||
"headers": "",
|
||||
"fetch_backend": "html_requests"})
|
||||
"fetch_backend": "html_requests",
|
||||
"time_between_check_use_default": "y"})
|
||||
|
||||
res = client.post(
|
||||
url_for("ui.ui_edit.edit_page", uuid="first"),
|
||||
@@ -225,7 +226,8 @@ def test_check_notification(client, live_server, measure_memory_usage):
|
||||
"notification_title": '',
|
||||
"notification_body": '',
|
||||
"notification_format": default_notification_format,
|
||||
"fetch_backend": "html_requests"},
|
||||
"fetch_backend": "html_requests",
|
||||
"time_between_check_use_default": "y"},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"Updated watch." in res.data
|
||||
|
||||
@@ -36,7 +36,8 @@ def test_check_notification_error_handling(client, live_server, measure_memory_u
|
||||
"title": "",
|
||||
"headers": "",
|
||||
"time_between_check-minutes": "180",
|
||||
"fetch_backend": "html_requests"},
|
||||
"fetch_backend": "html_requests",
|
||||
"time_between_check_use_default": "y"},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"Updated watch." in res.data
|
||||
|
||||
@@ -49,9 +49,9 @@ def test_fetch_pdf(client, live_server, measure_memory_usage):
|
||||
|
||||
wait_for_all_checks(client)
|
||||
|
||||
# Now something should be ready, indicated by having a 'unviewed' class
|
||||
# Now something should be ready, indicated by having a 'has-unread-changes' class
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b'unviewed' in res.data
|
||||
assert b'has-unread-changes' in res.data
|
||||
|
||||
# The original checksum should be not be here anymore (cdio adds it to the bottom of the text)
|
||||
|
||||
|
||||
@@ -47,9 +47,9 @@ def test_fetch_pdf(client, live_server, measure_memory_usage):
|
||||
|
||||
wait_for_all_checks(client)
|
||||
|
||||
# Now something should be ready, indicated by having a 'unviewed' class
|
||||
# Now something should be ready, indicated by having a 'has-unread-changes' class
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b'unviewed' in res.data
|
||||
assert b'has-unread-changes' in res.data
|
||||
|
||||
# The original checksum should be not be here anymore (cdio adds it to the bottom of the text)
|
||||
|
||||
|
||||
@@ -44,7 +44,8 @@ def test_headers_in_request(client, live_server, measure_memory_usage):
|
||||
"url": test_url,
|
||||
"tags": "",
|
||||
"fetch_backend": 'html_webdriver' if os.getenv('PLAYWRIGHT_DRIVER_URL') else 'html_requests',
|
||||
"headers": "jinja2:{{ 1+1 }}\nxxx:ooo\ncool:yeah\r\ncookie:"+cookie_header},
|
||||
"headers": "jinja2:{{ 1+1 }}\nxxx:ooo\ncool:yeah\r\ncookie:"+cookie_header,
|
||||
"time_between_check_use_default": "y"},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"Updated watch." in res.data
|
||||
@@ -109,7 +110,8 @@ def test_body_in_request(client, live_server, measure_memory_usage):
|
||||
"tags": "",
|
||||
"method": "POST",
|
||||
"fetch_backend": "html_requests",
|
||||
"body": "something something"},
|
||||
"body": "something something",
|
||||
"time_between_check_use_default": "y"},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"Updated watch." in res.data
|
||||
@@ -126,7 +128,8 @@ def test_body_in_request(client, live_server, measure_memory_usage):
|
||||
"tags": "",
|
||||
"method": "POST",
|
||||
"fetch_backend": "html_requests",
|
||||
"body": body_value},
|
||||
"body": body_value,
|
||||
"time_between_check_use_default": "y"},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"Updated watch." in res.data
|
||||
@@ -172,7 +175,8 @@ def test_body_in_request(client, live_server, measure_memory_usage):
|
||||
"tags": "",
|
||||
"method": "GET",
|
||||
"fetch_backend": "html_requests",
|
||||
"body": "invalid"},
|
||||
"body": "invalid",
|
||||
"time_between_check_use_default": "y"},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"Body must be empty when Request Method is set to GET" in res.data
|
||||
@@ -211,7 +215,8 @@ def test_method_in_request(client, live_server, measure_memory_usage):
|
||||
"url": test_url,
|
||||
"tags": "",
|
||||
"fetch_backend": "html_requests",
|
||||
"method": "invalid"},
|
||||
"method": "invalid",
|
||||
"time_between_check_use_default": "y"},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"Not a valid choice" in res.data
|
||||
@@ -223,7 +228,8 @@ def test_method_in_request(client, live_server, measure_memory_usage):
|
||||
"url": test_url,
|
||||
"tags": "",
|
||||
"fetch_backend": "html_requests",
|
||||
"method": "PATCH"},
|
||||
"method": "PATCH",
|
||||
"time_between_check_use_default": "y"},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"Updated watch." in res.data
|
||||
@@ -297,7 +303,8 @@ def test_ua_global_override(client, live_server, measure_memory_usage):
|
||||
"tags": "testtag",
|
||||
"fetch_backend": 'html_requests',
|
||||
# Important - also test case-insensitive
|
||||
"headers": "User-AGent: agent-from-watch"},
|
||||
"headers": "User-AGent: agent-from-watch",
|
||||
"time_between_check_use_default": "y"},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"Updated watch." in res.data
|
||||
@@ -365,7 +372,8 @@ def test_headers_textfile_in_request(client, live_server, measure_memory_usage):
|
||||
"url": test_url,
|
||||
"tags": "testtag",
|
||||
"fetch_backend": 'html_webdriver' if os.getenv('PLAYWRIGHT_DRIVER_URL') else 'html_requests',
|
||||
"headers": "xxx:ooo\ncool:yeah\r\n"},
|
||||
"headers": "xxx:ooo\ncool:yeah\r\n",
|
||||
"time_between_check_use_default": "y"},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"Updated watch." in res.data
|
||||
@@ -440,7 +448,8 @@ def test_headers_validation(client, live_server):
|
||||
data={
|
||||
"url": test_url,
|
||||
"fetch_backend": 'html_requests',
|
||||
"headers": "User-AGent agent-from-watch\r\nsadfsadfsadfsdaf\r\n:foobar"},
|
||||
"headers": "User-AGent agent-from-watch\r\nsadfsadfsadfsdaf\r\n:foobar",
|
||||
"time_between_check_use_default": "y"},
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
|
||||
@@ -112,7 +112,7 @@ def test_itemprop_price_change(client, live_server):
|
||||
wait_for_all_checks(client)
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b'180.45' in res.data
|
||||
assert b'unviewed' in res.data
|
||||
assert b'has-unread-changes' in res.data
|
||||
client.get(url_for("ui.mark_all_viewed"), follow_redirects=True)
|
||||
time.sleep(0.2)
|
||||
|
||||
@@ -121,7 +121,7 @@ def test_itemprop_price_change(client, live_server):
|
||||
set_original_response(props_markup=instock_props[0], price='120.45')
|
||||
res = client.post(
|
||||
url_for("ui.ui_edit.edit_page", uuid="first"),
|
||||
data={"restock_settings-follow_price_changes": "", "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests"},
|
||||
data={"restock_settings-follow_price_changes": "", "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests", "time_between_check_use_default": "y"},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"Updated watch." in res.data
|
||||
@@ -129,7 +129,7 @@ def test_itemprop_price_change(client, live_server):
|
||||
wait_for_all_checks(client)
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b'120.45' in res.data
|
||||
assert b'unviewed' not in res.data
|
||||
assert b'has-unread-changes' not in res.data
|
||||
|
||||
|
||||
res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)
|
||||
@@ -155,7 +155,8 @@ def _run_test_minmax_limit(client, extra_watch_edit_form):
|
||||
"url": test_url,
|
||||
"headers": "",
|
||||
"time_between_check-hours": 5,
|
||||
'fetch_backend': "html_requests"
|
||||
'fetch_backend': "html_requests",
|
||||
"time_between_check_use_default": "y"
|
||||
}
|
||||
data.update(extra_watch_edit_form)
|
||||
res = client.post(
|
||||
@@ -177,7 +178,7 @@ def _run_test_minmax_limit(client, extra_watch_edit_form):
|
||||
assert b'more than one price detected' not in res.data
|
||||
# BUT the new price should show, even tho its within limits
|
||||
assert b'1,000.45' or b'1000.45' in res.data #depending on locale
|
||||
assert b'unviewed' not in res.data
|
||||
assert b'has-unread-changes' not in res.data
|
||||
|
||||
# price changed to something LESS than min (900), SHOULD be a change
|
||||
set_original_response(props_markup=instock_props[0], price='890.45')
|
||||
@@ -187,7 +188,7 @@ def _run_test_minmax_limit(client, extra_watch_edit_form):
|
||||
wait_for_all_checks(client)
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b'890.45' in res.data
|
||||
assert b'unviewed' in res.data
|
||||
assert b'has-unread-changes' in res.data
|
||||
|
||||
client.get(url_for("ui.mark_all_viewed"))
|
||||
|
||||
@@ -199,7 +200,7 @@ def _run_test_minmax_limit(client, extra_watch_edit_form):
|
||||
wait_for_all_checks(client)
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b'820.45' in res.data
|
||||
assert b'unviewed' in res.data
|
||||
assert b'has-unread-changes' in res.data
|
||||
client.get(url_for("ui.mark_all_viewed"))
|
||||
|
||||
# price changed to something MORE than max (1100.10), SHOULD be a change
|
||||
@@ -209,7 +210,7 @@ def _run_test_minmax_limit(client, extra_watch_edit_form):
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
# Depending on the LOCALE it may be either of these (generally for US/default/etc)
|
||||
assert b'1,890.45' in res.data or b'1890.45' in res.data
|
||||
assert b'unviewed' in res.data
|
||||
assert b'has-unread-changes' in res.data
|
||||
|
||||
res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)
|
||||
assert b'Deleted' in res.data
|
||||
@@ -278,7 +279,8 @@ def test_itemprop_percent_threshold(client, live_server):
|
||||
"url": test_url,
|
||||
"tags": "",
|
||||
"headers": "",
|
||||
'fetch_backend': "html_requests"
|
||||
'fetch_backend': "html_requests",
|
||||
"time_between_check_use_default": "y"
|
||||
},
|
||||
follow_redirects=True
|
||||
)
|
||||
@@ -292,7 +294,7 @@ def test_itemprop_percent_threshold(client, live_server):
|
||||
wait_for_all_checks(client)
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b'960.45' in res.data
|
||||
assert b'unviewed' not in res.data
|
||||
assert b'has-unread-changes' not in res.data
|
||||
|
||||
# Bigger INCREASE change than the threshold should trigger
|
||||
set_original_response(props_markup=instock_props[0], price='1960.45')
|
||||
@@ -300,7 +302,7 @@ def test_itemprop_percent_threshold(client, live_server):
|
||||
wait_for_all_checks(client)
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b'1,960.45' or b'1960.45' in res.data #depending on locale
|
||||
assert b'unviewed' in res.data
|
||||
assert b'has-unread-changes' in res.data
|
||||
|
||||
|
||||
# Small decrease should NOT trigger
|
||||
@@ -310,7 +312,7 @@ def test_itemprop_percent_threshold(client, live_server):
|
||||
wait_for_all_checks(client)
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b'1,950.45' or b'1950.45' in res.data #depending on locale
|
||||
assert b'unviewed' not in res.data
|
||||
assert b'has-unread-changes' not in res.data
|
||||
|
||||
|
||||
|
||||
|
||||
@@ -158,6 +158,7 @@ def test_rss_xpath_filtering(client, live_server, measure_memory_usage):
|
||||
"proxy": "no-proxy",
|
||||
"tags": "",
|
||||
"url": test_url,
|
||||
"time_between_check_use_default": "y",
|
||||
},
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
@@ -1,10 +1,13 @@
#!/usr/bin/env python3

import time
from copy import copy
from datetime import datetime, timezone
from zoneinfo import ZoneInfo
from flask import url_for
from .util import live_server_setup, wait_for_all_checks, extract_UUID_from_client
from ..forms import REQUIRE_ATLEAST_ONE_TIME_PART_MESSAGE_DEFAULT, REQUIRE_ATLEAST_ONE_TIME_PART_WHEN_NOT_GLOBAL_DEFAULT


# def test_setup(client, live_server):
# live_server_setup(live_server) # Setup on conftest per function
@@ -42,11 +45,12 @@ def test_check_basic_scheduler_functionality(client, live_server, measure_memory
uuid = next(iter(live_server.app.config['DATASTORE'].data['watching']))

# Setup all the days of the weeks using XXX as the placeholder for monday/tuesday/etc

last_check = copy(live_server.app.config['DATASTORE'].data['watching'][uuid]['last_checked'])
tpl = {
"time_schedule_limit-XXX-start_time": "00:00",
"time_schedule_limit-XXX-duration-hours": 24,
"time_schedule_limit-XXX-duration-minutes": 0,
"time_between_check-seconds": 1,
"time_schedule_limit-XXX-enabled": '', # All days are turned off
"time_schedule_limit-enabled": 'y', # Scheduler is enabled, all days however are off.
}
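
The tpl dict above uses "XXX" as a stand-in for each weekday so the three schedule fields don't have to be spelled out seven times; the next hunk expands it into real form field names with key.replace("XXX", day). A compact sketch of that expansion (the weekday list is an assumption matching the form's field naming):

days = ['monday', 'tuesday', 'wednesday', 'thursday', 'friday', 'saturday', 'sunday']
scheduler_data = {}
for day in days:
    for key, value in tpl.items():
        # "time_schedule_limit-XXX-start_time" -> "time_schedule_limit-monday-start_time"
        scheduler_data[key.replace("XXX", day)] = value
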
@@ -58,13 +62,13 @@ def test_check_basic_scheduler_functionality(client, live_server, measure_memory
new_key = key.replace("XXX", day)
scheduler_data[new_key] = value

last_check = live_server.app.config['DATASTORE'].data['watching'][uuid]['last_checked']
data = {
"url": test_url,
"fetch_backend": "html_requests"
"fetch_backend": "html_requests",
"time_between_check_use_default": "" # no
}
data.update(scheduler_data)

time.sleep(1)
res = client.post(
url_for("ui.ui_edit.edit_page", uuid="first"),
data=data,
@@ -77,6 +81,7 @@ def test_check_basic_scheduler_functionality(client, live_server, measure_memory

# "Edit" should not trigger a check because it's not enabled in the schedule.
time.sleep(2)
# "time_schedule_limit-XXX-enabled": '', # All days are turned off, therefor, nothing should happen here..
assert live_server.app.config['DATASTORE'].data['watching'][uuid]['last_checked'] == last_check

# Enabling today in Kiritimati should work flawless
@@ -177,3 +182,44 @@ def test_check_basic_global_scheduler_functionality(client, live_server, measure
# Cleanup everything
res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)
assert b'Deleted' in res.data


def test_validation_time_interval_field(client, live_server, measure_memory_usage):
test_url = url_for('test_endpoint', _external=True)
res = client.post(
url_for("imports.import_page"),
data={"urls": test_url},
follow_redirects=True
)
assert b"1 Imported" in res.data


res = client.post(
url_for("ui.ui_edit.edit_page", uuid="first"),
data={"trigger_text": 'The golden line',
"url": test_url,
'fetch_backend': "html_requests",
'filter_text_removed': 'y',
"time_between_check_use_default": ""
},
follow_redirects=True
)

assert REQUIRE_ATLEAST_ONE_TIME_PART_WHEN_NOT_GLOBAL_DEFAULT.encode('utf-8') in res.data

# Now set atleast something

res = client.post(
url_for("ui.ui_edit.edit_page", uuid="first"),
data={"trigger_text": 'The golden line',
"url": test_url,
'fetch_backend': "html_requests",
"time_between_check-minutes": 1,
"time_between_check_use_default": ""
},
follow_redirects=True
)

assert REQUIRE_ATLEAST_ONE_TIME_PART_WHEN_NOT_GLOBAL_DEFAULT.encode('utf-8') not in res.data


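The assertions above boil down to one rule: a watch that opts out of the global default interval (time_between_check_use_default left empty) must supply at least one non-zero time part, otherwise the form rejects it with REQUIRE_ATLEAST_ONE_TIME_PART_WHEN_NOT_GLOBAL_DEFAULT. A standalone sketch of that rule (the helper name and the exact set of field names are assumptions):

def interval_is_valid(form: dict) -> bool:
    # Falling back to the shared/global interval is always acceptable
    if form.get("time_between_check_use_default") == "y":
        return True
    # Otherwise require at least one non-zero time part on the watch itself
    parts = ("weeks", "days", "hours", "minutes", "seconds")
    return any(int(form.get(f"time_between_check-{p}") or 0) > 0 for p in parts)
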
@@ -27,7 +27,7 @@ def test_basic_search(client, live_server, measure_memory_usage):
|
||||
|
||||
res = client.post(
|
||||
url_for("ui.ui_edit.edit_page", uuid="first"),
|
||||
data={"title": "xxx-title", "url": urls[0], "tags": "", "headers": "", 'fetch_backend': "html_requests"},
|
||||
data={"title": "xxx-title", "url": urls[0], "tags": "", "headers": "", 'fetch_backend': "html_requests", "time_between_check_use_default": "y"},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"Updated watch." in res.data
|
||||
@@ -62,7 +62,7 @@ def test_search_in_tag_limit(client, live_server, measure_memory_usage):
|
||||
res = client.post(
|
||||
url_for("ui.ui_edit.edit_page", uuid="first"),
|
||||
data={"title": "xxx-title", "url": urls[0].split(' ')[0], "tags": urls[0].split(' ')[1], "headers": "",
|
||||
'fetch_backend': "html_requests"},
|
||||
'fetch_backend': "html_requests", "time_between_check_use_default": "y"},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"Updated watch." in res.data
|
||||
|
||||
@@ -41,7 +41,8 @@ def test_bad_access(client, live_server, measure_memory_usage):
|
||||
"tags": "",
|
||||
"method": "GET",
|
||||
"fetch_backend": "html_requests",
|
||||
"body": ""},
|
||||
"body": "",
|
||||
"time_between_check_use_default": "y"},
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
@@ -150,7 +151,8 @@ def test_xss_watch_last_error(client, live_server, measure_memory_usage):
|
||||
data={
|
||||
"include_filters": '<a href="https://foobar"></a><script>alert(123);</script>',
|
||||
"url": url_for('test_endpoint', _external=True),
|
||||
'fetch_backend': "html_requests"
|
||||
'fetch_backend': "html_requests",
|
||||
"time_between_check_use_default": "y"
|
||||
},
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
@@ -29,7 +29,7 @@ def test_share_watch(client, live_server, measure_memory_usage):
|
||||
# Add our URL to the import page
|
||||
res = client.post(
|
||||
url_for("ui.ui_edit.edit_page", uuid="first"),
|
||||
data={"include_filters": include_filters, "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests"},
|
||||
data={"include_filters": include_filters, "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests", "time_between_check_use_default": "y"},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"Updated watch." in res.data
|
||||
|
||||
@@ -43,9 +43,9 @@ def test_check_basic_change_detection_functionality_source(client, live_server,
|
||||
|
||||
wait_for_all_checks(client)
|
||||
|
||||
# Now something should be ready, indicated by having a 'unviewed' class
|
||||
# Now something should be ready, indicated by having a 'has-unread-changes' class
|
||||
res = client.get(url_for("watchlist.index"))
|
||||
assert b'unviewed' in res.data
|
||||
assert b'has-unread-changes' in res.data
|
||||
|
||||
res = client.get(
|
||||
url_for("ui.ui_views.diff_history_page", uuid="first"),
|
||||
@@ -77,7 +77,7 @@ def test_check_ignore_elements(client, live_server, measure_memory_usage):
|
||||
|
||||
client.post(
|
||||
url_for("ui.ui_edit.edit_page", uuid="first"),
|
||||
data={"include_filters": 'span,p', "url": test_url, "tags": "", "subtractive_selectors": ".foobar-detection", 'fetch_backend': "html_requests"},
|
||||
data={"include_filters": 'span,p', "url": test_url, "tags": "", "subtractive_selectors": ".foobar-detection", 'fetch_backend': "html_requests", "time_between_check_use_default": "y"},
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
|
||||
@@ -81,7 +81,8 @@ def test_trigger_functionality(client, live_server, measure_memory_usage):
url_for("ui.ui_edit.edit_page", uuid="first"),
data={"trigger_text": trigger_text,
"url": test_url,
"fetch_backend": "html_requests"},
"fetch_backend": "html_requests",
"time_between_check_use_default": "y"},
follow_redirects=True
)
assert b"Updated watch." in res.data
@@ -95,7 +96,7 @@ def test_trigger_functionality(client, live_server, measure_memory_usage):


# so that we set the state to 'unviewed' after all the edits
# so that we set the state to 'has-unread-changes' after all the edits
client.get(url_for("ui.ui_views.diff_history_page", uuid="first"))

# Trigger a check
@@ -103,9 +104,9 @@ def test_trigger_functionality(client, live_server, measure_memory_usage):

wait_for_all_checks(client)

# It should report nothing found (no new 'unviewed' class)
# It should report nothing found (no new 'has-unread-changes' class)
res = client.get(url_for("watchlist.index"))
assert b'unviewed' not in res.data
assert b'has-unread-changes' not in res.data
assert b'/test-endpoint' in res.data

# Make a change
@@ -115,9 +116,9 @@ def test_trigger_functionality(client, live_server, measure_memory_usage):
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
wait_for_all_checks(client)

# It should report nothing found (no new 'unviewed' class)
# It should report nothing found (no new 'has-unread-changes' class)
res = client.get(url_for("watchlist.index"))
assert b'unviewed' not in res.data
assert b'has-unread-changes' not in res.data

# Now set the content which contains the trigger text
set_modified_with_trigger_text_response()
@@ -125,7 +126,7 @@ def test_trigger_functionality(client, live_server, measure_memory_usage):
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
wait_for_all_checks(client)
res = client.get(url_for("watchlist.index"))
assert b'unviewed' in res.data
assert b'has-unread-changes' in res.data

# https://github.com/dgtlmoon/changedetection.io/issues/616
# Apparently the actual snapshot that contains the trigger never shows

@@ -42,18 +42,19 @@ def test_trigger_regex_functionality(client, live_server, measure_memory_usage):

# It should report nothing found (just a new one shouldnt have anything)
res = client.get(url_for("watchlist.index"))
assert b'unviewed' not in res.data
assert b'has-unread-changes' not in res.data

### test regex
res = client.post(
url_for("ui.ui_edit.edit_page", uuid="first"),
data={"trigger_text": '/something \d{3}/',
"url": test_url,
"fetch_backend": "html_requests"},
"fetch_backend": "html_requests",
"time_between_check_use_default": "y"},
follow_redirects=True
)
wait_for_all_checks(client)
# so that we set the state to 'unviewed' after all the edits
# so that we set the state to 'has-unread-changes' after all the edits
client.get(url_for("ui.ui_views.diff_history_page", uuid="first"))

with open("test-datastore/endpoint-content.txt", "w") as f:
@@ -64,7 +65,7 @@ def test_trigger_regex_functionality(client, live_server, measure_memory_usage):

# It should report nothing found (nothing should match the regex)
res = client.get(url_for("watchlist.index"))
assert b'unviewed' not in res.data
assert b'has-unread-changes' not in res.data

with open("test-datastore/endpoint-content.txt", "w") as f:
f.write("regex test123<br>\nsomething 123")
@@ -72,7 +73,7 @@ def test_trigger_regex_functionality(client, live_server, measure_memory_usage):
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
wait_for_all_checks(client)
res = client.get(url_for("watchlist.index"))
assert b'unviewed' in res.data
assert b'has-unread-changes' in res.data

# Cleanup everything
res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)

@@ -50,7 +50,8 @@ def test_trigger_regex_functionality_with_filter(client, live_server, measure_me
data={"trigger_text": "/cool.stuff/",
"url": test_url,
"include_filters": '#in-here',
"fetch_backend": "html_requests"},
"fetch_backend": "html_requests",
"time_between_check_use_default": "y"},
follow_redirects=True
)

@@ -68,7 +69,7 @@ def test_trigger_regex_functionality_with_filter(client, live_server, measure_me

# It should report nothing found (nothing should match the regex and filter)
res = client.get(url_for("watchlist.index"))
assert b'unviewed' not in res.data
assert b'has-unread-changes' not in res.data

# now this should trigger something
with open("test-datastore/endpoint-content.txt", "w") as f:
@@ -77,7 +78,7 @@ def test_trigger_regex_functionality_with_filter(client, live_server, measure_me
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
time.sleep(sleep_time_for_fetch_thread)
res = client.get(url_for("watchlist.index"))
assert b'unviewed' in res.data
assert b'has-unread-changes' in res.data

# Cleanup everything
res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)

@@ -2,12 +2,107 @@

from flask import url_for
from .util import set_original_response, set_modified_response, live_server_setup, wait_for_all_checks
from ..forms import REQUIRE_ATLEAST_ONE_TIME_PART_WHEN_NOT_GLOBAL_DEFAULT, REQUIRE_ATLEAST_ONE_TIME_PART_MESSAGE_DEFAULT


def test_recheck_time_field_validation_global_settings(client, live_server):
"""
Tests that the global settings time field has atleast one value for week/day/hours/minute/seconds etc entered
class globalSettingsRequestForm(Form):
time_between_check = RequiredFormField(TimeBetweenCheckForm)
"""
res = client.post(
url_for("settings.settings_page"),
data={
"requests-time_between_check-weeks": '',
"requests-time_between_check-days": '',
"requests-time_between_check-hours": '',
"requests-time_between_check-minutes": '',
"requests-time_between_check-seconds": '',
},
follow_redirects=True
)

assert REQUIRE_ATLEAST_ONE_TIME_PART_MESSAGE_DEFAULT.encode('utf-8') in res.data


def test_recheck_time_field_validation_single_watch(client, live_server):
"""
Tests that the global settings time field has atleast one value for week/day/hours/minute/seconds etc entered
class globalSettingsRequestForm(Form):
time_between_check = RequiredFormField(TimeBetweenCheckForm)
"""
test_url = url_for('test_endpoint', _external=True)

# Add our URL to the import page
res = client.post(
url_for("imports.import_page"),
data={"urls": test_url},
follow_redirects=True
)

assert b"1 Imported" in res.data

res = client.post(
url_for("ui.ui_edit.edit_page", uuid="first"),
data={
"url": test_url,
'fetch_backend': "html_requests",
"time_between_check_use_default": "", # OFF
"time_between_check-weeks": '',
"time_between_check-days": '',
"time_between_check-hours": '',
"time_between_check-minutes": '',
"time_between_check-seconds": '',
},
follow_redirects=True
)

assert REQUIRE_ATLEAST_ONE_TIME_PART_WHEN_NOT_GLOBAL_DEFAULT.encode('utf-8') in res.data

# Now set some time
res = client.post(
url_for("ui.ui_edit.edit_page", uuid="first"),
data={
"url": test_url,
'fetch_backend': "html_requests",
"time_between_check_use_default": "", # OFF
"time_between_check-weeks": '',
"time_between_check-days": '',
"time_between_check-hours": '',
"time_between_check-minutes": '5',
"time_between_check-seconds": '',
},
follow_redirects=True
)

assert b"Updated watch." in res.data
assert REQUIRE_ATLEAST_ONE_TIME_PART_WHEN_NOT_GLOBAL_DEFAULT.encode('utf-8') not in res.data

# Now set to use defaults
res = client.post(
url_for("ui.ui_edit.edit_page", uuid="first"),
data={
"url": test_url,
'fetch_backend': "html_requests",
"time_between_check_use_default": "y", # ON YES
"time_between_check-weeks": '',
"time_between_check-days": '',
"time_between_check-hours": '',
"time_between_check-minutes": '',
"time_between_check-seconds": '',
},
follow_redirects=True
)

assert b"Updated watch." in res.data
assert REQUIRE_ATLEAST_ONE_TIME_PART_WHEN_NOT_GLOBAL_DEFAULT.encode('utf-8') not in res.data

def test_checkbox_open_diff_in_new_tab(client, live_server):

set_original_response()
# live_server_setup(live_server) # Setup on conftest per function

# Add our URL to the import page
res = client.post(
url_for("imports.import_page"),
@@ -78,3 +173,119 @@ def test_checkbox_open_diff_in_new_tab(client, live_server):
# Cleanup everything
res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)
assert b'Deleted' in res.data

def test_page_title_listing_behaviour(client, live_server):

set_original_response(extra_title="custom html")

# either the manually entered title/description or the page link should be visible
res = client.post(
url_for("settings.settings_page"),
data={"application-ui-use_page_title_in_list": "",
"requests-time_between_check-minutes": 180,
'application-fetch_backend': "html_requests"},
follow_redirects=True
)
assert b"Settings updated." in res.data


# Add our URL to the import page
res = client.post(
url_for("imports.import_page"),
data={"urls": url_for('test_endpoint', _external=True)},
follow_redirects=True
)

assert b"1 Imported" in res.data
wait_for_all_checks(client)

# We see the URL only, no title/description was manually entered
res = client.get(url_for("watchlist.index"))
assert url_for('test_endpoint', _external=True).encode('utf-8') in res.data


# Now 'my title' should override
res = client.post(
url_for("ui.ui_edit.edit_page", uuid="first"),
data={
"url": url_for('test_endpoint', _external=True),
"title": "my title",
"fetch_backend": "html_requests",
"time_between_check_use_default": "y"},
follow_redirects=True
)
assert b"Updated watch." in res.data
res = client.get(url_for("watchlist.index"))
assert b"my title" in res.data

# Now we enable page <title> and unset the override title/description
res = client.post(
url_for("settings.settings_page"),
data={"application-ui-use_page_title_in_list": "y",
"requests-time_between_check-minutes": 180,
'application-fetch_backend': "html_requests"},
follow_redirects=True
)
assert b"Settings updated." in res.data

# Page title description override should take precedence
res = client.get(url_for("watchlist.index"))
assert b"my title" in res.data

# Remove page title description override and it should fall back to title
res = client.post(
url_for("ui.ui_edit.edit_page", uuid="first"),
data={
"url": url_for('test_endpoint', _external=True),
"title": "",
"fetch_backend": "html_requests",
"time_between_check_use_default": "y"},
follow_redirects=True
)
assert b"Updated watch." in res.data

# No page title description, and 'use_page_title_in_list' is on, it should show the <title>
res = client.get(url_for("watchlist.index"))
assert b"head titlecustom html" in res.data


def test_ui_viewed_unread_flag(client, live_server):

import time

set_original_response(extra_title="custom html")

# Add our URL to the import page
res = client.post(
url_for("imports.import_page"),
data={"urls": url_for('test_endpoint', _external=True)+"\r\n"+url_for('test_endpoint', _external=True)},
follow_redirects=True
)

assert b"2 Imported" in res.data
wait_for_all_checks(client)

set_modified_response()
res = client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
assert b'Queued 2 watches for rechecking.' in res.data
wait_for_all_checks(client)
res = client.get(url_for("watchlist.index"))
assert b'<span id="unread-tab-counter">2</span>' in res.data
assert res.data.count(b'data-watch-uuid') == 2

# one should now be viewed, but two in total still
client.get(url_for("ui.ui_views.diff_history_page", uuid="first"))
res = client.get(url_for("watchlist.index"))
assert b'<span id="unread-tab-counter">1</span>' in res.data
assert res.data.count(b'data-watch-uuid') == 2

# check ?unread=1 works
res = client.get(url_for("watchlist.index")+"?unread=1")
assert res.data.count(b'data-watch-uuid') == 1
assert b'<span id="unread-tab-counter">1</span>' in res.data

# Mark all viewed test again
client.get(url_for("ui.mark_all_viewed"), follow_redirects=True)
time.sleep(0.2)
res = client.get(url_for("watchlist.index"))
assert b'<span id="unread-tab-counter">0</span>' in res.data
@@ -92,11 +92,12 @@ def test_unique_lines_functionality(client, live_server, measure_memory_usage):
url_for("ui.ui_edit.edit_page", uuid="first"),
data={"check_unique_lines": "y",
"url": test_url,
"fetch_backend": "html_requests"},
"fetch_backend": "html_requests",
"time_between_check_use_default": "y"},
follow_redirects=True
)
assert b"Updated watch." in res.data
assert b'unviewed' not in res.data
assert b'has-unread-changes' not in res.data

# Make a change
set_modified_swapped_lines()
@@ -107,16 +108,16 @@ def test_unique_lines_functionality(client, live_server, measure_memory_usage):
# Give the thread time to pick it up
wait_for_all_checks(client)

# It should report nothing found (no new 'unviewed' class)
# It should report nothing found (no new 'has-unread-changes' class)
res = client.get(url_for("watchlist.index"))
assert b'unviewed' not in res.data
assert b'has-unread-changes' not in res.data

# Now set the content which contains the new text and re-ordered existing text
set_modified_with_trigger_text_response()
client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
wait_for_all_checks(client)
res = client.get(url_for("watchlist.index"))
assert b'unviewed' in res.data
assert b'has-unread-changes' in res.data
res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)
assert b'Deleted' in res.data

@@ -140,7 +141,8 @@ def test_sort_lines_functionality(client, live_server, measure_memory_usage):
url_for("ui.ui_edit.edit_page", uuid="first"),
data={"sort_text_alphabetically": "n",
"url": test_url,
"fetch_backend": "html_requests"},
"fetch_backend": "html_requests",
"time_between_check_use_default": "y"},
follow_redirects=True
)
assert b"Updated watch." in res.data
@@ -155,7 +157,7 @@ def test_sort_lines_functionality(client, live_server, measure_memory_usage):

res = client.get(url_for("watchlist.index"))
# Should be a change registered
assert b'unviewed' in res.data
assert b'has-unread-changes' in res.data

res = client.get(
url_for("ui.ui_views.preview_page", uuid="first"),
@@ -192,7 +194,8 @@ def test_extra_filters(client, live_server, measure_memory_usage):
"trim_text_whitespace": "y",
"sort_text_alphabetically": "", # leave this OFF for testing
"url": test_url,
"fetch_backend": "html_requests"},
"fetch_backend": "html_requests",
"time_between_check_use_default": "y"},
follow_redirects=True
)
assert b"Updated watch." in res.data

@@ -28,7 +28,8 @@ def test_check_watch_field_storage(client, live_server, measure_memory_usage):
"url": test_url,
"tags": "woohoo",
"headers": "curl:foo",
'fetch_backend': "html_requests"
'fetch_backend': "html_requests",
"time_between_check_use_default": "y"
},
follow_redirects=True
)

@@ -92,7 +92,7 @@ def test_check_xpath_filter_utf8(client, live_server, measure_memory_usage):
wait_for_all_checks(client)
res = client.post(
url_for("ui.ui_edit.edit_page", uuid="first"),
data={"include_filters": filter, "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests"},
data={"include_filters": filter, "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests", "time_between_check_use_default": "y"},
follow_redirects=True
)
assert b"Updated watch." in res.data
@@ -146,7 +146,7 @@ def test_check_xpath_text_function_utf8(client, live_server, measure_memory_usag
wait_for_all_checks(client)
res = client.post(
url_for("ui.ui_edit.edit_page", uuid="first"),
data={"include_filters": filter, "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests"},
data={"include_filters": filter, "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests", "time_between_check_use_default": "y"},
follow_redirects=True
)
assert b"Updated watch." in res.data
@@ -188,7 +188,7 @@ def test_check_markup_xpath_filter_restriction(client, live_server, measure_memo
# Add our URL to the import page
res = client.post(
url_for("ui.ui_edit.edit_page", uuid="first"),
data={"include_filters": xpath_filter, "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests"},
data={"include_filters": xpath_filter, "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests", "time_between_check_use_default": "y"},
follow_redirects=True
)
assert b"Updated watch." in res.data
@@ -208,7 +208,7 @@ def test_check_markup_xpath_filter_restriction(client, live_server, measure_memo
wait_for_all_checks(client)

res = client.get(url_for("watchlist.index"))
assert b'unviewed' not in res.data
assert b'has-unread-changes' not in res.data
res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)
assert b'Deleted' in res.data

@@ -226,7 +226,7 @@ def test_xpath_validation(client, live_server, measure_memory_usage):

res = client.post(
url_for("ui.ui_edit.edit_page", uuid="first"),
data={"include_filters": "/something horrible", "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests"},
data={"include_filters": "/something horrible", "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests", "time_between_check_use_default": "y"},
follow_redirects=True
)
assert b"is not a valid XPath expression" in res.data
@@ -247,7 +247,7 @@ def test_xpath23_prefix_validation(client, live_server, measure_memory_usage):

res = client.post(
url_for("ui.ui_edit.edit_page", uuid="first"),
data={"include_filters": "xpath:/something horrible", "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests"},
data={"include_filters": "xpath:/something horrible", "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests", "time_between_check_use_default": "y"},
follow_redirects=True
)
assert b"is not a valid XPath expression" in res.data
@@ -298,7 +298,7 @@ def test_xpath1_lxml(client, live_server, measure_memory_usage):
res = client.post(
url_for("ui.ui_edit.edit_page", uuid="first"),
data={"include_filters": "xpath1://title/text()", "url": test_url, "tags": "", "headers": "",
'fetch_backend': "html_requests"},
'fetch_backend': "html_requests", "time_between_check_use_default": "y"},
follow_redirects=True
)

@@ -331,7 +331,7 @@ def test_xpath1_validation(client, live_server, measure_memory_usage):

res = client.post(
url_for("ui.ui_edit.edit_page", uuid="first"),
data={"include_filters": "xpath1:/something horrible", "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests"},
data={"include_filters": "xpath1:/something horrible", "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests", "time_between_check_use_default": "y"},
follow_redirects=True
)
assert b"is not a valid XPath expression" in res.data
@@ -359,7 +359,7 @@ def test_check_with_prefix_include_filters(client, live_server, measure_memory_u
res = client.post(
url_for("ui.ui_edit.edit_page", uuid="first"),
data={"include_filters": "xpath://*[contains(@class, 'sametext')]", "url": test_url, "tags": "", "headers": "",
'fetch_backend': "html_requests"},
'fetch_backend': "html_requests", "time_between_check_use_default": "y"},
follow_redirects=True
)

@@ -413,7 +413,8 @@ def test_various_rules(client, live_server, measure_memory_usage):
"url": test_url,
"tags": "",
"headers": "",
'fetch_backend': "html_requests"},
'fetch_backend': "html_requests",
"time_between_check_use_default": "y"},
follow_redirects=True
)
wait_for_all_checks(client)
@@ -444,7 +445,8 @@ def test_xpath_20(client, live_server, measure_memory_usage):
"url": test_url,
"tags": "",
"headers": "",
'fetch_backend': "html_requests"},
'fetch_backend': "html_requests",
"time_between_check_use_default": "y"},
follow_redirects=True
)

@@ -481,7 +483,8 @@ def test_xpath_20_function_count(client, live_server, measure_memory_usage):
"url": test_url,
"tags": "",
"headers": "",
'fetch_backend': "html_requests"},
'fetch_backend': "html_requests",
"time_between_check_use_default": "y"},
follow_redirects=True
)

@@ -517,7 +520,8 @@ def test_xpath_20_function_count2(client, live_server, measure_memory_usage):
"url": test_url,
"tags": "",
"headers": "",
'fetch_backend': "html_requests"},
'fetch_backend': "html_requests",
"time_between_check_use_default": "y"},
follow_redirects=True
)

@@ -554,7 +558,8 @@ def test_xpath_20_function_string_join_matches(client, live_server, measure_memo
"url": test_url,
"tags": "",
"headers": "",
'fetch_backend': "html_requests"},
'fetch_backend': "html_requests",
"time_between_check_use_default": "y"},
follow_redirects=True
)

@@ -36,6 +36,7 @@ def test_visual_selector_content_ready(client, live_server, measure_memory_usage
# For now, cookies doesnt work in headers because it must be a full cookiejar object
'headers': "testheader: yes\buser-agent: MyCustomAgent",
'fetch_backend': "html_webdriver",
"time_between_check_use_default": "y",
},
follow_redirects=True
)
@@ -116,6 +117,7 @@ def test_basic_browserstep(client, live_server, measure_memory_usage):
'browser_steps-1-optional_value': '',
# For now, cookies doesnt work in headers because it must be a full cookiejar object
'headers': "testheader: yes\buser-agent: MyCustomAgent",
"time_between_check_use_default": "y",
},
follow_redirects=True
)
@@ -167,7 +169,8 @@ def test_non_200_errors_report_browsersteps(client, live_server):
'fetch_backend': "html_webdriver",
'browser_steps-0-operation': 'Click element',
'browser_steps-0-selector': 'button[name=test-button]',
'browser_steps-0-optional_value': ''
'browser_steps-0-optional_value': '',
"time_between_check_use_default": "y"
},
follow_redirects=True
)

@@ -1,4 +1,4 @@
openapi: 3.0.3
openapi: 3.1.0
info:
title: ChangeDetection.io API
description: |
@@ -28,7 +28,7 @@ info:

For example: `x-api-key: YOUR_API_KEY`

version: 0.1.0
version: 0.1.1
contact:
name: ChangeDetection.io
url: https://github.com/dgtlmoon/changedetection.io
@@ -129,7 +129,7 @@ components:
maxLength: 5000
title:
type: string
description: Custom title for the web page change monitor (watch)
description: Custom title for the web page change monitor (watch), not to be confused with page_title
maxLength: 5000
tag:
type: string
@@ -188,6 +188,10 @@ components:
seconds:
type: integer
description: Time intervals between checks
time_between_check_use_default:
type: boolean
default: true
description: Whether to use global settings for time between checks - defaults to true if not set
notification_urls:
type: array
items:
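
The new `time_between_check_use_default` flag means an API client can omit the per-watch schedule entirely and inherit the global one. A minimal sketch of that, assuming the same local instance, `x-api-key` header and `/api/v1/watch` create endpoint used by the other examples in this spec (the host, key and URL below are placeholders):

import requests

headers = {'x-api-key': 'YOUR_API_KEY', 'Content-Type': 'application/json'}
data = {
    'url': 'https://example.com',
    # No weeks/days/hours/minutes/seconds supplied; the flag (default true)
    # tells the watch to follow the globally configured recheck schedule.
    'time_between_check_use_default': True,
}
response = requests.post('http://localhost:5000/api/v1/watch', headers=headers, json=data)
print(response.status_code, response.text)
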
@@ -224,8 +228,6 @@ components:
maxLength: 5000
required: [operation, selector, optional_value]
description: Browser automation steps
required:
- url

Watch:
allOf:
@@ -253,6 +255,11 @@ components:
type: integer
description: Unix timestamp in seconds of the last time the watch was viewed. Setting it to a value higher than `last_changed` in the "Update watch" endpoint marks the watch as viewed.
minimum: 0
link:
type: string
format: string
description: The watch URL rendered in case of any Jinja2 markup, always use this for listing.
readOnly: true

CreateWatch:
allOf:
@@ -261,6 +268,16 @@ components:
required:
- url

UpdateWatch:
allOf:
- $ref: '#/components/schemas/WatchBase'
- type: object
properties:
last_viewed:
type: integer
description: Unix timestamp in seconds of the last time the watch was viewed. Setting it to a value higher than `last_changed` in the "Update watch" endpoint marks the watch as viewed.
minimum: 0

Tag:
type: object
properties:
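
Because `last_viewed` is now part of the update schema, a client can mark a watch as viewed purely through the API. A minimal sketch, assuming the "Update watch" endpoint is `PUT /api/v1/watch/<uuid>` as referenced by this spec, with the host, key and UUID below as placeholders:

import time
import requests

headers = {'x-api-key': 'YOUR_API_KEY', 'Content-Type': 'application/json'}
uuid = '095be615-a8ad-4c33-8e9c-c7612fbf6c9f'  # example UUID reused from this spec
# Any timestamp newer than the watch's last_changed marks it as viewed.
response = requests.put('http://localhost:5000/api/v1/watch/' + uuid,
                        headers=headers, json={'last_viewed': int(time.time())})
print(response.status_code, response.text)
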
@@ -281,8 +298,13 @@ components:
notification_muted:
type: boolean
description: Whether notifications are muted for this tag
required:
- title

CreateTag:
allOf:
- $ref: '#/components/schemas/Tag'
- type: object
required:
- title

NotificationUrls:
type: object
@@ -378,9 +400,10 @@ paths:
example:
"095be615-a8ad-4c33-8e9c-c7612fbf6c9f":
uuid: "095be615-a8ad-4c33-8e9c-c7612fbf6c9f"
url: "http://example.com"
title: "Example Website Monitor"
tag: "550e8400-e29b-41d4-a716-446655440000"
url: "http://example.com?id={{1+1}} - the raw URL"
link: "http://example.com?id=2 - the rendered URL, always use this for listing."
title: "Example Website Monitor - manually entered title/description"
page_title: "The HTML <title> from the page"
tags: ["550e8400-e29b-41d4-a716-446655440000"]
paused: false
muted: false
@@ -390,9 +413,10 @@ paths:
last_changed: 1640995200
"7c9e6b8d-f2a1-4e5c-9d3b-8a7f6e4c2d1a":
uuid: "7c9e6b8d-f2a1-4e5c-9d3b-8a7f6e4c2d1a"
url: "https://news.example.org"
title: "News Site Tracker"
tag: "330e8400-e29b-41d4-a716-446655440001"
url: "http://example.com?id={{1+1}} - the raw URL"
link: "http://example.com?id=2 - the rendered URL, always use this for listing."
title: "News Site Tracker - manually entered title/description"
page_title: "The HTML <title> from the page"
tags: ["330e8400-e29b-41d4-a716-446655440001"]
paused: false
muted: true
@@ -572,7 +596,7 @@ paths:
content:
application/json:
schema:
$ref: '#/components/schemas/Watch'
$ref: '#/components/schemas/UpdateWatch'
responses:
'200':
description: Web page change monitor (watch) updated successfully
@@ -815,7 +839,7 @@ paths:
'Content-Type': 'application/json'
}
data = {'title': 'Important Sites'}
response = requests.post('http://localhost:5000/api/v1/tag',
response = requests.post('http://localhost:5000/api/v1/tag',
headers=headers, json=data)
print(response.json())
requestBody:
@@ -823,7 +847,7 @@ paths:
content:
application/json:
schema:
$ref: '#/components/schemas/Tag'
$ref: '#/components/schemas/CreateTag'
example:
title: "Important Sites"
responses:
@@ -1198,7 +1222,6 @@ paths:
uuid: "095be615-a8ad-4c33-8e9c-c7612fbf6c9f"
url: "http://example.com"
title: "Example Website Monitor"
tag: "550e8400-e29b-41d4-a716-446655440000"
tags: ["550e8400-e29b-41d4-a716-446655440000"]
paused: false
muted: false

@@ -39,7 +39,7 @@ jsonpath-ng~=1.5.3
# jq not available on Windows so must be installed manually

# Notification library
apprise==1.9.3
apprise==1.9.5

# - Needed for apprise/spush, and maybe others? hopefully doesnt trigger a rust compile.
# - Requires extra wheel for rPi, adds build time for arm/v8 which is not in piwheels
@@ -51,8 +51,8 @@ cryptography==44.0.1
# use any version other than 2.0.x due to https://github.com/eclipse/paho.mqtt.python/issues/814
paho-mqtt!=2.0.*

# Used for CSS filtering
beautifulsoup4>=4.0.0
# Used for CSS filtering, JSON extraction from HTML
beautifulsoup4>=4.0.0,<=4.13.5

# XPath filtering, lxml is required by bs4 anyway, but put it here to be safe.
# #2328 - 5.2.0 and 5.2.1 had extra CPU flag CFLAGS set which was not compatible on older hardware