Compare commits

...

11 Commits

Author    SHA1        Message                         Date
dgtlmoon  59ce32fa4c  tweaks                          2025-09-16 18:26:20 +02:00
dgtlmoon  48962c0226  more                            2025-09-16 18:08:25 +02:00
dgtlmoon  1039a1e590  woops                           2025-09-16 18:03:46 +02:00
dgtlmoon  8688f9ecaa  more                            2025-09-16 17:56:18 +02:00
dgtlmoon  511f0c4853  Fix                             2025-09-16 17:29:50 +02:00
dgtlmoon  b598c742f2  adding test                     2025-09-16 17:28:36 +02:00
dgtlmoon  773cd803d6  logic tweak                     2025-09-16 17:21:26 +02:00
dgtlmoon  582232b02f  Add missing val                 2025-09-16 17:14:09 +02:00
dgtlmoon  2c6b36e788  Adding flag                     2025-09-16 16:48:52 +02:00
dgtlmoon  ca40b2ea7a  Adding output                   2025-09-16 16:34:58 +02:00
dgtlmoon  2e631c93c4  recheck time field validation  2025-09-16 16:33:00 +02:00
45 changed files with 392 additions and 74 deletions

View File

@@ -14,6 +14,39 @@ import copy
from . import schema, schema_create_watch, schema_update_watch, validate_openapi_request

+def validate_time_between_check_required(json_data):
+    """
+    Validate that at least one time interval is specified when not using default settings.
+    Returns None if valid, or error message string if invalid.
+    Defaults to using global settings if time_between_check_use_default is not provided.
+    """
+    # Default to using global settings if not specified
+    use_default = json_data.get('time_between_check_use_default', True)
+
+    # If using default settings, no validation needed
+    if use_default:
+        return None
+
+    # If not using defaults, check if time_between_check exists and has at least one non-zero value
+    time_check = json_data.get('time_between_check')
+    if not time_check:
+        # No time_between_check provided and not using defaults - this is an error
+        return "At least one time interval (weeks, days, hours, minutes, or seconds) must be specified when not using global settings."
+
+    # time_between_check exists, check if it has at least one non-zero value
+    if any([
+        (time_check.get('weeks') or 0) > 0,
+        (time_check.get('days') or 0) > 0,
+        (time_check.get('hours') or 0) > 0,
+        (time_check.get('minutes') or 0) > 0,
+        (time_check.get('seconds') or 0) > 0
+    ]):
+        return None
+
+    # time_between_check exists but all values are 0 or empty - this is an error
+    return "At least one time interval (weeks, days, hours, minutes, or seconds) must be specified when not using global settings."
+
class Watch(Resource):
    def __init__(self, **kwargs):
        # datastore is a black box dependency

@@ -81,6 +114,11 @@ class Watch(Resource):
        if not request.json.get('proxy') in plist:
            return "Invalid proxy choice, currently supported proxies are '{}'".format(', '.join(plist)), 400

+        # Validate time_between_check when not using defaults
+        validation_error = validate_time_between_check_required(request.json)
+        if validation_error:
+            return validation_error, 400
+
        watch.update(request.json)
        return "OK", 200

@@ -196,6 +234,11 @@ class CreateWatch(Resource):
        if not json_data.get('proxy') in plist:
            return "Invalid proxy choice, currently supported proxies are '{}'".format(', '.join(plist)), 400

+        # Validate time_between_check when not using defaults
+        validation_error = validate_time_between_check_required(json_data)
+        if validation_error:
+            return validation_error, 400
+
        extras = copy.deepcopy(json_data)

        # Because we renamed 'tag' to 'tags' but don't want to change the API (can do this in v2 of the API)
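
For reference, the behaviour of the new validate_time_between_check_required() helper can be summarised with a few assertions. This is an illustrative sketch rather than part of the change; the import path is an assumption.

# Illustrative only - import path assumed, adjust to wherever the helper lives.
from changedetectionio.api.Watch import validate_time_between_check_required

# Omitting the flag defaults to "use global settings", so no interval is required.
assert validate_time_between_check_required({}) is None
assert validate_time_between_check_required({'time_between_check_use_default': True}) is None

# Opting out of global settings with no interval, or an all-zero interval, returns the error string.
assert validate_time_between_check_required({'time_between_check_use_default': False}) is not None
assert validate_time_between_check_required({
    'time_between_check_use_default': False,
    'time_between_check': {'hours': 0, 'minutes': 0},
}) is not None

# Any single non-zero unit satisfies the check.
assert validate_time_between_check_required({
    'time_between_check_use_default': False,
    'time_between_check': {'minutes': 5},
}) is None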

View File

@@ -119,6 +119,12 @@ def build_watch_json_schema(d):
    schema['properties']['time_between_check'] = build_time_between_check_json_schema()

+    schema['properties']['time_between_check_use_default'] = {
+        "type": "boolean",
+        "default": True,
+        "description": "Whether to use global settings for time between checks - defaults to true if not set"
+    }
+
    schema['properties']['browser_steps'] = {
        "anyOf": [
            {
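
For illustration only (not part of this change): the added property behaves like any other boolean in a JSON Schema. A minimal sketch against a trimmed-down schema that contains just the new property:

# Hedged sketch - the full watch schema is much larger; only this property comes from the diff.
import jsonschema

schema = {
    "type": "object",
    "properties": {
        "time_between_check_use_default": {
            "type": "boolean",
            "default": True,
        }
    }
}

jsonschema.validate({"time_between_check_use_default": False}, schema)      # passes
try:
    jsonschema.validate({"time_between_check_use_default": "yes"}, schema)  # wrong type
except jsonschema.ValidationError as e:
    print(e.message)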

View File

@@ -23,6 +23,7 @@ from wtforms import (
)
from flask_wtf.file import FileField, FileAllowed
from wtforms.fields import FieldList
+from wtforms.utils import unset_value
from wtforms.validators import ValidationError

@@ -56,6 +57,8 @@ valid_method = {
default_method = 'GET'
allow_simplehost = not strtobool(os.getenv('BLOCK_SIMPLEHOSTS', 'False'))

+REQUIRE_ATLEAST_ONE_TIME_PART_MESSAGE_DEFAULT='At least one time interval (weeks, days, hours, minutes, or seconds) must be specified.'
+REQUIRE_ATLEAST_ONE_TIME_PART_WHEN_NOT_GLOBAL_DEFAULT='At least one time interval (weeks, days, hours, minutes, or seconds) must be specified when not using global settings.'

class StringListField(StringField):
    widget = widgets.TextArea()
@@ -212,6 +215,33 @@ class ScheduleLimitForm(Form):
        self.sunday.form.enabled.label.text = "Sunday"

+
+def validate_time_between_check_has_values(form):
+    """
+    Custom validation function for TimeBetweenCheckForm.
+    Returns True if at least one time interval field has a value > 0.
+    """
+    return any([
+        form.weeks.data and form.weeks.data > 0,
+        form.days.data and form.days.data > 0,
+        form.hours.data and form.hours.data > 0,
+        form.minutes.data and form.minutes.data > 0,
+        form.seconds.data and form.seconds.data > 0
+    ])
+
+
+class RequiredTimeInterval(object):
+    """
+    WTForms validator that ensures at least one time interval field has a value > 0.
+    Use this with FormField(TimeBetweenCheckForm, validators=[RequiredTimeInterval()]).
+    """
+    def __init__(self, message=None):
+        self.message = message or 'At least one time interval (weeks, days, hours, minutes, or seconds) must be specified.'
+
+    def __call__(self, form, field):
+        if not validate_time_between_check_has_values(field.form):
+            raise ValidationError(self.message)
+
+
class TimeBetweenCheckForm(Form):
    weeks = IntegerField('Weeks', validators=[validators.Optional(), validators.NumberRange(min=0, message="Should contain zero or more seconds")])
    days = IntegerField('Days', validators=[validators.Optional(), validators.NumberRange(min=0, message="Should contain zero or more seconds")])
@@ -220,6 +250,123 @@ class TimeBetweenCheckForm(Form):
    seconds = IntegerField('Seconds', validators=[validators.Optional(), validators.NumberRange(min=0, message="Should contain zero or more seconds")])
    # @todo add total seconds minimum validatior = minimum_seconds_recheck_time

+    def __init__(self, formdata=None, obj=None, prefix="", data=None, meta=None, **kwargs):
+        super().__init__(formdata, obj, prefix, data, meta, **kwargs)
+        self.require_at_least_one = kwargs.get('require_at_least_one', False)
+        self.require_at_least_one_message = kwargs.get('require_at_least_one_message', REQUIRE_ATLEAST_ONE_TIME_PART_MESSAGE_DEFAULT)
+
+    def validate(self, **kwargs):
+        """Custom validation that can optionally require at least one time interval."""
+        # Run normal field validation first
+        if not super().validate(**kwargs):
+            return False
+
+        # Apply optional "at least one" validation
+        if self.require_at_least_one:
+            if not validate_time_between_check_has_values(self):
+                # Add error to the form's general errors (not field-specific)
+                if not hasattr(self, '_formdata_errors'):
+                    self._formdata_errors = []
+                self._formdata_errors.append(self.require_at_least_one_message)
+                return False
+
+        return True
+
+
+class EnhancedFormField(FormField):
+    """
+    An enhanced FormField that supports conditional validation with top-level error messages.
+    Adds a 'top_errors' property for validation errors at the FormField level.
+    """
+
+    def __init__(self, form_class, label=None, validators=None, separator="-",
+                 conditional_field=None, conditional_message=None, conditional_test_function=None, **kwargs):
+        """
+        Initialize EnhancedFormField with optional conditional validation.
+
+        :param conditional_field: Name of the field this FormField depends on (e.g. 'time_between_check_use_default')
+        :param conditional_message: Error message to show when validation fails
+        :param conditional_test_function: Custom function to test if FormField has valid values.
+                                          Should take self.form as parameter and return True if valid.
+        """
+        super().__init__(form_class, label, validators, separator, **kwargs)
+        self.top_errors = []
+        self.conditional_field = conditional_field
+        self.conditional_message = conditional_message or "At least one field must have a value when not using defaults."
+        self.conditional_test_function = conditional_test_function
+
+    def validate(self, form, extra_validators=()):
+        """
+        Custom validation that supports conditional logic and stores top-level errors.
+        """
+        self.top_errors = []
+
+        # First run the normal FormField validation
+        base_valid = super().validate(form, extra_validators)
+
+        # Apply conditional validation if configured
+        if self.conditional_field and hasattr(form, self.conditional_field):
+            conditional_field_obj = getattr(form, self.conditional_field)
+
+            # If the conditional field is False/unchecked, check if this FormField has any values
+            if not conditional_field_obj.data:
+                # Use custom test function if provided, otherwise use generic fallback
+                if self.conditional_test_function:
+                    has_any_value = self.conditional_test_function(self.form)
+                else:
+                    # Generic fallback - check if any field has truthy data
+                    has_any_value = any(field.data for field in self.form if hasattr(field, 'data') and field.data)
+
+                if not has_any_value:
+                    self.top_errors.append(self.conditional_message)
+                    base_valid = False
+
+        return base_valid
+
+
+class RequiredFormField(FormField):
+    """
+    A FormField that passes require_at_least_one=True to TimeBetweenCheckForm.
+    Use this when you want the sub-form to always require at least one value.
+    """
+
+    def __init__(self, form_class, label=None, validators=None, separator="-", **kwargs):
+        super().__init__(form_class, label, validators, separator, **kwargs)
+
+    def process(self, formdata, data=unset_value, extra_filters=None):
+        if extra_filters:
+            raise TypeError(
+                "FormField cannot take filters, as the encapsulated"
+                "data is not mutable."
+            )
+
+        if data is unset_value:
+            try:
+                data = self.default()
+            except TypeError:
+                data = self.default
+            self._obj = data
+
+        self.object_data = data
+
+        prefix = self.name + self.separator
+        # Pass require_at_least_one=True to the sub-form
+        if isinstance(data, dict):
+            self.form = self.form_class(formdata=formdata, prefix=prefix, require_at_least_one=True, **data)
+        else:
+            self.form = self.form_class(formdata=formdata, obj=data, prefix=prefix, require_at_least_one=True)
+
+    @property
+    def errors(self):
+        """Include sub-form validation errors"""
+        form_errors = self.form.errors
+        # Add any general form errors to a special 'form' key
+        if hasattr(self.form, '_formdata_errors') and self.form._formdata_errors:
+            form_errors = dict(form_errors)  # Make a copy
+            form_errors['form'] = self.form._formdata_errors
+        return form_errors
+
+
# Separated by key:value
class StringDictKeyValue(StringField):
    widget = widgets.TextArea()
@@ -348,7 +495,7 @@ class ValidateJinja2Template(object):
        joined_data = ' '.join(map(str, field.data)) if isinstance(field.data, list) else f"{field.data}"

        try:
-            jinja2_env = ImmutableSandboxedEnvironment(loader=BaseLoader)
+            jinja2_env = ImmutableSandboxedEnvironment(loader=BaseLoader, extensions=['jinja2_time.TimeExtension'])
            jinja2_env.globals.update(notification.valid_tokens)
            # Extra validation tokens provided on the form_class(... extra_tokens={}) setup
            if hasattr(field, 'extra_notification_tokens'):
@@ -583,11 +730,16 @@ class processor_text_json_diff_form(commonSettingsForm):
    url = fields.URLField('URL', validators=[validateURL()])
    tags = StringTagUUID('Group tag', [validators.Optional()], default='')

-    time_between_check = FormField(TimeBetweenCheckForm)
+    time_between_check = EnhancedFormField(
+        TimeBetweenCheckForm,
+        conditional_field='time_between_check_use_default',
+        conditional_message=REQUIRE_ATLEAST_ONE_TIME_PART_WHEN_NOT_GLOBAL_DEFAULT,
+        conditional_test_function=validate_time_between_check_has_values
+    )

    time_schedule_limit = FormField(ScheduleLimitForm)

-    time_between_check_use_default = BooleanField('Use global settings for time between check', default=False)
+    time_between_check_use_default = BooleanField('Use global settings for time between check and scheduler.', default=False)

    include_filters = StringListField('CSS/JSONPath/JQ/XPath Filters', [ValidateCSSJSONXPATHInput()], default='')
@@ -728,7 +880,7 @@ class DefaultUAInputForm(Form):
# datastore.data['settings']['requests']..
class globalSettingsRequestForm(Form):
-    time_between_check = FormField(TimeBetweenCheckForm)
+    time_between_check = RequiredFormField(TimeBetweenCheckForm)
    time_schedule_limit = FormField(ScheduleLimitForm)

    proxy = RadioField('Proxy')
    jitter_seconds = IntegerField('Random jitter seconds ± check',
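
As a rough illustration of how the new form plumbing behaves (not part of the diff): RequiredFormField constructs its TimeBetweenCheckForm sub-form with require_at_least_one=True, so an empty interval fails validation while any non-zero unit passes. The sketch below assumes TimeBetweenCheckForm is a plain wtforms Form subclass importable from changedetectionio.forms.

# Illustrative only - behaviour inferred from the code added above, import path assumed.
from werkzeug.datastructures import MultiDict
from changedetectionio.forms import TimeBetweenCheckForm

# An all-empty submission fails validate() and records the "at least one" message.
empty = TimeBetweenCheckForm(formdata=MultiDict({}), require_at_least_one=True)
assert not empty.validate()
assert getattr(empty, '_formdata_errors', [])

# Any single non-zero unit (here 5 minutes) is enough to pass.
some = TimeBetweenCheckForm(formdata=MultiDict({'minutes': '5'}), require_at_least_one=True)
assert some.validate()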

View File

@@ -1,6 +1,13 @@
{% macro render_field(field) %}
-<div {% if field.errors %} class="error" {% endif %}>{{ field.label }}</div>
-<div {% if field.errors %} class="error" {% endif %}>{{ field(**kwargs)|safe }}
+<div {% if field.errors or field.top_errors %} class="error" {% endif %}>{{ field.label }}</div>
+<div {% if field.errors or field.top_errors %} class="error" {% endif %}>{{ field(**kwargs)|safe }}
+    {% if field.top_errors %}
+        <ul class="errors top-errors">
+        {% for error in field.top_errors %}
+            <li>{{ error }}</li>
+        {% endfor %}
+        </ul>
+    {% endif %}
    {% if field.errors %}
        <ul class=errors>
        {% for error in field.errors %}

View File

@@ -55,7 +55,8 @@ def do_test(client, live_server, make_test_use_extra_browser=False):
"tags": "", "tags": "",
"headers": "", "headers": "",
'fetch_backend': f"extra_browser_{custom_browser_name}", 'fetch_backend': f"extra_browser_{custom_browser_name}",
'webdriver_js_execute_code': '' 'webdriver_js_execute_code': '',
"time_between_check_use_default": "y"
}, },
follow_redirects=True follow_redirects=True
) )

View File

@@ -28,6 +28,7 @@ def test_execute_custom_js(client, live_server, measure_memory_usage):
'fetch_backend': "html_webdriver", 'fetch_backend': "html_webdriver",
'webdriver_js_execute_code': 'document.querySelector("button[name=test-button]").click();', 'webdriver_js_execute_code': 'document.querySelector("button[name=test-button]").click();',
'headers': "testheader: yes\buser-agent: MyCustomAgent", 'headers': "testheader: yes\buser-agent: MyCustomAgent",
"time_between_check_use_default": "y",
}, },
follow_redirects=True follow_redirects=True
) )

View File

@@ -27,6 +27,7 @@ def test_preferred_proxy(client, live_server, measure_memory_usage):
"proxy": "proxy-two", "proxy": "proxy-two",
"tags": "", "tags": "",
"url": url, "url": url,
"time_between_check_use_default": "y",
}, },
follow_redirects=True follow_redirects=True
) )

View File

@@ -62,6 +62,7 @@ def test_noproxy_option(client, live_server, measure_memory_usage):
"proxy": "no-proxy", "proxy": "no-proxy",
"tags": "", "tags": "",
"url": url, "url": url,
"time_between_check_use_default": "y",
}, },
follow_redirects=True follow_redirects=True
) )

View File

@@ -44,6 +44,7 @@ def test_proxy_noconnect_custom(client, live_server, measure_memory_usage):
"url": test_url, "url": test_url,
"fetch_backend": "html_webdriver" if os.getenv('PLAYWRIGHT_DRIVER_URL') or os.getenv("WEBDRIVER_URL") else "html_requests", "fetch_backend": "html_webdriver" if os.getenv('PLAYWRIGHT_DRIVER_URL') or os.getenv("WEBDRIVER_URL") else "html_requests",
"proxy": "ui-0custom-test-proxy", "proxy": "ui-0custom-test-proxy",
"time_between_check_use_default": "y",
} }
res = client.post( res = client.post(

View File

@@ -66,6 +66,7 @@ def test_socks5(client, live_server, measure_memory_usage):
"proxy": "ui-0socks5proxy", "proxy": "ui-0socks5proxy",
"tags": "", "tags": "",
"url": test_url, "url": test_url,
"time_between_check_use_default": "y",
}, },
follow_redirects=True follow_redirects=True
) )

View File

@@ -53,6 +53,7 @@ def test_socks5_from_proxiesjson_file(client, live_server, measure_memory_usage)
"proxy": "socks5proxy", "proxy": "socks5proxy",
"tags": "", "tags": "",
"url": test_url, "url": test_url,
"time_between_check_use_default": "y",
}, },
follow_redirects=True follow_redirects=True
) )

View File

@@ -157,7 +157,8 @@ def test_check_notification_email_formats_default_Text_override_HTML(client, liv
data={
"url": test_url,
"notification_format": 'HTML',
-'fetch_backend': "html_requests"},
+'fetch_backend': "html_requests",
+"time_between_check_use_default": "y"},
follow_redirects=True
)
assert b"Updated watch." in res.data

View File

@@ -61,7 +61,8 @@ def test_check_removed_line_contains_trigger(client, live_server, measure_memory
data={"trigger_text": 'The golden line', data={"trigger_text": 'The golden line',
"url": test_url, "url": test_url,
'fetch_backend': "html_requests", 'fetch_backend': "html_requests",
'filter_text_removed': 'y'}, 'filter_text_removed': 'y',
"time_between_check_use_default": "y"},
follow_redirects=True follow_redirects=True
) )
assert b"Updated watch." in res.data assert b"Updated watch." in res.data
@@ -154,7 +155,8 @@ def test_check_add_line_contains_trigger(client, live_server, measure_memory_usa
'processor': 'text_json_diff', 'processor': 'text_json_diff',
'fetch_backend': "html_requests", 'fetch_backend': "html_requests",
'filter_text_removed': '', 'filter_text_removed': '',
'filter_text_added': 'y'}, 'filter_text_added': 'y',
"time_between_check_use_default": "y"},
follow_redirects=True follow_redirects=True
) )
assert b"Updated watch." in res.data assert b"Updated watch." in res.data

View File

@@ -23,7 +23,7 @@ def test_basic_auth(client, live_server, measure_memory_usage):
# Check form validation
res = client.post(
url_for("ui.ui_edit.edit_page", uuid="first"),
-data={"include_filters": "", "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests"},
+data={"include_filters": "", "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests", "time_between_check_use_default": "y"},
follow_redirects=True
)
assert b"Updated watch." in res.data

View File

@@ -86,7 +86,8 @@ def test_check_block_changedetection_text_NOT_present(client, live_server, measu
url_for("ui.ui_edit.edit_page", uuid="first"), url_for("ui.ui_edit.edit_page", uuid="first"),
data={"text_should_not_be_present": ignore_text, data={"text_should_not_be_present": ignore_text,
"url": test_url, "url": test_url,
'fetch_backend': "html_requests" 'fetch_backend': "html_requests",
"time_between_check_use_default": "y"
}, },
follow_redirects=True follow_redirects=True
) )

View File

@@ -105,6 +105,7 @@ def test_conditions_with_text_and_number(client, live_server):
"conditions-5-operator": "contains_regex", "conditions-5-operator": "contains_regex",
"conditions-5-field": "page_filtered_text", "conditions-5-field": "page_filtered_text",
"conditions-5-value": "\d", "conditions-5-value": "\d",
"time_between_check_use_default": "y",
}, },
follow_redirects=True follow_redirects=True
) )
@@ -288,7 +289,8 @@ def test_lev_conditions_plugin(client, live_server, measure_memory_usage):
"conditions_match_logic": CONDITIONS_MATCH_LOGIC_DEFAULT, # ALL = AND logic "conditions_match_logic": CONDITIONS_MATCH_LOGIC_DEFAULT, # ALL = AND logic
"conditions-0-field": "levenshtein_ratio", "conditions-0-field": "levenshtein_ratio",
"conditions-0-operator": "<", "conditions-0-operator": "<",
"conditions-0-value": "0.8" # needs to be more of a diff to trigger a change "conditions-0-value": "0.8", # needs to be more of a diff to trigger a change
"time_between_check_use_default": "y"
}, },
follow_redirects=True follow_redirects=True
) )

View File

@@ -95,7 +95,7 @@ def test_check_markup_include_filters_restriction(client, live_server, measure_m
# Add our URL to the import page
res = client.post(
url_for("ui.ui_edit.edit_page", uuid="first"),
-data={"include_filters": include_filters, "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests"},
+data={"include_filters": include_filters, "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests", "time_between_check_use_default": "y"},
follow_redirects=True
)
assert b"Updated watch." in res.data

@@ -154,7 +154,8 @@ def test_check_multiple_filters(client, live_server, measure_memory_usage):
"url": test_url,
"tags": "",
"headers": "",
-'fetch_backend': "html_requests"},
+'fetch_backend': "html_requests",
+"time_between_check_use_default": "y"},
follow_redirects=True
)
assert b"Updated watch." in res.data

@@ -208,7 +209,8 @@ def test_filter_is_empty_help_suggestion(client, live_server, measure_memory_usa
"url": test_url,
"tags": "",
"headers": "",
-'fetch_backend': "html_requests"},
+'fetch_backend': "html_requests",
+"time_between_check_use_default": "y"},
follow_redirects=True
)
assert b"Updated watch." in res.data

View File

@@ -171,6 +171,7 @@ def test_element_removal_full(client, live_server, measure_memory_usage):
"tags": "", "tags": "",
"headers": "", "headers": "",
"fetch_backend": "html_requests", "fetch_backend": "html_requests",
"time_between_check_use_default": "y",
}, },
follow_redirects=True, follow_redirects=True,
) )
@@ -245,6 +246,7 @@ body > table > tr:nth-child(3) > td:nth-child(3)""",
"url": test_url, "url": test_url,
"tags": "", "tags": "",
"fetch_backend": "html_requests", "fetch_backend": "html_requests",
"time_between_check_use_default": "y",
}, },
follow_redirects=True, follow_redirects=True,
) )

View File

@@ -127,7 +127,8 @@ def test_low_level_errors_clear_correctly(client, live_server, measure_memory_us
url_for("ui.ui_edit.edit_page", uuid="first"), url_for("ui.ui_edit.edit_page", uuid="first"),
data={ data={
"url": test_url, "url": test_url,
"fetch_backend": "html_requests"}, "fetch_backend": "html_requests",
"time_between_check_use_default": "y"},
follow_redirects=True follow_redirects=True
) )

View File

@@ -95,7 +95,8 @@ def test_check_filter_multiline(client, live_server, measure_memory_usage):
"url": test_url, "url": test_url,
"tags": "", "tags": "",
"headers": "", "headers": "",
'fetch_backend': "html_requests" 'fetch_backend': "html_requests",
"time_between_check_use_default": "y"
}, },
follow_redirects=True follow_redirects=True
) )
@@ -149,7 +150,8 @@ def test_check_filter_and_regex_extract(client, live_server, measure_memory_usag
"url": test_url, "url": test_url,
"tags": "", "tags": "",
"headers": "", "headers": "",
'fetch_backend': "html_requests" 'fetch_backend': "html_requests",
"time_between_check_use_default": "y"
}, },
follow_redirects=True follow_redirects=True
) )
@@ -222,7 +224,8 @@ def test_regex_error_handling(client, live_server, measure_memory_usage):
url_for("ui.ui_edit.edit_page", uuid="first"), url_for("ui.ui_edit.edit_page", uuid="first"),
data={"extract_text": '/something bad\d{3/XYZ', data={"extract_text": '/something bad\d{3/XYZ',
"url": test_url, "url": test_url,
"fetch_backend": "html_requests"}, "fetch_backend": "html_requests",
"time_between_check_use_default": "y"},
follow_redirects=True follow_redirects=True
) )

View File

@@ -94,7 +94,8 @@ def test_filter_doesnt_exist_then_exists_should_get_notification(client, live_se
"title": "my title", "title": "my title",
"headers": "", "headers": "",
"include_filters": '.ticket-available', "include_filters": '.ticket-available',
"fetch_backend": "html_requests"}) "fetch_backend": "html_requests",
"time_between_check_use_default": "y"})
res = client.post( res = client.post(
url_for("ui.ui_edit.edit_page", uuid="first"), url_for("ui.ui_edit.edit_page", uuid="first"),

View File

@@ -72,6 +72,7 @@ def run_filter_test(client, live_server, content_filter):
"notification_format": "Text", "notification_format": "Text",
"fetch_backend": "html_requests", "fetch_backend": "html_requests",
"filter_failure_notification_send": 'y', "filter_failure_notification_send": 'y',
"time_between_check_use_default": "y",
"headers": "", "headers": "",
"tags": "my tag", "tags": "my tag",
"title": "my title 123", "title": "my title 123",

View File

@@ -424,7 +424,8 @@ def test_order_of_filters_tag_filter_and_watch_filter(client, live_server, measu
"url": test_url, "url": test_url,
"tags": "test-tag-keep-order", "tags": "test-tag-keep-order",
"headers": "", "headers": "",
'fetch_backend': "html_requests"}, 'fetch_backend': "html_requests",
"time_between_check_use_default": "y"},
follow_redirects=True follow_redirects=True
) )
assert b"Updated watch." in res.data assert b"Updated watch." in res.data

View File

@@ -111,7 +111,7 @@ def test_check_ignore_text_functionality(client, live_server, measure_memory_usa
# Add our URL to the import page
res = client.post(
url_for("ui.ui_edit.edit_page", uuid="first"),
-data={"ignore_text": ignore_text, "url": test_url, 'fetch_backend': "html_requests"},
+data={"ignore_text": ignore_text, "url": test_url, 'fetch_backend': "html_requests", "time_between_check_use_default": "y"},
follow_redirects=True
)
assert b"Updated watch." in res.data

@@ -205,7 +205,7 @@ def _run_test_global_ignore(client, as_source=False, extra_ignore=""):
#Adding some ignore text should not trigger a change
res = client.post(
url_for("ui.ui_edit.edit_page", uuid="first"),
-data={"ignore_text": "something irrelevent but just to check", "url": test_url, 'fetch_backend': "html_requests"},
+data={"ignore_text": "something irrelevent but just to check", "url": test_url, 'fetch_backend': "html_requests", "time_between_check_use_default": "y"},
follow_redirects=True
)
assert b"Updated watch." in res.data

View File

@@ -108,7 +108,7 @@ def test_403_page_check_works_with_ignore_status_code(client, live_server, measu
# Add our URL to the import page
res = client.post(
url_for("ui.ui_edit.edit_page", uuid="first"),
-data={"ignore_status_codes": "y", "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests"},
+data={"ignore_status_codes": "y", "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests", "time_between_check_use_default": "y"},
follow_redirects=True
)
assert b"Updated watch." in res.data

View File

@@ -257,7 +257,8 @@ def check_json_filter(json_filter, client, live_server):
"url": test_url, "url": test_url,
"tags": "", "tags": "",
"headers": "", "headers": "",
"fetch_backend": "html_requests" "fetch_backend": "html_requests",
"time_between_check_use_default": "y"
}, },
follow_redirects=True follow_redirects=True
) )
@@ -328,7 +329,8 @@ def check_json_filter_bool_val(json_filter, client, live_server):
"url": test_url, "url": test_url,
"tags": "", "tags": "",
"headers": "", "headers": "",
"fetch_backend": "html_requests" "fetch_backend": "html_requests",
"time_between_check_use_default": "y"
}, },
follow_redirects=True follow_redirects=True
) )
@@ -393,7 +395,8 @@ def check_json_ext_filter(json_filter, client, live_server):
"url": test_url, "url": test_url,
"tags": "", "tags": "",
"headers": "", "headers": "",
"fetch_backend": "html_requests" "fetch_backend": "html_requests",
"time_between_check_use_default": "y"
}, },
follow_redirects=True follow_redirects=True
) )

View File

@@ -38,6 +38,7 @@ def test_content_filter_live_preview(client, live_server, measure_memory_usage):
"ignore_text": "something to ignore", "ignore_text": "something to ignore",
"trigger_text": "something to trigger", "trigger_text": "something to trigger",
"url": test_url, "url": test_url,
"time_between_check_use_default": "y",
}, },
follow_redirects=True follow_redirects=True
) )

View File

@@ -108,7 +108,8 @@ def test_check_notification(client, live_server, measure_memory_usage):
"tags": "my tag, my second tag", "tags": "my tag, my second tag",
"title": "my title", "title": "my title",
"headers": "", "headers": "",
"fetch_backend": "html_requests"}) "fetch_backend": "html_requests",
"time_between_check_use_default": "y"})
res = client.post( res = client.post(
url_for("ui.ui_edit.edit_page", uuid="first"), url_for("ui.ui_edit.edit_page", uuid="first"),
@@ -225,7 +226,8 @@ def test_check_notification(client, live_server, measure_memory_usage):
"notification_title": '', "notification_title": '',
"notification_body": '', "notification_body": '',
"notification_format": default_notification_format, "notification_format": default_notification_format,
"fetch_backend": "html_requests"}, "fetch_backend": "html_requests",
"time_between_check_use_default": "y"},
follow_redirects=True follow_redirects=True
) )
assert b"Updated watch." in res.data assert b"Updated watch." in res.data

View File

@@ -36,7 +36,8 @@ def test_check_notification_error_handling(client, live_server, measure_memory_u
"title": "", "title": "",
"headers": "", "headers": "",
"time_between_check-minutes": "180", "time_between_check-minutes": "180",
"fetch_backend": "html_requests"}, "fetch_backend": "html_requests",
"time_between_check_use_default": "y"},
follow_redirects=True follow_redirects=True
) )
assert b"Updated watch." in res.data assert b"Updated watch." in res.data

View File

@@ -44,7 +44,8 @@ def test_headers_in_request(client, live_server, measure_memory_usage):
"url": test_url, "url": test_url,
"tags": "", "tags": "",
"fetch_backend": 'html_webdriver' if os.getenv('PLAYWRIGHT_DRIVER_URL') else 'html_requests', "fetch_backend": 'html_webdriver' if os.getenv('PLAYWRIGHT_DRIVER_URL') else 'html_requests',
"headers": "jinja2:{{ 1+1 }}\nxxx:ooo\ncool:yeah\r\ncookie:"+cookie_header}, "headers": "jinja2:{{ 1+1 }}\nxxx:ooo\ncool:yeah\r\ncookie:"+cookie_header,
"time_between_check_use_default": "y"},
follow_redirects=True follow_redirects=True
) )
assert b"Updated watch." in res.data assert b"Updated watch." in res.data
@@ -109,7 +110,8 @@ def test_body_in_request(client, live_server, measure_memory_usage):
"tags": "", "tags": "",
"method": "POST", "method": "POST",
"fetch_backend": "html_requests", "fetch_backend": "html_requests",
"body": "something something"}, "body": "something something",
"time_between_check_use_default": "y"},
follow_redirects=True follow_redirects=True
) )
assert b"Updated watch." in res.data assert b"Updated watch." in res.data
@@ -126,7 +128,8 @@ def test_body_in_request(client, live_server, measure_memory_usage):
"tags": "", "tags": "",
"method": "POST", "method": "POST",
"fetch_backend": "html_requests", "fetch_backend": "html_requests",
"body": body_value}, "body": body_value,
"time_between_check_use_default": "y"},
follow_redirects=True follow_redirects=True
) )
assert b"Updated watch." in res.data assert b"Updated watch." in res.data
@@ -172,7 +175,8 @@ def test_body_in_request(client, live_server, measure_memory_usage):
"tags": "", "tags": "",
"method": "GET", "method": "GET",
"fetch_backend": "html_requests", "fetch_backend": "html_requests",
"body": "invalid"}, "body": "invalid",
"time_between_check_use_default": "y"},
follow_redirects=True follow_redirects=True
) )
assert b"Body must be empty when Request Method is set to GET" in res.data assert b"Body must be empty when Request Method is set to GET" in res.data
@@ -211,7 +215,8 @@ def test_method_in_request(client, live_server, measure_memory_usage):
"url": test_url, "url": test_url,
"tags": "", "tags": "",
"fetch_backend": "html_requests", "fetch_backend": "html_requests",
"method": "invalid"}, "method": "invalid",
"time_between_check_use_default": "y"},
follow_redirects=True follow_redirects=True
) )
assert b"Not a valid choice" in res.data assert b"Not a valid choice" in res.data
@@ -223,7 +228,8 @@ def test_method_in_request(client, live_server, measure_memory_usage):
"url": test_url, "url": test_url,
"tags": "", "tags": "",
"fetch_backend": "html_requests", "fetch_backend": "html_requests",
"method": "PATCH"}, "method": "PATCH",
"time_between_check_use_default": "y"},
follow_redirects=True follow_redirects=True
) )
assert b"Updated watch." in res.data assert b"Updated watch." in res.data
@@ -297,7 +303,8 @@ def test_ua_global_override(client, live_server, measure_memory_usage):
"tags": "testtag", "tags": "testtag",
"fetch_backend": 'html_requests', "fetch_backend": 'html_requests',
# Important - also test case-insensitive # Important - also test case-insensitive
"headers": "User-AGent: agent-from-watch"}, "headers": "User-AGent: agent-from-watch",
"time_between_check_use_default": "y"},
follow_redirects=True follow_redirects=True
) )
assert b"Updated watch." in res.data assert b"Updated watch." in res.data
@@ -365,7 +372,8 @@ def test_headers_textfile_in_request(client, live_server, measure_memory_usage):
"url": test_url, "url": test_url,
"tags": "testtag", "tags": "testtag",
"fetch_backend": 'html_webdriver' if os.getenv('PLAYWRIGHT_DRIVER_URL') else 'html_requests', "fetch_backend": 'html_webdriver' if os.getenv('PLAYWRIGHT_DRIVER_URL') else 'html_requests',
"headers": "xxx:ooo\ncool:yeah\r\n"}, "headers": "xxx:ooo\ncool:yeah\r\n",
"time_between_check_use_default": "y"},
follow_redirects=True follow_redirects=True
) )
assert b"Updated watch." in res.data assert b"Updated watch." in res.data
@@ -440,7 +448,8 @@ def test_headers_validation(client, live_server):
data={ data={
"url": test_url, "url": test_url,
"fetch_backend": 'html_requests', "fetch_backend": 'html_requests',
"headers": "User-AGent agent-from-watch\r\nsadfsadfsadfsdaf\r\n:foobar"}, "headers": "User-AGent agent-from-watch\r\nsadfsadfsadfsdaf\r\n:foobar",
"time_between_check_use_default": "y"},
follow_redirects=True follow_redirects=True
) )

View File

@@ -121,7 +121,7 @@ def test_itemprop_price_change(client, live_server):
set_original_response(props_markup=instock_props[0], price='120.45')
res = client.post(
url_for("ui.ui_edit.edit_page", uuid="first"),
-data={"restock_settings-follow_price_changes": "", "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests"},
+data={"restock_settings-follow_price_changes": "", "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests", "time_between_check_use_default": "y"},
follow_redirects=True
)
assert b"Updated watch." in res.data

@@ -155,7 +155,8 @@ def _run_test_minmax_limit(client, extra_watch_edit_form):
"url": test_url,
"headers": "",
"time_between_check-hours": 5,
-'fetch_backend': "html_requests"
+'fetch_backend': "html_requests",
+"time_between_check_use_default": "y"
}
data.update(extra_watch_edit_form)
res = client.post(

@@ -278,7 +279,8 @@ def test_itemprop_percent_threshold(client, live_server):
"url": test_url,
"tags": "",
"headers": "",
-'fetch_backend': "html_requests"
+'fetch_backend': "html_requests",
+"time_between_check_use_default": "y"
},
follow_redirects=True
)

View File

@@ -158,6 +158,7 @@ def test_rss_xpath_filtering(client, live_server, measure_memory_usage):
"proxy": "no-proxy", "proxy": "no-proxy",
"tags": "", "tags": "",
"url": test_url, "url": test_url,
"time_between_check_use_default": "y",
}, },
follow_redirects=True follow_redirects=True
) )

View File

@@ -1,10 +1,13 @@
#!/usr/bin/env python3
import time
+from copy import copy
from datetime import datetime, timezone
from zoneinfo import ZoneInfo
from flask import url_for
from .util import live_server_setup, wait_for_all_checks, extract_UUID_from_client
+from ..forms import REQUIRE_ATLEAST_ONE_TIME_PART_MESSAGE_DEFAULT, REQUIRE_ATLEAST_ONE_TIME_PART_WHEN_NOT_GLOBAL_DEFAULT

# def test_setup(client, live_server):
#    live_server_setup(live_server) # Setup on conftest per function
@@ -42,11 +45,12 @@ def test_check_basic_scheduler_functionality(client, live_server, measure_memory
uuid = next(iter(live_server.app.config['DATASTORE'].data['watching']))

# Setup all the days of the weeks using XXX as the placeholder for monday/tuesday/etc
+last_check = copy(live_server.app.config['DATASTORE'].data['watching'][uuid]['last_checked'])

tpl = {
"time_schedule_limit-XXX-start_time": "00:00",
"time_schedule_limit-XXX-duration-hours": 24,
"time_schedule_limit-XXX-duration-minutes": 0,
+"time_between_check-seconds": 1,
"time_schedule_limit-XXX-enabled": '', # All days are turned off
"time_schedule_limit-enabled": 'y', # Scheduler is enabled, all days however are off.
}

@@ -58,13 +62,13 @@ def test_check_basic_scheduler_functionality(client, live_server, measure_memory
new_key = key.replace("XXX", day)
scheduler_data[new_key] = value

-last_check = live_server.app.config['DATASTORE'].data['watching'][uuid]['last_checked']
data = {
"url": test_url,
-"fetch_backend": "html_requests"
+"fetch_backend": "html_requests",
+"time_between_check_use_default": "" # no
}
data.update(scheduler_data)
-time.sleep(1)
res = client.post(
url_for("ui.ui_edit.edit_page", uuid="first"),
data=data,
@@ -77,6 +81,7 @@ def test_check_basic_scheduler_functionality(client, live_server, measure_memory
# "Edit" should not trigger a check because it's not enabled in the schedule. # "Edit" should not trigger a check because it's not enabled in the schedule.
time.sleep(2) time.sleep(2)
# "time_schedule_limit-XXX-enabled": '', # All days are turned off, therefor, nothing should happen here..
assert live_server.app.config['DATASTORE'].data['watching'][uuid]['last_checked'] == last_check assert live_server.app.config['DATASTORE'].data['watching'][uuid]['last_checked'] == last_check
# Enabling today in Kiritimati should work flawless # Enabling today in Kiritimati should work flawless
@@ -177,3 +182,44 @@ def test_check_basic_global_scheduler_functionality(client, live_server, measure
# Cleanup everything
res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)
assert b'Deleted' in res.data
+
+
+def test_validation_time_interval_field(client, live_server, measure_memory_usage):
+    test_url = url_for('test_endpoint', _external=True)
+
+    res = client.post(
+        url_for("imports.import_page"),
+        data={"urls": test_url},
+        follow_redirects=True
+    )
+    assert b"1 Imported" in res.data
+
+    res = client.post(
+        url_for("ui.ui_edit.edit_page", uuid="first"),
+        data={"trigger_text": 'The golden line',
+              "url": test_url,
+              'fetch_backend': "html_requests",
+              'filter_text_removed': 'y',
+              "time_between_check_use_default": ""
+              },
+        follow_redirects=True
+    )
+    assert REQUIRE_ATLEAST_ONE_TIME_PART_WHEN_NOT_GLOBAL_DEFAULT.encode('utf-8') in res.data
+
+    # Now set atleast something
+    res = client.post(
+        url_for("ui.ui_edit.edit_page", uuid="first"),
+        data={"trigger_text": 'The golden line',
+              "url": test_url,
+              'fetch_backend': "html_requests",
+              "time_between_check-minutes": 1,
+              "time_between_check_use_default": ""
+              },
+        follow_redirects=True
+    )
+    assert REQUIRE_ATLEAST_ONE_TIME_PART_WHEN_NOT_GLOBAL_DEFAULT.encode('utf-8') not in res.data

View File

@@ -27,7 +27,7 @@ def test_basic_search(client, live_server, measure_memory_usage):
res = client.post(
url_for("ui.ui_edit.edit_page", uuid="first"),
-data={"title": "xxx-title", "url": urls[0], "tags": "", "headers": "", 'fetch_backend': "html_requests"},
+data={"title": "xxx-title", "url": urls[0], "tags": "", "headers": "", 'fetch_backend': "html_requests", "time_between_check_use_default": "y"},
follow_redirects=True
)
assert b"Updated watch." in res.data

@@ -62,7 +62,7 @@ def test_search_in_tag_limit(client, live_server, measure_memory_usage):
res = client.post(
url_for("ui.ui_edit.edit_page", uuid="first"),
data={"title": "xxx-title", "url": urls[0].split(' ')[0], "tags": urls[0].split(' ')[1], "headers": "",
-'fetch_backend': "html_requests"},
+'fetch_backend': "html_requests", "time_between_check_use_default": "y"},
follow_redirects=True
)
assert b"Updated watch." in res.data

View File

@@ -41,7 +41,8 @@ def test_bad_access(client, live_server, measure_memory_usage):
"tags": "", "tags": "",
"method": "GET", "method": "GET",
"fetch_backend": "html_requests", "fetch_backend": "html_requests",
"body": ""}, "body": "",
"time_between_check_use_default": "y"},
follow_redirects=True follow_redirects=True
) )
@@ -150,7 +151,8 @@ def test_xss_watch_last_error(client, live_server, measure_memory_usage):
data={ data={
"include_filters": '<a href="https://foobar"></a><script>alert(123);</script>', "include_filters": '<a href="https://foobar"></a><script>alert(123);</script>',
"url": url_for('test_endpoint', _external=True), "url": url_for('test_endpoint', _external=True),
'fetch_backend': "html_requests" 'fetch_backend': "html_requests",
"time_between_check_use_default": "y"
}, },
follow_redirects=True follow_redirects=True
) )

View File

@@ -29,7 +29,7 @@ def test_share_watch(client, live_server, measure_memory_usage):
# Add our URL to the import page
res = client.post(
url_for("ui.ui_edit.edit_page", uuid="first"),
-data={"include_filters": include_filters, "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests"},
+data={"include_filters": include_filters, "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests", "time_between_check_use_default": "y"},
follow_redirects=True
)
assert b"Updated watch." in res.data

View File

@@ -77,7 +77,7 @@ def test_check_ignore_elements(client, live_server, measure_memory_usage):
client.post(
url_for("ui.ui_edit.edit_page", uuid="first"),
-data={"include_filters": 'span,p', "url": test_url, "tags": "", "subtractive_selectors": ".foobar-detection", 'fetch_backend': "html_requests"},
+data={"include_filters": 'span,p', "url": test_url, "tags": "", "subtractive_selectors": ".foobar-detection", 'fetch_backend': "html_requests", "time_between_check_use_default": "y"},
follow_redirects=True
)

View File

@@ -81,7 +81,8 @@ def test_trigger_functionality(client, live_server, measure_memory_usage):
url_for("ui.ui_edit.edit_page", uuid="first"), url_for("ui.ui_edit.edit_page", uuid="first"),
data={"trigger_text": trigger_text, data={"trigger_text": trigger_text,
"url": test_url, "url": test_url,
"fetch_backend": "html_requests"}, "fetch_backend": "html_requests",
"time_between_check_use_default": "y"},
follow_redirects=True follow_redirects=True
) )
assert b"Updated watch." in res.data assert b"Updated watch." in res.data

View File

@@ -49,7 +49,8 @@ def test_trigger_regex_functionality(client, live_server, measure_memory_usage):
url_for("ui.ui_edit.edit_page", uuid="first"), url_for("ui.ui_edit.edit_page", uuid="first"),
data={"trigger_text": '/something \d{3}/', data={"trigger_text": '/something \d{3}/',
"url": test_url, "url": test_url,
"fetch_backend": "html_requests"}, "fetch_backend": "html_requests",
"time_between_check_use_default": "y"},
follow_redirects=True follow_redirects=True
) )
wait_for_all_checks(client) wait_for_all_checks(client)

View File

@@ -50,7 +50,8 @@ def test_trigger_regex_functionality_with_filter(client, live_server, measure_me
data={"trigger_text": "/cool.stuff/", data={"trigger_text": "/cool.stuff/",
"url": test_url, "url": test_url,
"include_filters": '#in-here', "include_filters": '#in-here',
"fetch_backend": "html_requests"}, "fetch_backend": "html_requests",
"time_between_check_use_default": "y"},
follow_redirects=True follow_redirects=True
) )

View File

@@ -92,7 +92,8 @@ def test_unique_lines_functionality(client, live_server, measure_memory_usage):
url_for("ui.ui_edit.edit_page", uuid="first"), url_for("ui.ui_edit.edit_page", uuid="first"),
data={"check_unique_lines": "y", data={"check_unique_lines": "y",
"url": test_url, "url": test_url,
"fetch_backend": "html_requests"}, "fetch_backend": "html_requests",
"time_between_check_use_default": "y"},
follow_redirects=True follow_redirects=True
) )
assert b"Updated watch." in res.data assert b"Updated watch." in res.data
@@ -140,7 +141,8 @@ def test_sort_lines_functionality(client, live_server, measure_memory_usage):
url_for("ui.ui_edit.edit_page", uuid="first"), url_for("ui.ui_edit.edit_page", uuid="first"),
data={"sort_text_alphabetically": "n", data={"sort_text_alphabetically": "n",
"url": test_url, "url": test_url,
"fetch_backend": "html_requests"}, "fetch_backend": "html_requests",
"time_between_check_use_default": "y"},
follow_redirects=True follow_redirects=True
) )
assert b"Updated watch." in res.data assert b"Updated watch." in res.data
@@ -192,7 +194,8 @@ def test_extra_filters(client, live_server, measure_memory_usage):
"trim_text_whitespace": "y", "trim_text_whitespace": "y",
"sort_text_alphabetically": "", # leave this OFF for testing "sort_text_alphabetically": "", # leave this OFF for testing
"url": test_url, "url": test_url,
"fetch_backend": "html_requests"}, "fetch_backend": "html_requests",
"time_between_check_use_default": "y"},
follow_redirects=True follow_redirects=True
) )
assert b"Updated watch." in res.data assert b"Updated watch." in res.data

View File

@@ -28,7 +28,8 @@ def test_check_watch_field_storage(client, live_server, measure_memory_usage):
"url": test_url, "url": test_url,
"tags": "woohoo", "tags": "woohoo",
"headers": "curl:foo", "headers": "curl:foo",
'fetch_backend': "html_requests" 'fetch_backend': "html_requests",
"time_between_check_use_default": "y"
}, },
follow_redirects=True follow_redirects=True
) )

View File

@@ -92,7 +92,7 @@ def test_check_xpath_filter_utf8(client, live_server, measure_memory_usage):
wait_for_all_checks(client)
res = client.post(
url_for("ui.ui_edit.edit_page", uuid="first"),
-data={"include_filters": filter, "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests"},
+data={"include_filters": filter, "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests", "time_between_check_use_default": "y"},
follow_redirects=True
)
assert b"Updated watch." in res.data

@@ -146,7 +146,7 @@ def test_check_xpath_text_function_utf8(client, live_server, measure_memory_usag
wait_for_all_checks(client)
res = client.post(
url_for("ui.ui_edit.edit_page", uuid="first"),
-data={"include_filters": filter, "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests"},
+data={"include_filters": filter, "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests", "time_between_check_use_default": "y"},
follow_redirects=True
)
assert b"Updated watch." in res.data

@@ -188,7 +188,7 @@ def test_check_markup_xpath_filter_restriction(client, live_server, measure_memo
# Add our URL to the import page
res = client.post(
url_for("ui.ui_edit.edit_page", uuid="first"),
-data={"include_filters": xpath_filter, "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests"},
+data={"include_filters": xpath_filter, "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests", "time_between_check_use_default": "y"},
follow_redirects=True
)
assert b"Updated watch." in res.data

@@ -226,7 +226,7 @@ def test_xpath_validation(client, live_server, measure_memory_usage):
res = client.post(
url_for("ui.ui_edit.edit_page", uuid="first"),
-data={"include_filters": "/something horrible", "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests"},
+data={"include_filters": "/something horrible", "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests", "time_between_check_use_default": "y"},
follow_redirects=True
)
assert b"is not a valid XPath expression" in res.data

@@ -247,7 +247,7 @@ def test_xpath23_prefix_validation(client, live_server, measure_memory_usage):
res = client.post(
url_for("ui.ui_edit.edit_page", uuid="first"),
-data={"include_filters": "xpath:/something horrible", "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests"},
+data={"include_filters": "xpath:/something horrible", "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests", "time_between_check_use_default": "y"},
follow_redirects=True
)
assert b"is not a valid XPath expression" in res.data

@@ -298,7 +298,7 @@ def test_xpath1_lxml(client, live_server, measure_memory_usage):
res = client.post(
url_for("ui.ui_edit.edit_page", uuid="first"),
data={"include_filters": "xpath1://title/text()", "url": test_url, "tags": "", "headers": "",
-'fetch_backend': "html_requests"},
+'fetch_backend': "html_requests", "time_between_check_use_default": "y"},
follow_redirects=True
)

@@ -331,7 +331,7 @@ def test_xpath1_validation(client, live_server, measure_memory_usage):
res = client.post(
url_for("ui.ui_edit.edit_page", uuid="first"),
-data={"include_filters": "xpath1:/something horrible", "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests"},
+data={"include_filters": "xpath1:/something horrible", "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests", "time_between_check_use_default": "y"},
follow_redirects=True
)
assert b"is not a valid XPath expression" in res.data

@@ -359,7 +359,7 @@ def test_check_with_prefix_include_filters(client, live_server, measure_memory_u
res = client.post(
url_for("ui.ui_edit.edit_page", uuid="first"),
data={"include_filters": "xpath://*[contains(@class, 'sametext')]", "url": test_url, "tags": "", "headers": "",
-'fetch_backend': "html_requests"},
+'fetch_backend': "html_requests", "time_between_check_use_default": "y"},
follow_redirects=True
)

@@ -413,7 +413,8 @@ def test_various_rules(client, live_server, measure_memory_usage):
"url": test_url,
"tags": "",
"headers": "",
-'fetch_backend': "html_requests"},
+'fetch_backend': "html_requests",
+"time_between_check_use_default": "y"},
follow_redirects=True
)
wait_for_all_checks(client)

@@ -444,7 +445,8 @@ def test_xpath_20(client, live_server, measure_memory_usage):
"url": test_url,
"tags": "",
"headers": "",
-'fetch_backend': "html_requests"},
+'fetch_backend': "html_requests",
+"time_between_check_use_default": "y"},
follow_redirects=True
)

@@ -481,7 +483,8 @@ def test_xpath_20_function_count(client, live_server, measure_memory_usage):
"url": test_url,
"tags": "",
"headers": "",
-'fetch_backend': "html_requests"},
+'fetch_backend': "html_requests",
+"time_between_check_use_default": "y"},
follow_redirects=True
)

@@ -517,7 +520,8 @@ def test_xpath_20_function_count2(client, live_server, measure_memory_usage):
"url": test_url,
"tags": "",
"headers": "",
-'fetch_backend': "html_requests"},
+'fetch_backend': "html_requests",
+"time_between_check_use_default": "y"},
follow_redirects=True
)

@@ -554,7 +558,8 @@ def test_xpath_20_function_string_join_matches(client, live_server, measure_memo
"url": test_url,
"tags": "",
"headers": "",
-'fetch_backend': "html_requests"},
+'fetch_backend': "html_requests",
+"time_between_check_use_default": "y"},
follow_redirects=True
)

View File

@@ -36,6 +36,7 @@ def test_visual_selector_content_ready(client, live_server, measure_memory_usage
# For now, cookies doesnt work in headers because it must be a full cookiejar object
'headers': "testheader: yes\buser-agent: MyCustomAgent",
'fetch_backend': "html_webdriver",
+"time_between_check_use_default": "y",
},
follow_redirects=True
)

@@ -116,6 +117,7 @@ def test_basic_browserstep(client, live_server, measure_memory_usage):
'browser_steps-1-optional_value': '',
# For now, cookies doesnt work in headers because it must be a full cookiejar object
'headers': "testheader: yes\buser-agent: MyCustomAgent",
+"time_between_check_use_default": "y",
},
follow_redirects=True
)

@@ -167,7 +169,8 @@ def test_non_200_errors_report_browsersteps(client, live_server):
'fetch_backend': "html_webdriver",
'browser_steps-0-operation': 'Click element',
'browser_steps-0-selector': 'button[name=test-button]',
-'browser_steps-0-optional_value': ''
+'browser_steps-0-optional_value': '',
+"time_between_check_use_default": "y"
},
follow_redirects=True
)

View File

@@ -188,6 +188,10 @@ components:
          seconds:
            type: integer
        description: Time intervals between checks
+      time_between_check_use_default:
+        type: boolean
+        default: true
+        description: Whether to use global settings for time between checks - defaults to true if not set
      notification_urls:
        type: array
        items:
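
End to end, the new flag can be exercised through the API roughly as follows. This is a hedged sketch: the base URL, the /api/v1/watch/<uuid> path, the x-api-key header and the UUID are assumptions based on the usual changedetection.io API conventions, not taken from this diff.

# Rough sketch - adjust URL, key and UUID to your instance.
import requests

BASE = "http://localhost:5000/api/v1"       # assumed local instance
HEADERS = {"x-api-key": "YOUR_API_KEY"}     # assumed API key header
uuid = "existing-watch-uuid"                # hypothetical watch UUID

# Opting out of global settings without any interval should now be rejected with HTTP 400.
bad = requests.put(f"{BASE}/watch/{uuid}", headers=HEADERS, json={
    "time_between_check_use_default": False,
})
print(bad.status_code, bad.text)  # expect 400 and the "At least one time interval..." message

# Supplying any non-zero unit satisfies the new validation.
ok = requests.put(f"{BASE}/watch/{uuid}", headers=HEADERS, json={
    "time_between_check_use_default": False,
    "time_between_check": {"minutes": 30},
})
print(ok.status_code)  # expect 200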