mirror of https://github.com/dgtlmoon/changedetection.io.git
synced 2026-03-18 01:38:13 +00:00

Compare commits
5 Commits: cpu-memory ... multiple-g

| Author | SHA1 | Date |
|---|---|---|
|  | 62e1259750 |  |
|  | 380d8a26a1 |  |
|  | 02c03fc32b |  |
|  | db3d38b3ee |  |
|  | ecd8af94f6 |  |
@@ -61,8 +61,22 @@ import time
 # ==============================================================================

 import multiprocessing
 import os
 import sys

+# Limit glibc malloc arena count to prevent RSS growth from concurrent requests.
+# Default: glibc creates up to 8×CPU_cores arenas. Each concurrent thread/connection
+# can trigger a new arena, and freed memory stays mapped in those arenas as RSS forever.
+# With MALLOC_ARENA_MAX=2, at most 2 arenas are used; freed pages return to the OS faster.
+# Must be set before worker threads start; env var is read lazily by glibc on first arena creation.
+if 'MALLOC_ARENA_MAX' not in os.environ:
+    os.environ['MALLOC_ARENA_MAX'] = '2'
+try:
+    import ctypes as _ctypes
+    _ctypes.CDLL('libc.so.6').mallopt(-8, 2)  # M_ARENA_MAX = -8
+except Exception:
+    pass

 # Set spawn as global default (safety net - all our code uses explicit contexts anyway)
 # Skip in tests to avoid breaking pytest-flask's LiveServer fixture (uses unpicklable local functions)
 if 'pytest' not in sys.modules:
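A quick way to confirm the arena cap took effect at runtime is glibc's malloc_stats(), which prints a per-arena summary to stderr. This is a minimal sketch under the assumption of Linux with glibc (libc.so.6); on musl or macOS the call simply is not available.

```python
# Hedged sketch: with MALLOC_ARENA_MAX=2 in effect, the stderr report should show
# at most two "Arena N" sections, even after many threads have allocated.
import ctypes

libc = ctypes.CDLL("libc.so.6")   # glibc only (assumption)
libc.malloc_stats()               # void function; writes arena/heap stats to stderr
```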
@@ -177,6 +177,13 @@ class Tag(Resource):

         new_uuid = self.datastore.add_tag(title=title)
         if new_uuid:
+            # Apply any extra fields (e.g. processor_config_restock_diff) beyond just title
+            extra = {k: v for k, v in json_data.items() if k != 'title'}
+            if extra:
+                tag = self.datastore.data['settings']['application']['tags'].get(new_uuid)
+                if tag:
+                    tag.update(extra)
+                    tag.commit()
             return {'uuid': new_uuid}, 201
         else:
             return "Invalid or unsupported tag", 400
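With this change a group can carry its processor configuration from the very first request instead of needing a follow-up PUT. A hedged sketch of that single-POST flow, assuming the tag create endpoint lives at /api/v1/tag and uses the x-api-key header (as the tests in this branch do); adjust host and key for your instance.

```python
# Hedged sketch: create a tag together with its restock processor config in one POST.
import requests

resp = requests.post(
    "http://localhost:5000/api/v1/tag",       # assumed endpoint path
    json={
        "title": "Restock Group",
        "overrides_watch": True,
        "processor_config_restock_diff": {
            "in_stock_processing": "in_stock_only",
            "follow_price_changes": True,
        },
    },
    headers={"x-api-key": "YOUR_API_KEY"},
    timeout=10,
)
resp.raise_for_status()                        # expects 201 Created
print(resp.json()["uuid"])                     # UUID of the newly created tag
```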
@@ -10,7 +10,8 @@ from changedetectionio import html_tools

 def construct_blueprint(datastore: ChangeDetectionStore):
     preview_blueprint = Blueprint('ui_preview', __name__, template_folder="../ui/templates")

-    @preview_blueprint.route("/preview/<uuid_str:uuid>", methods=['GET'])
+    @preview_blueprint.route("/preview/<uuid_str:uuid>", methods=['GET', 'POST'])
     @login_optionally_required
     def preview_page(uuid):
         """
@@ -74,7 +75,9 @@ def construct_blueprint(datastore: ChangeDetectionStore):
             flash(gettext("Preview unavailable - No fetch/check completed or triggers not reached"), "error")
         else:
             # So prepare the latest preview or not
-            preferred_version = request.args.get('version')
+            preferred_version = request.values.get('version') if request.method == 'POST' else request.args.get('version')

             versions = list(watch.history.keys())
             timestamp = versions[-1]
             if preferred_version and preferred_version in versions:
@@ -17,7 +17,7 @@
 <script src="{{ url_for('static_content', group='js', filename='tabs.js') }}" defer></script>
 {% if versions|length >= 2 %}
 <div id="diff-form" style="text-align: center;">
-    <form class="pure-form " action="" method="POST">
+    <form class="pure-form " action="{{url_for('ui.ui_preview.preview_page', uuid=uuid)}}" method="POST">
         <fieldset>
             <label for="preview-version">{{ _('Select timestamp') }}</label> <select id="preview-version"
                                                                                      name="from_version"
@@ -28,6 +28,7 @@
             </option>
             {% endfor %}
         </select>
+        <input type="hidden" name="csrf_token" value="{{ csrf_token() }}">
         <button type="submit" class="pure-button pure-button-primary">{{ _('Go') }}</button>

     </fieldset>
@@ -81,6 +81,7 @@ def construct_blueprint(datastore: ChangeDetectionStore, update_q, queuedWatchMe

         sorted_tags = sorted(datastore.data['settings']['application'].get('tags').items(), key=lambda x: x[1]['title'])

+        proxy_list = datastore.proxy_list
         output = render_template(
             "watch-overview.html",
             active_tag=active_tag,
@@ -92,7 +93,7 @@ def construct_blueprint(datastore: ChangeDetectionStore, update_q, queuedWatchMe
             form=form,
             generate_tag_colors=processors.generate_processor_badge_colors,
             guid=datastore.data['app_guid'],
-            has_proxies=datastore.proxy_list,
+            has_proxies=proxy_list,
             hosted_sticky=os.getenv("SALTED_PASS", False) == False,
             now_time_server=round(time.time()),
             pagination=pagination,
@@ -110,6 +111,16 @@ def construct_blueprint(datastore: ChangeDetectionStore, update_q, queuedWatchMe
             watches=sorted_watches
         )

+        # Return freed template-building memory to the OS immediately.
+        # render_template allocates ~20MB of intermediate strings that are freed on return,
+        # but glibc keeps those pages mapped in its arenas as RSS. malloc_trim() forces
+        # glibc to release them, preventing RSS growth from concurrent Chrome connections.
+        try:
+            import ctypes
+            ctypes.CDLL('libc.so.6').malloc_trim(0)
+        except Exception:
+            pass
+
         if session.get('share-link'):
             del (session['share-link'])
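The same trim-after-render idea could be reused by other memory-heavy views, so it may be worth keeping the ctypes lookup in one place. A hedged helper sketch, assuming glibc; on non-glibc platforms (musl, macOS, Windows) it degrades to a no-op.

```python
# Hedged sketch: resolve libc once, expose a best-effort trim that never raises.
import ctypes

try:
    _libc = ctypes.CDLL("libc.so.6")   # glibc only (assumption)
except OSError:
    _libc = None                       # musl/macOS/Windows: nothing to trim

def release_freed_memory() -> None:
    """Ask glibc to return free heap pages to the OS (best effort, no-op elsewhere)."""
    if _libc is not None:
        try:
            _libc.malloc_trim(0)
        except Exception:
            pass
```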
@@ -213,12 +213,13 @@ html[data-darkmode="true"] .watch-tag-list.tag-{{ class_name }} {
 {%- set checking_now = is_checking_now(watch) -%}
 {%- set history_n = watch.history_n -%}
 {%- set favicon = watch.get_favicon_filename() -%}
+{%- set error_texts = watch.compile_error_texts(has_proxies=has_proxies) -%}
 {%- set system_use_url_watchlist = datastore.data['settings']['application']['ui'].get('use_page_title_in_list') -%}
 {# Class settings mirrored in changedetectionio/static/js/realtime.js for the frontend #}
 {%- set row_classes = [
     loop.cycle('pure-table-odd', 'pure-table-even'),
     'processor-' ~ watch['processor'],
-    'has-error' if watch.compile_error_texts()|length > 2 else '',
+    'has-error' if error_texts|length > 2 else '',
     'paused' if watch.paused is defined and watch.paused != False else '',
     'unviewed' if watch.has_unviewed else '',
     'has-restock-info' if watch.has_restock_info else 'no-restock-info',
@@ -271,7 +272,7 @@ html[data-darkmode="true"] .watch-tag-list.tag-{{ class_name }} {
 {% endif %}
 <a class="external" target="_blank" rel="noopener" href="{{ watch.link.replace('source:','') }}"> </a>
 </span>
-<div class="error-text" style="display:none;">{{ watch.compile_error_texts(has_proxies=datastore.proxy_list)|safe }}</div>
+<div class="error-text" style="display:none;">{{ error_texts|safe }}</div>
 {%- if watch['processor'] == 'text_json_diff' -%}
 {%- if watch['has_ldjson_price_data'] and not watch['track_ldjson_price_data'] -%}
 <div class="ldjson-price-track-offer">Switch to Restock & Price watch mode? <a href="{{url_for('price_data_follower.accept', uuid=watch.uuid)}}" class="pure-button button-xsmall">Yes</a> <a href="{{url_for('price_data_follower.reject', uuid=watch.uuid)}}" class="">No</a></div>
@@ -4,6 +4,7 @@ import flask_login
 import locale
 import os
 import queue
+import re
 import sys
 import threading
 import time
@@ -387,6 +388,8 @@ def _jinja2_filter_fetcher_status_icons(fetcher_name):

     return ''

+_RE_SANITIZE_TAG = re.compile(r'[^a-zA-Z0-9]')
+
 @app.template_filter('sanitize_tag_class')
 def _jinja2_filter_sanitize_tag_class(tag_title):
     """Sanitize a tag title to create a valid CSS class name.
@@ -398,9 +401,8 @@ def _jinja2_filter_sanitize_tag_class(tag_title):
     Returns:
         str: A sanitized string suitable for use as a CSS class name
     """
-    import re
     # Remove all non-alphanumeric characters and convert to lowercase
-    sanitized = re.sub(r'[^a-zA-Z0-9]', '', tag_title).lower()
+    sanitized = _RE_SANITIZE_TAG.sub('', tag_title).lower()
     # Ensure it starts with a letter (CSS requirement)
     if sanitized and not sanitized[0].isalpha():
         sanitized = 'tag' + sanitized
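Compiling the pattern once at import time avoids re-importing re and recompiling the regex on every template call; the filter's output is unchanged. A small, hedged illustration of the expected behaviour outside the Flask app:

```python
# Hedged sketch: same transformation as the template filter, standalone.
import re

_RE_SANITIZE_TAG = re.compile(r'[^a-zA-Z0-9]')

def sanitize_tag_class(tag_title: str) -> str:
    sanitized = _RE_SANITIZE_TAG.sub('', tag_title).lower()
    if sanitized and not sanitized[0].isalpha():
        sanitized = 'tag' + sanitized   # CSS class names must start with a letter
    return sanitized

assert sanitize_tag_class("3D Printers & Parts") == 'tag3dprintersparts'
```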
@@ -488,28 +490,21 @@ def changedetection_app(config=None, datastore_o=None):
     available_languages = get_available_languages()
     language_codes = get_language_codes()

-    def get_locale():
-        # Locale aliases: map browser language codes to translation directory names
-        # This handles cases where browsers send standard codes (e.g., zh-TW)
-        # but our translations use more specific codes (e.g., zh_Hant_TW)
-        locale_aliases = {
-            'zh-TW': 'zh_Hant_TW',  # Traditional Chinese: browser sends zh-TW, we use zh_Hant_TW
-            'zh_TW': 'zh_Hant_TW',  # Also handle underscore variant
-        }
+    _locale_aliases = {
+        'zh-TW': 'zh_Hant_TW',  # Traditional Chinese: browser sends zh-TW, we use zh_Hant_TW
+        'zh_TW': 'zh_Hant_TW',  # Also handle underscore variant
+    }
+    _locale_match_list = language_codes + list(_locale_aliases.keys())
+
+    def get_locale():
         # 1. Try to get locale from session (user explicitly selected)
         if 'locale' in session:
             return session['locale']

         # 2. Fall back to Accept-Language header
-        # Get the best match from browser's Accept-Language header
-        browser_locale = request.accept_languages.best_match(language_codes + list(locale_aliases.keys()))
-
-        # 3. Check if we need to map the browser locale to our internal locale
-        if browser_locale in locale_aliases:
-            return locale_aliases[browser_locale]
-
-        return browser_locale
+        browser_locale = request.accept_languages.best_match(_locale_match_list)
+        # 3. Map browser locale to our internal locale if needed
+        return _locale_aliases.get(browser_locale, browser_locale)

     # Initialize Babel with locale selector
     babel = Babel(app, locale_selector=get_locale)
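With the alias map and match list hoisted out of the request path, per-request work reduces to best_match() plus a dict lookup. A hedged, Flask-free illustration of how the alias resolution behaves:

```python
# Hedged sketch: alias resolution only, without session or Accept-Language handling.
_locale_aliases = {'zh-TW': 'zh_Hant_TW', 'zh_TW': 'zh_Hant_TW'}

def resolve(browser_locale: str | None) -> str | None:
    # Map standard browser codes to translation directory names, else pass through.
    return _locale_aliases.get(browser_locale, browser_locale)

assert resolve('zh-TW') == 'zh_Hant_TW'   # aliased to the translation directory name
assert resolve('es') == 'es'              # unknown codes pass through unchanged
```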
@@ -1022,15 +1017,16 @@ def check_for_new_version():
     import urllib3
     urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

+    session = requests.Session()
+    session.verify = False
+
     while not app.config.exit.is_set():
         try:
-            r = requests.post("https://changedetection.io/check-ver.php",
+            r = session.post("https://changedetection.io/check-ver.php",
                              data={'version': __version__,
                                    'app_guid': datastore.data['app_guid'],
                                    'watch_count': len(datastore.data['watching'])
-                                   },
-                             verify=False)
+                                   })
         except:
             pass
@@ -43,6 +43,11 @@ from ..html_tools import TRANSLATE_WHITESPACE_TABLE
 FAVICON_RESAVE_THRESHOLD_SECONDS=86400
 BROTLI_COMPRESS_SIZE_THRESHOLD = int(os.getenv('SNAPSHOT_BROTLI_COMPRESSION_THRESHOLD', 1024*20))

+# Module-level favicon filename cache: data_dir → basename (or None)
+# Keyed by data_dir so it survives Watch object recreation, deepcopy, and concurrent requests.
+# Invalidated explicitly in bump_favicon() when a new favicon is saved.
+_FAVICON_FILENAME_CACHE: dict = {}
+
 minimum_seconds_recheck_time = int(os.getenv('MINIMUM_SECONDS_RECHECK_TIME', 3))
 mtable = {'seconds': 1, 'minutes': 60, 'hours': 3600, 'days': 86400, 'weeks': 86400 * 7}
@@ -806,9 +811,8 @@ class model(EntityPersistenceMixin, watch_base):
         with open(fname, 'wb') as f:
             f.write(decoded)

-        # Invalidate favicon filename cache
-        if hasattr(self, '_favicon_filename_cache'):
-            delattr(self, '_favicon_filename_cache')
+        # Invalidate module-level favicon filename cache for this watch
+        _FAVICON_FILENAME_CACHE.pop(self.data_dir, None)

         # A signal that could trigger the socket server to update the browser also
         watch_check_update = signal('watch_favicon_bump')
@@ -823,35 +827,23 @@ class model(EntityPersistenceMixin, watch_base):

     def get_favicon_filename(self) -> str | None:
         """
-        Find any favicon.* file in the current working directory
-        and return the contents of the newest one.
+        Find any favicon.* file in the watch data directory.

-        MEMORY LEAK FIX: Cache the result to avoid repeated glob.glob() operations.
-        glob.glob() causes millions of fnmatch allocations when called for every watch on page load.
+        Uses a module-level cache keyed by data_dir to survive Watch object recreation,
+        deepcopy (which drops instance attrs), and concurrent request races.
+        Invalidated by bump_favicon() when a new favicon is saved.

         Returns:
-            str: Basename of the newest favicon file, or None if not found.
+            str: Basename of the favicon file, or None if not found.
         """
-        # Check cache first (prevents 26M+ allocations from repeated glob operations)
-        cache_key = '_favicon_filename_cache'
-        if hasattr(self, cache_key):
-            return getattr(self, cache_key)
+        if self.data_dir in _FAVICON_FILENAME_CACHE:
+            return _FAVICON_FILENAME_CACHE[self.data_dir]

         import glob

         # Search for all favicon.* files
         files = glob.glob(os.path.join(self.data_dir, "favicon.*"))

-        if not files:
-            result = None
-        else:
-            # Find the newest by modification time
-            newest_file = max(files, key=os.path.getmtime)
-            result = os.path.basename(newest_file)
-
-        # Cache the result
-        setattr(self, cache_key, result)
-        return result
+        fname = os.path.basename(files[0]) if files else None
+        _FAVICON_FILENAME_CACHE[self.data_dir] = fname
+        return fname

     def get_screenshot_as_thumbnail(self, max_age=3200):
         """Return path to a square thumbnail of the most recent screenshot.
@@ -1182,18 +1174,13 @@ class model(EntityPersistenceMixin, watch_base):
     def compile_error_texts(self, has_proxies=None):
         """Compile error texts for this watch.
        Accepts has_proxies parameter to ensure it works even outside app context"""
-        from flask import url_for
+        from flask import url_for, has_request_context
         from markupsafe import Markup

         output = []  # Initialize as list since we're using append
         last_error = self.get('last_error','')

-        try:
-            url_for('settings.settings_page')
-        except Exception as e:
-            has_app_context = False
-        else:
-            has_app_context = True
+        has_app_context = has_request_context()

         # has app+request context, we can use url_for()
         if has_app_context:
@@ -6,6 +6,7 @@ Extracted from update_worker.py to provide standalone notification functionality
 for both sync and async workers
 """
 import datetime
+from copy import deepcopy

 import pytz
 from loguru import logger
@@ -352,7 +353,7 @@ class NotificationService:
         """
        Send notification when content changes are detected
         """
-        n_object = NotificationContextData()
+
         watch = self.datastore.data['watching'].get(watch_uuid)
         if not watch:
             return
@@ -369,21 +370,51 @@
         # Should be a better parent getter in the model object

         # Prefer - Individual watch settings > Tag settings > Global settings (in that order)
-        # this change probably not needed?
-        n_object['notification_urls'] = _check_cascading_vars(self.datastore, 'notification_urls', watch)
+        # If the watch has no notification_body for example, it will try to get from the first matching group or system setting
+
+        # Should be, if none in the watch, and no group tag ones found, then use system ones at the end
+        #n_object['notification_urls'] = _check_cascading_vars(self.datastore, 'notification_urls', watch)
+        n_object = NotificationContextData()
         n_object['notification_title'] = _check_cascading_vars(self.datastore,'notification_title', watch)
         n_object['notification_body'] = _check_cascading_vars(self.datastore,'notification_body', watch)
         n_object['notification_format'] = _check_cascading_vars(self.datastore,'notification_format', watch)

+        notification_objects = []
+        if n_object.get('notification_urls'):
+            notification_objects.append(n_object)
+
+        # LOGIC SHOULD BE something that all tests currently pass too
+        # !!! _check_cascading_vars is not really used much, only used here..
+        #
+
+        # If any related group/tag has a notification_url set, then we fan out horizontally and collect it as extra notifications
+        tags = self.datastore.get_all_tags_for_watch(uuid=watch.get('uuid'))
+        logger.debug(f'{len(tags)} related to this watch')
+        if tags:
+            for tag_uuid, tag in tags.items():
+                logger.debug(f"Checking group/tag for notification URLs '{tag['title']}' Muted? '{tag.get('notification_muted')}', URLs {tag.get('notification_urls')}")
+                v = tag.get('notification_urls')
+                if v and not tag.get('notification_muted'):
+                    logger.debug("OK MAN")
+                    next_n_object = deepcopy(n_object)
+                    next_n_object['notification_urls'] = v
+                    next_n_object['notification_title'] = _check_cascading_vars(self.datastore, 'notification_title', watch)
+                    next_n_object['notification_body'] = _check_cascading_vars(self.datastore, 'notification_body', watch)
+                    next_n_object['notification_format'] = _check_cascading_vars(self.datastore, 'notification_format', watch)
+                    notification_objects.append(next_n_object)
+                    logger.debug(f"Adding notification from group/tag {tag['title']}")
+
         # (Individual watch) Only prepare to notify if the rules above matched
         queued = False
-        if n_object and n_object.get('notification_urls'):
+        if notification_objects:
             queued = True

             count = watch.get('notification_alert_count', 0) + 1
             self.datastore.update_watch(uuid=watch_uuid, update_obj={'notification_alert_count': count})

-            self.queue_notification_for_watch(n_object=n_object, watch=watch)
+            for n_object in notification_objects:
+                self.queue_notification_for_watch(n_object=n_object, watch=watch)

         return queued
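Condensed, the behaviour after this change is: queue one notification object for the watch's own notification_urls (if any), plus one per unmuted group/tag that defines its own notification_urls, each carrying the cascaded title/body/format. A hedged sketch of that fan-out with the datastore lookups simplified (names mirror the diff above, but this is not the real method):

```python
# Hedged sketch of the fan-out logic, simplified and standalone.
from copy import deepcopy

def build_notification_objects(base: dict, watch_urls, tags: dict) -> list[dict]:
    """base: cascaded title/body/format; tags: {uuid: tag dict} related to the watch."""
    objects = []
    if watch_urls:                              # the watch's own notification targets
        first = deepcopy(base)
        first['notification_urls'] = watch_urls
        objects.append(first)
    for tag in tags.values():                   # fan out horizontally per group/tag
        urls = tag.get('notification_urls')
        if urls and not tag.get('notification_muted'):
            extra = deepcopy(base)
            extra['notification_urls'] = urls
            objects.append(extra)
    return objects                              # one queue_notification_for_watch() call each
```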
@@ -178,23 +178,44 @@ def test_api_tags_listing(client, live_server, measure_memory_usage, datastore_p

 def test_api_tag_restock_processor_config(client, live_server, measure_memory_usage, datastore_path):
     """
-    Test that a tag/group can be updated with processor_config_restock_diff via the API.
+    Test that a tag/group can be created and updated with processor_config_restock_diff via the API.
     Since Tag extends WatchBase, processor config fields injected into WatchBase are also valid for tags.
     """
     api_key = live_server.app.config['DATASTORE'].data['settings']['application'].get('api_access_token')

     set_original_response(datastore_path=datastore_path)

-    # Create a tag
+    # Create a tag with processor_config_restock_diff in a single POST (issue #3966)
     res = client.post(
         url_for("tag"),
-        data=json.dumps({"title": "Restock Group"}),
+        data=json.dumps({
+            "title": "Restock Group",
+            "overrides_watch": True,
+            "processor_config_restock_diff": {
+                "in_stock_processing": "in_stock_only",
+                "follow_price_changes": True,
+                "price_change_min": 7777777
+            }
+        }),
         headers={'content-type': 'application/json', 'x-api-key': api_key}
     )
-    assert res.status_code == 201
+    assert res.status_code == 201, f"POST tag with restock config failed: {res.data}"
     tag_uuid = res.json.get('uuid')

-    # Update tag with valid processor_config_restock_diff
+    # Verify processor config was saved during creation (the bug: these were discarded)
+    res = client.get(
+        url_for("tag", uuid=tag_uuid),
+        headers={'x-api-key': api_key}
+    )
+    assert res.status_code == 200
+    tag_data = res.json
+    assert tag_data.get('overrides_watch') == True, "overrides_watch should be saved on POST"
+    assert tag_data.get('processor_config_restock_diff', {}).get('in_stock_processing') == 'in_stock_only', \
+        "processor_config_restock_diff should be saved on POST"
+    assert tag_data.get('processor_config_restock_diff', {}).get('price_change_min') == 7777777, \
+        "price_change_min should be saved on POST"
+
+    # Update tag with valid processor_config_restock_diff via PUT
     res = client.put(
         url_for("tag", uuid=tag_uuid),
         headers={'x-api-key': api_key, 'content-type': 'application/json'},
@@ -48,6 +48,15 @@ def test_check_basic_change_detection_functionality(client, live_server, measure
     # Check this class does not appear (that we didnt see the actual source)
     assert b'foobar-detection' not in res.data

+    # Check POST preview
+    res = client.post(
+        url_for("ui.ui_preview.preview_page", uuid="first"),
+        follow_redirects=True
+    )
+    # Check this class does not appear (that we didnt see the actual source)
+    assert b'foobar-detection' not in res.data
+

     # Make a change
     set_modified_response(datastore_path=datastore_path)
@@ -171,6 +171,7 @@ def test_group_tag_notification(client, live_server, measure_memory_usage, datas
     delete_all_watches(client)

     set_original_response(datastore_path=datastore_path)
+    notification_url_endpoint = url_for('test_notification_endpoint', _external=True).replace('http', 'post')

     test_url = url_for('test_endpoint', _external=True)
     res = client.post(
@@ -181,35 +182,50 @@ def test_group_tag_notification(client, live_server, measure_memory_usage, datas

     assert b"Watch added" in res.data

-    notification_url = url_for('test_notification_endpoint', _external=True).replace('http', 'json')
-    notification_form_data = {"notification_urls": notification_url,
-                              "notification_title": "New GROUP TAG ChangeDetection.io Notification - {{watch_url}}",
-                              "notification_body": "BASE URL: {{base_url}}\n"
-                                                   "Watch URL: {{watch_url}}\n"
-                                                   "Watch UUID: {{watch_uuid}}\n"
-                                                   "Watch title: {{watch_title}}\n"
-                                                   "Watch tag: {{watch_tag}}\n"
-                                                   "Preview: {{preview_url}}\n"
-                                                   "Diff URL: {{diff_url}}\n"
-                                                   "Snapshot: {{current_snapshot}}\n"
-                                                   "Diff: {{diff}}\n"
-                                                   "Diff Added: {{diff_added}}\n"
-                                                   "Diff Removed: {{diff_removed}}\n"
-                                                   "Diff Full: {{diff_full}}\n"
-                                                   "Diff as Patch: {{diff_patch}}\n"
-                                                   ":-)",
-                              "notification_screenshot": True,
-                              "notification_format": 'text',
-                              "title": "test-tag"}
+    client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
+    wait_for_all_checks(client)
+
+    group_tag_form_data = {
+        "notification_title": "New GROUP TAG ChangeDetection.io Notification - {{watch_url}}",
+        "notification_body": "BASE URL: {{base_url}}\n"
+                             "Watch URL: {{watch_url}}\n"
+                             "Watch UUID: {{watch_uuid}}\n"
+                             "Watch title: {{watch_title}}\n"
+                             "Watch tag: {{watch_tag}}\n"
+                             "Preview: {{preview_url}}\n"
+                             "Diff URL: {{diff_url}}\n"
+                             "Snapshot: {{current_snapshot}}\n"
+                             "Diff: {{diff}}\n"
+                             "Diff Added: {{diff_added}}\n"
+                             "Diff Removed: {{diff_removed}}\n"
+                             "Diff Full: {{diff_full}}\n"
+                             "Diff as Patch: {{diff_patch}}\n"
+                             ":-)",
+        "notification_screenshot": True,
+        "notification_format": 'text',
+    }
+
+    # Setup for test-tag
+    group_tag_form_data['notification_urls'] = notification_url_endpoint+"?outputfilename=test-tag.txt"
+    group_tag_form_data['title'] = 'test-tag'
     res = client.post(
         url_for("tags.form_tag_edit_submit", uuid=get_UUID_for_tag_name(client, name="test-tag")),
-        data=notification_form_data,
+        data=group_tag_form_data,
         follow_redirects=True
     )
     assert b"Updated" in res.data

+    # Setup for other-tag, we only add notifications-urls
+    group_tag_form_data['notification_urls'] = notification_url_endpoint+"?outputfilename=other-tag.txt"
+    group_tag_form_data['title'] = 'other-tag'
+
+    res = client.post(
+        url_for("tags.form_tag_edit_submit", uuid=get_UUID_for_tag_name(client, name="other-tag")),
+        data=group_tag_form_data,
+        follow_redirects=True
+    )
+    assert b"Updated" in res.data
+
     wait_for_all_checks(client)

     set_modified_response(datastore_path=datastore_path)
     client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
@@ -217,12 +233,14 @@ def test_group_tag_notification(client, live_server, measure_memory_usage, datas

     time.sleep(3)

-    assert os.path.isfile(os.path.join(datastore_path, "notification.txt"))
+    assert os.path.isfile(os.path.join(datastore_path, "test-tag.txt"))
+    assert os.path.isfile(os.path.join(datastore_path, "other-tag.txt"))

+    # @todo assert the group name or other unique body is in other-tag.txt
     # Verify what was sent as a notification, this file should exist
-    with open(os.path.join(datastore_path, "notification.txt"), "r") as f:
+    with open(os.path.join(datastore_path, "test-tag.txt"), "r") as f:
         notification_submission = f.read()
-    os.unlink(os.path.join(datastore_path, "notification.txt"))
+    os.unlink(os.path.join(datastore_path, "test-tag.txt"))

     # Did we see the URL that had a change, in the notification?
     # Diff was correctly executed
@@ -343,8 +343,11 @@ def new_live_server_setup(live_server):
     @live_server.app.route('/test_notification_endpoint', methods=['POST', 'GET'])
     def test_notification_endpoint():
         datastore_path = current_app.config.get('TEST_DATASTORE_PATH', 'test-datastore')

-        with open(os.path.join(datastore_path, "notification.txt"), "wb") as f:
+        from loguru import logger
+        # @todo make safe
+        fname = request.args.get('outputfilename', "notification.txt")
+        logger.debug(f"Writing test notification endpoint data to '{fname}' - {request.args}")
+        with open(os.path.join(datastore_path, fname), "wb") as f:
             # Debug method, dump all POST to file also, used to prove #65
             data = request.stream.read()
             if data != None:
BIN  changedetectionio/translations/es/LC_MESSAGES/messages.mo (new file; binary file not shown)
3620 changedetectionio/translations/es/LC_MESSAGES/messages.po (new file; diff suppressed because it is too large)
@@ -100,6 +100,19 @@ def is_safe_valid_url(test_url):
         logger.warning('URL validation failed: URL is empty or whitespace only')
         return False

+    # Per-request cache: same URL is often validated 2-3x per watchlist render (sort + display).
+    # Flask's g is scoped to one request and auto-cleared on teardown, so dynamic Jinja2 URLs
+    # like {{microtime()}} are always re-evaluated on the next request.
+    # Falls back gracefully when called outside a request context (e.g. background workers).
+    _cache_key = test_url
+    try:
+        from flask import g
+        _cache = g.setdefault('_url_validation_cache', {})
+        if _cache_key in _cache:
+            return _cache[_cache_key]
+    except RuntimeError:
+        _cache = None  # No app context
+
     allow_file_access = strtobool(os.getenv('ALLOW_FILE_URI', 'false'))
     safe_protocol_regex = '^(http|https|ftp|file):' if allow_file_access else '^(http|https|ftp):'
@@ -112,11 +125,14 @@ def is_safe_valid_url(test_url):
     test_url = r.sub('', test_url)

     # Check the actual rendered URL in case of any Jinja markup
-    try:
-        test_url = jinja_render(test_url)
-    except Exception as e:
-        logger.error(f'URL "{test_url}" is not correct Jinja2? {str(e)}')
-        return False
+    # Only run jinja_render when the URL actually contains Jinja2 syntax - creating a new
+    # ImmutableSandboxedEnvironment is expensive and is called once per watch per page load
+    if '{%' in test_url or '{{' in test_url:
+        try:
+            test_url = jinja_render(test_url)
+        except Exception as e:
+            logger.error(f'URL "{test_url}" is not correct Jinja2? {str(e)}')
+            return False

     # Check query parameters and fragment
     if re.search(r'[<>]', test_url):
@@ -142,4 +158,6 @@ def is_safe_valid_url(test_url):
         logger.warning(f'URL f"{test_url}" failed validation, aborting.')
         return False

+    if _cache is not None:
+        _cache[_cache_key] = True
     return True
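The same per-request memoisation pattern generalises to any pure, frequently repeated check during a single render. A hedged decorator sketch of the idea, which caches in flask.g while a request is active and simply recomputes outside one (background workers):

```python
# Hedged sketch: per-request memoisation via flask.g, no-op outside an app context.
import functools
from flask import g, has_app_context

def per_request_cache(fn):
    @functools.wraps(fn)
    def wrapper(arg):
        if has_app_context():
            cache = g.setdefault(f'_cache_{fn.__name__}', {})
            if arg not in cache:
                cache[arg] = fn(arg)   # computed at most once per request per argument
            return cache[arg]
        return fn(arg)                 # background workers: just compute
    return wrapper
```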