Mirror of https://github.com/dgtlmoon/changedetection.io.git (synced 2025-11-12 20:46:17 +00:00)

Compare commits: 0.43 ... reset-prev (9 commits)
Commits in this comparison:
- d4a85976e6
- 1987e109e8
- 20d65cdd26
- 37ff5f6d37
- 2f777ea3bb
- e709201955
- 572f71299f
- 5f150c4f03
- 8cbf8e8f57

@@ -1,2 +1,18 @@
 .git
 .github
+changedetectionio/processors/__pycache__
+changedetectionio/api/__pycache__
+changedetectionio/model/__pycache__
+changedetectionio/blueprint/price_data_follower/__pycache__
+changedetectionio/blueprint/tags/__pycache__
+changedetectionio/blueprint/__pycache__
+changedetectionio/blueprint/browser_steps/__pycache__
+changedetectionio/fetchers/__pycache__
+changedetectionio/tests/visualselector/__pycache__
+changedetectionio/tests/restock/__pycache__
+changedetectionio/tests/__pycache__
+changedetectionio/tests/fetchers/__pycache__
+changedetectionio/tests/unit/__pycache__
+changedetectionio/tests/proxy_list/__pycache__
+changedetectionio/__pycache__
+

.github/workflows/test-only.yml (vendored, 10 lines changed)
@@ -37,6 +37,11 @@ jobs:
           # Build a changedetection.io container and start testing inside
           docker build . -t test-changedetectionio
 
+      - name: Spin up ancillary SMTP+Echo message test server
+        run: |
+          # Debug SMTP server/echo message back server
+          docker run --network changedet-network -d -p 11025:11025 -p 11080:11080 --hostname mailserver test-changedetectionio bash -c 'python changedetectionio/tests/smtp/smtp-test-server.py'
+
       - name: Test built container with pytest
         run: |
 
@@ -63,6 +68,11 @@ jobs:
           # restock detection via playwright - added name=changedet here so that playwright/browserless can connect to it
           docker run --rm --name "changedet" -e "FLASK_SERVER_NAME=changedet" -e "PLAYWRIGHT_DRIVER_URL=ws://browserless:3000" --network changedet-network test-changedetectionio bash -c 'cd changedetectionio;pytest --live-server-port=5004 --live-server-host=0.0.0.0 tests/restock/test_restock.py'
 
+      - name: Test SMTP notification mime types
+        run: |
+          # SMTP content types - needs the 'Debug SMTP server/echo message back server' container from above
+          docker run --rm --network changedet-network test-changedetectionio bash -c 'cd changedetectionio;pytest tests/smtp/test_notification_smtp.py'
+
       - name: Test with puppeteer fetcher and disk cache
         run: |
           docker run --rm -e "PUPPETEER_DISK_CACHE=/tmp/data/" -e "USE_EXPERIMENTAL_PUPPETEER_FETCH=yes" -e "PLAYWRIGHT_DRIVER_URL=ws://browserless:3000" --network changedet-network test-changedetectionio bash -c 'cd changedetectionio;pytest tests/fetchers/test_content.py && pytest tests/test_errorhandling.py && pytest tests/visualselector/test_fetch_data.py'

@@ -13,3 +13,6 @@ include changedetection.py
 global-exclude *.pyc
 global-exclude node_modules
 global-exclude venv
+
+global-exclude test-datastore
+global-exclude changedetection.io*dist-info

@@ -38,7 +38,7 @@ from flask_paginate import Pagination, get_page_parameter
 from changedetectionio import html_tools
 from changedetectionio.api import api_v1
 
-__version__ = '0.43'
+__version__ = '0.43.2'
 
 datastore = None
 
@@ -1364,7 +1364,8 @@ def changedetection_app(config=None, datastore_o=None):
             flash("{} watches set to use default notification settings".format(len(uuids)))
 
         elif (op == 'assign-tag'):
-            op_extradata = request.form.get('op_extradata')
+            op_extradata = request.form.get('op_extradata', '').strip()
+            if op_extradata:
                 tag_uuid = datastore.add_tag(name=op_extradata)
                 if op_extradata and tag_uuid:
                     for uuid in uuids:
@@ -1438,6 +1439,10 @@ def changedetection_app(config=None, datastore_o=None):
     import changedetectionio.blueprint.tags as tags
     app.register_blueprint(tags.construct_blueprint(datastore), url_prefix='/tags')
 
+    import changedetectionio.blueprint.check_proxies as check_proxies
+    app.register_blueprint(check_proxies.construct_blueprint(datastore=datastore), url_prefix='/check_proxy')
+
+
     # @todo handle ctrl break
     ticker_thread = threading.Thread(target=ticker_thread_check_time_launch_checks).start()
     threading.Thread(target=notification_runner).start()

@@ -219,13 +219,15 @@ class CreateWatch(Resource):
 
         extras = copy.deepcopy(json_data)
 
-        # Because we renamed 'tag' to 'tags' but dont want to change the API (can do this in v2 of the API)
+        # Because we renamed 'tag' to 'tags' but don't want to change the API (can do this in v2 of the API)
+        tags = None
         if extras.get('tag'):
-            extras['tags'] = extras.get('tag')
+            tags = extras.get('tag')
+            del extras['tag']
 
         del extras['url']
 
-        new_uuid = self.datastore.add_watch(url=url, extras=extras)
+        new_uuid = self.datastore.add_watch(url=url, extras=extras, tag=tags)
         if new_uuid:
             self.update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': new_uuid, 'skip_when_checksum_same': True}))
             return {'uuid': new_uuid}, 201

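For context, a minimal sketch of how a client can exercise this compatibility shim; the base URL and API key below are placeholders rather than values from the diff (changedetection.io reads the key from the x-api-key header):

import requests

# Hypothetical local instance and key, for illustration only.
API_BASE = "http://localhost:5000/api/v1"
HEADERS = {"x-api-key": "YOUR-API-KEY"}

# The legacy 'tag' field is still accepted; after this change the server removes it
# from 'extras' and forwards it separately as add_watch(..., tag=...).
payload = {"url": "https://example.com", "tag": "nice one"}

res = requests.post(f"{API_BASE}/watch", json=payload, headers=HEADERS)
print(res.status_code, res.json())  # expect 201 and {'uuid': '...'}
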
changedetectionio/blueprint/check_proxies/__init__.py (new file, 105 lines)
@@ -0,0 +1,105 @@
from concurrent.futures import ThreadPoolExecutor

from functools import wraps

from flask import Blueprint
from flask_login import login_required

from changedetectionio.processors import text_json_diff
from changedetectionio.store import ChangeDetectionStore


STATUS_CHECKING = 0
STATUS_FAILED = 1
STATUS_OK = 2
THREADPOOL_MAX_WORKERS = 3
_DEFAULT_POOL = ThreadPoolExecutor(max_workers=THREADPOOL_MAX_WORKERS)


# Maybe use fetch-time if its >5 to show some expected load time?
def threadpool(f, executor=None):
    @wraps(f)
    def wrap(*args, **kwargs):
        return (executor or _DEFAULT_POOL).submit(f, *args, **kwargs)

    return wrap


def construct_blueprint(datastore: ChangeDetectionStore):
    check_proxies_blueprint = Blueprint('check_proxies', __name__)
    checks_in_progress = {}

    @threadpool
    def long_task(uuid, preferred_proxy):
        import time
        from changedetectionio import content_fetcher

        status = {'status': '', 'length': 0, 'text': ''}
        from jinja2 import Environment, BaseLoader

        contents = ''
        now = time.time()
        try:
            update_handler = text_json_diff.perform_site_check(datastore=datastore)
            changed_detected, update_obj, contents = update_handler.run(uuid, preferred_proxy=preferred_proxy, skip_when_checksum_same=False)
            # title, size is len contents not len xfer
        except content_fetcher.Non200ErrorCodeReceived as e:
            if e.status_code == 404:
                status.update({'status': 'OK', 'length': len(contents), 'text': f"OK but 404 (page not found)"})
            elif e.status_code == 403:
                status.update({'status': 'ERROR', 'length': len(contents), 'text': f"403 - Access denied"})
            else:
                status.update({'status': 'ERROR', 'length': len(contents), 'text': f"Status code: {e.status_code}"})
        except content_fetcher.EmptyReply as e:
            status.update({'status': 'ERROR OTHER', 'length': len(contents) if contents else 0, 'text': "Empty reply, needs chrome?"})
        except Exception as e:
            status.update({'status': 'ERROR OTHER', 'length': len(contents) if contents else 0, 'text': 'Error: '+str(e)})
        else:
            status.update({'status': 'OK', 'length': len(contents), 'text': ''})

        if status.get('text'):
            status['text'] = Environment(loader=BaseLoader()).from_string('{{text|e}}').render({'text': status['text']})

        status['time'] = "{:.2f}s".format(time.time() - now)

        return status

    def _recalc_check_status(uuid):

        results = {}
        for k, v in checks_in_progress.get(uuid, {}).items():
            try:
                r_1 = v.result(timeout=0.05)
            except Exception as e:
                # If timeout error?
                results[k] = {'status': 'RUNNING'}

            else:
                results[k] = r_1

        return results

    @login_required
    @check_proxies_blueprint.route("/<string:uuid>/status", methods=['GET'])
    def get_recheck_status(uuid):
        results = _recalc_check_status(uuid=uuid)
        return results

    @login_required
    @check_proxies_blueprint.route("/<string:uuid>/start", methods=['GET'])
    def start_check(uuid):

        if not datastore.proxy_list:
            return

        # @todo - Cancel any existing runs
        checks_in_progress[uuid] = {}

        for k, v in datastore.proxy_list.items():
            if not checks_in_progress[uuid].get(k):
                checks_in_progress[uuid][k] = long_task(uuid=uuid, preferred_proxy=k)

        results = _recalc_check_status(uuid=uuid)
        return results

    return check_proxies_blueprint

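A note on the pattern above: the threadpool decorator submits the wrapped call to a shared ThreadPoolExecutor and immediately returns a concurrent.futures.Future, which is why _recalc_check_status() can poll each entry with result(timeout=0.05) and treat a timeout as "still RUNNING". A minimal, self-contained sketch of that behaviour (names here are illustrative, not part of the blueprint):

from concurrent.futures import ThreadPoolExecutor, TimeoutError as FutureTimeout
from functools import wraps
import time

_POOL = ThreadPoolExecutor(max_workers=3)

def threadpool(f):
    # Calling the decorated function returns a Future instead of blocking.
    @wraps(f)
    def wrap(*args, **kwargs):
        return _POOL.submit(f, *args, **kwargs)
    return wrap

@threadpool
def slow_check(name):
    time.sleep(1)
    return {'status': 'OK', 'text': '', 'time': '1.00s'}

future = slow_check('proxy-one')
try:
    # A very short timeout behaves like the blueprint's polling loop:
    # if the work is not finished yet, report it as still running.
    print(future.result(timeout=0.05))
except FutureTimeout:
    print({'status': 'RUNNING'})

print(future.result())  # blocking wait for the real result
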
@@ -76,6 +76,16 @@ def construct_blueprint(datastore: ChangeDetectionStore):
         flash(f"Tag unlinked removed from {unlinked} watches")
         return redirect(url_for('tags.tags_overview_page'))
 
+    @tags_blueprint.route("/delete_all", methods=['GET'])
+    @login_optionally_required
+    def delete_all():
+        for watch_uuid, watch in datastore.data['watching'].items():
+            watch['tags'] = []
+        datastore.data['settings']['application']['tags'] = {}
+
+        flash(f"All tags deleted")
+        return redirect(url_for('tags.tags_overview_page'))
+
     @tags_blueprint.route("/edit/<string:uuid>", methods=['GET'])
     @login_optionally_required
     def form_tag_edit(uuid):

@@ -54,4 +54,5 @@ def render_diff(previous_version_file_contents, newest_version_file_contents, in
 
     # Recursively join lists
     f = lambda L: line_feed_sep.join([f(x) if type(x) is list else x for x in L])
-    return f(rendered_diff)
+    p= f(rendered_diff)
+    return p

@@ -85,6 +85,7 @@ class import_distill_io_json(Importer):
         now = time.time()
         self.new_uuids=[]
 
+        # @todo Use JSONSchema like in the API to validate here.
 
         try:
             data = json.loads(data.strip())
@@ -120,11 +121,8 @@
             except IndexError:
                 pass
 
-            # Does this need to be here anymore?
-            if d.get('tags', False):
-                extras['tags'] = ", ".join(d['tags'])
-
             new_uuid = datastore.add_watch(url=d['uri'].strip(),
+                                           tag=",".join(d.get('tags', [])),
                                            extras=extras,
                                            write_to_disk_now=False)
 

@@ -93,6 +93,12 @@ def process_notification(n_object, datastore):
                                       valid_notification_formats[default_notification_format],
                                       )
 
+    # If we arrived with 'System default' then look it up
+    if n_format == default_notification_format_for_watch and datastore.data['settings']['application'].get('notification_format') != default_notification_format_for_watch:
+        # Initially text or whatever
+        n_format = datastore.data['settings']['application'].get('notification_format', valid_notification_formats[default_notification_format])
+
+
     # https://github.com/caronc/apprise/wiki/Development_LogCapture
     # Anything higher than or equal to WARNING (which covers things like Connection errors)
     # raise it as an exception
@@ -145,9 +151,12 @@ def process_notification(n_object, datastore):
         # Apprise will default to HTML, so we need to override it
         # So that whats' generated in n_body is in line with what is going to be sent.
         # https://github.com/caronc/apprise/issues/633#issuecomment-1191449321
-        if not 'format=' in url and (n_format == 'text' or n_format == 'markdown'):
+        if not 'format=' in url and (n_format == 'Text' or n_format == 'Markdown'):
             prefix = '?' if not '?' in url else '&'
+            # Apprise format is lowercase text https://github.com/caronc/apprise/issues/633
+            n_format = n_format.tolower()
             url = "{}{}format={}".format(url, prefix, n_format)
+        # If n_format == HTML, then apprise email should default to text/html and we should be sending HTML only
 
         apobj.add(url)
 

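The effect of that block is simply to pin an explicit format= query parameter onto Apprise URLs that do not already carry one (note that Python strings provide .lower(); the .tolower() spelling in the hunk above would raise AttributeError when that branch runs). A rough standalone sketch of the intended URL handling, not the project's own function:

def with_apprise_format(url, n_format):
    # Append e.g. '?format=text' unless the URL already specifies a format.
    # Apprise expects the value in lowercase, see https://github.com/caronc/apprise/issues/633
    if 'format=' in url or n_format not in ('Text', 'Markdown'):
        return url
    prefix = '?' if '?' not in url else '&'
    return "{}{}format={}".format(url, prefix, n_format.lower())

print(with_apprise_format('mailto://user:pass@example.com', 'Text'))
# mailto://user:pass@example.com?format=text
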
@@ -9,7 +9,7 @@ class difference_detection_processor():
         super().__init__(*args, **kwargs)
 
     @abstractmethod
-    def run(self, uuid, skip_when_checksum_same=True):
+    def run(self, uuid, skip_when_checksum_same=True, preferred_proxy=None):
         update_obj = {'last_notification_error': False, 'last_error': False}
         some_data = 'xxxxx'
         update_obj["previous_md5"] = hashlib.md5(some_data.encode('utf-8')).hexdigest()

@@ -50,7 +50,7 @@ class perform_site_check(difference_detection_processor):
 
         return regex
 
-    def run(self, uuid, skip_when_checksum_same=True):
+    def run(self, uuid, skip_when_checksum_same=True, preferred_proxy=None):
         changed_detected = False
         screenshot = False # as bytes
         stripped_text_from_html = ""
@@ -105,7 +105,11 @@ class perform_site_check(difference_detection_processor):
             # If the klass doesnt exist, just use a default
             klass = getattr(content_fetcher, "html_requests")
 
-        proxy_id = self.datastore.get_preferred_proxy_for_watch(uuid=uuid)
+        if preferred_proxy:
+            proxy_id = preferred_proxy
+        else:
+            proxy_id = self.datastore.get_preferred_proxy_for_watch(uuid=uuid)
+
         proxy_url = None
         if proxy_id:
             proxy_url = self.datastore.proxy_list.get(proxy_id).get('url')

changedetectionio/static/js/recheck-proxy.js (new file, 87 lines)
@@ -0,0 +1,87 @@
$(function () {
    /* add container before each proxy location to show status */

    var option_li = $('.fetch-backend-proxy li').filter(function() {
        return $("input",this)[0].value.length >0;
    });

    //var option_li = $('.fetch-backend-proxy li');
    var isActive = false;
    $(option_li).prepend('<div class="proxy-status"></div>');
    $(option_li).append('<div class="proxy-timing"></div><div class="proxy-check-details"></div>');

    function set_proxy_check_status(proxy_key, state) {
        // select input by value name
        const proxy_li = $("input[value=" + proxy_key + "]").parent();
        if (state['status'] === 'RUNNING') {
            $('.proxy-status', proxy_li).html('<span class="spinner"></span>');
        }
        if (state['status'] === 'OK') {
            $('.proxy-status', proxy_li).html('<span style="color: green; font-weight: bold" >OK</span>');
            $('.proxy-check-details', proxy_li).html(state['text']);
        }
        if (state['status'] === 'ERROR' || state['status'] === 'ERROR OTHER') {
            $('.proxy-status', proxy_li).html('<span style="color: red; font-weight: bold" >X</span>');
            $('.proxy-check-details', proxy_li).html(state['text']);
        }
        $('.proxy-timing', proxy_li).html(state['time']);
    }


    function pollServer() {
        if (isActive) {
            window.setTimeout(function () {
                $.ajax({
                    url: proxy_recheck_status_url,
                    success: function (data) {
                        var all_done = true;
                        $.each(data, function (proxy_key, state) {
                            set_proxy_check_status(proxy_key, state);
                            if (state['status'] === 'RUNNING') {
                                all_done = false;
                            }
                        });

                        if (all_done) {
                            console.log("Shutting down poller, all done.")
                            isActive = false;
                        } else {
                            pollServer();
                        }
                    },
                    error: function () {
                        //ERROR HANDLING
                        pollServer();
                    }
                });
            }, 2000);
        }
    }

    $('#check-all-proxies').click(function (e) {
        e.preventDefault()
        $('body').addClass('proxy-check-active');
        $('.proxy-check-details').html('');
        $('.proxy-status').html('<span class="spinner"></span>').fadeIn();
        $('.proxy-timing').html('');

        // Request start, needs CSRF?
        $.ajax({
            type: "GET",
            url: recheck_proxy_start_url,
        }).done(function (data) {
            $.each(data, function (proxy_key, state) {
                set_proxy_check_status(proxy_key, state['status'])
            });
            isActive = true;
            pollServer();

        }).fail(function (data) {
            console.log(data);
            alert('There was an error communicating with the server.');
        });

    });

});

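The two URLs this script polls (recheck_proxy_start_url and proxy_recheck_status_url, wired up in the edit template further below) return a JSON object keyed by proxy id, built from the status dict assembled in the check_proxies blueprint. An illustrative payload shape, with made-up values:

# Illustrative shape of the JSON consumed by set_proxy_check_status()
example_status = {
    "proxy-one":   {"status": "OK", "length": 5120, "text": "", "time": "2.31s"},
    "proxy-two":   {"status": "RUNNING"},
    "proxy-three": {"status": "ERROR", "length": 0, "text": "403 - Access denied", "time": "1.02s"},
}
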
@@ -7,6 +7,7 @@ ul#requests-extra_proxies {
     }
 
   }
+
   /* each proxy entry is a `table` */
   table {
     tr {
@@ -15,3 +16,30 @@ ul#requests-extra_proxies {
     }
   }
 
+#request {
+  /* Auto proxy scan/checker */
+  label[for=proxy] {
+    display: inline-block;
+  }
+}
+
+body.proxy-check-active {
+  #request {
+    .proxy-status {
+      width: 2em;
+    }
+
+    .proxy-check-details {
+      font-size: 80%;
+      color: #555;
+      display: block;
+      padding-left: 4em;
+    }
+
+    .proxy-timing {
+      font-size: 80%;
+      padding-left: 1rem;
+      color: var(--color-link);
+    }
+  }
+}

@@ -95,6 +95,25 @@ ul#requests-extra_proxies {
 ul#requests-extra_proxies table tr {
   display: inline; }
 
+#request {
+  /* Auto proxy scan/checker */ }
+  #request label[for=proxy] {
+    display: inline-block; }
+
+body.proxy-check-active #request .proxy-status {
+  width: 2em; }
+
+body.proxy-check-active #request .proxy-check-details {
+  font-size: 80%;
+  color: #555;
+  display: block;
+  padding-left: 4em; }
+
+body.proxy-check-active #request .proxy-timing {
+  font-size: 80%;
+  padding-left: 1rem;
+  color: var(--color-link); }
+
 .pagination-page-info {
   color: #fff;
   font-size: 0.85rem;
@@ -283,10 +302,6 @@ html[data-darkmode="true"] {
   --color-icon-github-hover: var(--color-grey-700);
   --color-watch-table-error: var(--color-light-red);
   --color-watch-table-row-text: var(--color-grey-800); }
-  html[data-darkmode="true"] #toggle-light-mode .icon-light {
-    display: none; }
-  html[data-darkmode="true"] #toggle-light-mode .icon-dark {
-    display: block; }
 html[data-darkmode="true"] .icon-spread {
   filter: hue-rotate(-10deg) brightness(1.5); }
 html[data-darkmode="true"] .watch-table .title-col a[target="_blank"]::after,
@@ -339,6 +354,10 @@ a.github-link {
   width: 3rem; }
 #toggle-light-mode .icon-dark {
   display: none; }
+#toggle-light-mode.dark .icon-light {
+  display: none; }
+#toggle-light-mode.dark .icon-dark {
+  display: block; }
 
 #toggle-search {
   width: 2rem; }

@@ -205,10 +205,9 @@ class ChangeDetectionStore:
 
     # Clone a watch by UUID
    def clone(self, uuid):
-        url = self.data['watching'][uuid]['url']
-        tag = self.data['watching'][uuid].get('tags',[])
+        url = self.data['watching'][uuid].get('url')
         extras = self.data['watching'][uuid]
-        new_uuid = self.add_watch(url=url, tag_uuids=tag, extras=extras)
+        new_uuid = self.add_watch(url=url, extras=extras)
         return new_uuid
 
     def url_exists(self, url):
@@ -248,12 +247,9 @@ class ChangeDetectionStore:
         if extras is None:
             extras = {}
 
-        # should always be str
-        if tag is None or not tag:
-            tag = ''
-
         # Incase these are copied across, assume it's a reference and deepcopy()
         apply_extras = deepcopy(extras)
+        apply_extras['tags'] = [] if not apply_extras.get('tags') else apply_extras.get('tags')
 
         # Was it a share link? try to fetch the data
         if (url.startswith("https://changedetection.io/share/")):
@@ -303,20 +299,22 @@ class ChangeDetectionStore:
             flash('Watch protocol is not permitted by SAFE_PROTOCOL_REGEX', 'error')
             return None
 
-        # #Re 569
-        # Could be in 'tags', var or extras, smash them together and strip
-        apply_extras['tags'] = []
-        if tag or extras.get('tags'):
-            tags = list(filter(None, list(set().union(tag.split(','), extras.get('tags', '').split(',')))))
-            for t in list(map(str.strip, tags)):
+        if tag and type(tag) == str:
+            # Then it's probably a string of the actual tag by name, split and add it
+            for t in tag.split(','):
                 # for each stripped tag, add tag as UUID
-                apply_extras['tags'].append(self.add_tag(t))
+                for a_t in t.split(','):
+                    tag_uuid = self.add_tag(a_t)
+                    apply_extras['tags'].append(tag_uuid)
 
         # Or if UUIDs given directly
         if tag_uuids:
             apply_extras['tags'] = list(set(apply_extras['tags'] + tag_uuids))
 
+        # Make any uuids unique
+        if apply_extras.get('tags'):
+            apply_extras['tags'] = list(set(apply_extras.get('tags')))
 
         new_watch = Watch.model(datastore_path=self.datastore_path, url=url)
 
         new_uuid = new_watch.get('uuid')
@@ -566,9 +564,12 @@ class ChangeDetectionStore:
         return ret
 
     def add_tag(self, name):
-        print (">>> Adding new tag -", name)
         # If name exists, return that
         n = name.strip().lower()
+        print (f">>> Adding new tag - '{n}'")
+        if not n:
+            return False
+
         for uuid, tag in self.__data['settings']['application'].get('tags', {}).items():
            if n == tag.get('title', '').lower().strip():
                print (f">>> Tag {name} already exists")

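Taken together, add_watch() now accepts either a comma-separated tag string (tag=) or a list of tag UUIDs (tag_uuids=), resolves names to UUIDs through add_tag(), and de-duplicates the final list. A rough standalone sketch of that normalisation, using a plain dict in place of the real datastore (all names below are illustrative):

import uuid as uuid_lib

# Stand-in for datastore.data['settings']['application']['tags']
existing_tags = {}  # tag uuid -> {'title': ...}

def add_tag(name):
    # Return the UUID of an existing tag with this title, or create a new one.
    n = name.strip().lower()
    if not n:
        return False
    for tag_uuid, tag in existing_tags.items():
        if n == tag.get('title', '').lower().strip():
            return tag_uuid
    new_uuid = str(uuid_lib.uuid4())
    existing_tags[new_uuid] = {'title': name.strip()}
    return new_uuid

def resolve_tags(tag=None, tag_uuids=None):
    # Mirrors the add_watch() behaviour: names become UUIDs, UUIDs pass through,
    # and the combined list is made unique.
    tags = []
    if tag and type(tag) == str:
        for t in tag.split(','):
            tag_uuid = add_tag(t)
            if tag_uuid:
                tags.append(tag_uuid)
    if tag_uuids:
        tags = list(set(tags + tag_uuids))
    return list(set(tags))

first = resolve_tags(tag="nice one, another-tag")
second = resolve_tags(tag="nice one")  # re-uses the existing 'nice one' UUID
print(len(first), len(second))  # 2 1
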
@@ -1,7 +1,6 @@
 {% macro render_field(field) %}
-  <div {% if field.errors %} class="error" {% endif %}>{{ field(**kwargs)|safe }}
   <div {% if field.errors %} class="error" {% endif %}>{{ field.label }}</div>
+  <div {% if field.errors %} class="error" {% endif %}>{{ field(**kwargs)|safe }}
   {% if field.errors %}
     <ul class=errors>
     {% for error in field.errors %}
@@ -25,18 +24,6 @@
   </div>
 {% endmacro %}
 
-{% macro render_field(field) %}
-  <div {% if field.errors %} class="error" {% endif %}>{{ field.label }}</div>
-  <div {% if field.errors %} class="error" {% endif %}>{{ field(**kwargs)|safe }}
-  {% if field.errors %}
-    <ul class=errors>
-    {% for error in field.errors %}
-      <li>{{ error }}</li>
-    {% endfor %}
-    </ul>
-  {% endif %}
-  </div>
-{% endmacro %}
 
 {% macro render_simple_field(field) %}
   <span class="label {% if field.errors %}error{% endif %}">{{ field.label }}</span>

@@ -4,18 +4,19 @@
 {% from '_common_fields.jinja' import render_common_settings_form %}
 <script src="{{url_for('static_content', group='js', filename='tabs.js')}}" defer></script>
 <script>
-    const notification_base_url="{{url_for('ajax_callback_send_notification_test')}}";
-    const watch_visual_selector_data_url="{{url_for('static_content', group='visual_selector_data', filename=uuid)}}";
-    const screenshot_url="{{url_for('static_content', group='screenshot', filename=uuid)}}";
-    const playwright_enabled={% if playwright_enabled %} true {% else %} false {% endif %};
-
-    {% if emailprefix %}
-    const email_notification_prefix=JSON.parse('{{ emailprefix|tojson }}');
-    {% endif %}
 
     const browser_steps_config=JSON.parse('{{ browser_steps_config|tojson }}');
     const browser_steps_start_url="{{url_for('browser_steps.browsersteps_start_session', uuid=uuid)}}";
     const browser_steps_sync_url="{{url_for('browser_steps.browsersteps_ui_update', uuid=uuid)}}";
+    {% if emailprefix %}
+    const email_notification_prefix=JSON.parse('{{ emailprefix|tojson }}');
+    {% endif %}
+    const notification_base_url="{{url_for('ajax_callback_send_notification_test')}}";
+    const playwright_enabled={% if playwright_enabled %} true {% else %} false {% endif %};
+    const recheck_proxy_start_url="{{url_for('check_proxies.start_check', uuid=uuid)}}";
+    const proxy_recheck_status_url="{{url_for('check_proxies.get_recheck_status', uuid=uuid)}}";
+    const screenshot_url="{{url_for('static_content', group='screenshot', filename=uuid)}}";
+    const watch_visual_selector_data_url="{{url_for('static_content', group='visual_selector_data', filename=uuid)}}";
 
 </script>
 
@@ -27,6 +28,8 @@
 <script src="{{url_for('static_content', group='js', filename='browser-steps.js')}}" defer></script>
 {% endif %}
 
+<script src="{{url_for('static_content', group='js', filename='recheck-proxy.js')}}" defer></script>
+
 <div class="edit-form monospaced-textarea">
 
   <div class="tabs collapsable">
@@ -111,7 +114,8 @@
                         </div>
                         {% if form.proxy %}
                         <div class="pure-control-group inline-radio">
-                            {{ render_field(form.proxy, class="fetch-backend-proxy") }}
+                            <div>{{ form.proxy.label }} <a href="" id="check-all-proxies" class="pure-button button-secondary button-xsmall" >Check/Scan all</a></div>
+                            <div>{{ form.proxy(class="fetch-backend-proxy") }}</div>
                             <span class="pure-form-message-inline">
                                 Choose a proxy for this watch
                             </span>

changedetectionio/tests/smtp/smtp-test-server.py (new file, executable, 42 lines)
@@ -0,0 +1,42 @@
#!/usr/bin/python3
import smtpd
import asyncore

# Accept a SMTP message and offer a way to retrieve the last message via TCP Socket

last_received_message = b"Nothing"


class CustomSMTPServer(smtpd.SMTPServer):

    def process_message(self, peer, mailfrom, rcpttos, data, **kwargs):
        global last_received_message
        last_received_message = data
        print('Receiving message from:', peer)
        print('Message addressed from:', mailfrom)
        print('Message addressed to :', rcpttos)
        print('Message length :', len(data))
        print(data.decode('utf8'))
        return


# Just print out the last message received on plain TCP socket server
class EchoServer(asyncore.dispatcher):

    def __init__(self, host, port):
        asyncore.dispatcher.__init__(self)
        self.create_socket()
        self.set_reuse_addr()
        self.bind((host, port))
        self.listen(5)

    def handle_accepted(self, sock, addr):
        global last_received_message
        print('Incoming connection from %s' % repr(addr))
        sock.send(last_received_message)
        last_received_message = b''


server = CustomSMTPServer(('0.0.0.0', 11025), None)  # SMTP mail goes here
server2 = EchoServer('0.0.0.0', 11080)  # Echo back last message received
asyncore.loop()

changedetectionio/tests/smtp/test_notification_smtp.py (new file, 165 lines)
@@ -0,0 +1,165 @@
import json
import os
import time
import re
from flask import url_for
from changedetectionio.tests.util import set_original_response, set_modified_response, set_more_modified_response, live_server_setup, \
    wait_for_all_checks, \
    set_longer_modified_response
from changedetectionio.tests.util import extract_UUID_from_client
import logging
import base64

# NOTE - RELIES ON mailserver as hostname running, see github build recipes
smtp_test_server = 'mailserver'

from changedetectionio.notification import (
    default_notification_body,
    default_notification_format,
    default_notification_title,
    valid_notification_formats,
)

def test_setup(live_server):
    live_server_setup(live_server)

def get_last_message_from_smtp_server():
    import socket
    global smtp_test_server
    port = 11080  # socket server port number

    client_socket = socket.socket()  # instantiate
    client_socket.connect((smtp_test_server, port))  # connect to the server

    data = client_socket.recv(50024).decode()  # receive response
    client_socket.close()  # close the connection
    return data


# Requires running the test SMTP server

def test_check_notification_email_formats_default_HTML(client, live_server):
    # live_server_setup(live_server)
    set_original_response()

    global smtp_test_server
    notification_url = f'mailto://changedetection@{smtp_test_server}:11025/?to=fff@home.com'

    #####################
    # Set this up for when we remove the notification from the watch, it should fallback with these details
    res = client.post(
        url_for("settings_page"),
        data={"application-notification_urls": notification_url,
              "application-notification_title": "fallback-title " + default_notification_title,
              "application-notification_body": "fallback-body<br> " + default_notification_body,
              "application-notification_format": 'HTML',
              "requests-time_between_check-minutes": 180,
              'application-fetch_backend': "html_requests"},
        follow_redirects=True
    )
    assert b"Settings updated." in res.data

    # Add a watch and trigger a HTTP POST
    test_url = url_for('test_endpoint', _external=True)
    res = client.post(
        url_for("form_quick_watch_add"),
        data={"url": test_url, "tags": 'nice one'},
        follow_redirects=True
    )

    assert b"Watch added" in res.data

    wait_for_all_checks(client)
    set_longer_modified_response()
    client.get(url_for("form_watch_checknow"), follow_redirects=True)
    wait_for_all_checks(client)

    time.sleep(3)

    msg = get_last_message_from_smtp_server()
    assert len(msg) >= 1

    # The email should have two bodies, and the text/html part should be <br>
    assert 'Content-Type: text/plain' in msg
    assert '(added) So let\'s see what happens.\n' in msg  # The plaintext part with \n
    assert 'Content-Type: text/html' in msg
    assert '(added) So let\'s see what happens.<br>' in msg  # the html part
    res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
    assert b'Deleted' in res.data


def test_check_notification_email_formats_default_Text_override_HTML(client, live_server):
    # live_server_setup(live_server)

    # HTML problems? see this
    # https://github.com/caronc/apprise/issues/633

    set_original_response()
    global smtp_test_server
    notification_url = f'mailto://changedetection@{smtp_test_server}:11025/?to=fff@home.com'

    #####################
    # Set this up for when we remove the notification from the watch, it should fallback with these details
    res = client.post(
        url_for("settings_page"),
        data={"application-notification_urls": notification_url,
              "application-notification_title": "fallback-title " + default_notification_title,
              "application-notification_body": default_notification_body,
              "application-notification_format": 'Text',
              "requests-time_between_check-minutes": 180,
              'application-fetch_backend': "html_requests"},
        follow_redirects=True
    )
    assert b"Settings updated." in res.data

    # Add a watch and trigger a HTTP POST
    test_url = url_for('test_endpoint', _external=True)
    res = client.post(
        url_for("form_quick_watch_add"),
        data={"url": test_url, "tags": 'nice one'},
        follow_redirects=True
    )

    assert b"Watch added" in res.data

    wait_for_all_checks(client)
    set_longer_modified_response()
    client.get(url_for("form_watch_checknow"), follow_redirects=True)
    wait_for_all_checks(client)

    time.sleep(3)
    msg = get_last_message_from_smtp_server()
    assert len(msg) >= 1
    # with open('/tmp/m.txt', 'w') as f:
    #     f.write(msg)

    # The email should not have two bodies, should be TEXT only

    assert 'Content-Type: text/plain' in msg
    assert '(added) So let\'s see what happens.\n' in msg  # The plaintext part with \n

    set_original_response()
    # Now override as HTML format
    res = client.post(
        url_for("edit_page", uuid="first"),
        data={
            "url": test_url,
            "notification_format": 'HTML',
            'fetch_backend': "html_requests"},
        follow_redirects=True
    )
    assert b"Updated watch." in res.data
    wait_for_all_checks(client)

    time.sleep(3)
    msg = get_last_message_from_smtp_server()
    assert len(msg) >= 1

    # The email should have two bodies, and the text/html part should be <br>
    assert 'Content-Type: text/plain' in msg
    assert '(removed) So let\'s see what happens.\n' in msg  # The plaintext part with \n
    assert 'Content-Type: text/html' in msg
    assert '(removed) So let\'s see what happens.<br>' in msg  # the html part

    res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
    assert b'Deleted' in res.data

@@ -267,7 +267,7 @@ def test_api_watch_PUT_update(client, live_server):
 
     #live_server_setup(live_server)
     api_key = extract_api_key_from_UI(client)
-    time.sleep(1)
     # Create a watch
     set_original_response()
     test_url = url_for('test_endpoint', _external=True,
@@ -283,7 +283,6 @@ def test_api_watch_PUT_update(client, live_server):
 
     assert res.status_code == 201
 
-    time.sleep(1)
 
     # Get a listing, it will be the first one
     res = client.get(

@@ -2,7 +2,7 @@
 
 import time
 from flask import url_for
-from .util import live_server_setup, wait_for_all_checks, extract_rss_token_from_UI, get_UUID_for_tag_name
+from .util import live_server_setup, wait_for_all_checks, extract_rss_token_from_UI, get_UUID_for_tag_name, extract_UUID_from_client
 import os
 
 
@@ -154,6 +154,10 @@ def test_tag_add_in_ui(client, live_server):
     )
     assert b"Tag added" in res.data
     assert b"new-test-tag" in res.data
+
+    res = client.get(url_for("tags.delete_all"), follow_redirects=True)
+    assert b'All tags deleted' in res.data
+
     res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
     assert b'Deleted' in res.data
 
@@ -219,12 +223,10 @@ def test_group_tag_notification(client, live_server):
     assert "test-tag" in notification_submission
     assert "other-tag" in notification_submission
 
-    res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
-    assert b'Deleted' in res.data
-
     #@todo Test that multiple notifications fired
     #@todo Test that each of multiple notifications with different settings
+    res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
+    assert b'Deleted' in res.data
 
 def test_limit_tag_ui(client, live_server):
     #live_server_setup(live_server)
@@ -260,3 +262,61 @@ def test_limit_tag_ui(client, live_server):
     assert b'test-tag' in res.data
     assert res.data.count(b'processor-text_json_diff') == 20
     assert b"object at" not in res.data
+    res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
+    assert b'Deleted' in res.data
+    res = client.get(url_for("tags.delete_all"), follow_redirects=True)
+    assert b'All tags deleted' in res.data
+def test_clone_tag_on_import(client, live_server):
+    #live_server_setup(live_server)
+    test_url = url_for('test_endpoint', _external=True)
+    res = client.post(
+        url_for("import_page"),
+        data={"urls": test_url + " test-tag, another-tag\r\n"},
+        follow_redirects=True
+    )
+
+    assert b"1 Imported" in res.data
+
+    res = client.get(url_for("index"))
+    assert b'test-tag' in res.data
+    assert b'another-tag' in res.data
+
+    watch_uuid = extract_UUID_from_client(client)
+    res = client.get(url_for("form_clone", uuid=watch_uuid), follow_redirects=True)
+
+    assert b'Cloned' in res.data
+    # 2 times plus the top link to tag
+    assert res.data.count(b'test-tag') == 3
+    assert res.data.count(b'another-tag') == 3
+    res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
+    assert b'Deleted' in res.data
+
+def test_clone_tag_on_quickwatchform_add(client, live_server):
+    #live_server_setup(live_server)
+
+    test_url = url_for('test_endpoint', _external=True)
+
+    res = client.post(
+        url_for("form_quick_watch_add"),
+        data={"url": test_url, "tags": ' test-tag, another-tag '},
+        follow_redirects=True
+    )
+
+    assert b"Watch added" in res.data
+
+    res = client.get(url_for("index"))
+    assert b'test-tag' in res.data
+    assert b'another-tag' in res.data
+
+    watch_uuid = extract_UUID_from_client(client)
+    res = client.get(url_for("form_clone", uuid=watch_uuid), follow_redirects=True)
+
+    assert b'Cloned' in res.data
+    # 2 times plus the top link to tag
+    assert res.data.count(b'test-tag') == 3
+    assert res.data.count(b'another-tag') == 3
+    res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
+    assert b'Deleted' in res.data
+
+    res = client.get(url_for("tags.delete_all"), follow_redirects=True)
+    assert b'All tags deleted' in res.data

@@ -112,6 +112,7 @@ def test_import_distillio(client, live_server):
     # did the tags work?
     res = client.get( url_for("index"))
 
+    # check tags
     assert b"nice stuff" in res.data
     assert b"nerd-news" in res.data
 

@@ -3,7 +3,8 @@ import os
 import time
 import re
 from flask import url_for
-from .util import set_original_response, set_modified_response, set_more_modified_response, live_server_setup, wait_for_all_checks
+from .util import set_original_response, set_modified_response, set_more_modified_response, live_server_setup, wait_for_all_checks, \
+    set_longer_modified_response
 from . util import extract_UUID_from_client
 import logging
 import base64
@@ -24,9 +25,6 @@ def test_check_notification(client, live_server):
     #live_server_setup(live_server)
     set_original_response()
 
-    # Give the endpoint time to spin up
-    time.sleep(1)
-
     # Re 360 - new install should have defaults set
     res = client.get(url_for("settings_page"))
     notification_url = url_for('test_notification_endpoint', _external=True).replace('http', 'json')
@@ -142,8 +140,7 @@ def test_check_notification(client, live_server):
 
     # Did we see the URL that had a change, in the notification?
     # Diff was correctly executed
-    assert test_url in notification_submission
-    assert ':-)' in notification_submission
     assert "Diff Full: Some initial text" in notification_submission
     assert "Diff: (changed) Which is across multiple lines" in notification_submission
     assert "(into) which has this one new line" in notification_submission
@@ -156,7 +153,8 @@ def test_check_notification(client, live_server):
     assert "preview/" in notification_submission
     assert ":-)" in notification_submission
     assert "New ChangeDetection.io Notification - {}".format(test_url) in notification_submission
+    assert test_url in notification_submission
+    assert ':-)' in notification_submission
     # Check the attachment was added, and that it is a JPEG from the original PNG
     notification_submission_object = json.loads(notification_submission)
     # We keep PNG screenshots for now
@@ -275,7 +273,7 @@ def test_notification_validation(client, live_server):
 
 
 def test_notification_custom_endpoint_and_jinja2(client, live_server):
-    time.sleep(1)
+    #live_server_setup(live_server)
 
     # test_endpoint - that sends the contents of a file
     # test_notification_endpoint - that takes a POST and writes it to file (test-datastore/notification.txt)
@@ -286,12 +284,14 @@ def test_notification_custom_endpoint_and_jinja2(client, live_server):
 
     res = client.post(
         url_for("settings_page"),
-        data={"application-notification_title": "New ChangeDetection.io Notification - {{ watch_url }}",
-              "application-notification_body": '{ "url" : "{{ watch_url }}", "secret": 444 }',
-              # https://github.com/caronc/apprise/wiki/Notify_Custom_JSON#get-parameter-manipulation
-              "application-notification_urls": test_notification_url,
+        data={
+              "application-fetch_backend": "html_requests",
               "application-minutes_between_check": 180,
-              "application-fetch_backend": "html_requests"
+              "application-notification_body": '{ "url" : "{{ watch_url }}", "secret": 444 }',
+              "application-notification_format": default_notification_format,
+              "application-notification_urls": test_notification_url,
+              # https://github.com/caronc/apprise/wiki/Notify_Custom_JSON#get-parameter-manipulation
+              "application-notification_title": "New ChangeDetection.io Notification - {{ watch_url }}",
         },
         follow_redirects=True
     )
@@ -316,9 +316,8 @@ def test_notification_custom_endpoint_and_jinja2(client, live_server):
     client.get(url_for("form_watch_checknow"), follow_redirects=True)
     time.sleep(2)
-
 
     with open("test-datastore/notification.txt", 'r') as f:
-        x=f.read()
+        x = f.read()
         j = json.loads(x)
         assert j['url'].startswith('http://localhost')
         assert j['secret'] == 444
@@ -329,5 +328,9 @@ def test_notification_custom_endpoint_and_jinja2(client, live_server):
         notification_url = f.read()
         assert 'xxx=http' in notification_url
 
-    os.unlink("test-datastore/notification-url.txt")
+    # Should always be automatically detected as JSON content type even when we set it as 'Text' (default)
+    assert os.path.isfile("test-datastore/notification-content-type.txt")
+    with open("test-datastore/notification-content-type.txt", 'r') as f:
+        assert 'application/json' in f.read()
+
+    os.unlink("test-datastore/notification-url.txt")

@@ -38,7 +38,25 @@ def set_modified_response():
|
|||||||
f.write(test_return_data)
|
f.write(test_return_data)
|
||||||
|
|
||||||
return None
|
return None
|
||||||
|
def set_longer_modified_response():
|
||||||
|
test_return_data = """<html>
|
||||||
|
<head><title>modified head title</title></head>
|
||||||
|
<body>
|
||||||
|
Some initial text<br>
|
||||||
|
<p>which has this one new line</p>
|
||||||
|
<br>
|
||||||
|
So let's see what happens. <br>
|
||||||
|
So let's see what happens. <br>
|
||||||
|
So let's see what happens. <br>
|
||||||
|
So let's see what happens. <br>
|
||||||
|
</body>
|
||||||
|
</html>
|
||||||
|
"""
|
||||||
|
|
||||||
|
with open("test-datastore/endpoint-content.txt", "w") as f:
|
||||||
|
f.write(test_return_data)
|
||||||
|
|
||||||
|
return None
|
||||||
def set_more_modified_response():
|
def set_more_modified_response():
|
||||||
test_return_data = """<html>
|
test_return_data = """<html>
|
||||||
<head><title>modified head title</title></head>
|
<head><title>modified head title</title></head>
|
||||||
@@ -187,6 +205,10 @@ def live_server_setup(live_server):
         with open("test-datastore/notification-url.txt", "w") as f:
             f.write(request.url)
 
+        if request.content_type:
+            with open("test-datastore/notification-content-type.txt", "w") as f:
+                f.write(request.content_type)
+
         print("\n>> Test notification endpoint was hit.\n", data)
         return "Text was set"
 
changedetectionio/update_worker.py
@@ -32,15 +32,17 @@ class update_worker(threading.Thread):
 
         watch_history = watch.history
         dates = list(watch_history.keys())
+        # Add text that was triggered
+        snapshot_contents = watch.get_history_snapshot(dates[-1])
+
         # HTML needs linebreak, but MarkDown and Text can use a linefeed
         if n_object['notification_format'] == 'HTML':
             line_feed_sep = "<br>"
+            # Snapshot will be plaintext on the disk, convert to some kind of HTML
+            snapshot_contents = snapshot_contents.replace('\n', line_feed_sep)
         else:
             line_feed_sep = "\n"
 
-        # Add text that was triggered
-        snapshot_contents = watch.get_history_snapshot(dates[-1])
         trigger_text = watch.get('trigger_text', [])
         triggered_text = ''
 
@@ -65,15 +67,48 @@ class update_worker(threading.Thread):
         logging.info (">> SENDING NOTIFICATION")
         self.notification_q.put(n_object)
 
+    # Prefer - Individual watch settings > Tag settings > Global settings (in that order)
+    def _check_cascading_vars(self, var_name, watch):
+
+        from changedetectionio.notification import (
+            default_notification_format_for_watch,
+            default_notification_body,
+            default_notification_title
+        )
+
+        # Would be better if this was some kind of Object where Watch can reference the parent datastore etc
+        v = watch.get(var_name)
+        if v and not watch.get('notification_muted'):
+            if var_name == 'notification_format' and v == default_notification_format_for_watch:
+                return self.datastore.data['settings']['application'].get('notification_format')
+
+            return v
+
+        tags = self.datastore.get_all_tags_for_watch(uuid=watch.get('uuid'))
+        if tags:
+            for tag_uuid, tag in tags.items():
+                v = tag.get(var_name)
+                if v and not tag.get('notification_muted'):
+                    return v
+
+        if self.datastore.data['settings']['application'].get(var_name):
+            return self.datastore.data['settings']['application'].get(var_name)
+
+        # Otherwise could be defaults
+        if var_name == 'notification_format':
+            return default_notification_format_for_watch
+        if var_name == 'notification_body':
+            return default_notification_body
+        if var_name == 'notification_title':
+            return default_notification_title
+
+        return None
+
     def send_content_changed_notification(self, watch_uuid):
 
-        from changedetectionio.notification import (
-            default_notification_format_for_watch
-        )
-
         n_object = {}
-        watch = self.datastore.data['watching'].get(watch_uuid, False)
+        watch = self.datastore.data['watching'].get(watch_uuid)
         if not watch:
             return
 
@@ -87,57 +122,20 @@ class update_worker(threading.Thread):
         )
 
         # Should be a better parent getter in the model object
 
         # Prefer - Individual watch settings > Tag settings > Global settings (in that order)
-        n_object['notification_urls'] = watch.get('notification_urls')
-
-        n_object['notification_title'] = watch['notification_title'] if watch['notification_title'] else \
-            self.datastore.data['settings']['application']['notification_title']
-
-        n_object['notification_body'] = watch['notification_body'] if watch['notification_body'] else \
-            self.datastore.data['settings']['application']['notification_body']
-
-        n_object['notification_format'] = watch['notification_format'] if watch['notification_format'] != default_notification_format_for_watch else \
-            self.datastore.data['settings']['application']['notification_format']
+        n_object['notification_urls'] = self._check_cascading_vars('notification_urls', watch)
+        n_object['notification_title'] = self._check_cascading_vars('notification_title', watch)
+        n_object['notification_body'] = self._check_cascading_vars('notification_body', watch)
+        n_object['notification_format'] = self._check_cascading_vars('notification_format', watch)
 
         # (Individual watch) Only prepare to notify if the rules above matched
-        sent = False
-        if 'notification_urls' in n_object and n_object['notification_urls']:
-            sent = True
+        queued = False
+        if n_object and n_object.get('notification_urls'):
+            queued = True
             self.queue_notification_for_watch(n_object, watch)
 
-        # (Group tags) try by group tag
-        if not sent:
-            # Else, Try by tag, and use system default vars for format, body etc as fallback
-            tags = self.datastore.get_all_tags_for_watch(uuid=watch_uuid)
-            for tag_uuid, tag in tags.items():
-                n_object = {}
-                n_object['notification_urls'] = tag.get('notification_urls')
-
-                n_object['notification_title'] = tag.get('notification_title') if tag.get('notification_title') else \
-                    self.datastore.data['settings']['application']['notification_title']
-
-                n_object['notification_body'] = tag.get('notification_body') if tag.get('notification_body') else \
-                    self.datastore.data['settings']['application']['notification_body']
-
-                n_object['notification_format'] = tag.get('notification_format') if tag.get('notification_format') != default_notification_format_for_watch else \
-                    self.datastore.data['settings']['application']['notification_format']
-
-                if 'notification_urls' in n_object and n_object.get('notification_urls') and not tag.get('notification_muted'):
-                    sent = True
-                    self.queue_notification_for_watch(n_object, watch)
-
-        # (Group tags) try by global
-        if not sent:
-            # leave this as is, but repeat in a loop for each tag also
-            n_object['notification_urls'] = self.datastore.data['settings']['application'].get('notification_urls')
-            n_object['notification_title'] = self.datastore.data['settings']['application'].get('notification_title')
-            n_object['notification_body'] = self.datastore.data['settings']['application'].get('notification_body')
-            n_object['notification_format'] = self.datastore.data['settings']['application'].get('notification_format')
-            if n_object.get('notification_urls') and n_object.get('notification_body') and n_object.get('notification_title'):
-                sent = True
-                self.queue_notification_for_watch(n_object, watch)
-
-        return sent
+        return queued
 
     def send_filter_failure_notification(self, watch_uuid):
@@ -381,6 +379,9 @@ class update_worker(threading.Thread):
                     if not self.datastore.data['watching'][uuid].get('ignore_status_codes'):
                         update_obj['consecutive_filter_failures'] = 0
 
+                    # Everything ran OK, clean off any previous error
+                    update_obj['last_error'] = False
+
                     self.cleanup_error_artifacts(uuid)
 
                 #
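
The cascading lookup introduced above (individual watch settings, then tag/group settings, then global application settings, then built-in defaults) can be summarised with a minimal standalone sketch; the resolve() helper and the flat dict shapes below are illustrative assumptions for this page only, not the project's actual API:

# Illustrative only -- simplified stand-in for the watch > tag > global > default cascade.
DEFAULTS = {
    'notification_format': 'System default',
    'notification_title': 'ChangeDetection.io change detected',
    'notification_body': '{{ watch_url }} had a change.',
}

def resolve(var_name, watch, tags, app_settings):
    # 1. The individual watch wins, unless its notifications are muted
    v = watch.get(var_name)
    if v and not watch.get('notification_muted'):
        return v

    # 2. Otherwise the first unmuted tag/group attached to the watch
    for tag in tags:
        v = tag.get(var_name)
        if v and not tag.get('notification_muted'):
            return v

    # 3. Otherwise the global application settings
    if app_settings.get(var_name):
        return app_settings[var_name]

    # 4. Finally a built-in default, if one exists
    return DEFAULTS.get(var_name)

if __name__ == '__main__':
    watch = {'notification_title': 'Watch-level title'}
    tags = [{'notification_body': 'Tag-level body'}]
    app_settings = {'notification_format': 'HTML'}
    print(resolve('notification_title', watch, tags, app_settings))   # Watch-level title
    print(resolve('notification_body', watch, tags, app_settings))    # Tag-level body
    print(resolve('notification_format', watch, tags, app_settings))  # HTML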