Compare commits

..

24 Commits

Author SHA1 Message Date
dgtlmoon 627fd368fb Return exit status 2022-08-02 09:06:52 +02:00
dgtlmoon f2f8469891 WIP 2022-08-01 21:02:07 +02:00
dgtlmoon 27117b324d Add comment 2022-08-01 19:30:04 +02:00
dgtlmoon 3cc7b8e489 Nope ignore it :) 2022-08-01 19:25:45 +02:00
dgtlmoon 307ce59b38 Handle SIGSTOP for suspend mode also 2022-08-01 19:23:38 +02:00
dgtlmoon d1bd5b442b whitespace cleanup 2022-08-01 19:18:13 +02:00
dgtlmoon a3f492bf17 use multiprocessing to wrap flask and use SIGTERM to save DB 2022-08-01 19:13:57 +02:00
dgtlmoon 58b0166330 Merge branch 'master' into sig-handler 2022-08-01 16:48:10 +02:00
dgtlmoon a1c3107cd6 Feature - priority queue - edited and added watches should get checked before automatically queued watches (#799) 2022-07-31 15:35:35 +02:00
dgtlmoon 8fef3ff4ab [preview current] cleanup code and add test 2022-07-30 20:11:56 +02:00
dgtlmoon baa25c9f9e Feature - mute notifications (#791) 2022-07-29 21:09:55 +02:00
dgtlmoon 488699b7d4 Test improvement - remove unnecessary step 2022-07-29 10:23:59 +02:00
dgtlmoon cf3a1ee3e3 0.39.17.1 2022-07-29 10:13:29 +02:00
dgtlmoon daae43e9f9 Bug fix: Filter failure detection notification was interfering with change-detection results, added test case (#786) 2022-07-29 10:11:49 +02:00
dgtlmoon cdeedaa65c README.md - new Discord invite link 2022-07-28 23:07:07 +02:00
dgtlmoon 3c9d2ded38 0.39.17 2022-07-28 13:07:51 +02:00
dgtlmoon 9f4364a130 Add https://discord.com/api notification hook to the automatic truncation due to Discords 2000 char limit 2022-07-28 12:34:55 +02:00
dgtlmoon 5bd9eaf99d UI Feature - Add watch in "paused" state, saving then unpauses (#779) 2022-07-28 12:13:26 +02:00
dgtlmoon b1c51c0a65 Enhancement - support xPath text() function filter, for example "//title/text()" in RSS feeds (#778) 2022-07-28 11:50:31 +02:00
dgtlmoon 232bd92389 Bug fix - Filter "Only trigger when new lines appear" should check all history, not only the first item (#777) 2022-07-28 10:16:19 +02:00
dgtlmoon e6173357a9 Visual Selector direct element finder fix 2022-07-28 09:19:10 +02:00
dgtlmoon f2b8888aff Update README.md 2022-07-27 14:25:24 +02:00
dgtlmoon 9c46f175f9 Update README.md links 2022-07-27 14:23:18 +02:00
dgtlmoon fd080e9e4b Handle SIGINT somewhat 2022-07-04 20:52:15 +02:00
24 changed files with 682 additions and 345 deletions
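
The work above combines two threads: wrapping the Flask app so a SIGTERM from the host saves the datastore before exit, and switching the recheck queue to a priority queue so watches a user just added or edited are fetched before routinely scheduled ones. A minimal sketch of that second idea, separate from the project code (the priority values 1 and 5 are simply the ones used in the diffs below):

import queue

update_q = queue.PriorityQueue()

# A routine scheduled recheck is queued first...
update_q.put((5, "uuid-scheduled"))
# ...then a watch the user just saved or added
update_q.put((1, "uuid-just-edited"))

# Workers calling get() receive (priority, uuid) tuples in ascending priority order,
# so the user-triggered watch comes out first even though it was queued last.
print(update_q.get())   # (1, 'uuid-just-edited')
print(update_q.get())   # (5, 'uuid-scheduled')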

View File

@@ -11,7 +11,7 @@ Live your data-life *pro-actively* instead of *re-actively*.
Free, Open-source web page monitoring, notification and change detection. Don't have time? [**Try our $6.99/month subscription - unlimited checks and watches!**](https://lemonade.changedetection.io/start)
-[[ Discord ]](https://discord.com/channels/1000806276256780309/1000806276873334816) [[ YouTube ]](https://www.youtube.com/channel/UCbS09q1TRf0o4N2t-WA3emQ) [[ LinkedIn ]](https://www.linkedin.com/company/changedetection-io/)
+[![Discord](https://img.shields.io/badge/DISCORD-%237289DA.svg?style=for-the-badge&logo=discord&logoColor=white)](https://discord.gg/XJZy7QK3ja) [ ![YouTube](https://img.shields.io/badge/YouTube-%23FF0000.svg?style=for-the-badge&logo=YouTube&logoColor=white)](https://www.youtube.com/channel/UCbS09q1TRf0o4N2t-WA3emQ) [![LinkedIn](https://img.shields.io/badge/linkedin-%230077B5.svg?style=for-the-badge&logo=linkedin&logoColor=white)](https://www.linkedin.com/company/changedetection-io/)
[<img src="https://raw.githubusercontent.com/dgtlmoon/changedetection.io/master/docs/screenshot.png" style="max-width:100%;" alt="Self-hosted web page change monitoring" title="Self-hosted web page change monitoring" />](https://lemonade.changedetection.io/start)

View File

@@ -6,6 +6,34 @@
# Read more https://github.com/dgtlmoon/changedetection.io/wiki
from changedetectionio import changedetection
+import multiprocessing
+import signal
+import os
+
+def sigterm_handler(_signo, _stack_frame):
+    import sys
+    print('Shutdown: Got SIGCHLD')
+    # https://stackoverflow.com/questions/40453496/python-multiprocessing-capturing-signals-to-restart-child-processes-or-shut-do
+    pid, status = os.waitpid(-1, os.WNOHANG | os.WUNTRACED | os.WCONTINUED)
+    print('Sub-process: pid %d status %d' % (pid, status))
+    if status != 0:
+        sys.exit(1)
+    raise SystemExit
+
if __name__ == '__main__':
-    changedetection.main()
+    signal.signal(signal.SIGCHLD, sigterm_handler)
+    # The only way I could find to get Flask to shutdown, is to wrap it and then rely on the subsystem issuing SIGTERM/SIGKILL
+    parse_process = multiprocessing.Process(target=changedetection.main)
+    parse_process.daemon = True
+    parse_process.start()
+    import time
+    try:
+        while True:
+            time.sleep(1)
+    except KeyboardInterrupt:
+        #parse_process.terminate() not needed, because this process will issue it to the sub-process anyway
+        print ("Exited - CTRL+C")

View File

@@ -1 +1,2 @@
test-datastore
+package-lock.json

View File

@@ -44,7 +44,7 @@ from flask_wtf import CSRFProtect
from changedetectionio import html_tools
from changedetectionio.api import api_v1
-__version__ = '0.39.16'
+__version__ = '0.39.17.1'
datastore = None
@@ -54,7 +54,7 @@ ticker_thread = None
extra_stylesheets = []
-update_q = queue.Queue()
+update_q = queue.PriorityQueue()
notification_q = queue.Queue()
@@ -105,10 +105,9 @@ def init_app_secret(datastore_path):
# running or something similar.
@app.template_filter('format_last_checked_time')
def _jinja2_filter_datetime(watch_obj, format="%Y-%m-%d %H:%M:%S"):
    # Worker thread tells us which UUID it is currently processing.
-    for t in threading.enumerate():
-        if t.name == 'update_worker' and t.current_uuid == watch_obj['uuid']:
+    for t in running_update_threads:
+        if t.current_uuid == watch_obj['uuid']:
            return '<span class="loader"></span><span> Checking now</span>'
    if watch_obj['last_checked'] == 0:
@@ -371,20 +370,20 @@ def changedetection_app(config=None, datastore_o=None):
        from changedetectionio import forms
        limit_tag = request.args.get('tag')
-        pause_uuid = request.args.get('pause')
        # Redirect for the old rss path which used the /?rss=true
        if request.args.get('rss'):
            return redirect(url_for('rss', tag=limit_tag))
-        if pause_uuid:
-            try:
-                datastore.data['watching'][pause_uuid]['paused'] ^= True
-                datastore.needs_write = True
-                return redirect(url_for('index', tag = limit_tag))
-            except KeyError:
-                pass
+        op = request.args.get('op')
+        if op:
+            uuid = request.args.get('uuid')
+            if op == 'pause':
+                datastore.data['watching'][uuid]['paused'] ^= True
+            elif op == 'mute':
+                datastore.data['watching'][uuid]['notification_muted'] ^= True
+            datastore.needs_write = True
+            return redirect(url_for('index', tag = limit_tag))
        # Sort by last_changed and add the uuid which is usually the key..
        sorted_watches = []
@@ -407,7 +406,6 @@ def changedetection_app(config=None, datastore_o=None):
        existing_tags = datastore.get_all_tags()
        form = forms.quickWatchForm(request.form)
        output = render_template("watch-overview.html",
                                 form=form,
                                 watches=sorted_watches,
@@ -418,7 +416,7 @@ def changedetection_app(config=None, datastore_o=None):
                                 # Don't link to hosting when we're on the hosting environment
                                 hosted_sticky=os.getenv("SALTED_PASS", False) == False,
                                 guid=datastore.data['app_guid'],
-                                 queued_uuids=update_q.queue)
+                                 queued_uuids=[uuid for p,uuid in update_q.queue])
        if session.get('share-link'):
@@ -581,6 +579,9 @@ def changedetection_app(config=None, datastore_o=None):
        if request.method == 'POST' and form.validate():
            extra_update_obj = {}
+            if request.args.get('unpause_on_save'):
+                extra_update_obj['paused'] = False
            # Re #110, if they submit the same as the default value, set it to None, so we continue to follow the default
            # Assume we use the default value, unless something relevant is different, then use the form value
            # values could be None, 0 etc.
@@ -620,14 +621,17 @@ def changedetection_app(config=None, datastore_o=None):
            datastore.data['watching'][uuid].update(form.data)
            datastore.data['watching'][uuid].update(extra_update_obj)
-            flash("Updated watch.")
+            if request.args.get('unpause_on_save'):
+                flash("Updated watch - unpaused!.")
+            else:
+                flash("Updated watch.")
            # Re #286 - We wait for syncing new data to disk in another thread every 60 seconds
            # But in the case something is added we should save straight away
            datastore.needs_write_urgent = True
-            # Queue the watch for immediate recheck
-            update_q.put(uuid)
+            # Queue the watch for immediate recheck, with a higher priority
+            update_q.put((1, uuid))
            # Diff page [edit] link should go back to diff page
            if request.args.get("next") and request.args.get("next") == 'diff' and not form.save_and_preview_button.data:
@@ -741,7 +745,7 @@ def changedetection_app(config=None, datastore_o=None):
            importer = import_url_list()
            importer.run(data=request.values.get('urls'), flash=flash, datastore=datastore)
            for uuid in importer.new_uuids:
-                update_q.put(uuid)
+                update_q.put((1, uuid))
            if len(importer.remaining_data) == 0:
                return redirect(url_for('index'))
@@ -754,7 +758,7 @@ def changedetection_app(config=None, datastore_o=None):
            d_importer = import_distill_io_json()
            d_importer.run(data=request.values.get('distill-io'), flash=flash, datastore=datastore)
            for uuid in d_importer.new_uuids:
-                update_q.put(uuid)
+                update_q.put((1, uuid))
@@ -872,42 +876,40 @@ def changedetection_app(config=None, datastore_o=None):
flash("No history found for the specified link, bad link?", "error") flash("No history found for the specified link, bad link?", "error")
return redirect(url_for('index')) return redirect(url_for('index'))
if watch.history_n >0:
timestamps = sorted(watch.history.keys(), key=lambda x: int(x))
filename = watch.history[timestamps[-1]]
try:
with open(filename, 'r') as f:
tmp = f.readlines()
# Get what needs to be highlighted timestamp = list(watch.history.keys())[-1]
ignore_rules = watch.get('ignore_text', []) + datastore.data['settings']['application']['global_ignore_text'] filename = watch.history[timestamp]
try:
with open(filename, 'r') as f:
tmp = f.readlines()
# .readlines will keep the \n, but we will parse it here again, in the future tidy this up # Get what needs to be highlighted
ignored_line_numbers = html_tools.strip_ignore_text(content="".join(tmp), ignore_rules = watch.get('ignore_text', []) + datastore.data['settings']['application']['global_ignore_text']
wordlist=ignore_rules,
mode='line numbers'
)
trigger_line_numbers = html_tools.strip_ignore_text(content="".join(tmp), # .readlines will keep the \n, but we will parse it here again, in the future tidy this up
wordlist=watch['trigger_text'], ignored_line_numbers = html_tools.strip_ignore_text(content="".join(tmp),
mode='line numbers' wordlist=ignore_rules,
) mode='line numbers'
# Prepare the classes and lines used in the template )
i=0
for l in tmp:
classes=[]
i+=1
if i in ignored_line_numbers:
classes.append('ignored')
if i in trigger_line_numbers:
classes.append('triggered')
content.append({'line': l, 'classes': ' '.join(classes)})
trigger_line_numbers = html_tools.strip_ignore_text(content="".join(tmp),
wordlist=watch['trigger_text'],
mode='line numbers'
)
# Prepare the classes and lines used in the template
i=0
for l in tmp:
classes=[]
i+=1
if i in ignored_line_numbers:
classes.append('ignored')
if i in trigger_line_numbers:
classes.append('triggered')
content.append({'line': l, 'classes': ' '.join(classes)})
except Exception as e:
content.append({'line': "File doesnt exist or unable to read file {}".format(filename), 'classes': ''})
except Exception as e:
content.append({'line': "File doesnt exist or unable to read file {}".format(filename), 'classes': ''})
else:
content.append({'line': "No history found", 'classes': ''})
screenshot_url = datastore.get_screenshot(uuid) screenshot_url = datastore.get_screenshot(uuid)
system_uses_webdriver = datastore.data['settings']['application']['fetch_backend'] == 'html_webdriver' system_uses_webdriver = datastore.data['settings']['application']['fetch_backend'] == 'html_webdriver'
@@ -1064,9 +1066,9 @@ def changedetection_app(config=None, datastore_o=None):
        except FileNotFoundError:
            abort(404)
-    @app.route("/api/add", methods=['POST'])
+    @app.route("/form/add/quickwatch", methods=['POST'])
    @login_required
-    def form_watch_add():
+    def form_quick_watch_add():
        from changedetectionio import forms
        form = forms.quickWatchForm(request.form)
@@ -1079,13 +1081,19 @@ def changedetection_app(config=None, datastore_o=None):
            flash('The URL {} already exists'.format(url), "error")
            return redirect(url_for('index'))
-        # @todo add_watch should throw a custom Exception for validation etc
-        new_uuid = datastore.add_watch(url=url, tag=request.form.get('tag').strip())
-        if new_uuid:
+        add_paused = request.form.get('edit_and_watch_submit_button') != None
+        new_uuid = datastore.add_watch(url=url, tag=request.form.get('tag').strip(), extras={'paused': add_paused})
+        if not add_paused and new_uuid:
            # Straight into the queue.
-            update_q.put(new_uuid)
+            update_q.put((1, new_uuid))
            flash("Watch added.")
+        if add_paused:
+            flash('Watch added in Paused state, saving will unpause.')
+            return redirect(url_for('edit_page', uuid=new_uuid, unpause_on_save=1))
        return redirect(url_for('index'))
@@ -1116,7 +1124,7 @@ def changedetection_app(config=None, datastore_o=None):
        uuid = list(datastore.data['watching'].keys()).pop()
        new_uuid = datastore.clone(uuid)
-        update_q.put(new_uuid)
+        update_q.put((5, new_uuid))
        flash('Cloned.')
        return redirect(url_for('index'))
@@ -1137,7 +1145,7 @@ def changedetection_app(config=None, datastore_o=None):
        if uuid:
            if uuid not in running_uuids:
-                update_q.put(uuid)
+                update_q.put((1, uuid))
            i = 1
        elif tag != None:
@@ -1145,7 +1153,7 @@ def changedetection_app(config=None, datastore_o=None):
            for watch_uuid, watch in datastore.data['watching'].items():
                if (tag != None and tag in watch['tag']):
                    if watch_uuid not in running_uuids and not datastore.data['watching'][watch_uuid]['paused']:
-                        update_q.put(watch_uuid)
+                        update_q.put((1, watch_uuid))
                        i += 1
        else:
@@ -1153,7 +1161,7 @@ def changedetection_app(config=None, datastore_o=None):
            for watch_uuid, watch in datastore.data['watching'].items():
                if watch_uuid not in running_uuids and not datastore.data['watching'][watch_uuid]['paused']:
-                    update_q.put(watch_uuid)
+                    update_q.put((1, watch_uuid))
                    i += 1
        flash("{} watches are queued for rechecking.".format(i))
        return redirect(url_for('index', tag=tag))
@@ -1214,7 +1222,6 @@ def changedetection_app(config=None, datastore_o=None):
    # @todo handle ctrl break
    ticker_thread = threading.Thread(target=ticker_thread_check_time_launch_checks).start()
-    threading.Thread(target=ticker_thread_job_queue_processor).start()
    threading.Thread(target=notification_runner).start()
@@ -1256,7 +1263,6 @@ def notification_runner():
    global notification_debug_log
    from datetime import datetime
    import json
    while not app.config.exit.is_set():
        try:
            # At the moment only one thread runs (single runner)
@@ -1290,63 +1296,25 @@ def notification_runner():
            # Trim the log length
            notification_debug_log = notification_debug_log[-100:]
-# Check the queue, when a job exists, start a fresh thread of update_worker
-def ticker_thread_job_queue_processor():
-    from changedetectionio import update_worker
-    n_workers = int(os.getenv("FETCH_WORKERS", datastore.data['settings']['requests']['workers']))
-    while not app.config.exit.is_set():
-        time.sleep(0.3)
-        # Check that some threads are free
-        running = 0
-        for t in threading.enumerate():
-            if t.name == 'update_worker':
-                running += 1
-        if running >= n_workers:
-            continue
-        try:
-            uuid = update_q.get(block=False)
-        except queue.Empty:
-            # Go back to waiting for exit and/or another entry from the queue
-            continue
-        print ("Starting a thread fetch")
-        try:
-            # Launch the update_worker thread that will handle picking items off a queue and sending them off
-            # in the event that playwright or others have a memory leak, this should clean it up better than gc.collect()
-            # (By letting it exit entirely)
-            update_worker.update_worker(update_q, notification_q, app, datastore, uuid).start()
-        except Exception as e:
-            print ("Error launching update_worker for UUID {}.".format(uuid))
-            print (str(e))
-        print ("Running now {}", running)
# Thread runner to check every minute, look for new watches to feed into the Queue.
def ticker_thread_check_time_launch_checks():
    import random
+    from changedetectionio import update_worker
    recheck_time_minimum_seconds = int(os.getenv('MINIMUM_SECONDS_RECHECK_TIME', 20))
    print("System env MINIMUM_SECONDS_RECHECK_TIME", recheck_time_minimum_seconds)
-    # Can go in its own function
-    # Always maintain the minimum number of threads, each thread will terminate when it has processed exactly 1 queued watch
-    # This is to be totally sure that they don't leak memory
    # Spin up Workers that do the fetching
    # Can be overriden by ENV or use the default settings
+    n_workers = int(os.getenv("FETCH_WORKERS", datastore.data['settings']['requests']['workers']))
+    for _ in range(n_workers):
+        new_worker = update_worker.update_worker(update_q, notification_q, app, datastore)
+        running_update_threads.append(new_worker)
+        new_worker.start()
    while not app.config.exit.is_set():
-        # Update our list of watches by UUID that are currently fetching data, used in the UI
+        # Get a list of watches by UUID that are currently fetching data
        running_uuids = []
        for t in running_update_threads:
            if t.current_uuid:
@@ -1396,14 +1364,14 @@ def ticker_thread_check_time_launch_checks():
                seconds_since_last_recheck = now - watch['last_checked']
                if seconds_since_last_recheck >= (threshold + watch.jitter_seconds) and seconds_since_last_recheck >= recheck_time_minimum_seconds:
-                    if not uuid in running_uuids and uuid not in update_q.queue:
-                        print("Queued watch UUID {} last checked at {} queued at {:0.2f} jitter {:0.2f}s, {:0.2f}s since last checked".format(uuid,
+                    if not uuid in running_uuids and uuid not in [q_uuid for p,q_uuid in update_q.queue]:
+                        print("> Queued watch UUID {} last checked at {} queued at {:0.2f} jitter {:0.2f}s, {:0.2f}s since last checked".format(uuid,
                              watch['last_checked'],
                              now,
                              watch.jitter_seconds,
                              now - watch['last_checked']))
                        # Into the queue with you
-                        update_q.put(uuid)
+                        update_q.put((5, uuid))
                        # Reset for next time
                        watch.jitter_seconds = 0
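
Taken together, the __init__.py changes replace the per-job ticker_thread_job_queue_processor with a fixed pool of long-lived workers that block on the priority queue. A stripped-down sketch of that pattern, with illustrative names only (not the project's classes):

import queue
import threading

update_q = queue.PriorityQueue()

class Worker(threading.Thread):
    # Long-lived worker: blocks on the queue instead of being spawned once per job
    current_uuid = None

    def __init__(self, q):
        self.q = q
        super().__init__(daemon=True)

    def run(self):
        while True:
            priority, uuid = self.q.get()   # blocks until something is queued
            self.current_uuid = uuid
            try:
                pass  # fetch and diff the watch here
            finally:
                self.current_uuid = None
                self.q.task_done()

running_update_threads = [Worker(update_q) for _ in range(4)]
for w in running_update_threads:
    w.start()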

View File

@@ -24,7 +24,7 @@ class Watch(Resource):
            abort(404, message='No watch exists with the UUID of {}'.format(uuid))
        if request.args.get('recheck'):
-            self.update_q.put(uuid)
+            self.update_q.put((1, uuid))
            return "OK", 200
        # Return without history, get that via another API call
@@ -100,7 +100,7 @@ class CreateWatch(Resource):
        extras = {'title': json_data['title'].strip()} if json_data.get('title') else {}
        new_uuid = self.datastore.add_watch(url=json_data['url'].strip(), tag=tag, extras=extras)
-        self.update_q.put(new_uuid)
+        self.update_q.put((1, new_uuid))
        return {'uuid': new_uuid}, 201
    # Return concise list of available watches and some very basic info
@@ -118,7 +118,7 @@ class CreateWatch(Resource):
        if request.args.get('recheck_all'):
            for uuid in self.datastore.data['watching'].keys():
-                self.update_q.put(uuid)
+                self.update_q.put((1, uuid))
            return {'status': "OK"}, 200
        return list, 200

View File

@@ -4,6 +4,7 @@
import getopt
import os
+import signal
import sys
import eventlet
@@ -11,7 +12,22 @@ import eventlet.wsgi
from . import store, changedetection_app, content_fetcher
from . import __version__
+# Only global so we can access it in the signal handler
+datastore = None
+app = None
+
+def sigterm_handler(_signo, _stack_frame):
+    global app
+    global datastore
+    app.config.exit.set()
+    datastore.sync_to_json()
+    print('Shutdown: Got SIGTERM, DB saved to disk')
+    raise SystemExit
+
def main():
+    global datastore
+    global app
    ssl_mode = False
    host = ''
    port = os.environ.get('PORT') or 5000
@@ -72,8 +88,10 @@ def main():
"Or use the -C parameter to create the directory.".format(app_config['datastore_path']), file=sys.stderr) "Or use the -C parameter to create the directory.".format(app_config['datastore_path']), file=sys.stderr)
sys.exit(2) sys.exit(2)
datastore = store.ChangeDetectionStore(datastore_path=app_config['datastore_path'], version_tag=__version__) datastore = store.ChangeDetectionStore(datastore_path=app_config['datastore_path'], version_tag=__version__)
app = changedetection_app(app_config, datastore) app = changedetection_app(app_config, datastore)
signal.signal(signal.SIGTERM, sigterm_handler)
# Go into cleanup mode # Go into cleanup mode
if do_cleanup: if do_cleanup:
@@ -111,4 +129,3 @@ def main():
    else:
        eventlet.wsgi.server(eventlet.listen((host, int(port))), app)

View File

@@ -63,12 +63,12 @@ class Fetcher():
                break;
            }
            if('' !==r.id) {
-                chained_css.unshift("#"+r.id);
-                final_selector= chained_css.join('>');
+                chained_css.unshift("#"+CSS.escape(r.id));
+                final_selector= chained_css.join(' > ');
                // Be sure theres only one, some sites have multiples of the same ID tag :-(
                if (window.document.querySelectorAll(final_selector).length ==1 ) {
                    return final_selector;
                }
                return null;
            } else {
                chained_css.unshift(r.tagName.toLowerCase());

View File

@@ -308,6 +308,9 @@ class ValidateCSSJSONXPATHInput(object):
class quickWatchForm(Form):
    url = fields.URLField('URL', validators=[validateURL()])
    tag = StringField('Group tag', [validators.Optional()])
+    watch_submit_button = SubmitField('Watch', render_kw={"class": "pure-button pure-button-primary"})
+    edit_and_watch_submit_button = SubmitField('Edit > Watch', render_kw={"class": "pure-button pure-button-primary"})
# Common to a single watch and the global settings
class commonSettingsForm(Form):

View File

@@ -4,6 +4,8 @@ from typing import List
from bs4 import BeautifulSoup
from jsonpath_ng.ext import parse
import re
+from inscriptis import get_text
+from inscriptis.model.config import ParserConfig
class FilterNotFoundInResponse(ValueError):
    def __init__(self, msg):
@@ -50,8 +52,15 @@ def xpath_filter(xpath_filter, html_content):
    if len(html_content) > 0 and len(r) == 0:
        raise FilterNotFoundInResponse(xpath_filter)
-    for item in r:
-        html_block += etree.tostring(item, pretty_print=True).decode('utf-8') + "<br/>"
+    #@note: //title/text() wont work where <title>CDATA..
+    for element in r:
+        if type(element) == etree._ElementStringResult:
+            html_block += str(element) + "<br/>"
+        elif type(element) == etree._ElementUnicodeResult:
+            html_block += str(element) + "<br/>"
+        else:
+            html_block += etree.tostring(element, pretty_print=True).decode('utf-8') + "<br/>"
    return html_block
@@ -181,17 +190,10 @@ def strip_ignore_text(content, wordlist, mode="content"):
def html_to_text(html_content: str, render_anchor_tag_content=False) -> str:
-    import multiprocessing
-    from inscriptis.model.config import ParserConfig
    """Converts html string to a string with just the text. If ignoring
    rendering anchor tag content is enable, anchor tag content are also
    included in the text
-    @NOTE: HORRIBLE LXML INDUCED MEMORY LEAK WORKAROUND HERE
-    https://www.reddit.com/r/Python/comments/j0gl8t/psa_pythonlxml_memory_leaks_and_a_solution/
    :param html_content: string with html content
    :param render_anchor_tag_content: boolean flag indicating whether to extract
    hyperlinks (the anchor tag content) together with text. This refers to the
@@ -212,19 +214,8 @@ def html_to_text(html_content: str, render_anchor_tag_content=False) -> str:
    else:
        parser_config = None
-    def parse_function(html_content, parser_config, results_queue):
-        from inscriptis import get_text
-        # get text and annotations via inscriptis
-        text_content = get_text(html_content, config=parser_config)
-        results_queue.put(text_content)
-    results_queue = multiprocessing.Queue()
-    parse_process = multiprocessing.Process(target=parse_function, args=(html_content, parser_config, results_queue))
-    parse_process.daemon = True
-    parse_process.start()
-    text_content = results_queue.get() # blocks until results are available
-    parse_process.terminate()
+    # get text and annotations via inscriptis
+    text_content = get_text(html_content, config=parser_config)
    return text_content
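
The xpath_filter change above handles the fact that an XPath such as //title/text() yields string results rather than element nodes, and etree.tostring() cannot serialise those. A small standalone illustration of the distinction (not the project's code, sample markup made up):

from lxml import etree

doc = etree.fromstring("<rss><channel><title>Stock Alert (UK): RPi CM4</title></channel></rss>")

elements = doc.xpath("//title")         # element nodes -> serialisable with etree.tostring()
strings  = doc.xpath("//title/text()")  # lxml "smart strings" (_ElementUnicodeResult), not nodes

print(etree.tostring(elements[0]))  # b'<title>Stock Alert (UK): RPi CM4</title>'
print(str(strings[0]))              # 'Stock Alert (UK): RPi CM4'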

View File

@@ -36,6 +36,7 @@ class model(dict):
        'notification_title': default_notification_title,
        'notification_body': default_notification_body,
        'notification_format': default_notification_format,
+        'notification_muted': False,
        'css_filter': '',
        'extract_text': [], # Extract text by regex after filters
        'subtractive_selectors': [],
@@ -172,13 +173,14 @@ class model(dict):
    # Iterate over all history texts and see if something new exists
    def lines_contain_something_unique_compared_to_history(self, lines=[]):
-        local_lines = [l.decode('utf-8').strip().lower() for l in lines]
+        local_lines = set([l.decode('utf-8').strip().lower() for l in lines])
        # Compare each lines (set) against each history text file (set) looking for something new..
+        existing_history = set({})
        for k, v in self.history.items():
-            alist = [line.decode('utf-8').strip().lower() for line in open(v, 'rb')]
-            res = set(alist) != set(local_lines)
-            if res:
-                return True
-        return False
+            alist = set([line.decode('utf-8').strip().lower() for line in open(v, 'rb')])
+            existing_history = existing_history.union(alist)
+        # Check that everything in local_lines(new stuff) already exists in existing_history - it should
+        # if not, something new happened
+        return not local_lines.issubset(existing_history)
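
The rewritten check compares the new snapshot against the union of every stored snapshot instead of against each history file on its own, so a line only counts as "new" if it has never appeared in any previous version. The set logic in isolation, with made-up sample data:

history_1 = {"price: 10", "in stock"}
history_2 = {"price: 12", "in stock"}
existing_history = history_1 | history_2          # union of all snapshots

latest = {"price: 12", "in stock"}
print(not latest.issubset(existing_history))      # False - nothing truly new

latest = {"price: 9", "in stock"}
print(not latest.issubset(existing_history))      # True - "price: 9" never seen before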

View File

@@ -78,7 +78,7 @@ def process_notification(n_object, datastore):
        n_title = n_title[0:payload_max_size]
        n_body = n_body[0:body_limit]
-    elif url.startswith('discord://') or url.startswith('https://discordapp.com/api/webhooks'):
+    elif url.startswith('discord://') or url.startswith('https://discordapp.com/api/webhooks') or url.startswith('https://discord.com/api'):
        # real limit is 2000, but minus some for extra metadata
        payload_max_size = 1700
        body_limit = max(0, payload_max_size - len(n_title))
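
For the Discord branch above, the body budget is whatever remains of the roughly 1700-character allowance after the title, never negative. A tiny illustration with made-up values:

payload_max_size = 1700
n_title = "ChangeDetection.io notification - https://example.com/pricing"
body_limit = max(0, payload_max_size - len(n_title))       # characters left for the body
n_body = ("very long notification body ... " * 200)[0:body_limit]
print(len(n_title), body_limit, len(n_body))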

View File

@@ -0,0 +1,42 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<svg
width="15"
height="16.363636"
viewBox="0 0 15 16.363636"
version="1.1"
id="svg4"
sodipodi:docname="bell-off.svg"
inkscape:version="1.1.1 (1:1.1+202109281949+c3084ef5ed)"
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
xmlns="http://www.w3.org/2000/svg"
xmlns:svg="http://www.w3.org/2000/svg">
<sodipodi:namedview
id="namedview5"
pagecolor="#ffffff"
bordercolor="#666666"
borderopacity="1.0"
inkscape:pageshadow="2"
inkscape:pageopacity="0.0"
inkscape:pagecheckerboard="0"
showgrid="false"
fit-margin-top="0"
fit-margin-left="0"
fit-margin-right="0"
fit-margin-bottom="0"
inkscape:zoom="28.416667"
inkscape:cx="-0.59824046"
inkscape:cy="12"
inkscape:window-width="1554"
inkscape:window-height="896"
inkscape:window-x="2095"
inkscape:window-y="107"
inkscape:window-maximized="0"
inkscape:current-layer="svg4" />
<defs
id="defs8" />
<path
d="m 14.318182,11.762045 v 1.1925 H 5.4102273 L 11.849318,7.1140909 C 12.234545,9.1561364 12.54,11.181818 14.318182,11.762045 Z m -6.7984093,4.601591 c 1.0759091,0 2.0256823,-0.955909 2.0256823,-2.045454 H 5.4545455 c 0,1.089545 0.9879545,2.045454 2.0652272,2.045454 z M 15,2.8622727 0.9177273,15.636136 0,14.627045 l 1.8443182,-1.6725 h -1.1625 v -1.1925 C 4.0070455,10.677273 2.1784091,4.5388636 5.3611364,2.6897727 5.8009091,2.4347727 6.0709091,1.9609091 6.0702273,1.4488636 v -0.00205 C 6.0702273,0.64772727 6.7104545,0 7.5,0 8.2895455,0 8.9297727,0.64772727 8.9297727,1.4468182 v 0.00205 C 8.9290909,1.9602319 9.199773,2.4354591 9.638864,2.6897773 10.364318,3.111141 10.827273,3.7568228 11.1525,4.5129591 L 14.085682,1.8531818 Z M 6.8181818,1.3636364 C 6.8181818,1.74 7.1236364,2.0454545 7.5,2.0454545 7.8763636,2.0454545 8.1818182,1.74 8.1818182,1.3636364 8.1818182,0.98795455 7.8763636,0.68181818 7.5,0.68181818 c -0.3763636,0 -0.6818182,0.30613637 -0.6818182,0.68181822 z"
id="path2"
style="fill:#f8321b;stroke-width:0.681818;fill-opacity:1" />
</svg>


View File

@@ -1,9 +1,7 @@
/*
 * -- BASE STYLES --
 * Most of these are inherited from Base, but I want to change a few.
- * nvm use v14.18.1
- * npm install
- * npm run build
+ * nvm use v14.18.1 && npm install && npm run build
 * or npm run watch
 */
body {
@@ -203,13 +201,18 @@ body:after, body:before {
  border-radius: 10px;
  margin-bottom: 1em; }
  #new-watch-form input {
-    width: auto !important;
-    display: inline-block; }
+    display: inline-block;
+    margin-bottom: 5px; }
  #new-watch-form .label {
    display: none; }
  #new-watch-form legend {
    color: #fff;
    font-weight: bold; }
+  #new-watch-form #watch-add-wrapper-zone > div {
+    display: inline-block; }
+  @media only screen and (max-width: 760px) {
+    #new-watch-form #watch-add-wrapper-zone #url {
+      width: 100%; } }
#diff-col {
  padding-left: 40px; }
@@ -268,11 +271,15 @@ footer {
#new-version-text a {
  color: #e07171; }
-.paused-state.state-False img {
-  opacity: 0.2; }
-.paused-state.state-False:hover img {
-  opacity: 0.8; }
+.watch-controls {
+  /* default */ }
+  .watch-controls .state-on img {
+    opacity: 0.8; }
+  .watch-controls img {
+    opacity: 0.2; }
+    .watch-controls img:hover {
+      transition: opacity 0.3s;
+      opacity: 0.8; }
.monospaced-textarea textarea {
  width: 100%;

View File

@@ -1,9 +1,7 @@
/*
 * -- BASE STYLES --
 * Most of these are inherited from Base, but I want to change a few.
- * nvm use v14.18.1
- * npm install
- * npm run build
+ * nvm use v14.18.1 && npm install && npm run build
 * or npm run watch
 */
body {
@@ -269,8 +267,8 @@ body:after, body:before {
  border-radius: 10px;
  margin-bottom: 1em;
  input {
-    width: auto !important;
    display: inline-block;
+    margin-bottom: 5px;
  }
  .label {
    display: none;
@@ -279,6 +277,17 @@ body:after, body:before {
    color: #fff;
    font-weight: bold;
  }
+  #watch-add-wrapper-zone {
+    > div {
+      display: inline-block;
+    }
+    @media only screen and (max-width: 760px) {
+      #url {
+        width: 100%;
+      }
+    }
+  }
}
@@ -353,14 +362,25 @@ footer {
  color: #e07171;
}
-.paused-state {
-  &.state-False img {
-    opacity: 0.2;
-  }
-  &.state-False:hover img {
-    opacity: 0.8;
-  }
-}
+.watch-controls {
+  .state-on {
+    img {
+      opacity: 0.8;
+    }
+  }
+  /* default */
+  img {
+    opacity: 0.2;
+  }
+  img {
+    &:hover {
+      transition: opacity 0.3s;
+      opacity: 0.8;
+    }
+  }
+}
.monospaced-textarea {

View File

@@ -33,7 +33,7 @@
<div class="box-wrap inner"> <div class="box-wrap inner">
<form class="pure-form pure-form-stacked" <form class="pure-form pure-form-stacked"
action="{{ url_for('edit_page', uuid=uuid, next = request.args.get('next') ) }}" method="POST"> action="{{ url_for('edit_page', uuid=uuid, next = request.args.get('next'), unpause_on_save = request.args.get('unpause_on_save')) }}" method="POST">
<input type="hidden" name="csrf_token" value="{{ csrf_token() }}"/> <input type="hidden" name="csrf_token" value="{{ csrf_token() }}"/>
<div class="tab-pane-inner" id="general"> <div class="tab-pane-inner" id="general">
@@ -163,15 +163,26 @@ User-Agent: wonderbra 1.0") }}
                </div>
            </fieldset>
            <div class="pure-control-group">
-                {{ render_field(form.css_filter, placeholder=".class-name or #some-id, or other CSS selector rule.",
-                    class="m-d") }}
+                {% set field = render_field(form.css_filter,
+                    placeholder=".class-name or #some-id, or other CSS selector rule.",
+                    class="m-d")
+                %}
+                {{ field }}
+                {% if '/text()' in field %}
+                <span class="pure-form-message-inline"><strong>Note!: //text() function does not work where the &lt;element&gt; contains &lt;![CDATA[]]&gt;</strong></span><br/>
+                {% endif %}
                <span class="pure-form-message-inline">
                    <ul>
                        <li>CSS - Limit text to this CSS rule, only text matching this CSS rule is included.</li>
                        <li>JSON - Limit text to this JSON rule, using <a href="https://pypi.org/project/jsonpath-ng/">JSONPath</a>, prefix with <code>"json:"</code>, use <code>json:$</code> to force re-formatting if required, <a
                            href="https://jsonpath.com/" target="new">test your JSONPath here</a></li>
-                        <li>XPath - Limit text to this XPath rule, simply start with a forward-slash, example <code>//*[contains(@class, 'sametext')]</code> or <code>xpath://*[contains(@class, 'sametext')]</code>, <a
-                            href="http://xpather.com/" target="new">test your XPath here</a></li>
+                        <li>XPath - Limit text to this XPath rule, simply start with a forward-slash,
+                            <ul>
+                                <li>Example: <code>//*[contains(@class, 'sametext')]</code> or <code>xpath://*[contains(@class, 'sametext')]</code>, <a
+                                    href="http://xpather.com/" target="new">test your XPath here</a></li>
+                                <li>Example: Get all titles from an RSS feed <code>//title/text()</code></li>
+                            </ul>
+                        </li>
                    </ul>
                    Please be sure that you thoroughly understand how to write CSS or JSONPath, XPath selector rules before filing an issue on GitHub! <a
                    href="https://github.com/dgtlmoon/changedetection.io/wiki/CSS-Selector-help">here for more CSS selector help</a>.<br/>

View File

@@ -1,18 +1,25 @@
{% extends 'base.html' %}
{% block content %}
-{% from '_helpers.jinja' import render_simple_field %}
+{% from '_helpers.jinja' import render_simple_field, render_field %}
<script type="text/javascript" src="{{url_for('static_content', group='js', filename='jquery-3.6.0.min.js')}}"></script>
<script type="text/javascript" src="{{url_for('static_content', group='js', filename='watch-overview.js')}}" defer></script>
<div class="box">
-    <form class="pure-form" action="{{ url_for('form_watch_add') }}" method="POST" id="new-watch-form">
+    <form class="pure-form" action="{{ url_for('form_quick_watch_add') }}" method="POST" id="new-watch-form">
        <input type="hidden" name="csrf_token" value="{{ csrf_token() }}"/>
        <fieldset>
            <legend>Add a new change detection watch</legend>
-            {{ render_simple_field(form.url, placeholder="https://...", required=true) }}
-            {{ render_simple_field(form.tag, value=active_tag if active_tag else '', placeholder="watch group") }}
-            <button type="submit" class="pure-button pure-button-primary">Watch</button>
+            <div id="watch-add-wrapper-zone">
+                <div>
+                    {{ render_simple_field(form.url, placeholder="https://...", required=true) }}
+                    {{ render_simple_field(form.tag, value=active_tag if active_tag else '', placeholder="watch group") }}
+                </div>
+                <div>
+                    {{ render_simple_field(form.watch_submit_button, title="Watch this URL!" ) }}
+                    {{ render_simple_field(form.edit_and_watch_submit_button, title="Edit first then Watch") }}
+                </div>
+            </div>
        </fieldset>
        <span style="color:#eee; font-size: 80%;"><img style="height: 1em;display:inline-block;" src="{{url_for('static_content', group='images', filename='spread-white.svg')}}" /> Tip: You can also add 'shared' watches. <a href="https://github.com/dgtlmoon/changedetection.io/wiki/Sharing-a-Watch">More info</a></a></span>
    </form>
@@ -49,8 +56,10 @@
                        {% if watch.newest_history_key| int > watch.last_viewed and watch.history_n>=2 %}unviewed{% endif %}
                        {% if watch.uuid in queued_uuids %}queued{% endif %}">
                    <td class="inline">{{ loop.index }}</td>
-                    <td class="inline paused-state state-{{watch.paused}}"><a href="{{url_for('index', pause=watch.uuid, tag=active_tag)}}"><img src="{{url_for('static_content', group='images', filename='pause.svg')}}" alt="Pause" title="Pause"/></a></td>
+                    <td class="inline watch-controls">
+                        <a class="state-{{'on' if watch.paused }}" href="{{url_for('index', op='pause', uuid=watch.uuid, tag=active_tag)}}"><img src="{{url_for('static_content', group='images', filename='pause.svg')}}" alt="Pause checks" title="Pause checks"/></a>
+                        <a class="state-{{'on' if watch.notification_muted}}" href="{{url_for('index', op='mute', uuid=watch.uuid, tag=active_tag)}}"><img src="{{url_for('static_content', group='images', filename='bell-off.svg')}}" alt="Mute notifications" title="Mute notifications"/></a>
+                    </td>
                    <td class="title-col inline">{{watch.title if watch.title is not none and watch.title|length > 0 else watch.url}}
                        <a class="external" target="_blank" rel="noopener" href="{{ watch.url.replace('source:','') }}"></a>
                        <a href="{{url_for('form_share_put_watch', uuid=watch.uuid)}}"><img style="height: 1em;display:inline-block;" src="{{url_for('static_content', group='images', filename='spread.svg')}}" /></a>

View File

@@ -90,6 +90,14 @@ def test_check_basic_change_detection_functionality(client, live_server):
res = client.get(url_for("diff_history_page", uuid="first")) res = client.get(url_for("diff_history_page", uuid="first"))
assert b'Compare newest' in res.data assert b'Compare newest' in res.data
# Check the [preview] pulls the right one
res = client.get(
url_for("preview_page", uuid="first"),
follow_redirects=True
)
assert b'which has this one new line' in res.data
assert b'Which is across multiple lines' not in res.data
time.sleep(2) time.sleep(2)
# Do this a few times.. ensures we dont accidently set the status # Do this a few times.. ensures we dont accidently set the status

View File

@@ -28,13 +28,9 @@ def test_error_handler(client, live_server):
    )
    assert b"1 Imported" in res.data
-    # Trigger a check
-    client.get(url_for("form_watch_checknow"), follow_redirects=True)
    # Give the thread time to pick it up
    time.sleep(3)
    res = client.get(url_for("index"))
    assert b'unviewed' not in res.data
    assert b'Status Code 403' in res.data
@@ -53,9 +49,6 @@ def test_error_text_handler(client, live_server):
    )
    assert b"1 Imported" in res.data
-    # Trigger a check
-    client.get(url_for("form_watch_checknow"), follow_redirects=True)
    # Give the thread time to pick it up
    time.sleep(3)

View File

@@ -0,0 +1,134 @@
#!/usr/bin/python3
# https://www.reddit.com/r/selfhosted/comments/wa89kp/comment/ii3a4g7/?context=3
import os
import time
from flask import url_for
from .util import set_original_response, live_server_setup
from changedetectionio.model import App

def set_response_without_filter():
    test_return_data = """<html>
    <body>
    Some initial text</br>
    <p>Which is across multiple lines</p>
    </br>
    So let's see what happens. </br>
    <div id="nope-doesnt-exist">Some text thats the same</div>
    </body>
    </html>
    """
    with open("test-datastore/endpoint-content.txt", "w") as f:
        f.write(test_return_data)
    return None

def set_response_with_filter():
    test_return_data = """<html>
    <body>
    Some initial text</br>
    <p>Which is across multiple lines</p>
    </br>
    So let's see what happens. </br>
    <div class="ticket-available">Ticket now on sale!</div>
    </body>
    </html>
    """
    with open("test-datastore/endpoint-content.txt", "w") as f:
        f.write(test_return_data)
    return None

def test_filter_doesnt_exist_then_exists_should_get_notification(client, live_server):
    # Filter knowingly doesn't exist, like someone setting up a known filter to see if some cinema tickets are on sale again
    # And the page has that filter available
    # Then I should get a notification
    live_server_setup(live_server)

    # Give the endpoint time to spin up
    time.sleep(1)
    set_response_without_filter()

    # Add our URL to the import page
    test_url = url_for('test_endpoint', _external=True)
    res = client.post(
        url_for("form_quick_watch_add"),
        data={"url": test_url, "tag": 'cinema'},
        follow_redirects=True
    )
    assert b"Watch added" in res.data

    # Give the thread time to pick up the first version
    time.sleep(3)

    # Goto the edit page, add our ignore text
    # Add our URL to the import page
    url = url_for('test_notification_endpoint', _external=True)
    notification_url = url.replace('http', 'json')

    print(">>>> Notification URL: " + notification_url)

    # Just a regular notification setting, this will be used by the special 'filter not found' notification
    notification_form_data = {"notification_urls": notification_url,
                              "notification_title": "New ChangeDetection.io Notification - {watch_url}",
                              "notification_body": "BASE URL: {base_url}\n"
                                                   "Watch URL: {watch_url}\n"
                                                   "Watch UUID: {watch_uuid}\n"
                                                   "Watch title: {watch_title}\n"
                                                   "Watch tag: {watch_tag}\n"
                                                   "Preview: {preview_url}\n"
                                                   "Diff URL: {diff_url}\n"
                                                   "Snapshot: {current_snapshot}\n"
                                                   "Diff: {diff}\n"
                                                   "Diff Full: {diff_full}\n"
                                                   ":-)",
                              "notification_format": "Text"}

    notification_form_data.update({
        "url": test_url,
        "tag": "my tag",
        "title": "my title",
        "headers": "",
        "css_filter": '.ticket-available',
        "fetch_backend": "html_requests"})

    res = client.post(
        url_for("edit_page", uuid="first"),
        data=notification_form_data,
        follow_redirects=True
    )
    assert b"Updated watch." in res.data
    time.sleep(3)

    # Shouldn't exist, shouldn't have fired
    assert not os.path.isfile("test-datastore/notification.txt")
    # Now the filter should exist
    set_response_with_filter()
    client.get(url_for("form_watch_checknow"), follow_redirects=True)
    time.sleep(3)
    assert os.path.isfile("test-datastore/notification.txt")

    with open("test-datastore/notification.txt", 'r') as f:
        notification = f.read()
    assert 'Ticket now on sale' in notification
    os.unlink("test-datastore/notification.txt")

    # Test that if it gets removed, then re-added, we get a notification
    # Remove the target and re-add it, we should get a new notification
    set_response_without_filter()
    client.get(url_for("form_watch_checknow"), follow_redirects=True)
    time.sleep(3)
    assert not os.path.isfile("test-datastore/notification.txt")

    set_response_with_filter()
    client.get(url_for("form_watch_checknow"), follow_redirects=True)
    time.sleep(3)
    assert os.path.isfile("test-datastore/notification.txt")

    # Also test that the filter was updated after the first one was requested

View File

@@ -26,14 +26,22 @@ def run_filter_test(client, content_filter):
    # Give the endpoint time to spin up
    time.sleep(1)
+    # cleanup for the next
+    client.get(
+        url_for("form_delete", uuid="all"),
+        follow_redirects=True
+    )
+    if os.path.isfile("test-datastore/notification.txt"):
+        os.unlink("test-datastore/notification.txt")
    # Add our URL to the import page
    test_url = url_for('test_endpoint', _external=True)
    res = client.post(
-        url_for("form_watch_add"),
+        url_for("form_quick_watch_add"),
        data={"url": test_url, "tag": ''},
        follow_redirects=True
    )
    assert b"Watch added" in res.data
    # Give the thread time to pick up the first version
@@ -67,6 +75,7 @@ def run_filter_test(client, content_filter):
"tag": "my tag", "tag": "my tag",
"title": "my title", "title": "my title",
"headers": "", "headers": "",
"filter_failure_notification_send": 'y',
"css_filter": content_filter, "css_filter": content_filter,
"fetch_backend": "html_requests"}) "fetch_backend": "html_requests"})
@@ -86,7 +95,7 @@ def run_filter_test(client, content_filter):
    time.sleep(3)
    # We should see something in the frontend
-    assert b'Did the page change its layout' in res.data
+    assert b'Warning, filter' in res.data
    # Now it should exist and contain our "filter not found" alert
    assert os.path.isfile("test-datastore/notification.txt")
@@ -132,3 +141,4 @@ def test_check_xpath_filter_failure_notification(client, live_server):
    time.sleep(1)
    run_filter_test(client, '//*[@id="nope-doesnt-exist"]')
+    # Test that notification is never sent

View File

@@ -36,7 +36,7 @@ def test_check_notification(client, live_server):
    # Add our URL to the import page
    test_url = url_for('test_endpoint', _external=True)
    res = client.post(
-        url_for("form_watch_add"),
+        url_for("form_quick_watch_add"),
        data={"url": test_url, "tag": ''},
        follow_redirects=True
    )
@@ -172,7 +172,7 @@ def test_notification_validation(client, live_server):
    # Add our URL to the import page
    test_url = url_for('test_endpoint', _external=True)
    res = client.post(
-        url_for("form_watch_add"),
+        url_for("form_quick_watch_add"),
        data={"url": test_url, "tag": 'nice one'},
        follow_redirects=True
    )

View File

@@ -16,7 +16,7 @@ def test_check_notification_error_handling(client, live_server):
    # use a different URL so that it doesnt interfere with the actual check until we are ready
    test_url = url_for('test_endpoint', _external=True)
    res = client.post(
-        url_for("form_watch_add"),
+        url_for("form_quick_watch_add"),
        data={"url": "https://changedetection.io/CHANGELOG.txt", "tag": ''},
        follow_redirects=True
    )

View File

@@ -86,6 +86,7 @@ def test_check_xpath_filter_utf8(client, live_server):
        follow_redirects=True
    )
    assert b"1 Imported" in res.data
+    time.sleep(1)
    res = client.post(
        url_for("edit_page", uuid="first"),
        data={"css_filter": filter, "url": test_url, "tag": "", "headers": "", 'fetch_backend': "html_requests"},
@@ -99,6 +100,68 @@ def test_check_xpath_filter_utf8(client, live_server):
assert b'Deleted' in res.data assert b'Deleted' in res.data
# Handle utf-8 charset replies https://github.com/dgtlmoon/changedetection.io/pull/613
def test_check_xpath_text_function_utf8(client, live_server):
filter='//item/title/text()'
d='''<?xml version="1.0" encoding="UTF-8"?>
<rss xmlns:taxo="http://purl.org/rss/1.0/modules/taxonomy/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:itunes="http://www.itunes.com/dtds/podcast-1.0.dtd" xmlns:dc="http://purl.org/dc/elements/1.1/" version="2.0">
<channel>
<title>rpilocator.com</title>
<link>https://rpilocator.com</link>
<description>Find Raspberry Pi Computers in Stock</description>
<lastBuildDate>Thu, 19 May 2022 23:27:30 GMT</lastBuildDate>
<image>
<url>https://rpilocator.com/favicon.png</url>
<title>rpilocator.com</title>
<link>https://rpilocator.com/</link>
<width>32</width>
<height>32</height>
</image>
<item>
<title>Stock Alert (UK): RPi CM4</title>
<foo>something else unrelated</foo>
</item>
<item>
<title>Stock Alert (UK): Big monitor</title>
<foo>something else unrelated</foo>
</item>
</channel>
</rss>'''
with open("test-datastore/endpoint-content.txt", "w") as f:
f.write(d)
# Add our URL to the import page
test_url = url_for('test_endpoint', _external=True, content_type="application/rss+xml;charset=UTF-8")
res = client.post(
url_for("import_page"),
data={"urls": test_url},
follow_redirects=True
)
assert b"1 Imported" in res.data
time.sleep(1)
res = client.post(
url_for("edit_page", uuid="first"),
data={"css_filter": filter, "url": test_url, "tag": "", "headers": "", 'fetch_backend': "html_requests"},
follow_redirects=True
)
assert b"Updated watch." in res.data
time.sleep(3)
res = client.get(url_for("index"))
assert b'Unicode strings with encoding declaration are not supported.' not in res.data
# The service should echo back the request headers
res = client.get(
url_for("preview_page", uuid="first"),
follow_redirects=True
)
assert b'<div class="">Stock Alert (UK): RPi CM4' in res.data
assert b'<div class="">Stock Alert (UK): Big monitor' in res.data
res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
assert b'Deleted' in res.data
def test_check_markup_xpath_filter_restriction(client, live_server): def test_check_markup_xpath_filter_restriction(client, live_server):
sleep_time_for_fetch_thread = 3 sleep_time_for_fetch_thread = 3
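
Editor's note on the hunk above: the new test exercises the XPath text() support added in these commits (selectors like //title/text() for RSS feeds). As an illustrative aside only, the snippet below sketches why text() needs its own handling, using lxml directly; the variable names and RSS fragment are invented for the example and this is not the project's actual filter code (which sits behind changedetectionio.html_tools, imported in the worker diff further down).

# Illustrative only: shows why an XPath text() selector needs special handling.
# The names and the RSS fragment are hypothetical, not changedetection.io's filter code.
from lxml import etree

rss = b"""<?xml version="1.0" encoding="UTF-8"?>
<rss version="2.0"><channel>
  <item><title>Stock Alert (UK): RPi CM4</title></item>
  <item><title>Stock Alert (UK): Big monitor</title></item>
</channel></rss>"""

# lxml refuses *unicode* strings that carry an encoding declaration
# ("Unicode strings with encoding declaration are not supported"), which is why the
# test asserts that message never leaks into the UI - parsing bytes avoids it.
tree = etree.fromstring(rss)

# "//item/title" yields Element objects, but "//item/title/text()" yields plain strings,
# so the extraction code has to accept both result types.
for result in tree.xpath('//item/title/text()'):
    print(result)  # "Stock Alert (UK): RPi CM4", then "Stock Alert (UK): Big monitor"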


@@ -7,25 +7,84 @@ from changedetectionio.html_tools import FilterNotFoundInResponse
 # A single update worker
 #
-#
+# Requests for checking on a single site(watch) from a queue of watches
+# (another process inserts watches into the queue that are time-ready for checking)
 
 class update_worker(threading.Thread):
     current_uuid = None
 
-    def __init__(self, q, notification_q, app, datastore, uuid, *args, **kwargs):
+    def __init__(self, q, notification_q, app, datastore, *args, **kwargs):
         self.q = q
         self.app = app
         self.notification_q = notification_q
         self.datastore = datastore
-        self.current_uuid = uuid
         super().__init__(*args, **kwargs)
-        self.name = "update_worker"
 
-    def send_filter_failure_notification(self, uuid):
+    def send_content_changed_notification(self, t, watch_uuid):
+
+        from changedetectionio import diff
+
+        n_object = {}
+        watch = self.datastore.data['watching'].get(watch_uuid, False)
+        if not watch:
+            return
+
+        watch_history = watch.history
+        dates = list(watch_history.keys())
+        # Theoretically it's possible that this could be just 1 long,
+        # - In the case that the timestamp key was not unique
+        if len(dates) == 1:
+            raise ValueError(
+                "History index had 2 or more, but only 1 date loaded, timestamps were not unique? maybe two of the same timestamps got written, needs more delay?"
+            )
+
+        # Did it have any notification alerts to hit?
+        if len(watch['notification_urls']):
+            print(">>> Notifications queued for UUID from watch {}".format(watch_uuid))
+            n_object['notification_urls'] = watch['notification_urls']
+            n_object['notification_title'] = watch['notification_title']
+            n_object['notification_body'] = watch['notification_body']
+            n_object['notification_format'] = watch['notification_format']
+
+        # No? maybe theres a global setting, queue them all
+        elif len(self.datastore.data['settings']['application']['notification_urls']):
+            print(">>> Watch notification URLs were empty, using GLOBAL notifications for UUID: {}".format(watch_uuid))
+            n_object['notification_urls'] = self.datastore.data['settings']['application']['notification_urls']
+            n_object['notification_title'] = self.datastore.data['settings']['application']['notification_title']
+            n_object['notification_body'] = self.datastore.data['settings']['application']['notification_body']
+            n_object['notification_format'] = self.datastore.data['settings']['application']['notification_format']
+        else:
+            print(">>> NO notifications queued, watch and global notification URLs were empty.")
+
+        # Only prepare to notify if the rules above matched
+        if 'notification_urls' in n_object:
+            # HTML needs linebreak, but MarkDown and Text can use a linefeed
+            if n_object['notification_format'] == 'HTML':
+                line_feed_sep = "</br>"
+            else:
+                line_feed_sep = "\n"
+
+            snapshot_contents = ''
+            with open(watch_history[dates[-1]], 'rb') as f:
+                snapshot_contents = f.read()
+
+            n_object.update({
+                'watch_url': watch['url'],
+                'uuid': watch_uuid,
+                'current_snapshot': snapshot_contents.decode('utf-8'),
+                'diff': diff.render_diff(watch_history[dates[-2]], watch_history[dates[-1]], line_feed_sep=line_feed_sep),
+                'diff_full': diff.render_diff(watch_history[dates[-2]], watch_history[dates[-1]], True, line_feed_sep=line_feed_sep)
+            })
+
+            self.notification_q.put(n_object)
+
+    def send_filter_failure_notification(self, watch_uuid):
         threshold = self.datastore.data['settings']['application'].get('filter_failure_notification_threshold_attempts')
-        watch = self.datastore.data['watching'].get(uuid, False)
+        watch = self.datastore.data['watching'].get(watch_uuid, False)
+        if not watch:
+            return
+
         n_object = {'notification_title': 'Changedetection.io - Alert - CSS/xPath filter was not present in the page',
                     'notification_body': "Your configured CSS/xPath filter of '{}' for {{watch_url}} did not appear on the page after {} attempts, did the page change layout?\n\nLink: {{base_url}}/edit/{{watch_uuid}}\n\nThanks - Your omniscient changedetection.io installation :)\n".format(
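
Editor's note between the two hunks of this worker diff: the send_content_changed_notification() added above prefers the watch's own notification URLs and only falls back to the application-wide ("global") URLs when the watch has none, queuing nothing if both are empty. A rough, hypothetical sketch of just that fall-through (simplified names, not the project's API) follows.

# Hypothetical, simplified sketch of the watch-level -> global notification fall-through
# implemented by send_content_changed_notification() in the hunk above.
from typing import List, Optional

def pick_notification_urls(watch_urls: List[str], global_urls: List[str]) -> Optional[List[str]]:
    if watch_urls:          # the watch has its own notification targets
        return watch_urls
    if global_urls:         # otherwise fall back to the application-wide setting
        return global_urls
    return None             # neither configured: nothing is queued at all

assert pick_notification_urls(["discord://example"], ["mailto://ops@example.com"]) == ["discord://example"]
assert pick_notification_urls([], ["mailto://ops@example.com"]) == ["mailto://ops@example.com"]
assert pick_notification_urls([], []) is None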
@@ -43,175 +102,146 @@ class update_worker(threading.Thread):
         if 'notification_urls' in n_object:
             n_object.update({
                 'watch_url': watch['url'],
-                'uuid': uuid
+                'uuid': watch_uuid
             })
             self.notification_q.put(n_object)
-            print("Sent filter not found notification for {}".format(uuid))
+            print("Sent filter not found notification for {}".format(watch_uuid))
 
-    # Pick one job off the list, process it threaded, exist
     def run(self):
-        # Go talk to the website
-        self.perform_site_update()
-
-        self.current_uuid = None # Done
-        self.q.task_done()
-
-        # Let the thread die after processing 1
-        # We will launch nice juicy fresh threads every time to prevent memory leaks in complex runner code (playwright etc)
-        print ("EXITING THREAD!")
-        self.app.config.exit.wait(1)
-        return
-
-    def perform_site_update(self):
         from changedetectionio import fetch_site_status
 
-        if not self.current_uuid in list(self.datastore.data['watching'].keys()):
-            return
-
-        changed_detected = False
-        contents = ""
-        screenshot = False
-        update_obj= {}
-        xpath_data = False
-        now = time.time()
-
         update_handler = fetch_site_status.perform_site_check(datastore=self.datastore)
 
-        try:
-            changed_detected, update_obj, contents, screenshot, xpath_data = update_handler.run(self.current_uuid)
-            # Re #342
-            # In Python 3, all strings are sequences of Unicode characters. There is a bytes type that holds raw bytes.
-            # We then convert/.decode('utf-8') for the notification etc
-            if not isinstance(contents, (bytes, bytearray)):
-                raise Exception("Error - returned data from the fetch handler SHOULD be bytes")
-        except PermissionError as e:
-            self.app.logger.error("File permission error updating", self.current_uuid, str(e))
-        except content_fetcher.ReplyWithContentButNoText as e:
-            # Totally fine, it's by choice - just continue on, nothing more to care about
-            # Page had elements/content but no renderable text
-            self.datastore.update_watch(uuid=self.current_uuid, update_obj={'last_error': "Got HTML content but no text found."})
-        except FilterNotFoundInResponse as e:
-            err_text = "Filter '{}' not found - Did the page change its layout?".format(str(e))
-            c = 0
-            if self.datastore.data['watching'].get(self.current_uuid, False):
-                c = self.datastore.data['watching'][self.current_uuid].get('consecutive_filter_failures', 5)
-                c += 1
-
-            # Send notification if we reached the threshold?
-            threshold = self.datastore.data['settings']['application'].get('filter_failure_notification_threshold_attempts', 0)
-            print("Filter for {} not found, consecutive_filter_failures: {}".format(self.current_uuid, c))
-            if threshold >0 and c >= threshold:
-                self.send_filter_failure_notification(self.current_uuid)
-                c = 0
-
-            self.datastore.update_watch(uuid=self.current_uuid, update_obj={'last_error': err_text,
-                                                                            'consecutive_filter_failures': c})
-        except content_fetcher.EmptyReply as e:
-            # Some kind of custom to-str handler in the exception handler that does this?
-            err_text = "EmptyReply - try increasing 'Wait seconds before extracting text', Status Code {}".format(e.status_code)
-            self.datastore.update_watch(uuid=self.current_uuid, update_obj={'last_error': err_text,
-                                                                            'last_check_status': e.status_code})
-        except content_fetcher.ScreenshotUnavailable as e:
-            err_text = "Screenshot unavailable, page did not render fully in the expected time - try increasing 'Wait seconds before extracting text'"
-            self.datastore.update_watch(uuid=self.current_uuid, update_obj={'last_error': err_text,
-                                                                            'last_check_status': e.status_code})
-        except content_fetcher.PageUnloadable as e:
-            err_text = "Page request from server didnt respond correctly"
-            self.datastore.update_watch(uuid=self.current_uuid, update_obj={'last_error': err_text,
-                                                                            'last_check_status': e.status_code})
-        except Exception as e:
-            self.app.logger.error("Exception reached processing watch UUID: %s - %s", self.current_uuid, str(e))
-            self.datastore.update_watch(uuid=self.current_uuid, update_obj={'last_error': str(e)})
-        else:
+        while not self.app.config.exit.is_set():
+
             try:
-                watch = self.datastore.data['watching'][self.current_uuid]
-                fname = "" # Saved history text filename
-
-                # For the FIRST time we check a site, or a change detected, save the snapshot.
-                if changed_detected or not watch['last_checked']:
-                    # A change was detected
-                    fname = watch.save_history_text(contents=contents, timestamp=str(round(time.time())))
-
-                # Generally update anything interesting returned
-                update_obj['consecutive_filter_failures'] = 0
-                self.datastore.update_watch(uuid=self.current_uuid, update_obj=update_obj)
-
-                # A change was detected
-                if changed_detected:
-                    n_object = {}
-                    print (">> Change detected in UUID {} - {}".format(self.current_uuid, watch['url']))
-
-                    # Notifications should only trigger on the second time (first time, we gather the initial snapshot)
-                    if watch.history_n >= 2:
-                        # Atleast 2, means there really was a change
-                        self.datastore.update_watch(uuid=self.current_uuid, update_obj={'last_changed': round(now)})
-
-                        watch_history = watch.history
-                        dates = list(watch_history.keys())
-                        # Theoretically it's possible that this could be just 1 long,
-                        # - In the case that the timestamp key was not unique
-                        if len(dates) == 1:
-                            raise ValueError(
-                                "History index had 2 or more, but only 1 date loaded, timestamps were not unique? maybe two of the same timestamps got written, needs more delay?"
-                            )
-                        prev_fname = watch_history[dates[-2]]
-
-                        # Did it have any notification alerts to hit?
-                        if len(watch['notification_urls']):
-                            print(">>> Notifications queued for UUID from watch {}".format(self.current_uuid))
-                            n_object['notification_urls'] = watch['notification_urls']
-                            n_object['notification_title'] = watch['notification_title']
-                            n_object['notification_body'] = watch['notification_body']
-                            n_object['notification_format'] = watch['notification_format']
-
-                        # No? maybe theres a global setting, queue them all
-                        elif len(self.datastore.data['settings']['application']['notification_urls']):
-                            print(">>> Watch notification URLs were empty, using GLOBAL notifications for UUID: {}".format(self.current_uuid))
-                            n_object['notification_urls'] = self.datastore.data['settings']['application']['notification_urls']
-                            n_object['notification_title'] = self.datastore.data['settings']['application']['notification_title']
-                            n_object['notification_body'] = self.datastore.data['settings']['application']['notification_body']
-                            n_object['notification_format'] = self.datastore.data['settings']['application']['notification_format']
-                        else:
-                            print(">>> NO notifications queued, watch and global notification URLs were empty.")
-
-                        # Only prepare to notify if the rules above matched
-                        if 'notification_urls' in n_object:
-                            # HTML needs linebreak, but MarkDown and Text can use a linefeed
-                            if n_object['notification_format'] == 'HTML':
-                                line_feed_sep = "</br>"
-                            else:
-                                line_feed_sep = "\n"
-
-                            from changedetectionio import diff
-                            n_object.update({
-                                'watch_url': watch['url'],
-                                'uuid': self.current_uuid,
-                                'current_snapshot': contents.decode('utf-8'),
-                                'diff': diff.render_diff(prev_fname, fname, line_feed_sep=line_feed_sep),
-                                'diff_full': diff.render_diff(prev_fname, fname, True, line_feed_sep=line_feed_sep)
-                            })
-
-                            self.notification_q.put(n_object)
-
-            except Exception as e:
-                # Catch everything possible here, so that if a worker crashes, we don't lose it until restart!
-                print("!!!! Exception in update_worker !!!\n", e)
-                self.app.logger.error("Exception reached processing watch UUID: %s - %s", self.current_uuid, str(e))
-                self.datastore.update_watch(uuid=self.current_uuid, update_obj={'last_error': str(e)})
-
-            finally:
-                # Always record that we atleast tried
-                self.datastore.update_watch(uuid=self.current_uuid, update_obj={'fetch_time': round(time.time() - now, 3),
-                                                                                'last_checked': round(time.time())})
-
-        # Always save the screenshot if it's available
-        if screenshot:
-            self.datastore.save_screenshot(watch_uuid=self.current_uuid, screenshot=screenshot)
-        if xpath_data:
-            self.datastore.save_xpath_data(watch_uuid=self.current_uuid, data=xpath_data)
+                priority, uuid = self.q.get(block=False)
+            except queue.Empty:
+                pass
+
+            else:
+                self.current_uuid = uuid
+
+                if uuid in list(self.datastore.data['watching'].keys()):
+                    changed_detected = False
+                    contents = b''
+                    screenshot = False
+                    update_obj= {}
+                    xpath_data = False
+                    process_changedetection_results = True
+                    print("> Processing UUID {} Priority {} URL {}".format(uuid, priority, self.datastore.data['watching'][uuid]['url']))
+                    now = time.time()
+
+                    try:
+                        changed_detected, update_obj, contents, screenshot, xpath_data = update_handler.run(uuid)
+                        # Re #342
+                        # In Python 3, all strings are sequences of Unicode characters. There is a bytes type that holds raw bytes.
+                        # We then convert/.decode('utf-8') for the notification etc
+                        if not isinstance(contents, (bytes, bytearray)):
+                            raise Exception("Error - returned data from the fetch handler SHOULD be bytes")
+                    except PermissionError as e:
+                        self.app.logger.error("File permission error updating", uuid, str(e))
+                        process_changedetection_results = False
+                    except content_fetcher.ReplyWithContentButNoText as e:
+                        # Totally fine, it's by choice - just continue on, nothing more to care about
+                        # Page had elements/content but no renderable text
+                        # Backend (not filters) gave zero output
+                        self.datastore.update_watch(uuid=uuid, update_obj={'last_error': "Got HTML content but no text found."})
+                        process_changedetection_results = False
+                    except FilterNotFoundInResponse as e:
+                        err_text = "Warning, filter '{}' not found".format(str(e))
+                        self.datastore.update_watch(uuid=uuid, update_obj={'last_error': err_text,
+                                                                           # So that we get a trigger when the content is added again
+                                                                           'previous_md5': ''})
+
+                        # Only when enabled, send the notification
+                        if self.datastore.data['watching'][uuid].get('filter_failure_notification_send', False):
+                            c = self.datastore.data['watching'][uuid].get('consecutive_filter_failures', 5)
+                            c += 1
+                            # Send notification if we reached the threshold?
+                            threshold = self.datastore.data['settings']['application'].get('filter_failure_notification_threshold_attempts',
+                                                                                           0)
+                            print("Filter for {} not found, consecutive_filter_failures: {}".format(uuid, c))
+                            if threshold > 0 and c >= threshold:
+                                if not self.datastore.data['watching'][uuid].get('notification_muted'):
+                                    self.send_filter_failure_notification(uuid)
+                                c = 0
+
+                            self.datastore.update_watch(uuid=uuid, update_obj={'consecutive_filter_failures': c})
+
+                        process_changedetection_results = True
+
+                    except content_fetcher.EmptyReply as e:
+                        # Some kind of custom to-str handler in the exception handler that does this?
+                        err_text = "EmptyReply - try increasing 'Wait seconds before extracting text', Status Code {}".format(e.status_code)
+                        self.datastore.update_watch(uuid=uuid, update_obj={'last_error': err_text,
+                                                                           'last_check_status': e.status_code})
+                    except content_fetcher.ScreenshotUnavailable as e:
+                        err_text = "Screenshot unavailable, page did not render fully in the expected time - try increasing 'Wait seconds before extracting text'"
+                        self.datastore.update_watch(uuid=uuid, update_obj={'last_error': err_text,
+                                                                           'last_check_status': e.status_code})
+                        process_changedetection_results = False
+                    except content_fetcher.PageUnloadable as e:
+                        err_text = "Page request from server didnt respond correctly"
+                        self.datastore.update_watch(uuid=uuid, update_obj={'last_error': err_text,
+                                                                           'last_check_status': e.status_code})
+                    except Exception as e:
+                        self.app.logger.error("Exception reached processing watch UUID: %s - %s", uuid, str(e))
+                        self.datastore.update_watch(uuid=uuid, update_obj={'last_error': str(e)})
+                        # Other serious error
+                        process_changedetection_results = False
+                    else:
+                        # Mark that we never had any failures
+                        update_obj['consecutive_filter_failures'] = 0
+
+                    # Different exceptions mean that we may or may not want to bump the snapshot, trigger notifications etc
+                    if process_changedetection_results:
+                        try:
+                            watch = self.datastore.data['watching'][uuid]
+                            fname = "" # Saved history text filename
+
+                            # For the FIRST time we check a site, or a change detected, save the snapshot.
+                            if changed_detected or not watch['last_checked']:
+                                # A change was detected
+                                watch.save_history_text(contents=contents, timestamp=str(round(time.time())))
+
+                            self.datastore.update_watch(uuid=uuid, update_obj=update_obj)
+
+                            # A change was detected
+                            if changed_detected:
+                                print (">> Change detected in UUID {} - {}".format(uuid, watch['url']))
+
+                                # Notifications should only trigger on the second time (first time, we gather the initial snapshot)
+                                if watch.history_n >= 2:
+                                    # Atleast 2, means there really was a change
+                                    self.datastore.update_watch(uuid=uuid, update_obj={'last_changed': round(now)})
+                                    if not self.datastore.data['watching'][uuid].get('notification_muted'):
+                                        self.send_content_changed_notification(self, watch_uuid=uuid)
+
+                        except Exception as e:
+                            # Catch everything possible here, so that if a worker crashes, we don't lose it until restart!
+                            print("!!!! Exception in update_worker !!!\n", e)
+                            self.app.logger.error("Exception reached processing watch UUID: %s - %s", uuid, str(e))
+                            self.datastore.update_watch(uuid=uuid, update_obj={'last_error': str(e)})
+
+                        # Always record that we atleast tried
+                        self.datastore.update_watch(uuid=uuid, update_obj={'fetch_time': round(time.time() - now, 3),
+                                                                           'last_checked': round(time.time())})
+
+                    # Always save the screenshot if it's available
+                    if screenshot:
+                        self.datastore.save_screenshot(watch_uuid=uuid, screenshot=screenshot)
+                    if xpath_data:
+                        self.datastore.save_xpath_data(watch_uuid=uuid, data=xpath_data)
+
+                self.current_uuid = None # Done
+                self.q.task_done()
+
+                # Give the CPU time to interrupt
+                time.sleep(0.1)
+
+            self.app.config.exit.wait(1)
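
Editor's note on the hunk above: taken as a whole, the replacement run() turns each worker into a long-lived consumer. It polls a shared priority queue of (priority, uuid) tuples, handles one watch per iteration, and keeps checking app.config.exit so the new SIGTERM/SIGINT handling can stop it cleanly. The standalone sketch below illustrates that pattern under those assumptions; the names (check_site, EXIT, q) are invented for the example and it is not the project's code.

# Illustrative sketch of the long-lived worker pattern used above: a PriorityQueue of
# (priority, uuid) tuples, polled in a loop that also watches an exit Event.
import queue
import threading
import time

q = queue.PriorityQueue()
EXIT = threading.Event()

def check_site(uuid):
    print("checking", uuid)

def worker():
    while not EXIT.is_set():
        try:
            # Lower numbers are fetched first, so edited/added watches can jump the queue
            priority, uuid = q.get(block=False)
        except queue.Empty:
            pass
        else:
            check_site(uuid)
            q.task_done()
            time.sleep(0.1)   # give the CPU (and signal handlers) room to run
        EXIT.wait(1)          # idle politely between polls

threading.Thread(target=worker, daemon=True).start()
q.put((1, "watch-uuid-edited-by-user"))   # high priority
q.put((100, "watch-uuid-on-schedule"))    # normal priority
time.sleep(3)
EXIT.set()                                # what the SIGTERM handler would do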