Compare commits


15 Commits

Author SHA1 Message Date
dgtlmoon e1b4055330 Simplify scrub operation 2022-05-02 20:57:37 +02:00
dgtlmoon f69585b276 Improving support info in README.md 2022-04-29 20:26:02 +02:00
dgtlmoon 0179940df1 Handle deletions better (#570) 2022-04-29 19:12:33 +02:00
dgtlmoon c0d0424e7e Data storage bug fix #569 2022-04-29 18:26:15 +02:00
dgtlmoon 014dc61222 Upgrade notifications library - fixing markup in email subject 2022-04-29 09:39:40 +02:00
dgtlmoon 06517bfd22 Ability to 'Share' a watch by a generated link, this will include all filters and triggers - see Wiki (#563) 2022-04-26 10:52:08 +02:00
dgtlmoon b3a115dd4a Upgrade notifications library Re #555 - fixing Telegram HTML markup in notification title 2022-04-25 23:12:32 +02:00
dgtlmoon ffc4215411 Unify MINIMUM_SECONDS_RECHECK_TIME env var to 60 seconds 2022-04-24 20:37:30 +02:00
dgtlmoon 9e708810d1 Seconds/minutes/hours/days between checks form field upgrade from 'minutes' only (#512) 2022-04-24 16:56:32 +02:00
dgtlmoon 1e8aa6158b Form styling improvements 2022-04-24 14:40:53 +02:00
dgtlmoon 015353eccc Form field handling improvements - fixing field list handler for empty lines 2022-04-24 13:53:13 +02:00
dgtlmoon 501183e66b Fix "Add email" button in main global notification settings 2022-04-22 10:51:52 +02:00
dgtlmoon def74f27e6 Test notification button fixed in main settings (#556) 2022-04-21 21:36:10 +02:00
dgtlmoon 37775a46c6 tgram:// be sure total notification size is always under their 4096 size limit 2022-04-21 16:28:15 +02:00
dgtlmoon e4eaa0c817 Shows which items are already in the queue, disables adding to the queue if already in the recheck queue (#552) 2022-04-21 12:52:45 +02:00
33 changed files with 636 additions and 365 deletions

View File

@@ -170,9 +170,12 @@ Raspberry Pi and linux/arm/v6 linux/arm/v7 arm64 devices are supported! See the
Do you use changedetection.io to make money? Does it save you time or money? Does it make your life easier? Less stressful? Remember, we write this software when we should be doing actual paid work; we have to buy food and pay rent just like you.
Please support us, even small amounts help a LOT.
BTC `1PLFN327GyUarpJd7nVe7Reqg9qHx5frNn`
Firstly, consider taking out a [change detection monthly subscription - unlimited checks and watches](https://lemonade.changedetection.io/start), even if you don't use it, you still get the warm fuzzy feeling of helping out the project. (And who knows, you might just use it!)
Or directly donate an amount via PayPal [![Donate](https://img.shields.io/badge/Donate-PayPal-green.svg)](https://www.paypal.com/donate/?hosted_button_id=7CP6HR9ZCNDYJ)
Or BTC `1PLFN327GyUarpJd7nVe7Reqg9qHx5frNn`
<img src="https://raw.githubusercontent.com/dgtlmoon/changedetection.io/master/btc-support.png" style="max-width:50%;" alt="Support us!" />

View File

@@ -32,6 +32,7 @@ from flask import (
render_template,
request,
send_from_directory,
session,
url_for,
)
from flask_login import login_required
@@ -342,6 +343,8 @@ def changedetection_app(config=None, datastore_o=None):
@app.route("/", methods=['GET'])
@login_required
def index():
from changedetectionio import forms
limit_tag = request.args.get('tag')
pause_uuid = request.args.get('pause')
@@ -378,7 +381,6 @@ def changedetection_app(config=None, datastore_o=None):
existing_tags = datastore.get_all_tags()
from changedetectionio import forms
form = forms.quickWatchForm(request.form)
output = render_template("watch-overview.html",
@@ -390,8 +392,10 @@ def changedetection_app(config=None, datastore_o=None):
has_unviewed=datastore.data['has_unviewed'],
# Don't link to hosting when we're on the hosting environment
hosted_sticky=os.getenv("SALTED_PASS", False) == False,
guid=datastore.data['app_guid'])
guid=datastore.data['app_guid'],
queued_uuids=update_q.queue)
if session.get('share-link'):
del(session['share-link'])
return output
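
For context, `update_q` is the recheck queue (a standard `queue.Queue` as far as this diff shows); its internal `.queue` deque is handed to the template as `queued_uuids` so the overview can flag and disable watches that are already waiting for a recheck. A minimal sketch of that membership check (names here are illustrative):

```python
from queue import Queue

update_q = Queue()
update_q.put("uuid-1234")

def is_queued(uuid: str, q: Queue = update_q) -> bool:
    # Queue.queue is the internal deque; a read-only membership check is
    # enough for display purposes in the overview template.
    return uuid in q.queue

print(is_queued("uuid-1234"))  # True
print(is_queued("uuid-9999"))  # False
```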
@@ -430,48 +434,21 @@ def changedetection_app(config=None, datastore_o=None):
@login_required
def scrub_page():
import re
if request.method == 'POST':
confirmtext = request.form.get('confirmtext')
limit_date = request.form.get('limit_date')
limit_timestamp = 0
# Re #149 - allow empty/0 timestamp limit
if len(limit_date):
try:
limit_date = limit_date.replace('T', ' ')
# I noticed chrome will show '/' but actually submit '-'
limit_date = limit_date.replace('-', '/')
# In the case that :ss seconds are supplied
limit_date = re.sub(r'(\d\d:\d\d)(:\d\d)', '\\1', limit_date)
str_to_dt = datetime.datetime.strptime(limit_date, '%Y/%m/%d %H:%M')
limit_timestamp = int(str_to_dt.timestamp())
if limit_timestamp > time.time():
flash("Timestamp is in the future, cannot continue.", 'error')
return redirect(url_for('scrub_page'))
except ValueError:
flash('Incorrect date format, cannot continue.', 'error')
return redirect(url_for('scrub_page'))
if confirmtext == 'scrub':
changes_removed = 0
for uuid, watch in datastore.data['watching'].items():
if limit_timestamp:
changes_removed += datastore.scrub_watch(uuid, limit_timestamp=limit_timestamp)
else:
changes_removed += datastore.scrub_watch(uuid)
for uuid in datastore.data['watching'].keys():
datastore.scrub_watch(uuid)
flash("Cleared snapshot history ({} snapshots removed)".format(changes_removed))
flash("Cleared all snapshot history")
else:
flash('Incorrect confirmation text.', 'error')
return redirect(url_for('index'))
output = render_template("scrub.html")
output = render_template("scrub.html")
return output
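
The simplified scrub now just loops over every watch and calls `datastore.scrub_watch(uuid)`; the date-limited variant is gone. A compact sketch of what the per-watch scrub does, based on the `scrub_watch()` change further down in this diff (same key names, not the full implementation):

```python
import pathlib

def scrub_watch(watching: dict, datastore_path: str, uuid: str):
    # Reset the watch's change-tracking state but keep its URL and settings
    watching[uuid].update({'history': {}, 'last_checked': 0, 'last_changed': 0,
                           'newest_history_key': 0, 'previous_md5': False})
    # Remove this watch's snapshot text files from disk
    for item in pathlib.Path(datastore_path).rglob(uuid + "/*.txt"):
        item.unlink()
```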
@@ -517,7 +494,7 @@ def changedetection_app(config=None, datastore_o=None):
def edit_page(uuid):
from changedetectionio import forms
using_default_check_time = True
# More for testing, possible to return the first/only
if not datastore.data['watching'].keys():
flash("No watches to edit", "error")
@@ -530,27 +507,39 @@ def changedetection_app(config=None, datastore_o=None):
flash("No watch with the UUID %s found." % (uuid), "error")
return redirect(url_for('index'))
# be sure we update with a copy instead of accidentally editing the live object by reference
default = deepcopy(datastore.data['watching'][uuid])
# Show system wide default if nothing configured
if datastore.data['watching'][uuid]['fetch_backend'] is None:
default['fetch_backend'] = datastore.data['settings']['application']['fetch_backend']
# Show system wide default if nothing configured
if all(value == 0 or value == None for value in datastore.data['watching'][uuid]['time_between_check'].values()):
default['time_between_check'] = deepcopy(datastore.data['settings']['requests']['time_between_check'])
form = forms.watchForm(formdata=request.form if request.method == 'POST' else None,
data=datastore.data['watching'][uuid]
data=default
)
if request.method == 'GET':
# Set some defaults that refer to the main config when None, we do the same in POST,
# probably there should be a nice little handler for this.
if datastore.data['watching'][uuid]['fetch_backend'] is None:
form.fetch_backend.data = datastore.data['settings']['application']['fetch_backend']
if datastore.data['watching'][uuid]['minutes_between_check'] is None:
form.minutes_between_check.data = datastore.data['settings']['requests']['minutes_between_check']
if request.method == 'POST' and form.validate():
extra_update_obj = {}
# Re #110, if they submit the same as the default value, set it to None, so we continue to follow the default
if form.minutes_between_check.data == datastore.data['settings']['requests']['minutes_between_check']:
form.minutes_between_check.data = None
if form.fetch_backend.data == datastore.data['settings']['application']['fetch_backend']:
form.fetch_backend.data = None
# Assume we use the default value, unless something relevant is different, then use the form value
# values could be None, 0 etc.
# Set to None unless the next for: says that something is different
extra_update_obj['time_between_check'] = dict.fromkeys(form.time_between_check.data)
for k, v in form.time_between_check.data.items():
if v and v != datastore.data['settings']['requests']['time_between_check'][k]:
extra_update_obj['time_between_check'] = form.time_between_check.data
using_default_check_time = False
break
extra_update_obj = {}
# Use the default if its the same as system wide
if form.fetch_backend.data == datastore.data['settings']['application']['fetch_backend']:
extra_update_obj['fetch_backend'] = None
# Notification URLs
datastore.data['watching'][uuid]['notification_urls'] = form.notification_urls.data
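
Re #110: the per-watch interval is stored as all-`None` whenever it matches the global default (or is left empty), so the watch keeps following the system setting. A hedged sketch of that resolution rule (the helper name is made up):

```python
def resolve_time_between_check(form_value: dict, global_value: dict):
    # Start by assuming the watch should follow the global setting (all None)
    resolved = dict.fromkeys(form_value, None)
    using_default = True
    for k, v in form_value.items():
        if v and v != global_value.get(k):
            # Something watch-specific was entered, keep the whole form value
            resolved = form_value
            using_default = False
            break
    return resolved, using_default

print(resolve_time_between_check({'hours': 3, 'minutes': None},
                                 {'hours': 3, 'minutes': None}))
# ({'hours': None, 'minutes': None}, True)  <- same as default, so follow it
```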
@@ -576,7 +565,7 @@ def changedetection_app(config=None, datastore_o=None):
# Re #286 - We wait for syncing new data to disk in another thread every 60 seconds
# But in the case something is added we should save straight away
datastore.sync_to_json()
datastore.needs_write_urgent = True
# Queue the watch for immediate recheck
update_q.put(uuid)
@@ -595,12 +584,12 @@ def changedetection_app(config=None, datastore_o=None):
if request.method == 'POST' and not form.validate():
flash("An error occurred, please see below.", "error")
has_empty_checktime = datastore.data['watching'][uuid].has_empty_checktime
output = render_template("edit.html",
uuid=uuid,
watch=datastore.data['watching'][uuid],
form=form,
has_empty_checktime=has_empty_checktime,
has_empty_checktime=using_default_check_time,
current_base_url=datastore.data['settings']['application']['base_url'],
emailprefix=os.getenv('NOTIFICATION_MAIL_BUTTON_PREFIX', False)
)
@@ -630,15 +619,15 @@ def changedetection_app(config=None, datastore_o=None):
if form.validate():
datastore.data['settings']['application'].update(form.data['application'])
datastore.data['settings']['requests'].update(form.data['requests'])
datastore.needs_write = True
if not os.getenv("SALTED_PASS", False) and len(form.application.form.password.encrypted_password):
datastore.data['settings']['application']['password'] = form.application.form.password.encrypted_password
datastore.needs_write = True
datastore.needs_write_urgent = True
flash("Password protection enabled.", 'notice')
flask_login.logout_user()
return redirect(url_for('index'))
datastore.needs_write_urgent = True
flash("Settings updated.")
else:
@@ -674,12 +663,14 @@ def changedetection_app(config=None, datastore_o=None):
# Up to 5000 per batch so we don't flood the server
if len(url) and validators.url(url.replace('source:', '')) and good < 5000:
new_uuid = datastore.add_watch(url=url.strip(), tag=" ".join(tags), write_to_disk_now=False)
# Straight into the queue.
update_q.put(new_uuid)
good += 1
else:
if len(url):
remaining_urls.append(url)
if new_uuid:
# Straight into the queue.
update_q.put(new_uuid)
good += 1
continue
if len(url.strip()):
remaining_urls.append(url)
flash("{} Imported in {:.2f}s, {} Skipped.".format(good, time.time()-now,len(remaining_urls)))
datastore.needs_write = True
@@ -986,29 +977,35 @@ def changedetection_app(config=None, datastore_o=None):
from changedetectionio import forms
form = forms.quickWatchForm(request.form)
if form.validate():
url = request.form.get('url').strip()
if datastore.url_exists(url):
flash('The URL {} already exists'.format(url), "error")
return redirect(url_for('index'))
# @todo add_watch should throw a custom Exception for validation etc
new_uuid = datastore.add_watch(url=url, tag=request.form.get('tag').strip())
# Straight into the queue.
update_q.put(new_uuid)
flash("Watch added.")
return redirect(url_for('index'))
else:
if not form.validate():
flash("Error")
return redirect(url_for('index'))
url = request.form.get('url').strip()
if datastore.url_exists(url):
flash('The URL {} already exists'.format(url), "error")
return redirect(url_for('index'))
# @todo add_watch should throw a custom Exception for validation etc
new_uuid = datastore.add_watch(url=url, tag=request.form.get('tag').strip())
if new_uuid:
# Straight into the queue.
update_q.put(new_uuid)
flash("Watch added.")
return redirect(url_for('index'))
@app.route("/api/delete", methods=['GET'])
@login_required
def api_delete():
uuid = request.args.get('uuid')
if uuid != 'all' and not uuid in datastore.data['watching'].keys():
flash('The watch by UUID {} does not exist.'.format(uuid), 'error')
return redirect(url_for('index'))
# More for testing, possible to return the first/only
if uuid == 'first':
uuid = list(datastore.data['watching'].keys()).pop()
@@ -1068,6 +1065,59 @@ def changedetection_app(config=None, datastore_o=None):
flash("{} watches are queued for rechecking.".format(i))
return redirect(url_for('index', tag=tag))
@app.route("/api/share-url", methods=['GET'])
@login_required
def api_share_put_watch():
"""Given a watch UUID, upload the info and return a share-link
the share-link can be imported/added"""
import requests
import json
tag = request.args.get('tag')
uuid = request.args.get('uuid')
# more for testing
if uuid == 'first':
uuid = list(datastore.data['watching'].keys()).pop()
# copy it to memory and trim off what we don't need (history)
watch = deepcopy(datastore.data['watching'][uuid])
if (watch.get('history')):
del (watch['history'])
# for safety/privacy
for k in list(watch.keys()):
if k.startswith('notification_'):
del watch[k]
for r in['uuid', 'last_checked', 'last_changed']:
if watch.get(r):
del (watch[r])
# Add the global stuff which may have an impact
watch['ignore_text'] += datastore.data['settings']['application']['global_ignore_text']
watch['subtractive_selectors'] += datastore.data['settings']['application']['global_subtractive_selectors']
watch_json = json.dumps(watch)
try:
r = requests.request(method="POST",
data={'watch': watch_json},
url="https://changedetection.io/share/share",
headers={'App-Guid': datastore.data['app_guid']})
res = r.json()
session['share-link'] = "https://changedetection.io/share/{}".format(res['share_key'])
except Exception as e:
flash("Could not share, something went wrong while communicating with the share server.", 'error')
# https://changedetection.io/share/VrMv05wpXyQa
# in the browser - should give you a nice info page - wtf
# paste in etc
return redirect(url_for('index'))
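
Before the upload, the watch is scrubbed of anything private or instance-specific; only then is it POSTed to the share server with the `App-Guid` header, and the returned link is kept in the session. A rough sketch of that scrub step (the helper name is illustrative, the key names match the diff above):

```python
from copy import deepcopy

def prepare_watch_for_share(watch: dict, app_settings: dict) -> dict:
    """Hypothetical helper mirroring the scrub above - not the route itself."""
    shared = deepcopy(watch)
    shared.pop('history', None)
    # Notification settings can hold tokens/credentials - never upload them
    for k in list(shared.keys()):
        if k.startswith('notification_'):
            del shared[k]
    for k in ('uuid', 'last_checked', 'last_changed'):
        shared.pop(k, None)
    # Global rules also shape the result, so bundle them into the share
    shared['ignore_text'] = shared.get('ignore_text', []) + app_settings['global_ignore_text']
    shared['subtractive_selectors'] = shared.get('subtractive_selectors', []) + app_settings['global_subtractive_selectors']
    return shared
```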
# @todo handle ctrl break
ticker_thread = threading.Thread(target=ticker_thread_check_time_launch_checks).start()
@@ -1175,7 +1225,7 @@ def ticker_thread_check_time_launch_checks():
now = time.time()
recheck_time_minimum_seconds = int(os.getenv('MINIMUM_SECONDS_RECHECK_TIME', 60))
recheck_time_system_seconds = int(copied_datastore.data['settings']['requests']['minutes_between_check']) * 60
recheck_time_system_seconds = datastore.threshold_seconds
for uuid, watch in copied_datastore.data['watching'].items():
@@ -1185,8 +1235,9 @@ def ticker_thread_check_time_launch_checks():
# If they supplied an individual entry minutes to threshold.
threshold = now
if watch.threshold_seconds:
threshold -= watch.threshold_seconds
watch_threshold_seconds = watch.threshold_seconds()
if watch_threshold_seconds:
threshold -= watch_threshold_seconds
else:
threshold -= recheck_time_system_seconds
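
Net effect on scheduling: a watch's own `threshold_seconds()` wins when it is set, otherwise the system-wide `datastore.threshold_seconds` applies, and `MINIMUM_SECONDS_RECHECK_TIME` (now defaulting to 60) is the floor. A simplified sketch of that decision; the real ticker also checks pause state and queue membership:

```python
import os
import time

def is_due_for_recheck(last_checked: int, watch_threshold_seconds: int,
                       system_threshold_seconds: int) -> bool:
    minimum = int(os.getenv('MINIMUM_SECONDS_RECHECK_TIME', 60))
    # The watch's own interval wins when set, otherwise the global one applies
    effective = watch_threshold_seconds or system_threshold_seconds
    return (time.time() - last_checked) >= max(effective, minimum)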

View File

@@ -164,8 +164,8 @@ class perform_site_check():
else:
fetched_md5 = hashlib.md5(stripped_text_from_html).hexdigest()
# On the first run of a site, watch['previous_md5'] will be an empty string, set it the current one.
if not len(watch['previous_md5']):
# On the first run of a site, watch['previous_md5'] will be None, set it to the current one.
if not watch.get('previous_md5'):
watch['previous_md5'] = fetched_md5
update_obj["previous_md5"] = fetched_md5

View File

@@ -37,27 +37,31 @@ valid_method = {
default_method = 'GET'
class StringListField(StringField):
widget = widgets.TextArea()
def _value(self):
if self.data:
return "\r\n".join(self.data)
# ignore empty lines in the storage
data = list(filter(lambda x: len(x.strip()), self.data))
# Apply strip to each line
data = list(map(lambda x: x.strip(), data))
return "\r\n".join(data)
else:
return u''
# incoming
def process_formdata(self, valuelist):
if valuelist:
# Remove empty strings
cleaned = list(filter(None, valuelist[0].split("\n")))
self.data = [x.strip() for x in cleaned]
p = 1
if valuelist and len(valuelist[0].strip()):
# Remove empty strings, stripping and splitting \r\n, only \n etc.
self.data = valuelist[0].splitlines()
# Remove empty lines from the final data
self.data = list(filter(lambda x: len(x.strip()), self.data))
else:
self.data = []
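
In practice the new `process_formdata` accepts both `\r\n` and `\n` separators and silently drops blank lines, which is what the `test_check_watch_field_storage` changes near the end of this compare assert. A quick illustration of that parsing:

```python
raw = "json://127.0.0.1:30000\r\n\r\n   \njson://128.0.0.1\n"
lines = raw.splitlines()                          # handles \r\n and \n alike
lines = [x for x in lines if len(x.strip())]      # drop blank/whitespace-only lines
print(lines)  # ['json://127.0.0.1:30000', 'json://128.0.0.1']
```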
class SaltyPasswordField(StringField):
widget = widgets.PasswordInput()
encrypted_password = ""
@@ -85,6 +89,13 @@ class SaltyPasswordField(StringField):
else:
self.data = False
class TimeBetweenCheckForm(Form):
weeks = IntegerField('Weeks', validators=[validators.Optional(), validators.NumberRange(min=0, message="Should contain zero or more seconds")])
days = IntegerField('Days', validators=[validators.Optional(), validators.NumberRange(min=0, message="Should contain zero or more seconds")])
hours = IntegerField('Hours', validators=[validators.Optional(), validators.NumberRange(min=0, message="Should contain zero or more seconds")])
minutes = IntegerField('Minutes', validators=[validators.Optional(), validators.NumberRange(min=0, message="Should contain zero or more seconds")])
seconds = IntegerField('Seconds', validators=[validators.Optional(), validators.NumberRange(min=0, message="Should contain zero or more seconds")])
# @todo add total seconds minimum validator = minimum_seconds_recheck_time
# Separated by key:value
class StringDictKeyValue(StringField):
@@ -313,8 +324,7 @@ class watchForm(commonSettingsForm):
url = fields.URLField('URL', validators=[validateURL()])
tag = StringField('Group tag', [validators.Optional(), validators.Length(max=35)], default='')
minutes_between_check = fields.IntegerField('Maximum time in minutes until recheck',
[validators.Optional(), validators.NumberRange(min=1)])
time_between_check = FormField(TimeBetweenCheckForm)
css_filter = StringField('CSS/JSON/XPATH Filter', [ValidateCSSJSONXPATHInput()], default='')
@@ -347,8 +357,9 @@ class watchForm(commonSettingsForm):
# datastore.data['settings']['requests']..
class globalSettingsRequestForm(Form):
minutes_between_check = fields.IntegerField('Maximum time in minutes until recheck',
[validators.NumberRange(min=1)])
time_between_check = FormField(TimeBetweenCheckForm)
# datastore.data['settings']['application']..
class globalSettingsApplicationForm(commonSettingsForm):
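
Both forms now embed `TimeBetweenCheckForm` via `FormField`, which is why the tests later in this compare post fields such as `requests-time_between_check-minutes` instead of `requests-minutes_between_check`. A rough, self-contained sketch of how WTForms builds those names (not the project's exact classes):

```python
from wtforms import Form, FormField, IntegerField, validators

class TimeBetweenCheckForm(Form):
    weeks = IntegerField('Weeks', validators=[validators.Optional()])
    days = IntegerField('Days', validators=[validators.Optional()])
    hours = IntegerField('Hours', validators=[validators.Optional()])
    minutes = IntegerField('Minutes', validators=[validators.Optional()])
    seconds = IntegerField('Seconds', validators=[validators.Optional()])

class RequestsForm(Form):
    time_between_check = FormField(TimeBetweenCheckForm)

# The nested form joins names with "-" by default
form = RequestsForm(prefix='requests')
print([f.name for f in form.time_between_check])
# ['requests-time_between_check-weeks', ..., 'requests-time_between_check-seconds']
```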

View File

@@ -10,9 +10,7 @@ from changedetectionio.notification import (
)
class model(dict):
def __init__(self, *arg, **kw):
super(model, self).__init__(*arg, **kw)
self.update({
base_config = {
'note': "Hello! If you change this file manually, please be sure to restart your changedetection.io instance!",
'watching': {},
'settings': {
@@ -24,8 +22,7 @@ class model(dict):
},
'requests': {
'timeout': 15, # Default 15 seconds
# Default 3 hours
'minutes_between_check': 3 * 60, # Default 3 hours
'time_between_check': {'weeks': None, 'days': None, 'hours': 3, 'minutes': None, 'seconds': None},
'workers': 10 # Number of threads, lower is better for slow connections
},
'application': {
@@ -46,4 +43,8 @@ class model(dict):
'schema_version' : 0
}
}
})
}
def __init__(self, *arg, **kw):
super(model, self).__init__(*arg, **kw)
self.update(self.base_config)
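
The reason `deepcopy()` calls show up in `store.py` (#569): `base_config` is now a single class-level dict, so instances built from it with a plain `update()` share the nested dicts. A small illustrative sketch of the aliasing problem and how copying at creation isolates an instance (not the project's exact classes):

```python
from copy import deepcopy

class WatchModel(dict):
    base_config = {'history': {}, 'url': None}

    def __init__(self, *arg, **kw):
        self.update(self.base_config)   # shallow: nested dicts stay shared
        super().__init__(*arg, **kw)

copied = deepcopy(WatchModel())         # the pattern used in this change set

plain = WatchModel()
plain['history']['1651500000'] = 'snapshot-a.txt'

print(WatchModel()['history'])   # {'1651500000': 'snapshot-a.txt'}  <- leaked via the class dict
print(copied['history'])         # {}  <- the deep-copied instance keeps its own nested dicts
```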

View File

@@ -2,7 +2,7 @@ import os
import uuid as uuid_builder
minimum_seconds_recheck_time = int(os.getenv('MINIMUM_SECONDS_RECHECK_TIME', 5))
minimum_seconds_recheck_time = int(os.getenv('MINIMUM_SECONDS_RECHECK_TIME', 60))
from changedetectionio.notification import (
default_notification_body,
@@ -12,18 +12,18 @@ from changedetectionio.notification import (
class model(dict):
def __init__(self, *arg, **kw):
self.update({
base_config = {
'url': None,
'tag': None,
'last_checked': 0,
'last_changed': 0,
'paused': False,
'last_viewed': 0, # history key value of the last viewed via the [diff] link
'newest_history_key': "",
'newest_history_key': 0,
'title': None,
'previous_md5': "",
'uuid': str(uuid_builder.uuid4()),
'previous_md5': False,
# UUID not needed, should be generated only as a key
# 'uuid':
'headers': {}, # Extra headers to send
'body': None,
'method': 'GET',
@@ -42,21 +42,27 @@ class model(dict):
# Re #110, so then if this is set to None, we know to use the default value instead
# Requires setting to None on submit if it's the same as the default
# Should be all None by default, so we use the system default in this case.
'minutes_between_check': None
})
'time_between_check': {'weeks': None, 'days': None, 'hours': None, 'minutes': None, 'seconds': None}
}
def __init__(self, *arg, **kw):
self.update(self.base_config)
# goes at the end so we update the default object with the initialiser
super(model, self).__init__(*arg, **kw)
@property
def has_empty_checktime(self):
if self.get('minutes_between_check', None):
return False
return True
# Check if every value is None/False/0, i.e. no specific time is configured
res = all(x == None or x == False or x==0 for x in self.get('time_between_check', {}).values())
return res
@property
def threshold_seconds(self):
sec = self.get('minutes_between_check', None)
if sec:
sec = sec * 60
return sec
seconds = 0
mtable = {'seconds': 1, 'minutes': 60, 'hours': 3600, 'days': 86400, 'weeks': 86400 * 7}
for m, n in mtable.items():
x = self.get('time_between_check', {}).get(m, None)
if x:
seconds += x * n
return seconds
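
The new conversion is a straightforward unit multiplication; an equivalent one-liner (not the code above) for quick sanity checking. The store-level property shown later in this compare does the same sum but adds a `MINIMUM_SECONDS_RECHECK_TIME` floor:

```python
mtable = {'seconds': 1, 'minutes': 60, 'hours': 3600, 'days': 86400, 'weeks': 86400 * 7}

def threshold_seconds(time_between_check: dict) -> int:
    # None/False/0 entries contribute nothing
    return sum(n * (time_between_check.get(unit) or 0) for unit, n in mtable.items())

print(threshold_seconds({'weeks': None, 'days': None, 'hours': 3, 'minutes': None, 'seconds': None}))  # 10800
print(threshold_seconds({'days': 1, 'minutes': 30}))  # 88200
```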

View File

@@ -66,19 +66,25 @@ def process_notification(n_object, datastore):
if not 'avatar_url' in url:
url += k + 'avatar_url=https://raw.githubusercontent.com/dgtlmoon/changedetection.io/master/changedetectionio/static/images/avatar-256x256.png'
payload_max_size = 1700
# Trim everything to a max of 1700 total chars (leave some for padding, control etc)
# Incase n_title > 1700
# basically trim back the body until the total size fits our threshold
# and trim off the n_title to fit always.
body_limit = max(0, payload_max_size - len(n_title))
body = n_body[0:body_limit] if 'discord://' in url else n_body
if url.startswith('tgram://'):
# real limit is 4096, but minus some for extra metadata
payload_max_size = 3600
body_limit = max(0, payload_max_size - len(n_title))
n_title = n_title[0:payload_max_size]
n_body = n_body[0:body_limit]
elif url.startswith('discord://'):
# real limit is 2000, but minus some for extra metadata
payload_max_size = 1700
body_limit = max(0, payload_max_size - len(n_title))
n_title = n_title[0:payload_max_size]
n_body = n_body[0:body_limit]
apobj.add(url)
apobj.notify(
title=n_title[0:payload_max_size],
body=body,
title=n_title,
body=n_body,
body_format=n_format)
apobj.clear()
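
The trimming is now per-service rather than Discord-only: the title is kept (up to the budget) and the body absorbs whatever space remains. Restated as a small standalone helper (a sketch, not the code above):

```python
def trim_for_service(url: str, n_title: str, n_body: str):
    if url.startswith('tgram://'):
        payload_max_size = 3600   # real limit is 4096, minus room for metadata
    elif url.startswith('discord://'):
        payload_max_size = 1700   # real limit is 2000, minus room for metadata
    else:
        return n_title, n_body    # other services: leave untouched
    body_limit = max(0, payload_max_size - len(n_title))
    return n_title[0:payload_max_size], n_body[0:body_limit]
```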

View File

@@ -0,0 +1,40 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<svg
version="1.1"
id="Layer_1"
x="0px"
y="0px"
viewBox="0 0 115.77 122.88"
style="enable-background:new 0 0 115.77 122.88"
xml:space="preserve"
sodipodi:docname="copy.svg"
inkscape:version="1.1.1 (1:1.1+202109281949+c3084ef5ed)"
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
xmlns="http://www.w3.org/2000/svg"
xmlns:svg="http://www.w3.org/2000/svg"><defs
id="defs11" /><sodipodi:namedview
id="namedview9"
pagecolor="#ffffff"
bordercolor="#666666"
borderopacity="1.0"
inkscape:pageshadow="2"
inkscape:pageopacity="0.0"
inkscape:pagecheckerboard="0"
showgrid="false"
inkscape:zoom="5.5501303"
inkscape:cx="57.83648"
inkscape:cy="61.439999"
inkscape:window-width="1920"
inkscape:window-height="1056"
inkscape:window-x="1920"
inkscape:window-y="0"
inkscape:window-maximized="1"
inkscape:current-layer="g6" /><style
type="text/css"
id="style2">.st0{fill-rule:evenodd;clip-rule:evenodd;}</style><g
id="g6"><path
class="st0"
d="M89.62,13.96v7.73h12.19h0.01v0.02c3.85,0.01,7.34,1.57,9.86,4.1c2.5,2.51,4.06,5.98,4.07,9.82h0.02v0.02 v73.27v0.01h-0.02c-0.01,3.84-1.57,7.33-4.1,9.86c-2.51,2.5-5.98,4.06-9.82,4.07v0.02h-0.02h-61.7H40.1v-0.02 c-3.84-0.01-7.34-1.57-9.86-4.1c-2.5-2.51-4.06-5.98-4.07-9.82h-0.02v-0.02V92.51H13.96h-0.01v-0.02c-3.84-0.01-7.34-1.57-9.86-4.1 c-2.5-2.51-4.06-5.98-4.07-9.82H0v-0.02V13.96v-0.01h0.02c0.01-3.85,1.58-7.34,4.1-9.86c2.51-2.5,5.98-4.06,9.82-4.07V0h0.02h61.7 h0.01v0.02c3.85,0.01,7.34,1.57,9.86,4.1c2.5,2.51,4.06,5.98,4.07,9.82h0.02V13.96L89.62,13.96z M79.04,21.69v-7.73v-0.02h0.02 c0-0.91-0.39-1.75-1.01-2.37c-0.61-0.61-1.46-1-2.37-1v0.02h-0.01h-61.7h-0.02v-0.02c-0.91,0-1.75,0.39-2.37,1.01 c-0.61,0.61-1,1.46-1,2.37h0.02v0.01v64.59v0.02h-0.02c0,0.91,0.39,1.75,1.01,2.37c0.61,0.61,1.46,1,2.37,1v-0.02h0.01h12.19V35.65 v-0.01h0.02c0.01-3.85,1.58-7.34,4.1-9.86c2.51-2.5,5.98-4.06,9.82-4.07v-0.02h0.02H79.04L79.04,21.69z M105.18,108.92V35.65v-0.02 h0.02c0-0.91-0.39-1.75-1.01-2.37c-0.61-0.61-1.46-1-2.37-1v0.02h-0.01h-61.7h-0.02v-0.02c-0.91,0-1.75,0.39-2.37,1.01 c-0.61,0.61-1,1.46-1,2.37h0.02v0.01v73.27v0.02h-0.02c0,0.91,0.39,1.75,1.01,2.37c0.61,0.61,1.46,1,2.37,1v-0.02h0.01h61.7h0.02 v0.02c0.91,0,1.75-0.39,2.37-1.01c0.61-0.61,1-1.46,1-2.37h-0.02V108.92L105.18,108.92z"
id="path4"
style="fill:#ffffff;fill-opacity:1" /></g></svg>


View File

@@ -0,0 +1,46 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<svg
width="18"
height="19.92"
viewBox="0 0 18 19.92"
version="1.1"
id="svg6"
sodipodi:docname="spread.svg"
inkscape:version="1.1.1 (1:1.1+202109281949+c3084ef5ed)"
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
xmlns="http://www.w3.org/2000/svg"
xmlns:svg="http://www.w3.org/2000/svg">
<defs
id="defs10" />
<sodipodi:namedview
id="namedview8"
pagecolor="#ffffff"
bordercolor="#666666"
borderopacity="1.0"
inkscape:pageshadow="2"
inkscape:pageopacity="0.0"
inkscape:pagecheckerboard="0"
showgrid="false"
fit-margin-top="0"
fit-margin-left="0"
fit-margin-right="0"
fit-margin-bottom="0"
inkscape:zoom="28.416667"
inkscape:cx="9.0087975"
inkscape:cy="9.9941348"
inkscape:window-width="1920"
inkscape:window-height="1056"
inkscape:window-x="1920"
inkscape:window-y="0"
inkscape:window-maximized="1"
inkscape:current-layer="svg6" />
<path
d="M -3,-2 H 21 V 22 H -3 Z"
fill="none"
id="path2" />
<path
d="m 15,14.08 c -0.76,0 -1.44,0.3 -1.96,0.77 L 5.91,10.7 C 5.96,10.47 6,10.24 6,10 6,9.76 5.96,9.53 5.91,9.3 L 12.96,5.19 C 13.5,5.69 14.21,6 15,6 16.66,6 18,4.66 18,3 18,1.34 16.66,0 15,0 c -1.66,0 -3,1.34 -3,3 0,0.24 0.04,0.47 0.09,0.7 L 5.04,7.81 C 4.5,7.31 3.79,7 3,7 1.34,7 0,8.34 0,10 c 0,1.66 1.34,3 3,3 0.79,0 1.5,-0.31 2.04,-0.81 l 7.12,4.16 c -0.05,0.21 -0.08,0.43 -0.08,0.65 0,1.61 1.31,2.92 2.92,2.92 1.61,0 2.92,-1.31 2.92,-2.92 0,-1.61 -1.31,-2.92 -2.92,-2.92 z"
id="path4"
style="fill:#0078e7;fill-opacity:1" />
</svg>


View File

@@ -4,7 +4,7 @@ $(document).ready(function() {
e.preventDefault();
email = prompt("Destination email");
if(email) {
var n = $("#notification_urls");
var n = $(".notification-urls");
var p=email_notification_prefix;
$(n).val( $.trim( $(n).val() )+"\n"+email_notification_prefix+email );
}
@@ -25,10 +25,10 @@ $(document).ready(function() {
data = {
window_url : window.location.href,
notification_urls : $('#notification_urls').val(),
notification_title : $('#notification_title').val(),
notification_body : $('#notification_body').val(),
notification_format : $('#notification_format').val(),
notification_urls : $('.notification-urls').val(),
notification_title : $('.notification-title').val(),
notification_body : $('.notification-body').val(),
notification_format : $('.notification-format').val(),
}
for (key in data) {
if (!data[key].length) {

View File

@@ -3,4 +3,22 @@ $(function () {
$('.diff-link').click(function () {
$(this).closest('.unviewed').removeClass('unviewed');
});
$('.with-share-link > *').click(function () {
$("#copied-clipboard").remove();
var range = document.createRange();
var n=$("#share-link")[0];
range.selectNode(n);
window.getSelection().removeAllRanges();
window.getSelection().addRange(range);
document.execCommand("copy");
window.getSelection().removeAllRanges();
$('.with-share-link').append('<span style="font-size: 80%; color: #fff;" id="copied-clipboard">Copied to clipboard</span>');
$("#copied-clipboard").fadeOut(2500, function() {
$(this).remove();
});
});
});

View File

@@ -180,6 +180,9 @@ body:after, body:before {
.messages li.notice {
background: rgba(255, 255, 255, 0.5); }
.messages.with-share-link > *:hover {
cursor: pointer; }
#notification-customisation {
border: 1px solid #ccc;
padding: 0.5rem;
@@ -306,10 +309,10 @@ footer {
font-weight: bold; }
.pure-form textarea {
width: 100%; }
.pure-form ul#fetch_backend {
.pure-form ul.fetch-backend {
margin: 0px;
list-style: none; }
.pure-form ul#fetch_backend > li > * {
.pure-form ul.fetch-backend li > * {
display: inline-block; }
@media only screen and (max-width: 760px), (min-device-width: 768px) and (max-device-width: 1024px) {
@@ -440,3 +443,8 @@ ul {
padding-left: 1em;
padding-top: 0px;
margin-top: 4px; }
.time-check-widget tr {
display: inline; }
.time-check-widget tr input[type="number"] {
width: 4em; }

View File

@@ -237,6 +237,11 @@ body:after, body:before {
background: rgba(255, 255, 255, .5);
}
}
&.with-share-link {
> *:hover {
cursor:pointer;
}
}
}
#notification-customisation {
@@ -413,10 +418,10 @@ footer {
textarea {
width: 100%;
}
ul#fetch_backend {
ul.fetch-backend {
margin: 0px;
list-style: none;
> li {
li {
> * {
display: inline-block;
}
@@ -624,4 +629,13 @@ ul {
padding-left: 1em;
padding-top: 0px;
margin-top: 4px;
}
.time-check-widget {
tr {
display: inline;
input[type="number"] {
width: 4em;
}
}
}

View File

@@ -1,3 +1,6 @@
from flask import (
flash
)
import json
import logging
import os
@@ -7,6 +10,8 @@ import uuid as uuid_builder
from copy import deepcopy
from os import mkdir, path, unlink
from threading import Lock
import re
import requests
from changedetectionio.model import Watch, App
@@ -16,6 +21,11 @@ from changedetectionio.model import Watch, App
# https://stackoverflow.com/questions/6190468/how-to-trigger-function-on-value-change
class ChangeDetectionStore:
lock = Lock()
# For general updates/writes that can wait a few seconds
needs_write = False
# For when we edit, we should write to disk
needs_write_urgent = False
def __init__(self, datastore_path="/datastore", include_default_watches=True, version_tag="0.0.0"):
# Should only be active for docker
@@ -28,7 +38,8 @@ class ChangeDetectionStore:
self.__data = App.model()
# Base definition for all watchers
self.generic_definition = Watch.model()
# deepcopy part of #569 - not sure why it's needed exactly
self.generic_definition = deepcopy(Watch.model())
if path.isfile('changedetectionio/source.txt'):
with open('changedetectionio/source.txt') as f:
@@ -100,6 +111,9 @@ class ChangeDetectionStore:
secret = secrets.token_hex(16)
self.__data['settings']['application']['rss_access_token'] = secret
# Bump the update version by running updates
self.run_updates()
self.needs_write = True
# Finally start the thread that will manage periodic data saves to JSON
@@ -145,6 +159,17 @@ class ChangeDetectionStore:
self.needs_write = True
@property
def threshold_seconds(self):
seconds = 0
mtable = {'seconds': 1, 'minutes': 60, 'hours': 3600, 'days': 86400, 'weeks': 86400 * 7}
minimum_seconds_recheck_time = int(os.getenv('MINIMUM_SECONDS_RECHECK_TIME', 60))
for m, n in mtable.items():
x = self.__data['settings']['requests']['time_between_check'].get(m)
if x:
seconds += x * n
return max(seconds, minimum_seconds_recheck_time)
@property
def data(self):
has_unviewed = False
@@ -207,7 +232,7 @@ class ChangeDetectionStore:
del self.data['watching'][uuid]
self.needs_write = True
self.needs_write_urgent = True
# Clone a watch by UUID
def clone(self, uuid):
@@ -231,61 +256,58 @@ class ChangeDetectionStore:
return self.data['watching'][uuid].get(val)
# Remove a watch's data but keep the entry (URL etc)
def scrub_watch(self, uuid, limit_timestamp = False):
def scrub_watch(self, uuid):
import pathlib
import hashlib
del_timestamps = []
self.__data['watching'][uuid].update({'history': {}, 'last_checked': 0, 'last_changed': 0, 'newest_history_key': 0, 'previous_md5': False})
self.needs_write_urgent = True
changes_removed = 0
for timestamp, path in self.data['watching'][uuid]['history'].items():
if not limit_timestamp or (limit_timestamp is not False and int(timestamp) > limit_timestamp):
self.unlink_history_file(path)
del_timestamps.append(timestamp)
changes_removed += 1
if not limit_timestamp:
self.data['watching'][uuid]['last_checked'] = 0
self.data['watching'][uuid]['last_changed'] = 0
self.data['watching'][uuid]['previous_md5'] = ""
for timestamp in del_timestamps:
del self.data['watching'][uuid]['history'][str(timestamp)]
# If there was a limitstamp, we need to reset some meta data about the entry
# This has to happen after we remove the others from the list
if limit_timestamp:
newest_key = self.get_newest_history_key(uuid)
if newest_key:
self.data['watching'][uuid]['last_checked'] = int(newest_key)
# @todo should be the original value if it was less than newest key
self.data['watching'][uuid]['last_changed'] = int(newest_key)
try:
with open(self.data['watching'][uuid]['history'][str(newest_key)], "rb") as fp:
content = fp.read()
self.data['watching'][uuid]['previous_md5'] = hashlib.md5(content).hexdigest()
except (FileNotFoundError, IOError):
self.data['watching'][uuid]['previous_md5'] = ""
pass
self.needs_write = True
return changes_removed
for item in pathlib.Path(self.datastore_path).rglob(uuid+"/*.txt"):
unlink(item)
def add_watch(self, url, tag="", extras=None, write_to_disk_now=True):
if extras is None:
extras = {}
# In case these are copied across, assume it's a reference and deepcopy()
apply_extras = deepcopy(extras)
# Was it a share link? try to fetch the data
if (url.startswith("https://changedetection.io/share/")):
try:
r = requests.request(method="GET",
url=url,
# So we know to return the JSON instead of the human-friendly "help" page
headers={'App-Guid': self.__data['app_guid']})
res = r.json()
# List of permissible stuff we accept from the wild internet
for k in ['url', 'tag',
'paused', 'title',
'previous_md5', 'headers',
'body', 'method',
'ignore_text', 'css_filter',
'subtractive_selectors', 'trigger_text',
'extract_title_as_title']:
if res.get(k):
apply_extras[k] = res[k]
except Exception as e:
logging.error("Error fetching metadata for shared watch link", url, str(e))
flash("Error fetching metadata for {}".format(url), 'error')
return False
with self.lock:
# @todo use a common generic version of this
new_uuid = str(uuid_builder.uuid4())
new_watch = Watch.model({
# #Re 569
# Not sure why deepcopy was needed here, sometimes new watches would appear to already have 'history' set
# I assumed this would instantiate a new object but somehow an existing dict was getting used
new_watch = deepcopy(Watch.model({
'url': url,
'tag': tag
})
}))
# In case these are copied across, assume it's a reference and deepcopy()
apply_extras = deepcopy(extras)
for k in ['uuid', 'history', 'last_checked', 'last_changed', 'newest_history_key', 'previous_md5', 'viewed']:
if k in apply_extras:
del apply_extras[k]
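
The import side (triggered whenever the URL starts with https://changedetection.io/share/) only copies across a fixed whitelist of keys, so a shared watch can never carry someone else's notification endpoints. A sketch with an illustrative helper name:

```python
import requests

# Only these keys are accepted from the wild internet (taken from the diff above)
SHARE_IMPORT_KEYS = ['url', 'tag', 'paused', 'title', 'previous_md5', 'headers',
                     'body', 'method', 'ignore_text', 'css_filter',
                     'subtractive_selectors', 'trigger_text', 'extract_title_as_title']

def fetch_shared_watch(share_url: str, app_guid: str) -> dict:
    # The App-Guid header tells the share server to return JSON
    # instead of the human-friendly info page
    r = requests.get(share_url, headers={'App-Guid': app_guid})
    res = r.json()
    return {k: res[k] for k in SHARE_IMPORT_KEYS if res.get(k)}
```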
@@ -339,7 +361,7 @@ class ChangeDetectionStore:
def sync_to_json(self):
logging.info("Saving JSON..")
print("Saving JSON..")
try:
data = deepcopy(self.__data)
except RuntimeError as e:
@@ -361,6 +383,7 @@ class ChangeDetectionStore:
logging.error("Error writing JSON!! (Main JSON file save was skipped) : %s", str(e))
self.needs_write = False
self.needs_write_urgent = False
# Thread runner, this helps with thread/write issues when there are many operations that want to update the JSON
# by just running periodically in one thread, according to python, dict updates are threadsafe.
@@ -371,14 +394,14 @@ class ChangeDetectionStore:
print("Shutting down datastore thread")
return
if self.needs_write:
if self.needs_write or self.needs_write_urgent:
self.sync_to_json()
# Once per minute is enough, more and it can cause high CPU usage
# better here is to use something like self.app.config.exit.wait(1), but we cant get to 'app' from here
for i in range(30):
time.sleep(2)
if self.stop_thread:
for i in range(120):
time.sleep(0.5)
if self.stop_thread or self.needs_write_urgent:
break
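
The JSON writer thread now polls twice a second, so an urgent write (edit, delete, settings change) hits disk almost immediately while routine writes still happen roughly once a minute. A hedged sketch of that loop (attribute names follow the diff, the rest is simplified):

```python
import time

def save_datastore_loop(store):
    """Sketch of the periodic writer - not the exact thread body."""
    while True:
        if store.stop_thread:
            print("Shutting down datastore thread")
            return
        if store.needs_write or store.needs_write_urgent:
            store.sync_to_json()
        # 120 * 0.5s = ~60s between routine passes, but wake early for
        # shutdown or an urgent write
        for _ in range(120):
            time.sleep(0.5)
            if store.stop_thread or store.needs_write_urgent:
                break
```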
# Go through the datastore path and remove any snapshots that are not mentioned in the index
@@ -394,7 +417,54 @@ class ChangeDetectionStore:
import pathlib
# Only in the sub-directories
for item in pathlib.Path(self.datastore_path).rglob("*/*txt"):
if not str(item) in index:
print ("Removing",item)
unlink(item)
for uuid in self.data['watching']:
for item in pathlib.Path(self.datastore_path).rglob(uuid+"/*.txt"):
if not str(item) in index:
print ("Removing",item)
unlink(item)
# Run all updates
# IMPORTANT - Each update could be run even on a fresh install where the schema is already correct
# So therefore - each `update_n` should be very careful about checking if it needs to actually run
# Probably we should bump the current update schema version with each tag release version?
def run_updates(self):
import inspect
import shutil
updates_available = []
for i, o in inspect.getmembers(self, predicate=inspect.ismethod):
m = re.search(r'update_(\d+)$', i)
if m:
updates_available.append(int(m.group(1)))
updates_available.sort()
for update_n in updates_available:
if update_n > self.__data['settings']['application']['schema_version']:
print ("Applying update_{}".format((update_n)))
# Won't exist on fresh installs
if os.path.exists(self.json_store_path):
shutil.copyfile(self.json_store_path, self.datastore_path+"/url-watches-before-{}.json".format(update_n))
try:
update_method = getattr(self, "update_{}".format(update_n))()
except Exception as e:
print("Error while trying update_{}".format((update_n)))
print(e)
# Don't run any more updates
return
else:
# Bump the version, important
self.__data['settings']['application']['schema_version'] = update_n
# Convert minutes to seconds on settings and each watch
def update_1(self):
if self.data['settings']['requests'].get('minutes_between_check'):
self.data['settings']['requests']['time_between_check']['minutes'] = self.data['settings']['requests']['minutes_between_check']
# Remove the default 'hours' that is set from the model
self.data['settings']['requests']['time_between_check']['hours'] = None
for uuid, watch in self.data['watching'].items():
if 'minutes_between_check' in watch:
# Only upgrade individual watch time if it was set
if watch.get('minutes_between_check', False):
self.data['watching'][uuid]['time_between_check']['minutes'] = watch['minutes_between_check']
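
The update runner above discovers any `update_<n>` method by reflection, backs up the JSON first, and applies only the updates newer than the stored `schema_version`. A self-contained sketch of that discovery/apply loop (a toy class, not the real store):

```python
import inspect
import re

class MiniStore:
    def __init__(self):
        self.schema_version = 0
        self.settings = {'minutes_between_check': 180,
                         'time_between_check': {'minutes': None, 'hours': 3}}

    def update_1(self):
        # Convert the old single "minutes" value into the new unit dict
        if self.settings.get('minutes_between_check'):
            self.settings['time_between_check']['minutes'] = self.settings['minutes_between_check']
            self.settings['time_between_check']['hours'] = None

    def run_updates(self):
        available = []
        for name, _method in inspect.getmembers(self, predicate=inspect.ismethod):
            m = re.search(r'update_(\d+)$', name)
            if m:
                available.append(int(m.group(1)))
        for n in sorted(available):
            if n > self.schema_version:
                getattr(self, "update_{}".format(n))()
                self.schema_version = n   # bump only after the update succeeded

store = MiniStore()
store.run_updates()
print(store.settings['time_between_check'])  # {'minutes': 180, 'hours': None}
```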

View File

@@ -8,7 +8,7 @@
Gitter - gitter://token/room
Office365 - o365://TenantID:AccountEmail/ClientID/ClientSecret/TargetEmail
AWS SNS - sns://AccessKeyID/AccessSecretKey/RegionName/+PhoneNo
SMTPS - mailtos://user:pass@mail.domain.com?to=receivingAddress@example.com")
SMTPS - mailtos://user:pass@mail.domain.com?to=receivingAddress@example.com", class="notification-urls")
}}
<div class="pure-form-message-inline">
<ul>
@@ -22,20 +22,19 @@
<a id="send-test-notification" class="pure-button button-secondary button-xsmall" style="font-size: 70%">Send test notification</a>
{% if emailprefix %}
<a id="add-email-helper" class="pure-button button-secondary button-xsmall" style="font-size: 70%">Add email</a>
{% endif %}
</div>
<div id="notification-customisation" class="pure-control-group">
<div class="pure-control-group">
{{ render_field(form.notification_title, class="m-d") }}
{{ render_field(form.notification_title, class="m-d notification-title") }}
<span class="pure-form-message-inline">Title for all notifications</span>
</div>
<div class="pure-control-group">
{{ render_field(form.notification_body , rows=5) }}
{{ render_field(form.notification_body , rows=5, class="notification-body") }}
<span class="pure-form-message-inline">Body for all notifications</span>
</div>
<div class="pure-control-group">
{{ render_field(form.notification_format , rows=5) }}
{{ render_field(form.notification_format , rows=5, class="notification-format") }}
<span class="pure-form-message-inline">Format for all notifications</span>
</div>
<div class="pure-controls">

View File

@@ -94,6 +94,13 @@
</ul>
{% endif %}
{% endwith %}
{% if session['share-link'] %}
<ul class="messages with-share-link">
<li class="message">Share this link: <span id="share-link">{{ session['share-link'] }}</span> <img style="height: 1em;display:inline-block;" src="{{url_for('static_content', group='images', filename='copy.svg')}}" /></li>
</ul>
{% endif %}
{% block content %}
{% endblock %}

View File

@@ -41,7 +41,7 @@
<span class="pure-form-message-inline">Organisational tag/group name used in the main listing page</span>
</div>
<div class="pure-control-group">
{{ render_field(form.minutes_between_check) }}
{{ render_field(form.time_between_check, class="time-check-widget") }}
{% if has_empty_checktime %}
<span class="pure-form-message-inline">Currently using the <a
href="{{ url_for('settings_page', uuid=uuid) }}">default global settings</a>, change to another value if you want to be specific.</span>
@@ -58,7 +58,7 @@
<div class="tab-pane-inner" id="request">
<div class="pure-control-group">
{{ render_field(form.fetch_backend) }}
{{ render_field(form.fetch_backend, class="fetch-backend") }}
<span class="pure-form-message-inline">
<p>Use the <strong>Basic</strong> method (default) where your watched site doesn't need Javascript to render.</p>
<p>The <strong>Chrome/Javascript</strong> method requires a network connection to a running WebDriver+Chrome server, set by the ENV var 'WEBDRIVER_URL'. </p>

View File

@@ -7,7 +7,7 @@
<input type="hidden" name="csrf_token" value="{{ csrf_token() }}"/>
<fieldset>
<div class="pure-control-group">
This will remove all version snapshots/data, but keep your list of URLs. <br/>
This will remove ALL version snapshots/data, but keep your list of URLs. <br/>
You may like to use the <strong>BACKUP</strong> link first.<br/>
</div>
<br/>
@@ -17,12 +17,6 @@
<span class="pure-form-message-inline">Type in the word <strong>scrub</strong> to confirm that you understand!</span>
</div>
<br/>
<div class="pure-control-group">
<label for="confirmtext">Optional: Limit deletion of snapshots to snapshots <i>newer</i> than date/time</label>
<input type="datetime-local" id="limit_date" name="limit_date" />
<span class="pure-form-message-inline">dd/mm/yyyy hh:mm (24 hour format)</span>
</div>
<br/>
<div class="pure-control-group">
<button type="submit" class="pure-button pure-button-primary">Scrub!</button>
</div>

View File

@@ -28,7 +28,7 @@
<div class="tab-pane-inner" id="general">
<fieldset>
<div class="pure-control-group">
{{ render_field(form.requests.form.minutes_between_check) }}
{{ render_field(form.requests.form.time_between_check, class="time-check-widget") }}
<span class="pure-form-message-inline">Default time for all watches, when the watch does not have a specific time setting.</span>
</div>
<div class="pure-control-group">
@@ -75,7 +75,7 @@
<div class="tab-pane-inner" id="fetching">
<div class="pure-control-group">
{{ render_field(form.application.form.fetch_backend) }}
{{ render_field(form.application.form.fetch_backend, class="fetch-backend") }}
<span class="pure-form-message-inline">
<p>Use the <strong>Basic</strong> method (default) where your watched sites don't need Javascript to render.</p>
<p>The <strong>Chrome/Javascript</strong> method requires a network connection to a running WebDriver+Chrome server, set by the ENV var 'WEBDRIVER_URL'. </p>

View File

@@ -13,8 +13,7 @@
{{ render_simple_field(form.tag, value=active_tag if active_tag else '', placeholder="watch group") }}
<button type="submit" class="pure-button pure-button-primary">Watch</button>
</fieldset>
<!-- add extra stuff, like do a http POST and send headers -->
<!-- user/pass r = requests.get('https://api.github.com/user', auth=('user', 'pass')) -->
<span style="color:#eee; font-size: 80%;"><img style="height: 1em;display:inline-block;" src="{{url_for('static_content', group='images', filename='spread.svg')}}" /> Tip: You can also add 'shared' watches. <a href="#">More info</a></a></span>
</form>
<div>
<a href="{{url_for('index')}}" class="pure-button button-tag {{'active' if not active_tag }}">All</a>
@@ -46,12 +45,15 @@
{% if watch.last_error is defined and watch.last_error != False %}error{% endif %}
{% if watch.last_notification_error is defined and watch.last_notification_error != False %}error{% endif %}
{% if watch.paused is defined and watch.paused != False %}paused{% endif %}
{% if watch.newest_history_key| int > watch.last_viewed| int %}unviewed{% endif %}">
{% if watch.newest_history_key| int > watch.last_viewed| int %}unviewed{% endif %}
{% if watch.uuid in queued_uuids %}queued{% endif %}">
<td class="inline">{{ loop.index }}</td>
<td class="inline paused-state state-{{watch.paused}}"><a href="{{url_for('index', pause=watch.uuid, tag=active_tag)}}"><img src="{{url_for('static_content', group='images', filename='pause.svg')}}" alt="Pause" title="Pause"/></a></td>
<td class="title-col inline">{{watch.title if watch.title is not none and watch.title|length > 0 else watch.url}}
<a class="external" target="_blank" rel="noopener" href="{{ watch.url.replace('source:','') }}"></a>
<a href="{{url_for('api_share_put_watch', uuid=watch.uuid)}}"><img style="height: 1em;display:inline-block;" src="{{url_for('static_content', group='images', filename='spread.svg')}}" /></a>
{%if watch.fetch_backend == "html_webdriver" %}<img style="height: 1em; display:inline-block;" src="{{url_for('static_content', group='images', filename='Google-Chrome-icon.png')}}" />{% endif %}
{% if watch.last_error is defined and watch.last_error != False %}
@@ -72,8 +74,8 @@
{% endif %}
</td>
<td>
<a href="{{ url_for('api_watch_checknow', uuid=watch.uuid, tag=request.args.get('tag')) }}"
class="pure-button button-small pure-button-primary">Recheck</a>
<a {% if watch.uuid in queued_uuids %}disabled="true"{% endif %} href="{{ url_for('api_watch_checknow', uuid=watch.uuid, tag=request.args.get('tag')) }}"
class="recheck pure-button button-small pure-button-primary">{% if watch.uuid in queued_uuids %}Queued{% else %}Recheck{% endif %}</a>
<a href="{{ url_for('edit_page', uuid=watch.uuid)}}" class="pure-button button-small pure-button-primary">Edit</a>
{% if watch.history|length >= 2 %}
<a href="{{ url_for('diff_history_page', uuid=watch.uuid) }}" target="{{watch.uuid}}" class="pure-button button-small pure-button-primary diff-link">Diff</a>

View File

@@ -13,7 +13,7 @@ def test_check_access_control(app, client):
res = c.post(
url_for("settings_page"),
data={"application-password": "foobar",
"requests-minutes_between_check": 180,
"requests-time_between_check-minutes": 180,
'application-fetch_backend': "html_requests"},
follow_redirects=True
)
@@ -46,7 +46,7 @@ def test_check_access_control(app, client):
assert b"BACKUP" in res.data
assert b"IMPORT" in res.data
assert b"LOG OUT" in res.data
assert b"minutes_between_check" in res.data
assert b"time_between_check-minutes" in res.data
assert b"fetch_backend" in res.data
##################################################
@@ -55,7 +55,7 @@ def test_check_access_control(app, client):
res = c.post(
url_for("settings_page"),
data={
"requests-minutes_between_check": 180,
"requests-time_between_check-minutes": 180,
"application-fetch_backend": "html_webdriver",
"application-removepassword_button": "Remove password"
},
@@ -70,7 +70,7 @@ def test_check_access_control(app, client):
res = c.post(
url_for("settings_page"),
data={"application-password": "",
"requests-minutes_between_check": 180,
"requests-time_between_check-minutes": 180,
'application-fetch_backend': "html_requests"},
follow_redirects=True
)

View File

@@ -109,7 +109,7 @@ def test_check_basic_change_detection_functionality(client, live_server):
# Enable auto pickup of <title> in settings
res = client.post(
url_for("settings_page"),
data={"application-extract_title_as_title": "1", "requests-minutes_between_check": 180, 'application-fetch_backend': "html_requests"},
data={"application-extract_title_as_title": "1", "requests-time_between_check-minutes": 180, 'application-fetch_backend': "html_requests"},
follow_redirects=True
)

View File

@@ -171,11 +171,24 @@ def test_check_ignore_text_functionality(client, live_server):
def test_check_global_ignore_text_functionality(client, live_server):
sleep_time_for_fetch_thread = 3
# Give the endpoint time to spin up
time.sleep(1)
ignore_text = "XXXXX\r\nYYYYY\r\nZZZZZ"
set_original_ignore_response()
# Give the endpoint time to spin up
time.sleep(1)
# Goto the settings page, add our ignore text
res = client.post(
url_for("settings_page"),
data={
"requests-time_between_check-minutes": 180,
"application-global_ignore_text": ignore_text,
'application-fetch_backend': "html_requests"
},
follow_redirects=True
)
assert b"Settings updated." in res.data
# Add our URL to the import page
test_url = url_for('test_endpoint', _external=True)
@@ -192,17 +205,6 @@ def test_check_global_ignore_text_functionality(client, live_server):
# Give the thread time to pick it up
time.sleep(sleep_time_for_fetch_thread)
# Goto the settings page, add our ignore text
res = client.post(
url_for("settings_page"),
data={
"requests-minutes_between_check": 180,
"application-global_ignore_text": ignore_text,
'application-fetch_backend': "html_requests"
},
follow_redirects=True
)
assert b"Settings updated." in res.data
# Goto the edit page of the item, add our ignore text
# Add our URL to the import page
@@ -225,12 +227,16 @@ def test_check_global_ignore_text_functionality(client, live_server):
# Give the thread time to pick it up
time.sleep(sleep_time_for_fetch_thread)
# so that we are sure everything is viewed and in a known 'nothing changed' state
res = client.get(url_for("diff_history_page", uuid="first"))
# It should report nothing found (no new 'unviewed' class)
res = client.get(url_for("index"))
assert b'unviewed' not in res.data
assert b'/test-endpoint' in res.data
# Make a change
# Make a change which includes the ignore text
set_modified_ignore_response()
# Trigger a check
@@ -243,7 +249,7 @@ def test_check_global_ignore_text_functionality(client, live_server):
assert b'unviewed' not in res.data
assert b'/test-endpoint' in res.data
# Just to be sure.. set a regular modified change..
# Just to be sure.. set a regular modified change that will trigger it
set_modified_original_ignore_response()
client.get(url_for("api_watch_checknow"), follow_redirects=True)
time.sleep(sleep_time_for_fetch_thread)

View File

@@ -51,12 +51,11 @@ def test_render_anchor_tag_content_true(client, live_server):
# set original html text
set_original_ignore_response()
# Goto the settings page, choose not to ignore links
# Goto the settings page, choose to ignore links (dont select/send "application-render_anchor_tag_content")
res = client.post(
url_for("settings_page"),
data={
"requests-minutes_between_check": 180,
"application-render_anchor_tag_content": "true",
"requests-time_between_check-minutes": 180,
"application-fetch_backend": "html_requests",
},
follow_redirects=True,
@@ -85,6 +84,30 @@ def test_render_anchor_tag_content_true(client, live_server):
# Give the thread time to pick it up
time.sleep(sleep_time_for_fetch_thread)
# We should not see the rendered anchor tag
res = client.get(url_for("preview_page", uuid="first"))
assert '(/modified_link)' not in res.data.decode()
# Goto the settings page, ENABLE render anchor tag
res = client.post(
url_for("settings_page"),
data={
"requests-time_between_check-minutes": 180,
"application-render_anchor_tag_content": "true",
"application-fetch_backend": "html_requests",
},
follow_redirects=True,
)
assert b"Settings updated." in res.data
# Trigger a check
client.get(url_for("api_watch_checknow"), follow_redirects=True)
# Give the thread time to pick it up
time.sleep(sleep_time_for_fetch_thread)
# check that the anchor tag content is rendered
res = client.get(url_for("preview_page", uuid="first"))
assert '(/modified_link)' in res.data.decode()
@@ -100,120 +123,3 @@ def test_render_anchor_tag_content_true(client, live_server):
follow_redirects=True)
assert b'Deleted' in res.data
def test_render_anchor_tag_content_false(client, live_server):
"""Testing that anchor tag content changes are ignored when
render_anchor_tag_content setting is set to false"""
sleep_time_for_fetch_thread = 3
# Give the endpoint time to spin up
time.sleep(1)
# set the original html text
set_original_ignore_response()
# Goto the settings page, choose to ignore hyperlinks
res = client.post(
url_for("settings_page"),
data={
"requests-minutes_between_check": 180,
"application-render_anchor_tag_content": "false",
"application-fetch_backend": "html_requests",
},
follow_redirects=True,
)
assert b"Settings updated." in res.data
# Add our URL to the import page
test_url = url_for("test_endpoint", _external=True)
res = client.post(
url_for("import_page"), data={"urls": test_url}, follow_redirects=True
)
assert b"1 Imported" in res.data
time.sleep(sleep_time_for_fetch_thread)
# Trigger a check
client.get(url_for("api_watch_checknow"), follow_redirects=True)
# set a new html text, with a modified link
set_modified_ignore_response()
time.sleep(sleep_time_for_fetch_thread)
# Trigger a check
client.get(url_for("api_watch_checknow"), follow_redirects=True)
# Give the thread time to pick it up
time.sleep(sleep_time_for_fetch_thread)
# check that the anchor tag content is not rendered
res = client.get(url_for("preview_page", uuid="first"))
assert '(/modified_link)' not in res.data.decode()
# even though the link has changed, we shouldn't detect a change since
# we selected to not render anchor tag content (no new 'unviewed' class)
res = client.get(url_for("index"))
assert b"unviewed" not in res.data
assert b"/test-endpoint" in res.data
# Cleanup everything
res = client.get(url_for("api_delete", uuid="all"), follow_redirects=True)
assert b'Deleted' in res.data
def test_render_anchor_tag_content_default(client, live_server):
"""Testing that anchor tag content changes are ignored when the
render_anchor_tag_content setting is not explicitly selected"""
sleep_time_for_fetch_thread = 3
# Give the endpoint time to spin up
time.sleep(1)
# set the original html text
set_original_ignore_response()
# Goto the settings page, not passing the render_anchor_tag_content setting
res = client.post(
url_for("settings_page"),
data={
"requests-minutes_between_check": 180,
"application-fetch_backend": "html_requests",
},
follow_redirects=True,
)
assert b"Settings updated." in res.data
# Add our URL to the import page
test_url = url_for("test_endpoint", _external=True)
res = client.post(
url_for("import_page"), data={"urls": test_url}, follow_redirects=True
)
assert b"1 Imported" in res.data
time.sleep(sleep_time_for_fetch_thread)
# Trigger a check
client.get(url_for("api_watch_checknow"), follow_redirects=True)
# set a new html text, with a modified link
set_modified_ignore_response()
time.sleep(sleep_time_for_fetch_thread)
# Trigger a check
client.get(url_for("api_watch_checknow"), follow_redirects=True)
# Give the thread time to pick it up
time.sleep(sleep_time_for_fetch_thread)
# check that the anchor tag content is not rendered
res = client.get(url_for("preview_page", uuid="first"))
assert '(/modified_link)' not in res.data.decode()
# even though the link has changed, we shouldn't detect a change since
# we did not select the setting and the default behaviour is to not
# render anchor tag content (no new 'unviewed' class)
res = client.get(url_for("index"))
assert b"unviewed" not in res.data
assert b"/test-endpoint" in res.data
# Cleanup everything
res = client.get(url_for("api_delete", uuid="all"), follow_redirects=True)
assert b'Deleted' in res.data

View File

@@ -51,7 +51,7 @@ def test_normal_page_check_works_with_ignore_status_code(client, live_server):
res = client.post(
url_for("settings_page"),
data={
"requests-minutes_between_check": 180,
"requests-time_between_check-minutes": 180,
"application-ignore_status_codes": "y",
'application-fetch_backend': "html_requests"
},

View File

@@ -61,7 +61,7 @@ def test_check_ignore_whitespace(client, live_server):
res = client.post(
url_for("settings_page"),
data={
"requests-minutes_between_check": 180,
"requests-time_between_check-minutes": 180,
"application-ignore_whitespace": "y",
"application-fetch_backend": "html_requests"
},

View File

@@ -270,6 +270,7 @@ def test_check_json_filter_bool_val(client, live_server):
)
assert b"1 Imported" in res.data
time.sleep(3)
# Goto the edit page, add our ignore text
# Add our URL to the import page
res = client.post(
@@ -284,6 +285,7 @@ def test_check_json_filter_bool_val(client, live_server):
)
assert b"Updated watch." in res.data
time.sleep(3)
# Trigger a check
client.get(url_for("api_watch_checknow"), follow_redirects=True)

View File

@@ -156,7 +156,7 @@ def test_check_notification(client, live_server):
# cleanup for the next
client.get(
url_for("api_delete", uuid="first"),
url_for("api_delete", uuid="all"),
follow_redirects=True
)
@@ -172,8 +172,7 @@ def test_notification_validation(client, live_server):
data={"url": test_url, "tag": 'nice one'},
follow_redirects=True
)
with open("xxx.bin", "wb") as f:
f.write(res.data)
assert b"Watch added" in res.data
# Re #360 some validation
@@ -199,7 +198,7 @@ def test_notification_validation(client, live_server):
"application-notification_body": "Rubbish: {rubbish}\n",
"application-notification_format": "Text",
"application-notification_urls": "json://localhost/foobar",
"requests-minutes_between_check": 180,
"requests-time_between_check-minutes": 180,
"fetch_backend": "html_requests"
},
follow_redirects=True
@@ -209,6 +208,6 @@ def test_notification_validation(client, live_server):
# cleanup for the next
client.get(
url_for("api_delete", uuid="first"),
url_for("api_delete", uuid="all"),
follow_redirects=True
)

View File

@@ -35,7 +35,7 @@ def test_check_notification_error_handling(client, live_server):
"tag": "",
"title": "",
"headers": "",
"minutes_between_check": "180",
"time_between_check-minutes": "180",
"fetch_backend": "html_requests",
"trigger_check": "y"},
follow_redirects=True

View File

@@ -27,6 +27,7 @@ def test_headers_in_request(client, live_server):
)
assert b"1 Imported" in res.data
time.sleep(3)
cookie_header = '_ga=GA1.2.1022228332; cookie-preferences=analytics:accepted;'

View File

@@ -0,0 +1,76 @@
#!/usr/bin/python3
import time
from flask import url_for
from urllib.request import urlopen
from .util import set_original_response, set_modified_response, live_server_setup
import re
sleep_time_for_fetch_thread = 3
def test_share_watch(client, live_server):
set_original_response()
live_server_setup(live_server)
test_url = url_for('test_endpoint', _external=True)
css_filter = ".nice-filter"
# Add our URL to the import page
res = client.post(
url_for("import_page"),
data={"urls": test_url},
follow_redirects=True
)
assert b"1 Imported" in res.data
# Goto the edit page, add our ignore text
# Add our URL to the import page
res = client.post(
url_for("edit_page", uuid="first"),
data={"css_filter": css_filter, "url": test_url, "tag": "", "headers": "", 'fetch_backend': "html_requests"},
follow_redirects=True
)
assert b"Updated watch." in res.data
# Check it saved
res = client.get(
url_for("edit_page", uuid="first"),
)
assert bytes(css_filter.encode('utf-8')) in res.data
# click share the link
res = client.get(
url_for("api_share_put_watch", uuid="first"),
follow_redirects=True
)
assert b"Share this link:" in res.data
assert b"https://changedetection.io/share/" in res.data
html = res.data.decode()
share_link_search = re.search('<span id="share-link">(.*)</span>', html, re.IGNORECASE)
assert share_link_search
# Now delete what we have, we will try to re-import it
# Cleanup everything
res = client.get(url_for("api_delete", uuid="all"), follow_redirects=True)
assert b'Deleted' in res.data
# Add our URL to the import page
res = client.post(
url_for("import_page"),
data={"urls": share_link_search.group(1)},
follow_redirects=True
)
assert b"1 Imported" in res.data
# Now hit edit, we should see what we expect
# that the import fetched the meta-data
# Check it saved
res = client.get(
url_for("edit_page", uuid="first"),
)
assert bytes(css_filter.encode('utf-8')) in res.data

View File

@@ -20,8 +20,8 @@ def test_check_watch_field_storage(client, live_server):
res = client.post(
url_for("edit_page", uuid="first"),
data={ "notification_urls": "json://myapi.com",
"minutes_between_check": 126,
data={ "notification_urls": "json://127.0.0.1:30000\r\njson://128.0.0.1\r\n",
"time_between_check-minutes": 126,
"css_filter" : ".fooclass",
"title" : "My title",
"ignore_text" : "ignore this",
@@ -38,8 +38,14 @@ def test_check_watch_field_storage(client, live_server):
url_for("edit_page", uuid="first"),
follow_redirects=True
)
# checks that we dont get an error when using blank lines in the field value
assert not b"json://127.0.0.1\n\njson" in res.data
assert not b"json://127.0.0.1\r\n\njson" in res.data
assert not b"json://127.0.0.1\r\n\rjson" in res.data
assert b"json://127.0.0.1" in res.data
assert b"json://128.0.0.1" in res.data
assert b"json://myapi.com" in res.data
assert b"126" in res.data
assert b".fooclass" in res.data
assert b"My title" in res.data
@@ -56,7 +62,7 @@ def test_check_recheck_global_setting(client, live_server):
res = client.post(
url_for("settings_page"),
data={
"requests-minutes_between_check": 1566,
"requests-time_between_check-minutes": 1566,
'application-fetch_backend': "html_requests"
},
follow_redirects=True
@@ -88,7 +94,7 @@ def test_check_recheck_global_setting(client, live_server):
res = client.post(
url_for("settings_page"),
data={
"requests-minutes_between_check": 222,
"requests-time_between_check-minutes": 222,
'application-fetch_backend': "html_requests"
},
follow_redirects=True
@@ -108,7 +114,7 @@ def test_check_recheck_global_setting(client, live_server):
res = client.post(
url_for("edit_page", uuid="first"),
data={"url": test_url,
"minutes_between_check": 55,
"time_between_check-minutes": 55,
'fetch_backend': "html_requests"
},
follow_redirects=True
@@ -124,8 +130,8 @@ def test_check_recheck_global_setting(client, live_server):
res = client.post(
url_for("settings_page"),
data={
"requests-minutes_between_check": 666,
'application-fetch_backend': "html_requests"
"requests-time_between_check-minutes": 666,
"application-fetch_backend": "html_requests"
},
follow_redirects=True
)
@@ -134,7 +140,7 @@ def test_check_recheck_global_setting(client, live_server):
res = client.post(
url_for("edit_page", uuid="first"),
data={"url": test_url,
"minutes_between_check": "",
"time_between_check-minutes": "",
'fetch_backend': "html_requests"
},
follow_redirects=True
@@ -147,4 +153,3 @@ def test_check_recheck_global_setting(client, live_server):
follow_redirects=True
)
assert b"666" in res.data

View File

@@ -17,7 +17,7 @@ wtforms ~= 3.0
jsonpath-ng ~= 1.5.3
# Notification library
apprise ~= 0.9.8.1
apprise ~= 0.9.8.3
# apprise mqtt https://github.com/dgtlmoon/changedetection.io/issues/315
paho-mqtt