mirror of https://github.com/dgtlmoon/changedetection.io.git (synced 2025-11-08 18:47:32 +00:00)

Compare commits: 0.40.0.3...export-dat (1 commit)

| Author | SHA1 | Date |
|---|---|---|
|  | b9514f6a10 |  |
Dockerfile

@@ -1,7 +1,7 @@
# pip dependencies install stage
FROM python:3.8-slim as builder

# See `cryptography` pin comment in requirements.txt
# rustc compiler would be needed on ARM type devices but theres an issue with some deps not building..
ARG CRYPTOGRAPHY_DONT_BUILD_RUST=1

RUN apt-get update && apt-get install -y --no-install-recommends \

@@ -31,7 +31,8 @@ RUN pip install --target=/dependencies playwright~=1.27.1 \
# Final image stage
FROM python:3.8-slim

# See `cryptography` pin comment in requirements.txt
# Actual packages needed at runtime, usually due to the notification (apprise) backend
# rustc compiler would be needed on ARM type devices but theres an issue with some deps not building..
ARG CRYPTOGRAPHY_DONT_BUILD_RUST=1

# Re #93, #73, excluding rustc (adds another 430Mb~)
MANIFEST.in

@@ -1,10 +1,9 @@
recursive-include changedetectionio/api *
recursive-include changedetectionio/blueprint *
recursive-include changedetectionio/model *
recursive-include changedetectionio/res *
recursive-include changedetectionio/static *
recursive-include changedetectionio/templates *
recursive-include changedetectionio/static *
recursive-include changedetectionio/model *
recursive-include changedetectionio/tests *
recursive-include changedetectionio/res *
prune changedetectionio/static/package-lock.json
prune changedetectionio/static/styles/node_modules
prune changedetectionio/static/styles/package-lock.json
README.md (24 changed lines)

@@ -159,7 +159,7 @@ Just some examples

<img src="https://raw.githubusercontent.com/dgtlmoon/changedetection.io/master/docs/screenshot-notifications.png" style="max-width:100%;" alt="Self-hosted web page change monitoring notifications" title="Self-hosted web page change monitoring notifications" />

Now you can also customise your notification content and use <a target="_new" href="https://jinja.palletsprojects.com/en/3.0.x/templates/">Jinja2 templating</a> for their title and body!
Now you can also customise your notification content!
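
A rough sketch of what that Jinja2 templating enables (the parameter values below are invented for illustration; in the application they come from `create_notification_parameters()` in `changedetectionio/notification.py`, shown further down this diff):

```python
# Minimal sketch only, assuming Jinja2 is installed: render a notification
# title and body the same way process_notification() does on the 0.40.0.3
# side of this diff. The parameter values are made up.
from jinja2 import Environment, BaseLoader

notification_parameters = {
    "watch_url": "https://example.com/product-page",          # hypothetical watch URL
    "diff": "(removed) price: 24.99\n(added) price: 23.50",   # hypothetical diff text
}

jinja2_env = Environment(loader=BaseLoader)
n_title = jinja2_env.from_string(
    "ChangeDetection.io Notification - {{ watch_url }}"
).render(**notification_parameters)
n_body = jinja2_env.from_string(
    "{{ watch_url }} had a change.\n---\n{{ diff }}\n---\n"
).render(**notification_parameters)

print(n_title)
print(n_body)
```

The same `Environment(loader=BaseLoader)` / `from_string(...).render(...)` pattern appears in `process_notification()` and `ValidateJinja2Template` in the 0.40.0.3 code later in this diff.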

## JSON API Monitoring

@@ -187,29 +187,11 @@ When you enable a `json:` or `jq:` filter, you can even automatically extract an

```html
<html>
...
<script type="application/ld+json">

{
  "@context":"http://schema.org/",
  "@type":"Product",
  "offers":{
    "@type":"Offer",
    "availability":"http://schema.org/InStock",
    "price":"3949.99",
    "priceCurrency":"USD",
    "url":"https://www.newegg.com/p/3D5-000D-001T1"
  },
  "description":"Cobratype King Cobra Hero Desktop Gaming PC",
  "name":"Cobratype King Cobra Hero Desktop Gaming PC",
  "sku":"3D5-000D-001T1",
  "itemCondition":"NewCondition"
}
{"@context":"http://schema.org","@type":"Product","name":"Nan Optipro Stage 1 Baby Formula 800g","price": 23.50 }
</script>
```

`json:$..price` or `jq:..price` would give `3949.99`, or you can extract the whole structure (use a JSONpath test website to validate with)

The application also supports notifying you that it can follow this information automatically

`json:$.price` or `jq:.price` would give `23.50`, or you can extract the whole structure
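
For intuition, this is roughly what a recursive `$..price` style lookup does once the JSON has been parsed. It is only a conceptual stand-in; changedetection.io applies real JSONPath/jq filters for its `json:`/`jq:` rules rather than a hand-rolled walker like this:

```python
import json

# Conceptual stand-in for a recursive `json:$..price` / `jq:..price` lookup:
# walk the parsed document and collect every value stored under a "price" key.
def find_all(obj, key):
    if isinstance(obj, dict):
        for k, v in obj.items():
            if k == key:
                yield v
            yield from find_all(v, key)
    elif isinstance(obj, list):
        for item in obj:
            yield from find_all(item, key)

doc = json.loads('{"@type": "Product", "offers": {"@type": "Offer", "price": "3949.99"}}')
print(list(find_all(doc, "price")))  # ['3949.99']
```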

## Proxy Configuration
@@ -10,7 +10,6 @@ import threading
|
||||
import time
|
||||
import timeago
|
||||
|
||||
from changedetectionio import queuedWatchMetaData
|
||||
from copy import deepcopy
|
||||
from distutils.util import strtobool
|
||||
from feedgen.feed import FeedGenerator
|
||||
@@ -36,7 +35,7 @@ from flask_wtf import CSRFProtect
|
||||
from changedetectionio import html_tools
|
||||
from changedetectionio.api import api_v1
|
||||
|
||||
__version__ = '0.40.0.3'
|
||||
__version__ = '0.39.22.1'
|
||||
|
||||
datastore = None
|
||||
|
||||
@@ -96,12 +95,6 @@ def init_app_secret(datastore_path):
|
||||
|
||||
return secret
|
||||
|
||||
|
||||
@app.template_global()
|
||||
def get_darkmode_state():
|
||||
css_dark_mode = request.cookies.get('css_dark_mode', 'false')
|
||||
return 'true' if css_dark_mode and strtobool(css_dark_mode) else 'false'
|
||||
|
||||
# We use the whole watch object from the store/JSON so we can see if there's some related status in terms of a thread
|
||||
# running or something similar.
|
||||
@app.template_filter('format_last_checked_time')
|
||||
@@ -209,6 +202,10 @@ def changedetection_app(config=None, datastore_o=None):
|
||||
watch_api.add_resource(api_v1.SystemInfo, '/api/v1/systeminfo',
|
||||
resource_class_kwargs={'datastore': datastore, 'update_q': update_q})
|
||||
|
||||
def getDarkModeSetting():
|
||||
css_dark_mode = request.cookies.get('css_dark_mode')
|
||||
return True if (css_dark_mode == 'true' or css_dark_mode == True) else False
|
||||
|
||||
# Setup cors headers to allow all domains
|
||||
# https://flask-cors.readthedocs.io/en/latest/
|
||||
# CORS(app)
|
||||
@@ -405,8 +402,10 @@ def changedetection_app(config=None, datastore_o=None):
|
||||
sorted_watches.append(watch)
|
||||
|
||||
existing_tags = datastore.get_all_tags()
|
||||
|
||||
form = forms.quickWatchForm(request.form)
|
||||
output = render_template("watch-overview.html",
|
||||
dark_mode=getDarkModeSetting(),
|
||||
form=form,
|
||||
watches=sorted_watches,
|
||||
tags=existing_tags,
|
||||
@@ -416,7 +415,7 @@ def changedetection_app(config=None, datastore_o=None):
|
||||
# Don't link to hosting when we're on the hosting environment
|
||||
hosted_sticky=os.getenv("SALTED_PASS", False) == False,
|
||||
guid=datastore.data['app_guid'],
|
||||
queued_uuids=[q_uuid.item['uuid'] for q_uuid in update_q.queue])
|
||||
queued_uuids=[uuid for p,uuid in update_q.queue])
|
||||
|
||||
|
||||
if session.get('share-link'):
|
||||
@@ -596,16 +595,25 @@ def changedetection_app(config=None, datastore_o=None):
|
||||
using_default_check_time = False
|
||||
break
|
||||
|
||||
# Use the default if it's the same as system-wide.
|
||||
# Use the default if its the same as system wide
|
||||
if form.fetch_backend.data == datastore.data['settings']['application']['fetch_backend']:
|
||||
extra_update_obj['fetch_backend'] = None
|
||||
|
||||
|
||||
|
||||
# Ignore text
|
||||
form_ignore_text = form.ignore_text.data
|
||||
datastore.data['watching'][uuid]['ignore_text'] = form_ignore_text
|
||||
|
||||
# Reset the previous_md5 so we process a new snapshot including stripping ignore text.
|
||||
if form_ignore_text:
|
||||
if len(datastore.data['watching'][uuid].history):
|
||||
extra_update_obj['previous_md5'] = get_current_checksum_include_ignore_text(uuid=uuid)
|
||||
|
||||
# Reset the previous_md5 so we process a new snapshot including stripping ignore text.
|
||||
if form.include_filters.data != datastore.data['watching'][uuid].get('include_filters', []):
|
||||
if len(datastore.data['watching'][uuid].history):
|
||||
extra_update_obj['previous_md5'] = get_current_checksum_include_ignore_text(uuid=uuid)
|
||||
|
||||
# Be sure proxy value is None
|
||||
if datastore.proxy_list is not None and form.data['proxy'] == '':
|
||||
extra_update_obj['proxy'] = None
|
||||
@@ -623,7 +631,7 @@ def changedetection_app(config=None, datastore_o=None):
|
||||
datastore.needs_write_urgent = True
|
||||
|
||||
# Queue the watch for immediate recheck, with a higher priority
|
||||
update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': uuid, 'skip_when_checksum_same': False}))
|
||||
update_q.put((1, uuid))
|
||||
|
||||
# Diff page [edit] link should go back to diff page
|
||||
if request.args.get("next") and request.args.get("next") == 'diff':
|
||||
@@ -656,6 +664,7 @@ def changedetection_app(config=None, datastore_o=None):
|
||||
browser_steps_config=browser_step_ui_config,
|
||||
current_base_url=datastore.data['settings']['application']['base_url'],
|
||||
emailprefix=os.getenv('NOTIFICATION_MAIL_BUTTON_PREFIX', False),
|
||||
dark_mode=getDarkModeSetting(),
|
||||
form=form,
|
||||
has_default_notification_urls=True if len(datastore.data['settings']['application']['notification_urls']) else False,
|
||||
has_empty_checktime=using_default_check_time,
|
||||
@@ -743,6 +752,7 @@ def changedetection_app(config=None, datastore_o=None):
|
||||
|
||||
output = render_template("settings.html",
|
||||
form=form,
|
||||
dark_mode=getDarkModeSetting(),
|
||||
current_base_url = datastore.data['settings']['application']['base_url'],
|
||||
hide_remove_pass=os.getenv("SALTED_PASS", False),
|
||||
api_key=datastore.data['settings']['application'].get('api_access_token'),
|
||||
@@ -764,7 +774,7 @@ def changedetection_app(config=None, datastore_o=None):
|
||||
importer = import_url_list()
|
||||
importer.run(data=request.values.get('urls'), flash=flash, datastore=datastore)
|
||||
for uuid in importer.new_uuids:
|
||||
update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': uuid, 'skip_when_checksum_same': True}))
|
||||
update_q.put((1, uuid))
|
||||
|
||||
if len(importer.remaining_data) == 0:
|
||||
return redirect(url_for('index'))
|
||||
@@ -777,12 +787,13 @@ def changedetection_app(config=None, datastore_o=None):
|
||||
d_importer = import_distill_io_json()
|
||||
d_importer.run(data=request.values.get('distill-io'), flash=flash, datastore=datastore)
|
||||
for uuid in d_importer.new_uuids:
|
||||
update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': uuid, 'skip_when_checksum_same': True}))
|
||||
update_q.put((1, uuid))
|
||||
|
||||
|
||||
|
||||
# Could be some remaining, or we could be on GET
|
||||
output = render_template("import.html",
|
||||
dark_mode=getDarkModeSetting(),
|
||||
import_url_list_remaining="\n".join(remaining_urls),
|
||||
original_distill_json=''
|
||||
)
|
||||
@@ -882,6 +893,7 @@ def changedetection_app(config=None, datastore_o=None):
|
||||
output = render_template("diff.html",
|
||||
current_diff_url=watch['url'],
|
||||
current_previous_version=str(previous_version),
|
||||
dark_mode=getDarkModeSetting(),
|
||||
extra_stylesheets=extra_stylesheets,
|
||||
extra_title=" - Diff - {}".format(watch['title'] if watch['title'] else watch['url']),
|
||||
extract_form=extract_form,
|
||||
@@ -932,6 +944,7 @@ def changedetection_app(config=None, datastore_o=None):
|
||||
content=content,
|
||||
history_n=watch.history_n,
|
||||
extra_stylesheets=extra_stylesheets,
|
||||
dark_mode=getDarkModeSetting(),
|
||||
# current_diff_url=watch['url'],
|
||||
watch=watch,
|
||||
uuid=uuid,
|
||||
@@ -978,6 +991,7 @@ def changedetection_app(config=None, datastore_o=None):
|
||||
content=content,
|
||||
history_n=watch.history_n,
|
||||
extra_stylesheets=extra_stylesheets,
|
||||
dark_mode=getDarkModeSetting(),
|
||||
ignored_line_numbers=ignored_line_numbers,
|
||||
triggered_line_numbers=trigger_line_numbers,
|
||||
current_diff_url=watch['url'],
|
||||
@@ -996,10 +1010,15 @@ def changedetection_app(config=None, datastore_o=None):
|
||||
def notification_logs():
|
||||
global notification_debug_log
|
||||
output = render_template("notification-log.html",
|
||||
dark_mode=getDarkModeSetting(),
|
||||
logs=notification_debug_log if len(notification_debug_log) else ["Notification logs are empty - no notifications sent yet."])
|
||||
|
||||
return output
|
||||
|
||||
@app.route("/favicon.ico", methods=['GET'])
|
||||
def favicon():
|
||||
return send_from_directory("static/images", path="favicon.ico")
|
||||
|
||||
# We're good but backups are even better!
|
||||
@app.route("/backup", methods=['GET'])
|
||||
@login_required
|
||||
@@ -1142,7 +1161,7 @@ def changedetection_app(config=None, datastore_o=None):
|
||||
|
||||
if not add_paused and new_uuid:
|
||||
# Straight into the queue.
|
||||
update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': new_uuid}))
|
||||
update_q.put((1, new_uuid))
|
||||
flash("Watch added.")
|
||||
|
||||
if add_paused:
|
||||
@@ -1179,7 +1198,7 @@ def changedetection_app(config=None, datastore_o=None):
|
||||
uuid = list(datastore.data['watching'].keys()).pop()
|
||||
|
||||
new_uuid = datastore.clone(uuid)
|
||||
update_q.put(queuedWatchMetaData.PrioritizedItem(priority=5, item={'uuid': new_uuid, 'skip_when_checksum_same': True}))
|
||||
update_q.put((5, new_uuid))
|
||||
flash('Cloned.')
|
||||
|
||||
return redirect(url_for('index'))
|
||||
@@ -1187,7 +1206,7 @@ def changedetection_app(config=None, datastore_o=None):
|
||||
@app.route("/api/checknow", methods=['GET'])
|
||||
@login_required
|
||||
def form_watch_checknow():
|
||||
# Forced recheck will skip the 'skip if content is the same' rule (, 'reprocess_existing_data': True})))
|
||||
|
||||
tag = request.args.get('tag')
|
||||
uuid = request.args.get('uuid')
|
||||
i = 0
|
||||
@@ -1196,9 +1215,11 @@ def changedetection_app(config=None, datastore_o=None):
|
||||
for t in running_update_threads:
|
||||
running_uuids.append(t.current_uuid)
|
||||
|
||||
# @todo check thread is running and skip
|
||||
|
||||
if uuid:
|
||||
if uuid not in running_uuids:
|
||||
update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': uuid, 'skip_when_checksum_same': False}))
|
||||
update_q.put((1, uuid))
|
||||
i = 1
|
||||
|
||||
elif tag != None:
|
||||
@@ -1206,14 +1227,14 @@ def changedetection_app(config=None, datastore_o=None):
|
||||
for watch_uuid, watch in datastore.data['watching'].items():
|
||||
if (tag != None and tag in watch['tag']):
|
||||
if watch_uuid not in running_uuids and not datastore.data['watching'][watch_uuid]['paused']:
|
||||
update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': watch_uuid, 'skip_when_checksum_same': False}))
|
||||
update_q.put((1, watch_uuid))
|
||||
i += 1
|
||||
|
||||
else:
|
||||
# No tag, no uuid, add everything.
|
||||
for watch_uuid, watch in datastore.data['watching'].items():
|
||||
if watch_uuid not in running_uuids and not datastore.data['watching'][watch_uuid]['paused']:
|
||||
update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': watch_uuid, 'skip_when_checksum_same': False}))
|
||||
update_q.put((1, watch_uuid))
|
||||
i += 1
|
||||
flash("{} watches are queued for rechecking.".format(i))
|
||||
return redirect(url_for('index', tag=tag))
|
||||
@@ -1260,14 +1281,6 @@ def changedetection_app(config=None, datastore_o=None):
|
||||
datastore.data['watching'][uuid.strip()]['notification_muted'] = False
|
||||
flash("{} watches un-muted".format(len(uuids)))
|
||||
|
||||
elif (op == 'recheck'):
|
||||
for uuid in uuids:
|
||||
uuid = uuid.strip()
|
||||
if datastore.data['watching'].get(uuid):
|
||||
# Recheck and require a full reprocessing
|
||||
update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': uuid, 'skip_when_checksum_same': False}))
|
||||
|
||||
flash("{} watches un-muted".format(len(uuids)))
|
||||
elif (op == 'notification-default'):
|
||||
from changedetectionio.notification import (
|
||||
default_notification_format_for_watch
|
||||
@@ -1340,10 +1353,6 @@ def changedetection_app(config=None, datastore_o=None):
|
||||
import changedetectionio.blueprint.browser_steps as browser_steps
|
||||
app.register_blueprint(browser_steps.construct_blueprint(datastore), url_prefix='/browser-steps')
|
||||
|
||||
import changedetectionio.blueprint.price_data_follower as price_data_follower
|
||||
app.register_blueprint(price_data_follower.construct_blueprint(datastore, update_q), url_prefix='/price_data_follower')
|
||||
|
||||
|
||||
# @todo handle ctrl break
|
||||
ticker_thread = threading.Thread(target=ticker_thread_check_time_launch_checks).start()
|
||||
threading.Thread(target=notification_runner).start()
|
||||
@@ -1449,11 +1458,7 @@ def ticker_thread_check_time_launch_checks():
|
||||
watch_uuid_list = []
|
||||
while True:
|
||||
try:
|
||||
# Get a list of watches sorted by last_checked, [1] because it gets passed a tuple
|
||||
# This is so we examine the most over-due first
|
||||
for k in sorted(datastore.data['watching'].items(), key=lambda item: item[1].get('last_checked',0)):
|
||||
watch_uuid_list.append(k[0])
|
||||
|
||||
watch_uuid_list = datastore.data['watching'].keys()
|
||||
except RuntimeError as e:
|
||||
# RuntimeError: dictionary changed size during iteration
|
||||
time.sleep(0.1)
|
||||
@@ -1493,7 +1498,7 @@ def ticker_thread_check_time_launch_checks():
|
||||
seconds_since_last_recheck = now - watch['last_checked']
|
||||
|
||||
if seconds_since_last_recheck >= (threshold + watch.jitter_seconds) and seconds_since_last_recheck >= recheck_time_minimum_seconds:
|
||||
if not uuid in running_uuids and uuid not in [q_uuid.item['uuid'] for q_uuid in update_q.queue]:
|
||||
if not uuid in running_uuids and uuid not in [q_uuid for p,q_uuid in update_q.queue]:
|
||||
|
||||
# Proxies can be set to have a limit on seconds between which they can be called
|
||||
watch_proxy = datastore.get_preferred_proxy_for_watch(uuid=uuid)
|
||||
@@ -1524,9 +1529,8 @@ def ticker_thread_check_time_launch_checks():
|
||||
priority,
|
||||
watch.jitter_seconds,
|
||||
now - watch['last_checked']))
|
||||
|
||||
# Into the queue with you
|
||||
update_q.put(queuedWatchMetaData.PrioritizedItem(priority=priority, item={'uuid': uuid, 'skip_when_checksum_same': True}))
|
||||
update_q.put((priority, uuid))
|
||||
|
||||
# Reset for next time
|
||||
watch.jitter_seconds = 0
|
||||
|
||||
changedetectionio/api/api_v1.py

@@ -1,4 +1,3 @@
from changedetectionio import queuedWatchMetaData
from flask_restful import abort, Resource
from flask import request, make_response
import validators

@@ -25,7 +24,7 @@ class Watch(Resource):
abort(404, message='No watch exists with the UUID of {}'.format(uuid))

if request.args.get('recheck'):
self.update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': uuid, 'skip_when_checksum_same': True}))
self.update_q.put((1, uuid))
return "OK", 200

# Return without history, get that via another API call

@@ -101,7 +100,7 @@ class CreateWatch(Resource):
extras = {'title': json_data['title'].strip()} if json_data.get('title') else {}

new_uuid = self.datastore.add_watch(url=json_data['url'].strip(), tag=tag, extras=extras)
self.update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': new_uuid, 'skip_when_checksum_same': True}))
self.update_q.put((1, new_uuid))
return {'uuid': new_uuid}, 201

# Return concise list of available watches and some very basic info

@@ -119,7 +118,7 @@ class CreateWatch(Resource):

if request.args.get('recheck_all'):
for uuid in self.datastore.data['watching'].keys():
self.update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': uuid, 'skip_when_checksum_same': True}))
self.update_q.put((1, uuid))
return {'status': "OK"}, 200

return list, 200

@@ -75,13 +75,15 @@ class steppable_browser_interface():
def action_goto_url(self, url, optional_value):
# self.page.set_viewport_size({"width": 1280, "height": 5000})
now = time.time()
response = self.page.goto(url, timeout=0, wait_until='commit')
response = self.page.goto(url, timeout=0, wait_until='domcontentloaded')
print("Time to goto URL", time.time() - now)

# Wait_until = commit
# - `'commit'` - consider operation to be finished when network response is received and the document started loading.
# Better to not use any smarts from Playwright and just wait an arbitrary number of seconds
# This seemed to solve nearly all 'TimeoutErrors'
print("Time to goto URL ", time.time() - now)
extra_wait = int(os.getenv("WEBDRIVER_DELAY_BEFORE_CONTENT_READY", 5))
self.page.wait_for_timeout(extra_wait * 1000)

def action_click_element_containing_text(self, selector=None, value=''):
if not len(value.strip()):

@@ -1,33 +0,0 @@

from distutils.util import strtobool
from flask import Blueprint, flash, redirect, url_for
from flask_login import login_required
from changedetectionio.store import ChangeDetectionStore
from changedetectionio import queuedWatchMetaData
from queue import PriorityQueue

PRICE_DATA_TRACK_ACCEPT = 'accepted'
PRICE_DATA_TRACK_REJECT = 'rejected'

def construct_blueprint(datastore: ChangeDetectionStore, update_q: PriorityQueue):

price_data_follower_blueprint = Blueprint('price_data_follower', __name__)

@login_required
@price_data_follower_blueprint.route("/<string:uuid>/accept", methods=['GET'])
def accept(uuid):
datastore.data['watching'][uuid]['track_ldjson_price_data'] = PRICE_DATA_TRACK_ACCEPT
update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': uuid, 'skip_when_checksum_same': False}))
return redirect(url_for("form_watch_checknow", uuid=uuid))

@login_required
@price_data_follower_blueprint.route("/<string:uuid>/reject", methods=['GET'])
def reject(uuid):
datastore.data['watching'][uuid]['track_ldjson_price_data'] = PRICE_DATA_TRACK_REJECT
return redirect(url_for("index"))

return price_data_follower_blueprint
@@ -23,9 +23,6 @@ class Non200ErrorCodeReceived(Exception):
|
||||
self.page_text = html_tools.html_to_text(page_html)
|
||||
return
|
||||
|
||||
class checksumFromPreviousCheckWasTheSame(Exception):
|
||||
def __init__(self):
|
||||
return
|
||||
|
||||
class JSActionExceptions(Exception):
|
||||
def __init__(self, status_code, url, screenshot, message=''):
|
||||
@@ -42,7 +39,7 @@ class BrowserStepsStepTimout(Exception):
|
||||
|
||||
|
||||
class PageUnloadable(Exception):
|
||||
def __init__(self, status_code, url, message, screenshot=False):
|
||||
def __init__(self, status_code, url, screenshot=False, message=False):
|
||||
# Set this so we can use it in other parts of the app
|
||||
self.status_code = status_code
|
||||
self.url = url
|
||||
@@ -289,8 +286,6 @@ class base_html_playwright(Fetcher):
|
||||
proxy=self.proxy,
|
||||
# This is needed to enable JavaScript execution on GitHub and others
|
||||
bypass_csp=True,
|
||||
# Can't think why we need the service workers for our use case?
|
||||
service_workers='block',
|
||||
# Should never be needed
|
||||
accept_downloads=False
|
||||
)
|
||||
@@ -299,34 +294,24 @@ class base_html_playwright(Fetcher):
|
||||
if len(request_headers):
|
||||
context.set_extra_http_headers(request_headers)
|
||||
|
||||
try:
|
||||
self.page.set_default_navigation_timeout(90000)
|
||||
self.page.set_default_timeout(90000)
|
||||
|
||||
# Listen for all console events and handle errors
|
||||
self.page.on("console", lambda msg: print(f"Playwright console: Watch URL: {url} {msg.type}: {msg.text} {msg.args}"))
|
||||
|
||||
# Goto page
|
||||
try:
|
||||
# Bug - never set viewport size BEFORE page.goto
|
||||
|
||||
|
||||
# Waits for the next navigation. Using Python context manager
|
||||
# prevents a race condition between clicking and waiting for a navigation.
|
||||
with self.page.expect_navigation():
|
||||
response = self.page.goto(url, wait_until='load')
|
||||
# Wait_until = commit
|
||||
# - `'commit'` - consider operation to be finished when network response is received and the document started loading.
|
||||
# Better to not use any smarts from Playwright and just wait an arbitrary number of seconds
|
||||
# This seemed to solve nearly all 'TimeoutErrors'
|
||||
response = self.page.goto(url, wait_until='commit')
|
||||
except playwright._impl._api_types.Error as e:
|
||||
# Retry once - https://github.com/browserless/chrome/issues/2485
|
||||
# Sometimes errors related to invalid cert's and other can be random
|
||||
print ("Content Fetcher > retrying request got error - ", str(e))
|
||||
time.sleep(1)
|
||||
response = self.page.goto(url, wait_until='commit')
|
||||
|
||||
except Exception as e:
|
||||
print ("Content Fetcher > Other exception when page.goto", str(e))
|
||||
context.close()
|
||||
browser.close()
|
||||
raise PageUnloadable(url=url, status_code=None, message=str(e))
|
||||
|
||||
# Execute any browser steps
|
||||
try:
|
||||
extra_wait = int(os.getenv("WEBDRIVER_DELAY_BEFORE_CONTENT_READY", 5)) + self.render_extract_delay
|
||||
self.page.wait_for_timeout(extra_wait * 1000)
|
||||
|
||||
@@ -339,15 +324,17 @@ class base_html_playwright(Fetcher):
|
||||
# This can be ok, we will try to grab what we could retrieve
|
||||
pass
|
||||
except Exception as e:
|
||||
print ("Content Fetcher > Other exception when executing custom JS code", str(e))
|
||||
print ("other exception when page.goto")
|
||||
print (str(e))
|
||||
context.close()
|
||||
browser.close()
|
||||
raise PageUnloadable(url=url, status_code=None, message=str(e))
|
||||
raise PageUnloadable(url=url, status_code=None)
|
||||
|
||||
|
||||
if response is None:
|
||||
context.close()
|
||||
browser.close()
|
||||
print ("Content Fetcher > Response object was none")
|
||||
print ("response object was none")
|
||||
raise EmptyReply(url=url, status_code=None)
|
||||
|
||||
# Bug 2(?) Set the viewport size AFTER loading the page
|
||||
@@ -366,7 +353,7 @@ class base_html_playwright(Fetcher):
|
||||
if len(self.page.content().strip()) == 0:
|
||||
context.close()
|
||||
browser.close()
|
||||
print ("Content Fetcher > Content was empty")
|
||||
print ("Content was empty")
|
||||
raise EmptyReply(url=url, status_code=None)
|
||||
|
||||
# Bug 2(?) Set the viewport size AFTER loading the page
|
||||
@@ -511,7 +498,7 @@ class base_html_webdriver(Fetcher):
|
||||
try:
|
||||
self.driver.quit()
|
||||
except Exception as e:
|
||||
print("Content Fetcher > Exception in chrome shutdown/quit" + str(e))
|
||||
print("Exception in chrome shutdown/quit" + str(e))
|
||||
|
||||
|
||||
# "html_requests" is listed as the default fetcher in store.py!
|
||||
|
||||
@@ -1,13 +1,11 @@
|
||||
import hashlib
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
import time
|
||||
import urllib3
|
||||
|
||||
from changedetectionio import content_fetcher, html_tools
|
||||
from changedetectionio.blueprint.price_data_follower import PRICE_DATA_TRACK_ACCEPT, PRICE_DATA_TRACK_REJECT
|
||||
from copy import deepcopy
|
||||
|
||||
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
|
||||
|
||||
@@ -40,7 +38,8 @@ class perform_site_check():
|
||||
|
||||
return regex
|
||||
|
||||
def run(self, uuid, skip_when_checksum_same=True):
|
||||
def run(self, uuid):
|
||||
from copy import deepcopy
|
||||
changed_detected = False
|
||||
screenshot = False # as bytes
|
||||
stripped_text_from_html = ""
|
||||
@@ -123,14 +122,6 @@ class perform_site_check():
|
||||
self.screenshot = fetcher.screenshot
|
||||
self.xpath_data = fetcher.xpath_data
|
||||
|
||||
# Watches added automatically in the queue manager will skip if its the same checksum as the previous run
|
||||
# Saves a lot of CPU
|
||||
update_obj['previous_md5_before_filters'] = hashlib.md5(fetcher.content.encode('utf-8')).hexdigest()
|
||||
if skip_when_checksum_same:
|
||||
if update_obj['previous_md5_before_filters'] == watch.get('previous_md5_before_filters'):
|
||||
raise content_fetcher.checksumFromPreviousCheckWasTheSame()
|
||||
|
||||
|
||||
# Fetching complete, now filters
|
||||
# @todo move to class / maybe inside of fetcher abstract base?
|
||||
|
||||
@@ -149,7 +140,7 @@ class perform_site_check():
|
||||
is_html = False
|
||||
is_json = False
|
||||
|
||||
include_filters_rule = deepcopy(watch.get('include_filters', []))
|
||||
include_filters_rule = watch.get('include_filters', [])
|
||||
# include_filters_rule = watch['include_filters']
|
||||
subtractive_selectors = watch.get(
|
||||
"subtractive_selectors", []
|
||||
@@ -157,10 +148,6 @@ class perform_site_check():
|
||||
"global_subtractive_selectors", []
|
||||
)
|
||||
|
||||
# Inject a virtual LD+JSON price tracker rule
|
||||
if watch.get('track_ldjson_price_data', '') == PRICE_DATA_TRACK_ACCEPT:
|
||||
include_filters_rule.append(html_tools.LD_JSON_PRODUCT_OFFER_SELECTOR)
|
||||
|
||||
has_filter_rule = include_filters_rule and len("".join(include_filters_rule).strip())
|
||||
has_subtractive_selectors = subtractive_selectors and len(subtractive_selectors[0].strip())
|
||||
|
||||
@@ -168,14 +155,6 @@ class perform_site_check():
|
||||
include_filters_rule.append("json:$")
|
||||
has_filter_rule = True
|
||||
|
||||
if is_json:
|
||||
# Sort the JSON so we dont get false alerts when the content is just re-ordered
|
||||
try:
|
||||
fetcher.content = json.dumps(json.loads(fetcher.content), sort_keys=True)
|
||||
except Exception as e:
|
||||
# Might have just been a snippet, or otherwise bad JSON, continue
|
||||
pass
|
||||
|
||||
if has_filter_rule:
|
||||
json_filter_prefixes = ['json:', 'jq:']
|
||||
for filter in include_filters_rule:
|
||||
@@ -183,8 +162,6 @@ class perform_site_check():
|
||||
stripped_text_from_html += html_tools.extract_json_as_string(content=fetcher.content, json_filter=filter)
|
||||
is_html = False
|
||||
|
||||
|
||||
|
||||
if is_html or is_source:
|
||||
|
||||
# CSS Filter, extract the HTML that matches and feed that into the existing inscriptis::get_text
|
||||
@@ -196,13 +173,9 @@ class perform_site_check():
|
||||
# Don't run get_text or xpath/css filters on plaintext
|
||||
stripped_text_from_html = html_content
|
||||
else:
|
||||
# Does it have some ld+json price data? used for easier monitoring
|
||||
update_obj['has_ldjson_price_data'] = html_tools.has_ldjson_product_info(fetcher.content)
|
||||
|
||||
# Then we assume HTML
|
||||
if has_filter_rule:
|
||||
html_content = ""
|
||||
|
||||
for filter_rule in include_filters_rule:
|
||||
# For HTML/XML we offer xpath as an option, just start a regular xPath "/.."
|
||||
if filter_rule[0] == '/' or filter_rule.startswith('xpath:'):
|
||||
|
||||
changedetectionio/forms.py

@@ -193,7 +193,7 @@ class ValidateAppRiseServers(object):
message = field.gettext('\'%s\' is not a valid AppRise URL.' % (server_url))
raise ValidationError(message)

class ValidateJinja2Template(object):
class ValidateTokensList(object):
"""
Validates that a {token} is from a valid set
"""

@@ -202,24 +202,11 @@ class ValidateJinja2Template(object):

def __call__(self, form, field):
from changedetectionio import notification

from jinja2 import Environment, BaseLoader, TemplateSyntaxError
from jinja2.meta import find_undeclared_variables

try:
jinja2_env = Environment(loader=BaseLoader)
jinja2_env.globals.update(notification.valid_tokens)
rendered = jinja2_env.from_string(field.data).render()
except TemplateSyntaxError as e:
raise ValidationError(f"This is not a valid Jinja2 template: {e}") from e

ast = jinja2_env.parse(field.data)
undefined = ", ".join(find_undeclared_variables(ast))
if undefined:
raise ValidationError(
f"The following tokens used in the notification are not valid: {undefined}"
)
regex = re.compile('{.*?}')
for p in re.findall(regex, field.data):
if not p.strip('{}') in notification.valid_tokens:
message = field.gettext('Token \'%s\' is not a valid token.')
raise ValidationError(message % (p))

class validateURL(object):

@@ -238,7 +225,6 @@ class validateURL(object):

message = field.gettext('\'%s\' is not a valid URL.' % (field.data.strip()))
raise ValidationError(message)

class ValidateListRegex(object):
"""
Validates that anything that looks like a regex passes as a regex

@@ -347,11 +333,11 @@ class quickWatchForm(Form):

# Common to a single watch and the global settings
class commonSettingsForm(Form):
notification_urls = StringListField('Notification URL List', validators=[validators.Optional(), ValidateAppRiseServers()])
notification_title = StringField('Notification Title', default='ChangeDetection.io Notification - {{ watch_url }}', validators=[validators.Optional(), ValidateJinja2Template()])
notification_body = TextAreaField('Notification Body', default='{{ watch_url }} had a change.', validators=[validators.Optional(), ValidateJinja2Template()])
notification_urls = StringListField('Notification URL list', validators=[validators.Optional(), ValidateAppRiseServers()])
notification_title = StringField('Notification title', validators=[validators.Optional(), ValidateTokensList()])
notification_body = TextAreaField('Notification body', validators=[validators.Optional(), ValidateTokensList()])
notification_format = SelectField('Notification format', choices=valid_notification_formats.keys())
fetch_backend = RadioField(u'Fetch Method', choices=content_fetcher.available_fetchers(), validators=[ValidateContentFetcherIsReady()])
fetch_backend = RadioField(u'Fetch method', choices=content_fetcher.available_fetchers(), validators=[ValidateContentFetcherIsReady()])
extract_title_as_title = BooleanField('Extract <title> from document and use as watch title', default=False)
webdriver_delay = IntegerField('Wait seconds before extracting text', validators=[validators.Optional(), validators.NumberRange(min=1,
message="Should contain one or more seconds")])
changedetectionio/html_tools.py

@@ -10,10 +10,6 @@ import re
# HTML added to be sure each result matching a filter (.example) gets converted to a new line by Inscriptis
TEXT_FILTER_LIST_LINE_SUFFIX = "<br/>"

# 'price' , 'lowPrice', 'highPrice' are usually under here
# all of those may or may not appear on different websites
LD_JSON_PRODUCT_OFFER_SELECTOR = "json:$..offers"

class JSONNotFound(ValueError):
def __init__(self, msg):
ValueError.__init__(self, msg)

@@ -131,10 +127,8 @@ def _get_stripped_text_from_json_match(match):

return stripped_text_from_html

# content - json
# json_filter - ie json:$..price
# ensure_is_ldjson_info_type - str "product", optional, "@type == product" (I dont know how to do that as a json selector)
def extract_json_as_string(content, json_filter, ensure_is_ldjson_info_type=None):
def extract_json_as_string(content, json_filter):

stripped_text_from_html = False

# Try to parse/filter out the JSON, if we get some parser error, then maybe it's embedded <script type=ldjson>

@@ -145,12 +139,7 @@ def extract_json_as_string(content, json_filter, ensure_is_ldjson_info_type=None
# Foreach <script json></script> blob.. just return the first that matches json_filter
s = []
soup = BeautifulSoup(content, 'html.parser')

if ensure_is_ldjson_info_type:
bs_result = soup.findAll('script', {"type": "application/ld+json"})
else:
bs_result = soup.findAll('script')

bs_result = soup.findAll('script')

if not bs_result:
raise JSONNotFound("No parsable JSON found in this document")

@@ -167,14 +156,7 @@ def extract_json_as_string(content, json_filter, ensure_is_ldjson_info_type=None
continue
else:
stripped_text_from_html = _parse_json(json_data, json_filter)
if ensure_is_ldjson_info_type:
# Could sometimes be list, string or something else random
if isinstance(json_data, dict):
# If it has LD JSON 'key' @type, and @type is 'product', and something was found for the search
# (Some sites have multiple of the same ld+json @type='product', but some have the review part, some have the 'price' part)
if json_data.get('@type', False) and json_data.get('@type','').lower() == ensure_is_ldjson_info_type.lower() and stripped_text_from_html:
break
elif stripped_text_from_html:
if stripped_text_from_html:
break

if not stripped_text_from_html:

@@ -261,18 +243,6 @@ def html_to_text(html_content: str, render_anchor_tag_content=False) -> str:

return text_content

# Does LD+JSON exist with a @type=='product' and a .price set anywhere?
def has_ldjson_product_info(content):
try:
pricing_data = extract_json_as_string(content=content, json_filter=LD_JSON_PRODUCT_OFFER_SELECTOR, ensure_is_ldjson_info_type="product")
except JSONNotFound as e:
# Totally fine
return False
x=bool(pricing_data)
return x
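
Outside the diff context, the idea behind `has_ldjson_product_info()` can be sketched in a few standalone lines. This is a simplified stand-in only (it assumes BeautifulSoup is available and ignores cases the real code handles, such as `@type` being a list); the implementation above routes through `extract_json_as_string()` and the `json:$..offers` selector instead:

```python
# Standalone sketch: does the page carry an ld+json Product block with an
# "offers" entry? Simplified compared to has_ldjson_product_info() above.
import json
from bs4 import BeautifulSoup

def looks_like_product_page(html_content):
    soup = BeautifulSoup(html_content, 'html.parser')
    for tag in soup.find_all('script', {"type": "application/ld+json"}):
        try:
            data = json.loads(tag.string or "")
        except ValueError:
            # Not valid JSON in this <script> block, keep looking
            continue
        if isinstance(data, dict) and str(data.get('@type', '')).lower() == 'product' and data.get('offers'):
            return True
    return False
```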

def workarounds_for_obfuscations(content):
"""
Some sites are using sneaky tactics to make prices and other information un-renderable by Inscriptis

@@ -14,52 +14,49 @@ from changedetectionio.notification import (

class model(dict):
__newest_history_key = None
__history_n = 0
__history_n=0
__base_config = {
# 'history': {}, # Dict of timestamp and output stripped filename (removed)
# 'newest_history_key': 0, (removed, taken from history.txt index)
'body': None,
'check_unique_lines': False, # On change-detected, compare against all history if its something new
'check_count': 0,
'consecutive_filter_failures': 0, # Every time the CSS/xPath filter cannot be located, reset when all is fine.
'extract_text': [], # Extract text by regex after filters
'extract_title_as_title': False,
'fetch_backend': None,
'filter_failure_notification_send': strtobool(os.getenv('FILTER_FAILURE_NOTIFICATION_SEND_DEFAULT', 'True')),
'has_ldjson_price_data': None,
'track_ldjson_price_data': None,
'headers': {}, # Extra headers to send
'ignore_text': [], # List of text to ignore when calculating the comparison checksum
'include_filters': [],
'last_checked': 0,
'last_error': False,
'last_viewed': 0, # history key value of the last viewed via the [diff] link
'method': 'GET',
# Custom notification content
'notification_body': None,
'notification_format': default_notification_format_for_watch,
'notification_muted': False,
'notification_title': None,
'notification_screenshot': False, # Include the latest screenshot if available and supported by the apprise URL
'notification_urls': [], # List of URLs to add to the notification Queue (Usually AppRise)
'paused': False,
'previous_md5': False,
'previous_md5_before_filters': False, # Used for skipping changedetection entirely
'proxy': None, # Preferred proxy connection
'subtractive_selectors': [],
'tag': None,
'text_should_not_be_present': [], # Text that should not present
# Re #110, so then if this is set to None, we know to use the default value instead
# Requires setting to None on submit if it's the same as the default
# Should be all None by default, so we use the system default in this case.
'time_between_check': {'weeks': None, 'days': None, 'hours': None, 'minutes': None, 'seconds': None},
'title': None,
'trigger_text': [], # List of text or regex to wait for until a change is detected
'url': None,
'uuid': str(uuid.uuid4()),
'webdriver_delay': None,
'webdriver_js_execute_code': None, # Run before change-detection
}
#'history': {}, # Dict of timestamp and output stripped filename (removed)
#'newest_history_key': 0, (removed, taken from history.txt index)
'body': None,
'check_unique_lines': False, # On change-detected, compare against all history if its something new
'check_count': 0,
'consecutive_filter_failures': 0, # Every time the CSS/xPath filter cannot be located, reset when all is fine.
'extract_text': [], # Extract text by regex after filters
'extract_title_as_title': False,
'fetch_backend': None,
'filter_failure_notification_send': strtobool(os.getenv('FILTER_FAILURE_NOTIFICATION_SEND_DEFAULT', 'True')),
'headers': {}, # Extra headers to send
'ignore_text': [], # List of text to ignore when calculating the comparison checksum
'include_filters': [],
'last_checked': 0,
'last_error': False,
'last_viewed': 0, # history key value of the last viewed via the [diff] link
'method': 'GET',
# Custom notification content
'notification_body': None,
'notification_format': default_notification_format_for_watch,
'notification_muted': False,
'notification_title': None,
'notification_screenshot': False, # Include the latest screenshot if available and supported by the apprise URL
'notification_urls': [], # List of URLs to add to the notification Queue (Usually AppRise)
'paused': False,
'previous_md5': False,
'proxy': None, # Preferred proxy connection
'subtractive_selectors': [],
'tag': None,
'text_should_not_be_present': [], # Text that should not present
# Re #110, so then if this is set to None, we know to use the default value instead
# Requires setting to None on submit if it's the same as the default
# Should be all None by default, so we use the system default in this case.
'time_between_check': {'weeks': None, 'days': None, 'hours': None, 'minutes': None, 'seconds': None},
'title': None,
'trigger_text': [], # List of text or regex to wait for until a change is detected
'url': None,
'uuid': str(uuid.uuid4()),
'webdriver_delay': None,
'webdriver_js_execute_code': None, # Run before change-detection
}
jitter_seconds = 0

def __init__(self, *arg, **kw):
changedetectionio/notification.py

@@ -1,7 +1,5 @@
import apprise
from jinja2 import Environment, BaseLoader
from apprise import NotifyFormat
import json

valid_tokens = {
'base_url': '',

@@ -18,8 +16,8 @@ valid_tokens = {

default_notification_format_for_watch = 'System default'
default_notification_format = 'Text'
default_notification_body = '{{watch_url}} had a change.\n---\n{{diff}}\n---\n'
default_notification_title = 'ChangeDetection.io Notification - {{watch_url}}'
default_notification_body = '{watch_url} had a change.\n---\n{diff}\n---\n'
default_notification_title = 'ChangeDetection.io Notification - {watch_url}'

valid_notification_formats = {
'Text': NotifyFormat.TEXT,

@@ -29,67 +27,25 @@ valid_notification_formats = {
default_notification_format_for_watch: default_notification_format_for_watch
}

# include the decorator
from apprise.decorators import notify

@notify(on="delete")
@notify(on="deletes")
@notify(on="get")
@notify(on="gets")
@notify(on="post")
@notify(on="posts")
@notify(on="put")
@notify(on="puts")
def apprise_custom_api_call_wrapper(body, title, notify_type, *args, **kwargs):
import requests
url = kwargs['meta'].get('url')

if url.startswith('post'):
r = requests.post
elif url.startswith('get'):
r = requests.get
elif url.startswith('put'):
r = requests.put
elif url.startswith('delete'):
r = requests.delete

url = url.replace('post://', 'http://')
url = url.replace('posts://', 'https://')
url = url.replace('put://', 'http://')
url = url.replace('puts://', 'https://')
url = url.replace('get://', 'http://')
url = url.replace('gets://', 'https://')
url = url.replace('put://', 'http://')
url = url.replace('puts://', 'https://')
url = url.replace('delete://', 'http://')
url = url.replace('deletes://', 'https://')

# Try to auto-guess if it's JSON
headers = {}
try:
json.loads(body)
headers = {'Content-Type': 'application/json; charset=utf-8'}
except ValueError as e:
pass

r(url, headers=headers, data=body)
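
With that wrapper registered (which happens as a side effect of importing this module on the 0.40.0.3 side), a `get://` / `post://` / `put://` / `delete://` style notification URL is handled by Apprise like any other scheme. A hypothetical usage sketch, with a made-up webhook endpoint:

```python
# Hypothetical usage of the custom schemes registered above; the endpoint is
# made up. "posts://" is rewritten to https:// by the wrapper and the body is
# POSTed to it, with a JSON content-type if the body parses as JSON.
import apprise

apobj = apprise.Apprise()
apobj.add("posts://example.com/changedetection-webhook")
apobj.notify(title="Price changed", body='{"watch_url": "https://example.com", "price": 23.50}')
```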

def process_notification(n_object, datastore):

# Insert variables into the notification content
notification_parameters = create_notification_parameters(n_object, datastore)

# Get the notification body from datastore
jinja2_env = Environment(loader=BaseLoader)
n_body = jinja2_env.from_string(n_object.get('notification_body', default_notification_body)).render(**notification_parameters)
n_title = jinja2_env.from_string(n_object.get('notification_title', default_notification_title)).render(**notification_parameters)
n_body = n_object.get('notification_body', default_notification_body)
n_title = n_object.get('notification_title', default_notification_title)
n_format = valid_notification_formats.get(
n_object['notification_format'],
valid_notification_formats[default_notification_format],
)

# Insert variables into the notification content
notification_parameters = create_notification_parameters(n_object, datastore)

for n_k in notification_parameters:
token = '{' + n_k + '}'
val = notification_parameters[n_k]
n_title = n_title.replace(token, val)
n_body = n_body.replace(token, val)

# https://github.com/caronc/apprise/wiki/Development_LogCapture
# Anything higher than or equal to WARNING (which covers things like Connection errors)
# raise it as an exception

@@ -97,7 +53,6 @@ def process_notification(n_object, datastore):
sent_objs=[]
from .apprise_asset import asset
for url in n_object['notification_urls']:
url = jinja2_env.from_string(url).render(**notification_parameters)
apobj = apprise.Apprise(debug=True, asset=asset)
url = url.strip()
if len(url):

@@ -111,12 +66,7 @@ def process_notification(n_object, datastore):

# So if no avatar_url is specified, add one so it can be correctly calculated into the total payload
k = '?' if not '?' in url else '&'
if not 'avatar_url' in url \
and not url.startswith('mail') \
and not url.startswith('post') \
and not url.startswith('get') \
and not url.startswith('delete') \
and not url.startswith('put'):
if not 'avatar_url' in url and not url.startswith('mail'):
url += k + 'avatar_url=https://raw.githubusercontent.com/dgtlmoon/changedetection.io/master/changedetectionio/static/images/avatar-256x256.png'

if url.startswith('tgram://'):

@@ -194,7 +144,7 @@ def create_notification_parameters(n_object, datastore):

watch_url = n_object['watch_url']

# Re #148 - Some people have just {{ base_url }} in the body or title, but this may break some notification services
# Re #148 - Some people have just {base_url} in the body or title, but this may break some notification services
# like 'Join', so it's always best to atleast set something obvious so that they are not broken.
if base_url == '':
base_url = "<base-url-env-var-not-set>"
changedetectionio/queuedWatchMetaData.py

@@ -1,10 +0,0 @@
from dataclasses import dataclass, field
from typing import Any

# So that we can queue some metadata in `item`
# https://docs.python.org/3/library/queue.html#queue.PriorityQueue
#
@dataclass(order=True)
class PrioritizedItem:
priority: int
item: Any=field(compare=False)
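
The rest of this diff queues work through this dataclass, so here is a quick sketch of how it behaves on a `PriorityQueue` (a sketch only, assuming the 0.40.0.3 package is importable; the UUID strings are made up). `compare=False` on `item` matters because two entries with equal `priority` would otherwise fall back to comparing their dict payloads, which Python cannot order:

```python
# Sketch of how the 0.40.0.3 code paths in this diff use PrioritizedItem.
# Lower `priority` values are retrieved first; the dict payload never takes
# part in the ordering because of field(compare=False).
from queue import PriorityQueue
from changedetectionio import queuedWatchMetaData

update_q = PriorityQueue()
update_q.put(queuedWatchMetaData.PrioritizedItem(priority=5, item={'uuid': 'clone-1234', 'skip_when_checksum_same': True}))
update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': 'recheck-5678', 'skip_when_checksum_same': False}))

queued = update_q.get()
print(queued.priority, queued.item['uuid'])  # 1 recheck-5678 (priority 1 beats 5)
```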
@@ -1,6 +1,3 @@
|
||||
// Copyright (C) 2021 Leigh Morresi (dgtlmoon@gmail.com)
|
||||
// All rights reserved.
|
||||
|
||||
// @file Scrape the page looking for elements of concern (%ELEMENTS%)
|
||||
// http://matatk.agrip.org.uk/tests/position-and-width/
|
||||
// https://stackoverflow.com/questions/26813480/when-is-element-getboundingclientrect-guaranteed-to-be-updated-accurate
|
||||
@@ -84,16 +81,8 @@ var bbox;
|
||||
for (var i = 0; i < elements.length; i++) {
|
||||
bbox = elements[i].getBoundingClientRect();
|
||||
|
||||
// Exclude items that are not interactable or visible
|
||||
if(elements[i].style.opacity === "0") {
|
||||
continue
|
||||
}
|
||||
if(elements[i].style.display === "none" || elements[i].style.pointerEvents === "none" ) {
|
||||
continue
|
||||
}
|
||||
|
||||
// Skip really small ones, and where width or height ==0
|
||||
if (bbox['width'] * bbox['height'] < 100) {
|
||||
// Forget really small ones
|
||||
if (bbox['width'] < 10 && bbox['height'] < 10) {
|
||||
continue;
|
||||
}
|
||||
|
||||
@@ -149,6 +138,7 @@ for (var i = 0; i < elements.length; i++) {
|
||||
|
||||
}
|
||||
|
||||
|
||||
// Inject the current one set in the include_filters, which may be a CSS rule
|
||||
// used for displaying the current one in VisualSelector, where its not one we generated.
|
||||
if (include_filters.length) {
|
||||
@@ -176,23 +166,10 @@ if (include_filters.length) {
|
||||
}
|
||||
|
||||
if (q) {
|
||||
// #1231 - IN the case XPath attribute filter is applied, we will have to traverse up and find the element.
|
||||
if (q.hasOwnProperty('getBoundingClientRect')) {
|
||||
bbox = q.getBoundingClientRect();
|
||||
console.log("xpath_element_scraper: Got filter element, scroll from top was " + scroll_y)
|
||||
} else {
|
||||
try {
|
||||
// Try and see we can find its ownerElement
|
||||
bbox = q.ownerElement.getBoundingClientRect();
|
||||
console.log("xpath_element_scraper: Got filter by ownerElement element, scroll from top was " + scroll_y)
|
||||
} catch (e) {
|
||||
console.log("xpath_element_scraper: error looking up ownerElement")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if(!q) {
|
||||
console.log("xpath_element_scraper: filter element " + f + " was not found");
|
||||
bbox = q.getBoundingClientRect();
|
||||
console.log("xpath_element_scraper: Got filter element, scroll from top was "+scroll_y)
|
||||
} else {
|
||||
console.log("xpath_element_scraper: filter element "+f+" was not found");
|
||||
}
|
||||
|
||||
if (bbox && bbox['width'] > 0 && bbox['height'] > 0) {
|
||||
@@ -207,9 +184,5 @@ if (include_filters.length) {
|
||||
}
|
||||
}
|
||||
|
||||
// Sort the elements so we find the smallest one first, in other words, we find the smallest one matching in that area
|
||||
// so that we dont select the wrapping element by mistake and be unable to select what we want
|
||||
size_pos.sort((a, b) => (a.width*a.height > b.width*b.height) ? 1 : -1)
|
||||
|
||||
// Window.width required for proper scaling in the frontend
|
||||
return {'size_pos': size_pos, 'browser_width': window.innerWidth};
|
||||
|
||||
@@ -1,2 +0,0 @@
(A two-line SVG icon file, 1.7 KiB, is removed by this diff; the inline vector path data is omitted here.)
@@ -1,5 +1,4 @@
|
||||
// Copyright (C) 2021 Leigh Morresi (dgtlmoon@gmail.com)
|
||||
// All rights reserved.
|
||||
// Horrible proof of concept code :)
|
||||
// yes - this is really a hack, if you are a front-ender and want to help, please get in touch!
|
||||
|
||||
$(document).ready(function () {
|
||||
@@ -178,10 +177,9 @@ $(document).ready(function () {
|
||||
// Basically, find the most 'deepest'
|
||||
var found = 0;
|
||||
ctx.fillStyle = 'rgba(205,0,0,0.35)';
|
||||
// Will be sorted by smallest width*height first
|
||||
for (var i = 0; i <= selector_data['size_pos'].length; i++) {
|
||||
for (var i = selector_data['size_pos'].length; i !== 0; i--) {
|
||||
// draw all of them? let them choose somehow?
|
||||
var sel = selector_data['size_pos'][i];
|
||||
var sel = selector_data['size_pos'][i - 1];
|
||||
// If we are in a bounding-box
|
||||
if (e.offsetY > sel.top * y_scale && e.offsetY < sel.top * y_scale + sel.height * y_scale
|
||||
&&
|
||||
@@ -197,7 +195,7 @@ $(document).ready(function () {
|
||||
// no need to keep digging
|
||||
// @todo or, O to go out/up, I to go in
|
||||
// or double click to go up/out the selector?
|
||||
current_selected_i = i;
|
||||
current_selected_i = i - 1;
|
||||
found += 1;
|
||||
break;
|
||||
}
|
||||
|
||||
@@ -121,19 +121,15 @@ html[data-darkmode="true"] {
|
||||
--color-icon-github-hover: var(--color-grey-700);
|
||||
--color-watch-table-error: var(--color-light-red);
|
||||
--color-watch-table-row-text: var(--color-grey-800); }
|
||||
html[data-darkmode="true"] .watch-controls img {
|
||||
opacity: 0.4; }
|
||||
html[data-darkmode="true"] .watch-table .unviewed {
|
||||
color: #fff; }
|
||||
html[data-darkmode="true"] .icon-spread {
|
||||
filter: hue-rotate(-10deg) brightness(1.5); }
|
||||
html[data-darkmode="true"] .watch-table .title-col a[target="_blank"]::after,
|
||||
html[data-darkmode="true"] .watch-table .current-diff-url::after {
|
||||
filter: invert(0.5) hue-rotate(10deg) brightness(2); }
|
||||
html[data-darkmode="true"] .watch-table .watch-controls .state-off img {
|
||||
opacity: 0.3; }
|
||||
html[data-darkmode="true"] .watch-table .watch-controls .state-on img {
|
||||
opacity: 1.0; }
|
||||
html[data-darkmode="true"] .watch-table .unviewed {
|
||||
color: #fff; }
|
||||
html[data-darkmode="true"] .watch-table .unviewed.error {
|
||||
color: var(--color-watch-table-error); }
|
||||
|
||||
#diff-ui {
|
||||
background: var(--color-background);
|
||||
|
||||
@@ -140,7 +140,15 @@ html[data-darkmode="true"] {
|
||||
--color-watch-table-error: var(--color-light-red);
|
||||
--color-watch-table-row-text: var(--color-grey-800);
|
||||
|
||||
|
||||
// Anything that can't be manipulated through variables follows.
|
||||
.watch-controls {
|
||||
img {
|
||||
opacity: 0.4;
|
||||
}
|
||||
}
|
||||
.watch-table .unviewed {
|
||||
color: #fff;
|
||||
}
|
||||
.icon-spread {
|
||||
filter: hue-rotate(-10deg) brightness(1.5);
|
||||
}
|
||||
@@ -151,25 +159,5 @@ html[data-darkmode="true"] {
|
||||
.current-diff-url::after {
|
||||
filter: invert(.5) hue-rotate(10deg) brightness(2);
|
||||
}
|
||||
|
||||
.watch-controls {
|
||||
.state-off {
|
||||
img {
|
||||
opacity: 0.3;
|
||||
}
|
||||
}
|
||||
.state-on {
|
||||
img {
|
||||
opacity: 1.0;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
.unviewed {
|
||||
color: #fff;
|
||||
&.error {
|
||||
color: var(--color-watch-table-error);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -125,21 +125,22 @@ code {
|
||||
&.unviewed {
|
||||
font-weight: bold;
|
||||
}
|
||||
&.error {
|
||||
color: var(--color-watch-table-error);
|
||||
}
|
||||
color: var(--color-watch-table-row-text);
|
||||
}
|
||||
|
||||
|
||||
td {
|
||||
white-space: nowrap;
|
||||
&.title-col {
|
||||
word-break: break-all;
|
||||
white-space: normal;
|
||||
}
|
||||
.error {
|
||||
color: var(--color-watch-table-error);
|
||||
}
|
||||
|
||||
td {
|
||||
white-space: nowrap;
|
||||
}
|
||||
|
||||
td.title-col {
|
||||
word-break: break-all;
|
||||
white-space: normal;
|
||||
}
|
||||
|
||||
th {
|
||||
white-space: nowrap;
|
||||
@@ -877,9 +878,6 @@ body.full-width {
|
||||
.pure-form-message-inline {
|
||||
padding-left: 0;
|
||||
color: var(--color-text-input-description);
|
||||
code {
|
||||
font-size: .875em;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1009,30 +1007,3 @@ ul {
|
||||
border-radius: 5px;
|
||||
color: var(--color-warning);
|
||||
}
|
||||
|
||||
/* automatic price following helpers */
|
||||
.tracking-ldjson-price-data {
|
||||
background-color: var(--color-background-button-green);
|
||||
color: #000;
|
||||
padding: 3px;
|
||||
border-radius: 3px;
|
||||
white-space: nowrap;
|
||||
}
|
||||
|
||||
.ldjson-price-track-offer {
|
||||
a.pure-button {
|
||||
border-radius: 3px;
|
||||
padding: 3px;
|
||||
background-color: var(--color-background-button-green);
|
||||
}
|
||||
|
||||
font-weight: bold;
|
||||
font-style: italic;
|
||||
}
|
||||
|
||||
.price-follow-tag-icon {
|
||||
display: inline-block;
|
||||
height: 0.8rem;
|
||||
vertical-align: middle;
|
||||
}
|
||||
|
||||
|
||||
@@ -124,19 +124,15 @@ html[data-darkmode="true"] {
|
||||
--color-icon-github-hover: var(--color-grey-700);
|
||||
--color-watch-table-error: var(--color-light-red);
|
||||
--color-watch-table-row-text: var(--color-grey-800); }
|
||||
html[data-darkmode="true"] .watch-controls img {
|
||||
opacity: 0.4; }
|
||||
html[data-darkmode="true"] .watch-table .unviewed {
|
||||
color: #fff; }
|
||||
html[data-darkmode="true"] .icon-spread {
|
||||
filter: hue-rotate(-10deg) brightness(1.5); }
|
||||
html[data-darkmode="true"] .watch-table .title-col a[target="_blank"]::after,
|
||||
html[data-darkmode="true"] .watch-table .current-diff-url::after {
|
||||
filter: invert(0.5) hue-rotate(10deg) brightness(2); }
|
||||
html[data-darkmode="true"] .watch-table .watch-controls .state-off img {
|
||||
opacity: 0.3; }
|
||||
html[data-darkmode="true"] .watch-table .watch-controls .state-on img {
|
||||
opacity: 1.0; }
|
||||
html[data-darkmode="true"] .watch-table .unviewed {
|
||||
color: #fff; }
|
||||
html[data-darkmode="true"] .watch-table .unviewed.error {
|
||||
color: var(--color-watch-table-error); }
|
||||
|
||||
/* spinner */
|
||||
.spinner,
|
||||
@@ -347,13 +343,13 @@ code {
|
||||
color: var(--color-watch-table-row-text); }
|
||||
.watch-table tr.unviewed {
|
||||
font-weight: bold; }
|
||||
.watch-table tr.error {
|
||||
color: var(--color-watch-table-error); }
|
||||
.watch-table .error {
|
||||
color: var(--color-watch-table-error); }
|
||||
.watch-table td {
|
||||
white-space: nowrap; }
|
||||
.watch-table td.title-col {
|
||||
word-break: break-all;
|
||||
white-space: normal; }
|
||||
.watch-table td.title-col {
|
||||
word-break: break-all;
|
||||
white-space: normal; }
|
||||
.watch-table th {
|
||||
white-space: nowrap; }
|
||||
.watch-table th a {
|
||||
@@ -853,8 +849,6 @@ body.full-width .edit-form {
|
||||
.edit-form .pure-form-message-inline {
|
||||
padding-left: 0;
|
||||
color: var(--color-text-input-description); }
|
||||
.edit-form .pure-form-message-inline code {
|
||||
font-size: .875em; }
|
||||
|
||||
ul {
|
||||
padding-left: 1em;
|
||||
@@ -945,24 +939,3 @@ ul {
|
||||
display: inline;
|
||||
height: 26px;
|
||||
vertical-align: middle; }
|
||||
|
||||
/* automatic price following helpers */
|
||||
.tracking-ldjson-price-data {
|
||||
background-color: var(--color-background-button-green);
|
||||
color: #000;
|
||||
padding: 3px;
|
||||
border-radius: 3px;
|
||||
white-space: nowrap; }
|
||||
|
||||
.ldjson-price-track-offer {
|
||||
font-weight: bold;
|
||||
font-style: italic; }
|
||||
.ldjson-price-track-offer a.pure-button {
|
||||
border-radius: 3px;
|
||||
padding: 3px;
|
||||
background-color: var(--color-background-button-green); }
|
||||
|
||||
.price-follow-tag-icon {
|
||||
display: inline-block;
|
||||
height: 0.8rem;
|
||||
vertical-align: middle; }
|
||||
|
||||
@@ -250,15 +250,12 @@ class ChangeDetectionStore:
    def clear_watch_history(self, uuid):
        import pathlib

        self.__data['watching'][uuid].update({
            'last_checked': 0,
            'has_ldjson_price_data': None,
            'last_error': False,
            'last_notification_error': False,
            'last_viewed': 0,
            'previous_md5': False,
            'track_ldjson_price_data': None,
        })
        self.__data['watching'][uuid].update(
            {'last_checked': 0,
             'last_viewed': 0,
             'previous_md5': False,
             'last_notification_error': False,
             'last_error': False})

        # JSON Data, Screenshots, Textfiles (history index and snapshots), HTML in the future etc
        for item in pathlib.Path(os.path.join(self.datastore_path, uuid)).rglob("*.*"):
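For context, the `rglob` loop at the end of that hunk walks the watch's own data directory and removes every stored artefact. A rough standalone sketch of that cleanup step, assuming the same on-disk layout (one directory per watch UUID); the helper name and paths are hypothetical:

```python
import os
import pathlib

def clear_history_files(datastore_path, uuid):
    # Remove screenshots, text snapshots and the history index kept for this watch.
    watch_dir = pathlib.Path(os.path.join(datastore_path, uuid))
    for item in watch_dir.rglob("*.*"):
        item.unlink()

# clear_history_files("/datastore", "some-watch-uuid")  # illustrative only
```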
@@ -624,44 +621,4 @@ class ChangeDetectionStore:
                    watch['include_filters'] = [existing_filter]
            except:
                continue
        return

    # Convert old static notification tokens to jinja2 tokens
    def update_9(self):
        # Each watch
        import re
        # only { } not {{ or }}
        r = r'(?<!{){(?!{)(\w+)(?<!})}(?!})'
        for uuid, watch in self.data['watching'].items():
            try:
                n_body = watch.get('notification_body', '')
                if n_body:
                    watch['notification_body'] = re.sub(r, r'{{\1}}', n_body)

                n_title = watch.get('notification_title')
                if n_title:
                    watch['notification_title'] = re.sub(r, r'{{\1}}', n_title)

                n_urls = watch.get('notification_urls')
                if n_urls:
                    for i, url in enumerate(n_urls):
                        watch['notification_urls'][i] = re.sub(r, r'{{\1}}', url)

            except:
                continue

        # System wide
        n_body = self.data['settings']['application'].get('notification_body')
        if n_body:
            self.data['settings']['application']['notification_body'] = re.sub(r, r'{{\1}}', n_body)

        n_title = self.data['settings']['application'].get('notification_title')
        if n_body:
            self.data['settings']['application']['notification_title'] = re.sub(r, r'{{\1}}', n_title)

        n_urls = self.data['settings']['application'].get('notification_urls')
        if n_urls:
            for i, url in enumerate(n_urls):
                self.data['settings']['application']['notification_urls'][i] = re.sub(r, r'{{\1}}', url)

        return
        return
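The `update_9` migration above rewrites old single-brace tokens into Jinja2 double-brace tokens while leaving already-migrated `{{ token }}` values untouched. A quick standalone check of that exact substitution (the sample strings are invented):

```python
import re

# Same pattern as update_9: match {token} but not {{token}}
pattern = r'(?<!{){(?!{)(\w+)(?<!})}(?!})'

old_body = "Watch URL: {watch_url}\nDiff: {diff}\nAlready migrated: {{watch_uuid}}"
print(re.sub(pattern, r'{{\1}}', old_body))
# Watch URL: {{watch_url}}
# Diff: {{diff}}
# Already migrated: {{watch_uuid}}
```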
@@ -16,7 +16,6 @@
<li><code>discord://</code> only supports a maximum <strong>2,000 characters</strong> of notification text, including the title.</li>
<li><code>tgram://</code> bots cant send messages to other bots, so you should specify chat ID of non-bot user.</li>
<li><code>tgram://</code> only supports very limited HTML and can fail when extra tags are sent, <a href="https://core.telegram.org/bots/api#html-style">read more here</a> (or use plaintext/markdown format)</li>
<li><code>gets://</code>, <code>posts://</code>, <code>puts://</code>, <code>deletes://</code> for direct API calls (or omit the "<code>s</code>" for non-SSL ie <code>get://</code>)</li>
</ul>
</div>
<div class="notifications-wrapper">
@@ -42,9 +41,8 @@
<span class="pure-form-message-inline">Format for all notifications</span>
</div>
<div class="pure-controls">
<p class="pure-form-message-inline">
You can use <a target="_new" href="https://jinja.palletsprojects.com/en/3.0.x/templates/">Jinja2</a> templating in the notification title, body and URL.
</p>
<span class="pure-form-message-inline">
These tokens can be used in the notification body and title to customise the notification text.

<table class="pure-table" id="token-table">
<thead>
@@ -55,49 +53,52 @@
|
||||
</thead>
|
||||
<tbody>
|
||||
<tr>
|
||||
<td><code>{{ '{{ base_url }}' }}</code></td>
|
||||
<td><code>{base_url}</code></td>
|
||||
<td>The URL of the changedetection.io instance you are running.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><code>{{ '{{ watch_url }}' }}</code></td>
|
||||
<td><code>{watch_url}</code></td>
|
||||
<td>The URL being watched.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><code>{{ '{{ watch_uuid }}' }}</code></td>
|
||||
<td><code>{watch_uuid}</code></td>
|
||||
<td>The UUID of the watch.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><code>{{ '{{ watch_title }}' }}</code></td>
|
||||
<td><code>{watch_title}</code></td>
|
||||
<td>The title of the watch.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><code>{{ '{{ watch_tag }}' }}</code></td>
|
||||
<td>The watch label / tag</td>
|
||||
<td><code>{watch_tag}</code></td>
|
||||
<td>The tag of the watch.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><code>{{ '{{ preview_url }}' }}</code></td>
|
||||
<td><code>{preview_url}</code></td>
|
||||
<td>The URL of the preview page generated by changedetection.io.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><code>{{ '{{ diff_url }}' }}</code></td>
|
||||
<td><code>{diff}</code></td>
|
||||
<td>The diff output - differences only</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><code>{{ '{{ diff_full }}' }}</code></td>
|
||||
<td><code>{diff_full}</code></td>
|
||||
<td>The diff output - full difference output</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><code>{{ '{{ current_snapshot }}' }}</code></td>
|
||||
<td><code>{diff_url}</code></td>
|
||||
<td>The URL of the diff page generated by changedetection.io.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><code>{current_snapshot}</code></td>
|
||||
<td>The current snapshot value, useful when combined with JSON or CSS filters
|
||||
</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
<div class="pure-form-message-inline">
|
||||
<br>
|
||||
URLs generated by changedetection.io (such as <code>{{ '{{ diff_url }}' }}</code>) require the <code>BASE_URL</code> environment variable set.<br/>
|
||||
Your <code>BASE_URL</code> var is currently "{{settings_application['current_base_url']}}"
|
||||
</div>
|
||||
<br/>
|
||||
URLs generated by changedetection.io (such as <code>{diff_url}</code>) require the <code>BASE_URL</code> environment variable set.<br/>
|
||||
Your <code>BASE_URL</code> var is currently "{{settings_application['current_base_url']}}"
|
||||
</span>
|
||||
</div>
|
||||
</div>
|
||||
{% endmacro %}
|
||||
|
||||
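The token table and macro above switch between the old `{token}` style and the new `{{ token }}` Jinja2 style. For reference, this is how such Jinja2-style title/body templates render in plain Python; it is only a sketch, not the application's notification pipeline, and the context values are invented:

```python
from jinja2 import Template

context = {
    "watch_url": "https://example.com/pricing",
    "watch_title": "Example pricing page",
    "diff": "-  $10\n+  $12",
}

title = Template("Change detected in {{ watch_title }}").render(context)
body = Template("URL: {{ watch_url }}\n\n{{ diff }}").render(context)
print(title)
print(body)
```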
@@ -1,5 +1,5 @@
<!DOCTYPE html>
<html lang="en" data-darkmode="{{ get_darkmode_state() }}">
<html lang="en" data-darkmode="{{ dark_mode|lower }}">

<head>
<meta charset="utf-8"/>

@@ -125,7 +125,7 @@
<p>
For example, to extract only the numbers from text ‐</br>
<strong>Raw text</strong>: <code>Temperature <span style="color: red">5.5</span>°C in Sydney</code></br>
<strong>RegEx to extract:</strong> <code>Temperature <span style="color: red">([0-9\.]+)</span></code><br/>
<strong>RegEx to extract:</strong> <code>Temperature ([0-9\.]+)</code><br/>
</p>
<p>
<a href="https://RegExr.com/">Be sure to test your RegEx here.</a>

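The help text above shows a RegEx with one capture group; in Python the same extraction looks like this (the input string is the example from the template):

```python
import re

raw_text = "Temperature 5.5°C in Sydney"
match = re.search(r"Temperature ([0-9\.]+)", raw_text)
if match:
    print(match.group(1))  # -> 5.5
```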
@@ -60,7 +60,7 @@
{{ render_field(form.application.form.base_url, placeholder="http://yoursite.com:5000/",
class="m-d") }}
<span class="pure-form-message-inline">
Base URL used for the <code>{{ '{{ base_url }}' }}</code> token in notifications and RSS links.<br/>Default value is the ENV var 'BASE_URL' (Currently "{{settings_application['current_base_url']}}"),
Base URL used for the <code>{base_url}</code> token in notifications and RSS links.<br/>Default value is the ENV var 'BASE_URL' (Currently "{{settings_application['current_base_url']}}"),
<a href="https://github.com/dgtlmoon/changedetection.io/wiki/Configurable-BASE_URL-setting">read more here</a>.
</span>
</div>

@@ -14,7 +14,7 @@
|
||||
<div id="watch-add-wrapper-zone">
|
||||
<div>
|
||||
{{ render_simple_field(form.url, placeholder="https://...", required=true) }}
|
||||
{{ render_simple_field(form.tag, value=active_tag if active_tag else '', placeholder="watch label / tag") }}
|
||||
{{ render_simple_field(form.tag, value=active_tag if active_tag else '', placeholder="watch group") }}
|
||||
</div>
|
||||
<div>
|
||||
{{ render_simple_field(form.watch_submit_button, title="Watch this URL!" ) }}
|
||||
@@ -32,7 +32,6 @@
|
||||
<button class="pure-button button-secondary button-xsmall" style="font-size: 70%" name="op" value="unpause">UnPause</button>
|
||||
<button class="pure-button button-secondary button-xsmall" style="font-size: 70%" name="op" value="mute">Mute</button>
|
||||
<button class="pure-button button-secondary button-xsmall" style="font-size: 70%" name="op" value="unmute">UnMute</button>
|
||||
<button class="pure-button button-secondary button-xsmall" style="font-size: 70%" name="op" value="recheck">Recheck</button>
|
||||
<button class="pure-button button-secondary button-xsmall" style="font-size: 70%" name="op" value="notification-default">Use default notification</button>
|
||||
<button class="pure-button button-secondary button-xsmall" style="background: #dd4242; font-size: 70%" name="op" value="delete">Delete</button>
|
||||
</div>
|
||||
@@ -89,9 +88,9 @@
|
||||
</td>
|
||||
<td class="title-col inline">{{watch.title if watch.title is not none and watch.title|length > 0 else watch.url}}
|
||||
<a class="external" target="_blank" rel="noopener" href="{{ watch.link.replace('source:','') }}"></a>
|
||||
<a class="link-spread" href="{{url_for('form_share_put_watch', uuid=watch.uuid)}}"><img style="height: 1em;display:inline-block;" src="{{url_for('static_content', group='images', filename='spread.svg')}}" class="icon icon-spread" title="Create a link to share watch config with others" /></a>
|
||||
<a class="link-spread" href="{{url_for('form_share_put_watch', uuid=watch.uuid)}}"><img style="height: 1em;display:inline-block;" src="{{url_for('static_content', group='images', filename='spread.svg')}}" class="icon icon-spread" /></a>
|
||||
|
||||
{%if watch.fetch_backend == "html_webdriver" %}<img style="height: 1em; display:inline-block;" src="{{url_for('static_content', group='images', filename='Google-Chrome-icon.png')}}" title="Using a chrome browser" />{% endif %}
|
||||
{%if watch.fetch_backend == "html_webdriver" %}<img style="height: 1em; display:inline-block;" src="{{url_for('static_content', group='images', filename='Google-Chrome-icon.png')}}" />{% endif %}
|
||||
|
||||
{% if watch.last_error is defined and watch.last_error != False %}
|
||||
<div class="fetch-error">{{ watch.last_error }}</div>
|
||||
@@ -99,12 +98,6 @@
|
||||
{% if watch.last_notification_error is defined and watch.last_notification_error != False %}
|
||||
<div class="fetch-error notification-error"><a href="{{url_for('notification_logs')}}">{{ watch.last_notification_error }}</a></div>
|
||||
{% endif %}
|
||||
{% if watch['has_ldjson_price_data'] and not watch['track_ldjson_price_data'] %}
|
||||
<div class="ldjson-price-track-offer">Embedded price data detected, follow only price data? <a href="{{url_for('price_data_follower.accept', uuid=watch.uuid)}}" class="pure-button button-xsmall">Yes</a> <a href="{{url_for('price_data_follower.reject', uuid=watch.uuid)}}" class="">No</a></div>
|
||||
{% endif %}
|
||||
{% if watch['track_ldjson_price_data'] == 'accepted' %}
|
||||
<span class="tracking-ldjson-price-data" title="Automatically following embedded price information"><img src="{{url_for('static_content', group='images', filename='price-tag-icon.svg')}}" class="price-follow-tag-icon"/> Price</span>
|
||||
{% endif %}
|
||||
{% if not active_tag %}
|
||||
<span class="watch-tag-list">{{ watch.tag}}</span>
|
||||
{% endif %}
|
||||
|
||||
@@ -1,146 +0,0 @@
|
||||
#!/usr/bin/python3
|
||||
|
||||
import time
|
||||
from flask import url_for
|
||||
from .util import live_server_setup, extract_UUID_from_client, extract_api_key_from_UI
|
||||
|
||||
def set_response_with_ldjson():
|
||||
test_return_data = """<html>
|
||||
<body>
|
||||
Some initial text</br>
|
||||
<p>Which is across multiple lines</p>
|
||||
</br>
|
||||
So let's see what happens. </br>
|
||||
<div class="sametext">Some text thats the same</div>
|
||||
<div class="changetext">Some text that will change</div>
|
||||
<script type="application/ld+json">
|
||||
{
|
||||
"@context":"https://schema.org/",
|
||||
"@type":"Product",
|
||||
"@id":"https://www.some-virtual-phone-shop.com/celular-iphone-14/p",
|
||||
"name":"Celular Iphone 14 Pro Max 256Gb E Sim A16 Bionic",
|
||||
"brand":{
|
||||
"@type":"Brand",
|
||||
"name":"APPLE"
|
||||
},
|
||||
"image":"https://www.some-virtual-phone-shop.com/15509426/image.jpg",
|
||||
"description":"You dont need it",
|
||||
"mpn":"111111",
|
||||
"sku":"22222",
|
||||
"offers":{
|
||||
"@type":"AggregateOffer",
|
||||
"lowPrice":8097000,
|
||||
"highPrice":8099900,
|
||||
"priceCurrency":"COP",
|
||||
"offers":[
|
||||
{
|
||||
"@type":"Offer",
|
||||
"price":8097000,
|
||||
"priceCurrency":"COP",
|
||||
"availability":"http://schema.org/InStock",
|
||||
"sku":"102375961",
|
||||
"itemCondition":"http://schema.org/NewCondition",
|
||||
"seller":{
|
||||
"@type":"Organization",
|
||||
"name":"ajax"
|
||||
}
|
||||
}
|
||||
],
|
||||
"offerCount":1
|
||||
}
|
||||
}
|
||||
</script>
|
||||
</body>
|
||||
</html>
|
||||
"""
|
||||
|
||||
with open("test-datastore/endpoint-content.txt", "w") as f:
|
||||
f.write(test_return_data)
|
||||
return None
|
||||
|
||||
def set_response_without_ldjson():
|
||||
test_return_data = """<html>
|
||||
<body>
|
||||
Some initial text</br>
|
||||
<p>Which is across multiple lines</p>
|
||||
</br>
|
||||
So let's see what happens. </br>
|
||||
<div class="sametext">Some text thats the same</div>
|
||||
<div class="changetext">Some text that will change</div>
|
||||
</body>
|
||||
</html>
|
||||
"""
|
||||
|
||||
with open("test-datastore/endpoint-content.txt", "w") as f:
|
||||
f.write(test_return_data)
|
||||
return None
|
||||
|
||||
# actually only really used by the distll.io importer, but could be handy too
|
||||
def test_check_ldjson_price_autodetect(client, live_server):
|
||||
live_server_setup(live_server)
|
||||
|
||||
# Give the endpoint time to spin up
|
||||
time.sleep(1)
|
||||
|
||||
set_response_with_ldjson()
|
||||
|
||||
# Add our URL to the import page
|
||||
test_url = url_for('test_endpoint', _external=True)
|
||||
res = client.post(
|
||||
url_for("import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
time.sleep(3)
|
||||
|
||||
# Should get a notice that it's available
|
||||
res = client.get(url_for("index"))
|
||||
assert b'ldjson-price-track-offer' in res.data
|
||||
|
||||
# Accept it
|
||||
uuid = extract_UUID_from_client(client)
|
||||
|
||||
client.get(url_for('price_data_follower.accept', uuid=uuid, follow_redirects=True))
|
||||
time.sleep(2)
|
||||
|
||||
# Trigger a check
|
||||
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
time.sleep(2)
|
||||
# Offer should be gone
|
||||
res = client.get(url_for("index"))
|
||||
assert b'Embedded price data' not in res.data
|
||||
assert b'tracking-ldjson-price-data' in res.data
|
||||
|
||||
# and last snapshop (via API) should be just the price
|
||||
api_key = extract_api_key_from_UI(client)
|
||||
res = client.get(
|
||||
url_for("watchsinglehistory", uuid=uuid, timestamp='latest'),
|
||||
headers={'x-api-key': api_key},
|
||||
)
|
||||
|
||||
# Should see this (dont know where the whitespace came from)
|
||||
assert b'"highPrice": 8099900' in res.data
|
||||
# And not this cause its not the ld-json
|
||||
assert b"So let's see what happens" not in res.data
|
||||
|
||||
client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
|
||||
|
||||
##########################################################################################
|
||||
# And we shouldnt see the offer
|
||||
set_response_without_ldjson()
|
||||
|
||||
# Add our URL to the import page
|
||||
test_url = url_for('test_endpoint', _external=True)
|
||||
res = client.post(
|
||||
url_for("import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
time.sleep(3)
|
||||
res = client.get(url_for("index"))
|
||||
assert b'ldjson-price-track-offer' not in res.data
|
||||
|
||||
##########################################################################################
|
||||
client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
|
||||
@@ -121,7 +121,7 @@ def test_element_removal_full(client, live_server):
|
||||
url_for("import_page"), data={"urls": test_url}, follow_redirects=True
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
time.sleep(1)
|
||||
|
||||
# Goto the edit page, add the filter data
|
||||
# Not sure why \r needs to be added - absent of the #changetext this is not necessary
|
||||
subtractive_selectors_data = "header\r\nfooter\r\nnav\r\n#changetext"
|
||||
|
||||
@@ -38,6 +38,9 @@ def test_check_encoding_detection(client, live_server):
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
# Trigger a check
|
||||
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
|
||||
# Give the thread time to pick it up
|
||||
time.sleep(2)
|
||||
|
||||
|
||||
@@ -77,8 +77,7 @@ def test_DNS_errors(client, live_server):
|
||||
time.sleep(3)
|
||||
|
||||
res = client.get(url_for("index"))
|
||||
found_name_resolution_error = b"Temporary failure in name resolution" in res.data or b"Name or service not known" in res.data
|
||||
assert found_name_resolution_error
|
||||
assert b'Name or service not known' in res.data
|
||||
# Should always record that we tried
|
||||
assert bytes("just now".encode('utf-8')) in res.data
|
||||
|
||||
|
||||
@@ -73,17 +73,17 @@ def test_filter_doesnt_exist_then_exists_should_get_notification(client, live_se
|
||||
|
||||
# Just a regular notification setting, this will be used by the special 'filter not found' notification
|
||||
notification_form_data = {"notification_urls": notification_url,
|
||||
"notification_title": "New ChangeDetection.io Notification - {{watch_url}}",
|
||||
"notification_body": "BASE URL: {{base_url}}\n"
|
||||
"Watch URL: {{watch_url}}\n"
|
||||
"Watch UUID: {{watch_uuid}}\n"
|
||||
"Watch title: {{watch_title}}\n"
|
||||
"Watch tag: {{watch_tag}}\n"
|
||||
"Preview: {{preview_url}}\n"
|
||||
"Diff URL: {{diff_url}}\n"
|
||||
"Snapshot: {{current_snapshot}}\n"
|
||||
"Diff: {{diff}}\n"
|
||||
"Diff Full: {{diff_full}}\n"
|
||||
"notification_title": "New ChangeDetection.io Notification - {watch_url}",
|
||||
"notification_body": "BASE URL: {base_url}\n"
|
||||
"Watch URL: {watch_url}\n"
|
||||
"Watch UUID: {watch_uuid}\n"
|
||||
"Watch title: {watch_title}\n"
|
||||
"Watch tag: {watch_tag}\n"
|
||||
"Preview: {preview_url}\n"
|
||||
"Diff URL: {diff_url}\n"
|
||||
"Snapshot: {current_snapshot}\n"
|
||||
"Diff: {diff}\n"
|
||||
"Diff Full: {diff_full}\n"
|
||||
":-)",
|
||||
"notification_format": "Text"}
|
||||
|
||||
|
||||
@@ -56,17 +56,17 @@ def run_filter_test(client, content_filter):
|
||||
|
||||
# Just a regular notification setting, this will be used by the special 'filter not found' notification
|
||||
notification_form_data = {"notification_urls": notification_url,
|
||||
"notification_title": "New ChangeDetection.io Notification - {{watch_url}}",
|
||||
"notification_body": "BASE URL: {{base_url}}\n"
|
||||
"Watch URL: {{watch_url}}\n"
|
||||
"Watch UUID: {{watch_uuid}}\n"
|
||||
"Watch title: {{watch_title}}\n"
|
||||
"Watch tag: {{watch_tag}}\n"
|
||||
"Preview: {{preview_url}}\n"
|
||||
"Diff URL: {{diff_url}}\n"
|
||||
"Snapshot: {{current_snapshot}}\n"
|
||||
"Diff: {{diff}}\n"
|
||||
"Diff Full: {{diff_full}}\n"
|
||||
"notification_title": "New ChangeDetection.io Notification - {watch_url}",
|
||||
"notification_body": "BASE URL: {base_url}\n"
|
||||
"Watch URL: {watch_url}\n"
|
||||
"Watch UUID: {watch_uuid}\n"
|
||||
"Watch title: {watch_title}\n"
|
||||
"Watch tag: {watch_tag}\n"
|
||||
"Preview: {preview_url}\n"
|
||||
"Diff URL: {diff_url}\n"
|
||||
"Snapshot: {current_snapshot}\n"
|
||||
"Diff: {diff}\n"
|
||||
"Diff Full: {diff_full}\n"
|
||||
":-)",
|
||||
"notification_format": "Text"}
|
||||
|
||||
@@ -84,7 +84,6 @@ def run_filter_test(client, content_filter):
|
||||
data=notification_form_data,
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
assert b"Updated watch." in res.data
|
||||
time.sleep(3)
|
||||
|
||||
|
||||
@@ -101,6 +101,9 @@ def test_check_ignore_text_functionality(client, live_server):
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
|
||||
# Trigger a check
|
||||
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
|
||||
# Give the thread time to pick it up
|
||||
time.sleep(sleep_time_for_fetch_thread)
|
||||
|
||||
@@ -196,6 +199,9 @@ def test_check_global_ignore_text_functionality(client, live_server):
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
|
||||
# Trigger a check
|
||||
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
|
||||
# Give the thread time to pick it up
|
||||
time.sleep(sleep_time_for_fetch_thread)
|
||||
|
||||
|
||||
@@ -69,6 +69,8 @@ def test_normal_page_check_works_with_ignore_status_code(client, live_server):
|
||||
assert b"1 Imported" in res.data
|
||||
|
||||
time.sleep(sleep_time_for_fetch_thread)
|
||||
# Trigger a check
|
||||
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
|
||||
set_some_changed_response()
|
||||
time.sleep(sleep_time_for_fetch_thread)
|
||||
@@ -102,6 +104,9 @@ def test_403_page_check_works_with_ignore_status_code(client, live_server):
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
|
||||
# Trigger a check
|
||||
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
|
||||
# Give the thread time to pick it up
|
||||
time.sleep(sleep_time_for_fetch_thread)
|
||||
|
||||
@@ -114,9 +119,11 @@ def test_403_page_check_works_with_ignore_status_code(client, live_server):
|
||||
)
|
||||
assert b"Updated watch." in res.data
|
||||
|
||||
# Trigger a check
|
||||
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
|
||||
# Give the thread time to pick it up
|
||||
time.sleep(sleep_time_for_fetch_thread)
|
||||
|
||||
# Make a change
|
||||
set_some_changed_response()
|
||||
|
||||
|
||||
@@ -394,48 +394,6 @@ def check_json_ext_filter(json_filter, client, live_server):
|
||||
res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
|
||||
assert b'Deleted' in res.data
|
||||
|
||||
def test_ignore_json_order(client, live_server):
|
||||
# A change in order shouldn't trigger a notification
|
||||
|
||||
with open("test-datastore/endpoint-content.txt", "w") as f:
|
||||
f.write('{"hello" : 123, "world": 123}')
|
||||
|
||||
|
||||
# Add our URL to the import page
|
||||
test_url = url_for('test_endpoint', content_type="application/json", _external=True)
|
||||
res = client.post(
|
||||
url_for("import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
|
||||
time.sleep(2)
|
||||
|
||||
with open("test-datastore/endpoint-content.txt", "w") as f:
|
||||
f.write('{"world" : 123, "hello": 123}')
|
||||
|
||||
# Trigger a check
|
||||
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
time.sleep(2)
|
||||
|
||||
res = client.get(url_for("index"))
|
||||
assert b'unviewed' not in res.data
|
||||
|
||||
# Just to be sure it still works
|
||||
with open("test-datastore/endpoint-content.txt", "w") as f:
|
||||
f.write('{"world" : 123, "hello": 124}')
|
||||
|
||||
# Trigger a check
|
||||
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
time.sleep(2)
|
||||
|
||||
res = client.get(url_for("index"))
|
||||
assert b'unviewed' in res.data
|
||||
|
||||
res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
|
||||
assert b'Deleted' in res.data
|
||||
|
||||
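`test_ignore_json_order` above expects that merely reordering keys in a JSON document is not reported as a change, while a changed value still is. One common way to get that behaviour, shown here as a generic sketch rather than the project's code, is to hash a key-sorted canonical serialisation:

```python
import hashlib
import json

def canonical_md5(json_text):
    # Sort keys so {"hello":123,"world":123} and {"world":123,"hello":123}
    # produce the same checksum.
    obj = json.loads(json_text)
    canonical = json.dumps(obj, sort_keys=True, separators=(",", ":"))
    return hashlib.md5(canonical.encode("utf-8")).hexdigest()

assert canonical_md5('{"hello" : 123, "world": 123}') == canonical_md5('{"world" : 123, "hello": 123}')
assert canonical_md5('{"hello" : 123, "world": 123}') != canonical_md5('{"world" : 123, "hello": 124}')
```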
def test_check_jsonpath_ext_filter(client, live_server):
|
||||
check_json_ext_filter('json:$[?(@.status==Sold)]', client, live_server)
|
||||
|
||||
|
||||
@@ -90,17 +90,17 @@ def test_check_notification(client, live_server):
|
||||
print (">>>> Notification URL: "+notification_url)
|
||||
|
||||
notification_form_data = {"notification_urls": notification_url,
|
||||
"notification_title": "New ChangeDetection.io Notification - {{watch_url}}",
|
||||
"notification_body": "BASE URL: {{base_url}}\n"
|
||||
"Watch URL: {{watch_url}}\n"
|
||||
"Watch UUID: {{watch_uuid}}\n"
|
||||
"Watch title: {{watch_title}}\n"
|
||||
"Watch tag: {{watch_tag}}\n"
|
||||
"Preview: {{preview_url}}\n"
|
||||
"Diff URL: {{diff_url}}\n"
|
||||
"Snapshot: {{current_snapshot}}\n"
|
||||
"Diff: {{diff}}\n"
|
||||
"Diff Full: {{diff_full}}\n"
|
||||
"notification_title": "New ChangeDetection.io Notification - {watch_url}",
|
||||
"notification_body": "BASE URL: {base_url}\n"
|
||||
"Watch URL: {watch_url}\n"
|
||||
"Watch UUID: {watch_uuid}\n"
|
||||
"Watch title: {watch_title}\n"
|
||||
"Watch tag: {watch_tag}\n"
|
||||
"Preview: {preview_url}\n"
|
||||
"Diff URL: {diff_url}\n"
|
||||
"Snapshot: {current_snapshot}\n"
|
||||
"Diff: {diff}\n"
|
||||
"Diff Full: {diff_full}\n"
|
||||
":-)",
|
||||
"notification_screenshot": True,
|
||||
"notification_format": "Text"}
|
||||
@@ -179,6 +179,7 @@ def test_check_notification(client, live_server):
|
||||
logging.debug(">>> Skipping BASE_URL check")
|
||||
|
||||
|
||||
|
||||
# This should insert the {current_snapshot}
|
||||
set_more_modified_response()
|
||||
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
@@ -236,10 +237,10 @@ def test_check_notification(client, live_server):
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
|
||||
def test_notification_validation(client, live_server):
|
||||
|
||||
time.sleep(1)
|
||||
|
||||
#live_server_setup(live_server)
|
||||
time.sleep(3)
|
||||
# re #242 - when you edited an existing new entry, it would not correctly show the notification settings
|
||||
# Add our URL to the import page
|
||||
test_url = url_for('test_endpoint', _external=True)
|
||||
@@ -267,6 +268,21 @@ def test_notification_validation(client, live_server):
|
||||
# )
|
||||
# assert b"Notification Body and Title is required when a Notification URL is used" in res.data
|
||||
|
||||
# Now adding a wrong token should give us an error
|
||||
res = client.post(
|
||||
url_for("settings_page"),
|
||||
data={"application-notification_title": "New ChangeDetection.io Notification - {watch_url}",
|
||||
"application-notification_body": "Rubbish: {rubbish}\n",
|
||||
"application-notification_format": "Text",
|
||||
"application-notification_urls": "json://localhost/foobar",
|
||||
"requests-time_between_check-minutes": 180,
|
||||
"fetch_backend": "html_requests"
|
||||
},
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
assert bytes("is not a valid token".encode('utf-8')) in res.data
|
||||
|
||||
# cleanup for the next
|
||||
client.get(
|
||||
url_for("form_delete", uuid="all"),
|
||||
@@ -274,58 +290,3 @@ def test_notification_validation(client, live_server):
|
||||
)
|
||||
|
||||
|
||||
|
||||
def test_notification_custom_endpoint_and_jinja2(client, live_server):
|
||||
time.sleep(1)
|
||||
|
||||
# test_endpoint - that sends the contents of a file
|
||||
# test_notification_endpoint - that takes a POST and writes it to file (test-datastore/notification.txt)
|
||||
|
||||
# CUSTOM JSON BODY CHECK for POST://
|
||||
set_original_response()
|
||||
test_notification_url = url_for('test_notification_endpoint', _external=True).replace('http://', 'post://')+"?xxx={{ watch_url }}"
|
||||
|
||||
res = client.post(
|
||||
url_for("settings_page"),
|
||||
data={"application-notification_title": "New ChangeDetection.io Notification - {{ watch_url }}",
|
||||
"application-notification_body": '{ "url" : "{{ watch_url }}", "secret": 444 }',
|
||||
# https://github.com/caronc/apprise/wiki/Notify_Custom_JSON#get-parameter-manipulation
|
||||
"application-notification_urls": test_notification_url,
|
||||
"application-minutes_between_check": 180,
|
||||
"application-fetch_backend": "html_requests"
|
||||
},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b'Settings updated' in res.data
|
||||
|
||||
# Add a watch and trigger a HTTP POST
|
||||
test_url = url_for('test_endpoint', _external=True)
|
||||
res = client.post(
|
||||
url_for("form_quick_watch_add"),
|
||||
data={"url": test_url, "tag": 'nice one'},
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
assert b"Watch added" in res.data
|
||||
|
||||
time.sleep(2)
|
||||
set_modified_response()
|
||||
|
||||
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
time.sleep(2)
|
||||
|
||||
|
||||
with open("test-datastore/notification.txt", 'r') as f:
|
||||
x=f.read()
|
||||
j = json.loads(x)
|
||||
assert j['url'].startswith('http://localhost')
|
||||
assert j['secret'] == 444
|
||||
|
||||
# URL check, this will always be converted to lowercase
|
||||
assert os.path.isfile("test-datastore/notification-url.txt")
|
||||
with open("test-datastore/notification-url.txt", 'r') as f:
|
||||
notification_url = f.read()
|
||||
assert 'xxx=http' in notification_url
|
||||
|
||||
os.unlink("test-datastore/notification-url.txt")
|
||||
|
||||
|
||||
@@ -11,23 +11,23 @@ def test_check_notification_error_handling(client, live_server):
|
||||
set_original_response()
|
||||
|
||||
# Give the endpoint time to spin up
|
||||
time.sleep(2)
|
||||
time.sleep(3)
|
||||
|
||||
# Set a URL and fetch it, then set a notification URL which is going to give errors
|
||||
# use a different URL so that it doesnt interfere with the actual check until we are ready
|
||||
test_url = url_for('test_endpoint', _external=True)
|
||||
res = client.post(
|
||||
url_for("form_quick_watch_add"),
|
||||
data={"url": test_url, "tag": ''},
|
||||
data={"url": "https://changedetection.io/CHANGELOG.txt", "tag": ''},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"Watch added" in res.data
|
||||
|
||||
time.sleep(2)
|
||||
set_modified_response()
|
||||
time.sleep(10)
|
||||
|
||||
# Check we capture the failure, we can just use trigger_check = y here
|
||||
res = client.post(
|
||||
url_for("edit_page", uuid="first"),
|
||||
data={"notification_urls": "jsons://broken-url-xxxxxxxx123/test",
|
||||
data={"notification_urls": "jsons://broken-url.changedetection.io/test",
|
||||
"notification_title": "xxx",
|
||||
"notification_body": "xxxxx",
|
||||
"notification_format": "Text",
|
||||
@@ -36,14 +36,15 @@ def test_check_notification_error_handling(client, live_server):
|
||||
"title": "",
|
||||
"headers": "",
|
||||
"time_between_check-minutes": "180",
|
||||
"fetch_backend": "html_requests"},
|
||||
"fetch_backend": "html_requests",
|
||||
"trigger_check": "y"},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"Updated watch." in res.data
|
||||
|
||||
found=False
|
||||
for i in range(1, 10):
|
||||
|
||||
time.sleep(1)
|
||||
logging.debug("Fetching watch overview....")
|
||||
res = client.get(
|
||||
url_for("index"))
|
||||
@@ -52,7 +53,6 @@ def test_check_notification_error_handling(client, live_server):
|
||||
found=True
|
||||
break
|
||||
|
||||
time.sleep(1)
|
||||
|
||||
assert found
|
||||
|
||||
@@ -60,7 +60,7 @@ def test_check_notification_error_handling(client, live_server):
|
||||
# The error should show in the notification logs
|
||||
res = client.get(
|
||||
url_for("notification_logs"))
|
||||
found_name_resolution_error = b"Temporary failure in name resolution" in res.data or b"Name or service not known" in res.data
|
||||
assert found_name_resolution_error
|
||||
assert bytes("Name or service not known".encode('utf-8')) in res.data
|
||||
|
||||
client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
|
||||
|
||||
# And it should be listed on the watch overview
|
||||
|
||||
@@ -20,8 +20,6 @@ def test_headers_in_request(client, live_server):
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
|
||||
time.sleep(1)
|
||||
|
||||
res = client.post(
|
||||
url_for("import_page"),
|
||||
data={"urls": test_url},
|
||||
@@ -176,7 +174,6 @@ def test_method_in_request(client, live_server):
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
|
||||
time.sleep(2)
|
||||
res = client.post(
|
||||
url_for("import_page"),
|
||||
data={"urls": test_url},
|
||||
@@ -184,8 +181,6 @@ def test_method_in_request(client, live_server):
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
|
||||
time.sleep(2)
|
||||
|
||||
# Attempt to add a method which is not valid
|
||||
res = client.post(
|
||||
url_for("edit_page", uuid="first"),
|
||||
@@ -211,7 +206,7 @@ def test_method_in_request(client, live_server):
|
||||
assert b"Updated watch." in res.data
|
||||
|
||||
# Give the thread time to pick up the first version
|
||||
time.sleep(2)
|
||||
time.sleep(5)
|
||||
|
||||
# The service should echo back the request verb
|
||||
res = client.get(
|
||||
@@ -222,7 +217,7 @@ def test_method_in_request(client, live_server):
|
||||
# The test call service will return the verb as the body
|
||||
assert b"PATCH" in res.data
|
||||
|
||||
time.sleep(2)
|
||||
time.sleep(5)
|
||||
|
||||
watches_with_method = 0
|
||||
with open('test-datastore/url-watches.json') as f:
|
||||
|
||||
@@ -149,9 +149,6 @@ def live_server_setup(live_server):
|
||||
if data != None:
|
||||
f.write(data)
|
||||
|
||||
with open("test-datastore/notification-url.txt", "w") as f:
|
||||
f.write(request.url)
|
||||
|
||||
print("\n>> Test notification endpoint was hit.\n", data)
|
||||
return "Text was set"
|
||||
|
||||
|
||||
@@ -4,7 +4,6 @@ import queue
import time

from changedetectionio import content_fetcher
from changedetectionio import queuedWatchMetaData
from changedetectionio.fetch_site_status import FilterNotFoundInResponse

# A single update worker
@@ -158,12 +157,11 @@ class update_worker(threading.Thread):
        while not self.app.config.exit.is_set():

            try:
                queued_item_data = self.q.get(block=False)
                priority, uuid = self.q.get(block=False)
            except queue.Empty:
                pass

            else:
                uuid = queued_item_data.item.get('uuid')
                self.current_uuid = uuid

                if uuid in list(self.datastore.data['watching'].keys()):
@@ -173,11 +171,11 @@ class update_worker(threading.Thread):
                    update_obj= {}
                    xpath_data = False
                    process_changedetection_results = True
                    print("> Processing UUID {} Priority {} URL {}".format(uuid, queued_item_data.priority, self.datastore.data['watching'][uuid]['url']))
                    print("> Processing UUID {} Priority {} URL {}".format(uuid, priority, self.datastore.data['watching'][uuid]['url']))
                    now = time.time()

                    try:
                        changed_detected, update_obj, contents = update_handler.run(uuid, skip_when_checksum_same=queued_item_data.item.get('skip_when_checksum_same'))
                        changed_detected, update_obj, contents = update_handler.run(uuid)
                        # Re #342
                        # In Python 3, all strings are sequences of Unicode characters. There is a bytes type that holds raw bytes.
                        # We then convert/.decode('utf-8') for the notification etc
@@ -243,10 +241,6 @@ class update_worker(threading.Thread):

                        process_changedetection_results = True

                    except content_fetcher.checksumFromPreviousCheckWasTheSame as e:
                        # Yes fine, so nothing todo
                        pass

                    except content_fetcher.BrowserStepsStepTimout as e:

                        if not self.datastore.data['watching'].get(uuid):

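The update_worker hunk above swaps a bare `(priority, uuid)` tuple for a `queuedWatchMetaData` object that carries a priority plus a payload dict (`uuid`, `skip_when_checksum_same`). A generic approximation of that pattern with `queue.PriorityQueue` and a dataclass; the field names mirror the diff, but the class body itself is a guess, not the project's definition:

```python
import queue
from dataclasses import dataclass, field

@dataclass(order=True)
class QueuedWatchMetaData:
    # Only the priority participates in ordering; the payload is excluded from comparisons.
    priority: int
    item: dict = field(compare=False, default_factory=dict)

q = queue.PriorityQueue()
q.put(QueuedWatchMetaData(priority=5, item={'uuid': 'watch-b'}))
q.put(QueuedWatchMetaData(priority=1, item={'uuid': 'watch-a', 'skip_when_checksum_same': True}))

queued_item_data = q.get(block=False)
print(queued_item_data.item['uuid'])                          # -> 'watch-a' (lowest number first)
print(queued_item_data.item.get('skip_when_checksum_same'))   # -> True
```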
@@ -29,9 +29,8 @@ apprise~=1.2.0
# apprise mqtt https://github.com/dgtlmoon/changedetection.io/issues/315
paho-mqtt

# This mainly affects some ARM builds, which unlike the other builds ignores "ARG CRYPTOGRAPHY_DONT_BUILD_RUST=1"
# so without this pinning, the newer versions on ARM will forcefully try to build rust, which results in "rust compiler not found"
# (introduced once apprise became a dep)
# Pinned version of cryptography otherwise
# ERROR: Could not build wheels for cryptography which use PEP 517 and cannot be installed directly
cryptography~=3.4

# Used for CSS filtering

@@ -1 +1 @@
python-3.9.15
python-3.8.12