Mirror of https://github.com/dgtlmoon/changedetection.io.git (synced 2025-11-01 15:17:20 +00:00)

Compare commits: show-which ... 0.39.13.1 (27 commits)
| Author | SHA1 | Date |
|---|---|---|
| | 034507f14f | |
| | 0e385b1c22 | |
| | f28c260576 | |
| | 18f0b63b7d | |
| | 97045e7a7b | |
| | 9807cf0cda | |
| | d4b5237103 | |
| | dc6f76ba64 | |
| | 1f2f93184e | |
| | 0f08c8dda3 | |
| | 68db20168e | |
| | 1d4474f5a3 | |
| | 613308881c | |
| | f69585b276 | |
| | 0179940df1 | |
| | c0d0424e7e | |
| | 014dc61222 | |
| | 06517bfd22 | |
| | b3a115dd4a | |
| | ffc4215411 | |
| | 9e708810d1 | |
| | 1e8aa6158b | |
| | 015353eccc | |
| | 501183e66b | |
| | def74f27e6 | |
| | 37775a46c6 | |
| | e4eaa0c817 | |
@@ -20,6 +20,11 @@ COPY requirements.txt /requirements.txt

RUN pip install --target=/dependencies -r /requirements.txt

# Playwright is an alternative to Selenium
# Excluded this package from requirements.txt to prevent arm/v6 and arm/v7 builds from failing
RUN pip install --target=/dependencies playwright~=1.20 \
    || echo "WARN: Failed to install Playwright. The application can still run, but the Playwright option will be disabled."

# Final image stage
FROM python:3.8-slim
@@ -1,5 +1,6 @@
recursive-include changedetectionio/templates *
recursive-include changedetectionio/static *
recursive-include changedetectionio/model *
include changedetection.py
global-exclude *.pyc
global-exclude node_modules
@@ -170,9 +170,12 @@ Raspberry Pi and linux/arm/v6 linux/arm/v7 arm64 devices are supported! See the

Do you use changedetection.io to make money? does it save you time or money? Does it make your life easier? less stressful? Remember, we write this software when we should be doing actual paid work, we have to buy food and pay rent just like you.

Please support us, even small amounts help a LOT.

BTC `1PLFN327GyUarpJd7nVe7Reqg9qHx5frNn`
Firstly, consider taking out a [change detection monthly subscription - unlimited checks and watches](https://lemonade.changedetection.io/start) , even if you don't use it, you still get the warm fuzzy feeling of helping out the project. (And who knows, you might just use it!)

Or directly donate an amount PayPal [](https://www.paypal.com/donate/?hosted_button_id=7CP6HR9ZCNDYJ)

Or BTC `1PLFN327GyUarpJd7nVe7Reqg9qHx5frNn`

<img src="https://raw.githubusercontent.com/dgtlmoon/changedetection.io/master/btc-support.png" style="max-width:50%;" alt="Support us!" />
@@ -32,6 +32,7 @@ from flask import (
render_template,
request,
send_from_directory,
session,
url_for,
)
from flask_login import login_required
@@ -39,7 +40,7 @@ from flask_wtf import CSRFProtect

from changedetectionio import html_tools

__version__ = '0.39.12'
__version__ = '0.39.13.1'

datastore = None

@@ -342,6 +343,8 @@ def changedetection_app(config=None, datastore_o=None):
@app.route("/", methods=['GET'])
@login_required
def index():
from changedetectionio import forms

limit_tag = request.args.get('tag')
pause_uuid = request.args.get('pause')

@@ -378,7 +381,6 @@ def changedetection_app(config=None, datastore_o=None):

existing_tags = datastore.get_all_tags()

from changedetectionio import forms
form = forms.quickWatchForm(request.form)

output = render_template("watch-overview.html",
@@ -390,8 +392,10 @@ def changedetection_app(config=None, datastore_o=None):
has_unviewed=datastore.data['has_unviewed'],
# Don't link to hosting when we're on the hosting environment
hosted_sticky=os.getenv("SALTED_PASS", False) == False,
guid=datastore.data['app_guid'])

guid=datastore.data['app_guid'],
queued_uuids=update_q.queue)
if session.get('share-link'):
del(session['share-link'])
return output
@@ -430,48 +434,21 @@ def changedetection_app(config=None, datastore_o=None):
@login_required
def scrub_page():

import re

if request.method == 'POST':
confirmtext = request.form.get('confirmtext')
limit_date = request.form.get('limit_date')
limit_timestamp = 0

# Re #149 - allow empty/0 timestamp limit
if len(limit_date):
try:
limit_date = limit_date.replace('T', ' ')
# I noticed chrome will show '/' but actually submit '-'
limit_date = limit_date.replace('-', '/')
# In the case that :ss seconds are supplied
limit_date = re.sub(r'(\d\d:\d\d)(:\d\d)', '\\1', limit_date)

str_to_dt = datetime.datetime.strptime(limit_date, '%Y/%m/%d %H:%M')
limit_timestamp = int(str_to_dt.timestamp())

if limit_timestamp > time.time():
flash("Timestamp is in the future, cannot continue.", 'error')
return redirect(url_for('scrub_page'))

except ValueError:
flash('Incorrect date format, cannot continue.', 'error')
return redirect(url_for('scrub_page'))

if confirmtext == 'scrub':
changes_removed = 0
for uuid, watch in datastore.data['watching'].items():
if limit_timestamp:
changes_removed += datastore.scrub_watch(uuid, limit_timestamp=limit_timestamp)
else:
changes_removed += datastore.scrub_watch(uuid)
for uuid in datastore.data['watching'].keys():
datastore.scrub_watch(uuid)

flash("Cleared snapshot history ({} snapshots removed)".format(changes_removed))
flash("Cleared all snapshot history")
else:
flash('Incorrect confirmation text.', 'error')

return redirect(url_for('index'))

output = render_template("scrub.html")
output = render_template("scrub.html")
return output
@@ -517,7 +494,7 @@ def changedetection_app(config=None, datastore_o=None):
def edit_page(uuid):
from changedetectionio import forms


using_default_check_time = True
# More for testing, possible to return the first/only
if not datastore.data['watching'].keys():
flash("No watches to edit", "error")
@@ -530,27 +507,60 @@ def changedetection_app(config=None, datastore_o=None):
flash("No watch with the UUID %s found." % (uuid), "error")
return redirect(url_for('index'))

form = forms.watchForm(formdata=request.form if request.method == 'POST' else None,
data=datastore.data['watching'][uuid]
)
# be sure we update with a copy instead of accidently editing the live object by reference
default = deepcopy(datastore.data['watching'][uuid])

if request.method == 'GET':
# Set some defaults that refer to the main config when None, we do the same in POST,
# probably there should be a nice little handler for this.
if datastore.data['watching'][uuid]['fetch_backend'] is None:
form.fetch_backend.data = datastore.data['settings']['application']['fetch_backend']
if datastore.data['watching'][uuid]['minutes_between_check'] is None:
form.minutes_between_check.data = datastore.data['settings']['requests']['minutes_between_check']
# Show system wide default if nothing configured
if datastore.data['watching'][uuid]['fetch_backend'] is None:
default['fetch_backend'] = datastore.data['settings']['application']['fetch_backend']

# Show system wide default if nothing configured
if all(value == 0 or value == None for value in datastore.data['watching'][uuid]['time_between_check'].values()):
default['time_between_check'] = deepcopy(datastore.data['settings']['requests']['time_between_check'])

# Defaults for proxy choice
if datastore.proxy_list is not None:  # When enabled
system_proxy = datastore.data['settings']['requests']['proxy']
if default['proxy'] is None:
default['proxy'] = system_proxy
else:
# Does the chosen one exist?
if not any(default['proxy'] in tup for tup in datastore.proxy_list):
default['proxy'] = datastore.proxy_list[0][0]

# Used by the form handler to keep or remove the proxy settings
default['proxy_list'] = datastore.proxy_list

# proxy_override set to the json/text list of the items
form = forms.watchForm(formdata=request.form if request.method == 'POST' else None,
data=default,
)

if datastore.proxy_list is None:
# @todo - Couldn't get setattr() etc dynamic addition working, so remove it instead
del form.proxy
else:
form.proxy.choices = datastore.proxy_list
if default['proxy'] is None:
form.proxy.default='http://hello'

if request.method == 'POST' and form.validate():
extra_update_obj = {}

# Re #110, if they submit the same as the default value, set it to None, so we continue to follow the default
if form.minutes_between_check.data == datastore.data['settings']['requests']['minutes_between_check']:
form.minutes_between_check.data = None
if form.fetch_backend.data == datastore.data['settings']['application']['fetch_backend']:
form.fetch_backend.data = None
# Assume we use the default value, unless something relevant is different, then use the form value
# values could be None, 0 etc.
# Set to None unless the next for: says that something is different
extra_update_obj['time_between_check'] = dict.fromkeys(form.time_between_check.data)
for k, v in form.time_between_check.data.items():
if v and v != datastore.data['settings']['requests']['time_between_check'][k]:
extra_update_obj['time_between_check'] = form.time_between_check.data
using_default_check_time = False
break

extra_update_obj = {}
# Use the default if its the same as system wide
if form.fetch_backend.data == datastore.data['settings']['application']['fetch_backend']:
extra_update_obj['fetch_backend'] = None

# Notification URLs
datastore.data['watching'][uuid]['notification_urls'] = form.notification_urls.data
@@ -576,7 +586,7 @@ def changedetection_app(config=None, datastore_o=None):

# Re #286 - We wait for syncing new data to disk in another thread every 60 seconds
# But in the case something is added we should save straight away
datastore.sync_to_json()
datastore.needs_write_urgent = True

# Queue the watch for immediate recheck
update_q.put(uuid)
@@ -595,12 +605,12 @@ def changedetection_app(config=None, datastore_o=None):
if request.method == 'POST' and not form.validate():
flash("An error occurred, please see below.", "error")

has_empty_checktime = datastore.data['watching'][uuid].has_empty_checktime

output = render_template("edit.html",
uuid=uuid,
watch=datastore.data['watching'][uuid],
form=form,
has_empty_checktime=has_empty_checktime,
has_empty_checktime=using_default_check_time,
current_base_url=datastore.data['settings']['application']['base_url'],
emailprefix=os.getenv('NOTIFICATION_MAIL_BUTTON_PREFIX', False)
)
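The edit handler above follows the Re #110 convention of storing `None` whenever a submitted value matches the system-wide default, so the watch keeps tracking the global setting if it changes later. A minimal, self-contained sketch of that idea (the `SYSTEM_DEFAULTS` dict and function name below are invented for illustration, not part of the codebase):

```python
# Illustrative sketch only: "fall back to the system default unless the user
# picked something different" (names here are hypothetical, not the real datastore).

SYSTEM_DEFAULTS = {'fetch_backend': 'html_requests',
                   'time_between_check': {'weeks': None, 'days': None, 'hours': 3,
                                          'minutes': None, 'seconds': None}}

def normalise_watch_update(form_data: dict) -> dict:
    """Return an update dict where values equal to the system default become None,
    so the watch keeps following the global setting if it changes later."""
    update = {}
    for key, value in form_data.items():
        update[key] = None if value == SYSTEM_DEFAULTS.get(key) else value
    return update

print(normalise_watch_update({'fetch_backend': 'html_requests'}))   # {'fetch_backend': None}
print(normalise_watch_update({'fetch_backend': 'html_webdriver'}))  # keeps the override
```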
@@ -612,10 +622,28 @@ def changedetection_app(config=None, datastore_o=None):
def settings_page():
from changedetectionio import content_fetcher, forms

default = deepcopy(datastore.data['settings'])
if datastore.proxy_list is not None:
# When enabled
system_proxy = datastore.data['settings']['requests']['proxy']
# In the case it doesnt exist anymore
if not any([system_proxy in tup for tup in datastore.proxy_list]):
system_proxy = None

default['requests']['proxy'] = system_proxy if system_proxy is not None else datastore.proxy_list[0][0]
# Used by the form handler to keep or remove the proxy settings
default['proxy_list'] = datastore.proxy_list


# Don't use form.data on POST so that it doesnt overrid the checkbox status from the POST status
form = forms.globalSettingsForm(formdata=request.form if request.method == 'POST' else None,
data=datastore.data['settings']
data=default
)
if datastore.proxy_list is None:
# @todo - Couldn't get setattr() etc dynamic addition working, so remove it instead
del form.requests.form.proxy
else:
form.requests.form.proxy.choices = datastore.proxy_list

if request.method == 'POST':
# Password unset is a GET, but we can lock the session to a salted env password to always need the password
@@ -630,15 +658,15 @@ def changedetection_app(config=None, datastore_o=None):
if form.validate():
datastore.data['settings']['application'].update(form.data['application'])
datastore.data['settings']['requests'].update(form.data['requests'])
datastore.needs_write = True

if not os.getenv("SALTED_PASS", False) and len(form.application.form.password.encrypted_password):
datastore.data['settings']['application']['password'] = form.application.form.password.encrypted_password
datastore.needs_write = True
datastore.needs_write_urgent = True
flash("Password protection enabled.", 'notice')
flask_login.logout_user()
return redirect(url_for('index'))

datastore.needs_write_urgent = True
flash("Settings updated.")

else:
@@ -655,42 +683,37 @@ def changedetection_app(config=None, datastore_o=None):
@app.route("/import", methods=['GET', "POST"])
@login_required
def import_page():
import validators
remaining_urls = []

good = 0

if request.method == 'POST':
now=time.time()
urls = request.values.get('urls').split("\n")
from .importer import import_url_list, import_distill_io_json

if (len(urls) > 5000):
flash("Importing 5,000 of the first URLs from your list, the rest can be imported again.")
# URL List import
if request.values.get('urls') and len(request.values.get('urls').strip()):
# Import and push into the queue for immediate update check
importer = import_url_list()
importer.run(data=request.values.get('urls'), flash=flash, datastore=datastore)
for uuid in importer.new_uuids:
update_q.put(uuid)

for url in urls:
url = url.strip()
url, *tags = url.split(" ")
# Flask wtform validators wont work with basic auth, use validators package
# Up to 5000 per batch so we dont flood the server
if len(url) and validators.url(url.replace('source:', '')) and good < 5000:
new_uuid = datastore.add_watch(url=url.strip(), tag=" ".join(tags), write_to_disk_now=False)
# Straight into the queue.
update_q.put(new_uuid)
good += 1
if len(importer.remaining_data) == 0:
return redirect(url_for('index'))
else:
if len(url):
remaining_urls.append(url)
remaining_urls = importer.remaining_data

# Distill.io import
if request.values.get('distill-io') and len(request.values.get('distill-io').strip()):
# Import and push into the queue for immediate update check
d_importer = import_distill_io_json()
d_importer.run(data=request.values.get('distill-io'), flash=flash, datastore=datastore)
for uuid in d_importer.new_uuids:
update_q.put(uuid)

flash("{} Imported in {:.2f}s, {} Skipped.".format(good, time.time()-now,len(remaining_urls)))
datastore.needs_write = True

if len(remaining_urls) == 0:
# Looking good, redirect to index.
return redirect(url_for('index'))

# Could be some remaining, or we could be on GET
output = render_template("import.html",
remaining="\n".join(remaining_urls)
import_url_list_remaining="\n".join(remaining_urls),
original_distill_json=''
)
return output
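After this rework the route itself only chooses an importer, runs it, and pushes every returned UUID into the recheck queue; the parsing lives in `importer.py`. A compressed sketch of that flow, using a dummy importer and a plain `queue.Queue` in place of the real classes:

```python
import queue

class DummyImporter:
    """Stand-in for import_url_list / import_distill_io_json (hypothetical)."""
    def __init__(self):
        self.new_uuids = []
        self.remaining_data = []
    def run(self, data, flash, datastore):
        # Pretend every line imported cleanly and produced a new watch UUID.
        self.new_uuids = [f"uuid-{i}" for i, _ in enumerate(data.splitlines())]

update_q = queue.Queue()

def handle_import(raw_urls: str):
    importer = DummyImporter()
    importer.run(data=raw_urls, flash=print, datastore=None)
    for uuid in importer.new_uuids:     # everything imported goes straight to the recheck queue
        update_q.put(uuid)
    return importer.remaining_data      # anything skipped is shown back to the user

handle_import("https://example.com\nhttps://example.org")
print(update_q.qsize())  # 2
```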
@@ -986,29 +1009,35 @@ def changedetection_app(config=None, datastore_o=None):
from changedetectionio import forms
form = forms.quickWatchForm(request.form)

if form.validate():

url = request.form.get('url').strip()
if datastore.url_exists(url):
flash('The URL {} already exists'.format(url), "error")
return redirect(url_for('index'))

# @todo add_watch should throw a custom Exception for validation etc
new_uuid = datastore.add_watch(url=url, tag=request.form.get('tag').strip())
# Straight into the queue.
update_q.put(new_uuid)

flash("Watch added.")
return redirect(url_for('index'))
else:
if not form.validate():
flash("Error")
return redirect(url_for('index'))

url = request.form.get('url').strip()
if datastore.url_exists(url):
flash('The URL {} already exists'.format(url), "error")
return redirect(url_for('index'))

# @todo add_watch should throw a custom Exception for validation etc
new_uuid = datastore.add_watch(url=url, tag=request.form.get('tag').strip())
if new_uuid:
# Straight into the queue.
update_q.put(new_uuid)
flash("Watch added.")

return redirect(url_for('index'))
@app.route("/api/delete", methods=['GET'])
|
||||
@login_required
|
||||
def api_delete():
|
||||
uuid = request.args.get('uuid')
|
||||
|
||||
if uuid != 'all' and not uuid in datastore.data['watching'].keys():
|
||||
flash('The watch by UUID {} does not exist.'.format(uuid), 'error')
|
||||
return redirect(url_for('index'))
|
||||
|
||||
# More for testing, possible to return the first/only
|
||||
if uuid == 'first':
|
||||
uuid = list(datastore.data['watching'].keys()).pop()
|
||||
@@ -1068,6 +1097,59 @@ def changedetection_app(config=None, datastore_o=None):
|
||||
flash("{} watches are queued for rechecking.".format(i))
|
||||
return redirect(url_for('index', tag=tag))
|
||||
|
||||
@app.route("/api/share-url", methods=['GET'])
|
||||
@login_required
|
||||
def api_share_put_watch():
|
||||
"""Given a watch UUID, upload the info and return a share-link
|
||||
the share-link can be imported/added"""
|
||||
import requests
|
||||
import json
|
||||
tag = request.args.get('tag')
|
||||
uuid = request.args.get('uuid')
|
||||
|
||||
# more for testing
|
||||
if uuid == 'first':
|
||||
uuid = list(datastore.data['watching'].keys()).pop()
|
||||
|
||||
# copy it to memory as trim off what we dont need (history)
|
||||
watch = deepcopy(datastore.data['watching'][uuid])
|
||||
if (watch.get('history')):
|
||||
del (watch['history'])
|
||||
|
||||
# for safety/privacy
|
||||
for k in list(watch.keys()):
|
||||
if k.startswith('notification_'):
|
||||
del watch[k]
|
||||
|
||||
for r in['uuid', 'last_checked', 'last_changed']:
|
||||
if watch.get(r):
|
||||
del (watch[r])
|
||||
|
||||
# Add the global stuff which may have an impact
|
||||
watch['ignore_text'] += datastore.data['settings']['application']['global_ignore_text']
|
||||
watch['subtractive_selectors'] += datastore.data['settings']['application']['global_subtractive_selectors']
|
||||
|
||||
watch_json = json.dumps(watch)
|
||||
|
||||
try:
|
||||
r = requests.request(method="POST",
|
||||
data={'watch': watch_json},
|
||||
url="https://changedetection.io/share/share",
|
||||
headers={'App-Guid': datastore.data['app_guid']})
|
||||
res = r.json()
|
||||
|
||||
session['share-link'] = "https://changedetection.io/share/{}".format(res['share_key'])
|
||||
|
||||
|
||||
except Exception as e:
|
||||
flash("Could not share, something went wrong while communicating with the share server.", 'error')
|
||||
|
||||
# https://changedetection.io/share/VrMv05wpXyQa
|
||||
# in the browser - should give you a nice info page - wtf
|
||||
# paste in etc
|
||||
return redirect(url_for('index'))
|
||||
|
||||
|
||||
# @todo handle ctrl break
|
||||
ticker_thread = threading.Thread(target=ticker_thread_check_time_launch_checks).start()
|
||||
|
||||
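Before uploading, the share endpoint copies the watch and strips history, `notification_*` keys and identifying fields. The same filtering step as a standalone sketch (the sample watch dict below is made up for illustration):

```python
from copy import deepcopy

def prepare_watch_for_sharing(watch: dict) -> dict:
    """Copy a watch and drop history, notification settings and identifying fields."""
    shared = deepcopy(watch)
    shared.pop('history', None)
    for key in list(shared.keys()):
        if key.startswith('notification_'):
            del shared[key]
    for key in ('uuid', 'last_checked', 'last_changed'):
        shared.pop(key, None)
    return shared

example = {'url': 'https://example.com', 'history': {1: 'x'}, 'uuid': 'abc',
           'notification_urls': ['mailto://me@example.com'], 'css_filter': ''}
print(prepare_watch_for_sharing(example))
# {'url': 'https://example.com', 'css_filter': ''}
```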
@@ -1175,7 +1257,7 @@ def ticker_thread_check_time_launch_checks():
now = time.time()

recheck_time_minimum_seconds = int(os.getenv('MINIMUM_SECONDS_RECHECK_TIME', 60))
recheck_time_system_seconds = int(copied_datastore.data['settings']['requests']['minutes_between_check']) * 60
recheck_time_system_seconds = datastore.threshold_seconds

for uuid, watch in copied_datastore.data['watching'].items():

@@ -1185,8 +1267,9 @@ def ticker_thread_check_time_launch_checks():

# If they supplied an individual entry minutes to threshold.
threshold = now
if watch.threshold_seconds:
threshold -= watch.threshold_seconds
watch_threshold_seconds = watch.threshold_seconds()
if watch_threshold_seconds:
threshold -= watch_threshold_seconds
else:
threshold -= recheck_time_system_seconds
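The ticker change replaces the old minutes-based recheck interval with a per-watch `threshold_seconds`, falling back to the system-wide value when the watch has none. A simplified sketch of that decision (the function and argument names are hypothetical, and the values are made up):

```python
import time

def is_due_for_recheck(last_checked: int, watch_threshold_seconds: int,
                       system_threshold_seconds: int, minimum_seconds: int = 60) -> bool:
    """A watch is due when its last check is older than its own threshold,
    or the system-wide threshold when it has none (hypothetical helper)."""
    threshold = watch_threshold_seconds or system_threshold_seconds
    threshold = max(threshold, minimum_seconds)
    return time.time() - last_checked >= threshold

# A watch with no individual setting falls back to the 3-hour system default.
print(is_due_for_recheck(last_checked=int(time.time()) - 7200,
                         watch_threshold_seconds=0,
                         system_threshold_seconds=3 * 3600))  # False
```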
@@ -8,7 +8,7 @@ import sys

import eventlet
import eventlet.wsgi
from . import store, changedetection_app
from . import store, changedetection_app, content_fetcher
from . import __version__

def main():
@@ -1,13 +1,10 @@
from abc import ABC, abstractmethod
import chardet
import os
from selenium import webdriver
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
from selenium.webdriver.common.proxy import Proxy as SeleniumProxy
from selenium.common.exceptions import WebDriverException
import requests
import time
import urllib3.exceptions
import sys


class EmptyReply(Exception):
@@ -19,13 +16,17 @@ class EmptyReply(Exception):

pass


class Fetcher():
error = None
status_code = None
content = None
headers = None

fetcher_description ="No description"
# Will be needed in the future by the VisualSelector, always get this where possible.
screenshot = False
fetcher_description = "No description"
system_http_proxy = os.getenv('HTTP_PROXY')
system_https_proxy = os.getenv('HTTPS_PROXY')

@abstractmethod
def get_error(self):
@@ -46,10 +47,6 @@ class Fetcher():
def quit(self):
return

@abstractmethod
def screenshot(self):
return

@abstractmethod
def get_last_status_code(self):
return self.status_code
@@ -59,29 +56,118 @@ class Fetcher():
def is_ready(self):
return True


# Maybe for the future, each fetcher provides its own diff output, could be used for text, image
# the current one would return javascript output (as we use JS to generate the diff)
#
# Returns tuple(mime_type, stream)
# @abstractmethod
# def return_diff(self, stream_a, stream_b):
#     return

def available_fetchers():
import inspect
from changedetectionio import content_fetcher
p=[]
for name, obj in inspect.getmembers(content_fetcher):
if inspect.isclass(obj):
# @todo html_ is maybe better as fetcher_ or something
# In this case, make sure to edit the default one in store.py and fetch_site_status.py
if "html_" in name:
t=tuple([name,obj.fetcher_description])
p.append(t)
# See the if statement at the bottom of this file for how we switch between playwright and webdriver
import inspect
p = []
for name, obj in inspect.getmembers(sys.modules[__name__], inspect.isclass):
if inspect.isclass(obj):
# @todo html_ is maybe better as fetcher_ or something
# In this case, make sure to edit the default one in store.py and fetch_site_status.py
if name.startswith('html_'):
t = tuple([name, obj.fetcher_description])
p.append(t)

return p
return p
class html_webdriver(Fetcher):

class base_html_playwright(Fetcher):
fetcher_description = "Playwright {}/Javascript".format(
os.getenv("PLAYWRIGHT_BROWSER_TYPE", 'chromium').capitalize()
)
if os.getenv("PLAYWRIGHT_DRIVER_URL"):
fetcher_description += " via '{}'".format(os.getenv("PLAYWRIGHT_DRIVER_URL"))

browser_type = ''
command_executor = ''

# Configs for Proxy setup
# In the ENV vars, is prefixed with "playwright_proxy_", so it is for example "playwright_proxy_server"
playwright_proxy_settings_mappings = ['bypass', 'server', 'username', 'password']

proxy = None

def __init__(self, proxy_override=None):

# .strip('"') is going to save someone a lot of time when they accidently wrap the env value
self.browser_type = os.getenv("PLAYWRIGHT_BROWSER_TYPE", 'chromium').strip('"')
self.command_executor = os.getenv(
"PLAYWRIGHT_DRIVER_URL",
'ws://playwright-chrome:3000/playwright'
).strip('"')

# If any proxy settings are enabled, then we should setup the proxy object
proxy_args = {}
for k in self.playwright_proxy_settings_mappings:
v = os.getenv('playwright_proxy_' + k, False)
if v:
proxy_args[k] = v.strip('"')

if proxy_args:
self.proxy = proxy_args

# allow per-watch proxy selection override
if proxy_override:
self.proxy = {'server': proxy_override}

def run(self,
url,
timeout,
request_headers,
request_body,
request_method,
ignore_status_codes=False):

from playwright.sync_api import sync_playwright
import playwright._impl._api_types
from playwright._impl._api_types import Error, TimeoutError

with sync_playwright() as p:
browser_type = getattr(p, self.browser_type)

# Seemed to cause a connection Exception even tho I can see it connect
# self.browser = browser_type.connect(self.command_executor, timeout=timeout*1000)
browser = browser_type.connect_over_cdp(self.command_executor, timeout=timeout * 1000)

# Set user agent to prevent Cloudflare from blocking the browser
# Use the default one configured in the App.py model that's passed from fetch_site_status.py
context = browser.new_context(
user_agent=request_headers['User-Agent'] if request_headers.get('User-Agent') else 'Mozilla/5.0',
proxy=self.proxy
)
page = context.new_page()
page.set_viewport_size({"width": 1280, "height": 1024})
try:
response = page.goto(url, timeout=timeout * 1000, wait_until='commit')
# Wait_until = commit
# - `'commit'` - consider operation to be finished when network response is received and the document started loading.
# Better to not use any smarts from Playwright and just wait an arbitrary number of seconds
# This seemed to solve nearly all 'TimeoutErrors'
extra_wait = int(os.getenv("WEBDRIVER_DELAY_BEFORE_CONTENT_READY", 5))
page.wait_for_timeout(extra_wait * 1000)
except playwright._impl._api_types.TimeoutError as e:
raise EmptyReply(url=url, status_code=None)

if response is None:
raise EmptyReply(url=url, status_code=None)

self.status_code = response.status
self.content = page.content()
self.headers = response.all_headers()

# Some bug where it gives the wrong screenshot size, but making a request with the clip set first seems to solve it
# JPEG is better here because the screenshots can be very very large
page.screenshot(type='jpeg', clip={'x': 1.0, 'y': 1.0, 'width': 1280, 'height': 1024})
self.screenshot = page.screenshot(type='jpeg', full_page=True, quality=90)
context.close()
browser.close()


class base_html_webdriver(Fetcher):
if os.getenv("WEBDRIVER_URL"):
fetcher_description = "WebDriver Chrome/Javascript via '{}'".format(os.getenv("WEBDRIVER_URL"))
else:
@@ -94,12 +180,11 @@ class html_webdriver(Fetcher):
selenium_proxy_settings_mappings = ['proxyType', 'ftpProxy', 'httpProxy', 'noProxy',
'proxyAutoconfigUrl', 'sslProxy', 'autodetect',
'socksProxy', 'socksVersion', 'socksUsername', 'socksPassword']
proxy = None

def __init__(self, proxy_override=None):
from selenium.webdriver.common.proxy import Proxy as SeleniumProxy


proxy=None

def __init__(self):
# .strip('"') is going to save someone a lot of time when they accidently wrap the env value
self.command_executor = os.getenv("WEBDRIVER_URL", 'http://browser-chrome:4444/wd/hub').strip('"')

@@ -110,6 +195,16 @@ class html_webdriver(Fetcher):
if v:
proxy_args[k] = v.strip('"')

# Map back standard HTTP_ and HTTPS_PROXY to webDriver httpProxy/sslProxy
if not proxy_args.get('webdriver_httpProxy') and self.system_http_proxy:
proxy_args['httpProxy'] = self.system_http_proxy
if not proxy_args.get('webdriver_sslProxy') and self.system_https_proxy:
proxy_args['httpsProxy'] = self.system_https_proxy

# Allows override the proxy on a per-request basis
if proxy_override is not None:
proxy_args['httpProxy'] = proxy_override

if proxy_args:
self.proxy = SeleniumProxy(raw=proxy_args)

@@ -121,6 +216,9 @@ class html_webdriver(Fetcher):
request_method,
ignore_status_codes=False):

from selenium import webdriver
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
from selenium.common.exceptions import WebDriverException
# request_body, request_method unused for now, until some magic in the future happens.

# check env for WEBDRIVER_URL
@@ -145,9 +243,8 @@ class html_webdriver(Fetcher):
time.sleep(int(os.getenv("WEBDRIVER_DELAY_BEFORE_CONTENT_READY", 5)))
self.content = self.driver.page_source
self.headers = {}

def screenshot(self):
return self.driver.get_screenshot_as_png()
self.screenshot = self.driver.get_screenshot_as_png()
self.quit()

# Does the connection to the webdriver work? run a test connection.
def is_ready(self):
@@ -170,10 +267,14 @@ class html_webdriver(Fetcher):
except Exception as e:
print("Exception in chrome shutdown/quit" + str(e))


# "html_requests" is listed as the default fetcher in store.py!
class html_requests(Fetcher):
fetcher_description = "Basic fast Plaintext/HTTP Client"

def __init__(self, proxy_override=None):
self.proxy_override = proxy_override

def run(self,
url,
timeout,
@@ -182,12 +283,24 @@ class html_requests(Fetcher):
request_method,
ignore_status_codes=False):

proxies={}

# Allows override the proxy on a per-request basis
if self.proxy_override:
proxies = {'http': self.proxy_override, 'https': self.proxy_override, 'ftp': self.proxy_override}
else:
if self.system_http_proxy:
proxies['http'] = self.system_http_proxy
if self.system_https_proxy:
proxies['https'] = self.system_https_proxy

r = requests.request(method=request_method,
data=request_body,
url=url,
headers=request_headers,
timeout=timeout,
verify=False)
data=request_body,
url=url,
headers=request_headers,
timeout=timeout,
proxies=proxies,
verify=False)

# If the response did not tell us what encoding format to expect, Then use chardet to override what `requests` thinks.
# For example - some sites don't tell us it's utf-8, but return utf-8 content
@@ -207,3 +320,11 @@ class html_requests(Fetcher):
self.content = r.text
self.headers = r.headers


# Decide which is the 'real' HTML webdriver, this is more a system wide config
# rather than site-specific.
use_playwright_as_chrome_fetcher = os.getenv('PLAYWRIGHT_DRIVER_URL', False)
if use_playwright_as_chrome_fetcher:
html_webdriver = base_html_playwright
else:
html_webdriver = base_html_webdriver
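With this change `available_fetchers()` discovers fetcher classes by scanning the module for names starting with `html_`, and `html_webdriver` becomes an alias that points at either the Playwright or the Selenium implementation depending on `PLAYWRIGHT_DRIVER_URL`. A toy version of that discovery/alias pattern (the classes below are stand-ins, not the real fetchers):

```python
import inspect
import sys

class html_requests:
    fetcher_description = "Basic fast Plaintext/HTTP Client"

class base_playwright:
    fetcher_description = "Playwright Chromium/Javascript"

# The public name is just an alias to whichever backend is enabled.
html_webdriver = base_playwright

def available_fetchers():
    """Collect (class_name, description) for every class prefixed with 'html_'."""
    found = []
    for name, obj in inspect.getmembers(sys.modules[__name__], inspect.isclass):
        if name.startswith('html_'):
            found.append((name, obj.fetcher_description))
    return found

print(available_fetchers())
# [('html_requests', 'Basic fast Plaintext/HTTP Client'), ('html_webdriver', 'Playwright Chromium/Javascript')]
```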
@@ -16,6 +16,34 @@ class perform_site_check():
super().__init__(*args, **kwargs)
self.datastore = datastore

# If there was a proxy list enabled, figure out what proxy_args/which proxy to use
# if watch.proxy use that
# fetcher.proxy_override = watch.proxy or main config proxy
# Allows override the proxy on a per-request basis
# ALWAYS use the first one is nothing selected

def set_proxy_from_list(self, watch):
proxy_args = None
if self.datastore.proxy_list is None:
return None

# If its a valid one
if any([watch['proxy'] in p for p in self.datastore.proxy_list]):
proxy_args = watch['proxy']

# not valid (including None), try the system one
else:
system_proxy = self.datastore.data['settings']['requests']['proxy']
# Is not None and exists
if any([system_proxy in p for p in self.datastore.proxy_list]):
proxy_args = system_proxy

# Fallback - Did not resolve anything, use the first available
if proxy_args is None:
proxy_args = self.datastore.proxy_list[0][0]

return proxy_args

def run(self, uuid):
timestamp = int(time.time())  # used for storage etc too

@@ -66,8 +94,12 @@ class perform_site_check():
# If the klass doesnt exist, just use a default
klass = getattr(content_fetcher, "html_requests")

fetcher = klass()
proxy_args = self.set_proxy_from_list(watch)
fetcher = klass(proxy_override=proxy_args)

# Proxy List support
fetcher.run(url, timeout, request_headers, request_body, request_method, ignore_status_code)

# Fetching complete, now filters
# @todo move to class / maybe inside of fetcher abstract base?

@@ -117,11 +149,13 @@ class perform_site_check():
# Then we assume HTML
if has_filter_rule:
# For HTML/XML we offer xpath as an option, just start a regular xPath "/.."
if css_filter_rule[0] == '/':
html_content = html_tools.xpath_filter(xpath_filter=css_filter_rule, html_content=fetcher.content)
if css_filter_rule[0] == '/' or css_filter_rule.startswith('xpath:'):
html_content = html_tools.xpath_filter(xpath_filter=css_filter_rule.replace('xpath:', ''),
html_content=fetcher.content)
else:
# CSS Filter, extract the HTML that matches and feed that into the existing inscriptis::get_text
html_content = html_tools.css_filter(css_filter=css_filter_rule, html_content=fetcher.content)

if has_subtractive_selectors:
html_content = html_tools.element_removal(subtractive_selectors, html_content)

@@ -141,7 +175,6 @@ class perform_site_check():
# Re #340 - return the content before the 'ignore text' was applied
text_content_before_ignored_filter = stripped_text_from_html.encode('utf-8')


# Re #340 - return the content before the 'ignore text' was applied
text_content_before_ignored_filter = stripped_text_from_html.encode('utf-8')

@@ -164,8 +197,8 @@ class perform_site_check():
else:
fetched_md5 = hashlib.md5(stripped_text_from_html).hexdigest()

# On the first run of a site, watch['previous_md5'] will be an empty string, set it the current one.
if not len(watch['previous_md5']):
# On the first run of a site, watch['previous_md5'] will be None, set it the current one.
if not watch.get('previous_md5'):
watch['previous_md5'] = fetched_md5
update_obj["previous_md5"] = fetched_md5

@@ -192,9 +225,4 @@ class perform_site_check():
if not watch['title'] or not len(watch['title']):
update_obj['title'] = html_tools.extract_element(find='title', html_content=fetcher.content)

if self.datastore.data['settings']['application'].get('real_browser_save_screenshot', True):
screenshot = fetcher.screenshot()

fetcher.quit()

return changed_detected, update_obj, text_content_before_ignored_filter, screenshot
return changed_detected, update_obj, text_content_before_ignored_filter, fetcher.screenshot
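The filter change above lets a rule select XPath either by starting with `/` or by an explicit `xpath:` prefix, otherwise it is treated as a CSS selector. A minimal sketch of that dispatch, with stand-in filter functions instead of `html_tools`:

```python
def apply_filter_rule(rule: str, html: str) -> str:
    """Decide between XPath and CSS filtering based on the rule's prefix
    (the two inner functions are stand-ins for html_tools, not the real API)."""
    def xpath_filter(xpath, html_content):
        return f"<xpath {xpath}>"

    def css_filter(css, html_content):
        return f"<css {css}>"

    if rule.startswith('/') or rule.startswith('xpath:'):
        return xpath_filter(rule.replace('xpath:', ''), html)
    return css_filter(rule, html)

print(apply_filter_rule('xpath://div[@id="price"]', '<html></html>'))  # routed to XPath
print(apply_filter_rule('#price', '<html></html>'))                    # routed to CSS
```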
@@ -37,27 +37,31 @@ valid_method = {

default_method = 'GET'


class StringListField(StringField):
widget = widgets.TextArea()

def _value(self):
if self.data:
return "\r\n".join(self.data)
# ignore empty lines in the storage
data = list(filter(lambda x: len(x.strip()), self.data))
# Apply strip to each line
data = list(map(lambda x: x.strip(), data))
return "\r\n".join(data)
else:
return u''

# incoming
def process_formdata(self, valuelist):
if valuelist:
# Remove empty strings
cleaned = list(filter(None, valuelist[0].split("\n")))
self.data = [x.strip() for x in cleaned]
p = 1
if valuelist and len(valuelist[0].strip()):
# Remove empty strings, stripping and splitting \r\n, only \n etc.
self.data = valuelist[0].splitlines()
# Remove empty lines from the final data
self.data = list(filter(lambda x: len(x.strip()), self.data))
else:
self.data = []


class SaltyPasswordField(StringField):
widget = widgets.PasswordInput()
encrypted_password = ""
@@ -85,6 +89,13 @@ class SaltyPasswordField(StringField):
else:
self.data = False

class TimeBetweenCheckForm(Form):
weeks = IntegerField('Weeks', validators=[validators.Optional(), validators.NumberRange(min=0, message="Should contain zero or more seconds")])
days = IntegerField('Days', validators=[validators.Optional(), validators.NumberRange(min=0, message="Should contain zero or more seconds")])
hours = IntegerField('Hours', validators=[validators.Optional(), validators.NumberRange(min=0, message="Should contain zero or more seconds")])
minutes = IntegerField('Minutes', validators=[validators.Optional(), validators.NumberRange(min=0, message="Should contain zero or more seconds")])
seconds = IntegerField('Seconds', validators=[validators.Optional(), validators.NumberRange(min=0, message="Should contain zero or more seconds")])
# @todo add total seconds minimum validatior = minimum_seconds_recheck_time

# Separated by key:value
class StringDictKeyValue(StringField):
@@ -313,8 +324,7 @@ class watchForm(commonSettingsForm):
url = fields.URLField('URL', validators=[validateURL()])
tag = StringField('Group tag', [validators.Optional(), validators.Length(max=35)], default='')

minutes_between_check = fields.IntegerField('Maximum time in minutes until recheck',
[validators.Optional(), validators.NumberRange(min=1)])
time_between_check = FormField(TimeBetweenCheckForm)

css_filter = StringField('CSS/JSON/XPATH Filter', [ValidateCSSJSONXPATHInput()], default='')

@@ -327,9 +337,9 @@ class watchForm(commonSettingsForm):
method = SelectField('Request method', choices=valid_method, default=default_method)
ignore_status_codes = BooleanField('Ignore status codes (process non-2xx status codes as normal)', default=False)
trigger_text = StringListField('Trigger/wait for text', [validators.Optional(), ValidateListRegex()])

save_button = SubmitField('Save', render_kw={"class": "pure-button pure-button-primary"})
save_and_preview_button = SubmitField('Save & Preview', render_kw={"class": "pure-button pure-button-primary"})
proxy = RadioField('Proxy')

def validate(self, **kwargs):
if not super().validate():
@@ -347,8 +357,10 @@ class watchForm(commonSettingsForm):

# datastore.data['settings']['requests']..
class globalSettingsRequestForm(Form):
minutes_between_check = fields.IntegerField('Maximum time in minutes until recheck',
[validators.NumberRange(min=1)])
time_between_check = FormField(TimeBetweenCheckForm)
proxy = RadioField('Proxy')


# datastore.data['settings']['application']..
class globalSettingsApplicationForm(commonSettingsForm):

@@ -371,4 +383,3 @@ class globalSettingsForm(Form):
requests = FormField(globalSettingsRequestForm)
application = FormField(globalSettingsApplicationForm)
save_button = SubmitField('Save', render_kw={"class": "pure-button pure-button-primary"})
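The reworked `process_formdata()` normalises textarea input with `splitlines()` and drops blank lines rather than splitting on `\n` only. The same behaviour as a plain function, for illustration:

```python
def clean_string_list(raw: str) -> list:
    """Split textarea input on any line ending and drop empty/whitespace-only lines."""
    if not raw or not raw.strip():
        return []
    lines = raw.splitlines()
    return [line for line in lines if line.strip()]

print(clean_string_list("first rule\r\n\r\n  \nsecond rule\n"))
# ['first rule', 'second rule']
```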
changedetectionio/importer.py (new file, 133 lines)
@@ -0,0 +1,133 @@
from abc import ABC, abstractmethod
import time
import validators


class Importer():
remaining_data = []
new_uuids = []
good = 0

def __init__(self):
self.new_uuids = []
self.good = 0
self.remaining_data = []

@abstractmethod
def run(self,
data,
flash,
datastore):
pass


class import_url_list(Importer):
"""
Imports a list, can be in <code>https://example.com tag1, tag2, last tag</code> format
"""
def run(self,
data,
flash,
datastore,
):

urls = data.split("\n")
good = 0
now = time.time()

if (len(urls) > 5000):
flash("Importing 5,000 of the first URLs from your list, the rest can be imported again.")

for url in urls:
url = url.strip()
if not len(url):
continue

tags = ""

# 'tags' should be a csv list after the URL
if ' ' in url:
url, tags = url.split(" ", 1)

# Flask wtform validators wont work with basic auth, use validators package
# Up to 5000 per batch so we dont flood the server
if len(url) and validators.url(url.replace('source:', '')) and good < 5000:
new_uuid = datastore.add_watch(url=url.strip(), tag=tags, write_to_disk_now=False)
if new_uuid:
# Straight into the queue.
self.new_uuids.append(new_uuid)
good += 1
continue

# Worked past the 'continue' above, append it to the bad list
if self.remaining_data is None:
self.remaining_data = []
self.remaining_data.append(url)

flash("{} Imported from list in {:.2f}s, {} Skipped.".format(good, time.time() - now, len(self.remaining_data)))


class import_distill_io_json(Importer):
def run(self,
data,
flash,
datastore,
):

import json
good = 0
now = time.time()
self.new_uuids=[]


try:
data = json.loads(data.strip())
except json.decoder.JSONDecodeError:
flash("Unable to read JSON file, was it broken?", 'error')
return

if not data.get('data'):
flash("JSON structure looks invalid, was it broken?", 'error')
return

for d in data.get('data'):
d_config = json.loads(d['config'])
extras = {'title': d['name']}

if len(d['uri']) and good < 5000:
try:
# @todo we only support CSS ones at the moment
if d_config['selections'][0]['frames'][0]['excludes'][0]['type'] == 'css':
extras['subtractive_selectors'] = d_config['selections'][0]['frames'][0]['excludes'][0]['expr']
except KeyError:
pass
except IndexError:
pass

try:
extras['css_filter'] = d_config['selections'][0]['frames'][0]['includes'][0]['expr']
if d_config['selections'][0]['frames'][0]['includes'][0]['type'] == 'xpath':
extras['css_filter'] = 'xpath:' + extras['css_filter']

except KeyError:
pass
except IndexError:
pass

try:
extras['tag'] = ", ".join(d['tags'])
except KeyError:
pass
except IndexError:
pass

new_uuid = datastore.add_watch(url=d['uri'].strip(),
extras=extras,
write_to_disk_now=False)

if new_uuid:
# Straight into the queue.
self.new_uuids.append(new_uuid)
good += 1

flash("{} Imported from Distill.io in {:.2f}s, {} Skipped.".format(len(self.new_uuids), time.time() - now, len(self.remaining_data)))
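`import_url_list` treats everything after the first space on a line as the tag list and only accepts lines that pass URL validation. A small sketch of that line format (the scheme check below is a simplified stand-in for `validators.url()`):

```python
def parse_import_line(line: str):
    """Split 'https://example.com tag1, tag2' into (url, tags); tags may be empty."""
    line = line.strip()
    if not line:
        return None
    tags = ""
    if ' ' in line:
        line, tags = line.split(' ', 1)
    # Simplified stand-in for the validators.url() check used by the importer
    if not line.startswith(('http://', 'https://', 'source:')):
        return None
    return line, tags

print(parse_import_line("https://example.com shopping, price-watch"))
# ('https://example.com', 'shopping, price-watch')
print(parse_import_line("not-a-url"))  # None
```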
@@ -10,9 +10,7 @@ from changedetectionio.notification import (
)

class model(dict):
def __init__(self, *arg, **kw):
super(model, self).__init__(*arg, **kw)
self.update({
base_config = {
'note': "Hello! If you change this file manually, please be sure to restart your changedetection.io instance!",
'watching': {},
'settings': {
@@ -24,9 +22,9 @@ class model(dict):
},
'requests': {
'timeout': 15, # Default 15 seconds
# Default 3 hours
'minutes_between_check': 3 * 60, # Default 3 hours
'workers': 10 # Number of threads, lower is better for slow connections
'time_between_check': {'weeks': None, 'days': None, 'hours': 3, 'minutes': None, 'seconds': None},
'workers': 10, # Number of threads, lower is better for slow connections
'proxy': None # Preferred proxy connection
},
'application': {
'password': False,
@@ -46,4 +44,8 @@ class model(dict):
'schema_version' : 0
}
}
})
}

def __init__(self, *arg, **kw):
super(model, self).__init__(*arg, **kw)
self.update(self.base_config)
@@ -2,7 +2,7 @@ import os

import uuid as uuid_builder

minimum_seconds_recheck_time = int(os.getenv('MINIMUM_SECONDS_RECHECK_TIME', 5))
minimum_seconds_recheck_time = int(os.getenv('MINIMUM_SECONDS_RECHECK_TIME', 60))

from changedetectionio.notification import (
default_notification_body,
@@ -12,18 +12,18 @@ from changedetectionio.notification import (


class model(dict):
def __init__(self, *arg, **kw):
self.update({
base_config = {
'url': None,
'tag': None,
'last_checked': 0,
'last_changed': 0,
'paused': False,
'last_viewed': 0, # history key value of the last viewed via the [diff] link
'newest_history_key': "",
'newest_history_key': 0,
'title': None,
'previous_md5': "",
'uuid': str(uuid_builder.uuid4()),
'previous_md5': False,
# UUID not needed, should be generated only as a key
# 'uuid':
'headers': {}, # Extra headers to send
'body': None,
'method': 'GET',
@@ -39,24 +39,31 @@ class model(dict):
'trigger_text': [], # List of text or regex to wait for until a change is detected
'fetch_backend': None,
'extract_title_as_title': False,
'proxy': None, # Preferred proxy connection
# Re #110, so then if this is set to None, we know to use the default value instead
# Requires setting to None on submit if it's the same as the default
# Should be all None by default, so we use the system default in this case.
'minutes_between_check': None
})
'time_between_check': {'weeks': None, 'days': None, 'hours': None, 'minutes': None, 'seconds': None}
}

def __init__(self, *arg, **kw):
self.update(self.base_config)
# goes at the end so we update the default object with the initialiser
super(model, self).__init__(*arg, **kw)


@property
def has_empty_checktime(self):
if self.get('minutes_between_check', None):
return False
return True
# using all() + dictionary comprehension
# Check if all values are 0 in dictionary
res = all(x == None or x == False or x==0 for x in self.get('time_between_check', {}).values())
return res

@property
def threshold_seconds(self):
sec = self.get('minutes_between_check', None)
if sec:
sec = sec * 60
return sec
seconds = 0
mtable = {'seconds': 1, 'minutes': 60, 'hours': 3600, 'days': 86400, 'weeks': 86400 * 7}
for m, n in mtable.items():
x = self.get('time_between_check', {}).get(m, None)
if x:
seconds += x * n
return seconds
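The new `threshold_seconds` property turns the `time_between_check` dict into a single number of seconds. An equivalent standalone calculation:

```python
def threshold_seconds(time_between_check: dict) -> int:
    """Sum the per-unit values (None counts as zero) into seconds."""
    multipliers = {'seconds': 1, 'minutes': 60, 'hours': 3600,
                   'days': 86400, 'weeks': 86400 * 7}
    total = 0
    for unit, factor in multipliers.items():
        value = time_between_check.get(unit)
        if value:
            total += value * factor
    return total

print(threshold_seconds({'weeks': None, 'days': None, 'hours': 3,
                         'minutes': None, 'seconds': None}))  # 10800
```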
|
||||
@@ -66,19 +66,25 @@ def process_notification(n_object, datastore):
|
||||
if not 'avatar_url' in url:
|
||||
url += k + 'avatar_url=https://raw.githubusercontent.com/dgtlmoon/changedetection.io/master/changedetectionio/static/images/avatar-256x256.png'
|
||||
|
||||
payload_max_size = 1700
|
||||
# Trim everything to a max of 1700 total chars (leave some for padding, control etc)
|
||||
# Incase n_title > 1700
|
||||
# basically trim back the body until the total size fits our threshold
|
||||
# and trim off the n_title to fit always.
|
||||
body_limit = max(0, payload_max_size - len(n_title))
|
||||
body = n_body[0:body_limit] if 'discord://' in url else n_body
|
||||
if url.startswith('tgram://'):
|
||||
# real limit is 4096, but minus some for extra metadata
|
||||
payload_max_size = 3600
|
||||
body_limit = max(0, payload_max_size - len(n_title))
|
||||
n_title = n_title[0:payload_max_size]
|
||||
n_body = n_body[0:body_limit]
|
||||
|
||||
elif url.startswith('discord://'):
|
||||
# real limit is 2000, but minus some for extra metadata
|
||||
payload_max_size = 1700
|
||||
body_limit = max(0, payload_max_size - len(n_title))
|
||||
n_title = n_title[0:payload_max_size]
|
||||
n_body = n_body[0:body_limit]
|
||||
|
||||
apobj.add(url)
|
||||
|
||||
apobj.notify(
|
||||
title=n_title[0:payload_max_size],
|
||||
body=body,
|
||||
title=n_title,
|
||||
body=n_body,
|
||||
body_format=n_format)
|
||||
|
||||
apobj.clear()
|
||||
|
||||
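The notification change swaps the single Discord-only limit for per-service budgets (roughly 4096 characters for Telegram and 2000 for Discord, minus headroom for metadata). A sketch of that trimming rule; the limits mirror the ones in the diff:

```python
def trim_notification(url: str, title: str, body: str):
    """Apply a per-service payload budget: the title always fits, the body takes the rest.
    Limits mirror the ones in the diff (3600 for tgram://, 1700 for discord://)."""
    if url.startswith('tgram://'):
        payload_max_size = 3600
    elif url.startswith('discord://'):
        payload_max_size = 1700
    else:
        return title, body  # other services are sent untrimmed
    body_limit = max(0, payload_max_size - len(title))
    return title[:payload_max_size], body[:body_limit]

title, body = trim_notification('discord://token', 'Change detected', 'x' * 5000)
print(len(body))  # 1685 (= 1700 - len('Change detected'))
```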
changedetectionio/static/images/copy.svg (new file, 40 lines)
@@ -0,0 +1,40 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<svg
version="1.1"
id="Layer_1"
x="0px"
y="0px"
viewBox="0 0 115.77 122.88"
style="enable-background:new 0 0 115.77 122.88"
xml:space="preserve"
sodipodi:docname="copy.svg"
inkscape:version="1.1.1 (1:1.1+202109281949+c3084ef5ed)"
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
xmlns="http://www.w3.org/2000/svg"
xmlns:svg="http://www.w3.org/2000/svg"><defs
id="defs11" /><sodipodi:namedview
id="namedview9"
pagecolor="#ffffff"
bordercolor="#666666"
borderopacity="1.0"
inkscape:pageshadow="2"
inkscape:pageopacity="0.0"
inkscape:pagecheckerboard="0"
showgrid="false"
inkscape:zoom="5.5501303"
inkscape:cx="57.83648"
inkscape:cy="61.439999"
inkscape:window-width="1920"
inkscape:window-height="1056"
inkscape:window-x="1920"
inkscape:window-y="0"
inkscape:window-maximized="1"
inkscape:current-layer="g6" /><style
type="text/css"
id="style2">.st0{fill-rule:evenodd;clip-rule:evenodd;}</style><g
id="g6"><path
class="st0"
d="M89.62,13.96v7.73h12.19h0.01v0.02c3.85,0.01,7.34,1.57,9.86,4.1c2.5,2.51,4.06,5.98,4.07,9.82h0.02v0.02 v73.27v0.01h-0.02c-0.01,3.84-1.57,7.33-4.1,9.86c-2.51,2.5-5.98,4.06-9.82,4.07v0.02h-0.02h-61.7H40.1v-0.02 c-3.84-0.01-7.34-1.57-9.86-4.1c-2.5-2.51-4.06-5.98-4.07-9.82h-0.02v-0.02V92.51H13.96h-0.01v-0.02c-3.84-0.01-7.34-1.57-9.86-4.1 c-2.5-2.51-4.06-5.98-4.07-9.82H0v-0.02V13.96v-0.01h0.02c0.01-3.85,1.58-7.34,4.1-9.86c2.51-2.5,5.98-4.06,9.82-4.07V0h0.02h61.7 h0.01v0.02c3.85,0.01,7.34,1.57,9.86,4.1c2.5,2.51,4.06,5.98,4.07,9.82h0.02V13.96L89.62,13.96z M79.04,21.69v-7.73v-0.02h0.02 c0-0.91-0.39-1.75-1.01-2.37c-0.61-0.61-1.46-1-2.37-1v0.02h-0.01h-61.7h-0.02v-0.02c-0.91,0-1.75,0.39-2.37,1.01 c-0.61,0.61-1,1.46-1,2.37h0.02v0.01v64.59v0.02h-0.02c0,0.91,0.39,1.75,1.01,2.37c0.61,0.61,1.46,1,2.37,1v-0.02h0.01h12.19V35.65 v-0.01h0.02c0.01-3.85,1.58-7.34,4.1-9.86c2.51-2.5,5.98-4.06,9.82-4.07v-0.02h0.02H79.04L79.04,21.69z M105.18,108.92V35.65v-0.02 h0.02c0-0.91-0.39-1.75-1.01-2.37c-0.61-0.61-1.46-1-2.37-1v0.02h-0.01h-61.7h-0.02v-0.02c-0.91,0-1.75,0.39-2.37,1.01 c-0.61,0.61-1,1.46-1,2.37h0.02v0.01v73.27v0.02h-0.02c0,0.91,0.39,1.75,1.01,2.37c0.61,0.61,1.46,1,2.37,1v-0.02h0.01h61.7h0.02 v0.02c0.91,0,1.75-0.39,2.37-1.01c0.61-0.61,1-1.46,1-2.37h-0.02V108.92L105.18,108.92z"
id="path4"
style="fill:#ffffff;fill-opacity:1" /></g></svg>
changedetectionio/static/images/spread.svg (new file, 46 lines)
@@ -0,0 +1,46 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<svg
width="18"
height="19.92"
viewBox="0 0 18 19.92"
version="1.1"
id="svg6"
sodipodi:docname="spread.svg"
inkscape:version="1.1.1 (1:1.1+202109281949+c3084ef5ed)"
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
xmlns="http://www.w3.org/2000/svg"
xmlns:svg="http://www.w3.org/2000/svg">
<defs
id="defs10" />
<sodipodi:namedview
id="namedview8"
pagecolor="#ffffff"
bordercolor="#666666"
borderopacity="1.0"
inkscape:pageshadow="2"
inkscape:pageopacity="0.0"
inkscape:pagecheckerboard="0"
showgrid="false"
fit-margin-top="0"
fit-margin-left="0"
fit-margin-right="0"
fit-margin-bottom="0"
inkscape:zoom="28.416667"
inkscape:cx="9.0087975"
inkscape:cy="9.9941348"
inkscape:window-width="1920"
inkscape:window-height="1056"
inkscape:window-x="1920"
inkscape:window-y="0"
inkscape:window-maximized="1"
inkscape:current-layer="svg6" />
<path
d="M -3,-2 H 21 V 22 H -3 Z"
fill="none"
id="path2" />
<path
d="m 15,14.08 c -0.76,0 -1.44,0.3 -1.96,0.77 L 5.91,10.7 C 5.96,10.47 6,10.24 6,10 6,9.76 5.96,9.53 5.91,9.3 L 12.96,5.19 C 13.5,5.69 14.21,6 15,6 16.66,6 18,4.66 18,3 18,1.34 16.66,0 15,0 c -1.66,0 -3,1.34 -3,3 0,0.24 0.04,0.47 0.09,0.7 L 5.04,7.81 C 4.5,7.31 3.79,7 3,7 1.34,7 0,8.34 0,10 c 0,1.66 1.34,3 3,3 0.79,0 1.5,-0.31 2.04,-0.81 l 7.12,4.16 c -0.05,0.21 -0.08,0.43 -0.08,0.65 0,1.61 1.31,2.92 2.92,2.92 1.61,0 2.92,-1.31 2.92,-2.92 0,-1.61 -1.31,-2.92 -2.92,-2.92 z"
id="path4"
style="fill:#0078e7;fill-opacity:1" />
</svg>
@@ -4,7 +4,7 @@ $(document).ready(function() {
e.preventDefault();
email = prompt("Destination email");
if(email) {
var n = $("#notification_urls");
var n = $(".notification-urls");
var p=email_notification_prefix;
$(n).val( $.trim( $(n).val() )+"\n"+email_notification_prefix+email );
}
@@ -25,10 +25,10 @@ $(document).ready(function() {

data = {
window_url : window.location.href,
notification_urls : $('#notification_urls').val(),
notification_title : $('#notification_title').val(),
notification_body : $('#notification_body').val(),
notification_format : $('#notification_format').val(),
notification_urls : $('.notification-urls').val(),
notification_title : $('.notification-title').val(),
notification_body : $('.notification-body').val(),
notification_format : $('.notification-format').val(),
}
for (key in data) {
if (!data[key].length) {
@@ -1,13 +0,0 @@
|
||||
window.addEventListener("load", (event) => {
|
||||
// just an example for now
|
||||
function toggleVisible(elem) {
|
||||
// theres better ways todo this
|
||||
var x = document.getElementById(elem);
|
||||
if (x.style.display === "block") {
|
||||
x.style.display = "none";
|
||||
} else {
|
||||
x.style.display = "block";
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
@@ -3,4 +3,22 @@ $(function () {
|
||||
$('.diff-link').click(function () {
|
||||
$(this).closest('.unviewed').removeClass('unviewed');
|
||||
});
|
||||
|
||||
$('.with-share-link > *').click(function () {
|
||||
$("#copied-clipboard").remove();
|
||||
|
||||
var range = document.createRange();
|
||||
var n=$("#share-link")[0];
|
||||
range.selectNode(n);
|
||||
window.getSelection().removeAllRanges();
|
||||
window.getSelection().addRange(range);
|
||||
document.execCommand("copy");
|
||||
window.getSelection().removeAllRanges();
|
||||
|
||||
$('.with-share-link').append('<span style="font-size: 80%; color: #fff;" id="copied-clipboard">Copied to clipboard</span>');
|
||||
$("#copied-clipboard").fadeOut(2500, function() {
|
||||
$(this).remove();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
14
changedetectionio/static/js/watch-settings.js
Normal file
@@ -0,0 +1,14 @@
|
||||
$(document).ready(function() {
|
||||
function toggle() {
|
||||
if ($('input[name="fetch_backend"]:checked').val() != 'html_requests') {
|
||||
$('#requests-override-options').hide();
|
||||
} else {
|
||||
$('#requests-override-options').show();
|
||||
}
|
||||
}
|
||||
$('input[name="fetch_backend"]').click(function (e) {
|
||||
toggle();
|
||||
});
|
||||
toggle();
|
||||
|
||||
});
|
||||
@@ -180,6 +180,9 @@ body:after, body:before {
|
||||
.messages li.notice {
|
||||
background: rgba(255, 255, 255, 0.5); }
|
||||
|
||||
.messages.with-share-link > *:hover {
|
||||
cursor: pointer; }
|
||||
|
||||
#notification-customisation {
|
||||
border: 1px solid #ccc;
|
||||
padding: 0.5rem;
|
||||
@@ -306,10 +309,10 @@ footer {
|
||||
font-weight: bold; }
|
||||
.pure-form textarea {
|
||||
width: 100%; }
|
||||
.pure-form ul#fetch_backend {
|
||||
.pure-form .inline-radio ul {
|
||||
margin: 0px;
|
||||
list-style: none; }
|
||||
.pure-form ul#fetch_backend > li > * {
|
||||
.pure-form .inline-radio ul li > * {
|
||||
display: inline-block; }
|
||||
|
||||
@media only screen and (max-width: 760px), (min-device-width: 768px) and (max-device-width: 1024px) {
|
||||
@@ -440,3 +443,8 @@ ul {
|
||||
padding-left: 1em;
|
||||
padding-top: 0px;
|
||||
margin-top: 4px; }
|
||||
|
||||
.time-check-widget tr {
|
||||
display: inline; }
|
||||
.time-check-widget tr input[type="number"] {
|
||||
width: 4em; }
|
||||
|
||||
@@ -237,6 +237,11 @@ body:after, body:before {
|
||||
background: rgba(255, 255, 255, .5);
|
||||
}
|
||||
}
|
||||
&.with-share-link {
|
||||
> *:hover {
|
||||
cursor:pointer;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#notification-customisation {
|
||||
@@ -413,14 +418,16 @@ footer {
|
||||
textarea {
|
||||
width: 100%;
|
||||
}
|
||||
ul#fetch_backend {
|
||||
margin: 0px;
|
||||
list-style: none;
|
||||
> li {
|
||||
> * {
|
||||
display: inline-block;
|
||||
.inline-radio {
|
||||
ul {
|
||||
margin: 0px;
|
||||
list-style: none;
|
||||
li {
|
||||
> * {
|
||||
display: inline-block;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -624,4 +631,13 @@ ul {
|
||||
padding-left: 1em;
|
||||
padding-top: 0px;
|
||||
margin-top: 4px;
|
||||
}
|
||||
|
||||
.time-check-widget {
|
||||
tr {
|
||||
display: inline;
|
||||
input[type="number"] {
|
||||
width: 4em;
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,3 +1,6 @@
|
||||
from flask import (
|
||||
flash
|
||||
)
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
@@ -7,15 +10,21 @@ import uuid as uuid_builder
|
||||
from copy import deepcopy
|
||||
from os import mkdir, path, unlink
|
||||
from threading import Lock
|
||||
import re
|
||||
import requests
|
||||
|
||||
from changedetectionio.model import Watch, App
|
||||
|
||||
from . model import App, Watch
|
||||
|
||||
# Is there an existing library to ensure some data store (JSON etc) is in sync with CRUD methods?
|
||||
# Open a github issue if you know something :)
|
||||
# https://stackoverflow.com/questions/6190468/how-to-trigger-function-on-value-change
|
||||
class ChangeDetectionStore:
|
||||
lock = Lock()
|
||||
# For general updates/writes that can wait a few seconds
|
||||
needs_write = False
|
||||
|
||||
# For when we edit, we should write to disk
|
||||
needs_write_urgent = False
|
||||
|
||||
def __init__(self, datastore_path="/datastore", include_default_watches=True, version_tag="0.0.0"):
|
||||
# Should only be active for docker
|
||||
@@ -23,12 +32,14 @@ class ChangeDetectionStore:
|
||||
self.needs_write = False
|
||||
self.datastore_path = datastore_path
|
||||
self.json_store_path = "{}/url-watches.json".format(self.datastore_path)
|
||||
self.proxy_list = None
|
||||
self.stop_thread = False
|
||||
|
||||
self.__data = App.model()
|
||||
|
||||
# Base definition for all watchers
|
||||
self.generic_definition = Watch.model()
|
||||
# deepcopy part of #569 - not sure why it's needed exactly
|
||||
self.generic_definition = deepcopy(Watch.model())
|
||||
|
||||
if path.isfile('changedetectionio/source.txt'):
|
||||
with open('changedetectionio/source.txt') as f:
|
||||
@@ -100,6 +111,17 @@ class ChangeDetectionStore:
|
||||
secret = secrets.token_hex(16)
|
||||
self.__data['settings']['application']['rss_access_token'] = secret
|
||||
|
||||
|
||||
# Proxy list support - available as a selection in settings when text file is imported
|
||||
# CSV list
|
||||
# "name, address", or just "name"
|
||||
proxy_list_file = "{}/proxies.txt".format(self.datastore_path)
|
||||
if path.isfile(proxy_list_file):
|
||||
self.import_proxy_list(proxy_list_file)
|
||||
|
||||
# Bump the update version by running updates
|
||||
self.run_updates()
|
||||
|
||||
self.needs_write = True
|
||||
|
||||
# Finally start the thread that will manage periodic data saves to JSON
|
||||
@@ -131,6 +153,10 @@ class ChangeDetectionStore:
|
||||
|
||||
def update_watch(self, uuid, update_obj):
|
||||
|
||||
# It's possible that the watch could be deleted before update
|
||||
if not self.__data['watching'].get(uuid):
|
||||
return
|
||||
|
||||
with self.lock:
|
||||
|
||||
# In python 3.9 we have the |= dict operator, but that still will lose data on nested structures...
|
||||
@@ -145,6 +171,17 @@ class ChangeDetectionStore:
|
||||
|
||||
self.needs_write = True
|
||||
|
||||
@property
|
||||
def threshold_seconds(self):
|
||||
seconds = 0
|
||||
mtable = {'seconds': 1, 'minutes': 60, 'hours': 3600, 'days': 86400, 'weeks': 86400 * 7}
|
||||
minimum_seconds_recheck_time = int(os.getenv('MINIMUM_SECONDS_RECHECK_TIME', 60))
|
||||
for m, n in mtable.items():
|
||||
x = self.__data['settings']['requests']['time_between_check'].get(m)
|
||||
if x:
|
||||
seconds += x * n
|
||||
return max(seconds, minimum_seconds_recheck_time)
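The threshold_seconds property above folds the per-unit time_between_check values into a single number of seconds and clamps it to MINIMUM_SECONDS_RECHECK_TIME. A minimal sketch of the same conversion with made-up example values (the real property reads them from the settings dict):

# Sketch only - mirrors the conversion performed by threshold_seconds, with invented inputs
import os
mtable = {'seconds': 1, 'minutes': 60, 'hours': 3600, 'days': 86400, 'weeks': 86400 * 7}
time_between_check = {'weeks': None, 'days': None, 'hours': 3, 'minutes': 30, 'seconds': None}
seconds = sum(mtable[unit] * value for unit, value in time_between_check.items() if value)
minimum = int(os.getenv('MINIMUM_SECONDS_RECHECK_TIME', 60))
print(max(seconds, minimum))  # 3*3600 + 30*60 = 12600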
|
||||
|
||||
@property
|
||||
def data(self):
|
||||
has_unviewed = False
|
||||
@@ -207,7 +244,7 @@ class ChangeDetectionStore:
|
||||
|
||||
del self.data['watching'][uuid]
|
||||
|
||||
self.needs_write = True
|
||||
self.needs_write_urgent = True
|
||||
|
||||
# Clone a watch by UUID
|
||||
def clone(self, uuid):
|
||||
@@ -231,61 +268,58 @@ class ChangeDetectionStore:
|
||||
return self.data['watching'][uuid].get(val)
|
||||
|
||||
# Remove a watch's data but keep the entry (URL etc)
|
||||
def scrub_watch(self, uuid, limit_timestamp = False):
|
||||
def scrub_watch(self, uuid):
|
||||
import pathlib
|
||||
|
||||
import hashlib
|
||||
del_timestamps = []
|
||||
self.__data['watching'][uuid].update({'history': {}, 'last_checked': 0, 'last_changed': 0, 'newest_history_key': 0, 'previous_md5': False})
|
||||
self.needs_write_urgent = True
|
||||
|
||||
changes_removed = 0
|
||||
|
||||
for timestamp, path in self.data['watching'][uuid]['history'].items():
|
||||
if not limit_timestamp or (limit_timestamp is not False and int(timestamp) > limit_timestamp):
|
||||
self.unlink_history_file(path)
|
||||
del_timestamps.append(timestamp)
|
||||
changes_removed += 1
|
||||
|
||||
if not limit_timestamp:
|
||||
self.data['watching'][uuid]['last_checked'] = 0
|
||||
self.data['watching'][uuid]['last_changed'] = 0
|
||||
self.data['watching'][uuid]['previous_md5'] = ""
|
||||
|
||||
|
||||
for timestamp in del_timestamps:
|
||||
del self.data['watching'][uuid]['history'][str(timestamp)]
|
||||
|
||||
# If there was a limitstamp, we need to reset some meta data about the entry
|
||||
# This has to happen after we remove the others from the list
|
||||
if limit_timestamp:
|
||||
newest_key = self.get_newest_history_key(uuid)
|
||||
if newest_key:
|
||||
self.data['watching'][uuid]['last_checked'] = int(newest_key)
|
||||
# @todo should be the original value if it was less than newest key
|
||||
self.data['watching'][uuid]['last_changed'] = int(newest_key)
|
||||
try:
|
||||
with open(self.data['watching'][uuid]['history'][str(newest_key)], "rb") as fp:
|
||||
content = fp.read()
|
||||
self.data['watching'][uuid]['previous_md5'] = hashlib.md5(content).hexdigest()
|
||||
except (FileNotFoundError, IOError):
|
||||
self.data['watching'][uuid]['previous_md5'] = ""
|
||||
pass
|
||||
|
||||
self.needs_write = True
|
||||
return changes_removed
|
||||
for item in pathlib.Path(self.datastore_path).rglob(uuid+"/*.txt"):
|
||||
unlink(item)
|
||||
|
||||
def add_watch(self, url, tag="", extras=None, write_to_disk_now=True):
|
||||
if extras is None:
|
||||
extras = {}
|
||||
# In case these are copied across, assume it's a reference and deepcopy()
|
||||
apply_extras = deepcopy(extras)
|
||||
|
||||
# Was it a share link? try to fetch the data
|
||||
if (url.startswith("https://changedetection.io/share/")):
|
||||
try:
|
||||
r = requests.request(method="GET",
|
||||
url=url,
|
||||
# So we know to return the JSON instead of the human-friendly "help" page
|
||||
headers={'App-Guid': self.__data['app_guid']})
|
||||
res = r.json()
|
||||
|
||||
# List of permissible stuff we accept from the wild internet
|
||||
for k in ['url', 'tag',
|
||||
'paused', 'title',
|
||||
'previous_md5', 'headers',
|
||||
'body', 'method',
|
||||
'ignore_text', 'css_filter',
|
||||
'subtractive_selectors', 'trigger_text',
|
||||
'extract_title_as_title']:
|
||||
if res.get(k):
|
||||
apply_extras[k] = res[k]
|
||||
|
||||
except Exception as e:
|
||||
logging.error("Error fetching metadata for shared watch link", url, str(e))
|
||||
flash("Error fetching metadata for {}".format(url), 'error')
|
||||
return False
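For context, the share-link handling above copies only a whitelisted set of keys from the returned JSON into the new watch. A hypothetical example of such a payload, using invented values but only key names the loop above accepts:

# Illustrative only - the values are invented, the key names match the whitelist above
shared_watch_example = {
    "url": "https://example.com/pricing",
    "tag": "pricing",
    "title": "Example pricing page",
    "css_filter": ".price-table",
    "extract_title_as_title": True,
}
# Keys outside the whitelist (for example "uuid" or "history") would simply be ignored.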
|
||||
|
||||
with self.lock:
|
||||
# @todo use a common generic version of this
|
||||
new_uuid = str(uuid_builder.uuid4())
|
||||
new_watch = Watch.model({
|
||||
# #Re 569
|
||||
# Not sure why deepcopy was needed here, sometimes new watches would appear to already have 'history' set
|
||||
# I assumed this would instantiate a new object but somehow an existing dict was getting used
|
||||
new_watch = deepcopy(Watch.model({
|
||||
'url': url,
|
||||
'tag': tag
|
||||
})
|
||||
}))
|
||||
|
||||
|
||||
# In case these are copied across, assume it's a reference and deepcopy()
|
||||
apply_extras = deepcopy(extras)
|
||||
for k in ['uuid', 'history', 'last_checked', 'last_changed', 'newest_history_key', 'previous_md5', 'viewed']:
|
||||
if k in apply_extras:
|
||||
del apply_extras[k]
|
||||
@@ -339,7 +373,7 @@ class ChangeDetectionStore:
|
||||
|
||||
def sync_to_json(self):
|
||||
logging.info("Saving JSON..")
|
||||
|
||||
print("Saving JSON..")
|
||||
try:
|
||||
data = deepcopy(self.__data)
|
||||
except RuntimeError as e:
|
||||
@@ -361,6 +395,7 @@ class ChangeDetectionStore:
|
||||
logging.error("Error writing JSON!! (Main JSON file save was skipped) : %s", str(e))
|
||||
|
||||
self.needs_write = False
|
||||
self.needs_write_urgent = False
|
||||
|
||||
# Thread runner, this helps with thread/write issues when there are many operations that want to update the JSON
|
||||
# by just running periodically in one thread; according to Python, dict updates are thread-safe.
|
||||
@@ -371,14 +406,14 @@ class ChangeDetectionStore:
|
||||
print("Shutting down datastore thread")
|
||||
return
|
||||
|
||||
if self.needs_write:
|
||||
if self.needs_write or self.needs_write_urgent:
|
||||
self.sync_to_json()
|
||||
|
||||
# Once per minute is enough, more and it can cause high CPU usage
|
||||
# better here would be to use something like self.app.config.exit.wait(1), but we can't get to 'app' from here
|
||||
for i in range(30):
|
||||
time.sleep(2)
|
||||
if self.stop_thread:
|
||||
for i in range(120):
|
||||
time.sleep(0.5)
|
||||
if self.stop_thread or self.needs_write_urgent:
|
||||
break
|
||||
|
||||
# Go through the datastore path and remove any snapshots that are not mentioned in the index
|
||||
@@ -394,7 +429,69 @@ class ChangeDetectionStore:
|
||||
import pathlib
|
||||
|
||||
# Only in the sub-directories
|
||||
for item in pathlib.Path(self.datastore_path).rglob("*/*txt"):
|
||||
if not str(item) in index:
|
||||
print ("Removing",item)
|
||||
unlink(item)
|
||||
for uuid in self.data['watching']:
|
||||
for item in pathlib.Path(self.datastore_path).rglob(uuid+"/*.txt"):
|
||||
if not str(item) in index:
|
||||
print ("Removing",item)
|
||||
unlink(item)
|
||||
|
||||
def import_proxy_list(self, filename):
|
||||
import csv
|
||||
with open(filename, newline='') as f:
|
||||
reader = csv.reader(f, skipinitialspace=True)
|
||||
# @todo This loop could be improved
|
||||
l = []
|
||||
for row in reader:
|
||||
if len(row):
|
||||
if len(row)>=2:
|
||||
l.append(tuple(row[:2]))
|
||||
else:
|
||||
l.append(tuple([row[0], row[0]]))
|
||||
self.proxy_list = l if len(l) else None
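As noted in the constructor comment, import_proxy_list() reads a small CSV file (proxies.txt in the datastore directory) with one proxy per line, either as 'name, address' or just 'name'. A hedged sketch of an input file and the structure it produces; the addresses are invented:

# Example proxies.txt contents (illustrative values only):
example_proxies_txt = """squid-us, http://10.0.0.1:3128
squid-eu, http://10.0.0.2:3128
http://10.0.0.3:3128
"""
# import_proxy_list() would leave self.proxy_list looking like:
expected_proxy_list = [
    ('squid-us', 'http://10.0.0.1:3128'),
    ('squid-eu', 'http://10.0.0.2:3128'),
    ('http://10.0.0.3:3128', 'http://10.0.0.3:3128'),
]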
|
||||
|
||||
|
||||
# Run all updates
|
||||
# IMPORTANT - Each update may run even on a fresh install where the schema is already correct
|
||||
# So therefore - each `update_n` should be very careful about checking if it actually needs to run
|
||||
# Probably we should bump the current update schema version with each tag release version?
|
||||
def run_updates(self):
|
||||
import inspect
|
||||
import shutil
|
||||
|
||||
updates_available = []
|
||||
for i, o in inspect.getmembers(self, predicate=inspect.ismethod):
|
||||
m = re.search(r'update_(\d+)$', i)
|
||||
if m:
|
||||
updates_available.append(int(m.group(1)))
|
||||
updates_available.sort()
|
||||
|
||||
for update_n in updates_available:
|
||||
if update_n > self.__data['settings']['application']['schema_version']:
|
||||
print ("Applying update_{}".format((update_n)))
|
||||
# Won't exist on fresh installs
|
||||
if os.path.exists(self.json_store_path):
|
||||
shutil.copyfile(self.json_store_path, self.datastore_path+"/url-watches-before-{}.json".format(update_n))
|
||||
|
||||
try:
|
||||
update_method = getattr(self, "update_{}".format(update_n))()
|
||||
except Exception as e:
|
||||
print("Error while trying update_{}".format((update_n)))
|
||||
print(e)
|
||||
# Don't run any more updates
|
||||
return
|
||||
else:
|
||||
# Bump the version, important
|
||||
self.__data['settings']['application']['schema_version'] = update_n
|
||||
|
||||
# Convert minutes to seconds on settings and each watch
|
||||
def update_1(self):
|
||||
if self.data['settings']['requests'].get('minutes_between_check'):
|
||||
self.data['settings']['requests']['time_between_check']['minutes'] = self.data['settings']['requests']['minutes_between_check']
|
||||
# Remove the default 'hours' that is set from the model
|
||||
self.data['settings']['requests']['time_between_check']['hours'] = None
|
||||
|
||||
for uuid, watch in self.data['watching'].items():
|
||||
if 'minutes_between_check' in watch:
|
||||
# Only upgrade individual watch time if it was set
|
||||
if watch.get('minutes_between_check', False):
|
||||
self.data['watching'][uuid]['time_between_check']['minutes'] = watch['minutes_between_check']
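Following the convention that run_updates() establishes (methods named update_N, discovered by inspection and applied in order above the stored schema_version), a later migration would simply be added as the next numbered method. A hypothetical sketch only; update_2 and the renamed field do not exist in this changeset:

# Hypothetical example of a future schema migration - not part of this commit
def update_2(self):
    # Every update must be safe to run on a fresh install, so check before touching anything
    for uuid, watch in self.data['watching'].items():
        if 'old_field_name' in watch:
            self.data['watching'][uuid]['new_field_name'] = watch.get('old_field_name')
            del self.data['watching'][uuid]['old_field_name']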
|
||||
|
||||
@@ -2,13 +2,12 @@
|
||||
{% from '_helpers.jinja' import render_field %}
|
||||
|
||||
{% macro render_common_settings_form(form, current_base_url, emailprefix) %}
|
||||
|
||||
<div class="pure-control-group">
|
||||
{{ render_field(form.notification_urls, rows=5, placeholder="Examples:
|
||||
Gitter - gitter://token/room
|
||||
Office365 - o365://TenantID:AccountEmail/ClientID/ClientSecret/TargetEmail
|
||||
AWS SNS - sns://AccessKeyID/AccessSecretKey/RegionName/+PhoneNo
|
||||
SMTPS - mailtos://user:pass@mail.domain.com?to=receivingAddress@example.com")
|
||||
SMTPS - mailtos://user:pass@mail.domain.com?to=receivingAddress@example.com", class="notification-urls")
|
||||
}}
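The schemes listed in the placeholder above (gitter://, o365://, sns://, mailtos:// and so on) are Apprise notification URLs. A minimal, hedged sketch of exercising such a URL with the apprise library; the exact wiring inside changedetection.io may differ:

# Sketch only - the recipient address is an example
import apprise
a = apprise.Apprise()
a.add('mailtos://user:pass@mail.domain.com?to=receivingAddress@example.com')
a.notify(title='Change detected', body='The watched page changed.')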
|
||||
<div class="pure-form-message-inline">
|
||||
<ul>
|
||||
@@ -22,20 +21,19 @@
|
||||
<a id="send-test-notification" class="pure-button button-secondary button-xsmall" style="font-size: 70%">Send test notification</a>
|
||||
{% if emailprefix %}
|
||||
<a id="add-email-helper" class="pure-button button-secondary button-xsmall" style="font-size: 70%">Add email</a>
|
||||
|
||||
{% endif %}
|
||||
</div>
|
||||
<div id="notification-customisation" class="pure-control-group">
|
||||
<div class="pure-control-group">
|
||||
{{ render_field(form.notification_title, class="m-d") }}
|
||||
{{ render_field(form.notification_title, class="m-d notification-title") }}
|
||||
<span class="pure-form-message-inline">Title for all notifications</span>
|
||||
</div>
|
||||
<div class="pure-control-group">
|
||||
{{ render_field(form.notification_body , rows=5) }}
|
||||
{{ render_field(form.notification_body , rows=5, class="notification-body") }}
|
||||
<span class="pure-form-message-inline">Body for all notifications</span>
|
||||
</div>
|
||||
<div class="pure-control-group">
|
||||
{{ render_field(form.notification_format , rows=5) }}
|
||||
{{ render_field(form.notification_format , rows=5, class="notification-format") }}
|
||||
<span class="pure-form-message-inline">Format for all notifications</span>
|
||||
</div>
|
||||
<div class="pure-controls">
|
||||
|
||||
@@ -94,6 +94,13 @@
|
||||
</ul>
|
||||
{% endif %}
|
||||
{% endwith %}
|
||||
|
||||
{% if session['share-link'] %}
|
||||
<ul class="messages with-share-link">
|
||||
<li class="message">Share this link: <span id="share-link">{{ session['share-link'] }}</span> <img style="height: 1em;display:inline-block;" src="{{url_for('static_content', group='images', filename='copy.svg')}}" /></li>
|
||||
</ul>
|
||||
{% endif %}
|
||||
|
||||
{% block content %}
|
||||
|
||||
{% endblock %}
|
||||
|
||||
@@ -9,6 +9,7 @@
|
||||
const email_notification_prefix=JSON.parse('{{ emailprefix|tojson }}');
|
||||
{% endif %}
|
||||
</script>
|
||||
<script type="text/javascript" src="{{url_for('static_content', group='js', filename='watch-settings.js')}}" defer></script>
|
||||
<script type="text/javascript" src="{{url_for('static_content', group='js', filename='notifications.js')}}" defer></script>
|
||||
|
||||
<div class="edit-form monospaced-textarea">
|
||||
@@ -41,7 +42,7 @@
|
||||
<span class="pure-form-message-inline">Organisational tag/group name used in the main listing page</span>
|
||||
</div>
|
||||
<div class="pure-control-group">
|
||||
{{ render_field(form.minutes_between_check) }}
|
||||
{{ render_field(form.time_between_check, class="time-check-widget") }}
|
||||
{% if has_empty_checktime %}
|
||||
<span class="pure-form-message-inline">Currently using the <a
|
||||
href="{{ url_for('settings_page', uuid=uuid) }}">default global settings</a>, change to another value if you want to be specific.</span>
|
||||
@@ -57,20 +58,25 @@
|
||||
</div>
|
||||
|
||||
<div class="tab-pane-inner" id="request">
|
||||
<div class="pure-control-group">
|
||||
{{ render_field(form.fetch_backend) }}
|
||||
<div class="pure-control-group inline-radio">
|
||||
{{ render_field(form.fetch_backend, class="fetch-backend") }}
|
||||
<span class="pure-form-message-inline">
|
||||
<p>Use the <strong>Basic</strong> method (default) where your watched site doesn't need Javascript to render.</p>
|
||||
<p>The <strong>Chrome/Javascript</strong> method requires a network connection to a running WebDriver+Chrome server, set by the ENV var 'WEBDRIVER_URL'. </p>
|
||||
</span>
|
||||
</div>
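For reference, the WEBDRIVER_URL mentioned above points at a standalone Selenium/Chrome endpoint (for example a selenium/standalone-chrome container). A hedged sketch of talking to such an endpoint directly; the fetcher inside changedetection.io performs its own setup:

# Sketch only - assumes a Selenium/Chrome service reachable at WEBDRIVER_URL
import os
from selenium import webdriver
driver = webdriver.Remote(
    command_executor=os.getenv('WEBDRIVER_URL', 'http://browser-chrome:4444/wd/hub'),
    options=webdriver.ChromeOptions())
driver.get('https://example.com')
print(driver.title)
driver.quit()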
|
||||
|
||||
<hr/>
|
||||
<fieldset class="pure-group">
|
||||
|
||||
<span class="pure-form-message-inline">
|
||||
{% if form.proxy %}
|
||||
<div class="pure-control-group inline-radio">
|
||||
{{ render_field(form.proxy, class="fetch-backend-proxy") }}
|
||||
<span class="pure-form-message-inline">
|
||||
Choose a proxy for this watch
|
||||
</span>
|
||||
</div>
|
||||
{% endif %}
|
||||
<fieldset class="pure-group" id="requests-override-options">
|
||||
<div class="pure-form-message-inline">
|
||||
<strong>Request override is currently only used by the <i>Basic fast Plaintext/HTTP Client</i> method.</strong>
|
||||
</span>
|
||||
</div>
|
||||
<div class="pure-control-group">
|
||||
{{ render_field(form.method) }}
|
||||
</div>
|
||||
@@ -125,7 +131,7 @@ User-Agent: wonderbra 1.0") }}
|
||||
<li>CSS - Limit text to this CSS rule, only text matching this CSS rule is included.</li>
|
||||
<li>JSON - Limit text to this JSON rule, using <a href="https://pypi.org/project/jsonpath-ng/">JSONPath</a>, prefix with <code>"json:"</code>, use <code>json:$</code> to force re-formatting if required, <a
|
||||
href="https://jsonpath.com/" target="new">test your JSONPath here</a></li>
|
||||
<li>XPath - Limit text to this XPath rule, simply start with a forward-slash, example <code>//*[contains(@class, 'sametext')]</code>, <a
|
||||
<li>XPath - Limit text to this XPath rule, simply start with a forward-slash, example <code>//*[contains(@class, 'sametext')]</code> or <code>xpath://*[contains(@class, 'sametext')]</code>, <a
|
||||
href="http://xpather.com/" target="new">test your XPath here</a></li>
|
||||
</ul>
|
||||
Please be sure that you thoroughly understand how to write CSS or JSONPath, XPath selector rules before filing an issue on GitHub! <a
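To make the three filter flavours above concrete, here is a hedged sketch of roughly what each kind of rule resolves to; the selector strings are examples, and the project's own html_tools helpers may apply them differently:

# Illustrative only - example selectors, not taken from this changeset
from lxml import html
from jsonpath_ng import parse as jsonpath_parse
doc = html.fromstring("<div class='sametext'>hello</div>")
# CSS: ".sametext" keeps only text inside elements matching the rule (requires cssselect)
css_matches = doc.cssselect('.sametext')
# XPath: "//*[contains(@class, 'sametext')]", or the equivalent "xpath:" prefixed form
xpath_matches = doc.xpath("//*[contains(@class, 'sametext')]")
# JSONPath: "json:$.price" would be applied to a JSON document instead of HTML
price = [m.value for m in jsonpath_parse('$.price').find({'price': 9.99})]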
|
||||
|
||||
@@ -1,30 +1,86 @@
|
||||
{% extends 'base.html' %}
|
||||
|
||||
{% block content %}
|
||||
<div class="edit-form">
|
||||
<div class="inner">
|
||||
<script type="text/javascript" src="{{url_for('static_content', group='js', filename='tabs.js')}}" defer></script>
|
||||
<div class="edit-form monospaced-textarea">
|
||||
|
||||
<div class="tabs collapsable">
|
||||
<ul>
|
||||
<li class="tab" id="default-tab"><a href="#url-list">URL List</a></li>
|
||||
<li class="tab"><a href="#distill-io">Distill.io</a></li>
|
||||
</ul>
|
||||
</div>
|
||||
|
||||
<div class="box-wrap inner">
|
||||
<form class="pure-form pure-form-aligned" action="{{url_for('import_page')}}" method="POST">
|
||||
<input type="hidden" name="csrf_token" value="{{ csrf_token() }}"/>
|
||||
<fieldset class="pure-group">
|
||||
<legend>
|
||||
Enter one URL per line, and optionally add tags for each URL after a space, delineated by comma (,):
|
||||
<br>
|
||||
<code>https://example.com tag1, tag2, last tag</code>
|
||||
<br>
|
||||
URLs which do not pass validation will stay in the textarea.
|
||||
</legend>
|
||||
|
||||
<div class="tab-pane-inner" id="url-list">
|
||||
<fieldset class="pure-group">
|
||||
<legend>
|
||||
Enter one URL per line, and optionally add tags for each URL after a space, delineated by comma
|
||||
(,):
|
||||
<br>
|
||||
<code>https://example.com tag1, tag2, last tag</code>
|
||||
<br>
|
||||
URLs which do not pass validation will stay in the textarea.
|
||||
</legend>
|
||||
|
||||
<textarea name="urls" class="pure-input-1-2" placeholder="https://"
|
||||
style="width: 100%;
|
||||
|
||||
<textarea name="urls" class="pure-input-1-2" placeholder="https://"
|
||||
style="width: 100%;
|
||||
font-family:monospace;
|
||||
white-space: pre;
|
||||
overflow-wrap: normal;
|
||||
overflow-x: scroll;" rows="25">{{ remaining }}</textarea>
|
||||
</fieldset>
|
||||
overflow-x: scroll;" rows="25">{{ import_url_list_remaining }}</textarea>
|
||||
</fieldset>
|
||||
|
||||
|
||||
</div>
|
||||
|
||||
<div class="tab-pane-inner" id="distill-io">
|
||||
|
||||
|
||||
<fieldset class="pure-group">
|
||||
<legend>
|
||||
Copy and paste your Distill.io watch 'export' file; this should be a JSON file.<br/>
|
||||
This is <i>experimental</i>, supported fields are <code>name</code>, <code>uri</code>, <code>tags</code>, <code>config:selections</code>, the rest (including <code>schedule</code>) are ignored.
|
||||
<br/>
|
||||
<p>
|
||||
How to export? <a href="https://distill.io/docs/web-monitor/how-export-and-import-monitors/">https://distill.io/docs/web-monitor/how-export-and-import-monitors/</a><br/>
|
||||
Be sure to set your default fetcher to Chrome if required.<br/>
|
||||
</p>
|
||||
</legend>
|
||||
|
||||
|
||||
<textarea name="distill-io" class="pure-input-1-2" style="width: 100%;
|
||||
font-family:monospace;
|
||||
white-space: pre;
|
||||
overflow-wrap: normal;
|
||||
overflow-x: scroll;" placeholder="Example Distill.io JSON export file
|
||||
|
||||
{
|
||||
"client": {
|
||||
"local": 1
|
||||
},
|
||||
"data": [
|
||||
{
|
||||
"name": "Unraid | News",
|
||||
"uri": "https://unraid.net/blog",
|
||||
"config": "{\"selections\":[{\"frames\":[{\"index\":0,\"excludes\":[],\"includes\":[{\"type\":\"xpath\",\"expr\":\"(//div[@id='App']/div[contains(@class,'flex')]/main[contains(@class,'relative')]/section[contains(@class,'relative')]/div[@class='container']/div[contains(@class,'flex')]/div[contains(@class,'w-full')])[1]\"}]}],\"dynamic\":true,\"delay\":2}],\"ignoreEmptyText\":true,\"includeStyle\":false,\"dataAttr\":\"text\"}",
|
||||
"tags": [],
|
||||
"content_type": 2,
|
||||
"state": 40,
|
||||
"schedule": "{\"type\":\"INTERVAL\",\"params\":{\"interval\":4447}}",
|
||||
"ts": "2022-03-27T15:51:15.667Z"
|
||||
}
|
||||
]
|
||||
}
|
||||
" rows="25">{{ original_distill_json }}</textarea>
|
||||
</fieldset>
|
||||
</div>
|
||||
<button type="submit" class="pure-button pure-input-1-2 pure-button-primary">Import</button>
|
||||
</form>
|
||||
</div>
|
||||
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{% endblock %}
|
||||
|
||||
@@ -7,7 +7,7 @@
|
||||
<input type="hidden" name="csrf_token" value="{{ csrf_token() }}"/>
|
||||
<fieldset>
|
||||
<div class="pure-control-group">
|
||||
This will remove all version snapshots/data, but keep your list of URLs. <br/>
|
||||
This will remove ALL version snapshots/data, but keep your list of URLs. <br/>
|
||||
You may like to use the <strong>BACKUP</strong> link first.<br/>
|
||||
</div>
|
||||
<br/>
|
||||
@@ -17,12 +17,6 @@
|
||||
<span class="pure-form-message-inline">Type in the word <strong>scrub</strong> to confirm that you understand!</span>
|
||||
</div>
|
||||
<br/>
|
||||
<div class="pure-control-group">
|
||||
<label for="confirmtext">Optional: Limit deletion of snapshots to snapshots <i>newer</i> than date/time</label>
|
||||
<input type="datetime-local" id="limit_date" name="limit_date" />
|
||||
<span class="pure-form-message-inline">dd/mm/yyyy hh:mm (24 hour format)</span>
|
||||
</div>
|
||||
<br/>
|
||||
<div class="pure-control-group">
|
||||
<button type="submit" class="pure-button pure-button-primary">Scrub!</button>
|
||||
</div>
|
||||
|
||||
@@ -9,7 +9,6 @@
|
||||
const email_notification_prefix=JSON.parse('{{emailprefix|tojson}}');
|
||||
{% endif %}
|
||||
</script>
|
||||
<script type="text/javascript" src="{{url_for('static_content', group='js', filename='settings.js')}}" defer></script>
|
||||
<script type="text/javascript" src="{{url_for('static_content', group='js', filename='tabs.js')}}" defer></script>
|
||||
<script type="text/javascript" src="{{url_for('static_content', group='js', filename='notifications.js')}}" defer></script>
|
||||
|
||||
@@ -28,7 +27,7 @@
|
||||
<div class="tab-pane-inner" id="general">
|
||||
<fieldset>
|
||||
<div class="pure-control-group">
|
||||
{{ render_field(form.requests.form.minutes_between_check) }}
|
||||
{{ render_field(form.requests.form.time_between_check, class="time-check-widget") }}
|
||||
<span class="pure-form-message-inline">Default time for all watches, when the watch does not have a specific time setting.</span>
|
||||
</div>
|
||||
<div class="pure-control-group">
|
||||
@@ -61,7 +60,14 @@
|
||||
{{ render_checkbox_field(form.application.form.real_browser_save_screenshot) }}
|
||||
<span class="pure-form-message-inline">When using a Chrome browser, a screenshot from the last check will be available on the Diff page</span>
|
||||
</div>
|
||||
|
||||
{% if form.requests.proxy %}
|
||||
<div class="pure-control-group inline-radio">
|
||||
{{ render_field(form.requests.form.proxy, class="fetch-backend-proxy") }}
|
||||
<span class="pure-form-message-inline">
|
||||
Choose a default proxy for all watches
|
||||
</span>
|
||||
</div>
|
||||
{% endif %}
|
||||
</fieldset>
|
||||
</div>
|
||||
|
||||
@@ -74,8 +80,8 @@
|
||||
</div>
|
||||
|
||||
<div class="tab-pane-inner" id="fetching">
|
||||
<div class="pure-control-group">
|
||||
{{ render_field(form.application.form.fetch_backend) }}
|
||||
<div class="pure-control-group inline-radio">
|
||||
{{ render_field(form.application.form.fetch_backend, class="fetch-backend") }}
|
||||
<span class="pure-form-message-inline">
|
||||
<p>Use the <strong>Basic</strong> method (default) where your watched sites don't need Javascript to render.</p>
|
||||
<p>The <strong>Chrome/Javascript</strong> method requires a network connection to a running WebDriver+Chrome server, set by the ENV var 'WEBDRIVER_URL'. </p>
|
||||
|
||||
@@ -13,8 +13,7 @@
|
||||
{{ render_simple_field(form.tag, value=active_tag if active_tag else '', placeholder="watch group") }}
|
||||
<button type="submit" class="pure-button pure-button-primary">Watch</button>
|
||||
</fieldset>
|
||||
<!-- add extra stuff, like do a http POST and send headers -->
|
||||
<!-- user/pass r = requests.get('https://api.github.com/user', auth=('user', 'pass')) -->
|
||||
<span style="color:#eee; font-size: 80%;"><img style="height: 1em;display:inline-block;" src="{{url_for('static_content', group='images', filename='spread.svg')}}" /> Tip: You can also add 'shared' watches. <a href="#">More info</a></a></span>
|
||||
</form>
|
||||
<div>
|
||||
<a href="{{url_for('index')}}" class="pure-button button-tag {{'active' if not active_tag }}">All</a>
|
||||
@@ -46,12 +45,15 @@
|
||||
{% if watch.last_error is defined and watch.last_error != False %}error{% endif %}
|
||||
{% if watch.last_notification_error is defined and watch.last_notification_error != False %}error{% endif %}
|
||||
{% if watch.paused is defined and watch.paused != False %}paused{% endif %}
|
||||
{% if watch.newest_history_key| int > watch.last_viewed| int %}unviewed{% endif %}">
|
||||
{% if watch.newest_history_key| int > watch.last_viewed| int %}unviewed{% endif %}
|
||||
{% if watch.uuid in queued_uuids %}queued{% endif %}">
|
||||
<td class="inline">{{ loop.index }}</td>
|
||||
<td class="inline paused-state state-{{watch.paused}}"><a href="{{url_for('index', pause=watch.uuid, tag=active_tag)}}"><img src="{{url_for('static_content', group='images', filename='pause.svg')}}" alt="Pause" title="Pause"/></a></td>
|
||||
|
||||
<td class="title-col inline">{{watch.title if watch.title is not none and watch.title|length > 0 else watch.url}}
|
||||
<a class="external" target="_blank" rel="noopener" href="{{ watch.url.replace('source:','') }}"></a>
|
||||
<a href="{{url_for('api_share_put_watch', uuid=watch.uuid)}}"><img style="height: 1em;display:inline-block;" src="{{url_for('static_content', group='images', filename='spread.svg')}}" /></a>
|
||||
|
||||
{%if watch.fetch_backend == "html_webdriver" %}<img style="height: 1em; display:inline-block;" src="{{url_for('static_content', group='images', filename='Google-Chrome-icon.png')}}" />{% endif %}
|
||||
|
||||
{% if watch.last_error is defined and watch.last_error != False %}
|
||||
@@ -72,8 +74,8 @@
|
||||
{% endif %}
|
||||
</td>
|
||||
<td>
|
||||
<a href="{{ url_for('api_watch_checknow', uuid=watch.uuid, tag=request.args.get('tag')) }}"
|
||||
class="pure-button button-small pure-button-primary">Recheck</a>
|
||||
<a {% if watch.uuid in queued_uuids %}disabled="true"{% endif %} href="{{ url_for('api_watch_checknow', uuid=watch.uuid, tag=request.args.get('tag')) }}"
|
||||
class="recheck pure-button button-small pure-button-primary">{% if watch.uuid in queued_uuids %}Queued{% else %}Recheck{% endif %}</a>
|
||||
<a href="{{ url_for('edit_page', uuid=watch.uuid)}}" class="pure-button button-small pure-button-primary">Edit</a>
|
||||
{% if watch.history|length >= 2 %}
|
||||
<a href="{{ url_for('diff_history_page', uuid=watch.uuid) }}" target="{{watch.uuid}}" class="pure-button button-small pure-button-primary diff-link">Diff</a>
|
||||
|
||||
@@ -13,7 +13,7 @@ def test_check_access_control(app, client):
|
||||
res = c.post(
|
||||
url_for("settings_page"),
|
||||
data={"application-password": "foobar",
|
||||
"requests-minutes_between_check": 180,
|
||||
"requests-time_between_check-minutes": 180,
|
||||
'application-fetch_backend': "html_requests"},
|
||||
follow_redirects=True
|
||||
)
|
||||
@@ -46,7 +46,7 @@ def test_check_access_control(app, client):
|
||||
assert b"BACKUP" in res.data
|
||||
assert b"IMPORT" in res.data
|
||||
assert b"LOG OUT" in res.data
|
||||
assert b"minutes_between_check" in res.data
|
||||
assert b"time_between_check-minutes" in res.data
|
||||
assert b"fetch_backend" in res.data
|
||||
|
||||
##################################################
|
||||
@@ -55,7 +55,7 @@ def test_check_access_control(app, client):
|
||||
res = c.post(
|
||||
url_for("settings_page"),
|
||||
data={
|
||||
"requests-minutes_between_check": 180,
|
||||
"requests-time_between_check-minutes": 180,
|
||||
"application-fetch_backend": "html_webdriver",
|
||||
"application-removepassword_button": "Remove password"
|
||||
},
|
||||
@@ -70,7 +70,7 @@ def test_check_access_control(app, client):
|
||||
res = c.post(
|
||||
url_for("settings_page"),
|
||||
data={"application-password": "",
|
||||
"requests-minutes_between_check": 180,
|
||||
"requests-time_between_check-minutes": 180,
|
||||
'application-fetch_backend': "html_requests"},
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
@@ -109,7 +109,7 @@ def test_check_basic_change_detection_functionality(client, live_server):
|
||||
# Enable auto pickup of <title> in settings
|
||||
res = client.post(
|
||||
url_for("settings_page"),
|
||||
data={"application-extract_title_as_title": "1", "requests-minutes_between_check": 180, 'application-fetch_backend': "html_requests"},
|
||||
data={"application-extract_title_as_title": "1", "requests-time_between_check-minutes": 180, 'application-fetch_backend': "html_requests"},
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
|
||||
@@ -171,11 +171,24 @@ def test_check_ignore_text_functionality(client, live_server):
|
||||
def test_check_global_ignore_text_functionality(client, live_server):
|
||||
sleep_time_for_fetch_thread = 3
|
||||
|
||||
# Give the endpoint time to spin up
|
||||
time.sleep(1)
|
||||
|
||||
ignore_text = "XXXXX\r\nYYYYY\r\nZZZZZ"
|
||||
set_original_ignore_response()
|
||||
|
||||
# Give the endpoint time to spin up
|
||||
time.sleep(1)
|
||||
# Goto the settings page, add our ignore text
|
||||
res = client.post(
|
||||
url_for("settings_page"),
|
||||
data={
|
||||
"requests-time_between_check-minutes": 180,
|
||||
"application-global_ignore_text": ignore_text,
|
||||
'application-fetch_backend': "html_requests"
|
||||
},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"Settings updated." in res.data
|
||||
|
||||
|
||||
# Add our URL to the import page
|
||||
test_url = url_for('test_endpoint', _external=True)
|
||||
@@ -192,17 +205,6 @@ def test_check_global_ignore_text_functionality(client, live_server):
|
||||
# Give the thread time to pick it up
|
||||
time.sleep(sleep_time_for_fetch_thread)
|
||||
|
||||
# Goto the settings page, add our ignore text
|
||||
res = client.post(
|
||||
url_for("settings_page"),
|
||||
data={
|
||||
"requests-minutes_between_check": 180,
|
||||
"application-global_ignore_text": ignore_text,
|
||||
'application-fetch_backend': "html_requests"
|
||||
},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"Settings updated." in res.data
|
||||
|
||||
# Goto the edit page of the item, add our ignore text
|
||||
# Add our URL to the import page
|
||||
@@ -225,12 +227,16 @@ def test_check_global_ignore_text_functionality(client, live_server):
|
||||
# Give the thread time to pick it up
|
||||
time.sleep(sleep_time_for_fetch_thread)
|
||||
|
||||
# so that we are sure everything is viewed and in a known 'nothing changed' state
|
||||
res = client.get(url_for("diff_history_page", uuid="first"))
|
||||
|
||||
# It should report nothing found (no new 'unviewed' class)
|
||||
res = client.get(url_for("index"))
|
||||
assert b'unviewed' not in res.data
|
||||
assert b'/test-endpoint' in res.data
|
||||
|
||||
# Make a change
|
||||
|
||||
# Make a change which includes the ignore text
|
||||
set_modified_ignore_response()
|
||||
|
||||
# Trigger a check
|
||||
@@ -243,7 +249,7 @@ def test_check_global_ignore_text_functionality(client, live_server):
|
||||
assert b'unviewed' not in res.data
|
||||
assert b'/test-endpoint' in res.data
|
||||
|
||||
# Just to be sure.. set a regular modified change..
|
||||
# Just to be sure.. set a regular modified change that will trigger it
|
||||
set_modified_original_ignore_response()
|
||||
client.get(url_for("api_watch_checknow"), follow_redirects=True)
|
||||
time.sleep(sleep_time_for_fetch_thread)
|
||||
|
||||
@@ -51,12 +51,11 @@ def test_render_anchor_tag_content_true(client, live_server):
|
||||
# set original html text
|
||||
set_original_ignore_response()
|
||||
|
||||
# Goto the settings page, choose not to ignore links
|
||||
# Goto the settings page, choose to ignore links (dont select/send "application-render_anchor_tag_content")
|
||||
res = client.post(
|
||||
url_for("settings_page"),
|
||||
data={
|
||||
"requests-minutes_between_check": 180,
|
||||
"application-render_anchor_tag_content": "true",
|
||||
"requests-time_between_check-minutes": 180,
|
||||
"application-fetch_backend": "html_requests",
|
||||
},
|
||||
follow_redirects=True,
|
||||
@@ -85,6 +84,30 @@ def test_render_anchor_tag_content_true(client, live_server):
|
||||
# Give the thread time to pick it up
|
||||
time.sleep(sleep_time_for_fetch_thread)
|
||||
|
||||
# We should not see the rendered anchor tag
|
||||
res = client.get(url_for("preview_page", uuid="first"))
|
||||
assert '(/modified_link)' not in res.data.decode()
|
||||
|
||||
# Goto the settings page, ENABLE render anchor tag
|
||||
res = client.post(
|
||||
url_for("settings_page"),
|
||||
data={
|
||||
"requests-time_between_check-minutes": 180,
|
||||
"application-render_anchor_tag_content": "true",
|
||||
"application-fetch_backend": "html_requests",
|
||||
},
|
||||
follow_redirects=True,
|
||||
)
|
||||
assert b"Settings updated." in res.data
|
||||
|
||||
# Trigger a check
|
||||
client.get(url_for("api_watch_checknow"), follow_redirects=True)
|
||||
|
||||
# Give the thread time to pick it up
|
||||
time.sleep(sleep_time_for_fetch_thread)
|
||||
|
||||
|
||||
|
||||
# check that the anchor tag content is rendered
|
||||
res = client.get(url_for("preview_page", uuid="first"))
|
||||
assert '(/modified_link)' in res.data.decode()
|
||||
@@ -100,120 +123,3 @@ def test_render_anchor_tag_content_true(client, live_server):
|
||||
follow_redirects=True)
|
||||
assert b'Deleted' in res.data
|
||||
|
||||
|
||||
def test_render_anchor_tag_content_false(client, live_server):
|
||||
"""Testing that anchor tag content changes are ignored when
|
||||
render_anchor_tag_content setting is set to false"""
|
||||
sleep_time_for_fetch_thread = 3
|
||||
|
||||
# Give the endpoint time to spin up
|
||||
time.sleep(1)
|
||||
|
||||
# set the original html text
|
||||
set_original_ignore_response()
|
||||
|
||||
# Goto the settings page, choose to ignore hyperlinks
|
||||
res = client.post(
|
||||
url_for("settings_page"),
|
||||
data={
|
||||
"requests-minutes_between_check": 180,
|
||||
"application-render_anchor_tag_content": "false",
|
||||
"application-fetch_backend": "html_requests",
|
||||
},
|
||||
follow_redirects=True,
|
||||
)
|
||||
assert b"Settings updated." in res.data
|
||||
|
||||
# Add our URL to the import page
|
||||
test_url = url_for("test_endpoint", _external=True)
|
||||
res = client.post(
|
||||
url_for("import_page"), data={"urls": test_url}, follow_redirects=True
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
|
||||
time.sleep(sleep_time_for_fetch_thread)
|
||||
# Trigger a check
|
||||
client.get(url_for("api_watch_checknow"), follow_redirects=True)
|
||||
|
||||
# set a new html text, with a modified link
|
||||
set_modified_ignore_response()
|
||||
time.sleep(sleep_time_for_fetch_thread)
|
||||
|
||||
# Trigger a check
|
||||
client.get(url_for("api_watch_checknow"), follow_redirects=True)
|
||||
|
||||
# Give the thread time to pick it up
|
||||
time.sleep(sleep_time_for_fetch_thread)
|
||||
|
||||
# check that the anchor tag content is not rendered
|
||||
res = client.get(url_for("preview_page", uuid="first"))
|
||||
assert '(/modified_link)' not in res.data.decode()
|
||||
|
||||
# even though the link has changed, we shouldn't detect a change since
|
||||
# we selected to not render anchor tag content (no new 'unviewed' class)
|
||||
res = client.get(url_for("index"))
|
||||
assert b"unviewed" not in res.data
|
||||
assert b"/test-endpoint" in res.data
|
||||
|
||||
# Cleanup everything
|
||||
res = client.get(url_for("api_delete", uuid="all"), follow_redirects=True)
|
||||
assert b'Deleted' in res.data
|
||||
|
||||
|
||||
def test_render_anchor_tag_content_default(client, live_server):
|
||||
"""Testing that anchor tag content changes are ignored when the
|
||||
render_anchor_tag_content setting is not explicitly selected"""
|
||||
sleep_time_for_fetch_thread = 3
|
||||
|
||||
# Give the endpoint time to spin up
|
||||
time.sleep(1)
|
||||
|
||||
# set the original html text
|
||||
set_original_ignore_response()
|
||||
|
||||
# Goto the settings page, not passing the render_anchor_tag_content setting
|
||||
res = client.post(
|
||||
url_for("settings_page"),
|
||||
data={
|
||||
"requests-minutes_between_check": 180,
|
||||
"application-fetch_backend": "html_requests",
|
||||
},
|
||||
follow_redirects=True,
|
||||
)
|
||||
assert b"Settings updated." in res.data
|
||||
|
||||
# Add our URL to the import page
|
||||
test_url = url_for("test_endpoint", _external=True)
|
||||
res = client.post(
|
||||
url_for("import_page"), data={"urls": test_url}, follow_redirects=True
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
|
||||
time.sleep(sleep_time_for_fetch_thread)
|
||||
# Trigger a check
|
||||
client.get(url_for("api_watch_checknow"), follow_redirects=True)
|
||||
|
||||
# set a new html text, with a modified link
|
||||
set_modified_ignore_response()
|
||||
time.sleep(sleep_time_for_fetch_thread)
|
||||
|
||||
# Trigger a check
|
||||
client.get(url_for("api_watch_checknow"), follow_redirects=True)
|
||||
|
||||
# Give the thread time to pick it up
|
||||
time.sleep(sleep_time_for_fetch_thread)
|
||||
|
||||
# check that the anchor tag content is not rendered
|
||||
res = client.get(url_for("preview_page", uuid="first"))
|
||||
assert '(/modified_link)' not in res.data.decode()
|
||||
|
||||
# even though the link has changed, we shouldn't detect a change since
|
||||
# we did not select the setting and the default behaviour is to not
|
||||
# render anchor tag content (no new 'unviewed' class)
|
||||
res = client.get(url_for("index"))
|
||||
assert b"unviewed" not in res.data
|
||||
assert b"/test-endpoint" in res.data
|
||||
|
||||
# Cleanup everything
|
||||
res = client.get(url_for("api_delete", uuid="all"), follow_redirects=True)
|
||||
assert b'Deleted' in res.data
|
||||
|
||||
@@ -51,7 +51,7 @@ def test_normal_page_check_works_with_ignore_status_code(client, live_server):
|
||||
res = client.post(
|
||||
url_for("settings_page"),
|
||||
data={
|
||||
"requests-minutes_between_check": 180,
|
||||
"requests-time_between_check-minutes": 180,
|
||||
"application-ignore_status_codes": "y",
|
||||
'application-fetch_backend': "html_requests"
|
||||
},
|
||||
|
||||
@@ -61,7 +61,7 @@ def test_check_ignore_whitespace(client, live_server):
|
||||
res = client.post(
|
||||
url_for("settings_page"),
|
||||
data={
|
||||
"requests-minutes_between_check": 180,
|
||||
"requests-time_between_check-minutes": 180,
|
||||
"application-ignore_whitespace": "y",
|
||||
"application-fetch_backend": "html_requests"
|
||||
},
|
||||
|
||||
@@ -5,18 +5,17 @@ import time
|
||||
from flask import url_for
|
||||
|
||||
from .util import live_server_setup
|
||||
|
||||
|
||||
def test_import(client, live_server):
|
||||
|
||||
def test_setup(client, live_server):
|
||||
live_server_setup(live_server)
|
||||
|
||||
def test_import(client, live_server):
|
||||
# Give the endpoint time to spin up
|
||||
time.sleep(1)
|
||||
|
||||
res = client.post(
|
||||
url_for("import_page"),
|
||||
data={
|
||||
"distill-io": "",
|
||||
"urls": """https://example.com
|
||||
https://example.com tag1
|
||||
https://example.com tag1, other tag"""
|
||||
@@ -26,3 +25,96 @@ https://example.com tag1, other tag"""
|
||||
assert b"3 Imported" in res.data
|
||||
assert b"tag1" in res.data
|
||||
assert b"other tag" in res.data
|
||||
res = client.get(url_for("api_delete", uuid="all"), follow_redirects=True)
|
||||
|
||||
# Clear flask alerts
|
||||
res = client.get( url_for("index"))
|
||||
res = client.get( url_for("index"))
|
||||
|
||||
def xtest_import_skip_url(client, live_server):
|
||||
|
||||
|
||||
# Give the endpoint time to spin up
|
||||
time.sleep(1)
|
||||
|
||||
res = client.post(
|
||||
url_for("import_page"),
|
||||
data={
|
||||
"distill-io": "",
|
||||
"urls": """https://example.com
|
||||
:ht000000broken
|
||||
"""
|
||||
},
|
||||
follow_redirects=True,
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
assert b"ht000000broken" in res.data
|
||||
assert b"1 Skipped" in res.data
|
||||
res = client.get(url_for("api_delete", uuid="all"), follow_redirects=True)
|
||||
# Clear flask alerts
|
||||
res = client.get( url_for("index"))
|
||||
|
||||
def test_import_distillio(client, live_server):
|
||||
|
||||
distill_data='''
|
||||
{
|
||||
"client": {
|
||||
"local": 1
|
||||
},
|
||||
"data": [
|
||||
{
|
||||
"name": "Unraid | News",
|
||||
"uri": "https://unraid.net/blog",
|
||||
"config": "{\\"selections\\":[{\\"frames\\":[{\\"index\\":0,\\"excludes\\":[],\\"includes\\":[{\\"type\\":\\"xpath\\",\\"expr\\":\\"(//div[@id='App']/div[contains(@class,'flex')]/main[contains(@class,'relative')]/section[contains(@class,'relative')]/div[@class='container']/div[contains(@class,'flex')]/div[contains(@class,'w-full')])[1]\\"}]}],\\"dynamic\\":true,\\"delay\\":2}],\\"ignoreEmptyText\\":true,\\"includeStyle\\":false,\\"dataAttr\\":\\"text\\"}",
|
||||
"tags": ["nice stuff", "nerd-news"],
|
||||
"content_type": 2,
|
||||
"state": 40,
|
||||
"schedule": "{\\"type\\":\\"INTERVAL\\",\\"params\\":{\\"interval\\":4447}}",
|
||||
"ts": "2022-03-27T15:51:15.667Z"
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
'''
|
||||
|
||||
# Give the endpoint time to spin up
|
||||
time.sleep(1)
|
||||
client.get(url_for("api_delete", uuid="all"), follow_redirects=True)
|
||||
res = client.post(
|
||||
url_for("import_page"),
|
||||
data={
|
||||
"distill-io": distill_data,
|
||||
"urls" : ''
|
||||
},
|
||||
follow_redirects=True,
|
||||
)
|
||||
|
||||
|
||||
assert b"Unable to read JSON file, was it broken?" not in res.data
|
||||
assert b"1 Imported from Distill.io" in res.data
|
||||
|
||||
res = client.get( url_for("edit_page", uuid="first"))
|
||||
|
||||
assert b"https://unraid.net/blog" in res.data
|
||||
assert b"Unraid | News" in res.data
|
||||
|
||||
|
||||
# flask/wtforms should recode this, check we see it
|
||||
# wtforms encodes it like id=' ,but html.escape makes it like id='
|
||||
# - so just check it manually :(
|
||||
#import json
|
||||
#import html
|
||||
#d = json.loads(distill_data)
|
||||
# embedded_d=json.loads(d['data'][0]['config'])
|
||||
# x=html.escape(embedded_d['selections'][0]['frames'][0]['includes'][0]['expr']).encode('utf-8')
|
||||
assert b"xpath:(//div[@id='App']/div[contains(@class,'flex')]/main[contains(@class,'relative')]/section[contains(@class,'relative')]/div[@class='container']/div[contains(@class,'flex')]/div[contains(@class,'w-full')])[1]" in res.data
|
||||
|
||||
# did the tags work?
|
||||
res = client.get( url_for("index"))
|
||||
|
||||
assert b"nice stuff" in res.data
|
||||
assert b"nerd-news" in res.data
|
||||
|
||||
res = client.get(url_for("api_delete", uuid="all"), follow_redirects=True)
|
||||
# Clear flask alerts
|
||||
res = client.get(url_for("index"))
|
||||
|
||||
@@ -270,6 +270,7 @@ def test_check_json_filter_bool_val(client, live_server):
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
|
||||
time.sleep(3)
|
||||
# Goto the edit page, add our ignore text
|
||||
# Add our URL to the import page
|
||||
res = client.post(
|
||||
@@ -284,6 +285,7 @@ def test_check_json_filter_bool_val(client, live_server):
|
||||
)
|
||||
assert b"Updated watch." in res.data
|
||||
|
||||
time.sleep(3)
|
||||
|
||||
# Trigger a check
|
||||
client.get(url_for("api_watch_checknow"), follow_redirects=True)
|
||||
|
||||
@@ -156,7 +156,7 @@ def test_check_notification(client, live_server):
|
||||
|
||||
# cleanup for the next
|
||||
client.get(
|
||||
url_for("api_delete", uuid="first"),
|
||||
url_for("api_delete", uuid="all"),
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
@@ -172,8 +172,7 @@ def test_notification_validation(client, live_server):
|
||||
data={"url": test_url, "tag": 'nice one'},
|
||||
follow_redirects=True
|
||||
)
|
||||
with open("xxx.bin", "wb") as f:
|
||||
f.write(res.data)
|
||||
|
||||
assert b"Watch added" in res.data
|
||||
|
||||
# Re #360 some validation
|
||||
@@ -199,7 +198,7 @@ def test_notification_validation(client, live_server):
|
||||
"application-notification_body": "Rubbish: {rubbish}\n",
|
||||
"application-notification_format": "Text",
|
||||
"application-notification_urls": "json://localhost/foobar",
|
||||
"requests-minutes_between_check": 180,
|
||||
"requests-time_between_check-minutes": 180,
|
||||
"fetch_backend": "html_requests"
|
||||
},
|
||||
follow_redirects=True
|
||||
@@ -209,6 +208,6 @@ def test_notification_validation(client, live_server):
|
||||
|
||||
# cleanup for the next
|
||||
client.get(
|
||||
url_for("api_delete", uuid="first"),
|
||||
url_for("api_delete", uuid="all"),
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
@@ -35,7 +35,7 @@ def test_check_notification_error_handling(client, live_server):
|
||||
"tag": "",
|
||||
"title": "",
|
||||
"headers": "",
|
||||
"minutes_between_check": "180",
|
||||
"time_between_check-minutes": "180",
|
||||
"fetch_backend": "html_requests",
|
||||
"trigger_check": "y"},
|
||||
follow_redirects=True
|
||||
|
||||
@@ -27,6 +27,7 @@ def test_headers_in_request(client, live_server):
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
|
||||
time.sleep(3)
|
||||
cookie_header = '_ga=GA1.2.1022228332; cookie-preferences=analytics:accepted;'
|
||||
|
||||
|
||||
|
||||
76
changedetectionio/tests/test_share_watch.py
Normal file
@@ -0,0 +1,76 @@
|
||||
#!/usr/bin/python3
|
||||
|
||||
import time
|
||||
from flask import url_for
|
||||
from urllib.request import urlopen
|
||||
from .util import set_original_response, set_modified_response, live_server_setup
|
||||
import re
|
||||
|
||||
sleep_time_for_fetch_thread = 3
|
||||
|
||||
|
||||
def test_share_watch(client, live_server):
|
||||
set_original_response()
|
||||
live_server_setup(live_server)
|
||||
|
||||
test_url = url_for('test_endpoint', _external=True)
|
||||
css_filter = ".nice-filter"
|
||||
|
||||
# Add our URL to the import page
|
||||
res = client.post(
|
||||
url_for("import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
assert b"1 Imported" in res.data
|
||||
|
||||
# Goto the edit page, add our ignore text
|
||||
# Add our URL to the import page
|
||||
res = client.post(
|
||||
url_for("edit_page", uuid="first"),
|
||||
data={"css_filter": css_filter, "url": test_url, "tag": "", "headers": "", 'fetch_backend': "html_requests"},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"Updated watch." in res.data
|
||||
# Check it saved
|
||||
res = client.get(
|
||||
url_for("edit_page", uuid="first"),
|
||||
)
|
||||
assert bytes(css_filter.encode('utf-8')) in res.data
|
||||
|
||||
# click share the link
|
||||
res = client.get(
|
||||
url_for("api_share_put_watch", uuid="first"),
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
assert b"Share this link:" in res.data
|
||||
assert b"https://changedetection.io/share/" in res.data
|
||||
|
||||
html = res.data.decode()
|
||||
share_link_search = re.search('<span id="share-link">(.*)</span>', html, re.IGNORECASE)
|
||||
assert share_link_search
|
||||
|
||||
# Now delete what we have, we will try to re-import it
|
||||
# Cleanup everything
|
||||
res = client.get(url_for("api_delete", uuid="all"), follow_redirects=True)
|
||||
assert b'Deleted' in res.data
|
||||
|
||||
# Add our URL to the import page
|
||||
res = client.post(
|
||||
url_for("import_page"),
|
||||
data={"urls": share_link_search.group(1)},
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
assert b"1 Imported" in res.data
|
||||
|
||||
# Now hit edit, we should see what we expect
|
||||
# that the import fetched the meta-data
|
||||
|
||||
# Check it saved
|
||||
res = client.get(
|
||||
url_for("edit_page", uuid="first"),
|
||||
)
|
||||
assert bytes(css_filter.encode('utf-8')) in res.data
|
||||
@@ -20,8 +20,8 @@ def test_check_watch_field_storage(client, live_server):
|
||||
|
||||
res = client.post(
|
||||
url_for("edit_page", uuid="first"),
|
||||
data={ "notification_urls": "json://myapi.com",
|
||||
"minutes_between_check": 126,
|
||||
data={ "notification_urls": "json://127.0.0.1:30000\r\njson://128.0.0.1\r\n",
|
||||
"time_between_check-minutes": 126,
|
||||
"css_filter" : ".fooclass",
|
||||
"title" : "My title",
|
||||
"ignore_text" : "ignore this",
|
||||
@@ -38,8 +38,14 @@ def test_check_watch_field_storage(client, live_server):
|
||||
url_for("edit_page", uuid="first"),
|
||||
follow_redirects=True
|
||||
)
|
||||
# checks that we don't get an error when using blank lines in the field value
|
||||
assert not b"json://127.0.0.1\n\njson" in res.data
|
||||
assert not b"json://127.0.0.1\r\n\njson" in res.data
|
||||
assert not b"json://127.0.0.1\r\n\rjson" in res.data
|
||||
|
||||
assert b"json://127.0.0.1" in res.data
|
||||
assert b"json://128.0.0.1" in res.data
|
||||
|
||||
assert b"json://myapi.com" in res.data
|
||||
assert b"126" in res.data
|
||||
assert b".fooclass" in res.data
|
||||
assert b"My title" in res.data
|
||||
@@ -56,7 +62,7 @@ def test_check_recheck_global_setting(client, live_server):
    res = client.post(
        url_for("settings_page"),
        data={
            "requests-minutes_between_check": 1566,
            "requests-time_between_check-minutes": 1566,
            'application-fetch_backend': "html_requests"
        },
        follow_redirects=True
@@ -88,7 +94,7 @@ def test_check_recheck_global_setting(client, live_server):
    res = client.post(
        url_for("settings_page"),
        data={
            "requests-minutes_between_check": 222,
            "requests-time_between_check-minutes": 222,
            'application-fetch_backend': "html_requests"
        },
        follow_redirects=True
@@ -108,7 +114,7 @@ def test_check_recheck_global_setting(client, live_server):
    res = client.post(
        url_for("edit_page", uuid="first"),
        data={"url": test_url,
              "minutes_between_check": 55,
              "time_between_check-minutes": 55,
              'fetch_backend': "html_requests"
              },
        follow_redirects=True
@@ -124,8 +130,8 @@ def test_check_recheck_global_setting(client, live_server):
    res = client.post(
        url_for("settings_page"),
        data={
            "requests-minutes_between_check": 666,
            'application-fetch_backend': "html_requests"
            "requests-time_between_check-minutes": 666,
            "application-fetch_backend": "html_requests"
        },
        follow_redirects=True
    )
@@ -134,7 +140,7 @@ def test_check_recheck_global_setting(client, live_server):
    res = client.post(
        url_for("edit_page", uuid="first"),
        data={"url": test_url,
              "minutes_between_check": "",
              "time_between_check-minutes": "",
              'fetch_backend': "html_requests"
              },
        follow_redirects=True
@@ -147,4 +153,3 @@ def test_check_recheck_global_setting(client, live_server):
        follow_redirects=True
    )
    assert b"666" in res.data

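The renames above (for example requests-minutes_between_check becoming requests-time_between_check-minutes) suggest the single minutes field was replaced by a nested sub-form covering several time units, which WTForms flattens into prefix-minutes style field names. A rough sketch of that pattern, with the class and field names assumed for illustration rather than taken from the codebase:

```python
from wtforms import Form, FormField, IntegerField, validators

class TimeBetweenCheckForm(Form):
    # Any subset may be filled in; an empty field falls back to the global default.
    weeks = IntegerField('Weeks', validators=[validators.Optional()])
    days = IntegerField('Days', validators=[validators.Optional()])
    hours = IntegerField('Hours', validators=[validators.Optional()])
    minutes = IntegerField('Minutes', validators=[validators.Optional()])
    seconds = IntegerField('Seconds', validators=[validators.Optional()])

class WatchForm(Form):
    # FormField's default "-" separator renders these as
    # "time_between_check-minutes" and so on, matching the test data keys.
    time_between_check = FormField(TimeBetweenCheckForm)
```
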
@@ -116,4 +116,46 @@ def test_xpath_validation(client, live_server):
        data={"css_filter": "/something horrible", "url": test_url, "tag": "", "headers": "", 'fetch_backend': "html_requests"},
        follow_redirects=True
    )
    assert b"is not a valid XPath expression" in res.data


# actually only really used by the distll.io importer, but could be handy too
def test_check_with_prefix_css_filter(client, live_server):
    res = client.get(url_for("api_delete", uuid="all"), follow_redirects=True)
    assert b'Deleted' in res.data

    # Give the endpoint time to spin up
    time.sleep(1)

    set_original_response()

    # Add our URL to the import page
    test_url = url_for('test_endpoint', _external=True)
    res = client.post(
        url_for("import_page"),
        data={"urls": test_url},
        follow_redirects=True
    )
    assert b"1 Imported" in res.data
    time.sleep(3)

    res = client.post(
        url_for("edit_page", uuid="first"),
        data={"css_filter": "xpath://*[contains(@class, 'sametext')]", "url": test_url, "tag": "", "headers": "", 'fetch_backend': "html_requests"},
        follow_redirects=True
    )

    assert b"Updated watch." in res.data
    time.sleep(3)

    res = client.get(
        url_for("preview_page", uuid="first"),
        follow_redirects=True
    )

    with open('/tmp/fuck.html', 'wb') as f:
        f.write(res.data)
    assert b"Some text thats the same" in res.data  # in selector
    assert b"Some text that will change" not in res.data  # not in selector

    client.get(url_for("api_delete", uuid="all"), follow_redirects=True)

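The new test exercises a css_filter value carrying an "xpath:" prefix, the form the distill.io importer emits. The filtering step presumably branches on that prefix, treating the remainder as XPath instead of a CSS selector. A minimal sketch of the idea using lxml; the helper name is illustrative, not necessarily the project's own:

```python
from lxml import etree, html

def apply_include_filter(rule, html_content):
    """Return the fragment(s) selected by the rule.
    Rules prefixed with 'xpath:' are treated as XPath, everything else as CSS."""
    rule = rule.strip()
    tree = html.fromstring(html_content)
    if rule.startswith('xpath:'):
        nodes = tree.xpath(rule[len('xpath:'):])
    else:
        nodes = tree.cssselect(rule)  # requires the cssselect package
    return "\n".join(etree.tostring(node, encoding='unicode') for node in nodes)

snippet = "<div class='sametext'>Some text thats the same</div><div>Some text that will change</div>"
print(apply_include_filter("xpath://*[contains(@class, 'sametext')]", snippet))
```
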
@@ -17,12 +17,19 @@ services:
# Alternative WebDriver/selenium URL, do not use "'s or 's!
# - WEBDRIVER_URL=http://browser-chrome:4444/wd/hub
#
# WebDriver proxy settings webdriver_proxyType, webdriver_ftpProxy, webdriver_httpProxy, webdriver_noProxy,
# webdriver_proxyAutoconfigUrl, webdriver_sslProxy, webdriver_autodetect,
# WebDriver proxy settings webdriver_proxyType, webdriver_ftpProxy, webdriver_noProxy,
# webdriver_proxyAutoconfigUrl, webdriver_autodetect,
# webdriver_socksProxy, webdriver_socksUsername, webdriver_socksVersion, webdriver_socksPassword
#
# https://selenium-python.readthedocs.io/api.html#module-selenium.webdriver.common.proxy
#
# Alternative Playwright URL, do not use "'s or 's!
# - PLAYWRIGHT_DRIVER_URL=ws://playwright-chrome:3000/
#
# Playwright proxy settings playwright_proxy_server, playwright_proxy_bypass, playwright_proxy_username, playwright_proxy_password
#
# https://playwright.dev/python/docs/api/class-browsertype#browser-type-launch-option-proxy
#
# Plain requsts - proxy support example.
# - HTTP_PROXY=socks5h://10.10.1.10:1080
# - HTTPS_PROXY=socks5h://10.10.1.10:1080
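The playwright_proxy_* settings listed above mirror Playwright's launch-time proxy option documented at the linked page, which takes a dict with server, bypass, username and password keys. A sketch of how such environment variables could be translated into that option using Playwright's sync API; the variable handling here is illustrative, not the application's actual wiring:

```python
import os
from playwright.sync_api import sync_playwright

# Map playwright_proxy_* environment variables onto Playwright's proxy option,
# e.g. playwright_proxy_server -> {"server": ...}.
proxy = {
    key[len("playwright_proxy_"):]: value
    for key, value in os.environ.items()
    if key.startswith("playwright_proxy_") and value
} or None

with sync_playwright() as p:
    browser = p.chromium.launch(proxy=proxy)
    page = browser.new_page()
    page.goto("https://example.com")
    browser.close()
```
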
@@ -58,6 +65,13 @@ services:
# # Workaround to avoid the browser crashing inside a docker container
# # See https://github.com/SeleniumHQ/docker-selenium#quick-start
# - /dev/shm:/dev/shm
# restart: unless-stopped

# Used for fetching pages via Playwright+Chrome where you need Javascript support.

# playwright-chrome:
#     hostname: playwright-chrome
#     image: browserless/chrome
#     restart: unless-stopped

volumes:
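The commented-out playwright-chrome service runs browserless/chrome, which exposes a WebSocket endpoint that Playwright can attach to instead of launching a browser locally. A minimal sketch of connecting to it from another container, assuming the PLAYWRIGHT_DRIVER_URL shown earlier (ws://playwright-chrome:3000/) and that the endpoint speaks the Chrome DevTools Protocol:

```python
import os
from playwright.sync_api import sync_playwright

driver_url = os.environ.get("PLAYWRIGHT_DRIVER_URL", "ws://playwright-chrome:3000/")

with sync_playwright() as p:
    # Attach to the already-running Chrome in the playwright-chrome container
    # rather than launching a browser inside this one.
    browser = p.chromium.connect_over_cdp(driver_url)
    page = browser.new_page()
    page.goto("https://example.com")
    print(page.content()[:200])
    browser.close()
```
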
@@ -17,7 +17,7 @@ wtforms ~= 3.0
jsonpath-ng ~= 1.5.3

# Notification library
apprise ~= 0.9.8.1
apprise ~= 0.9.8.3

# apprise mqtt https://github.com/dgtlmoon/changedetection.io/issues/315
paho-mqtt
@@ -40,3 +40,4 @@ selenium ~= 4.1.0
# need to revisit flask login versions
werkzeug ~= 2.0.0

# playwright is installed at Dockerfile build time because it's not available on all platforms
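Because the playwright wheel may simply fail to install on arm/v6 and arm/v7 (hence the Dockerfile-time install rather than a requirements.txt entry), the application has to cope with the package being absent at runtime. A minimal sketch of guarding the import so everything else keeps working when Playwright is missing; the function and backend names are illustrative only:

```python
# Try to use Playwright if it was installed at image build time; otherwise
# fall back gracefully so the requests/WebDriver fetchers still work.
try:
    from playwright.sync_api import sync_playwright
    PLAYWRIGHT_AVAILABLE = True
except ImportError:
    sync_playwright = None
    PLAYWRIGHT_AVAILABLE = False

def available_fetchers():
    fetchers = ["html_requests", "html_webdriver"]
    if PLAYWRIGHT_AVAILABLE:
        fetchers.append("html_playwright")  # illustrative backend name
    return fetchers
```
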